From 62cd7010ef91dcaa5a5a36790985053db0b38671 Mon Sep 17 00:00:00 2001
From: GitLab Bot
Date: Fri, 11 Jun 2021 15:09:58 +0000
Subject: [PATCH] Add latest changes from gitlab-org/gitlab@master

---
 .gitlab/ci/global.gitlab-ci.yml | 23 ++
 .gitlab/ci/rails.gitlab-ci.yml | 22 +-
 .gitlab/ci/rules.gitlab-ci.yml | 8 +
 app/assets/javascripts/api/analytics_api.js | 32 +++
 .../cycle_analytics/components/base.vue | 18 +-
 .../javascripts/cycle_analytics/constants.js | 6 +
 .../javascripts/cycle_analytics/index.js | 4 +-
 .../cycle_analytics/store/actions.js | 87 +++++--
 .../cycle_analytics/store/mutation_types.js | 10 +
 .../cycle_analytics/store/mutations.js | 43 +++-
 .../cycle_analytics/store/state.js | 4 +
 .../javascripts/cycle_analytics/utils.js | 49 +---
 app/assets/javascripts/issuable_form.js | 2 +-
 .../javascripts/lib/utils/url_utility.js | 18 +-
 app/assets/javascripts/monitoring/utils.js | 6 +-
 .../packages_and_registries/shared/utils.js | 3 +-
 app/assets/javascripts/search/index.js | 5 +-
 .../filtered_search_utils.js | 4 +-
 .../stylesheets/startup/startup-dark.scss | 22 +-
 .../stylesheets/startup/startup-general.scss | 22 +-
 .../appearances_controller.rb | 12 +-
 .../import/bulk_imports_controller.rb | 4 +-
 app/models/bulk_imports/export_status.rb | 2 +-
 app/models/ci/build.rb | 46 +++-
 app/models/ci/pending_build.rb | 2 +-
 app/models/ci/running_build.rb | 28 ++
 app/models/commit.rb | 2 +-
 app/models/concerns/enums/ci/commit_status.rb | 2 +-
 app/models/group_deploy_token.rb | 2 -
 .../boards/base_items_list_service.rb | 12 +-
 app/services/boards/issues/list_service.rb | 4 +-
 .../bulk_imports/file_download_service.rb | 2 +-
 .../drop_not_runnable_builds_service.rb | 91 -------
 .../start_pipeline_service.rb | 3 +-
 app/services/ci/update_build_queue_service.rb | 41 +++
 app/services/projects/create_service.rb | 27 +-
 .../projects/group_links/create_service.rb | 26 +-
 .../appearances/_form.html.haml | 12 +-
 .../_system_header_footer_form.html.haml | 0
 .../appearances/preview_sign_in.html.haml | 0
 .../appearances/show.html.haml | 0
 app/views/layouts/header/_default.html.haml | 2 +-
 .../layouts/nav/sidebar/_admin.html.haml | 20 +-
 .../nav/sidebar/_context_menu_body.html.haml | 2 +-
 .../layouts/nav/sidebar/_group.html.haml | 4 +-
 app/views/projects/blob/edit.html.haml | 12 +-
 .../projects/cycle_analytics/show.html.haml | 2 +-
 .../shared/nav/_scope_menu_body.html.haml | 2 +-
 .../bulk_imports/export_request_worker.rb | 2 +-
 .../development/allow_group_deploy_token.yml | 8 -
 .../ci_track_shared_runner_builds.yml | 8 +
 .../ci_untrack_shared_runner_builds.yml | 8 +
 .../development/sidebar_refactor.yml | 2 +-
 ...ect_authorization_project_share_worker.yml | 8 -
 ...cialized_project_authorization_workers.yml | 8 -
 config/routes/admin.rb | 18 +-
 ...20210601123341_add_running_builds_table.rb | 17 ++
 ...rary_index_on_security_findings_scan_id.rb | 18 ++
 ...d_protected_attribute_to_pending_builds.rb | 7 +
 ...e_protected_attribute_to_pending_builds.rb | 26 ++
 ...9_add_index_to_protected_pending_builds.rb | 17 ++
 db/schema_migrations/20210601123341 | 1 +
 db/schema_migrations/20210607080044 | 1 +
 db/schema_migrations/20210610102410 | 1 +
 db/schema_migrations/20210610102413 | 1 +
 db/schema_migrations/20210610113229 | 1 +
 db/structure.sql | 45 +++-
 doc/api/projects.md | 3 +-
 doc/development/pipelines.md | 1 +
 doc/topics/autodevops/customize.md | 32 ++-
 doc/topics/autodevops/stages.md | 81 +++---
 doc/user/admin_area/appearance.md | 2 +-
 lib/api/entities/snippet.rb | 10 +-
 lib/api/projects.rb | 1 +
 lib/bulk_imports/clients/http.rb | 2 +-
 .../common/extractors/rest_extractor.rb | 2 +-
 .../groups/extractors/subgroups_extractor.rb | 2 +-
 lib/gitlab/ci/queue/metrics.rb | 4 +-
 .../ci/templates/Jobs/Build.gitlab-ci.yml | 6 +-
 locale/gitlab.pot | 6 +
 qa/qa.rb | 28 +-
 qa/qa/page/group/menu.rb | 8 +-
 .../environments/index.rb | 2 +-
 .../environments/show.rb | 2 +-
 .../kubernetes/add.rb | 2 +-
 .../kubernetes/add_existing.rb | 4 +-
 .../kubernetes/index.rb | 2 +-
 .../kubernetes/show.rb | 4 +-
 qa/qa/page/project/menu.rb | 25 +-
 .../incidents/index.rb | 2 +-
 .../{operations => monitor}/metrics/show.rb | 4 +-
 .../settings/{operations.rb => monitor.rb} | 2 +-
 qa/qa/page/project/sub_menus/deployments.rb | 40 +++
 .../page/project/sub_menus/infrastructure.rb | 40 +++
 .../sub_menus/{operations.rb => monitor.rb} | 32 +--
 qa/qa/page/project/sub_menus/project.rb | 6 +-
 qa/qa/page/project/sub_menus/settings.rb | 4 +-
 .../kubernetes_cluster/project_cluster.rb | 12 +-
 .../create_and_process_pipeline_spec.rb | 8 +-
 .../create_project_with_auto_devops_spec.rb | 6 +-
 .../kubernetes/kubernetes_integration_spec.rb | 4 +-
 .../all_monitor_core_features_spec.rb | 34 +--
 scripts/debug-rubocop | 36 ---
 scripts/gitaly-test-build | 7 +-
 .../appearances_controller_spec.rb | 2 +-
 .../import/bulk_imports_controller_spec.rb | 4 +-
 spec/features/admin/admin_appearance_spec.rb | 20 +-
 .../admin/admin_search_settings_spec.rb | 2 +-
 spec/frontend/cycle_analytics/base_spec.js | 9 +-
 spec/frontend/cycle_analytics/mock_data.js | 95 ++++---
 .../cycle_analytics/store/actions_spec.js | 165 ++++++++++--
 .../cycle_analytics/store/mutations_spec.js | 34 ++-
 spec/frontend/cycle_analytics/utils_spec.js | 29 ---
 spec/frontend/lib/utils/url_utility_spec.js | 57 ++---
 spec/lib/bulk_imports/clients/http_spec.rb | 2 +-
 .../common/extractors/rest_extractor_spec.rb | 2 +-
 .../extractors/subgroups_extractor_spec.rb | 2 +-
 ...tected_attribute_to_pending_builds_spec.rb | 33 +++
 .../models/bulk_imports/export_status_spec.rb | 4 +-
 spec/models/ci/build_spec.rb | 58 +++++
 spec/models/ci/pending_build_spec.rb | 2 +-
 spec/models/ci/running_build_spec.rb | 55 ++++
 spec/models/concerns/issuable_spec.rb | 2 +-
 spec/models/deploy_token_spec.rb | 82 +++---
 spec/requests/api/projects_spec.rb | 45 ++++
 .../file_download_service_spec.rb | 2 +-
 .../ci/create_pipeline_service/needs_spec.rb | 42 +--
 .../drop_not_runnable_builds_service_spec.rb | 80 ------
 .../start_pipeline_service_spec.rb | 9 -
 spec/services/ci/retry_build_service_spec.rb | 2 +-
 .../ci/update_build_queue_service_spec.rb | 241 +++++++++++++----
 spec/services/projects/create_service_spec.rb | 32 +--
 .../group_links/create_service_spec.rb | 22 +-
 .../export_request_worker_spec.rb | 2 +-
 .../initial_pipeline_process_worker_spec.rb | 18 +-
 135 files changed, 1537 insertions(+), 1030 deletions(-)
 create mode 100644 app/assets/javascripts/api/analytics_api.js
 rename app/controllers/admin/{ => application_settings}/appearances_controller.rb (68%)
 create mode 100644 app/models/ci/running_build.rb
 delete mode 100644 app/services/ci/pipeline_creation/drop_not_runnable_builds_service.rb
 rename app/views/admin/{ => application_settings}/appearances/_form.html.haml (80%)
 rename app/views/admin/{ => application_settings}/appearances/_system_header_footer_form.html.haml (100%)
 rename app/views/admin/{ => application_settings}/appearances/preview_sign_in.html.haml (100%)
 rename app/views/admin/{ => application_settings}/appearances/show.html.haml (100%)
 delete mode 100644 
config/feature_flags/development/allow_group_deploy_token.yml create mode 100644 config/feature_flags/development/ci_track_shared_runner_builds.yml create mode 100644 config/feature_flags/development/ci_untrack_shared_runner_builds.yml delete mode 100644 config/feature_flags/development/specialized_project_authorization_project_share_worker.yml delete mode 100644 config/feature_flags/development/specialized_project_authorization_workers.yml create mode 100644 db/migrate/20210601123341_add_running_builds_table.rb create mode 100644 db/migrate/20210607080044_remove_temporary_index_on_security_findings_scan_id.rb create mode 100644 db/migrate/20210610102410_add_protected_attribute_to_pending_builds.rb create mode 100644 db/post_migrate/20210610102413_migrate_protected_attribute_to_pending_builds.rb create mode 100644 db/post_migrate/20210610113229_add_index_to_protected_pending_builds.rb create mode 100644 db/schema_migrations/20210601123341 create mode 100644 db/schema_migrations/20210607080044 create mode 100644 db/schema_migrations/20210610102410 create mode 100644 db/schema_migrations/20210610102413 create mode 100644 db/schema_migrations/20210610113229 rename qa/qa/page/project/{operations => deployments}/environments/index.rb (95%) rename qa/qa/page/project/{operations => deployments}/environments/show.rb (95%) rename qa/qa/page/project/{operations => infrastructure}/kubernetes/add.rb (93%) rename qa/qa/page/project/{operations => infrastructure}/kubernetes/add_existing.rb (95%) rename qa/qa/page/project/{operations => infrastructure}/kubernetes/index.rb (95%) rename qa/qa/page/project/{operations => infrastructure}/kubernetes/show.rb (98%) rename qa/qa/page/project/{operations => monitor}/incidents/index.rb (94%) rename qa/qa/page/project/{operations => monitor}/metrics/show.rb (96%) rename qa/qa/page/project/settings/{operations.rb => monitor.rb} (93%) create mode 100644 qa/qa/page/project/sub_menus/deployments.rb create mode 100644 qa/qa/page/project/sub_menus/infrastructure.rb rename qa/qa/page/project/sub_menus/{operations.rb => monitor.rb} (53%) delete mode 100755 scripts/debug-rubocop rename spec/controllers/admin/{ => application_settings}/appearances_controller_spec.rb (97%) create mode 100644 spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb create mode 100644 spec/models/ci/running_build_spec.rb delete mode 100644 spec/services/ci/pipeline_creation/drop_not_runnable_builds_service_spec.rb diff --git a/.gitlab/ci/global.gitlab-ci.yml b/.gitlab/ci/global.gitlab-ci.yml index 3ae7e961aa5..5d17fad0c47 100644 --- a/.gitlab/ci/global.gitlab-ci.yml +++ b/.gitlab/ci/global.gitlab-ci.yml @@ -36,6 +36,23 @@ <<: *gitaly-ruby-gems-cache policy: push # We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up. 
+.gitaly-binaries-cache: &gitaly-binaries-cache + key: + files: + - GITALY_SERVER_VERSION + prefix: "gitaly-binaries" + paths: + - tmp/tests/gitaly/_build/bin/ + - tmp/tests/gitaly/config.toml + - tmp/tests/gitaly/gitaly2.config.toml + - tmp/tests/gitaly/internal/ + - tmp/tests/gitaly/internal_gitaly2/ + - tmp/tests/gitaly/internal_sockets/ + - tmp/tests/gitaly/Makefile + - tmp/tests/gitaly/praefect.config.toml + - tmp/tests/gitaly/ruby/ + policy: pull + .go-pkg-cache: &go-pkg-cache key: "go-pkg-v1" paths: @@ -97,6 +114,7 @@ cache: - *ruby-gems-cache - *gitaly-ruby-gems-cache + - *gitaly-binaries-cache - *go-pkg-cache .setup-test-env-cache-push: @@ -105,6 +123,11 @@ - *gitaly-ruby-gems-cache-push - *go-pkg-cache-push +.gitaly-binaries-cache-push: + cache: + - <<: *gitaly-binaries-cache + policy: push # We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up. + .rails-cache: cache: - *ruby-gems-cache diff --git a/.gitlab/ci/rails.gitlab-ci.yml b/.gitlab/ci/rails.gitlab-ci.yml index a30c4ebd413..c4d393eb16c 100644 --- a/.gitlab/ci/rails.gitlab-ci.yml +++ b/.gitlab/ci/rails.gitlab-ci.yml @@ -177,6 +177,14 @@ update-setup-test-env-cache: artifacts: paths: [] # This job's purpose is only to update the cache. +update-gitaly-binaries-cache: + extends: + - setup-test-env + - .gitaly-binaries-cache-push + - .shared:rules:update-gitaly-binaries-cache + artifacts: + paths: [] # This job's purpose is only to update the cache. + .coverage-base: extends: - .default-retry @@ -203,13 +211,6 @@ update-static-analysis-cache: - .shared:rules:update-cache stage: prepare script: - - git log -n 1 --pretty=format:%H -- .rubocop.yml - - git log -n 1 --pretty=format:%H -- .rubocop_manual_todo.yml - - git log -n 1 --pretty=format:%H -- .rubocop_todo.yml - - git log -n 1 --pretty=format:%H -- rubocop/rubocop-migrations.yml - - git log -n 1 --pretty=format:%H -- rubocop/rubocop-usage-data.yml - - git log -n 1 --pretty=format:%H -- rubocop/rubocop-code_reuse.yml - - bundle exec scripts/debug-rubocop spec/factories/namespace/aggregation_schedules.rb - run_timed_command "bundle exec rubocop --parallel" # For the moment we only cache `tmp/rubocop_cache` so we don't need to run all the tasks. 
static-analysis: @@ -220,13 +221,6 @@ static-analysis: parallel: 4 script: - run_timed_command "retry yarn install --frozen-lockfile" - - git log -n 1 --pretty=format:%H -- .rubocop.yml - - git log -n 1 --pretty=format:%H -- .rubocop_manual_todo.yml - - git log -n 1 --pretty=format:%H -- .rubocop_todo.yml - - git log -n 1 --pretty=format:%H -- rubocop/rubocop-migrations.yml - - git log -n 1 --pretty=format:%H -- rubocop/rubocop-usage-data.yml - - git log -n 1 --pretty=format:%H -- rubocop/rubocop-code_reuse.yml - - bundle exec scripts/debug-rubocop spec/factories/namespace/aggregation_schedules.rb - scripts/static-analysis static-analysis as-if-foss: diff --git a/.gitlab/ci/rules.gitlab-ci.yml b/.gitlab/ci/rules.gitlab-ci.yml index affbeedc25a..8dd97c1fe69 100644 --- a/.gitlab/ci/rules.gitlab-ci.yml +++ b/.gitlab/ci/rules.gitlab-ci.yml @@ -111,6 +111,9 @@ - ".gitlab/ci/build-images.gitlab-ci.yml" - ".gitlab/ci/qa.gitlab-ci.yml" +.gitaly-patterns: &gitaly-patterns + - "GITALY_SERVER_VERSION" + .workhorse-patterns: &workhorse-patterns - "GITLAB_WORKHORSE_VERSION" - "workhorse/**/*" @@ -310,6 +313,11 @@ - <<: *if-security-schedule - <<: *if-merge-request-title-update-caches +.shared:rules:update-gitaly-binaries-cache: + rules: + - <<: *if-merge-request-title-update-caches + - changes: *gitaly-patterns + ###################### # Build images rules # ###################### diff --git a/app/assets/javascripts/api/analytics_api.js b/app/assets/javascripts/api/analytics_api.js new file mode 100644 index 00000000000..58494c5a2b8 --- /dev/null +++ b/app/assets/javascripts/api/analytics_api.js @@ -0,0 +1,32 @@ +import axios from '~/lib/utils/axios_utils'; +import { buildApiUrl } from './api_utils'; + +const PROJECT_VSA_PATH_BASE = '/:project_path/-/analytics/value_stream_analytics/value_streams'; +const PROJECT_VSA_STAGES_PATH = `${PROJECT_VSA_PATH_BASE}/:value_stream_id/stages`; + +const buildProjectValueStreamPath = (projectPath, valueStreamId = null) => { + if (valueStreamId) { + return buildApiUrl(PROJECT_VSA_STAGES_PATH) + .replace(':project_path', projectPath) + .replace(':value_stream_id', valueStreamId); + } + return buildApiUrl(PROJECT_VSA_PATH_BASE).replace(':project_path', projectPath); +}; + +export const getProjectValueStreams = (projectPath) => { + const url = buildProjectValueStreamPath(projectPath); + return axios.get(url); +}; + +export const getProjectValueStreamStages = (projectPath, valueStreamId) => { + const url = buildProjectValueStreamPath(projectPath, valueStreamId); + return axios.get(url); +}; + +// NOTE: legacy VSA request use a different path +// the `requestPath` provides a full url for the request +export const getProjectValueStreamStageData = ({ requestPath, stageId, params }) => + axios.get(`${requestPath}/events/${stageId}`, { params }); + +export const getProjectValueStreamMetrics = (requestPath, params) => + axios.get(requestPath, { params }); diff --git a/app/assets/javascripts/cycle_analytics/components/base.vue b/app/assets/javascripts/cycle_analytics/components/base.vue index 8c1fecac3fc..8492f0b73e1 100644 --- a/app/assets/javascripts/cycle_analytics/components/base.vue +++ b/app/assets/javascripts/cycle_analytics/components/base.vue @@ -58,6 +58,7 @@ export default { 'stages', 'summary', 'startDate', + 'permissions', ]), ...mapGetters(['pathNavigationData']), displayStageEvents() { @@ -68,7 +69,7 @@ export default { return this.selectedStageReady && this.isEmptyStage; }, displayNoAccess() { - return this.selectedStageReady && 
!this.selectedStage.isUserAllowed; + return this.selectedStageReady && !this.isUserAllowed(this.selectedStage.id); }, selectedStageReady() { return !this.isLoadingStage && this.selectedStage; @@ -91,25 +92,18 @@ export default { ]), handleDateSelect(startDate) { this.setDateRange({ startDate }); - this.fetchCycleAnalyticsData(); - }, - isActiveStage(stage) { - return stage.slug === this.selectedStage.slug; }, onSelectStage(stage) { - if (this.isLoadingStage || this.selectedStage?.slug === stage?.slug) return; - this.setSelectedStage(stage); - if (!stage.isUserAllowed) { - return; - } - - this.fetchStageData(); }, dismissOverviewDialog() { this.isOverviewDialogDismissed = true; Cookies.set(OVERVIEW_DIALOG_COOKIE, '1', { expires: 365 }); }, + isUserAllowed(id) { + const { permissions } = this; + return Boolean(permissions?.[id]); + }, }, dayRangeOptions: [7, 30, 90], i18n: { diff --git a/app/assets/javascripts/cycle_analytics/constants.js b/app/assets/javascripts/cycle_analytics/constants.js index 50b5ebba583..96c89049e90 100644 --- a/app/assets/javascripts/cycle_analytics/constants.js +++ b/app/assets/javascripts/cycle_analytics/constants.js @@ -1,2 +1,8 @@ export const DEFAULT_DAYS_TO_DISPLAY = 30; export const OVERVIEW_STAGE_ID = 'overview'; + +export const DEFAULT_VALUE_STREAM = { + id: 'default', + slug: 'default', + name: 'default', +}; diff --git a/app/assets/javascripts/cycle_analytics/index.js b/app/assets/javascripts/cycle_analytics/index.js index 00192cc61f8..57cb220d9c9 100644 --- a/app/assets/javascripts/cycle_analytics/index.js +++ b/app/assets/javascripts/cycle_analytics/index.js @@ -8,10 +8,11 @@ Vue.use(Translate); export default () => { const store = createStore(); const el = document.querySelector('#js-cycle-analytics'); - const { noAccessSvgPath, noDataSvgPath, requestPath } = el.dataset; + const { noAccessSvgPath, noDataSvgPath, requestPath, fullPath } = el.dataset; store.dispatch('initializeVsa', { requestPath, + fullPath, }); // eslint-disable-next-line no-new @@ -24,6 +25,7 @@ export default () => { props: { noDataSvgPath, noAccessSvgPath, + fullPath, }, }), }); diff --git a/app/assets/javascripts/cycle_analytics/store/actions.js b/app/assets/javascripts/cycle_analytics/store/actions.js index 40e1c01f78b..faf1c37d86a 100644 --- a/app/assets/javascripts/cycle_analytics/store/actions.js +++ b/app/assets/javascripts/cycle_analytics/store/actions.js @@ -1,27 +1,60 @@ +import { + getProjectValueStreamStages, + getProjectValueStreams, + getProjectValueStreamStageData, + getProjectValueStreamMetrics, +} from '~/api/analytics_api'; import createFlash from '~/flash'; -import axios from '~/lib/utils/axios_utils'; import { __ } from '~/locale'; -import { DEFAULT_DAYS_TO_DISPLAY } from '../constants'; +import { DEFAULT_DAYS_TO_DISPLAY, DEFAULT_VALUE_STREAM } from '../constants'; import * as types from './mutation_types'; -export const fetchCycleAnalyticsData = ({ - state: { requestPath, startDate }, - dispatch, - commit, -}) => { +export const setSelectedValueStream = ({ commit, dispatch }, valueStream) => { + commit(types.SET_SELECTED_VALUE_STREAM, valueStream); + return dispatch('fetchValueStreamStages'); +}; + +export const fetchValueStreamStages = ({ commit, state }) => { + const { fullPath, selectedValueStream } = state; + commit(types.REQUEST_VALUE_STREAM_STAGES); + + return getProjectValueStreamStages(fullPath, selectedValueStream.id) + .then(({ data }) => commit(types.RECEIVE_VALUE_STREAM_STAGES_SUCCESS, data)) + .catch(({ response: { status } }) => { + 
commit(types.RECEIVE_VALUE_STREAM_STAGES_ERROR, status); + }); +}; + +export const receiveValueStreamsSuccess = ({ commit, dispatch }, data = []) => { + commit(types.RECEIVE_VALUE_STREAMS_SUCCESS, data); + if (data.length) { + const [firstStream] = data; + return dispatch('setSelectedValueStream', firstStream); + } + return dispatch('setSelectedValueStream', DEFAULT_VALUE_STREAM); +}; + +export const fetchValueStreams = ({ commit, dispatch, state }) => { + const { fullPath } = state; + commit(types.REQUEST_VALUE_STREAMS); + + return getProjectValueStreams(fullPath) + .then(({ data }) => dispatch('receiveValueStreamsSuccess', data)) + .then(() => dispatch('setSelectedStage')) + .catch(({ response: { status } }) => { + commit(types.RECEIVE_VALUE_STREAMS_ERROR, status); + }); +}; + +export const fetchCycleAnalyticsData = ({ state: { requestPath, startDate }, commit }) => { commit(types.REQUEST_CYCLE_ANALYTICS_DATA); - return axios - .get(requestPath, { - params: { 'cycle_analytics[start_date]': startDate }, - }) + return getProjectValueStreamMetrics(requestPath, { 'cycle_analytics[start_date]': startDate }) .then(({ data }) => commit(types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS, data)) - .then(() => dispatch('setSelectedStage')) - .then(() => dispatch('fetchStageData')) .catch(() => { commit(types.RECEIVE_CYCLE_ANALYTICS_DATA_ERROR); createFlash({ - message: __('There was an error while fetching value stream analytics data.'), + message: __('There was an error while fetching value stream summary data.'), }); }); }; @@ -29,10 +62,11 @@ export const fetchCycleAnalyticsData = ({ export const fetchStageData = ({ state: { requestPath, selectedStage, startDate }, commit }) => { commit(types.REQUEST_STAGE_DATA); - return axios - .get(`${requestPath}/events/${selectedStage.name}.json`, { - params: { 'cycle_analytics[start_date]': startDate }, - }) + return getProjectValueStreamStageData({ + requestPath, + stageId: selectedStage.id, + params: { 'cycle_analytics[start_date]': startDate }, + }) .then(({ data }) => { // when there's a query timeout, the request succeeds but the error is encoded in the response data if (data?.error) { @@ -44,15 +78,26 @@ export const fetchStageData = ({ state: { requestPath, selectedStage, startDate .catch(() => commit(types.RECEIVE_STAGE_DATA_ERROR)); }; -export const setSelectedStage = ({ commit, state: { stages } }, selectedStage = null) => { +export const setSelectedStage = ({ dispatch, commit, state: { stages } }, selectedStage = null) => { const stage = selectedStage || stages[0]; commit(types.SET_SELECTED_STAGE, stage); + return dispatch('fetchStageData'); }; -export const setDateRange = ({ commit }, { startDate = DEFAULT_DAYS_TO_DISPLAY }) => +const refetchData = (dispatch, commit) => { + commit(types.SET_LOADING, true); + return Promise.resolve() + .then(() => dispatch('fetchValueStreams')) + .then(() => dispatch('fetchCycleAnalyticsData')) + .finally(() => commit(types.SET_LOADING, false)); +}; + +export const setDateRange = ({ dispatch, commit }, { startDate = DEFAULT_DAYS_TO_DISPLAY }) => { commit(types.SET_DATE_RANGE, { startDate }); + return refetchData(dispatch, commit); +}; export const initializeVsa = ({ commit, dispatch }, initialData = {}) => { commit(types.INITIALIZE_VSA, initialData); - return dispatch('fetchCycleAnalyticsData'); + return refetchData(dispatch, commit); }; diff --git a/app/assets/javascripts/cycle_analytics/store/mutation_types.js b/app/assets/javascripts/cycle_analytics/store/mutation_types.js index 00aae49ae9f..4f3d430ec9f 100644 --- 
a/app/assets/javascripts/cycle_analytics/store/mutation_types.js +++ b/app/assets/javascripts/cycle_analytics/store/mutation_types.js @@ -1,8 +1,18 @@ export const INITIALIZE_VSA = 'INITIALIZE_VSA'; +export const SET_LOADING = 'SET_LOADING'; +export const SET_SELECTED_VALUE_STREAM = 'SET_SELECTED_VALUE_STREAM'; export const SET_SELECTED_STAGE = 'SET_SELECTED_STAGE'; export const SET_DATE_RANGE = 'SET_DATE_RANGE'; +export const REQUEST_VALUE_STREAMS = 'REQUEST_VALUE_STREAMS'; +export const RECEIVE_VALUE_STREAMS_SUCCESS = 'RECEIVE_VALUE_STREAMS_SUCCESS'; +export const RECEIVE_VALUE_STREAMS_ERROR = 'RECEIVE_VALUE_STREAMS_ERROR'; + +export const REQUEST_VALUE_STREAM_STAGES = 'REQUEST_VALUE_STREAM_STAGES'; +export const RECEIVE_VALUE_STREAM_STAGES_SUCCESS = 'RECEIVE_VALUE_STREAM_STAGES_SUCCESS'; +export const RECEIVE_VALUE_STREAM_STAGES_ERROR = 'RECEIVE_VALUE_STREAM_STAGES_ERROR'; + export const REQUEST_CYCLE_ANALYTICS_DATA = 'REQUEST_CYCLE_ANALYTICS_DATA'; export const RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS = 'RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS'; export const RECEIVE_CYCLE_ANALYTICS_DATA_ERROR = 'RECEIVE_CYCLE_ANALYTICS_DATA_ERROR'; diff --git a/app/assets/javascripts/cycle_analytics/store/mutations.js b/app/assets/javascripts/cycle_analytics/store/mutations.js index 4d999b056b7..0ae80116cd2 100644 --- a/app/assets/javascripts/cycle_analytics/store/mutations.js +++ b/app/assets/javascripts/cycle_analytics/store/mutations.js @@ -1,34 +1,61 @@ +import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; import { decorateData, decorateEvents, formatMedianValues } from '../utils'; import * as types from './mutation_types'; export default { - [types.INITIALIZE_VSA](state, { requestPath }) { + [types.INITIALIZE_VSA](state, { requestPath, fullPath }) { state.requestPath = requestPath; + state.fullPath = fullPath; + }, + [types.SET_LOADING](state, loadingState) { + state.isLoading = loadingState; + }, + [types.SET_SELECTED_VALUE_STREAM](state, selectedValueStream = {}) { + state.selectedValueStream = convertObjectPropsToCamelCase(selectedValueStream, { deep: true }); }, [types.SET_SELECTED_STAGE](state, stage) { - state.isLoadingStage = true; state.selectedStage = stage; - state.isLoadingStage = false; }, [types.SET_DATE_RANGE](state, { startDate }) { state.startDate = startDate; }, + [types.REQUEST_VALUE_STREAMS](state) { + state.valueStreams = []; + }, + [types.RECEIVE_VALUE_STREAMS_SUCCESS](state, valueStreams = []) { + state.valueStreams = valueStreams; + }, + [types.RECEIVE_VALUE_STREAMS_ERROR](state) { + state.valueStreams = []; + }, + [types.REQUEST_VALUE_STREAM_STAGES](state) { + state.stages = []; + }, + [types.RECEIVE_VALUE_STREAM_STAGES_SUCCESS](state, { stages = [] }) { + state.stages = stages.map((s) => ({ + ...convertObjectPropsToCamelCase(s, { deep: true }), + // NOTE: we set the component type here to match the current behaviour + // this can be removed when we migrate to the update stage table + // https://gitlab.com/gitlab-org/gitlab/-/issues/326704 + component: `stage-${s.id}-component`, + })); + }, + [types.RECEIVE_VALUE_STREAM_STAGES_ERROR](state) { + state.stages = []; + }, [types.REQUEST_CYCLE_ANALYTICS_DATA](state) { state.isLoading = true; - state.stages = []; state.hasError = false; }, [types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS](state, data) { - state.isLoading = false; - const { stages, summary, medians } = decorateData(data); - state.stages = stages; + const { summary, medians } = decorateData(data); + state.permissions = data.permissions; state.summary = 
summary; state.medians = formatMedianValues(medians); state.hasError = false; }, [types.RECEIVE_CYCLE_ANALYTICS_DATA_ERROR](state) { state.isLoading = false; - state.stages = []; state.hasError = true; }, [types.REQUEST_STAGE_DATA](state) { diff --git a/app/assets/javascripts/cycle_analytics/store/state.js b/app/assets/javascripts/cycle_analytics/store/state.js index b488340943a..02f953d9517 100644 --- a/app/assets/javascripts/cycle_analytics/store/state.js +++ b/app/assets/javascripts/cycle_analytics/store/state.js @@ -2,11 +2,14 @@ import { DEFAULT_DAYS_TO_DISPLAY } from '../constants'; export default () => ({ requestPath: '', + fullPath: '', startDate: DEFAULT_DAYS_TO_DISPLAY, stages: [], summary: [], analytics: [], stats: [], + valueStreams: [], + selectedValueStream: {}, selectedStage: {}, selectedStageEvents: [], selectedStageError: '', @@ -15,4 +18,5 @@ export default () => ({ isLoading: false, isLoadingStage: false, isEmptyStage: false, + permissions: {}, }); diff --git a/app/assets/javascripts/cycle_analytics/utils.js b/app/assets/javascripts/cycle_analytics/utils.js index 8aa4b88a374..40ad7d8b2fc 100644 --- a/app/assets/javascripts/cycle_analytics/utils.js +++ b/app/assets/javascripts/cycle_analytics/utils.js @@ -2,31 +2,9 @@ import { unescape } from 'lodash'; import { sanitize } from '~/lib/dompurify'; import { roundToNearestHalf, convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; import { parseSeconds } from '~/lib/utils/datetime_utility'; -import { dasherize } from '~/lib/utils/text_utility'; -import { __, s__, sprintf } from '../locale'; +import { s__, sprintf } from '../locale'; import DEFAULT_EVENT_OBJECTS from './default_event_objects'; -const EMPTY_STAGE_TEXTS = { - issue: __( - 'The issue stage shows the time it takes from creating an issue to assigning the issue to a milestone, or add the issue to a list on your Issue Board. Begin creating issues to see data for this stage.', - ), - plan: __( - 'The planning stage shows the time from the previous step to pushing your first commit. This time will be added automatically once you push your first commit.', - ), - code: __( - 'The coding stage shows the time from the first commit to creating the merge request. The data will automatically be added here once you create your first merge request.', - ), - test: __( - 'The testing stage shows the time GitLab CI takes to run every pipeline for the related merge request. The data will automatically be added after your first pipeline finishes running.', - ), - review: __( - 'The review stage shows the time from creating the merge request to merging it. The data will automatically be added after you merge your first merge request.', - ), - staging: __( - 'The staging stage shows the time between merging the MR and deploying code to the production environment. 
The data will be automatically added once you deploy to production for the first time.', - ), -}; - /** * These `decorate` methods will be removed when me migrate to the * new table layout https://gitlab.com/gitlab-org/gitlab/-/issues/326704 @@ -43,33 +21,12 @@ const mapToEvent = (event, stage) => { export const decorateEvents = (events, stage) => events.map((event) => mapToEvent(event, stage)); -/* - * NOTE: We currently use the `name` field since the project level stages are in memory - * once we migrate to a default value stream https://gitlab.com/gitlab-org/gitlab/-/issues/326705 - * we can use the `id` to identify which median we are using - */ -const mapToStage = (permissions, { name, ...rest }) => { - const slug = dasherize(name.toLowerCase()); - return { - ...rest, - name, - id: name, - slug, - active: false, - isUserAllowed: permissions[slug], - emptyStageText: EMPTY_STAGE_TEXTS[slug], - component: `stage-${slug}-component`, - }; -}; - const mapToSummary = ({ value, ...rest }) => ({ ...rest, value: value || '-' }); -const mapToMedians = ({ id, value }) => ({ id, value }); +const mapToMedians = ({ name: id, value }) => ({ id, value }); export const decorateData = (data = {}) => { - const { permissions, stats, summary } = data; - const stages = stats?.map((item) => mapToStage(permissions, item)) || []; + const { stats: stages, summary } = data; return { - stages, summary: summary?.map((item) => mapToSummary(item)) || [], medians: stages?.map((item) => mapToMedians(item)) || [], }; diff --git a/app/assets/javascripts/issuable_form.js b/app/assets/javascripts/issuable_form.js index 9a1ab23e366..fd9e3d5c916 100644 --- a/app/assets/javascripts/issuable_form.js +++ b/app/assets/javascripts/issuable_form.js @@ -31,7 +31,7 @@ function organizeQuery(obj, isFallbackKey = false) { } function format(searchTerm, isFallbackKey = false) { - const queryObject = queryToObject(searchTerm); + const queryObject = queryToObject(searchTerm, { legacySpacesDecode: true }); const organizeQueryObject = organizeQuery(queryObject, isFallbackKey); const formattedQuery = objectToQuery(organizeQueryObject); diff --git a/app/assets/javascripts/lib/utils/url_utility.js b/app/assets/javascripts/lib/utils/url_utility.js index a989a1f77da..303d6d1dba9 100644 --- a/app/assets/javascripts/lib/utils/url_utility.js +++ b/app/assets/javascripts/lib/utils/url_utility.js @@ -414,29 +414,35 @@ export function getWebSocketUrl(path) { * * @param {String} query from "document.location.search" * @param {Object} options - * @param {Boolean} options.gatherArrays - gather array values into an Array + * @param {Boolean?} options.gatherArrays - gather array values into an Array + * @param {Boolean?} options.legacySpacesDecode - (deprecated) plus symbols (+) are not replaced with spaces, false by default * @returns {Object} * * ex: "?one=1&two=2" into {one: 1, two: 2} */ -export function queryToObject(query, options = {}) { - const { gatherArrays = false } = options; +export function queryToObject(query, { gatherArrays = false, legacySpacesDecode = false } = {}) { const removeQuestionMarkFromQuery = String(query).startsWith('?') ? query.slice(1) : query; return removeQuestionMarkFromQuery.split('&').reduce((accumulator, curr) => { const [key, value] = curr.split('='); if (value === undefined) { return accumulator; } - const decodedValue = decodeURIComponent(value); + + const decodedValue = legacySpacesDecode ? 
decodeURIComponent(value) : decodeUrlParameter(value); if (gatherArrays && key.endsWith('[]')) { - const decodedKey = decodeURIComponent(key.slice(0, -2)); + const decodedKey = legacySpacesDecode + ? decodeURIComponent(key.slice(0, -2)) + : decodeUrlParameter(key.slice(0, -2)); + if (!Array.isArray(accumulator[decodedKey])) { accumulator[decodedKey] = []; } accumulator[decodedKey].push(decodedValue); } else { - accumulator[decodeURIComponent(key)] = decodedValue; + const decodedKey = legacySpacesDecode ? decodeURIComponent(key) : decodeUrlParameter(key); + + accumulator[decodedKey] = decodedValue; } return accumulator; diff --git a/app/assets/javascripts/monitoring/utils.js b/app/assets/javascripts/monitoring/utils.js index 8adf1862af2..74b777d7b44 100644 --- a/app/assets/javascripts/monitoring/utils.js +++ b/app/assets/javascripts/monitoring/utils.js @@ -175,7 +175,7 @@ export const graphDataValidatorForAnomalyValues = (graphData) => { * Returns `null` if no parameters form a time range. */ export const timeRangeFromUrl = (search = window.location.search) => { - const params = queryToObject(search); + const params = queryToObject(search, { legacySpacesDecode: true }); return timeRangeFromParams(params); }; @@ -228,7 +228,7 @@ export const convertVariablesForURL = (variables) => * @returns {Object} The custom variables defined by the user in the URL */ export const templatingVariablesFromUrl = (search = window.location.search) => { - const params = queryToObject(search); + const params = queryToObject(search, { legacySpacesDecode: true }); // pick the params with variable prefix const paramsWithVars = pickBy(params, (val, key) => key.startsWith(VARIABLE_PREFIX)); // remove the prefix before storing in the Vuex store @@ -289,7 +289,7 @@ export const timeRangeToUrl = (timeRange, url = window.location.href) => { * @throws Will throw an error if Panel cannot be located. */ export const expandedPanelPayloadFromUrl = (dashboard, search = window.location.search) => { - const params = queryToObject(search); + const params = queryToObject(search, { legacySpacesDecode: true }); // Search for the panel if any of the search params is identified if (params.group || params.title || params.y_label) { diff --git a/app/assets/javascripts/packages_and_registries/shared/utils.js b/app/assets/javascripts/packages_and_registries/shared/utils.js index cc5c7ce82bf..93eb90535d1 100644 --- a/app/assets/javascripts/packages_and_registries/shared/utils.js +++ b/app/assets/javascripts/packages_and_registries/shared/utils.js @@ -1,7 +1,8 @@ import { queryToObject } from '~/lib/utils/url_utility'; import { FILTERED_SEARCH_TERM } from './constants'; -export const getQueryParams = (query) => queryToObject(query, { gatherArrays: true }); +export const getQueryParams = (query) => + queryToObject(query, { gatherArrays: true, legacySpacesDecode: true }); export const keyValueToFilterToken = (type, data) => ({ type, value: { data } }); diff --git a/app/assets/javascripts/search/index.js b/app/assets/javascripts/search/index.js index 10c41315972..d9d4056466a 100644 --- a/app/assets/javascripts/search/index.js +++ b/app/assets/javascripts/search/index.js @@ -8,10 +8,7 @@ import createStore from './store'; import { initTopbar } from './topbar'; export const initSearchApp = () => { - // Similar to url_utility.decodeUrlParameter - // Our query treats + as %20. This replaces the query + symbols with %20. 
- const sanitizedSearch = window.location.search.replace(/\+/g, '%20'); - const query = queryToObject(sanitizedSearch); + const query = queryToObject(window.location.search); const store = createStore({ query }); diff --git a/app/assets/javascripts/vue_shared/components/filtered_search_bar/filtered_search_utils.js b/app/assets/javascripts/vue_shared/components/filtered_search_bar/filtered_search_utils.js index e5c8d29e09b..4b1b09adcfd 100644 --- a/app/assets/javascripts/vue_shared/components/filtered_search_bar/filtered_search_utils.js +++ b/app/assets/javascripts/vue_shared/components/filtered_search_bar/filtered_search_utils.js @@ -142,11 +142,11 @@ function extractNameAndOperator(filterName) { * '?myFilterName=foo' * gets translated into: * { myFilterName: { value: 'foo', operator: '=' } } - * @param {String} query URL quert string, e.g. from `window.location.search` + * @param {String} query URL query string, e.g. from `window.location.search` * @return {Object} filter object with filter names and their values */ export function urlQueryToFilter(query = '') { - const filters = queryToObject(query, { gatherArrays: true }); + const filters = queryToObject(query, { gatherArrays: true, legacySpacesDecode: true }); return Object.keys(filters).reduce((memo, key) => { const value = filters[key]; if (!value) { diff --git a/app/assets/stylesheets/startup/startup-dark.scss b/app/assets/stylesheets/startup/startup-dark.scss index 8622df34be6..c6f0b3a2ba7 100644 --- a/app/assets/stylesheets/startup/startup-dark.scss +++ b/app/assets/stylesheets/startup/startup-dark.scss @@ -337,6 +337,9 @@ h1 { .d-none { display: none !important; } +.d-inline-block { + display: inline-block !important; +} .d-block { display: block !important; } @@ -344,6 +347,9 @@ h1 { .d-sm-none { display: none !important; } + .d-sm-inline-block { + display: inline-block !important; + } } @media (min-width: 768px) { .d-md-block { @@ -351,6 +357,9 @@ h1 { } } @media (min-width: 992px) { + .d-lg-none { + display: none !important; + } .d-lg-block { display: block !important; } @@ -2287,24 +2296,11 @@ body.gl-dark { .gl-display-none { display: none; } -@media (min-width: 62rem) { - .gl-lg-display-none { - display: none; - } -} @media (min-width: 36rem) { .gl-sm-display-block { display: block; } } -.gl-display-inline-block { - display: inline-block; -} -@media (min-width: 36rem) { - .gl-sm-display-inline-block { - display: inline-block; - } -} .gl-absolute { position: absolute; } diff --git a/app/assets/stylesheets/startup/startup-general.scss b/app/assets/stylesheets/startup/startup-general.scss index a8d669a0c28..a05e27b6af0 100644 --- a/app/assets/stylesheets/startup/startup-general.scss +++ b/app/assets/stylesheets/startup/startup-general.scss @@ -322,6 +322,9 @@ h1 { .d-none { display: none !important; } +.d-inline-block { + display: inline-block !important; +} .d-block { display: block !important; } @@ -329,6 +332,9 @@ h1 { .d-sm-none { display: none !important; } + .d-sm-inline-block { + display: inline-block !important; + } } @media (min-width: 768px) { .d-md-block { @@ -336,6 +342,9 @@ h1 { } } @media (min-width: 992px) { + .d-lg-none { + display: none !important; + } .d-lg-block { display: block !important; } @@ -2070,24 +2079,11 @@ body.sidebar-refactoring .gl-display-none { display: none; } -@media (min-width: 62rem) { - .gl-lg-display-none { - display: none; - } -} @media (min-width: 36rem) { .gl-sm-display-block { display: block; } } -.gl-display-inline-block { - display: inline-block; -} -@media (min-width: 36rem) { - 
.gl-sm-display-inline-block { - display: inline-block; - } -} .gl-absolute { position: absolute; } diff --git a/app/controllers/admin/appearances_controller.rb b/app/controllers/admin/application_settings/appearances_controller.rb similarity index 68% rename from app/controllers/admin/appearances_controller.rb rename to app/controllers/admin/application_settings/appearances_controller.rb index c2614a158b7..47b2356a60f 100644 --- a/app/controllers/admin/appearances_controller.rb +++ b/app/controllers/admin/application_settings/appearances_controller.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -class Admin::AppearancesController < Admin::ApplicationController +class Admin::ApplicationSettings::AppearancesController < Admin::ApplicationController before_action :set_appearance, except: :create feature_category :navigation @@ -16,7 +16,7 @@ class Admin::AppearancesController < Admin::ApplicationController @appearance = Appearance.new(appearance_params) if @appearance.save - redirect_to admin_appearances_path, notice: _('Appearance was successfully created.') + redirect_to admin_application_settings_appearances_path, notice: _('Appearance was successfully created.') else render action: 'show' end @@ -24,7 +24,7 @@ class Admin::AppearancesController < Admin::ApplicationController def update if @appearance.update(appearance_params) - redirect_to admin_appearances_path, notice: _('Appearance was successfully updated.') + redirect_to admin_application_settings_appearances_path, notice: _('Appearance was successfully updated.') else render action: 'show' end @@ -35,21 +35,21 @@ class Admin::AppearancesController < Admin::ApplicationController @appearance.save - redirect_to admin_appearances_path, notice: _('Logo was successfully removed.') + redirect_to admin_application_settings_appearances_path, notice: _('Logo was successfully removed.') end def header_logos @appearance.remove_header_logo! @appearance.save - redirect_to admin_appearances_path, notice: _('Header logo was successfully removed.') + redirect_to admin_application_settings_appearances_path, notice: _('Header logo was successfully removed.') end def favicon @appearance.remove_favicon! 
@appearance.save - redirect_to admin_appearances_path, notice: _('Favicon was successfully removed.') + redirect_to admin_application_settings_appearances_path, notice: _('Favicon was successfully removed.') end private diff --git a/app/controllers/import/bulk_imports_controller.rb b/app/controllers/import/bulk_imports_controller.rb index 8d5c07f52c7..9d6c0a003c4 100644 --- a/app/controllers/import/bulk_imports_controller.rb +++ b/app/controllers/import/bulk_imports_controller.rb @@ -10,7 +10,7 @@ class Import::BulkImportsController < ApplicationController POLLING_INTERVAL = 3_000 - rescue_from BulkImports::Clients::Http::ConnectionError, with: :bulk_import_connection_error + rescue_from BulkImports::Clients::HTTP::ConnectionError, with: :bulk_import_connection_error def configure session[access_token_key] = configure_params[access_token_key]&.strip @@ -86,7 +86,7 @@ class Import::BulkImportsController < ApplicationController end def client - @client ||= BulkImports::Clients::Http.new( + @client ||= BulkImports::Clients::HTTP.new( uri: session[url_key], token: session[access_token_key], per_page: params[:per_page], diff --git a/app/models/bulk_imports/export_status.rb b/app/models/bulk_imports/export_status.rb index 72ece4e8fc0..98804d18f27 100644 --- a/app/models/bulk_imports/export_status.rb +++ b/app/models/bulk_imports/export_status.rb @@ -9,7 +9,7 @@ module BulkImports @relation = relation @entity = @pipeline_tracker.entity @configuration = @entity.bulk_import.configuration - @client = Clients::Http.new(uri: @configuration.url, token: @configuration.access_token) + @client = Clients::HTTP.new(uri: @configuration.url, token: @configuration.access_token) end def started? diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb index 9c211448d5e..23c2ccedb7f 100644 --- a/app/models/ci/build.rb +++ b/app/models/ci/build.rb @@ -39,6 +39,7 @@ module Ci has_one :deployment, as: :deployable, class_name: 'Deployment' has_one :pending_state, class_name: 'Ci::BuildPendingState', inverse_of: :build has_one :queuing_entry, class_name: 'Ci::PendingBuild', foreign_key: :build_id + has_one :runtime_metadata, class_name: 'Ci::RunningBuild', foreign_key: :build_id has_many :trace_sections, class_name: 'Ci::BuildTraceSection' has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id, inverse_of: :build has_many :report_results, class_name: 'Ci::BuildReportResult', inverse_of: :build @@ -310,7 +311,22 @@ module Ci after_transition pending: any do |build, transition| Ci::UpdateBuildQueueService.new.pop(build, transition) end + + after_transition any => [:running] do |build, transition| + Ci::UpdateBuildQueueService.new.track(build, transition) + end + + after_transition running: any do |build, transition| + Ci::UpdateBuildQueueService.new.untrack(build, transition) + + Ci::BuildRunnerSession.where(build: build).delete_all + end + # rubocop:enable CodeReuse/ServiceClass + # + after_transition pending: :running do |build| + build.ensure_metadata.update_timeout_state + end after_transition pending: :running do |build| build.deployment&.run @@ -364,14 +380,6 @@ module Ci end end - after_transition pending: :running do |build| - build.ensure_metadata.update_timeout_state - end - - after_transition running: any do |build| - Ci::BuildRunnerSession.where(build: build).delete_all - end - after_transition any => [:skipped, :canceled] do |build, transition| if transition.to_name == :skipped build.deployment&.skip @@ -1068,16 +1076,26 @@ module Ci options.dig(:allow_failure_criteria, 
:exit_codes).present? end - def all_queuing_entries - # We can have only one queuing entry, because there is a unique index on - # `build_id`, but we need a relation to remove this single queuing entry - # more efficiently in a single statement without actually load data. + def create_queuing_entry! + ::Ci::PendingBuild.upsert_from_build!(self) + end + ## + # We can have only one queuing entry or running build tracking entry, + # because there is a unique index on `build_id` in each table, but we need + # a relation to remove these entries more efficiently in a single statement + # without actually loading data. + # + def all_queuing_entries ::Ci::PendingBuild.where(build_id: self.id) end - def create_queuing_entry! - ::Ci::PendingBuild.upsert_from_build!(self) + def all_runtime_metadata + ::Ci::RunningBuild.where(build_id: self.id) + end + + def shared_runner_build? + runner&.instance_type? end protected diff --git a/app/models/ci/pending_build.rb b/app/models/ci/pending_build.rb index 12d2028df8b..b9a8a44bd6b 100644 --- a/app/models/ci/pending_build.rb +++ b/app/models/ci/pending_build.rb @@ -8,7 +8,7 @@ module Ci belongs_to :build, class_name: 'Ci::Build' def self.upsert_from_build!(build) - entry = self.new(build: build, project: build.project) + entry = self.new(build: build, project: build.project, protected: build.protected?) entry.validate! diff --git a/app/models/ci/running_build.rb b/app/models/ci/running_build.rb new file mode 100644 index 00000000000..9446cfa05da --- /dev/null +++ b/app/models/ci/running_build.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +module Ci + class RunningBuild < ApplicationRecord + extend Gitlab::Ci::Model + + belongs_to :project + belongs_to :build, class_name: 'Ci::Build' + belongs_to :runner, class_name: 'Ci::Runner' + + enum runner_type: ::Ci::Runner.runner_types + + def self.upsert_shared_runner_build!(build) + unless build.shared_runner_build? + raise ArgumentError, 'build has not been picked by a shared runner' + end + + entry = self.new(build: build, + project: build.project, + runner: build.runner, + runner_type: build.runner.runner_type) + + entry.validate! 
+ + self.upsert(entry.attributes.compact, returning: %w[build_id], unique_by: :build_id) + end + end +end diff --git a/app/models/commit.rb b/app/models/commit.rb index 69527c114d5..23c1dffcc63 100644 --- a/app/models/commit.rb +++ b/app/models/commit.rb @@ -38,7 +38,7 @@ class Commit cache_markdown_field :description, pipeline: :commit_description, limit: 1.megabyte # Share the cache used by the markdown fields - attr_mentionable :full_title, pipeline: :single_line, limit: 1.kilobyte + attr_mentionable :title, pipeline: :single_line attr_mentionable :description, pipeline: :commit_description, limit: 1.megabyte class << self diff --git a/app/models/concerns/enums/ci/commit_status.rb b/app/models/concerns/enums/ci/commit_status.rb index 521a684c733..72788d15c0a 100644 --- a/app/models/concerns/enums/ci/commit_status.rb +++ b/app/models/concerns/enums/ci/commit_status.rb @@ -24,7 +24,7 @@ module Enums project_deleted: 15, ci_quota_exceeded: 16, pipeline_loop_detected: 17, - no_matching_runner: 18, + no_matching_runner: 18, # not used anymore, but cannot be deleted because of old data insufficient_bridge_permissions: 1_001, downstream_bridge_project_not_found: 1_002, invalid_bridge_trigger: 1_003, diff --git a/app/models/group_deploy_token.rb b/app/models/group_deploy_token.rb index d4ad29ddabb..084a8672460 100644 --- a/app/models/group_deploy_token.rb +++ b/app/models/group_deploy_token.rb @@ -9,8 +9,6 @@ class GroupDeployToken < ApplicationRecord validates :deploy_token_id, uniqueness: { scope: [:group_id] } def has_access_to?(requested_project) - return false unless Feature.enabled?(:allow_group_deploy_token, default_enabled: true) - requested_project_group = requested_project&.group return false unless requested_project_group return true if requested_project_group.id == group_id diff --git a/app/services/boards/base_items_list_service.rb b/app/services/boards/base_items_list_service.rb index cbc7a332cbe..a3e24844587 100644 --- a/app/services/boards/base_items_list_service.rb +++ b/app/services/boards/base_items_list_service.rb @@ -6,9 +6,9 @@ module Boards include ActiveRecord::ConnectionAdapters::Quoting def execute - return items.order_closed_date_desc if list&.closed? + items = init_collection - ordered_items + order(items) end # rubocop: disable CodeReuse/ActiveRecord @@ -17,7 +17,7 @@ module Boards keys = metadata_fields.keys # TODO: eliminate need for SQL literal fragment columns = Arel.sql(metadata_fields.values_at(*keys).join(', ')) - results = item_model.where(id: items.select(issuables[:id])).pluck(columns) + results = item_model.where(id: init_collection.select(issuables[:id])).pluck(columns) Hash[keys.zip(results.flatten)] end @@ -29,7 +29,7 @@ module Boards { size: 'COUNT(*)' } end - def ordered_items + def order(items) raise NotImplementedError end @@ -47,8 +47,8 @@ module Boards # We memoize the query here since the finder methods we use are quite complex. This does not memoize the result of the query. # rubocop: disable CodeReuse/ActiveRecord - def items - strong_memoize(:items) do + def init_collection + strong_memoize(:init_collection) do filter(finder.execute).reorder(nil) end end diff --git a/app/services/boards/issues/list_service.rb b/app/services/boards/issues/list_service.rb index 6284e454561..0e95bf7a434 100644 --- a/app/services/boards/issues/list_service.rb +++ b/app/services/boards/issues/list_service.rb @@ -11,7 +11,9 @@ module Boards private - def ordered_items + def order(items) + return items.order_closed_date_desc if list&.closed? 
+ items.order_by_position_and_priority(with_cte: params[:search].present?) end diff --git a/app/services/bulk_imports/file_download_service.rb b/app/services/bulk_imports/file_download_service.rb index 1edd0695efd..c5a1241e0a4 100644 --- a/app/services/bulk_imports/file_download_service.rb +++ b/app/services/bulk_imports/file_download_service.rb @@ -52,7 +52,7 @@ module BulkImports end def http_client - @http_client ||= BulkImports::Clients::Http.new( + @http_client ||= BulkImports::Clients::HTTP.new( uri: configuration.url, token: configuration.access_token ) diff --git a/app/services/ci/pipeline_creation/drop_not_runnable_builds_service.rb b/app/services/ci/pipeline_creation/drop_not_runnable_builds_service.rb deleted file mode 100644 index 0e71d2520ac..00000000000 --- a/app/services/ci/pipeline_creation/drop_not_runnable_builds_service.rb +++ /dev/null @@ -1,91 +0,0 @@ -# frozen_string_literal: true - -module Ci - module PipelineCreation - class DropNotRunnableBuildsService - include Gitlab::Utils::StrongMemoize - - def initialize(pipeline) - @pipeline = pipeline - end - - ## - # We want to run this service exactly once, - # before the first pipeline processing call - # - def execute - return unless ::Feature.enabled?(:ci_drop_new_builds_when_ci_quota_exceeded, project, default_enabled: :yaml) - return unless pipeline.created? - - load_runners - validate_build_matchers - end - - private - - attr_reader :pipeline - attr_reader :instance_runners, :private_runners - delegate :project, to: :pipeline - - def load_runners - @instance_runners, @private_runners = project - .all_runners - .active - .online - .runner_matchers - .partition(&:instance_type?) - end - - def validate_build_matchers - pipeline.build_matchers.each do |build_matcher| - failure_reason = validate_build_matcher(build_matcher) - next unless failure_reason - - drop_all_builds(build_matcher.build_ids, failure_reason) - end - end - - def validate_build_matcher(build_matcher) - return if matching_private_runners?(build_matcher) - return if matching_instance_runners?(build_matcher) - - matching_failure_reason(build_matcher) - end - - ## - # We skip pipeline processing until we drop all required builds. Otherwise - # as we drop the first build, the remaining builds to be dropped could - # transition to other states by `PipelineProcessWorker` running async. - # - def drop_all_builds(build_ids, failure_reason) - pipeline.builds.id_in(build_ids).each do |build| - build.drop(failure_reason, skip_pipeline_processing: true) - end - end - - def matching_private_runners?(build_matcher) - private_runners - .find { |matcher| matcher.matches?(build_matcher) } - .present? - end - - def matching_instance_runners?(build_matcher) - instance_runners - .find { |matcher| matching_criteria(matcher, build_matcher) } - .present? 
- end - - # Overridden in EE - def matching_criteria(runner_matcher, build_matcher) - runner_matcher.matches?(build_matcher) - end - - # Overridden in EE - def matching_failure_reason(build_matcher) - :no_matching_runner - end - end - end -end - -Ci::PipelineCreation::DropNotRunnableBuildsService.prepend_mod_with('Ci::PipelineCreation::DropNotRunnableBuildsService') diff --git a/app/services/ci/pipeline_creation/start_pipeline_service.rb b/app/services/ci/pipeline_creation/start_pipeline_service.rb index 60fc5c56c63..27c12caaa0a 100644 --- a/app/services/ci/pipeline_creation/start_pipeline_service.rb +++ b/app/services/ci/pipeline_creation/start_pipeline_service.rb @@ -10,9 +10,10 @@ module Ci end def execute - DropNotRunnableBuildsService.new(pipeline).execute Ci::ProcessPipelineService.new(pipeline).execute end end end end + +::Ci::PipelineCreation::StartPipelineService.prepend_mod_with('Ci::PipelineCreation::StartPipelineService') diff --git a/app/services/ci/update_build_queue_service.rb b/app/services/ci/update_build_queue_service.rb index 479cbbbdda3..eea09e9ac67 100644 --- a/app/services/ci/update_build_queue_service.rb +++ b/app/services/ci/update_build_queue_service.rb @@ -48,6 +48,47 @@ module Ci end end + ## + # Add shared runner build tracking entry (used for queuing). + # + def track(build, transition) + return unless Feature.enabled?(:ci_track_shared_runner_builds, build.project, default_enabled: :yaml) + return unless build.shared_runner_build? + + raise InvalidQueueTransition unless transition.to == 'running' + + transition.within_transaction do + result = ::Ci::RunningBuild.upsert_shared_runner_build!(build) + + unless result.empty? + metrics.increment_queue_operation(:shared_runner_build_new) + + result.rows.dig(0, 0) + end + end + end + + ## + # Remove a runtime build tracking entry for a shared runner build (used for + # queuing). + # + def untrack(build, transition) + return unless Feature.enabled?(:ci_untrack_shared_runner_builds, build.project, default_enabled: :yaml) + return unless build.shared_runner_build? + + raise InvalidQueueTransition unless transition.from == 'running' + + transition.within_transaction do + removed = build.all_runtime_metadata.delete_all + + if removed > 0 + metrics.increment_queue_operation(:shared_runner_build_done) + + build.id + end + end + end + ## # Unblock runner associated with given project / build # diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb index 8beb34a4626..7dd9280e5b1 100644 --- a/app/services/projects/create_service.rb +++ b/app/services/projects/create_service.rb @@ -11,6 +11,7 @@ module Projects @initialize_with_readme = Gitlab::Utils.to_boolean(@params.delete(:initialize_with_readme)) @import_data = @params.delete(:import_data) @relations_block = @params.delete(:relations_block) + @default_branch = @params.delete(:default_branch) build_topics end @@ -130,20 +131,16 @@ module Projects access_level: group_access_level) end - if Feature.enabled?(:specialized_project_authorization_workers, default_enabled: :yaml) - AuthorizedProjectUpdate::ProjectCreateWorker.perform_async(@project.id) - # AuthorizedProjectsWorker uses an exclusive lease per user but - # specialized workers might have synchronization issues. Until we - # compare the inconsistency rates of both approaches, we still run - # AuthorizedProjectsWorker but with some delay and lower urgency as a - # safety net. 
- @project.group.refresh_members_authorized_projects( - blocking: false, - priority: UserProjectAccessChangedService::LOW_PRIORITY - ) - else - @project.group.refresh_members_authorized_projects(blocking: false) - end + AuthorizedProjectUpdate::ProjectCreateWorker.perform_async(@project.id) + # AuthorizedProjectsWorker uses an exclusive lease per user but + # specialized workers might have synchronization issues. Until we + # compare the inconsistency rates of both approaches, we still run + # AuthorizedProjectsWorker but with some delay and lower urgency as a + # safety net. + @project.group.refresh_members_authorized_projects( + blocking: false, + priority: UserProjectAccessChangedService::LOW_PRIORITY + ) else @project.add_maintainer(@project.namespace.owner, current_user: current_user) end @@ -151,7 +148,7 @@ module Projects def create_readme commit_attrs = { - branch_name: @project.default_branch_or_main, + branch_name: @default_branch.presence || @project.default_branch_or_main, commit_message: 'Initial commit', file_path: 'README.md', file_content: "# #{@project.name}\n\n#{@project.description}" diff --git a/app/services/projects/group_links/create_service.rb b/app/services/projects/group_links/create_service.rb index d8fa2f36fcc..fc5c936b378 100644 --- a/app/services/projects/group_links/create_service.rb +++ b/app/services/projects/group_links/create_service.rb @@ -23,22 +23,18 @@ module Projects private def setup_authorizations(group, group_access = nil) - if Feature.enabled?(:specialized_project_authorization_project_share_worker, default_enabled: :yaml) - AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker.perform_async( - project.id, group.id, group_access) + AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker.perform_async( + project.id, group.id, group_access) - # AuthorizedProjectsWorker uses an exclusive lease per user but - # specialized workers might have synchronization issues. Until we - # compare the inconsistency rates of both approaches, we still run - # AuthorizedProjectsWorker but with some delay and lower urgency as a - # safety net. - group.refresh_members_authorized_projects( - blocking: false, - priority: UserProjectAccessChangedService::LOW_PRIORITY - ) - else - group.refresh_members_authorized_projects(blocking: false) - end + # AuthorizedProjectsWorker uses an exclusive lease per user but + # specialized workers might have synchronization issues. Until we + # compare the inconsistency rates of both approaches, we still run + # AuthorizedProjectsWorker but with some delay and lower urgency as a + # safety net. 
+ group.refresh_members_authorized_projects( + blocking: false, + priority: UserProjectAccessChangedService::LOW_PRIORITY + ) end end end diff --git a/app/views/admin/appearances/_form.html.haml b/app/views/admin/application_settings/appearances/_form.html.haml similarity index 80% rename from app/views/admin/appearances/_form.html.haml rename to app/views/admin/application_settings/appearances/_form.html.haml index 872a6bef18b..a48b57bffd9 100644 --- a/app/views/admin/appearances/_form.html.haml +++ b/app/views/admin/application_settings/appearances/_form.html.haml @@ -1,6 +1,6 @@ - parsed_with_gfm = (_("Content parsed with %{link}.") % { link: link_to('GitLab Flavored Markdown', help_page_path('user/markdown'), target: '_blank') }).html_safe -= form_for @appearance, url: admin_appearances_path, html: { class: 'gl-mt-3' } do |f| += form_for @appearance, url: admin_application_settings_appearances_path, html: { class: 'gl-mt-3' } do |f| = form_errors(@appearance) @@ -16,7 +16,7 @@ = image_tag @appearance.header_logo_path, class: 'appearance-light-logo-preview' - if @appearance.persisted? %br - = link_to _('Remove header logo'), header_logos_admin_appearances_path, data: { confirm: _("Header logo will be removed. Are you sure?") }, method: :delete, class: "btn gl-button btn-danger btn-danger-secondary btn-sm" + = link_to _('Remove header logo'), header_logos_admin_application_settings_appearances_path, data: { confirm: _("Header logo will be removed. Are you sure?") }, method: :delete, class: "btn gl-button btn-danger btn-danger-secondary btn-sm" %hr = f.hidden_field :header_logo_cache = f.file_field :header_logo, class: "", accept: 'image/*' @@ -35,7 +35,7 @@ = image_tag @appearance.favicon_path, class: 'appearance-light-logo-preview' - if @appearance.persisted? %br - = link_to _('Remove favicon'), favicon_admin_appearances_path, data: { confirm: _("Favicon will be removed. Are you sure?") }, method: :delete, class: "btn gl-button btn-danger btn-danger-secondary btn-sm" + = link_to _('Remove favicon'), favicon_admin_application_settings_appearances_path, data: { confirm: _("Favicon will be removed. Are you sure?") }, method: :delete, class: "btn gl-button btn-danger btn-danger-secondary btn-sm" %hr = f.hidden_field :favicon_cache = f.file_field :favicon, class: '', accept: 'image/*' @@ -44,7 +44,7 @@ %br = _("Images with incorrect dimensions are not resized automatically, and may result in unexpected behavior.") - = render partial: 'admin/appearances/system_header_footer_form', locals: { form: f } + = render partial: 'admin/application_settings/appearances/system_header_footer_form', locals: { form: f } %hr .row @@ -67,7 +67,7 @@ = image_tag @appearance.logo_path, class: 'appearance-logo-preview' - if @appearance.persisted? %br - = link_to _('Remove logo'), logo_admin_appearances_path, data: { confirm: _("Logo will be removed. Are you sure?") }, method: :delete, class: "btn gl-button btn-danger btn-danger-secondary btn-sm remove-logo" + = link_to _('Remove logo'), logo_admin_application_settings_appearances_path, data: { confirm: _("Logo will be removed. Are you sure?") }, method: :delete, class: "btn gl-button btn-danger btn-danger-secondary btn-sm remove-logo" %hr = f.hidden_field :logo_cache = f.file_field :logo, class: "", accept: 'image/*' @@ -106,7 +106,7 @@ .mt-4 - if @appearance.persisted? 
Preview last save: - = link_to _('Sign-in page'), preview_sign_in_admin_appearances_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer' + = link_to _('Sign-in page'), preview_sign_in_admin_application_settings_appearances_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer' = link_to _('New project page'), new_project_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer' - if @appearance.updated_at diff --git a/app/views/admin/appearances/_system_header_footer_form.html.haml b/app/views/admin/application_settings/appearances/_system_header_footer_form.html.haml similarity index 100% rename from app/views/admin/appearances/_system_header_footer_form.html.haml rename to app/views/admin/application_settings/appearances/_system_header_footer_form.html.haml diff --git a/app/views/admin/appearances/preview_sign_in.html.haml b/app/views/admin/application_settings/appearances/preview_sign_in.html.haml similarity index 100% rename from app/views/admin/appearances/preview_sign_in.html.haml rename to app/views/admin/application_settings/appearances/preview_sign_in.html.haml diff --git a/app/views/admin/appearances/show.html.haml b/app/views/admin/application_settings/appearances/show.html.haml similarity index 100% rename from app/views/admin/appearances/show.html.haml rename to app/views/admin/application_settings/appearances/show.html.haml diff --git a/app/views/layouts/header/_default.html.haml b/app/views/layouts/header/_default.html.haml index 03fe2323773..87580e57e75 100644 --- a/app/views/layouts/header/_default.html.haml +++ b/app/views/layouts/header/_default.html.haml @@ -37,7 +37,7 @@ - search_menu_item = top_nav_search_menu_item_attrs %li.nav-item.d-none.d-lg-block.m-auto = render 'layouts/search' unless current_controller?(:search) - %li.nav-item{ class: use_top_nav_redesign ? "gl-display-none gl-sm-display-inline-block gl-lg-display-none" : "gl-display-inline-block gl-lg-display-none" } + %li.nav-item{ class: use_top_nav_redesign ? 
'd-none d-sm-inline-block d-lg-none' : 'd-inline-block d-lg-none' } = link_to search_menu_item.fetch(:href), title: search_menu_item.fetch(:title), aria: { label: search_menu_item.fetch(:title) }, data: {toggle: 'tooltip', placement: 'bottom', container: 'body'} do = sprite_icon(search_menu_item.fetch(:icon)) - if header_link?(:issues) diff --git a/app/views/layouts/nav/sidebar/_admin.html.haml b/app/views/layouts/nav/sidebar/_admin.html.haml index 2022c321a98..7a80c4e0ba9 100644 --- a/app/views/layouts/nav/sidebar/_admin.html.haml +++ b/app/views/layouts/nav/sidebar/_admin.html.haml @@ -234,19 +234,7 @@ %strong.fly-out-top-item-name = _('Labels') - = nav_link(controller: :appearances) do - = link_to admin_appearances_path do - .nav-icon-container - = sprite_icon('appearance') - %span.nav-item-name - = _('Appearance') - %ul.sidebar-sub-level-items.is-fly-out-only - = nav_link(controller: :appearances, html_options: { class: "fly-out-top-item" } ) do - = link_to admin_appearances_path do - %strong.fly-out-top-item-name - = _('Appearance') - - = nav_link(controller: [:application_settings, :integrations]) do + = nav_link(controller: [:application_settings, :integrations, :appearances]) do = link_to general_admin_application_settings_path, class: 'has-sub-items' do .nav-icon-container = sprite_icon('settings') @@ -255,7 +243,7 @@ %ul.sidebar-sub-level-items{ data: { qa_selector: 'admin_sidebar_settings_submenu_content' } } -# This active_nav_link check is also used in `app/views/layouts/admin.html.haml` - = nav_link(controller: [:application_settings, :integrations], html_options: { class: "fly-out-top-item" } ) do + = nav_link(controller: [:application_settings, :integrations, :appearances], html_options: { class: "fly-out-top-item" } ) do = link_to general_admin_application_settings_path do %strong.fly-out-top-item-name = _('Settings') @@ -302,6 +290,10 @@ = link_to network_admin_application_settings_path, title: _('Network'), data: { qa_selector: 'admin_settings_network_item' } do %span = _('Network') + = nav_link(controller: :appearances ) do + = link_to admin_application_settings_appearances_path do + %span + = _('Appearance') = nav_link(path: 'application_settings#preferences') do = link_to preferences_admin_application_settings_path, title: _('Preferences'), data: { qa_selector: 'admin_settings_preferences_link' } do %span diff --git a/app/views/layouts/nav/sidebar/_context_menu_body.html.haml b/app/views/layouts/nav/sidebar/_context_menu_body.html.haml index d8409a5ebd1..321bcda5702 100644 --- a/app/views/layouts/nav/sidebar/_context_menu_body.html.haml +++ b/app/views/layouts/nav/sidebar/_context_menu_body.html.haml @@ -2,7 +2,7 @@ - avatar_classes = ['avatar-container', 'rect-avatar', 'group-avatar'] - avatar_classes << avatar_size_class -= link_to group_path(@group), title: @group.name do += link_to group_path(@group), title: @group.name, data: { qa_selector: 'group_scope_link' } do %span{ class: avatar_classes } = group_icon(@group, class: ['avatar', 'avatar-tile', avatar_size_class]) %span.sidebar-context-title diff --git a/app/views/layouts/nav/sidebar/_group.html.haml b/app/views/layouts/nav/sidebar/_group.html.haml index 63797ba6bee..0ce1d48a2de 100644 --- a/app/views/layouts/nav/sidebar/_group.html.haml +++ b/app/views/layouts/nav/sidebar/_group.html.haml @@ -19,14 +19,14 @@ - paths = group_overview_nav_link_paths = nav_link(path: paths, unless: -> { current_path?('groups/contribution_analytics#show') }, html_options: { class: 'home' }) do - information_link = 
sidebar_refactor_enabled? ? activity_group_path(@group) : group_path(@group) - = link_to information_link, class: 'has-sub-items' do + = link_to information_link, class: 'has-sub-items', data: { qa_selector: 'group_information_link' } do .nav-icon-container - sprite = sidebar_refactor_enabled? ? 'group' : 'home' = sprite_icon(sprite) %span.nav-item-name = group_information_title(@group) - %ul.sidebar-sub-level-items + %ul.sidebar-sub-level-items{ data: { qa_selector: 'group_information_submenu'} } = nav_link(path: paths, html_options: { class: "fly-out-top-item" } ) do = link_to information_link do %strong.fly-out-top-item-name diff --git a/app/views/projects/blob/edit.html.haml b/app/views/projects/blob/edit.html.haml index 9f89981e7ca..ecbef9a11a7 100644 --- a/app/views/projects/blob/edit.html.haml +++ b/app/views/projects/blob/edit.html.haml @@ -5,11 +5,13 @@ - if @conflict .gl-alert.gl-alert-danger.gl-mb-5.gl-mt-5 - = sprite_icon('error', size: 16, css_class: 'gl-icon gl-alert-icon gl-alert-icon-no-title') - .gl-alert-body - Someone edited the file the same time you did. Please check out - = link_to "the file", project_blob_path(@project, tree_join(@branch_name, @file_path)), target: "_blank", rel: 'noopener noreferrer', class: 'gl-link' - and make sure your changes will not unintentionally remove theirs. + .gl-alert-container + = sprite_icon('error', size: 16, css_class: 'gl-icon gl-alert-icon gl-alert-icon-no-title') + .gl-alert-content + .gl-alert-body + Someone edited the file the same time you did. Please check out + = link_to _('the file'), project_blob_path(@project, tree_join(@branch_name, @file_path)), target: "_blank", rel: 'noopener noreferrer', class: 'gl-link' + and make sure your changes will not unintentionally remove theirs. %h3.page-title.blob-edit-page-title Edit file diff --git a/app/views/projects/cycle_analytics/show.html.haml b/app/views/projects/cycle_analytics/show.html.haml index 71730da0595..c1f6cfc40c3 100644 --- a/app/views/projects/cycle_analytics/show.html.haml +++ b/app/views/projects/cycle_analytics/show.html.haml @@ -1,6 +1,6 @@ - page_title _("Value Stream Analytics") - add_page_specific_style 'page_bundles/cycle_analytics' - svgs = { empty_state_svg_path: image_path("illustrations/analytics/cycle-analytics-empty-chart.svg"), no_data_svg_path: image_path("illustrations/analytics/cycle-analytics-empty-chart.svg"), no_access_svg_path: image_path("illustrations/analytics/no-access.svg") } -- initial_data = { request_path: project_cycle_analytics_path(@project) }.merge!(svgs) +- initial_data = { request_path: project_cycle_analytics_path(@project), full_path: @project.full_path }.merge!(svgs) #js-cycle-analytics{ data: initial_data } diff --git a/app/views/shared/nav/_scope_menu_body.html.haml b/app/views/shared/nav/_scope_menu_body.html.haml index cccff4d766b..a94c681e2d3 100644 --- a/app/views/shared/nav/_scope_menu_body.html.haml +++ b/app/views/shared/nav/_scope_menu_body.html.haml @@ -1,7 +1,7 @@ - avatar_size = sidebar_refactor_disabled? ? 40 : 32 - avatar_size_class = sidebar_refactor_disabled? ? 
's40' : 's32' -= link_to scope_menu.link, **scope_menu.container_html_options do += link_to scope_menu.link, **scope_menu.container_html_options, data: { qa_selector: 'project_scope_link' } do %span{ class: ['avatar-container', 'rect-avatar', 'project-avatar', avatar_size_class] } = source_icon(scope_menu.container, alt: scope_menu.title, class: ['avatar', 'avatar-tile', avatar_size_class], width: avatar_size, height: avatar_size) %span.sidebar-context-title diff --git a/app/workers/bulk_imports/export_request_worker.rb b/app/workers/bulk_imports/export_request_worker.rb index cccc24d3bdc..24e75ad0f85 100644 --- a/app/workers/bulk_imports/export_request_worker.rb +++ b/app/workers/bulk_imports/export_request_worker.rb @@ -24,7 +24,7 @@ module BulkImports end def http_client(configuration) - @client ||= Clients::Http.new( + @client ||= Clients::HTTP.new( uri: configuration.url, token: configuration.access_token ) diff --git a/config/feature_flags/development/allow_group_deploy_token.yml b/config/feature_flags/development/allow_group_deploy_token.yml deleted file mode 100644 index 83f8f86b02c..00000000000 --- a/config/feature_flags/development/allow_group_deploy_token.yml +++ /dev/null @@ -1,8 +0,0 @@ ---- -name: allow_group_deploy_token -introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23460 -rollout_issue_url: -milestone: '12.8' -type: development -group: group::release -default_enabled: true diff --git a/config/feature_flags/development/ci_track_shared_runner_builds.yml b/config/feature_flags/development/ci_track_shared_runner_builds.yml new file mode 100644 index 00000000000..b4267ab5be3 --- /dev/null +++ b/config/feature_flags/development/ci_track_shared_runner_builds.yml @@ -0,0 +1,8 @@ +--- +name: ci_track_shared_runner_builds +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/62912 +rollout_issue_url: +milestone: '14.0' +type: development +group: group::pipeline execution +default_enabled: false diff --git a/config/feature_flags/development/ci_untrack_shared_runner_builds.yml b/config/feature_flags/development/ci_untrack_shared_runner_builds.yml new file mode 100644 index 00000000000..2e8de5cbb0c --- /dev/null +++ b/config/feature_flags/development/ci_untrack_shared_runner_builds.yml @@ -0,0 +1,8 @@ +--- +name: ci_untrack_shared_runner_builds +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/62912 +rollout_issue_url: +milestone: '14.0' +type: development +group: group::pipeline execution +default_enabled: false diff --git a/config/feature_flags/development/sidebar_refactor.yml b/config/feature_flags/development/sidebar_refactor.yml index 317fbee87f5..eb13adfd147 100644 --- a/config/feature_flags/development/sidebar_refactor.yml +++ b/config/feature_flags/development/sidebar_refactor.yml @@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/326111 milestone: '13.11' type: development group: group::editor -default_enabled: false +default_enabled: true diff --git a/config/feature_flags/development/specialized_project_authorization_project_share_worker.yml b/config/feature_flags/development/specialized_project_authorization_project_share_worker.yml deleted file mode 100644 index 4e170fa13c9..00000000000 --- a/config/feature_flags/development/specialized_project_authorization_project_share_worker.yml +++ /dev/null @@ -1,8 +0,0 @@ ---- -name: specialized_project_authorization_project_share_worker -introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/32864 
-rollout_issue_url: -milestone: '13.2' -type: development -group: group::access -default_enabled: true diff --git a/config/feature_flags/development/specialized_project_authorization_workers.yml b/config/feature_flags/development/specialized_project_authorization_workers.yml deleted file mode 100644 index 16d6d4ed82f..00000000000 --- a/config/feature_flags/development/specialized_project_authorization_workers.yml +++ /dev/null @@ -1,8 +0,0 @@ ---- -name: specialized_project_authorization_workers -introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/31377 -rollout_issue_url: -milestone: '13.0' -type: development -group: group::access -default_enabled: true diff --git a/config/routes/admin.rb b/config/routes/admin.rb index f3377c0fdce..7bd24ac5f5b 100644 --- a/config/routes/admin.rb +++ b/config/routes/admin.rb @@ -124,15 +124,6 @@ namespace :admin do end end - resource :appearances, only: [:show, :create, :update], path: 'appearance' do - member do - get :preview_sign_in - delete :logo - delete :header_logos - delete :favicon - end - end - resource :application_settings, only: :update do resources :services, only: [:index, :edit, :update] resources :integrations, only: [:edit, :update] do @@ -153,6 +144,15 @@ namespace :admin do get :status_create_self_monitoring_project delete :delete_self_monitoring_project get :status_delete_self_monitoring_project + + resource :appearances, only: [:show, :create, :update], path: 'appearance', module: 'application_settings' do + member do + get :preview_sign_in + delete :logo + delete :header_logos + delete :favicon + end + end end resources :plan_limits, only: :create diff --git a/db/migrate/20210601123341_add_running_builds_table.rb b/db/migrate/20210601123341_add_running_builds_table.rb new file mode 100644 index 00000000000..4093619d2c3 --- /dev/null +++ b/db/migrate/20210601123341_add_running_builds_table.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +class AddRunningBuildsTable < ActiveRecord::Migration[6.0] + def up + create_table :ci_running_builds do |t| + t.references :build, index: { unique: true }, null: false, foreign_key: { to_table: :ci_builds, on_delete: :cascade } + t.references :project, index: true, null: false, foreign_key: { on_delete: :cascade } + t.references :runner, index: true, null: false, foreign_key: { to_table: :ci_runners, on_delete: :cascade } + t.datetime_with_timezone :created_at, null: false, default: -> { 'NOW()' } + t.integer :runner_type, limit: 2, null: false + end + end + + def down + drop_table :ci_running_builds + end +end diff --git a/db/migrate/20210607080044_remove_temporary_index_on_security_findings_scan_id.rb b/db/migrate/20210607080044_remove_temporary_index_on_security_findings_scan_id.rb new file mode 100644 index 00000000000..5357059f475 --- /dev/null +++ b/db/migrate/20210607080044_remove_temporary_index_on_security_findings_scan_id.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +class RemoveTemporaryIndexOnSecurityFindingsScanId < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + INDEX_NAME = 'tmp_index_on_security_findings_scan_id' + + disable_ddl_transaction! 
+ + def up + remove_concurrent_index_by_name :security_findings, INDEX_NAME + end + + def down + add_concurrent_index :security_findings, :scan_id, where: 'uuid is null', name: INDEX_NAME + end +end diff --git a/db/migrate/20210610102410_add_protected_attribute_to_pending_builds.rb b/db/migrate/20210610102410_add_protected_attribute_to_pending_builds.rb new file mode 100644 index 00000000000..026fa0d7043 --- /dev/null +++ b/db/migrate/20210610102410_add_protected_attribute_to_pending_builds.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +class AddProtectedAttributeToPendingBuilds < ActiveRecord::Migration[6.1] + def change + add_column :ci_pending_builds, :protected, :boolean, null: false, default: false + end +end diff --git a/db/post_migrate/20210610102413_migrate_protected_attribute_to_pending_builds.rb b/db/post_migrate/20210610102413_migrate_protected_attribute_to_pending_builds.rb new file mode 100644 index 00000000000..f47ff244d7a --- /dev/null +++ b/db/post_migrate/20210610102413_migrate_protected_attribute_to_pending_builds.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +class MigrateProtectedAttributeToPendingBuilds < ActiveRecord::Migration[6.1] + include ::Gitlab::Database::DynamicModelHelpers + + disable_ddl_transaction! + + def up + return unless Gitlab.dev_or_test_env? || Gitlab.com? + + each_batch_range('ci_pending_builds', of: 1000) do |min, max| + execute <<~SQL + UPDATE ci_pending_builds + SET protected = true + FROM ci_builds + WHERE ci_pending_builds.build_id = ci_builds.id + AND ci_builds.protected = true + AND ci_pending_builds.id BETWEEN #{min} AND #{max} + SQL + end + end + + def down + # no op + end +end diff --git a/db/post_migrate/20210610113229_add_index_to_protected_pending_builds.rb b/db/post_migrate/20210610113229_add_index_to_protected_pending_builds.rb new file mode 100644 index 00000000000..140bf7df4e6 --- /dev/null +++ b/db/post_migrate/20210610113229_add_index_to_protected_pending_builds.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +class AddIndexToProtectedPendingBuilds < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + INDEX_NAME = 'index_ci_pending_builds_id_on_protected_partial' + + disable_ddl_transaction! 
+ + def up + add_concurrent_index :ci_pending_builds, :id, where: 'protected = true', name: INDEX_NAME + end + + def down + remove_concurrent_index_by_name :ci_pending_builds, INDEX_NAME + end +end diff --git a/db/schema_migrations/20210601123341 b/db/schema_migrations/20210601123341 new file mode 100644 index 00000000000..7fd9943c89e --- /dev/null +++ b/db/schema_migrations/20210601123341 @@ -0,0 +1 @@ +d4a0098c30cd1acea008fa5f1cfb4c23d5b5b894eab2b72f5004acc5233f2576 \ No newline at end of file diff --git a/db/schema_migrations/20210607080044 b/db/schema_migrations/20210607080044 new file mode 100644 index 00000000000..9ccf86d8194 --- /dev/null +++ b/db/schema_migrations/20210607080044 @@ -0,0 +1 @@ +88f16dc06371d320a1245de68aba5ed4ad7cd8f15c4e5898619a751840981072 \ No newline at end of file diff --git a/db/schema_migrations/20210610102410 b/db/schema_migrations/20210610102410 new file mode 100644 index 00000000000..1d6bb16209e --- /dev/null +++ b/db/schema_migrations/20210610102410 @@ -0,0 +1 @@ +dab13c78f6f758c63be923277c0f31e4cce4e30f77a8dc2983a9bb1500a454f9 \ No newline at end of file diff --git a/db/schema_migrations/20210610102413 b/db/schema_migrations/20210610102413 new file mode 100644 index 00000000000..d94b1623e24 --- /dev/null +++ b/db/schema_migrations/20210610102413 @@ -0,0 +1 @@ +ce21070d44a34081c6babd14e6a1b607bad5ed9047b18f4ef0beb64b5a2ce120 \ No newline at end of file diff --git a/db/schema_migrations/20210610113229 b/db/schema_migrations/20210610113229 new file mode 100644 index 00000000000..15d4d567534 --- /dev/null +++ b/db/schema_migrations/20210610113229 @@ -0,0 +1 @@ +3ad279a7c57e433a8ee349dabd2536c1de9055936b05c26b5469606067eb90d4 \ No newline at end of file diff --git a/db/structure.sql b/db/structure.sql index 7220869c5ea..c725e34c109 100644 --- a/db/structure.sql +++ b/db/structure.sql @@ -10799,7 +10799,8 @@ CREATE TABLE ci_pending_builds ( id bigint NOT NULL, build_id bigint NOT NULL, project_id bigint NOT NULL, - created_at timestamp with time zone DEFAULT now() NOT NULL + created_at timestamp with time zone DEFAULT now() NOT NULL, + protected boolean DEFAULT false NOT NULL ); CREATE SEQUENCE ci_pending_builds_id_seq @@ -11150,6 +11151,24 @@ CREATE SEQUENCE ci_runners_id_seq ALTER SEQUENCE ci_runners_id_seq OWNED BY ci_runners.id; +CREATE TABLE ci_running_builds ( + id bigint NOT NULL, + build_id bigint NOT NULL, + project_id bigint NOT NULL, + runner_id bigint NOT NULL, + created_at timestamp with time zone DEFAULT now() NOT NULL, + runner_type smallint NOT NULL +); + +CREATE SEQUENCE ci_running_builds_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE ci_running_builds_id_seq OWNED BY ci_running_builds.id; + CREATE TABLE ci_sources_pipelines ( id integer NOT NULL, project_id integer, @@ -19686,6 +19705,8 @@ ALTER TABLE ONLY ci_runner_projects ALTER COLUMN id SET DEFAULT nextval('ci_runn ALTER TABLE ONLY ci_runners ALTER COLUMN id SET DEFAULT nextval('ci_runners_id_seq'::regclass); +ALTER TABLE ONLY ci_running_builds ALTER COLUMN id SET DEFAULT nextval('ci_running_builds_id_seq'::regclass); + ALTER TABLE ONLY ci_sources_pipelines ALTER COLUMN id SET DEFAULT nextval('ci_sources_pipelines_id_seq'::regclass); ALTER TABLE ONLY ci_sources_projects ALTER COLUMN id SET DEFAULT nextval('ci_sources_projects_id_seq'::regclass); @@ -20891,6 +20912,9 @@ ALTER TABLE ONLY ci_runner_projects ALTER TABLE ONLY ci_runners ADD CONSTRAINT ci_runners_pkey PRIMARY KEY (id); +ALTER TABLE ONLY ci_running_builds + ADD CONSTRAINT 
ci_running_builds_pkey PRIMARY KEY (id); + ALTER TABLE ONLY ci_sources_pipelines ADD CONSTRAINT ci_sources_pipelines_pkey PRIMARY KEY (id); @@ -22776,6 +22800,8 @@ CREATE INDEX index_ci_minutes_additional_packs_on_namespace_id_purchase_xid ON c CREATE UNIQUE INDEX index_ci_namespace_monthly_usages_on_namespace_id_and_date ON ci_namespace_monthly_usages USING btree (namespace_id, date); +CREATE INDEX index_ci_pending_builds_id_on_protected_partial ON ci_pending_builds USING btree (id) WHERE (protected = true); + CREATE UNIQUE INDEX index_ci_pending_builds_on_build_id ON ci_pending_builds USING btree (build_id); CREATE INDEX index_ci_pending_builds_on_project_id ON ci_pending_builds USING btree (project_id); @@ -22886,6 +22912,12 @@ CREATE INDEX index_ci_runners_on_token ON ci_runners USING btree (token); CREATE INDEX index_ci_runners_on_token_encrypted ON ci_runners USING btree (token_encrypted); +CREATE UNIQUE INDEX index_ci_running_builds_on_build_id ON ci_running_builds USING btree (build_id); + +CREATE INDEX index_ci_running_builds_on_project_id ON ci_running_builds USING btree (project_id); + +CREATE INDEX index_ci_running_builds_on_runner_id ON ci_running_builds USING btree (runner_id); + CREATE INDEX index_ci_sources_pipelines_on_pipeline_id ON ci_sources_pipelines USING btree (pipeline_id); CREATE INDEX index_ci_sources_pipelines_on_project_id ON ci_sources_pipelines USING btree (project_id); @@ -25028,8 +25060,6 @@ CREATE INDEX tmp_idx_deduplicate_vulnerability_occurrences ON vulnerability_occu CREATE INDEX tmp_idx_on_namespaces_delayed_project_removal ON namespaces USING btree (id) WHERE (delayed_project_removal = true); -CREATE INDEX tmp_index_on_security_findings_scan_id ON security_findings USING btree (scan_id) WHERE (uuid IS NULL); - CREATE INDEX tmp_index_on_vulnerabilities_non_dismissed ON vulnerabilities USING btree (id) WHERE (state <> 2); CREATE UNIQUE INDEX uniq_pkgs_deb_grp_architectures_on_distribution_id_and_name ON packages_debian_group_architectures USING btree (distribution_id, name); @@ -26708,6 +26738,9 @@ ALTER TABLE ONLY vulnerability_scanners ALTER TABLE ONLY reviews ADD CONSTRAINT fk_rails_5ca11d8c31 FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE; +ALTER TABLE ONLY ci_running_builds + ADD CONSTRAINT fk_rails_5ca491d360 FOREIGN KEY (runner_id) REFERENCES ci_runners(id) ON DELETE CASCADE; + ALTER TABLE ONLY epic_issues ADD CONSTRAINT fk_rails_5d942936b4 FOREIGN KEY (epic_id) REFERENCES epics(id) ON DELETE CASCADE; @@ -27413,6 +27446,9 @@ ALTER TABLE ONLY geo_hashed_storage_attachments_events ALTER TABLE ONLY merge_request_reviewers ADD CONSTRAINT fk_rails_d9fec24b9d FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE; +ALTER TABLE ONLY ci_running_builds + ADD CONSTRAINT fk_rails_da45cfa165 FOREIGN KEY (build_id) REFERENCES ci_builds(id) ON DELETE CASCADE; + ALTER TABLE ONLY jira_imports ADD CONSTRAINT fk_rails_da617096ce FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL; @@ -27425,6 +27461,9 @@ ALTER TABLE ONLY issues_prometheus_alert_events ALTER TABLE ONLY board_user_preferences ADD CONSTRAINT fk_rails_dbebdaa8fe FOREIGN KEY (board_id) REFERENCES boards(id) ON DELETE CASCADE; +ALTER TABLE ONLY ci_running_builds + ADD CONSTRAINT fk_rails_dc1d0801e8 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE; + ALTER TABLE ONLY vulnerability_occurrence_pipelines ADD CONSTRAINT fk_rails_dc3ae04693 FOREIGN KEY (occurrence_id) REFERENCES vulnerability_occurrences(id) ON DELETE CASCADE; 
diff --git a/doc/api/projects.md b/doc/api/projects.md index 9f50c638386..edfdc93d5fc 100644 --- a/doc/api/projects.md +++ b/doc/api/projects.md @@ -1171,7 +1171,7 @@ POST /projects | `ci_config_path` | string | **{dotted-circle}** No | The path to CI configuration file. | | `container_expiration_policy_attributes` | hash | **{dotted-circle}** No | Update the image cleanup policy for this project. Accepts: `cadence` (string), `keep_n` (integer), `older_than` (string), `name_regex` (string), `name_regex_delete` (string), `name_regex_keep` (string), `enabled` (boolean). Valid values for `cadence` are: `1d` (every day), `7d` (every week), `14d` (every two weeks), `1month` (every month), or `3month` (every quarter). | | `container_registry_enabled` | boolean | **{dotted-circle}** No | Enable container registry for this project. | -| `default_branch` | string | **{dotted-circle}** No | The [default branch](../user/project/repository/branches/default.md) name. | +| `default_branch` | string | **{dotted-circle}** No | The [default branch](../user/project/repository/branches/default.md) name. Requires `initialize_with_readme` to be `true`. | | `description` | string | **{dotted-circle}** No | Short project description. | | `emails_disabled` | boolean | **{dotted-circle}** No | Disable email notifications. | | `external_authorization_classification_label` **(PREMIUM)** | string | **{dotted-circle}** No | The classification label for the project. | @@ -1246,6 +1246,7 @@ POST /projects/user/:user_id | `ci_config_path` | string | **{dotted-circle}** No | The path to CI configuration file. | | `container_registry_enabled` | boolean | **{dotted-circle}** No | Enable container registry for this project. | | `description` | string | **{dotted-circle}** No | Short project description. | +| `default_branch` | string | **{dotted-circle}** No | The [default branch](../user/project/repository/branches/default.md) name. Requires `initialize_with_readme` to be `true`. | | `emails_disabled` | boolean | **{dotted-circle}** No | Disable email notifications. | | `external_authorization_classification_label` **(PREMIUM)** | string | **{dotted-circle}** No | The classification label for the project. | | `forking_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private`, or `enabled`. | diff --git a/doc/development/pipelines.md b/doc/development/pipelines.md index 367fa659150..437b2d04e1e 100644 --- a/doc/development/pipelines.md +++ b/doc/development/pipelines.md @@ -557,6 +557,7 @@ request, be sure to start the `dont-interrupt-me` job before pushing. 1. These cache definitions are composed of [multiple atomic caches](../ci/yaml/README.md#multiple-caches). 1. Only 6 specific jobs, running in 2-hourly scheduled pipelines, are pushing (i.e. updating) to the caches: - `update-setup-test-env-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml). + - `update-gitaly-binaries-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml). - `update-static-analysis-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml). - `update-qa-cache`, defined in [`.gitlab/ci/qa.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/qa.gitlab-ci.yml). 
- `update-assets-compile-production-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml). diff --git a/doc/topics/autodevops/customize.md b/doc/topics/autodevops/customize.md index e31f0a30ea4..42c54961c1d 100644 --- a/doc/topics/autodevops/customize.md +++ b/doc/topics/autodevops/customize.md @@ -16,15 +16,25 @@ staging and canary deployments, ## Custom buildpacks -If the automatic buildpack detection fails for your project, or if you want to -use a custom buildpack, you can override the buildpack using a project CI/CD variable -or a `.buildpacks` file in your project: +If the automatic buildpack detection fails for your project, or if you +need more control over your build, you can customize the buildpacks +used for the build. -- **Project variable** - Create a project variable `BUILDPACK_URL` with the URL - of the buildpack to use. -- **`.buildpacks` file** - Add a file in your project's repository called `.buildpacks`, - and add the URL of the buildpack to use on a line in the file. If you want to - use multiple buildpacks, enter one buildpack per line. +### Custom buildpacks with Cloud Native Buildpacks + +> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/28165) in GitLab 12.10. + +Specify either: + +- The CI/CD variable `BUILDPACK_URL` according to [`pack`'s specifications](https://buildpacks.io/docs/app-developer-guide/specific-buildpacks/). +- A [`project.toml` project descriptor](https://buildpacks.io/docs/app-developer-guide/using-project-descriptor/) with the buildpacks you would like to include. + +### Custom buildpacks with Herokuish + +Specify either: + +- The CI/CD variable `BUILDPACK_URL`. +- A `.buildpacks` file at the root of your project, containing one buildpack URL per line. The buildpack URL can point to either a Git repository URL or a tarball URL. For Git repositories, you can point to a specific Git reference (such as @@ -347,8 +357,8 @@ applications. | `ADDITIONAL_HOSTS` | Fully qualified domain names specified as a comma-separated list that are added to the Ingress hosts. | | `_ADDITIONAL_HOSTS` | For a specific environment, the fully qualified domain names specified as a comma-separated list that are added to the Ingress hosts. This takes precedence over `ADDITIONAL_HOSTS`. | | `AUTO_DEVOPS_ATOMIC_RELEASE` | As of GitLab 13.0, Auto DevOps uses [`--atomic`](https://v2.helm.sh/docs/helm/#options-43) for Helm deployments by default. Set this variable to `false` to disable the use of `--atomic` | -| `AUTO_DEVOPS_BUILD_IMAGE_CNB_ENABLED` | When set to a non-empty value and no `Dockerfile` is present, Auto Build builds your application using Cloud Native Buildpacks instead of Herokuish. [More details](stages.md#auto-build-using-cloud-native-buildpacks-beta). | -| `AUTO_DEVOPS_BUILD_IMAGE_CNB_BUILDER` | The builder used when building with Cloud Native Buildpacks. The default builder is `heroku/buildpacks:18`. [More details](stages.md#auto-build-using-cloud-native-buildpacks-beta). | +| `AUTO_DEVOPS_BUILD_IMAGE_CNB_ENABLED` | Set to `false` to use Herokuish instead of Cloud Native Buildpacks with Auto Build. [More details](stages.md#auto-build-using-cloud-native-buildpacks). | +| `AUTO_DEVOPS_BUILD_IMAGE_CNB_BUILDER` | The builder used when building with Cloud Native Buildpacks. The default builder is `heroku/buildpacks:18`. [More details](stages.md#auto-build-using-cloud-native-buildpacks). 
| | `AUTO_DEVOPS_BUILD_IMAGE_EXTRA_ARGS` | Extra arguments to be passed to the `docker build` command. Note that using quotes doesn't prevent word splitting. [More details](#passing-arguments-to-docker-build). | | `AUTO_DEVOPS_BUILD_IMAGE_FORWARDED_CI_VARIABLES` | A [comma-separated list of CI/CD variable names](#forward-cicd-variables-to-the-build-environment) to be forwarded to the build environment (the buildpack builder or `docker build`). | | `AUTO_DEVOPS_CHART` | Helm Chart used to deploy your apps. Defaults to the one [provided by GitLab](https://gitlab.com/gitlab-org/cluster-integration/auto-deploy-image/-/tree/master/assets/auto-deploy-app). | @@ -358,7 +368,7 @@ applications. | `AUTO_DEVOPS_CHART_REPOSITORY_PASSWORD` | From GitLab 11.11, used to set a password to connect to the Helm repository. Defaults to no credentials. Also set `AUTO_DEVOPS_CHART_REPOSITORY_USERNAME`. | | `AUTO_DEVOPS_DEPLOY_DEBUG` | From GitLab 13.1, if this variable is present, Helm outputs debug logs. | | `AUTO_DEVOPS_ALLOW_TO_FORCE_DEPLOY_V` | From [auto-deploy-image](https://gitlab.com/gitlab-org/cluster-integration/auto-deploy-image) v1.0.0, if this variable is present, a new major version of chart is forcibly deployed. For more information, see [Ignore warnings and continue deploying](upgrading_auto_deploy_dependencies.md#ignore-warnings-and-continue-deploying). | -| `BUILDPACK_URL` | Buildpack's full URL. Can point to either [a Git repository URL or a tarball URL](#custom-buildpacks). | +| `BUILDPACK_URL` | Buildpack's full URL. [Must point to a URL supported by Pack or Herokuish](#custom-buildpacks). | | `CANARY_ENABLED` | From GitLab 11.0, used to define a [deploy policy for canary environments](#deploy-policy-for-canary-environments). | | `CANARY_PRODUCTION_REPLICAS` | Number of canary replicas to deploy for [Canary Deployments](../../user/project/canary_deployments.md) in the production environment. Takes precedence over `CANARY_REPLICAS`. Defaults to 1. | | `CANARY_REPLICAS` | Number of canary replicas to deploy for [Canary Deployments](../../user/project/canary_deployments.md). Defaults to 1. | diff --git a/doc/topics/autodevops/stages.md b/doc/topics/autodevops/stages.md index d003af81003..d3f217d3749 100644 --- a/doc/topics/autodevops/stages.md +++ b/doc/topics/autodevops/stages.md @@ -33,15 +33,24 @@ your own `Dockerfile`, you must either: - Override the default values by [customizing the Auto Deploy Helm chart](customize.md#custom-helm-chart). -### Auto Build using Heroku buildpacks +### Auto Build using Cloud Native Buildpacks + +> - Introduced in [GitLab 12.10](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/28165). +> - Auto Build using Cloud Native Buildpacks by default was [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63351) in GitLab 14.0. Auto Build builds an application using a project's `Dockerfile` if present. If no -`Dockerfile` is present, it uses [Herokuish](https://github.com/gliderlabs/herokuish) -and [Heroku buildpacks](https://devcenter.heroku.com/articles/buildpacks) -to detect and build the application into a Docker image. +`Dockerfile` is present, Auto Build builds your application using +[Cloud Native Buildpacks](https://buildpacks.io) to detect and build the +application into a Docker image. The feature uses the +[`pack` command](https://github.com/buildpacks/pack). 
+The default [builder](https://buildpacks.io/docs/concepts/components/builder/) +is `heroku/buildpacks:18` but a different builder can be selected using +the CI/CD variable `AUTO_DEVOPS_BUILD_IMAGE_CNB_BUILDER`. Each buildpack requires your project's repository to contain certain files for -Auto Build to build your application successfully. For example, your application's +Auto Build to build your application successfully. The structure is +specific to the builder and buildpacks you have selected. +For example, when using the Heroku's builder (the default), your application's root directory must contain the appropriate file for your application's language: @@ -51,40 +60,39 @@ language: For the requirements of other languages and frameworks, read the [Heroku buildpacks documentation](https://devcenter.heroku.com/articles/buildpacks#officially-supported-buildpacks). +NOTE: +Auto Test still uses Herokuish, as test suite detection is not +yet part of the Cloud Native Buildpack specification. For more information, see +[this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/212689). + +### Auto Build using Herokuish + +> [Replaced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63351) with Cloud Native Buildpacks in GitLab 14.0. + +Prior to GitLab 14.0, [Herokuish](https://github.com/gliderlabs/herokuish) was +the default build method for projects without a `Dockerfile`. Herokuish can +still be used by setting the CI/CD variable `AUTO_DEVOPS_BUILD_IMAGE_CNB_ENABLED` +to `false`. + NOTE: If Auto Build fails despite the project meeting the buildpack requirements, set a project CI/CD variable `TRACE=true` to enable verbose logging, which may help you troubleshoot. -### Auto Build using Cloud Native Buildpacks (beta) - -> Introduced in [GitLab 12.10](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/28165). - -Auto Build supports building your application using [Cloud Native Buildpacks](https://buildpacks.io) -through the [`pack` command](https://github.com/buildpacks/pack). To use Cloud Native Buildpacks, -set the CI/CD variable `AUTO_DEVOPS_BUILD_IMAGE_CNB_ENABLED` to a non-empty -value. The default builder is `heroku/buildpacks:18` but a different builder -can be selected using the CI/CD variable `AUTO_DEVOPS_BUILD_IMAGE_CNB_BUILDER`. - -Cloud Native Buildpacks (CNBs) are an evolution of Heroku buildpacks, and -GitLab expects them to eventually supersede Herokuish-based builds within Auto DevOps. For more -information, see [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/212692). +### Moving from Herokuish to Cloud Native Buildpacks Builds using Cloud Native Buildpacks support the same options as builds using -Heroku buildpacks, with the following caveats: +Herokuish, with the following caveats: - The buildpack must be a Cloud Native Buildpack. A Heroku buildpack can be converted to a Cloud Native Buildpack using Heroku's [`cnb-shim`](https://github.com/heroku/cnb-shim). -- `BUILDPACK_URL` must be in a form +- `BUILDPACK_URL` must be in a format [supported by `pack`](https://buildpacks.io/docs/app-developer-guide/specific-buildpacks/). -- The `/bin/herokuish` command is not present in the resulting image, and prefixing +- The `/bin/herokuish` command is not present in the built image, and prefixing commands with `/bin/herokuish procfile exec` is no longer required (nor possible). - -NOTE: -Auto Test still uses Herokuish, as test suite detection is not -yet part of the Cloud Native Buildpack specification. 
For more information, see -[this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/212689). + Instead, custom commands should be prefixed with `/cnb/lifecycle/launcher` + to receive the correct execution environment. ## Auto Test @@ -461,15 +469,16 @@ If present, `DB_MIGRATE` is run as a shell command within an application pod as a Helm pre-upgrade hook. For example, in a Rails application in an image built with -[Herokuish](https://github.com/gliderlabs/herokuish): +[Cloud Native Buildpacks](#auto-build-using-cloud-native-buildpacks): -- `DB_INITIALIZE` can be set to `RAILS_ENV=production /bin/herokuish procfile exec bin/rails db:setup` -- `DB_MIGRATE` can be set to `RAILS_ENV=production /bin/herokuish procfile exec bin/rails db:migrate` +- `DB_INITIALIZE` can be set to `RAILS_ENV=production /cnb/lifecycle/launcher bin/rails db:setup` +- `DB_MIGRATE` can be set to `RAILS_ENV=production /cnb/lifecycle/launcher bin/rails db:migrate` Unless your repository contains a `Dockerfile`, your image is built with -Herokuish, and you must prefix commands run in these images with -`/bin/herokuish procfile exec` (for Herokuish) or `/cnb/lifecycle/launcher` -(for Cloud Native Buildpacks) to replicate the environment where your +Cloud Native Buildpacks, and you must prefix commands run in these images with +`/cnb/lifecycle/launcher`, (or `/bin/herokuish procfile exec` when +using [Herokuish](#auto-build-using-herokuish)) +to replicate the environment where your application runs. ### Upgrade auto-deploy-app Chart @@ -508,14 +517,10 @@ workers: sidekiq: replicaCount: 1 command: - - /bin/herokuish - - procfile - - exec + - /cnb/lifecycle/launcher - sidekiq preStopCommand: - - /bin/herokuish - - procfile - - exec + - /cnb/lifecycle/launcher - sidekiqctl - quiet terminationGracePeriodSeconds: 60 diff --git a/doc/user/admin_area/appearance.md b/doc/user/admin_area/appearance.md index 0d72f09dfd9..97bc1be7faf 100644 --- a/doc/user/admin_area/appearance.md +++ b/doc/user/admin_area/appearance.md @@ -9,7 +9,7 @@ disqus_identifier: 'https://docs.gitlab.com/ee/customization/branded_login_page. # GitLab Appearance **(FREE SELF)** There are several options for customizing the appearance of a self-managed instance -of GitLab. These settings are accessed from the **Admin Area** in the **Appearance** +of GitLab. These settings are accessed from the **Admin Area** in the **Settings > Appearance** section. 
## Navigation bar diff --git a/lib/api/entities/snippet.rb b/lib/api/entities/snippet.rb index f05e593a302..af885aaf0eb 100644 --- a/lib/api/entities/snippet.rb +++ b/lib/api/entities/snippet.rb @@ -5,16 +5,22 @@ module API class Snippet < BasicSnippet expose :author, using: Entities::UserBasic expose :file_name do |snippet| - snippet.file_name_on_repo || snippet.file_name + snippet_files.first || snippet.file_name end expose :files do |snippet, options| - snippet.list_files.map do |file| + snippet_files.map do |file| { path: file, raw_url: Gitlab::UrlBuilder.build(snippet, file: file, ref: snippet.repository.root_ref) } end end + + private + + def snippet_files + @snippet_files ||= object.list_files + end end end end diff --git a/lib/api/projects.rb b/lib/api/projects.rb index 9675f22f073..83c335a3248 100644 --- a/lib/api/projects.rb +++ b/lib/api/projects.rb @@ -234,6 +234,7 @@ module API params do optional :name, type: String, desc: 'The name of the project' optional :path, type: String, desc: 'The path of the repository' + optional :default_branch, type: String, desc: 'The default branch of the project' at_least_one_of :name, :path use :optional_create_project_params use :create_params diff --git a/lib/bulk_imports/clients/http.rb b/lib/bulk_imports/clients/http.rb index 8a7a6c5f4a6..c5f12d8c2ba 100644 --- a/lib/bulk_imports/clients/http.rb +++ b/lib/bulk_imports/clients/http.rb @@ -2,7 +2,7 @@ module BulkImports module Clients - class Http + class HTTP API_VERSION = 'v4' DEFAULT_PAGE = 1 DEFAULT_PER_PAGE = 30 diff --git a/lib/bulk_imports/common/extractors/rest_extractor.rb b/lib/bulk_imports/common/extractors/rest_extractor.rb index b18e27fd475..2179e0575c5 100644 --- a/lib/bulk_imports/common/extractors/rest_extractor.rb +++ b/lib/bulk_imports/common/extractors/rest_extractor.rb @@ -24,7 +24,7 @@ module BulkImports attr_reader :query def http_client(configuration) - @http_client ||= BulkImports::Clients::Http.new( + @http_client ||= BulkImports::Clients::HTTP.new( uri: configuration.url, token: configuration.access_token, per_page: 100 diff --git a/lib/bulk_imports/groups/extractors/subgroups_extractor.rb b/lib/bulk_imports/groups/extractors/subgroups_extractor.rb index e5e2b9fdbd4..db5882d49a9 100644 --- a/lib/bulk_imports/groups/extractors/subgroups_extractor.rb +++ b/lib/bulk_imports/groups/extractors/subgroups_extractor.rb @@ -17,7 +17,7 @@ module BulkImports private def http_client(configuration) - @http_client ||= BulkImports::Clients::Http.new( + @http_client ||= BulkImports::Clients::HTTP.new( uri: configuration.url, token: configuration.access_token, per_page: 100 diff --git a/lib/gitlab/ci/queue/metrics.rb b/lib/gitlab/ci/queue/metrics.rb index 7a52713d33f..859aeb35f26 100644 --- a/lib/gitlab/ci/queue/metrics.rb +++ b/lib/gitlab/ci/queue/metrics.rb @@ -33,7 +33,9 @@ module Gitlab :queue_replication_lag, :runner_pre_assign_checks_failed, :runner_pre_assign_checks_success, - :runner_queue_tick + :runner_queue_tick, + :shared_runner_build_new, + :shared_runner_build_done ].to_set.freeze QUEUE_DEPTH_HISTOGRAMS = [ diff --git a/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml index abcb347b146..cf99d722e4d 100644 --- a/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml @@ -1,10 +1,10 @@ build: stage: build - image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-build-image:v0.6.0" + image: 'registry.gitlab.com/gitlab-org/cluster-integration/auto-build-image:v1.0.0' 
variables: - DOCKER_TLS_CERTDIR: "" + DOCKER_TLS_CERTDIR: '' services: - - name: "docker:20.10.6-dind" + - name: 'docker:20.10.6-dind' command: ['--tls=false', '--host=tcp://0.0.0.0:2375'] script: - | diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 24abd92df62..c26ce0c5c2a 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -32967,6 +32967,9 @@ msgstr "" msgid "There was an error while fetching value stream analytics duration data." msgstr "" +msgid "There was an error while fetching value stream summary data." +msgstr "" + msgid "There was an error with the reCAPTCHA. Please solve the reCAPTCHA again." msgstr "" @@ -39385,6 +39388,9 @@ msgstr "" msgid "the correct format." msgstr "" +msgid "the file" +msgstr "" + msgid "the following issue(s)" msgstr "" diff --git a/qa/qa.rb b/qa/qa.rb index 2639455e910..aad40666065 100644 --- a/qa/qa.rb +++ b/qa/qa.rb @@ -338,7 +338,7 @@ module QA autoload :Jenkins, 'qa/page/project/settings/services/jenkins' autoload :Prometheus, 'qa/page/project/settings/services/prometheus' end - autoload :Operations, 'qa/page/project/settings/operations' + autoload :Monitor, 'qa/page/project/settings/monitor' autoload :Alerts, 'qa/page/project/settings/alerts' autoload :Integrations, 'qa/page/project/settings/integrations' end @@ -347,7 +347,9 @@ module QA autoload :CiCd, 'qa/page/project/sub_menus/ci_cd' autoload :Common, 'qa/page/project/sub_menus/common' autoload :Issues, 'qa/page/project/sub_menus/issues' - autoload :Operations, 'qa/page/project/sub_menus/operations' + autoload :Monitor, 'qa/page/project/sub_menus/monitor' + autoload :Deployments, 'qa/page/project/sub_menus/deployments' + autoload :Infrastructure, 'qa/page/project/sub_menus/infrastructure' autoload :Repository, 'qa/page/project/sub_menus/repository' autoload :Settings, 'qa/page/project/sub_menus/settings' autoload :Project, 'qa/page/project/sub_menus/project' @@ -370,25 +372,29 @@ module QA autoload :Index, 'qa/page/project/milestone/index' end - module Operations + module Deployments module Environments - autoload :Index, 'qa/page/project/operations/environments/index' - autoload :Show, 'qa/page/project/operations/environments/show' + autoload :Index, 'qa/page/project/deployments/environments/index' + autoload :Show, 'qa/page/project/deployments/environments/show' end + end + module Infrastructure module Kubernetes - autoload :Index, 'qa/page/project/operations/kubernetes/index' - autoload :Add, 'qa/page/project/operations/kubernetes/add' - autoload :AddExisting, 'qa/page/project/operations/kubernetes/add_existing' - autoload :Show, 'qa/page/project/operations/kubernetes/show' + autoload :Index, 'qa/page/project/infrastructure/kubernetes/index' + autoload :Add, 'qa/page/project/infrastructure/kubernetes/add' + autoload :AddExisting, 'qa/page/project/infrastructure/kubernetes/add_existing' + autoload :Show, 'qa/page/project/infrastructure/kubernetes/show' end + end + module Monitor module Metrics - autoload :Show, 'qa/page/project/operations/metrics/show' + autoload :Show, 'qa/page/project/monitor/metrics/show' end module Incidents - autoload :Index, 'qa/page/project/operations/incidents/index' + autoload :Index, 'qa/page/project/monitor/incidents/index' end end diff --git a/qa/qa/page/group/menu.rb b/qa/qa/page/group/menu.rb index 66e8b5d5863..9a3b129b6d2 100644 --- a/qa/qa/page/group/menu.rb +++ b/qa/qa/page/group/menu.rb @@ -12,6 +12,8 @@ module QA element :group_members_item element :group_milestones_link element :group_settings + element :group_information_link + element 
:group_information_submenu end view 'app/views/groups/sidebar/_packages_settings.html.haml' do @@ -24,8 +26,10 @@ module QA end def click_group_members_item - within_sidebar do - click_element(:group_members_item) + hover_element(:group_information_link) do + within_submenu(:group_information_submenu) do + click_element(:group_members_item) + end end end diff --git a/qa/qa/page/project/operations/environments/index.rb b/qa/qa/page/project/deployments/environments/index.rb similarity index 95% rename from qa/qa/page/project/operations/environments/index.rb rename to qa/qa/page/project/deployments/environments/index.rb index 6b46fa4985a..598e1f26815 100644 --- a/qa/qa/page/project/operations/environments/index.rb +++ b/qa/qa/page/project/deployments/environments/index.rb @@ -3,7 +3,7 @@ module QA module Page module Project - module Operations + module Deployments module Environments class Index < Page::Base view 'app/assets/javascripts/environments/components/environment_item.vue' do diff --git a/qa/qa/page/project/operations/environments/show.rb b/qa/qa/page/project/deployments/environments/show.rb similarity index 95% rename from qa/qa/page/project/operations/environments/show.rb rename to qa/qa/page/project/deployments/environments/show.rb index aa88c218c89..48e4850d3be 100644 --- a/qa/qa/page/project/operations/environments/show.rb +++ b/qa/qa/page/project/deployments/environments/show.rb @@ -3,7 +3,7 @@ module QA module Page module Project - module Operations + module Deployments module Environments class Show < Page::Base view 'app/views/projects/environments/_external_url.html.haml' do diff --git a/qa/qa/page/project/operations/kubernetes/add.rb b/qa/qa/page/project/infrastructure/kubernetes/add.rb similarity index 93% rename from qa/qa/page/project/operations/kubernetes/add.rb rename to qa/qa/page/project/infrastructure/kubernetes/add.rb index 9a6ea99ac18..e2d50c1bcf1 100644 --- a/qa/qa/page/project/operations/kubernetes/add.rb +++ b/qa/qa/page/project/infrastructure/kubernetes/add.rb @@ -3,7 +3,7 @@ module QA module Page module Project - module Operations + module Infrastructure module Kubernetes class Add < Page::Base view 'app/views/clusters/clusters/new.html.haml' do diff --git a/qa/qa/page/project/operations/kubernetes/add_existing.rb b/qa/qa/page/project/infrastructure/kubernetes/add_existing.rb similarity index 95% rename from qa/qa/page/project/operations/kubernetes/add_existing.rb rename to qa/qa/page/project/infrastructure/kubernetes/add_existing.rb index 59f59ca9966..689c2a31c4f 100644 --- a/qa/qa/page/project/operations/kubernetes/add_existing.rb +++ b/qa/qa/page/project/infrastructure/kubernetes/add_existing.rb @@ -3,7 +3,7 @@ module QA module Page module Project - module Operations + module Infrastructure module Kubernetes class AddExisting < Page::Base view 'app/views/clusters/clusters/user/_form.html.haml' do @@ -32,7 +32,7 @@ module QA end def add_cluster! - click_element :add_kubernetes_cluster_button, Page::Project::Operations::Kubernetes::Show + click_element :add_kubernetes_cluster_button, Page::Project::Infrastructure::Kubernetes::Show end def uncheck_rbac! 
diff --git a/qa/qa/page/project/operations/kubernetes/index.rb b/qa/qa/page/project/infrastructure/kubernetes/index.rb similarity index 95% rename from qa/qa/page/project/operations/kubernetes/index.rb rename to qa/qa/page/project/infrastructure/kubernetes/index.rb index ca41dddaca2..bdcaf7ffaff 100644 --- a/qa/qa/page/project/operations/kubernetes/index.rb +++ b/qa/qa/page/project/infrastructure/kubernetes/index.rb @@ -3,7 +3,7 @@ module QA module Page module Project - module Operations + module Infrastructure module Kubernetes class Index < Page::Base view 'app/views/clusters/clusters/_empty_state.html.haml' do diff --git a/qa/qa/page/project/operations/kubernetes/show.rb b/qa/qa/page/project/infrastructure/kubernetes/show.rb similarity index 98% rename from qa/qa/page/project/operations/kubernetes/show.rb rename to qa/qa/page/project/infrastructure/kubernetes/show.rb index 3bb51d2d579..e3b4f8fe4d9 100644 --- a/qa/qa/page/project/operations/kubernetes/show.rb +++ b/qa/qa/page/project/infrastructure/kubernetes/show.rb @@ -3,7 +3,7 @@ module QA module Page module Project - module Operations + module Infrastructure module Kubernetes class Show < Page::Base view 'app/assets/javascripts/clusters/components/applications.vue' do @@ -77,7 +77,7 @@ module QA end def save_domain - click_element :save_changes_button, Page::Project::Operations::Kubernetes::Show + click_element :save_changes_button, Page::Project::Infrastructure::Kubernetes::Show end def wait_for_cluster_health diff --git a/qa/qa/page/project/menu.rb b/qa/qa/page/project/menu.rb index ffabaf30374..db70d3e1d02 100644 --- a/qa/qa/page/project/menu.rb +++ b/qa/qa/page/project/menu.rb @@ -8,7 +8,9 @@ module QA include SubMenus::Project include SubMenus::CiCd include SubMenus::Issues - include SubMenus::Operations + include SubMenus::Deployments + include SubMenus::Monitor + include SubMenus::Infrastructure include SubMenus::Repository include SubMenus::Settings include SubMenus::Packages @@ -26,8 +28,10 @@ module QA end def click_activity - within_sidebar do - click_element(:sidebar_menu_item_link, menu_item: 'Activity') + hover_project_information do + within_submenu do + click_element(:sidebar_menu_item_link, menu_item: 'Activity') + end end end @@ -38,8 +42,21 @@ module QA end def click_members + hover_project_information do + within_submenu do + click_element(:sidebar_menu_item_link, menu_item: 'Members') + end + end + end + + private + + def hover_project_information within_sidebar do - click_element(:sidebar_menu_link, menu_item: 'Members') + scroll_to_element(:sidebar_menu_link, menu_item: 'Project information') + find_element(:sidebar_menu_link, menu_item: 'Project information').hover + + yield end end end diff --git a/qa/qa/page/project/operations/incidents/index.rb b/qa/qa/page/project/monitor/incidents/index.rb similarity index 94% rename from qa/qa/page/project/operations/incidents/index.rb rename to qa/qa/page/project/monitor/incidents/index.rb index fd0c5253a7f..9317cb27562 100644 --- a/qa/qa/page/project/operations/incidents/index.rb +++ b/qa/qa/page/project/monitor/incidents/index.rb @@ -3,7 +3,7 @@ module QA module Page module Project - module Operations + module Monitor module Incidents class Index < Page::Base view 'app/assets/javascripts/incidents/components/incidents_list.vue' do diff --git a/qa/qa/page/project/operations/metrics/show.rb b/qa/qa/page/project/monitor/metrics/show.rb similarity index 96% rename from qa/qa/page/project/operations/metrics/show.rb rename to qa/qa/page/project/monitor/metrics/show.rb index 
22a7f1eed8f..07ceb108fa3 100644 --- a/qa/qa/page/project/operations/metrics/show.rb +++ b/qa/qa/page/project/monitor/metrics/show.rb @@ -5,7 +5,7 @@ require 'securerandom' module QA module Page module Project - module Operations + module Monitor module Metrics class Show < Page::Base EXPECTED_TITLE = 'Memory Usage (Total)' @@ -134,4 +134,4 @@ module QA end end -QA::Page::Project::Operations::Metrics::Show.prepend_mod_with('Page::Project::Operations::Metrics::Show', namespace: QA) +QA::Page::Project::Monitor::Metrics::Show.prepend_mod_with('Page::Project::Monitor::Metrics::Show', namespace: QA) diff --git a/qa/qa/page/project/settings/operations.rb b/qa/qa/page/project/settings/monitor.rb similarity index 93% rename from qa/qa/page/project/settings/operations.rb rename to qa/qa/page/project/settings/monitor.rb index ff9c47b20ca..87fb0698897 100644 --- a/qa/qa/page/project/settings/operations.rb +++ b/qa/qa/page/project/settings/monitor.rb @@ -4,7 +4,7 @@ module QA module Page module Project module Settings - class Operations < Page::Base + class Monitor < Page::Base include QA::Page::Settings::Common view 'app/assets/javascripts/incidents_settings/components/incidents_settings_tabs.vue' do diff --git a/qa/qa/page/project/sub_menus/deployments.rb b/qa/qa/page/project/sub_menus/deployments.rb new file mode 100644 index 00000000000..24243cb2436 --- /dev/null +++ b/qa/qa/page/project/sub_menus/deployments.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +module QA + module Page + module Project + module SubMenus + module Deployments + extend QA::Page::PageConcern + + def self.included(base) + super + + base.class_eval do + include QA::Page::Project::SubMenus::Common + end + end + + def go_to_deployments_environments + hover_deployments do + within_submenu do + click_element(:sidebar_menu_item_link, menu_item: 'Environments') + end + end + end + + private + + def hover_deployments + within_sidebar do + scroll_to_element(:sidebar_menu_link, menu_item: 'Deployments') + find_element(:sidebar_menu_link, menu_item: 'Deployments').hover + + yield + end + end + end + end + end + end +end diff --git a/qa/qa/page/project/sub_menus/infrastructure.rb b/qa/qa/page/project/sub_menus/infrastructure.rb new file mode 100644 index 00000000000..2c207022c8d --- /dev/null +++ b/qa/qa/page/project/sub_menus/infrastructure.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +module QA + module Page + module Project + module SubMenus + module Infrastructure + extend QA::Page::PageConcern + + def self.included(base) + super + + base.class_eval do + include QA::Page::Project::SubMenus::Common + end + end + + def go_to_infrastructure_kubernetes + hover_infrastructure do + within_submenu do + click_link('Kubernetes clusters') + end + end + end + + private + + def hover_infrastructure + within_sidebar do + scroll_to_element(:sidebar_menu_link, menu_item: 'Infrastructure') + find_element(:sidebar_menu_link, menu_item: 'Infrastructure').hover + + yield + end + end + end + end + end + end +end diff --git a/qa/qa/page/project/sub_menus/operations.rb b/qa/qa/page/project/sub_menus/monitor.rb similarity index 53% rename from qa/qa/page/project/sub_menus/operations.rb rename to qa/qa/page/project/sub_menus/monitor.rb index 077da697a63..e3593e0a257 100644 --- a/qa/qa/page/project/sub_menus/operations.rb +++ b/qa/qa/page/project/sub_menus/monitor.rb @@ -4,7 +4,7 @@ module QA module Page module Project module SubMenus - module Operations + module Monitor extend QA::Page::PageConcern def self.included(base) @@ -15,32 +15,16 @@ 
module QA end end - def go_to_operations_environments - hover_operations do - within_submenu do - click_element(:sidebar_menu_item_link, menu_item: 'Environments') - end - end - end - - def go_to_operations_metrics - hover_operations do + def go_to_monitor_metrics + hover_monitor do within_submenu do click_element(:sidebar_menu_item_link, menu_item: 'Metrics') end end end - def go_to_operations_kubernetes - hover_operations do - within_submenu do - click_link('Kubernetes') - end - end - end - - def go_to_operations_incidents - hover_operations do + def go_to_monitor_incidents + hover_monitor do within_submenu do click_element(:sidebar_menu_item_link, menu_item: 'Incidents') end @@ -49,10 +33,10 @@ module QA private - def hover_operations + def hover_monitor within_sidebar do - scroll_to_element(:sidebar_menu_link, menu_item: 'Operations') - find_element(:sidebar_menu_link, menu_item: 'Operations').hover + scroll_to_element(:sidebar_menu_link, menu_item: 'Monitor') + find_element(:sidebar_menu_link, menu_item: 'Monitor').hover yield end diff --git a/qa/qa/page/project/sub_menus/project.rb b/qa/qa/page/project/sub_menus/project.rb index 5499a0f71e3..cd08715bcd8 100644 --- a/qa/qa/page/project/sub_menus/project.rb +++ b/qa/qa/page/project/sub_menus/project.rb @@ -12,13 +12,17 @@ module QA base.class_eval do include QA::Page::Project::SubMenus::Common + + view 'app/views/shared/nav/_scope_menu_body.html.haml' do + element :project_scope_link + end end end def click_project retry_on_exception do within_sidebar do - click_element(:sidebar_menu_link, menu_item: 'Project overview') + click_element(:project_scope_link) end end end diff --git a/qa/qa/page/project/sub_menus/settings.rb b/qa/qa/page/project/sub_menus/settings.rb index 80f62c8efde..f35d27e658d 100644 --- a/qa/qa/page/project/sub_menus/settings.rb +++ b/qa/qa/page/project/sub_menus/settings.rb @@ -53,10 +53,10 @@ module QA end end - def go_to_operations_settings + def go_to_monitor_settings hover_settings do within_submenu do - click_element(:sidebar_menu_item_link, menu_item: 'Operations') + click_element(:sidebar_menu_item_link, menu_item: 'Monitor') end end end diff --git a/qa/qa/resource/kubernetes_cluster/project_cluster.rb b/qa/qa/resource/kubernetes_cluster/project_cluster.rb index 78a24cdb677..b66a75d978b 100644 --- a/qa/qa/resource/kubernetes_cluster/project_cluster.rb +++ b/qa/qa/resource/kubernetes_cluster/project_cluster.rb @@ -12,22 +12,22 @@ module QA end attribute :ingress_ip do - Page::Project::Operations::Kubernetes::Show.perform(&:ingress_ip) + Page::Project::Infrastructure::Kubernetes::Show.perform(&:ingress_ip) end def fabricate! project.visit! Page::Project::Menu.perform( - &:go_to_operations_kubernetes) + &:go_to_infrastructure_kubernetes) - Page::Project::Operations::Kubernetes::Index.perform( + Page::Project::Infrastructure::Kubernetes::Index.perform( &:add_kubernetes_cluster) - Page::Project::Operations::Kubernetes::Add.perform( + Page::Project::Infrastructure::Kubernetes::Add.perform( &:add_existing_cluster) - Page::Project::Operations::Kubernetes::AddExisting.perform do |cluster_page| + Page::Project::Infrastructure::Kubernetes::AddExisting.perform do |cluster_page| cluster_page.set_cluster_name(@cluster.cluster_name) cluster_page.set_api_url(@cluster.api_url) cluster_page.set_ca_certificate(@cluster.ca_certificate) @@ -36,7 +36,7 @@ module QA cluster_page.add_cluster! 
end - Page::Project::Operations::Kubernetes::Show.perform do |show| + Page::Project::Infrastructure::Kubernetes::Show.perform do |show| # We must wait a few seconds for permissions to be set up correctly for new cluster sleep 25 diff --git a/qa/qa/specs/features/browser_ui/4_verify/pipeline/create_and_process_pipeline_spec.rb b/qa/qa/specs/features/browser_ui/4_verify/pipeline/create_and_process_pipeline_spec.rb index cf9f7f0db12..7a81318f158 100644 --- a/qa/qa/specs/features/browser_ui/4_verify/pipeline/create_and_process_pipeline_spec.rb +++ b/qa/qa/specs/features/browser_ui/4_verify/pipeline/create_and_process_pipeline_spec.rb @@ -1,12 +1,10 @@ # frozen_string_literal: true module QA - # TODO: Remove `:requires_admin` meta when the feature flag is removed - RSpec.describe 'Verify', :runner, :requires_admin do + RSpec.describe 'Verify', :runner do describe 'Pipeline creation and processing' do let(:executor) { "qa-runner-#{Time.now.to_i}" } let(:max_wait) { 30 } - let(:feature_flag) { :ci_drop_new_builds_when_ci_quota_exceeded } let(:project) do Resource::Project.fabricate_via_api! do |project| @@ -28,8 +26,6 @@ module QA it 'users creates a pipeline which gets processed', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/1849' do # TODO: Convert back to :smoke once proved to be stable. Related issue: https://gitlab.com/gitlab-org/gitlab/-/issues/300909 - tags_mismatch_status = Runtime::Feature.enabled?(feature_flag, project: project) ? :failed : :pending - Flow::Login.sign_in Resource::Repository::Commit.fabricate_via_api! do |commit| @@ -75,7 +71,7 @@ module QA { 'test-success': :passed, 'test-failure': :failed, - 'test-tags-mismatch': tags_mismatch_status, + 'test-tags-mismatch': :pending, 'test-artifacts': :passed }.each do |job, status| Page::Project::Pipeline::Show.perform do |pipeline| diff --git a/qa/qa/specs/features/browser_ui/7_configure/auto_devops/create_project_with_auto_devops_spec.rb b/qa/qa/specs/features/browser_ui/7_configure/auto_devops/create_project_with_auto_devops_spec.rb index 44888408f4e..1dcc02095f6 100644 --- a/qa/qa/specs/features/browser_ui/7_configure/auto_devops/create_project_with_auto_devops_spec.rb +++ b/qa/qa/specs/features/browser_ui/7_configure/auto_devops/create_project_with_auto_devops_spec.rb @@ -81,11 +81,11 @@ module QA job.click_element(:pipeline_path) end - Page::Project::Menu.perform(&:go_to_operations_environments) - Page::Project::Operations::Environments::Index.perform do |index| + Page::Project::Menu.perform(&:go_to_deployments_environments) + Page::Project::Deployments::Environments::Index.perform do |index| index.click_environment_link('production') end - Page::Project::Operations::Environments::Show.perform do |show| + Page::Project::Deployments::Environments::Show.perform do |show| show.view_deployment do expect(page).to have_content('Hello World!') expect(page).to have_content('you_can_see_this_variable') diff --git a/qa/qa/specs/features/browser_ui/7_configure/kubernetes/kubernetes_integration_spec.rb b/qa/qa/specs/features/browser_ui/7_configure/kubernetes/kubernetes_integration_spec.rb index 09fbbf5adc5..1803b4b16de 100644 --- a/qa/qa/specs/features/browser_ui/7_configure/kubernetes/kubernetes_integration_spec.rb +++ b/qa/qa/specs/features/browser_ui/7_configure/kubernetes/kubernetes_integration_spec.rb @@ -26,9 +26,9 @@ module QA k8s_cluster.cluster = cluster end.project.visit! 
- Page::Project::Menu.perform(&:go_to_operations_kubernetes) + Page::Project::Menu.perform(&:go_to_infrastructure_kubernetes) - Page::Project::Operations::Kubernetes::Index.perform do |index| + Page::Project::Infrastructure::Kubernetes::Index.perform do |index| expect(index).to have_cluster(cluster) end end diff --git a/qa/qa/specs/features/browser_ui/8_monitor/all_monitor_core_features_spec.rb b/qa/qa/specs/features/browser_ui/8_monitor/all_monitor_core_features_spec.rb index 94df408f9aa..1511f74c883 100644 --- a/qa/qa/specs/features/browser_ui/8_monitor/all_monitor_core_features_spec.rb +++ b/qa/qa/specs/features/browser_ui/8_monitor/all_monitor_core_features_spec.rb @@ -17,9 +17,9 @@ module QA end it 'duplicates to create dashboard to custom', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/871' do - Page::Project::Menu.perform(&:go_to_operations_metrics) + Page::Project::Menu.perform(&:go_to_monitor_metrics) - Page::Project::Operations::Metrics::Show.perform do |on_dashboard| + Page::Project::Monitor::Metrics::Show.perform do |on_dashboard| on_dashboard.duplicate_dashboard expect(on_dashboard).to have_metrics @@ -28,9 +28,9 @@ module QA end it 'verifies data on filtered deployed environment', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/874' do - Page::Project::Menu.perform(&:go_to_operations_metrics) + Page::Project::Menu.perform(&:go_to_monitor_metrics) - Page::Project::Operations::Metrics::Show.perform do |on_dashboard| + Page::Project::Monitor::Metrics::Show.perform do |on_dashboard| on_dashboard.filter_environment expect(on_dashboard).to have_metrics @@ -38,9 +38,9 @@ module QA end it 'filters using the quick range', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/873' do - Page::Project::Menu.perform(&:go_to_operations_metrics) + Page::Project::Menu.perform(&:go_to_monitor_metrics) - Page::Project::Operations::Metrics::Show.perform do |on_dashboard| + Page::Project::Monitor::Metrics::Show.perform do |on_dashboard| on_dashboard.show_last('30 minutes') expect(on_dashboard).to have_metrics @@ -53,13 +53,13 @@ module QA end it 'observes cluster health graph', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/920' do - Page::Project::Menu.perform(&:go_to_operations_kubernetes) + Page::Project::Menu.perform(&:go_to_infrastructure_kubernetes) - Page::Project::Operations::Kubernetes::Index.perform do |cluster_list| + Page::Project::Infrastructure::Kubernetes::Index.perform do |cluster_list| cluster_list.click_on_cluster(@cluster) end - Page::Project::Operations::Kubernetes::Show.perform do |cluster_panel| + Page::Project::Infrastructure::Kubernetes::Show.perform do |cluster_panel| cluster_panel.open_health cluster_panel.wait_for_cluster_health end @@ -78,9 +78,9 @@ module QA push.new_branch = false end - Page::Project::Menu.perform(&:go_to_operations_metrics) + Page::Project::Menu.perform(&:go_to_monitor_metrics) - Page::Project::Operations::Metrics::Show.perform do |dashboard| + Page::Project::Monitor::Metrics::Show.perform do |dashboard| dashboard.select_dashboard('templating.yml') expect(dashboard).to have_template_metric('CPU usage GitLab Runner') @@ -101,9 +101,9 @@ module QA metrics_panel.add_custom_metric end - Page::Project::Menu.perform(&:go_to_operations_metrics) + Page::Project::Menu.perform(&:go_to_monitor_metrics) - Page::Project::Operations::Metrics::Show.perform do |on_dashboard| + Page::Project::Monitor::Metrics::Show.perform do |on_dashboard| expect(on_dashboard).to have_custom_metric('HTTP 
Requests Total') end end @@ -116,9 +116,9 @@ module QA metrics_panel.edit_custom_metric end - Page::Project::Menu.perform(&:go_to_operations_metrics) + Page::Project::Menu.perform(&:go_to_monitor_metrics) - Page::Project::Operations::Metrics::Show.perform do |on_dashboard| + Page::Project::Monitor::Metrics::Show.perform do |on_dashboard| expect(on_dashboard).to have_custom_metric('Throughput') end end @@ -132,9 +132,9 @@ module QA metrics_panel.delete_custom_metric end - Page::Project::Menu.perform(&:go_to_operations_metrics) + Page::Project::Menu.perform(&:go_to_monitor_metrics) - Page::Project::Operations::Metrics::Show.perform do |on_dashboard| + Page::Project::Monitor::Metrics::Show.perform do |on_dashboard| expect(on_dashboard).not_to have_custom_metric('Throughput') end end diff --git a/scripts/debug-rubocop b/scripts/debug-rubocop deleted file mode 100755 index dd1f8df1d11..00000000000 --- a/scripts/debug-rubocop +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env ruby -# frozen_string_literal: true - -require 'rubocop' - -file = ARGV[0] - -unless file - warn('Error: missing file, please supply one') - abort -end - -# Taken from https://github.com/rubocop/rubocop/blob/v0.93.1/lib/rubocop/result_cache.rb#L159-L171 -def file_checksum(file, config_store) - digester = Digest::SHA1.new - mode = File.stat(file).mode - - puts "mode of #{file} is #{mode}" - puts "signature of #{file} is #{config_store.for_file(file).signature}" - puts "config is:" - puts config_store.for_file(file).to_h - - digester.update( - "#{file}#{mode}#{config_store.for_file(file).signature}" - ) - digester.file(file) - digester.hexdigest -rescue Errno::ENOENT - # Spurious files that come and go should not cause a crash, at least not - # here. - '_' -end - -config_store = RuboCop::ConfigStore.new -checksum = file_checksum(file, config_store) -puts "File checksum for #{file} is #{checksum}" diff --git a/scripts/gitaly-test-build b/scripts/gitaly-test-build index 4890e6912cd..26f84cd81d3 100755 --- a/scripts/gitaly-test-build +++ b/scripts/gitaly-test-build @@ -15,7 +15,12 @@ class GitalyTestBuild def run set_bundler_config - abort 'gitaly build failed' unless build_gitaly + # If we have the binaries from the cache, we can skip building them again + if File.exist?(tmp_tests_gitaly_bin_dir) + GitalySetup::LOGGER.debug "Gitaly binary already built. Skip building...\n" + else + abort 'gitaly build failed' unless build_gitaly + end ensure_gitlab_shell_secret! check_gitaly_config! 
diff --git a/spec/controllers/admin/appearances_controller_spec.rb b/spec/controllers/admin/application_settings/appearances_controller_spec.rb similarity index 97% rename from spec/controllers/admin/appearances_controller_spec.rb rename to spec/controllers/admin/application_settings/appearances_controller_spec.rb index ee6a4a4c7af..cc914f3c9b8 100644 --- a/spec/controllers/admin/appearances_controller_spec.rb +++ b/spec/controllers/admin/application_settings/appearances_controller_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Admin::AppearancesController do +RSpec.describe Admin::ApplicationSettings::AppearancesController do let(:admin) { create(:admin) } let(:header_message) { 'Header message' } let(:footer_message) { 'Footer' } diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb index 12aa1d89ecc..8f74d210667 100644 --- a/spec/controllers/import/bulk_imports_controller_spec.rb +++ b/spec/controllers/import/bulk_imports_controller_spec.rb @@ -51,7 +51,7 @@ RSpec.describe Import::BulkImportsController do end describe 'GET status' do - let(:client) { BulkImports::Clients::Http.new(uri: 'http://gitlab.example', token: 'token') } + let(:client) { BulkImports::Clients::HTTP.new(uri: 'http://gitlab.example', token: 'token') } describe 'serialized group data' do let(:client_response) do @@ -149,7 +149,7 @@ RSpec.describe Import::BulkImportsController do context 'when connection error occurs' do before do allow(controller).to receive(:client).and_return(client) - allow(client).to receive(:get).and_raise(BulkImports::Clients::Http::ConnectionError) + allow(client).to receive(:get).and_raise(BulkImports::Clients::HTTP::ConnectionError) end it 'returns 422' do diff --git a/spec/features/admin/admin_appearance_spec.rb b/spec/features/admin/admin_appearance_spec.rb index 603e757096f..5596ad7bf21 100644 --- a/spec/features/admin/admin_appearance_spec.rb +++ b/spec/features/admin/admin_appearance_spec.rb @@ -9,7 +9,7 @@ RSpec.describe 'Admin Appearance' do it 'create new appearance' do sign_in(admin) gitlab_enable_admin_mode_sign_in(admin) - visit admin_appearances_path + visit admin_application_settings_appearances_path fill_in 'appearance_title', with: 'MyCompany' fill_in 'appearance_description', with: 'dev server' @@ -17,7 +17,7 @@ RSpec.describe 'Admin Appearance' do fill_in 'appearance_profile_image_guidelines', with: 'Custom profile image guidelines' click_button 'Update appearance settings' - expect(current_path).to eq admin_appearances_path + expect(current_path).to eq admin_application_settings_appearances_path expect(page).to have_content 'Appearance' expect(page).to have_field('appearance_title', with: 'MyCompany') @@ -31,7 +31,7 @@ RSpec.describe 'Admin Appearance' do sign_in(admin) gitlab_enable_admin_mode_sign_in(admin) - visit admin_appearances_path + visit admin_application_settings_appearances_path click_link "Sign-in page" expect_custom_sign_in_appearance(appearance) @@ -41,7 +41,7 @@ RSpec.describe 'Admin Appearance' do sign_in(admin) gitlab_enable_admin_mode_sign_in(admin) - visit admin_appearances_path + visit admin_application_settings_appearances_path click_link "New project page" expect_custom_new_project_appearance(appearance) @@ -55,7 +55,7 @@ RSpec.describe 'Admin Appearance' do context 'when system header and footer messages are empty' do it 'shows custom system header and footer fields' do - visit admin_appearances_path + visit admin_application_settings_appearances_path 
expect(page).to have_field('appearance_header_message', with: '') expect(page).to have_field('appearance_footer_message', with: '') @@ -70,7 +70,7 @@ RSpec.describe 'Admin Appearance' do end it 'shows custom system header and footer fields' do - visit admin_appearances_path + visit admin_application_settings_appearances_path expect(page).to have_field('appearance_header_message', with: appearance.header_message) expect(page).to have_field('appearance_footer_message', with: appearance.footer_message) @@ -99,7 +99,7 @@ RSpec.describe 'Admin Appearance' do before do sign_in(create(:admin)) gitlab_enable_admin_mode_sign_in(admin) - visit admin_appearances_path + visit admin_application_settings_appearances_path fill_in 'appearance_profile_image_guidelines', with: 'Custom profile image guidelines, please :smile:!' click_button 'Update appearance settings' end @@ -115,7 +115,7 @@ RSpec.describe 'Admin Appearance' do it 'appearance logo' do sign_in(admin) gitlab_enable_admin_mode_sign_in(admin) - visit admin_appearances_path + visit admin_application_settings_appearances_path attach_file(:appearance_logo, logo_fixture) click_button 'Update appearance settings' @@ -128,7 +128,7 @@ RSpec.describe 'Admin Appearance' do it 'header logos' do sign_in(admin) gitlab_enable_admin_mode_sign_in(admin) - visit admin_appearances_path + visit admin_application_settings_appearances_path attach_file(:appearance_header_logo, logo_fixture) click_button 'Update appearance settings' @@ -141,7 +141,7 @@ RSpec.describe 'Admin Appearance' do it 'Favicon' do sign_in(admin) gitlab_enable_admin_mode_sign_in(admin) - visit admin_appearances_path + visit admin_application_settings_appearances_path attach_file(:appearance_favicon, logo_fixture) click_button 'Update appearance settings' diff --git a/spec/features/admin/admin_search_settings_spec.rb b/spec/features/admin/admin_search_settings_spec.rb index cd61a1db6f3..989cb7cc787 100644 --- a/spec/features/admin/admin_search_settings_spec.rb +++ b/spec/features/admin/admin_search_settings_spec.rb @@ -13,7 +13,7 @@ RSpec.describe 'Admin searches application settings', :js do context 'in appearances page' do before do - visit(admin_appearances_path) + visit(admin_application_settings_appearances_path) end it_behaves_like 'cannot search settings' diff --git a/spec/frontend/cycle_analytics/base_spec.js b/spec/frontend/cycle_analytics/base_spec.js index 868a8583555..2f85cc04051 100644 --- a/spec/frontend/cycle_analytics/base_spec.js +++ b/spec/frontend/cycle_analytics/base_spec.js @@ -19,6 +19,9 @@ function createStore({ initialState = {} }) { return new Vuex.Store({ state: { ...initState(), + permissions: { + [selectedStage.id]: true, + }, ...initialState, }, getters: { @@ -155,7 +158,11 @@ describe('Value stream analytics component', () => { describe('without enough permissions', () => { beforeEach(() => { wrapper = createComponent({ - initialState: { selectedStage: { ...selectedStage, isUserAllowed: false } }, + initialState: { + permissions: { + [selectedStage.id]: false, + }, + }, }); }); diff --git a/spec/frontend/cycle_analytics/mock_data.js b/spec/frontend/cycle_analytics/mock_data.js index ab8bac1011e..242ea1932fb 100644 --- a/spec/frontend/cycle_analytics/mock_data.js +++ b/spec/frontend/cycle_analytics/mock_data.js @@ -1,3 +1,4 @@ +import { DEFAULT_VALUE_STREAM } from '~/cycle_analytics/constants'; import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; export const getStageByTitle = (stages, title) => @@ -95,54 +96,6 @@ export const rawData = { }; 
export const convertedData = { - stages: [ - selectedStage, - { - ...planStage, - active: false, - isUserAllowed: true, - emptyStageText: - 'The planning stage shows the time from the previous step to pushing your first commit. This time will be added automatically once you push your first commit.', - component: 'stage-plan-component', - slug: 'plan', - }, - { - ...codeStage, - active: false, - isUserAllowed: true, - emptyStageText: - 'The coding stage shows the time from the first commit to creating the merge request. The data will automatically be added here once you create your first merge request.', - component: 'stage-code-component', - slug: 'code', - }, - { - ...testStage, - active: false, - isUserAllowed: true, - emptyStageText: - 'The testing stage shows the time GitLab CI takes to run every pipeline for the related merge request. The data will automatically be added after your first pipeline finishes running.', - component: 'stage-test-component', - slug: 'test', - }, - { - ...reviewStage, - active: false, - isUserAllowed: true, - emptyStageText: - 'The review stage shows the time from creating the merge request to merging it. The data will automatically be added after you merge your first merge request.', - component: 'stage-review-component', - slug: 'review', - }, - { - ...stagingStage, - active: false, - isUserAllowed: true, - emptyStageText: - 'The staging stage shows the time between merging the MR and deploying code to the production environment. The data will be automatically added once you deploy to production for the first time.', - component: 'stage-staging-component', - slug: 'staging', - }, - ], summary: [ { value: '20', title: 'New Issues' }, { value: '-', title: 'Commits' }, @@ -256,3 +209,49 @@ export const transformedProjectStagePathData = [ value: 172800, }, ]; + +export const selectedValueStream = DEFAULT_VALUE_STREAM; + +export const rawValueStreamStages = [ + { + title: 'Issue', + hidden: false, + legend: '', + description: 'Time before an issue gets scheduled', + id: 'issue', + custom: false, + start_event_html_description: + '\u003cp data-sourcepos="1:1-1:13" dir="auto"\u003eIssue created\u003c/p\u003e', + end_event_html_description: + '\u003cp data-sourcepos="1:1-1:71" dir="auto"\u003eIssue first associated with a milestone or issue first added to a board\u003c/p\u003e', + }, + { + title: 'Plan', + hidden: false, + legend: '', + description: 'Time before an issue starts implementation', + id: 'plan', + custom: false, + start_event_html_description: + '\u003cp data-sourcepos="1:1-1:71" dir="auto"\u003eIssue first associated with a milestone or issue first added to a board\u003c/p\u003e', + end_event_html_description: + '\u003cp data-sourcepos="1:1-1:33" dir="auto"\u003eIssue first mentioned in a commit\u003c/p\u003e', + }, + { + title: 'Code', + hidden: false, + legend: '', + description: 'Time until first merge request', + id: 'code', + custom: false, + start_event_html_description: + '\u003cp data-sourcepos="1:1-1:33" dir="auto"\u003eIssue first mentioned in a commit\u003c/p\u003e', + end_event_html_description: + '\u003cp data-sourcepos="1:1-1:21" dir="auto"\u003eMerge request created\u003c/p\u003e', + }, +]; + +export const valueStreamStages = rawValueStreamStages.map((s) => ({ + ...convertObjectPropsToCamelCase(s, { deep: true }), + component: `stage-${s.id}-component`, +})); diff --git a/spec/frontend/cycle_analytics/store/actions_spec.js b/spec/frontend/cycle_analytics/store/actions_spec.js index 55f5d720e9e..4f37e1266fb 100644 --- 
a/spec/frontend/cycle_analytics/store/actions_spec.js +++ b/spec/frontend/cycle_analytics/store/actions_spec.js @@ -3,10 +3,27 @@ import MockAdapter from 'axios-mock-adapter'; import testAction from 'helpers/vuex_action_helper'; import * as actions from '~/cycle_analytics/store/actions'; import httpStatusCodes from '~/lib/utils/http_status'; -import { selectedStage } from '../mock_data'; +import { selectedStage, selectedValueStream } from '../mock_data'; const mockRequestPath = 'some/cool/path'; +const mockFullPath = '/namespace/-/analytics/value_stream_analytics/value_streams'; const mockStartDate = 30; +const mockRequestedDataActions = ['fetchValueStreams', 'fetchCycleAnalyticsData']; +const mockInitializeActionCommit = { + payload: { requestPath: mockRequestPath }, + type: 'INITIALIZE_VSA', +}; +const mockSetDateActionCommit = { payload: { startDate: mockStartDate }, type: 'SET_DATE_RANGE' }; +const mockRequestedDataMutations = [ + { + payload: true, + type: 'SET_LOADING', + }, + { + payload: false, + type: 'SET_LOADING', + }, +]; describe('Project Value Stream Analytics actions', () => { let state; @@ -22,27 +39,26 @@ describe('Project Value Stream Analytics actions', () => { state = {}; }); - it.each` - action | type | payload | expectedActions - ${'initializeVsa'} | ${'INITIALIZE_VSA'} | ${{ requestPath: mockRequestPath }} | ${['fetchCycleAnalyticsData']} - ${'setDateRange'} | ${'SET_DATE_RANGE'} | ${{ startDate: 30 }} | ${[]} - ${'setSelectedStage'} | ${'SET_SELECTED_STAGE'} | ${{ selectedStage }} | ${[]} - `( - '$action should dispatch $expectedActions and commit $type', - ({ action, type, payload, expectedActions }) => + const mutationTypes = (arr) => arr.map(({ type }) => type); + + describe.each` + action | payload | expectedActions | expectedMutations + ${'initializeVsa'} | ${{ requestPath: mockRequestPath }} | ${mockRequestedDataActions} | ${[mockInitializeActionCommit, ...mockRequestedDataMutations]} + ${'setDateRange'} | ${{ startDate: mockStartDate }} | ${mockRequestedDataActions} | ${[mockSetDateActionCommit, ...mockRequestedDataMutations]} + ${'setSelectedStage'} | ${{ selectedStage }} | ${['fetchStageData']} | ${[{ type: 'SET_SELECTED_STAGE', payload: { selectedStage } }]} + ${'setSelectedValueStream'} | ${{ selectedValueStream }} | ${['fetchValueStreamStages']} | ${[{ type: 'SET_SELECTED_VALUE_STREAM', payload: { selectedValueStream } }]} + `('$action', ({ action, payload, expectedActions, expectedMutations }) => { + const types = mutationTypes(expectedMutations); + + it(`will dispatch ${expectedActions} and commit ${types}`, () => testAction({ action: actions[action], state, payload, - expectedMutations: [ - { - type, - payload, - }, - ], + expectedMutations, expectedActions: expectedActions.map((a) => ({ type: a })), - }), - ); + })); + }); describe('fetchCycleAnalyticsData', () => { beforeEach(() => { @@ -60,7 +76,7 @@ describe('Project Value Stream Analytics actions', () => { { type: 'REQUEST_CYCLE_ANALYTICS_DATA' }, { type: 'RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS' }, ], - expectedActions: [{ type: 'setSelectedStage' }, { type: 'fetchStageData' }], + expectedActions: [], })); describe('with a failing request', () => { @@ -85,7 +101,7 @@ describe('Project Value Stream Analytics actions', () => { }); describe('fetchStageData', () => { - const mockStagePath = `${mockRequestPath}/events/${selectedStage.name}.json`; + const mockStagePath = `${mockRequestPath}/events/${selectedStage.name}`; beforeEach(() => { state = { @@ -153,4 +169,115 @@ describe('Project Value Stream 
Analytics actions', () => { })); }); }); + + describe('fetchValueStreams', () => { + const mockValueStreamPath = /\/analytics\/value_stream_analytics\/value_streams/; + + beforeEach(() => { + state = { + fullPath: mockFullPath, + }; + mock = new MockAdapter(axios); + mock.onGet(mockValueStreamPath).reply(httpStatusCodes.OK); + }); + + it(`commits the 'REQUEST_VALUE_STREAMS' mutation`, () => + testAction({ + action: actions.fetchValueStreams, + state, + payload: {}, + expectedMutations: [{ type: 'REQUEST_VALUE_STREAMS' }], + expectedActions: [{ type: 'receiveValueStreamsSuccess' }, { type: 'setSelectedStage' }], + })); + + describe('with a failing request', () => { + beforeEach(() => { + mock = new MockAdapter(axios); + mock.onGet(mockValueStreamPath).reply(httpStatusCodes.BAD_REQUEST); + }); + + it(`commits the 'RECEIVE_VALUE_STREAMS_ERROR' mutation`, () => + testAction({ + action: actions.fetchValueStreams, + state, + payload: {}, + expectedMutations: [ + { type: 'REQUEST_VALUE_STREAMS' }, + { type: 'RECEIVE_VALUE_STREAMS_ERROR', payload: httpStatusCodes.BAD_REQUEST }, + ], + expectedActions: [], + })); + }); + }); + + describe('receiveValueStreamsSuccess', () => { + const mockValueStream = { + id: 'mockDefault', + name: 'mock default', + }; + const mockValueStreams = [mockValueStream, selectedValueStream]; + it('with data, will set the first value stream', () => { + testAction({ + action: actions.receiveValueStreamsSuccess, + state, + payload: mockValueStreams, + expectedMutations: [{ type: 'RECEIVE_VALUE_STREAMS_SUCCESS', payload: mockValueStreams }], + expectedActions: [{ type: 'setSelectedValueStream', payload: mockValueStream }], + }); + }); + + it('without data, will set the default value stream', () => { + testAction({ + action: actions.receiveValueStreamsSuccess, + state, + payload: [], + expectedMutations: [{ type: 'RECEIVE_VALUE_STREAMS_SUCCESS', payload: [] }], + expectedActions: [{ type: 'setSelectedValueStream', payload: selectedValueStream }], + }); + }); + }); + + describe('fetchValueStreamStages', () => { + const mockValueStreamPath = /\/analytics\/value_stream_analytics\/value_streams/; + + beforeEach(() => { + state = { + fullPath: mockFullPath, + selectedValueStream, + }; + mock = new MockAdapter(axios); + mock.onGet(mockValueStreamPath).reply(httpStatusCodes.OK); + }); + + it(`commits the 'REQUEST_VALUE_STREAM_STAGES' and 'RECEIVE_VALUE_STREAM_STAGES_SUCCESS' mutations`, () => + testAction({ + action: actions.fetchValueStreamStages, + state, + payload: {}, + expectedMutations: [ + { type: 'REQUEST_VALUE_STREAM_STAGES' }, + { type: 'RECEIVE_VALUE_STREAM_STAGES_SUCCESS' }, + ], + expectedActions: [], + })); + + describe('with a failing request', () => { + beforeEach(() => { + mock = new MockAdapter(axios); + mock.onGet(mockValueStreamPath).reply(httpStatusCodes.BAD_REQUEST); + }); + + it(`commits the 'RECEIVE_VALUE_STREAM_STAGES_ERROR' mutation`, () => + testAction({ + action: actions.fetchValueStreamStages, + state, + payload: {}, + expectedMutations: [ + { type: 'REQUEST_VALUE_STREAM_STAGES' }, + { type: 'RECEIVE_VALUE_STREAM_STAGES_ERROR', payload: httpStatusCodes.BAD_REQUEST }, + ], + expectedActions: [], + })); + }); + }); }); diff --git a/spec/frontend/cycle_analytics/store/mutations_spec.js b/spec/frontend/cycle_analytics/store/mutations_spec.js index 08c70af6ef6..88e1a13f506 100644 --- a/spec/frontend/cycle_analytics/store/mutations_spec.js +++ b/spec/frontend/cycle_analytics/store/mutations_spec.js @@ -1,6 +1,15 @@ import * as types from 
'~/cycle_analytics/store/mutation_types'; import mutations from '~/cycle_analytics/store/mutations'; -import { selectedStage, rawEvents, convertedEvents, rawData, convertedData } from '../mock_data'; +import { + selectedStage, + rawEvents, + convertedEvents, + rawData, + convertedData, + selectedValueStream, + rawValueStreamStages, + valueStreamStages, +} from '../mock_data'; let state; const mockRequestPath = 'fake/request/path'; @@ -17,15 +26,15 @@ describe('Project Value Stream Analytics mutations', () => { it.each` mutation | stateKey | value - ${types.SET_SELECTED_STAGE} | ${'isLoadingStage'} | ${false} + ${types.REQUEST_VALUE_STREAMS} | ${'valueStreams'} | ${[]} + ${types.RECEIVE_VALUE_STREAMS_ERROR} | ${'valueStreams'} | ${[]} + ${types.REQUEST_VALUE_STREAM_STAGES} | ${'stages'} | ${[]} + ${types.RECEIVE_VALUE_STREAM_STAGES_ERROR} | ${'stages'} | ${[]} ${types.REQUEST_CYCLE_ANALYTICS_DATA} | ${'isLoading'} | ${true} - ${types.REQUEST_CYCLE_ANALYTICS_DATA} | ${'stages'} | ${[]} ${types.REQUEST_CYCLE_ANALYTICS_DATA} | ${'hasError'} | ${false} - ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${'isLoading'} | ${false} ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${'hasError'} | ${false} ${types.RECEIVE_CYCLE_ANALYTICS_DATA_ERROR} | ${'isLoading'} | ${false} ${types.RECEIVE_CYCLE_ANALYTICS_DATA_ERROR} | ${'hasError'} | ${true} - ${types.RECEIVE_CYCLE_ANALYTICS_DATA_ERROR} | ${'stages'} | ${[]} ${types.REQUEST_STAGE_DATA} | ${'isLoadingStage'} | ${true} ${types.REQUEST_STAGE_DATA} | ${'isEmptyStage'} | ${false} ${types.REQUEST_STAGE_DATA} | ${'hasError'} | ${false} @@ -44,12 +53,15 @@ describe('Project Value Stream Analytics mutations', () => { }); it.each` - mutation | payload | stateKey | value - ${types.INITIALIZE_VSA} | ${{ requestPath: mockRequestPath }} | ${'requestPath'} | ${mockRequestPath} - ${types.SET_SELECTED_STAGE} | ${selectedStage} | ${'selectedStage'} | ${selectedStage} - ${types.SET_DATE_RANGE} | ${{ startDate: mockStartData }} | ${'startDate'} | ${mockStartData} - ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${rawData} | ${'stages'} | ${convertedData.stages} - ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${rawData} | ${'summary'} | ${convertedData.summary} + mutation | payload | stateKey | value + ${types.INITIALIZE_VSA} | ${{ requestPath: mockRequestPath }} | ${'requestPath'} | ${mockRequestPath} + ${types.SET_DATE_RANGE} | ${{ startDate: mockStartData }} | ${'startDate'} | ${mockStartData} + ${types.SET_LOADING} | ${true} | ${'isLoading'} | ${true} + ${types.SET_LOADING} | ${false} | ${'isLoading'} | ${false} + ${types.SET_SELECTED_VALUE_STREAM} | ${selectedValueStream} | ${'selectedValueStream'} | ${selectedValueStream} + ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${rawData} | ${'summary'} | ${convertedData.summary} + ${types.RECEIVE_VALUE_STREAMS_SUCCESS} | ${[selectedValueStream]} | ${'valueStreams'} | ${[selectedValueStream]} + ${types.RECEIVE_VALUE_STREAM_STAGES_SUCCESS} | ${{ stages: rawValueStreamStages }} | ${'stages'} | ${valueStreamStages} `( '$mutation with $payload will set $stateKey to $value', ({ mutation, payload, stateKey, value }) => { diff --git a/spec/frontend/cycle_analytics/utils_spec.js b/spec/frontend/cycle_analytics/utils_spec.js index 2d9d2f5b5b1..15137bb0571 100644 --- a/spec/frontend/cycle_analytics/utils_spec.js +++ b/spec/frontend/cycle_analytics/utils_spec.js @@ -53,17 +53,6 @@ describe('Value stream analytics utils', () => { expect(result.summary).toEqual(convertedData.summary); }); - it('returns the stages data', () => { - 
expect(result.stages).toEqual(convertedData.stages); - }); - - it('returns each of the default value stream stages', () => { - const stages = result.stages.map(({ name }) => name); - ['issue', 'plan', 'code', 'test', 'review', 'staging'].forEach((stageName) => { - expect(stages).toContain(stageName); - }); - }); - it('returns `-` for summary data that has no value', () => { const singleSummaryResult = decorateData({ stats: [], @@ -72,24 +61,6 @@ describe('Value stream analytics utils', () => { }); expect(singleSummaryResult.summary).toEqual([{ value: '-', title: 'Commits' }]); }); - - it('returns additional fields for each stage', () => { - const singleStageResult = decorateData({ - stats: [{ name: 'issue', value: null }], - permissions: { issue: false }, - }); - const stage = singleStageResult.stages[0]; - const txt = - 'The issue stage shows the time it takes from creating an issue to assigning the issue to a milestone, or add the issue to a list on your Issue Board. Begin creating issues to see data for this stage.'; - - expect(stage).toMatchObject({ - active: false, - isUserAllowed: false, - emptyStageText: txt, - slug: 'issue', - component: 'stage-issue-component', - }); - }); }); describe('transformStagesForPathNavigation', () => { diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js index 8a04e93e88c..bffe108896a 100644 --- a/spec/frontend/lib/utils/url_utility_spec.js +++ b/spec/frontend/lib/utils/url_utility_spec.js @@ -650,45 +650,24 @@ describe('URL utility', () => { }); describe('queryToObject', () => { - it('converts search query into an object', () => { - const searchQuery = '?one=1&two=2'; - - expect(urlUtils.queryToObject(searchQuery)).toEqual({ one: '1', two: '2' }); - }); - - it('removes undefined values from the search query', () => { - const searchQuery = '?one=1&two=2&three'; - - expect(urlUtils.queryToObject(searchQuery)).toEqual({ one: '1', two: '2' }); - }); - - describe('with gatherArrays=false', () => { - it('overwrites values with the same array-key and does not change the key', () => { - const searchQuery = '?one[]=1&one[]=2&two=2&two=3'; - - expect(urlUtils.queryToObject(searchQuery)).toEqual({ 'one[]': '2', two: '3' }); - }); - }); - - describe('with gatherArrays=true', () => { - const options = { gatherArrays: true }; - it('gathers only values with the same array-key and strips `[]` from the key', () => { - const searchQuery = '?one[]=1&one[]=2&two=2&two=3'; - - expect(urlUtils.queryToObject(searchQuery, options)).toEqual({ one: ['1', '2'], two: '3' }); - }); - - it('overwrites values with the same array-key name', () => { - const searchQuery = '?one=1&one[]=2&two=2&two=3'; - - expect(urlUtils.queryToObject(searchQuery, options)).toEqual({ one: ['2'], two: '3' }); - }); - - it('overwrites values with the same key name', () => { - const searchQuery = '?one[]=1&one=2&two=2&two=3'; - - expect(urlUtils.queryToObject(searchQuery, options)).toEqual({ one: '2', two: '3' }); - }); + it.each` + case | query | options | result + ${'converts query'} | ${'?one=1&two=2'} | ${undefined} | ${{ one: '1', two: '2' }} + ${'converts query without ?'} | ${'one=1&two=2'} | ${undefined} | ${{ one: '1', two: '2' }} + ${'removes undefined values'} | ${'?one=1&two=2&three'} | ${undefined} | ${{ one: '1', two: '2' }} + ${'overwrites values with same key and does not change key'} | ${'?one[]=1&one[]=2&two=2&two=3'} | ${undefined} | ${{ 'one[]': '2', two: '3' }} + ${'gathers values with the same array-key, strips `[]` from key'} | 
${'?one[]=1&one[]=2&two=2&two=3'} | ${{ gatherArrays: true }} | ${{ one: ['1', '2'], two: '3' }} + ${'overwrites values with the same array-key name'} | ${'?one=1&one[]=2&two=2&two=3'} | ${{ gatherArrays: true }} | ${{ one: ['2'], two: '3' }} + ${'overwrites values with the same key name'} | ${'?one[]=1&one=2&two=2&two=3'} | ${{ gatherArrays: true }} | ${{ one: '2', two: '3' }} + ${'ignores plus symbols'} | ${'?search=a+b'} | ${{ legacySpacesDecode: true }} | ${{ search: 'a+b' }} + ${'ignores plus symbols in keys'} | ${'?search+term=a'} | ${{ legacySpacesDecode: true }} | ${{ 'search+term': 'a' }} + ${'ignores plus symbols when gathering arrays'} | ${'?search[]=a+b'} | ${{ gatherArrays: true, legacySpacesDecode: true }} | ${{ search: ['a+b'] }} + ${'replaces plus symbols with spaces'} | ${'?search=a+b'} | ${undefined} | ${{ search: 'a b' }} + ${'replaces plus symbols in keys with spaces'} | ${'?search+term=a'} | ${undefined} | ${{ 'search term': 'a' }} + ${'replaces plus symbols when gathering arrays'} | ${'?search[]=a+b'} | ${{ gatherArrays: true }} | ${{ search: ['a b'] }} + ${'replaces plus symbols when gathering arrays for values with same key'} | ${'?search[]=a+b&search[]=c+d'} | ${{ gatherArrays: true }} | ${{ search: ['a b', 'c d'] }} + `('$case', ({ query, options, result }) => { + expect(urlUtils.queryToObject(query, options)).toEqual(result); }); }); diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb index b84fa1538ef..ac42f12a3d4 100644 --- a/spec/lib/bulk_imports/clients/http_spec.rb +++ b/spec/lib/bulk_imports/clients/http_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::Clients::Http do +RSpec.describe BulkImports::Clients::HTTP do include ImportSpecHelper let(:uri) { 'http://gitlab.example' } diff --git a/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb b/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb index 721dacbe3f4..5ee5cdbe413 100644 --- a/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb +++ b/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe BulkImports::Common::Extractors::RestExtractor do - let(:http_client) { instance_double(BulkImports::Clients::Http) } + let(:http_client) { instance_double(BulkImports::Clients::HTTP) } let(:options) { { query: double(to_h: { resource: nil, query: nil }) } } let(:response) { double(parsed_response: { 'data' => { 'foo' => 'bar' } }, headers: { 'x-next-page' => '2' }) } diff --git a/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb b/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb index ac8786440e9..f7485b188ce 100644 --- a/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb +++ b/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb @@ -12,7 +12,7 @@ RSpec.describe BulkImports::Groups::Extractors::SubgroupsExtractor do response = [{ 'test' => 'group' }] context = BulkImports::Pipeline::Context.new(tracker) - allow_next_instance_of(BulkImports::Clients::Http) do |client| + allow_next_instance_of(BulkImports::Clients::HTTP) do |client| allow(client).to receive(:each_page).and_return(response) end diff --git a/spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb b/spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb new file mode 100644 index 00000000000..e838476a650 --- /dev/null +++ 
b/spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20210610102413_migrate_protected_attribute_to_pending_builds.rb') + +RSpec.describe MigrateProtectedAttributeToPendingBuilds do + let(:namespaces) { table(:namespaces) } + let(:projects) { table(:projects) } + let(:queue) { table(:ci_pending_builds) } + let(:builds) { table(:ci_builds) } + + before do + namespaces.create!(id: 123, name: 'sample', path: 'sample') + projects.create!(id: 123, name: 'sample', path: 'sample', namespace_id: 123) + + builds.create!(id: 1, project_id: 123, status: 'pending', protected: false, type: 'Ci::Build') + builds.create!(id: 2, project_id: 123, status: 'pending', protected: true, type: 'Ci::Build') + builds.create!(id: 3, project_id: 123, status: 'pending', protected: false, type: 'Ci::Build') + builds.create!(id: 4, project_id: 123, status: 'pending', protected: true, type: 'Ci::Bridge') + builds.create!(id: 5, project_id: 123, status: 'success', protected: true, type: 'Ci::Build') + + queue.create!(id: 1, project_id: 123, build_id: 1) + queue.create!(id: 2, project_id: 123, build_id: 2) + queue.create!(id: 3, project_id: 123, build_id: 3) + end + + it 'updates entries that should be protected' do + migrate! + + expect(queue.where(protected: true).count).to eq 1 + expect(queue.find_by(protected: true).id).to eq 2 + end +end diff --git a/spec/models/bulk_imports/export_status_spec.rb b/spec/models/bulk_imports/export_status_spec.rb index fde18705e35..48f32a2092a 100644 --- a/spec/models/bulk_imports/export_status_spec.rb +++ b/spec/models/bulk_imports/export_status_spec.rb @@ -16,7 +16,7 @@ RSpec.describe BulkImports::ExportStatus do subject { described_class.new(tracker, relation) } before do - allow_next_instance_of(BulkImports::Clients::Http) do |client| + allow_next_instance_of(BulkImports::Clients::HTTP) do |client| allow(client).to receive(:get).and_return(response_double) end end @@ -66,7 +66,7 @@ RSpec.describe BulkImports::ExportStatus do context 'when something goes wrong during export status fetch' do it 'returns exception class as error' do - allow_next_instance_of(BulkImports::Clients::Http) do |client| + allow_next_instance_of(BulkImports::Clients::HTTP) do |client| allow(client).to receive(:get).and_raise(StandardError, 'Error!') end diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb index c8430d669b8..600ddb784c5 100644 --- a/spec/models/ci/build_spec.rb +++ b/spec/models/ci/build_spec.rb @@ -493,6 +493,34 @@ RSpec.describe Ci::Build do expect(build.queuing_entry).to be_present end end + + context 'when build has been picked by a shared runner' do + let(:build) { create(:ci_build, :pending) } + + it 'creates runtime metadata entry' do + build.runner = create(:ci_runner, :instance_type) + + build.run! + + expect(build.reload.runtime_metadata).to be_present + end + end + end + + describe '#drop' do + context 'when has a runtime tracking entry' do + let(:build) { create(:ci_build, :pending) } + + it 'removes runtime tracking entry' do + build.runner = create(:ci_runner, :instance_type) + + build.run! + expect(build.reload.runtime_metadata).to be_present + + build.drop! + expect(build.reload.runtime_metadata).not_to be_present + end + end end describe '#schedulable?' do @@ -5181,4 +5209,34 @@ RSpec.describe Ci::Build do it { expect(matcher.project).to eq(build.project) } end + + describe '#shared_runner_build?' 
do
+    context 'when build does not have a runner assigned' do
+      it 'is not a shared runner build' do
+        expect(build.runner).to be_nil
+
+        expect(build).not_to be_shared_runner_build
+      end
+    end
+
+    context 'when build has a project runner assigned' do
+      before do
+        build.runner = create(:ci_runner, :project)
+      end
+
+      it 'is not a shared runner build' do
+        expect(build).not_to be_shared_runner_build
+      end
+    end
+
+    context 'when build has an instance runner assigned' do
+      before do
+        build.runner = create(:ci_runner, :instance_type)
+      end
+
+      it 'is a shared runner build' do
+        expect(build).to be_shared_runner_build
+      end
+    end
+  end
 end
diff --git a/spec/models/ci/pending_build_spec.rb b/spec/models/ci/pending_build_spec.rb
index 5d52ea606e3..c1d4f4b0a5e 100644
--- a/spec/models/ci/pending_build_spec.rb
+++ b/spec/models/ci/pending_build_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Ci::PendingBuild do
 
     context 'when another queuing entry exists for given build' do
       before do
-        described_class.create!(build: build, project: project)
+        described_class.create!(build: build, project: project, protected: false)
       end
 
       it 'returns a build id as a result' do
diff --git a/spec/models/ci/running_build_spec.rb b/spec/models/ci/running_build_spec.rb
new file mode 100644
index 00000000000..589e5a86f4d
--- /dev/null
+++ b/spec/models/ci/running_build_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::RunningBuild do
+  let_it_be(:project) { create(:project) }
+  let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+  let(:runner) { create(:ci_runner, :instance_type) }
+  let(:build) { create(:ci_build, :running, runner: runner, pipeline: pipeline) }
+
+  describe '.upsert_shared_runner_build!' do
+    context 'another pending entry does not exist' do
+      it 'creates a new pending entry' do
+        result = described_class.upsert_shared_runner_build!(build)
+
+        expect(result.rows.dig(0, 0)).to eq build.id
+        expect(build.reload.runtime_metadata).to be_present
+      end
+    end
+
+    context 'when another queuing entry exists for given build' do
+      before do
+        described_class.create!(build: build,
+                                project: project,
+                                runner: runner,
+                                runner_type: runner.runner_type)
+      end
+
+      it 'returns a build id as a result' do
+        result = described_class.upsert_shared_runner_build!(build)
+
+        expect(result.rows.dig(0, 0)).to eq build.id
+      end
+    end
+
+    context 'when build has been picked by a specific runner' do
+      let(:runner) { create(:ci_runner, :project) }
+
+      it 'raises an error' do
+        expect { described_class.upsert_shared_runner_build!(build) }
+          .to raise_error(ArgumentError, 'build has not been picked by a shared runner')
+      end
+    end
+
+    context 'when build has not been picked by a runner yet' do
+      let(:build) { create(:ci_build, pipeline: pipeline) }
+
+      it 'raises an error' do
+        expect { described_class.upsert_shared_runner_build!(build) }
+          .to raise_error(ArgumentError, 'build has not been picked by a shared runner')
+      end
+    end
+  end
+end
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index b3dc486081c..7b100b7a6f3 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -841,7 +841,7 @@ RSpec.describe Issuable do
       it_behaves_like 'matches_cross_reference_regex? fails fast'
     end
 
-    context "note with long path string", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/35269' do
+    context "note with long path string" do
       let(:project) { create(:project, :public, :repository) }
       let(:mentionable) { project.commit }
 
diff --git a/spec/models/deploy_token_spec.rb b/spec/models/deploy_token_spec.rb
index c8917a7dd65..dfc37f9e661 100644
--- a/spec/models/deploy_token_spec.rb
+++ b/spec/models/deploy_token_spec.rb
@@ -248,70 +248,56 @@ RSpec.describe DeployToken do
         deploy_token.groups << group
       end
 
-      context 'and the allow_group_deploy_token feature flag is turned off' do
-        it 'is false' do
-          stub_feature_flags(allow_group_deploy_token: false)
+      context 'and the passed-in project does not belong to any group' do
+        it { is_expected.to be_falsy }
+      end
 
-          is_expected.to be_falsy
+      context 'and the passed-in project belongs to the token group' do
+        it 'is true' do
+          group.projects << project
+
+          is_expected.to be_truthy
         end
       end
 
-      context 'and the allow_group_deploy_token feature flag is turned on' do
+      context 'and the passed-in project belongs to a subgroup' do
+        let(:child_group) { create(:group, parent_id: group.id) }
+        let(:grandchild_group) { create(:group, parent_id: child_group.id) }
+
         before do
-          stub_feature_flags(allow_group_deploy_token: true)
+          grandchild_group.projects << project
         end
 
-        context 'and the passed-in project does not belong to any group' do
-          it { is_expected.to be_falsy }
+        context 'and the token group is an ancestor (grand-parent) of this group' do
+          it { is_expected.to be_truthy }
         end
 
-        context 'and the passed-in project belongs to the token group' do
-          it 'is true' do
-            group.projects << project
-
-            is_expected.to be_truthy
-          end
-        end
-
-        context 'and the passed-in project belongs to a subgroup' do
-          let(:child_group) { create(:group, parent_id: group.id) }
-          let(:grandchild_group) { create(:group, parent_id: child_group.id) }
-
-          before do
-            grandchild_group.projects << project
-          end
-
-          context 'and the token group is an ancestor (grand-parent) of this group' do
-            it { is_expected.to be_truthy }
-          end
-
-          context 'and the token group is not ancestor of this group' do
-            let(:child2_group) { create(:group, parent_id: group.id) }
-
-            it 'is false' do
-              deploy_token.groups = [child2_group]
-
-              is_expected.to be_falsey
-            end
-          end
-        end
-
-        context 'and the passed-in project does not belong to the token group' do
-          it { is_expected.to be_falsy }
-        end
-
-        context 'and the project belongs to a group that is parent of the token group' do
-          let(:super_group) { create(:group) }
-          let(:deploy_token) { create(:deploy_token, :group) }
-          let(:group) { create(:group, parent_id: super_group.id) }
+        context 'and the token group is not ancestor of this group' do
+          let(:child2_group) { create(:group, parent_id: group.id) }
 
           it 'is false' do
-            super_group.projects << project
+            deploy_token.groups = [child2_group]
 
             is_expected.to be_falsey
           end
         end
       end
+
+      context 'and the passed-in project does not belong to the token group' do
+        it { is_expected.to be_falsy }
+      end
+
+      context 'and the project belongs to a group that is parent of the token group' do
+        let(:super_group) { create(:group) }
+        let(:deploy_token) { create(:deploy_token, :group) }
+        let(:group) { create(:group, parent_id: super_group.id) }
+
+        it 'is false' do
+          super_group.projects << project
+
+          is_expected.to be_falsey
+        end
+      end
     end
 
     context 'and the token is of project type' do
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 6ec48f88e93..b62f8e6b259 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -109,6 +109,43 @@ RSpec.describe API::Projects do
     end
   end
 
+  shared_examples_for 'create project with default branch parameter' do
+    let(:params) { { name: 'Foo Project', initialize_with_readme: true, default_branch: default_branch } }
+    let(:default_branch) { 'main' }
+
+    it 'creates project with provided default branch name' do
+      expect { request }.to change { Project.count }.by(1)
+      expect(response).to have_gitlab_http_status(:created)
+
+      project = Project.find(json_response['id'])
+      expect(project.default_branch).to eq(default_branch)
+    end
+
+    context 'when branch name is empty' do
+      let(:default_branch) { '' }
+
+      it 'creates project with a default project branch name' do
+        expect { request }.to change { Project.count }.by(1)
+        expect(response).to have_gitlab_http_status(:created)
+
+        project = Project.find(json_response['id'])
+        expect(project.default_branch).to eq('master')
+      end
+    end
+
+    context 'when initialize with readme is not set' do
+      let(:params) { super().merge(initialize_with_readme: nil) }
+
+      it 'creates project with a default project branch name' do
+        expect { request }.to change { Project.count }.by(1)
+        expect(response).to have_gitlab_http_status(:created)
+
+        project = Project.find(json_response['id'])
+        expect(project.default_branch).to be_nil
+      end
+    end
+  end
+
   describe 'GET /projects' do
     shared_examples_for 'projects response' do
       it 'returns an array of projects' do
@@ -947,6 +984,10 @@ RSpec.describe API::Projects do
       expect(project.path).to eq('path-project-Foo')
     end
 
+    it_behaves_like 'create project with default branch parameter' do
+      let(:request) { post api('/projects', user), params: params }
+    end
+
     it 'creates last project before reaching project limit' do
       allow_any_instance_of(User).to receive(:projects_limit_left).and_return(1)
       post api('/projects', user2), params: { name: 'foo' }
@@ -1427,6 +1468,10 @@ RSpec.describe API::Projects do
       expect(project.path).to eq('path-project-Foo')
     end
 
+    it_behaves_like 'create project with default branch parameter' do
+      let(:request) { post api("/projects/user/#{user.id}", admin), params: params }
+    end
+
     it 'responds with 400 on failure and not project' do
       expect { post api("/projects/user/#{user.id}", admin) }
         .not_to change { Project.count }
diff --git a/spec/services/bulk_imports/file_download_service_spec.rb b/spec/services/bulk_imports/file_download_service_spec.rb
index 5171bb40f0a..0961ddce553 100644
--- a/spec/services/bulk_imports/file_download_service_spec.rb
+++ b/spec/services/bulk_imports/file_download_service_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe BulkImports::FileDownloadService do
   subject { described_class.new(configuration: config, relative_url: '/test', dir: tmpdir, filename: filename) }
 
   before do
-    allow_next_instance_of(BulkImports::Clients::Http) do |client|
+    allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
       allow(client).to receive(:head).and_return(response_double)
       allow(client).to receive(:stream).and_yield(chunk_double)
     end
diff --git a/spec/services/ci/create_pipeline_service/needs_spec.rb b/spec/services/ci/create_pipeline_service/needs_spec.rb
index 3f1a2854d76..3b4a6178b8f 100644
--- a/spec/services/ci/create_pipeline_service/needs_spec.rb
+++ b/spec/services/ci/create_pipeline_service/needs_spec.rb
@@ -202,37 +202,21 @@ RSpec.describe Ci::CreatePipelineService do
       YAML
     end
 
-    context 'when there are runners matching the builds' do
-      before do
-        create(:ci_runner, :online)
-      end
+    it 'creates a pipeline with build_a and test_b pending; deploy_b manual', :sidekiq_inline do
+      processables = pipeline.processables
 
-      it 'creates a pipeline with build_a and test_b pending; deploy_b manual', :sidekiq_inline do
-        processables = pipeline.processables
+      build_a = processables.find { |processable| processable.name == 'build_a' }
+      test_a = processables.find { |processable| processable.name == 'test_a' }
+      test_b = processables.find { |processable| processable.name == 'test_b' }
+      deploy_a = processables.find { |processable| processable.name == 'deploy_a' }
+      deploy_b = processables.find { |processable| processable.name == 'deploy_b' }
 
-        build_a = processables.find { |processable| processable.name == 'build_a' }
-        test_a = processables.find { |processable| processable.name == 'test_a' }
-        test_b = processables.find { |processable| processable.name == 'test_b' }
-        deploy_a = processables.find { |processable| processable.name == 'deploy_a' }
-        deploy_b = processables.find { |processable| processable.name == 'deploy_b' }
-
-        expect(pipeline).to be_created_successfully
-        expect(build_a.status).to eq('pending')
-        expect(test_a.status).to eq('created')
-        expect(test_b.status).to eq('pending')
-        expect(deploy_a.status).to eq('created')
-        expect(deploy_b.status).to eq('manual')
-      end
-    end
-
-    context 'when there are no runners matching the builds' do
-      it 'creates a pipeline but all jobs failed', :sidekiq_inline do
-        processables = pipeline.processables
-
-        expect(pipeline).to be_created_successfully
-        expect(processables).to all be_failed
-        expect(processables.map(&:failure_reason)).to all eq('no_matching_runner')
-      end
 
+      expect(pipeline).to be_created_successfully
+      expect(build_a.status).to eq('pending')
+      expect(test_a.status).to eq('created')
+      expect(test_b.status).to eq('pending')
+      expect(deploy_a.status).to eq('created')
+      expect(deploy_b.status).to eq('manual')
     end
   end
diff --git a/spec/services/ci/pipeline_creation/drop_not_runnable_builds_service_spec.rb b/spec/services/ci/pipeline_creation/drop_not_runnable_builds_service_spec.rb
deleted file mode 100644
index 9b525b39afd..00000000000
--- a/spec/services/ci/pipeline_creation/drop_not_runnable_builds_service_spec.rb
+++ /dev/null
@@ -1,80 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ci::PipelineCreation::DropNotRunnableBuildsService do
-  let_it_be(:group) { create(:group) }
-  let_it_be(:project) { create(:project, group: group) }
-
-  let_it_be_with_reload(:pipeline) do
-    create(:ci_pipeline, project: project, status: :created)
-  end
-
-  let_it_be_with_reload(:job) do
-    create(:ci_build, project: project, pipeline: pipeline)
-  end
-
-  describe '#execute' do
-    subject(:execute) { described_class.new(pipeline).execute }
-
-    shared_examples 'jobs allowed to run' do
-      it 'does not drop the jobs' do
-        expect { execute }.not_to change { job.reload.status }
-      end
-    end
-
-    context 'when the feature flag is disabled' do
-      before do
-        stub_feature_flags(ci_drop_new_builds_when_ci_quota_exceeded: false)
-      end
-
-      it_behaves_like 'jobs allowed to run'
-    end
-
-    context 'when the pipeline status is running' do
-      before do
-        pipeline.update!(status: :running)
-      end
-
-      it_behaves_like 'jobs allowed to run'
-    end
-
-    context 'when there are no runners available' do
-      let_it_be(:offline_project_runner) do
-        create(:ci_runner, runner_type: :project_type, projects: [project])
-      end
-
-      it 'drops the job' do
-        execute
-        job.reload
-
-        expect(job).to be_failed
-        expect(job.failure_reason).to eq('no_matching_runner')
-      end
-    end
-
-    context 'with project runners' do
-      let_it_be(:project_runner) do
-        create(:ci_runner, :online, runner_type: :project_type, projects: [project])
-      end
-
-      it_behaves_like 'jobs allowed to run'
-    end
-
-    context 'with group runners' do
-      let_it_be(:group_runner) do
-        create(:ci_runner, :online, runner_type: :group_type, groups: [group])
-      end
-
-      it_behaves_like 'jobs allowed to run'
-    end
-
-    context 'with instance runners' do
-      let_it_be(:instance_runner) do
-        create(:ci_runner, :online, runner_type: :instance_type)
-      end
-
-      it_behaves_like 'jobs allowed to run'
-    end
-  end
-end
diff --git a/spec/services/ci/pipeline_creation/start_pipeline_service_spec.rb b/spec/services/ci/pipeline_creation/start_pipeline_service_spec.rb
index d399843923c..2aa810e8ea1 100644
--- a/spec/services/ci/pipeline_creation/start_pipeline_service_spec.rb
+++ b/spec/services/ci/pipeline_creation/start_pipeline_service_spec.rb
@@ -8,15 +8,6 @@ RSpec.describe Ci::PipelineCreation::StartPipelineService do
   subject(:service) { described_class.new(pipeline) }
 
   describe '#execute' do
-    it 'calls the pipeline runners matching validation service' do
-      expect(Ci::PipelineCreation::DropNotRunnableBuildsService)
-        .to receive(:new)
-        .with(pipeline)
-        .and_return(double('service', execute: true))
-
-      service.execute
-    end
-
     it 'calls the pipeline process service' do
       expect(Ci::ProcessPipelineService)
         .to receive(:new)
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index f047bf649fb..880adc80b24 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe Ci::RetryBuildService do
        artifacts_file artifacts_metadata artifacts_size commands
        resource resource_group_id processed security_scans author
        pipeline_id report_results pending_state pages_deployments
-       queuing_entry].freeze
+       queuing_entry runtime_metadata].freeze
 
   shared_examples 'build duplication' do
     let_it_be(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
diff --git a/spec/services/ci/update_build_queue_service_spec.rb b/spec/services/ci/update_build_queue_service_spec.rb
index 5458280ad9d..44d7809b85f 100644
--- a/spec/services/ci/update_build_queue_service_spec.rb
+++ b/spec/services/ci/update_build_queue_service_spec.rb
@@ -4,101 +4,208 @@ require 'spec_helper'
 
 RSpec.describe Ci::UpdateBuildQueueService do
   let(:project) { create(:project, :repository) }
-  let(:build) { create(:ci_build, pipeline: pipeline) }
   let(:pipeline) { create(:ci_pipeline, project: project) }
+  let(:build) { create(:ci_build, pipeline: pipeline) }
 
-  describe '#push' do
-    let(:transition) { double('transition') }
+  describe 'pending builds queue push / pop' do
+    describe '#push' do
+      let(:transition) { double('transition') }
 
-    before do
-      allow(transition).to receive(:to).and_return('pending')
-      allow(transition).to receive(:within_transaction).and_yield
-    end
-
-    context 'when pending build can be created' do
-      it 'creates a new pending build in transaction' do
-        queued = subject.push(build, transition)
-
-        expect(queued).to eq build.id
-      end
-
-      it 'increments queue push metric' do
-        metrics = spy('metrics')
-
-        described_class.new(metrics).push(build, transition)
-
-        expect(metrics)
-          .to have_received(:increment_queue_operation)
-          .with(:build_queue_push)
-      end
-    end
-
-    context 'when invalid transition is detected' do
-      it 'raises an error' do
-        allow(transition).to receive(:to).and_return('created')
-
-        expect { subject.push(build, transition) }
-          .to raise_error(described_class::InvalidQueueTransition)
-      end
-    end
-
-    context 'when duplicate entry exists' do
       before do
-        ::Ci::PendingBuild.create!(build: build, project: project)
+        allow(transition).to receive(:to).and_return('pending')
+        allow(transition).to receive(:within_transaction).and_yield
      end
 
-      it 'does nothing and returns build id' do
-        queued = subject.push(build, transition)
+      context 'when pending build can be created' do
+        it 'creates a new pending build in transaction' do
+          queued = subject.push(build, transition)
 
-        expect(queued).to eq build.id
+          expect(queued).to eq build.id
+        end
+
+        it 'increments queue push metric' do
+          metrics = spy('metrics')
+
+          described_class.new(metrics).push(build, transition)
+
+          expect(metrics)
+            .to have_received(:increment_queue_operation)
+            .with(:build_queue_push)
+        end
+      end
+
+      context 'when invalid transition is detected' do
+        it 'raises an error' do
+          allow(transition).to receive(:to).and_return('created')
+
+          expect { subject.push(build, transition) }
+            .to raise_error(described_class::InvalidQueueTransition)
+        end
+      end
+
+      context 'when duplicate entry exists' do
+        before do
+          ::Ci::PendingBuild.create!(build: build, project: project)
+        end
+
+        it 'does nothing and returns build id' do
+          queued = subject.push(build, transition)
+
+          expect(queued).to eq build.id
+        end
+      end
+    end
+
+    describe '#pop' do
+      let(:transition) { double('transition') }
+
+      before do
+        allow(transition).to receive(:from).and_return('pending')
+        allow(transition).to receive(:within_transaction).and_yield
+      end
+
+      context 'when pending build exists' do
+        before do
+          Ci::PendingBuild.create!(build: build, project: project)
+        end
+
+        it 'removes pending build in a transaction' do
+          dequeued = subject.pop(build, transition)
+
+          expect(dequeued).to eq build.id
+        end
+
+        it 'increments queue pop metric' do
+          metrics = spy('metrics')
+
+          described_class.new(metrics).pop(build, transition)
+
+          expect(metrics)
+            .to have_received(:increment_queue_operation)
+            .with(:build_queue_pop)
+        end
+      end
+
+      context 'when pending build does not exist' do
+        it 'does nothing if there is no pending build to remove' do
+          dequeued = subject.pop(build, transition)
+
+          expect(dequeued).to be_nil
+        end
+      end
+
+      context 'when invalid transition is detected' do
+        it 'raises an error' do
+          allow(transition).to receive(:from).and_return('created')
+
+          expect { subject.pop(build, transition) }
+            .to raise_error(described_class::InvalidQueueTransition)
+        end
      end
    end
  end
 
-  describe '#pop' do
-    let(:transition) { double('transition') }
+  describe 'shared runner builds tracking' do
+    let(:runner) { create(:ci_runner, :instance_type) }
+    let(:build) { create(:ci_build, runner: runner, pipeline: pipeline) }
 
-    before do
-      allow(transition).to receive(:from).and_return('pending')
-      allow(transition).to receive(:within_transaction).and_yield
-    end
+    describe '#track' do
+      let(:transition) { double('transition') }
 
-    context 'when pending build exists' do
      before do
-        Ci::PendingBuild.create!(build: build, project: project)
+        allow(transition).to receive(:to).and_return('running')
+        allow(transition).to receive(:within_transaction).and_yield
      end
 
-      it 'removes pending build in a transaction' do
-        dequeued = subject.pop(build, transition)
+      context 'when a shared runner build can be tracked' do
+        it 'creates a new shared runner build tracking entry' do
+          build_id = subject.track(build, transition)
 
-        expect(dequeued).to eq build.id
+          expect(build_id).to eq build.id
+        end
+
+        it 'increments new shared runner build metric' do
+          metrics = spy('metrics')
+
+          described_class.new(metrics).track(build, transition)
+
+          expect(metrics)
+            .to have_received(:increment_queue_operation)
+            .with(:shared_runner_build_new)
+        end
      end
 
-      it 'increments queue pop metric' do
-        metrics = spy('metrics')
+      context 'when invalid transition is detected' do
+        it 'raises an error' do
+          allow(transition).to receive(:to).and_return('pending')
 
-        described_class.new(metrics).pop(build, transition)
+          expect { subject.track(build, transition) }
+            .to raise_error(described_class::InvalidQueueTransition)
+        end
+      end
 
-        expect(metrics)
-          .to have_received(:increment_queue_operation)
-          .with(:build_queue_pop)
+      context 'when duplicate entry exists' do
+        before do
+          ::Ci::RunningBuild.create!(
+            build: build, project: project, runner: runner, runner_type: runner.runner_type
+          )
+        end
+
+        it 'does nothing and returns build id' do
+          build_id = subject.track(build, transition)
+
+          expect(build_id).to eq build.id
+        end
      end
    end
 
-    context 'when pending build does not exist' do
-      it 'does nothing if there is no pending build to remove' do
-        dequeued = subject.pop(build, transition)
+    describe '#untrack' do
+      let(:transition) { double('transition') }
 
-        expect(dequeued).to be_nil
+      before do
+        allow(transition).to receive(:from).and_return('running')
+        allow(transition).to receive(:within_transaction).and_yield
      end
-    end
 
-    context 'when invalid transition is detected' do
-      it 'raises an error' do
-        allow(transition).to receive(:from).and_return('created')
+      context 'when shared runner build tracking entry exists' do
+        before do
+          Ci::RunningBuild.create!(
+            build: build, project: project, runner: runner, runner_type: runner.runner_type
+          )
+        end
 
-        expect { subject.pop(build, transition) }
-          .to raise_error(described_class::InvalidQueueTransition)
+        it 'removes shared runner build' do
+          build_id = subject.untrack(build, transition)
+
+          expect(build_id).to eq build.id
+        end
+
+        it 'increments shared runner build done metric' do
+          metrics = spy('metrics')
+
+          described_class.new(metrics).untrack(build, transition)
+
+          expect(metrics)
+            .to have_received(:increment_queue_operation)
+            .with(:shared_runner_build_done)
+        end
+      end
+
+      context 'when tracking entry does not exist' do
+        it 'does nothing if there is no tracking entry to remove' do
+          build_id = subject.untrack(build, transition)
+
+          expect(build_id).to be_nil
+        end
+      end
+
+      context 'when invalid transition is detected' do
+        it 'raises an error' do
+          allow(transition).to receive(:from).and_return('pending')
+
+          expect { subject.untrack(build, transition) }
+            .to raise_error(described_class::InvalidQueueTransition)
+        end
      end
    end
  end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 753c89eb981..cc2a9e4af9b 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -298,7 +298,7 @@ RSpec.describe Projects::CreateService, '#execute' do
     context 'error handling' do
       it 'handles invalid options' do
-        opts[:default_branch] = 'master'
+        opts[:invalid] = 'option'
 
         expect(create_project(user, opts)).to eq(nil)
       end
     end
@@ -806,7 +806,7 @@ RSpec.describe Projects::CreateService, '#execute' do
       end
     end
 
-  context 'with specialized_project_authorization_workers' do
+  context 'with specialized project_authorization workers' do
     let_it_be(:other_user) { create(:user) }
     let_it_be(:group) { create(:group) }
 
@@ -847,34 +847,6 @@ RSpec.describe Projects::CreateService, '#execute' do
 
       create_project(user, opts)
     end
-
-    context 'when feature is disabled' do
-      before do
-        stub_feature_flags(specialized_project_authorization_workers: false)
-      end
-
-      it 'updates authorization for current_user' do
-        project = create_project(user, opts)
-
-        expect(
-          Ability.allowed?(user, :read_project, project)
-        ).to be_truthy
-      end
-
-      it 'uses AuthorizedProjectsWorker' do
-        expect(AuthorizedProjectsWorker).to(
-          receive(:bulk_perform_async).with(array_including([user.id], [other_user.id])).and_call_original
-        )
-        expect(AuthorizedProjectUpdate::ProjectCreateWorker).not_to(
-          receive(:perform_async)
-        )
-        expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).not_to(
-          receive(:bulk_perform_in)
-        )
-
-        create_project(user, opts)
-      end
-    end
  end
 
  def create_project(user, opts)
diff --git a/spec/services/projects/group_links/create_service_spec.rb b/spec/services/projects/group_links/create_service_spec.rb
index c249a51fc56..7e617a3568e 100644
--- a/spec/services/projects/group_links/create_service_spec.rb
+++ b/spec/services/projects/group_links/create_service_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute' do
     expect { subject.execute(create(:group)) }.not_to change { project.project_group_links.count }
   end
 
-  context 'with specialized_project_authorization_workers' do
+  context 'with specialized project_authorization workers' do
     let_it_be(:other_user) { create(:user) }
 
     before do
@@ -64,25 +64,5 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute' do
 
       subject.execute(group)
     end
-
-    context 'when feature is disabled' do
-      before do
-        stub_feature_flags(specialized_project_authorization_project_share_worker: false)
-      end
-
-      it 'uses AuthorizedProjectsWorker' do
-        expect(AuthorizedProjectsWorker).to(
-          receive(:bulk_perform_async).with(array_including([user.id], [other_user.id])).and_call_original
-        )
-        expect(AuthorizedProjectUpdate::ProjectCreateWorker).not_to(
-          receive(:perform_async)
-        )
-        expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).not_to(
-          receive(:bulk_perform_in)
-        )
-
-        subject.execute(group)
-      end
-    end
  end
 end
diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb
index f7838279212..8d528011752 100644
--- a/spec/workers/bulk_imports/export_request_worker_spec.rb
+++ b/spec/workers/bulk_imports/export_request_worker_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe BulkImports::ExportRequestWorker do
     it 'requests relations export' do
       expected = "/groups/foo%2Fbar/export_relations"
 
-      expect_next_instance_of(BulkImports::Clients::Http) do |client|
+      expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
         expect(client).to receive(:post).with(expected).twice
       end
 
diff --git a/spec/workers/ci/initial_pipeline_process_worker_spec.rb b/spec/workers/ci/initial_pipeline_process_worker_spec.rb
index bb144a3c360..5fb8671fd5c 100644
--- a/spec/workers/ci/initial_pipeline_process_worker_spec.rb
+++ b/spec/workers/ci/initial_pipeline_process_worker_spec.rb
@@ -11,26 +11,12 @@ RSpec.describe Ci::InitialPipelineProcessWorker do
   include_examples 'an idempotent worker' do
     let(:job_args) { pipeline.id }
 
-    context 'when there are runners available' do
-      before do
-        create(:ci_runner, :online)
-      end
-
-      it 'marks the pipeline as pending' do
-        expect(pipeline).to be_created
-
-        subject
-
-        expect(pipeline.reload).to be_pending
-      end
-    end
-
-    it 'marks the pipeline as failed' do
+    it 'marks the pipeline as pending' do
       expect(pipeline).to be_created
 
       subject
 
-      expect(pipeline.reload).to be_failed
+      expect(pipeline.reload).to be_pending
     end
   end
 end