From 13e210a5d6f9d3577752ebfb9e3790adf4740be1 Mon Sep 17 00:00:00 2001
From: GitLab Bot
Date: Wed, 22 May 2024 15:18:20 +0000
Subject: [PATCH] Add latest changes from gitlab-org/gitlab@master
---
.rubocop_todo/gitlab/bounded_contexts.yml | 4 +
.../style/inline_disable_annotation.yml | 1 -
.../admin/abuse_reports/components/app.vue | 9 +-
.../boards/components/boards_selector.vue | 4 +-
.../javascripts/diffs/components/app.vue | 52 +-
.../diffs/components/diffs_file_tree.vue | 26 +-
app/assets/javascripts/diffs/store/actions.js | 5 -
app/assets/javascripts/merge_request_tabs.js | 4 +-
.../javascripts/network/branch_graph.js | 3 +-
.../components/date_range_filter.vue | 13 +-
.../components/list_page/image_list.vue | 4 +-
.../components/manifests_list.vue | 4 +-
.../details/components/app.vue | 24 +-
.../list/components/packages_list.vue | 4 +-
.../shared/components/registry_list.vue | 4 +-
.../branch_rules/components/view/constants.js | 2 +
.../branch_rules/components/view/index.vue | 55 +-
.../components/view/protection.vue | 2 +-
.../components/view/rule_drawer.vue | 117 +
.../edit_branch_rule.mutation.graphql | 19 +-
.../components/states/commit_edit.vue | 13 +-
.../components/states/ready_to_merge.vue | 4 +-
.../components/list_selector/index.vue | 18 +-
.../metric_image_details_modal.vue | 181 ++
.../metric_images/metric_images_tab.vue | 100 +-
.../metric_images/metric_images_table.vue | 99 +-
.../components/registry/list_item.vue | 4 +-
.../projects/mirrors_controller.rb | 2 +-
app/models/bulk_import.rb | 21 +-
app/models/bulk_imports/batch_tracker.rb | 5 +
app/models/bulk_imports/entity.rb | 17 +
app/models/bulk_imports/tracker.rb | 17 +
app/models/ci/partition.rb | 19 +
app/models/packages/debian/file_metadatum.rb | 4 +-
.../packages/debian/group_distribution.rb | 5 +-
app/models/packages/debian/package.rb | 67 +
.../packages/debian/project_distribution.rb | 2 +-
app/models/packages/debian/publication.rb | 14 +-
app/models/packages/package.rb | 49 +-
app/services/ci/partitions/create_service.rb | 3 +-
app/services/ci/partitions/sync_service.rb | 41 +
.../extract_changes_metadata_service.rb | 2 +-
.../debian/process_package_file_service.rb | 13 +-
app/services/users/activity_service.rb | 7 +
.../bulk_imports/pipeline_batch_worker.rb | 7 +
app/workers/bulk_imports/pipeline_worker.rb | 8 +
app/workers/ci/partitioning_worker.rb | 6 +
.../use_remote_mirror_create_service.yml | 9 -
db/docs/elastic_index_settings.yml | 5 +-
db/docs/events.yml | 3 +-
db/docs/packages_packages.yml | 1 +
db/docs/push_event_payloads.yml | 1 +
..._unique_index_on_status_to_ci_partition.rb | 18 +
db/schema_migrations/20240506125412 | 1 +
db/structure.sql | 2 +
doc/api/bulk_imports.md | 34 +
doc/api/projects.md | 21 +-
.../cells/impacted_features/group-transfer.md | 4 +
.../blueprints/custom_models/index.md | 28 +-
doc/user/analytics/value_streams_dashboard.md | 2 +-
doc/user/asciidoc.md | 1 +
.../get_started/get_started_managing_code.md | 1 +
doc/user/group/custom_project_templates.md | 1 +
doc/user/group/ssh_certificates.md | 1 +
doc/user/project/changelogs.md | 1 +
doc/user/project/git_attributes.md | 1 +
doc/user/project/highlighting.md | 1 +
.../project/integrations/beyond_identity.md | 1 +
doc/user/project/integrations/git_guardian.md | 1 +
doc/user/project/protected_tags.md | 1 +
doc/user/project/repository/csv.md | 1 +
doc/user/project/repository/geojson.md | 1 +
.../repository/jupyter_notebooks/index.md | 1 +
.../repository/mirror/troubleshooting.md | 1 +
.../project/repository/signed_commits/gpg.md | 1 +
.../repository/signed_commits/index.md | 1 +
.../project/repository/signed_commits/ssh.md | 1 +
.../project/repository/signed_commits/x509.md | 1 +
doc/user/project/system_notes.md | 1 +
doc/user/tasks.md | 6 +-
lib/api/bulk_imports.rb | 25 +
lib/api/entities/project_statistics.rb | 1 +
lib/api/remote_mirrors.rb | 30 +-
lib/gitlab/allowable.rb | 6 +
.../templates/Jobs/SAST.latest.gitlab-ci.yml | 2 -
lib/gitlab/usage/metric_definition.rb | 23 +-
lib/gitlab/view/presenter/delegated.rb | 1 +
lib/tasks/gitlab/seed/group_seed.rake | 5 +-
locale/gitlab.pot | 15 +-
...cking_finished_on_deprecated_migrations.rb | 5 +-
.../projects/mirrors_controller_spec.rb | 61 -
spec/factories/bulk_import/batch_trackers.rb | 4 +
spec/factories/bulk_import/trackers.rb | 4 +
spec/factories/packages/debian/packages.rb | 86 +
spec/factories/packages/packages.rb | 74 -
spec/factories/users.rb | 4 -
spec/frontend/diffs/components/app_spec.js | 32 +-
.../diffs/components/diffs_file_tree_spec.js | 28 +-
spec/frontend/merge_request_tabs_spec.js | 17 +-
.../observability/date_range_filter_spec.js | 67 +-
.../package_list_row_spec.js.snap | 4 +-
.../components/details/package_files_spec.js | 25 +-
.../package_list_row_spec.js.snap | 4 +-
.../components/view/index_spec.js | 56 +-
.../branch_rules/components/view/mock_data.js | 40 +-
.../components/view/protection_spec.js | 2 +-
.../components/view/rule_drawer_spec.js | 60 +
.../components/states/commit_edit_spec.js | 3 +-
.../components/list_selector/index_spec.js | 11 +-
.../metric_images_table_spec.js.snap | 41 +-
.../metric_image_details_modal_spec.js | 321 +++
.../metric_images/metric_images_tab_spec.js | 106 +-
.../metric_images/metric_images_table_spec.js | 70 +-
spec/helpers/storage_helper_spec.rb | 3 +-
spec/lib/gitlab/allowable_spec.rb | 17 +
spec/models/bulk_import_spec.rb | 28 +-
.../models/bulk_imports/batch_tracker_spec.rb | 10 +
spec/models/bulk_imports/entity_spec.rb | 27 +-
spec/models/bulk_imports/tracker_spec.rb | 20 +
spec/models/ci/partition_spec.rb | 44 +-
spec/models/packages/debian/package_spec.rb | 169 ++
.../packages/debian/publication_spec.rb | 29 +-
spec/models/packages/package_spec.rb | 147 --
spec/requests/api/bulk_imports_spec.rb | 32 +
spec/requests/api/ci/runners_spec.rb | 1917 +++++++++++------
.../api/debian_group_packages_spec.rb | 2 +-
.../api/debian_project_packages_spec.rb | 2 +-
.../sentry_detailed_error_request_spec.rb | 6 +-
spec/requests/api/projects_spec.rb | 2 +-
spec/requests/api/remote_mirrors_spec.rb | 37 -
spec/requests/api/usage_data_spec.rb | 2 +-
spec/requests/api/users_spec.rb | 4 -
.../merge_requests_controller_spec.rb | 8 +-
spec/requests/projects/merge_requests_spec.rb | 6 +-
..._finished_on_deprecated_migrations_spec.rb | 6 +-
.../ci/partitions/create_service_spec.rb | 6 +-
.../ci/partitions/sync_service_spec.rb | 85 +
.../find_or_create_incoming_service_spec.rb | 2 +-
.../process_package_file_service_spec.rb | 18 +-
spec/services/users/activity_service_spec.rb | 24 +
...every_metric_definition_shared_examples.rb | 2 +-
.../debian/distribution_shared_examples.rb | 2 +-
.../pipeline_batch_worker_spec.rb | 26 +
.../bulk_imports/pipeline_worker_spec.rb | 42 +
spec/workers/ci/partitioning_worker_spec.rb | 43 +-
.../process_package_file_worker_spec.rb | 4 +-
146 files changed, 3457 insertions(+), 1786 deletions(-)
create mode 100644 app/assets/javascripts/projects/settings/branch_rules/components/view/rule_drawer.vue
create mode 100644 app/assets/javascripts/vue_shared/components/metric_images/metric_image_details_modal.vue
create mode 100644 app/models/packages/debian/package.rb
create mode 100644 app/services/ci/partitions/sync_service.rb
delete mode 100644 config/feature_flags/gitlab_com_derisk/use_remote_mirror_create_service.yml
create mode 100644 db/migrate/20240506125412_add_unique_index_on_status_to_ci_partition.rb
create mode 100644 db/schema_migrations/20240506125412
create mode 100644 spec/factories/packages/debian/packages.rb
create mode 100644 spec/frontend/projects/settings/branch_rules/components/view/rule_drawer_spec.js
create mode 100644 spec/frontend/vue_shared/components/metric_images/metric_image_details_modal_spec.js
create mode 100644 spec/models/packages/debian/package_spec.rb
create mode 100644 spec/services/ci/partitions/sync_service_spec.rb
diff --git a/.rubocop_todo/gitlab/bounded_contexts.yml b/.rubocop_todo/gitlab/bounded_contexts.yml
index d3542440a9e..97e717828c1 100644
--- a/.rubocop_todo/gitlab/bounded_contexts.yml
+++ b/.rubocop_todo/gitlab/bounded_contexts.yml
@@ -978,14 +978,18 @@ Gitlab/BoundedContexts:
- 'app/models/board_project_recent_visit.rb'
- 'app/models/bulk_import.rb'
- 'app/models/bulk_imports/batch_tracker.rb'
+ - 'app/models/bulk_imports/configuration.rb'
+ - 'app/models/bulk_imports/entity.rb'
- 'app/models/bulk_imports/export.rb'
- 'app/models/bulk_imports/export_batch.rb'
- 'app/models/bulk_imports/export_status.rb'
- 'app/models/bulk_imports/export_upload.rb'
+ - 'app/models/bulk_imports/failure.rb'
- 'app/models/bulk_imports/file_transfer.rb'
- 'app/models/bulk_imports/file_transfer/base_config.rb'
- 'app/models/bulk_imports/file_transfer/group_config.rb'
- 'app/models/bulk_imports/file_transfer/project_config.rb'
+ - 'app/models/bulk_imports/tracker.rb'
- 'app/models/chat_name.rb'
- 'app/models/chat_team.rb'
- 'app/models/ci_platform_metric.rb'
diff --git a/.rubocop_todo/style/inline_disable_annotation.yml b/.rubocop_todo/style/inline_disable_annotation.yml
index d76cd4adc5e..34515b80b88 100644
--- a/.rubocop_todo/style/inline_disable_annotation.yml
+++ b/.rubocop_todo/style/inline_disable_annotation.yml
@@ -1861,7 +1861,6 @@ Style/InlineDisableAnnotation:
- 'ee/spec/controllers/concerns/gitlab_subscriptions/seat_count_alert_spec.rb'
- 'ee/spec/controllers/concerns/routable_actions_spec.rb'
- 'ee/spec/controllers/projects/settings/merge_requests_controller_spec.rb'
- - 'ee/spec/elastic/migrate/20230503064300_backfill_project_permissions_in_blobs_using_permutations_spec.rb'
- 'ee/spec/factories/package_metadata/pm_licenses.rb'
- 'ee/spec/factories/security_scans.rb'
- 'ee/spec/features/dashboards/todos_spec.rb'
diff --git a/app/assets/javascripts/admin/abuse_reports/components/app.vue b/app/assets/javascripts/admin/abuse_reports/components/app.vue
index e1e75a4f8d0..521634f7209 100644
--- a/app/assets/javascripts/admin/abuse_reports/components/app.vue
+++ b/app/assets/javascripts/admin/abuse_reports/components/app.vue
@@ -39,12 +39,9 @@ export default {
-
+
this.jumpToFile(-1));
Mousetrap.bind(keysFor(MR_NEXT_FILE_IN_DIFF), () => this.jumpToFile(+1));
@@ -619,32 +629,36 @@ export default {
);
}
- let keydownTime;
Mousetrap.bind(['mod+f', 'mod+g'], () => {
- keydownTime = new Date().getTime();
+ this.keydownTime = new Date().getTime();
});
- window.addEventListener('blur', () => {
- if (keydownTime) {
- const delta = new Date().getTime() - keydownTime;
+ window.addEventListener('blur', this.handleBrowserFindActivation);
- // To make sure the user is using the find function we need to wait for blur
- // and max 1000ms to be sure it the search box is filtered
- if (delta >= 0 && delta < 1000) {
- this.disableVirtualScroller();
-
- api.trackRedisHllUserEvent('i_code_review_user_searches_diff');
- api.trackRedisCounterEvent('diff_searches');
- }
- }
- });
+ this.listenersAttached = true;
},
removeEventListeners() {
Mousetrap.unbind(keysFor(MR_PREVIOUS_FILE_IN_DIFF));
Mousetrap.unbind(keysFor(MR_NEXT_FILE_IN_DIFF));
Mousetrap.unbind(keysFor(MR_COMMITS_NEXT_COMMIT));
Mousetrap.unbind(keysFor(MR_COMMITS_PREVIOUS_COMMIT));
- Mousetrap.unbind(['ctrl+f', 'command+f']);
+ Mousetrap.unbind(['ctrl+f', 'command+f', 'mod+f', 'mod+g']);
+ window.removeEventListener('blur', this.handleBrowserFindActivation);
+ this.listenersAttached = false;
+ },
+ handleBrowserFindActivation() {
+ if (!this.keydownTime) return;
+
+ const delta = new Date().getTime() - this.keydownTime;
+
+ // To make sure the user is using the find function we need to wait for blur
+ // and check that it occurs within 1000ms to be sure the browser search box was opened
+ if (delta >= 0 && delta < 1000) {
+ this.disableVirtualScroller();
+
+ api.trackRedisHllUserEvent('i_code_review_user_searches_diff');
+ api.trackRedisCounterEvent('diff_searches');
+ }
},
jumpToFile(step) {
const targetIndex = this.currentDiffIndex + step;
@@ -709,6 +723,10 @@ export default {
this.trackEvent(types[event.name]);
}
},
+ fileTreeToggled() {
+ this.toggleTreeList();
+ this.adjustView();
+ },
},
howToMergeDocsPath: helpPagePath('user/project/merge_requests/merge_request_troubleshooting.md', {
anchor: 'check-out-merge-requests-locally-through-the-head-ref',
@@ -738,7 +756,7 @@ export default {
:data-can-create-note="getNoteableData.current_user.can_create_note"
class="files d-flex gl-mt-2"
>
-
+
-// eslint-disable-next-line no-restricted-imports
-import { mapActions, mapState } from 'vuex';
import { Mousetrap } from '~/lib/mousetrap';
import { keysFor, MR_TOGGLE_FILE_BROWSER } from '~/behaviors/shortcuts/keybindings';
import PanelResizer from '~/vue_shared/components/panel_resizer.vue';
@@ -18,7 +16,7 @@ export default {
minTreeWidth: MIN_TREE_WIDTH,
maxTreeWidth: window.innerWidth / 2,
props: {
- renderDiffFiles: {
+ visible: {
type: Boolean,
required: true,
},
@@ -32,33 +30,29 @@ export default {
};
},
computed: {
- ...mapState('diffs', ['showTreeList']),
- renderFileTree() {
- return this.renderDiffFiles && this.showTreeList;
- },
hideFileStats() {
return this.treeWidth <= TREE_HIDE_STATS_WIDTH;
},
},
- watch: {
- renderFileTree() {
- this.$emit('toggled');
- },
- },
mounted() {
- Mousetrap.bind(keysFor(MR_TOGGLE_FILE_BROWSER), this.toggleTreeList);
+ Mousetrap.bind(keysFor(MR_TOGGLE_FILE_BROWSER), this.toggle);
},
beforeDestroy() {
- Mousetrap.unbind(keysFor(MR_TOGGLE_FILE_BROWSER), this.toggleTreeList);
+ Mousetrap.unbind(keysFor(MR_TOGGLE_FILE_BROWSER), this.toggle);
},
methods: {
- ...mapActions('diffs', ['cacheTreeListWidth', 'toggleTreeList']),
+ toggle() {
+ this.$emit('toggled');
+ },
+ cacheTreeListWidth(size) {
+ localStorage.setItem(TREE_LIST_WIDTH_STORAGE_KEY, size);
+ },
},
};
-
+
{
commit(types.TOGGLE_FILE_FINDER_VISIBLE, visible);
};
-export const cacheTreeListWidth = (_, size) => {
- localStorage.setItem(TREE_LIST_WIDTH_STORAGE_KEY, size);
-};
-
export const receiveFullDiffError = ({ commit }, filePath) => {
commit(types.RECEIVE_FULL_DIFF_ERROR, filePath);
createAlert({
diff --git a/app/assets/javascripts/merge_request_tabs.js b/app/assets/javascripts/merge_request_tabs.js
index c51bef92e18..48e4189810e 100644
--- a/app/assets/javascripts/merge_request_tabs.js
+++ b/app/assets/javascripts/merge_request_tabs.js
@@ -5,7 +5,7 @@ import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import { createAlert } from '~/alert';
import { getCookie, isMetaClick, parseBoolean, scrollToElement } from '~/lib/utils/common_utils';
-import { parseUrlPathname } from '~/lib/utils/url_utility';
+import { parseUrlPathname, visitUrl } from '~/lib/utils/url_utility';
import createEventHub from '~/helpers/event_hub_factory';
import { renderGFM } from '~/behaviors/markdown/render_gfm';
import BlobForkSuggestion from './blob/blob_fork_suggestion';
@@ -282,7 +282,7 @@ export default class MergeRequestTabs {
if (isMetaClick(e)) {
const targetLink = e.currentTarget.getAttribute('href');
- window.open(targetLink, '_blank');
+ visitUrl(targetLink, true);
} else if (action) {
const href = e.currentTarget.getAttribute('href');
this.tabShown(action, href);
diff --git a/app/assets/javascripts/network/branch_graph.js b/app/assets/javascripts/network/branch_graph.js
index 5ae68d22667..8adac3b6505 100644
--- a/app/assets/javascripts/network/branch_graph.js
+++ b/app/assets/javascripts/network/branch_graph.js
@@ -2,6 +2,7 @@
import $ from 'jquery';
import axios from '~/lib/utils/axios_utils';
+import { visitUrl } from '~/lib/utils/url_utility';
import { __ } from '~/locale';
import Raphael from './raphael';
@@ -238,7 +239,7 @@ export default class BranchGraph {
opacity: 0,
cursor: 'pointer',
})
- .click(() => window.open(options.commit_url.replace('%s', commit.id), '_blank'))
+ .click(() => visitUrl(options.commit_url.replace('%s', commit.id), true))
.hover(
function () {
this.tooltip = r.commitTooltip(x + 5, y, commit);
diff --git a/app/assets/javascripts/observability/components/date_range_filter.vue b/app/assets/javascripts/observability/components/date_range_filter.vue
index dc577710077..f7aac8472d7 100644
--- a/app/assets/javascripts/observability/components/date_range_filter.vue
+++ b/app/assets/javascripts/observability/components/date_range_filter.vue
@@ -16,6 +16,16 @@ export default {
required: false,
default: null,
},
+ maxDateRange: {
+ type: Number,
+ required: false,
+ default: null,
+ },
+ dateOptions: {
+ type: Array,
+ required: false,
+ default: () => TIME_RANGE_OPTIONS,
+ },
},
data() {
return {
@@ -28,7 +38,7 @@ export default {
},
computed: {
dateRangeOptions() {
- return TIME_RANGE_OPTIONS.map((option) => {
+ return this.dateOptions.map((option) => {
const dateRange = periodToDate(option.value);
return {
value: option.value,
@@ -95,6 +105,7 @@ export default {
:default-start-date="dateRange.startDate"
:default-end-date="dateRange.endDate"
:default-max-date="defaultMaxDate"
+ :max-date-range="maxDateRange"
@input="onCustomRangeSelected"
/>
diff --git a/app/assets/javascripts/packages_and_registries/container_registry/explorer/components/list_page/image_list.vue b/app/assets/javascripts/packages_and_registries/container_registry/explorer/components/list_page/image_list.vue
index ffba64f58f8..63216c009e6 100644
--- a/app/assets/javascripts/packages_and_registries/container_registry/explorer/components/list_page/image_list.vue
+++ b/app/assets/javascripts/packages_and_registries/container_registry/explorer/components/list_page/image_list.vue
@@ -26,7 +26,7 @@ export default {
-
+
diff --git a/app/assets/javascripts/packages_and_registries/dependency_proxy/components/manifests_list.vue b/app/assets/javascripts/packages_and_registries/dependency_proxy/components/manifests_list.vue
index 462de03d19f..bbababcb77e 100644
--- a/app/assets/javascripts/packages_and_registries/dependency_proxy/components/manifests_list.vue
+++ b/app/assets/javascripts/packages_and_registries/dependency_proxy/components/manifests_list.vue
@@ -53,14 +53,14 @@ export default {
-
+
-
+
diff --git a/app/assets/javascripts/packages_and_registries/infrastructure_registry/list/components/packages_list.vue b/app/assets/javascripts/packages_and_registries/infrastructure_registry/list/components/packages_list.vue
index 6139db9f3bd..6f644d7a8c3 100644
--- a/app/assets/javascripts/packages_and_registries/infrastructure_registry/list/components/packages_list.vue
+++ b/app/assets/javascripts/packages_and_registries/infrastructure_registry/list/components/packages_list.vue
@@ -75,7 +75,7 @@ export default {
-
+
-
+
{
+ const isRedirectNeeded = !branchProtection;
+ if (isRedirectNeeded) {
+ visitUrl(setUrlParams({ branch: name }));
+ } else {
+ this.closeAllowedToMergeDrawer();
+ this.$toast.show(toastMessage);
+ }
+ })
.catch(() => {
createAlert({ message: this.$options.i18n.updateBranchRuleError });
+ })
+ .finally(() => {
+ this.isRuleUpdating = false;
});
},
},
@@ -260,7 +285,7 @@ export default {
{{ $options.i18n.edit }}
@@ -283,7 +308,6 @@ export default {
-
+
+
diff --git a/app/assets/javascripts/projects/settings/branch_rules/components/view/protection.vue b/app/assets/javascripts/projects/settings/branch_rules/components/view/protection.vue
index ddcace4e950..f9b1fffb63f 100644
--- a/app/assets/javascripts/projects/settings/branch_rules/components/view/protection.vue
+++ b/app/assets/javascripts/projects/settings/branch_rules/components/view/protection.vue
@@ -100,7 +100,7 @@ export default {
{{ __('Edit') }}
diff --git a/app/assets/javascripts/projects/settings/branch_rules/components/view/rule_drawer.vue b/app/assets/javascripts/projects/settings/branch_rules/components/view/rule_drawer.vue
new file mode 100644
index 00000000000..4812e6f40aa
--- /dev/null
+++ b/app/assets/javascripts/projects/settings/branch_rules/components/view/rule_drawer.vue
@@ -0,0 +1,117 @@
+
+
+
+
+
+ {{ title }}
+
+
+
+
+ {{ __('Save changes') }}
+
+
+ {{ __('Cancel') }}
+
+
+
+
+
+
+
+
+
diff --git a/app/assets/javascripts/projects/settings/branch_rules/mutations/edit_branch_rule.mutation.graphql b/app/assets/javascripts/projects/settings/branch_rules/mutations/edit_branch_rule.mutation.graphql
index 8ea192a7d52..c24b4b87536 100644
--- a/app/assets/javascripts/projects/settings/branch_rules/mutations/edit_branch_rule.mutation.graphql
+++ b/app/assets/javascripts/projects/settings/branch_rules/mutations/edit_branch_rule.mutation.graphql
@@ -1,9 +1,24 @@
-mutation editBrachRule($id: ProjectsBranchRuleID!, $name: String!) {
- branchRuleUpdate(input: { id: $id, name: $name }) {
+mutation editBrachRule($input: BranchRuleUpdateInput!) {
+ branchRuleUpdate(input: $input) {
errors
branchRule {
id
name
+ branchProtection {
+ allowForcePush
+ pushAccessLevels {
+ nodes {
+ accessLevel
+ accessLevelDescription
+ }
+ }
+ mergeAccessLevels {
+ nodes {
+ accessLevel
+ accessLevelDescription
+ }
+ }
+ }
}
}
}
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/commit_edit.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/commit_edit.vue
index 65a34423c27..92fac6d3baa 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/states/commit_edit.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/states/commit_edit.vue
@@ -5,6 +5,10 @@ export default {
components: {
GlFormTextarea,
},
+ model: {
+ prop: 'value',
+ event: 'input',
+ },
props: {
value: {
type: String,
@@ -19,11 +23,6 @@ export default {
required: true,
},
},
- data() {
- return {
- messageText: this.value,
- };
- },
};
@@ -40,13 +39,13 @@ export default {
$emit('input', val)"
/>
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue
index b6924cc9e34..7a87245887d 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue
@@ -605,7 +605,7 @@ export default {
({}),
+ },
+ isProjectOnlyNamespace: {
+ type: Boolean,
+ required: false,
+ default: false,
+ },
},
data() {
return {
@@ -59,6 +69,9 @@ export default {
config() {
return CONFIG[this.type];
},
+ showNamespaceDropdown() {
+ return this.config.showNamespaceDropdown && !this.isProjectOnlyNamespace;
+ },
namespaceDropdownText() {
return parseBoolean(this.isProjectNamespace)
? this.$options.i18n.projectGroups
@@ -105,7 +118,7 @@ export default {
}
},
async fetchUsersBySearchTerm(search) {
- const users = await Api.projectUsers(this.projectPath, search);
+ const users = await Api.projectUsers(this.projectPath, search, this.usersQueryOptions);
return users?.map((user) => ({
text: user.name,
@@ -228,10 +241,11 @@ export default {
diff --git a/app/assets/javascripts/vue_shared/components/metric_images/metric_image_details_modal.vue b/app/assets/javascripts/vue_shared/components/metric_images/metric_image_details_modal.vue
new file mode 100644
index 00000000000..608f1731244
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/metric_images/metric_image_details_modal.vue
@@ -0,0 +1,181 @@
+
+
+
+
+
+ {{ $options.i18n.description }}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {{ $options.i18n.cancel }}
+
+
+
+ {{ edit ? $options.i18n.update : $options.i18n.upload }}
+
+
+
+
diff --git a/app/assets/javascripts/vue_shared/components/metric_images/metric_images_tab.vue b/app/assets/javascripts/vue_shared/components/metric_images/metric_images_tab.vue
index f5399924fdb..b752eef7582 100644
--- a/app/assets/javascripts/vue_shared/components/metric_images/metric_images_tab.vue
+++ b/app/assets/javascripts/vue_shared/components/metric_images/metric_images_tab.vue
@@ -1,99 +1,48 @@
@@ -103,40 +52,11 @@ export default {
-
- {{ $options.i18n.modalDescription }}
-
-
-
-
-
-
-
-
+ />
import {
GlButton,
- GlFormGroup,
- GlFormInput,
GlCard,
GlIcon,
GlLink,
@@ -13,6 +11,7 @@ import {
// eslint-disable-next-line no-restricted-imports
import { mapActions } from 'vuex';
import { __, s__ } from '~/locale';
+import MetricImageDetailsModal from './metric_image_details_modal.vue';
export default {
i18n: {
@@ -20,20 +19,18 @@ export default {
modalDescription: s__('Incident|Are you sure you wish to delete this image?'),
modalCancel: __('Cancel'),
modalTitle: s__('Incident|Deleting %{filename}'),
- editModalUpdate: __('Update'),
- editModalTitle: s__('Incident|Editing %{filename}'),
editIconTitle: s__('Incident|Edit image text or link'),
deleteIconTitle: s__('Incident|Delete image'),
+ editButtonLabel: __('Edit'),
},
components: {
GlButton,
- GlFormGroup,
- GlFormInput,
GlCard,
GlIcon,
GlLink,
GlModal,
GlSprintf,
+ MetricImageDetailsModal,
},
directives: {
GlTooltip: GlTooltipDirective,
@@ -67,11 +64,8 @@ export default {
return {
isCollapsed: false,
isDeleting: false,
- isUpdating: false,
modalVisible: false,
editModalVisible: false,
- modalUrl: this.url,
- modalUrlText: this.urlText,
};
},
computed: {
@@ -86,17 +80,6 @@ export default {
},
};
},
- updateActionPrimaryProps() {
- return {
- text: this.$options.i18n.editModalUpdate,
- attributes: {
- loading: this.isUpdating,
- disabled: this.isUpdating,
- category: 'primary',
- variant: 'confirm',
- },
- };
- },
arrowIconName() {
return this.isCollapsed ? 'chevron-right' : 'chevron-down';
},
@@ -110,16 +93,10 @@ export default {
},
},
methods: {
- ...mapActions(['deleteImage', 'updateImage']),
+ ...mapActions(['deleteImage']),
toggleCollapsed() {
this.isCollapsed = !this.isCollapsed;
},
- resetEditFields() {
- this.modalUrl = this.url;
- this.modalUrlText = this.urlText;
- this.editModalVisible = false;
- this.modalVisible = false;
- },
async onDelete() {
try {
this.isDeleting = true;
@@ -129,21 +106,6 @@ export default {
this.modalVisible = false;
}
},
- async onUpdate() {
- try {
- this.isUpdating = true;
- await this.updateImage({
- imageId: this.id,
- url: this.modalUrl,
- urlText: this.modalUrlText,
- });
- } finally {
- this.isUpdating = false;
- this.modalUrl = '';
- this.modalUrlText = '';
- this.editModalVisible = false;
- }
- },
},
};
@@ -164,7 +126,7 @@ export default {
text: $options.i18n.modalCancel,
} /* eslint-enable @gitlab/vue-no-new-non-primitive-in-template */"
@primary.prevent="onDelete"
- @hidden="resetEditFields"
+ @hidden="modalVisible = false"
>
@@ -176,46 +138,15 @@ export default {
{{ $options.i18n.modalDescription }}
-
-
-
-
- {{ filename }}
-
-
-
-
-
-
-
-
-
-
-
-
+ @hidden="editModalVisible = false"
+ />
@@ -242,7 +173,7 @@ export default {
v-if="canUpdate"
v-gl-tooltip.bottom
icon="pencil"
- :aria-label="__('Edit')"
+ :aria-label="$options.i18n.editButtonLabel"
:title="$options.i18n.editIconTitle"
data-testid="edit-button"
@click="editModalVisible = true"
@@ -251,7 +182,7 @@ export default {
v-if="canUpdate"
v-gl-tooltip.bottom
icon="remove"
- :aria-label="__('Delete')"
+ :aria-label="$options.i18n.modalDelete"
:title="$options.i18n.deleteIconTitle"
data-testid="delete-button"
@click="modalVisible = true"
diff --git a/app/assets/javascripts/vue_shared/components/registry/list_item.vue b/app/assets/javascripts/vue_shared/components/registry/list_item.vue
index bb166ce6f22..bd1269894ea 100644
--- a/app/assets/javascripts/vue_shared/components/registry/list_item.vue
+++ b/app/assets/javascripts/vue_shared/components/registry/list_item.vue
@@ -58,7 +58,7 @@ export default {
-
@@ -159,5 +159,5 @@ export default {
-
+
diff --git a/app/controllers/projects/mirrors_controller.rb b/app/controllers/projects/mirrors_controller.rb
index f9d8c2aac49..6bc604d4493 100644
--- a/app/controllers/projects/mirrors_controller.rb
+++ b/app/controllers/projects/mirrors_controller.rb
@@ -17,7 +17,7 @@ class Projects::MirrorsController < Projects::ApplicationController
end
def update
- if push_mirror_create? && Feature.enabled?(:use_remote_mirror_create_service, project)
+ if push_mirror_create?
service = ::RemoteMirrors::CreateService.new(project, current_user, push_mirror_attributes)
result = service.execute
diff --git a/app/models/bulk_import.rb b/app/models/bulk_import.rb
index 3381ff881f1..3e3cf74831f 100644
--- a/app/models/bulk_import.rb
+++ b/app/models/bulk_import.rb
@@ -4,6 +4,8 @@
# projects to a GitLab instance. It associates the import with the responsible
# user.
class BulkImport < ApplicationRecord
+ include AfterCommitQueue
+
MIN_MAJOR_VERSION = 14
MIN_MINOR_VERSION_FOR_PROJECT = 4
@@ -26,6 +28,7 @@ class BulkImport < ApplicationRecord
state :finished, value: 2
state :timeout, value: 3
state :failed, value: -1
+ state :canceled, value: -2
event :start do
transition created: :started
@@ -44,11 +47,21 @@ class BulkImport < ApplicationRecord
transition any => :failed
end
+ event :cancel do
+ transition any => :canceled
+ end
+
# rubocop:disable Style/SymbolProc
after_transition any => [:finished, :failed, :timeout] do |bulk_import|
bulk_import.update_has_failures
end
# rubocop:enable Style/SymbolProc
+
+ after_transition any => [:canceled] do |bulk_import|
+ bulk_import.run_after_commit do
+ bulk_import.propagate_cancel
+ end
+ end
end
def source_version_info
@@ -74,11 +87,17 @@ class BulkImport < ApplicationRecord
update!(has_failures: true)
end
+ def propagate_cancel
+ return unless entities.any?
+
+ entities.each(&:cancel)
+ end
+
def supports_batched_export?
source_version_info >= self.class.min_gl_version_for_migration_in_batches
end
def completed?
- finished? || failed? || timeout?
+ finished? || failed? || timeout? || canceled?
end
end
diff --git a/app/models/bulk_imports/batch_tracker.rb b/app/models/bulk_imports/batch_tracker.rb
index 09f220b96b0..0035237623c 100644
--- a/app/models/bulk_imports/batch_tracker.rb
+++ b/app/models/bulk_imports/batch_tracker.rb
@@ -27,6 +27,7 @@ module BulkImports
state :timeout, value: 3
state :failed, value: -1
state :skipped, value: -2
+ state :canceled, value: -3
event :start do
transition created: :started
@@ -53,6 +54,10 @@ module BulkImports
event :cleanup_stale do
transition [:created, :started] => :timeout
end
+
+ event :cancel do
+ transition any => :canceled
+ end
end
end
end
diff --git a/app/models/bulk_imports/entity.rb b/app/models/bulk_imports/entity.rb
index 74b075c64d4..0c5b3ec0c66 100644
--- a/app/models/bulk_imports/entity.rb
+++ b/app/models/bulk_imports/entity.rb
@@ -18,6 +18,8 @@
# The tree structure of the entities results in the same structure for imported
# Groups and Projects.
class BulkImports::Entity < ApplicationRecord
+ include AfterCommitQueue
+
self.table_name = 'bulk_import_entities'
FailedError = Class.new(StandardError)
@@ -71,6 +73,7 @@ class BulkImports::Entity < ApplicationRecord
state :finished, value: 2
state :timeout, value: 3
state :failed, value: -1
+ state :canceled, value: -2
event :start do
transition created: :started
@@ -90,11 +93,21 @@ class BulkImports::Entity < ApplicationRecord
transition started: :timeout
end
+ event :cancel do
+ transition any => :canceled
+ end
+
# rubocop:disable Style/SymbolProc
after_transition any => [:finished, :failed, :timeout] do |entity|
entity.update_has_failures
end
# rubocop:enable Style/SymbolProc
+
+ after_transition any => [:canceled] do |entity|
+ entity.run_after_commit do
+ entity.propagate_cancel
+ end
+ end
end
def self.all_human_statuses
@@ -221,6 +234,10 @@ class BulkImports::Entity < ApplicationRecord
end
end
+ def propagate_cancel
+ trackers.each(&:cancel)
+ end
+
private
def validate_parent_is_a_group
diff --git a/app/models/bulk_imports/tracker.rb b/app/models/bulk_imports/tracker.rb
index faf45c66616..5890aca591f 100644
--- a/app/models/bulk_imports/tracker.rb
+++ b/app/models/bulk_imports/tracker.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class BulkImports::Tracker < ApplicationRecord
+ include AfterCommitQueue
+
self.table_name = 'bulk_import_trackers'
alias_attribute :pipeline_name, :relation
@@ -52,6 +54,7 @@ class BulkImports::Tracker < ApplicationRecord
state :timeout, value: 4
state :failed, value: -1
state :skipped, value: -2
+ state :canceled, value: -3
event :start do
transition enqueued: :started
@@ -83,6 +86,10 @@ class BulkImports::Tracker < ApplicationRecord
transition any => :failed
end
+ event :cancel do
+ transition any => :canceled
+ end
+
event :cleanup_stale do
transition [:created, :started] => :timeout
end
@@ -90,6 +97,12 @@ class BulkImports::Tracker < ApplicationRecord
after_transition any => [:finished, :failed] do |tracker|
BulkImports::ObjectCounter.persist!(tracker)
end
+
+ after_transition any => [:canceled] do |tracker|
+ tracker.run_after_commit do
+ tracker.propagate_cancel
+ end
+ end
end
def checksums
@@ -111,6 +124,10 @@ class BulkImports::Tracker < ApplicationRecord
pipeline_class.relation.to_sym
end
+ def propagate_cancel
+ batches.each(&:cancel)
+ end
+
private
def cached_checksums
diff --git a/app/models/ci/partition.rb b/app/models/ci/partition.rb
index 4396532c539..3fa079688ca 100644
--- a/app/models/ci/partition.rb
+++ b/app/models/ci/partition.rb
@@ -2,7 +2,10 @@
module Ci
class Partition < Ci::ApplicationRecord
+ MAX_PARTITION_SIZE = 100.gigabytes
+
validates :id, :status, presence: true
+ validates :status, uniqueness: { if: ->(partition) { partition.status_changed? && partition.current? } }
state_machine :status, initial: :preparing do
state :preparing, value: 0
@@ -13,6 +16,14 @@ module Ci
event :ready do
transition preparing: :ready
end
+
+ event :switch_writes do
+ transition ready: :current
+ end
+
+ before_transition [:ready] => :current do
+ Ci::Partition.with_status(:current).update_all(status: Ci::Partition.statuses[:active])
+ end
end
scope :id_after, ->(partition_id) { where(arel_table[:id].gt(partition_id)) }
@@ -29,6 +40,14 @@ module Ci
def create_next!
create!(id: last.id.next, status: statuses[:preparing])
end
+
+ def next_available(partition_id)
+ Ci::Partition
+ .with_status(:ready)
+ .id_after(partition_id)
+ .order(id: :asc)
+ .first
+ end
end
def above_threshold?(threshold)
diff --git a/app/models/packages/debian/file_metadatum.rb b/app/models/packages/debian/file_metadatum.rb
index 325ae0c468e..ca6dd899a72 100644
--- a/app/models/packages/debian/file_metadatum.rb
+++ b/app/models/packages/debian/file_metadatum.rb
@@ -18,10 +18,10 @@ module Packages
validates :file_type, presence: true
validates :file_type, inclusion: { in: %w[unknown] },
- if: -> { package_file&.package&.debian_incoming? || package_file&.package&.processing? }
+ if: -> { package_file&.package&.incoming? || package_file&.package&.processing? }
validates :file_type,
inclusion: { in: %w[source dsc deb udeb buildinfo changes ddeb] },
- if: -> { package_file&.package&.debian_package? && !package_file&.package&.processing? }
+ if: -> { !package_file&.package&.incoming? && !package_file&.package&.processing? }
validates :component,
presence: true,
diff --git a/app/models/packages/debian/group_distribution.rb b/app/models/packages/debian/group_distribution.rb
index dba38c1b538..cb5b90d6343 100644
--- a/app/models/packages/debian/group_distribution.rb
+++ b/app/models/packages/debian/group_distribution.rb
@@ -8,9 +8,8 @@ class Packages::Debian::GroupDistribution < ApplicationRecord
include Packages::Debian::Distribution
def packages
- Packages::Package
+ ::Packages::Debian::Package
.for_projects(group.all_projects.public_only)
- .debian
- .with_debian_codename(codename)
+ .with_codename(codename)
end
end
diff --git a/app/models/packages/debian/package.rb b/app/models/packages/debian/package.rb
new file mode 100644
index 00000000000..ab36842ad6c
--- /dev/null
+++ b/app/models/packages/debian/package.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+module Packages
+ module Debian
+ class Package < Packages::Package
+ INSTALLABLE_STATUSES = [:default, :hidden].freeze
+
+ self.allow_legacy_sti_class = true
+
+ has_one :publication, inverse_of: :package, class_name: 'Packages::Debian::Publication'
+ has_one :distribution, through: :publication, source: :distribution, inverse_of: :packages,
+ class_name: 'Packages::Debian::ProjectDistribution'
+
+ accepts_nested_attributes_for :publication
+
+ delegate :codename, :suite, to: :distribution, prefix: :distribution
+
+ validates :name, format: { with: Gitlab::Regex.debian_package_name_regex }, if: :version?
+ validates :name, inclusion: { in: [Packages::Debian::INCOMING_PACKAGE_NAME] }, unless: :version?
+
+ validates :version,
+ presence: true,
+ format: { with: Gitlab::Regex.debian_version_regex },
+ if: :version?
+ validate :forbidden_changes
+
+ scope :with_codename, ->(codename) do
+ joins(:distribution).where(Packages::Debian::ProjectDistribution.table_name => { codename: codename })
+ end
+
+ scope :with_codename_or_suite, ->(codename_or_suite) do
+ joins(:distribution)
+ .where(Packages::Debian::ProjectDistribution.table_name => { codename: codename_or_suite })
+ .or(where(Packages::Debian::ProjectDistribution.table_name => { suite: codename_or_suite }))
+ end
+
+ scope :preload_debian_file_metadata, -> { preload(package_files: :debian_file_metadatum) }
+
+ def self.incoming_package!
+ default
+ .with_version(nil)
+ .find_by!(name: Packages::Debian::INCOMING_PACKAGE_NAME)
+ end
+
+ def self.existing_packages_with(name:, version:)
+ with_name(name)
+ .with_version(version)
+ .not_pending_destruction
+ end
+
+ def incoming?
+ name == Packages::Debian::INCOMING_PACKAGE_NAME && version.nil?
+ end
+
+ private
+
+ def forbidden_changes
+ return unless persisted?
+
+ # Debian incoming
+ return unless version_was.nil? || version.nil?
+
+ errors.add(:version, _('cannot be changed')) if version_changed?
+ end
+ end
+ end
+end
diff --git a/app/models/packages/debian/project_distribution.rb b/app/models/packages/debian/project_distribution.rb
index 73777e3b9d8..14c2e6917ca 100644
--- a/app/models/packages/debian/project_distribution.rb
+++ b/app/models/packages/debian/project_distribution.rb
@@ -8,5 +8,5 @@ class Packages::Debian::ProjectDistribution < ApplicationRecord
include Packages::Debian::Distribution
has_many :publications, class_name: 'Packages::Debian::Publication', inverse_of: :distribution, foreign_key: :distribution_id
- has_many :packages, class_name: 'Packages::Package', through: :publications
+ has_many :packages, class_name: 'Packages::Debian::Package', through: :publications
end
diff --git a/app/models/packages/debian/publication.rb b/app/models/packages/debian/publication.rb
index 93f5aa11d81..ff676e8c69a 100644
--- a/app/models/packages/debian/publication.rb
+++ b/app/models/packages/debian/publication.rb
@@ -2,23 +2,15 @@
class Packages::Debian::Publication < ApplicationRecord
belongs_to :package,
- -> { where(package_type: :debian).where.not(version: nil) },
- inverse_of: :debian_publication,
- class_name: 'Packages::Package'
+ -> { where.not(version: nil) },
+ inverse_of: :publication,
+ class_name: 'Packages::Debian::Package'
belongs_to :distribution,
inverse_of: :publications,
class_name: 'Packages::Debian::ProjectDistribution',
foreign_key: :distribution_id
validates :package, presence: true
- validate :valid_debian_package_type
validates :distribution, presence: true
-
- private
-
- def valid_debian_package_type
- return errors.add(:package, _('type must be Debian')) unless package&.debian?
- return errors.add(:package, _('must be a Debian package')) unless package.debian_package?
- end
end
diff --git a/app/models/packages/package.rb b/app/models/packages/package.rb
index f6f352c685a..ba9dd0fe817 100644
--- a/app/models/packages/package.rb
+++ b/app/models/packages/package.rb
@@ -54,14 +54,10 @@ class Packages::Package < ApplicationRecord
has_one :terraform_module_metadatum, inverse_of: :package, class_name: 'Packages::TerraformModule::Metadatum'
has_many :build_infos, inverse_of: :package
has_many :pipelines, through: :build_infos, disable_joins: true
- has_one :debian_publication, inverse_of: :package, class_name: 'Packages::Debian::Publication'
- has_one :debian_distribution, through: :debian_publication, source: :distribution, inverse_of: :packages, class_name: 'Packages::Debian::ProjectDistribution'
has_many :matching_package_protection_rules, -> (package) { where(package_type: package.package_type).for_package_name(package.name) }, through: :project, source: :package_protection_rules
- accepts_nested_attributes_for :debian_publication
accepts_nested_attributes_for :maven_metadatum
- delegate :codename, :suite, to: :debian_distribution, prefix: :debian_distribution
delegate :target_sha, to: :composer_metadatum, prefix: :composer
validates :project, presence: true
@@ -84,8 +80,6 @@ class Packages::Package < ApplicationRecord
validates :name, format: { with: Gitlab::Regex.npm_package_name_regex, message: Gitlab::Regex.npm_package_name_regex_message }, if: :npm?
validates :name, format: { with: Gitlab::Regex.nuget_package_name_regex }, if: :nuget?
validates :name, format: { with: Gitlab::Regex.terraform_module_package_name_regex }, if: :terraform_module?
- validates :name, format: { with: Gitlab::Regex.debian_package_name_regex }, if: :debian_package?
- validates :name, inclusion: { in: [Packages::Debian::INCOMING_PACKAGE_NAME] }, if: :debian_incoming?
validates :version, format: { with: Gitlab::Regex.nuget_version_regex }, if: :nuget?
validates :version, format: { with: Gitlab::Regex.maven_version_regex }, if: -> { version? && maven? }
validates :version, format: { with: Gitlab::Regex.pypi_version_regex }, if: :pypi?
@@ -97,11 +91,6 @@ class Packages::Package < ApplicationRecord
presence: true,
format: { with: Gitlab::Regex.generic_package_version_regex },
if: :generic?
- validates :version,
- presence: true,
- format: { with: Gitlab::Regex.debian_version_regex },
- if: :debian_package?
- validate :forbidden_debian_changes, if: :debian?
scope :for_projects, ->(project_ids) { where(project_id: project_ids) }
scope :with_name, ->(name) { where(name: name) }
@@ -147,14 +136,6 @@ class Packages::Package < ApplicationRecord
scope :including_dependency_links, -> { includes(dependency_links: :dependency) }
scope :including_dependency_links_with_nuget_metadatum, -> { includes(dependency_links: [:dependency, :nuget_metadatum]) }
- scope :with_debian_codename, ->(codename) do
- joins(:debian_distribution).where(Packages::Debian::ProjectDistribution.table_name => { codename: codename })
- end
- scope :with_debian_codename_or_suite, ->(codename_or_suite) do
- joins(:debian_distribution).where(Packages::Debian::ProjectDistribution.table_name => { codename: codename_or_suite })
- .or(where(Packages::Debian::ProjectDistribution.table_name => { suite: codename_or_suite }))
- end
- scope :preload_debian_file_metadata, -> { preload(package_files: :debian_file_metadatum) }
scope :with_composer_target, -> (target) do
includes(:composer_metadatum)
.joins(:composer_metadatum)
@@ -219,7 +200,8 @@ class Packages::Package < ApplicationRecord
golang: 'Packages::Go::Package',
rubygems: 'Packages::Rubygems::Package',
conan: 'Packages::Conan::Package',
- rpm: 'Packages::Rpm::Package'
+ rpm: 'Packages::Rpm::Package',
+ debian: 'Packages::Debian::Package'
}.freeze
def self.only_maven_packages_with_path(path, use_cte: false)
@@ -252,16 +234,6 @@ class Packages::Package < ApplicationRecord
find_by!(name: name, version: version)
end
- def self.debian_incoming_package!
- find_by!(name: Packages::Debian::INCOMING_PACKAGE_NAME, version: nil, package_type: :debian, status: :default)
- end
-
- def self.existing_debian_packages_with(name:, version:)
- debian.with_name(name)
- .with_version(version)
- .not_pending_destruction
- end
-
def self.pluck_names
pluck(:name)
end
@@ -319,14 +291,6 @@ class Packages::Package < ApplicationRecord
terraform_module?
end
- def debian_incoming?
- debian? && version.nil?
- end
-
- def debian_package?
- debian? && !version.nil?
- end
-
def package_settings
project.namespace.package_settings
end
@@ -417,13 +381,4 @@ class Packages::Package < ApplicationRecord
project.root_namespace.path == ::Packages::Npm.scope_of(name)
end
-
- def forbidden_debian_changes
- return unless persisted?
-
- # Debian incoming
- if version_was.nil? || version.nil?
- errors.add(:version, _('cannot be changed')) if version_changed?
- end
- end
end
diff --git a/app/services/ci/partitions/create_service.rb b/app/services/ci/partitions/create_service.rb
index 4628813fa50..730832775d7 100644
--- a/app/services/ci/partitions/create_service.rb
+++ b/app/services/ci/partitions/create_service.rb
@@ -3,7 +3,6 @@
module Ci
module Partitions
class CreateService
- MAX_PARTITION_SIZE = 100.gigabytes
HEADROOM_PARTITIONS = 3
def initialize(partition)
@@ -26,7 +25,7 @@ module Ci
end
def above_threshold?
- partition.above_threshold?(MAX_PARTITION_SIZE)
+ partition.above_threshold?(Ci::Partition::MAX_PARTITION_SIZE)
end
def headroom_available?
diff --git a/app/services/ci/partitions/sync_service.rb b/app/services/ci/partitions/sync_service.rb
new file mode 100644
index 00000000000..a73d96cc5df
--- /dev/null
+++ b/app/services/ci/partitions/sync_service.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module Ci
+ module Partitions
+ class SyncService
+ def initialize(partition)
+ @partition = partition
+ end
+
+ def execute
+ return unless Feature.enabled?(:ci_partitioning_automation, :instance)
+ return unless partition
+
+ sync_available_partitions_statuses!
+
+ next_ci_partition = next_available_partition
+ return unless next_ci_partition.present? && above_threshold?
+
+ next_ci_partition.switch_writes!
+ end
+
+ private
+
+ attr_reader :partition
+
+ def above_threshold?
+ partition.above_threshold?(Ci::Partition::MAX_PARTITION_SIZE)
+ end
+
+ def sync_available_partitions_statuses!
+ Ci::Partition.id_after(partition.id).each do |partition|
+ partition.ready! if partition.all_partitions_exist?
+ end
+ end
+
+ def next_available_partition
+ Ci::Partition.next_available(partition.id)
+ end
+ end
+ end
+end
diff --git a/app/services/packages/debian/extract_changes_metadata_service.rb b/app/services/packages/debian/extract_changes_metadata_service.rb
index 5f06f46de58..a8e4784a02e 100644
--- a/app/services/packages/debian/extract_changes_metadata_service.rb
+++ b/app/services/packages/debian/extract_changes_metadata_service.rb
@@ -107,7 +107,7 @@ module Packages
end
def incoming
- @package_file.package.project.packages.debian_incoming_package!
+ ::Packages::Debian::Package.for_projects(@package_file.package.project).incoming_package!
end
strong_memoize_attr(:incoming)
end
diff --git a/app/services/packages/debian/process_package_file_service.rb b/app/services/packages/debian/process_package_file_service.rb
index 684192f6006..a1bcc0a6f73 100644
--- a/app/services/packages/debian/process_package_file_service.rb
+++ b/app/services/packages/debian/process_package_file_service.rb
@@ -32,7 +32,7 @@ module Packages
cleanup_temp_package
end
- ::Packages::Debian::GenerateDistributionWorker.perform_async(:project, package.debian_distribution.id)
+ ::Packages::Debian::GenerateDistributionWorker.perform_async(:project, package.distribution.id)
end
end
@@ -90,10 +90,9 @@ module Packages
end
def package
- packages = temp_package.project
- .packages
- .existing_debian_packages_with(name: package_name, version: package_version)
- package = packages.with_debian_codename_or_suite(package_distribution)
+ packages = ::Packages::Debian::Package.for_projects(temp_package.project)
+ .existing_packages_with(name: package_name, version: package_version)
+ package = packages.with_codename_or_suite(package_distribution)
.first
unless package
@@ -101,7 +100,7 @@ module Packages
if package_in_other_distribution
raise ArgumentError, "Debian package #{package_name} #{package_version} exists " \
- "in distribution #{package_in_other_distribution.debian_distribution.codename}"
+ "in distribution #{package_in_other_distribution.distribution.codename}"
end
end
@@ -153,7 +152,7 @@ module Packages
return unless using_temporary_package?
package.update!(
- debian_publication_attributes: { distribution_id: distribution.id }
+ publication_attributes: { distribution_id: distribution.id }
)
end
diff --git a/app/services/users/activity_service.rb b/app/services/users/activity_service.rb
index 49f38490b2a..87597d2a59a 100644
--- a/app/services/users/activity_service.rb
+++ b/app/services/users/activity_service.rb
@@ -2,6 +2,8 @@
module Users
class ActivityService
+ LEASE_TIMEOUT = 1.minute.to_i
+
def initialize(author:, namespace: nil, project: nil)
@user = if author.respond_to?(:username)
author
@@ -30,6 +32,11 @@ module Users
today = Date.today
return if user.last_activity_on == today
+ lease = Gitlab::ExclusiveLease.new("activity_service:#{user.id}", timeout: LEASE_TIMEOUT)
+ # Skip transaction checks for exclusive lease as it is breaking system specs.
+ # See issue: https://gitlab.com/gitlab-org/gitlab/-/issues/441536
+ return unless Gitlab::ExclusiveLease.skipping_transaction_check { lease.try_obtain }
+
user.update_attribute(:last_activity_on, today)
Gitlab::UsageDataCounters::HLLRedisCounter.track_event('unique_active_user', values: user.id)
diff --git a/app/workers/bulk_imports/pipeline_batch_worker.rb b/app/workers/bulk_imports/pipeline_batch_worker.rb
index c24cc64e5c0..0d8f5df9cca 100644
--- a/app/workers/bulk_imports/pipeline_batch_worker.rb
+++ b/app/workers/bulk_imports/pipeline_batch_worker.rb
@@ -71,6 +71,7 @@ module BulkImports
def run
return batch.skip! if tracker.failed? || tracker.finished?
+ return cancel_batch if tracker.canceled?
logger.info(log_attributes(message: 'Batch tracker started'))
batch.start!
@@ -149,5 +150,11 @@ module BulkImports
}.merge(extra)
)
end
+
+ def cancel_batch
+ batch.cancel!
+
+ logger.info(log_attributes(message: 'Batch tracker canceled'))
+ end
end
end
diff --git a/app/workers/bulk_imports/pipeline_worker.rb b/app/workers/bulk_imports/pipeline_worker.rb
index ca006f81813..f63368fbe06 100644
--- a/app/workers/bulk_imports/pipeline_worker.rb
+++ b/app/workers/bulk_imports/pipeline_worker.rb
@@ -72,7 +72,9 @@ module BulkImports
attr_reader :pipeline_tracker, :entity
def run
+ return if pipeline_tracker.canceled?
return skip_tracker if entity.failed?
+ return cancel_tracker if entity.canceled?
raise(Pipeline::FailedError, "Export from source instance failed: #{export_status.error}") if export_failed?
raise(Pipeline::ExpiredError, 'Empty export status on source instance') if empty_export_timeout?
@@ -183,6 +185,12 @@ module BulkImports
pipeline_tracker.update!(status_event: 'skip', jid: jid)
end
+ def cancel_tracker
+ logger.info(log_attributes(message: 'Canceling pipeline due to canceled entity'))
+
+ pipeline_tracker.update!(status_event: 'cancel', jid: jid)
+ end
+
def log_attributes(extra = {})
logger.default_attributes.merge(extra)
end
diff --git a/app/workers/ci/partitioning_worker.rb b/app/workers/ci/partitioning_worker.rb
index b9e2a11ac49..87e911a4d4f 100644
--- a/app/workers/ci/partitioning_worker.rb
+++ b/app/workers/ci/partitioning_worker.rb
@@ -12,6 +12,12 @@ module Ci
def perform
Ci::Partitions::SetupDefaultService.new.execute
+
+ ci_partition_current = Ci::Partition.current
+ return unless ci_partition_current
+
+ Ci::Partitions::CreateService.new(ci_partition_current).execute
+ Ci::Partitions::SyncService.new(ci_partition_current).execute
end
end
end
diff --git a/config/feature_flags/gitlab_com_derisk/use_remote_mirror_create_service.yml b/config/feature_flags/gitlab_com_derisk/use_remote_mirror_create_service.yml
deleted file mode 100644
index 0775d728b30..00000000000
--- a/config/feature_flags/gitlab_com_derisk/use_remote_mirror_create_service.yml
+++ /dev/null
@@ -1,9 +0,0 @@
----
-name: use_remote_mirror_create_service
-feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/455515
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/149263
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/455631
-milestone: '17.0'
-group: group::source code
-type: gitlab_com_derisk
-default_enabled: false
diff --git a/db/docs/elastic_index_settings.yml b/db/docs/elastic_index_settings.yml
index e167340f888..cf711dcda74 100644
--- a/db/docs/elastic_index_settings.yml
+++ b/db/docs/elastic_index_settings.yml
@@ -4,8 +4,9 @@ classes:
- Elastic::IndexSetting
feature_categories:
- global_search
-description: TODO
+description: Describes the settings (such as number of shards and replicas) for each Elasticsearch or OpenSearch index
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56344
milestone: '13.11'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/442659
+exempt_from_sharding: true # data is specific to each cell's Elasticsearch cluster, no customer data
\ No newline at end of file
diff --git a/db/docs/events.yml b/db/docs/events.yml
index 4e493fefea3..3301a2f5073 100644
--- a/db/docs/events.yml
+++ b/db/docs/events.yml
@@ -13,5 +13,6 @@ feature_categories:
- user_management
description: Stores events created by users interacting with various product features
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/commit/a847501fd2ffc1c4becc7d0d352d80168d9b3568
-milestone: "2.2"
+milestone: '2.2'
gitlab_schema: gitlab_main_cell
+sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/462801
diff --git a/db/docs/packages_packages.yml b/db/docs/packages_packages.yml
index 14d2e5cfcee..dbd10c8e7ee 100644
--- a/db/docs/packages_packages.yml
+++ b/db/docs/packages_packages.yml
@@ -2,6 +2,7 @@
table_name: packages_packages
classes:
- Packages::Conan::Package
+- Packages::Debian::Package
- Packages::Go::Package
- Packages::MlModel::Package
- Packages::Package
diff --git a/db/docs/push_event_payloads.yml b/db/docs/push_event_payloads.yml
index 35d8e657480..f4795c6a2a4 100644
--- a/db/docs/push_event_payloads.yml
+++ b/db/docs/push_event_payloads.yml
@@ -8,3 +8,4 @@ description: Stores log of push events
introduced_by_url: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/12463
milestone: '9.5'
gitlab_schema: gitlab_main
+sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/462802
diff --git a/db/migrate/20240506125412_add_unique_index_on_status_to_ci_partition.rb b/db/migrate/20240506125412_add_unique_index_on_status_to_ci_partition.rb
new file mode 100644
index 00000000000..dacbbcfe24b
--- /dev/null
+++ b/db/migrate/20240506125412_add_unique_index_on_status_to_ci_partition.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class AddUniqueIndexOnStatusToCiPartition < Gitlab::Database::Migration[2.2]
+ disable_ddl_transaction!
+ milestone '17.1'
+
+ TABLE_NAME = :ci_partitions
+ INDEX_NAME = :index_ci_partitions_on_current_status
+ CURRENT_STATUS = 2
+
+ def up
+ add_concurrent_index(TABLE_NAME, :status, unique: true, where: "status = #{CURRENT_STATUS}", name: INDEX_NAME)
+ end
+
+ def down
+ remove_concurrent_index_by_name(TABLE_NAME, INDEX_NAME)
+ end
+end
diff --git a/db/schema_migrations/20240506125412 b/db/schema_migrations/20240506125412
new file mode 100644
index 00000000000..9a553ef4a6c
--- /dev/null
+++ b/db/schema_migrations/20240506125412
@@ -0,0 +1 @@
+121da77b8cb11e6b16b02bb5eea3ba8d3e7c83fbc5dde23ab94a8ec7c126f95a
\ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index 9465d917dab..2e848b203ea 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -25017,6 +25017,8 @@ CREATE INDEX index_ci_namespace_mirrors_on_traversal_ids_unnest ON ci_namespace_
CREATE UNIQUE INDEX index_ci_namespace_monthly_usages_on_namespace_id_and_date ON ci_namespace_monthly_usages USING btree (namespace_id, date);
+CREATE UNIQUE INDEX index_ci_partitions_on_current_status ON ci_partitions USING btree (status) WHERE (status = 2);
+
CREATE INDEX index_ci_pending_builds_id_on_protected_partial ON ci_pending_builds USING btree (id) WHERE (protected = true);
CREATE UNIQUE INDEX index_ci_pending_builds_on_build_id ON ci_pending_builds USING btree (build_id);
diff --git a/doc/api/bulk_imports.md b/doc/api/bulk_imports.md
index 6ad7ca40fd4..28350fe7cdf 100644
--- a/doc/api/bulk_imports.md
+++ b/doc/api/bulk_imports.md
@@ -306,3 +306,37 @@ curl --request GET --header "PRIVATE-TOKEN: " "https://gitlab
"source_title": "Issue title"
}
```
+
+## Cancel a migration
+
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/438281) in GitLab 17.1.
+
+Cancel a direct transfer migration. Requires administrator access.
+
+```plaintext
+POST /bulk_imports/:id/cancel
+```
+
+```shell
+curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/bulk_imports/1/cancel"
+```
+
+```json
+{
+ "id": 1,
+ "status": "canceled",
+ "source_type": "gitlab",
+ "created_at": "2021-06-18T09:45:55.358Z",
+ "updated_at": "2021-06-18T09:46:27.003Z"
+}
+```
+
+Possible response status codes:
+
+| Status | Description |
+|--------|---------------------------------|
+| 200 | Migration successfully canceled |
+| 401 | Unauthorized |
+| 403 | Forbidden |
+| 404 | Migration not found |
+| 503 | Service unavailable |
diff --git a/doc/api/projects.md b/doc/api/projects.md
index 3f31338a11b..2c16fcba0d7 100644
--- a/doc/api/projects.md
+++ b/doc/api/projects.md
@@ -449,7 +449,8 @@ GET /users/:user_id/projects
"pipeline_artifacts_size": 0,
"packages_size": 0,
"snippets_size": 0,
- "uploads_size": 0
+ "uploads_size": 0,
+ "container_registry_size": 0
},
"container_registry_image_prefix": "registry.example.com/diaspora/diaspora-client",
"_links": {
@@ -580,7 +581,8 @@ GET /users/:user_id/projects
"pipeline_artifacts_size": 0,
"packages_size": 0,
"snippets_size": 0,
- "uploads_size": 0
+ "uploads_size": 0,
+ "container_registry_size": 0
},
"container_registry_image_prefix": "registry.example.com/brightbox/puppet",
"_links": {
@@ -702,7 +704,8 @@ Example response:
"pipeline_artifacts_size": 0,
"packages_size": 0,
"snippets_size": 0,
- "uploads_size": 0
+ "uploads_size": 0,
+ "container_registry_size": 0
},
"container_registry_image_prefix": "registry.example.com/diaspora/diaspora-client",
"_links": {
@@ -822,7 +825,8 @@ Example response:
"pipeline_artifacts_size": 0,
"packages_size": 0,
"snippets_size": 0,
- "uploads_size": 0
+ "uploads_size": 0,
+ "container_registry_size": 0
},
"container_registry_image_prefix": "registry.example.com/brightbox/puppet",
"_links": {
@@ -958,7 +962,8 @@ Example response:
"pipeline_artifacts_size": 0,
"packages_size": 0,
"snippets_size": 0,
- "uploads_size": 0
+ "uploads_size": 0,
+ "container_registry_size": 0
},
"container_registry_image_prefix": "registry.example.com/diaspora/diaspora-client",
"_links": {
@@ -1078,7 +1083,8 @@ Example response:
"pipeline_artifacts_size": 0,
"packages_size": 0,
"snippets_size": 0,
- "uploads_size": 0
+ "uploads_size": 0,
+ "container_registry_size": 0
},
"container_registry_image_prefix": "registry.example.com/brightbox/puppet",
"_links": {
@@ -1265,7 +1271,8 @@ GET /projects/:id
"pipeline_artifacts_size": 0,
"packages_size": 0,
"snippets_size": 0,
- "uploads_size": 0
+ "uploads_size": 0,
+ "container_registry_size": 0
},
"container_registry_image_prefix": "registry.example.com/diaspora/diaspora-client",
"_links": {
diff --git a/doc/architecture/blueprints/cells/impacted_features/group-transfer.md b/doc/architecture/blueprints/cells/impacted_features/group-transfer.md
index c9051cde757..779713af033 100644
--- a/doc/architecture/blueprints/cells/impacted_features/group-transfer.md
+++ b/doc/architecture/blueprints/cells/impacted_features/group-transfer.md
@@ -21,8 +21,12 @@ TL;DR
## 3. Proposal
+There is an [investigation](https://gitlab.com/gitlab-org/gitlab/-/issues/458338) into solving this problem with [direct transfer](../../../../user/group/import/index.md).
+
## 4. Evaluation
## 4.1. Pros
## 4.2. Cons
+
+Direct transfer does not migrate users, and users cannot exist on more than one Cell. This means that in Cells 1.0, for migrations across Cells, all user contributions are assigned to the user performing the import.
diff --git a/doc/architecture/blueprints/custom_models/index.md b/doc/architecture/blueprints/custom_models/index.md
index 5181beba2b4..f14405019b7 100644
--- a/doc/architecture/blueprints/custom_models/index.md
+++ b/doc/architecture/blueprints/custom_models/index.md
@@ -131,13 +131,39 @@ Installation instructions will be added to the Developer documentation. [issue](
_This list will expand in the near future, but the overall architecture will be the same_
+### Self Hosted models fitting into the current architecture
+
+```mermaid
+sequenceDiagram
+ actor User
+ participant GitLab
+ participant AIGateway as AI Gateway
+ participant SelfHostedModel as Self Hosted Model
+ participant GitLabAIVendor as GitLab AI Vendor
+
+ User ->> GitLab: Send request
+ GitLab ->> GitLab: Check if self-hosted model is configured
+ alt Self-hosted model configured
+ GitLab ->> AIGateway: Create prompt and send request
+ AIGateway ->> SelfHostedModel: Perform API request to AI model
+ SelfHostedModel -->> AIGateway: Respond to the prompt
+ AIGateway -->> GitLab: Forward AI response
+ else
+ GitLab ->> AIGateway: Create prompt and send request
+ AIGateway ->> GitLabAIVendor: Perform API request to AI model
+ GitLabAIVendor -->> AIGateway: Respond to the prompt
+ AIGateway -->> GitLab: Forward AI response
+ end
+ GitLab -->> User: Forward AI response
+```
+
### GitLab Duo Feature Support
| Feature | Default Model | [Mistral AI 7B v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1) | [Mixtral 8x22B](https://huggingface.co/mistral-community/Mixtral-8x22B-v0.1) |
|---------------------|------------------|----------------------------------------------------------------|---------------------|
| GitLab Duo Chat | Anthropic Claude-2<br>Vertex AI Codey textembedding-gecko | Not planned | Not planned |
| Code Completion | Vertex AI Codey code-gecko | ✅ | ✅ |
-| Code Generation | Anthropic Claude-2 | ✅ | ✅ |
+| Code Generation | Anthropic Claude-3 | ✅ | ✅ |
| Git Suggestions | Vertex AI Codey codechat-bison | Not planned | Not planned |
| Discussion Summary | Vertex AI Codey text-bison | Not planned | Not planned |
| Issue Description Generation | Anthropic Claude-2 | Not planned | Not planned |
diff --git a/doc/user/analytics/value_streams_dashboard.md b/doc/user/analytics/value_streams_dashboard.md
index 403e5288426..a6c4657a00a 100644
--- a/doc/user/analytics/value_streams_dashboard.md
+++ b/doc/user/analytics/value_streams_dashboard.md
@@ -167,7 +167,7 @@ On GitLab Dedicated this feature is not available.
AI Impact analytics displays SDLC metrics for a group or project in the month-to-date and the past six months. You can use this table to observe how changes in the AI usage metric correlate with changes in other metrics.
-The metric **Monthly Code Suggestions Usage rate** is calculated as the number of monthly unique Code Suggestions users divided by total monthly [unique contributors](../../user/profile/contributions_calendar.md#user-contribution-events). GitLab considers the total monthly unique code contributors, meaning only users with `pushed` events are included in the calculation.
+The metric **Monthly Code Suggestions Usage rate** represents users that engage with Code Suggestions every month. It is calculated as the number of monthly unique Code Suggestions users divided by total monthly [unique contributors](../../user/profile/contributions_calendar.md#user-contribution-events). Only unique code contributors, meaning users with `pushed` events, are included in the calculation.
The month-over-month comparison of the AI Usage unique users rate gives a more accurate indication of this metric, as it eliminates factors such as developer experience level and project type or complexity.
diff --git a/doc/user/asciidoc.md b/doc/user/asciidoc.md
index 6c8cedf0af9..7ef9e79b9cb 100644
--- a/doc/user/asciidoc.md
+++ b/doc/user/asciidoc.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+description: "Use AsciiDoc files in your GitLab project, and understand AsciiDoc syntax."
---
# AsciiDoc
diff --git a/doc/user/get_started/get_started_managing_code.md b/doc/user/get_started/get_started_managing_code.md
index 567fb3c4976..b4133c92f86 100644
--- a/doc/user/get_started/get_started_managing_code.md
+++ b/doc/user/get_started/get_started_managing_code.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: "To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments"
+description: "Learn about the GitLab tools for building, tracking, and delivering the code for your project."
---
# Get started managing code
diff --git a/doc/user/group/custom_project_templates.md b/doc/user/group/custom_project_templates.md
index 9445180a804..2bf5e7761b4 100644
--- a/doc/user/group/custom_project_templates.md
+++ b/doc/user/group/custom_project_templates.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+description: "To speed up project creation in your group, build custom project templates and share them with your group."
---
# Custom group-level project templates
diff --git a/doc/user/group/ssh_certificates.md b/doc/user/group/ssh_certificates.md
index b651cf1fe53..b66a2b80f76 100644
--- a/doc/user/group/ssh_certificates.md
+++ b/doc/user/group/ssh_certificates.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+description: "Manage Git access to projects by adding CA certificates to your top-level group, instead of individual groups."
---
# Manage group's SSH certificates
diff --git a/doc/user/project/changelogs.md b/doc/user/project/changelogs.md
index 90770d970a2..69765a33d78 100644
--- a/doc/user/project/changelogs.md
+++ b/doc/user/project/changelogs.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: "To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments"
+description: "Build, automate, and customize changelogs in your GitLab project."
---
# Changelogs
diff --git a/doc/user/project/git_attributes.md b/doc/user/project/git_attributes.md
index 4c9ce717abc..afd91abc320 100644
--- a/doc/user/project/git_attributes.md
+++ b/doc/user/project/git_attributes.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: "To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments"
+description: "Define custom Git attributes for your GitLab project to set options for file handling, display, locking, and storage."
---
# Git attributes
diff --git a/doc/user/project/highlighting.md b/doc/user/project/highlighting.md
index ca2a19fc64e..21db5123988 100644
--- a/doc/user/project/highlighting.md
+++ b/doc/user/project/highlighting.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: "To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments"
+description: "Syntax highlighting helps you read files in your GitLab project more easily, and identify what files contain."
---
# Syntax Highlighting
diff --git a/doc/user/project/integrations/beyond_identity.md b/doc/user/project/integrations/beyond_identity.md
index 2d0d6142cee..5e91dc3a307 100644
--- a/doc/user/project/integrations/beyond_identity.md
+++ b/doc/user/project/integrations/beyond_identity.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+description: "Integrate GitLab with Beyond Identity to verify GPG keys added to user accounts."
---
# Beyond Identity
diff --git a/doc/user/project/integrations/git_guardian.md b/doc/user/project/integrations/git_guardian.md
index ae098f6c8b6..a50673c4c71 100644
--- a/doc/user/project/integrations/git_guardian.md
+++ b/doc/user/project/integrations/git_guardian.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+description: "Integrate GitLab with GitGuardian to get alerts for policy violations and security issues before they can be exploited."
---
# GitGuardian
diff --git a/doc/user/project/protected_tags.md b/doc/user/project/protected_tags.md
index 16dd88b59ae..281bc2dc080 100644
--- a/doc/user/project/protected_tags.md
+++ b/doc/user/project/protected_tags.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+description: "Use protected tags in Git to control who can create tags, and prevent accidental tag updates or deletion."
---
# Protected tags
diff --git a/doc/user/project/repository/csv.md b/doc/user/project/repository/csv.md
index f64ea5a9bb8..7687692680c 100644
--- a/doc/user/project/repository/csv.md
+++ b/doc/user/project/repository/csv.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+description: "How comma-separated values (CSV) files display in GitLab projects."
---
# CSV files
diff --git a/doc/user/project/repository/geojson.md b/doc/user/project/repository/geojson.md
index b31f54484eb..2b9730bc84b 100644
--- a/doc/user/project/repository/geojson.md
+++ b/doc/user/project/repository/geojson.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+description: "How GeoJSON files are rendered when viewed in GitLab projects."
---
# GeoJSON files
diff --git a/doc/user/project/repository/jupyter_notebooks/index.md b/doc/user/project/repository/jupyter_notebooks/index.md
index 65c87dfeb39..a62aed4c831 100644
--- a/doc/user/project/repository/jupyter_notebooks/index.md
+++ b/doc/user/project/repository/jupyter_notebooks/index.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: "To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments"
+description: "GitLab projects display Jupyter Notebook files as clean, human-readable files instead of raw files."
---
# Jupyter Notebook files
diff --git a/doc/user/project/repository/mirror/troubleshooting.md b/doc/user/project/repository/mirror/troubleshooting.md
index e8b11c611d8..335d9acd0a4 100644
--- a/doc/user/project/repository/mirror/troubleshooting.md
+++ b/doc/user/project/repository/mirror/troubleshooting.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+description: "Troubleshooting problems with repository mirroring for GitLab projects."
---
# Troubleshooting repository mirroring
diff --git a/doc/user/project/repository/signed_commits/gpg.md b/doc/user/project/repository/signed_commits/gpg.md
index 9f57c58f0a3..229459467d0 100644
--- a/doc/user/project/repository/signed_commits/gpg.md
+++ b/doc/user/project/repository/signed_commits/gpg.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+description: "Sign commits in your GitLab repository with GPG (GNU Privacy Guard) keys."
---
# Sign commits with GPG
diff --git a/doc/user/project/repository/signed_commits/index.md b/doc/user/project/repository/signed_commits/index.md
index 303d4daea9d..614779a4e20 100644
--- a/doc/user/project/repository/signed_commits/index.md
+++ b/doc/user/project/repository/signed_commits/index.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+description: "Why you should sign your GitLab commits cryptographically, and how to verify signed commits."
---
# Signed commits
diff --git a/doc/user/project/repository/signed_commits/ssh.md b/doc/user/project/repository/signed_commits/ssh.md
index fe27a017447..7d547656e7f 100644
--- a/doc/user/project/repository/signed_commits/ssh.md
+++ b/doc/user/project/repository/signed_commits/ssh.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+description: "Sign commits in your GitLab repository with SSH keys."
---
# Sign commits with SSH keys
diff --git a/doc/user/project/repository/signed_commits/x509.md b/doc/user/project/repository/signed_commits/x509.md
index df3efb25256..e890b569d0a 100644
--- a/doc/user/project/repository/signed_commits/x509.md
+++ b/doc/user/project/repository/signed_commits/x509.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+description: "Sign commits and tags in your GitLab repository with X.509 certificates."
---
# Sign commits and tags with X.509 certificates
diff --git a/doc/user/project/system_notes.md b/doc/user/project/system_notes.md
index ae59d5cf553..2df06e47819 100644
--- a/doc/user/project/system_notes.md
+++ b/doc/user/project/system_notes.md
@@ -2,6 +2,7 @@
stage: Create
group: Source Code
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+description: "System notes track the history of changes made to an object, like a merge request or issue, in your GitLab project."
---
# System notes
diff --git a/doc/user/tasks.md b/doc/user/tasks.md
index 6331f69b9ac..2bd03ff0e7f 100644
--- a/doc/user/tasks.md
+++ b/doc/user/tasks.md
@@ -348,11 +348,13 @@ To add a task to an iteration:
1. Next to **Iteration**, select **Add to iteration**.
1. From the dropdown list, select the iteration to be associated with the task.
-## Set time tracking
+## Estimate and track spent time
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/438577) in GitLab 17.0.
-To set time tracking, refer to the [time tracking page](project/time_tracking.md)
+You can estimate and track the time you spend on a task.
+
+For more information, see [Time tracking](project/time_tracking.md).
## View task system notes
diff --git a/lib/api/bulk_imports.rb b/lib/api/bulk_imports.rb
index 9dc0e5bae9b..23e32536887 100644
--- a/lib/api/bulk_imports.rb
+++ b/lib/api/bulk_imports.rb
@@ -231,6 +231,31 @@ module API
get ':import_id/entities/:entity_id/failures' do
present paginate(bulk_import_entity.failures), with: Entities::BulkImports::EntityFailure
end
+
+ desc 'Cancel GitLab Migration' do
+ detail 'This feature was introduced in GitLab 17.1'
+ success code: 200, model: Entities::BulkImport
+ failure [
+ { code: 401, message: 'Unauthorized' },
+ { code: 403, message: 'Forbidden' },
+ { code: 404, message: 'Not found' },
+ { code: 503, message: 'Service unavailable' }
+ ]
+ end
+
+ params do
+ requires :import_id, type: Integer, desc: "The ID of the user's GitLab Migration"
+ end
+ post ':import_id/cancel' do
+ authenticated_as_admin!
+
+ bulk_import = BulkImport.find(params[:import_id])
+
+ bulk_import.cancel!
+
+ status :ok
+ present bulk_import, with: Entities::BulkImport
+ end
end
end
end
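The `cancel` endpoint above calls `bulk_import.cancel!`, which implies a state-machine event on the `BulkImport` model. A rough sketch of such an event, with the event, state, and transition names assumed rather than taken from this patch:

```ruby
# Hypothetical sketch only; the real BulkImport state machine may differ.
state_machine :status, initial: :created do
  state :canceled

  event :cancel do
    # Move any unfinished import into a terminal canceled state.
    transition any => :canceled
  end
end
```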
diff --git a/lib/api/entities/project_statistics.rb b/lib/api/entities/project_statistics.rb
index 6544e8bc8ff..52822ed3557 100644
--- a/lib/api/entities/project_statistics.rb
+++ b/lib/api/entities/project_statistics.rb
@@ -13,6 +13,7 @@ module API
expose :packages_size
expose :snippets_size
expose :uploads_size
+ expose :container_registry_size
end
end
end
diff --git a/lib/api/remote_mirrors.rb b/lib/api/remote_mirrors.rb
index 5cd4237a305..526c92461f1 100644
--- a/lib/api/remote_mirrors.rb
+++ b/lib/api/remote_mirrors.rb
@@ -98,30 +98,18 @@ module API
use :mirror_branches_setting
end
post ':id/remote_mirrors' do
- if Feature.enabled?(:use_remote_mirror_create_service, user_project)
- service = ::RemoteMirrors::CreateService.new(
- user_project,
- current_user,
- declared_params(include_missing: false)
- )
+ service = ::RemoteMirrors::CreateService.new(
+ user_project,
+ current_user,
+ declared_params(include_missing: false)
+ )
- result = service.execute
+ result = service.execute
- if result.success?
- present result.payload[:remote_mirror], with: Entities::RemoteMirror
- else
- render_api_error!(result.message, 400)
- end
+ if result.success?
+ present result.payload[:remote_mirror], with: Entities::RemoteMirror
else
- create_params = declared_params(include_missing: false)
- verify_mirror_branches_setting(create_params)
- new_mirror = user_project.remote_mirrors.create(create_params)
-
- if new_mirror.persisted?
- present new_mirror, with: Entities::RemoteMirror
- else
- render_validation_error!(new_mirror)
- end
+ render_api_error!(result.message, 400)
end
end
diff --git a/lib/gitlab/allowable.rb b/lib/gitlab/allowable.rb
index 879247d0174..bcf94def8ea 100644
--- a/lib/gitlab/allowable.rb
+++ b/lib/gitlab/allowable.rb
@@ -5,5 +5,11 @@ module Gitlab
def can?(...)
Ability.allowed?(...)
end
+
+ def can_any?(user, abilities, subject = :global, **opts)
+ abilities.any? do |ability|
+ can?(user, ability, subject, **opts)
+ end
+ end
end
end
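A usage sketch for the new `can_any?` helper; the ability names and subject below are illustrative, not part of this change:

```ruby
# True if the user holds at least one of the listed abilities on the project.
can_any?(current_user, [:read_code, :download_code], project)
```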
diff --git a/lib/gitlab/ci/templates/Jobs/SAST.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/SAST.latest.gitlab-ci.yml
index 2dd8e87323b..35e3d6da483 100644
--- a/lib/gitlab/ci/templates/Jobs/SAST.latest.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/SAST.latest.gitlab-ci.yml
@@ -250,5 +250,3 @@ spotbugs-sast:
- if: $CI_COMMIT_BRANCH # If there's no open merge request, add it to a *branch* pipeline instead.
exists:
- '**/*.groovy'
- - '**/*.scala'
- - '**/*.kt'
diff --git a/lib/gitlab/usage/metric_definition.rb b/lib/gitlab/usage/metric_definition.rb
index 80d0e17744d..9754856d0e1 100644
--- a/lib/gitlab/usage/metric_definition.rb
+++ b/lib/gitlab/usage/metric_definition.rb
@@ -19,8 +19,9 @@ module Gitlab
end
def key
- key_path
+ attributes[:key_path]
end
+ alias_method :key_path, :key
def events
events_from_new_structure || events_from_old_structure || {}
@@ -34,6 +35,14 @@ module Gitlab
end
end
+ def status
+ attributes[:status]
+ end
+
+ def value_json_schema
+ attributes[:value_json_schema]
+ end
+
def to_context
return unless %w[redis redis_hll].include?(data_source)
@@ -84,6 +93,10 @@ module Gitlab
VALID_SERVICE_PING_STATUSES.include?(attributes[:status])
end
+ def data_source
+ attributes[:data_source]
+ end
+
def internal_events?
data_source == 'internal_events'
end
@@ -154,14 +167,6 @@ module Gitlab
private
- def method_missing(method, *args)
- attributes[method] || super
- end
-
- def respond_to_missing?(method, *args)
- attributes[method].present? || super
- end
-
def events_from_new_structure
events = attributes[:events]
return unless events
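The change above swaps `method_missing` lookups for explicit readers. Adding support for another attribute would follow the same shape; the attribute name here is only an example:

```ruby
# Hypothetical reader following the new explicit-reader pattern.
def milestone
  attributes[:milestone]
end
```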
diff --git a/lib/gitlab/view/presenter/delegated.rb b/lib/gitlab/view/presenter/delegated.rb
index 259cf0cf457..ec8b40670d8 100644
--- a/lib/gitlab/view/presenter/delegated.rb
+++ b/lib/gitlab/view/presenter/delegated.rb
@@ -13,6 +13,7 @@ module Gitlab
include Gitlab::View::Presenter::Base
delegator_override_with Gitlab::Routing.url_helpers
delegator_override :can?
+ delegator_override :can_any?
delegator_override :declarative_policy_delegate
delegator_override :present
delegator_override :web_url
diff --git a/lib/tasks/gitlab/seed/group_seed.rake b/lib/tasks/gitlab/seed/group_seed.rake
index 23467d19684..40b9cfe7e4c 100644
--- a/lib/tasks/gitlab/seed/group_seed.rake
+++ b/lib/tasks/gitlab/seed/group_seed.rake
@@ -147,14 +147,15 @@ class GroupSeeder
@resource_count.times do |_|
group = Group.find(group_id)
+ author = group.group_members.non_invite.sample.user
epic_params = {
title: FFaker::Lorem.sentence(6),
description: FFaker::Lorem.paragraphs(3).join("\n\n"),
- author: group.group_members.non_invite.sample.user,
+ author: author,
group: group
}
- Epic.create!(epic_params)
+ ::Epics::CreateService.new(group: group, current_user: author, params: epic_params).execute
end
end
end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 0d82e17febf..120846f421d 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -9038,6 +9038,9 @@ msgstr ""
msgid "BranchRules|Allowed to merge (%{total})"
msgstr ""
+msgid "BranchRules|Allowed to merge updated"
+msgstr ""
+
msgid "BranchRules|Allowed to push and merge"
msgstr ""
@@ -9113,6 +9116,9 @@ msgstr ""
msgid "BranchRules|Edit"
msgstr ""
+msgid "BranchRules|Edit allowed to merge"
+msgstr ""
+
msgid "BranchRules|From users with push access."
msgstr ""
@@ -27199,9 +27205,6 @@ msgstr ""
msgid "Incidents|Drop or %{linkStart}upload%{linkEnd} a metric screenshot to attach it to the incident"
msgstr ""
-msgid "Incidents|Must start with http or https"
-msgstr ""
-
msgid "Incidents|Must start with http:// or https://"
msgstr ""
@@ -62161,9 +62164,6 @@ msgstr ""
msgid "mrWidget|Your password"
msgstr ""
-msgid "must be a Debian package"
-msgstr ""
-
msgid "must be a boolean value"
msgstr ""
@@ -62766,9 +62766,6 @@ msgstr ""
msgid "two-factor authentication settings"
msgstr ""
-msgid "type must be Debian"
-msgstr ""
-
msgid "type parameter is missing and is required"
msgstr ""
diff --git a/rubocop/cop/search/avoid_checking_finished_on_deprecated_migrations.rb b/rubocop/cop/search/avoid_checking_finished_on_deprecated_migrations.rb
index 7fd7db25128..f0471e18bb0 100644
--- a/rubocop/cop/search/avoid_checking_finished_on_deprecated_migrations.rb
+++ b/rubocop/cop/search/avoid_checking_finished_on_deprecated_migrations.rb
@@ -10,18 +10,17 @@ module RuboCop
# # bad
# def disable_project_joins_for_blob?
# Elastic::DataMigrationService
- # .migration_has_finished?(:backfill_project_permissions_in_blobs_using_permutations)
+ # .migration_has_finished?(:backfill_archived_on_issues)
# end
#
# # good
# def disable_project_joins_for_blob?
- # Elastic::DataMigrationService.migration_has_finished?(:backfill_project_permissions_in_blobs)
+ # Elastic::DataMigrationService.migration_has_finished?(:backfill_archived_on_issues)
# end
class AvoidCheckingFinishedOnDeprecatedMigrations < RuboCop::Cop::Base
MSG = 'Migration is deprecated and can not be used with `migration_has_finished?`.'
DEPRECATED_MIGRATIONS = [
- :backfill_project_permissions_in_blobs_using_permutations,
:backfill_archived_on_issues
].freeze
diff --git a/spec/controllers/projects/mirrors_controller_spec.rb b/spec/controllers/projects/mirrors_controller_spec.rb
index 489672f4906..71a8a41540e 100644
--- a/spec/controllers/projects/mirrors_controller_spec.rb
+++ b/spec/controllers/projects/mirrors_controller_spec.rb
@@ -199,67 +199,6 @@ RSpec.describe Projects::MirrorsController, feature_category: :source_code_manag
end
end
- context 'when feature flag "use_remote_mirror_create_service" is disabled' do
- before do
- stub_feature_flags(use_remote_mirror_create_service: false)
- end
-
- context 'With valid URL for a push' do
- let(:remote_mirror_attributes) do
- { "0" => { "enabled" => "0", url: 'https://updated.example.com' } }
- end
-
- it 'processes a successful update' do
- do_put(project, remote_mirrors_attributes: remote_mirror_attributes)
-
- expect(response).to redirect_to(project_settings_repository_path(project, anchor: 'js-push-remote-settings'))
- expect(flash[:notice]).to match(/successfully updated/)
- end
-
- it 'creates a RemoteMirror object' do
- expect { do_put(project, remote_mirrors_attributes: remote_mirror_attributes) }.to change(RemoteMirror, :count).by(1)
- end
-
- context 'with json format' do
- it 'processes a successful update' do
- do_put(project, { remote_mirrors_attributes: remote_mirror_attributes }, { format: :json })
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to include(
- 'id' => project.id,
- 'remote_mirrors_attributes' => a_kind_of(Array)
- )
- end
- end
- end
-
- context 'With invalid URL for a push' do
- let(:remote_mirror_attributes) do
- { "0" => { "enabled" => "0", url: 'ftp://invalid.invalid' } }
- end
-
- it 'processes an unsuccessful update' do
- do_put(project, remote_mirrors_attributes: remote_mirror_attributes)
-
- expect(response).to redirect_to(project_settings_repository_path(project, anchor: 'js-push-remote-settings'))
- expect(flash[:alert]).to match(/Only allowed schemes are/)
- end
-
- it 'does not create a RemoteMirror object' do
- expect { do_put(project, remote_mirrors_attributes: remote_mirror_attributes) }.not_to change(RemoteMirror, :count)
- end
-
- context 'with json format' do
- it 'processes an unsuccessful update' do
- do_put(project, { remote_mirrors_attributes: remote_mirror_attributes }, { format: :json })
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- expect(json_response['remote_mirrors.url']).to include(/Only allowed schemes are/)
- end
- end
- end
- end
-
context 'when user deletes the remote mirror' do
let(:remote_mirror_attributes) do
{ id: project.remote_mirrors.first.id, _destroy: 1 }
diff --git a/spec/factories/bulk_import/batch_trackers.rb b/spec/factories/bulk_import/batch_trackers.rb
index 427eefc5f3e..ca5e33d61b9 100644
--- a/spec/factories/bulk_import/batch_trackers.rb
+++ b/spec/factories/bulk_import/batch_trackers.rb
@@ -33,5 +33,9 @@ FactoryBot.define do
trait :skipped do
status { -2 }
end
+
+ trait :canceled do
+ status { -3 }
+ end
end
end
diff --git a/spec/factories/bulk_import/trackers.rb b/spec/factories/bulk_import/trackers.rb
index f9b5bb12448..52456c71387 100644
--- a/spec/factories/bulk_import/trackers.rb
+++ b/spec/factories/bulk_import/trackers.rb
@@ -33,6 +33,10 @@ FactoryBot.define do
batched { true }
end
+ trait :canceled do
+ status { -3 }
+ end
+
trait :stale do
created_at { 1.day.ago }
end
diff --git a/spec/factories/packages/debian/packages.rb b/spec/factories/packages/debian/packages.rb
new file mode 100644
index 00000000000..cfed2672b37
--- /dev/null
+++ b/spec/factories/packages/debian/packages.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :debian_package, class: 'Packages::Debian::Package' do
+ project
+ creator { project&.creator }
+ status { :default }
+ sequence(:name) { |n| "#{FFaker::Lorem.word}#{n}" }
+ sequence(:version) { |n| "1.0-#{n}" }
+ package_type { :debian }
+
+ trait :pending_destruction do
+ status { :pending_destruction }
+ end
+
+ transient do
+ without_package_files { false }
+ with_changes_file { false }
+ file_metadatum_trait { processing? ? :unknown : :keep }
+ published_in { :create }
+ end
+
+ publication do
+ if published_in == :create
+ association(:debian_publication, package: instance)
+ elsif published_in
+ association(:debian_publication, package: instance, distribution: published_in)
+ end
+ end
+
+ package_files do
+ package_files = []
+
+ unless without_package_files
+ package_files.push(
+ association(:debian_package_file, :source, file_metadatum_trait, package: instance),
+ association(:debian_package_file, :dsc, file_metadatum_trait, package: instance),
+ association(:debian_package_file, :deb, file_metadatum_trait, package: instance),
+ association(:debian_package_file, :deb_dev, file_metadatum_trait, package: instance),
+ association(:debian_package_file, :udeb, file_metadatum_trait, package: instance),
+ association(:debian_package_file, :ddeb, file_metadatum_trait, package: instance),
+ association(:debian_package_file, :buildinfo, file_metadatum_trait, package: instance)
+ )
+ end
+
+ if with_changes_file
+ package_files.push(association(:debian_package_file, :changes, file_metadatum_trait, package: instance))
+ end
+
+ package_files
+ end
+
+ factory :debian_incoming do
+ name { 'incoming' }
+ version { nil }
+
+ transient do
+ without_package_files { false }
+ file_metadatum_trait { :unknown }
+ published_in { nil }
+ end
+ end
+
+ factory :debian_temporary_with_files do
+ status { :processing }
+
+ transient do
+ without_package_files { false }
+ with_changes_file { false }
+ file_metadatum_trait { :unknown }
+ published_in { nil }
+ end
+ end
+
+ factory :debian_temporary_with_changes do
+ status { :processing }
+
+ transient do
+ without_package_files { true }
+ with_changes_file { true }
+ file_metadatum_trait { :unknown }
+ published_in { nil }
+ end
+ end
+ end
+end
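A usage sketch for the relocated factory; the distribution factory name is an assumption and not defined in this patch:

```ruby
# Package published into a specific distribution, with the default set of package files.
distribution = create(:debian_project_distribution)
package = create(:debian_package, project: distribution.project, published_in: distribution)

# Package with no files and no publication.
bare_package = create(:debian_package, without_package_files: true, published_in: nil)
```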
diff --git a/spec/factories/packages/packages.rb b/spec/factories/packages/packages.rb
index 46325426c94..f08faf4f517 100644
--- a/spec/factories/packages/packages.rb
+++ b/spec/factories/packages/packages.rb
@@ -43,80 +43,6 @@ FactoryBot.define do
end
end
- factory :debian_package do
- sequence(:name) { |n| "#{FFaker::Lorem.word}#{n}" }
- sequence(:version) { |n| "1.0-#{n}" }
- package_type { :debian }
-
- transient do
- without_package_files { false }
- with_changes_file { false }
- file_metadatum_trait { processing? ? :unknown : :keep }
- published_in { :create }
- end
-
- after :build do |package, evaluator|
- if evaluator.published_in == :create
- build(:debian_publication, package: package)
- elsif !evaluator.published_in.nil?
- create(:debian_publication, package: package, distribution: evaluator.published_in)
- end
- end
-
- after :create do |package, evaluator|
- if evaluator.published_in == :create
- package.debian_publication.save!
- end
-
- unless evaluator.without_package_files
- create :debian_package_file, :source, evaluator.file_metadatum_trait, package: package
- create :debian_package_file, :dsc, evaluator.file_metadatum_trait, package: package
- create :debian_package_file, :deb, evaluator.file_metadatum_trait, package: package
- create :debian_package_file, :deb_dev, evaluator.file_metadatum_trait, package: package
- create :debian_package_file, :udeb, evaluator.file_metadatum_trait, package: package
- create :debian_package_file, :ddeb, evaluator.file_metadatum_trait, package: package
- create :debian_package_file, :buildinfo, evaluator.file_metadatum_trait, package: package
- end
-
- if evaluator.with_changes_file
- create :debian_package_file, :changes, evaluator.file_metadatum_trait, package: package
- end
- end
-
- factory :debian_incoming do
- name { 'incoming' }
- version { nil }
-
- transient do
- without_package_files { false }
- file_metadatum_trait { :unknown }
- published_in { nil }
- end
- end
-
- factory :debian_temporary_with_files do
- status { :processing }
-
- transient do
- without_package_files { false }
- with_changes_file { false }
- file_metadatum_trait { :unknown }
- published_in { nil }
- end
- end
-
- factory :debian_temporary_with_changes do
- status { :processing }
-
- transient do
- without_package_files { true }
- with_changes_file { true }
- file_metadatum_trait { :unknown }
- published_in { nil }
- end
- end
- end
-
factory :helm_package do
sequence(:name) { |n| "package-#{n}" }
sequence(:version) { |n| "v1.0.#{n}" }
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index 86e0b2ed182..0504ff9302c 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -150,10 +150,6 @@ FactoryBot.define do
avatar { fixture_file_upload('spec/fixtures/dk.png') }
end
- trait :with_last_activity_on_today do
- last_activity_on { Date.today }
- end
-
trait :with_sign_ins do
sign_in_count { 3 }
current_sign_in_at { FFaker::Time.between(10.days.ago, 1.day.ago) }
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index 69f40e9875d..3d8b7ef66e3 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -590,18 +590,42 @@ describe('diffs/components/app', () => {
expect(wrapper.findComponent(DiffsFileTree).exists()).toBe(true);
});
- it('should pass renderDiffFiles to file tree as true when files are present', () => {
+ it('should pass visible to file tree as true when files are present', () => {
createComponent({
extendStore: ({ state }) => {
state.diffs.treeEntries = { 111: { type: 'blob', fileHash: '111', path: '111.js' } };
},
});
- expect(wrapper.findComponent(DiffsFileTree).props('renderDiffFiles')).toBe(true);
+ expect(wrapper.findComponent(DiffsFileTree).props('visible')).toBe(true);
});
- it('should pass renderDiffFiles to file tree as false without files', () => {
+ it('should pass visible to file tree as false without files', () => {
createComponent({});
- expect(wrapper.findComponent(DiffsFileTree).props('renderDiffFiles')).toBe(false);
+ expect(wrapper.findComponent(DiffsFileTree).props('visible')).toBe(false);
+ });
+
+ it('should hide file tree when toggled', async () => {
+ createComponent({
+ extendStore: ({ state }) => {
+ state.diffs.treeEntries = { 111: { type: 'blob', fileHash: '111', path: '111.js' } };
+ },
+ });
+ wrapper.findComponent(DiffsFileTree).vm.$emit('toggled');
+ await nextTick();
+ expect(wrapper.findComponent(DiffsFileTree).props('visible')).toBe(false);
+ });
+
+ it('should show file tree when toggled', async () => {
+ createComponent({
+ extendStore: ({ state }) => {
+ state.diffs.treeEntries = { 111: { type: 'blob', fileHash: '111', path: '111.js' } };
+ },
+ });
+ wrapper.findComponent(DiffsFileTree).vm.$emit('toggled');
+ await nextTick();
+ wrapper.findComponent(DiffsFileTree).vm.$emit('toggled');
+ await nextTick();
+ expect(wrapper.findComponent(DiffsFileTree).props('visible')).toBe(true);
});
});
diff --git a/spec/frontend/diffs/components/diffs_file_tree_spec.js b/spec/frontend/diffs/components/diffs_file_tree_spec.js
index a79023a07cb..f9959619754 100644
--- a/spec/frontend/diffs/components/diffs_file_tree_spec.js
+++ b/spec/frontend/diffs/components/diffs_file_tree_spec.js
@@ -4,20 +4,14 @@ import { Mousetrap } from '~/lib/mousetrap';
import DiffsFileTree from '~/diffs/components/diffs_file_tree.vue';
import TreeList from '~/diffs/components/tree_list.vue';
import PanelResizer from '~/vue_shared/components/panel_resizer.vue';
-import { SET_SHOW_TREE_LIST } from '~/diffs/store/mutation_types';
-import createDiffsStore from '../create_diffs_store';
describe('DiffsFileTree', () => {
let wrapper;
- let store;
- const createComponent = ({ renderDiffFiles = true, showTreeList = true } = {}) => {
- store = createDiffsStore();
- store.commit(`diffs/${SET_SHOW_TREE_LIST}`, showTreeList);
+ const createComponent = ({ visible = true } = {}) => {
wrapper = shallowMount(DiffsFileTree, {
- store,
propsData: {
- renderDiffFiles,
+ visible,
},
});
};
@@ -35,7 +29,7 @@ describe('DiffsFileTree', () => {
describe('when renderDiffFiles and showTreeList are false', () => {
beforeEach(() => {
- createComponent({ renderDiffFiles: false, showTreeList: false });
+ createComponent({ visible: false });
});
it('tree list is hidden', () => {
@@ -44,18 +38,11 @@ describe('DiffsFileTree', () => {
});
});
- it('emits toggled event', async () => {
- createComponent();
- store.commit(`diffs/${SET_SHOW_TREE_LIST}`, false);
- await nextTick();
- expect(wrapper.emitted('toggled')).toStrictEqual([[]]);
- });
-
it('toggles when "f" hotkey is pressed', async () => {
createComponent();
Mousetrap.trigger('f');
await nextTick();
- expect(wrapper.findComponent(TreeList).exists()).toBe(false);
+ expect(wrapper.emitted('toggled')).toStrictEqual([[]]);
});
describe('size', () => {
@@ -84,13 +71,6 @@ describe('DiffsFileTree', () => {
checkWidth(200);
});
- it('sets width of tree list', () => {
- createComponent({}, ({ state }) => {
- state.diffs.treeEntries = { 111: { type: 'blob', fileHash: '111', path: '111.js' } };
- });
- checkWidth(320);
- });
-
it('updates width', async () => {
const WIDTH = 500;
createComponent();
diff --git a/spec/frontend/merge_request_tabs_spec.js b/spec/frontend/merge_request_tabs_spec.js
index c748f9acda7..eb6ebac08ab 100644
--- a/spec/frontend/merge_request_tabs_spec.js
+++ b/spec/frontend/merge_request_tabs_spec.js
@@ -8,12 +8,17 @@ import axios from '~/lib/utils/axios_utils';
import MergeRequestTabs, { getActionFromHref } from '~/merge_request_tabs';
import Diff from '~/diff';
import '~/lib/utils/common_utils';
-import '~/lib/utils/url_utility';
+import { visitUrl } from '~/lib/utils/url_utility';
jest.mock('~/lib/utils/webpack', () => ({
resetServiceWorkersPublicPath: jest.fn(),
}));
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ visitUrl: jest.fn(),
+}));
+
describe('MergeRequestTabs', () => {
const testContext = {};
const stubLocation = {};
@@ -127,7 +132,7 @@ describe('MergeRequestTabs', () => {
testContext.class.bindEvents();
$('.merge-request-tabs .commits-tab a').trigger(metakeyEvent);
- expect(window.open).toHaveBeenCalled();
+ expect(visitUrl).toHaveBeenCalledWith(expect.any(String), true);
});
it('opens page when commits badge is clicked', () => {
@@ -139,7 +144,7 @@ describe('MergeRequestTabs', () => {
testContext.class.bindEvents();
$('.merge-request-tabs .commits-tab a .badge').trigger(metakeyEvent);
- expect(window.open).toHaveBeenCalled();
+ expect(visitUrl).toHaveBeenCalledWith(expect.any(String), true);
});
});
@@ -151,7 +156,7 @@ describe('MergeRequestTabs', () => {
testContext.class.clickTab({ ...clickTabParams, metaKey: true });
- expect(window.open).toHaveBeenCalled();
+ expect(visitUrl).toHaveBeenCalledWith(expect.any(String), true);
});
it('opens page tab in a new browser tab with Cmd+Click - Mac', () => {
@@ -162,7 +167,7 @@ describe('MergeRequestTabs', () => {
testContext.class.clickTab({ ...clickTabParams, ctrlKey: true });
- expect(window.open).toHaveBeenCalled();
+ expect(visitUrl).toHaveBeenCalledWith(expect.any(String), true);
});
it('opens page tab in a new browser tab with Middle-click - Mac/PC', () => {
@@ -173,7 +178,7 @@ describe('MergeRequestTabs', () => {
testContext.class.clickTab({ ...clickTabParams, which: 2 });
- expect(window.open).toHaveBeenCalled();
+ expect(visitUrl).toHaveBeenCalledWith(expect.any(String), true);
});
});
diff --git a/spec/frontend/observability/date_range_filter_spec.js b/spec/frontend/observability/date_range_filter_spec.js
index 6c74e32534b..bbda3ea57d7 100644
--- a/spec/frontend/observability/date_range_filter_spec.js
+++ b/spec/frontend/observability/date_range_filter_spec.js
@@ -10,22 +10,22 @@ describe('DateRangeFilter', () => {
let wrapper;
- const defaultTimeRange = {
- value: '1h',
- startDate: new Date(),
- endDate: new Date(),
+ const defaultProps = {
+ selected: {
+ value: '1h',
+ startDate: new Date(),
+ endDate: new Date(),
+ },
};
- const mount = (selected) => {
+ const mount = (props = defaultProps) => {
wrapper = shallowMountExtended(DateRangeFilter, {
- propsData: {
- selected,
- },
+ propsData: props,
});
};
beforeEach(() => {
- mount(defaultTimeRange);
+ mount();
});
const findDateRangesDropdown = () => wrapper.findComponent(DateRangesDropdown);
@@ -34,7 +34,7 @@ describe('DateRangeFilter', () => {
it('renders the date ranges dropdown with the default selected value and options', () => {
const dateRangesDropdown = findDateRangesDropdown();
expect(dateRangesDropdown.exists()).toBe(true);
- expect(dateRangesDropdown.props('selected')).toBe(defaultTimeRange.value);
+ expect(dateRangesDropdown.props('selected')).toBe(defaultProps.selected.value);
expect(dateRangesDropdown.props('dateRangeOptions')).toMatchInlineSnapshot(`
Array [
Object {
@@ -101,8 +101,23 @@ describe('DateRangeFilter', () => {
`);
});
+ it('renders dateRangeOptions based on dateOptions if specified', () => {
+ mount({ ...defaultProps, dateOptions: [{ value: '7m', title: 'Last 7 minutes' }] });
+
+ expect(findDateRangesDropdown().props('dateRangeOptions')).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "endDate": 2024-04-23T04:00:00.000Z,
+ "startDate": 2024-04-23T03:53:00.000Z,
+ "text": "Last 7 minutes",
+ "value": "7m",
+ },
+ ]
+ `);
+ });
+
it('does not set the selected value if not specified', () => {
- mount(undefined);
+ mount({ selected: undefined });
expect(findDateRangesDropdown().props('selected')).toBe('');
});
@@ -112,7 +127,9 @@ describe('DateRangeFilter', () => {
startDate: new Date('2022-01-01'),
endDate: new Date('2022-01-02'),
};
- mount({ value: 'custom', startDate: timeRange.startDate, endDate: timeRange.endDate });
+ mount({
+ selected: { value: 'custom', startDate: timeRange.startDate, endDate: timeRange.endDate },
+ });
expect(findDateRangesPicker().exists()).toBe(true);
expect(findDateRangesPicker().props('defaultStartDate')).toBe(timeRange.startDate);
@@ -153,17 +170,19 @@ describe('DateRangeFilter', () => {
});
describe('start opened', () => {
- it('sets startOpend to true if custom date is selected without start and end date', () => {
- mount({ value: 'custom' });
+ it('sets startOpened to true if custom date is selected without start and end date', () => {
+ mount({ selected: { value: 'custom' } });
expect(findDateRangesPicker().props('startOpened')).toBe(true);
});
- it('sets startOpend to false if custom date is selected with start and end date', () => {
+ it('sets startOpened to false if custom date is selected with start and end date', () => {
mount({
- value: 'custom',
- startDate: new Date('2022-01-01'),
- endDate: new Date('2022-01-02'),
+ selected: {
+ value: 'custom',
+ startDate: new Date('2022-01-01'),
+ endDate: new Date('2022-01-02'),
+ },
});
expect(findDateRangesPicker().props('startOpened')).toBe(false);
@@ -183,4 +202,16 @@ describe('DateRangeFilter', () => {
'2024-04-24T00:00:00.000Z',
);
});
+ it('sets max-date-range to maxDateRange', () => {
+ mount({
+ selected: {
+ value: 'custom',
+ startDate: new Date('2022-01-01'),
+ endDate: new Date('2022-01-02'),
+ },
+ maxDateRange: 7,
+ });
+
+ expect(findDateRangesPicker().props('maxDateRange')).toBe(7);
+ });
});
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap
index 3fcd7371960..e9875d7afd3 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap
@@ -1,7 +1,7 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`packages_list_row renders 1`] = `
-
@@ -87,5 +87,5 @@ exports[`packages_list_row renders 1`] = `
/>
-
+
`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
index dc1c65b7ee8..ec58a35f304 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
@@ -864,18 +864,29 @@ describe('Package Files', () => {
});
});
});
+
describe('upload slot', () => {
const resolver = jest.fn().mockResolvedValue(packageFilesQuery({ files: [file] }));
- const uploadSlotSpy = jest.fn();
-
- const callFetchSlotProp = () => uploadSlotSpy.mock.calls[0][0].refetch();
+ const findUploadSlot = () => wrapper.findByTestId('upload-slot');
beforeEach(async () => {
createComponent({
resolver,
options: {
scopedSlots: {
- upload: uploadSlotSpy,
+ upload(props) {
+ return this.$createElement('div', {
+ attrs: {
+ 'data-testid': 'upload-slot',
+ ...props,
+ },
+ on: {
+ click: () => {
+ return props.refetch();
+ },
+ },
+ });
+ },
},
},
});
@@ -883,11 +894,11 @@ describe('Package Files', () => {
});
it('should render slot content', () => {
- expect(uploadSlotSpy).toHaveBeenLastCalledWith({ refetch: expect.anything() });
+ expect(findUploadSlot().attributes()).toMatchObject({ refetch: expect.anything() });
});
- it('should refetch on change', () => {
- callFetchSlotProp();
+ it('should refetch when clicked', async () => {
+ await findUploadSlot().trigger('click');
expect(resolver).toHaveBeenCalledTimes(2);
});
});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
index 0183449b8d2..c9cb6f8f4ce 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
@@ -1,7 +1,7 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`packages_list_row renders 1`] = `
-
@@ -114,5 +114,5 @@ exports[`packages_list_row renders 1`] = `
-
+
`;
diff --git a/spec/frontend/projects/settings/branch_rules/components/view/index_spec.js b/spec/frontend/projects/settings/branch_rules/components/view/index_spec.js
index d51fea8dd8d..84ebeb3de04 100644
--- a/spec/frontend/projects/settings/branch_rules/components/view/index_spec.js
+++ b/spec/frontend/projects/settings/branch_rules/components/view/index_spec.js
@@ -10,6 +10,7 @@ import { createAlert } from '~/alert';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { stubComponent, RENDER_ALL_SLOTS_TEMPLATE } from 'helpers/stub_component';
import RuleView from '~/projects/settings/branch_rules/components/view/index.vue';
+import RuleDrawer from '~/projects/settings/branch_rules/components/view/rule_drawer.vue';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import Protection from '~/projects/settings/branch_rules/components/view/protection.vue';
import BranchRuleModal from '~/projects/settings/components/branch_rule_modal.vue';
@@ -27,7 +28,7 @@ import {
} from '~/projects/settings/branch_rules/components/view/constants';
import branchRulesQuery from 'ee_else_ce/projects/settings/branch_rules/queries/branch_rules_details.query.graphql';
import deleteBranchRuleMutation from '~/projects/settings/branch_rules/mutations/branch_rule_delete.mutation.graphql';
-import editBranchRuleMutation from '~/projects/settings/branch_rules/mutations/edit_branch_rule.mutation.graphql';
+import editBranchRuleMutation from 'ee_else_ce/projects/settings/branch_rules/mutations/edit_branch_rule.mutation.graphql';
import {
editBranchRuleMockResponse,
deleteBranchRuleMockResponse,
@@ -35,6 +36,7 @@ import {
predefinedBranchRulesMockResponse,
matchingBranchesCount,
protectableBranchesMockResponse,
+ allowedToMergeDrawerProps,
} from 'ee_else_ce_jest/projects/settings/branch_rules/components/view/mock_data';
jest.mock('~/lib/utils/url_utility', () => ({
@@ -77,6 +79,7 @@ describe('View branch rules', () => {
.fn()
.mockResolvedValue(protectableBranchesMockResponse);
const errorHandler = jest.fn().mockRejectedValue('error');
+ const toastMock = { show: jest.fn() };
const createComponent = async (
glFeatures = { editBranchRules: true },
@@ -97,9 +100,13 @@ describe('View branch rules', () => {
stubs: {
Protection,
BranchRuleModal,
+ RuleDrawer,
GlCard: stubComponent(GlCard, { template: RENDER_ALL_SLOTS_TEMPLATE }),
GlModal: stubComponent(GlModal, { template: RENDER_ALL_SLOTS_TEMPLATE }),
},
+ mocks: {
+ $toast: toastMock,
+ },
directives: { GlModal: createMockDirective('gl-modal') },
});
@@ -119,11 +126,13 @@ describe('View branch rules', () => {
const findpageTitle = () => wrapper.findByText(I18N.pageTitle);
const findStatusChecksTitle = () => wrapper.findByText(I18N.statusChecksTitle);
const findDeleteRuleButton = () => wrapper.findByTestId('delete-rule-button');
+ const findEditRuleNameButton = () => wrapper.findByTestId('edit-rule-name-button');
const findEditRuleButton = () => wrapper.findByTestId('edit-rule-button');
const findDeleteRuleModal = () => wrapper.findComponent(GlModal);
const findBranchRuleModal = () => wrapper.findComponent(BranchRuleModal);
const findBranchRuleListbox = () => wrapper.findComponent(GlCollapsibleListbox);
const findNoDataTitle = () => wrapper.findByText(I18N.noData);
+ const findRuleDrawer = () => wrapper.findComponent(RuleDrawer);
const findMatchingBranchesLink = () =>
wrapper.findByText(
@@ -230,7 +239,7 @@ describe('View branch rules', () => {
describe('Editing branch rule', () => {
it('renders edit branch rule button', () => {
- expect(findEditRuleButton().text()).toBe('Edit');
+ expect(findEditRuleNameButton().text()).toBe('Edit');
});
it('passes correct props to the edit rule modal', () => {
@@ -242,7 +251,7 @@ describe('View branch rules', () => {
});
it('renders correct modal id for the edit button', () => {
- const binding = getBinding(findEditRuleButton().element, 'gl-modal');
+ const binding = getBinding(findEditRuleNameButton().element, 'gl-modal');
expect(binding.value).toBe(EDIT_RULE_MODAL_ID);
});
@@ -255,9 +264,12 @@ describe('View branch rules', () => {
it('when edit button in the modal is clicked it makes a call to edit rule and redirects to new branch rule page', async () => {
findBranchRuleModal().vm.$emit('primary', 'main');
await nextTick();
+ await waitForPromises();
expect(editBranchRuleSuccessHandler).toHaveBeenCalledWith({
- id: 'gid://gitlab/Projects/BranchRule/1',
- name: 'main',
+ input: {
+ id: 'gid://gitlab/Projects/BranchRule/1',
+ name: 'main',
+ },
});
await waitForPromises();
expect(util.setUrlParams).toHaveBeenCalledWith({ branch: 'main' });
@@ -328,7 +340,7 @@ describe('View branch rules', () => {
});
it('does not render edit button', () => {
- expect(findEditRuleButton().exists()).toBe(false);
+ expect(findEditRuleNameButton().exists()).toBe(false);
});
it('does not render Protect Branch section', () => {
@@ -336,6 +348,36 @@ describe('View branch rules', () => {
});
});
+ describe('Allowed to merge editing', () => {
+ it('renders the edit button', () => {
+ expect(findEditRuleButton().text()).toBe('Edit');
+ });
+ it('passes expected props to rule drawer', () => {
+ expect(findRuleDrawer().props()).toMatchObject(allowedToMergeDrawerProps);
+ });
+ it('when edit button is clicked it opens rule drawer', async () => {
+ findEditRuleButton().vm.$emit('click');
+ await nextTick();
+ expect(findRuleDrawer().props('isOpen')).toBe(true);
+ });
+ it('when save button is clicked it calls edit rule mutation', async () => {
+ findRuleDrawer().vm.$emit('editRule', { accessLevel: 30 });
+ await nextTick();
+ await waitForPromises();
+ expect(editBranchRuleSuccessHandler).toHaveBeenCalledWith({
+ input: {
+ branchProtection: {
+ mergeAccessLevels: {
+ accessLevel: 30,
+ },
+ },
+ id: 'gid://gitlab/Projects/BranchRule/1',
+ name: 'main',
+ },
+ });
+ });
+ });
+
describe('When rendered for a non-existing rule', () => {
beforeEach(async () => {
jest.spyOn(util, 'getParameterByName').mockReturnValueOnce('non-existing-rule');
@@ -355,7 +397,7 @@ describe('View branch rules', () => {
});
it('does not render edit rule button and modal', () => {
- expect(findEditRuleButton().exists()).toBe(false);
+ expect(findEditRuleNameButton().exists()).toBe(false);
expect(findBranchRuleModal().exists()).toBe(false);
});
});
diff --git a/spec/frontend/projects/settings/branch_rules/components/view/mock_data.js b/spec/frontend/projects/settings/branch_rules/components/view/mock_data.js
index f3ff47f16d4..8723b6587c6 100644
--- a/spec/frontend/projects/settings/branch_rules/components/view/mock_data.js
+++ b/spec/frontend/projects/settings/branch_rules/components/view/mock_data.js
@@ -1,4 +1,4 @@
-const usersMock = [
+export const usersMock = [
{
id: '123',
username: 'usr1',
@@ -37,8 +37,8 @@ const usersMock = [
];
const accessLevelsMock = [
- { accessLevelDescription: 'Administrator' },
- { accessLevelDescription: 'Maintainer' },
+ { accessLevelDescription: 'Maintainers' },
+ { accessLevelDescription: 'Maintainers + Developers' },
];
const approvalsRequired = 3;
@@ -93,6 +93,12 @@ export const accessLevelsMockResponse = [
},
];
+export const mergeAccessLevelsMockResponse = {
+ __typename: 'MergeAccessLevel',
+ accessLevel: 30,
+ accessLevelDescription: 'Maintainers',
+};
+
export const matchingBranchesCount = 3;
export const branchProtectionsMockResponse = {
@@ -188,9 +194,28 @@ export const deleteBranchRuleMockResponse = {
export const editBranchRuleMockResponse = {
data: {
- branchRule: {
+ branchRuleUpdate: {
errors: [],
__typename: 'BranchRuleEditPayload',
+ branchRule: {
+ __typename: 'BranchRule',
+ name: 'newname',
+ isDefault: true,
+ id: 'gid://gitlab/Projects/BranchRule/1',
+ matchingBranchesCount,
+ branchProtection: {
+ __typename: 'BranchProtection',
+ allowForcePush: true,
+ mergeAccessLevels: {
+ __typename: 'MergeAccessLevelConnection',
+ nodes: [mergeAccessLevelsMockResponse],
+ },
+ pushAccessLevels: {
+ __typename: 'PushAccessLevelConnection',
+ nodes: [],
+ },
+ },
+ },
},
},
};
@@ -206,3 +231,10 @@ export const protectableBranchesMockResponse = {
},
},
};
+
+export const allowedToMergeDrawerProps = {
+ isLoading: false,
+ isOpen: false,
+ title: 'Edit allowed to merge',
+ roles: accessLevelsMock,
+};
diff --git a/spec/frontend/projects/settings/branch_rules/components/view/protection_spec.js b/spec/frontend/projects/settings/branch_rules/components/view/protection_spec.js
index fbba8e4474f..6427ffeb802 100644
--- a/spec/frontend/projects/settings/branch_rules/components/view/protection_spec.js
+++ b/spec/frontend/projects/settings/branch_rules/components/view/protection_spec.js
@@ -27,8 +27,8 @@ describe('Branch rule protection', () => {
const findHeader = () => wrapper.findByText(protectionPropsMock.header);
const findLink = () => wrapper.findComponent(GlLink);
const findProtectionRows = () => wrapper.findAllComponents(ProtectionRow);
- const findEditButton = () => wrapper.findByTestId('edit-button');
const findEmptyState = () => wrapper.findByTestId('protection-empty-state');
+ const findEditButton = () => wrapper.findByTestId('edit-rule-button');
it('renders a card component', () => {
expect(findCard().exists()).toBe(true);
diff --git a/spec/frontend/projects/settings/branch_rules/components/view/rule_drawer_spec.js b/spec/frontend/projects/settings/branch_rules/components/view/rule_drawer_spec.js
new file mode 100644
index 00000000000..84af949949a
--- /dev/null
+++ b/spec/frontend/projects/settings/branch_rules/components/view/rule_drawer_spec.js
@@ -0,0 +1,60 @@
+import { GlDrawer } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { getContentWrapperHeight } from '~/lib/utils/dom_utils';
+import { DRAWER_Z_INDEX } from '~/lib/utils/constants';
+import RuleDrawer from '~/projects/settings/branch_rules/components/view/rule_drawer.vue';
+import { allowedToMergeDrawerProps } from 'ee_else_ce_jest/projects/settings/branch_rules/components/view/mock_data';
+
+jest.mock('~/lib/utils/dom_utils', () => ({ getContentWrapperHeight: jest.fn() }));
+
+const TEST_HEADER_HEIGHT = '123px';
+
+describe('Edit Rule Drawer', () => {
+ let wrapper;
+
+ const findDrawer = () => wrapper.findComponent(GlDrawer);
+ const findCancelButton = () => wrapper.findByText('Cancel');
+ const findHeader = () => wrapper.find('h2');
+ const findSaveButton = () => wrapper.findByTestId('save-allowed-to-merge');
+
+ const createComponent = (props = allowedToMergeDrawerProps) => {
+ wrapper = shallowMountExtended(RuleDrawer, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ getContentWrapperHeight.mockReturnValue(TEST_HEADER_HEIGHT);
+ createComponent();
+ });
+
+ describe('rendering', () => {
+ it('renders the correct title when adding', () => {
+ expect(findHeader().text()).toBe('Edit allowed to merge');
+ });
+
+ it('renders drawer with props', () => {
+ expect(findDrawer().props()).toMatchObject({
+ open: false,
+ headerHeight: TEST_HEADER_HEIGHT,
+ zIndex: DRAWER_Z_INDEX,
+ });
+ });
+ });
+
+ it('disables the save button when no changes are made', () => {
+ expect(findSaveButton().attributes('disabled')).toBeDefined();
+ });
+
+ it('emits an edit rule event when save button is clicked', () => {
+ findSaveButton().vm.$emit('click');
+ expect(wrapper.emitted('editRule')).toHaveLength(1);
+ });
+
+ it('emits a close event when cancel button is clicked', () => {
+ findCancelButton().vm.$emit('click');
+ expect(wrapper.emitted('close')).toHaveLength(1);
+ });
+});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/commit_edit_spec.js b/spec/frontend/vue_merge_request_widget/components/states/commit_edit_spec.js
index 665765cd951..cf51eb11ba1 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/commit_edit_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/commit_edit_spec.js
@@ -50,10 +50,11 @@ describe('Commits edit component', () => {
it('emits an input event and receives changed value', async () => {
const changedCommitMessage = 'Changed commit message';
+ wrapper.setProps({ value: changedCommitMessage });
findTextarea().vm.$emit('input', changedCommitMessage);
await nextTick();
- expect(wrapper.emitted().input[0]).toEqual([changedCommitMessage]);
+ expect(wrapper.emitted('input')).toEqual([[changedCommitMessage]]);
expect(findTextarea().props('value')).toBe(changedCommitMessage);
});
});
diff --git a/spec/frontend/vue_shared/components/list_selector/index_spec.js b/spec/frontend/vue_shared/components/list_selector/index_spec.js
index f94519b8cab..4024a21ba39 100644
--- a/spec/frontend/vue_shared/components/list_selector/index_spec.js
+++ b/spec/frontend/vue_shared/components/list_selector/index_spec.js
@@ -35,6 +35,7 @@ describe('List Selector spec', () => {
const USERS_MOCK_PROPS = {
projectPath: 'some/project/path',
groupPath: 'some/group/path',
+ usersQueryOptions: { active: true },
type: 'users',
};
@@ -72,7 +73,7 @@ describe('List Selector spec', () => {
const findIcon = () => wrapper.findComponent(GlIcon);
const findAllListBoxComponents = () => wrapper.findAllComponents(GlCollapsibleListbox);
const findSearchResultsDropdown = () => findAllListBoxComponents().at(0);
- const findNamespaceDropdown = () => findAllListBoxComponents().at(1);
+ const findNamespaceDropdown = () => wrapper.findByTestId('namespace-dropdown');
const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
const findAllUserComponents = () => wrapper.findAllComponents(UserItem);
const findAllGroupComponents = () => wrapper.findAllComponents(GroupItem);
@@ -117,6 +118,14 @@ describe('List Selector spec', () => {
expect(findAllUserComponents().length).toBe(0);
});
+ describe('namespace dropdown rendering', () => {
+ beforeEach(() => createComponent({ ...USERS_MOCK_PROPS, isProjectOnlyNamespace: true }));
+
+ it('does not render namespace dropdown with isProjectOnlyNamespace prop', () => {
+ expect(findNamespaceDropdown().exists()).toBe(false);
+ });
+ });
+
describe('selected items', () => {
const selectedUser = { username: 'root' };
const selectedItems = [selectedUser];
diff --git a/spec/frontend/vue_shared/components/metric_images/__snapshots__/metric_images_table_spec.js.snap b/spec/frontend/vue_shared/components/metric_images/__snapshots__/metric_images_table_spec.js.snap
index 2dd7149069f..a63b21847cb 100644
--- a/spec/frontend/vue_shared/components/metric_images/__snapshots__/metric_images_table_spec.js.snap
+++ b/spec/frontend/vue_shared/components/metric_images/__snapshots__/metric_images_table_spec.js.snap
@@ -22,41 +22,12 @@ exports[`Metrics upload item render the metrics image component 1`] = `
Are you sure you wish to delete this image?
+
diff --git a/spec/frontend/vue_shared/components/metric_images/metric_image_details_modal_spec.js b/spec/frontend/vue_shared/components/metric_images/metric_image_details_modal_spec.js
new file mode 100644
--- /dev/null
+++ b/spec/frontend/vue_shared/components/metric_images/metric_image_details_modal_spec.js
+import { GlFormGroup, GlFormInput, GlModal } from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
+import Vuex from 'vuex';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { stubComponent } from 'helpers/stub_component';
+import waitForPromises from 'helpers/wait_for_promises';
+import createStore from '~/vue_shared/components/metric_images/store';
+import MetricImageDetailsModal from '~/vue_shared/components/metric_images/metric_image_details_modal.vue';
+import { fileList } from './mock_data';
+
+const mockEvent = { preventDefault: jest.fn() };
+
+Vue.use(Vuex);
+
+describe('Metric image details modal', () => {
+ let wrapper;
+ let store;
+ const testText = 'test text';
+ const testUrl = 'https://valid-url.com';
+
+ const mountComponent = (options = {}) => {
+ store = createStore({}, {});
+
+ wrapper = shallowMountExtended(MetricImageDetailsModal, {
+ store,
+ stubs: {
+ GlFormGroup: stubComponent(GlFormGroup, {
+ props: ['state', 'invalidFeedback'],
+ }),
+ GlFormInput: stubComponent(GlFormInput, {
+ props: ['state', 'value'],
+ template: '<input />',
+ }),
+ },
+ ...options,
+ });
+ };
+
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findTextInput = () =>
+ wrapper.findComponent('[data-testid="metric-image-details-modal-text-input"]');
+ const findUrlInput = () =>
+ wrapper.findComponent('[data-testid="metric-image-details-modal-url-input"]');
+ const findUrlFormGroup = () =>
+ wrapper.findComponent('[data-testid="metric-image-details-url-form-group"]');
+ const findForm = () => wrapper.findComponent('[data-testid="metric-image-details-modal-form"]');
+ const cancelModal = () => findModal().vm.$emit('hidden');
+ const submitForm = () => findForm().vm.$emit('submit', mockEvent);
+ const setTextInputValue = (value) => findTextInput().vm.$emit('input', value);
+ const setUrlInputValue = (value) => findUrlInput().vm.$emit('input', value);
+
+ describe('should display', () => {
+ beforeEach(() => {
+ mountComponent({
+ propsData: { visible: true },
+ });
+ });
+
+ it('a description of the url field', () => {
+ const urlGroup = findUrlFormGroup();
+ expect(urlGroup.attributes('description')).toBe('Must start with http:// or https://');
+ });
+ });
+
+ describe('when URL is invalid', () => {
+ it('should have an error state', async () => {
+ mountComponent({ propsData: { visible: true } });
+
+ setUrlInputValue('invalid-url');
+ submitForm();
+
+ await nextTick();
+
+ const urlGroup = findUrlFormGroup();
+ const urlInput = findUrlInput();
+
+ expect(urlGroup.props('state')).toBe(false);
+ expect(urlGroup.props('invalidFeedback')).toBe('Invalid URL');
+ expect(urlInput.props('state')).toBe(false);
+ });
+
+ it('should focus on the URL input on submit', async () => {
+ mountComponent({
+ attachTo: document.body,
+ propsData: { visible: true },
+ stubs: {
+ GlFormInput: {
+ template: '<input />',
+ },
+ },
+ });
+
+ setUrlInputValue('invalid-url');
+ submitForm();
+
+ await waitForPromises();
+
+ const urlInput = findUrlInput();
+ expect(urlInput.element).toBe(document.activeElement);
+ });
+
+ describe('and modal is in the add state', () => {
+ it('should not dispatch uploadImage action', async () => {
+ mountComponent({ propsData: { visible: true } });
+ const dispatchSpy = jest.spyOn(store, 'dispatch');
+
+ setUrlInputValue('invalid-url');
+ submitForm();
+
+ await waitForPromises();
+
+ expect(dispatchSpy).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('and modal is in the edit state', () => {
+ it('should not dispatch updateImage action', async () => {
+ mountComponent({
+ propsData: {
+ edit: true,
+ imageId: 1,
+ filename: 'test.jpg',
+ url: '',
+ urlText: '',
+ visible: true,
+ },
+ });
+ const dispatchSpy = jest.spyOn(store, 'dispatch');
+
+ setUrlInputValue('invalid-url');
+ submitForm();
+
+ await waitForPromises();
+
+ expect(dispatchSpy).not.toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('when is in the add state', () => {
+ beforeEach(() => {
+ mountComponent({
+ propsData: {
+ imageFiles: fileList,
+ visible: true,
+ },
+ });
+ });
+
+ it('should display a modal title', () => {
+ expect(findModal().attributes('title')).toBe('Add image details');
+ });
+
+ it('should display a modal description', () => {
+ const description = wrapper.findComponent(
+ '[data-testid="metric-image-details-modal-description"]',
+ );
+
+ expect(description.text()).toBe(
+ "Add text or a link to display with your image. If you don't add either, the file name displays instead.",
+ );
+ });
+
+ it('should display an empty text field', () => {
+ const textInput = findTextInput();
+
+ expect(textInput.props('value')).toBe('');
+ });
+
+ it('should display an empty url field', () => {
+ const urlInput = findUrlInput();
+
+ expect(urlInput.props('value')).toBe('');
+ });
+
+ it('should send files, text and url when submitted', async () => {
+ const dispatchSpy = jest.spyOn(store, 'dispatch');
+
+ setTextInputValue(testText);
+ setUrlInputValue(testUrl);
+
+ submitForm();
+
+ await waitForPromises();
+
+ expect(dispatchSpy).toHaveBeenCalledWith('uploadImage', {
+ files: fileList,
+ url: testUrl,
+ urlText: testText,
+ });
+ });
+
+ describe('after submit', () => {
+ it('should clear url, text fields and emit `hidden` event', async () => {
+ setTextInputValue(testText);
+ setUrlInputValue(testUrl);
+
+ await nextTick();
+
+ expect(findTextInput().props('value')).toBe(testText);
+ expect(findUrlInput().props('value')).toBe(testUrl);
+
+ submitForm();
+
+ await waitForPromises();
+
+ expect(findTextInput().props('value')).toBe('');
+ expect(findUrlInput().props('value')).toBe('');
+ expect(wrapper.emitted().hidden).toHaveLength(1);
+ });
+ });
+
+ describe('when cancelled', () => {
+ it('should clear url, text fields and emit `hidden` event', async () => {
+ setTextInputValue(testText);
+ setUrlInputValue(testUrl);
+
+ await nextTick();
+
+ expect(findTextInput().props('value')).toBe(testText);
+ expect(findUrlInput().props('value')).toBe(testUrl);
+
+ cancelModal();
+
+ await waitForPromises();
+
+ expect(findTextInput().props('value')).toBe('');
+ expect(findUrlInput().props('value')).toBe('');
+ expect(wrapper.emitted().hidden).toHaveLength(1);
+ });
+ });
+ });
+
+ describe('when is in the edit state', () => {
+ const updatedText = 'updated text';
+ const updatedUrl = 'https://updated-url.com';
+
+ beforeEach(() => {
+ mountComponent({
+ propsData: {
+ edit: true,
+ imageId: 1,
+ filename: 'test.jpg',
+ url: testUrl,
+ urlText: testText,
+ visible: true,
+ },
+ });
+ });
+
+ it('should display a modal title', () => {
+ expect(findModal().props('title')).toBe('Editing test.jpg');
+ });
+
+ it('should display the text field with prefilled value', () => {
+ expect(findTextInput().props('value')).toBe(testText);
+ });
+
+ it('should display the url field with prefilled value', () => {
+ expect(findUrlInput().props('value')).toBe(testUrl);
+ });
+
+ it('should update text and url when changed', async () => {
+ const dispatchSpy = jest.spyOn(store, 'dispatch');
+
+ setTextInputValue(updatedText);
+ setUrlInputValue(updatedUrl);
+
+ submitForm();
+
+ await waitForPromises();
+
+ expect(dispatchSpy).toHaveBeenCalledWith('updateImage', {
+ imageId: 1,
+ url: updatedUrl,
+ urlText: updatedText,
+ });
+ });
+
+ describe('after submit', () => {
+ it('should restore url, text fields and emit `hidden` event', async () => {
+ setTextInputValue(updatedText);
+ setUrlInputValue(updatedUrl);
+
+ await nextTick();
+
+ expect(findTextInput().props('value')).toBe(updatedText);
+ expect(findUrlInput().props('value')).toBe(updatedUrl);
+
+ submitForm();
+
+ await waitForPromises();
+
+ expect(findTextInput().props('value')).toBe(testText);
+ expect(findUrlInput().props('value')).toBe(testUrl);
+ expect(wrapper.emitted().hidden).toHaveLength(1);
+ });
+ });
+
+ describe('when cancelled', () => {
+ it('should restore url, text fields and emit `hidden` event', async () => {
+ setTextInputValue(updatedText);
+ setUrlInputValue(updatedUrl);
+
+ await nextTick();
+
+ expect(findTextInput().props('value')).toBe(updatedText);
+ expect(findUrlInput().props('value')).toBe(updatedUrl);
+
+ cancelModal();
+
+ await waitForPromises();
+
+ expect(findTextInput().props('value')).toBe(testText);
+ expect(findUrlInput().props('value')).toBe(testUrl);
+ expect(wrapper.emitted().hidden).toHaveLength(1);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/metric_images/metric_images_tab_spec.js b/spec/frontend/vue_shared/components/metric_images/metric_images_tab_spec.js
index d94cf5bd764..f128a5f3b80 100644
--- a/spec/frontend/vue_shared/components/metric_images/metric_images_tab_spec.js
+++ b/spec/frontend/vue_shared/components/metric_images/metric_images_tab_spec.js
@@ -1,4 +1,3 @@
-import { GlFormInput, GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import merge from 'lodash/merge';
@@ -9,14 +8,13 @@ import MetricImagesTab from '~/vue_shared/components/metric_images/metric_images
import createStore from '~/vue_shared/components/metric_images/store';
import waitForPromises from 'helpers/wait_for_promises';
import UploadDropzone from '~/vue_shared/components/upload_dropzone/upload_dropzone.vue';
+import MetricImageDetailsModal from '~/vue_shared/components/metric_images/metric_image_details_modal.vue';
import { fileList, initialData } from './mock_data';
const service = {
getMetricImages: jest.fn(),
};
-const mockEvent = { preventDefault: jest.fn() };
-
Vue.use(Vuex);
describe('Metric images tab', () => {
@@ -48,9 +46,8 @@ describe('Metric images tab', () => {
const findUploadDropzone = () => wrapper.findComponent(UploadDropzone);
const findImages = () => wrapper.findAllComponents(MetricImagesTable);
- const findModal = () => wrapper.findComponent(GlModal);
- const submitModal = () => findModal().vm.$emit('primary', mockEvent);
- const cancelModal = () => findModal().vm.$emit('hidden');
+ const findImageDetailsModal = () => wrapper.findComponent(MetricImageDetailsModal);
+ const cancelModal = () => findImageDetailsModal().vm.$emit('hidden');
describe('empty state', () => {
beforeEach(() => {
@@ -84,17 +81,15 @@ describe('Metric images tab', () => {
});
});
- describe('add metric dialog', () => {
- const testUrl = 'https://valid-url.com';
-
- it('should open the add metric dialog when clicked', async () => {
+ describe('metric image details dialog', () => {
+ it('should open when clicked', async () => {
mountComponent();
findUploadDropzone().vm.$emit('change');
await waitForPromises();
- expect(findModal().attributes('visible')).toBe('true');
+ expect(findImageDetailsModal().attributes('visible')).toBe('true');
});
it('should close when cancelled', async () => {
@@ -108,94 +103,7 @@ describe('Metric images tab', () => {
await waitForPromises();
- expect(findModal().attributes('visible')).toBeUndefined();
- });
-
- it('should add files and url when selected', async () => {
- mountComponent({
- data() {
- return { modalVisible: true, modalUrl: testUrl, currentFiles: fileList };
- },
- });
-
- const dispatchSpy = jest.spyOn(store, 'dispatch');
-
- submitModal();
-
- await waitForPromises();
-
- expect(dispatchSpy).toHaveBeenCalledWith('uploadImage', {
- files: fileList,
- url: testUrl,
- urlText: '',
- });
- });
-
- describe('url field', () => {
- beforeEach(() => {
- mountComponent({
- data() {
- return { modalVisible: true, modalUrl: testUrl };
- },
- });
- });
-
- it('should display the url field', () => {
- expect(wrapper.find('#upload-url-input').attributes('value')).toBe(testUrl);
- });
-
- it('should display a description of the url field', () => {
- const urlGroup = wrapper.find('#upload-url-group');
- expect(urlGroup.attributes('description')).toBe('Must start with http:// or https://');
- });
-
- it('should display the url text field', () => {
- expect(wrapper.find('#upload-text-input').attributes('value')).toBe('');
- });
-
- it('should clear url when cancelled', async () => {
- cancelModal();
-
- await waitForPromises();
-
- expect(wrapper.findComponent(GlFormInput).attributes('value')).toBe('');
- });
-
- it('should clear url when submitted', async () => {
- submitModal();
-
- await waitForPromises();
-
- expect(wrapper.findComponent(GlFormInput).attributes('value')).toBe('');
- });
-
- describe('is invalid', () => {
- beforeEach(() => {
- mountComponent({
- data() {
- return { modalVisible: true };
- },
- });
-
- const urlInput = wrapper.find('#upload-url-input');
- urlInput.vm.$emit('input', 'invalid-url');
- urlInput.vm.$emit('blur');
- });
-
- it('should disable the upload button', () => {
- const uploadButton = findModal().props('actionPrimary');
- expect(uploadButton.attributes.disabled).toBe(true);
- });
-
- it('should have an error state', () => {
- const urlGroup = wrapper.find('#upload-url-group');
- const urlInput = wrapper.find('#upload-url-input');
-
- expect(urlGroup.attributes('state')).toBe(undefined);
- expect(urlGroup.attributes('invalid-feedback')).toBe('Invalid URL');
- expect(urlInput.attributes('state')).toBe(undefined);
- });
- });
+ expect(findImageDetailsModal().attributes('visible')).toBeUndefined();
});
});
});
diff --git a/spec/frontend/vue_shared/components/metric_images/metric_images_table_spec.js b/spec/frontend/vue_shared/components/metric_images/metric_images_table_spec.js
index ca141f53bf1..fef01e5e97f 100644
--- a/spec/frontend/vue_shared/components/metric_images/metric_images_table_spec.js
+++ b/spec/frontend/vue_shared/components/metric_images/metric_images_table_spec.js
@@ -6,6 +6,7 @@ import merge from 'lodash/merge';
import Vuex from 'vuex';
import createStore from '~/vue_shared/components/metric_images/store';
import MetricsImageTable from '~/vue_shared/components/metric_images/metric_images_table.vue';
+import MetricImageDetailsModal from '~/vue_shared/components/metric_images/metric_image_details_modal.vue';
import waitForPromises from 'helpers/wait_for_promises';
const defaultProps = {
@@ -45,17 +46,13 @@ describe('Metrics upload item', () => {
const findCollapseButton = () => wrapper.find('[data-testid="collapse-button"]');
const findMetricImageBody = () => wrapper.find('[data-testid="metric-image-body"]');
const findModal = () => wrapper.findComponent(GlModal);
- const findEditModal = () => wrapper.find('[data-testid="metric-image-edit-modal"]');
const findDeleteButton = () => wrapper.find('[data-testid="delete-button"]');
const findEditButton = () => wrapper.find('[data-testid="edit-button"]');
- const findImageTextInput = () => wrapper.find('[data-testid="metric-image-text-field"]');
- const findImageUrlInput = () => wrapper.find('[data-testid="metric-image-url-field"]');
-
+ const findImageDetailsModal = () => wrapper.findComponent(MetricImageDetailsModal);
const closeModal = () => findModal().vm.$emit('hidden');
const submitModal = () => findModal().vm.$emit('primary', mockEvent);
const deleteImage = () => findDeleteButton().vm.$emit('click');
- const closeEditModal = () => findEditModal().vm.$emit('hidden');
- const submitEditModal = () => findEditModal().vm.$emit('primary', mockEvent);
+ const closeEditModal = () => findImageDetailsModal().vm.$emit('hidden');
const editImage = () => findEditButton().vm.$emit('click');
it('render the metrics image component', () => {
@@ -157,66 +154,29 @@ describe('Metrics upload item', () => {
});
describe('edit functionality', () => {
- it('should open the delete modal when clicked', async () => {
- mountComponent({ stubs: { GlModal: true } });
+ it('should open the metric image details dialog when clicked', async () => {
+ mountComponent({ stubs: { MetricImageDetailsModal: true } });
editImage();
await waitForPromises();
- expect(findEditModal().attributes('visible')).toBe('true');
+ expect(findImageDetailsModal().attributes('visible')).toBe('true');
});
- describe('when the modal is open', () => {
- beforeEach(() => {
- mountComponent({
- data() {
- return { editModalVisible: true };
- },
- propsData: { urlText: 'test' },
- stubs: { GlModal: true },
- });
+ it('should close the metric image details dialog when cancelled', async () => {
+ mountComponent({
+ data() {
+ return { editModalVisible: true };
+ },
+ stubs: { MetricImageDetailsModal: true },
});
- it('should close the modal when cancelled', async () => {
- closeEditModal();
+ closeEditModal();
- await waitForPromises();
- expect(findEditModal().attributes('visible')).toBeUndefined();
- });
+ await waitForPromises();
- it('should delete the image when selected', async () => {
- const dispatchSpy = jest.spyOn(store, 'dispatch').mockImplementation(jest.fn());
-
- submitEditModal();
-
- await waitForPromises();
-
- expect(dispatchSpy).toHaveBeenCalledWith('updateImage', {
- imageId: defaultProps.id,
- url: null,
- urlText: 'test',
- });
- });
-
- it('should clear edits when the modal is closed', async () => {
- await findImageTextInput().setValue('test value');
- await findImageUrlInput().setValue('http://www.gitlab.com');
-
- expect(findImageTextInput().element.value).toBe('test value');
- expect(findImageUrlInput().element.value).toBe('http://www.gitlab.com');
-
- closeEditModal();
-
- await waitForPromises();
-
- editImage();
-
- await waitForPromises();
-
- expect(findImageTextInput().element.value).toBe('test');
- expect(findImageUrlInput().element.value).toBe('');
- });
+ expect(findImageDetailsModal().attributes('visible')).toBeUndefined();
});
});
});
diff --git a/spec/helpers/storage_helper_spec.rb b/spec/helpers/storage_helper_spec.rb
index e840dddbedd..f199796cee5 100644
--- a/spec/helpers/storage_helper_spec.rb
+++ b/spec/helpers/storage_helper_spec.rb
@@ -37,7 +37,8 @@ RSpec.describe StorageHelper, feature_category: :consumables_cost_management do
pipeline_artifacts_size: 11.megabytes,
snippets_size: 40.megabytes,
packages_size: 12.megabytes,
- uploads_size: 15.megabytes
+ uploads_size: 15.megabytes,
+ container_registry_size: 3.gigabytes
)
)
end
diff --git a/spec/lib/gitlab/allowable_spec.rb b/spec/lib/gitlab/allowable_spec.rb
index 0535384be6e..8387c194d0a 100644
--- a/spec/lib/gitlab/allowable_spec.rb
+++ b/spec/lib/gitlab/allowable_spec.rb
@@ -26,4 +26,21 @@ RSpec.describe Gitlab::Allowable do
end
end
end
+
+ describe '#can_any?' do
+ let(:user) { create(:user) }
+ let(:permissions) { [:admin_project, :read_project] }
+
+ context 'when the user is allowed one of the abilities' do
+ let_it_be(:project) { create(:project, :public) }
+
+ it { expect(subject.can_any?(user, permissions, project)).to be(true) }
+ end
+
+ context 'when the user is allowed none of the abilities' do
+ let_it_be(:project) { create(:project, :private) }
+
+ it { expect(subject.can_any?(user, permissions, project)).to be(false) }
+ end
+ end
end
diff --git a/spec/models/bulk_import_spec.rb b/spec/models/bulk_import_spec.rb
index 57c6df39167..a5d979206dd 100644
--- a/spec/models/bulk_import_spec.rb
+++ b/spec/models/bulk_import_spec.rb
@@ -48,7 +48,8 @@ RSpec.describe BulkImport, type: :model, feature_category: :importers do
describe '.all_human_statuses' do
it 'returns all human readable entity statuses' do
- expect(described_class.all_human_statuses).to contain_exactly('created', 'started', 'finished', 'failed', 'timeout')
+ expect(described_class.all_human_statuses)
+ .to contain_exactly('created', 'started', 'finished', 'failed', 'timeout', 'canceled')
end
end
@@ -58,6 +59,7 @@ RSpec.describe BulkImport, type: :model, feature_category: :importers do
end
describe '#completed?' do
+ it { expect(described_class.new(status: -2)).to be_completed }
it { expect(described_class.new(status: -1)).to be_completed }
it { expect(described_class.new(status: 0)).not_to be_completed }
it { expect(described_class.new(status: 1)).not_to be_completed }
@@ -118,4 +120,28 @@ RSpec.describe BulkImport, type: :model, feature_category: :importers do
end
end
end
+
+ describe 'import canceling' do
+ let(:import) { create(:bulk_import, :started) }
+
+ it 'marks import as canceled' do
+ expect(import.canceled?).to eq(false)
+
+ import.cancel!
+
+ expect(import.canceled?).to eq(true)
+ end
+
+ context 'when import has entities' do
+ it 'marks entities as canceled' do
+ entity = create(:bulk_import_entity, bulk_import: import)
+
+ expect(entity.canceled?).to eq(false)
+
+ import.cancel!
+
+ expect(entity.reload.canceled?).to eq(true)
+ end
+ end
+ end
end
diff --git a/spec/models/bulk_imports/batch_tracker_spec.rb b/spec/models/bulk_imports/batch_tracker_spec.rb
index 1c7cbc0cb8c..d7f0c3188b8 100644
--- a/spec/models/bulk_imports/batch_tracker_spec.rb
+++ b/spec/models/bulk_imports/batch_tracker_spec.rb
@@ -28,4 +28,14 @@ RSpec.describe BulkImports::BatchTracker, type: :model, feature_category: :impor
end
end
end
+
+ describe 'batch canceling' do
+ it 'marks batch as canceled' do
+ batch = create(:bulk_import_batch_tracker, :created)
+
+ batch.cancel!
+
+ expect(batch.reload.canceled?).to eq(true)
+ end
+ end
end
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index 3e29c093353..769f50b4ca8 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -213,7 +213,8 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
describe '.all_human_statuses' do
it 'returns all human readable entity statuses' do
- expect(described_class.all_human_statuses).to contain_exactly('created', 'started', 'finished', 'failed', 'timeout')
+ expect(described_class.all_human_statuses)
+ .to contain_exactly('created', 'started', 'finished', 'failed', 'timeout', 'canceled')
end
end
@@ -574,4 +575,28 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
end
end
end
+
+ describe 'entity canceling' do
+ let(:entity) { create(:bulk_import_entity, :started) }
+
+ it 'marks entity as canceled' do
+ entity.cancel!
+
+ expect(entity.canceled?).to eq(true)
+ end
+
+ context 'when entity has trackers' do
+ it 'marks trackers as canceled' do
+ tracker = create(
+ :bulk_import_tracker,
+ entity: entity,
+ relation: 'BulkImports::Common::Pipelines::MilestonesPipeline'
+ )
+
+ entity.cancel!
+
+ expect(tracker.reload.canceled?).to eq(true)
+ end
+ end
+ end
end
diff --git a/spec/models/bulk_imports/tracker_spec.rb b/spec/models/bulk_imports/tracker_spec.rb
index 474fc4e2ead..1bf2ee80fd6 100644
--- a/spec/models/bulk_imports/tracker_spec.rb
+++ b/spec/models/bulk_imports/tracker_spec.rb
@@ -206,4 +206,24 @@ RSpec.describe BulkImports::Tracker, type: :model, feature_category: :importers
end
end
end
+
+ describe 'tracker canceling' do
+ let(:tracker) { create(:bulk_import_tracker) }
+
+ it 'marks tracker as canceled' do
+ tracker.cancel!
+
+ expect(tracker.canceled?).to eq(true)
+ end
+
+ context 'when tracker has batches' do
+ it 'marks batches as canceled' do
+ batch = create(:bulk_import_batch_tracker, tracker: tracker)
+
+ tracker.cancel!
+
+ expect(batch.reload.canceled?).to eq(true)
+ end
+ end
+ end
end
diff --git a/spec/models/ci/partition_spec.rb b/spec/models/ci/partition_spec.rb
index eed0a76d5d9..729e3a2c2aa 100644
--- a/spec/models/ci/partition_spec.rb
+++ b/spec/models/ci/partition_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Partition, feature_category: :continuous_integration do
+RSpec.describe Ci::Partition, feature_category: :ci_scaling do
let_it_be_with_reload(:ci_partition) { create(:ci_partition) }
describe 'validations' do
@@ -12,6 +12,14 @@ RSpec.describe Ci::Partition, feature_category: :continuous_integration do
it 'is valid' do
expect(ci_partition).to be_valid
end
+
+ context 'when status is current' do
+ before do
+ ci_partition.update!(status: described_class.statuses[:current])
+ end
+
+ it { is_expected.to validate_uniqueness_of(:status) }
+ end
end
describe '.create_next!' do
@@ -67,6 +75,26 @@ RSpec.describe Ci::Partition, feature_category: :continuous_integration do
expect(id_after).to match_array(ci_next_partition)
end
end
+
+ describe '.next_available' do
+ subject(:next_available) { described_class.next_available(ci_partition.id) }
+
+ let!(:next_ci_partition) { create(:ci_partition, :ready) }
+
+ context 'when one partition is ready' do
+ it { is_expected.to eq(next_ci_partition) }
+ end
+
+ context 'when multiple partitions are ready' do
+ before do
+ create_list(:ci_partition, 2, :ready)
+ end
+
+ it 'returns the first next partition available' do
+ expect(next_available).to eq(next_ci_partition)
+ end
+ end
+ end
end
describe 'state machine' do
@@ -79,6 +107,20 @@ RSpec.describe Ci::Partition, feature_category: :continuous_integration do
expect(ci_partition).to be_ready
end
end
+
+ context 'when transitioning from current to active' do
+ let!(:next_ci_partition) { create(:ci_partition, :ready) }
+
+ before do
+ ci_partition.update!(status: described_class.statuses[:current])
+ next_ci_partition.switch_writes!
+ end
+
+ it 'updates statuses for current and next partition' do
+ expect(ci_partition.reload).to be_active
+ expect(next_ci_partition.reload).to be_current
+ end
+ end
end
describe '#above_threshold?' do
diff --git a/spec/models/packages/debian/package_spec.rb b/spec/models/packages/debian/package_spec.rb
new file mode 100644
index 00000000000..b2282132dd7
--- /dev/null
+++ b/spec/models/packages/debian/package_spec.rb
@@ -0,0 +1,169 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::Package, type: :model, feature_category: :package_registry do
+ describe 'associations' do
+ it { is_expected.to have_one(:publication).inverse_of(:package).class_name('Packages::Debian::Publication') }
+
+ it do
+ is_expected.to have_one(:distribution)
+ .through(:publication)
+ .source(:distribution)
+ .inverse_of(:packages)
+ .class_name('Packages::Debian::ProjectDistribution')
+ end
+ end
+
+ describe 'delegates' do
+ it { is_expected.to delegate_method(:codename).to(:distribution).with_prefix(:distribution) }
+ it { is_expected.to delegate_method(:suite).to(:distribution).with_prefix(:distribution) }
+ end
+
+ describe '.with_codename' do
+ let_it_be(:publication) { create(:debian_publication) }
+
+ subject { described_class.with_codename(publication.distribution.codename).to_a }
+
+ it { is_expected.to contain_exactly(publication.package) }
+ end
+
+ describe '.with_codename_or_suite' do
+ let_it_be(:distribution1) { create(:debian_project_distribution, :with_suite) }
+ let_it_be(:distribution2) { create(:debian_project_distribution, :with_suite) }
+
+ let_it_be(:package1) { create(:debian_package, published_in: distribution1) }
+ let_it_be(:package2) { create(:debian_package, published_in: distribution2) }
+
+ context 'with a codename' do
+ subject { described_class.with_codename_or_suite(distribution1.codename).to_a }
+
+ it { is_expected.to contain_exactly(package1) }
+ end
+
+ context 'with a suite' do
+ subject { described_class.with_codename_or_suite(distribution2.suite).to_a }
+
+ it { is_expected.to contain_exactly(package2) }
+ end
+ end
+
+ describe 'validations' do
+ describe '#name' do
+ subject { build(:debian_package) }
+
+ it { is_expected.to allow_value('0ad').for(:name) }
+ it { is_expected.to allow_value('g++').for(:name) }
+ it { is_expected.not_to allow_value('a_b').for(:name) }
+
+ context 'when debian incoming' do
+ subject { create(:debian_incoming) }
+
+ # Only 'incoming' is accepted
+ it { is_expected.to allow_value('incoming').for(:name) }
+ it { is_expected.not_to allow_value('0ad').for(:name) }
+ it { is_expected.not_to allow_value('g++').for(:name) }
+ it { is_expected.not_to allow_value('a_b').for(:name) }
+ end
+ end
+
+ describe '#version' do
+ subject { build(:debian_package) }
+
+ it { is_expected.to allow_value('2:4.9.5+dfsg-5+deb10u1').for(:version) }
+ it { is_expected.not_to allow_value('1_0').for(:version) }
+
+ context 'when debian incoming' do
+ subject { create(:debian_incoming) }
+
+ it { is_expected.to allow_value(nil).for(:version) }
+ it { is_expected.not_to allow_value('2:4.9.5+dfsg-5+deb10u1').for(:version) }
+ it { is_expected.not_to allow_value('1_0').for(:version) }
+ end
+ end
+
+ describe 'uniqueness for package type debian' do
+ let_it_be(:package) { create(:debian_package) }
+
+ it 'does not allow a Debian package with same project, name, version and distribution' do
+ new_package = build(:debian_package, project: package.project, name: package.name, version: package.version)
+ new_package.publication.distribution = package.publication.distribution
+ expect(new_package).not_to be_valid
+ expect(new_package.errors.to_a).to include('Name has already been taken')
+ end
+
+ it 'does not allow a Debian package with same project, name, version, but no distribution' do
+ new_package = build(:debian_package, project: package.project, name: package.name, version: package.version,
+ published_in: nil)
+ expect(new_package).not_to be_valid
+ expect(new_package.errors.to_a).to include('Name has already been taken')
+ end
+
+ context 'with pending_destruction package' do
+ let_it_be(:package) { create(:debian_package, :pending_destruction) }
+
+ it 'allows a Debian package with same project, name, version and distribution' do
+ new_package = build(:debian_package, project: package.project, name: package.name, version: package.version)
+ new_package.publication.distribution = package.publication.distribution
+ expect(new_package).to be_valid
+ end
+ end
+ end
+ end
+
+ describe '.preload_debian_file_metadata' do
+ let_it_be(:debian_package) { create(:debian_package) }
+
+ subject(:packages) { described_class.preload_debian_file_metadata }
+
+ it 'preloads package files' do
+ expect(packages.first.association(:package_files)).to be_loaded
+ end
+
+ it 'preloads debian files metadata' do
+ expect(packages.first.package_files.first.association(:debian_file_metadatum)).to be_loaded
+ end
+ end
+
+ describe '.incoming_package!' do
+ let_it_be(:debian_package) { create(:debian_package) }
+ let_it_be(:debian_processing_incoming) { create(:debian_incoming, :processing) }
+
+ subject(:incoming_packages) { described_class.incoming_package! }
+
+ context 'when incoming exists' do
+ let_it_be(:debian_incoming) { create(:debian_incoming) }
+
+ it { is_expected.to eq(debian_incoming) }
+ end
+
+ context 'when incoming not found' do
+ it { expect { incoming_packages }.to raise_error(ActiveRecord::RecordNotFound) }
+ end
+ end
+
+ describe '.existing_packages_with' do
+ let_it_be(:name) { 'my-package' }
+ let_it_be(:version) { '1.0.0' }
+ let_it_be(:package1) { create(:debian_package, name: name, version: version) }
+ let_it_be(:package2) { create(:debian_package) }
+
+ subject { described_class.existing_packages_with(name: name, version: version) }
+
+ it { is_expected.to contain_exactly(package1) }
+ end
+
+ describe '#incoming?' do
+ let(:package) { build(:debian_package) }
+
+ subject { package.incoming? }
+
+ it { is_expected.to eq(false) }
+
+ context 'with debian_incoming' do
+ let(:package) { create(:debian_incoming) }
+
+ it { is_expected.to eq(true) }
+ end
+ end
+end
diff --git a/spec/models/packages/debian/publication_spec.rb b/spec/models/packages/debian/publication_spec.rb
index 0ed056f499b..85bb045cf65 100644
--- a/spec/models/packages/debian/publication_spec.rb
+++ b/spec/models/packages/debian/publication_spec.rb
@@ -1,13 +1,14 @@
# frozen_string_literal: true
+
require 'spec_helper'
-RSpec.describe Packages::Debian::Publication, type: :model do
+RSpec.describe Packages::Debian::Publication, type: :model, feature_category: :package_registry do
let_it_be_with_reload(:publication) { create(:debian_publication) }
subject { publication }
describe 'relationships' do
- it { is_expected.to belong_to(:package).inverse_of(:debian_publication).class_name('Packages::Package') }
+ it { is_expected.to belong_to(:package).inverse_of(:publication).class_name('Packages::Debian::Package') }
it { is_expected.to belong_to(:distribution).inverse_of(:publications).class_name('Packages::Debian::ProjectDistribution').with_foreign_key(:distribution_id) }
end
@@ -16,30 +17,6 @@ RSpec.describe Packages::Debian::Publication, type: :model do
it { is_expected.to validate_presence_of(:package) }
end
- describe '#valid_debian_package_type' do
- context 'with package type not being Debian' do
- before do
- publication.package.package_type = 'generic'
- end
-
- it 'will not allow package type not being Debian' do
- expect(publication).not_to be_valid
- expect(publication.errors.to_a).to eq(['Package type must be Debian'])
- end
- end
-
- context 'with package not being a Debian package' do
- before do
- publication.package.version = nil
- end
-
- it 'will not allow package not being a distribution' do
- expect(publication).not_to be_valid
- expect(publication.errors.to_a).to eq(['Package must be a Debian package'])
- end
- end
- end
-
describe '#distribution' do
it { is_expected.to validate_presence_of(:distribution) }
end
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index 6feb4ff9a6e..b5b0380415c 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -18,8 +18,6 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
it { is_expected.to have_many(:build_infos).inverse_of(:package) }
it { is_expected.to have_many(:installable_nuget_package_files).inverse_of(:package) }
it { is_expected.to have_one(:maven_metadatum).inverse_of(:package) }
- it { is_expected.to have_one(:debian_publication).inverse_of(:package).class_name('Packages::Debian::Publication') }
- it { is_expected.to have_one(:debian_distribution).through(:debian_publication).source(:distribution).inverse_of(:packages).class_name('Packages::Debian::ProjectDistribution') }
it { is_expected.to have_one(:nuget_metadatum).inverse_of(:package) }
it { is_expected.to have_one(:npm_metadatum).inverse_of(:package) }
it { is_expected.to have_one(:terraform_module_metadatum).inverse_of(:package) }
@@ -27,34 +25,6 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
it { is_expected.to have_many(:matching_package_protection_rules).through(:project).source(:package_protection_rules) }
end
- describe '.with_debian_codename' do
- let_it_be(:publication) { create(:debian_publication) }
-
- subject { described_class.with_debian_codename(publication.distribution.codename).to_a }
-
- it { is_expected.to contain_exactly(publication.package) }
- end
-
- describe '.with_debian_codename_or_suite' do
- let_it_be(:distribution1) { create(:debian_project_distribution, :with_suite) }
- let_it_be(:distribution2) { create(:debian_project_distribution, :with_suite) }
-
- let_it_be(:package1) { create(:debian_package, published_in: distribution1) }
- let_it_be(:package2) { create(:debian_package, published_in: distribution2) }
-
- context 'with a codename' do
- subject { described_class.with_debian_codename_or_suite(distribution1.codename).to_a }
-
- it { is_expected.to contain_exactly(package1) }
- end
-
- context 'with a suite' do
- subject { described_class.with_debian_codename_or_suite(distribution2.suite).to_a }
-
- it { is_expected.to contain_exactly(package2) }
- end
- end
-
describe '.with_composer_target' do
let!(:package1) { create(:composer_package, :with_metadatum, sha: '123') }
let!(:package2) { create(:composer_package, :with_metadatum, sha: '123') }
@@ -155,24 +125,6 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
it { is_expected.to allow_value("my.app-11.07.2018").for(:name) }
it { is_expected.not_to allow_value("my(dom$$$ain)com.my-app").for(:name) }
- context 'debian package' do
- subject { build(:debian_package) }
-
- it { is_expected.to allow_value('0ad').for(:name) }
- it { is_expected.to allow_value('g++').for(:name) }
- it { is_expected.not_to allow_value('a_b').for(:name) }
- end
-
- context 'debian incoming' do
- subject { create(:debian_incoming) }
-
- # Only 'incoming' is accepted
- it { is_expected.to allow_value('incoming').for(:name) }
- it { is_expected.not_to allow_value('0ad').for(:name) }
- it { is_expected.not_to allow_value('g++').for(:name) }
- it { is_expected.not_to allow_value('a_b').for(:name) }
- end
-
context 'generic package' do
subject { build_stubbed(:generic_package) }
@@ -260,21 +212,6 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
it { is_expected.to allow_value('2.x-dev').for(:version) }
end
- context 'debian package' do
- subject { build(:debian_package) }
-
- it { is_expected.to allow_value('2:4.9.5+dfsg-5+deb10u1').for(:version) }
- it { is_expected.not_to allow_value('1_0').for(:version) }
- end
-
- context 'debian incoming' do
- subject { create(:debian_incoming) }
-
- it { is_expected.to allow_value(nil).for(:version) }
- it { is_expected.not_to allow_value('2:4.9.5+dfsg-5+deb10u1').for(:version) }
- it { is_expected.not_to allow_value('1_0').for(:version) }
- end
-
context 'maven package' do
subject { build_stubbed(:maven_package) }
@@ -632,33 +569,6 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
end
end
- describe "uniqueness for package type debian" do
- let!(:package) { create(:debian_package) }
-
- it "will not allow a Debian package with same project, name, version and distribution" do
- new_package = build(:debian_package, project: package.project, name: package.name, version: package.version)
- new_package.debian_publication.distribution = package.debian_publication.distribution
- expect(new_package).not_to be_valid
- expect(new_package.errors.to_a).to include('Name has already been taken')
- end
-
- it "will not allow a Debian package with same project, name, version, but no distribution" do
- new_package = build(:debian_package, project: package.project, name: package.name, version: package.version, published_in: nil)
- expect(new_package).not_to be_valid
- expect(new_package.errors.to_a).to include('Name has already been taken')
- end
-
- context 'with pending_destruction package' do
- let!(:package) { create(:debian_package, :pending_destruction) }
-
- it "will allow a Debian package with same project, name, version and distribution" do
- new_package = build(:debian_package, project: package.project, name: package.name, version: package.version)
- new_package.debian_publication.distribution = package.debian_publication.distribution
- expect(new_package).to be_valid
- end
- end
- end
-
Packages::Package.package_types.keys.without('conan').each do |pt|
context "project id, name, version and package type uniqueness for package type #{pt}" do
let(:package) { create("#{pt}_package") }
@@ -710,23 +620,6 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
end
end
- describe '.debian_incoming_package!' do
- let!(:debian_package) { create(:debian_package) }
- let!(:debian_processing_incoming) { create(:debian_incoming, :processing) }
-
- subject { described_class.debian_incoming_package! }
-
- context 'when incoming exists' do
- let!(:debian_incoming) { create(:debian_incoming) }
-
- it { is_expected.to eq(debian_incoming) }
- end
-
- context 'when incoming not found' do
- it { expect { subject }.to raise_error(ActiveRecord::RecordNotFound) }
- end
- end
-
describe '.with_package_type' do
let!(:package1) { create(:terraform_module_package) }
let!(:package2) { create(:npm_package) }
@@ -1121,46 +1014,6 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
end
end
- describe '#debian_incoming?' do
- let(:package) { build(:package) }
-
- subject { package.debian_incoming? }
-
- it { is_expected.to eq(false) }
-
- context 'with debian_incoming' do
- let(:package) { create(:debian_incoming) }
-
- it { is_expected.to eq(true) }
- end
-
- context 'with debian_package' do
- let(:package) { create(:debian_package) }
-
- it { is_expected.to eq(false) }
- end
- end
-
- describe '#debian_package?' do
- let(:package) { build(:package) }
-
- subject { package.debian_package? }
-
- it { is_expected.to eq(false) }
-
- context 'with debian_incoming' do
- let(:package) { create(:debian_incoming) }
-
- it { is_expected.to eq(false) }
- end
-
- context 'with debian_package' do
- let(:package) { create(:debian_package) }
-
- it { is_expected.to eq(true) }
- end
- end
-
describe '#infrastructure_package?' do
let(:package) { create(:package) }
diff --git a/spec/requests/api/bulk_imports_spec.rb b/spec/requests/api/bulk_imports_spec.rb
index 90d15997305..2176ea306cf 100644
--- a/spec/requests/api/bulk_imports_spec.rb
+++ b/spec/requests/api/bulk_imports_spec.rb
@@ -488,4 +488,36 @@ RSpec.describe API::BulkImports, feature_category: :importers do
it_behaves_like 'disabled feature'
end
+
+ describe 'POST /bulk_imports/:id/cancel' do
+ let(:import) { create(:bulk_import, user: user) }
+
+ context 'when authenticated as admin' do
+ let_it_be(:admin) { create(:admin) }
+
+ it 'cancels the migration and returns 200' do
+ post api("/bulk_imports/#{import.id}/cancel", admin, admin_mode: true)
+
+ expect(response).to have_gitlab_http_status(:ok)
+
+ expect(json_response['status']).to eq('canceled')
+ end
+
+ context 'when migration could not be found' do
+ it 'returns 404' do
+ post api("/bulk_imports/#{non_existing_record_id}/cancel", admin, admin_mode: true)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'when not authenticated as admin' do
+ it 'returns an error' do
+ post api("/bulk_imports/#{import.id}/cancel", user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
end
diff --git a/spec/requests/api/ci/runners_spec.rb b/spec/requests/api/ci/runners_spec.rb
index 8c49bad5e4e..7ea42bb57d7 100644
--- a/spec/requests/api/ci/runners_spec.rb
+++ b/spec/requests/api/ci/runners_spec.rb
@@ -23,17 +23,27 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
let_it_be(:group_runner_a) { create(:ci_runner, :group, description: 'Group runner A', groups: [group]) }
let_it_be(:group_runner_b) { create(:ci_runner, :group, description: 'Group runner B', groups: [subgroup]) }
+ let(:query) { {} }
+ let(:extra_query_parts) { {} }
+ let(:query_path) { query.merge(extra_query_parts).to_param }
+
describe 'GET /runners' do
+ let(:path) { "/runners?#{query_path}" }
+
+ subject(:perform_request) { get api(path, current_user) }
+
context 'authorized user' do
+ let(:current_user) { user }
+
it 'returns response status and headers' do
- get api('/runners', user)
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
end
it 'returns user available runners' do
- get api('/runners', user)
+ perform_request
expect(json_response).to match_array [
a_hash_including('description' => 'Project runner'),
@@ -43,80 +53,121 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
]
end
- it 'filters runners by scope' do
- create(:ci_runner, :project, :inactive, description: 'Inactive project runner', projects: [project])
+ context 'when filtering by scope' do
+ let(:query) { { scope: :paused } }
- get api('/runners?scope=paused', user)
+ before_all do
+ create(:ci_runner, :project, :inactive, description: 'Inactive project runner', projects: [project])
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
+ it 'filters runners by scope' do
+ perform_request
- expect(json_response).to match_array [
- a_hash_including('description' => 'Inactive project runner')
- ]
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+
+ expect(json_response).to match_array [
+ a_hash_including('description' => 'Inactive project runner')
+ ]
+ end
+
+ context 'when scope is invalid' do
+ let(:query) { { scope: :unknown } }
+
+ it 'avoids filtering' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
end
- it 'avoids filtering if scope is invalid' do
- get api('/runners?scope=unknown', user)
- expect(response).to have_gitlab_http_status(:bad_request)
- end
+ context 'when filtering by type' do
+ let(:query) { { type: type } }
- it 'filters runners by type' do
- get api('/runners?type=project_type', user)
+ context 'with project_type type' do
+ let(:type) { :project_type }
- expect(json_response).to match_array [
- a_hash_including('description' => 'Project runner'),
- a_hash_including('description' => 'Two projects runner')
- ]
- end
+ it 'filters runners by type' do
+ perform_request
- it 'does not filter by invalid type' do
- get api('/runners?type=bogus', user)
+ expect(json_response).to match_array [
+ a_hash_including('description' => 'Project runner'),
+ a_hash_including('description' => 'Two projects runner')
+ ]
+ end
+ end
- expect(response).to have_gitlab_http_status(:bad_request)
+ context 'when type is invalid' do
+ let(:type) { :bogus }
+
+ it 'does not filter by invalid type' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
end
context 'with an inactive runner' do
- let_it_be(:runner) { create(:ci_runner, :project, :inactive, description: 'Inactive project runner', projects: [project]) }
-
- it 'filters runners by paused state' do
- get api('/runners?paused=true', user)
-
- expect(json_response).to match_array [
- a_hash_including('description' => 'Inactive project runner')
- ]
+ let_it_be(:runner) do
+ create(:ci_runner, :project, :inactive, description: 'Inactive project runner', projects: [project])
end
- it 'filters runners by status' do
- get api('/runners?status=paused', user)
+ context 'when filtering by paused' do
+ let(:query) { { paused: true } }
- expect(json_response).to match_array [
- a_hash_including('description' => 'Inactive project runner')
- ]
+ it 'filters runners by paused state' do
+ perform_request
+
+ expect(json_response).to contain_exactly(a_hash_including('description' => 'Inactive project runner'))
+ end
end
- end
- it 'does not filter by invalid status' do
- get api('/runners?status=bogus', user)
+ context 'when filtering by status' do
+ let(:query) { { status: :paused } }
- expect(response).to have_gitlab_http_status(:bad_request)
- end
+ it 'filters runners by status' do
+ perform_request
- it 'filters runners by tag_list' do
- create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2])
- create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: ['tag2'])
+ expect(json_response).to contain_exactly(a_hash_including('description' => 'Inactive project runner'))
+ end
+ end
- get api('/runners?tag_list=tag1,tag2', user)
+ context 'when filtering by invalid status' do
+ let(:query) { { status: :bogus } }
- expect(json_response).to match_array [
- a_hash_including('description' => 'Runner tagged with tag1 and tag2', 'active' => true, 'paused' => false)
- ]
+ it 'does not filter' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when filtering by tag_list' do
+ let(:query) { { tag_list: 'tag1,tag2' } }
+
+ before_all do
+ create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2])
+ create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: ['tag2'])
+ end
+
+ it 'filters runners by tag_list' do
+ perform_request
+
+ expect(json_response).to contain_exactly(
+ a_hash_including('description' => 'Runner tagged with tag1 and tag2', 'active' => true, 'paused' => false)
+ )
+ end
+ end
end
end
context 'unauthorized user' do
+ let(:current_user) { nil }
+
it 'does not return runners' do
- get api('/runners')
+ perform_request
expect(response).to have_gitlab_http_status(:unauthorized)
end
@@ -124,21 +175,25 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
describe 'GET /runners/all' do
- let(:path) { '/runners/all' }
+ let(:path) { "/runners/all?#{query_path}" }
+
+ subject(:perform_request) { get api(path, current_user) }
it_behaves_like 'GET request permissions for admin mode'
context 'authorized user' do
- context 'with admin privileges' do
+ context 'with admin privileges', :enable_admin_mode do
+ let(:current_user) { admin }
+
it 'returns response status and headers' do
- get api(path, admin, admin_mode: true)
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
end
it 'returns all runners' do
- get api(path, admin, admin_mode: true)
+ perform_request
expect(json_response).to match_array [
a_hash_including('description' => 'Project runner', 'is_shared' => false, 'active' => true, 'paused' => false, 'runner_type' => 'project_type'),
@@ -149,133 +204,206 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
]
end
- it 'filters runners by scope' do
- get api('/runners/all?scope=shared', admin, admin_mode: true)
+ context 'when filtering runners by scope' do
+ let(:query) { { scope: scope } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(json_response).to contain_exactly(
- a_hash_including('description' => 'Shared runner', 'is_shared' => true)
- )
+ context 'with shared scope' do
+ let(:scope) { :shared }
+
+ it 'filters runners by scope' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to contain_exactly(
+ a_hash_including('description' => 'Shared runner', 'is_shared' => true)
+ )
+ end
+ end
+
+ context 'with specific scope' do
+ let(:scope) { :specific }
+
+ it 'filters runners by scope' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+
+ expect(json_response).to match_array [
+ a_hash_including('description' => 'Project runner'),
+ a_hash_including('description' => 'Two projects runner'),
+ a_hash_including('description' => 'Group runner A'),
+ a_hash_including('description' => 'Group runner B')
+ ]
+ end
+ end
+
+ context 'with invalid scope' do
+ let(:scope) { :unknown }
+
+ it 'avoids filtering' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
end
- it 'filters runners by scope' do
- get api('/runners/all?scope=specific', admin, admin_mode: true)
+ context 'when filtering runners by type' do
+ let(:query) { { type: type } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
+ context 'with project_type type' do
+ let(:type) { :project_type }
- expect(json_response).to match_array [
- a_hash_including('description' => 'Project runner'),
- a_hash_including('description' => 'Two projects runner'),
- a_hash_including('description' => 'Group runner A'),
- a_hash_including('description' => 'Group runner B')
- ]
- end
+ it 'filters runners by project type' do
+ perform_request
- it 'avoids filtering if scope is invalid' do
- get api('/runners/all?scope=unknown', admin, admin_mode: true)
- expect(response).to have_gitlab_http_status(:bad_request)
- end
+ expect(json_response).to match_array [
+ a_hash_including('description' => 'Project runner'),
+ a_hash_including('description' => 'Two projects runner')
+ ]
+ end
+ end
- it 'filters runners by project type' do
- get api('/runners/all?type=project_type', admin, admin_mode: true)
+ context 'with group_type type' do
+ let(:type) { :group_type }
- expect(json_response).to match_array [
- a_hash_including('description' => 'Project runner'),
- a_hash_including('description' => 'Two projects runner')
- ]
- end
+ it 'filters runners by group type' do
+ perform_request
- it 'filters runners by group type' do
- get api('/runners/all?type=group_type', admin, admin_mode: true)
+ expect(json_response).to match_array [
+ a_hash_including('description' => 'Group runner A'),
+ a_hash_including('description' => 'Group runner B')
+ ]
+ end
+ end
- expect(json_response).to match_array [
- a_hash_including('description' => 'Group runner A'),
- a_hash_including('description' => 'Group runner B')
- ]
- end
+ context 'with invalid type' do
+ let(:type) { :bogus }
- it 'does not filter by invalid type' do
- get api('/runners/all?type=bogus', admin, admin_mode: true)
+ it 'does not filter by invalid type' do
+ perform_request
- expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
end
context 'with an inactive runner' do
let_it_be(:runner) { create(:ci_runner, :project, :inactive, description: 'Inactive project runner', projects: [project]) }
- it 'filters runners by status' do
- get api('/runners/all?paused=true', admin, admin_mode: true)
+ context 'when filtering runners by paused status' do
+ let(:query) { { paused: true } }
- expect(json_response).to match_array [
- a_hash_including('description' => 'Inactive project runner')
- ]
+ it 'filters runners by status' do
+ perform_request
+
+ expect(json_response).to contain_exactly(a_hash_including('description' => 'Inactive project runner'))
+ end
end
- it 'filters runners by status' do
- get api('/runners/all?status=paused', admin, admin_mode: true)
+ context 'when filtering runners by status' do
+ let(:query) { { status: :paused } }
- expect(json_response).to match_array [
- a_hash_including('description' => 'Inactive project runner')
- ]
+ it 'filters runners by status' do
+ perform_request
+
+ expect(json_response).to contain_exactly(a_hash_including('description' => 'Inactive project runner'))
+ end
+
+ context 'and status is invalid' do
+ let(:query) { { status: :bogus } }
+
+ it 'does not filter by invalid status' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
end
end
- it 'does not filter by invalid status' do
- get api('/runners/all?status=bogus', admin, admin_mode: true)
+ context 'when filtering by tag_list' do
+ let(:query) { { tag_list: 'tag1,tag2' } }
- expect(response).to have_gitlab_http_status(:bad_request)
+ before_all do
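+ # A runner tagged only with tag2 is also created so the filter must match every requested tag.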
+ create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2])
+ create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: %w[tag2])
+ end
+
+ it 'filters runners by tag_list' do
+ perform_request
+
+ expect(json_response).to contain_exactly(
+ a_hash_including('description' => 'Runner tagged with tag1 and tag2')
+ )
+ end
end
- it 'filters runners by tag_list' do
- create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2])
- create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: ['tag2'])
-
- get api('/runners/all?tag_list=tag1,tag2', admin, admin_mode: true)
-
- expect(json_response).to match_array [
- a_hash_including('description' => 'Runner tagged with tag1 and tag2')
- ]
- end
-
- context 'with ci_runner_machines' do
- let_it_be(:version_ci_runner) { create(:ci_runner, :project, description: 'Runner with machine') }
- let_it_be(:version_ci_runner_machine) { create(:ci_runner_machine, runner: version_ci_runner, version: '15.0.3') }
- let_it_be(:version_16_ci_runner) { create(:ci_runner, :project, description: 'Runner with machine version 16') }
- let_it_be(:version_16_ci_runner_machine) { create(:ci_runner_machine, runner: version_16_ci_runner, version: '16.0.1') }
-
- it 'filters runners by version_prefix when prefix is "15.0"' do
- get api('/runners/all?version_prefix=15.0', admin, admin_mode: true)
-
- expect(json_response).to match_array [
- a_hash_including('description' => 'Runner with machine', 'active' => true, 'paused' => false)
- ]
+ describe 'with ci_runner_machines' do
+ before_all do
+ version_ci_runner = create(:ci_runner, :project, description: 'Runner with machine')
+ version_16_ci_runner = create(:ci_runner, :project, description: 'Runner with machine version 16')
+ create(:ci_runner_machine, runner: version_ci_runner, version: '15.0.3')
+ create(:ci_runner_machine, runner: version_16_ci_runner, version: '16.0.1')
end
- it 'filters runners by version_prefix when prefix is "16"' do
- get api('/runners/all?version_prefix=16', admin, admin_mode: true)
- expect(json_response).to match_array [
- a_hash_including('description' => 'Runner with machine version 16', 'active' => true, 'paused' => false)
- ]
- end
+ context 'when filtering by version_prefix' do
+ let(:query) { { version_prefix: version_prefix } }
- it 'filters runners by version_prefix when prefix is "25"' do
- get api('/runners/all?version_prefix=25', admin, admin_mode: true)
- expect(json_response).to match_array []
- end
+ context 'with version_prefix set to "15.0"' do
+ let(:version_prefix) { '15.0' }
- it 'does not filter runners by version_prefix when prefix is invalid ("V15")' do
- get api('/runners/all?version_prefix=v15', admin, admin_mode: true)
+ it 'filters runners by version_prefix' do
+ perform_request
- expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to contain_exactly(
+ a_hash_including('description' => 'Runner with machine', 'active' => true, 'paused' => false)
+ )
+ end
+ end
+
+ context 'with version_prefix set to "16"' do
+ let(:version_prefix) { '16' }
+
+ it 'filters runners by version_prefix' do
+ perform_request
+
+ expect(json_response).to contain_exactly(
+ a_hash_including('description' => 'Runner with machine version 16', 'active' => true, 'paused' => false)
+ )
+ end
+ end
+
+ context 'with version_prefix set to "25"' do
+ let(:version_prefix) { '25' }
+
+ it 'returns no runners' do
+ perform_request
+
+ expect(json_response).to match_array []
+ end
+ end
+
+ context 'with version_prefix set to invalid prefix "V15"' do
+ let(:version_prefix) { 'V15' }
+
+ it 'does not filter runners by version_prefix' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
end
end
end
context 'without admin privileges' do
+ let(:current_user) { user }
+
it 'does not return runners list' do
- get api(path, user)
+ perform_request
expect(response).to have_gitlab_http_status(:forbidden)
end
@@ -283,8 +411,10 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
context 'unauthorized user' do
+ let(:current_user) { nil }
+
it 'does not return runners' do
- get api('/runners')
+ perform_request
expect(response).to have_gitlab_http_status(:unauthorized)
end
@@ -292,14 +422,23 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
describe 'GET /runners/:id' do
- let(:path) { "/runners/#{project_runner.id}" }
+ let(:runner_id) { runner.id }
+ let(:path) { "/runners/#{runner_id}?#{query_path}" }
- it_behaves_like 'GET request permissions for admin mode'
+ subject(:perform_request) { get api(path, current_user) }
+
+ it_behaves_like 'GET request permissions for admin mode' do
+ let(:runner) { project_runner }
+ end
context 'admin user' do
+ let(:current_user) { admin }
+
context 'when runner is shared' do
+ let(:runner) { shared_runner }
+
it "returns runner's details" do
- get api("/runners/#{shared_runner.id}", admin)
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['description']).to eq(shared_runner.description)
@@ -311,81 +450,102 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
end
- context 'when runner is not shared' do
- context 'when unused runner is present' do
- let!(:unused_project_runner) { create(:ci_runner, :project, :without_projects) }
+ context 'when runner is a project runner' do
+ let(:runner) { project_runner }
- it 'deletes unused runner' do
- expect do
- delete api("/runners/#{unused_project_runner.id}", admin, admin_mode: true)
+ it "returns forbidden" do
+ perform_request
- expect(response).to have_gitlab_http_status(:no_content)
- end.to change { ::Ci::Runner.project_type.count }.by(-1)
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'with admin mode enabled', :enable_admin_mode do
+ it "returns runner's details" do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['description']).to eq(runner.description)
+ end
+
+ it "returns the project's details" do
+ perform_request
+
+ expect(json_response['projects'].first['id']).to eq(project.id)
+ end
+ end
+ end
+
+ context 'when runner does not exist' do
+ let(:runner_id) { non_existing_record_id }
+ let(:runner) { project_runner }
+
+ it 'returns 404', :enable_admin_mode do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'authorized user' do
+ let(:current_user) { user }
+
+ context 'when the runner is a group runner' do
+ let(:runner) { group_runner_a }
+
+ it "returns the runner's details" do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['description']).to eq(runner.description)
+ expect(json_response['groups'].first['id']).to eq(group.id)
+ end
+ end
+
+ context "runner project's administrative user" do
+ let(:current_user) { user }
+
+ context 'when runner is not shared' do
+ let(:runner) { project_runner }
+
+ it "returns runner's details" do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['description']).to eq(runner.description)
end
end
- it "returns runner's details" do
- get api(path, admin, admin_mode: true)
+ context 'when runner is shared' do
+ let(:runner) { shared_runner }
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['description']).to eq(project_runner.description)
- end
+ it "returns runner's details" do
+ perform_request
- it "returns the project's details for a project runner" do
- get api(path, admin, admin_mode: true)
-
- expect(json_response['projects'].first['id']).to eq(project.id)
- end
- end
-
- it 'returns 404 if runner does not exist' do
- get api("/runners/#{non_existing_record_id}", admin, admin_mode: true)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when the runner is a group runner' do
- it "returns the runner's details" do
- get api("/runners/#{group_runner_a.id}", admin, admin_mode: true)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['description']).to eq(group_runner_a.description)
- expect(json_response['groups'].first['id']).to eq(group.id)
- end
- end
-
- context "runner project's administrative user" do
- context 'when runner is not shared' do
- it "returns runner's details" do
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['description']).to eq(project_runner.description)
- end
- end
-
- context 'when runner is shared' do
- it "returns runner's details" do
- get api("/runners/#{shared_runner.id}", user)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['description']).to eq(shared_runner.description)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['description']).to eq(runner.description)
+ end
end
end
end
context 'other authorized user' do
+ let(:current_user) { user2 }
+ let(:runner) { project_runner }
+
it "does not return project runner's details" do
- get api(path, user2)
+ perform_request
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'unauthorized user' do
+ let(:current_user) { nil }
+ let(:runner) { project_runner }
+
it "does not return project runner's details" do
- get api(path)
+ perform_request
expect(response).to have_gitlab_http_status(:unauthorized)
end
@@ -393,331 +553,472 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
describe 'PUT /runners/:id' do
- let(:path) { "/runners/#{project_runner.id}" }
+ let(:runner_id) { runner.id }
+ let(:path) { "/runners/#{runner_id}?#{query_path}" }
+
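+ # NOTE: each nested context below supplies its own `runner` and `params` lets used by this request.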
+ subject(:perform_request) { put api(path, current_user), params: params }
it_behaves_like 'PUT request permissions for admin mode' do
+ let(:runner) { project_runner }
let(:params) { { description: 'test' } }
end
- context 'admin user' do
+ context 'admin user', :enable_admin_mode do
+ let(:current_user) { admin }
+
# see https://gitlab.com/gitlab-org/gitlab-foss/issues/48625
context 'single parameter update' do
- it 'runner description' do
- description = shared_runner.description
- update_runner(shared_runner.id, admin, description: "#{description}_updated")
+ let(:runner) { shared_runner }
- expect(response).to have_gitlab_http_status(:ok)
- expect(shared_runner.reload.description).to eq("#{description}_updated")
+ context 'when changing description' do
+ let(:params) { { description: "#{runner.description}_updated" } }
+
+ it 'updates runner description' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(runner.reload.description).to eq(params[:description])
+ end
end
- it 'runner active state' do
- active = shared_runner.active
- update_runner(shared_runner.id, admin, active: !active)
+ context 'when changing active state' do
+ let(:params) { { active: !runner.active } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(shared_runner.reload.active).to eq(!active)
+ it 'updates runner active state' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(runner.reload.active).to eq(params[:active])
+ end
end
- it 'runner paused state' do
- active = shared_runner.active
- update_runner(shared_runner.id, admin, paused: active)
+ context 'when changing paused state' do
+ let(:params) { { paused: runner.active } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(shared_runner.reload.active).to eq(!active)
- end
+ it 'updates runner paused state' do
+ perform_request
- it 'runner tag list' do
- update_runner(shared_runner.id, admin, tag_list: ['ruby2.1', 'pgsql', 'mysql'])
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(runner.reload.active).to eq(!params[:paused])
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(shared_runner.reload.tag_list).to include('ruby2.1', 'pgsql', 'mysql')
- end
-
- it 'unrelated runner attribute on an existing runner with too many tags' do
# This test ensures that it is possible to update any attribute on a runner that currently fails the
# validation that ensures that there aren't too many tags associated with a runner
- existing_invalid_shared_runner = build(:ci_runner, :instance, tag_list: (1..::Ci::Runner::TAG_LIST_MAX_LENGTH + 1).map { |i| "tag#{i}" })
- existing_invalid_shared_runner.save!(validate: false)
+ context 'when changing unrelated runner attribute on an existing runner with too many tags' do
+ let(:params) { { active: !runner.active } }
+ let(:runner) do
+ build(:ci_runner, :instance, tag_list: (1..::Ci::Runner::TAG_LIST_MAX_LENGTH + 1).map { |i| "tag#{i}" })
+ .tap { |runner| runner.save!(validate: false) }
+ end
- active = existing_invalid_shared_runner.active
- update_runner(existing_invalid_shared_runner.id, admin, paused: active)
+ it 'updates the unrelated attribute' do
+ perform_request
- expect(response).to have_gitlab_http_status(:ok)
- expect(existing_invalid_shared_runner.reload.active).to eq(!active)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(runner.reload.active).to eq(params[:active])
+ end
+ end
end
- it 'runner untagged flag' do
- # Ensure tag list is non-empty before setting untagged to false.
- update_runner(shared_runner.id, admin, tag_list: ['ruby2.1', 'pgsql', 'mysql'])
- update_runner(shared_runner.id, admin, run_untagged: 'false')
+ context 'when changing tag list' do
+ let(:params) { { tag_list: %w[ruby2.1 pgsql mysql] } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(shared_runner.reload.run_untagged?).to be(false)
+ it 'updates runner tag list' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(runner.reload.tag_list).to include('ruby2.1', 'pgsql', 'mysql')
+ end
end
- it 'runner unlocked flag' do
- update_runner(shared_runner.id, admin, locked: 'true')
+ context 'when changing untagged flag' do
+ let(:params) { { tag_list: %w[ruby2.1 pgsql mysql], run_untagged: 'false' } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(shared_runner.reload.locked?).to be(true)
+ it 'updates untagged flag' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(runner.reload.run_untagged?).to be(false)
+ end
end
- it 'runner access level' do
- update_runner(shared_runner.id, admin, access_level: 'ref_protected')
+ context 'when changing locked flag' do
+ let(:params) { { locked: !runner.locked } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(shared_runner.reload.ref_protected?).to be_truthy
+ it 'updates locked flag' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(runner.reload.locked?).to be(params[:locked])
+ end
end
- it 'runner maximum timeout' do
- update_runner(shared_runner.id, admin, maximum_timeout: 1234)
+ context 'when changing access level' do
+ let(:params) { { access_level: 'ref_protected' } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(shared_runner.reload.maximum_timeout).to eq(1234)
+ it 'updates access level' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(runner.reload.ref_protected?).to be_truthy
+ end
end
- it 'maintenance note' do
- maintenance_note = shared_runner.maintenance_note
- update_runner(shared_runner.id, admin, maintenance_note: "#{maintenance_note}_updated")
+ context 'when changing maximum timeout' do
+ let(:params) { { maximum_timeout: 1234 } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(shared_runner.reload.maintenance_note).to eq("#{maintenance_note}_updated")
+ it 'updates maximum timeout' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(runner.reload.maximum_timeout).to eq(1234)
+ end
end
- it 'fails with no parameters' do
- put api("/runners/#{shared_runner.id}", admin)
+ context 'when changing maintenance note' do
+ let(:params) { { maintenance_note: "#{runner.maintenance_note}_updated" } }
- shared_runner.reload
- expect(response).to have_gitlab_http_status(:bad_request)
+ it 'updates maintenance note' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(runner.reload.maintenance_note).to eq(params[:maintenance_note])
+ end
+ end
+
+ context 'with no parameters' do
+ let(:params) { {} }
+
+ it 'fails with bad request' do
+ perform_request
+
+ runner.reload
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
end
context 'when runner is shared' do
- it 'updates runner' do
- description = shared_runner.description
- active = shared_runner.active
- runner_queue_value = shared_runner.ensure_runner_queue_value
-
- update_runner(
- shared_runner.id,
- admin,
- description: "#{description}_updated",
- active: !active,
- tag_list: ['ruby2.1', 'pgsql', 'mysql'],
+ let(:runner) { shared_runner }
+ let(:params) do
+ {
+ description: "#{runner.description}_updated",
+ active: !runner.active,
+ tag_list: %w[ruby2.1 pgsql mysql],
run_untagged: 'false',
locked: 'true',
access_level: 'ref_protected',
maximum_timeout: 1234
- )
- shared_runner.reload
+ }
+ end
+ it 'updates runner' do
+ active = runner.active
+ runner_queue_value = runner.ensure_runner_queue_value
+
+ perform_request
+
+ runner.reload
expect(response).to have_gitlab_http_status(:ok)
- expect(shared_runner.description).to eq("#{description}_updated")
- expect(shared_runner.active).to eq(!active)
- expect(shared_runner.tag_list).to include('ruby2.1', 'pgsql', 'mysql')
- expect(shared_runner.run_untagged?).to be(false)
- expect(shared_runner.locked?).to be(true)
- expect(shared_runner.ref_protected?).to be_truthy
- expect(shared_runner.ensure_runner_queue_value)
- .not_to eq(runner_queue_value)
- expect(shared_runner.maximum_timeout).to eq(1234)
+ expect(runner.description).to eq(params[:description])
+ expect(runner.active).to eq(!active)
+ expect(runner.tag_list).to match_array(params[:tag_list])
+ expect(runner.run_untagged?).to be(false)
+ expect(runner.locked?).to be(true)
+ expect(runner.ref_protected?).to be_truthy
+ expect(runner.ensure_runner_queue_value).not_to eq(runner_queue_value)
+ expect(runner.maximum_timeout).to eq(1234)
end
end
context 'when runner is not shared' do
+ let(:runner) { project_runner }
+ let(:params) { { description: 'test' } }
+
it 'updates runner' do
- description = project_runner.description
- runner_queue_value = project_runner.ensure_runner_queue_value
+ description = runner.description
+ runner_queue_value = runner.ensure_runner_queue_value
- update_runner(project_runner.id, admin, description: 'test')
- project_runner.reload
+ perform_request
+ runner.reload
expect(response).to have_gitlab_http_status(:ok)
- expect(project_runner.description).to eq('test')
- expect(project_runner.description).not_to eq(description)
- expect(project_runner.ensure_runner_queue_value)
- .not_to eq(runner_queue_value)
+ expect(runner.description).to eq(params[:description])
+ expect(runner.description).not_to eq(description)
+ expect(runner.ensure_runner_queue_value).not_to eq(runner_queue_value)
end
end
- it 'returns 404 if runner does not exist' do
- update_runner(non_existing_record_id, admin, description: 'test')
+ context 'when runner id does not exist' do
+ let(:runner_id) { non_existing_record_id }
+ let(:params) { { description: 'test' } }
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ it 'returns 404' do
+ perform_request
- def update_runner(id, user, args)
- put api("/runners/#{id}", user, admin_mode: true), params: args
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
end
context 'authorized user' do
- let_it_be(:params) { { description: 'test' } }
+ let(:current_user) { user }
+ let(:params) { { description: 'test' } }
context 'when runner is shared' do
+ let(:runner) { shared_runner }
+
it 'does not update runner' do
- put api("/runners/#{shared_runner.id}", user), params: params
+ perform_request
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when runner is not shared' do
- it 'does not update project runner without access to it' do
- put api(path, user2), params: { description: 'test' }
+ let(:runner) { project_runner }
- expect(response).to have_gitlab_http_status(:forbidden)
+ it 'updates runner description' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(runner.reload.description).to eq(params[:description])
end
- it 'updates project runner with access to it' do
- description = project_runner.description
- put api(path, admin, admin_mode: true), params: params
- project_runner.reload
+ context 'when user does not have access to runner' do
+ let(:current_user) { user2 }
- expect(project_runner.description).to eq('test')
- expect(project_runner.description).not_to eq(description)
+ it 'does not update runner' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
end
end
context 'unauthorized user' do
- it 'does not delete project runner' do
- put api(path)
+ let(:current_user) { nil }
+ let(:runner) { project_runner }
+ let(:params) { { description: 'test' } }
+
+ it 'does not update project runner' do
+ perform_request
expect(response).to have_gitlab_http_status(:unauthorized)
+ expect(runner.reload.description).not_to eq(params[:description])
end
end
end
describe 'DELETE /runners/:id' do
- let(:path) { "/runners/#{shared_runner.id}" }
+ let(:runner_id) { runner.id }
+ let(:path) { "/runners/#{runner_id}?#{query_path}" }
- it_behaves_like 'DELETE request permissions for admin mode'
+ subject(:perform_request) { delete api(path, current_user) }
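+ # NOTE: `runner` is defined per context below; successful deletions are also asserted through changes in runner counts.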
+
+ it_behaves_like 'DELETE request permissions for admin mode' do
+ let(:runner) { shared_runner }
+ end
+
+ context 'admin user', :enable_admin_mode do
+ let(:current_user) { admin }
- context 'admin user' do
context 'when runner is shared' do
+ let(:runner) { shared_runner }
+
it 'deletes runner' do
- expect_next_instance_of(Ci::Runners::UnregisterRunnerService, shared_runner, admin) do |service|
+ expect_next_instance_of(Ci::Runners::UnregisterRunnerService, runner, current_user) do |service|
expect(service).to receive(:execute).once.and_call_original
end
expect do
- delete api(path, admin, admin_mode: true)
+ perform_request
expect(response).to have_gitlab_http_status(:no_content)
end.to change { ::Ci::Runner.instance_type.count }.by(-1)
end
it_behaves_like '412 response' do
- let(:request) { api(path, admin, admin_mode: true) }
+ let(:request) { api(path, current_user) }
end
end
context 'when runner is not shared' do
+ let(:runner) { project_runner }
+
it 'deletes used project runner' do
- expect_next_instance_of(Ci::Runners::UnregisterRunnerService, project_runner, admin) do |service|
+ expect_next_instance_of(Ci::Runners::UnregisterRunnerService, runner, current_user) do |service|
expect(service).to receive(:execute).once.and_call_original
end
expect do
- delete api("/runners/#{project_runner.id}", admin, admin_mode: true)
+ perform_request
expect(response).to have_gitlab_http_status(:no_content)
end.to change { ::Ci::Runner.project_type.count }.by(-1)
end
end
- it 'returns 404 if runner does not exist' do
- allow_next_instance_of(Ci::Runners::UnregisterRunnerService) do |service|
- expect(service).not_to receive(:execute)
+ context 'when runner does not exist' do
+ let(:runner_id) { non_existing_record_id }
+
+ it 'returns 404' do
+ allow_next_instance_of(Ci::Runners::UnregisterRunnerService) do |service|
+ expect(service).not_to receive(:execute)
+ end
+
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
end
-
- delete api("/runners/#{non_existing_record_id}", admin, admin_mode: true)
-
- expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'authorized user' do
+ let(:current_user) { user }
+
context 'when runner is shared' do
+ let(:runner) { shared_runner }
+
it 'does not delete runner' do
- delete api(path, user)
+ perform_request
+
expect(response).to have_gitlab_http_status(:forbidden)
end
end
- context 'when runner is not shared' do
- it 'does not delete runner without access to it' do
- delete api("/runners/#{project_runner.id}", user2)
- expect(response).to have_gitlab_http_status(:forbidden)
+ context 'with a project runner' do
+ let(:runner) { project_runner }
+
+ context 'when user does not have access to runner' do
+ let(:current_user) { user2 }
+
+ it 'does not delete runner without access to it' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
- it 'does not delete project runner with more than one associated project' do
- delete api("/runners/#{two_projects_runner.id}", user)
- expect(response).to have_gitlab_http_status(:forbidden)
+ context 'when runner is associated with more than one project' do
+ let(:runner) { two_projects_runner }
+
+ it 'does not delete project runner' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
- it 'deletes project runner for one owned project' do
- expect do
- delete api("/runners/#{project_runner.id}", user)
+ context 'when runner is associated with one owned project' do
+ let(:runner) { project_runner }
- expect(response).to have_gitlab_http_status(:no_content)
- end.to change { ::Ci::Runner.project_type.count }.by(-1)
- end
+ it 'deletes project runner' do
+ expect do
+ perform_request
- it 'does not delete group runner with guest access' do
- delete api("/runners/#{group_runner_a.id}", group_guest)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
-
- it 'does not delete group runner with reporter access' do
- delete api("/runners/#{group_runner_a.id}", group_reporter)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
-
- it 'does not delete group runner with developer access' do
- delete api("/runners/#{group_runner_a.id}", group_developer)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
-
- it 'does not delete group runner with maintainer access' do
- delete api("/runners/#{group_runner_a.id}", group_maintainer)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
-
- it 'deletes owned group runner with owner access' do
- expect do
- delete api("/runners/#{group_runner_a.id}", user)
-
- expect(response).to have_gitlab_http_status(:no_content)
- end.to change { ::Ci::Runner.group_type.count }.by(-1)
- end
-
- it 'deletes inherited group runner with owner access' do
- expect do
- delete api("/runners/#{group_runner_b.id}", user)
-
- expect(response).to have_gitlab_http_status(:no_content)
- end.to change { ::Ci::Runner.group_type.count }.by(-1)
+ expect(response).to have_gitlab_http_status(:no_content)
+ end.to change { ::Ci::Runner.project_type.count }.by(-1)
+ end
end
it_behaves_like '412 response' do
- let(:request) { api("/runners/#{project_runner.id}", user) }
+ let(:request) { api(path, current_user) }
+ end
+ end
+
+ context 'with group runner' do
+ let(:runner) { group_runner_a }
+
+ context 'when user has guest access' do
+ let(:current_user) { group_guest }
+
+ it 'does not delete runner' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when user has reporter access' do
+ let(:current_user) { group_reporter }
+
+ it 'does not delete runner' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when user has developer access' do
+ let(:current_user) { group_developer }
+
+ it 'does not delete runner' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when user has maintainer access' do
+ let(:current_user) { group_maintainer }
+
+ it 'does not delete runner' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when user has owner access' do
+ let(:current_user) { user }
+
+ it 'deletes runner' do
+ expect do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end.to change { ::Ci::Runner.group_type.count }.by(-1)
+ end
+ end
+
+ it_behaves_like '412 response' do
+ let(:request) { api(path, current_user) }
+ end
+ end
+
+ context 'with inherited group runner' do
+ let(:runner) { group_runner_b }
+
+ context 'when user has owner access' do
+ let(:current_user) { user }
+
+ it 'deletes group runner' do
+ expect do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end.to change { ::Ci::Runner.group_type.count }.by(-1)
+ end
+ end
+
+ it_behaves_like '412 response' do
+ let(:request) { api(path, current_user) }
end
end
end
context 'unauthorized user' do
- it 'does not delete project runner' do
+ let(:current_user) { nil }
+ let(:runner) { project_runner }
+
+ it 'does not delete runner' do
allow_next_instance_of(Ci::Runners::UnregisterRunnerService) do |service|
expect(service).not_to receive(:execute)
end
- delete api("/runners/#{project_runner.id}")
+ perform_request
expect(response).to have_gitlab_http_status(:unauthorized)
end
@@ -725,117 +1026,170 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
describe 'POST /runners/:id/reset_authentication_token' do
- let(:path) { "/runners/#{shared_runner.id}/reset_authentication_token" }
+ let(:runner_id) { runner.id }
+ let(:path) { "/runners/#{runner_id}/reset_authentication_token?#{query_path}" }
+
+ subject(:perform_request) { post api(path, current_user) }
+
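+ # Shared behaviour: the request succeeds and the runner's authentication token is rotated.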
+ shared_examples 'a runner accepting authentication token reset' do
+ it 'resets runner authentication token' do
+ expect do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq({ 'token' => runner.reload.token, 'token_expires_at' => nil })
+ end.to change { runner.reload.token }
+ end
+ end
it_behaves_like 'POST request permissions for admin mode' do
+ let(:runner) { project_runner }
let(:params) { {} }
end
- context 'admin user' do
- it 'resets shared runner authentication token' do
- expect do
- post api(path, admin, admin_mode: true)
+ context 'admin user', :enable_admin_mode do
+ let(:current_user) { admin }
- expect(response).to have_gitlab_http_status(:success)
- expect(json_response).to eq({ 'token' => shared_runner.reload.token, 'token_expires_at' => nil })
- end.to change { shared_runner.reload.token }
+ context 'when runner is shared' do
+ let(:runner) { shared_runner }
+
+ it 'resets runner authentication token' do
+ expect do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq({ 'token' => runner.reload.token, 'token_expires_at' => nil })
+ end.to change { runner.reload.token }
+ end
end
- it 'returns 404 if runner does not exist' do
- post api("/runners/#{non_existing_record_id}/reset_authentication_token", admin, admin_mode: true)
+ context 'when runner does not exist' do
+ let(:runner_id) { non_existing_record_id }
- expect(response).to have_gitlab_http_status(:not_found)
+ it 'returns 404' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
end
context 'authorized user' do
- it 'does not reset project runner authentication token without access to it' do
- expect do
- post api("/runners/#{project_runner.id}/reset_authentication_token", user2)
- expect(response).to have_gitlab_http_status(:forbidden)
- end.not_to change { project_runner.reload.token }
+ context 'with project runner' do
+ let(:runner) { project_runner }
+
+ context 'when user does not have access to runner' do
+ let(:current_user) { user2 }
+
+ it 'does not reset runner' do
+ expect do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end.not_to change { runner.reload.token }
+ end
+ end
+
+ context 'when user has access to runner' do
+ let(:current_user) { user }
+
+ it_behaves_like 'a runner accepting authentication token reset'
+ end
end
- it 'resets project runner authentication token for owned project' do
- expect do
- post api("/runners/#{project_runner.id}/reset_authentication_token", user)
+ context 'with group runner' do
+ let(:runner) { group_runner_a }
- expect(response).to have_gitlab_http_status(:success)
- expect(json_response).to eq({ 'token' => project_runner.reload.token, 'token_expires_at' => nil })
- end.to change { project_runner.reload.token }
- end
+ context 'when user has guest access' do
+ let(:current_user) { group_guest }
- it 'does not reset group runner authentication token with guest access' do
- expect do
- post api("/runners/#{group_runner_a.id}/reset_authentication_token", group_guest)
+ it 'does not reset runner authentication token' do
+ expect do
+ perform_request
- expect(response).to have_gitlab_http_status(:forbidden)
- end.not_to change { group_runner_a.reload.token }
- end
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end.not_to change { runner.reload.token }
+ end
+ end
- it 'does not reset group runner authentication token with reporter access' do
- expect do
- post api("/runners/#{group_runner_a.id}/reset_authentication_token", group_reporter)
+ context 'when user has reporter access' do
+ let(:current_user) { group_reporter }
- expect(response).to have_gitlab_http_status(:forbidden)
- end.not_to change { group_runner_a.reload.token }
- end
+ it 'does not reset runner authentication token' do
+ expect do
+ perform_request
- it 'does not reset group runner authentication token with developer access' do
- expect do
- post api("/runners/#{group_runner_a.id}/reset_authentication_token", group_developer)
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end.not_to change { runner.reload.token }
+ end
+ end
- expect(response).to have_gitlab_http_status(:forbidden)
- end.not_to change { group_runner_a.reload.token }
- end
+ context 'when user has developer access' do
+ let(:current_user) { group_developer }
- it 'does not reset group runner authentication token with maintainer access' do
- expect do
- post api("/runners/#{group_runner_a.id}/reset_authentication_token", group_maintainer)
+ it 'does not reset runner authentication token' do
+ expect do
+ perform_request
- expect(response).to have_gitlab_http_status(:forbidden)
- end.not_to change { group_runner_a.reload.token }
- end
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end.not_to change { runner.reload.token }
+ end
+ end
- it 'resets group runner authentication token with owner access' do
- expect do
- post api("/runners/#{group_runner_a.id}/reset_authentication_token", user)
+ context 'when user has maintainer access' do
+ let(:current_user) { group_maintainer }
- expect(response).to have_gitlab_http_status(:success)
- expect(json_response).to eq({ 'token' => group_runner_a.reload.token, 'token_expires_at' => nil })
- end.to change { group_runner_a.reload.token }
- end
+ it 'does not reset runner authentication token' do
+ expect do
+ perform_request
- it 'resets group runner authentication token with owner access with expiration time', :freeze_time do
- expect(group_runner_a.reload.token_expires_at).to be_nil
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end.not_to change { runner.reload.token }
+ end
+ end
- group.update!(runner_token_expiration_interval: 5.days)
+ context 'when user has owner access' do
+ let(:current_user) { user }
- expect do
- post api("/runners/#{group_runner_a.id}/reset_authentication_token", user)
- group_runner_a.reload
+ it_behaves_like 'a runner accepting authentication token reset'
- expect(response).to have_gitlab_http_status(:success)
- expect(json_response).to eq({ 'token' => group_runner_a.token, 'token_expires_at' => group_runner_a.token_expires_at.iso8601(3) })
- expect(group_runner_a.token_expires_at).to eq(5.days.from_now)
- end.to change { group_runner_a.reload.token }
+ context 'when runner token has expiration time', :freeze_time do
+ before do
+ group.update!(runner_token_expiration_interval: 5.days)
+ end
+
+ it 'resets runner authentication token with expiration time' do
+ expect(runner.reload.token_expires_at).to be_nil
+
+ expect do
+ perform_request
+
+ runner.reload
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq({ 'token' => runner.token, 'token_expires_at' => runner.token_expires_at.iso8601(3) })
+ expect(runner.token_expires_at).to eq(5.days.from_now)
+ end.to change { runner.reload.token }
+ end
+ end
+ end
end
end
context 'unauthorized user' do
+ let(:current_user) { nil }
+ let(:runner) { project_runner }
+
it 'does not reset authentication token' do
expect do
- post api(path)
+ perform_request
expect(response).to have_gitlab_http_status(:unauthorized)
- end.not_to change { shared_runner.reload.token }
+ end.not_to change { runner.reload.token }
end
end
end
describe 'GET /runners/:id/jobs' do
- subject(:request) { get api(path, user, **api_params) }
-
let_it_be(:shared_runner_manager1) { create(:ci_runner_machine, runner: shared_runner, system_xid: 'id2') }
let_it_be(:jobs) do
project_runner_manager1 = create(:ci_runner_machine, runner: project_runner, system_xid: 'id1')
@@ -852,26 +1206,24 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
]
end
- let(:api_params) { {} }
- let(:runner_id) { project_runner.id }
- let(:query_part) { query_params.merge(system_id_params).map { |param| param.join('=') }.join('&') }
- let(:path) { "/runners/#{runner_id}/jobs?#{query_part}" }
- let(:query_params) { {} }
- let(:system_id_params) { {} }
+ let(:runner_id) { runner.id }
+ let(:path) { "/runners/#{runner_id}/jobs?#{query_path}" }
- it_behaves_like 'GET request permissions for admin mode'
+ subject(:perform_request) { get api(path, current_user) }
- context 'admin user' do
- let(:user) { admin }
- let(:api_params) { { admin_mode: true } }
+ it_behaves_like 'GET request permissions for admin mode' do
+ let(:runner) { project_runner }
+ end
+
+ context 'admin user', :enable_admin_mode do
+ let(:current_user) { admin }
context 'when runner exists' do
context 'when runner is shared' do
- let(:runner_id) { shared_runner.id }
- let(:system_id) { 'id2' }
+ let(:runner) { shared_runner }
it 'return jobs' do
- request
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
@@ -885,13 +1237,15 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
it_behaves_like 'an endpoint with keyset pagination', invalid_order: nil do
let(:first_record) { jobs[2] }
let(:second_record) { jobs[1] }
- let(:api_call) { api(path, user, **api_params) }
+ let(:api_call) { api(path, current_user) }
end
end
context 'when runner is a project runner' do
+ let(:runner) { project_runner }
+
it 'return jobs' do
- request
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
@@ -903,9 +1257,8 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
context 'when user does not have authorization to see all jobs' do
- let(:runner_id) { two_projects_runner.id }
- let(:user) { user2 }
- let(:api_params) { {} }
+ let(:runner) { two_projects_runner }
+ let(:current_user) { user2 }
before_all do
project.add_guest(user2)
@@ -913,107 +1266,107 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
it 'shows only jobs it has permission to see' do
- request
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to match([a_hash_including('id' => jobs[6].id)])
end
end
- end
- context 'when valid status is provided' do
- let(:query_params) { { status: :failed } }
+ context 'when valid status is provided' do
+ let(:query) { { status: :failed } }
- it 'return filtered jobs' do
- request
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
-
- expect(json_response).to match([a_hash_including('id' => jobs[4].id)])
- end
- end
-
- context 'when valid order_by is provided' do
- let(:query_params) { { order_by: :id } }
-
- context 'when sort order is not specified' do
- it 'return jobs in descending order' do
- request
+ it 'returns filtered jobs' do
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
- expect(json_response).to match([
- a_hash_including('id' => jobs[4].id),
- a_hash_including('id' => jobs[3].id)
- ])
+ expect(json_response).to match([a_hash_including('id' => jobs[4].id)])
end
end
- context 'when sort order is specified as asc' do
- let(:query_params) { { order_by: :id, sort: :asc } }
+ context 'when valid order_by is provided' do
+ let(:query) { { order_by: :id } }
- it 'return jobs sorted in ascending order' do
- request
+ context 'when sort order is not specified' do
+ it 'returns jobs in descending order' do
+ perform_request
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
- expect(json_response).to match([
- a_hash_including('id' => jobs[3].id),
- a_hash_including('id' => jobs[4].id)
- ])
+ expect(json_response).to match([
+ a_hash_including('id' => jobs[4].id),
+ a_hash_including('id' => jobs[3].id)
+ ])
+ end
+ end
+
+ context 'when sort order is specified as asc' do
+ let(:query) { { order_by: :id, sort: :asc } }
+
+ it 'returns jobs sorted in ascending order' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+
+ expect(json_response).to match([
+ a_hash_including('id' => jobs[3].id),
+ a_hash_including('id' => jobs[4].id)
+ ])
+ end
end
end
- end
- context 'when invalid status is provided' do
- let(:query_params) { { status: 'non-existing' } }
+ context 'when invalid status is provided' do
+ let(:query) { { status: 'non-existing' } }
- it 'return 400' do
- request
+ it 'returns 400' do
+ perform_request
- expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
- end
- context 'when invalid order_by is provided' do
- let(:query_params) { { order_by: 'non-existing' } }
+ context 'when invalid order_by is provided' do
+ let(:query) { { order_by: 'non-existing' } }
- it 'return 400' do
- request
+ it 'returns 400' do
+ perform_request
- expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
- end
- context 'when invalid sort is provided' do
- let(:query_params) { { sort: 'non-existing' } }
+ context 'when invalid sort is provided' do
+ let(:query) { { sort: 'non-existing' } }
- it 'return 400' do
- request
+ it 'returns 400' do
+ perform_request
- expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
end
end
describe 'eager loading' do
- let(:runner_id) { shared_runner.id }
+ let(:runner) { shared_runner }
it 'avoids N+1 DB queries' do
- get api(path, user, **api_params)
+ get api(path, current_user)
control = ActiveRecord::QueryRecorder.new do
- get api(path, user, **api_params)
+ get api(path, current_user)
end
create(:ci_build, :failed, runner: shared_runner, project: project)
expect do
- get api(path, user, **api_params)
+ get api(path, current_user)
end.not_to exceed_query_limit(control.count)
end
@@ -1038,7 +1391,7 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
]).once.and_call_original
end
- get api(path, admin, admin_mode: true), params: { per_page: 2, order_by: 'id', sort: 'desc' }
+ get api(path, current_user), params: { per_page: 2, order_by: 'id', sort: 'desc' }
end
end
@@ -1046,7 +1399,7 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
let(:runner_id) { non_existing_record_id }
it 'returns 404' do
- request
+ perform_request
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -1054,22 +1407,24 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
context "runner project's administrative user" do
- context 'when runner exists' do
- let(:runner_id) { shared_runner.id }
+ let(:current_user) { user }
+ context 'when runner exists' do
context 'when runner is shared' do
+ let(:runner) { shared_runner }
+
it 'returns 403' do
- request
+ perform_request
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when runner is a project runner' do
- let(:runner_id) { project_runner.id }
+ let(:runner) { project_runner }
it 'return jobs' do
- request
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
@@ -1081,10 +1436,10 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
context 'when valid status is provided' do
- let(:query_params) { { status: :failed } }
+ let(:query) { { status: :failed } }
it 'return filtered jobs' do
- request
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
@@ -1096,10 +1451,10 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
context 'when invalid status is provided' do
- let(:query_params) { { status: 'non-existing' } }
+ let(:query) { { status: 'non-existing' } }
it 'return 400' do
- request
+ perform_request
expect(response).to have_gitlab_http_status(:bad_request)
end
@@ -1111,27 +1466,29 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
let(:runner_id) { non_existing_record_id }
it 'returns 404' do
- request
+ perform_request
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'other authorized user' do
- let(:user) { user2 }
+ let(:current_user) { user2 }
+ let(:runner) { shared_runner }
it 'does not return jobs' do
- request
+ perform_request
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'unauthorized user' do
- let(:user) { nil }
+ let(:current_user) { nil }
+ let(:runner) { shared_runner }
it 'does not return jobs' do
- request
+ perform_request
expect(response).to have_gitlab_http_status(:unauthorized)
end
@@ -1139,32 +1496,34 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
context 'with system_id param' do
- let(:system_id_params) { { system_id: system_id } }
- let(:system_id) { 'id1' }
- let(:user) { admin }
- let(:api_params) { { admin_mode: true } }
+ let(:extra_query_parts) { { system_id: 'id1' } }
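+ # 'id1' above matches the system_xid of the project runner's manager created in the `jobs` let_it_be.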
+ let(:current_user) { user }
- it 'returns jobs from the runner manager' do
- request
+ context 'with project runner' do
+ let(:runner) { project_runner }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_limited_pagination_headers
- expect(response.headers).not_to include('X-Total', 'X-Total-Pages')
-
- expect(json_response).to match([
- a_hash_including('id' => jobs[3].id),
- a_hash_including('id' => jobs[4].id)
- ])
- end
-
- context 'when system_id does not match runner' do
- let(:runner_id) { shared_runner.id }
-
- it 'does not return jobs' do
- request
+ it 'returns jobs from the runner manager' do
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_limited_pagination_headers
+ expect(response.headers).not_to include('X-Total', 'X-Total-Pages')
+ expect(json_response).to match([
+ a_hash_including('id' => jobs[3].id),
+ a_hash_including('id' => jobs[4].id)
+ ])
+ end
+ end
+
+ context 'when system_id does not match runner', :enable_admin_mode do
+ let(:current_user) { admin }
+ let(:runner) { shared_runner }
+
+ it 'does not return jobs' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
end
end
@@ -1173,16 +1532,20 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
shared_examples_for 'unauthorized access to runners list' do
context 'authorized user without maintainer privileges' do
+ let(:current_user) { user2 }
+
it "does not return group's runners" do
- get api("/#{entity_type}/#{entity.id}/runners", user2)
+ perform_request
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'unauthorized user' do
+ let(:current_user) { nil }
+
it "does not return project's runners" do
- get api("/#{entity_type}/#{entity.id}/runners")
+ perform_request
expect(response).to have_gitlab_http_status(:unauthorized)
end
@@ -1190,16 +1553,26 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
describe 'GET /projects/:id/runners' do
- context 'authorized user with maintainer privileges' do
+ let(:path) { "/projects/#{project.id}/runners?#{query_path}" }
+
+ subject(:perform_request) { get api(path, current_user) }
+
+ context 'admin user', :enable_admin_mode do
+ let(:current_user) { admin }
+
it 'returns response status and headers' do
- get api('/runners/all', admin, admin_mode: true)
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
end
+ end
+
+ context 'authorized user with maintainer privileges' do
+ let(:current_user) { user }
it 'returns all runners' do
- get api("/projects/#{project.id}/runners", user)
+ perform_request
expect(json_response).to match_array [
a_hash_including('description' => 'Project runner', 'active' => true, 'paused' => false),
@@ -1208,86 +1581,124 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
]
end
- it 'filters runners by scope' do
- get api("/projects/#{project.id}/runners?scope=specific", user)
+ context 'when filtering by scope' do
+ let(:query) { { scope: :specific } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
+ it 'filters runners by scope' do
+ perform_request
- expect(json_response).to match_array [
- a_hash_including('description' => 'Project runner'),
- a_hash_including('description' => 'Two projects runner')
- ]
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+
+ expect(json_response).to match_array [
+ a_hash_including('description' => 'Project runner'),
+ a_hash_including('description' => 'Two projects runner')
+ ]
+ end
+
+ context 'and scope is unknown' do
+ let(:query) { { scope: :unknown } }
+
+ it 'avoids filtering' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
end
- it 'avoids filtering if scope is invalid' do
- get api("/projects/#{project.id}/runners?scope=unknown", user)
- expect(response).to have_gitlab_http_status(:bad_request)
- end
+ context 'when filtering by type' do
+ let(:query) { { type: :project_type } }
- it 'filters runners by type' do
- get api("/projects/#{project.id}/runners?type=project_type", user)
+ it 'filters runners by type' do
+ perform_request
- expect(json_response).to match_array [
- a_hash_including('description' => 'Project runner'),
- a_hash_including('description' => 'Two projects runner')
- ]
- end
+ expect(json_response).to match_array [
+ a_hash_including('description' => 'Project runner'),
+ a_hash_including('description' => 'Two projects runner')
+ ]
+ end
- it 'does not filter by invalid type' do
- get api("/projects/#{project.id}/runners?type=bogus", user)
+ context 'and type is invalid' do
+ let(:query) { { type: :bogus } }
- expect(response).to have_gitlab_http_status(:bad_request)
+ it 'does not filter' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
end
context 'with an inactive runner' do
let_it_be(:runner) { create(:ci_runner, :project, :inactive, description: 'Inactive project runner', projects: [project]) }
- it 'filters runners by status' do
- get api("/projects/#{project.id}/runners?paused=true", user)
+ context 'when filtering by paused status' do
+ let(:query) { { paused: true } }
- expect(json_response).to match_array [
- a_hash_including('description' => 'Inactive project runner')
- ]
+ it 'filters runners by status' do
+ perform_request
+
+ expect(json_response).to contain_exactly(
+ a_hash_including('description' => 'Inactive project runner')
+ )
+ end
end
- it 'filters runners by status' do
- get api("/projects/#{project.id}/runners?status=paused", user)
+ context 'when filtering by status' do
+ let(:query) { { status: :paused } }
- expect(json_response).to match_array [
- a_hash_including('description' => 'Inactive project runner')
- ]
+ it 'filters runners by status' do
+ perform_request
+
+ expect(json_response).to contain_exactly(
+ a_hash_including('description' => 'Inactive project runner')
+ )
+ end
+
+ context 'and status is invalid' do
+ let(:query) { { status: :bogus } }
+
+ it 'does not filter by invalid status' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
end
end
- it 'does not filter by invalid status' do
- get api("/projects/#{project.id}/runners?status=bogus", user)
+ context 'when filtering by tag_list' do
+ let(:query) { { tag_list: 'tag1,tag2' } }
- expect(response).to have_gitlab_http_status(:bad_request)
- end
+ before_all do
+ create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2])
+ create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: %w[tag2])
+ end
- it 'filters runners by tag_list' do
- create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2])
- create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: ['tag2'])
+ it 'filters runners by tag_list' do
+ perform_request
- get api("/projects/#{project.id}/runners?tag_list=tag1,tag2", user)
-
- expect(json_response).to match_array [
- a_hash_including('description' => 'Runner tagged with tag1 and tag2')
- ]
+ expect(json_response).to contain_exactly(
+ a_hash_including('description' => 'Runner tagged with tag1 and tag2')
+ )
+ end
end
end
- it_behaves_like 'unauthorized access to runners list' do
- let(:entity_type) { 'projects' }
- let(:entity) { project }
- end
+ it_behaves_like 'unauthorized access to runners list'
end
describe 'GET /groups/:id/runners' do
+ let(:path) { "/groups/#{group.id}/runners?#{query_path}" }
+
+ subject(:perform_request) { get api(path, current_user) }
+
context 'authorized user with maintainer privileges' do
+ let(:current_user) { user }
+
it 'returns all runners' do
- get api("/groups/#{group.id}/runners", user)
+ perform_request
expect(json_response).to match_array(
[
@@ -1297,75 +1708,109 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
context 'filter by type' do
- it 'returns record when valid and present' do
- get api("/groups/#{group.id}/runners?type=group_type", user)
+ let(:query) { { type: type } }
- expect(json_response).to match_array([a_hash_including('description' => 'Group runner A')])
+ context 'with type group_type' do
+ let(:type) { :group_type }
+
+ it 'returns group runners' do
+ perform_request
+
+ expect(json_response).to match_array([a_hash_including('description' => 'Group runner A')])
+ end
end
- it 'returns instance runners when instance_type is specified' do
- get api("/groups/#{group.id}/runners?type=instance_type", user)
+ context 'with type instance_type' do
+ let(:type) { :instance_type }
- expect(json_response).to match_array([a_hash_including('description' => 'Shared runner')])
+ it 'returns instance runners' do
+ perform_request
+
+ expect(json_response).to match_array([a_hash_including('description' => 'Shared runner')])
+ end
end
# TODO: Remove when REST API v5 is implemented (https://gitlab.com/gitlab-org/gitlab/-/issues/351466)
- it 'returns empty result when type does not match' do
- get api("/groups/#{group.id}/runners?type=project_type", user)
+ context 'with type project_type' do
+ let(:type) { :project_type }
- expect(json_response).to be_empty
- end
+ it 'returns empty result when type does not match' do
+ perform_request
- it 'does not filter by invalid type' do
- get api("/groups/#{group.id}/runners?type=bogus", user)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- context 'with an inactive runner' do
- let_it_be(:runner) { create(:ci_runner, :group, :inactive, description: 'Inactive group runner', groups: [group]) }
-
- it 'returns runners by paused state' do
- get api("/groups/#{group.id}/runners?paused=true", user)
-
- expect(json_response).to match_array([a_hash_including('description' => 'Inactive group runner')])
- end
-
- context 'filter runners by status' do
- it 'returns runners by valid status' do
- get api("/groups/#{group.id}/runners?status=paused", user)
-
- expect(json_response).to match_array([a_hash_including('description' => 'Inactive group runner')])
+ expect(json_response).to be_empty
end
+ end
- it 'does not filter by invalid status' do
- get api("/groups/#{group.id}/runners?status=bogus", user)
+ context 'with invalid type' do
+ let(:type) { :bogus }
+
+ it 'does not filter by invalid type' do
+ perform_request
expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
- it 'filters runners by tag_list' do
- create(:ci_runner, :group, description: 'Runner tagged with tag1 and tag2', groups: [group], tag_list: %w[tag1 tag2])
- create(:ci_runner, :group, description: 'Runner tagged with tag2', groups: [group], tag_list: %w[tag1])
+ context 'with an inactive runner' do
+ let_it_be(:runner) { create(:ci_runner, :group, :inactive, description: 'Inactive group runner', groups: [group]) }
- get api("/groups/#{group.id}/runners?tag_list=tag1,tag2", user)
+ context 'when filtering by paused status' do
+ let(:query) { { paused: true } }
- expect(json_response).to match_array([a_hash_including('description' => 'Runner tagged with tag1 and tag2')])
+ it 'filters runners by status' do
+ perform_request
+
+ expect(json_response).to contain_exactly(a_hash_including('description' => 'Inactive group runner'))
+ end
+ end
+
+ context 'when filtering by status' do
+ let(:query) { { status: :paused } }
+
+ it 'returns runners by valid status' do
+ perform_request
+
+ expect(json_response).to contain_exactly(a_hash_including('description' => 'Inactive group runner'))
+ end
+
+ context 'and status is invalid' do
+ let(:query) { { status: :bogus } }
+
+ it 'does not filter by invalid status' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+ end
+
+ context 'when filtering by tag_list' do
+ let(:query) { { tag_list: 'tag1,tag2' } }
+
+ before_all do
+ create(:ci_runner, :group, description: 'Runner tagged with tag1 and tag2', groups: [group], tag_list: %w[tag1 tag2])
+ create(:ci_runner, :group, description: 'Runner tagged with tag2', groups: [group], tag_list: %w[tag2])
+ end
+
+ it 'filters runners by tag_list' do
+ perform_request
+
+ expect(json_response).to contain_exactly(a_hash_including('description' => 'Runner tagged with tag1 and tag2'))
+ end
end
end
- it_behaves_like 'unauthorized access to runners list' do
- let(:entity_type) { 'groups' }
- let(:entity) { group }
- end
+ it_behaves_like 'unauthorized access to runners list'
end
describe 'POST /projects/:id/runners' do
+ let(:params) { { runner_id: runner.id } }
let(:path) { "/projects/#{project.id}/runners" }
+ subject(:perform_request) { post api(path, current_user), params: params }
+
it_behaves_like 'POST request permissions for admin mode' do
let!(:new_project_runner) { create(:ci_runner, :project) }
let(:params) { { runner_id: new_project_runner.id } }
@@ -1375,50 +1820,69 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
context 'authorized user' do
let_it_be(:project_runner2) { create(:ci_runner, :project, projects: [project2]) }
+ let(:current_user) { user }
+ let(:runner) { project_runner2 }
+
it 'enables project runner' do
- expect do
- post api(path, user), params: { runner_id: project_runner2.id }
- end.to change { project.runners.count }.by(+1)
+ expect { perform_request }.to change { project.runners.count }.by(+1)
+
expect(response).to have_gitlab_http_status(:created)
end
- it 'avoids changes when enabling already enabled runner' do
- expect do
- post api(path, user), params: { runner_id: project_runner.id }
- end.to change { project.runners.count }.by(0)
- expect(response).to have_gitlab_http_status(:bad_request)
+ context 'when enabling already enabled runner' do
+ let(:runner) { project_runner }
+
+ it 'avoids changes' do
+ expect { perform_request }.not_to change { project.runners.count }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
- it 'does not enable locked runner' do
- project_runner2.update!(locked: true)
+ context 'when enabling locked runner' do
+ let(:runner) { project_runner2 }
- expect do
- post api(path, user), params: { runner_id: project_runner2.id }
- end.to change { project.runners.count }.by(0)
+ before_all do
+ project_runner2.update!(locked: true)
+ end
- expect(response).to have_gitlab_http_status(:forbidden)
+ it 'does not enable runner' do
+ expect { perform_request }.not_to change { project.runners.count }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
- it 'does not enable shared runner' do
- post api(path, user), params: { runner_id: shared_runner.id }
+ context 'when enabling shared runner' do
+ let(:runner) { shared_runner }
- expect(response).to have_gitlab_http_status(:forbidden)
+ it 'does not enable runner' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
- it 'does not enable group runner' do
- post api(path, user), params: { runner_id: group_runner_a.id }
+ context 'when enabling group runner' do
+ let(:runner) { group_runner_a }
- expect(response).to have_gitlab_http_status(:forbidden)
+ it 'does not enable runner' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
- context 'user is admin' do
+ context 'user is admin', :enable_admin_mode do
+ let(:current_user) { admin }
+
context 'when project runner is used' do
let!(:new_project_runner) { create(:ci_runner, :project) }
+ let(:runner) { new_project_runner }
it 'enables any project runner' do
- expect do
- post api(path, admin, admin_mode: true), params: { runner_id: new_project_runner.id }
- end.to change { project.runners.count }.by(+1)
+ expect { perform_request }.to change { project.runners.count }.by(+1)
+
expect(response).to have_gitlab_http_status(:created)
end
@@ -1427,44 +1891,55 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
create(:plan_limits, :default_plan, ci_registered_project_runners: 1)
end
- it 'does not enable project runner' do
- expect do
- post api(path, admin, admin_mode: true), params: { runner_id: new_project_runner.id }
- end.not_to change { project.runners.count }
+ it 'does not enable runner' do
+ expect { perform_request }.not_to change { project.runners.count }
+
expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
end
- it 'raises an error when no runner_id param is provided' do
- post api(path, admin, admin_mode: true)
+ context 'when no runner_id param is provided' do
+ let(:params) { {} }
- expect(response).to have_gitlab_http_status(:bad_request)
+ it 'raises an error' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when user does not have permissions' do
+ let(:current_user) { user2 }
+ let(:runner) { project_runner }
+
+ it 'does not enable runner' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
end
- context 'user is not admin' do
+ context 'user is not admin and does not have access to project runner' do
let!(:new_project_runner) { create(:ci_runner, :project) }
+ let(:runner) { new_project_runner }
+ let(:current_user) { user }
- it 'does not enable runner without access to' do
- post api(path, user), params: { runner_id: new_project_runner.id }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'authorized user without permissions' do
it 'does not enable runner' do
- post api(path, user2)
+ perform_request
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'unauthorized user' do
+ let(:current_user) { nil }
+ let(:runner) { project_runner }
+
it 'does not enable runner' do
- post api(path)
+ perform_request
expect(response).to have_gitlab_http_status(:unauthorized)
end
@@ -1472,48 +1947,66 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
describe 'DELETE /projects/:id/runners/:runner_id' do
- context 'authorized user' do
- context 'when runner have more than one associated projects' do
- it "disables project's runner" do
- expect do
- delete api("/projects/#{project.id}/runners/#{two_projects_runner.id}", user)
+ let(:runner_id) { runner.id }
+ let(:path) { "/projects/#{project.id}/runners/#{runner_id}" }
- expect(response).to have_gitlab_http_status(:no_content)
- end.to change { project.runners.count }.by(-1)
+ subject(:perform_request) { delete api(path, current_user) }
+
+ context 'authorized user' do
+ let(:current_user) { user }
+
+ context 'when runner has more than one associated project' do
+ let(:runner) { two_projects_runner }
+
+ it "disables project's runner" do
+ expect { perform_request }.to change { project.runners.count }.by(-1)
+
+ expect(response).to have_gitlab_http_status(:no_content)
end
it_behaves_like '412 response' do
- let(:request) { api("/projects/#{project.id}/runners/#{two_projects_runner.id}", user) }
+ let(:request) { api(path, current_user) }
end
end
context 'when runner has one associated project' do
+ let(:runner) { project_runner }
+
it "does not disable project's runner" do
- expect do
- delete api("/projects/#{project.id}/runners/#{project_runner.id}", user)
- end.to change { project.runners.count }.by(0)
+ expect { perform_request }.not_to change { project.runners.count }
+
expect(response).to have_gitlab_http_status(:forbidden)
end
end
- it 'returns 404 is runner is not found' do
- delete api("/projects/#{project.id}/runners/0", user)
+ context 'when runner is not found' do
+ let(:runner_id) { non_existing_record_id }
- expect(response).to have_gitlab_http_status(:not_found)
+ it 'returns 404' do
+ perform_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
end
context 'authorized user without permissions' do
+ let(:current_user) { user2 }
+ let(:runner) { project_runner }
+
it "does not disable project's runner" do
- delete api("/projects/#{project.id}/runners/#{project_runner.id}", user2)
+ perform_request
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'unauthorized user' do
+ let(:current_user) { nil }
+ let(:runner) { project_runner }
+
it "does not disable project's runner" do
- delete api("/projects/#{project.id}/runners/#{project_runner.id}")
+ perform_request
expect(response).to have_gitlab_http_status(:unauthorized)
end
diff --git a/spec/requests/api/debian_group_packages_spec.rb b/spec/requests/api/debian_group_packages_spec.rb
index 25b99862100..ff291def0bd 100644
--- a/spec/requests/api/debian_group_packages_spec.rb
+++ b/spec/requests/api/debian_group_packages_spec.rb
@@ -96,7 +96,7 @@ RSpec.describe API::DebianGroupPackages, feature_category: :package_registry do
describe 'GET groups/:id/-/packages/debian/pool/:codename/:project_id/:letter/:package_name/:package_version/:file_name' do
using RSpec::Parameterized::TableSyntax
- let(:url) { "/groups/#{container.id}/-/packages/debian/pool/#{package.debian_distribution.codename}/#{project.id}/#{letter}/#{package.name}/#{package.version}/#{file_name}" }
+ let(:url) { "/groups/#{container.id}/-/packages/debian/pool/#{package.distribution.codename}/#{project.id}/#{letter}/#{package.name}/#{package.version}/#{file_name}" }
let(:file_name) { params[:file_name] }
where(:file_name, :success_body) do
diff --git a/spec/requests/api/debian_project_packages_spec.rb b/spec/requests/api/debian_project_packages_spec.rb
index 7f3f633a35c..e8abf91451b 100644
--- a/spec/requests/api/debian_project_packages_spec.rb
+++ b/spec/requests/api/debian_project_packages_spec.rb
@@ -116,7 +116,7 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d
describe 'GET projects/:id/packages/debian/pool/:codename/:letter/:package_name/:package_version/:file_name' do
using RSpec::Parameterized::TableSyntax
- let(:url) { "/projects/#{container.id}/packages/debian/pool/#{package.debian_distribution.codename}/#{letter}/#{package.name}/#{package.version}/#{file_name}" }
+ let(:url) { "/projects/#{container.id}/packages/debian/pool/#{package.distribution.codename}/#{letter}/#{package.name}/#{package.version}/#{file_name}" }
let(:file_name) { params[:file_name] }
where(:file_name, :success_body) do
diff --git a/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb b/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
index 6d270f682c1..e1a8304dce6 100644
--- a/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
+++ b/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
@@ -4,11 +4,9 @@ require 'spec_helper'
RSpec.describe 'getting a detailed sentry error', feature_category: :error_tracking do
include GraphqlHelpers
- # user should have `last_on_activity` set to today,
- # so that `Users::ActivityService` does not register any more updates.
- let_it_be(:current_user) { create(:user, :with_last_activity_on_today) }
- let_it_be(:project) { create(:project, :repository, namespace: create(:namespace, owner: current_user)) }
+ let_it_be(:project) { create(:project, :repository) }
let_it_be(:project_setting) { create(:project_error_tracking_setting, project: project) }
+ let_it_be(:current_user) { project.first_owner }
let_it_be(:sentry_detailed_error) { build(:error_tracking_sentry_detailed_error) }
let(:sentry_gid) { sentry_detailed_error.to_global_id.to_s }
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index b1e5b226e70..5fcc9ef4dd2 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -441,7 +441,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
statistics = json_response.find { |p| p['id'] == project.id }['statistics']
expect(statistics).to be_present
- expect(statistics).to include('commit_count', 'storage_size', 'repository_size', 'wiki_size', 'lfs_objects_size', 'job_artifacts_size', 'pipeline_artifacts_size', 'snippets_size', 'packages_size', 'uploads_size')
+ expect(statistics).to include('commit_count', 'storage_size', 'repository_size', 'wiki_size', 'lfs_objects_size', 'job_artifacts_size', 'pipeline_artifacts_size', 'snippets_size', 'packages_size', 'uploads_size', 'container_registry_size')
end
it "does not include license by default" do
diff --git a/spec/requests/api/remote_mirrors_spec.rb b/spec/requests/api/remote_mirrors_spec.rb
index 369ba74f90e..77f30122414 100644
--- a/spec/requests/api/remote_mirrors_spec.rb
+++ b/spec/requests/api/remote_mirrors_spec.rb
@@ -139,43 +139,6 @@ RSpec.describe API::RemoteMirrors, feature_category: :source_code_management do
end
end
- context 'when feature flag "use_remote_mirror_create_service" is disabled' do
- before do
- stub_feature_flags(use_remote_mirror_create_service: false)
- end
-
- context 'creates a remote mirror' do
- context 'disabled by default' do
- let(:params) { { url: 'https://foo:bar@test.com' } }
-
- it_behaves_like 'creates a remote mirror'
- end
-
- context 'enabled' do
- let(:params) { { url: 'https://foo:bar@test.com', enabled: true } }
-
- it_behaves_like 'creates a remote mirror'
- end
-
- context 'auth method' do
- let(:params) { { url: 'https://foo:bar@test.com', enabled: true, auth_method: 'ssh_public_key' } }
-
- it_behaves_like 'creates a remote mirror'
- end
- end
-
- it 'returns error if url is invalid' do
- project.add_maintainer(user)
-
- post api(route, user), params: { url: 'ftp://foo:bar@test.com' }
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']['url']).to match_array(
- ["is blocked: Only allowed schemes are http, https, ssh, git"]
- )
- end
- end
-
it 'returns error if url is invalid' do
project.add_maintainer(user)
diff --git a/spec/requests/api/usage_data_spec.rb b/spec/requests/api/usage_data_spec.rb
index 3bad1146e78..0d714572294 100644
--- a/spec/requests/api/usage_data_spec.rb
+++ b/spec/requests/api/usage_data_spec.rb
@@ -178,7 +178,7 @@ RSpec.describe API::UsageData, feature_category: :service_ping do
context 'with unknown event' do
it 'returns status ok' do
- expect(Gitlab::Redis::HLL).not_to receive(:add).with(hash_including(key: unknown_event))
+ expect(Gitlab::Redis::HLL).not_to receive(:add)
post api(endpoint, user), params: { event: unknown_event }
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 0af6dc1272d..ec00baf8041 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -1608,10 +1608,6 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
end
context 'updating password' do
- # user should have `last_on_activity` set to today,
- # so that `Users::ActivityService` does not register any more updates.
- let_it_be(:admin) { create(:admin, :with_last_activity_on_today) }
-
def update_password(user, admin, password = User.random_password)
put api("/users/#{user.id}", admin, admin_mode: true), params: { password: password }
end
diff --git a/spec/requests/projects/merge_requests_controller_spec.rb b/spec/requests/projects/merge_requests_controller_spec.rb
index 1e6298a2d55..7ca1f490154 100644
--- a/spec/requests/projects/merge_requests_controller_spec.rb
+++ b/spec/requests/projects/merge_requests_controller_spec.rb
@@ -3,11 +3,9 @@
require 'spec_helper'
RSpec.describe Projects::MergeRequestsController, feature_category: :source_code_management do
- # user should have `last_on_activity` set to today,
- # so that `Users::ActivityService` does not register any more updates.
- let_it_be(:user) { create(:user, :with_last_activity_on_today, :with_namespace) }
- let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
- let_it_be(:merge_request) { create(:merge_request, source_project: project, author: user) }
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:project) { merge_request.project }
+ let_it_be(:user) { merge_request.author }
describe 'GET #show' do
let_it_be(:group) { create(:group) }
diff --git a/spec/requests/projects/merge_requests_spec.rb b/spec/requests/projects/merge_requests_spec.rb
index 1dda6a453cf..e57808e6728 100644
--- a/spec/requests/projects/merge_requests_spec.rb
+++ b/spec/requests/projects/merge_requests_spec.rb
@@ -3,10 +3,7 @@
require 'spec_helper'
RSpec.describe 'merge requests actions', feature_category: :source_code_management do
- # user should have `last_on_activity` set to today,
- # so that `Users::ActivityService` does not register any more updates.
- let_it_be(:user) { create(:user, :with_last_activity_on_today) }
- let_it_be(:project) { create(:project, :repository, namespace: create(:namespace, owner: user)) }
+ let_it_be(:project) { create(:project, :repository) }
let(:merge_request) do
create(
@@ -18,6 +15,7 @@ RSpec.describe 'merge requests actions', feature_category: :source_code_manageme
)
end
+ let(:user) { project.first_owner }
let(:user2) { create(:user) }
before do
diff --git a/spec/rubocop/cop/search/avoid_checking_finished_on_deprecated_migrations_spec.rb b/spec/rubocop/cop/search/avoid_checking_finished_on_deprecated_migrations_spec.rb
index 9853423e758..1a9f9573b04 100644
--- a/spec/rubocop/cop/search/avoid_checking_finished_on_deprecated_migrations_spec.rb
+++ b/spec/rubocop/cop/search/avoid_checking_finished_on_deprecated_migrations_spec.rb
@@ -7,8 +7,8 @@ RSpec.describe RuboCop::Cop::Search::AvoidCheckingFinishedOnDeprecatedMigrations
context 'when a deprecated class is used with migration_has_finished?' do
it 'flags it as an offense' do
expect_offense <<~SOURCE
- return if Elastic::DataMigrationService.migration_has_finished?(:backfill_project_permissions_in_blobs_using_permutations)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Migration is deprecated and can not be used with `migration_has_finished?`.
+ return if Elastic::DataMigrationService.migration_has_finished?(:backfill_archived_on_issues)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Migration is deprecated and can not be used with `migration_has_finished?`.
SOURCE
end
end
@@ -24,7 +24,7 @@ RSpec.describe RuboCop::Cop::Search::AvoidCheckingFinishedOnDeprecatedMigrations
context 'when migration_has_finished? method is called on another class' do
it 'does not flag it as an offense' do
expect_no_offenses <<~SOURCE
- return if Klass.migration_has_finished?(:backfill_project_permissions_in_blobs_using_permutations)
+ return if Klass.migration_has_finished?(:backfill_archived_on_issues)
SOURCE
end
end
diff --git a/spec/services/ci/partitions/create_service_spec.rb b/spec/services/ci/partitions/create_service_spec.rb
index a6630d50e42..90d0bb28d90 100644
--- a/spec/services/ci/partitions/create_service_spec.rb
+++ b/spec/services/ci/partitions/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Partitions::CreateService, feature_category: :continuous_integration do
+RSpec.describe Ci::Partitions::CreateService, feature_category: :ci_scaling do
let_it_be(:ci_partition) { create(:ci_partition, :current) }
let(:service) { described_class.new(ci_partition) }
@@ -33,7 +33,7 @@ RSpec.describe Ci::Partitions::CreateService, feature_category: :continuous_inte
context 'when all conditions are satisfied' do
before do
- stub_const("#{described_class}::MAX_PARTITION_SIZE", 1.byte)
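+ # Assumption: should_create_next? wraps the partition-size check, so stubbing it keeps this example decoupled from MAX_PARTITION_SIZE.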
+ allow(service).to receive(:should_create_next?).and_return(true)
end
it 'creates the next ci_partition' do
@@ -47,7 +47,7 @@ RSpec.describe Ci::Partitions::CreateService, feature_category: :continuous_inte
context 'when database_partition sizes are above the threshold' do
before do
- stub_const("#{described_class}::MAX_PARTITION_SIZE", 1.byte)
+ stub_const("Ci::Partition::MAX_PARTITION_SIZE", 1.byte)
end
context 'when no more headroom available' do
diff --git a/spec/services/ci/partitions/sync_service_spec.rb b/spec/services/ci/partitions/sync_service_spec.rb
new file mode 100644
index 00000000000..4df600ff823
--- /dev/null
+++ b/spec/services/ci/partitions/sync_service_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Partitions::SyncService, feature_category: :ci_scaling do
+ let_it_be_with_reload(:ci_partition) { create(:ci_partition, :current) }
+ let_it_be_with_reload(:next_ci_partition) { create(:ci_partition) }
+ let(:service) { described_class.new(ci_partition) }
+ let(:current_status) { Ci::Partition.statuses[:current] }
+ let(:preparing_status) { Ci::Partition.statuses[:preparing] }
+ let(:active_status) { Ci::Partition.statuses[:active] }
+ let(:ready_status) { Ci::Partition.statuses[:ready] }
+
+ describe '.execute' do
+ subject(:execute_service) { service.execute }
+
+ shared_examples 'ci_partitions not updated' do
+ it 'does not update ci_partition to ready', :aggregate_failures do
+ expect { execute_service }
+ .to not_change { ci_partition.reload.status }
+ .and not_change { next_ci_partition.reload.status }
+ end
+ end
+
+ context 'when ci_partitioning_automation is disabled' do
+ before do
+ stub_feature_flags(ci_partitioning_automation: false)
+ end
+
+ it_behaves_like 'ci_partitions not updated'
+ end
+
+ context 'when ci_partition is nil' do
+ let(:ci_partition) { nil }
+
+ it 'does not perform any action' do
+ expect(service).not_to receive(:sync_partitions_statuses)
+ expect(service).not_to receive(:write_to_next_partition)
+
+ execute_service
+ end
+ end
+
+ context 'when all conditions are satisfied' do
+ before do
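+ # Assumption: above_threshold? and all_partitions_exist? gate the promotion,
+ # so both are stubbed so the status transitions below can run.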
+ allow(service).to receive(:above_threshold?).and_return(true)
+ allow_next_found_instance_of(Ci::Partition) do |partition|
+ allow(partition).to receive(:all_partitions_exist?).and_return(true)
+ end
+ end
+
+ it 'updates ci_partitions statuses', :aggregate_failures do
+ expect { execute_service }
+ .to change { ci_partition.reload.status }.from(current_status).to(active_status)
+ .and change { next_ci_partition.reload.status }.from(preparing_status).to(current_status)
+ end
+ end
+
+ context 'when database_partitions are not above the threshold' do
+ before do
+ allow_next_found_instance_of(Ci::Partition) do |partition|
+ allow(partition).to receive(:all_partitions_exist?).and_return(true)
+ end
+ end
+
+ it 'updates the next ci_partition status to ready' do
+ expect { execute_service }.to change { next_ci_partition.reload.status }.from(preparing_status).to(ready_status)
+ end
+ end
+
+ context 'when next_partition is not ready' do
+ before do
+ allow_next_found_instance_of(Ci::Partition) do |partition|
+ allow(partition).to receive(:all_partitions_exist?).and_return(false)
+ end
+ end
+
+ it 'does not update ci_partitions statuses', :aggregate_failures do
+ expect { execute_service }
+ .to not_change { ci_partition.reload.status }
+ .and not_change { next_ci_partition.reload.status }
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/debian/find_or_create_incoming_service_spec.rb b/spec/services/packages/debian/find_or_create_incoming_service_spec.rb
index 27c389b5312..ce9682031a6 100644
--- a/spec/services/packages/debian/find_or_create_incoming_service_spec.rb
+++ b/spec/services/packages/debian/find_or_create_incoming_service_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Packages::Debian::FindOrCreateIncomingService, feature_category:
expect(package.name).to eq('incoming')
expect(package.version).to be_nil
expect(package.package_type).to eq('debian')
- expect(package.debian_incoming?).to be_truthy
+ expect(package.incoming?).to be_truthy
end
it_behaves_like 'assigns the package creator'
diff --git a/spec/services/packages/debian/process_package_file_service_spec.rb b/spec/services/packages/debian/process_package_file_service_spec.rb
index d4e37403b87..5176b068190 100644
--- a/spec/services/packages/debian/process_package_file_service_spec.rb
+++ b/spec/services/packages/debian/process_package_file_service_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe Packages::Debian::ProcessPackageFileService, feature_category: :p
let(:expected_error) { ArgumentError }
let(:expected_message) do
- "Debian package sample 1.2.3~alpha2 exists in distribution #{matching_package.debian_distribution.codename}"
+ "Debian package sample 1.2.3~alpha2 exists in distribution #{matching_package.distribution.codename}"
end
it_behaves_like 'raises error'
@@ -111,7 +111,7 @@ RSpec.describe Packages::Debian::ProcessPackageFileService, feature_category: :p
.and change { package.reload.name }.to('sample')
.and change { package.version }.to('1.2.3~alpha2')
.and change { package.status }.from('processing').to('default')
- .and change { package.debian_publication }.from(nil)
+ .and change { package.publication }.from(nil)
.and change { debian_file_metadatum.file_type }.from('unknown').to('changes')
.and not_change { debian_file_metadatum.component }
end
@@ -129,7 +129,7 @@ RSpec.describe Packages::Debian::ProcessPackageFileService, feature_category: :p
.and change { package.reload.name }.to('sample')
.and change { package.version }.to('1.2.3~alpha2')
.and change { package.status }.from('processing').to('default')
- .and change { package.debian_publication }.from(nil)
+ .and change { package.publication }.from(nil)
.and change { debian_file_metadatum.file_type }.from('unknown').to(expected_file_type)
.and change { debian_file_metadatum.component }.from(nil).to(component_name)
end
@@ -138,7 +138,8 @@ RSpec.describe Packages::Debian::ProcessPackageFileService, feature_category: :p
context 'with a changes file' do
let!(:incoming) { create(:debian_incoming, project: distribution.project) }
let!(:temporary_with_changes) { create(:debian_temporary_with_changes, project: distribution.project) }
- let(:package) { temporary_with_changes }
+ # Reload factory to reset associations cache for package files
+ let(:package) { temporary_with_changes.reload }
let(:package_file) { temporary_with_changes.package_files.first }
let(:distribution_name) { nil }
@@ -268,7 +269,8 @@ RSpec.describe Packages::Debian::ProcessPackageFileService, feature_category: :p
context 'with a package file' do
let!(:temporary_with_files) { create(:debian_temporary_with_files, project: distribution.project) }
- let(:package) { temporary_with_files }
+ # Reload factory to reset associations cache for package files
+ let(:package) { temporary_with_files.reload }
let(:package_file) { package.package_files.with_file_name('libsample0_1.2.3~alpha2_amd64.deb').first }
let(:distribution_name) { distribution.codename }
@@ -387,7 +389,8 @@ RSpec.describe Packages::Debian::ProcessPackageFileService, feature_category: :p
context 'with a changes file' do
let!(:incoming) { create(:debian_incoming, project: distribution.project) }
let!(:temporary_with_changes) { create(:debian_temporary_with_changes, project: distribution.project) }
- let(:package) { temporary_with_changes }
+ # Reload factory to reset associations cache for package files
+ let(:package) { temporary_with_changes.reload }
let(:package_file) { temporary_with_changes.package_files.first }
let(:distribution_name) { nil }
@@ -398,7 +401,8 @@ RSpec.describe Packages::Debian::ProcessPackageFileService, feature_category: :p
context 'with a package file' do
let!(:temporary_with_files) { create(:debian_temporary_with_files, project: distribution.project) }
- let(:package) { temporary_with_files }
+ # Reload factory to reset associations cache for package files
+ let(:package) { temporary_with_files.reload }
let(:package_file) { package.package_files.with_file_name('libsample0_1.2.3~alpha2_amd64.deb').first }
let(:distribution_name) { distribution.codename }
diff --git a/spec/services/users/activity_service_spec.rb b/spec/services/users/activity_service_spec.rb
index 3f8570da463..3e0f9b829ce 100644
--- a/spec/services/users/activity_service_spec.rb
+++ b/spec/services/users/activity_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Users::ActivityService, feature_category: :user_profile do
+ include ExclusiveLeaseHelpers
+
let(:user) { create(:user, last_activity_on: last_activity_on) }
subject { described_class.new(author: user) }
@@ -39,6 +41,12 @@ RSpec.describe Users::ActivityService, feature_category: :user_profile do
.to(Date.today)
end
+ it 'tries to obtain ExclusiveLease' do
+ expect(Gitlab::ExclusiveLease).to receive(:new).with("activity_service:#{user.id}", anything).and_call_original
+
+ subject.execute
+ end
+
it 'tracks RedisHLL event' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter)
.to receive(:track_event)
@@ -82,6 +90,12 @@ RSpec.describe Users::ActivityService, feature_category: :user_profile do
let(:last_activity_on) { Date.today }
it_behaves_like 'does not update last_activity_on'
+
+ it 'does not try to obtain ExclusiveLease' do
+ expect(Gitlab::ExclusiveLease).not_to receive(:new).with("activity_service:#{user.id}", anything)
+
+ subject.execute
+ end
end
context 'when in GitLab read-only instance' do
@@ -93,6 +107,16 @@ RSpec.describe Users::ActivityService, feature_category: :user_profile do
it_behaves_like 'does not update last_activity_on'
end
+
+ context 'when a lease could not be obtained' do
+ let(:last_activity_on) { nil }
+
+ before do
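+ # The 1-minute timeout is assumed to mirror the lease TTL taken by Users::ActivityService.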
+ stub_exclusive_lease_taken("activity_service:#{user.id}", timeout: 1.minute.to_i)
+ end
+
+ it_behaves_like 'does not update last_activity_on'
+ end
end
context 'with DB Load Balancing' do
diff --git a/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb b/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb
index 25e15cdcd09..dba3a74658f 100644
--- a/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb
+++ b/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb
@@ -55,7 +55,7 @@ RSpec.shared_examples 'every metric definition' do
let(:metric_files_with_schema) do
Gitlab::Usage::MetricDefinition
.definitions
- .select { |_, v| v.respond_to?(:value_json_schema) }
+ .select { |_, v| v.value_json_schema }
end
let(:expected_metric_files_key_paths) { metric_files_key_paths }
diff --git a/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb b/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
index 3ea2ff4d8f0..07e2d1bdfec 100644
--- a/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
+++ b/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
@@ -207,7 +207,7 @@ RSpec.shared_examples 'Debian Distribution with project container' do
.with_foreign_key(:distribution_id)
end
- it { is_expected.to have_many(:packages).class_name('Packages::Package').through(:publications) }
+ it { is_expected.to have_many(:packages).class_name('Packages::Debian::Package').through(:publications) }
end
end
end
diff --git a/spec/workers/bulk_imports/pipeline_batch_worker_spec.rb b/spec/workers/bulk_imports/pipeline_batch_worker_spec.rb
index d7d3bcff17f..4d029fdcdd2 100644
--- a/spec/workers/bulk_imports/pipeline_batch_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_batch_worker_spec.rb
@@ -99,6 +99,20 @@ RSpec.describe BulkImports::PipelineBatchWorker, feature_category: :importers do
expect(batch.reload).to be_skipped
end
end
+
+ context 'when tracker is canceled' do
+ let(:tracker) { create(:bulk_import_tracker, :canceled) }
+
+ it 'skips and logs the batch' do
+ expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:info).with(a_hash_including('message' => 'Batch tracker canceled'))
+ end
+
+ worker.perform(batch.id)
+
+ expect(batch.reload).to be_canceled
+ end
+ end
end
context 'with batch status' do
@@ -131,6 +145,18 @@ RSpec.describe BulkImports::PipelineBatchWorker, feature_category: :importers do
expect(batch.reload).to be_finished
end
end
+
+ context 'when batch status is canceled' do
+ let(:batch) { create(:bulk_import_batch_tracker, :canceled, tracker: tracker) }
+
+ it 'stays canceled and does not execute' do
+ expect(batch).not_to receive(:start!)
+
+ worker.perform(batch.id)
+
+ expect(batch.reload).to be_canceled
+ end
+ end
end
context 'when exclusive lease cannot be obtained' do
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index bc80bed6c24..282603cf5ec 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -250,6 +250,23 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
end
end
+ context 'when pipeline is canceled' do
+ let(:pipeline_tracker) do
+ create(
+ :bulk_import_tracker,
+ :canceled,
+ entity: entity,
+ pipeline_name: 'FakePipeline'
+ )
+ end
+
+ it 'no-ops and returns' do
+ expect(described_class).not_to receive(:run)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+ end
+ end
+
context 'when tracker is started' do
it 'runs the pipeline' do
pipeline_tracker = create(
@@ -293,6 +310,31 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
end
end
+ context 'when entity is canceled' do
+ it 'marks tracker as canceled and logs the cancel' do
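+ # Assumption: -2 is the canceled state in the entity state machine, per this context's description.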
+ entity.update!(status: -2)
+
+ pipeline_tracker = create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'FakePipeline',
+ status_event: 'enqueue'
+ )
+
+ expect_next_instance_of(BulkImports::Logger) do |logger|
+ allow(logger).to receive(:info)
+
+ expect(logger)
+ .to receive(:info)
+ .with(hash_including(message: 'Canceling pipeline due to canceled entity'))
+ end
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ expect(pipeline_tracker.reload.status_name).to eq(:canceled)
+ end
+ end
+
context 'when retry pipeline error is raised' do
let(:pipeline_tracker) do
create(
diff --git a/spec/workers/ci/partitioning_worker_spec.rb b/spec/workers/ci/partitioning_worker_spec.rb
index 81263268249..a12e89d616a 100644
--- a/spec/workers/ci/partitioning_worker_spec.rb
+++ b/spec/workers/ci/partitioning_worker_spec.rb
@@ -2,14 +2,49 @@
require 'spec_helper'
-RSpec.describe Ci::PartitioningWorker, feature_category: :continuous_integration do
+RSpec.describe Ci::PartitioningWorker, feature_category: :ci_scaling do
describe '#perform' do
- subject(:execute_worker) { described_class.new.perform }
+ subject(:perform) { described_class.new.perform }
+
+ let(:default_service) { instance_double(Ci::Partitions::SetupDefaultService) }
it 'calls setup default service' do
- expect(Ci::Partitions::SetupDefaultService).to receive_message_chain(:new, :execute)
+ expect(Ci::Partitions::SetupDefaultService).to receive(:new).and_return(default_service)
+ expect(default_service).to receive(:execute)
- execute_worker
+ perform
+ end
+
+ context 'when current partition does not exist' do
+ before do
+ allow(Ci::Partition).to receive(:current).and_return(nil)
+ end
+
+ it 'does not call services', :aggregate_failures do
+ expect(Ci::Partitions::CreateService).not_to receive(:new)
+ expect(Ci::Partitions::SyncService).not_to receive(:new)
+
+ perform
+ end
+ end
+
+ context 'when current partition exists' do
+ let(:create_service) { instance_double(Ci::Partitions::CreateService) }
+ let(:sync_service) { instance_double(Ci::Partitions::SyncService) }
+
+ it 'calls create service' do
+ expect(Ci::Partitions::CreateService).to receive(:new).and_return(create_service)
+ expect(create_service).to receive(:execute)
+
+ perform
+ end
+
+ it 'calls sync service' do
+ expect(Ci::Partitions::SyncService).to receive(:new).and_return(sync_service)
+ expect(sync_service).to receive(:execute)
+
+ perform
+ end
end
end
end
diff --git a/spec/workers/packages/debian/process_package_file_worker_spec.rb b/spec/workers/packages/debian/process_package_file_worker_spec.rb
index 0c60633ef45..825fc3034ba 100644
--- a/spec/workers/packages/debian/process_package_file_worker_spec.rb
+++ b/spec/workers/packages/debian/process_package_file_worker_spec.rb
@@ -222,7 +222,7 @@ RSpec.describe Packages::Debian::ProcessPackageFileWorker, type: :worker, featur
.and change { package.reload.name }.to('sample')
.and change { package.version }.to('1.2.3~alpha2')
.and change { package.status }.from('processing').to('default')
- .and change { package.debian_publication }.from(nil)
+ .and change { package.publication }.from(nil)
.and change { debian_file_metadatum.reload.file_type }.from('unknown').to('changes')
.and not_change { debian_file_metadatum.component }
end
@@ -255,7 +255,7 @@ RSpec.describe Packages::Debian::ProcessPackageFileWorker, type: :worker, featur
.and change { package.reload.name }.to('sample')
.and change { package.version }.to('1.2.3~alpha2')
.and change { package.status }.from('processing').to('default')
- .and change { package.debian_publication }.from(nil)
+ .and change { package.publication }.from(nil)
.and change { debian_file_metadatum.reload.file_type }.from('unknown').to(expected_file_type)
.and change { debian_file_metadatum.component }.from(nil).to(component_name)
end