+
### Group vulnerability report by OWASP Top 10 2017 is deprecated
diff --git a/doc/user/application_security/policies/scan_execution_policies.md b/doc/user/application_security/policies/scan_execution_policies.md
index 08c28ccddf6..a5d90448520 100644
--- a/doc/user/application_security/policies/scan_execution_policies.md
+++ b/doc/user/application_security/policies/scan_execution_policies.md
@@ -140,6 +140,7 @@ This rule enforces the defined actions whenever the pipeline runs for a selected
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/152855) a new application setting `security_policy_scheduled_scans_max_concurrency` in GitLab 17.1. The concurrency limit applies when both the `scan_execution_pipeline_worker` and `scan_execution_pipeline_concurrency_control` feature flags are enabled.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/158636) a concurrency limit for scan execution scheduled jobs in GitLab 17.3 [with a flag](../../../administration/feature_flags.md) named `scan_execution_pipeline_concurrency_control`.
> - [Enabled](https://gitlab.com/gitlab-org/gitlab/-/issues/451890) the `scan_execution_pipeline_worker` feature flag on GitLab.com in GitLab 17.5.
+> - [Removed](https://gitlab.com/gitlab-org/gitlab/-/issues/451890) the `scan_execution_pipeline_worker` feature flag in GitLab 17.6.
> - [Enabled](https://gitlab.com/gitlab-org/gitlab/-/issues/463802) the `scan_execution_pipeline_concurrency_control` feature flag on GitLab.com in GitLab 17.6.
WARNING:
diff --git a/lib/gitlab/background_migration/backfill_protected_branch_merge_access_levels_protected_branch_namespace_id.rb b/lib/gitlab/background_migration/backfill_protected_branch_merge_access_levels_protected_branch_namespace_id.rb
new file mode 100644
index 00000000000..a06d59345bd
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_protected_branch_merge_access_levels_protected_branch_namespace_id.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ class BackfillProtectedBranchMergeAccessLevelsProtectedBranchNamespaceId < BackfillDesiredShardingKeyJob
+ operation_name :backfill_protected_branch_merge_access_levels_protected_branch_namespace_id
+ feature_category :source_code_management
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/backfill_protected_branch_merge_access_levels_protected_branch_project_id.rb b/lib/gitlab/background_migration/backfill_protected_branch_merge_access_levels_protected_branch_project_id.rb
new file mode 100644
index 00000000000..1a8d6f584cf
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_protected_branch_merge_access_levels_protected_branch_project_id.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ class BackfillProtectedBranchMergeAccessLevelsProtectedBranchProjectId < BackfillDesiredShardingKeyJob
+ operation_name :backfill_protected_branch_merge_access_levels_protected_branch_project_id
+ feature_category :source_code_management
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/backfill_status_page_published_incidents_namespace_id.rb b/lib/gitlab/background_migration/backfill_status_page_published_incidents_namespace_id.rb
new file mode 100644
index 00000000000..f2046fe7a59
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_status_page_published_incidents_namespace_id.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ class BackfillStatusPagePublishedIncidentsNamespaceId < BackfillDesiredShardingKeyJob
+ operation_name :backfill_status_page_published_incidents_namespace_id
+ feature_category :incident_management
+ end
+ end
+end
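The three backfill jobs above are thin subclasses of `BackfillDesiredShardingKeyJob`; the post-deployment migrations that enqueue them are exercised by the `spec/migrations` files later in this diff. For orientation, a queueing migration for the status-page job would look roughly like the sketch below, following GitLab's usual batched-background-migration conventions (the milestone and batch sizes are assumptions, not taken from this diff):

```ruby
# Sketch only: the real queueing migration is not part of this diff.
class QueueBackfillStatusPagePublishedIncidentsNamespaceId < Gitlab::Database::Migration[2.2]
  milestone '17.8' # assumed
  restrict_gitlab_migration gitlab_schema: :gitlab_main_cell

  MIGRATION = 'BackfillStatusPagePublishedIncidentsNamespaceId'
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 1000     # assumed
  SUB_BATCH_SIZE = 100  # assumed

  def up
    # Job arguments mirror the 'desired sharding key backfill job' spec below:
    # backfill :namespace_id from :issues, joined through :issue_id.
    queue_batched_background_migration(
      MIGRATION,
      :status_page_published_incidents,
      :id,
      :namespace_id,
      :issues,
      :namespace_id,
      :issue_id,
      job_interval: DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    delete_batched_background_migration(
      MIGRATION,
      :status_page_published_incidents,
      :id,
      [:namespace_id, :issues, :namespace_id, :issue_id]
    )
  end
end
```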
diff --git a/lib/gitlab/ci/tags/bulk_insert.rb b/lib/gitlab/ci/tags/bulk_insert.rb
index 70eb3200026..2f8831f9354 100644
--- a/lib/gitlab/ci/tags/bulk_insert.rb
+++ b/lib/gitlab/ci/tags/bulk_insert.rb
@@ -95,7 +95,7 @@ module Gitlab
tags.each do |tag|
accumulator[:taggings] << tagging_attributes(tag, taggable) if polymorphic_taggings_available?
- if monomorphic_taggings_available?
+ if monomorphic_taggings_available?(taggable)
accumulator[:monomorphic_taggings] << monomorphic_taggings_record(tag, taggable)
end
end
@@ -129,8 +129,8 @@ module Gitlab
end
end
- def monomorphic_taggings_available?
- config.monomorphic_taggings?
+ def monomorphic_taggings_available?(taggable)
+ config.monomorphic_taggings?(taggable)
end
def polymorphic_taggings_available?
diff --git a/lib/gitlab/ci/tags/bulk_insert/builds_tags_configuration.rb b/lib/gitlab/ci/tags/bulk_insert/builds_tags_configuration.rb
index 139d0505184..c6dc8066033 100644
--- a/lib/gitlab/ci/tags/bulk_insert/builds_tags_configuration.rb
+++ b/lib/gitlab/ci/tags/bulk_insert/builds_tags_configuration.rb
@@ -37,7 +37,7 @@ module Gitlab
true
end
- def monomorphic_taggings?
+ def monomorphic_taggings?(_taggable)
true
end
end
diff --git a/lib/gitlab/ci/tags/bulk_insert/configuration_factory.rb b/lib/gitlab/ci/tags/bulk_insert/configuration_factory.rb
index 3c49730131c..9dff9d15e35 100644
--- a/lib/gitlab/ci/tags/bulk_insert/configuration_factory.rb
+++ b/lib/gitlab/ci/tags/bulk_insert/configuration_factory.rb
@@ -23,7 +23,8 @@ module Gitlab
def strategies
[
- BuildsTagsConfiguration
+ BuildsTagsConfiguration,
+ RunnerTaggingsConfiguration
]
end
end
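Given the `applies_to?`/`build_from` class methods that the strategies expose (see `RunnerTaggingsConfiguration` below), the factory presumably picks the first matching strategy along these lines (a sketch; the dispatch method sits outside the hunk shown here, and its name is an assumption):

```ruby
# Hypothetical dispatch, consistent with the strategy interface in this diff.
def configuration_for(record)
  strategy = strategies.find { |candidate| candidate.applies_to?(record) }

  strategy ? strategy.build_from(record) : NoConfig.new
end
```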
diff --git a/lib/gitlab/ci/tags/bulk_insert/no_config.rb b/lib/gitlab/ci/tags/bulk_insert/no_config.rb
index cb568451360..57b2aadeb0a 100644
--- a/lib/gitlab/ci/tags/bulk_insert/no_config.rb
+++ b/lib/gitlab/ci/tags/bulk_insert/no_config.rb
@@ -15,7 +15,7 @@ module Gitlab
true
end
- def monomorphic_taggings?
+ def monomorphic_taggings?(_taggable)
false
end
end
diff --git a/lib/gitlab/ci/tags/bulk_insert/runner_taggings_configuration.rb b/lib/gitlab/ci/tags/bulk_insert/runner_taggings_configuration.rb
new file mode 100644
index 00000000000..b8dabba521b
--- /dev/null
+++ b/lib/gitlab/ci/tags/bulk_insert/runner_taggings_configuration.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Tags
+ class BulkInsert
+ class RunnerTaggingsConfiguration
+ include ::Gitlab::Utils::StrongMemoize
+
+ def self.applies_to?(record)
+ record.is_a?(::Ci::Runner)
+ end
+
+ def self.build_from(runner)
+ new(runner)
+ end
+
+ def initialize(runner)
+ @runner = runner
+ end
+
+ def join_model
+ ::Ci::RunnerTagging
+ end
+
+ def unique_by
+ [:tag_id, :runner_id, :runner_type]
+ end
+
+ def attributes_map(runner)
+ {
+ runner_id: runner.id,
+ runner_type: runner.runner_type,
+ sharding_key_id: runner.sharding_key_id
+ }
+ end
+
+ def polymorphic_taggings?
+ true
+ end
+
+ def monomorphic_taggings?(runner)
+ strong_memoize_with(:monomorphic_taggings, runner.owner) do
+ ::Feature.enabled?(:write_to_ci_runner_taggings, runner.owner)
+ end
+ end
+ end
+ end
+ end
+ end
+end
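A hypothetical end-to-end use of the new configuration (`bulk_insert_tags!` appears in the spec changes below); whether rows land in `ci_runner_taggings` is gated per owner by the `write_to_ci_runner_taggings` feature flag checked above:

```ruby
# Sketch: assign tags to a runner and bulk-insert the tag rows.
runner = Ci::Runner.first
runner.tag_list = %w[docker linux]

# The configuration factory resolves RunnerTaggingsConfiguration for
# Ci::Runner records; polymorphic taggings are always written, while the
# monomorphic ci_runner_taggings rows depend on the per-owner flag.
Gitlab::Ci::Tags::BulkInsert.bulk_insert_tags!([runner])
```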
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index f8aaaff55d8..dae95ad1edb 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -25285,6 +25285,12 @@ msgstr ""
msgid "GlobalSearch|Archived"
msgstr ""
+msgid "GlobalSearch|Author"
+msgstr ""
+
+msgid "GlobalSearch|Author not included"
+msgstr ""
+
msgid "GlobalSearch|Branch not included"
msgstr ""
@@ -48297,7 +48303,7 @@ msgstr ""
msgid "ScanExecutionPolicy|Add new condition"
msgstr ""
-msgid "ScanExecutionPolicy|Are you sure you want to create merge request fot this policy?"
+msgid "ScanExecutionPolicy|Are you sure you want to create merge request for this policy?"
msgstr ""
msgid "ScanExecutionPolicy|Back to edit policy"
@@ -59074,8 +59080,10 @@ msgstr ""
msgid "Unlimited"
msgstr ""
-msgid "UnlimitedMembersDuringTrialAlert|During your trial, invite as many members as you like to %{group_or_project} to collaborate with you."
-msgstr ""
+msgid "UnlimitedMembersDuringTrialAlert|During your trial, invite as many members as you like to %{name} to collaborate with you. When your trial ends, you'll have a maximum of %{limit} member on the Free tier, or you can get more by upgrading to a paid tier."
+msgid_plural "UnlimitedMembersDuringTrialAlert|During your trial, invite as many members as you like to %{name} to collaborate with you. When your trial ends, you'll have a maximum of %{limit} members on the Free tier, or you can get more by upgrading to a paid tier."
+msgstr[0] ""
+msgstr[1] ""
msgid "UnlimitedMembersDuringTrialAlert|Explore paid plans"
msgstr ""
@@ -62310,11 +62318,6 @@ msgstr ""
msgid "When you transfer your project to a group, you can easily manage multiple projects, view usage quotas for storage, compute minutes, and users, and start a trial or upgrade to a paid tier."
msgstr ""
-msgid "When your trial ends, you'll have a maximum of %d member on the Free tier, or you can get more by upgrading to a paid tier."
-msgid_plural "When your trial ends, you'll have a maximum of %d members on the Free tier, or you can get more by upgrading to a paid tier."
-msgstr[0] ""
-msgstr[1] ""
-
msgid "When your trial ends, you'll move to the Free tier, which has a limit of %{free_user_limit} seat. %{free_user_limit} seat will remain active, and members not occupying a seat will have the %{link_start}Over limit status%{link_end} and lose access to this group."
msgid_plural "When your trial ends, you'll move to the Free tier, which has a limit of %{free_user_limit} seats. %{free_user_limit} seats will remain active, and members not occupying a seat will have the %{link_start}Over limit status%{link_end} and lose access to this group."
msgstr[0] ""
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 38dc83f303f..786eda444e3 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -412,6 +412,7 @@ RSpec.describe 'Database schema',
# These pre-existing columns do not use a schema validation yet
let(:ignored_jsonb_columns_map) do
{
+ "Ai::Conversation::Message" => %w[extras error_details],
"ApplicationSetting" => %w[repository_storages_weighted],
"AlertManagement::Alert" => %w[payload],
"Ci::BuildMetadata" => %w[config_options config_variables],
diff --git a/spec/frontend/search/mock_data.js b/spec/frontend/search/mock_data.js
index d355acaee11..c9ac817fea2 100644
--- a/spec/frontend/search/mock_data.js
+++ b/spec/frontend/search/mock_data.js
@@ -1739,15 +1739,60 @@ export const mockDataForBlobBody = {
export const mockSourceBranches = [
{
- text: 'master',
- value: 'master',
+ text: 'Master Item',
+ value: 'master-item',
},
{
- text: 'feature',
- value: 'feature',
+ text: 'Feature Item',
+ value: 'feature-item',
},
{
- text: 'develop',
- value: 'develop',
+ text: 'Develop Item',
+ value: 'develop-item',
+ },
+];
+
+export const mockAuthorsAxiosResponse = [
+ {
+ id: 1,
+ username: 'root',
+ name: 'Administrator',
+ state: 'active',
+ locked: false,
+ avatar_url:
+ 'https://www.gravatar.com/avatar/8a2ba320206c6d79e89dd41a9081b7ae521d365f2054b3db1ac6462f692b176f?s=80&d=identicon',
+ web_url: 'http://127.0.0.1:3000/root',
+ status_tooltip_html: null,
+ show_status: false,
+ availability: null,
+ path: '/root',
+ },
+ {
+ id: 65,
+ username: 'john',
+ name: 'John Doe',
+ state: 'active',
+ locked: false,
+ avatar_url:
+ 'https://www.gravatar.com/avatar/d9165b0da62fb9f9a57214a8fcc333101f2d10f494c662b53ffbeded3dcfa0dd?s=80&d=identicon',
+ web_url: 'http://127.0.0.1:3000/john',
+ status_tooltip_html: null,
+ show_status: false,
+ availability: null,
+ path: '/john',
+ },
+ {
+ id: 50,
+ username: 'jane',
+ name: 'Jane Doe',
+ state: 'active',
+ locked: false,
+ avatar_url:
+ 'https://www.gravatar.com/avatar/224e81a612a566f3eb211d1d457b2335b662ad0dc7bb8d1b642056dd1b81755c?s=80&d=identicon',
+ web_url: 'http://127.0.0.1:3000/jane',
+ status_tooltip_html: null,
+ show_status: false,
+ availability: null,
+ path: '/jane',
},
];
diff --git a/spec/frontend/search/sidebar/components/author_filter_spec.js b/spec/frontend/search/sidebar/components/author_filter_spec.js
new file mode 100644
index 00000000000..6f09f20a796
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/author_filter_spec.js
@@ -0,0 +1,173 @@
+import { shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import Vue, { nextTick } from 'vue';
+// eslint-disable-next-line no-restricted-imports
+import Vuex from 'vuex';
+import { GlFormCheckbox } from '@gitlab/ui';
+import axios from '~/lib/utils/axios_utils';
+import AjaxCache from '~/lib/utils/ajax_cache';
+import AuthorFilter from '~/search/sidebar/components/author_filter/index.vue';
+import FilterDropdown from '~/search/sidebar/components/shared/filter_dropdown.vue';
+import { MOCK_QUERY, mockAuthorsAxiosResponse } from '../../mock_data';
+
+Vue.use(Vuex);
+
+describe('Author filter', () => {
+ let wrapper;
+ const mock = new MockAdapter(axios);
+
+ const actions = {
+ setQuery: jest.fn(),
+ applyQuery: jest.fn(),
+ };
+
+ const defaultState = {
+ query: {
+ scope: 'merge_requests',
+ group_id: 1,
+ search: '*',
+ },
+ };
+
+ const createComponent = (state) => {
+ const store = new Vuex.Store({
+ state: { ...defaultState, ...state },
+ actions,
+ });
+
+ wrapper = shallowMount(AuthorFilter, {
+ store,
+ });
+ };
+
+ const findFilterDropdown = () => wrapper.findComponent(FilterDropdown);
+ const findGlFormCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('when initial state', () => {
+ it('renders the component', () => {
+ expect(findFilterDropdown().exists()).toBe(true);
+ expect(findGlFormCheckbox().exists()).toBe(true);
+ });
+ });
+
+ describe.each(['not[author_username]', 'author_username'])(
+ `when author is selected for %s author search`,
+ (authorParam) => {
+ beforeEach(async () => {
+ mock
+ .onGet('/-/autocomplete/users.json?current_user=true&active=true&search=')
+ .reply(200, mockAuthorsAxiosResponse);
+ createComponent({
+ query: {
+ ...MOCK_QUERY,
+ [authorParam]: 'root',
+ },
+ });
+
+ findFilterDropdown().vm.$emit('selected', 'root');
+ await nextTick();
+ });
+
+ it('renders the component with selected options', () => {
+ expect(findFilterDropdown().props('selectedItem')).toBe('root');
+ expect(findGlFormCheckbox().attributes('checked')).toBe(
+ authorParam === 'not[author_username]' ? 'true' : undefined,
+ );
+ });
+
+ it('displays the correct placeholder text and icon', () => {
+ expect(findFilterDropdown().props('searchText')).toBe('Administrator');
+ expect(findFilterDropdown().props('icon')).toBe('user');
+ });
+ },
+ );
+
+ describe('when opening dropdown', () => {
+ beforeEach(() => {
+ jest.spyOn(axios, 'get');
+ jest.spyOn(AjaxCache, 'retrieve');
+
+ createComponent({
+ groupInitialJson: {
+ id: 1,
+ full_name: 'gitlab-org/gitlab-test',
+ full_path: 'gitlab-org/gitlab-test',
+ },
+ });
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('calls AjaxCache with correct params', () => {
+ findFilterDropdown().vm.$emit('shown');
+ expect(AjaxCache.retrieve).toHaveBeenCalledWith(
+ '/-/autocomplete/users.json?current_user=true&active=true&group_id=1&search=',
+ );
+ });
+ });
+
+ describe.each([false, true])('when selecting an author with toggle %s', (toggle) => {
+ beforeEach(() => {
+ createComponent({
+ query: {
+ ...MOCK_QUERY,
+ },
+ });
+ });
+
+ it('calls setQuery with the correct params', () => {
+ const authorParam = 'author_username';
+ const authorNotParam = 'not[author_username]';
+
+ wrapper.vm.toggleState = !toggle;
+ findFilterDropdown().vm.$emit('selected', 'root');
+
+ expect(actions.setQuery).toHaveBeenCalledTimes(2);
+ expect(actions.setQuery.mock.calls).toMatchObject([
+ [
+ expect.anything(),
+ {
+ key: toggle ? authorParam : authorNotParam,
+ value: 'root',
+ },
+ ],
+ [
+ expect.anything(),
+ {
+ key: toggle ? authorNotParam : authorParam,
+ value: '',
+ },
+ ],
+ ]);
+ });
+ });
+
+ describe('when resetting selected author', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it(`calls setQuery with correct param`, () => {
+ findFilterDropdown().vm.$emit('reset');
+
+ expect(actions.setQuery).toHaveBeenCalledWith(expect.anything(), {
+ key: 'author_username',
+ value: '',
+ });
+
+ expect(actions.setQuery).toHaveBeenCalledWith(expect.anything(), {
+ key: 'not[author_username]',
+ value: '',
+ });
+
+ expect(actions.applyQuery).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/search/sidebar/components/filter_dropdown_spec.js b/spec/frontend/search/sidebar/components/filter_dropdown_spec.js
index 7582d67e8ef..c1bc724e45b 100644
--- a/spec/frontend/search/sidebar/components/filter_dropdown_spec.js
+++ b/spec/frontend/search/sidebar/components/filter_dropdown_spec.js
@@ -1,5 +1,5 @@
-import { shallowMount } from '@vue/test-utils';
import { GlCollapsibleListbox, GlListboxItem, GlIcon } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import FilterDropdown from '~/search/sidebar/components/shared/filter_dropdown.vue';
import waitForPromises from 'helpers/wait_for_promises';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
@@ -11,7 +11,7 @@ describe('BranchDropdown', () => {
const defaultProps = {
listData: mockSourceBranches,
- errors: [],
+ error: '',
selectedItem: 'Master Item',
headerText: 'Filter header',
searchText: 'Search filter items',
@@ -20,8 +20,8 @@ describe('BranchDropdown', () => {
isLoading: false,
};
- const createComponent = (props = {}, options = {}) => {
- wrapper = shallowMount(FilterDropdown, {
+ const createComponent = (props = {}) => {
+ wrapper = shallowMountExtended(FilterDropdown, {
propsData: {
...defaultProps,
...props,
@@ -30,13 +30,12 @@ describe('BranchDropdown', () => {
GlCollapsibleListbox,
GlIcon,
},
- ...options,
});
};
const findGlCollapsibleListbox = () => wrapper.findComponent(GlCollapsibleListbox);
const findGlListboxItems = () => wrapper.findAllComponents(GlListboxItem);
- const findErrorMessages = () => wrapper.findAll('[data-testid="branch-dropdown-error-list"]');
+ const findErrorMessage = () => wrapper.findByTestId('branch-dropdown-error');
describe('when nothing is selected', () => {
beforeEach(() => {
@@ -71,18 +70,24 @@ describe('BranchDropdown', () => {
expect(props.resetButtonLabel).toBe('Reset');
});
- it('renders error messages when errors prop is passed', async () => {
- const errors = ['Error 1', 'Error 2'];
- createComponent({ errors });
+ it('renders error message when error prop is passed', async () => {
+ createComponent({ error: 'Error 1' });
await waitForPromises();
+ expect(findErrorMessage().exists()).toBe(true);
+ expect(findErrorMessage().text()).toBe('Error 1');
+ });
- const errorMessages = findErrorMessages();
+ it('renders error message reactively', async () => {
+ createComponent();
- expect(errorMessages.length).toBe(errors.length);
- errorMessages.wrappers.forEach((errorWrapper, index) => {
- expect(errorWrapper.text()).toContain(errors[index]);
- });
+ await waitForPromises();
+ expect(findErrorMessage().exists()).toBe(false);
+
+ wrapper.setProps({ error: 'Error 1' });
+ await waitForPromises();
+ expect(findErrorMessage().exists()).toBe(true);
+ expect(findErrorMessage().text()).toBe('Error 1');
});
it('search filters items', async () => {
diff --git a/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js b/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js
index 5c14fc13170..4721fba09d3 100644
--- a/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js
+++ b/spec/frontend/search/sidebar/components/merge_requests_filters_spec.js
@@ -8,6 +8,7 @@ import StatusFilter from '~/search/sidebar/components/status_filter/index.vue';
import ArchivedFilter from '~/search/sidebar/components/archived_filter/index.vue';
import SourceBranchFilter from '~/search/sidebar/components/source_branch_filter/index.vue';
import LabelFilter from '~/search/sidebar/components/label_filter/index.vue';
+import AuthorFilter from '~/search/sidebar/components/author_filter/index.vue';
Vue.use(Vuex);
@@ -21,7 +22,12 @@ describe('GlobalSearch MergeRequestsFilters', () => {
const createComponent = (
initialState = {},
- provide = { glFeatures: { searchMrFilterSourceBranch: true } },
+ provide = {
+ glFeatures: {
+ searchMrFilterSourceBranch: true,
+ searchMrFilterAuthor: true,
+ },
+ },
) => {
const store = new Vuex.Store({
state: {
@@ -45,8 +51,9 @@ describe('GlobalSearch MergeRequestsFilters', () => {
const findArchivedFilter = () => wrapper.findComponent(ArchivedFilter);
const findSourceBranchFilter = () => wrapper.findComponent(SourceBranchFilter);
const findLabelFilter = () => wrapper.findComponent(LabelFilter);
+ const findAuthorFilter = () => wrapper.findComponent(AuthorFilter);
- describe('Renders correctly with Archived Filter', () => {
+ describe('When rendering correctly with Archived Filter', () => {
beforeEach(() => {
createComponent();
});
@@ -59,16 +66,20 @@ describe('GlobalSearch MergeRequestsFilters', () => {
expect(findArchivedFilter().exists()).toBe(true);
});
- it('renders sourceBranchFilter', () => {
+ it('renders SourceBranchFilter', () => {
expect(findSourceBranchFilter().exists()).toBe(true);
});
- it('renders label filter', () => {
+ it('renders LabelFilter', () => {
expect(findLabelFilter().exists()).toBe(true);
});
+
+ it('renders AuthorFilter', () => {
+ expect(findAuthorFilter().exists()).toBe(true);
+ });
});
- describe('Renders correctly with basic search', () => {
+ describe('When rendering correctly with basic search', () => {
beforeEach(() => {
createComponent({ searchType: 'basic' });
});
@@ -81,35 +92,46 @@ describe('GlobalSearch MergeRequestsFilters', () => {
expect(findArchivedFilter().exists()).toBe(true);
});
- it('renders sourceBranchFilter', () => {
+ it('renders SourceBranchFilter', () => {
expect(findSourceBranchFilter().exists()).toBe(true);
});
- it('will not render label filter', () => {
+ it('will not render LabelFilter', () => {
expect(findLabelFilter().exists()).toBe(false);
});
+
+ it('will not render AuthorFilter', () => {
+ expect(findAuthorFilter().exists()).toBe(false);
+ });
});
- describe.each([true, false])(
- 'When feature flag search_mr_filter_source_branch is',
- (searchMrFilterSourceBranch) => {
- beforeEach(() => {
- createComponent(null, { glFeatures: { searchMrFilterSourceBranch } });
- });
+ describe('When feature flag search_mr_filter_source_branch is disabled', () => {
+ beforeEach(() => {
+ createComponent(null, { glFeatures: { searchMrFilterSourceBranch: false } });
+ });
- it(`${searchMrFilterSourceBranch ? 'will' : 'will not'} render sourceBranchFilter`, () => {
- expect(findSourceBranchFilter().exists()).toBe(searchMrFilterSourceBranch);
- });
- },
- );
+ it(`will not render SourceBranchFilter`, () => {
+ expect(findSourceBranchFilter().exists()).toBe(false);
+ });
+ });
- describe('hasMissingProjectContext getter', () => {
+ describe('When feature flag search_mr_filter_author is disabled', () => {
+ beforeEach(() => {
+ createComponent(null, { glFeatures: { searchMrFilterAuthor: false } });
+ });
+
+ it(`will not render AuthorFilter`, () => {
+ expect(findAuthorFilter().exists()).toBe(false);
+ });
+ });
+
+ describe('#hasMissingProjectContext getter', () => {
beforeEach(() => {
defaultGetters.hasMissingProjectContext = () => false;
createComponent();
});
- it('hides archived filter', () => {
+ it('hides ArchivedFilter', () => {
expect(findArchivedFilter().exists()).toBe(false);
});
});
diff --git a/spec/graphql/types/user_type_spec.rb b/spec/graphql/types/user_type_spec.rb
index 3fa592f9160..e55da9aaf09 100644
--- a/spec/graphql/types/user_type_spec.rb
+++ b/spec/graphql/types/user_type_spec.rb
@@ -62,6 +62,7 @@ RSpec.describe GitlabSchema.types['User'], feature_category: :user_profile do
pronouns
ide
userPreferences
+ type
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -397,4 +398,56 @@ RSpec.describe GitlabSchema.types['User'], feature_category: :user_profile do
is_expected.to have_graphql_type(Types::UserPreferencesType)
end
end
+
+ describe 'type field' do
+ subject { described_class.fields['type'] }
+
+ let_it_be(:admin) { create(:user, :admin) }
+ let_it_be(:regular_user) { create(:user) }
+ let_it_be(:placeholder_user) { create(:user, :placeholder) }
+ let_it_be(:import_user) { create(:user, :import_user) }
+ let_it_be(:ghost_user) { create(:user, :ghost) }
+
+ let(:query) do
+ <<~GQL
+ query($id: UserID!) {
+ user(id: $id) {
+ type
+ }
+ }
+ GQL
+ end
+
+ it 'returns type field' do
+ is_expected.to have_graphql_type(Types::Users::TypeEnum.to_non_null_type)
+ end
+
+ it 'returns HUMAN for regular users' do
+ result = GitlabSchema.execute(query, variables: { id: regular_user.to_global_id.to_s },
+ context: { current_user: admin }).as_json
+
+ expect(result.dig('data', 'user', 'type')).to eq('HUMAN')
+ end
+
+ it 'returns PLACEHOLDER for placeholder users' do
+ result = GitlabSchema.execute(query, variables: { id: placeholder_user.to_global_id.to_s },
+ context: { current_user: admin }).as_json
+
+ expect(result.dig('data', 'user', 'type')).to eq('PLACEHOLDER')
+ end
+
+ it 'returns IMPORT_USER for import users' do
+ result = GitlabSchema.execute(query, variables: { id: import_user.to_global_id.to_s },
+ context: { current_user: admin }).as_json
+
+ expect(result.dig('data', 'user', 'type')).to eq('IMPORT_USER')
+ end
+
+ it 'returns GHOST for ghost users' do
+ result = GitlabSchema.execute(query, variables: { id: ghost_user.to_global_id.to_s },
+ context: { current_user: admin }).as_json
+
+ expect(result.dig('data', 'user', 'type')).to eq('GHOST')
+ end
+ end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_protected_branch_merge_access_levels_protected_branch_namespace_id_spec.rb b/spec/lib/gitlab/background_migration/backfill_protected_branch_merge_access_levels_protected_branch_namespace_id_spec.rb
new file mode 100644
index 00000000000..34f5f5d8401
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_protected_branch_merge_access_levels_protected_branch_namespace_id_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillProtectedBranchMergeAccessLevelsProtectedBranchNamespaceId,
+ feature_category: :source_code_management,
+ schema: 20241204130226 do
+ include_examples 'desired sharding key backfill job' do
+ let(:batch_table) { :protected_branch_merge_access_levels }
+ let(:backfill_column) { :protected_branch_namespace_id }
+ let(:backfill_via_table) { :protected_branches }
+ let(:backfill_via_column) { :namespace_id }
+ let(:backfill_via_foreign_key) { :protected_branch_id }
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_protected_branch_merge_access_levels_protected_branch_project_id_spec.rb b/spec/lib/gitlab/background_migration/backfill_protected_branch_merge_access_levels_protected_branch_project_id_spec.rb
new file mode 100644
index 00000000000..321b4af4783
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_protected_branch_merge_access_levels_protected_branch_project_id_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillProtectedBranchMergeAccessLevelsProtectedBranchProjectId,
+ feature_category: :source_code_management,
+ schema: 20241204130221 do
+ include_examples 'desired sharding key backfill job' do
+ let(:batch_table) { :protected_branch_merge_access_levels }
+ let(:backfill_column) { :protected_branch_project_id }
+ let(:backfill_via_table) { :protected_branches }
+ let(:backfill_via_column) { :project_id }
+ let(:backfill_via_foreign_key) { :protected_branch_id }
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_status_page_published_incidents_namespace_id_spec.rb b/spec/lib/gitlab/background_migration/backfill_status_page_published_incidents_namespace_id_spec.rb
new file mode 100644
index 00000000000..7d26491ae32
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_status_page_published_incidents_namespace_id_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillStatusPagePublishedIncidentsNamespaceId,
+ feature_category: :incident_management,
+ schema: 20241205143056 do
+ include_examples 'desired sharding key backfill job' do
+ let(:batch_table) { :status_page_published_incidents }
+ let(:backfill_column) { :namespace_id }
+ let(:backfill_via_table) { :issues }
+ let(:backfill_via_column) { :namespace_id }
+ let(:backfill_via_foreign_key) { :issue_id }
+ end
+end
diff --git a/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb b/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb
index 32b4235f2c4..93ed0912731 100644
--- a/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb
+++ b/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb
@@ -2,208 +2,223 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Tags::BulkInsert do
+RSpec.describe Gitlab::Ci::Tags::BulkInsert, feature_category: :continuous_integration do
+ using RSpec::Parameterized::TableSyntax
+
let_it_be(:project) { create(:project, :repository) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be_with_refind(:job) { create(:ci_build, :unique_name, pipeline: pipeline) }
let_it_be_with_refind(:other_job) { create(:ci_build, :unique_name, pipeline: pipeline) }
- let(:statuses) { [job, other_job] }
+ let_it_be_with_refind(:runner) { create(:ci_runner) }
+ let_it_be_with_refind(:other_runner) { create(:ci_runner, :project_type, projects: [project]) }
+
+ let(:statuses) { [taggable, other_taggable] }
let(:config) { described_class::NoConfig.new }
subject(:service) { described_class.new(statuses, config: config) }
- describe '.bulk_insert_tags!' do
- let(:inserter) { instance_double(described_class) }
-
- it 'delegates to bulk insert class' do
- expect(described_class)
- .to receive(:new)
- .with(statuses, config: nil)
- .and_return(inserter)
-
- expect(inserter).to receive(:insert!)
-
- described_class.bulk_insert_tags!(statuses)
- end
+ where(:taggable_class, :taggable, :other_taggable, :tagging_class, :taggable_id_column, :partition_column,
+ :expected_configuration) do
+ Ci::Build | ref(:job) | ref(:other_job) | Ci::BuildTag | :build_id | :partition_id |
+ described_class::BuildsTagsConfiguration
+ Ci::Runner | ref(:runner) | ref(:other_runner) | Ci::RunnerTagging | :runner_id | :runner_type |
+ described_class::RunnerTaggingsConfiguration
end
- describe '#insert!' do
- context 'without tags' do
- it { expect(service.insert!).to be_truthy }
+ with_them do
+ describe '.bulk_insert_tags!' do
+ let(:inserter) { instance_double(described_class) }
+
+ it 'delegates to bulk insert class' do
+ expect(described_class)
+ .to receive(:new)
+ .with(statuses, config: nil)
+ .and_return(inserter)
+
+ expect(inserter).to receive(:insert!)
+
+ described_class.bulk_insert_tags!(statuses)
+ end
end
- context 'with tags' do
- before do
- job.tag_list = %w[tag1 tag2]
- other_job.tag_list = %w[tag2 tag3 tag4]
+ describe '#insert!' do
+ context 'without tags' do
+ it { expect(service.insert!).to be_truthy }
end
- it 'persists tags' do
- expect(service.insert!).to be_truthy
-
- expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
- expect(other_job.reload.tag_list).to match_array(%w[tag2 tag3 tag4])
- end
-
- it 'persists taggings' do
- service.insert!
-
- expect(job.taggings.size).to eq(2)
- expect(other_job.taggings.size).to eq(3)
-
- expect(Ci::Build.tagged_with('tag1')).to include(job)
- expect(Ci::Build.tagged_with('tag2')).to include(job, other_job)
- expect(Ci::Build.tagged_with('tag3')).to include(other_job)
- end
-
- it 'strips tags' do
- job.tag_list = [' taga', 'tagb ', ' tagc ']
-
- service.insert!
- expect(job.tags.map(&:name)).to match_array(%w[taga tagb tagc])
- end
-
- context 'when batching inserts for tags' do
+ context 'with tags' do
before do
- stub_const("#{described_class}::TAGS_BATCH_SIZE", 2)
+ taggable.tag_list = %w[tag1 tag2]
+ other_taggable.tag_list = %w[tag2 tag3 tag4]
end
- it 'inserts tags in batches' do
- recorder = ActiveRecord::QueryRecorder.new { service.insert! }
- count = recorder.log.count { |query| query.include?('INSERT INTO "tags"') }
+ it 'persists tags' do
+ expect(service.insert!).to be_truthy
- expect(count).to eq(2)
- end
- end
-
- context 'when batching inserts for taggings' do
- before do
- stub_const("#{described_class}::TAGGINGS_BATCH_SIZE", 2)
+ expect(taggable.reload.tag_list).to match_array(%w[tag1 tag2])
+ expect(other_taggable.reload.tag_list).to match_array(%w[tag2 tag3 tag4])
end
- it 'inserts taggings in batches' do
- recorder = ActiveRecord::QueryRecorder.new { service.insert! }
- count = recorder.log.count { |query| query.include?('INSERT INTO "taggings"') }
-
- expect(count).to eq(3)
- end
- end
-
- context 'with no config provided' do
- it 'does not persist tag links' do
+ it 'persists taggings' do
service.insert!
- expect(job.tag_links).to be_empty
- expect(other_job.tag_links).to be_empty
- end
- end
+ expect(taggable.taggings.size).to eq(2)
+ expect(other_taggable.taggings.size).to eq(3)
- context 'with config provided by the factory' do
- let(:config) { nil }
-
- it 'generates a valid config' do
- expect(service.config).to be_a(described_class::BuildsTagsConfiguration)
+ expect(taggable_class.tagged_with('tag1')).to include(taggable)
+ expect(taggable_class.tagged_with('tag2')).to include(taggable, other_taggable)
+ expect(taggable_class.tagged_with('tag3')).to include(other_taggable)
end
- context 'with flags' do
+ it 'strips tags' do
+ taggable.tag_list = [' taga', 'tagb ', ' tagc ']
+
+ service.insert!
+ expect(taggable.tags.map(&:name)).to match_array(%w[taga tagb tagc])
+ end
+
+ context 'when batching inserts for tags' do
before do
- allow(service.config).to receive(:monomorphic_taggings?) { monomorphic_taggings }
- allow(service.config).to receive(:polymorphic_taggings?) { polymorphic_taggings }
+ stub_const("#{described_class}::TAGS_BATCH_SIZE", 2)
end
- context 'when writing to both tables' do
- let(:monomorphic_taggings) { true }
- let(:polymorphic_taggings) { true }
+ it 'inserts tags in batches' do
+ recorder = ActiveRecord::QueryRecorder.new { service.insert! }
+ count = recorder.log.count { |query| query.include?('INSERT INTO "tags"') }
- it 'persists tag links and taggings' do
- service.insert!
+ expect(count).to eq(2)
+ end
+ end
- expect(job.tag_links).not_to be_empty
- expect(other_job.tag_links).not_to be_empty
+ context 'when batching inserts for taggings' do
+ before do
+ stub_const("#{described_class}::TAGGINGS_BATCH_SIZE", 2)
+ end
- expect(jobs_tagged_with('tag1')).to contain_exactly(job)
- expect(jobs_tagged_with('tag2')).to contain_exactly(job, other_job)
- expect(jobs_tagged_with('tag3')).to contain_exactly(other_job)
+ it 'inserts taggings in batches' do
+ recorder = ActiveRecord::QueryRecorder.new { service.insert! }
+ count = recorder.log.count { |query| query.include?('INSERT INTO "taggings"') }
- expect(job.taggings).not_to be_empty
- expect(other_job.taggings).not_to be_empty
+ expect(count).to eq(3)
+ end
+ end
- expect(Ci::Build.tagged_with('tag1')).to contain_exactly(job)
- expect(Ci::Build.tagged_with('tag2')).to contain_exactly(job, other_job)
- expect(Ci::Build.tagged_with('tag3')).to contain_exactly(other_job)
+ context 'with no config provided' do
+ it 'does not persist tag links' do
+ service.insert!
+
+ expect(taggable.tag_links).to be_empty
+ expect(other_taggable.tag_links).to be_empty
+ end
+ end
+
+ context 'with config provided by the factory' do
+ let(:config) { nil }
+
+ it 'generates a valid config' do
+ expect(service.config).to be_a(expected_configuration)
+ end
+
+ context 'with flags' do
+ before do
+ allow(service.config).to receive(:monomorphic_taggings?) { monomorphic_taggings }
+ allow(service.config).to receive(:polymorphic_taggings?) { polymorphic_taggings }
end
- end
- context 'when writing only to taggings' do
- let(:monomorphic_taggings) { false }
- let(:polymorphic_taggings) { true }
+ context 'when writing to both tables' do
+ let(:monomorphic_taggings) { true }
+ let(:polymorphic_taggings) { true }
- it 'persists taggings' do
- service.insert!
+ it 'persists tag links and taggings' do
+ service.insert!
- expect(job.tag_links).to be_empty
- expect(other_job.tag_links).to be_empty
+ expect(taggable.tag_links).not_to be_empty
+ expect(other_taggable.tag_links).not_to be_empty
- expect(job.taggings).not_to be_empty
- expect(other_job.taggings).not_to be_empty
+ expect(tagged_with('tag1')).to contain_exactly(taggable)
+ expect(tagged_with('tag2')).to contain_exactly(taggable, other_taggable)
+ expect(tagged_with('tag3')).to contain_exactly(other_taggable)
- expect(Ci::Build.tagged_with('tag1')).to contain_exactly(job)
- expect(Ci::Build.tagged_with('tag2')).to contain_exactly(job, other_job)
- expect(Ci::Build.tagged_with('tag3')).to contain_exactly(other_job)
+ expect(taggable.taggings).not_to be_empty
+ expect(other_taggable.taggings).not_to be_empty
+
+ expect(taggable_class.tagged_with('tag1')).to contain_exactly(taggable)
+ expect(taggable_class.tagged_with('tag2')).to contain_exactly(taggable, other_taggable)
+ expect(taggable_class.tagged_with('tag3')).to contain_exactly(other_taggable)
+ end
end
- end
- context 'when writing only to link table' do
- let(:monomorphic_taggings) { true }
- let(:polymorphic_taggings) { false }
+ context 'when writing only to taggings' do
+ let(:monomorphic_taggings) { false }
+ let(:polymorphic_taggings) { true }
- it 'persists tag links' do
- service.insert!
+ it 'persists taggings' do
+ service.insert!
- expect(job.tag_links).not_to be_empty
- expect(other_job.tag_links).not_to be_empty
+ expect(taggable.tag_links).to be_empty
+ expect(other_taggable.tag_links).to be_empty
- expect(jobs_tagged_with('tag1')).to contain_exactly(job)
- expect(jobs_tagged_with('tag2')).to contain_exactly(job, other_job)
- expect(jobs_tagged_with('tag3')).to contain_exactly(other_job)
+ expect(taggable.taggings).not_to be_empty
+ expect(other_taggable.taggings).not_to be_empty
- expect(job.taggings).to be_empty
- expect(other_job.taggings).to be_empty
+ expect(taggable_class.tagged_with('tag1')).to contain_exactly(taggable)
+ expect(taggable_class.tagged_with('tag2')).to contain_exactly(taggable, other_taggable)
+ expect(taggable_class.tagged_with('tag3')).to contain_exactly(other_taggable)
+ end
end
- end
- def jobs_tagged_with(tag)
- scope = Ci::BuildTag
- .where(tag_id: Ci::Tag.where(name: tag))
- .where(Ci::BuildTag.arel_table[:build_id].eq(Ci::Build.arel_table[:id]))
- .where(Ci::BuildTag.arel_table[:partition_id].eq(Ci::Build.arel_table[:partition_id]))
+ context 'when writing only to link table' do
+ let(:monomorphic_taggings) { true }
+ let(:polymorphic_taggings) { false }
- Ci::Build.where_exists(scope)
+ it 'persists tag links' do
+ service.insert!
+
+ expect(taggable.tag_links).not_to be_empty
+ expect(other_taggable.tag_links).not_to be_empty
+
+ expect(tagged_with('tag1')).to contain_exactly(taggable)
+ expect(tagged_with('tag2')).to contain_exactly(taggable, other_taggable)
+ expect(tagged_with('tag3')).to contain_exactly(other_taggable)
+
+ expect(taggable.taggings).to be_empty
+ expect(other_taggable.taggings).to be_empty
+ end
+ end
+
+ def tagged_with(tag)
+ scope = tagging_class
+ .where(tag_id: Ci::Tag.where(name: tag))
+ .where(tagging_class.arel_table[taggable_id_column].eq(taggable_class.arel_table[:id]))
+ .where(tagging_class.arel_table[partition_column].eq(taggable_class.arel_table[partition_column]))
+
+ taggable_class.where_exists(scope)
+ end
end
end
end
- end
- context 'with tags for only one job' do
- before do
- job.tag_list = %w[tag1 tag2]
- end
+ context 'with tags for only one taggable' do
+ before do
+ taggable.tag_list = %w[tag1 tag2]
+ end
- it 'persists tags' do
- expect(service.insert!).to be_truthy
+ it 'persists tags' do
+ expect(service.insert!).to be_truthy
- expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
- expect(other_job.reload.tag_list).to be_empty
- end
+ expect(taggable.reload.tag_list).to match_array(%w[tag1 tag2])
+ expect(other_taggable.reload.tag_list).to be_empty
+ end
- it 'persists taggings' do
- service.insert!
+ it 'persists taggings' do
+ service.insert!
- expect(job.taggings.size).to eq(2)
+ expect(taggable.taggings.size).to eq(2)
- expect(Ci::Build.tagged_with('tag1')).to include(job)
- expect(Ci::Build.tagged_with('tag2')).to include(job)
+ expect(taggable_class.tagged_with('tag1')).to include(taggable)
+ expect(taggable_class.tagged_with('tag2')).to include(taggable)
+ end
end
end
end
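The rewritten spec drives both taggable types through one table using `RSpec::Parameterized::TableSyntax`; `ref(:job)` defers resolution of a `let_it_be` binding until each example runs, which is what allows the table rows above to reference fixtures. A minimal sketch of the pattern:

```ruby
# Minimal illustration of the table syntax used above (sketch, assuming
# GitLab's spec_helper so let_it_be and the gem are available).
RSpec.describe 'table syntax example' do
  using RSpec::Parameterized::TableSyntax

  let_it_be(:number) { 42 }

  where(:value, :expected_class) do
    ref(:number) | Integer
    'a string'   | String
  end

  with_them do
    it { expect(value).to be_a(expected_class) }
  end
end
```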
diff --git a/spec/lib/gitlab/database/sharding_key_spec.rb b/spec/lib/gitlab/database/sharding_key_spec.rb
index 70551fd8f72..483d4c4ce7e 100644
--- a/spec/lib/gitlab/database/sharding_key_spec.rb
+++ b/spec/lib/gitlab/database/sharding_key_spec.rb
@@ -192,8 +192,6 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
"projects" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/476211',
"push_rules" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/476212',
"snippets" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/476216',
- "upcoming_reconciliations" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/476217',
- "vulnerability_exports" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/476219',
"topics" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/463254',
"oauth_access_tokens" => "https://gitlab.com/gitlab-org/gitlab/-/issues/496717",
"oauth_access_grants" => "https://gitlab.com/gitlab-org/gitlab/-/issues/496717",
diff --git a/spec/migrations/20241204130225_queue_backfill_protected_branch_merge_access_levels_project_id_spec.rb b/spec/migrations/20241204130225_queue_backfill_protected_branch_merge_access_levels_project_id_spec.rb
new file mode 100644
index 00000000000..62750afc635
--- /dev/null
+++ b/spec/migrations/20241204130225_queue_backfill_protected_branch_merge_access_levels_project_id_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillProtectedBranchMergeAccessLevelsProjectId, feature_category: :source_code_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :protected_branch_merge_access_levels,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE,
+ gitlab_schema: :gitlab_main_cell,
+ job_arguments: [
+ :protected_branch_project_id,
+ :protected_branches,
+ :project_id,
+ :protected_branch_id
+ ]
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20241204130230_queue_backfill_protected_branch_merge_access_levels_namespace_id_spec.rb b/spec/migrations/20241204130230_queue_backfill_protected_branch_merge_access_levels_namespace_id_spec.rb
new file mode 100644
index 00000000000..f92e214bd33
--- /dev/null
+++ b/spec/migrations/20241204130230_queue_backfill_protected_branch_merge_access_levels_namespace_id_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillProtectedBranchMergeAccessLevelsNamespaceId, feature_category: :source_code_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :protected_branch_merge_access_levels,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE,
+ gitlab_schema: :gitlab_main_cell,
+ job_arguments: [
+ :protected_branch_namespace_id,
+ :protected_branches,
+ :namespace_id,
+ :protected_branch_id
+ ]
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20241205143060_queue_backfill_status_page_published_incidents_namespace_id_spec.rb b/spec/migrations/20241205143060_queue_backfill_status_page_published_incidents_namespace_id_spec.rb
new file mode 100644
index 00000000000..07809ba2f66
--- /dev/null
+++ b/spec/migrations/20241205143060_queue_backfill_status_page_published_incidents_namespace_id_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillStatusPagePublishedIncidentsNamespaceId, feature_category: :incident_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :status_page_published_incidents,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE,
+ gitlab_schema: :gitlab_main_cell,
+ job_arguments: [
+ :namespace_id,
+ :issues,
+ :namespace_id,
+ :issue_id
+ ]
+ )
+ }
+ end
+ end
+end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 0093f7469e5..64b02c3aca1 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -484,25 +484,32 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
- describe '.created_after' do
+ context 'with created filters' do
let_it_be(:old_pipeline) { create(:ci_pipeline, created_at: 1.week.ago) }
- let_it_be(:pipeline) { create(:ci_pipeline) }
-
- subject { described_class.created_after(1.day.ago) }
-
- it 'returns the pipeline' do
- is_expected.to contain_exactly(pipeline)
- end
- end
-
- describe '.created_before_id' do
- let_it_be(:pipeline) { create(:ci_pipeline) }
let_it_be(:new_pipeline) { create(:ci_pipeline) }
- subject { described_class.created_before_id(new_pipeline.id) }
+ describe '.created_after' do
+ subject { described_class.created_after(1.day.ago) }
- it 'returns the pipeline' do
- is_expected.to contain_exactly(pipeline)
+ it 'returns the newer pipeline' do
+ is_expected.to contain_exactly(new_pipeline)
+ end
+ end
+
+ describe '.created_before' do
+ subject { described_class.created_before(1.day.ago) }
+
+ it 'returns the older pipeline' do
+ is_expected.to contain_exactly(old_pipeline)
+ end
+ end
+
+ describe '.created_before_id' do
+ subject { described_class.created_before_id(new_pipeline.id) }
+
+ it 'returns the older pipeline' do
+ is_expected.to contain_exactly(old_pipeline)
+ end
end
end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 19a9e70df36..62d1023c797 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -10,6 +10,24 @@ RSpec.describe Ci::Runner, type: :model, factory_default: :keep, feature_categor
let_it_be(:project) { create(:project, group: group) }
let_it_be(:other_project) { create(:project) }
+ describe 'associations' do
+ it { is_expected.to belong_to(:creator).class_name('User').optional }
+
+ it { is_expected.to have_many(:runner_managers).inverse_of(:runner) }
+ it { is_expected.to have_many(:builds) }
+ it { is_expected.to have_one(:last_build).class_name('Ci::Build') }
+ it { is_expected.to have_many(:running_builds).inverse_of(:runner) }
+
+ it { is_expected.to have_many(:runner_projects).inverse_of(:runner) }
+ it { is_expected.to have_many(:projects).through(:runner_projects) }
+
+ it { is_expected.to have_many(:runner_namespaces).inverse_of(:runner) }
+ it { is_expected.to have_many(:groups).through(:runner_namespaces) }
+ it { is_expected.to have_one(:owner_runner_namespace).class_name('Ci::RunnerNamespace') }
+
+ it { is_expected.to have_many(:tag_links).class_name('Ci::RunnerTagging').inverse_of(:runner) }
+ end
+
it_behaves_like 'having unique enum values'
it_behaves_like 'it has loose foreign keys' do
@@ -61,7 +79,7 @@ RSpec.describe Ci::Runner, type: :model, factory_default: :keep, feature_categor
let(:tag_name) { 'tag123' }
context 'on save' do
- let_it_be_with_reload(:runner) { create(:ci_runner) }
+ let(:runner) { create(:ci_runner, :group, groups: [group]) }
before do
runner.tag_list = [tag_name]
@@ -96,6 +114,41 @@ RSpec.describe Ci::Runner, type: :model, factory_default: :keep, feature_categor
expect(described_class.tagged_with(tag_name)).to include(runner)
end
end
+
+ context 'when runner is not yet synced to partitioned table' do
+ let(:connection) { Ci::ApplicationRecord.connection }
+
+ before do
+ # Simulate legacy runners not present in sharded table (created when FK was not present)
+ runner
+
+ connection.execute(<<~SQL)
+ DELETE FROM ci_runners_e59bb2812d;
+ SQL
+ end
+
+ context 'when tag does not exist' do
+ before do
+ runner.tag_list = [tag_name]
+ end
+
+ it 'creates a tag and syncs runner to partitioned table' do
+ expect { runner.save! }
+ .to change(Ci::Tag, :count).by(1)
+ .and change { partitioned_runner_exists?(runner) }.from(false).to(true)
+ end
+ end
+
+ private
+
+ def partitioned_runner_exists?(runner)
+ result = connection.execute(<<~SQL)
+ SELECT COUNT(*) FROM ci_runners_e59bb2812d WHERE id = #{runner.id};
+ SQL
+
+ result.first['count'].positive?
+ end
+ end
end
end
@@ -1229,6 +1282,7 @@ RSpec.describe Ci::Runner, type: :model, factory_default: :keep, feature_categor
expect(runner.tags.count).to eq(1)
expect(runner.tags.first.name).to eq('tag')
+ expect(runner.tag_links.count).to eq(1)
end
it 'strips tags' do
@@ -1260,6 +1314,20 @@ RSpec.describe Ci::Runner, type: :model, factory_default: :keep, feature_categor
end
end
end
+
+ context 'with write_to_ci_runner_taggings disabled' do
+ before do
+ stub_feature_flags(write_to_ci_runner_taggings: false)
+ end
+
+ it 'does not save tag_links' do
+ runner.save!
+
+ expect(runner.tags.count).to eq(1)
+ expect(runner.tags.first.name).to eq('tag')
+ expect(runner.tag_links).to be_empty
+ end
+ end
end
describe '#has_tags?' do
diff --git a/spec/models/project_ci_cd_setting_spec.rb b/spec/models/project_ci_cd_setting_spec.rb
index 85e43df5a3f..73b740bc3cd 100644
--- a/spec/models/project_ci_cd_setting_spec.rb
+++ b/spec/models/project_ci_cd_setting_spec.rb
@@ -43,15 +43,6 @@ RSpec.describe ProjectCiCdSetting, feature_category: :continuous_integration do
end
end
- describe '.configured_to_delete_old_pipelines' do
- let_it_be(:project) { create(:project, ci_delete_pipelines_in_seconds: 2.weeks.to_i) }
- let_it_be(:other_project) { create(:project, group_runners_enabled: true) }
-
- it 'includes settings with values present' do
- expect(described_class.configured_to_delete_old_pipelines).to contain_exactly(project.ci_cd_settings)
- end
- end
-
describe '#pipeline_variables_minimum_override_role' do
it 'is maintainer by default' do
expect(described_class.new.pipeline_variables_minimum_override_role).to eq('maintainer')
@@ -138,4 +129,13 @@ RSpec.describe ProjectCiCdSetting, feature_category: :continuous_integration do
end
end
end
+
+ describe '.configured_to_delete_old_pipelines' do
+ let_it_be(:project) { create(:project, ci_delete_pipelines_in_seconds: 2.weeks.to_i) }
+ let_it_be(:other_project) { create(:project, group_runners_enabled: true) }
+
+ it 'includes settings with values present' do
+ expect(described_class.configured_to_delete_old_pipelines).to contain_exactly(project.ci_cd_settings)
+ end
+ end
end
diff --git a/spec/models/users/credit_card_validation_spec.rb b/spec/models/users/credit_card_validation_spec.rb
index 0bf2fd6f48d..ab3b7f008af 100644
--- a/spec/models/users/credit_card_validation_spec.rb
+++ b/spec/models/users/credit_card_validation_spec.rb
@@ -335,14 +335,6 @@ RSpec.describe Users::CreditCardValidation, feature_category: :user_profile do
end
it { is_expected.to eq(true) }
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(credit_card_validation_daily_limit: false)
- end
-
- it { is_expected.to eq(false) }
- end
end
context 'when the limit is exceeded but records have credit_card_validated_at > 24 hours' do
diff --git a/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb b/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb
index a516e22015d..14edf9cc438 100644
--- a/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb
@@ -58,6 +58,7 @@ RSpec.shared_examples "a user type with merge request interaction type" do
pronouns
ide
userPreferences
+ type
]
# TODO: 'workspaces' needs to be included, but only when this spec is run in EE context, to account for the
diff --git a/spec/workers/ci/destroy_old_pipelines_worker_spec.rb b/spec/workers/ci/destroy_old_pipelines_worker_spec.rb
new file mode 100644
index 00000000000..84dff9de7e5
--- /dev/null
+++ b/spec/workers/ci/destroy_old_pipelines_worker_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::DestroyOldPipelinesWorker, :clean_gitlab_redis_shared_state, feature_category: :continuous_integration do
+ let_it_be(:project) { create(:project, ci_delete_pipelines_in_seconds: 2.weeks.to_i) }
+ let_it_be(:ancient_pipeline) { create(:ci_pipeline, project: project, created_at: 1.year.ago) }
+ let_it_be(:old_pipeline) { create(:ci_pipeline, project: project, created_at: 1.month.ago) }
+ let_it_be(:new_pipeline) { create(:ci_pipeline, project: project, created_at: 1.week.ago) }
+
+ describe '#perform' do
+ before do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.rpush(Ci::ScheduleOldPipelinesRemovalCronWorker::QUEUE_KEY, [project.id])
+ end
+ end
+
+ subject(:perform) { described_class.new.perform_work }
+
+ it 'destroys the configured number of pipelines' do
+ stub_const("#{described_class.name}::LIMIT", 1)
+
+ expect { perform }.to change { project.all_pipelines.count }.by(-1)
+ expect(new_pipeline.reload).to be_present
+ end
+
+ it 'loops through the available pipelines' do
+ stub_const("#{described_class.name}::LIMIT", 3)
+
+ expect { perform }.to change { project.all_pipelines.count }.by(-2)
+ expect(new_pipeline.reload).to be_present
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [project.id] }
+
+ it 'executes the service' do
+ expect { perform }.not_to raise_error
+ end
+ end
+ end
+end
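From these expectations, the worker (its implementation is not shown in this diff) presumably pops one project id from the Redis queue per `perform_work` call and destroys up to `LIMIT` expired pipelines, using the new `Ci::Pipeline.created_before` scope tested earlier in this diff. A rough sketch, with the destroy step simplified:

```ruby
# Hypothetical shape of Ci::DestroyOldPipelinesWorker#perform_work,
# reconstructed from the specs; perform_with_capacity comes from the
# LimitedCapacity::Worker concern.
def perform_work
  project_id = Gitlab::Redis::SharedState.with do |redis|
    redis.lpop(Ci::ScheduleOldPipelinesRemovalCronWorker::QUEUE_KEY)
  end
  return if project_id.nil?

  project = Project.find_by_id(project_id)
  return unless project

  # ci_delete_pipelines_in_seconds is assumed to be delegated to
  # the project's CI/CD settings.
  cutoff = project.ci_delete_pipelines_in_seconds.seconds.ago
  project.all_pipelines.created_before(cutoff).limit(LIMIT).each(&:destroy!)
end
```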
diff --git a/spec/workers/ci/schedule_old_pipelines_removal_cron_worker_spec.rb b/spec/workers/ci/schedule_old_pipelines_removal_cron_worker_spec.rb
new file mode 100644
index 00000000000..d3cb05e68dd
--- /dev/null
+++ b/spec/workers/ci/schedule_old_pipelines_removal_cron_worker_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::ScheduleOldPipelinesRemovalCronWorker,
+ :clean_gitlab_redis_shared_state, feature_category: :continuous_integration do
+ let(:worker) { described_class.new }
+
+ let_it_be(:project) { create(:project, ci_delete_pipelines_in_seconds: 2.weeks.to_i) }
+
+ it { is_expected.to include_module(CronjobQueue) }
+ it { expect(described_class.idempotent?).to be_truthy }
+
+ describe '#perform' do
+ it 'enqueues DestroyOldPipelinesWorker jobs' do
+ expect(Ci::DestroyOldPipelinesWorker).to receive(:perform_with_capacity)
+
+ worker.perform
+ end
+
+ it 'enqueues projects to be processed' do
+ worker.perform
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.lpop(described_class::QUEUE_KEY).to_i).to eq(project.id)
+ end
+ end
+
+ context 'when the worker reaches the maximum number of records per execution' do
+ before do
+ stub_const("#{described_class}::PROJECTS_LIMIT", 1)
+ end
+
+ it 'sets the last processed record id in Redis cache' do
+ worker.perform
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.get(described_class::LAST_PROCESSED_REDIS_KEY).to_i).to eq(project.id)
+ end
+ end
+ end
+
+ context 'when the worker continues processing from previous execution' do
+ let_it_be(:other_project) { create(:project, ci_delete_pipelines_in_seconds: 2.weeks.to_i) }
+
+ before do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(described_class::LAST_PROCESSED_REDIS_KEY, other_project.id)
+ end
+ end
+
+ it 'enqueues projects to be processed' do
+ worker.perform
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.lpop(described_class::QUEUE_KEY).to_i).to eq(other_project.id)
+ end
+ end
+
+ it 'enqueues DestroyOldPipelinesWorker jobs' do
+ expect(Ci::DestroyOldPipelinesWorker).to receive(:perform_with_capacity)
+
+ worker.perform
+ end
+ end
+
+ context 'when the worker finishes processing before running out of batches' do
+ before do
+ stub_const("#{described_class}::PROJECTS_LIMIT", 2)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(described_class::LAST_PROCESSED_REDIS_KEY, 0)
+ end
+ end
+
+ it 'clears the last processed record id in Redis cache' do
+ worker.perform
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.get(described_class::LAST_PROCESSED_REDIS_KEY)).to be_nil
+ end
+ end
+
+ it 'enqueues projects to be processed' do
+ worker.perform
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.lpop(described_class::QUEUE_KEY).to_i).to eq(project.id)
+ end
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_delete_old_pipelines: false)
+ end
+
+ it 'does not enqueue DestroyOldPipelinesWorker jobs' do
+ expect(Ci::DestroyOldPipelinesWorker).not_to receive(:perform_with_capacity)
+
+ worker.perform
+ end
+
+ it 'does not enqueue projects to be processed' do
+ worker.perform
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.lpop(described_class::QUEUE_KEY)).to be_nil
+ end
+ end
+ end
+ end
+end
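Putting the three contexts together, the cron worker appears to walk `ProjectCiCdSetting.configured_to_delete_old_pipelines` in bounded batches, resuming from a Redis cursor. A very rough sketch consistent with the expectations above (the real implementation is not in this diff; the cursor comparison and batch query are assumptions):

```ruby
# Hypothetical shape of #perform, inferred from the spec expectations.
def perform
  return if Feature.disabled?(:ci_delete_old_pipelines)

  Gitlab::Redis::SharedState.with do |redis|
    cursor = redis.get(LAST_PROCESSED_REDIS_KEY).to_i

    ids = ProjectCiCdSetting.configured_to_delete_old_pipelines
      .where(project_id: cursor..) # resume at the saved cursor (assumed semantics)
      .order(:project_id)
      .limit(PROJECTS_LIMIT)
      .pluck(:project_id)

    redis.rpush(QUEUE_KEY, ids) if ids.any?

    if ids.size == PROJECTS_LIMIT
      redis.set(LAST_PROCESSED_REDIS_KEY, ids.last) # more projects remain
    else
      redis.del(LAST_PROCESSED_REDIS_KEY)           # completed a full pass
    end
  end

  Ci::DestroyOldPipelinesWorker.perform_with_capacity
end
```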
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 10e9823f308..192e51e8a47 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -482,7 +482,8 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'BulkImports::RelationBatchExportWorker' => 6,
'BulkImports::RelationExportWorker' => 6,
'Ci::Runners::ExportUsageCsvWorker' => 3,
- 'AppSec::ContainerScanning::ScanImageWorker' => 3
+ 'AppSec::ContainerScanning::ScanImageWorker' => 3,
+ 'Ci::DestroyOldPipelinesWorker' => 0
}.merge(extra_retry_exceptions)
end