{
const findPublishMethod = () => wrapper.findComponent(PublishMethod);
const findCreatedDateText = () => wrapper.findByTestId('created-date');
const findTimeAgoTooltip = () => wrapper.findComponent(TimeagoTooltip);
+ const findBulkDeleteAction = () => wrapper.findComponent(GlFormCheckbox);
const mountComponent = ({
packageEntity = packageWithoutTags,
+ selected = false,
provide = defaultProvide,
} = {}) => {
wrapper = shallowMountExtended(PackagesListRow, {
@@ -53,6 +55,7 @@ describe('packages_list_row', () => {
},
propsData: {
packageEntity,
+ selected,
},
directives: {
GlTooltip: createMockDirective(),
@@ -117,14 +120,13 @@ describe('packages_list_row', () => {
});
});
- it('emits the packageToDelete event when the delete button is clicked', async () => {
+ it('emits the delete event when the delete button is clicked', async () => {
mountComponent({ packageEntity: packageWithoutTags });
findDeleteDropdown().vm.$emit('click');
await nextTick();
- expect(wrapper.emitted('packageToDelete')).toHaveLength(1);
- expect(wrapper.emitted('packageToDelete')[0]).toEqual([packageWithoutTags]);
+ expect(wrapper.emitted('delete')).toHaveLength(1);
});
});
@@ -151,6 +153,39 @@ describe('packages_list_row', () => {
});
});
+ describe('left action template', () => {
+ it('does not render checkbox if not permitted', () => {
+ mountComponent({
+ packageEntity: { ...packageWithoutTags, canDestroy: false },
+ });
+
+ expect(findBulkDeleteAction().exists()).toBe(false);
+ });
+
+ it('renders checkbox', () => {
+ mountComponent();
+
+ expect(findBulkDeleteAction().exists()).toBe(true);
+ expect(findBulkDeleteAction().attributes('checked')).toBeUndefined();
+ });
+
+ it('emits select when checked', () => {
+ mountComponent();
+
+ findBulkDeleteAction().vm.$emit('change');
+
+ expect(wrapper.emitted('select')).toHaveLength(1);
+ });
+
+ it('renders checkbox in selected state if selected', () => {
+ mountComponent({
+ selected: true,
+ });
+
+ expect(findBulkDeleteAction().attributes('checked')).toBe('true');
+ });
+ });
+
describe('secondary left info', () => {
it('has the package version', () => {
mountComponent();
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js
index c5b6b7da65b..7cc5bea0f7a 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js
@@ -99,10 +99,10 @@ describe('packages_list', () => {
it('shows the registry list with the right props', () => {
expect(findRegistryList().props()).toMatchObject({
+ title: '2 packages',
items: defaultProps.list,
pagination: defaultProps.pageInfo,
isLoading: false,
- hiddenDelete: true,
});
});
@@ -128,7 +128,7 @@ describe('packages_list', () => {
describe('when the user can destroy the package', () => {
beforeEach(async () => {
mountComponent();
- await findPackagesListRow().vm.$emit('packageToDelete', firstPackage);
+ await findPackagesListRow().vm.$emit('delete', firstPackage);
});
it('passes itemToBeDeleted to the modal', () => {
@@ -148,6 +148,27 @@ describe('packages_list', () => {
});
});
+ describe('when the user can bulk destroy packages', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('passes itemToBeDeleted to the modal when there is only one package', async () => {
+ await findRegistryList().vm.$emit('delete', [firstPackage]);
+
+ expect(findPackageListDeleteModal().props('itemToBeDeleted')).toStrictEqual(firstPackage);
+ expect(wrapper.emitted('delete')).toBeUndefined();
+ });
+
+ it('emits delete when there is more than one package', () => {
+ const items = [firstPackage, secondPackage];
+ findRegistryList().vm.$emit('delete', items);
+
+ expect(wrapper.emitted('delete')).toHaveLength(1);
+ expect(wrapper.emitted('delete')[0]).toEqual([items]);
+ });
+ });
+
describe('when an error package is present', () => {
beforeEach(() => {
mountComponent({ list: [firstPackage, errorPackage] });
@@ -210,7 +231,7 @@ describe('packages_list', () => {
beforeEach(() => {
eventSpy = jest.spyOn(Tracking, 'event');
mountComponent();
- findPackagesListRow().vm.$emit('packageToDelete', firstPackage);
+ findPackagesListRow().vm.$emit('delete', firstPackage);
return nextTick();
});
diff --git a/spec/frontend/packages_and_registries/package_registry/mock_data.js b/spec/frontend/packages_and_registries/package_registry/mock_data.js
index f247c83c85f..f36c5923532 100644
--- a/spec/frontend/packages_and_registries/package_registry/mock_data.js
+++ b/spec/frontend/packages_and_registries/package_registry/mock_data.js
@@ -294,6 +294,33 @@ export const packageDestroyMutation = () => ({
},
});
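+// successful bulk delete response: destroyPackages resolves with no errors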
+export const packagesDestroyMutation = () => ({
+ data: {
+ destroyPackages: {
+ errors: [],
+ },
+ },
+});
+
+export const packagesDestroyMutationError = () => ({
+ data: {
+ destroyPackages: null,
+ },
+ errors: [
+ {
+ message:
+ "The resource that you are attempting to access does not exist or you don't have permission to perform this action",
+ locations: [
+ {
+ line: 2,
+ column: 3,
+ },
+ ],
+ path: ['destroyPackages'],
+ },
+ ],
+});
+
export const packageDestroyMutationError = () => ({
data: {
destroyPackage: null,
@@ -320,6 +347,7 @@ export const packageDestroyFilesMutation = () => ({
},
},
});
+
export const packageDestroyFilesMutationError = () => ({
data: {
destroyPackageFiles: null,
diff --git a/spec/frontend/packages_and_registries/package_registry/pages/__snapshots__/list_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/pages/__snapshots__/list_spec.js.snap
index 7759c366796..c2fecf87428 100644
--- a/spec/frontend/packages_and_registries/package_registry/pages/__snapshots__/list_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/pages/__snapshots__/list_spec.js.snap
@@ -2,6 +2,8 @@
exports[`PackagesListApp renders 1`] = `
`;
diff --git a/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js b/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js
index 942eb0b3980..abdb875e839 100644
--- a/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js
@@ -1,4 +1,4 @@
-import { GlBanner, GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui';
+import { GlAlert, GlBanner, GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
@@ -6,12 +6,13 @@ import * as utils from '~/lib/utils/common_utils';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import { stubComponent } from 'helpers/stub_component';
import ListPage from '~/packages_and_registries/package_registry/pages/list.vue';
import PackageTitle from '~/packages_and_registries/package_registry/components/list/package_title.vue';
import PackageSearch from '~/packages_and_registries/package_registry/components/list/package_search.vue';
import OriginalPackageList from '~/packages_and_registries/package_registry/components/list/packages_list.vue';
import DeletePackage from '~/packages_and_registries/package_registry/components/functional/delete_package.vue';
-
+import DeleteModal from '~/packages_and_registries/package_registry/components/delete_modal.vue';
import {
PROJECT_RESOURCE_TYPE,
GROUP_RESOURCE_TYPE,
@@ -19,11 +20,19 @@ import {
HIDE_PACKAGE_MIGRATION_SURVEY_COOKIE,
EMPTY_LIST_HELP_URL,
PACKAGE_HELP_URL,
+ DELETE_PACKAGES_ERROR_MESSAGE,
+ DELETE_PACKAGES_SUCCESS_MESSAGE,
} from '~/packages_and_registries/package_registry/constants';
import getPackagesQuery from '~/packages_and_registries/package_registry/graphql/queries/get_packages.query.graphql';
-
-import { packagesListQuery, packageData, pagination } from '../mock_data';
+import destroyPackagesMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_packages.mutation.graphql';
+import {
+ packagesListQuery,
+ packageData,
+ pagination,
+ packagesDestroyMutation,
+ packagesDestroyMutationError,
+} from '../mock_data';
jest.mock('~/flash');
@@ -49,20 +58,26 @@ describe('PackagesListApp', () => {
filters: { packageName: 'foo', packageType: 'CONAN' },
};
+ const findAlert = () => wrapper.findComponent(GlAlert);
const findBanner = () => wrapper.findComponent(GlBanner);
const findPackageTitle = () => wrapper.findComponent(PackageTitle);
const findSearch = () => wrapper.findComponent(PackageSearch);
const findListComponent = () => wrapper.findComponent(PackageList);
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
const findDeletePackage = () => wrapper.findComponent(DeletePackage);
+ const findDeletePackagesModal = () => wrapper.findComponent(DeleteModal);
const mountComponent = ({
resolver = jest.fn().mockResolvedValue(packagesListQuery()),
+ mutationResolver,
provide = defaultProvide,
} = {}) => {
Vue.use(VueApollo);
- const requestHandlers = [[getPackagesQuery, resolver]];
+ const requestHandlers = [
+ [getPackagesQuery, resolver],
+ [destroyPackagesMutation, mutationResolver],
+ ];
apolloProvider = createMockApollo(requestHandlers);
wrapper = shallowMountExtended(ListPage, {
@@ -76,6 +91,11 @@ describe('PackagesListApp', () => {
GlLink,
PackageList,
DeletePackage,
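+ // stub the modal so its show() is a spy and opening it is a no-op in tests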
+ DeleteModal: stubComponent(DeleteModal, {
+ methods: {
+ show: jest.fn(),
+ },
+ }),
},
});
};
@@ -348,4 +368,62 @@ describe('PackagesListApp', () => {
expect(findListComponent().props('isLoading')).toBe(false);
});
});
+
+ describe('bulk delete package', () => {
+ const items = [{ id: '1' }, { id: '2' }];
+
+ it('passes itemsToBeDeleted to the modal when the registry list emits delete', async () => {
+ mountComponent();
+
+ await waitForFirstRequest();
+
+ findListComponent().vm.$emit('delete', items);
+
+ await waitForPromises();
+
+ expect(findDeletePackagesModal().props('itemsToBeDeleted')).toEqual(items);
+ });
+
+ it('calls mutation with the right values and shows success alert', async () => {
+ const mutationResolver = jest.fn().mockResolvedValue(packagesDestroyMutation());
+ mountComponent({
+ mutationResolver,
+ });
+
+ await waitForFirstRequest();
+
+ findListComponent().vm.$emit('delete', items);
+
+ findDeletePackagesModal().vm.$emit('confirm');
+
+ expect(mutationResolver).toHaveBeenCalledWith({
+ ids: items.map((item) => item.id),
+ });
+
+ await waitForPromises();
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().props('variant')).toEqual('success');
+ expect(findAlert().text()).toMatchInterpolatedText(DELETE_PACKAGES_SUCCESS_MESSAGE);
+ });
+
+ it('on error shows danger alert', async () => {
+ const mutationResolver = jest.fn().mockResolvedValue(packagesDestroyMutationError());
+ mountComponent({
+ mutationResolver,
+ });
+
+ await waitForFirstRequest();
+
+ findListComponent().vm.$emit('delete', items);
+
+ findDeletePackagesModal().vm.$emit('confirm');
+
+ await waitForPromises();
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().props('variant')).toEqual('danger');
+ expect(findAlert().text()).toMatchInterpolatedText(DELETE_PACKAGES_ERROR_MESSAGE);
+ });
+ });
});
diff --git a/spec/graphql/mutations/commits/create_spec.rb b/spec/graphql/mutations/commits/create_spec.rb
index fd0c2c46b2e..2c452410cca 100644
--- a/spec/graphql/mutations/commits/create_spec.rb
+++ b/spec/graphql/mutations/commits/create_spec.rb
@@ -179,7 +179,7 @@ RSpec.describe Mutations::Commits::Create do
it 'returns errors' do
expect(mutated_commit).to be_nil
- expect(subject[:errors].to_s).to match(/3:UserCommitFiles: empty CommitMessage/)
+ expect(subject[:errors].to_s).to match(/empty CommitMessage/)
end
end
diff --git a/spec/initializers/memory_watchdog_spec.rb b/spec/initializers/memory_watchdog_spec.rb
index 5c3d020016a..92834c889c2 100644
--- a/spec/initializers/memory_watchdog_spec.rb
+++ b/spec/initializers/memory_watchdog_spec.rb
@@ -3,6 +3,24 @@
require 'fast_spec_helper'
RSpec.describe 'memory watchdog' do
+ shared_examples 'starts configured watchdog' do |configure_monitor_method|
+ shared_examples 'configures and starts watchdog' do
+ it "correctly configures and starts watchdog", :aggregate_failures do
+ expect(Gitlab::Memory::Watchdog::Configurator).to receive(configure_monitor_method)
+
+ expect(Gitlab::Memory::Watchdog).to receive(:new).and_return(watchdog)
+ expect(Gitlab::BackgroundTask).to receive(:new).with(watchdog).and_return(background_task)
+ expect(background_task).to receive(:start)
+ expect(Gitlab::Cluster::LifecycleEvents).to receive(:on_worker_start).and_yield
+
+ run_initializer
+ end
+ end
+ end
+
+ let(:watchdog) { instance_double(Gitlab::Memory::Watchdog) }
+ let(:background_task) { instance_double(Gitlab::BackgroundTask) }
+
subject(:run_initializer) do
load rails_root_join('config/initializers/memory_watchdog.rb')
end
@@ -15,10 +33,6 @@ RSpec.describe 'memory watchdog' do
end
context 'when runtime is an application' do
- let(:watchdog) { instance_double(Gitlab::Memory::Watchdog) }
- let(:background_task) { instance_double(Gitlab::BackgroundTask) }
- let(:logger) { Gitlab::AppLogger }
-
before do
allow(Gitlab::Runtime).to receive(:application?).and_return(true)
end
@@ -29,21 +43,6 @@ RSpec.describe 'memory watchdog' do
run_initializer
end
- shared_examples 'starts configured watchdog' do |configure_monitor_method|
- shared_examples 'configures and starts watchdog' do
- it "correctly configures and starts watchdog", :aggregate_failures do
- expect(Gitlab::Memory::Watchdog::Configurator).to receive(configure_monitor_method)
-
- expect(Gitlab::Memory::Watchdog).to receive(:new).and_return(watchdog)
- expect(Gitlab::BackgroundTask).to receive(:new).with(watchdog).and_return(background_task)
- expect(background_task).to receive(:start)
- expect(Gitlab::Cluster::LifecycleEvents).to receive(:on_worker_start).and_yield
-
- run_initializer
- end
- end
- end
-
context 'when puma' do
before do
allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
@@ -92,10 +91,24 @@ RSpec.describe 'memory watchdog' do
allow(Gitlab::Runtime).to receive(:application?).and_return(true)
end
- it 'does not register life-cycle hook' do
- expect(Gitlab::Cluster::LifecycleEvents).not_to receive(:on_worker_start)
+ context 'when puma' do
+ before do
+ allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
+ end
- run_initializer
+ it_behaves_like 'starts configured watchdog', :configure_for_puma
+ end
+
+ context 'when sidekiq' do
+ before do
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
+ end
+
+ it 'does not register life-cycle hook' do
+ expect(Gitlab::Cluster::LifecycleEvents).not_to receive(:on_worker_start)
+
+ run_initializer
+ end
end
end
end
diff --git a/spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb b/spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb
new file mode 100644
index 00000000000..cb13a711857
--- /dev/null
+++ b/spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'puma_worker_killer'
+
+RSpec.describe Gitlab::Cluster::PumaWorkerKillerInitializer do
+ describe '.start' do
+ context 'when GITLAB_MEMORY_WATCHDOG_ENABLED is false' do
+ before do
+ stub_env('GITLAB_MEMORY_WATCHDOG_ENABLED', 'false')
+ end
+
+ it 'configures and starts PumaWorkerKiller' do
+ expect(PumaWorkerKiller).to receive(:config)
+ expect(PumaWorkerKiller).to receive(:start)
+
+ described_class.start({})
+ end
+ end
+
+ context 'when GITLAB_MEMORY_WATCHDOG_ENABLED is not set' do
+ it 'does not configure or start PumaWorkerKiller' do
+ expect(PumaWorkerKiller).not_to receive(:config)
+ expect(PumaWorkerKiller).not_to receive(:start)
+
+ described_class.start({})
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb b/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb
index cd49544125d..1eb077fe6ca 100644
--- a/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb
@@ -60,6 +60,7 @@ RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis
conn = model.connection
expect(::Gitlab::Database::LoadBalancing::Logger).to receive(:warn).with(hash_including(event: :transaction_leak))
+ expect(::Gitlab::Database::LoadBalancing::Logger).to receive(:warn).with(hash_including(event: :read_write_retry))
conn.transaction do
expect(conn).to be_transaction_open
@@ -78,6 +79,8 @@ RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis
expect(::Gitlab::Database::LoadBalancing::Logger)
.not_to receive(:warn).with(hash_including(event: :transaction_leak))
+ expect(::Gitlab::Database::LoadBalancing::Logger)
+ .to receive(:warn).with(hash_including(event: :read_write_retry))
expect(conn).not_to be_transaction_open
@@ -109,6 +112,8 @@ RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis
it 'retries when not in a transaction' do
expect(::Gitlab::Database::LoadBalancing::Logger)
.not_to receive(:warn).with(hash_including(event: :transaction_leak))
+ expect(::Gitlab::Database::LoadBalancing::Logger)
+ .to receive(:warn).with(hash_including(event: :read_write_retry))
expect { execute(model.connection) }.not_to raise_error
end
diff --git a/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb b/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb
index 4476b4123ee..6a409762599 100644
--- a/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb
@@ -10,7 +10,9 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:issuable) { create(:issue, project: project) }
- let!(:label) { create(:label, project: project) }
+ let(:label) { create(:label, project: project) }
+ let(:label_title) { label.title }
+ let(:label_id) { label.id }
let(:issue_event) do
Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
@@ -18,7 +20,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
'actor' => { 'id' => user.id, 'login' => user.username },
'event' => event_type,
'commit_id' => nil,
- 'label_title' => label.title,
+ 'label_title' => label_title,
'created_at' => '2022-04-26 18:30:53 UTC',
'issue' => { 'number' => issuable.iid, pull_request: issuable.is_a?(MergeRequest) }
)
@@ -27,7 +29,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
let(:event_attrs) do
{
user_id: user.id,
- label_id: label.id,
+ label_id: label_id,
created_at: issue_event.created_at
}.stringify_keys
end
@@ -42,7 +44,6 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
end
before do
- allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(label.id)
allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
allow(finder).to receive(:database_id).and_return(issuable.id)
end
@@ -52,16 +53,35 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
end
context 'with Issue' do
- context 'when importing a labeled event' do
- let(:event_type) { 'labeled' }
- let(:expected_event_attrs) { event_attrs.merge(issue_id: issuable.id, action: 'add') }
+ context 'when importing event with associated label' do
+ before do
+ allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(label.id)
+ end
- it_behaves_like 'new event'
+ context 'when importing a labeled event' do
+ let(:event_type) { 'labeled' }
+ let(:expected_event_attrs) { event_attrs.merge(issue_id: issuable.id, action: 'add') }
+
+ it_behaves_like 'new event'
+ end
+
+ context 'when importing an unlabeled event' do
+ let(:event_type) { 'unlabeled' }
+ let(:expected_event_attrs) { event_attrs.merge(issue_id: issuable.id, action: 'remove') }
+
+ it_behaves_like 'new event'
+ end
end
- context 'when importing an unlabeled event' do
- let(:event_type) { 'unlabeled' }
- let(:expected_event_attrs) { event_attrs.merge(issue_id: issuable.id, action: 'remove') }
+ context 'when importing event without associated label' do
+ before do
+ allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(nil)
+ end
+
+ let(:label_title) { 'deleted_label' }
+ let(:label_id) { nil }
+ let(:event_type) { 'labeled' }
+ let(:expected_event_attrs) { event_attrs.merge(issue_id: issuable.id, action: 'add') }
it_behaves_like 'new event'
end
@@ -70,18 +90,37 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
context 'with MergeRequest' do
let(:issuable) { create(:merge_request, source_project: project, target_project: project) }
- context 'when importing a labeled event' do
+ context 'when importing event with associated label' do
+ before do
+ allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(label.id)
+ end
+
+ context 'when importing a labeled event' do
+ let(:event_type) { 'labeled' }
+ let(:expected_event_attrs) { event_attrs.merge(merge_request_id: issuable.id, action: 'add') }
+
+ it_behaves_like 'new event'
+ end
+
+ context 'when importing an unlabeled event' do
+ let(:event_type) { 'unlabeled' }
+ let(:expected_event_attrs) { event_attrs.merge(merge_request_id: issuable.id, action: 'remove') }
+
+ it_behaves_like 'new event'
+ end
+ end
+
+ context 'when importing event without associated label' do
+ before do
+ allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(nil)
+ end
+
+ let(:label_title) { 'deleted_label' }
+ let(:label_id) { nil }
let(:event_type) { 'labeled' }
let(:expected_event_attrs) { event_attrs.merge(merge_request_id: issuable.id, action: 'add') }
it_behaves_like 'new event'
end
-
- context 'when importing an unlabeled event' do
- let(:event_type) { 'unlabeled' }
- let(:expected_event_attrs) { event_attrs.merge(merge_request_id: issuable.id, action: 'remove') }
-
- it_behaves_like 'new event'
- end
end
end
diff --git a/spec/lib/gitlab/memory/watchdog/configurator_spec.rb b/spec/lib/gitlab/memory/watchdog/configurator_spec.rb
index 2c5fae5736d..e6f2d57e9e6 100644
--- a/spec/lib/gitlab/memory/watchdog/configurator_spec.rb
+++ b/spec/lib/gitlab/memory/watchdog/configurator_spec.rb
@@ -2,6 +2,7 @@
require 'fast_spec_helper'
require 'prometheus/client'
+require 'sidekiq'
require_dependency 'gitlab/cluster/lifecycle_events'
RSpec.describe Gitlab::Memory::Watchdog::Configurator do
@@ -184,4 +185,15 @@ RSpec.describe Gitlab::Memory::Watchdog::Configurator do
end
end
# rubocop: enable RSpec/VerifiedDoubles
+
+ describe '.configure_for_sidekiq' do
+ let(:logger) { ::Sidekiq.logger }
+
+ subject(:configurator) { described_class.configure_for_sidekiq }
+
+ it_behaves_like 'as configurator',
+ Gitlab::Memory::Watchdog::TermProcessHandler,
+ 'SIDEKIQ_MEMORY_KILLER_CHECK_INTERVAL',
+ 3
+ end
end
diff --git a/spec/models/factories_spec.rb b/spec/models/factories_spec.rb
index 072f5496bca..65b993cca7f 100644
--- a/spec/models/factories_spec.rb
+++ b/spec/models/factories_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe 'factories', :saas do
[:ci_job_artifact, :raw],
[:ci_job_artifact, :gzip],
[:ci_job_artifact, :correct_checksum],
+ [:dependency_proxy_blob, :remote_store],
[:environment, :non_playable],
[:composer_cache_file, :object_storage],
[:debian_project_component_file, :object_storage],
@@ -50,6 +51,7 @@ RSpec.describe 'factories', :saas do
[:ee_ci_job_artifact, :v2],
[:ee_ci_job_artifact, :v2_1],
[:geo_ci_secure_file_state, any],
+ [:geo_dependency_proxy_blob_state, any],
[:geo_event_log, :geo_event],
[:geo_job_artifact_state, any],
[:geo_lfs_object_state, any],
diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb
index 5edea13afa4..26def474b88 100644
--- a/spec/services/search_service_spec.rb
+++ b/spec/services/search_service_spec.rb
@@ -399,159 +399,7 @@ RSpec.describe SearchService do
end
end
- context 'redacting search results' do
- let(:search) { 'anything' }
-
- subject(:result) { search_service.search_objects }
-
- shared_examples "redaction limits N+1 queries" do |limit:|
- it 'does not exceed the query limit' do
- # issuing the query to remove the data loading call
- unredacted_results.to_a
-
- # only the calls from the redaction are left
- query = ActiveRecord::QueryRecorder.new { result }
-
- # these are the project authorization calls, which are not preloaded
- expect(query.count).to be <= limit
- end
- end
-
- def found_blob(project)
- Gitlab::Search::FoundBlob.new(project: project)
- end
-
- def found_wiki_page(project)
- Gitlab::Search::FoundWikiPage.new(found_blob(project))
- end
-
- before do
- expect(search_service)
- .to receive(:search_results)
- .and_return(double('search results', objects: unredacted_results))
- end
-
- def ar_relation(klass, *objects)
- klass.id_in(objects.map(&:id))
- end
-
- def kaminari_array(*objects)
- Kaminari.paginate_array(objects).page(1).per(20)
- end
-
- context 'issues' do
- let(:readable) { create(:issue, project: accessible_project) }
- let(:unreadable) { create(:issue, project: inaccessible_project) }
- let(:unredacted_results) { ar_relation(Issue, readable, unreadable) }
- let(:scope) { 'issues' }
-
- it 'redacts the inaccessible issue' do
- expect(result).to contain_exactly(readable)
- end
- end
-
- context 'notes' do
- let(:readable) { create(:note_on_commit, project: accessible_project) }
- let(:unreadable) { create(:note_on_commit, project: inaccessible_project) }
- let(:unredacted_results) { ar_relation(Note, readable, unreadable) }
- let(:scope) { 'notes' }
-
- it 'redacts the inaccessible note' do
- expect(result).to contain_exactly(readable)
- end
- end
-
- context 'merge_requests' do
- let(:readable) { create(:merge_request, source_project: accessible_project, author: user) }
- let(:unreadable) { create(:merge_request, source_project: inaccessible_project) }
- let(:unredacted_results) { ar_relation(MergeRequest, readable, unreadable) }
- let(:scope) { 'merge_requests' }
-
- it 'redacts the inaccessible merge request' do
- expect(result).to contain_exactly(readable)
- end
-
- context 'with :with_api_entity_associations' do
- let(:unredacted_results) { ar_relation(MergeRequest.with_api_entity_associations, readable, unreadable) }
-
- it_behaves_like "redaction limits N+1 queries", limit: 8
- end
- end
-
- context 'project repository blobs' do
- let(:readable) { found_blob(accessible_project) }
- let(:unreadable) { found_blob(inaccessible_project) }
- let(:unredacted_results) { kaminari_array(readable, unreadable) }
- let(:scope) { 'blobs' }
-
- it 'redacts the inaccessible blob' do
- expect(result).to contain_exactly(readable)
- end
- end
-
- context 'project wiki blobs' do
- let(:readable) { found_wiki_page(accessible_project) }
- let(:unreadable) { found_wiki_page(inaccessible_project) }
- let(:unredacted_results) { kaminari_array(readable, unreadable) }
- let(:scope) { 'wiki_blobs' }
-
- it 'redacts the inaccessible blob' do
- expect(result).to contain_exactly(readable)
- end
- end
-
- context 'project snippets' do
- let(:readable) { create(:project_snippet, project: accessible_project) }
- let(:unreadable) { create(:project_snippet, project: inaccessible_project) }
- let(:unredacted_results) { ar_relation(ProjectSnippet, readable, unreadable) }
- let(:scope) { 'snippet_titles' }
-
- it 'redacts the inaccessible snippet' do
- expect(result).to contain_exactly(readable)
- end
-
- context 'with :with_api_entity_associations' do
- it_behaves_like "redaction limits N+1 queries", limit: 14
- end
- end
-
- context 'personal snippets' do
- let(:readable) { create(:personal_snippet, :private, author: user) }
- let(:unreadable) { create(:personal_snippet, :private) }
- let(:unredacted_results) { ar_relation(PersonalSnippet, readable, unreadable) }
- let(:scope) { 'snippet_titles' }
-
- it 'redacts the inaccessible snippet' do
- expect(result).to contain_exactly(readable)
- end
-
- context 'with :with_api_entity_associations' do
- it_behaves_like "redaction limits N+1 queries", limit: 4
- end
- end
-
- context 'commits' do
- let(:readable) { accessible_project.commit }
- let(:unreadable) { inaccessible_project.commit }
- let(:unredacted_results) { kaminari_array(readable, unreadable) }
- let(:scope) { 'commits' }
-
- it 'redacts the inaccessible commit' do
- expect(result).to contain_exactly(readable)
- end
- end
-
- context 'users' do
- let(:other_user) { create(:user) }
- let(:unredacted_results) { ar_relation(User, user, other_user) }
- let(:scope) { 'users' }
-
- it 'passes the users through' do
- # Users are always visible to everyone
- expect(result).to contain_exactly(user, other_user)
- end
- end
- end
+ it_behaves_like 'a redacted search results'
end
describe '#valid_request?' do
diff --git a/spec/support/helpers/stub_configuration.rb b/spec/support/helpers/stub_configuration.rb
index f41457d2420..24c768258a1 100644
--- a/spec/support/helpers/stub_configuration.rb
+++ b/spec/support/helpers/stub_configuration.rb
@@ -38,6 +38,10 @@ module StubConfiguration
allow(Rails.application.routes).to receive(:default_url_options).and_return(url_options)
end
+ def stub_dependency_proxy_setting(messages)
+ allow(Gitlab.config.dependency_proxy).to receive_messages(to_settings(messages))
+ end
+
def stub_gravatar_setting(messages)
allow(Gitlab.config.gravatar).to receive_messages(to_settings(messages))
end
diff --git a/spec/support/shared_examples/features/search/redacted_search_results_shared_examples.rb b/spec/support/shared_examples/features/search/redacted_search_results_shared_examples.rb
new file mode 100644
index 00000000000..4d242d0e719
--- /dev/null
+++ b/spec/support/shared_examples/features/search/redacted_search_results_shared_examples.rb
@@ -0,0 +1,304 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a redacted search results' do
+ let_it_be(:user) { create(:user) }
+
+ let_it_be(:accessible_group) { create(:group, :private) }
+ let_it_be(:accessible_project) { create(:project, :repository, :private, name: 'accessible_project') }
+
+ let_it_be(:group_member) { create(:group_member, group: accessible_group, user: user) }
+
+ let_it_be(:inaccessible_group) { create(:group, :private) }
+ let_it_be(:inaccessible_project) { create(:project, :repository, :private, name: 'inaccessible_project') }
+
+ let(:search) { 'anything' }
+
+ subject(:result) { search_service.search_objects }
+
+ def found_blob(project)
+ Gitlab::Search::FoundBlob.new(project: project)
+ end
+
+ def found_wiki_page(project)
+ Gitlab::Search::FoundWikiPage.new(found_blob(project))
+ end
+
+ def ar_relation(klass, *objects)
+ klass.id_in(objects.map(&:id))
+ end
+
+ def kaminari_array(*objects)
+ Kaminari.paginate_array(objects).page(1).per(20)
+ end
+
+ before do
+ accessible_project.add_maintainer(user)
+
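+ # stub the search backend so each context controls exactly which records come back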
+ allow(search_service)
+ .to receive_message_chain(:search_results, :objects)
+ .and_return(unredacted_results)
+ end
+
+ context 'for issues' do
+ let(:readable) { create(:issue, project: accessible_project) }
+ let(:unreadable) { create(:issue, project: inaccessible_project) }
+ let(:unredacted_results) { ar_relation(Issue, readable, unreadable) }
+ let(:scope) { 'issues' }
+
+ it 'redacts the inaccessible issue' do
+ expect(search_service.send(:logger))
+ .to receive(:error)
+ .with(hash_including(
+ message: "redacted_search_results",
+ current_user_id: user.id,
+ query: search,
+ filtered: array_including(
+ [
+ { class_name: 'Issue', id: unreadable.id, ability: :read_issue }
+ ])))
+
+ expect(result).to contain_exactly(readable)
+ end
+ end
+
+ context 'for notes' do
+ let(:readable_merge_request) do
+ create(:merge_request_with_diffs, target_project: accessible_project, source_project: accessible_project)
+ end
+
+ let(:readable_note_on_commit) { create(:note_on_commit, project: accessible_project) }
+ let(:readable_diff_note) { create(:diff_note_on_commit, project: accessible_project) }
+ let(:readable_note_on_mr) do
+ create(:discussion_note_on_merge_request, noteable: readable_merge_request, project: accessible_project)
+ end
+
+ let(:readable_diff_note_on_mr) do
+ create(:diff_note_on_merge_request, noteable: readable_merge_request, project: accessible_project)
+ end
+
+ let(:readable_note_on_project_snippet) do
+ create(:note_on_project_snippet, noteable: readable_merge_request, project: accessible_project)
+ end
+
+ let(:unreadable_merge_request) do
+ create(:merge_request_with_diffs, target_project: inaccessible_project, source_project: inaccessible_project)
+ end
+
+ let(:unreadable_note_on_commit) { create(:note_on_commit, project: inaccessible_project) }
+ let(:unreadable_diff_note) { create(:diff_note_on_commit, project: inaccessible_project) }
+ let(:unreadable_note_on_mr) do
+ create(:discussion_note_on_merge_request, noteable: unreadable_merge_request, project: inaccessible_project)
+ end
+
+ let(:unreadable_note_on_project_snippet) do
+ create(:note_on_project_snippet, noteable: unreadable_merge_request, project: inaccessible_project)
+ end
+
+ let(:unredacted_results) do
+ ar_relation(Note,
+ readable_note_on_commit,
+ readable_diff_note,
+ readable_note_on_mr,
+ readable_diff_note_on_mr,
+ readable_note_on_project_snippet,
+ unreadable_note_on_commit,
+ unreadable_diff_note,
+ unreadable_note_on_mr,
+ unreadable_note_on_project_snippet)
+ end
+
+ let(:scope) { 'notes' }
+
+ it 'redacts the inaccessible notes' do
+ expect(search_service.send(:logger))
+ .to receive(:error)
+ .with(hash_including(
+ message: "redacted_search_results",
+ current_user_id: user.id,
+ query: search,
+ filtered: array_including(
+ [
+ { class_name: 'Note', id: unreadable_note_on_commit.id, ability: :read_note },
+ { class_name: 'DiffNote', id: unreadable_diff_note.id, ability: :read_note },
+ { class_name: 'DiscussionNote', id: unreadable_note_on_mr.id, ability: :read_note },
+ { class_name: 'Note', id: unreadable_note_on_project_snippet.id, ability: :read_note }
+ ])))
+
+ expect(result).to contain_exactly(readable_note_on_commit,
+ readable_diff_note,
+ readable_note_on_mr,
+ readable_diff_note_on_mr,
+ readable_note_on_project_snippet)
+ end
+ end
+
+ context 'for merge_requests' do
+ let(:readable) { create(:merge_request, source_project: accessible_project) }
+ let(:unreadable) { create(:merge_request, source_project: inaccessible_project) }
+ let(:unredacted_results) { ar_relation(MergeRequest, readable, unreadable) }
+ let(:scope) { 'merge_requests' }
+
+ it 'redacts the inaccessible merge request' do
+ expect(search_service.send(:logger))
+ .to receive(:error)
+ .with(hash_including(
+ message: "redacted_search_results",
+ current_user_id: user.id,
+ query: search,
+ filtered: array_including(
+ [
+ { class_name: 'MergeRequest', id: unreadable.id, ability: :read_merge_request }
+ ])))
+
+ expect(result).to contain_exactly(readable)
+ end
+
+ context 'with :with_api_entity_associations' do
+ let(:unredacted_results) { ar_relation(MergeRequest.with_api_entity_associations, readable, unreadable) }
+
+ it_behaves_like "redaction limits N+1 queries", limit: 8
+ end
+ end
+
+ context 'for blobs' do
+ let(:readable) { found_blob(accessible_project) }
+ let(:unreadable) { found_blob(inaccessible_project) }
+ let(:unredacted_results) { kaminari_array(readable, unreadable) }
+ let(:scope) { 'blobs' }
+
+ it 'redacts the inaccessible blob' do
+ expect(search_service.send(:logger))
+ .to receive(:error)
+ .with(hash_including(
+ message: "redacted_search_results",
+ current_user_id: user.id,
+ query: search,
+ filtered: array_including(
+ [
+ { class_name: 'Gitlab::Search::FoundBlob', id: unreadable.id, ability: :read_blob }
+ ])))
+
+ expect(result).to contain_exactly(readable)
+ end
+ end
+
+ context 'for wiki blobs' do
+ let(:readable) { found_wiki_page(accessible_project) }
+ let(:unreadable) { found_wiki_page(inaccessible_project) }
+ let(:unredacted_results) { kaminari_array(readable, unreadable) }
+ let(:scope) { 'wiki_blobs' }
+
+ it 'redacts the inaccessible blob' do
+ expect(search_service.send(:logger))
+ .to receive(:error)
+ .with(hash_including(
+ message: "redacted_search_results",
+ current_user_id: user.id,
+ query: search,
+ filtered: array_including(
+ [
+ { class_name: 'Gitlab::Search::FoundWikiPage', id: unreadable.id, ability: :read_wiki_page }
+ ])))
+
+ expect(result).to contain_exactly(readable)
+ end
+ end
+
+ context 'for project snippets' do
+ let(:readable) { create(:project_snippet, project: accessible_project) }
+ let(:unreadable) { create(:project_snippet, project: inaccessible_project) }
+ let(:unredacted_results) { ar_relation(ProjectSnippet, readable, unreadable) }
+ let(:scope) { 'snippet_titles' }
+
+ it 'redacts the inaccessible snippet' do
+ expect(search_service.send(:logger))
+ .to receive(:error)
+ .with(hash_including(
+ message: "redacted_search_results",
+ current_user_id: user.id,
+ query: search,
+ filtered: array_including(
+ [
+ { class_name: 'ProjectSnippet', id: unreadable.id, ability: :read_snippet }
+ ])))
+
+ expect(result).to contain_exactly(readable)
+ end
+
+ context 'with :with_api_entity_associations' do
+ it_behaves_like "redaction limits N+1 queries", limit: 14
+ end
+ end
+
+ context 'for personal snippets' do
+ let(:readable) { create(:personal_snippet, :private, author: user) }
+ let(:unreadable) { create(:personal_snippet, :private) }
+ let(:unredacted_results) { ar_relation(PersonalSnippet, readable, unreadable) }
+ let(:scope) { 'snippet_titles' }
+
+ it 'redacts the inaccessible snippet' do
+ expect(search_service.send(:logger))
+ .to receive(:error)
+ .with(hash_including(
+ message: "redacted_search_results",
+ current_user_id: user.id,
+ query: search,
+ filtered: array_including(
+ [
+ { class_name: 'PersonalSnippet', id: unreadable.id, ability: :read_snippet }
+ ])))
+
+ expect(result).to contain_exactly(readable)
+ end
+
+ context 'with :with_api_entity_associations' do
+ it_behaves_like "redaction limits N+1 queries", limit: 4
+ end
+ end
+
+ context 'for commits' do
+ let(:readable) { accessible_project.commit }
+ let(:unreadable) { inaccessible_project.commit }
+ let(:unredacted_results) { kaminari_array(readable, unreadable) }
+ let(:scope) { 'commits' }
+
+ it 'redacts the inaccessible commit' do
+ expect(search_service.send(:logger))
+ .to receive(:error)
+ .with(hash_including(
+ message: "redacted_search_results",
+ current_user_id: user.id,
+ query: search,
+ filtered: array_including(
+ [
+ { class_name: 'Commit', id: unreadable.id, ability: :read_commit }
+ ])))
+
+ expect(result).to contain_exactly(readable)
+ end
+ end
+
+ context 'for users' do
+ let(:other_user) { create(:user) }
+ let(:unredacted_results) { ar_relation(User, user, other_user) }
+ let(:scope) { 'users' }
+
+ it 'passes the users through' do
+ # Users are always visible to everyone
+ expect(result).to contain_exactly(user, other_user)
+ end
+ end
+end
+
+RSpec.shared_examples "redaction limits N+1 queries" do |limit:|
+ it 'does not exceed the query limit' do
+ # issuing the query to remove the data loading call
+ unredacted_results.to_a
+
+ # only the calls from the redaction are left
+ query = ActiveRecord::QueryRecorder.new { result }
+
+ # these are the project authorization calls, which are not preloaded
+ expect(query.count).to be <= limit
+ end
+end
diff --git a/spec/views/search/show.html.haml_spec.rb b/spec/views/search/show.html.haml_spec.rb
index 565dadd64fe..5f9c6c65a08 100644
--- a/spec/views/search/show.html.haml_spec.rb
+++ b/spec/views/search/show.html.haml_spec.rb
@@ -11,10 +11,10 @@ RSpec.describe 'search/show' do
stub_template "search/_results.html.haml" => 'Results Partial'
end
- context 'feature flag enabled' do
+ context 'search_page_vertical_nav feature flag enabled' do
before do
- allow(self).to receive(:current_user).and_return(user)
- @search_term = search_term
+ allow(view).to receive(:current_user) { user }
+ assign(:search_term, search_term)
render
end
@@ -29,11 +29,11 @@ RSpec.describe 'search/show' do
end
end
- context 'feature flag disabled' do
+ context 'search_page_vertical_nav feature flag disabled' do
before do
stub_feature_flags(search_page_vertical_nav: false)
- @search_term = search_term
+ assign(:search_term, search_term)
render
end
diff --git a/yarn.lock b/yarn.lock
index 27a3f834065..3e1e1b88281 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -4992,10 +4992,10 @@ dompurify@2.3.8:
resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.3.8.tgz#224fe9ae57d7ebd9a1ae1ac18c1c1ca3f532226f"
integrity sha512-eVhaWoVibIzqdGYjwsBWodIQIaXFSB+cKDf4cfxLMsK0xiud6SE+/WCVx/Xw/UwQsa4cS3T2eITcdtmTg2UKcw==
-dompurify@^2.4.0:
- version "2.4.0"
- resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.4.0.tgz#c9c88390f024c2823332615c9e20a453cf3825dd"
- integrity sha512-Be9tbQMZds4a3C6xTmz68NlMfeONA//4dOavl/1rNw50E+/QO0KVpbcU0PcaW0nsQxurXls9ZocqFxk8R2mWEA==
+dompurify@^2.4.0, dompurify@^2.4.1:
+ version "2.4.1"
+ resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.4.1.tgz#f9cb1a275fde9af6f2d0a2644ef648dd6847b631"
+ integrity sha512-ewwFzHzrrneRjxzmK6oVz/rZn9VWspGFRDb4/rRtIsM1n36t9AKma/ye8syCpcw+XJ25kOK/hOG7t1j2I2yBqA==
domutils@^2.5.2, domutils@^2.6.0:
version "2.6.0"