diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 35064e89941..ef28900bc08 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -220,7 +220,7 @@ variables:
DOCS_REVIEW_APPS_DOMAIN: "docs.gitlab-review.app"
DOCS_GITLAB_REPO_SUFFIX: "ee"
- REVIEW_APPS_IMAGE: "${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images/debian-bookworm-ruby-3.0:gcloud-383-kubectl-1.26-helm-3.9"
+ REVIEW_APPS_IMAGE: "${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images/debian-bookworm-ruby-3.0:gcloud-383-kubectl-1.27-helm-3.9"
REVIEW_APPS_DOMAIN: "gitlab-review.app"
REVIEW_APPS_GCP_PROJECT: "gitlab-review-apps"
REVIEW_APPS_GCP_REGION: "us-central1"
diff --git a/.gitlab/CODEOWNERS b/.gitlab/CODEOWNERS
index 0a9f5bfad43..f55066fcddf 100644
--- a/.gitlab/CODEOWNERS
+++ b/.gitlab/CODEOWNERS
@@ -7,7 +7,7 @@
.gitlab/CODEOWNERS @gitlab-org/development-leaders @gitlab-org/tw-leadership
## Allows release tooling and Gitaly team members to update the Gitaly Version
-GITALY_SERVER_VERSION @project_278964_bot6 @gitlab-org/maintainers/rails-backend @gitlab-org/delivery @gl-gitaly
+/GITALY_SERVER_VERSION @project_278964_bot6 @gitlab-org/maintainers/rails-backend @gitlab-org/delivery @gl-gitaly
## Files that are excluded from required approval
## These rules override the * rule above, so that changes to docs and templates
diff --git a/.gitlab/ci/test-on-cng/main.gitlab-ci.yml b/.gitlab/ci/test-on-cng/main.gitlab-ci.yml
index a830dcb0343..0c07556322c 100644
--- a/.gitlab/ci/test-on-cng/main.gitlab-ci.yml
+++ b/.gitlab/ci/test-on-cng/main.gitlab-ci.yml
@@ -50,12 +50,8 @@ workflow:
- export QA_GITLAB_URL="http://gitlab.${GITLAB_DOMAIN}"
- source scripts/utils.sh
- source scripts/rspec_helpers.sh
- - source scripts/qa/cng_deploy/cng-kind.sh
- cd qa && bundle install
- - bundle exec cng create cluster --ci
- # Currently this only performs pre-deploy setup
- - bundle exec cng create deployment --ci
- - deploy ${GITLAB_DOMAIN}
+ - bundle exec cng create deployment --gitlab-domain "${GITLAB_DOMAIN}" --ci --with-cluster ${EXTRA_DEPLOY_VALUES}
script:
- export QA_COMMAND="bundle exec bin/qa ${QA_SCENARIO:=Test::Instance::All} $QA_GITLAB_URL -- --force-color --order random --format documentation"
- echo "Running - '$QA_COMMAND'"
@@ -120,7 +116,11 @@ cng-qa-min-redis-version:
extends: .cng-base
variables:
QA_SCENARIO: Test::Instance::Smoke
- REDIS_VERSION_TYPE: MIN_REDIS_VERSION
+ before_script:
+ - |
+ redis_version=$(awk -F "=" "/MIN_REDIS_VERSION =/ {print \$2}" $CI_PROJECT_DIR/lib/system_check/app/redis_version_check.rb | sed "s/['\" ]//g")
+ export EXTRA_DEPLOY_VALUES="--set redis.image.tag=${redis_version%.*}"
+ - !reference [.cng-base, before_script]
after_script:
- !reference [.set-suite-status, after_script]
- !reference [.cng-base, after_script]
diff --git a/.rubocop_todo/layout/first_hash_element_indentation.yml b/.rubocop_todo/layout/first_hash_element_indentation.yml
index 02cb82c9f22..5e3cf9681ee 100644
--- a/.rubocop_todo/layout/first_hash_element_indentation.yml
+++ b/.rubocop_todo/layout/first_hash_element_indentation.yml
@@ -2,17 +2,6 @@
# Cop supports --autocorrect.
Layout/FirstHashElementIndentation:
Exclude:
- - 'ee/app/helpers/ee/geo_helper.rb'
- - 'ee/app/helpers/ee/groups/group_members_helper.rb'
- - 'ee/app/models/ee/list.rb'
- - 'ee/app/services/app_sec/dast/profiles/update_service.rb'
- - 'ee/app/services/elastic/cluster_reindexing_service.rb'
- - 'ee/app/services/iterations/create_service.rb'
- - 'ee/app/services/resource_events/change_iteration_service.rb'
- - 'ee/app/services/security/token_revocation_service.rb'
- - 'ee/app/services/timebox_report_service.rb'
- - 'ee/lib/ee/gitlab/ci/parsers.rb'
- - 'ee/lib/ee/gitlab/usage_data.rb'
- 'ee/spec/factories/dependencies.rb'
- 'ee/spec/finders/epics_finder_spec.rb'
- 'ee/spec/finders/namespaces/free_user_cap/users_finder_spec.rb'
diff --git a/.rubocop_todo/layout/space_in_lambda_literal.yml b/.rubocop_todo/layout/space_in_lambda_literal.yml
index fa2afd02f84..88b4419d441 100644
--- a/.rubocop_todo/layout/space_in_lambda_literal.yml
+++ b/.rubocop_todo/layout/space_in_lambda_literal.yml
@@ -2,20 +2,6 @@
# Cop supports --autocorrect.
Layout/SpaceInLambdaLiteral:
Exclude:
- - 'app/serializers/deploy_keys/basic_deploy_key_entity.rb'
- - 'app/serializers/deployment_cluster_entity.rb'
- - 'app/serializers/deployment_entity.rb'
- - 'app/serializers/detailed_status_entity.rb'
- - 'app/serializers/diff_file_base_entity.rb'
- - 'app/serializers/diff_file_entity.rb'
- - 'app/serializers/diffs_entity.rb'
- - 'app/serializers/discussion_entity.rb'
- - 'app/serializers/draft_note_entity.rb'
- - 'app/serializers/environment_entity.rb'
- - 'app/serializers/feature_flag_entity.rb'
- - 'app/serializers/issue_board_entity.rb'
- - 'app/serializers/issue_entity.rb'
- - 'app/serializers/issue_sidebar_basic_entity.rb'
- 'ee/app/serializers/blocking_merge_request_entity.rb'
- 'ee/app/serializers/clusters/environment_entity.rb'
- 'ee/app/serializers/dashboard_operations_project_entity.rb'
diff --git a/app/assets/javascripts/admin/abuse_reports/components/app.vue b/app/assets/javascripts/admin/abuse_reports/components/app.vue
index 521634f7209..a9a6c3a63ad 100644
--- a/app/assets/javascripts/admin/abuse_reports/components/app.vue
+++ b/app/assets/javascripts/admin/abuse_reports/components/app.vue
@@ -40,7 +40,9 @@ export default {
{{ incomingEmail }}
diff --git a/app/assets/javascripts/repository/components/table/parent_row.vue b/app/assets/javascripts/repository/components/table/parent_row.vue
index 0bc22253bd2..82b36ed975b 100644
--- a/app/assets/javascripts/repository/components/table/parent_row.vue
+++ b/app/assets/javascripts/repository/components/table/parent_row.vue
@@ -59,7 +59,7 @@ export default {
v-if="parentPath === loadingPath"
size="sm"
inline
- class="d-inline-block align-text-bottom"
+ class="gl-inline-block align-text-bottom"
/>
Issuable empty state
- `, - }, - stubs: { - IssuableTabs, - }, - }); - -describe('IssuableListRoot', () => { +describe('IssuableListRoot component', () => { + /** @type {import('@vue/test-utils').Wrapper} */ let wrapper; const findAlert = () => wrapper.findComponent(GlAlert); + const findAllIssuableItems = () => wrapper.findAllComponents(IssuableItem); const findFilteredSearchBar = () => wrapper.findComponent(FilteredSearchBar); const findGlKeysetPagination = () => wrapper.findComponent(GlKeysetPagination); const findGlPagination = () => wrapper.findComponent(GlPagination); + const findIssuableGrid = () => wrapper.findComponent(IssuableGrid); const findIssuableItem = () => wrapper.findComponent(IssuableItem); - const findIssuableGrid = () => wrapper.findComponent(issuableGrid); const findIssuableTabs = () => wrapper.findComponent(IssuableTabs); - const findVueDraggable = () => wrapper.findComponent(VueDraggable); const findPageSizeSelector = () => wrapper.findComponent(PageSizeSelector); + const findVueDraggable = () => wrapper.findComponent(VueDraggable); - describe('computed', () => { - beforeEach(() => { - wrapper = createComponent(); - }); - - const mockCheckedIssuables = { - [mockIssuables[0].iid]: { checked: true, issuable: mockIssuables[0] }, - [mockIssuables[1].iid]: { checked: true, issuable: mockIssuables[1] }, - [mockIssuables[2].iid]: { checked: true, issuable: mockIssuables[2] }, - }; - - const mIssuables = [mockIssuables[0], mockIssuables[1], mockIssuables[2]]; - - describe('skeletonItemCount', () => { - it.each` - totalItems | defaultPageSize | currentPage | returnValue - ${100} | ${20} | ${1} | ${20} - ${105} | ${20} | ${6} | ${5} - ${7} | ${20} | ${1} | ${7} - ${0} | ${20} | ${1} | ${5} - `( - 'returns $returnValue when totalItems is $totalItems, defaultPageSize is $defaultPageSize and currentPage is $currentPage', - async ({ totalItems, defaultPageSize, currentPage, returnValue }) => { - wrapper.setProps({ - totalItems, - defaultPageSize, - currentPage, - }); - - await nextTick(); - - expect(wrapper.vm.skeletonItemCount).toBe(returnValue); - }, - ); - }); - - describe('allIssuablesChecked', () => { - it.each` - checkedIssuables | issuables | specTitle | returnValue - ${mockCheckedIssuables} | ${mIssuables} | ${'same as'} | ${true} - ${{}} | ${mIssuables} | ${'not same as'} | ${false} - `( - 'returns $returnValue when bulkEditIssuables count is $specTitle issuables count', - async ({ checkedIssuables, issuables, returnValue }) => { - wrapper.setProps({ - issuables, - }); - - await nextTick(); - - // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details - // eslint-disable-next-line no-restricted-syntax - wrapper.setData({ - checkedIssuables, - }); - - await nextTick(); - - expect(wrapper.vm.allIssuablesChecked).toBe(returnValue); - }, - ); - }); - - describe('bulkEditIssuables', () => { - it('returns array of issuables which have `checked` set to true within checkedIssuables map', async () => { - // setData usage is discouraged. 
See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details - // eslint-disable-next-line no-restricted-syntax - wrapper.setData({ - checkedIssuables: mockCheckedIssuables, - }); - - await nextTick(); - - expect(wrapper.vm.bulkEditIssuables).toHaveLength(mIssuables.length); - }); - }); - }); - - describe('watch', () => { - beforeEach(() => { - wrapper = createComponent(); - }); - - describe('issuables', () => { - it('populates `checkedIssuables` prop with all issuables', async () => { - wrapper.setProps({ - issuables: [mockIssuables[0]], - }); - - await nextTick(); - - expect(Object.keys(wrapper.vm.checkedIssuables)).toHaveLength(1); - expect(wrapper.vm.checkedIssuables[mockIssuables[0].iid]).toEqual({ - checked: false, - issuable: mockIssuables[0], - }); - }); - }); - - describe('urlParams', () => { - it('updates window URL reflecting props within `urlParams`', async () => { - const urlParams = { - state: 'closed', - sort: 'updated_asc', - page: 1, - search: 'foo', - }; - - wrapper.setProps({ - urlParams, - }); - - await nextTick(); - - expect(global.window.location.href).toBe( - `${TEST_HOST}/?state=${urlParams.state}&sort=${urlParams.sort}&page=${urlParams.page}&search=${urlParams.search}`, - ); - }); - }); - }); - - describe('methods', () => { - beforeEach(() => { - wrapper = createComponent(); - }); - - describe('issuableId', () => { - it('returns id value from provided issuable object', () => { - expect(wrapper.vm.issuableId({ id: 1 })).toBe(1); - expect(wrapper.vm.issuableId({ iid: 1 })).toBe(1); - expect(wrapper.vm.issuableId({})).toBeDefined(); - }); - }); - - describe('issuableChecked', () => { - it('returns boolean value representing checked status of issuable item', async () => { - // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details - // eslint-disable-next-line no-restricted-syntax - wrapper.setData({ - checkedIssuables: { - [mockIssuables[0].iid]: { checked: true, issuable: mockIssuables[0] }, - }, - }); - - await nextTick(); - - expect(wrapper.vm.issuableChecked(mockIssuables[0])).toBe(true); - }); - }); - }); - - describe('template', () => { - it('renders component container element with class "issuable-list-container"', () => { - wrapper = createComponent(); - - expect(wrapper.classes()).toContain('issuable-list-container'); - }); - - it('renders issuable-tabs component', () => { - wrapper = createComponent(); - - const tabsEl = findIssuableTabs(); - - expect(tabsEl.exists()).toBe(true); - expect(tabsEl.props()).toMatchObject({ - tabs: wrapper.vm.tabs, - tabCounts: wrapper.vm.tabCounts, - currentTab: wrapper.vm.currentTab, - }); - }); - - it('renders contents for slot "nav-actions" within issuable-tab component', () => { - wrapper = createComponent(); - - const buttonEl = findIssuableTabs().find('button.js-new-issuable'); - - expect(buttonEl.exists()).toBe(true); - expect(buttonEl.text()).toBe('New issuable'); - }); - - it('renders filtered-search-bar component', () => { - wrapper = createComponent(); - - const searchEl = findFilteredSearchBar(); - const { - namespace, - recentSearchesStorageKey, - searchInputPlaceholder, - searchTokens, - sortOptions, - initialFilterValue, - initialSortBy, - } = wrapper.vm; - - expect(searchEl.exists()).toBe(true); - expect(searchEl.props()).toMatchObject({ - namespace, - recentSearchesStorageKey, - searchInputPlaceholder, - tokens: searchTokens, - sortOptions, - initialFilterValue, - initialSortBy, - }); - }); - - it('renders gl-loading-icon when `issuablesLoading` prop is true', () => 
{ - wrapper = createComponent({ props: { issuablesLoading: true } }); - - expect(wrapper.findAllComponents(GlSkeletonLoader)).toHaveLength( - wrapper.vm.skeletonItemCount, - ); - }); - - it('renders issuable-item component for each item within `issuables` array', () => { - wrapper = createComponent(); - - const itemsEl = wrapper.findAllComponents(IssuableItem); - const mockIssuable = mockIssuableListProps.issuables[0]; - - expect(itemsEl).toHaveLength(mockIssuableListProps.issuables.length); - expect(itemsEl.at(0).props()).toMatchObject({ - issuableSymbol: wrapper.vm.issuableSymbol, - issuable: mockIssuable, - }); - }); - - it('renders contents for slot "empty-state" when `issuablesLoading` is false and `issuables` is empty', () => { - wrapper = createComponent({ props: { issuables: [] } }); - - expect(wrapper.find('p.js-issuable-empty-state').exists()).toBe(true); - expect(wrapper.find('p.js-issuable-empty-state').text()).toBe('Issuable empty state'); - }); - - it('renders only gl-pagination when `showPaginationControls` prop is true', () => { - wrapper = createComponent({ - props: { - showPaginationControls: true, - totalItems: 10, - }, - }); - - expect(findGlKeysetPagination().exists()).toBe(false); - expect(findPageSizeSelector().exists()).toBe(false); - expect(findGlPagination().props()).toMatchObject({ - perPage: 20, - value: 1, - prevPage: 0, - nextPage: 2, - totalItems: 10, - align: 'center', - }); - }); - - it('renders only gl-keyset-pagination when `showPaginationControls` and `useKeysetPagination` props are true', () => { - wrapper = createComponent({ - props: { - hasNextPage: true, - hasPreviousPage: true, - showPaginationControls: true, - useKeysetPagination: true, - }, - }); - - expect(findGlPagination().exists()).toBe(false); - expect(findGlKeysetPagination().props()).toMatchObject({ - hasNextPage: true, - hasPreviousPage: true, - }); - }); - - describe('showFilteredSearchFriendlyText prop', () => { - describe.each([true, false])('when %s', (showFilteredSearchFriendlyText) => { - it('passes its value to FilteredSearchBar', () => { - wrapper = createComponent({ props: { showFilteredSearchFriendlyText } }); - - expect(findFilteredSearchBar().props('showFriendlyText')).toBe( - showFilteredSearchFriendlyText, - ); - }); - }); - }); - - describe('alert', () => { - const error = 'oopsie!'; - - it('shows an alert when there is an error', () => { - wrapper = createComponent({ props: { error } }); - - expect(findAlert().text()).toBe(error); - }); - - it('emits "dismiss-alert" event when dismissed', () => { - wrapper = createComponent({ props: { error } }); - - findAlert().vm.$emit('dismiss'); - - expect(wrapper.emitted('dismiss-alert')).toEqual([[]]); - }); - - it('does not render when there is no error', () => { - wrapper = createComponent(); - - expect(findAlert().exists()).toBe(false); - }); - }); - }); - - describe('events', () => { - const data = { - checkedIssuables: { - [mockIssuables[0].iid]: { checked: true, issuable: mockIssuables[0] }, + const createComponent = (props = {}) => { + wrapper = shallowMount(IssuableListRoot, { + propsData: { + ...mockIssuableListProps, + ...props, }, - }; + slots: { + 'nav-actions': ``, + 'empty-state': `Issuable empty state
`, + }, + }); + }; - it('issuable-tabs component emits `click-tab` event on `click-tab` event', () => { - wrapper = createComponent({ data }); + describe('IssuableTabs component', () => { + beforeEach(() => { + createComponent(); + }); + it('renders', () => { + expect(findIssuableTabs().props()).toEqual({ + tabs: mockIssuableListProps.tabs, + tabCounts: mockIssuableListProps.tabCounts, + currentTab: mockIssuableListProps.currentTab, + truncateCounts: false, + }); + }); + + it('emits "click-tab" event on "click-tab" event', () => { findIssuableTabs().vm.$emit('click'); - expect(wrapper.emitted('click-tab')).toHaveLength(1); + expect(wrapper.emitted('click-tab')).toEqual([[]]); }); - it('sets all issuables as checked when filtered-search-bar component emits `checked-input` event', () => { - wrapper = createComponent({ data }); + it('renders contents for slot "nav-actions" within IssuableTab component', () => { + expect(findIssuableTabs().find('.js-new-issuable').text()).toBe('New issuable'); + }); + }); - const searchEl = findFilteredSearchBar(); + describe('FilteredSearchBar component', () => { + beforeEach(() => { + createComponent(); + }); - searchEl.vm.$emit('checked-input', true); - - expect(searchEl.emitted('checked-input')).toHaveLength(1); - expect(searchEl.emitted('checked-input').length).toBe(1); - - expect(wrapper.vm.checkedIssuables[mockIssuables[0].iid]).toEqual({ - checked: true, - issuable: mockIssuables[0], + it('renders', () => { + expect(findFilteredSearchBar().props()).toMatchObject({ + namespace: 'gitlab-org/gitlab-test', + recentSearchesStorageKey: 'issues', + searchInputPlaceholder: 'Search issues', + sortOptions: mockIssuableListProps.sortOptions, + initialFilterValue: [], + initialSortBy: 'created_desc', + syncFilterAndSort: false, + showCheckbox: false, + checkboxChecked: false, + showFriendlyText: false, + termsAsTokens: true, }); }); - it('filtered-search-bar component emits `filter` event on `onFilter` & `sort` event on `onSort` events', () => { - wrapper = createComponent({ data }); + describe('when "checked-input" event is emitted', () => { + it('sets all issuables to checked', async () => { + findFilteredSearchBar().vm.$emit('checked-input', true); + await nextTick(); - const searchEl = findFilteredSearchBar(); + expect(findFilteredSearchBar().props('checkboxChecked')).toBe(true); + expect( + findAllIssuableItems().filter((component) => component.props('checked') === true), + ).toHaveLength(5); + }); - searchEl.vm.$emit('onFilter'); - expect(wrapper.emitted('filter')).toHaveLength(1); - searchEl.vm.$emit('onSort'); - expect(wrapper.emitted('sort')).toHaveLength(1); - }); + it('emits "update-legacy-bulk-edit" event', () => { + findFilteredSearchBar().vm.$emit('checked-input', true); - it('sets an issuable as checked when issuable-item component emits `checked-input` event', () => { - wrapper = createComponent({ data }); - - const issuableItem = wrapper.findAllComponents(IssuableItem).at(0); - - issuableItem.vm.$emit('checked-input', true); - - expect(issuableItem.emitted('checked-input')).toHaveLength(1); - expect(issuableItem.emitted('checked-input').length).toBe(1); - - expect(wrapper.vm.checkedIssuables[mockIssuables[0].iid]).toEqual({ - checked: true, - issuable: mockIssuables[0], + expect(wrapper.emitted('update-legacy-bulk-edit')).toEqual([[]]); }); }); - it('emits `update-legacy-bulk-edit` when filtered-search-bar checkbox is checked', () => { - wrapper = createComponent({ data }); + it('emits "filter" event when "onFilter" event is emitted', () => { + 
findFilteredSearchBar().vm.$emit('onFilter'); - findFilteredSearchBar().vm.$emit('checked-input'); - - expect(wrapper.emitted('update-legacy-bulk-edit')).toEqual([[]]); + expect(wrapper.emitted('filter')).toEqual([[]]); }); - it('emits `update-legacy-bulk-edit` when issuable-item checkbox is checked', () => { - wrapper = createComponent({ data }); + it('emits "sort" event when "onSort" event is emitted', () => { + findFilteredSearchBar().vm.$emit('onSort'); - findIssuableItem().vm.$emit('checked-input'); + expect(wrapper.emitted('sort')).toEqual([[]]); + }); + }); - expect(wrapper.emitted('update-legacy-bulk-edit')).toEqual([[]]); + describe('alert', () => { + const error = 'oopsie!'; + + it('shows an alert when there is an error', () => { + createComponent({ error }); + + expect(findAlert().text()).toBe(error); }); - it('gl-pagination component emits `page-change` event on `input` event', () => { - wrapper = createComponent({ data, props: { showPaginationControls: true } }); + it('does not render when there is no error', () => { + createComponent(); - findGlPagination().vm.$emit('input'); - expect(wrapper.emitted('page-change')).toHaveLength(1); + expect(findAlert().exists()).toBe(false); }); - it.each` - event | glKeysetPaginationEvent - ${'next-page'} | ${'next'} - ${'previous-page'} | ${'prev'} - `( - 'emits `$event` event when gl-keyset-pagination emits `$glKeysetPaginationEvent` event', - ({ event, glKeysetPaginationEvent }) => { - wrapper = createComponent({ - data, - props: { showPaginationControls: true, useKeysetPagination: true }, - }); + it('emits "dismiss-alert" event when dismissed', () => { + createComponent({ error }); - findGlKeysetPagination().vm.$emit(glKeysetPaginationEvent); + findAlert().vm.$emit('dismiss'); - expect(wrapper.emitted(event)).toEqual([[]]); - }, + expect(wrapper.emitted('dismiss-alert')).toEqual([[]]); + }); + }); + + it('renders IssuableBulkEditSidebar component', () => { + createComponent(); + + expect(wrapper.findComponent(IssuableBulkEditSidebar).exists()).toBe(true); + }); + + it('renders skeleton loader when in loading state', () => { + createComponent({ issuablesLoading: true }); + + expect(wrapper.findAllComponents(GlSkeletonLoader)).toHaveLength( + mockIssuableListProps.issuables.length, ); }); - describe('manual sorting', () => { + describe('manual ordering', () => { describe('when enabled', () => { beforeEach(() => { - wrapper = createComponent({ - props: { - ...mockIssuableListProps, - isManualOrdering: true, - }, - }); + createComponent({ isManualOrdering: true }); }); it('renders VueDraggable component', () => { @@ -493,71 +186,177 @@ describe('IssuableListRoot', () => { }); describe('when disabled', () => { - beforeEach(() => { - wrapper = createComponent(); - }); - it('does not render VueDraggable component', () => { + createComponent(); + expect(findVueDraggable().exists()).toBe(false); }); }); }); - describe('page size selector', () => { - beforeEach(() => { - wrapper = createComponent({ - props: { - showPageSizeChangeControls: true, - }, + describe('IssuableItem component', () => { + it('renders for each issuable', () => { + createComponent(); + const issuableItems = findAllIssuableItems(); + + expect(issuableItems).toHaveLength(mockIssuableListProps.issuables.length); + expect(issuableItems.at(0).props()).toEqual({ + hasScopedLabelsFeature: false, + issuableSymbol: '#', + issuable: mockIssuableListProps.issuables[0], + labelFilterParam: 'label_name', + showCheckbox: false, + checked: false, + showWorkItemTypeIcon: false, + 
preventRedirect: false, + isActive: false, }); }); - it('has the page size change component', () => { + describe('isActive prop', () => { + it('is false if there is no active issuable', () => { + createComponent(); + + expect(findIssuableItem().props('isActive')).toBe(false); + }); + + it('is true if active issuable matches issuable item', () => { + createComponent({ activeIssuable: mockIssuableListProps.issuables[0] }); + + expect(findIssuableItem().props('isActive')).toBe(true); + }); + }); + + it('emits "update-legacy-bulk-edit" event when "checked-input" event is emitted', () => { + createComponent(); + + findIssuableItem().vm.$emit('checked-input'); + + expect(wrapper.emitted('update-legacy-bulk-edit')).toEqual([[]]); + }); + + it('emits "select-issuable" event when "select-issuable" event is emitted', () => { + const issuable = mockIssuableListProps.issuables[0]; + createComponent(); + + findIssuableItem().vm.$emit('select-issuable', issuable); + + expect(wrapper.emitted('select-issuable')).toEqual([[issuable]]); + }); + }); + + it('renders IssuableGrid component when in grid view context', () => { + createComponent({ isGridView: true }); + + expect(findIssuableGrid().exists()).toBe(true); + }); + + it('renders contents for slot "empty-state" when there are no issuables', () => { + createComponent({ issuables: [] }); + + expect(wrapper.find('p.js-issuable-empty-state').text()).toBe('Issuable empty state'); + }); + + describe('pagination', () => { + describe('GlKeysetPagination component', () => { + it('renders only when "showPaginationControls" and "useKeysetPagination" props are true', () => { + createComponent({ + hasNextPage: true, + hasPreviousPage: true, + showPaginationControls: true, + useKeysetPagination: true, + }); + + expect(findGlPagination().exists()).toBe(false); + expect(findGlKeysetPagination().props()).toMatchObject({ + hasNextPage: true, + hasPreviousPage: true, + }); + }); + + it.each` + event | glKeysetPaginationEvent + ${'next-page'} | ${'next'} + ${'previous-page'} | ${'prev'} + `( + 'emits "$event" event when "$glKeysetPaginationEvent" event is emitted', + ({ event, glKeysetPaginationEvent }) => { + createComponent({ showPaginationControls: true, useKeysetPagination: true }); + + findGlKeysetPagination().vm.$emit(glKeysetPaginationEvent); + + expect(wrapper.emitted(event)).toEqual([[]]); + }, + ); + }); + + describe('GlPagination component', () => { + it('renders only when "showPaginationControls" prop is true', () => { + createComponent({ + showPaginationControls: true, + totalItems: 10, + }); + + expect(findGlKeysetPagination().exists()).toBe(false); + expect(findPageSizeSelector().exists()).toBe(false); + expect(findGlPagination().props()).toMatchObject({ + perPage: 20, + value: 1, + prevPage: 0, + nextPage: 2, + totalItems: 10, + align: 'center', + }); + }); + + it('emits "page-change" event when "input" event is emitted', () => { + createComponent({ showPaginationControls: true }); + + findGlPagination().vm.$emit('input'); + + expect(wrapper.emitted('page-change')).toEqual([[]]); + }); + }); + }); + + describe('PageSizeSelector component', () => { + beforeEach(() => { + createComponent({ showPageSizeChangeControls: true }); + }); + + it('renders', () => { expect(findPageSizeSelector().exists()).toBe(true); }); - it('emits "page-size-change" event when its input is changed', () => { + it('emits "page-size-change" event when "input" event is emitted', () => { const pageSize = 123; + findPageSizeSelector().vm.$emit('input', pageSize); + 
expect(wrapper.emitted('page-size-change')).toEqual([[pageSize]]); }); }); - describe('grid view issue', () => { - beforeEach(() => { - wrapper = createComponent({ - props: { - isGridView: true, - }, + describe('watchers', () => { + describe('urlParams', () => { + beforeEach(() => { + createComponent(); + }); + + it('updates URL when "urlParams" prop updates', async () => { + const urlParams = { + state: 'closed', + sort: 'updated_asc', + page: 1, + search: 'foo', + }; + + await wrapper.setProps({ urlParams }); + + expect(global.window.location.href).toBe( + `${TEST_HOST}/?state=${urlParams.state}&sort=${urlParams.sort}&page=${urlParams.page}&search=${urlParams.search}`, + ); }); }); - - it('renders issuableGrid', () => { - expect(findIssuableGrid().exists()).toBe(true); - }); - }); - - it('passes `isActive` prop as false if there is no active issuable', () => { - wrapper = createComponent({}); - - expect(findIssuableItem().props('isActive')).toBe(false); - }); - - it('passes `isActive` prop as true if active issuable matches issuable item', () => { - wrapper = createComponent({ - props: { - activeIssuable: mockIssuableListProps.issuables[0], - }, - }); - - expect(findIssuableItem().props('isActive')).toBe(true); - }); - - it('emits `select-issuable` event on emitting `select-issuable` from issuable item', () => { - const mockIssuable = mockIssuableListProps.issuables[0]; - wrapper = createComponent({}); - findIssuableItem().vm.$emit('select-issuable', mockIssuable); - - expect(wrapper.emitted('select-issuable')).toEqual([[mockIssuable]]); }); }); diff --git a/spec/lib/gitlab/background_migration/backfill_epic_issues_into_work_item_parent_links_spec.rb b/spec/lib/gitlab/background_migration/backfill_epic_issues_into_work_item_parent_links_spec.rb new file mode 100644 index 00000000000..9cb381c6381 --- /dev/null +++ b/spec/lib/gitlab/background_migration/backfill_epic_issues_into_work_item_parent_links_spec.rb @@ -0,0 +1,218 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::BackfillEpicIssuesIntoWorkItemParentLinks, feature_category: :team_planning do + let(:author) { table(:users).create!(username: 'tester', projects_limit: 100) } + let(:epic_issues) { table(:epic_issues) } + let(:work_item_parent_links) { table(:work_item_parent_links) } + let(:group1) { table(:namespaces).create!(name: 'my test group 1', path: 'my-test-group1', type: 'Group') } + let(:group2) { table(:namespaces).create!(name: 'my test group 2', path: 'my-test-group2', type: 'Group') } + let(:epics) { table(:epics) } + let(:issues) { table(:issues) } + let(:start_id) { epic_issues.minimum(:id) } + let(:end_id) { epic_issues.maximum(:id) } + let(:batch_column) { 'id' } + let(:epic_work_item_type_enum) { 7 } + let(:epic_work_item_type_id) { table(:work_item_types).where(base_type: epic_work_item_type_enum).first.id } + let(:issue_work_item_type_enum) { 0 } + let(:issue_work_item_type_id) { table(:work_item_types).where(base_type: issue_work_item_type_enum).first.id } + let!(:issue_epic1) do + issues.create!( + title: 'Epic 1', namespace_id: group1.id, author_id: author.id, work_item_type_id: epic_work_item_type_id + ) + end + + let!(:issue_epic2) do + issues.create!( + title: 'Epic 2', namespace_id: group1.id, author_id: author.id, work_item_type_id: epic_work_item_type_id + ) + end + + let!(:epic1) do + epics.create!( + title: 'test epic1', + title_html: 'Epic 1', + group_id: group1.id, + author_id: author.id, + iid: 1, + issue_id: issue_epic1.id + ) + end + + 
+  let!(:epic2) do
+    epics.create!(
+      title: 'test epic2',
+      title_html: 'Epic 2',
+      group_id: group1.id,
+      author_id: author.id,
+      iid: 2,
+      issue_id: issue_epic2.id
+    )
+  end
+
+  let(:issues1) do
+    (1..5).map do |i|
+      issues.create!(
+        title: "Issue #{i}", namespace_id: group1.id, author_id: author.id, work_item_type_id: issue_work_item_type_id
+      )
+    end
+  end
+
+  let(:issues2) do
+    (1..5).map do |i|
+      issues.create!(
+        title: "Issue #{i}", namespace_id: group1.id, author_id: author.id, work_item_type_id: issue_work_item_type_id
+      )
+    end
+  end
+
+  let(:group2_issue_epic) do
+    issues.create!(
+      title: 'Epic 1',
+      namespace_id: group2.id,
+      author_id: author.id,
+      work_item_type_id: epic_work_item_type_id
+    )
+  end
+
+  let!(:group2_epic_issue) do
+    issue = issues.create!(
+      title: 'Issue',
+      namespace_id: group2.id,
+      author_id: author.id,
+      work_item_type_id: issue_work_item_type_id
+    )
+    epic = epics.create!(
+      title: 'test epic1',
+      title_html: 'Epic 1',
+      group_id: group2.id,
+      author_id: author.id,
+      iid: 1,
+      issue_id: group2_issue_epic.id
+    )
+    epic_issues.create!(epic_id: epic.id, issue_id: issue.id, relative_position: 1)
+  end
+
+  let(:migration) do
+    described_class.new(
+      start_id: start_id,
+      end_id: end_id,
+      batch_table: batch_table,
+      batch_column: batch_column,
+      job_arguments: job_arguments,
+      sub_batch_size: 2,
+      pause_ms: 2,
+      connection: ::ApplicationRecord.connection
+    )
+  end
+
+  before do
+    issues1.each_with_index do |issue, index|
+      epic_issues.create!(epic_id: epic1.id, issue_id: issue.id, relative_position: index)
+    end
+
+    issues2.each_with_index do |issue, index|
+      epic_issues.create!(epic_id: epic2.id, issue_id: issue.id, relative_position: index)
+    end
+  end
+
+  RSpec::Matchers.define :have_synced_work_item_epic_issue_link do
+    match do |epic_issue|
+      parent_epic = epics.find(epic_issue.epic_id)
+      parent_work_item = issues.find(parent_epic.issue_id)
+      child_work_item = issues.find(epic_issue.issue_id)
+
+      work_item_parent_links.exists?(
+        work_item_parent_id: parent_work_item.id,
+        work_item_id: child_work_item.id,
+        relative_position: epic_issue.relative_position,
+        namespace_id: parent_work_item.namespace_id
+      )
+    end
+  end
+
+  context 'when epic_issues table is used to go through all records' do
+    let(:batch_table) { 'epic_issues' }
+
+    context 'when no group id is provided' do
+      let(:job_arguments) { [nil] }
+
+      it 'backfills all records' do
+        expect do
+          migration.perform
+        end.to change { work_item_parent_links.count }.by(11) # 10 records for group 1 and 1 record for group 2
+
+        expect(epic_issues.all).to all(have_synced_work_item_epic_issue_link)
+      end
+
+      it 'upserts records if any of them already exist' do
+        existing_epic_issue = epic_issues.first
+        existing_epic = epics.find(existing_epic_issue.epic_id)
+        existing_work_item_parent_link = work_item_parent_links.create!(
+          work_item_parent_id: existing_epic.issue_id,
+          work_item_id: existing_epic_issue.issue_id,
+          relative_position: -100
+        )
+
+        expect do
+          migration.perform
+        end.to change { work_item_parent_links.count }.by(10).and( # only 10 as 1 already exists
+          change { existing_work_item_parent_link.reload.relative_position }
+            .from(-100)
+            .to(existing_epic_issue.relative_position)
+        )
+      end
+    end
+
+    context 'when a group id is provided' do
+      let(:job_arguments) { [group2.id] }
+
+      it 'raises an error' do
+        expect do
+          migration.perform
+        end.to raise_error('when group_id is provided, use `epics` as batch_table and `iid` as batch_column')
+      end
+    end
+  end
+
+  context 'when epics table is used to go through all records' do
+    let(:batch_table) { 'epics' }
+    let(:batch_column) { 'iid' }
+    let(:start_id) { epics.minimum(:iid) }
+    let(:end_id) { epics.maximum(:iid) }
+
+    context 'when a group id is provided' do
+      let(:job_arguments) { [group2.id] }
+
+      it 'backfills records only for the specified group' do
+        expect do
+          migration.perform
+        end.to change { work_item_parent_links.count }.by(1) # Only 1 record for group 2
+
+        expect(group2_epic_issue).to have_synced_work_item_epic_issue_link
+      end
+    end
+
+    context 'when no group_id is provided' do
+      let(:job_arguments) { [nil] }
+
+      it 'raises an error' do
+        expect do
+          migration.perform
+        end.to raise_error('use `epic_issues` as batch_table when no group_id is provided')
+      end
+    end
+
+    context 'when batch column is not iid' do
+      let(:job_arguments) { [group2.id] }
+      let(:batch_column) { 'id' }
+
+      it 'raises an error' do
+        expect do
+          migration.perform
+        end.to raise_error('when group_id is provided, use `epics` as batch_table and `iid` as batch_column')
+      end
+    end
+  end
+end
diff --git a/spec/lib/gitlab/database/sharding_key_spec.rb b/spec/lib/gitlab/database/sharding_key_spec.rb
index 7e8d022c402..ca30262ec22 100644
--- a/spec/lib/gitlab/database/sharding_key_spec.rb
+++ b/spec/lib/gitlab/database/sharding_key_spec.rb
@@ -38,7 +38,6 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
     'member_roles.namespace_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/444161
     *['milestones.project_id', 'milestones.group_id'],
     'pages_domains.project_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/442178,
-    'path_locks.project_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/444643
     'releases.project_id',
     'remote_mirrors.project_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/444643
     'sprints.group_id',
diff --git a/spec/lib/gitlab/graphql/query_analyzers/ast/logger_analyzer_spec.rb b/spec/lib/gitlab/graphql/query_analyzers/ast/logger_analyzer_spec.rb
index ea52bf2e080..90326521379 100644
--- a/spec/lib/gitlab/graphql/query_analyzers/ast/logger_analyzer_spec.rb
+++ b/spec/lib/gitlab/graphql/query_analyzers/ast/logger_analyzer_spec.rb
@@ -46,5 +46,14 @@ RSpec.describe Gitlab::Graphql::QueryAnalyzers::AST::LoggerAnalyzer do
 
       expect(RequestStore.store[:graphql_logs]).to match([request])
     end
+
+    it 'does not crash when #analyze_query returns []' do
+      stub_const('Gitlab::Graphql::QueryAnalyzers::AST::LoggerAnalyzer::ALL_ANALYZERS', [])
+      results = GraphQL::Analysis::AST.analyze_query(query, [described_class], multiplex_analyzers: [])
+      result = results.first
+
+      expect(result[:duration_s]).to eq monotonic_time_duration
+      expect(RequestStore.store[:graphql_logs]).to match([hash_including(operation_name: 'createNote')])
+    end
   end
 end
diff --git a/spec/migrations/20240320193910_queue_backfill_epic_issues_into_work_item_parent_links_spec.rb b/spec/migrations/20240320193910_queue_backfill_epic_issues_into_work_item_parent_links_spec.rb
new file mode 100644
index 00000000000..99453c427c8
--- /dev/null
+++ b/spec/migrations/20240320193910_queue_backfill_epic_issues_into_work_item_parent_links_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillEpicIssuesIntoWorkItemParentLinks, feature_category: :team_planning do
+  let!(:batched_migration) { described_class::MIGRATION }
+
+  it 'schedules a new batched migration' do
+    reversible_migration do |migration|
+      migration.before -> {
+        expect(batched_migration).not_to have_scheduled_batched_migration
+      }
+
+      migration.after -> {
+        expect(batched_migration).to have_scheduled_batched_migration(
+          table_name: :epic_issues,
+          column_name: :id,
+          job_arguments: [nil],
+          interval: described_class::DELAY_INTERVAL,
+          batch_size: described_class::BATCH_SIZE,
+          sub_batch_size: described_class::SUB_BATCH_SIZE
+        )
+      }
+    end
+  end
+end
diff --git a/spec/migrations/20240515094240_delete_invalid_path_locks_records_spec.rb b/spec/migrations/20240515094240_delete_invalid_path_locks_records_spec.rb
new file mode 100644
index 00000000000..0845da6b559
--- /dev/null
+++ b/spec/migrations/20240515094240_delete_invalid_path_locks_records_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe DeleteInvalidPathLocksRecords, feature_category: :source_code_management do
+  let!(:path_locks) { table(:path_locks) }
+
+  let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
+  let!(:user) { table(:users).create!(email: 'test@example.com', projects_limit: 10) }
+  let!(:project) { table(:projects).create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
+
+  describe '#up' do
+    before do
+      path_locks.create!(project_id: project.id, user_id: user.id, path: 'path1')
+      path_locks.create!(project_id: nil, user_id: user.id, path: 'path2')
+      path_locks.create!(project_id: nil, user_id: user.id, path: 'path3')
+
+      stub_const("#{described_class}::BATCH_SIZE", 1)
+    end
+
+    it 'deletes records without a project_id' do
+      migrate!
+
+      expect(path_locks.count).to eq(1)
+      expect(path_locks.first).to have_attributes(
+        project_id: project.id,
+        user_id: user.id,
+        path: 'path1'
+      )
+    end
+
+    it 'does nothing on gitlab.com' do
+      allow(Gitlab).to receive(:com?).and_return(true)
+
+      migrate!
+
+      expect(path_locks.count).to eq(3)
+    end
+  end
+end
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index ae8a4830bf1..0984258cf7b 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -3,6 +3,8 @@
 require 'spec_helper'
 
 RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :container_registry do
+  include_context 'container registry client stubs'
+
   using RSpec::Parameterized::TableSyntax
 
   let(:group) { create(:group, name: 'group') }
@@ -54,7 +56,7 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
     context 'on GitLab.com', :saas do
       context 'supports gitlab api' do
         before do
-          allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(true)
+          stub_container_registry_gitlab_api_support(supported: true)
           expect(repository.gitlab_api_client).to receive(:repository_details).with(repository.path, sizing: :self).and_return(response)
         end
 
@@ -80,7 +82,7 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
 
       context 'does not support gitlab api' do
        before do
-          allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(false)
+          stub_container_registry_gitlab_api_support(supported: false)
          expect(repository.gitlab_api_client).not_to receive(:repository_details)
        end
 
@@ -116,7 +118,7 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
     context 'when the repository is migrated', :saas do
       context 'when Gitlab API is supported' do
         before do
-          allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(true)
+          stub_container_registry_gitlab_api_support(supported: true)
         end
 
         shared_examples 'returning an instantiated tag from the API response' do
@@ -199,7 +201,7 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
 
       context 'when the Gitlab API is not supported' do
         before do
-          allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(false)
+          stub_container_registry_gitlab_api_support(supported: false)
         end
 
         it_behaves_like 'returning an instantiated tag'
@@ -264,7 +266,7 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
     context 'when the repository is migrated', :saas do
       context 'when Gitlab API is supported' do
         before do
-          allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(true)
+          stub_container_registry_gitlab_api_support(supported: true)
         end
 
         context 'when the Gitlab API returns tags' do
@@ -309,7 +311,7 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
 
      context 'when the Gitlab API is not supported' do
        before do
-          allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(false)
+          stub_container_registry_gitlab_api_support(supported: false)
        end
 
        it_behaves_like 'returning the non-empty tags list'
diff --git a/spec/models/project_ci_cd_setting_spec.rb b/spec/models/project_ci_cd_setting_spec.rb
index 1c53f6eae52..93c26afadbe 100644
--- a/spec/models/project_ci_cd_setting_spec.rb
+++ b/spec/models/project_ci_cd_setting_spec.rb
@@ -15,6 +15,12 @@ RSpec.describe ProjectCiCdSetting, feature_category: :continuous_integration do
     end
   end
 
+  describe '#pipeline_variables_minimum_override_role' do
+    it 'is maintainer by default' do
+      expect(described_class.new.pipeline_variables_minimum_override_role).to eq('maintainer')
+    end
+  end
+
describe '#forward_deployment_enabled' do it 'is true by default' do expect(described_class.new.forward_deployment_enabled).to be_truthy diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index be486fb5676..50baa68a808 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -1241,6 +1241,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr 'forward_deployment_rollback_allowed' => 'ci_', 'keep_latest_artifact' => '', 'restrict_user_defined_variables' => '', + 'pipeline_variables_minimum_override_role' => 'ci_', 'runner_token_expiration_interval' => '', 'separated_caches' => 'ci_', 'allow_fork_pipelines_to_run_in_parent_project' => 'ci_', diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb index 364c9b4d2b4..cf849e9d3d9 100644 --- a/spec/policies/project_policy_spec.rb +++ b/spec/policies/project_policy_spec.rb @@ -855,44 +855,236 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do end end - describe 'set_pipeline_variables' do - context 'when user is developer' do - let(:current_user) { developer } + describe 'change_restrict_user_defined_variables' do + using RSpec::Parameterized::TableSyntax - context 'when project allows user defined variables' do - before do - project.update!(restrict_user_defined_variables: false) - end + where(:user_role, :minimum_role, :allowed) do + :guest | :developer | false + :reporter | :developer | false + :developer | :developer | false + :maintainer | :developer | true + :maintainer | :maintainer | true + :maintainer | :no_one_allowed | true + :owner | :owner | true + :developer | :owner | false + :maintainer | :owner | false + end - it { is_expected.to be_allowed(:set_pipeline_variables) } + with_them do + let(:current_user) { public_send(user_role) } + + before do + ci_cd_settings = project.ci_cd_settings + ci_cd_settings.pipeline_variables_minimum_override_role = minimum_role + ci_cd_settings.save! 
end - context 'when project restricts use of user defined variables' do + it 'allows/disallows change_restrict_user_defined_variables variables based on project defined minimum role' do + if allowed + is_expected.to be_allowed(:change_restrict_user_defined_variables) + else + is_expected.to be_disallowed(:change_restrict_user_defined_variables) + end + end + end + end + + describe 'set_pipeline_variables' do + context 'when `pipeline_variables_minimum_override_role` is defined' do + using RSpec::Parameterized::TableSyntax + + where(:user_role, :minimum_role, :restrict_variables, :allowed) do + :developer | :no_one_allowed | true | false + :maintainer | :no_one_allowed | true | false + :owner | :no_one_allowed | true | false + :guest | :no_one_allowed | true | false + :reporter | :no_one_allowed | true | false + :anonymous | :no_one_allowed | true | false + :developer | :developer | true | true + :maintainer | :developer | true | true + :owner | :developer | true | true + :guest | :developer | true | false + :reporter | :developer | true | false + :anonymous | :developer | true | false + :developer | :maintainer | true | false + :maintainer | :maintainer | true | true + :owner | :maintainer | true | true + :guest | :maintainer | true | false + :reporter | :maintainer | true | false + :anonymous | :maintainer | true | false + :developer | :owner | true | false + :maintainer | :owner | true | false + :owner | :owner | true | true + :guest | :owner | true | false + :reporter | :owner | true | false + :anonymous | :owner | true | false + :developer | :no_one_allowed | false | true + :maintainer | :no_one_allowed | false | true + :owner | :no_one_allowed | false | true + :guest | :no_one_allowed | false | true + :reporter | :no_one_allowed | false | true + :anonymous | :no_one_allowed | false | true + :developer | :developer | false | true + :maintainer | :developer | false | true + :owner | :developer | false | true + :guest | :developer | false | true + :reporter | :developer | false | true + :anonymous | :developer | false | true + :developer | :maintainer | false | true + :maintainer | :maintainer | false | true + :owner | :maintainer | false | true + :guest | :maintainer | false | true + :reporter | :maintainer | false | true + :anonymous | :maintainer | false | true + :developer | :owner | false | true + :maintainer | :owner | false | true + :owner | :owner | false | true + :guest | :owner | false | true + :reporter | :owner | false | true + :anonymous | :owner | false | true + end + with_them do + let(:current_user) { public_send(user_role) } + before do - project.update!(restrict_user_defined_variables: true) + ci_cd_settings = project.ci_cd_settings + ci_cd_settings.restrict_user_defined_variables = restrict_variables + ci_cd_settings.pipeline_variables_minimum_override_role = minimum_role + ci_cd_settings.save! 
end - it { is_expected.not_to be_allowed(:set_pipeline_variables) } + it 'allows/disallows set pipeline variables based on project defined minimum role' do + if allowed + is_expected.to be_allowed(:set_pipeline_variables) + else + is_expected.to be_disallowed(:set_pipeline_variables) + end + end end end - context 'when user is maintainer' do - let(:current_user) { maintainer } + shared_examples 'set_pipeline_variables only on restrict_user_defined_variables' do + context 'when user is developer' do + let(:current_user) { developer } - context 'when project allows user defined variables' do - before do - project.update!(restrict_user_defined_variables: false) + context 'when project allows user defined variables' do + before do + project.update!(restrict_user_defined_variables: false) + end + + it { is_expected.to be_allowed(:set_pipeline_variables) } end - it { is_expected.to be_allowed(:set_pipeline_variables) } + context 'when project restricts use of user defined variables' do + before do + project.update!(restrict_user_defined_variables: true) + end + + it { is_expected.not_to be_allowed(:set_pipeline_variables) } + end end - context 'when project restricts use of user defined variables' do - before do - project.update!(restrict_user_defined_variables: true) + context 'when user is maintainer' do + let(:current_user) { maintainer } + + context 'when project allows user defined variables' do + before do + project.update!(restrict_user_defined_variables: false) + end + + it { is_expected.to be_allowed(:set_pipeline_variables) } end - it { is_expected.to be_allowed(:set_pipeline_variables) } + context 'when project restricts use of user defined variables' do + before do + project.update!(restrict_user_defined_variables: true) + end + + it { is_expected.to be_allowed(:set_pipeline_variables) } + end + end + end + + it_behaves_like 'set_pipeline_variables only on restrict_user_defined_variables' + + context 'when feature flag allow_user_variables_by_minimum_role is disabled' do + before do + stub_feature_flags(allow_user_variables_by_minimum_role: false) + end + + it_behaves_like 'set_pipeline_variables only on restrict_user_defined_variables' + + context 'when `pipeline_variables_minimum_override_role` is defined' do + using RSpec::Parameterized::TableSyntax + + where(:user_role, :minimum_role, :restrict_variables, :allowed) do + :developer | :no_one_allowed | false | true + :maintainer | :no_one_allowed | false | true + :owner | :no_one_allowed | false | true + :guest | :no_one_allowed | false | true + :reporter | :no_one_allowed | false | true + :anonymous | :no_one_allowed | false | true + :developer | :developer | false | true + :maintainer | :developer | false | true + :owner | :developer | false | true + :guest | :developer | false | true + :reporter | :developer | false | true + :anonymous | :developer | false | true + :developer | :maintainer | false | true + :maintainer | :maintainer | false | true + :owner | :maintainer | false | true + :guest | :maintainer | false | true + :reporter | :maintainer | false | true + :anonymous | :maintainer | false | true + :developer | :owner | false | true + :maintainer | :owner | false | true + :owner | :owner | false | true + :guest | :owner | false | true + :reporter | :owner | false | true + :anonymous | :owner | false | true + :developer | :no_one_allowed | true | false + :maintainer | :no_one_allowed | true | true + :owner | :no_one_allowed | true | true + :guest | :no_one_allowed | true | false + :reporter | :no_one_allowed | true | false 
+ :anonymous | :no_one_allowed | true | false + :developer | :developer | true | false + :maintainer | :developer | true | true + :owner | :developer | true | true + :guest | :developer | true | false + :reporter | :developer | true | false + :anonymous | :developer | true | false + :developer | :maintainer | true | false + :maintainer | :maintainer | true | true + :owner | :maintainer | true | true + :guest | :maintainer | true | false + :reporter | :maintainer | true | false + :anonymous | :maintainer | true | false + :developer | :owner | true | false + :maintainer | :owner | true | true + :owner | :owner | true | true + :guest | :owner | true | false + :reporter | :owner | true | false + :anonymous | :owner | true | false + end + with_them do + let(:current_user) { public_send(user_role) } + + before do + ci_cd_settings = project.ci_cd_settings + ci_cd_settings.restrict_user_defined_variables = restrict_variables + ci_cd_settings.pipeline_variables_minimum_override_role = minimum_role + ci_cd_settings.save! + end + + it 'allows/disallows set pipeline variables based on restrict_user_defined_variables' do + if allowed + is_expected.to be_allowed(:set_pipeline_variables) + else + is_expected.to be_disallowed(:set_pipeline_variables) + end + end + end end end end diff --git a/spec/requests/api/container_repositories_spec.rb b/spec/requests/api/container_repositories_spec.rb index 76e2d42b78e..3966c713f34 100644 --- a/spec/requests/api/container_repositories_spec.rb +++ b/spec/requests/api/container_repositories_spec.rb @@ -91,18 +91,14 @@ RSpec.describe API::ContainerRepositories, feature_category: :container_registry context 'when the repository is migrated', :saas do context 'when the GitLab API is supported' do before do - allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(true) + stub_container_registry_gitlab_api_support(supported: true) do |client| + allow(client).to receive(:tags).and_return(response_body) + end end context 'when the Gitlab API returns tags' do include_context 'with the container registry GitLab API returning tags' - before do - allow_next_instance_of(ContainerRegistry::GitlabApiClient) do |client| - allow(client).to receive(:tags).and_return(response_body) - end - end - it 'returns instantiated tags from the response' do expect_any_instance_of(ContainerRepository) do |repository| expect(repository).to receive(:each_tags_page).and_call_original @@ -124,11 +120,7 @@ RSpec.describe API::ContainerRepositories, feature_category: :container_registry end context 'when the Gitlab API does not return any tags' do - before do - allow_next_instance_of(ContainerRegistry::GitlabApiClient) do |client| - allow(client).to receive(:tags).and_return({ pagination: {}, response_body: {} }) - end - end + let(:response_body) { { pagination: {}, response_body: {} } } it 'returns an instantiated tag from the response' do subject @@ -143,7 +135,7 @@ RSpec.describe API::ContainerRepositories, feature_category: :container_registry context 'when the GitLab API is not supported' do before do - allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(false) + stub_container_registry_gitlab_api_support(supported: false) end it_behaves_like 'returning a repository and its tags' diff --git a/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb b/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb index fa0d5c2c635..417472ca03a 100644 --- 
a/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb +++ b/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb @@ -331,30 +331,45 @@ RSpec.describe 'container repository details', feature_category: :container_regi GQL end - it 'returns the last_published_at', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/463763' do - stub_container_registry_gitlab_api_support(supported: true) do |client| - stub_container_registry_gitlab_api_repository_details(client, path: container_repository.path, last_published_at: '2024-04-30T06:07:36.225Z') - end - - subject - - expect(last_published_at_response).to eq('2024-04-30T06:07:36+00:00') - end - - context 'with a network error' do - it 'returns an error', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/463764' do - stub_container_registry_gitlab_api_network_error + context 'on Gitlab.com', :saas do + it 'returns the last_published_at' do + stub_container_registry_gitlab_api_support(supported: true) do |client| + stub_container_registry_gitlab_api_repository_details( + client, + path: container_repository.path, + sizing: :self, + last_published_at: '2024-04-30T06:07:36.225Z' + ) + end subject - expect_graphql_errors_to_include("Can't connect to the Container Registry. If this error persists, please review the troubleshooting documentation.") + expect(last_published_at_response).to eq('2024-04-30T06:07:36+00:00') + end + + context 'with not supporting the gitlab api' do + it 'returns nil' do + stub_container_registry_gitlab_api_support(supported: false) + + subject + + expect(last_published_at_response).to eq(nil) + end + end + + context 'with a network error' do + it 'returns an error' do + stub_container_registry_gitlab_api_network_error + + subject + + expect_graphql_errors_to_include("Can't connect to the Container Registry. 
If this error persists, please review the troubleshooting documentation.") + end end end - context 'with not supporting the gitlab api' do + context 'when not on Gitlab.com' do it 'returns nil' do - stub_container_registry_gitlab_api_support(supported: false) - subject expect(last_published_at_response).to eq(nil) diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml index e1937a08e01..60aa7d51060 100644 --- a/spec/requests/api/project_attributes.yml +++ b/spec/requests/api/project_attributes.yml @@ -101,6 +101,7 @@ ci_cd_settings: - inbound_job_token_scope_enabled - restrict_pipeline_cancellation_role remapped_attributes: + pipeline_variables_minimum_override_role: ci_pipeline_variables_minimum_override_role default_git_depth: ci_default_git_depth forward_deployment_enabled: ci_forward_deployment_enabled forward_deployment_rollback_allowed: ci_forward_deployment_rollback_allowed diff --git a/spec/requests/api/project_container_repositories_spec.rb b/spec/requests/api/project_container_repositories_spec.rb index bbeb56045d4..3588196e9d8 100644 --- a/spec/requests/api/project_container_repositories_spec.rb +++ b/spec/requests/api/project_container_repositories_spec.rb @@ -5,6 +5,8 @@ require 'spec_helper' RSpec.describe API::ProjectContainerRepositories, feature_category: :container_registry do include ExclusiveLeaseHelpers + include_context 'container registry client stubs' + let_it_be(:project) { create(:project, :private) } let_it_be(:project2) { create(:project, :public) } let_it_be(:maintainer) { create(:user, maintainer_of: [project, project2]) } @@ -201,9 +203,7 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r end before do - allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(true) - - allow_next_instance_of(ContainerRegistry::GitlabApiClient) do |client| + stub_container_registry_gitlab_api_support(supported: true) do |client| allow(client).to receive(:tags).and_return(response_body) end end @@ -476,7 +476,16 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r context 'when the repository is migrated', :saas do context 'when the Gitlab API is supported' do before do - allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(true) + stub_container_registry_gitlab_api_support(supported: true) do |client| + allow(client).to receive(:tags).and_return(response_body) + end + end + + let(:response_body) do + { + pagination: {}, + response_body: ::Gitlab::Json.parse(tags_response.to_json) + } end context 'when the Gitlab API returns a tag' do @@ -492,19 +501,6 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r ] end - let_it_be(:response_body) do - { - pagination: {}, - response_body: ::Gitlab::Json.parse(tags_response.to_json) - } - end - - before do - allow_next_instance_of(ContainerRegistry::GitlabApiClient) do |client| - allow(client).to receive(:tags).and_return(response_body) - end - end - it_behaves_like 'returning the tag' end @@ -535,28 +531,11 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r ] end - let_it_be(:response_body) do - { - pagination: {}, - response_body: ::Gitlab::Json.parse(tags_response.to_json) - } - end - - before do - allow_next_instance_of(ContainerRegistry::GitlabApiClient) do |client| - allow(client).to receive(:tags).and_return(response_body) - end - end - it_behaves_like 'returning the tag' end context 'when the Gitlab 
API does not return a tag' do - before do - allow_next_instance_of(ContainerRegistry::GitlabApiClient) do |client| - allow(client).to receive(:tags).and_return({ pagination: {}, response_body: {} }) - end - end + let_it_be(:tags_response) { {} } it 'returns not found' do subject @@ -569,7 +548,7 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r context 'when the Gitlab API is not supported' do before do - allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(false) + stub_container_registry_gitlab_api_support(supported: false) stub_container_registry_tags(repository: root_repository.path, tags: %w[rootA], with_manifest: true) end diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb index 5fb48de01af..78aec3c2b84 100644 --- a/spec/requests/api/projects_spec.rb +++ b/spec/requests/api/projects_spec.rb @@ -2790,6 +2790,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and expect(json_response['only_allow_merge_if_pipeline_succeeds']).to eq(project.only_allow_merge_if_pipeline_succeeds) expect(json_response['allow_merge_on_skipped_pipeline']).to eq(project.allow_merge_on_skipped_pipeline) expect(json_response['restrict_user_defined_variables']).to eq(project.restrict_user_defined_variables?) + expect(json_response['ci_pipeline_variables_minimum_override_role']).to eq(project.ci_pipeline_variables_minimum_override_role.to_s) expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved) expect(json_response['security_and_compliance_access_level']).to be_present expect(json_response['releases_access_level']).to be_present @@ -3297,6 +3298,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and 'build_git_strategy', 'keep_latest_artifact', 'restrict_user_defined_variables', + 'ci_pipeline_variables_minimum_override_role', 'runners_token', 'runner_token_expiration_interval', 'group_runners_enabled', @@ -4301,6 +4303,118 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and expect(response).to have_gitlab_http_status(:bad_request) end + context 'when ci_pipeline_variables_minimum_override_role is owner' do + before do + project3.add_maintainer(user2) + ci_cd_settings = project3.ci_cd_settings + ci_cd_settings.restrict_user_defined_variables = false + ci_cd_settings.pipeline_variables_minimum_override_role = 'owner' + ci_cd_settings.save! 
+ end + + context 'and current user is maintainer' do + let_it_be(:current_user) { user2 } + + it 'rejects changing restrict_user_defined_variables' do + project_param = { restrict_user_defined_variables: true } + + put api("/projects/#{project3.id}", current_user), params: project_param + + expect(response).to have_gitlab_http_status(:bad_request) + end + + it 'rejects changing ci_pipeline_variables_minimum_override_role' do + project_param = { ci_pipeline_variables_minimum_override_role: 'developer' } + + put api("/projects/#{project3.id}", current_user), params: project_param + + expect(response).to have_gitlab_http_status(:bad_request) + end + end + + context 'and current user is owner' do + let_it_be(:current_user) { user } + + it 'successfully changes restrict_user_defined_variables' do + project_param = { restrict_user_defined_variables: true } + + put api("/projects/#{project3.id}", current_user), params: project_param + + expect(response).to have_gitlab_http_status(:ok) + end + + it 'successfully changes ci_pipeline_variables_minimum_override_role' do + project_param = { ci_pipeline_variables_minimum_override_role: 'developer' } + + put api("/projects/#{project3.id}", current_user), params: project_param + + expect(response).to have_gitlab_http_status(:ok) + end + end + end + + context 'when ci_pipeline_variables_minimum_override_role is set to maintainer' do + before do + project3.add_maintainer(user2) + ci_cd_settings = project3.ci_cd_settings + ci_cd_settings.restrict_user_defined_variables = false + ci_cd_settings.pipeline_variables_minimum_override_role = 'maintainer' + ci_cd_settings.save! + end + + context 'and current user is maintainer' do + let_it_be(:current_user) { user2 } + + it 'successfully changes restrict_user_defined_variables' do + project_param = { restrict_user_defined_variables: true } + + put api("/projects/#{project3.id}", current_user), params: project_param + + expect(response).to have_gitlab_http_status(:ok) + end + + it 'successfully changes ci_pipeline_variables_minimum_override_role' do + project_param = { ci_pipeline_variables_minimum_override_role: 'developer' } + + put api("/projects/#{project3.id}", current_user), params: project_param + + expect(response).to have_gitlab_http_status(:ok) + end + + it 'rejects changing ci_pipeline_variables_minimum_override_role to owner' do + project_param = { ci_pipeline_variables_minimum_override_role: 'owner' } + + put api("/projects/#{project3.id}", current_user), params: project_param + + expect(response).to have_gitlab_http_status(:bad_request) + end + end + + context 'and current user is developer' do + let_it_be(:current_user) { user3 } + + before do + project3.add_developer(user3) + end + + it 'fails to change restrict_user_defined_variables' do + project_param = { restrict_user_defined_variables: true } + + put api("/projects/#{project3.id}", current_user), params: project_param + + expect(response).to have_gitlab_http_status(:forbidden) + end + + it 'fails to change ci_pipeline_variables_minimum_override_role' do + project_param = { ci_pipeline_variables_minimum_override_role: 'developer' } + + put api("/projects/#{project3.id}", current_user), params: project_param + + expect(response).to have_gitlab_http_status(:forbidden) + end + end + end + + it 'updates restrict_user_defined_variables' do project_param = { restrict_user_defined_variables: true } @@ -4313,6 +4427,34 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and end end + it 'updates 
ci_pipeline_variables_minimum_override_role' do + project_param = { ci_pipeline_variables_minimum_override_role: 'owner' } + + put api("/projects/#{project3.id}", user), params: project_param + + expect(response).to have_gitlab_http_status(:ok) + + project_param.each_pair do |k, v| + expect(json_response[k.to_s]).to eq(v) + end + end + + it 'rejects updating ci_pipeline_variables_minimum_override_role when an invalid role is provided' do + project_param = { ci_pipeline_variables_minimum_override_role: 'wrong' } + + put api("/projects/#{project3.id}", user), params: project_param + + expect(response).to have_gitlab_http_status(:bad_request) + end + + it 'rejects updating ci_pipeline_variables_minimum_override_role when an existing but not allowed role is provided' do + project_param = { ci_pipeline_variables_minimum_override_role: 'guest' } + + put api("/projects/#{project3.id}", user), params: project_param + + expect(response).to have_gitlab_http_status(:bad_request) + end + it 'updates public_builds (deprecated)' do project3.update!({ public_builds: false }) project_param = { public_builds: 'true' } diff --git a/spec/requests/api/remote_mirrors_spec.rb b/spec/requests/api/remote_mirrors_spec.rb index 77f30122414..7930605eb57 100644 --- a/spec/requests/api/remote_mirrors_spec.rb +++ b/spec/requests/api/remote_mirrors_spec.rb @@ -305,15 +305,15 @@ RSpec.describe API::RemoteMirrors, feature_category: :source_code_management do expect(response).to have_gitlab_http_status(:not_found) end - it 'returns bad request if the update service fails' do - expect_next_instance_of(Projects::UpdateService) do |service| - expect(service).to receive(:execute).and_return(status: :error, message: 'message') + it 'returns bad request if the destroy service fails' do + expect_next_instance_of(RemoteMirrors::DestroyService) do |service| + expect(service).to receive(:execute).and_return(ServiceResponse.error(message: 'error')) end expect { delete api(route[mirror.id], user) }.not_to change { project.remote_mirrors.count } expect(response).to have_gitlab_http_status(:bad_request) - expect(json_response).to eq({ 'message' => 'message' }) + expect(json_response).to eq({ 'message' => 'error' }) end it 'deletes a remote mirror' do @@ -321,6 +321,35 @@ RSpec.describe API::RemoteMirrors, feature_category: :source_code_management do expect(response).to have_gitlab_http_status(:no_content) end + + context 'when feature flag "use_remote_mirror_destroy_service" is disabled' do + before do + stub_feature_flags(use_remote_mirror_destroy_service: false) + end + + it 'returns 404 for non existing id' do + delete api(route[non_existing_record_id], user) + + expect(response).to have_gitlab_http_status(:not_found) + end + + it 'returns bad request if the destroy service fails' do + expect_next_instance_of(Projects::UpdateService) do |service| + expect(service).to receive(:execute).and_return(status: :error, message: 'message') + end + + expect { delete api(route[mirror.id], user) }.not_to change { project.remote_mirrors.count } + + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response).to eq({ 'message' => 'message' }) + end + + it 'deletes a remote mirror' do + expect { delete api(route[mirror.id], user) }.to change { project.remote_mirrors.count }.from(1).to(0) + + expect(response).to have_gitlab_http_status(:no_content) + end + end end end end diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb index 51b966c3acb..a11e1ae0342 100644 --- 
a/spec/services/projects/update_service_spec.rb +++ b/spec/services/projects/update_service_spec.rb @@ -26,6 +26,109 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d describe '#execute' do let(:admin) { create(:admin) } + let_it_be(:maintainer) { create(:user) } + let_it_be(:developer) { create(:user) } + let_it_be(:owner) { create(:user) } + + context 'when changing restrict_user_defined_variables' do + using RSpec::Parameterized::TableSyntax + + where(:current_user_role, :project_minimum_role, :from_value, :to_value, :status) do + :owner | :developer | true | false | :success + :owner | :maintainer | true | false | :success + :owner | :developer | false | true | :success + :owner | :maintainer | false | true | :success + :maintainer | :developer | true | false | :success + :maintainer | :maintainer | true | false | :success + :maintainer | :owner | true | false | :api_error + :maintainer | :owner | false | true | :api_error + :maintainer | :owner | true | true | :success + :developer | :owner | true | false | :api_error + :developer | :developer | true | false | :api_error + :developer | :maintainer | true | false | :api_error + end + + with_them do + let(:current_user) { public_send(current_user_role) } + + before do + project.add_maintainer(maintainer) + project.add_developer(developer) + project.add_owner(owner) + + ci_cd_settings = project.ci_cd_settings + ci_cd_settings.pipeline_variables_minimum_override_role = project_minimum_role + ci_cd_settings.restrict_user_defined_variables = from_value + ci_cd_settings.save! + end + + it 'allows or disallows changing restrict_user_defined_variables' do + result = update_project(project, current_user, restrict_user_defined_variables: to_value) + expect(result[:status]).to eq(status) + end + + context 'when feature flag allow_user_variables_by_minimum_role is disabled' do + before do + stub_feature_flags(allow_user_variables_by_minimum_role: false) + end + + it 'allows changing restrict_user_defined_variables' do + result = update_project(project, current_user, restrict_user_defined_variables: to_value) + expect(result[:status]).to eq(:success) + end + end + end + end + + context 'when changing ci_pipeline_variables_minimum_override_role' do + using RSpec::Parameterized::TableSyntax + + where(:current_user_role, :restrict_user_defined_variables, :from_value, :to_value, :status) do + :owner | true | :owner | :developer | :success + :owner | true | :owner | :maintainer | :success + :owner | true | :developer | :maintainer | :success + :owner | true | :maintainer | :owner | :success + :maintainer | true | :owner | :developer | :api_error + :maintainer | true | :owner | :maintainer | :api_error + :maintainer | true | :developer | :maintainer | :success + :maintainer | true | :maintainer | :owner | :api_error + :owner | false | :owner | :maintainer | :success + :maintainer | false | :owner | :developer | :api_error + :maintainer | false | :maintainer | :owner | :api_error + end + + with_them do + let(:current_user) { public_send(current_user_role) } + + before do + project.add_maintainer(maintainer) + project.add_developer(developer) + project.add_owner(owner) + + ci_cd_settings = project.ci_cd_settings + ci_cd_settings.pipeline_variables_minimum_override_role = from_value + ci_cd_settings.restrict_user_defined_variables = restrict_user_defined_variables + ci_cd_settings.save! 
+ end + + it 'allows or disallows changing ci_pipeline_variables_minimum_override_role' do + result = update_project(project, current_user, ci_pipeline_variables_minimum_override_role: to_value.to_s) + expect(result[:status]).to eq(status) + end + + context 'when feature flag allow_user_variables_by_minimum_role is disabled' do + before do + stub_feature_flags(allow_user_variables_by_minimum_role: false) + end + + it 'allows changing ci_pipeline_variables_minimum_override_role' do + result = update_project(project, current_user, ci_pipeline_variables_minimum_override_role: to_value.to_s) + expect(result[:status]).to eq(:success) + end + end + end + end + + context 'when changing visibility level' do it_behaves_like 'publishing Projects::ProjectAttributesChangedEvent', params: { visibility_level: Gitlab::VisibilityLevel::INTERNAL }, diff --git a/spec/services/remote_mirrors/destroy_service_spec.rb b/spec/services/remote_mirrors/destroy_service_spec.rb new file mode 100644 index 00000000000..af939008a98 --- /dev/null +++ b/spec/services/remote_mirrors/destroy_service_spec.rb @@ -0,0 +1,68 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe RemoteMirrors::DestroyService, feature_category: :source_code_management do + subject(:service) { described_class.new(project, user) } + + let_it_be(:project) { create(:project) } + let_it_be(:user) { create(:user, maintainer_of: project) } + + let!(:remote_mirror) { create(:remote_mirror, project: project) } + + describe '#execute', :aggregate_failures do + subject(:execute) { service.execute(remote_mirror) } + + it 'destroys a push mirror' do + expect { execute }.to change { project.remote_mirrors.count }.from(1).to(0) + + is_expected.to be_success + end + + context 'when user does not have permissions' do + let(:user) { nil } + + it 'returns an error' do + expect { execute }.not_to change { RemoteMirror.count } + + is_expected.to be_error + expect(execute.message).to eq('Access Denied') + end + end + + context 'when remote mirror is missing' do + let(:remote_mirror) { nil } + + it 'returns an error' do + is_expected.to be_error + expect(execute.message).to eq('Remote mirror is missing') + end + end + + context 'when mirror does not match the project' do + let!(:remote_mirror) { create(:remote_mirror) } + + it 'returns an error' do + expect { execute }.not_to change { RemoteMirror.count } + + is_expected.to be_error + expect(execute.message).to eq('Project mismatch') + end + end + + context 'when destroy process fails' do + before do + allow(remote_mirror).to receive(:destroy) do + remote_mirror.errors.add(:base, 'destroy error') + end.and_return(nil) + end + + it 'returns an error' do + expect { execute }.not_to change { RemoteMirror.count } + + is_expected.to be_error + expect(execute.message.full_messages).to include('destroy error') + end + end + end +end diff --git a/spec/services/work_items/parent_links/create_service_spec.rb b/spec/services/work_items/parent_links/create_service_spec.rb index 4a45d52d9ee..4a7ef1f7d5e 100644 --- a/spec/services/work_items/parent_links/create_service_spec.rb +++ b/spec/services/work_items/parent_links/create_service_spec.rb @@ -118,6 +118,17 @@ RSpec.describe WorkItems::ParentLinks::CreateService, feature_category: :portfol expect(tasks_parent).to match_array([work_item]) end + context 'when relative_position is set' do + let(:params) { { issuable_references: [task1, task2], relative_position: 1337 } } + + it 'creates relationships with given relative_position' do + result = subject + + 
expect(result[:created_references].first.relative_position).to eq(1337) + expect(result[:created_references].second.relative_position).to eq(1337) + end + end + it 'returns success status and created links', :aggregate_failures do expect(subject.keys).to match_array([:status, :created_references]) expect(subject[:status]).to eq(:success) diff --git a/spec/views/user_settings/profiles/show.html.haml_spec.rb b/spec/views/user_settings/profiles/show.html.haml_spec.rb index 4c246c301aa..0e9059c0cc0 100644 --- a/spec/views/user_settings/profiles/show.html.haml_spec.rb +++ b/spec/views/user_settings/profiles/show.html.haml_spec.rb @@ -11,6 +11,7 @@ RSpec.describe 'user_settings/profiles/show', feature_category: :user_profile do allow(controller).to receive(:current_user).and_return(user) allow(view).to receive(:experiment_enabled?) stub_feature_flags(edit_user_profile_vue: false) + allow(view).to receive(:can?) end context 'when the profile page is opened' do @@ -50,5 +51,19 @@ RSpec.describe 'user_settings/profiles/show', feature_category: :user_profile do type: :hidden ) end + + describe 'private profile', feature_category: :user_management do + before do + allow(view).to receive(:can?).with(user, :make_profile_private, user).and_return(true) + end + + it 'renders correct CE partial' do + render + + expect(rendered).to render_template('user_settings/profiles/_private_profile') + expect(rendered).to have_link 'Learn more', + href: help_page_path('user/profile/index', anchor: 'make-your-user-profile-page-private') + end + end end end diff --git a/workhorse/go.mod b/workhorse/go.mod index 0096208f274..1646fa776b7 100644 --- a/workhorse/go.mod +++ b/workhorse/go.mod @@ -27,7 +27,7 @@ require ( gitlab.com/gitlab-org/gitaly/v16 v16.11.2 gitlab.com/gitlab-org/labkit v1.21.0 gocloud.dev v0.37.0 - golang.org/x/image v0.15.0 + golang.org/x/image v0.16.0 golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 golang.org/x/net v0.23.0 golang.org/x/oauth2 v0.20.0 @@ -121,7 +121,7 @@ require ( golang.org/x/mod v0.16.0 // indirect golang.org/x/sync v0.6.0 // indirect golang.org/x/sys v0.19.0 // indirect - golang.org/x/text v0.14.0 // indirect + golang.org/x/text v0.15.0 // indirect golang.org/x/time v0.5.0 // indirect golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect google.golang.org/api v0.169.0 // indirect diff --git a/workhorse/go.sum b/workhorse/go.sum index cd5d5f6287c..04b6d25441d 100644 --- a/workhorse/go.sum +++ b/workhorse/go.sum @@ -547,8 +547,8 @@ golang.org/x/exp/typeparams v0.0.0-20221208152030-732eee02a75a/go.mod h1:AbB0pIl golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.15.0 h1:kOELfmgrmJlw4Cdb7g/QGuB3CvDrXbqEIww/pNtNBm8= -golang.org/x/image v0.15.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE= +golang.org/x/image v0.16.0 h1:9kloLAKhUufZhA12l5fwnx2NZW39/we1UhBesW433jw= +golang.org/x/image v0.16.0/go.mod h1:ugSZItdV4nOxyqp56HmXwH0Ry0nBCpjnZdpDaIHdoPs= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ 
-743,8 +743,9 @@ golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20170424234030-8be79e1e0910/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= diff --git a/workhorse/internal/helper/command/command.go b/workhorse/internal/helper/command/command.go index 67d6838b170..7970d016170 100644 --- a/workhorse/internal/helper/command/command.go +++ b/workhorse/internal/helper/command/command.go @@ -1,3 +1,4 @@ +// Package command provides helper functions for working with commands and processes package command import ( @@ -5,16 +6,17 @@ import ( "syscall" ) +// ExitStatus returns the exit code of an error if it implements the ExitCode() method func ExitStatus(err error) (int, bool) { if v, ok := err.(interface{ ExitCode() int }); ok { return v.ExitCode(), true } else if err != nil { return -1, false - } else { - return 0, false } + return 0, false } +// KillProcessGroup sends a SIGTERM signal to the process group of the given command func KillProcessGroup(cmd *exec.Cmd) error { if cmd == nil { return nil @@ -22,7 +24,7 @@ func KillProcessGroup(cmd *exec.Cmd) error { if p := cmd.Process; p != nil && p.Pid > 0 { // Send SIGTERM to the process group of cmd - syscall.Kill(-p.Pid, syscall.SIGTERM) + _ = syscall.Kill(-p.Pid, syscall.SIGTERM) } // reap our child process diff --git a/workhorse/internal/helper/countingresponsewriter.go b/workhorse/internal/helper/countingresponsewriter.go index 9bcecf2d9b1..7cdb0c87b6c 100644 --- a/workhorse/internal/helper/countingresponsewriter.go +++ b/workhorse/internal/helper/countingresponsewriter.go @@ -1,9 +1,11 @@ +// Package helper provides various helper functions and types for HTTP handling package helper import ( "net/http" ) +// CountingResponseWriter is an interface that wraps http.ResponseWriter type CountingResponseWriter interface { http.ResponseWriter Count() int64 @@ -16,6 +18,7 @@ type countingResponseWriter struct { count int64 } +// NewCountingResponseWriter creates a new CountingResponseWriter func NewCountingResponseWriter(rw http.ResponseWriter) CountingResponseWriter { return &countingResponseWriter{rw: rw} } diff --git a/workhorse/internal/helper/countingresponsewriter_test.go b/workhorse/internal/helper/countingresponsewriter_test.go index d070b215b90..fbe8aa112cd 100644 --- a/workhorse/internal/helper/countingresponsewriter_test.go +++ b/workhorse/internal/helper/countingresponsewriter_test.go @@ -52,8 +52,9 @@ func TestCountingResponseWriterWrite(t *testing.T) { func TestCountingResponseWriterFlushable(t *testing.T) { rw := httptest.NewRecorder() + crw := countingResponseWriter{rw: rw} - rc := http.NewResponseController(&crw) + rc := http.NewResponseController(&crw) //nolint:bodyclose // false-positive 
https://github.com/timakin/bodyclose/issues/52 err := rc.Flush() require.NoError(t, err, "the underlying response writer is not flushable") diff --git a/workhorse/internal/helper/exception/exception.go b/workhorse/internal/helper/exception/exception.go index 9b1628ffecb..ba7ef05e7e4 100644 --- a/workhorse/internal/helper/exception/exception.go +++ b/workhorse/internal/helper/exception/exception.go @@ -1,3 +1,4 @@ +// Package exception provides utility functions for handling exceptions package exception import ( @@ -17,6 +18,7 @@ var ravenHeaderBlacklist = []string{ "Private-Token", } +// Track captures and reports an exception func Track(r *http.Request, err error, fields log.Fields) { client := raven.DefaultClient extra := raven.Extra{} @@ -45,6 +47,7 @@ func Track(r *http.Request, err error, fields log.Fields) { client.Capture(packet, nil) } +// CleanHeaders redacts sensitive headers in the request func CleanHeaders(r *http.Request) { if r == nil { return diff --git a/workhorse/internal/helper/fail/fail.go b/workhorse/internal/helper/fail/fail.go index 32c2940a0cc..8b6c828bba6 100644 --- a/workhorse/internal/helper/fail/fail.go +++ b/workhorse/internal/helper/fail/fail.go @@ -1,3 +1,4 @@ +// Package fail provides functionality for handling failure responses in HTTP requests package fail import ( @@ -12,6 +13,7 @@ type failure struct { fields log.Fields } +// Option represents a function that modifies a failure object type Option func(*failure) // WithStatus sets the HTTP status and body text of the failure response. diff --git a/workhorse/internal/helper/helpers.go b/workhorse/internal/helper/helpers.go index a4a91901ea9..29a6fe04b2e 100644 --- a/workhorse/internal/helper/helpers.go +++ b/workhorse/internal/helper/helpers.go @@ -1,3 +1,4 @@ +// Package helper provides utility functions for various tasks package helper import ( @@ -5,19 +6,21 @@ import ( "mime" "net/url" "os" + "path/filepath" "gitlab.com/gitlab-org/gitlab/workhorse/internal/log" ) +// OpenFile opens a file at the specified path and returns its properties func OpenFile(path string) (file *os.File, fi os.FileInfo, err error) { - file, err = os.Open(path) + file, err = os.Open(filepath.Clean(path)) if err != nil { return } defer func() { if err != nil { - file.Close() + _ = file.Close() } }() @@ -39,6 +42,7 @@ func OpenFile(path string) (file *os.File, fi os.FileInfo, err error) { return } +// URLMustParse parses the given string as a URL func URLMustParse(s string) *url.URL { u, err := url.Parse(s) if err != nil { @@ -47,6 +51,7 @@ func URLMustParse(s string) *url.URL { return u } +// IsContentType checks if the actual content type matches the expected content type func IsContentType(expected, actual string) bool { parsed, _, err := mime.ParseMediaType(actual) return err == nil && parsed == expected diff --git a/workhorse/internal/helper/nginx/nginx.go b/workhorse/internal/helper/nginx/nginx.go index ca7c8543f75..101b1f9e9b4 100644 --- a/workhorse/internal/helper/nginx/nginx.go +++ b/workhorse/internal/helper/nginx/nginx.go @@ -1,13 +1,17 @@ +// Package nginx provides helper functions for interacting with NGINX package nginx import "net/http" +// ResponseBufferHeader is the HTTP header used to control response buffering in NGINX const ResponseBufferHeader = "X-Accel-Buffering" +// DisableResponseBuffering disables response buffering in NGINX for the provided HTTP response writer func DisableResponseBuffering(w http.ResponseWriter) { w.Header().Set(ResponseBufferHeader, "no") } +// AllowResponseBuffering enables response 
buffering in NGINX for the provided HTTP response writer func AllowResponseBuffering(w http.ResponseWriter) { w.Header().Del(ResponseBufferHeader) }
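For reference, the API::Projects specs above exercise the new ci_pipeline_variables_minimum_override_role project setting through PUT /projects/:id. A minimal Ruby sketch of calling that endpoint outside the test suite might look like the following; the host, project ID, and token are placeholders, while the parameter name and the accepted roles ('developer', 'maintainer', 'owner') come from the specs in this change.

require 'net/http'
require 'json'
require 'uri'

# Placeholders: point these at a real instance, project, and a token whose
# role satisfies the project's configured minimum.
gitlab_host = 'https://gitlab.example.com'
project_id  = 42
token       = ENV.fetch('GITLAB_TOKEN')

uri = URI("#{gitlab_host}/api/v4/projects/#{project_id}")
request = Net::HTTP::Put.new(uri)
request['PRIVATE-TOKEN'] = token
request['Content-Type'] = 'application/json'
# Per the specs: 'developer', 'maintainer', and 'owner' are accepted;
# 'guest' or unknown values are rejected with 400 Bad Request.
request.body = { ci_pipeline_variables_minimum_override_role: 'maintainer' }.to_json

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
  http.request(request)
end

# Per the specs: 200 with the updated attribute on success, 400 when the caller's
# role is below the configured minimum or the value is invalid, 403 when the
# caller cannot update the project at all.
puts response.code
puts JSON.parse(response.body)['ci_pipeline_variables_minimum_override_role'] if response.code == '200'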