Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2025-02-12 21:11:54 +00:00
parent 664908c02e
commit a26438c547
93 changed files with 1688 additions and 233 deletions

View File

@ -22,22 +22,29 @@
stage: prepare
needs: []
script:
- yarn_install_script
- export GITLAB_ASSETS_HASH=$(bin/rake gitlab:assets:hash_sum)
- 'echo "CACHE_ASSETS_AS_PACKAGE: ${CACHE_ASSETS_AS_PACKAGE}"'
# The new strategy to cache assets as generic packages is experimental and can be disabled by removing the `CACHE_ASSETS_AS_PACKAGE` variable
- |
if [[ "${CACHE_ASSETS_AS_PACKAGE}" == "true" ]]; then
source scripts/gitlab_component_helpers.sh
function compile_assets() {
yarn_install_script
export GITLAB_ASSETS_HASH=$(bin/rake gitlab:assets:hash_sum)
echo "CACHE_ASSETS_AS_PACKAGE: ${CACHE_ASSETS_AS_PACKAGE}"
if ! gitlab_assets_archive_doesnt_exist; then
# We remove all assets from the native cache as they could pollute the fresh assets from the package
rm -rf public/assets/ app/assets/javascripts/locale/**/app.js
run_timed_command "retry download_and_extract_gitlab_assets"
# The new strategy to cache assets as generic packages is experimental and
# can be disabled by removing the `CACHE_ASSETS_AS_PACKAGE` variable
if [[ "${CACHE_ASSETS_AS_PACKAGE}" == "true" ]]; then
source scripts/gitlab_component_helpers.sh
if ! gitlab_assets_archive_doesnt_exist; then
# We remove all assets from the native cache as they could pollute the fresh assets from the package
rm -rf public/assets/ app/assets/javascripts/locale/**/app.js
run_timed_command "retry download_and_extract_gitlab_assets"
fi
fi
fi
- assets_compile_script
- echo -n "${GITLAB_ASSETS_HASH}" > "cached-assets-hash.txt"
assets_compile_script
echo -n "${GITLAB_ASSETS_HASH}" > "cached-assets-hash.txt"
}
run_with_custom_exit_code compile_assets
.update-cache-base:
after_script:
@ -123,12 +130,18 @@ retrieve-frontend-fixtures:
script:
- source scripts/utils.sh
- source scripts/gitlab_component_helpers.sh
- install_gitlab_gem
- export_fixtures_sha_for_download
- |
if check_fixtures_download; then
run_timed_command "download_and_extract_fixtures"
fi
function retrieve_frontend_fixtures() {
install_gitlab_gem
export_fixtures_sha_for_download
if check_fixtures_download; then
run_timed_command "download_and_extract_fixtures"
fi
}
run_with_custom_exit_code retrieve_frontend_fixtures
artifacts:
expire_in: 30 days
paths:
@ -203,9 +216,14 @@ upload-frontend-fixtures:
script:
- source scripts/gitlab_component_helpers.sh
- export_fixtures_sha_for_upload
- 'fixtures_archive_doesnt_exist || { echoinfo "INFO: Exiting early as package exists."; exit 0; }'
- run_timed_command "create_fixtures_package"
- run_timed_command "upload_fixtures_package"
- |
function upload_frontend_fixtures() {
fixtures_archive_doesnt_exist || { echoinfo "INFO: Exiting early as package exists."; exit 0; }
run_timed_command "create_fixtures_package"
run_timed_command "upload_fixtures_package"
}
run_with_custom_exit_code upload_frontend_fixtures
graphql-schema-dump:
variables:

View File

@ -4129,7 +4129,6 @@ Layout/LineLength:
- 'spec/support/shared_examples/quick_actions/issue/clone_quick_action_shared_examples.rb'
- 'spec/support/shared_examples/quick_actions/issue/move_quick_action_shared_examples.rb'
- 'spec/support/shared_examples/quick_actions/merge_request/rebase_quick_action_shared_examples.rb'
- 'spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb'
- 'spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb'
- 'spec/support/shared_examples/requests/api/debian_distributions_shared_examples.rb'
- 'spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb'

View File

@ -2611,7 +2611,6 @@ RSpec/ContextWording:
- 'spec/support/shared_contexts/prometheus/alert_shared_context.rb'
- 'spec/support/shared_contexts/rack_attack_shared_context.rb'
- 'spec/support/shared_contexts/read_ci_configuration_shared_context.rb'
- 'spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb'
- 'spec/support/shared_contexts/requests/api/debian_repository_shared_context.rb'
- 'spec/support/shared_contexts/requests/api/go_modules_shared_context.rb'
- 'spec/support/shared_contexts/requests/api/graphql/group_and_project_boards_query_shared_context.rb'

View File

@ -176,7 +176,6 @@ RSpec/ReturnFromStub:
- 'spec/support/shared_examples/lib/gitlab/middleware/read_only_gitlab_instance_shared_examples.rb'
- 'spec/support/shared_examples/lib/gitlab/sidekiq_middleware/strategy_shared_examples.rb'
- 'spec/support/shared_examples/models/concerns/can_move_repository_storage_shared_examples.rb'
- 'spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb'
- 'spec/support/shared_examples/services/boards/boards_create_service_shared_examples.rb'
- 'spec/support/shared_examples/services/boards/create_service_shared_examples.rb'
- 'spec/support/shared_examples/uploaders/object_storage_shared_examples.rb'

View File

@ -232,7 +232,6 @@ Style/FormatString:
- 'spec/support/helpers/javascript_fixtures_helpers.rb'
- 'spec/support/shared_contexts/bulk_imports_requests_shared_context.rb'
- 'spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb'
- 'spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb'
- 'spec/support/shared_examples/services/jira/requests/base_shared_examples.rb'
- 'spec/support/shared_examples/views/registration_features_prompt_shared_examples.rb'
- 'spec/validators/any_field_validator_spec.rb'

View File

@ -2405,7 +2405,6 @@ Style/InlineDisableAnnotation:
- 'spec/support/shared_examples/models/member_shared_examples.rb'
- 'spec/support/shared_examples/models/packages/debian/component_file_shared_example.rb'
- 'spec/support/shared_examples/requests/api/award_emoji_todo_shared_examples.rb'
- 'spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb'
- 'spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb'
- 'spec/support/shared_examples/requests/api/helm_packages_shared_examples.rb'
- 'spec/support/shared_examples/requests/api/ml_model_packages_shared_examples.rb'

View File

@ -1 +1 @@
9673bce13f9be045d82ba58ebbb53daf0d5fa804
af8ae81032144f6b873ec403ad397a195d08c3ae

View File

@ -43,7 +43,7 @@ export default {
},
mixins: [glFeatureFlagsMixin()],
lastUsedHelpLink: helpPagePath('/user/profile/personal_access_tokens.md', {
anchor: 'view-the-time-at-and-ips-where-a-token-was-last-used',
anchor: 'view-token-usage-information',
}),
i18n: {
button: {

View File

@ -27,7 +27,7 @@ export default {
GlTooltip: GlTooltipDirective,
},
lastUsedHelpLink: helpPagePath('/user/profile/personal_access_tokens.md', {
anchor: 'view-the-time-at-and-ips-where-a-token-was-last-used',
anchor: 'view-token-usage-information',
}),
i18n: {
emptyField: __('Never'),

View File

@ -1,6 +1,4 @@
import ClipboardJS from 'clipboard';
import $ from 'jquery';
import Clipboard from 'clipboard';
import { parseBoolean } from '~/lib/utils/common_utils';
import { __ } from '~/locale';
import { fixTitle, add, show, hide, once } from '~/tooltips';
@ -55,41 +53,51 @@ function genericError(e) {
}
}
/**
* This is a workaround for Clipboard limitations to allow the context-specific copy/pasting
* of plain text or GFM. The Ruby `clipboard_button` helper sneaks a JSON hash with `text` and
* `gfm` keys into the `data-clipboard-text` attribute that Clipboard reads from.
* When Clipboard creates a new `textarea` (directly inside `body`, with a `readonly`
* attribute), sets its value to the value of this data attribute, focuses on it, and finally
* programmatically issues the 'Copy' command, this code intercepts the copy command/event at
* the last minute to deconstruct this JSON hash and set the `text/plain` and `text/x-gfm` copy
* data types to the intended values.
*/
const handleCopyEvent = (e) => {
const { target } = e;
if (target !== document.querySelector('body > textarea[readonly]')) {
return;
}
const { clipboardData } = e;
if (!clipboardData) return;
const text = target.value;
let json;
try {
json = JSON.parse(text);
} catch {
return;
}
if (!json.text || !json.gfm) return;
e.preventDefault();
clipboardData.setData('text/plain', json.text);
clipboardData.setData('text/x-gfm', json.gfm);
};
export default function initCopyToClipboard() {
const clipboard = new ClipboardJS('[data-clipboard-target], [data-clipboard-text]');
const clipboard = new Clipboard('[data-clipboard-target], [data-clipboard-text]');
clipboard.on('success', genericSuccess);
clipboard.on('error', genericError);
/**
* This is a workaround for ClipboardJS limitations to allow the context-specific copy/pasting
* of plain text or GFM. The Ruby `clipboard_button` helper sneaks a JSON hash with `text` and
* `gfm` keys into the `data-clipboard-text` attribute that ClipboardJS reads from.
* When ClipboardJS creates a new `textarea` (directly inside `body`, with a `readonly`
* attribute), sets its value to the value of this data attribute, focuses on it, and finally
* programmatically issues the 'Copy' command, this code intercepts the copy command/event at
* the last minute to deconstruct this JSON hash and set the `text/plain` and `text/x-gfm` copy
* data types to the intended values.
*/
$(document).on('copy', 'body > textarea[readonly]', (e) => {
const { clipboardData } = e.originalEvent;
if (!clipboardData) return;
const text = e.target.value;
let json;
try {
json = JSON.parse(text);
} catch (ex) {
return;
}
if (!json.text || !json.gfm) return;
e.preventDefault();
clipboardData.setData('text/plain', json.text);
clipboardData.setData('text/x-gfm', json.gfm);
});
document.addEventListener('copy', handleCopyEvent);
return clipboard;
}

View File

@ -39,7 +39,8 @@ export default {
},
},
i18n: {
title: s__('Environments|Stop environment'),
stopTitle: s__('Environments|Stop environment'),
stoppingTitle: s__('Environments|Stopping environment'),
},
data() {
return {
@ -47,6 +48,14 @@ export default {
isEnvironmentStopping: false,
};
},
computed: {
isLoadingState() {
return this.environment.state === 'stopping' || this.isEnvironmentStopping || this.isLoading;
},
title() {
return this.isLoadingState ? this.$options.i18n.stoppingTitle : this.$options.i18n.stopTitle;
},
},
mounted() {
eventHub.$on('stopEnvironment', this.onStopEnvironment);
},
@ -75,16 +84,23 @@ export default {
};
</script>
<template>
<gl-button
<div
v-gl-tooltip="{ id: $options.stopEnvironmentTooltipId }"
v-gl-modal-directive="'stop-environment-modal'"
:loading="isLoading || isEnvironmentStopping"
:title="$options.i18n.title"
:aria-label="$options.i18n.title"
size="small"
icon="stop"
category="secondary"
variant="danger"
@click="onClick"
/>
:title="title"
:tabindex="isLoadingState ? 0 : null"
class="gl-relative -gl-ml-[1px]"
>
<gl-button
v-gl-modal-directive="'stop-environment-modal'"
:loading="isLoadingState"
:aria-label="title"
:class="{ 'gl-pointer-events-none': isLoadingState }"
class="!gl-rounded-none"
size="small"
icon="stop"
category="secondary"
variant="danger"
@click="onClick"
/>
</div>
</template>

View File

@ -110,6 +110,9 @@ export default {
...action,
}));
},
isEnvironmentStopping() {
return this.environment?.state === 'stopping';
},
canStop() {
return this.environment?.canStop;
},
@ -233,7 +236,7 @@ export default {
/>
<stop-component
v-if="canStop"
v-if="canStop || isEnvironmentStopping"
:environment="environment"
data-track-action="click_button"
data-track-label="environment_stop"

View File

@ -1,15 +1,23 @@
<script>
import { debounce } from 'lodash';
import { debounce, difference } from 'lodash';
import { GlCollapsibleListbox, GlButton, GlAvatar, GlIcon } from '@gitlab/ui';
import { __ } from '~/locale';
import { InternalEvents } from '~/tracking';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import { TYPENAME_MERGE_REQUEST } from '~/graphql_shared/constants';
import { convertToGraphQLId } from '~/graphql_shared/utils';
import userAutocompleteWithMRPermissionsQuery from '~/graphql_shared/queries/project_autocomplete_users_with_mr_permissions.query.graphql';
import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue';
import { SEARCH_SELECT_REVIEWER_EVENT, SELECT_REVIEWER_EVENT } from '../../constants';
import UpdateReviewers from './update_reviewers.vue';
import userPermissionsQuery from './queries/user_permissions.query.graphql';
function toUsernames(reviewers) {
return reviewers.map((reviewer) => reviewer.username);
}
export default {
apollo: {
userPermissions: {
@ -31,6 +39,7 @@ export default {
UpdateReviewers,
InviteMembersTrigger,
},
mixins: [InternalEvents.mixin()],
inject: ['projectPath', 'issuableId', 'issuableIid', 'directlyInviteMembers'],
props: {
users: {
@ -54,14 +63,25 @@ export default {
search: '',
searching: false,
fetchedUsers: [],
currentSelectedReviewers: this.selectedReviewers.map((r) => r.username),
currentSelectedReviewers: toUsernames(this.selectedReviewers),
userPermissions: {},
};
},
computed: {
usersForList() {
let users;
if (this.fetchedUsers.length) {
users = this.fetchedUsers;
} else {
users = this.users;
}
return users;
},
mappedUsers() {
const items = [];
let users;
const users = this.usersForList;
if (this.selectedReviewersToShow.length && !this.search) {
items.push({
@ -70,12 +90,6 @@ export default {
});
}
if (this.fetchedUsers.length) {
users = this.fetchedUsers;
} else {
users = this.users;
}
items.push({
textSrOnly: true,
text: __('Users'),
@ -96,7 +110,7 @@ export default {
},
watch: {
selectedReviewers(newVal) {
this.currentSelectedReviewers = newVal.map((r) => r.username);
this.currentSelectedReviewers = toUsernames(newVal);
},
},
created() {
@ -142,6 +156,29 @@ export default {
removeAllReviewers() {
this.currentSelectedReviewers = [];
},
trackReviewersSelectEvent() {
const telemetryEvent = this.search ? SEARCH_SELECT_REVIEWER_EVENT : SELECT_REVIEWER_EVENT;
const previousUsernames = toUsernames(this.selectedReviewers);
const listUsernames = toUsernames(this.usersForList);
// Reviewers are always shown first if they are in the list,
// so we should exclude them when we check the position
const selectableList = difference(listUsernames, previousUsernames);
const additions = difference(this.currentSelectedReviewers, previousUsernames);
additions.forEach((added) => {
// Convert from 0- to 1-index
const listPosition = selectableList.findIndex((user) => user === added) + 1;
this.trackEvent(telemetryEvent, {
value: listPosition,
selectable_reviewers_count: selectableList.length,
});
});
},
processReviewers(updateReviewers) {
this.trackReviewersSelectEvent();
updateReviewers();
},
},
i18n: {
selectReviewer: __('Select reviewer'),
@ -170,7 +207,7 @@ export default {
:searching="searching"
@search="debouncedFetchAutocompleteUsers"
@shown="shownDropdown"
@hidden="updateReviewers"
@hidden="processReviewers(updateReviewers)"
@reset="removeAllReviewers"
>
<template #toggle>

View File

@ -1 +1,4 @@
export const MAX_LIST_SIZE = 10;
export const SELECT_REVIEWER_EVENT = 'user_selects_reviewer_from_mr_sidebar';
export const SEARCH_SELECT_REVIEWER_EVENT = 'user_selects_reviewer_from_mr_sidebar_after_search';

View File

@ -112,7 +112,7 @@ export const accessLevelsConfig = {
},
};
export const SQUASH_SETTING_DO_NOT_ALLOW = 'do_not_allow';
export const SQUASH_SETTING_ALLOW = 'allow';
export const SQUASH_SETTING_ENCOURAGE = 'encourage';
export const SQUASH_SETTING_REQUIRE = 'require';
export const SQUASH_SETTING_DO_NOT_ALLOW = 'NEVER';
export const SQUASH_SETTING_ALLOW = 'ALLOWED';
export const SQUASH_SETTING_ENCOURAGE = 'ENCOURAGED';
export const SQUASH_SETTING_REQUIRE = 'ALWAYS';

View File

@ -31,9 +31,11 @@ import {
CHANGED_REQUIRE_CODEOWNER_APPROVAL,
} from 'ee_else_ce/projects/settings/branch_rules/tracking/constants';
import deleteBranchRuleMutation from '../../mutations/branch_rule_delete.mutation.graphql';
import editSquashOptionMutation from '../../mutations/edit_squash_option.mutation.graphql';
import BranchRuleModal from '../../../components/branch_rule_modal.vue';
import Protection from './protection.vue';
import AccessLevelsDrawer from './access_levels_drawer.vue';
import SquashSettingsDrawer from './squash_settings_drawer.vue';
import ProtectionToggle from './protection_toggle.vue';
import {
I18N,
@ -70,12 +72,14 @@ export default {
GlButton,
BranchRuleModal,
AccessLevelsDrawer,
SquashSettingsDrawer,
PageHeading,
CrudComponent,
SettingsSection,
},
mixins: [glFeatureFlagsMixin()],
inject: {
allowEditSquashSetting: { default: false },
projectPath: {
default: '',
},
@ -154,10 +158,11 @@ export default {
matchingBranchesCount: null,
isAllowedToMergeDrawerOpen: false,
isAllowedToPushAndMergeDrawerOpen: false,
isSquashSettingsDrawerOpen: false,
isRuleUpdating: false,
isAllowForcePushLoading: false,
isCodeOwnersLoading: false,
squashOption: null,
squashOption: {},
};
},
computed: {
@ -248,6 +253,11 @@ export default {
showSquashSetting() {
return this.glFeatures.branchRuleSquashSettings && !this.branch?.includes('*'); // Squash settings are not available for wildcards
},
showEditSquashSetting() {
return (
this.canAdminProtectedBranches && this.allowEditSquashSetting && !this.isAllBranchesRule
);
},
},
methods: {
...mapActions(['setRulesFilter', 'fetchRules']),
@ -343,6 +353,29 @@ export default {
});
}
},
onEditSquashSettings(selectedOption) {
this.isRuleUpdating = true;
const branchRuleId = this.branchRule.id;
this.$apollo
.mutate({
mutation: editSquashOptionMutation,
variables: { input: { branchRuleId, squashOption: selectedOption } },
})
.then(({ data: { branchRuleSquashOptionUpdate } }) => {
if (branchRuleSquashOptionUpdate?.errors.length) {
createAlert({ message: this.$options.i18n.updateBranchRuleError });
return;
}
this.$apollo.queries.project.refetch();
})
.catch(() => createAlert({ message: this.$options.i18n.updateBranchRuleError }))
.finally(() => {
this.isSquashSettingsDrawerOpen = false;
this.isRuleUpdating = false;
});
},
editBranchRule({
name = this.branchRule.name,
branchProtection = null,
@ -570,10 +603,11 @@ export default {
v-if="showSquashSetting"
:header="$options.i18n.squashSettingHeader"
:empty-state-copy="$options.i18n.squashSettingEmptyState"
:is-edit-available="false"
:is-edit-available="showEditSquashSetting"
:icon="null"
class="gl-mt-5"
data-testid="squash-setting-content"
@edit="isSquashSettingsDrawerOpen = true"
>
<template #description>
<gl-sprintf :message="$options.i18n.squashSettingHelpText">
@ -593,6 +627,14 @@ export default {
</protection>
</settings-section>
<squash-settings-drawer
:is-open="isSquashSettingsDrawerOpen"
:is-loading="isRuleUpdating"
:selected-option="squashOption.option"
@submit="onEditSquashSettings"
@close="isSquashSettingsDrawerOpen = false"
/>
<!-- Status checks -->
<settings-section
v-if="showStatusChecksSection"

View File

@ -3,6 +3,7 @@ import { GlDrawer, GlButton, GlFormRadioGroup, GlFormRadio } from '@gitlab/ui';
import { __, s__ } from '~/locale';
import { DRAWER_Z_INDEX } from '~/lib/utils/constants';
import { getContentWrapperHeight } from '~/lib/utils/dom_utils';
import { findSelectedOptionValueByLabel } from './utils';
import {
SQUASH_SETTING_DO_NOT_ALLOW,
SQUASH_SETTING_ALLOW,
@ -59,15 +60,18 @@ export default {
},
data() {
return {
selected: this.selectedOption,
selected: findSelectedOptionValueByLabel(this.$options.OPTIONS, this.selectedOption),
};
},
computed: {
getDrawerHeaderHeight() {
return getContentWrapperHeight();
},
selectedOptionValue() {
return findSelectedOptionValueByLabel(this.$options.OPTIONS, this.selectedOption);
},
hasChanged() {
return this.selected !== this.selectedOption;
return this.selected !== this.selectedOptionValue;
},
},
methods: {

View File

@ -0,0 +1,4 @@
export const findSelectedOptionValueByLabel = (options, label) => {
const option = options.find((opt) => opt.label === label);
return option?.value || options[0]?.value;
};

View File

@ -4,7 +4,7 @@ import createDefaultClient from '~/lib/graphql';
import { parseBoolean } from '~/lib/utils/common_utils';
import View from 'ee_else_ce/projects/settings/branch_rules/components/view/index.vue';
export default function mountBranchRules(el, store) {
export default function mountBranchRules(el, store, allowEditSquashSetting = false) {
if (!el) {
return null;
}
@ -47,6 +47,7 @@ export default function mountBranchRules(el, store) {
showCodeOwners: parseBoolean(showCodeOwners),
showEnterpriseAccessLevels: parseBoolean(showEnterpriseAccessLevels),
canAdminProtectedBranches: parseBoolean(canAdminProtectedBranches),
allowEditSquashSetting,
},
render(h) {
return h(View);

View File

@ -0,0 +1,9 @@
mutation updateBranchRuleSquashOption($input: BranchRuleSquashOptionUpdateInput!) {
branchRuleSquashOptionUpdate(input: $input) {
errors
squashOption {
option
helpText
}
}
}

View File

@ -1,6 +1,6 @@
<script>
/**
* Falls back to the code used in `copy_to_clipboard.js`
* Falls back to the code used in `javascripts/behaviors/copy_to_clipboard.js`
*
* Renders a button with a clipboard icon that copies the content of `data-clipboard-text`
* when clicked.

View File

@ -0,0 +1,55 @@
# frozen_string_literal: true
module Mutations
module Projects
module BranchRules
module SquashOptions
class Update < BaseMutation
graphql_name 'BranchRuleSquashOptionUpdate'
description 'Update a squash option for a branch rule'
authorize :update_branch_rule
argument :branch_rule_id, ::Types::GlobalIDType[::Projects::BranchRule],
required: true,
description: 'Global ID of the branch rule.'
argument :squash_option, ::Types::Projects::BranchRules::SquashOptionSettingEnum,
required: true,
description: 'Squash option after mutation.'
field :squash_option,
type: ::Types::Projects::BranchRules::SquashOptionType,
null: true,
description: 'Updated squash option after mutation.'
def resolve(branch_rule_id:, squash_option:)
branch_rule = authorized_find!(id: branch_rule_id)
if feature_disabled?(branch_rule.project)
raise_resource_not_available_error! 'Squash options feature disabled'
end
service_response = ::Projects::BranchRules::SquashOptions::UpdateService.new(
branch_rule,
squash_option: squash_option,
current_user: current_user
).execute
{
squash_option: (service_response.payload if service_response.success?),
errors: service_response.errors
}
end
private
def feature_disabled?(project)
Feature.disabled?(:branch_rule_squash_settings, project)
end
end
end
end
end
end
Mutations::Projects::BranchRules::SquashOptions::Update.prepend_mod

View File

@ -128,6 +128,7 @@ module Types
mount_mutation Mutations::Projects::SyncFork, calls_gitaly: true, experiment: { milestone: '15.9' }
mount_mutation Mutations::Projects::TextReplace, calls_gitaly: true, experiment: { milestone: '17.1' }
mount_mutation Mutations::Projects::Star, experiment: { milestone: '16.7' }
mount_mutation Mutations::Projects::BranchRules::SquashOptions::Update, experiment: { milestone: '17.9' }
mount_mutation Mutations::BranchRules::Update, experiment: { milestone: '16.7' }
mount_mutation Mutations::BranchRules::Create, experiment: { milestone: '16.7' }
mount_mutation Mutations::Releases::Create

View File

@ -9,7 +9,7 @@ module Types
abilities :read_work_item, :update_work_item, :delete_work_item,
:admin_work_item, :admin_parent_link, :set_work_item_metadata,
:create_note, :admin_work_item_link, :mark_note_as_internal,
:report_spam, :move_work_item, :clone_work_item
:report_spam, :move_work_item, :clone_work_item, :summarize_comments
end
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
module Types
module Projects
module BranchRules
class SquashOptionSettingEnum < ::Types::BaseEnum
graphql_name 'SquashOptionSetting'
description 'Options for default squash behaviour for merge requests'
value 'NEVER', description: 'Do not allow.', value: 0
value 'ALLOWED', description: 'Allow.', value: 3
value 'ENCOURAGED', description: 'Encourage.', value: 2
value 'ALWAYS', description: 'Require.', value: 1
end
end
end
end

View File

@ -21,7 +21,8 @@ module HasUserType
llm_bot: 14,
placeholder: 15,
duo_code_review_bot: 16,
import_user: 17
import_user: 17,
ci_pipeline_bot: 18
}.with_indifferent_access.freeze
BOT_USER_TYPES = %w[
@ -38,6 +39,7 @@ module HasUserType
service_account
llm_bot
duo_code_review_bot
ci_pipeline_bot
].freeze
# `service_account` allows instance/namespaces to configure a user for external integrations/automations

View File

@ -10,10 +10,10 @@ module Projects
def human_squash_option
case squash_option
when 'never' then 'Do not allow'
when 'always' then 'Require'
when 'default_on' then 'Encourage'
when 'default_off' then 'Allow'
when 'never' then s_('SquashSettings|Do not allow')
when 'always' then s_('SquashSettings|Require')
when 'default_on' then s_('SquashSettings|Encourage')
when 'default_off' then s_('SquashSettings|Allow')
end
end

View File

@ -0,0 +1,62 @@
# frozen_string_literal: true
module Projects
module BranchRules
module SquashOptions
class UpdateService
AUTHORIZATION_ERROR_MESSAGE = 'Not authorized'
NOT_SUPPORTED_ERROR_MESSAGE = 'Updating BranchRule not supported'
def initialize(branch_rule, squash_option:, current_user:)
@branch_rule = branch_rule
@squash_option = squash_option
@current_user = current_user
end
def execute
return ServiceResponse.error(message: AUTHORIZATION_ERROR_MESSAGE) unless authorized?
if branch_rule.is_a?(::Projects::AllBranchesRule)
execute_on_all_branches_rule
else
execute_on_branch_rule
end
end
private
attr_reader :branch_rule, :squash_option, :current_user
def execute_on_all_branches_rule
result = Projects::UpdateService.new(project, current_user,
project_setting_attributes: { squash_option: squash_option }).execute
return ServiceResponse.error(message: result[:message]) unless result[:status] == :success
success_response
end
def execute_on_branch_rule
ServiceResponse.error(message: NOT_SUPPORTED_ERROR_MESSAGE)
end
def success_response
ServiceResponse.success(payload: branch_rule.squash_option)
end
def project
branch_rule.project
end
def protected_branch
branch_rule.protected_branch
end
def authorized?
Ability.allowed?(current_user, :update_branch_rule, branch_rule)
end
end
end
end
end
::Projects::BranchRules::SquashOptions::UpdateService.prepend_mod

View File

@ -0,0 +1,21 @@
---
description: User selects a reviewer from the merge request reviewers sidebar
internal_events: true
action: user_selects_reviewer_from_mr_sidebar
identifiers:
- project
- namespace
- user
additional_properties:
value:
description: List position of the selected user
selectable_reviewers_count:
description: The total number of selectable reviewers in the list this reviewer was selected from
product_group: code_review
product_categories:
- code_review_workflow
milestone: '17.9'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180307
tiers:
- premium
- ultimate

View File

@ -0,0 +1,21 @@
---
description: User selects a reviewer from the merge request reviewers sidebar after they have entered a search query and the list has updated
internal_events: true
action: user_selects_reviewer_from_mr_sidebar_after_search
identifiers:
- project
- namespace
- user
additional_properties:
value:
description: List position of the selected user
selectable_reviewers_count:
description: The total number of selectable reviewers in the list this reviewer was selected from
product_group: code_review
product_categories:
- code_review_workflow
milestone: '17.9'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180307
tiers:
- premium
- ultimate

View File

@ -0,0 +1,22 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_user_selects_reviewer_from_mr_sidebar
description: Count of unique users who selected a reviewer from the MR reviewer sidebar
product_group: code_review
product_categories:
- code_review_workflow
performance_indicator_type: []
value_type: number
status: active
milestone: '17.9'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180307
time_frame:
- 28d
- 7d
data_source: internal_events
data_category: optional
tiers:
- premium
- ultimate
events:
- name: user_selects_reviewer_from_mr_sidebar
unique: user.id

View File

@ -0,0 +1,22 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_user_selects_reviewer_from_mr_sidebar_after_search
description: Count of unique users who selected a reviewer from the MR reviewer sidebar after filtering the reviewer list with a custom search
product_group: code_review
product_categories:
- code_review_workflow
performance_indicator_type: []
value_type: number
status: active
milestone: '17.9'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180307
time_frame:
- 28d
- 7d
data_source: internal_events
data_category: optional
tiers:
- premium
- ultimate
events:
- name: user_selects_reviewer_from_mr_sidebar_after_search
unique: user.id

View File

@ -271,6 +271,8 @@
- 1
- - compliance_management_standards_soc2_at_least_one_non_author_approval_group
- 1
- - compliance_management_timeout_pending_external_controls
- 1
- - compliance_management_update_default_framework
- 1
- - compliance_management_violation_export_mailer

View File

@ -0,0 +1,12 @@
---
table_name: merge_requests_approval_rules_groups
classes:
- MergeRequests::ApprovalRulesGroup
feature_categories:
- code_review_workflow
description: Stores the relationship between approval rules v2 and groups
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/179861
milestone: '17.9'
gitlab_schema: gitlab_main_cell
sharding_key:
group_id: namespaces

View File

@ -0,0 +1,12 @@
---
table_name: merge_requests_approval_rules_merge_requests
classes:
- MergeRequests::ApprovalRulesMergeRequest
feature_categories:
- code_review_workflow
description: Stores the relationship between approval rules v2 and merge requests
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/179861
milestone: '17.9'
gitlab_schema: gitlab_main_cell
sharding_key:
project_id: projects

View File

@ -0,0 +1,12 @@
---
table_name: merge_requests_approval_rules_projects
classes:
- MergeRequests::ApprovalRulesProject
feature_categories:
- code_review_workflow
description: Stores the relationship between approval rules v2 and projects
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/179861
milestone: '17.9'
gitlab_schema: gitlab_main_cell
sharding_key:
project_id: projects

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
class CreateMergeRequestsApprovalRulesGroups < Gitlab::Database::Migration[2.2]
milestone '17.9'
def change
create_table :merge_requests_approval_rules_groups do |t| # -- Migration/EnsureFactoryForTable false positive
t.bigint :approval_rule_id, null: false
t.bigint :group_id, null: false
t.index :group_id
t.timestamps_with_timezone null: false
end
add_index(
:merge_requests_approval_rules_groups,
%i[approval_rule_id group_id],
unique: true,
name: 'index_mrs_ars_groups_on_ar_id_and_group_id'
)
end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
class CreateMergeRequestsApprovalRulesMergeRequests < Gitlab::Database::Migration[2.2]
milestone '17.9'
def change
create_table :merge_requests_approval_rules_merge_requests do |t| # Migration/EnsureFactoryForTable false positive
t.bigint :approval_rule_id, null: false
t.bigint :merge_request_id, null: false
t.bigint :project_id, null: false
t.index :merge_request_id, name: 'index_mrs_approval_rules_mrs_on_mr_id'
t.index :project_id, name: 'index_mrs_approval_rules_mrs_on_project_id'
t.timestamps_with_timezone null: false
end
add_index(
:merge_requests_approval_rules_merge_requests,
%i[approval_rule_id merge_request_id],
unique: true,
name: 'index_mrs_ars_mrs_on_ar_id_and_mr_id'
)
end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
class CreateMergeRequestsApprovalRulesProjects < Gitlab::Database::Migration[2.2]
milestone '17.9'
def change
create_table :merge_requests_approval_rules_projects do |t| # -- Migration/EnsureFactoryForTable false positive
t.bigint :approval_rule_id, null: false
t.bigint :project_id, null: false
t.index :project_id, name: 'index_mrs_approval_rules_projects_on_project_id'
t.timestamps_with_timezone null: false
end
add_index(
:merge_requests_approval_rules_projects,
%i[approval_rule_id project_id],
unique: true,
name: 'index_mrs_ars_projects_on_ar_id_and_project_id'
)
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class AddMergeRequestsApprovalRulesGroupsApprovalRuleFk < Gitlab::Database::Migration[2.2]
milestone '17.9'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :merge_requests_approval_rules_groups, :merge_requests_approval_rules,
column: :approval_rule_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :merge_requests_approval_rules_groups, column: :approval_rule_id
end
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class AddMergeRequestsApprovalRulesGroupsGroupFk < Gitlab::Database::Migration[2.2]
milestone '17.9'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :merge_requests_approval_rules_groups, :namespaces, column: :group_id,
on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :merge_requests_approval_rules_groups, column: :group_id
end
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class AddMergeRequestsApprovalRulesProjectsApprovalRuleFk < Gitlab::Database::Migration[2.2]
milestone '17.9'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :merge_requests_approval_rules_projects, :merge_requests_approval_rules,
column: :approval_rule_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :merge_requests_approval_rules_projects, column: :approval_rule_id
end
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class AddMergeRequestsApprovalRulesProjectsProjectFk < Gitlab::Database::Migration[2.2]
milestone '17.9'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :merge_requests_approval_rules_projects, :projects, column: :project_id,
on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :merge_requests_approval_rules_projects, column: :project_id
end
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class AddMergeRequestsApprovalRulesMrsApprovalRuleFk < Gitlab::Database::Migration[2.2]
milestone '17.9'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :merge_requests_approval_rules_merge_requests, :merge_requests_approval_rules,
column: :approval_rule_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :merge_requests_approval_rules_merge_requests, column: :approval_rule_id
end
end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
class AddMergeRequestsApprovalRulesMrsMrFk < Gitlab::Database::Migration[2.2]
milestone '17.9'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :merge_requests_approval_rules_merge_requests, :merge_requests,
column: :merge_request_id,
on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :merge_requests_approval_rules_merge_requests, column: :merge_request_id
end
end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
class AddMergeRequestsApprovalRulesMrsProjectFk < Gitlab::Database::Migration[2.2]
milestone '17.9'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :merge_requests_approval_rules_merge_requests, :projects,
column: :project_id,
on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :merge_requests_approval_rules_merge_requests, column: :project_id
end
end
end

View File

@ -0,0 +1 @@
620def37e032e7fa463c4f1ecff0aacde33a5b9bd7a56c2f6ead418b3f4ce828

View File

@ -0,0 +1 @@
e5a26f1a31603cf3ac570824874afbeac3bb970ca079bdaaebbf9cbe13155230

View File

@ -0,0 +1 @@
d6fc925f97583a4509894202b2e529ae64138f2cceef76e0d0c4dcaafe70f45a

View File

@ -0,0 +1 @@
56101662487483f6866c2c1f2cd0ee66d1fa12d08c0da12bd550d389e37f6bf7

View File

@ -0,0 +1 @@
4b740e775e43ed2143b7060b5e5e1730aeb7309cc83e4d76a5316cc84e80893b

View File

@ -0,0 +1 @@
31f9cc1659cd680b16a742eddeb361c4eb059388b1af0136d0d5a648a6fb8cca

View File

@ -0,0 +1 @@
08d74afd22511618546821c8cc53473ec3a9f6ec4be3c80a20086e1a2e71da37

View File

@ -0,0 +1 @@
1611b7751ef4fa7476f9ffeab5263f9e0e85f89ab21e3028b2d5245dd38f31db

View File

@ -0,0 +1 @@
cd304e944ce13f05dc95d6c74a604bca579a04b08d9abf0fd143ae03c00f1390

View File

@ -0,0 +1 @@
69fd6b83d6c739006134ada0d7019b9c04ea5d6ae34b3dc4906546a2c3070110

View File

@ -16347,6 +16347,23 @@ CREATE TABLE merge_requests_approval_rules (
CONSTRAINT check_c7c36145b7 CHECK ((char_length(name) <= 255))
);
CREATE TABLE merge_requests_approval_rules_groups (
id bigint NOT NULL,
approval_rule_id bigint NOT NULL,
group_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL
);
CREATE SEQUENCE merge_requests_approval_rules_groups_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE merge_requests_approval_rules_groups_id_seq OWNED BY merge_requests_approval_rules_groups.id;
CREATE SEQUENCE merge_requests_approval_rules_id_seq
START WITH 1
INCREMENT BY 1
@ -16356,6 +16373,41 @@ CREATE SEQUENCE merge_requests_approval_rules_id_seq
ALTER SEQUENCE merge_requests_approval_rules_id_seq OWNED BY merge_requests_approval_rules.id;
CREATE TABLE merge_requests_approval_rules_merge_requests (
id bigint NOT NULL,
approval_rule_id bigint NOT NULL,
merge_request_id bigint NOT NULL,
project_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL
);
CREATE SEQUENCE merge_requests_approval_rules_merge_requests_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE merge_requests_approval_rules_merge_requests_id_seq OWNED BY merge_requests_approval_rules_merge_requests.id;
CREATE TABLE merge_requests_approval_rules_projects (
id bigint NOT NULL,
approval_rule_id bigint NOT NULL,
project_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL
);
CREATE SEQUENCE merge_requests_approval_rules_projects_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE merge_requests_approval_rules_projects_id_seq OWNED BY merge_requests_approval_rules_projects.id;
CREATE TABLE merge_requests_closing_issues (
id bigint NOT NULL,
merge_request_id bigint NOT NULL,
@ -25606,6 +25658,12 @@ ALTER TABLE ONLY merge_requests ALTER COLUMN id SET DEFAULT nextval('merge_reque
ALTER TABLE ONLY merge_requests_approval_rules ALTER COLUMN id SET DEFAULT nextval('merge_requests_approval_rules_id_seq'::regclass);
ALTER TABLE ONLY merge_requests_approval_rules_groups ALTER COLUMN id SET DEFAULT nextval('merge_requests_approval_rules_groups_id_seq'::regclass);
ALTER TABLE ONLY merge_requests_approval_rules_merge_requests ALTER COLUMN id SET DEFAULT nextval('merge_requests_approval_rules_merge_requests_id_seq'::regclass);
ALTER TABLE ONLY merge_requests_approval_rules_projects ALTER COLUMN id SET DEFAULT nextval('merge_requests_approval_rules_projects_id_seq'::regclass);
ALTER TABLE ONLY merge_requests_closing_issues ALTER COLUMN id SET DEFAULT nextval('merge_requests_closing_issues_id_seq'::regclass);
ALTER TABLE ONLY merge_requests_compliance_violations ALTER COLUMN id SET DEFAULT nextval('merge_requests_compliance_violations_id_seq'::regclass);
@ -28167,9 +28225,18 @@ ALTER TABLE ONLY merge_request_reviewers
ALTER TABLE ONLY merge_request_user_mentions
ADD CONSTRAINT merge_request_user_mentions_pkey PRIMARY KEY (id);
ALTER TABLE ONLY merge_requests_approval_rules_groups
ADD CONSTRAINT merge_requests_approval_rules_groups_pkey PRIMARY KEY (id);
ALTER TABLE ONLY merge_requests_approval_rules_merge_requests
ADD CONSTRAINT merge_requests_approval_rules_merge_requests_pkey PRIMARY KEY (id);
ALTER TABLE ONLY merge_requests_approval_rules
ADD CONSTRAINT merge_requests_approval_rules_pkey PRIMARY KEY (id);
ALTER TABLE ONLY merge_requests_approval_rules_projects
ADD CONSTRAINT merge_requests_approval_rules_projects_pkey PRIMARY KEY (id);
ALTER TABLE ONLY merge_requests_closing_issues
ADD CONSTRAINT merge_requests_closing_issues_pkey PRIMARY KEY (id);
@ -33579,6 +33646,8 @@ CREATE INDEX index_merge_request_reviewers_on_user_id ON merge_request_reviewers
CREATE UNIQUE INDEX index_merge_request_user_mentions_on_note_id ON merge_request_user_mentions USING btree (note_id) WHERE (note_id IS NOT NULL);
CREATE INDEX index_merge_requests_approval_rules_groups_on_group_id ON merge_requests_approval_rules_groups USING btree (group_id);
CREATE INDEX index_merge_requests_approval_rules_on_group_id ON merge_requests_approval_rules USING btree (group_id);
CREATE INDEX index_merge_requests_approval_rules_on_project_id ON merge_requests_approval_rules USING btree (project_id);
@ -33757,6 +33826,18 @@ CREATE INDEX index_mr_metrics_on_target_project_id_merged_at_nulls_last ON merge
CREATE INDEX index_mr_metrics_on_target_project_id_merged_at_time_to_merge ON merge_request_metrics USING btree (target_project_id, merged_at, created_at) WHERE (merged_at > created_at);
CREATE INDEX index_mrs_approval_rules_mrs_on_mr_id ON merge_requests_approval_rules_merge_requests USING btree (merge_request_id);
CREATE INDEX index_mrs_approval_rules_mrs_on_project_id ON merge_requests_approval_rules_merge_requests USING btree (project_id);
CREATE INDEX index_mrs_approval_rules_projects_on_project_id ON merge_requests_approval_rules_projects USING btree (project_id);
CREATE UNIQUE INDEX index_mrs_ars_groups_on_ar_id_and_group_id ON merge_requests_approval_rules_groups USING btree (approval_rule_id, group_id);
CREATE UNIQUE INDEX index_mrs_ars_mrs_on_ar_id_and_mr_id ON merge_requests_approval_rules_merge_requests USING btree (approval_rule_id, merge_request_id);
CREATE UNIQUE INDEX index_mrs_ars_projects_on_ar_id_and_project_id ON merge_requests_approval_rules_projects USING btree (approval_rule_id, project_id);
CREATE INDEX index_namespace_admin_notes_on_namespace_id ON namespace_admin_notes USING btree (namespace_id);
CREATE UNIQUE INDEX index_namespace_aggregation_schedules_on_namespace_id ON namespace_aggregation_schedules USING btree (namespace_id);
@ -38628,6 +38709,9 @@ ALTER TABLE ONLY observability_traces_issues_connections
ALTER TABLE ONLY merge_request_assignment_events
ADD CONSTRAINT fk_08f7602bfd FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE;
ALTER TABLE ONLY merge_requests_approval_rules_groups
ADD CONSTRAINT fk_094b4086a3 FOREIGN KEY (approval_rule_id) REFERENCES merge_requests_approval_rules(id) ON DELETE CASCADE;
ALTER TABLE ONLY catalog_resource_component_last_usages
ADD CONSTRAINT fk_094c686785 FOREIGN KEY (component_project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -38787,6 +38871,9 @@ ALTER TABLE ONLY agent_project_authorizations
ALTER TABLE ONLY ai_agent_version_attachments
ADD CONSTRAINT fk_1d4253673b FOREIGN KEY (ai_vectorizable_file_id) REFERENCES ai_vectorizable_files(id) ON DELETE CASCADE;
ALTER TABLE ONLY merge_requests_approval_rules_merge_requests
ADD CONSTRAINT fk_1d49645a27 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY design_management_versions
ADD CONSTRAINT fk_1dccb304f8 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
@ -39108,6 +39195,9 @@ ALTER TABLE ONLY incident_management_timeline_events
ALTER TABLE ONLY todos
ADD CONSTRAINT fk_45054f9c45 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY merge_requests_approval_rules_projects
ADD CONSTRAINT fk_451a9dfe93 FOREIGN KEY (approval_rule_id) REFERENCES merge_requests_approval_rules(id) ON DELETE CASCADE;
ALTER TABLE ONLY security_policy_requirements
ADD CONSTRAINT fk_458f7f5ad5 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
@ -39225,6 +39315,9 @@ ALTER TABLE ONLY approval_merge_request_rules
ALTER TABLE ONLY deploy_keys_projects
ADD CONSTRAINT fk_58a901ca7e FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY merge_requests_approval_rules_groups
ADD CONSTRAINT fk_59068f09e5 FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY oauth_access_grants
ADD CONSTRAINT fk_59cdb2323c FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
@ -39258,6 +39351,9 @@ ALTER TABLE ONLY dast_scanner_profiles_builds
ALTER TABLE ONLY protected_environment_deploy_access_levels
ADD CONSTRAINT fk_5d9b05a7e9 FOREIGN KEY (protected_environment_project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY merge_requests_approval_rules_merge_requests
ADD CONSTRAINT fk_5ddc4a2f7b FOREIGN KEY (approval_rule_id) REFERENCES merge_requests_approval_rules(id) ON DELETE CASCADE;
ALTER TABLE ONLY issue_assignees
ADD CONSTRAINT fk_5e0c8d9154 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
@ -39399,6 +39495,9 @@ ALTER TABLE ONLY index_statuses
ALTER TABLE ONLY abuse_report_notes
ADD CONSTRAINT fk_74e1990397 FOREIGN KEY (abuse_report_id) REFERENCES abuse_reports(id) ON DELETE CASCADE;
ALTER TABLE ONLY merge_requests_approval_rules_merge_requests
ADD CONSTRAINT fk_74e3466397 FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE;
ALTER TABLE ONLY software_license_policies
ADD CONSTRAINT fk_74f6d8328a FOREIGN KEY (custom_software_license_id) REFERENCES custom_software_licenses(id) ON DELETE CASCADE;
@ -39846,6 +39945,9 @@ ALTER TABLE ONLY ml_experiments
ALTER TABLE ONLY merge_request_metrics
ADD CONSTRAINT fk_ae440388cc FOREIGN KEY (latest_closed_by_id) REFERENCES users(id) ON DELETE SET NULL;
ALTER TABLE ONLY merge_requests_approval_rules_projects
ADD CONSTRAINT fk_af4078336f FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY analytics_cycle_analytics_group_stages
ADD CONSTRAINT fk_analytics_cycle_analytics_group_stages_group_value_stream_id FOREIGN KEY (group_value_stream_id) REFERENCES analytics_cycle_analytics_group_value_streams(id) ON DELETE CASCADE;

View File

@ -38965,6 +38965,7 @@ Check permissions for the current user on a work item.
| <a id="workitempermissionsreadworkitem"></a>`readWorkItem` | [`Boolean!`](#boolean) | If `true`, the user can perform `read_work_item` on this resource. |
| <a id="workitempermissionsreportspam"></a>`reportSpam` | [`Boolean!`](#boolean) | If `true`, the user can perform `report_spam` on this resource. |
| <a id="workitempermissionssetworkitemmetadata"></a>`setWorkItemMetadata` | [`Boolean!`](#boolean) | If `true`, the user can perform `set_work_item_metadata` on this resource. |
| <a id="workitempermissionssummarizecomments"></a>`summarizeComments` | [`Boolean!`](#boolean) | If `true`, the user can perform `summarize_comments` on this resource. |
| <a id="workitempermissionsupdateworkitem"></a>`updateWorkItem` | [`Boolean!`](#boolean) | If `true`, the user can perform `update_work_item` on this resource. |
### `WorkItemRelatedBranch`
@ -43007,6 +43008,7 @@ Possible types of user.
| <a id="usertypeadmin_bot"></a>`ADMIN_BOT` | Admin bot. |
| <a id="usertypealert_bot"></a>`ALERT_BOT` | Alert bot. |
| <a id="usertypeautomation_bot"></a>`AUTOMATION_BOT` | Automation bot. |
| <a id="usertypeci_pipeline_bot"></a>`CI_PIPELINE_BOT` | Ci pipeline bot. |
| <a id="usertypeduo_code_review_bot"></a>`DUO_CODE_REVIEW_BOT` | Duo code review bot. |
| <a id="usertypeghost"></a>`GHOST` | Ghost. |
| <a id="usertypehuman"></a>`HUMAN` | Human. |

View File

@ -0,0 +1,50 @@
---
stage: AI-powered
group: AI Framework
info: Any user with at least the Maintainer role can merge updates to this content. For details, see https://docs.gitlab.com/ee/development/development_processes.html#development-guidelines-review.
title: Evaluation runner
---
Evaluation runner (`evaluation-runner`) allows GitLab employees to run evaluations on specific GitLab AI features with one click.
- You can run the evaluation on GitLab.com and GitLab-supported self-hosted models.
- To view the AI features that are currently supported, see
[Evaluation pipelines](https://gitlab.com/gitlab-org/modelops/ai-model-validation-and-research/ai-evaluation/evaluation-runner#evaluation-pipelines).
Evaluation runner spins up a new GDK instance on a remote environment, runs an evaluation, and reports the result.
For more details, view the
[`evaluation-runner` repository](https://gitlab.com/gitlab-org/modelops/ai-model-validation-and-research/ai-evaluation/evaluation-runner).
## Architecture
```mermaid
flowchart LR
subgraph EV["Evaluators"]
PL(["PromptLibrary/ELI5"])
DSIN(["Input Dataset"])
end
subgraph ER["EvaluationRunner"]
CI["CI/CD pipelines"]
subgraph GDKS["Remote GDKs"]
subgraph GDKM["GDK-master"]
bl1["Duo features on master branch"]
fi1["fixtures (Issue,MR,etc)"]
end
subgraph GDKF["GDK-feature"]
bl2["Duo features on feature branch"]
fi2["fixtures (Issue,MR,etc)"]
end
end
end
subgraph MR["MergeRequests"]
GRMR["GitLab-Rails MR"]
GRAI["AI Gateway MR"]
end
MR -- [1] trigger --- CI
CI -- [2] spins up --- GDKS
PL -- [3] get responses and evaluate --- GDKS
```

View File

@ -0,0 +1,181 @@
---
stage: Deploy
group: Environments
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# GitLab-managed Kubernetes resources
DETAILS:
**Tier:** Free, Premium, Ultimate
**Offering:** GitLab.com, GitLab Self-Managed, GitLab Dedicated
> - [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/16130) in GitLab 17.9
Use GitLab-managed Kubernetes resources to provision Kubernetes resources with environment templates. An environment template can:
- Create namespaces and service accounts automatically for new environments
- Manage access permissions through role bindings
- Configure other required Kubernetes resources
When developers deploy applications, GitLab creates the resources based on the environment template.
## Configure GitLab-managed Kubernetes resources
Prerequisites:
- You must have a configured [GitLab agent for Kubernetes](install/_index.md).
- You have [authorized the agent](ci_cd_workflow.md#authorize-the-agent) to access relevant projects or groups.
- (Optional) You have configured [agent impersonation](ci_cd_workflow.md#restrict-project-and-group-access-by-using-impersonation) to prevent privilege escalations. The default environment template assumes you have configured [`ci_job` impersonation](ci_cd_workflow.md#impersonate-the-cicd-job-that-accesses-the-cluster).
### Turn on Kubernetes resource management
To turn on resource management, modify the agent configuration file to include the required permissions:
```yaml
ci_access:
projects:
- id: <your_group/your_project>
access_as:
ci_job: {}
resource_management:
enabled: true
groups:
- id: <your_other_group>
access_as:
ci_job: {}
resource_management:
enabled: true
```
### Create environment templates
Environment templates define what Kubernetes resources are created, updated, or removed.
The [default environment template](https://gitlab.com/gitlab-org/cluster-integration/gitlab-agent/-/blob/master/internal/module/managed_resources/server/default_template.yaml) creates a `Namespace` and configures a `RoleBinding` for the CI/CD job.
To overwrite the default template, add a template configuration file called `default.yaml` in the agent directory:
```plaintext
.gitlab/agents/<agent-name>/environment_templates/default.yaml
```
To create an environment template, add a template configuration file in the agent directory at:
```plaintext
.gitlab/agents/<agent-name>/environment_templates/<template-name>.yaml
```
You can specify which template is included in a CI/CD pipeline. For more information, see [Use templates in CI/CD pipelines](#use-managed-resources-in-cicd-pipelines).
#### Supported Kubernetes resources
The following Kubernetes resources (`kind`) are supported:
- `Namespace`
- `ServiceAccount`
- `RoleBinding`
- FluxCD Source Controller objects:
- `GitRepository`
- `HelmRepository`
- `HelmChart`
- `Bucket`
- `OCIRepository`
- FluxCD Kustomize Controller objects:
- `Kustomization`
- FluxCD Helm Controller objects:
- `HelmRelease`
- FluxCD Notification Controller objects:
- `Alert`
- `Provider`
- `Receiver`
#### Example environment template
The following example creates a namespace and grants the CI/CD jobs that deploy to the environment administrator access to that namespace.
```yaml
objects:
- apiVersion: v1
kind: Namespace
metadata:
name: '{{ .environment.slug }}-{{ .project.id }}-{{ .agent.id }}'
- apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: bind-{{ .agent.id }}-{{ .project.id }}-{{ .environment.slug }}
      namespace: '{{ .environment.slug }}-{{ .project.id }}-{{ .agent.id }}'
subjects:
- kind: Group
apiGroup: rbac.authorization.k8s.io
name: gitlab:project_env:{{ .project.id }}:{{ .environment.slug }}
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: admin
# Resource lifecycle configuration
apply_resources: on_start # Resources are applied when environment is started/restarted
delete_resources: on_stop # Resources are removed when environment is stopped
```
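The supported FluxCD objects can be templated the same way. For example, building on the previous template (which creates the namespace), the following hypothetical entries under `objects` deploy a Helm chart into that namespace. The repository URL, chart, and release names are placeholders, and the FluxCD `apiVersion` values should be checked against the controller versions installed in your cluster:

```yaml
  - apiVersion: source.toolkit.fluxcd.io/v1
    kind: HelmRepository
    metadata:
      name: app-charts                          # placeholder repository name
      namespace: '{{ .environment.slug }}-{{ .project.id }}-{{ .agent.id }}'
    spec:
      interval: 10m
      url: https://charts.example.com           # placeholder chart repository
  - apiVersion: helm.toolkit.fluxcd.io/v2
    kind: HelmRelease
    metadata:
      name: '{{ .environment.slug }}-app'       # placeholder release name
      namespace: '{{ .environment.slug }}-{{ .project.id }}-{{ .agent.id }}'
    spec:
      interval: 10m
      chart:
        spec:
          chart: app                            # placeholder chart name
          sourceRef:
            kind: HelmRepository
            name: app-charts
```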
### Template variables
Environment templates support limited variable substitution.
The following variables are available:
| Category | Variable | Description |
|----------|----------|-------------|
| Agent | `{{ .agent.id }}` | The agent identifier. |
| Agent | `{{ .agent.name }}` | The agent name. |
| Agent | `{{ .agent.url }}` | The agent URL. |
| Environment | `{{ .environment.name }}` | The environment name. |
| Environment | `{{ .environment.slug }}` | The environment slug. |
| Environment | `{{ .environment.url }}` | The environment URL. |
| Environment | `{{ .environment.tier }}` | The environment tier. |
| Project | `{{ .project.id }}` | The project identifier. |
| Project | `{{ .project.slug }}` | The project slug. |
| Project | `{{ .project.path }}` | The project path. |
| Project | `{{ .project.url }}` | The project URL. |
| CI Pipeline | `{{ .ci_pipeline.id }}` | The pipeline identifier. |
| CI Job | `{{ .ci_job.id }}` | The CI/CD job identifier. |
| User | `{{ .user.id }}` | The user identifier. |
| User | `{{ .user.username }}` | The username. |
All variables should be referenced using the double curly brace syntax, for example: `{{ .project.id }}`.
See [`text/template`](https://pkg.go.dev/text/template) documentation for more information on the templating system used.
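For example, with an agent ID of `42`, a project ID of `1000`, and an environment slug of `production` (illustrative values), the namespace name used in the template above renders like this:

```yaml
metadata:
  name: '{{ .environment.slug }}-{{ .project.id }}-{{ .agent.id }}'
  # Rendered result: production-1000-42
```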
### Resource lifecycle management
Use the following settings to configure when Kubernetes resources should be applied or removed from an environment:
```yaml
# Apply resources when environment is started or restarted
apply_resources: on_start
# Never delete resources
delete_resources: never
# Delete resources when environment is stopped
delete_resources: on_stop
```
### Use managed resources in CI/CD pipelines
To use managed Kubernetes resources in your CI/CD pipelines, specify the agent and optionally the template name in your environment configuration:
```yaml
deploy:
environment:
name: production
kubernetes:
agent: agent-name
template: my-template # Optional, uses default template if not specified
```
## Troubleshooting
Any errors related to managed Kubernetes resources can be found on:
- The environment page in your GitLab project
- The CI/CD job logs when using the feature in pipelines

View File

@ -18,7 +18,7 @@ DETAILS:
As an administrator of Kubernetes clusters in an organization, you can grant Kubernetes access to members
of a specific project or group.
Granting access also activates the Dashboard for Kubernetes for a project or group.
Granting access also activates [the Dashboard for Kubernetes](../../../ci/environments/kubernetes_dashboard.md) for a project or group.
For self-managed instances, make sure you either:

View File

@ -170,6 +170,7 @@ Audit event types belong to the following product categories.
| [`merge_request_merged`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/164846) | A merge request is merged | **{check-circle}** Yes | GitLab [17.5](https://gitlab.com/gitlab-org/gitlab/-/issues/442279) | Project |
| [`omniauth_login_failed`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/123080) | An OmniAuth login fails | **{check-circle}** Yes | GitLab [16.3](https://gitlab.com/gitlab-org/gitlab/-/issues/374107) | User |
| [`password_reset_requested`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/114548) | A user requests a password reset using a registered email address | **{check-circle}** Yes | GitLab [15.11](https://gitlab.com/gitlab-org/gitlab/-/issues/374107) | User |
| [`pending_compliance_external_control_failed`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180016) | A project's compliance external control status is updated to fail because of a timeout. | **{check-circle}** Yes | GitLab [17.9](https://gitlab.com/gitlab-org/gitlab/-/issues/513421) | Project |
| [`personal_access_token_created`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108952) | A user creates a personal access token | **{check-circle}** Yes | GitLab [15.9](https://gitlab.com/gitlab-org/gitlab/-/issues/374113) | User |
| [`personal_access_token_revoked`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108952) | A personal access token is revoked | **{check-circle}** Yes | GitLab [15.9](https://gitlab.com/gitlab-org/gitlab/-/issues/374113) | User |
| [`project_archived`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/117528) | A project is archived | **{check-circle}** Yes | GitLab [15.11](https://gitlab.com/gitlab-org/gitlab/-/issues/374105) | Project |

View File

@ -33,28 +33,132 @@ This workflow uses an agent to connect to your cluster. The agent:
NOTE:
The certificate-based integration was used for popular GitLab features like
GitLab Managed Apps, GitLab-managed clusters, and Auto DevOps.
Some features are currently available only when using certificate-based integration.
GitLab-managed Apps, GitLab-managed clusters, and Auto DevOps.
## Migrate cluster application deployments
## Find certificate-based clusters
### Migrate from GitLab-managed clusters
You can find all the certificate-based clusters within a GitLab instance or group, including subgroups and projects, using [a dedicated API](../../../api/cluster_discovery.md#discover-certificate-based-clusters). Querying the API with a group ID returns all the certificate-based clusters defined at or below the provided group.
With GitLab-managed clusters, GitLab creates separate service accounts and namespaces
for every branch and deploys by using these resources.
Clusters defined in parent groups are not returned in this case. This behavior helps group Owners find all the clusters they need to migrate.
The GitLab agent uses [impersonation](../../clusters/agent/ci_cd_workflow.md#restrict-project-and-group-access-by-using-impersonation)
strategies to deploy to your cluster with restricted account access. To do so:
Disabled clusters are returned as well to avoid accidentally leaving clusters behind.
1. Choose the impersonation strategy that suits your needs.
1. Use Kubernetes RBAC rules to manage impersonated account permissions in Kubernetes.
1. Use the `access_as` attribute in your agent configuration file to define the impersonation.
NOTE:
The cluster discovery API does not work for personal namespaces.
### Migrate from Auto DevOps
## Migrate generic deployments
To migrate generic deployments:
1. Install the [GitLab agent for Kubernetes](../../clusters/agent/install/_index.md).
1. Follow the CI/CD workflow to [authorize the agent to access](../../clusters/agent/ci_cd_workflow.md#authorize-the-agent) groups and projects, or to [secure access with impersonation](../../clusters/agent/ci_cd_workflow.md#restrict-project-and-group-access-by-using-impersonation).
1. On the left sidebar, select **Operate > Kubernetes clusters**.
1. From the certificate-based clusters section, open the cluster that serves the same environment scope.
1. Select the **Details** tab and turn off the cluster.
## Migrate from GitLab-managed clusters to Kubernetes resources
With GitLab-managed clusters, GitLab creates separate service accounts and namespaces for every branch and deploys by using these resources.
Now, you can use [GitLab-managed Kubernetes resources](../../clusters/agent/managed_kubernetes_resources.md) to self-serve resources with enhanced security controls.
With GitLab-managed Kubernetes resources, you can:
- Set up environments securely without manual intervention.
- Control resource creation and access without giving developers administrative cluster permissions.
- Provide self-service capabilities for [developers](https://handbook.gitlab.com/handbook/product/personas/#sasha-software-developer) when they create a new project or environment.
- Allow developers to deploy testing and development versions in dedicated or shared namespaces.
Prerequisites:
- Install the [GitLab agent for Kubernetes](../../clusters/agent/install/_index.md).
- [Authorize the agent](../../clusters/agent/ci_cd_workflow.md#authorize-the-agent) to access relevant projects or groups.
- Check the status of the **Namespace per environment** checkbox on your certificate-based cluster integration page.
To migrate from GitLab-managed clusters to GitLab-managed Kubernetes resources:
1. In your agent configuration file, turn on resource management:
```yaml
ci_access:
projects:
- id: <your_group/your_project>
access_as:
ci_job: {}
resource_management:
enabled: true
groups:
- id: <your_other_group>
access_as:
ci_job: {}
resource_management:
enabled: true
```
1. Create an environment template under `.gitlab/agents/<agent-name>/environment_templates/default.yaml`. The template to use depends on the status of the **Namespace per environment** checkbox from the prerequisites.
If **Namespace per environment** was checked, use the following template:
```yaml
objects:
- apiVersion: v1
kind: Namespace
metadata:
name: {{ .project.slug }}-{{ .project.id }}-{{ .environment.slug }}
- apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: bind-{{ .agent.id }}-{{ .project.id }}-{{ .environment.slug }}
namespace: {{ .project.slug }}-{{ .project.id }}-{{ .environment.slug }}
subjects:
- kind: Group
apiGroup: rbac.authorization.k8s.io
name: gitlab:project_env:{{ .project.id }}:{{ .environment.slug }}
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: admin
```
If **Namespace per environment** was unchecked, use the following template:
```yaml
objects:
- apiVersion: v1
kind: Namespace
metadata:
name: {{ .project.slug }}-{{ .project.id }}
- apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: bind-{{ .agent.id }}-{{ .project.id }}-{{ .environment.slug }}
namespace: {{ .project.slug }}-{{ .project.id }}
subjects:
- kind: Group
apiGroup: rbac.authorization.k8s.io
name: gitlab:project_env:{{ .project.id }}:{{ .environment.slug }}
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: admin
```
1. In your CI/CD configuration, use the agent with the `environment.kubernetes.agent: <path/to/agent/project:agent-name>` syntax, as shown in the example after this list.
1. On the left sidebar, select **Operate > Kubernetes clusters**.
1. From the certificate-based clusters section, open the cluster that serves the same environment scope.
1. Select the **Details** tab and turn off the cluster.
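For the CI/CD configuration step, a minimal deployment job might look like the following sketch; the project path, agent name, and script contents are placeholders:

```yaml
deploy:
  stage: deploy
  script:
    - echo "Deploy using the resources created by the agent"
  environment:
    name: production
    kubernetes:
      # <path/to/agent/project> is the project where the agent is registered;
      # <agent-name> is the directory name under .gitlab/agents/
      agent: path/to/agent/project:agent-name
```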
## Migrate from Auto DevOps
In your Auto DevOps project, you can use the GitLab agent to connect with your Kubernetes cluster.
1. [Install an agent](../../clusters/agent/install/_index.md) in your cluster.
Prerequisites:
- Install the [GitLab agent for Kubernetes](../../clusters/agent/install/_index.md).
- [Authorize the agent](../../clusters/agent/ci_cd_workflow.md#authorize-the-agent) to access relevant projects or groups.
To migrate from Auto DevOps:
1. In GitLab, go to the project where you use Auto DevOps.
1. Add three variables. On the left sidebar, select **Settings > CI/CD** and expand **Variables**.
- Add a key called `KUBE_INGRESS_BASE_DOMAIN` with the application deployment domain as the value.
@ -92,17 +196,13 @@ In your Auto DevOps project, you can use the GitLab agent to connect with your K
For an example, [view this project](https://gitlab.com/gitlab-examples/ops/gitops-demo/hello-world-service).
### Migrate generic deployments
## Migrate from GitLab-managed applications
Follow the process for the [CI/CD workflow](../../clusters/agent/ci_cd_workflow.md).
## Migrate from GitLab Managed applications
GitLab Managed Apps (GMA) were deprecated in GitLab 14.0, and removed in GitLab 15.0.
GitLab-managed Apps (GMA) were deprecated in GitLab 14.0, and removed in GitLab 15.0.
The agent for Kubernetes does not support them. To migrate from GMA to the
agent, go through the following steps:
1. [Migrate from GitLab Managed Apps to a cluster management project](../../clusters/migrating_from_gma_to_project_template.md).
1. [Migrate from GitLab-managed Apps to a cluster management project](../../clusters/migrating_from_gma_to_project_template.md).
1. [Migrate the cluster management project to use the agent](../../clusters/management_project_template.md).
## Migrate a cluster management project
@ -111,4 +211,4 @@ See [how to use a cluster management project with the GitLab agent](../../cluste
## Migrate cluster monitoring features
Cluster monitoring features are not yet supported by the GitLab agent for Kubernetes.
After you connect a Kubernetes cluster to GitLab with the agent for Kubernetes, you can enable [user access](../../clusters/agent/user_access.md) and use the [dashboard for Kubernetes](../../../ci/environments/kubernetes_dashboard.md).

View File

@ -162,7 +162,7 @@ To disable the enterprise users' personal access tokens:
When you delete or block an enterprise user account, their personal access tokens are automatically revoked.
## View the time at and IPs where a token was last used
## View token usage information
> - In GitLab 16.0 and earlier, token usage information is updated every 24 hours.
> - The frequency of token usage information updates [changed](https://gitlab.com/gitlab-org/gitlab/-/issues/410168) in GitLab 16.1 from 24 hours to 10 minutes.

View File

@ -283,7 +283,7 @@ module Gitlab
end
def bot_user_can_read_project?(user, project)
(user.project_bot? || user.service_account? || user.security_policy_bot?) && can_read_project?(user, project)
(user.project_bot? || user.ci_pipeline_bot? || user.service_account? || user.security_policy_bot?) && can_read_project?(user, project)
end
def valid_oauth_token?(token)

View File

@ -22805,6 +22805,9 @@ msgstr ""
msgid "Environments|Stop unused environments"
msgstr ""
msgid "Environments|Stopping environment"
msgstr ""
msgid "Environments|Synced"
msgstr ""
@ -26743,6 +26746,9 @@ msgstr ""
msgid "GlobalSearch|Projects"
msgstr ""
msgid "GlobalSearch|Projects not indexed"
msgstr ""
msgid "GlobalSearch|Recent epics"
msgstr ""
@ -26905,9 +26911,6 @@ msgstr ""
msgid "GlobalSearch|in %{scope}"
msgstr ""
msgid "GlobalSearch|projects not indexed"
msgstr ""
msgid "GlobalShortcuts|Copied reference to clipboard."
msgstr ""
@ -33547,7 +33550,7 @@ msgstr ""
msgid "Learn more about seats owed"
msgstr ""
msgid "Learn more about shards and replicas in the %{configuration_link_start}Advanced Search configuration%{configuration_link_end} documentation. Changes don't take place until you %{recreated_link_start}recreate%{recreated_link_end} the index."
msgid "Learn more about shards and replicas in the %{configuration_link_start}advanced search configuration%{configuration_link_end} documentation. Changes don't take place until you %{recreated_link_start}recreate%{recreated_link_end} the index."
msgstr ""
msgid "Learn more."
@ -39051,7 +39054,7 @@ msgstr ""
msgid "Now, personalize your GitLab experience"
msgstr ""
msgid "Number of Elasticsearch shards and replicas per index:"
msgid "Number of Elasticsearch shards and replicas per index"
msgstr ""
msgid "Number of Git pushes after which Gitaly is asked to optimize a repository."
@ -53541,6 +53544,33 @@ msgstr ""
msgid "Service accounts"
msgstr ""
msgid "ServiceAccounts|Add Service Account"
msgstr ""
msgid "ServiceAccounts|An error occurred while fetching the service accounts."
msgstr ""
msgid "ServiceAccounts|Delete Account"
msgstr ""
msgid "ServiceAccounts|Delete Account and Contributions"
msgstr ""
msgid "ServiceAccounts|Edit"
msgstr ""
msgid "ServiceAccounts|Manage Access Tokens"
msgstr ""
msgid "ServiceAccounts|Name"
msgstr ""
msgid "ServiceAccounts|Service Accounts"
msgstr ""
msgid "ServiceAccounts|Service accounts are non-human accounts that allow interactions between software applications, systems, or services. %{learnMore}"
msgstr ""
msgid "ServiceAccount|No more seats are available to create Service Account User"
msgstr ""

View File

@ -53,7 +53,9 @@ module QA
end
def has_no_runner?(runner)
has_no_element?("runner-row-#{runner.id}")
retry_until(reload: true, sleep_interval: 2, max_attempts: 6, message: "Retry for runner removal") do
has_no_element?("runner-row-#{runner.id}")
end
end
def go_to_runner_managers_page(runner)

View File

@ -28,8 +28,7 @@ module QA
runner.unregister!
Page::Group::Runners::Index.perform do |group_runners|
group_runners.refresh
expect { group_runners.has_no_runner?(runner) }.to eventually_be(true).within(sleep_interval: 2)
expect(group_runners.has_no_runner?(runner)).to be(true)
end
end
end

View File

@ -518,7 +518,7 @@ RSpec.describe Import::GithubController, feature_category: :importers do
post :cancel_all
expect(json_response).to eq([
expect(json_response).to match_array([
{
'id' => project.id,
'status' => 'success'

View File

@ -136,6 +136,10 @@ FactoryBot.define do
user_type { :llm_bot }
end
trait :ci_pipeline_bot do
user_type { :ci_pipeline_bot }
end
trait :duo_code_review_bot do
user_type { :duo_code_review_bot }
end

View File

@ -23,7 +23,7 @@ RSpec.describe 'Environments page', :js, feature_category: :continuous_delivery
end
def stop_button_selector
'button[title="Stop environment"]'
'button[aria-label="Stop environment"]'
end
def upcoming_deployment_content_selector

View File

@ -185,7 +185,7 @@ describe('~/access_tokens/components/access_token_table_app', () => {
const assistiveElement = lastUsed.find('.gl-sr-only');
expect(anchor.exists()).toBe(true);
expect(anchor.attributes('href')).toBe(
'/help/user/profile/personal_access_tokens.md#view-the-time-at-and-ips-where-a-token-was-last-used',
'/help/user/profile/personal_access_tokens.md#view-token-usage-information',
);
expect(assistiveElement.text()).toBe('The last time a token was used');
});
@ -199,7 +199,7 @@ describe('~/access_tokens/components/access_token_table_app', () => {
const assistiveElement = lastUsedIPs.find('.gl-sr-only');
expect(anchor.exists()).toBe(true);
expect(anchor.attributes('href')).toBe(
'/help/user/profile/personal_access_tokens.md#view-the-time-at-and-ips-where-a-token-was-last-used',
'/help/user/profile/personal_access_tokens.md#view-token-usage-information',
);
expect(assistiveElement.text()).toBe(
'The last five distinct IP addresses from where the token was used',

View File

@ -115,7 +115,7 @@ describe('~/access_tokens/components/inactive_access_token_table_app', () => {
const assistiveElement = lastUsed.find('.gl-sr-only');
expect(anchor.exists()).toBe(true);
expect(anchor.attributes('href')).toBe(
'/help/user/profile/personal_access_tokens.md#view-the-time-at-and-ips-where-a-token-was-last-used',
'/help/user/profile/personal_access_tokens.md#view-token-usage-information',
);
expect(assistiveElement.text()).toBe('The last time a token was used');
});

View File

@ -7,6 +7,7 @@ import isEnvironmentStoppingQuery from '~/environments/graphql/queries/is_enviro
import StopComponent from '~/environments/components/environment_stop.vue';
import eventHub from '~/environments/event_hub';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { resolvedEnvironment } from './graphql/mock_data';
describe('Stop Component', () => {
@ -24,7 +25,7 @@ describe('Stop Component', () => {
const findButton = () => wrapper.findComponent(GlButton);
describe('eventHub', () => {
describe('default', () => {
beforeEach(() => {
createWrapper();
});
@ -33,6 +34,12 @@ describe('Stop Component', () => {
expect(findButton().exists()).toBe(true);
expect(wrapper.attributes('title')).toEqual('Stop environment');
});
});
describe('eventHub', () => {
beforeEach(() => {
createWrapper();
});
it('emits requestStopEnvironment in the event hub when button is clicked', () => {
jest.spyOn(eventHub, '$emit');
@ -44,37 +51,72 @@ describe('Stop Component', () => {
describe('graphql', () => {
Vue.use(VueApollo);
let mockApollo;
const resolvers = {
Query: {
isEnvironmentStopping: () => true,
},
};
beforeEach(() => {
mockApollo = createMockApollo();
mockApollo.clients.defaultClient.writeQuery({
query: isEnvironmentStoppingQuery,
variables: { environment: resolvedEnvironment },
data: { isEnvironmentStopping: true },
});
const createWrapperWithApollo = () => {
createWrapper(
{ graphql: true, environment: resolvedEnvironment },
{ apolloProvider: mockApollo },
);
});
};
it('should render a button to stop the environment', () => {
expect(findButton().exists()).toBe(true);
expect(wrapper.attributes('title')).toEqual('Stop environment');
it('queries for environment stopping state', () => {
mockApollo = createMockApollo([], resolvers);
jest.spyOn(mockApollo.defaultClient, 'watchQuery');
createWrapperWithApollo();
expect(mockApollo.defaultClient.watchQuery).toHaveBeenCalledWith({
query: isEnvironmentStoppingQuery,
variables: { environment: resolvedEnvironment },
});
});
it('sets the environment to stop on click', () => {
mockApollo = createMockApollo();
jest.spyOn(mockApollo.defaultClient, 'mutate');
createWrapperWithApollo();
findButton().vm.$emit('click');
expect(mockApollo.defaultClient.mutate).toHaveBeenCalledWith({
mutation: setEnvironmentToStopMutation,
variables: { environment: resolvedEnvironment },
});
});
it('should show a loading icon if the environment is currently stopping', () => {
expect(findButton().props('loading')).toBe(true);
describe('when the environment is currently stopping', () => {
beforeEach(async () => {
mockApollo = createMockApollo([], resolvers);
createWrapperWithApollo();
await waitForPromises();
});
it('should render a button with a loading icon and a correct title', () => {
const button = findButton();
expect(button.props('loading')).toBe(true);
expect(wrapper.attributes('title')).toBe('Stopping environment');
});
});
});
describe('when the environment is in stopping state', () => {
beforeEach(() => {
createWrapper({ environment: { ...resolvedEnvironment, state: 'stopping' } });
});
it('should render a button with a loading icon and a correct title', () => {
const button = findButton();
expect(button.props('loading')).toBe(true);
expect(wrapper.attributes('title')).toBe('Stopping environment');
});
});
});

View File

@ -163,6 +163,15 @@ describe('~/environments/components/new_environment_item.vue', () => {
expect(findStopComponent().exists()).toBe(false);
});
it('shows a button to stop the environment if the environment is in stopping state', () => {
wrapper = createWrapper({
propsData: { environment: { ...resolvedEnvironment, state: 'stopping' } },
apolloProvider: createApolloProvider(),
});
expect(findStopComponent().exists()).toBe(true);
});
});
describe('rollback', () => {

View File

@ -5,12 +5,15 @@ import { shallowMount } from '@vue/test-utils';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
import { useMockInternalEventsTracking } from 'helpers/tracking_internal_events_helper';
import ReviewerDropdown from '~/merge_requests/components/reviewers/reviewer_dropdown.vue';
import UpdateReviewers from '~/merge_requests/components/reviewers/update_reviewers.vue';
import userPermissionsQuery from '~/merge_requests/components/reviewers/queries/user_permissions.query.graphql';
import userAutocompleteWithMRPermissionsQuery from '~/graphql_shared/queries/project_autocomplete_users_with_mr_permissions.query.graphql';
import setReviewersMutation from '~/merge_requests/components/reviewers/queries/set_reviewers.mutation.graphql';
const { bindInternalEventDocument } = useMockInternalEventsTracking();
let wrapper;
let autocompleteUsersMock;
let setReviewersMutationMock;
@ -209,6 +212,70 @@ describe('Reviewer dropdown component', () => {
}),
);
});
describe('tracking when the dropdown is closed', () => {
let trackEventSpy;
beforeEach(async () => {
createComponent(true, {
users: [createMockUser(), createMockUser({ id: 2, name: 'Nonadmin', username: 'bob' })],
});
await waitForPromises();
({ trackEventSpy } = bindInternalEventDocument(wrapper.element));
});
it('tracks which position any selected users were in as a telemetry event', () => {
findDropdown().vm.$emit('select', ['root']);
findDropdown().vm.$emit('hidden');
expect(trackEventSpy).toHaveBeenCalledWith(
'user_selects_reviewer_from_mr_sidebar',
{
value: 1,
selectable_reviewers_count: 2,
},
undefined,
);
});
it('tracks which position any selected users were in - discounting already selected reviewers - as a telemetry event', async () => {
createComponent(true, {
users: [createMockUser(), createMockUser({ id: 2, name: 'Nonadmin', username: 'bob' })],
selectedReviewers: [createMockUser()],
});
await waitForPromises();
findDropdown().vm.$emit('select', ['bob']);
findDropdown().vm.$emit('hidden');
expect(trackEventSpy).toHaveBeenCalledWith(
'user_selects_reviewer_from_mr_sidebar',
{
value: 1,
selectable_reviewers_count: 1,
},
undefined,
);
});
it('tracks which position any selected users were in after a search as a telemetry event', () => {
findDropdown().vm.$emit('search', 'bob');
findDropdown().vm.$emit('select', ['bob']);
findDropdown().vm.$emit('hidden');
expect(trackEventSpy).toHaveBeenCalledWith(
'user_selects_reviewer_from_mr_sidebar_after_search',
{
value: 2,
selectable_reviewers_count: 2,
},
undefined,
);
});
});
});
describe('when users are passed as a prop', () => {

View File

@ -15,6 +15,7 @@ import CrudComponent from '~/vue_shared/components/crud_component.vue';
import SettingsSection from '~/vue_shared/components/settings/settings_section.vue';
import RuleView from '~/projects/settings/branch_rules/components/view/index.vue';
import AccessLevelsDrawer from '~/projects/settings/branch_rules/components/view/access_levels_drawer.vue';
import SquashSettingsDrawer from '~/projects/settings/branch_rules/components/view/squash_settings_drawer.vue';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import Protection from '~/projects/settings/branch_rules/components/view/protection.vue';
import ProtectionToggle from '~/projects/settings/branch_rules/components/view/protection_toggle.vue';
@ -31,6 +32,7 @@ import branchRulesQuery from 'ee_else_ce/projects/settings/branch_rules/queries/
import squashOptionQuery from '~/projects/settings/branch_rules/queries/squash_option.query.graphql';
import deleteBranchRuleMutation from '~/projects/settings/branch_rules/mutations/branch_rule_delete.mutation.graphql';
import editBranchRuleMutation from 'ee_else_ce/projects/settings/branch_rules/mutations/edit_branch_rule.mutation.graphql';
import editBranchRuleSquashOptionMutation from '~/projects/settings/branch_rules/mutations/edit_squash_option.mutation.graphql';
import {
editBranchRuleMockResponse,
deleteBranchRuleMockResponse,
@ -74,6 +76,7 @@ describe('View branch rules', () => {
.mockResolvedValue(predefinedBranchRulesMockResponse);
const deleteBranchRuleSuccessHandler = jest.fn().mockResolvedValue(deleteBranchRuleMockResponse);
const editBranchRuleSuccessHandler = jest.fn().mockResolvedValue(editBranchRuleMockResponse);
const editSquashOptionSuccessHandler = jest.fn().mockResolvedValue(editBranchRuleMockResponse);
const protectableBranchesMockRequestHandler = jest
.fn()
.mockResolvedValue(protectableBranchesMockResponse);
@ -84,10 +87,12 @@ describe('View branch rules', () => {
const createComponent = async ({
glFeatures = { editBranchRules: true, branchRuleSquashSettings: true },
canAdminProtectedBranches = true,
allowEditSquashSetting = true,
branchRulesQueryHandler = branchRulesMockRequestHandler,
squashOptionQueryHandler = squashOptionMockRequestHandler,
deleteMutationHandler = deleteBranchRuleSuccessHandler,
editMutationHandler = editBranchRuleSuccessHandler,
editSquashOptionMutationHandler = editSquashOptionSuccessHandler,
} = {}) => {
fakeApollo = createMockApollo([
[branchRulesQuery, branchRulesQueryHandler],
@ -95,6 +100,7 @@ describe('View branch rules', () => {
[getProtectableBranches, protectableBranchesMockRequestHandler],
[deleteBranchRuleMutation, deleteMutationHandler],
[editBranchRuleMutation, editMutationHandler],
[editBranchRuleSquashOptionMutation, editSquashOptionMutationHandler],
]);
wrapper = shallowMountExtended(RuleView, {
@ -105,6 +111,7 @@ describe('View branch rules', () => {
branchRulesPath,
glFeatures,
canAdminProtectedBranches,
allowEditSquashSetting,
},
stubs: {
ApprovalRulesApp: true,
@ -152,6 +159,7 @@ describe('View branch rules', () => {
const findNoDataTitle = () => wrapper.findByText(I18N.noData);
const findAccessLevelsDrawer = () => wrapper.findComponent(AccessLevelsDrawer);
const findSquashSettingSection = () => wrapper.findByTestId('squash-setting-content');
const findSquashSettingsDrawer = () => wrapper.findComponent(SquashSettingsDrawer);
const findMatchingBranchesLink = () =>
wrapper.findByText(
@ -168,6 +176,88 @@ describe('View branch rules', () => {
expect(findSquashSettingSection().exists()).toBe(false);
});
it.each`
scenario | canAdminProtectedBranches | expectedIsEditAvailable | description
${'user has permission'} | ${true} | ${true} | ${'shows edit button'}
${'user does not have permission'} | ${false} | ${false} | ${'hides edit button'}
`(
'$description when $scenario',
async ({ canAdminProtectedBranches, expectedIsEditAvailable }) => {
await createComponent({
glFeatures: { branchRuleSquashSettings: true },
canAdminProtectedBranches,
});
expect(findSquashSettingSection().props('isEditAvailable')).toBe(expectedIsEditAvailable);
},
);
it('opens squash settings drawer when edit is clicked', async () => {
await createComponent();
findSquashSettingSection().vm.$emit('edit');
await nextTick();
expect(findSquashSettingsDrawer().props('isOpen')).toBe(true);
});
it('calls mutation with correct data when drawer emits submit', async () => {
const mutationSpy = jest.fn().mockResolvedValue({
data: { branchRuleSquashOptionUpdate: { errors: [] } },
});
await createComponent({
editSquashOptionMutationHandler: mutationSpy,
});
findSquashSettingSection().vm.$emit('edit');
await nextTick();
findSquashSettingsDrawer().vm.$emit('submit', 'always');
await waitForPromises();
expect(mutationSpy).toHaveBeenCalledWith({
input: {
branchRuleId: 'gid://gitlab/Projects/BranchRule/1',
squashOption: 'always',
},
});
});
it('shows error alert if mutation fails', async () => {
const mutationSpy = jest.fn().mockResolvedValue({
data: { branchRuleSquashOptionUpdate: { errors: ['error'] } },
});
await createComponent({
editSquashOptionMutationHandler: mutationSpy,
});
findSquashSettingSection().vm.$emit('edit');
await nextTick();
const drawer = wrapper.findComponent(SquashSettingsDrawer);
drawer.vm.$emit('submit', 'always');
await waitForPromises();
expect(createAlert).toHaveBeenCalledWith({
message: 'Something went wrong while updating branch rule.',
});
});
it('closes drawer after successful update', async () => {
await createComponent();
findSquashSettingSection().vm.$emit('edit');
await nextTick();
const drawer = findSquashSettingsDrawer();
drawer.vm.$emit('submit', 'always');
await waitForPromises();
expect(drawer.props('isOpen')).toBe(false);
});
it('renders squash settings section', () => {
const content = findSquashSettingSection();
expect(content.text()).toContain('Encourage');

View File

@ -4,6 +4,12 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { getContentWrapperHeight } from '~/lib/utils/dom_utils';
import { DRAWER_Z_INDEX } from '~/lib/utils/constants';
import SquashSettingsDrawer from '~/projects/settings/branch_rules/components/view/squash_settings_drawer.vue';
import {
SQUASH_SETTING_DO_NOT_ALLOW,
SQUASH_SETTING_ALLOW,
SQUASH_SETTING_ENCOURAGE,
SQUASH_SETTING_REQUIRE,
} from '~/projects/settings/branch_rules/components/view/constants';
jest.mock('~/lib/utils/dom_utils', () => ({ getContentWrapperHeight: jest.fn() }));
@ -14,7 +20,7 @@ describe('Squash Settings Drawer', () => {
const defaultProps = {
isOpen: false,
isLoading: false,
selectedOption: 'do_not_allow',
selectedOption: SQUASH_SETTING_DO_NOT_ALLOW,
};
const findDrawer = () => wrapper.findComponent(GlDrawer);
@ -50,9 +56,12 @@ describe('Squash Settings Drawer', () => {
);
it.each([
['encourage', 'encourage'],
[undefined, 'do_not_allow'],
])('sets correct initial selection for %s', async (input, expected) => {
[undefined, SQUASH_SETTING_DO_NOT_ALLOW],
['Allow', SQUASH_SETTING_ALLOW],
['Encourage', SQUASH_SETTING_ENCOURAGE],
['Require', SQUASH_SETTING_REQUIRE],
['Do not allow', SQUASH_SETTING_DO_NOT_ALLOW],
])('sets correct selection for %s', async (input, expected) => {
createComponent({ selectedOption: input });
await nextTick();
findSaveButton().vm.$emit('click');

View File

@ -0,0 +1,24 @@
import { findSelectedOptionValueByLabel } from '~/projects/settings/branch_rules/components/view/utils';
describe('utils', () => {
describe('findSelectedOptionValueByLabel', () => {
const options = [
{ label: 'Option 1', value: 'value1' },
{ label: 'Option 2', value: 'value2' },
{ label: 'Option 3', value: 'value3' },
];
it('returns the value when an option with a matching label is found', () => {
expect(findSelectedOptionValueByLabel(options, 'Option 2')).toBe('value2');
});
it('returns the first option value when no option with a matching label is found', () => {
expect(findSelectedOptionValueByLabel(options, 'non-existent-label')).toBe('value1');
});
it('returns undefined when options array is empty', () => {
const emptyOptions = [];
expect(findSelectedOptionValueByLabel(emptyOptions, 'value1')).toBeUndefined();
});
});
});

View File

@ -7,7 +7,7 @@ RSpec.describe Types::PermissionTypes::WorkItem do
expected_permissions = [
:read_work_item, :update_work_item, :delete_work_item, :admin_work_item,
:admin_parent_link, :set_work_item_metadata, :create_note, :admin_work_item_link, :mark_note_as_internal,
:report_spam
:report_spam, :summarize_comments
]
expected_permissions.each do |permission|

View File

@ -256,6 +256,9 @@ merge_requests:
- approver_users
- approver_groups
- approved_by_users
- v2_approval_rules
- v2_approval_rules_projects
- v2_approval_rules_merge_requests
- draft_notes
- merge_train_car
- blocks_as_blocker
@ -770,6 +773,9 @@ project:
- approval_merge_request_rules
- approval_merge_request_rule_sources
- approval_project_rules
- v2_approval_rules
- v2_approval_rules_projects
- v2_approval_rules_merge_requests
- approvers
- approver_users
- audit_events

View File

@ -14,7 +14,7 @@ RSpec.describe User, feature_category: :system_access do
expect(described_class::USER_TYPES.keys)
.to match_array(%w[human ghost alert_bot project_bot support_bot service_user security_bot
visual_review_bot migration_bot automation_bot security_policy_bot admin_bot suggested_reviewers_bot
service_account llm_bot placeholder duo_code_review_bot import_user])
service_account llm_bot placeholder duo_code_review_bot import_user ci_pipeline_bot])
expect(described_class::USER_TYPES).to include(*described_class::BOT_USER_TYPES)
expect(described_class::USER_TYPES).to include(*described_class::NON_INTERNAL_USER_TYPES)
expect(described_class::USER_TYPES).to include(*described_class::INTERNAL_USER_TYPES)

View File

@ -46,11 +46,13 @@ RSpec.describe WorkItemPolicy, :aggregate_failures, feature_category: :team_plan
# disallowed
expect(permissions(non_member_user, project_work_item)).to be_disallowed(
:read_work_item, :read_issue, :read_note, :admin_work_item, :update_work_item, :delete_work_item,
:admin_parent_link, :admin_work_item_link, :create_note, :report_spam, :move_work_item, :clone_work_item
:admin_parent_link, :admin_work_item_link, :create_note, :report_spam, :move_work_item, :clone_work_item,
:summarize_comments
)
expect(permissions(non_member_user, project_confidential_work_item)).to be_disallowed(
:read_work_item, :read_issue, :read_note, :admin_work_item, :update_work_item, :delete_work_item,
:admin_parent_link, :admin_work_item_link, :create_note, :report_spam, :move_work_item, :clone_work_item
:admin_parent_link, :admin_work_item_link, :create_note, :report_spam, :move_work_item, :clone_work_item,
:summarize_comments
)
end
@ -85,11 +87,13 @@ RSpec.describe WorkItemPolicy, :aggregate_failures, feature_category: :team_plan
# disallowed
expect(permissions(non_member_user, project_work_item)).to be_disallowed(
:admin_work_item, :update_work_item, :delete_work_item,
:admin_parent_link, :admin_work_item_link, :report_spam, :move_work_item, :clone_work_item
:admin_parent_link, :admin_work_item_link, :report_spam, :move_work_item, :clone_work_item,
:summarize_comments
)
expect(permissions(non_member_user, project_confidential_work_item)).to be_disallowed(
:read_work_item, :read_issue, :read_note, :admin_work_item, :update_work_item, :delete_work_item,
:admin_parent_link, :admin_work_item_link, :create_note, :report_spam, :move_work_item, :clone_work_item
:admin_parent_link, :admin_work_item_link, :create_note, :report_spam, :move_work_item, :clone_work_item,
:summarize_comments
)
end
@ -123,12 +127,12 @@ RSpec.describe WorkItemPolicy, :aggregate_failures, feature_category: :team_plan
expect(permissions(non_member_user, work_item)).to be_disallowed(
:read_work_item, :read_issue, :read_note, :admin_work_item, :update_work_item, :delete_work_item,
:admin_parent_link, :set_work_item_metadata, :admin_work_item_link, :create_note,
:move_work_item, :clone_work_item
:move_work_item, :clone_work_item, :summarize_comments
)
expect(permissions(non_member_user, confidential_work_item)).to be_disallowed(
:read_work_item, :read_issue, :read_note, :admin_work_item, :update_work_item, :delete_work_item,
:admin_parent_link, :set_work_item_metadata, :admin_work_item_link, :create_note,
:move_work_item, :clone_work_item
:move_work_item, :clone_work_item, :summarize_comments
)
end
end
@ -160,12 +164,12 @@ RSpec.describe WorkItemPolicy, :aggregate_failures, feature_category: :team_plan
# disallowed
expect(permissions(non_member_user, work_item)).to be_disallowed(
:admin_work_item, :update_work_item, :delete_work_item, :admin_parent_link, :set_work_item_metadata,
:admin_work_item_link, :move_work_item, :clone_work_item
:admin_work_item_link, :move_work_item, :clone_work_item, :summarize_comments
)
expect(permissions(non_member_user, confidential_work_item)).to be_disallowed(
:read_work_item, :read_issue, :read_note, :admin_work_item, :update_work_item, :delete_work_item,
:admin_parent_link, :set_work_item_metadata, :admin_work_item_link, :create_note,
:move_work_item, :clone_work_item
:move_work_item, :clone_work_item, :summarize_comments
)
end
end

View File

@ -14,14 +14,14 @@ RSpec.describe API::CommitStatuses, :clean_gitlab_redis_cache, feature_category:
let(:get_url) { "/projects/#{project.id}/repository/commits/#{sha}/statuses" }
context 'ci commit exists' do
let!(:master) do
let_it_be(:master) do
project.ci_pipelines.build(source: :push, sha: commit.id, ref: 'master', protected: false).tap do |p|
p.ensure_project_iid! # Necessary to avoid cross-database modification error
p.save!
end
end
let!(:develop) do
let_it_be(:develop) do
project.ci_pipelines.build(source: :push, sha: commit.id, ref: 'develop', protected: false).tap do |p|
p.ensure_project_iid! # Necessary to avoid cross-database modification error
p.save!
@ -35,12 +35,12 @@ RSpec.describe API::CommitStatuses, :clean_gitlab_redis_cache, feature_category:
create(:commit_status, { pipeline: pipeline, ref: pipeline.ref }.merge(opts))
end
let!(:status1) { create_status(master, status: 'running', retried: true) }
let!(:status2) { create_status(master, name: 'coverage', status: 'pending', retried: true) }
let!(:status3) { create_status(develop, status: 'running', allow_failure: true) }
let!(:status4) { create_status(master, name: 'coverage', status: 'success') }
let!(:status5) { create_status(develop, name: 'coverage', status: 'success') }
let!(:status6) { create_status(master, status: 'success', stage: 'deploy') }
let_it_be(:status1) { create_status(master, status: 'running', retried: true) }
let_it_be(:status2) { create_status(master, name: 'coverage', status: 'pending', retried: true) }
let_it_be(:status3) { create_status(develop, status: 'running', allow_failure: true) }
let_it_be(:status4) { create_status(master, name: 'coverage', status: 'success') }
let_it_be(:status5) { create_status(develop, name: 'coverage', status: 'success') }
let_it_be(:status6) { create_status(master, status: 'success', stage: 'deploy') }
context 'latest commit statuses' do
before do
@ -52,8 +52,7 @@ RSpec.describe API::CommitStatuses, :clean_gitlab_redis_cache, feature_category:
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(statuses_id).to contain_exactly(status3.id, status4.id, status5.id, status6.id)
json_response.sort_by! { |status| status['id'] }
expect(statuses_id).to eq([status3.id, status4.id, status5.id, status6.id].sort)
expect(json_response.map { |status| status['allow_failure'] }).to eq([true, false, false, false])
end
end
@ -73,7 +72,7 @@ RSpec.describe API::CommitStatuses, :clean_gitlab_redis_cache, feature_category:
context 'Get all commit statuses' do
let(:params) { { all: 1 } }
let(:expected_statuses) { [status1.id, status2.id, status3.id, status4.id, status5.id, status6.id] }
let(:expected_statuses) { (develop.statuses.ids + master.statuses.ids).sort }
it_behaves_like 'get commit statuses'
end
@ -93,42 +92,42 @@ RSpec.describe API::CommitStatuses, :clean_gitlab_redis_cache, feature_category:
context 'latest commit statuses for specific ref' do
let(:params) { { ref: 'develop' } }
let(:expected_statuses) { [status3.id, status5.id] }
let(:expected_statuses) { [status3.id, status5.id].sort }
it_behaves_like 'get commit statuses'
end
context 'latest commit statuses for specific name' do
let(:params) { { name: 'coverage' } }
let(:expected_statuses) { [status4.id, status5.id] }
let(:expected_statuses) { [status4.id, status5.id].sort }
it_behaves_like 'get commit statuses'
end
context 'latest commit statuses for specific pipeline' do
let(:params) { { pipeline_id: develop.id } }
let(:expected_statuses) { [status3.id, status5.id] }
let(:expected_statuses) { [status3.id, status5.id].sort }
it_behaves_like 'get commit statuses'
end
context 'return commit statuses sort by desc id' do
let(:params) { { all: 1, sort: "desc" } }
let(:expected_statuses) { [status6.id, status5.id, status4.id, status3.id, status2.id, status1.id] }
let(:expected_statuses) { (develop.statuses.ids + master.statuses.ids).sort.reverse }
it_behaves_like 'get commit statuses'
end
context 'return commit statuses sort by desc pipeline_id' do
let(:params) { { all: 1, order_by: "pipeline_id", sort: "desc" } }
let(:expected_statuses) { [status3.id, status5.id, status1.id, status2.id, status4.id, status6.id] }
let(:expected_statuses) { develop.statuses.order(id: :asc).ids + master.statuses.order(id: :asc).ids }
it_behaves_like 'get commit statuses'
end
context 'return commit statuses sort by asc pipeline_id' do
let(:params) { { all: 1, order_by: "pipeline_id" } }
let(:expected_statuses) { [status1.id, status2.id, status4.id, status6.id, status3.id, status5.id] }
let(:expected_statuses) { master.statuses.order(id: :asc).ids + develop.statuses.order(id: :asc).ids }
it_behaves_like 'get commit statuses'
end

View File

@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe API::Conan::V1::InstancePackages, feature_category: :package_registry do
include_context 'conan api setup'
include_context 'with conan api setup'
let_it_be_with_reload(:package) { create(:conan_package, project: project, without_recipe_revisions: true) }
let(:snowplow_gitlab_standard_context) do
@ -43,7 +43,7 @@ RSpec.describe API::Conan::V1::InstancePackages, feature_category: :package_regi
end
context 'with recipe endpoints' do
include_context 'conan recipe endpoints'
include_context 'for conan recipe endpoints'
let(:project_id) { 9999 }
let(:url_prefix) { "#{Settings.gitlab.base_url}/api/v4" }
@ -126,7 +126,7 @@ RSpec.describe API::Conan::V1::InstancePackages, feature_category: :package_regi
end
context 'with file download endpoints' do
include_context 'conan file download endpoints'
include_context 'for conan file download endpoints'
describe 'GET /api/v4/packages/conan/v1/files/:package_name/:package_version/:package_username/:package_channel' \
'/:recipe_revision/export/:file_name' do
@ -155,7 +155,7 @@ RSpec.describe API::Conan::V1::InstancePackages, feature_category: :package_regi
end
context 'with file upload endpoints' do
include_context 'conan file upload endpoints'
include_context 'for conan file upload endpoints'
describe 'PUT /api/v4/packages/conan/v1/files/:package_name/:package_version/:package_username/:package_channel' \
'/:recipe_revision/export/:file_name/authorize' do

View File

@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe API::Conan::V1::ProjectPackages, feature_category: :package_registry do
include_context 'conan api setup'
include_context 'with conan api setup'
let_it_be_with_reload(:package) { create(:conan_package, project: project, without_recipe_revisions: true) }
let(:project_id) { project.id }
@ -55,7 +55,7 @@ RSpec.describe API::Conan::V1::ProjectPackages, feature_category: :package_regis
end
context 'with recipe endpoints' do
include_context 'conan recipe endpoints'
include_context 'for conan recipe endpoints'
let(:url_prefix) { "#{Settings.gitlab.base_url}/api/v4/projects/#{project_id}" }
let(:recipe_path) { package.conan_recipe_path }
@ -150,7 +150,7 @@ RSpec.describe API::Conan::V1::ProjectPackages, feature_category: :package_regis
end
context 'with file download endpoints' do
include_context 'conan file download endpoints'
include_context 'for conan file download endpoints'
subject(:request) { get api(url), headers: headers }
@ -181,7 +181,7 @@ RSpec.describe API::Conan::V1::ProjectPackages, feature_category: :package_regis
end
context 'with file upload endpoints' do
include_context 'conan file upload endpoints'
include_context 'for conan file upload endpoints'
describe 'PUT /api/v4/projects/:id/packages/conan/v1/files/:package_name/:package_version/:package_username' \
'/:package_channel/:recipe_revision/export/:file_name/authorize' do

View File

@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe API::Conan::V2::ProjectPackages, feature_category: :package_registry do
include_context 'conan api setup'
include_context 'with conan api setup'
let_it_be_with_reload(:package) { create(:conan_package, project: project) }
let(:project_id) { project.id }
@ -30,7 +30,7 @@ RSpec.describe API::Conan::V2::ProjectPackages, feature_category: :package_regis
describe 'GET /api/v4/projects/:id/packages/conan/v2/conans/:package_name/:package_version/:package_username/' \
':package_channel/revisions/:recipe_revision/files/:file_name' do
include_context 'conan file download endpoints'
include_context 'for conan file download endpoints'
let(:file_name) { recipe_file.file_name }
let(:recipe_revision) { recipe_file_metadata.recipe_revision_value }

View File

@ -0,0 +1,71 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Updating a squash option', feature_category: :source_code_management do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
let(:branch_rule) { Projects::AllBranchesRule.new(project) }
let(:global_id) { branch_rule.to_global_id.to_s }
let(:mutation) do
graphql_mutation(:branch_rule_squash_option_update, { branch_rule_id: global_id, squash_option: 'NEVER' })
end
let(:mutation_response) { graphql_mutation_response(:branch_rule_squash_option_update) }
subject(:mutation_request) { post_graphql_mutation(mutation, current_user: current_user) }
before do
stub_licensed_features(branch_rule_squash_options: true)
end
context 'when the user does not have permission' do
it_behaves_like 'a mutation that returns top-level errors',
errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
context 'and a squash option exists' do
let!(:squash_option) { create(:project_setting, project: project) }
it 'does not update the squash option' do
expect { mutation_request }.not_to change { squash_option.reload.squash_option }
end
end
end
context 'when the user has permission' do
before_all do
project.add_maintainer(current_user)
end
context 'and the branch_rule_squash_settings feature flag is disabled' do
before do
stub_feature_flags(branch_rule_squash_settings: false)
end
it 'raises an error' do
mutation_request
expect(graphql_errors).to include(a_hash_including('message' => 'Squash options feature disabled'))
end
end
it 'updates the squash option' do
expect do
mutation_request
end.to change {
project.reload.project_setting&.squash_option
}.from('default_off').to('never')
end
it 'responds with the updated squash option' do
mutation_request
expect(mutation_response['squashOption']['option']).to eq('Do not allow')
expect(mutation_response['squashOption']['helpText']).to eq(
'Squashing is never performed and the checkbox is hidden.'
)
end
end
end

View File

@ -90,7 +90,8 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
'markNoteAsInternal' => true,
'moveWorkItem' => true,
'cloneWorkItem' => true,
'reportSpam' => false
'reportSpam' => false,
'summarizeComments' => false
},
'project' => hash_including('id' => project.to_gid.to_s, 'fullPath' => project.full_path)
)

View File

@ -0,0 +1,48 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Projects::BranchRules::SquashOptions::UpdateService, feature_category: :source_code_management do
describe '#execute' do
let_it_be_with_reload(:project) { create(:project) }
let_it_be(:maintainer) { create(:user, maintainer_of: project) }
let_it_be(:developer) { create(:user, developer_of: project) }
let(:squash_option) { ::Projects::BranchRules::SquashOption.squash_options['always'] }
let(:branch_rule) { ::Projects::AllBranchesRule.new(project) }
subject(:execute) do
described_class.new(branch_rule, squash_option: squash_option, current_user: current_user).execute
end
context 'when branch rule is an AllBranchesRule' do
let(:current_user) { maintainer }
it 'updates the project level squash option' do
expect { execute }
.to change { project.reload&.project_setting&.squash_option }.from('default_off').to('always')
end
end
context 'when the user is not authorized' do
let(:current_user) { developer }
it 'returns an error response' do
result = execute
expect(result.message).to eq(described_class::AUTHORIZATION_ERROR_MESSAGE)
expect(result).to be_error
end
end
context 'when branch rule is BranchRule' do
let_it_be(:protected_branch) { create :protected_branch, project: project }
let(:branch_rule) { ::Projects::BranchRule.new(project, protected_branch) }
let(:current_user) { maintainer }
it 'returns an error response' do
expect(execute).to be_error
expect(execute.message).to eq('Updating BranchRule not supported')
end
end
end
end

View File

@ -1,6 +1,6 @@
# frozen_string_literal: true
RSpec.shared_context 'conan api setup' do
RSpec.shared_context 'with conan api setup' do
include PackagesManagerApiSpecHelpers
include HttpBasicAuthHelpers
@ -44,7 +44,7 @@ RSpec.shared_context 'conan api setup' do
end
end
RSpec.shared_context 'conan recipe endpoints' do
RSpec.shared_context 'for conan recipe endpoints' do
include PackagesManagerApiSpecHelpers
include HttpBasicAuthHelpers
@ -52,7 +52,7 @@ RSpec.shared_context 'conan recipe endpoints' do
let(:headers) { build_token_auth_header(jwt.encoded) }
end
RSpec.shared_context 'conan file download endpoints' do
RSpec.shared_context 'for conan file download endpoints' do
include PackagesManagerApiSpecHelpers
include HttpBasicAuthHelpers
@ -65,7 +65,7 @@ RSpec.shared_context 'conan file download endpoints' do
let(:recipe_file_metadata) { recipe_file.conan_file_metadatum }
end
RSpec.shared_context 'conan file upload endpoints' do
RSpec.shared_context 'for conan file upload endpoints' do
include PackagesManagerApiSpecHelpers
include WorkhorseHelpers
include HttpBasicAuthHelpers

View File

@ -21,17 +21,18 @@ RSpec.shared_examples 'checks abilities for project level work items' do
# disallowed
expect(permissions(guest, project_work_item)).to be_disallowed(
:admin_work_item, :update_work_item, :delete_work_item, :set_work_item_metadata, :move_work_item, :clone_work_item
:admin_work_item, :update_work_item, :delete_work_item, :set_work_item_metadata, :move_work_item,
:clone_work_item, :summarize_comments
)
expect(permissions(guest, project_confidential_work_item)).to be_disallowed(
:read_work_item, :read_issue, :read_note, :admin_work_item, :update_work_item, :delete_work_item,
:set_work_item_metadata, :create_note, :move_work_item, :clone_work_item
:set_work_item_metadata, :create_note, :move_work_item, :clone_work_item, :summarize_comments
)
expect(permissions(guest_author, authored_project_work_item)).to be_disallowed(
:admin_work_item, :set_work_item_metadata, :move_work_item, :clone_work_item
:admin_work_item, :set_work_item_metadata, :move_work_item, :clone_work_item, :summarize_comments
)
expect(permissions(guest_author, authored_project_confidential_work_item)).to be_disallowed(
:admin_work_item, :set_work_item_metadata, :move_work_item, :clone_work_item
:admin_work_item, :set_work_item_metadata, :move_work_item, :clone_work_item, :summarize_comments
)
expect(permissions(guest, incident_work_item)).to be_disallowed(
@ -90,9 +91,11 @@ RSpec.shared_examples 'checks abilities for project level work items' do
)
# disallowed
expect(permissions(reporter, project_work_item)).to be_disallowed(:delete_work_item)
expect(permissions(reporter, project_confidential_work_item)).to be_disallowed(:delete_work_item)
expect(permissions(reporter, incident_work_item)).to be_disallowed(:delete_work_item)
expect(permissions(reporter, project_work_item)).to be_disallowed(:delete_work_item, :summarize_comments)
expect(permissions(reporter, project_confidential_work_item)).to be_disallowed(
:delete_work_item, :summarize_comments
)
expect(permissions(reporter, incident_work_item)).to be_disallowed(:delete_work_item, :summarize_comments)
end
it 'checks group reporter abilities' do
@ -107,7 +110,8 @@ RSpec.shared_examples 'checks abilities for project level work items' do
)
# disallowed
expect(permissions(group_reporter, project_work_item)).to be_disallowed(:delete_work_item)
expect(permissions(group_reporter, project_confidential_work_item)).to be_disallowed(:delete_work_item)
expect(permissions(group_reporter, project_work_item)).to be_disallowed(:delete_work_item, :summarize_comments)
expect(permissions(group_reporter, project_confidential_work_item))
.to be_disallowed(:delete_work_item, :summarize_comments)
end
end

View File

@ -63,7 +63,10 @@ RSpec.shared_examples 'conan search endpoint' do
end
it { expect(response).to have_gitlab_http_status(:bad_request) }
it { expect(json_response['message']).to eq('400 Bad request - Search term length must be less than 200 characters.') }
it 'returns an error message' do
expect(json_response['message']).to eq('400 Bad request - Search term length must be less than 200 characters.')
end
end
context 'returns error when search term has too many wildcards' do
@ -74,7 +77,10 @@ RSpec.shared_examples 'conan search endpoint' do
end
it { expect(response).to have_gitlab_http_status(:bad_request) }
it { expect(json_response['message']).to eq('400 Bad request - Too many wildcards in search term. Maximum is 5.') }
it 'returns an error message' do
expect(json_response['message']).to eq('400 Bad request - Too many wildcards in search term. Maximum is 5.')
end
end
end
@ -365,7 +371,8 @@ end
RSpec.shared_examples 'empty recipe for not found package' do
context 'with invalid recipe url' do
let(:recipe_path) do
'aa/bb/%{project}/ccc' % { project: ::Packages::Conan::Metadatum.package_username_from(full_path: project.full_path) }
format('aa/bb/%{project}/ccc',
project: ::Packages::Conan::Metadatum.package_username_from(full_path: project.full_path))
end
let(:presenter) { double('::Packages::Conan::PackagePresenter') }
@ -384,8 +391,7 @@ RSpec.shared_examples 'empty recipe for not found package' do
project,
any_args
).and_return(presenter)
allow(presenter).to receive(:recipe_snapshot) { {} }
allow(presenter).to receive(:package_snapshot) { {} }
allow(presenter).to receive_messages(recipe_snapshot: {}, package_snapshot: {})
subject
@ -412,13 +418,14 @@ end
RSpec.shared_examples 'recipe download_urls' do
let(:recipe_path) { package.conan_recipe_path }
let(:base_url_with_recipe_path) { "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}" }
it_behaves_like 'enforcing read_packages job token policy'
it 'returns the download_urls for the recipe files' do
expected_response = {
'conanfile.py' => "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conanfile.py",
'conanmanifest.txt' => "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conanmanifest.txt"
'conanfile.py' => "#{base_url_with_recipe_path}/0/export/conanfile.py",
'conanmanifest.txt' => "#{base_url_with_recipe_path}/0/export/conanmanifest.txt"
}
subject
@ -431,14 +438,15 @@ end
RSpec.shared_examples 'package download_urls' do
let(:recipe_path) { package.conan_recipe_path }
let(:base_url_with_recipe_path) { "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}" }
it_behaves_like 'enforcing read_packages job token policy'
it 'returns the download_urls for the package files' do
expected_response = {
'conaninfo.txt' => "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/package/#{conan_package_reference}/0/conaninfo.txt",
'conanmanifest.txt' => "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/package/#{conan_package_reference}/0/conanmanifest.txt",
'conan_package.tgz' => "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/package/#{conan_package_reference}/0/conan_package.tgz"
'conaninfo.txt' => "#{base_url_with_recipe_path}/0/package/#{conan_package_reference}/0/conaninfo.txt",
'conanmanifest.txt' => "#{base_url_with_recipe_path}/0/package/#{conan_package_reference}/0/conanmanifest.txt",
'conan_package.tgz' => "#{base_url_with_recipe_path}/0/package/#{conan_package_reference}/0/conan_package.tgz"
}
subject
@ -537,6 +545,7 @@ end
RSpec.shared_examples 'recipe upload_urls endpoint' do
let(:recipe_path) { package.conan_recipe_path }
let(:base_url_with_recipe_path) { "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}" }
let(:params) do
{ 'conanfile.py': 24,
@ -553,8 +562,8 @@ RSpec.shared_examples 'recipe upload_urls endpoint' do
subject
expected_response = {
'conanfile.py': "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conanfile.py",
'conanmanifest.txt': "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conanmanifest.txt"
'conanfile.py': "#{base_url_with_recipe_path}/0/export/conanfile.py",
'conanmanifest.txt': "#{base_url_with_recipe_path}/0/export/conanmanifest.txt"
}
expect(response.body).to eq(expected_response.to_json)
@ -571,9 +580,9 @@ RSpec.shared_examples 'recipe upload_urls endpoint' do
subject
expected_response = {
'conan_sources.tgz': "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conan_sources.tgz",
'conan_export.tgz': "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conan_export.tgz",
'conanmanifest.txt': "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conanmanifest.txt"
'conan_sources.tgz': "#{base_url_with_recipe_path}/0/export/conan_sources.tgz",
'conan_export.tgz': "#{base_url_with_recipe_path}/0/export/conan_export.tgz",
'conanmanifest.txt': "#{base_url_with_recipe_path}/0/export/conanmanifest.txt"
}
expect(response.body).to eq(expected_response.to_json)
@ -590,7 +599,7 @@ RSpec.shared_examples 'recipe upload_urls endpoint' do
subject
expected_response = {
'conanmanifest.txt': "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conanmanifest.txt"
'conanmanifest.txt': "#{base_url_with_recipe_path}/0/export/conanmanifest.txt"
}
expect(response.body).to eq(expected_response.to_json)
@ -600,6 +609,7 @@ end
RSpec.shared_examples 'package upload_urls endpoint' do
let(:recipe_path) { package.conan_recipe_path }
let(:base_url_with_recipe_path) { "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}" }
let(:params) do
{ 'conaninfo.txt': 24,
@ -615,9 +625,9 @@ RSpec.shared_examples 'package upload_urls endpoint' do
it 'returns a set of upload urls for the files requested' do
expected_response = {
'conaninfo.txt': "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/package/123456789/0/conaninfo.txt",
'conanmanifest.txt': "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/package/123456789/0/conanmanifest.txt",
'conan_package.tgz': "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/package/123456789/0/conan_package.tgz"
'conaninfo.txt': "#{base_url_with_recipe_path}/0/package/123456789/0/conaninfo.txt",
'conanmanifest.txt': "#{base_url_with_recipe_path}/0/package/123456789/0/conanmanifest.txt",
'conan_package.tgz': "#{base_url_with_recipe_path}/0/package/123456789/0/conan_package.tgz"
}
subject
@ -633,7 +643,7 @@ RSpec.shared_examples 'package upload_urls endpoint' do
it 'returns upload urls only for the valid requested files' do
expected_response = {
'conaninfo.txt': "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/package/123456789/0/conaninfo.txt"
'conaninfo.txt': "#{base_url_with_recipe_path}/0/package/123456789/0/conaninfo.txt"
}
subject
@ -1010,10 +1020,13 @@ RSpec.shared_examples 'uploads a package file' do
end
context 'with existing package' do
let!(:existing_package) { create(:conan_package, name: recipe_path_name, version: recipe_path_version, project: project) }
let!(:existing_package) do
create(:conan_package, name: recipe_path_name, version: recipe_path_version, project: project)
end
before do
existing_package.conan_metadatum.update!(package_username: recipe_path_username, package_channel: recipe_path_channel)
existing_package.conan_metadatum.update!(package_username: recipe_path_username,
package_channel: recipe_path_channel)
end
it 'does not create a new package' do
@ -1044,7 +1057,7 @@ RSpec.shared_examples 'uploads a package file' do
end
let(:tmp_object) do
fog_connection.directories.new(key: 'packages').files.create( # rubocop:disable Rails/SaveBang
fog_connection.directories.new(key: 'packages').files.create( # rubocop:disable Rails/SaveBang -- Method #create! is undefined for class Fog::AWS::Storage::Files
key: "tmp/uploads/#{file_name}",
body: 'content'
)