Add latest changes from gitlab-org/gitlab@master

commit 83968ce3f7
parent 82bcb96b9a
@@ -192,7 +192,6 @@ RSpec/VerifiedDoubleReference:
    - 'spec/services/commits/commit_patch_service_spec.rb'
    - 'spec/services/error_tracking/issue_update_service_spec.rb'
    - 'spec/services/import/github/cancel_project_import_service_spec.rb'
    - 'spec/services/import/github/gists_import_service_spec.rb'
    - 'spec/services/incident_management/timeline_events/create_service_spec.rb'
    - 'spec/services/issues/close_service_spec.rb'
    - 'spec/services/merge_requests/after_create_service_spec.rb'
@@ -1 +1 @@
-19c06cf576e10d1cb8c6d922d6311e6f4d50814e
+b6e23a698ef223a575ed12a3e717836a30dcd636
@@ -1 +1 @@
-37bc7f28335f1ba13198787bbf8194934a59c27f
+373688d6b701b7ffea8a0198c3f391acc4b55100
Gemfile

@@ -755,4 +755,4 @@ gem 'paper_trail', '~> 16.0', feature_category: :shared

gem "i18n_data", "~> 0.13.1", feature_category: :system_access

-gem "gitlab-cloud-connector", "~> 1.20", require: 'gitlab/cloud_connector', feature_category: :plan_provisioning
+gem "gitlab-cloud-connector", "~> 1.21", require: 'gitlab/cloud_connector', feature_category: :plan_provisioning
@@ -218,7 +218,7 @@
{"name":"gitaly","version":"18.1.0.pre.rc1","platform":"ruby","checksum":"8f65a0c5bb3694c91c9fa4bfa7ceabfc131846b78feed8ee32a744aaacf6e70a"},
{"name":"gitlab","version":"4.19.0","platform":"ruby","checksum":"3f645e3e195dbc24f0834fbf83e8ccfb2056d8e9712b01a640aad418a6949679"},
{"name":"gitlab-chronic","version":"0.10.6","platform":"ruby","checksum":"a244d11a1396d2aac6ae9b2f326adf1605ec1ad20c29f06e8b672047d415a9ac"},
-{"name":"gitlab-cloud-connector","version":"1.20.0","platform":"ruby","checksum":"259a703e2c635128a1d259d90f2b38edfaefd0b812332b363ed9cdca3200aa45"},
+{"name":"gitlab-cloud-connector","version":"1.21.0","platform":"ruby","checksum":"16eb2a42f223c6c70efc20a8fd9e2bbe4fa91603894daa8a72f354f425a07709"},
{"name":"gitlab-crystalball","version":"1.1.1","platform":"ruby","checksum":"0464a113b0809e0e9fa7c0100bb6634fe38465af95aa04efa49541d64250b8ed"},
{"name":"gitlab-dangerfiles","version":"4.9.2","platform":"ruby","checksum":"d5c050f685d8720f6e70191a7d1216854d860dbdea5b455f87abe7542e005798"},
{"name":"gitlab-experiment","version":"0.9.1","platform":"ruby","checksum":"f230ee742154805a755d5f2539dc44d93cdff08c5bbbb7656018d61f93d01f48"},
@@ -735,7 +735,7 @@ GEM
terminal-table (>= 1.5.1)
gitlab-chronic (0.10.6)
numerizer (~> 0.2)
-gitlab-cloud-connector (1.20.0)
+gitlab-cloud-connector (1.21.0)
activesupport (~> 7.0)
jwt (~> 2.9.3)
gitlab-crystalball (1.1.1)
@@ -2151,7 +2151,7 @@ DEPENDENCIES
gitlab-active-context!
gitlab-backup-cli!
gitlab-chronic (~> 0.10.5)
-gitlab-cloud-connector (~> 1.20)
+gitlab-cloud-connector (~> 1.21)
gitlab-crystalball (~> 1.1.0)
gitlab-dangerfiles (~> 4.9.0)
gitlab-duo-workflow-service-client (~> 0.2)!
@@ -218,7 +218,7 @@
{"name":"gitaly","version":"18.1.0.pre.rc1","platform":"ruby","checksum":"8f65a0c5bb3694c91c9fa4bfa7ceabfc131846b78feed8ee32a744aaacf6e70a"},
{"name":"gitlab","version":"4.19.0","platform":"ruby","checksum":"3f645e3e195dbc24f0834fbf83e8ccfb2056d8e9712b01a640aad418a6949679"},
{"name":"gitlab-chronic","version":"0.10.6","platform":"ruby","checksum":"a244d11a1396d2aac6ae9b2f326adf1605ec1ad20c29f06e8b672047d415a9ac"},
-{"name":"gitlab-cloud-connector","version":"1.20.0","platform":"ruby","checksum":"259a703e2c635128a1d259d90f2b38edfaefd0b812332b363ed9cdca3200aa45"},
+{"name":"gitlab-cloud-connector","version":"1.21.0","platform":"ruby","checksum":"16eb2a42f223c6c70efc20a8fd9e2bbe4fa91603894daa8a72f354f425a07709"},
{"name":"gitlab-crystalball","version":"1.1.1","platform":"ruby","checksum":"0464a113b0809e0e9fa7c0100bb6634fe38465af95aa04efa49541d64250b8ed"},
{"name":"gitlab-dangerfiles","version":"4.9.2","platform":"ruby","checksum":"d5c050f685d8720f6e70191a7d1216854d860dbdea5b455f87abe7542e005798"},
{"name":"gitlab-experiment","version":"0.9.1","platform":"ruby","checksum":"f230ee742154805a755d5f2539dc44d93cdff08c5bbbb7656018d61f93d01f48"},
@@ -729,7 +729,7 @@ GEM
terminal-table (>= 1.5.1)
gitlab-chronic (0.10.6)
numerizer (~> 0.2)
-gitlab-cloud-connector (1.20.0)
+gitlab-cloud-connector (1.21.0)
activesupport (~> 7.0)
jwt (~> 2.9.3)
gitlab-crystalball (1.1.1)
@@ -2146,7 +2146,7 @@ DEPENDENCIES
gitlab-active-context!
gitlab-backup-cli!
gitlab-chronic (~> 0.10.5)
-gitlab-cloud-connector (~> 1.20)
+gitlab-cloud-connector (~> 1.21)
gitlab-crystalball (~> 1.1.0)
gitlab-dangerfiles (~> 4.9.0)
gitlab-duo-workflow-service-client (~> 0.2)!
@@ -0,0 +1,239 @@
<script>
import {
  GlTable,
  GlButton,
  GlModal,
  GlTooltipDirective,
  GlLoadingIcon,
  GlSprintf,
} from '@gitlab/ui';
import { createAlert, VARIANT_INFO } from '~/alert';
import { __, s__ } from '~/locale';
import CrudComponent from '~/vue_shared/components/crud_component.vue';
import { duoContextExclusionHelpPath } from '../constants';
import ManageExclusionsDrawer from './manage_exclusions_drawer.vue';

export default {
  name: 'ExclusionSettings',
  components: {
    CrudComponent,
    GlTable,
    GlButton,
    GlModal,
    GlLoadingIcon,
    GlSprintf,
    ManageExclusionsDrawer,
  },
  directives: {
    GlTooltip: GlTooltipDirective,
  },
  props: {
    exclusionRules: {
      type: Array,
      required: false,
      default: () => [],
    },
  },
  i18n: {
    title: s__('DuoFeatures|GitLab Duo context exclusions'),
    description: s__(
      'DuoFeatures|Specify project files and directories that GitLab Duo will not access. Excluded content is never sent to AI models. %{linkStart}learn more%{linkEnd}.',
    ),
    manageExclusions: s__('DuoFeatures|Manage exclusions'),
    actionsLabel: __('Actions'),
    delete: __('Delete'),
    deleteRuleModalTitle: s__('DuoFeatures|Delete exclusion rule?'),
    deleteRuleModalText: s__('DuoFeatures|Do you want to delete this exclusion rule?'),
    ruleDeletedMessage: s__('DuoFeatures|The exclusion rule was deleted.'),
    deleteFailedMessage: s__('DuoFeatures|Failed to delete the exclusion rule. Try again.'),
    emptyStateMessage: s__(
      'DuoFeatures|No exclusion rules defined. Add a rule to exclude files from GitLab Duo context.',
    ),
  },
  data() {
    return {
      rules: [...this.exclusionRules],
      ruleToDelete: null,
      isDeleting: false,
      isManageDrawerOpen: false,
      duoContextExclusionHelpPath,
    };
  },
  computed: {
    fields() {
      return [
        {
          key: 'pattern',
          label: s__('DuoFeatures|Pattern'),
          tdClass: '!gl-align-middle',
        },
        {
          key: 'actions',
          label: this.$options.i18n.actionsLabel,
          thAlignRight: true,
          tdClass: 'gl-text-right !gl-align-middle',
        },
      ];
    },
    tableItems() {
      return this.rules.map((pattern, index) => ({
        id: index,
        pattern,
        isDeleting: false,
      }));
    },
    deleteProps() {
      return {
        text: this.$options.i18n.delete,
        attributes: { category: 'primary', variant: 'danger' },
      };
    },
    cancelProps() {
      return {
        text: __('Cancel'),
      };
    },
  },
  watch: {
    exclusionRules: {
      handler(newRules) {
        this.rules = [...newRules];
      },
      deep: true,
    },
  },
  methods: {
    openManageDrawer() {
      this.isManageDrawerOpen = true;
    },
    confirmDeleteRule(rule) {
      this.ruleToDelete = rule;
      this.$refs.deleteModal.show();
    },
    deleteRule() {
      if (!this.ruleToDelete) return;

      this.isDeleting = true;

      const index = this.ruleToDelete.id;
      this.rules.splice(index, 1);
      this.emitUpdate();

      createAlert({
        message: this.$options.i18n.ruleDeletedMessage,
        variant: VARIANT_INFO,
      });

      this.isDeleting = false;
      this.ruleToDelete = null;
    },
    emitUpdate() {
      this.$emit('update', this.rules);
    },
    closeManageDrawer() {
      this.isManageDrawerOpen = false;
    },
    saveExclusionRules(rules) {
      this.rules = [...rules];
      this.emitUpdate();
      this.closeManageDrawer();
    },
  },
};
</script>

<template>
  <div>
    <crud-component
      ref="crudComponent"
      :title="$options.i18n.title"
      :count="rules.length"
      icon="remove"
      data-testid="exclusion-settings-crud"
    >
      <template #description>
        <gl-sprintf :message="$options.i18n.description">
          <template #link="{ content }">
            <a :href="duoContextExclusionHelpPath" target="_blank" rel="noopener noreferrer">{{
              content
            }}</a>
          </template>
        </gl-sprintf>
      </template>
      <template #actions>
        <gl-button
          variant="default"
          data-testid="manage-exclusions-button"
          @click="openManageDrawer"
        >
          {{ $options.i18n.manageExclusions }}
        </gl-button>
      </template>
      <gl-table
        :empty-text="$options.i18n.emptyStateMessage"
        :fields="fields"
        :items="tableItems"
        stacked="md"
        show-empty
        class="b-table-fixed"
        data-testid="exclusion-rules-table"
      >
        <template #cell(pattern)="{ item }">
          <code class="gl-font-mono gl-rounded-sm gl-bg-gray-10 gl-px-2 gl-py-1">
            {{ item.pattern }}
          </code>
        </template>

        <template #cell(actions)="{ item }">
          <div class="table-action-buttons gl-flex gl-justify-end gl-gap-2">
            <gl-button
              v-gl-tooltip
              :disabled="isDeleting"
              category="tertiary"
              icon="remove"
              size="medium"
              :title="$options.i18n.delete"
              :aria-label="$options.i18n.delete"
              data-testid="delete-exclusion-rule"
              @click="confirmDeleteRule(item)"
            />
            <gl-loading-icon v-show="item.isDeleting" size="sm" :inline="true" />
          </div>
        </template>
      </gl-table>

      <gl-modal
        ref="deleteModal"
        modal-id="delete-exclusion-rule-modal"
        :title="$options.i18n.deleteRuleModalTitle"
        :action-primary="deleteProps"
        :action-cancel="cancelProps"
        data-testid="delete-exclusion-rule-modal"
        @primary="deleteRule"
      >
        <div class="well gl-mb-4">
          <code
            v-if="ruleToDelete"
            class="gl-font-mono gl-rounded-sm gl-bg-gray-10 gl-px-2 gl-py-1"
          >
            {{ ruleToDelete.pattern }}
          </code>
        </div>
        <p>
          <gl-sprintf :message="$options.i18n.deleteRuleModalText">
            <template #strong="{ content }">
              <strong>{{ content }}</strong>
            </template>
          </gl-sprintf>
        </p>
      </gl-modal>

      <manage-exclusions-drawer
        :open="isManageDrawerOpen"
        :exclusion-rules="rules"
        @save="saveExclusionRules"
        @close="closeManageDrawer"
      />
    </crud-component>
  </div>
</template>
@@ -5,6 +5,7 @@ import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { __, s__ } from '~/locale';
import { duoHelpPath, amazonQHelpPath } from '../constants';
import ProjectSettingRow from './project_setting_row.vue';
+import ExclusionSettings from './exclusion_settings.vue';

export default {
  i18n: {
@@ -17,6 +18,7 @@ export default {
    GlButton,
    ProjectSettingRow,
    CascadingLockIcon,
+    ExclusionSettings,
  },
  mixins: [glFeatureFlagMixin()],
  props: {
@@ -50,11 +52,17 @@
      required: false,
      default: false,
    },
+    duoContextExclusionSettings: {
+      type: Object,
+      required: false,
+      default: () => ({}),
+    },
  },
  data() {
    return {
      autoReviewEnabled: this.amazonQAutoReviewEnabled,
      duoEnabled: this.duoFeaturesEnabled,
+      exclusionRules: this.duoContextExclusionSettings?.exclusion_rules || [],
    };
  },
  computed: {
@@ -76,6 +84,9 @@

      return null;
    },
+    shouldShowExclusionSettings() {
+      return this.licensedAiFeaturesAvailable && this.showDuoContextExclusion;
+    },
    showCascadingButton() {
      return (
        this.duoFeaturesLocked &&
@@ -83,6 +94,9 @@
        Object.keys(this.cascadingSettingsData).length
      );
    },
+    showDuoContextExclusion() {
+      return this.glFeatures.useDuoContextExclusion;
+    },
  },
  watch: {
    duoFeaturesEnabled(isEnabled) {
@@ -93,6 +107,14 @@
      }
    },
  },
+  methods: {
+    handleExclusionRulesUpdate(rules) {
+      this.exclusionRules = rules;
+      this.$nextTick(() => {
+        this.$el.closest('form')?.submit();
+      });
+    },
+  },
};
</script>

@@ -153,6 +175,33 @@
      </div>
    </project-setting-row>

+    <exclusion-settings
+      v-if="shouldShowExclusionSettings"
+      :exclusion-rules="exclusionRules"
+      @update="handleExclusionRulesUpdate"
+    />
+
+    <!-- Hidden inputs for form submission -->
+    <div v-if="exclusionRules.length > 0">
+      <input
+        v-for="(rule, index) in exclusionRules"
+        :key="index"
+        type="hidden"
+        :name="`project[project_setting_attributes][duo_context_exclusion_settings][exclusion_rules][]`"
+        :value="rule"
+      />
+    </div>
+
+    <!-- need to use a null for empty array due to strong params deep_munge -->
+    <div v-if="exclusionRules.length === 0">
+      <input
+        type="hidden"
+        :name="`project[project_setting_attributes][duo_context_exclusion_settings][exclusion_rules]`"
+        data-testid="exclusion-rule-input-null"
+        :value="null"
+      />
+    </div>
+
    <template #footer>
      <gl-button
        variant="confirm"
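As an illustration (hypothetical, not taken from this diff): the hidden `project[project_setting_attributes][duo_context_exclusion_settings][exclusion_rules][]` inputs above imply a nested strong-parameters permit list on the Rails side. The component's own comment notes that Rails' params deep_munge turns an empty array into `nil`, which is why the separate `null` input exists. A sketch of what the permit list could look like, assuming a `project_setting_params` helper that may not exist under that name:

```ruby
# Hypothetical sketch only: permitting the nested exclusion_rules array that the
# hidden form inputs submit. The real controller code may differ.
def project_setting_params
  params.require(:project).permit(
    project_setting_attributes: {
      duo_context_exclusion_settings: { exclusion_rules: [] }
    }
  )
end
```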
@@ -0,0 +1,166 @@
<script>
import { GlDrawer, GlButton, GlFormTextarea, GlAccordion, GlAccordionItem } from '@gitlab/ui';
import { DRAWER_Z_INDEX } from '~/lib/utils/constants';
import { getContentWrapperHeight } from '~/lib/utils/dom_utils';
import { s__ } from '~/locale';

export default {
  name: 'ManageExclusionsDrawer',
  components: {
    GlDrawer,
    GlButton,
    GlFormTextarea,
    GlAccordion,
    GlAccordionItem,
  },
  props: {
    open: {
      type: Boolean,
      required: true,
    },
    exclusionRules: {
      type: Array,
      required: false,
      default: () => [],
    },
  },
  data() {
    return {
      localRules: '',
    };
  },
  computed: {
    getDrawerHeaderHeight() {
      return getContentWrapperHeight();
    },
  },
  watch: {
    exclusionRules: {
      handler(newRules) {
        this.localRules = newRules.join('\n');
      },
      immediate: true,
    },
    open(isOpen) {
      if (isOpen) {
        this.localRules = this.exclusionRules.join('\n');
      }
    },
  },
  methods: {
    handleSave() {
      const rules = this.localRules
        .split('\n')
        .map((rule) => rule.trim())
        .filter((rule) => rule.length > 0);

      // Update the exclusion rules in the parent component
      this.$emit('save', rules);
    },
    handleCancel() {
      this.localRules = this.exclusionRules.join('\n');
      this.$emit('close');
    },
  },
  i18n: {
    title: s__('DuoFeatures|Manage Exclusions'),
    filesAndDirectoriesLabel: s__('DuoFeatures|Files or directories'),
    filesAndDirectoriesHelp: s__('DuoFeatures|Add each exclusion on a separate line.'),
    saveExclusions: s__('DuoFeatures|Save exclusions'),
    cancel: s__('DuoFeatures|Cancel'),
    viewExamples: s__('DuoFeatures|View examples of exclusions.'),
    exampleEnvFiles: s__('DuoFeatures|Excludes all .env files'),
    exampleSecretsDirectory: s__('DuoFeatures|Excludes entire secrets directory'),
    exampleKeyFiles: s__('DuoFeatures|Excludes all .key files in any subdirectory'),
    exampleSpecificFile: s__('DuoFeatures|Excludes the specified file'),
    exampleAllowFile: s__(
      'DuoFeatures|Allows the specified file in the specified directory, even if excluded by previous rules',
    ),
  },
  DRAWER_Z_INDEX,
};
</script>

<template>
  <gl-drawer
    :header-height="getDrawerHeaderHeight"
    :z-index="$options.DRAWER_Z_INDEX"
    :open="open"
    data-testid="manage-exclusions-drawer"
    @close="handleCancel"
  >
    <template #title>
      <h2 class="gl-heading-3 gl-mb-0">{{ $options.i18n.title }}</h2>
    </template>

    <div class="gl-flex gl-flex-col gl-gap-4 gl-p-4">
      <div>
        <label for="exclusion-rules-textarea" class="gl-mb-2 gl-block gl-font-bold">
          {{ $options.i18n.filesAndDirectoriesLabel }}
        </label>
        <p class="gl-mb-3 gl-text-subtle">
          {{ $options.i18n.filesAndDirectoriesHelp }}
        </p>
        <gl-form-textarea
          id="exclusion-rules-textarea"
          v-model="localRules"
          class="gl-font-monospace"
          rows="10"
          data-testid="exclusion-rules-textarea"
        />
      </div>

      <gl-accordion :header-level="3" class="gl-border-t gl-pt-4">
        <gl-accordion-item
          :title="$options.i18n.viewExamples"
          class="gl-font-normal"
          data-testid="examples-accordion"
        >
          <div class="md">
            <blockquote>
              <ul class="gl-mb-0 gl-p-0">
                <li>
                  <!-- eslint-disable-next-line @gitlab/vue-require-i18n-strings -->
                  <code class="gl-font-monospace">*.env</code> - {{ $options.i18n.exampleEnvFiles }}
                </li>
                <li>
                  <!-- eslint-disable-next-line @gitlab/vue-require-i18n-strings -->
                  <code class="gl-font-monospace">secrets/</code> -
                  {{ $options.i18n.exampleSecretsDirectory }}
                </li>
                <li>
                  <!-- eslint-disable-next-line @gitlab/vue-require-i18n-strings -->
                  <code class="gl-font-monospace">**/*.key</code> -
                  {{ $options.i18n.exampleKeyFiles }}
                </li>
                <li>
                  <!-- eslint-disable-next-line @gitlab/vue-require-i18n-strings -->
                  <code class="gl-font-monospace">config/production.yml</code> -
                  {{ $options.i18n.exampleSpecificFile }}
                </li>
                <li>
                  <!-- eslint-disable-next-line @gitlab/vue-require-i18n-strings -->
                  <code class="gl-font-monospace">!secrets/file.json</code> -
                  {{ $options.i18n.exampleAllowFile }}
                </li>
              </ul>
            </blockquote>
          </div>
        </gl-accordion-item>
      </gl-accordion>

      <div class="gl-border-t gl-flex gl-gap-3 gl-pt-4">
        <gl-button
          variant="confirm"
          data-testid="save-exclusions-button"
          @click.prevent.stop="handleSave"
        >
          {{ $options.i18n.saveExclusions }}
        </gl-button>
        <gl-button data-testid="cancel-button" @click="handleCancel">
          {{ $options.i18n.cancel }}
        </gl-button>
      </div>
    </div>
  </gl-drawer>
</template>
@@ -58,6 +58,7 @@ export const extendedPratExpiryWebhooksExecuteHelpPath = helpPagePath(

export const duoHelpPath = helpPagePath('user/ai_features');
export const amazonQHelpPath = helpPagePath('user/duo_amazon_q/_index.md');
+export const duoContextExclusionHelpPath = helpPagePath('user/gitlab_duo/context_exclusion');

export const pipelineExecutionPoliciesHelpPath = helpPagePath(
  'user/application_security/policies/pipeline_execution_policies',
@@ -173,12 +173,10 @@ $merge-request-sticky-header-height: 45px;

  &::after {
    content: '';
    top: $calc-application-header-height;
    height: $merge-request-sticky-header-height;
    @include translucent-glass-background;
    @apply gl-border-b gl-absolute gl-z-0 gl-hidden gl-top-0;

    left: calc((100% + var(--application-bar-left) - 100vw) / 2);
    width: calc(100vw - var(--application-bar-left));
    @apply gl-border-b gl-fixed gl-left-0 gl-right-0 gl-z-0 gl-hidden;
  }

  @container sticky-header scroll-state(stuck: top) {
@@ -0,0 +1,44 @@
# frozen_string_literal: true

module Mutations
  module WorkItems
    module Hierarchy
      class AddChildrenItems < BaseMutation
        graphql_name 'WorkItemHierarchyAddChildrenItems'
        description "Adds children to a given work item's hierarchy by Global ID."

        authorize :read_work_item

        argument :children_ids, [::Types::GlobalIDType[::WorkItem]],
          required: true,
          description: 'Global IDs of children work items.',
          loads: ::Types::WorkItemType,
          as: :children
        argument :id,
          ::Types::GlobalIDType[::WorkItem],
          required: true,
          description: 'Global ID of the work item.'

        field :added_children, [::Types::WorkItemType],
          null: false,
          description: 'Work items that were added as children.'

        def resolve(id:, **attributes)
          Gitlab::QueryLimiting.disable!('https://gitlab.com/gitlab-org/gitlab/-/issues/408575')

          work_item = authorized_find!(id: id)
          children = attributes[:children]

          update_result = ::WorkItems::ParentLinks::CreateService
            .new(work_item, current_user, { issuable_references: children })
            .execute

          {
            added_children: update_result[:created_references]&.map(&:work_item) || [],
            errors: Array.wrap(update_result[:message])
          }
        end
      end
    end
  end
end
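As an illustration (hypothetical usage, not part of the diff): a minimal sketch of exercising the new mutation through `GitlabSchema.execute`, for example from a Rails console. The global IDs and `current_user` are placeholders; field and argument names follow the GraphQL reference documented later in this commit.

```ruby
# Hypothetical sketch of calling the WorkItemHierarchyAddChildrenItems mutation.
# The work item global IDs below are placeholders.
query = <<~GRAPHQL
  mutation($id: WorkItemID!, $childrenIds: [WorkItemID!]!) {
    workItemHierarchyAddChildrenItems(input: { id: $id, childrenIds: $childrenIds }) {
      addedChildren { id title }
      errors
    }
  }
GRAPHQL

result = GitlabSchema.execute(
  query,
  context: { current_user: current_user },
  variables: { 'id' => 'gid://gitlab/WorkItem/1', 'childrenIds' => ['gid://gitlab/WorkItem/2'] }
)

# Service-level failures are surfaced in the errors array of the payload.
result.dig('data', 'workItemHierarchyAddChildrenItems', 'errors')
```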
@@ -248,6 +248,7 @@ module Types
mount_mutation Mutations::WorkItems::LinkedItems::Remove, experiment: { milestone: '16.3' }
mount_mutation Mutations::WorkItems::AddClosingMergeRequest, experiment: { milestone: '17.1' }
mount_mutation Mutations::WorkItems::Hierarchy::Reorder, experiment: { milestone: '17.3' }
+mount_mutation Mutations::WorkItems::Hierarchy::AddChildrenItems, experiment: { milestone: '18.2' }
mount_mutation Mutations::WorkItems::BulkUpdate, experiment: { milestone: '17.4' }
mount_mutation Mutations::WorkItems::BulkMove, experiment: { milestone: '18.2' }
mount_mutation Mutations::WorkItems::UserPreference::Update, experiment: { milestone: '17.10' }
@@ -4,9 +4,10 @@ module Pages
class LookupPath
include Gitlab::Utils::StrongMemoize

-def initialize(deployment:, domain: nil, trim_prefix: nil, access_control: false)
+def initialize(deployment:, root_namespace_id:, domain: nil, trim_prefix: nil, access_control: false)
@deployment = deployment
@project = deployment.project
+@root_namespace_id = root_namespace_id
@domain = domain
@trim_prefix = trim_prefix || @project.full_path
@access_control = access_control
@@ -71,15 +72,9 @@ module Pages
end
strong_memoize_attr :primary_domain

-def top_level_namespace_path
-  project_path = project.full_path
-  project_path.split('/').first
-end
-strong_memoize_attr :top_level_namespace_path

private

-attr_reader :project, :deployment, :trim_prefix, :domain
+attr_reader :project, :root_namespace_id, :deployment, :trim_prefix, :domain

def prefix_value
return deployment.path_prefix if project.pages_url_builder.is_namespace_homepage?
@@ -18,7 +18,8 @@ module Pages
end

def lookup_paths
-projects.flat_map { |project| lookup_paths_for(project) }
+root_namespace_id = @namespace&.root_ancestor&.id
+projects.flat_map { |project| lookup_paths_for(project, root_namespace_id) }
end

private
@@ -33,10 +34,11 @@ module Pages
@namespace&.pages_access_control_trie&.covered?(project.namespace.traversal_ids)
end

-def lookup_paths_for(project)
+def lookup_paths_for(project, root_namespace_id)
deployments_for(project).map do |deployment|
Pages::LookupPath.new(
deployment: deployment,
+root_namespace_id: root_namespace_id,
trim_prefix: trim_prefix,
domain: domain,
access_control: access_control_for(project)
@@ -16,7 +16,11 @@ module Import
start_import
success
rescue Octokit::Unauthorized
-error('Access denied to the GitHub account.', 401)
+error('Access denied to the GitHub account', 401)
+rescue Octokit::TooManyRequests
+# Because `check_user_token` method uses octokit directly, it may raise
+# the default rate limiting exception rather than our own.
+error('GitHub API rate limit exceeded', 429)
end

private
@@ -0,0 +1,10 @@
---
name: security_policy_access_token_push_bypass
description: Branch push that is blocked by a security policy is bypassed for configured access token
introduced_by_issue: https://gitlab.com/gitlab-org/gitlab/-/issues/549644
introduced_by_mr: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/196249
feature_category: security_policy_management
milestone: '18.2'
saved_to_database: true
scope: [Project]
streamed: true
@@ -0,0 +1,10 @@
---
name: security_policy_service_account_push_bypass
description: Branch push that is blocked by a security policy is bypassed for configured service account
introduced_by_issue: https://gitlab.com/gitlab-org/gitlab/-/issues/549644
introduced_by_mr: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/196249
feature_category: security_policy_management
milestone: '18.2'
saved_to_database: true
scope: [Project]
streamed: true
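For context (a hedged sketch, not taken from this diff): audit event definitions like the two above are typically recorded through the `Gitlab::Audit::Auditor` interface. The call below is a hypothetical illustration of how the new event type could be emitted; the actual call site lives in the security policy push-check code and may differ.

```ruby
# Hypothetical sketch: recording the new audit event type when a blocked branch
# push is bypassed for a configured access token. The author/scope/target objects
# are placeholders.
::Gitlab::Audit::Auditor.audit(
  name: 'security_policy_access_token_push_bypass',
  author: current_user,
  scope: project,
  target: project,
  message: 'Blocked branch push was bypassed for a configured access token'
)
```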
@@ -18,9 +18,9 @@ Doorkeeper.configure do
# Ensure user is redirected to redirect_uri after login
session[:user_return_to] = request.fullpath

-namespace_path = request.query_parameters['top_level_namespace_path']
+root_namespace_id = request.query_parameters['root_namespace_id']

-resolver = Gitlab::Auth::OAuth::OauthResourceOwnerRedirectResolver.new(namespace_path)
+resolver = Gitlab::Auth::OAuth::OauthResourceOwnerRedirectResolver.new(root_namespace_id)
redirect_to(resolver.resolve_redirect_url)
nil
end
@@ -3,6 +3,6 @@ migration_job_name: BackfillBulkImportTrackersNamespaceId
description: Backfills sharding key `bulk_import_trackers.namespace_id` from `bulk_import_entities`.
feature_category: importers
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180437
-milestone: '17.9'
-queued_migration_version: 20250205195337
-finalized_by: 20250414075039
+milestone: '18.2'
+queued_migration_version: 20250709150739
+finalized_by: # version of the migration that finalized this BBM
@@ -3,6 +3,6 @@ migration_job_name: BackfillBulkImportTrackersOrganizationId
description: Backfills sharding key `bulk_import_trackers.organization_id` from `bulk_import_entities`.
feature_category: importers
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180437
-milestone: '17.9'
-queued_migration_version: 20250205195342
-finalized_by: 20250414075035
+milestone: '18.2'
+queued_migration_version: 20250709152228
+finalized_by: # version of the migration that finalized this BBM
@@ -3,6 +3,6 @@ migration_job_name: BackfillBulkImportTrackersProjectId
description: Backfills sharding key `bulk_import_trackers.project_id` from `bulk_import_entities`.
feature_category: importers
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180437
-milestone: '17.9'
-queued_migration_version: 20250205195332
-finalized_by: 20250414075044
+milestone: '18.2'
+queued_migration_version: 20250709130040
+finalized_by: # version of the migration that finalized this BBM
@@ -10,31 +10,10 @@ class QueueBackfillBulkImportTrackersProjectId < Gitlab::Database::Migration[2.2
  SUB_BATCH_SIZE = 100

  def up
-    queue_batched_background_migration(
-      MIGRATION,
-      :bulk_import_trackers,
-      :id,
-      :project_id,
-      :bulk_import_entities,
-      :project_id,
-      :bulk_import_entity_id,
-      job_interval: DELAY_INTERVAL,
-      batch_size: BATCH_SIZE,
-      sub_batch_size: SUB_BATCH_SIZE
-    )
+    # no-op because the original migration ran but was not successful https://gitlab.com/gitlab-org/gitlab/-/merge_requests/188587#note_2457847947
  end

  def down
-    delete_batched_background_migration(
-      MIGRATION,
-      :bulk_import_trackers,
-      :id,
-      [
-        :project_id,
-        :bulk_import_entities,
-        :project_id,
-        :bulk_import_entity_id
-      ]
-    )
+    # no-op because the original migration ran but was not successful https://gitlab.com/gitlab-org/gitlab/-/merge_requests/188587#note_2457847947
  end
end
@@ -10,31 +10,10 @@ class QueueBackfillBulkImportTrackersNamespaceId < Gitlab::Database::Migration[2
  SUB_BATCH_SIZE = 100

  def up
-    queue_batched_background_migration(
-      MIGRATION,
-      :bulk_import_trackers,
-      :id,
-      :namespace_id,
-      :bulk_import_entities,
-      :namespace_id,
-      :bulk_import_entity_id,
-      job_interval: DELAY_INTERVAL,
-      batch_size: BATCH_SIZE,
-      sub_batch_size: SUB_BATCH_SIZE
-    )
+    # no-op because the original migration ran but was not successful https://gitlab.com/gitlab-org/gitlab/-/merge_requests/188587#note_2457847947
  end

  def down
-    delete_batched_background_migration(
-      MIGRATION,
-      :bulk_import_trackers,
-      :id,
-      [
-        :namespace_id,
-        :bulk_import_entities,
-        :namespace_id,
-        :bulk_import_entity_id
-      ]
-    )
+    # no-op because the original migration ran but was not successful https://gitlab.com/gitlab-org/gitlab/-/merge_requests/188587#note_2457847947
  end
end
@@ -10,31 +10,10 @@ class QueueBackfillBulkImportTrackersOrganizationId < Gitlab::Database::Migratio
  SUB_BATCH_SIZE = 100

  def up
-    queue_batched_background_migration(
-      MIGRATION,
-      :bulk_import_trackers,
-      :id,
-      :organization_id,
-      :bulk_import_entities,
-      :organization_id,
-      :bulk_import_entity_id,
-      job_interval: DELAY_INTERVAL,
-      batch_size: BATCH_SIZE,
-      sub_batch_size: SUB_BATCH_SIZE
-    )
+    # no-op because the original migration ran but was not successful https://gitlab.com/gitlab-org/gitlab/-/merge_requests/188587#note_2457847947
  end

  def down
-    delete_batched_background_migration(
-      MIGRATION,
-      :bulk_import_trackers,
-      :id,
-      [
-        :organization_id,
-        :bulk_import_entities,
-        :organization_id,
-        :bulk_import_entity_id
-      ]
-    )
+    # no-op because the original migration ran but was not successful https://gitlab.com/gitlab-org/gitlab/-/merge_requests/188587#note_2457847947
  end
end
@@ -0,0 +1,58 @@
# frozen_string_literal: true

class RequeueBackfillBulkImportTrackersProjectId < Gitlab::Database::Migration[2.3]
  milestone '18.2'
  restrict_gitlab_migration gitlab_schema: :gitlab_main_cell

  MIGRATION = "BackfillBulkImportTrackersProjectId"
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 1000
  SUB_BATCH_SIZE = 100

  def up
    begin
      delete_batched_background_migration(
        MIGRATION,
        :bulk_import_trackers,
        :id,
        [
          :project_id,
          :bulk_import_entities,
          :project_id,
          :bulk_import_entity_id
        ]
      )
    rescue StandardError => e
      # NOTE: It's possible that the BBM was already cleaned as it has been finalized before the requeue.
      # We can safely ignore this failure.
      Gitlab::AppLogger.warn("Failed to delete batched background migration: #{e.message}")
    end

    queue_batched_background_migration(
      MIGRATION,
      :bulk_import_trackers,
      :id,
      :project_id,
      :bulk_import_entities,
      :project_id,
      :bulk_import_entity_id,
      job_interval: DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    delete_batched_background_migration(
      MIGRATION,
      :bulk_import_trackers,
      :id,
      [
        :project_id,
        :bulk_import_entities,
        :project_id,
        :bulk_import_entity_id
      ]
    )
  end
end
@@ -0,0 +1,58 @@
# frozen_string_literal: true

class RequeueBackfillBulkImportTrackersNamespaceId < Gitlab::Database::Migration[2.3]
  milestone '18.2'
  restrict_gitlab_migration gitlab_schema: :gitlab_main_cell

  MIGRATION = "BackfillBulkImportTrackersNamespaceId"
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 1000
  SUB_BATCH_SIZE = 100

  def up
    begin
      delete_batched_background_migration(
        MIGRATION,
        :bulk_import_trackers,
        :id,
        [
          :namespace_id,
          :bulk_import_entities,
          :namespace_id,
          :bulk_import_entity_id
        ]
      )
    rescue StandardError => e
      # NOTE: It's possible that the BBM was already cleaned as it has been finalized before the requeue.
      # We can safely ignore this failure.
      Gitlab::AppLogger.warn("Failed to delete batched background migration: #{e.message}")
    end

    queue_batched_background_migration(
      MIGRATION,
      :bulk_import_trackers,
      :id,
      :namespace_id,
      :bulk_import_entities,
      :namespace_id,
      :bulk_import_entity_id,
      job_interval: DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    delete_batched_background_migration(
      MIGRATION,
      :bulk_import_trackers,
      :id,
      [
        :namespace_id,
        :bulk_import_entities,
        :namespace_id,
        :bulk_import_entity_id
      ]
    )
  end
end
@@ -0,0 +1,58 @@
# frozen_string_literal: true

class RequeueBackfillBulkImportTrackersOrganizationId < Gitlab::Database::Migration[2.3]
  milestone '18.2'
  restrict_gitlab_migration gitlab_schema: :gitlab_main_cell

  MIGRATION = "BackfillBulkImportTrackersOrganizationId"
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 1000
  SUB_BATCH_SIZE = 100

  def up
    begin
      delete_batched_background_migration(
        MIGRATION,
        :bulk_import_trackers,
        :id,
        [
          :organization_id,
          :bulk_import_entities,
          :organization_id,
          :bulk_import_entity_id
        ]
      )
    rescue StandardError => e
      # NOTE: It's possible that the BBM was already cleaned as it has been finalized before the requeue.
      # We can safely ignore this failure.
      Gitlab::AppLogger.warn("Failed to delete batched background migration: #{e.message}")
    end

    queue_batched_background_migration(
      MIGRATION,
      :bulk_import_trackers,
      :id,
      :organization_id,
      :bulk_import_entities,
      :organization_id,
      :bulk_import_entity_id,
      job_interval: DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    delete_batched_background_migration(
      MIGRATION,
      :bulk_import_trackers,
      :id,
      [
        :organization_id,
        :bulk_import_entities,
        :organization_id,
        :bulk_import_entity_id
      ]
    )
  end
end
@@ -0,0 +1 @@
36c30919acf486a7116d0db6216de82e46157db4f3cc83e47129fc1edba2accc

@@ -0,0 +1 @@
7ce5d87376ba4e9b3b82944b79f49f18d3b38aeffc202d3252b3108885d12300

@@ -0,0 +1 @@
d3c8186f8cc6b32877cbb0d7a45d5de841a730fa0ac44c268cce19d56824bb23
@@ -2252,6 +2252,30 @@ Input type: `AiCatalogAgentCreateInput`
| <a id="mutationaicatalogagentcreateerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during the mutation. |
| <a id="mutationaicatalogagentcreateitem"></a>`item` | [`AiCatalogItem`](#aicatalogitem) | Item created. |

### `Mutation.aiCatalogAgentDelete`

{{< details >}}
**Introduced** in GitLab 18.2.
**Status**: Experiment.
{{< /details >}}

Input type: `AiCatalogAgentDeleteInput`

#### Arguments

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationaicatalogagentdeleteclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationaicatalogagentdeleteid"></a>`id` | [`AiCatalogItemID!`](#aicatalogitemid) | Global ID of the catalog Agent to delete. |

#### Fields

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationaicatalogagentdeleteclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationaicatalogagentdeleteerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during the mutation. |
| <a id="mutationaicatalogagentdeletesuccess"></a>`success` | [`Boolean!`](#boolean) | Returns true if catalog Agent was successfully deleted. |

### `Mutation.aiDuoWorkflowCreate`

{{< details >}}
@@ -13357,6 +13381,33 @@ Input type: `WorkItemExportInput`
| <a id="mutationworkitemexporterrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during the mutation. |
| <a id="mutationworkitemexportmessage"></a>`message` | [`String`](#string) | Export request result message. |

### `Mutation.workItemHierarchyAddChildrenItems`

Adds children to a given work item's hierarchy by Global ID.

{{< details >}}
**Introduced** in GitLab 18.2.
**Status**: Experiment.
{{< /details >}}

Input type: `WorkItemHierarchyAddChildrenItemsInput`

#### Arguments

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationworkitemhierarchyaddchildrenitemschildrenids"></a>`childrenIds` | [`[WorkItemID!]!`](#workitemid) | Global IDs of children work items. |
| <a id="mutationworkitemhierarchyaddchildrenitemsclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationworkitemhierarchyaddchildrenitemsid"></a>`id` | [`WorkItemID!`](#workitemid) | Global ID of the work item. |

#### Fields

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationworkitemhierarchyaddchildrenitemsaddedchildren"></a>`addedChildren` | [`[WorkItem!]!`](#workitem) | Work items that were added as children. |
| <a id="mutationworkitemhierarchyaddchildrenitemsclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationworkitemhierarchyaddchildrenitemserrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during the mutation. |

### `Mutation.workItemRemoveLinkedItems`

Remove items linked to the work item.
@@ -36763,6 +36814,7 @@ Project-level settings for product analytics provider.
| <a id="projectdetailedimportstatus"></a>`detailedImportStatus` | [`DetailedImportStatus`](#detailedimportstatus) | Detailed import status of the project. |
| <a id="projectdora"></a>`dora` | [`Dora`](#dora) | Project's DORA metrics. |
| <a id="projectduoagenticchatavailable"></a>`duoAgenticChatAvailable` {{< icon name="warning-solid" >}} | [`Boolean`](#boolean) | **Introduced** in GitLab 18.1. **Status**: Experiment. User access to Duo agentic Chat feature. |
| <a id="projectduocontextexclusionsettings"></a>`duoContextExclusionSettings` {{< icon name="warning-solid" >}} | [`DuoContextExclusionSettings`](#duocontextexclusionsettings) | **Introduced** in GitLab 18.2. **Status**: Experiment. Settings for excluding files from Duo context. |
| <a id="projectduofeaturesenabled"></a>`duoFeaturesEnabled` {{< icon name="warning-solid" >}} | [`Boolean`](#boolean) | **Introduced** in GitLab 16.9. **Status**: Experiment. Indicates whether GitLab Duo features are enabled for the project. |
| <a id="projectduoworkflowstatuscheck"></a>`duoWorkflowStatusCheck` {{< icon name="warning-solid" >}} | [`DuoWorkflowEnablement`](#duoworkflowenablement) | **Introduced** in GitLab 17.7. **Status**: Experiment. Indicates whether Duo Agent Platform is enabled for the project. |
| <a id="projectexplorecatalogpath"></a>`exploreCatalogPath` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 17.6. **Status**: Experiment. Path to the project catalog resource. |
@@ -126,7 +126,7 @@ Before starting the flow, generate the `STATE`, the `CODE_VERIFIER` and the `COD
`/oauth/authorize` page with the following query parameters:

```plaintext
-https://gitlab.example.com/oauth/authorize?client_id=APP_ID&redirect_uri=REDIRECT_URI&response_type=code&state=STATE&scope=REQUESTED_SCOPES&code_challenge=CODE_CHALLENGE&code_challenge_method=S256&top_level_namespace_path=TOP_LEVEL_NAMESPACE
+https://gitlab.example.com/oauth/authorize?client_id=APP_ID&redirect_uri=REDIRECT_URI&response_type=code&state=STATE&scope=REQUESTED_SCOPES&code_challenge=CODE_CHALLENGE&code_challenge_method=S256&root_namespace_id=ROOT_NAMESPACE_ID
```

This page asks the user to approve the request from the app to access their
@@ -134,7 +134,7 @@ Before starting the flow, generate the `STATE`, the `CODE_VERIFIER` and the `COD
redirected back to the specified `REDIRECT_URI`. The [scope parameter](../integration/oauth_provider.md#view-all-authorized-applications)
is a space-separated list of scopes associated with the user.
For example,`scope=read_user+profile` requests the `read_user` and `profile` scopes.
-The `top_level_namespace_path` is the top level namespace path associated with the project. This optional parameter
+The `root_namespace_id` is the root namespace ID associated with the project. This optional parameter
should be used when [SAML SSO](../user/group/saml_sso/_index.md) is configured for the associated group.
The redirect includes the authorization `code`, for example:
@@ -220,7 +220,7 @@ be used as a CSRF token.
`/oauth/authorize` page with the following query parameters:

```plaintext
-https://gitlab.example.com/oauth/authorize?client_id=APP_ID&redirect_uri=REDIRECT_URI&response_type=code&state=STATE&scope=REQUESTED_SCOPES&top_level_namespace_path=TOP_LEVEL_NAMESPACE
+https://gitlab.example.com/oauth/authorize?client_id=APP_ID&redirect_uri=REDIRECT_URI&response_type=code&state=STATE&scope=REQUESTED_SCOPES&root_namespace_id=ROOT_NAMESPACE_ID
```

This page asks the user to approve the request from the app to access their
@@ -228,7 +228,7 @@ be used as a CSRF token.
redirected back to the specified `REDIRECT_URI`. The [scope parameter](../integration/oauth_provider.md#view-all-authorized-applications)
is a space-separated list of scopes associated with the user.
For example,`scope=read_user+profile` requests the `read_user` and `profile` scopes.
-The `top_level_namespace_path` is the top level namespace path associated with the project. This optional parameter
+The `root_namespace_id` is the root namespace ID associated with the project. This optional parameter
should be used when [SAML SSO](../user/group/saml_sso/_index.md) is configured for the associated group.
The redirect includes the authorization `code`, for example:
@@ -144,7 +144,8 @@ Do not add an `H1` heading in Markdown because there can be only one for each pa

### Description lists in Markdown

-To define terms or differentiate between options, use description lists.
+To define terms or differentiate between options, use description lists. For a list of UI elements,
+use a regular [list](#lists) instead of a description list.

Do not mix description lists with other styles.

@@ -509,6 +510,8 @@ Use bold for:
- UI elements with a visible label. Match the text and capitalization of the label.
- Navigation paths.

+Do not use bold for keywords or emphasis.
+
UI elements include:

- Buttons
@@ -530,16 +533,16 @@ To make text bold, wrap it with double asterisks (`**`). For example:
1. Select **Cancel**.
```

-When you use bold format for UI elements or keywords, place any punctuation outside the bold tag.
+When you use bold format for UI elements, place any punctuation outside the bold tag.
This rule includes periods, commas, colons, and right-angle brackets (`>`).

-The punctuation is part of the sentence structure rather than the UI element or keyword that you're emphasizing.
+The punctuation is part of the sentence structure rather than the UI element that you're emphasizing.

-Include punctuation in the bold tag when it's part of the UI element or keyword itself.
+Include punctuation in the bold tag when it's part of the UI element itself.

For example:

-- `**Option**: This an option description.`
+- `**Start a review**: This a description of the button that starts a review.`
- `Select **Overview** > **Users**.`

### Inline code

@@ -639,6 +642,14 @@ Use lists to present information in a format that is easier to scan.
  - Do this other thing.
  ```

+- Do not use [bold](#bold) formatting to define keywords or concepts in a list. Use bold for UI element labels only. For example:
+
+  - `**Start a review**: This a description of the button that starts a review.`
+  - `Offline environments: This is a description of offline environments.`
+
+  For keywords and concepts, consider a [reference topic](../topic_types/reference.md) or
+  [description list](#description-lists-in-markdown) for alternative formatting.
+
### Choose between an ordered or unordered list

Use ordered lists for a sequence of steps. For example:

@@ -781,7 +792,7 @@ To turn on the setting:

- In the UI:

-  1. In the VS Code menu, go to **Code > Settings > Settings**.
+  1. In the VS Code menu, go to **Code** > **Settings** > **Settings**.
  1. Search for `Limit Last Column Length`.
  1. In the **Limit Last Column Length** dropdown list, select **Follow header row length**.

@@ -1125,7 +1136,7 @@ elements:

### Names for UI elements

-All UI elements [should be **bold**](#bold).
+All UI elements [should be **bold**](#bold). The `>` in the navigation path should not be bold.

Guidance for individual UI elements is in [the word list](word_list.md).

@@ -1139,7 +1150,7 @@ To open project settings:

```markdown
1. On the left sidebar, select **Search or go to** and find your project.
-1. Select **Settings > CI/CD**.
+1. Select **Settings** > **CI/CD**.
1. Expand **General pipelines**.
```

@@ -1147,7 +1158,7 @@ To open group settings:

```markdown
1. On the left sidebar, select **Search or go to** and find your group.
-1. Select **Settings > CI/CD**.
+1. Select **Settings** > **CI/CD**.
1. Expand **General pipelines**.
```

@@ -1156,7 +1167,7 @@ To open settings for a top-level group:
```markdown
1. On the left sidebar, select **Search or go to** and find your group.
   This group must be at the top level.
-1. Select **Settings > CI/CD**.
+1. Select **Settings** > **CI/CD**.
1. Expand **General pipelines**.
```

@@ -1164,7 +1175,7 @@ To open either project or group settings:

```markdown
1. On the left sidebar, select **Search or go to** and find your project or group.
-1. Select **Settings > CI/CD**.
+1. Select **Settings** > **CI/CD**.
1. Expand **General pipelines**.
```

@@ -1184,7 +1195,7 @@ To open the **Admin** area:

```markdown
1. On the left sidebar, at the bottom, select **Admin**.
-1. Select **Settings > CI/CD**.
+1. Select **Settings** > **CI/CD**.
```

You do not have to repeat `On the left sidebar` in your second step.

@@ -1262,7 +1273,7 @@ Use the phrase **Complete the fields**.
For example:

1. On the left sidebar, select **Search or go to** and find your project.
-1. Select **Settings > Repository**.
+1. Select **Settings** > **Repository**.
1. Expand **Push rules**.
1. Complete the fields.
@@ -450,6 +450,7 @@ SECRET_DETECTION_HISTORIC_SCAN: false
SAST_EXCLUDED_ANALYZERS: ''
DEFAULT_SAST_EXCLUDED_PATHS: spec, test, tests, tmp
DS_EXCLUDED_ANALYZERS: ''
SECURE_ENABLE_LOCAL_CONFIGURATION: true
```

In GitLab 16.9 and earlier:
@@ -546,6 +546,8 @@ Audit event types belong to the following product categories.
| [`policy_violations_detected`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/193482) | Security policy violation is detected in the merge request | {{< icon name="dotted-circle" >}} No | GitLab [18.2](https://gitlab.com/gitlab-org/gitlab/-/work_items/549811) | Project |
| [`policy_violations_resolved`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/193482) | Security policy violations are resolved in the merge request | {{< icon name="dotted-circle" >}} No | GitLab [18.2](https://gitlab.com/gitlab-org/gitlab/-/issues/549812) | Project |
| [`policy_yaml_invalidated`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/196721) | The policy YAML is invalidated in security policy project | {{< icon name="check-circle" >}} Yes | GitLab [18.2](https://gitlab.com/gitlab-org/gitlab/-/work_items/550892) | Project |
| [`security_policy_access_token_push_bypass`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/196249) | Branch push that is blocked by a security policy is bypassed for configured access token | {{< icon name="check-circle" >}} Yes | GitLab [18.2](https://gitlab.com/gitlab-org/gitlab/-/issues/549644) | Project |
| [`security_policy_service_account_push_bypass`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/196249) | Branch push that is blocked by a security policy is bypassed for configured service account | {{< icon name="check-circle" >}} Yes | GitLab [18.2](https://gitlab.com/gitlab-org/gitlab/-/issues/549644) | Project |

### Security testing configuration
@@ -0,0 +1,26 @@
---
stage: AI-powered
group: Code Creation
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
title: Control GitLab Duo context exclusion
---

## Exclude context from GitLab Duo

{{< history >}}

- [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/17124) in GitLab 18.1 [with a flag](../../administration/feature_flags/_index.md) named `use_duo_context_exclusion`. Disabled by default.

{{< /history >}}

You can control which project content is included as context for GitLab Duo. Use this to protect sensitive information such as password files and configuration files.

### Manage GitLab Duo context exclusions

1. On the left sidebar, select **Search or go to** and find your project.
1. Select **Settings > General**.
1. Expand **Visibility, project features, permissions**.
1. Under **GitLab Duo**, in the **GitLab Duo Content Exclusions section**, select **Manage exclusions**.
1. Specify which project files and directories are excluded from GitLab Duo context, and select **Save exclusions**.
1. Optional. To delete an existing exclusion, select **Delete** ({{< icon name="remove" >}}) for the appropriate exclusion.
1. Select **Save changes**.
@ -151,18 +151,22 @@ Agentic Chat extends Chat capabilities with the following features:
|
|||
|
||||
### Chat feature comparison
|
||||
|
||||
| Capability | Chat | Agentic Chat |
|
||||
|------------|------| -------------|
|
||||
| Ask general programming questions | Yes | Yes |
|
||||
| Get answers about currently open file in the editor | Yes | Yes. Provide the path of the file in your question. |
|
||||
| Provide context about specified files | Yes. Use `/include` to add a file to the conversation. | Yes. Provide the path of the file in your question. |
|
||||
| Autonomously search project contents | No | Yes |
|
||||
| Autonomously create files and change files | No | Yes. Ask it to change files. Note, it may overwrite changes that you have made manually and have not committed, yet. |
|
||||
| Retrieve issues and MRs without specifying IDs | No | Yes. Search by other criteria. For example, an MR or issue's title or assignee. |
|
||||
| Combine information from multiple sources | No | Yes |
|
||||
| Analyze pipeline logs | Yes. Requires Duo Enterprise add-on. | Yes |
|
||||
| Restart a conversation | Yes. Use `/reset`. | Yes. Use `/reset`. |
|
||||
| Delete a conversation | Yes. Use `/clear`.| No. |
|
||||
| Capability | Chat | Agentic Chat |
|
||||
| ------------ |------| ------------- |
|
||||
| Ask general programming questions | Yes | Yes |
|
||||
| Get answers about currently open file in the editor | Yes | Yes. Provide the path of the file in your question. |
|
||||
| Provide context about specified files | Yes. Use `/include` to add a file to the conversation. | Yes. Provide the path of the file in your question. |
|
||||
| Autonomously search project contents | No | Yes |
|
||||
| Autonomously create files and change files | No | Yes. Ask it to change files. Note, it may overwrite changes that you have made manually and have not committed, yet. |
|
||||
| Retrieve issues and MRs without specifying IDs | No | Yes. Search by other criteria. For example, an MR or issue's title or assignee. |
|
||||
| Combine information from multiple sources | No | Yes |
|
||||
| Analyze pipeline logs | Yes. Requires Duo Enterprise add-on. | Yes |
|
||||
| Restart a conversation | Yes. Use `/reset`. | Yes. Use `/reset`. |
|
||||
| Delete a conversation | Yes. Use `/clear`. | No |
|
||||
| Create issues and MRs | No | Yes |
|
||||
| Use Git | No | Yes, IDEs only |
|
||||
| Run Shell commands | No | Yes, IDEs only |
|
||||
| Run MCP tools | No | Yes, IDEs only |
|
||||
|
||||
## Use cases
|
||||
|
||||
|
|
|
|||
|
|
@ -27,16 +27,15 @@ You don't need to authenticate if the project or the group is public.
|
|||
If the project is internal, you must be a registered user on the GitLab instance.
|
||||
An anonymous user cannot pull packages from an internal project.
|
||||
|
||||
To authenticate, you can use:
|
||||
To authenticate, you can use one of the following:
|
||||
|
||||
- A [personal access token](../../profile/personal_access_tokens.md)
|
||||
- A [personal access token](../../profile/personal_access_tokens.md), [group access token](../../../user/group/settings/group_access_tokens.md), or [project access token](../../../user/project/settings/project_access_tokens.md)
|
||||
with the scope set to `api`.
|
||||
- A [deploy token](../../project/deploy_tokens/_index.md) with the scope set to
|
||||
`read_package_registry`, `write_package_registry`, or both.
|
||||
- A [CI/CD job token](../../../ci/jobs/ci_job_token.md).
|
||||
- A [CI/CD job token](../../../ci/jobs/ci_job_token.md) if you want to publish packages with a CI/CD pipeline.
|
||||
|
||||
If your organization uses two-factor authentication (2FA), you must use a personal access token with the scope set to `api`.
|
||||
If you want to publish a package with a CI/CD pipeline, you must use a CI/CD job token.
|
||||
For more information, review the [guidance on tokens](../package_registry/supported_functionality.md#authenticate-with-the-registry).
|
||||
|
||||
Do not use authentication methods other than the methods documented here. Undocumented authentication methods might be removed in the future.
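
As a sketch of how one of these tokens is used in practice, the following publishes a file to the generic package registry with a personal access token. The project ID, file name, and instance domain are placeholders:

```shell
# Placeholder values: replace <project_id>, <personal_access_token>, and the instance domain.
curl --header "PRIVATE-TOKEN: <personal_access_token>" \
     --upload-file ./my-artifact.tar.gz \
     "https://gitlab.example.com/api/v4/projects/<project_id>/packages/generic/my-package/1.0.0/my-artifact.tar.gz"
```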
|
||||
|
|
|
|||
|
|
@ -12,22 +12,22 @@ title: Store all of your packages in one GitLab project
|
|||
|
||||
{{< /details >}}
|
||||
|
||||
You can store all packages in one project's package registry and configure your remote repositories to
|
||||
Store packages from multiple sources in one project's package registry and configure your remote repositories to
|
||||
point to this project in GitLab.
|
||||
|
||||
Use this approach when you want to:
|
||||
|
||||
- Publish packages to GitLab in a different project than where your code is stored
|
||||
- Publish packages to GitLab in a different project than where your code is stored.
|
||||
- Group packages together in one project (for example, all npm packages, all packages for a specific
|
||||
department, or all private packages in the same project)
|
||||
- Use one remote repository when installing packages for other projects
|
||||
- Migrate packages from a third-party package registry to a single location in GitLab
|
||||
- Have CI/CD pipelines build all packages to one project so you can manage packages in the same location
|
||||
department, or all private packages in the same project).
|
||||
- Use one remote repository when installing packages for other projects.
|
||||
- Migrate packages from a third-party package registry to a single location in GitLab.
|
||||
- Have CI/CD pipelines build all packages to one project so you can manage packages in the same location.
|
||||
|
||||
## Example walkthrough
|
||||
|
||||
No functionality is specific to this feature. Instead, we're taking advantage of the functionality
|
||||
of each package management system to publish different package types to the same place.
|
||||
Use each package management system
|
||||
to publish different package types in the same place.
|
||||
|
||||
- <i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
|
||||
Watch a video of how to add Maven, npm, and Conan packages to [the same project](https://youtu.be/ui2nNBwN35c).
|
||||
|
|
@ -42,63 +42,68 @@ Let's take a look at how you might create one project to host all of your packag
|
|||
1. Create an access token for authentication. All package types in the package registry can be published by using:
|
||||
|
||||
- A [personal access token](../../profile/personal_access_tokens.md).
|
||||
- A [group access token](../../../user/group/settings/group_access_tokens.md) or [project access token](../../../user/project/settings/project_access_tokens.md).
|
||||
- A [CI/CD job token](../../../ci/jobs/ci_job_token.md) (`CI_JOB_TOKEN`) in a CI/CD job.
|
||||
Any projects publishing packages to this project's registry should be listed
|
||||
in this project's [job token allowlist](../../../ci/jobs/ci_job_token.md#add-a-group-or-project-to-the-job-token-allowlist).
|
||||
The project's [job token allowlist](../../../ci/jobs/ci_job_token.md#add-a-group-or-project-to-the-job-token-allowlist) should list any projects publishing to this project's registry.
|
||||
|
||||
If the project is private, downloading packages requires authentication as well.
|
||||
|
||||
1. Configure your local project and publish the package.
|
||||
|
||||
You can upload all types of packages to the same project, or
|
||||
split things up based on package type or package visibility level.
|
||||
You can upload all package types to the same project, or
|
||||
split up packages based on package type or visibility level.
|
||||
|
||||
### npm
|
||||
|
||||
If you're using npm, create an `.npmrc` file. Add the appropriate URL for publishing
|
||||
packages to your project. Finally, add a section to your `package.json` file.
|
||||
For npm packages:
|
||||
|
||||
Follow the instructions in the
|
||||
[GitLab package registry npm documentation](../npm_registry/_index.md#authenticate-to-the-package-registry). After
|
||||
you do this, you can publish your npm package to your project using `npm publish`, as described in the
|
||||
[publishing packages](../npm_registry/_index.md#publish-to-gitlab-package-registry) section.
|
||||
- Create an [`.npmrc` file](../npm_registry/_index.md#with-the-npmrc-file) to configure the registry URL.
|
||||
- Scope your packages with the `publishConfig` option in the `package.json` file of your project.
|
||||
- Publish packages with `npm publish`.
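
A minimal command-line sketch of these steps, assuming a scoped package and a project-level endpoint (the scope, project ID, token, and domain are placeholders):

```shell
# Point the scope at the project's npm endpoint and store the token.
npm config set @my-scope:registry "https://gitlab.example.com/api/v4/projects/<project_id>/packages/npm/"
npm config set -- "//gitlab.example.com/api/v4/projects/<project_id>/packages/npm/:_authToken" "<token>"

# Publish the scoped package described in package.json.
npm publish
```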
|
||||
|
||||
For more information, see [npm packages in the package registry](../npm_registry/_index.md).
|
||||
|
||||
### Maven
|
||||
|
||||
If you are using Maven, you update your `pom.xml` file with distribution sections. These updates include the
|
||||
appropriate URL for your project, as described in the [GitLab Maven Repository documentation](../maven_repository/_index.md#naming-convention).
|
||||
Then, you need to add a `settings.xml` file and [include your access token](../maven_repository/_index.md#authenticate-to-the-package-registry).
|
||||
Now you can [publish Maven packages](../maven_repository/_index.md#publish-a-package) to your project.
|
||||
For Maven packages:
|
||||
|
||||
1. Update your `pom.xml` file with `repository` and `distributionManagement` sections to configure the registry URL.
|
||||
1. Add a `settings.xml` file and include your access token.
|
||||
1. Publish packages with `mvn deploy`.
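
For example, assuming the registry URL is configured in `pom.xml` and your access token is stored in a local `settings.xml` (the file name is illustrative):

```shell
# Deploy to the repository defined in pom.xml, using credentials from the given settings file.
mvn deploy -s settings.xml
```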
|
||||
|
||||
For more information, see [Maven packages in the package registry](../maven_repository/_index.md).
|
||||
|
||||
### Conan 1
|
||||
|
||||
For Conan 1, you must add GitLab as a Conan registry remote. For instructions, see
|
||||
[Add the package registry as a Conan remote](../conan_1_repository/_index.md#add-the-package-registry-as-a-conan-remote).
|
||||
Then, create your package using the plus-separated (`+`) project path as your Conan user. For example,
|
||||
For Conan 1:
|
||||
|
||||
- Add the GitLab package registry as a Conan registry remote.
|
||||
- [Create your Conan 1 package](build_packages.md#build-a-conan-1-package) using the plus-separated (`+`) project path as your Conan user. For example,
|
||||
if your project is located at `https://gitlab.com/foo/bar/my-proj`,
|
||||
[create your Conan package](build_packages.md#conan-1) using `conan create . foo+bar+my-proj/channel`.
|
||||
`channel` is your package channel (such as `stable` or `beta`).
|
||||
create your Conan package using `conan create . foo+bar+my-proj/channel`. `channel` is the package channel, such as `beta` or `stable`:
|
||||
|
||||
After you create your package, you're ready to [publish your package](../conan_1_repository/_index.md#publish-a-conan-package),
|
||||
depending on your final package recipe. For example:
|
||||
```shell
|
||||
CONAN_LOGIN_USERNAME=<gitlab-username> CONAN_PASSWORD=<personal_access_token> conan upload MyPackage/1.0.0@foo+bar+my-proj/channel --all --remote=gitlab
|
||||
```
|
||||
|
||||
```shell
|
||||
CONAN_LOGIN_USERNAME=<gitlab-username> CONAN_PASSWORD=<personal_access_token> conan upload MyPackage/1.0.0@foo+bar+my-proj/channel --all --remote=gitlab
|
||||
```
|
||||
- Publish your package with `conan upload` or your package recipe.
|
||||
|
||||
For more information, see [Conan 1 packages in the package registry](../conan_1_repository/_index.md).
|
||||
|
||||
### Conan 2
|
||||
|
||||
For Conan 2, you must add GitLab as a Conan registry remote. For instructions, see
|
||||
[Add the package registry as a Conan remote](../conan_2_repository/_index.md#add-the-package-registry-as-a-conan-remote).
|
||||
Then, [create your Conan 2 package](build_packages.md#conan-2).
|
||||
For Conan 2:
|
||||
|
||||
After you create your package, you're ready to [publish your package](../conan_2_repository/_index.md#publish-a-conan-2-package),
|
||||
depending on your final package recipe.
|
||||
- Add the GitLab package registry as a Conan registry remote.
|
||||
- [Create your Conan 2 package](build_packages.md#conan-2).
|
||||
- Publish your package with `conan upload` or your package recipe.
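
A minimal sketch, assuming a remote named `gitlab` was already added and the package reference `mypackage/1.0.0` exists in the local cache (both names are placeholders):

```shell
# Authenticate against the remote, then upload the recipe and binaries.
conan remote login gitlab <gitlab-username> -p <personal_access_token>
conan upload mypackage/1.0.0 --remote=gitlab
```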
|
||||
|
||||
For more information, see [Conan 2 packages in the package registry](../conan_2_repository/_index.md).
|
||||
|
||||
### Composer
|
||||
|
||||
You can't publish a Composer package outside of its project. An [issue](https://gitlab.com/gitlab-org/gitlab/-/issues/250633)
|
||||
exists to implement functionality that allows you to publish such packages to other projects.
|
||||
You can't publish a Composer package outside of its project. Support for publishing Composer packages
|
||||
in other projects is proposed in [issue 250633](https://gitlab.com/gitlab-org/gitlab/-/issues/250633).
|
||||
|
||||
### All other package types
|
||||
|
||||
|
|
|
|||
|
|
@ -15,6 +15,7 @@ require_relative 'validation/fixers/missing_index'
|
|||
require_relative 'validation/validators/different_definition_indexes'
|
||||
require_relative 'validation/validators/extra_indexes'
|
||||
require_relative 'validation/validators/missing_indexes'
|
||||
require_relative 'validation/validators/missing_sequences'
|
||||
|
||||
require_relative 'validation/validators/extra_table_columns'
|
||||
require_relative 'validation/validators/missing_table_columns'
|
||||
|
|
@ -35,12 +36,14 @@ require_relative 'validation/sources/connection_adapters/base'
|
|||
require_relative 'validation/sources/connection_adapters/active_record_adapter'
|
||||
require_relative 'validation/sources/connection_adapters/pg_adapter'
|
||||
require_relative 'validation/sources/structure_sql'
|
||||
require_relative 'validation/sources/sequence_structure_sql_parser'
|
||||
require_relative 'validation/sources/database'
|
||||
require_relative 'validation/sources/connection'
|
||||
|
||||
require_relative 'validation/schema_objects/base'
|
||||
require_relative 'validation/schema_objects/column'
|
||||
require_relative 'validation/schema_objects/index'
|
||||
require_relative 'validation/schema_objects/sequence'
|
||||
require_relative 'validation/schema_objects/table'
|
||||
require_relative 'validation/schema_objects/trigger'
|
||||
require_relative 'validation/schema_objects/foreign_key'
|
||||
|
|
@ -49,6 +52,8 @@ require_relative 'validation/adapters/column_database_adapter'
|
|||
require_relative 'validation/adapters/column_structure_sql_adapter'
|
||||
require_relative 'validation/adapters/foreign_key_database_adapter'
|
||||
require_relative 'validation/adapters/foreign_key_structure_sql_adapter'
|
||||
require_relative 'validation/adapters/sequence_database_adapter'
|
||||
require_relative 'validation/adapters/sequence_structure_sql_adapter'
|
||||
|
||||
module Gitlab
|
||||
module Schema
|
||||
|
|
|
|||
|
|
@ -0,0 +1,59 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Schema
|
||||
module Validation
|
||||
module Adapters
|
||||
class SequenceDatabaseAdapter
|
||||
def initialize(query_result)
|
||||
@query_result = query_result
|
||||
end
|
||||
|
||||
def name
|
||||
return unless query_result['sequence_name']
|
||||
|
||||
"#{schema}.#{query_result['sequence_name']}"
|
||||
end
|
||||
|
||||
def column_owner
|
||||
return unless query_result['owned_by_column']
|
||||
|
||||
"#{schema}.#{query_result['owned_by_column']}"
|
||||
end
|
||||
|
||||
def user_owner
|
||||
query_result['user_owner']
|
||||
end
|
||||
|
||||
def start_value
|
||||
query_result['start_value']
|
||||
end
|
||||
|
||||
def increment_by
|
||||
query_result['increment_by']
|
||||
end
|
||||
|
||||
def min_value
|
||||
query_result['min_value']
|
||||
end
|
||||
|
||||
def max_value
|
||||
query_result['max_value']
|
||||
end
|
||||
|
||||
def cycle
|
||||
query_result['cycle']
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :query_result
|
||||
|
||||
def schema
|
||||
query_result['schema'] || 'public'
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Schema
|
||||
module Validation
|
||||
module Adapters
|
||||
class SequenceStructureSqlAdapter
|
||||
attr_reader :sequence_name, :schema_name
|
||||
attr_accessor :owner_table, :owner_column, :owner_schema
|
||||
|
||||
def initialize(
|
||||
sequence_name:, schema_name: nil, owner_table: nil,
|
||||
owner_column: nil, owner_schema: nil)
|
||||
@sequence_name = sequence_name
|
||||
@schema_name = schema_name
|
||||
@owner_table = owner_table
|
||||
@owner_column = owner_column
|
||||
@owner_schema = owner_schema
|
||||
end
|
||||
|
||||
# Fully qualified sequence name (schema.sequence_name)
|
||||
def name
|
||||
"#{schema}.#{sequence_name}"
|
||||
end
|
||||
|
||||
# Just the column name
|
||||
def column_name
|
||||
owner_column
|
||||
end
|
||||
|
||||
def table_name
|
||||
owner_table
|
||||
end
|
||||
|
||||
# Fully qualified column reference (schema.table.column)
|
||||
def column_owner
|
||||
return unless owner_table && owner_column
|
||||
|
||||
"#{column_schema}.#{owner_table}.#{owner_column}"
|
||||
end
|
||||
|
||||
# Get the schema this sequence belongs to
|
||||
def schema
|
||||
schema_name || owner_schema || 'public'
|
||||
end
|
||||
|
||||
def column_schema
|
||||
owner_schema || schema_name || 'public'
|
||||
end
|
||||
|
||||
def to_s
|
||||
"SequenceStructureSqlAdapter(#{name} -> #{column_owner})"
|
||||
end
|
||||
|
||||
def inspect
|
||||
"#<SequenceStructureSqlAdapter:#{object_id} #{name} -> #{column_owner}>"
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Schema
|
||||
module Validation
|
||||
module SchemaObjects
|
||||
class Sequence < Base
|
||||
def initialize(adapter)
|
||||
@adapter = adapter
|
||||
end
|
||||
|
||||
# Sequence should include the schema, as the same name could be used across different schemas
|
||||
#
|
||||
# @example public.sequence_name
|
||||
def name
|
||||
@name ||= adapter.name
|
||||
end
|
||||
|
||||
# Fully qualified column reference (schema.table.column)
|
||||
def owner
|
||||
@owner ||= adapter.column_owner
|
||||
end
|
||||
|
||||
def table_name
|
||||
@table_name ||= adapter.table_name
|
||||
end
|
||||
|
||||
def statement
|
||||
"CREATE SEQUENCE #{name}"
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :adapter
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -27,6 +27,10 @@ module Gitlab
|
|||
table_map[table_name]
|
||||
end
|
||||
|
||||
def fetch_sequence_by_name(sequence_name)
|
||||
sequence_map[sequence_name]
|
||||
end
|
||||
|
||||
def index_exists?(index_name)
|
||||
index = index_map[index_name]
|
||||
|
||||
|
|
@ -59,6 +63,10 @@ module Gitlab
|
|||
true
|
||||
end
|
||||
|
||||
def sequence_exists?(sequence_name)
|
||||
!!fetch_sequence_by_name(sequence_name)
|
||||
end
|
||||
|
||||
def indexes
|
||||
index_map.values
|
||||
end
|
||||
|
|
@ -75,6 +83,10 @@ module Gitlab
|
|||
table_map.values
|
||||
end
|
||||
|
||||
def sequences
|
||||
sequence_map.values
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :connection
|
||||
|
|
@ -113,6 +125,12 @@ module Gitlab
|
|||
end
|
||||
end
|
||||
|
||||
def sequence_map
|
||||
@sequence_map ||= fetch_sequences.transform_values! do |stmt|
|
||||
SchemaObjects::Sequence.new(Adapters::SequenceDatabaseAdapter.new(stmt.first))
|
||||
end
|
||||
end
|
||||
|
||||
def fetch_tables
|
||||
# rubocop:disable Rails/SquishedSQLHeredocs
|
||||
sql = <<~SQL
|
||||
|
|
@ -187,6 +205,44 @@ module Gitlab
|
|||
|
||||
connection.exec_query(sql, [connection.current_schema])
|
||||
end
|
||||
|
||||
# Fetch all the sequences
|
||||
def fetch_sequences
|
||||
# rubocop:disable Rails/SquishedSQLHeredocs
|
||||
sql = <<~SQL
|
||||
SELECT
|
||||
c.relname AS sequence_name,
|
||||
n.nspname AS schema,
|
||||
pg_catalog.pg_get_userbyid(c.relowner) AS user_owner,
|
||||
s.seqstart AS start_value,
|
||||
s.seqincrement AS increment_by,
|
||||
s.seqmin AS min_value,
|
||||
s.seqmax AS max_value,
|
||||
s.seqcycle AS cycle,
|
||||
s.seqcache AS cache_size,
|
||||
pg_catalog.obj_description(c.oid, 'pg_class') AS comment,
|
||||
CASE
|
||||
WHEN d.refobjid IS NOT NULL THEN
|
||||
ref_class.relname || '.' || ref_attr.attname
|
||||
ELSE NULL
|
||||
END AS owned_by_column
|
||||
FROM pg_catalog.pg_class c
|
||||
INNER JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
|
||||
LEFT JOIN pg_catalog.pg_sequence s ON s.seqrelid = c.oid
|
||||
LEFT JOIN pg_catalog.pg_depend d ON d.objid = c.oid
|
||||
AND d.deptype = 'a'
|
||||
AND d.classid = 'pg_class'::regclass
|
||||
LEFT JOIN pg_catalog.pg_class ref_class ON ref_class.oid = d.refobjid
|
||||
LEFT JOIN pg_catalog.pg_attribute ref_attr ON ref_attr.attrelid = d.refobjid
|
||||
AND ref_attr.attnum = d.refobjsubid
|
||||
WHERE c.relkind = 'S'
|
||||
AND n.nspname IN ($1, $2)
|
||||
ORDER BY c.relname, n.nspname
|
||||
SQL
|
||||
# rubocop:enable Rails/SquishedSQLHeredocs
|
||||
|
||||
connection.exec_query(sql, schemas).group_by { |seq| "#{seq['schema']}.#{seq['sequence_name']}" }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,185 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Schema
|
||||
module Validation
|
||||
module Sources
|
||||
class SequenceStructureSqlParser
|
||||
attr_reader :sequences
|
||||
|
||||
def initialize(parsed_structure, default_schema_name)
|
||||
@parsed_structure = parsed_structure
|
||||
@default_schema_name = default_schema_name
|
||||
@sequences = {}
|
||||
end
|
||||
|
||||
# Returns a map of sequence name to sequence structure objects
|
||||
def execute
|
||||
extract_sequences
|
||||
@sequences
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :parsed_structure, :default_schema_name
|
||||
|
||||
def extract_sequences
|
||||
parsed_structure.tree.stmts.each do |stmt|
|
||||
case stmt.stmt.node
|
||||
when :create_seq_stmt
|
||||
process_create_sequence(stmt.stmt.create_seq_stmt)
|
||||
when :alter_seq_stmt
|
||||
process_alter_sequence(stmt.stmt.alter_seq_stmt)
|
||||
when :alter_table_stmt
|
||||
process_alter_table(stmt.stmt.alter_table_stmt)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Process CREATE SEQUENCE SQL queries. For example:
|
||||
#
|
||||
# CREATE SEQUENCE web_hook_logs_id_seq
|
||||
def process_create_sequence(create_seq)
|
||||
sequence_name = create_seq.sequence.relname
|
||||
schema_name = resolve_schema_name(create_seq.sequence.schemaname)
|
||||
full_name = "#{schema_name}.#{sequence_name}"
|
||||
|
||||
@sequences[full_name] = ::Gitlab::Schema::Validation::Adapters::SequenceStructureSqlAdapter.new(
|
||||
sequence_name: sequence_name,
|
||||
schema_name: schema_name
|
||||
)
|
||||
end
|
||||
|
||||
# Processes ALTER SEQUENCE SQL queries to extract column owner. For example:
|
||||
#
|
||||
# ALTER SEQUENCE ai_code_suggestion_events_id_seq OWNED BY ai_code_suggestion_events.id;
|
||||
def process_alter_sequence(alter_seq)
|
||||
sequence_schema = alter_seq.sequence.schemaname
|
||||
sequence_schema = default_schema_name if sequence_schema == ''
|
||||
sequence_name = alter_seq.sequence.relname
|
||||
|
||||
# Look for OWNED BY option
|
||||
return unless alter_seq.options
|
||||
|
||||
owner_schema = default_schema_name
|
||||
owner_table = nil
|
||||
owner_column = nil
|
||||
|
||||
alter_seq.options.each do |option|
|
||||
def_elem = option.def_elem
|
||||
|
||||
next unless def_elem.defname == 'owned_by'
|
||||
next unless def_elem.arg && def_elem.arg.node == :list
|
||||
|
||||
owned_by_list = def_elem.arg.list.items
|
||||
|
||||
next unless owned_by_list.length >= 2
|
||||
|
||||
# Handle schema.table.column or table.column
|
||||
if owned_by_list.length == 3
|
||||
owner_schema = owned_by_list[0].string.sval
|
||||
owner_table = owned_by_list[1].string.sval
|
||||
owner_column = owned_by_list[2].string.sval
|
||||
else
|
||||
owner_table = owned_by_list[0].string.sval
|
||||
owner_column = owned_by_list[1].string.sval
|
||||
end
|
||||
end
|
||||
|
||||
full_name = "#{sequence_schema}.#{sequence_name}"
|
||||
# Update or create sequence with ownership info
|
||||
existing = @sequences[full_name]
|
||||
|
||||
unless existing
|
||||
warn "Could not find sequence #{full_name} for ALTER SEQUENCE command"
|
||||
return
|
||||
end
|
||||
|
||||
existing.owner_table = owner_table
|
||||
existing.owner_column = owner_column
|
||||
existing.owner_schema = owner_schema
|
||||
end
|
||||
|
||||
# Process ALTER TABLE commands to extract sequence owner. For example:
|
||||
#
|
||||
# ALTER TABLE ONLY web_hook_logs ALTER COLUMN id SET DEFAULT nextval('web_hook_logs_id_seq'::regclass);
|
||||
def process_alter_table(alter_table)
|
||||
table_name = alter_table.relation.relname
|
||||
table_schema = resolve_schema_name(alter_table.relation.schemaname)
|
||||
|
||||
alter_table.cmds.each do |cmd|
|
||||
alter_cmd = cmd.alter_table_cmd
|
||||
|
||||
# Look for SET DEFAULT nextval(...) commands
|
||||
next unless alter_cmd.subtype == :AT_ColumnDefault
|
||||
|
||||
column_name = alter_cmd.name
|
||||
sequence_name = extract_sequence_from_default(alter_cmd.def)
|
||||
sequence_schema, sequence_name = process_sequence_name(sequence_name)
|
||||
lookup_name = "#{sequence_schema}.#{sequence_name}"
|
||||
|
||||
# Update existing sequence or create new one
|
||||
existing = @sequences[lookup_name]
|
||||
|
||||
@sequences[lookup_name] = ::Gitlab::Schema::Validation::Adapters::SequenceStructureSqlAdapter.new(
|
||||
sequence_name: sequence_name,
|
||||
schema_name: existing&.schema_name || table_schema,
|
||||
owner_table: existing&.owner_table || table_name,
|
||||
owner_column: existing&.owner_column || column_name,
|
||||
owner_schema: existing&.owner_schema || table_schema
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
def extract_sequence_from_default(expr)
|
||||
return nil unless expr
|
||||
|
||||
case expr.node
|
||||
when :func_call
|
||||
func_call = expr.func_call
|
||||
return extract_string_from_expr(func_call.args.first) if nextval_func?(func_call)
|
||||
when :type_cast
|
||||
return extract_sequence_from_default(expr.type_cast.arg)
|
||||
end
|
||||
nil
|
||||
end
|
||||
|
||||
def extract_string_from_expr(expr)
|
||||
case expr.node
|
||||
when :a_const
|
||||
expr.a_const.sval.sval if expr.a_const.val == :sval
|
||||
when :type_cast
|
||||
extract_string_from_expr(expr.type_cast.arg)
|
||||
end
|
||||
end
|
||||
|
||||
def resolve_schema_name(schema_name)
|
||||
return default_schema_name if schema_name == ''
|
||||
|
||||
schema_name
|
||||
end
|
||||
|
||||
def process_sequence_name(sequence_name)
|
||||
data = sequence_name.split('.', 2)
|
||||
|
||||
return default_schema_name, sequence_name if data.length == 1
|
||||
|
||||
[data.first, data.last]
|
||||
end
|
||||
|
||||
def nextval_func?(func_call)
|
||||
return false unless func_call.args.any?
|
||||
|
||||
func_call.funcname.any? do |name|
|
||||
sval = name.string&.sval
|
||||
|
||||
next false unless sval
|
||||
|
||||
sval.casecmp('nextval').zero?
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -32,6 +32,10 @@ module Gitlab
|
|||
table_map[table_name]
|
||||
end
|
||||
|
||||
def fetch_sequence_by_name(sequence_name)
|
||||
sequence_map[sequence_name]
|
||||
end
|
||||
|
||||
def index_exists?(index_name)
|
||||
!!fetch_index_by_name(index_name)
|
||||
end
|
||||
|
|
@ -48,6 +52,10 @@ module Gitlab
|
|||
!!fetch_table_by_name(table_name)
|
||||
end
|
||||
|
||||
def sequence_exists?(sequence_name)
|
||||
!!fetch_sequence_by_name(sequence_name)
|
||||
end
|
||||
|
||||
def indexes
|
||||
@indexes ||= map_with_default_schema(index_statements, SchemaObjects::Index)
|
||||
end
|
||||
|
|
@ -56,6 +64,18 @@ module Gitlab
|
|||
@triggers ||= map_with_default_schema(trigger_statements, SchemaObjects::Trigger)
|
||||
end
|
||||
|
||||
def sequences
|
||||
sequence_map.values
|
||||
end
|
||||
|
||||
def sequence_map
|
||||
@sequences ||= begin
|
||||
parser = Gitlab::Schema::Validation::Sources::SequenceStructureSqlParser.new(
|
||||
parsed_structure_file, schema_name)
|
||||
parser.execute.transform_values! { |sequence| SchemaObjects::Sequence.new(sequence) }
|
||||
end
|
||||
end
|
||||
|
||||
def foreign_keys
|
||||
@foreign_keys ||= foreign_key_statements.map do |stmt|
|
||||
stmt.relation.schemaname = schema_name if stmt.relation.schemaname == ''
|
||||
|
|
@ -132,7 +152,7 @@ module Gitlab
|
|||
end
|
||||
|
||||
def parsed_structure_file
|
||||
PgQuery.parse(File.read(structure_file_path))
|
||||
@parsed_structure_file ||= PgQuery.parse(File.read(structure_file_path))
|
||||
end
|
||||
|
||||
def map_with_default_schema(statements, validation_class)
|
||||
|
|
|
|||
|
|
@ -19,6 +19,7 @@ module Gitlab
|
|||
MissingIndexes,
|
||||
MissingTriggers,
|
||||
MissingForeignKeys,
|
||||
MissingSequences,
|
||||
DifferentDefinitionTables,
|
||||
DifferentDefinitionIndexes,
|
||||
DifferentDefinitionTriggers,
|
||||
|
|
|
|||
|
|
@ -0,0 +1,21 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Schema
|
||||
module Validation
|
||||
module Validators
|
||||
class MissingSequences < Base
|
||||
ERROR_MESSAGE = "The sequence %s is missing from the database"
|
||||
|
||||
def execute
|
||||
structure_sql.sequences.filter_map do |sequence|
|
||||
next if database.sequence_exists?(sequence.name)
|
||||
|
||||
build_inconsistency(self.class, sequence, nil)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -1,3 +1,7 @@
|
|||
CREATE SEQUENCE missing_sequence;
|
||||
CREATE SEQUENCE shared_audit_event_id_seq;
|
||||
CREATE SEQUENCE abuse_events_id_seq;
|
||||
|
||||
CREATE INDEX missing_index ON events USING btree (created_at, author_id);
|
||||
|
||||
CREATE UNIQUE INDEX wrong_index ON table_name (column_name, column_name_2);
|
||||
|
|
|
|||
|
|
@ -0,0 +1,170 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Schema::Validation::Adapters::SequenceDatabaseAdapter do
|
||||
let(:query_result) do
|
||||
{
|
||||
'schema' => 'public',
|
||||
'sequence_name' => 'users_id_seq',
|
||||
'owned_by_column' => 'users.id',
|
||||
'user_owner' => 'gitlab',
|
||||
'start_value' => '1',
|
||||
'increment_by' => '1',
|
||||
'min_value' => '1',
|
||||
'max_value' => '9223372036854775807',
|
||||
'cycle' => false
|
||||
}
|
||||
end
|
||||
|
||||
subject(:adapter) { described_class.new(query_result) }
|
||||
|
||||
describe '#name' do
|
||||
it 'returns formatted sequence name with schema' do
|
||||
expect(adapter.name).to eq('public.users_id_seq')
|
||||
end
|
||||
|
||||
context 'when schema or sequence_name is nil' do
|
||||
let(:query_result) { { 'schema' => nil, 'sequence_name' => 'test_seq' } }
|
||||
|
||||
it 'defaults to public schema' do
|
||||
expect(adapter.name).to eq('public.test_seq')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when both schema and sequence_name are missing' do
|
||||
let(:query_result) { {} }
|
||||
|
||||
it 'returns nil' do
|
||||
expect(adapter.name).to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#column_owner' do
|
||||
it 'returns formatted column owner with schema' do
|
||||
expect(adapter.column_owner).to eq('public.users.id')
|
||||
end
|
||||
|
||||
context 'when owned_by_column contains table.column format' do
|
||||
let(:query_result) do
|
||||
{
|
||||
'schema' => 'analytics',
|
||||
'owned_by_column' => 'events.event_id'
|
||||
}
|
||||
end
|
||||
|
||||
it 'prepends schema correctly' do
|
||||
expect(adapter.column_owner).to eq('analytics.events.event_id')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when schema or owned_by_column is nil' do
|
||||
let(:query_result) { { 'schema' => 'test', 'owned_by_column' => nil } }
|
||||
|
||||
it 'returns nil' do
|
||||
expect(adapter.column_owner).to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#user_owner' do
|
||||
it 'returns the user owner' do
|
||||
expect(adapter.user_owner).to eq('gitlab')
|
||||
end
|
||||
|
||||
context 'when user_owner is nil' do
|
||||
let(:query_result) { { 'user_owner' => nil } }
|
||||
|
||||
it 'returns nil' do
|
||||
expect(adapter.user_owner).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user_owner is missing' do
|
||||
let(:query_result) { {} }
|
||||
|
||||
it 'returns nil' do
|
||||
expect(adapter.user_owner).to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#start_value' do
|
||||
it 'returns the start value' do
|
||||
expect(adapter.start_value).to eq('1')
|
||||
end
|
||||
|
||||
context 'when start_value is different' do
|
||||
let(:query_result) { { 'start_value' => '100' } }
|
||||
|
||||
it 'returns the correct value' do
|
||||
expect(adapter.start_value).to eq('100')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#increment_by' do
|
||||
it 'returns the increment value' do
|
||||
expect(adapter.increment_by).to eq('1')
|
||||
end
|
||||
|
||||
context 'when increment_by is different' do
|
||||
let(:query_result) { { 'increment_by' => '5' } }
|
||||
|
||||
it 'returns the correct value' do
|
||||
expect(adapter.increment_by).to eq('5')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#min_value' do
|
||||
it 'returns the minimum value' do
|
||||
expect(adapter.min_value).to eq('1')
|
||||
end
|
||||
|
||||
context 'when min_value is different' do
|
||||
let(:query_result) { { 'min_value' => '0' } }
|
||||
|
||||
it 'returns the correct value' do
|
||||
expect(adapter.min_value).to eq('0')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#max_value' do
|
||||
it 'returns the maximum value' do
|
||||
expect(adapter.max_value).to eq('9223372036854775807')
|
||||
end
|
||||
|
||||
context 'when max_value is different' do
|
||||
let(:query_result) { { 'max_value' => '1000' } }
|
||||
|
||||
it 'returns the correct value' do
|
||||
expect(adapter.max_value).to eq('1000')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#cycle' do
|
||||
it 'returns the cycle value' do
|
||||
expect(adapter.cycle).to be(false)
|
||||
end
|
||||
|
||||
context 'when cycle is true' do
|
||||
let(:query_result) { { 'cycle' => true } }
|
||||
|
||||
it 'returns true' do
|
||||
expect(adapter.cycle).to be(true)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when cycle is nil' do
|
||||
let(:query_result) { { 'cycle' => nil } }
|
||||
|
||||
it 'returns nil' do
|
||||
expect(adapter.cycle).to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,160 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Schema::Validation::Adapters::SequenceStructureSqlAdapter do
|
||||
let(:sequence_name) { 'users_id_seq' }
|
||||
let(:schema_name) { 'public' }
|
||||
let(:owner_table) { 'users' }
|
||||
let(:owner_column) { 'id' }
|
||||
let(:owner_schema) { 'app_schema' }
|
||||
|
||||
subject(:sequence_sql_adapter) do
|
||||
described_class.new(
|
||||
sequence_name: sequence_name,
|
||||
schema_name: schema_name,
|
||||
owner_table: owner_table,
|
||||
owner_column: owner_column,
|
||||
owner_schema: owner_schema
|
||||
)
|
||||
end
|
||||
|
||||
describe '#initialize' do
|
||||
context 'with all parameters' do
|
||||
it 'sets all attributes correctly' do
|
||||
expect(sequence_sql_adapter.sequence_name).to eq(sequence_name)
|
||||
expect(sequence_sql_adapter.schema_name).to eq(schema_name)
|
||||
expect(sequence_sql_adapter.owner_table).to eq(owner_table)
|
||||
expect(sequence_sql_adapter.owner_column).to eq(owner_column)
|
||||
expect(sequence_sql_adapter.owner_schema).to eq(owner_schema)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with minimal parameters' do
|
||||
let(:schema_name) { nil }
|
||||
let(:owner_table) { nil }
|
||||
let(:owner_column) { nil }
|
||||
let(:owner_schema) { nil }
|
||||
|
||||
it 'sets sequence_name and defaults others to nil' do
|
||||
expect(sequence_sql_adapter.sequence_name).to eq(sequence_name)
|
||||
expect(sequence_sql_adapter.schema_name).to be_nil
|
||||
expect(sequence_sql_adapter.owner_table).to be_nil
|
||||
expect(sequence_sql_adapter.owner_column).to be_nil
|
||||
expect(sequence_sql_adapter.owner_schema).to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#name' do
|
||||
context 'when schema_name is present' do
|
||||
let(:owner_table) { nil }
|
||||
let(:owner_column) { nil }
|
||||
let(:owner_schema) { nil }
|
||||
|
||||
it 'returns fully qualified sequence name' do
|
||||
expect(sequence_sql_adapter.name).to eq('public.users_id_seq')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when schema_name and owner_schema are nil' do
|
||||
let(:schema_name) { nil }
|
||||
let(:owner_schema) { nil }
|
||||
|
||||
it 'returns sequence name with public schema' do
|
||||
expect(sequence_sql_adapter.name).to eq('public.users_id_seq')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#column_name' do
|
||||
it 'returns the owner_column' do
|
||||
expect(sequence_sql_adapter.column_name).to eq(owner_column)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#table_name' do
|
||||
it 'returns the owner_table' do
|
||||
expect(sequence_sql_adapter.table_name).to eq(owner_table)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#column_owner' do
|
||||
context 'when owner_schema, owner_table, and owner_column are all present' do
|
||||
it 'returns fully qualified column reference' do
|
||||
expect(sequence_sql_adapter.column_owner).to eq('app_schema.users.id')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when only owner_table and owner_column are present' do
|
||||
let(:sequence_name) { nil }
|
||||
let(:schema_name) { nil }
|
||||
let(:owner_schema) { nil }
|
||||
|
||||
it 'returns table.column format' do
|
||||
expect(sequence_sql_adapter.column_owner).to eq('public.users.id')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when only owner_column is present' do
|
||||
let(:owner_table) { nil }
|
||||
|
||||
it 'returns nil' do
|
||||
expect(sequence_sql_adapter.column_owner).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when no owner information is present' do
|
||||
let(:owner_table) { nil }
|
||||
let(:owner_column) { nil }
|
||||
|
||||
it 'returns nil' do
|
||||
expect(sequence_sql_adapter.column_owner).to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#schema' do
|
||||
context 'when schema_name is present' do
|
||||
it 'returns schema_name' do
|
||||
expect(sequence_sql_adapter.schema).to eq(schema_name)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when schema_name is nil but owner_schema is present' do
|
||||
let(:schema_name) { nil }
|
||||
|
||||
it 'returns owner_schema' do
|
||||
expect(sequence_sql_adapter.schema).to eq(owner_schema)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when both schema_name and owner_schema are nil' do
|
||||
let(:schema_name) { nil }
|
||||
let(:owner_schema) { nil }
|
||||
|
||||
it 'returns default public schema' do
|
||||
expect(sequence_sql_adapter.schema).to eq('public')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when schema_name is present and owner_schema is also present' do
|
||||
it 'prioritizes schema_name over owner_schema' do
|
||||
expect(sequence_sql_adapter.schema).to eq(schema_name)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#to_s' do
|
||||
it 'returns a string representation' do
|
||||
expect(sequence_sql_adapter.to_s).to eq('SequenceStructureSqlAdapter(public.users_id_seq -> app_schema.users.id)')
|
||||
end
|
||||
end
|
||||
|
||||
describe '#inspect' do
|
||||
it 'returns an inspect string with object_id' do
|
||||
result = sequence_sql_adapter.inspect
|
||||
expect(result).to match(/^#<SequenceStructureSqlAdapter:\d+ public\.users_id_seq -> app_schema\.users\.id>$/)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,247 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
# rubocop:disable Rails/SquishedSQLHeredocs -- This gem does not depend on Rails
|
||||
RSpec.describe Gitlab::Schema::Validation::Sources::SequenceStructureSqlParser, feature_category: :database do
|
||||
let(:default_schema_name) { 'public' }
|
||||
|
||||
subject(:parser) { described_class.new(parsed_structure, default_schema_name) }
|
||||
|
||||
describe '#execute' do
|
||||
let(:parsed_structure) { PgQuery.parse(sql) }
|
||||
|
||||
context 'with CREATE SEQUENCE statements' do
|
||||
where(:sql, :expected_sequences) do
|
||||
[
|
||||
[
|
||||
'CREATE SEQUENCE public.web_hook_logs_id_seq;',
|
||||
{ 'public.web_hook_logs_id_seq' => { sequence_name: 'web_hook_logs_id_seq', schema_name: 'public' } }
|
||||
],
|
||||
[
|
||||
'CREATE SEQUENCE web_hook_logs_id_seq;',
|
||||
{ 'public.web_hook_logs_id_seq' => { sequence_name: 'web_hook_logs_id_seq', schema_name: 'public' } }
|
||||
],
|
||||
[
|
||||
'CREATE SEQUENCE custom_schema.test_seq;',
|
||||
{ 'custom_schema.test_seq' => { sequence_name: 'test_seq', schema_name: 'custom_schema' } }
|
||||
]
|
||||
]
|
||||
end
|
||||
|
||||
with_them do
|
||||
it 'creates sequences with correct attributes' do
|
||||
result = parser.execute
|
||||
|
||||
expected_sequences.each do |full_name, expected_attrs|
|
||||
expect(result).to have_key(full_name)
|
||||
sequence = result[full_name]
|
||||
expect(sequence).to be_a(Gitlab::Schema::Validation::Adapters::SequenceStructureSqlAdapter)
|
||||
expect(sequence.sequence_name).to eq(expected_attrs[:sequence_name])
|
||||
expect(sequence.schema_name).to eq(expected_attrs[:schema_name])
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'with ALTER SEQUENCE OWNED BY statements' do
|
||||
let(:sql) do
|
||||
<<~SQL
|
||||
CREATE SEQUENCE public.ai_code_suggestion_events_id_seq;
|
||||
ALTER SEQUENCE public.ai_code_suggestion_events_id_seq OWNED BY ai_code_suggestion_events.id;
|
||||
SQL
|
||||
end
|
||||
|
||||
it 'sets ownership information' do
|
||||
result = parser.execute
|
||||
sequence = result['public.ai_code_suggestion_events_id_seq']
|
||||
|
||||
expect(sequence.sequence_name).to eq('ai_code_suggestion_events_id_seq')
|
||||
expect(sequence.schema_name).to eq('public')
|
||||
expect(sequence.owner_table).to eq('ai_code_suggestion_events')
|
||||
expect(sequence.owner_column).to eq('id')
|
||||
expect(sequence.owner_schema).to eq('public')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with ALTER SEQUENCE OWNED BY with schema.table.column format' do
|
||||
let(:sql) do
|
||||
<<~SQL
|
||||
CREATE SEQUENCE public.test_seq;
|
||||
ALTER SEQUENCE public.test_seq OWNED BY custom_schema.test_table.test_column;
|
||||
SQL
|
||||
end
|
||||
|
||||
it 'sets ownership information with custom schema' do
|
||||
result = parser.execute
|
||||
sequence = result['public.test_seq']
|
||||
|
||||
expect(sequence.owner_table).to eq('test_table')
|
||||
expect(sequence.owner_column).to eq('test_column')
|
||||
expect(sequence.owner_schema).to eq('custom_schema')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with ALTER TABLE SET DEFAULT nextval statements' do
|
||||
# rubocop:disable Layout/LineLength -- Long SQL statements are unavoidable
|
||||
where(:sql, :parsed_sequence_name, :expected_sequence_name, :expected_owner_table, :expected_owner_column) do
|
||||
[
|
||||
[
|
||||
"CREATE SEQUENCE public.web_hook_logs_id_seq;
|
||||
ALTER TABLE ONLY public.web_hook_logs ALTER COLUMN id SET DEFAULT nextval('web_hook_logs_id_seq'::regclass);",
|
||||
'public.web_hook_logs_id_seq',
|
||||
'web_hook_logs_id_seq',
|
||||
'web_hook_logs',
|
||||
'id'
|
||||
],
|
||||
[
|
||||
"CREATE SEQUENCE public.issues_id_seq;
|
||||
ALTER TABLE public.issues ALTER COLUMN id SET DEFAULT nextval('public.issues_id_seq'::regclass);",
|
||||
'public.issues_id_seq',
|
||||
'issues_id_seq',
|
||||
'issues',
|
||||
'id'
|
||||
],
|
||||
[
|
||||
"CREATE SEQUENCE public.test_seq;
|
||||
ALTER TABLE custom_schema.test_table ALTER COLUMN test_id SET DEFAULT nextval('test_seq'::regclass);",
|
||||
'public.test_seq',
|
||||
'test_seq',
|
||||
'test_table',
|
||||
'test_id'
|
||||
]
|
||||
]
|
||||
# rubocop:enable Layout/LineLength
|
||||
end
|
||||
|
||||
with_them do
|
||||
it 'extracts sequence ownership from ALTER TABLE statements' do
|
||||
result = parser.execute
|
||||
sequence = result[parsed_sequence_name]
|
||||
|
||||
expect(sequence).to be_a(Gitlab::Schema::Validation::Adapters::SequenceStructureSqlAdapter)
|
||||
expect(sequence.sequence_name).to eq(expected_sequence_name)
|
||||
expect(sequence.owner_table).to eq(expected_owner_table)
|
||||
expect(sequence.owner_column).to eq(expected_owner_column)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'with combined CREATE and ALTER statements' do
|
||||
let(:sql) do
|
||||
<<~SQL
|
||||
CREATE SEQUENCE public.web_hook_logs_id_seq;
|
||||
ALTER TABLE ONLY public.web_hook_logs ALTER COLUMN id SET DEFAULT nextval('web_hook_logs_id_seq'::regclass);
|
||||
ALTER SEQUENCE public.web_hook_logs_id_seq OWNED BY web_hook_logs.id;
|
||||
SQL
|
||||
end
|
||||
|
||||
it 'processes all statements and merges information' do
|
||||
result = parser.execute
|
||||
sequence = result['public.web_hook_logs_id_seq']
|
||||
|
||||
expect(sequence.sequence_name).to eq('web_hook_logs_id_seq')
|
||||
expect(sequence.schema_name).to eq('public')
|
||||
expect(sequence.owner_table).to eq('web_hook_logs')
|
||||
expect(sequence.owner_column).to eq('id')
|
||||
expect(sequence.owner_schema).to eq('public')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with multiple sequences' do
|
||||
let(:sql) do
|
||||
<<~SQL
|
||||
CREATE SEQUENCE public.users_id_seq;
|
||||
CREATE SEQUENCE public.projects_id_seq;
|
||||
ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('users_id_seq'::regclass);
|
||||
ALTER TABLE ONLY public.projects ALTER COLUMN id SET DEFAULT nextval('projects_id_seq'::regclass);
|
||||
SQL
|
||||
end
|
||||
|
||||
it 'processes multiple sequences correctly' do
|
||||
result = parser.execute
|
||||
|
||||
expect(result).to have_key('public.users_id_seq')
|
||||
expect(result).to have_key('public.projects_id_seq')
|
||||
|
||||
users_seq = result['public.users_id_seq']
|
||||
projects_seq = result['public.projects_id_seq']
|
||||
|
||||
expect(users_seq.owner_table).to eq('users')
|
||||
expect(projects_seq.owner_table).to eq('projects')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with ALTER SEQUENCE on non-existent sequence' do
|
||||
let(:sql) do
|
||||
<<~SQL
|
||||
ALTER SEQUENCE public.non_existent_seq OWNED BY test_table.id;
|
||||
SQL
|
||||
end
|
||||
|
||||
it 'warns about missing sequence' do
|
||||
expect(parser).to receive(:warn).with(
|
||||
'Could not find sequence public.non_existent_seq for ALTER SEQUENCE command')
|
||||
parser.execute
|
||||
end
|
||||
end
|
||||
|
||||
context 'with non-sequence statements' do
|
||||
let(:sql) do
|
||||
<<~SQL
|
||||
CREATE TABLE public.test_table (id integer);
|
||||
CREATE INDEX idx_test ON public.test_table (id);
|
||||
ALTER TABLE public.test_table ADD COLUMN name varchar(255);
|
||||
SQL
|
||||
end
|
||||
|
||||
it 'ignores non-sequence statements' do
|
||||
result = parser.execute
|
||||
expect(result).to be_empty
|
||||
end
|
||||
end
|
||||
|
||||
context 'with empty SQL' do
|
||||
let(:sql) { '' }
|
||||
|
||||
it 'returns empty hash' do
|
||||
result = parser.execute
|
||||
expect(result).to eq({})
|
||||
end
|
||||
end
|
||||
|
||||
context 'with complex nextval expressions' do
|
||||
let(:sql) do
|
||||
<<~SQL
|
||||
CREATE SEQUENCE public.test_seq;
|
||||
ALTER TABLE public.test_table ALTER COLUMN id SET DEFAULT nextval(('test_seq'::text)::regclass);
|
||||
SQL
|
||||
end
|
||||
|
||||
it 'extracts sequence name from complex expressions' do
|
||||
result = parser.execute
|
||||
sequence = result['public.test_seq']
|
||||
|
||||
expect(sequence.sequence_name).to eq('test_seq')
|
||||
expect(sequence.owner_table).to eq('test_table')
|
||||
expect(sequence.owner_column).to eq('id')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'default schema handling' do
|
||||
context 'with custom default schema' do
|
||||
let(:default_schema_name) { 'custom_default' }
|
||||
let(:sql) { 'CREATE SEQUENCE test_seq;' }
|
||||
let(:parsed_structure) { PgQuery.parse(sql) }
|
||||
|
||||
it 'uses custom default schema' do
|
||||
result = parser.execute
|
||||
sequence = result['custom_default.test_seq']
|
||||
|
||||
expect(sequence.schema_name).to eq('custom_default')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
# rubocop:enable Rails/SquishedSQLHeredocs
|
||||
|
|
@ -24,6 +24,7 @@ RSpec.describe Gitlab::Schema::Validation::Validators::Base, feature_category: :
|
|||
Gitlab::Schema::Validation::Validators::MissingIndexes,
|
||||
Gitlab::Schema::Validation::Validators::MissingTriggers,
|
||||
Gitlab::Schema::Validation::Validators::MissingForeignKeys,
|
||||
Gitlab::Schema::Validation::Validators::MissingSequences,
|
||||
Gitlab::Schema::Validation::Validators::DifferentDefinitionTables,
|
||||
Gitlab::Schema::Validation::Validators::DifferentDefinitionIndexes,
|
||||
Gitlab::Schema::Validation::Validators::DifferentDefinitionTriggers,
|
||||
|
|
|
|||
|
|
@ -0,0 +1,13 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Schema::Validation::Validators::MissingSequences, feature_category: :database do
|
||||
missing_sequences = %w[
|
||||
public.missing_sequence
|
||||
public.shared_audit_event_id_seq
|
||||
public.abuse_events_id_seq
|
||||
]
|
||||
|
||||
include_examples 'sequence validators', described_class, missing_sequences
|
||||
end
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.shared_examples 'sequence validators' do |validator, expected_result|
|
||||
let(:structure_file_path) { 'spec/fixtures/structure.sql' }
|
||||
let(:database_sequences) do
|
||||
%w[
|
||||
wrong_sequence
|
||||
extra_sequence
|
||||
shared_audit_event_id_seq
|
||||
]
|
||||
end
|
||||
|
||||
let(:inconsistency_type) { validator.name }
|
||||
let(:connection_class) { class_double(Class, name: 'ActiveRecord::ConnectionAdapters::PostgreSQLAdapter') }
|
||||
# rubocop:disable RSpec/VerifiedDoubleReference
|
||||
let(:connection) do
|
||||
instance_double('connection', class: connection_class, current_schema: 'public')
|
||||
end
|
||||
# rubocop:enable RSpec/VerifiedDoubleReference
|
||||
|
||||
let(:schema) { 'public' }
|
||||
let(:database) { Gitlab::Schema::Validation::Sources::Database.new(connection) }
|
||||
let(:structure_file) { Gitlab::Schema::Validation::Sources::StructureSql.new(structure_file_path, schema) }
|
||||
|
||||
before do
|
||||
allow(database).to receive(:sequence_exists?) do |sequence_name|
|
||||
database_sequences.include?(sequence_name)
|
||||
end
|
||||
end
|
||||
|
||||
subject(:result) { validator.new(structure_file, database).execute }
|
||||
|
||||
it 'returns sequence inconsistencies' do
|
||||
expect(result.map(&:object_name)).to match_array(expected_result)
|
||||
expect(result.map(&:type)).to all(eql inconsistency_type)
|
||||
end
|
||||
end
|
||||
|
|
@ -13,7 +13,7 @@ module API
|
|||
:unique_host,
|
||||
:root_directory,
|
||||
:primary_domain,
|
||||
:top_level_namespace_path
|
||||
:root_namespace_id
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -6,10 +6,10 @@ module Gitlab
|
|||
class OauthResourceOwnerRedirectResolver
|
||||
include ::Gitlab::Routing
|
||||
|
||||
attr_reader :top_level_namespace_path
|
||||
attr_reader :root_namespace_id
|
||||
|
||||
def initialize(top_level_namespace_path)
|
||||
@top_level_namespace_path = top_level_namespace_path
|
||||
def initialize(root_namespace_id)
|
||||
@root_namespace_id = root_namespace_id
|
||||
end
|
||||
|
||||
def resolve_redirect_url
|
||||
|
|
|
|||
|
|
@ -23984,6 +23984,66 @@ msgstr ""
|
|||
msgid "DuoEnterprise|You now have access to GitLab Duo Enterprise, which boosts your efficiency and effectiveness by reducing the time required to write and understand code."
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Add each exclusion on a separate line."
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Allows the specified file in the specified directory, even if excluded by previous rules"
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Cancel"
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Delete exclusion rule?"
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Do you want to delete this exclusion rule?"
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Excludes all .env files"
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Excludes all .key files in any subdirectory"
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Excludes entire secrets directory"
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Excludes the specified file"
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Failed to delete the exclusion rule. Try again."
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Files or directories"
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|GitLab Duo context exclusions"
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Manage Exclusions"
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Manage exclusions"
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|No exclusion rules defined. Add a rule to exclude files from GitLab Duo context."
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Pattern"
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Save exclusions"
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|Specify project files and directories that GitLab Duo will not access. Excluded content is never sent to AI models. %{linkStart}learn more%{linkEnd}."
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|The exclusion rule was deleted."
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoFeatures|View examples of exclusions."
|
||||
msgstr ""
|
||||
|
||||
msgid "DuoProDiscover|A single platform integrates the best AI model for each use case across the entire development workflow."
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@ -65,7 +65,7 @@
|
|||
"@gitlab/fonts": "^1.3.0",
|
||||
"@gitlab/query-language-rust": "0.13.1",
|
||||
"@gitlab/svgs": "3.138.0",
|
||||
"@gitlab/ui": "115.0.1",
|
||||
"@gitlab/ui": "115.4.0",
|
||||
"@gitlab/vue-router-vue3": "npm:vue-router@4.5.1",
|
||||
"@gitlab/vuex-vue3": "npm:vuex@4.1.0",
|
||||
"@gitlab/web-ide": "^0.0.1-dev-20250704091020",
|
||||
|
|
|
|||
|
|
@ -1,30 +1,41 @@
|
|||
ARG MISE_DATA_DIR=/home/gdk/.local/share/mise
|
||||
ARG GDK_DIR=/home/gdk/gitlab-development-kit
|
||||
|
||||
ARG GDK_SHA=ee1e28c9048d9adba0c575b78adf868eeb70e26f
|
||||
# Use tag prefix when running on 'stable' branch to make sure 'protected' image is used which is not deleted by registry cleanup
|
||||
ARG GDK_BASE_TAG_PREFIX
|
||||
|
||||
FROM registry.gitlab.com/gitlab-org/gitlab-development-kit/mise-bootstrapped-verify/main:${GDK_BASE_TAG_PREFIX}${GDK_SHA} AS base
|
||||
|
||||
ARG MISE_DATA_DIR
|
||||
ARG GDK_DIR
|
||||
ENV GITLAB_LICENSE_MODE=test \
|
||||
GDK_KILL_CONFIRM=true \
|
||||
TOOL_VERSION_MANAGER=mise
|
||||
MISE_DATA_DIR=${MISE_DATA_DIR} \
|
||||
MISE_TRUSTED_CONFIG_PATHS=${GDK_DIR}
|
||||
|
||||
# Disable idiomatic version files like .ruby-version by default and only rely on .tool-versions
|
||||
RUN mise settings add idiomatic_version_file_enable_tools "[]"
|
||||
|
||||
# Clone GDK at specific sha and bootstrap packages
|
||||
# Clone GDK at specific sha
|
||||
#
|
||||
ARG GDK_SHA
|
||||
RUN set -eux; \
|
||||
git clone --depth 1 https://gitlab.com/gitlab-org/gitlab-development-kit.git && cd gitlab-development-kit; \
|
||||
git fetch --depth 1 origin ${GDK_SHA} && git -c advice.detachedHead=false checkout ${GDK_SHA}; \
|
||||
mkdir gitlab \
|
||||
&& make bootstrap \
|
||||
&& sudo apt-get autoclean
|
||||
mkdir gitlab
|
||||
|
||||
WORKDIR /home/gdk/gitlab-development-kit
|
||||
WORKDIR ${GDK_DIR}
|
||||
|
||||
COPY --chown=gdk:gdk qa/gdk/gdk.yml ./
|
||||
|
||||
# Bootstrap gdk
|
||||
RUN set -eux; \
|
||||
make bootstrap \
|
||||
&& sudo apt-get autoclean \
|
||||
&& rm -rf ${MISE_DATA_DIR}/downloads \
|
||||
&& rm -rf ${MISE_DATA_DIR}/installs/ruby/*/lib/ruby/gems/*/cache
|
||||
|
||||
# Build gitlab-shell
|
||||
#
|
||||
FROM base AS gitlab-shell
|
||||
|
|
@ -32,8 +43,7 @@ FROM base AS gitlab-shell
|
|||
COPY --chown=gdk:gdk GITLAB_SHELL_VERSION ./gitlab/
|
||||
RUN make gitlab-shell-setup \
|
||||
&& cd gitlab-shell \
|
||||
&& go clean -cache -modcache -r \
|
||||
&& rm -rf /home/gdk/.local/share/mise/installs/ruby/*/lib/ruby/gems/*/cache
|
||||
&& go clean -cache -modcache -r
|
||||
|
||||
# Build gitlab-workhorse
|
||||
#
|
||||
|
|
@ -64,44 +74,62 @@ FROM base AS gitlab-gems
|
|||
COPY --chown=gdk:gdk Gemfile Gemfile.lock .tool-versions ./gitlab/
|
||||
COPY --chown=gdk:gdk vendor/gems/ ./gitlab/vendor/gems/
|
||||
COPY --chown=gdk:gdk gems/ ./gitlab/gems/
|
||||
RUN set -x \
|
||||
&& make gitlab-asdf-install \
|
||||
RUN set -eux; \
|
||||
make gitlab-asdf-install \
|
||||
&& make .gitlab-bundle \
|
||||
&& cd gitlab \
|
||||
&& rm -rf /home/gdk/.local/share/mise/installs/ruby/*/lib/ruby/gems/*/cache
|
||||
&& rm -rf ${MISE_DATA_DIR}/installs/ruby/*/lib/ruby/gems/*/cache
|
||||
|
||||
# Install gitlab npm dependencies
|
||||
#
|
||||
FROM base AS gitlab-node-modules
|
||||
|
||||
COPY --chown=gdk:gdk package.json yarn.lock ./gitlab/
|
||||
COPY --chown=gdk:gdk package.json yarn.lock .tool-versions ./gitlab/
|
||||
COPY --chown=gdk:gdk scripts/frontend/postinstall.js ./gitlab/scripts/frontend/postinstall.js
|
||||
COPY --chown=gdk:gdk scripts/frontend/preinstall.mjs ./gitlab/scripts/frontend/preinstall.mjs
|
||||
RUN make .gitlab-yarn && yarn cache clean
|
||||
|
||||
# Build gitlab-topology-service
|
||||
#
|
||||
FROM base AS gitlab-topology-service
|
||||
RUN make gitlab-topology-service-setup
|
||||
|
||||
# Build gitlab-http-router
|
||||
#
|
||||
FROM base AS gitlab-http-router
|
||||
RUN make gitlab-http-router-setup
|
||||
|
||||
# Build final image
|
||||
#
|
||||
FROM base AS gdk
|
||||
|
||||
ARG GDK_DIR
|
||||
|
||||
# Set global defaults so we can initialize empty git repo
|
||||
RUN git config --global init.defaultBranch master \
|
||||
&& git config --global user.email "gdk@example.com" \
|
||||
&& git config --global user.name "gdk"
|
||||
|
||||
# Copy all components from separate docker stages
|
||||
COPY --from=gitlab-shell --chown=gdk:gdk /home/gdk/gitlab-development-kit/gitlab-shell ./gitlab-shell/
|
||||
COPY --from=gitaly --chown=gdk:gdk /home/gdk/gitlab-development-kit/gitaly ./gitaly/
|
||||
COPY --from=workhorse --chown=gdk:gdk /home/gdk/gitlab-development-kit/gitlab/workhorse ./gitlab/workhorse/
|
||||
COPY --from=gitlab-gems --chown=gdk:gdk /home/gdk/.local/share/mise/installs/ruby /home/gdk/.local/share/mise/installs/ruby/
|
||||
COPY --from=gitlab-node-modules --chown=gdk:gdk /home/gdk/gitlab-development-kit/gitlab/node_modules ./gitlab/node_modules/
|
||||
COPY --from=gitlab-shell --chown=gdk:gdk ${GDK_DIR}/gitlab-shell ./gitlab-shell/
|
||||
COPY --from=gitlab-http-router --chown=gdk:gdk ${GDK_DIR}/gitlab-http-router ./gitlab-http-router
|
||||
COPY --from=gitaly --chown=gdk:gdk ${GDK_DIR}/gitaly ./gitaly/
|
||||
COPY --from=workhorse --chown=gdk:gdk ${GDK_DIR}/gitlab/workhorse ./gitlab/workhorse/
|
||||
COPY --from=gitlab-node-modules --chown=gdk:gdk ${GDK_DIR}/gitlab/node_modules ./gitlab/node_modules/
|
||||
# TODO: Check if skipping setup can be added same way as gitaly and other services, otherwise 'make all' rebuilds topology-service on all rails code changes
|
||||
# https://gitlab.com/gitlab-org/gitlab-development-kit/-/issues/2802
|
||||
COPY --from=gitlab-topology-service --chown=gdk:gdk ${GDK_DIR}/gitlab-topology-service ./gitlab-topology-service
|
||||
# Copy all mise tools and gems for main rails app
|
||||
COPY --from=gitlab-gems --chown=gdk:gdk ${MISE_DATA_DIR}/installs/ ${MISE_DATA_DIR}/installs/
|
||||
COPY --from=gitlab-gems --chown=gdk:gdk ${MISE_DATA_DIR}/shims/ ${MISE_DATA_DIR}/shims/
|
||||
COPY --from=gitlab-gems --chown=gdk:gdk ${MISE_DATA_DIR}/plugins/ ${MISE_DATA_DIR}/plugins/
|
||||
|
||||
# Copy code
|
||||
COPY --chown=gdk:gdk ./ ./gitlab/
|
||||
COPY --chown=gdk:gdk qa/gdk/entrypoint ../
|
||||
|
||||
# Create custom hook for E2E tests
|
||||
RUN mkdir -p /home/gdk/gitlab-development-kit/gitaly-custom-hooks/pre-receive.d
|
||||
COPY --chown=gdk:gdk --chmod=700 qa/gdk/pre-receive /home/gdk/gitlab-development-kit/gitaly-custom-hooks/pre-receive.d
|
||||
RUN mkdir -p ${GDK_DIR}/gitaly-custom-hooks/pre-receive.d
|
||||
COPY --chown=gdk:gdk --chmod=700 qa/gdk/pre-receive ${GDK_DIR}/gitaly-custom-hooks/pre-receive.d
|
||||
|
||||
# Set up GDK
|
||||
RUN set -eux; \
|
||||
|
|
@ -112,8 +140,6 @@ RUN set -eux; \
|
|||
gdk config set gitaly.skip_setup true \
|
||||
&& gdk config set workhorse.skip_setup true \
|
||||
&& gdk config set gitlab_shell.skip_setup true \
|
||||
&& cp .ruby-version ./gitlab/ \
|
||||
&& cp .tool-versions ./gitlab/ \
|
||||
&& make redis/redis.conf all \
|
||||
&& gdk kill \
|
||||
&& rm -rf ./gitlab/.git
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
---
|
||||
asdf:
|
||||
opt_out: true
|
||||
mise:
|
||||
enabled: true
|
||||
hostname: gdk.test
|
||||
sshd:
|
||||
additional_config: 'AcceptEnv GIT_PROTOCOL'
|
||||
|
|
@ -29,7 +31,6 @@ gitlab:
|
|||
- ["*", "default"]
|
||||
sidekiq_cron:
|
||||
enabled: true
|
||||
|
||||
rails:
|
||||
bootsnap: false
|
||||
hostname: gdk.test
|
||||
|
|
@ -49,5 +50,9 @@ tracer:
|
|||
enabled: false
|
||||
gitlab_http_router:
|
||||
enabled: true
|
||||
mise:
|
||||
auto_update: false
|
||||
gitlab_topology_service:
|
||||
enabled: true
|
||||
auto_update: false
|
||||
workhorse:
|
||||
skip_compile: false
|
||||
|
|
|
|||
|
|
@ -70,9 +70,10 @@
|
|||
"null"
|
||||
]
|
||||
},
|
||||
"top_level_namespace_path": {
|
||||
"root_namespace_id": {
|
||||
"type": [
|
||||
"string"
|
||||
"integer",
|
||||
"null"
|
||||
]
|
||||
}
|
||||
},
|
||||
|
|
|
|||
|
|
@ -0,0 +1,235 @@
|
|||
import { GlTable } from '@gitlab/ui';
|
||||
import { mountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import CrudComponent from '~/vue_shared/components/crud_component.vue';
|
||||
import ExclusionSettings from '~/pages/projects/shared/permissions/components/exclusion_settings.vue';
|
||||
import ManageExclusionsDrawer from '~/pages/projects/shared/permissions/components/manage_exclusions_drawer.vue';
|
||||
import { createAlert } from '~/alert';
|
||||
|
||||
jest.mock('~/alert');
|
||||
|
||||
const defaultProps = {
|
||||
exclusionRules: ['*.log', 'node_modules/', 'secrets.json'],
|
||||
};
|
||||
|
||||
describe('ExclusionSettings', () => {
|
||||
let wrapper;
|
||||
|
||||
const mountComponent = (props = {}, mountFn = mountExtended) => {
|
||||
const propsData = {
|
||||
...defaultProps,
|
||||
...props,
|
||||
};
|
||||
|
||||
return mountFn(ExclusionSettings, {
|
||||
propsData,
|
||||
});
|
||||
};
|
||||
|
||||
const findCrudComponent = () =>
|
||||
wrapper.findByTestId('exclusion-settings-crud').findComponent(CrudComponent);
|
||||
const findTable = () => wrapper.findByTestId('exclusion-rules-table').findComponent(GlTable);
|
||||
const findDeleteButtons = () => wrapper.findAllByTestId('delete-exclusion-rule');
|
||||
const findManageExclusionsButton = () => wrapper.findByTestId('manage-exclusions-button');
|
||||
const findManageExclusionsDrawer = () => wrapper.findComponent(ManageExclusionsDrawer);
|
||||
|
||||
beforeEach(() => {
|
||||
wrapper = mountComponent();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('rendering', () => {
|
||||
it('renders the CRUD component with correct props', () => {
|
||||
const crudComponent = findCrudComponent();
|
||||
|
||||
expect(crudComponent.exists()).toBe(true);
|
||||
expect(crudComponent.props()).toMatchObject({
|
||||
title: wrapper.vm.$options.i18n.title,
|
||||
count: 3,
|
||||
icon: 'remove',
|
||||
});
|
||||
});
|
||||
|
||||
it('renders table items correctly', () => {
|
||||
const expectedItems = [
|
||||
{ id: 0, pattern: '*.log', isDeleting: false },
|
||||
{ id: 1, pattern: 'node_modules/', isDeleting: false },
|
||||
{ id: 2, pattern: 'secrets.json', isDeleting: false },
|
||||
];
|
||||
|
||||
expect(wrapper.vm.tableItems).toEqual(expectedItems);
|
||||
});
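// Illustrative sketch (assumption, not code from this diff): the `tableItems`
// assertion above suggests the component maps each rule string to a row object,
// roughly like this helper; the name is hypothetical.
function buildTableItems(rules) {
  // the array index doubles as the row id, matching the ids asserted above
  return rules.map((pattern, id) => ({ id, pattern, isDeleting: false }));
}
// buildTableItems(['*.log']) => [{ id: 0, pattern: '*.log', isDeleting: false }]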
|
||||
|
||||
it('renders delete buttons for each rule', () => {
|
||||
const deleteButtons = findDeleteButtons();
|
||||
|
||||
expect(deleteButtons).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('empty state', () => {
|
||||
beforeEach(() => {
|
||||
wrapper = mountComponent({ exclusionRules: [] });
|
||||
});
|
||||
|
||||
it('shows empty state message when no rules exist', () => {
|
||||
const table = findTable();
|
||||
|
||||
// Check that the empty-text prop is set to the component's i18n message
|
||||
expect(table.text()).toContain(wrapper.vm.$options.i18n.emptyStateMessage);
|
||||
expect(findCrudComponent().props('count')).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleting rules', () => {
|
||||
it('opens delete modal when delete button is clicked', async () => {
|
||||
const deleteButton = findDeleteButtons().at(0);
|
||||
|
||||
// Spy on the confirmDeleteRule method
|
||||
const confirmDeleteRuleSpy = jest.spyOn(wrapper.vm, 'confirmDeleteRule');
|
||||
|
||||
await deleteButton.trigger('click');
|
||||
|
||||
// Check that confirmDeleteRule was called with the correct item
|
||||
expect(confirmDeleteRuleSpy).toHaveBeenCalledWith({
|
||||
id: 0,
|
||||
pattern: '*.log',
|
||||
isDeleting: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('deletes a rule when confirmed in modal', async () => {
|
||||
// Simulate the confirmDeleteRule method being called
|
||||
const ruleToDelete = {
|
||||
id: 1,
|
||||
pattern: 'node_modules/',
|
||||
isDeleting: false,
|
||||
};
|
||||
wrapper.vm.ruleToDelete = ruleToDelete;
|
||||
|
||||
// Confirm deletion by calling the deleteRule method directly
|
||||
await wrapper.vm.deleteRule();
|
||||
|
||||
expect(wrapper.emitted('update')).toHaveLength(1);
|
||||
expect(wrapper.emitted('update')[0][0]).toEqual([
|
||||
'*.log',
|
||||
'secrets.json', // node_modules/ removed
|
||||
]);
|
||||
});
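// Illustrative sketch (assumption, not code from this diff): the delete test expects
// the selected rule to be removed and the remaining rules emitted as the 'update'
// payload; a minimal version of that filtering, with a hypothetical name:
function rulesAfterDelete(rules, ruleToDelete) {
  // keep every pattern except the one whose row id matches the rule being deleted
  return rules.filter((pattern, index) => index !== ruleToDelete.id);
}
// rulesAfterDelete(['*.log', 'node_modules/', 'secrets.json'], { id: 1 })
// => ['*.log', 'secrets.json'], the payload the emitted 'update' check expects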
|
||||
|
||||
it('shows success alert when rule is deleted', async () => {
|
||||
// Simulate the confirmDeleteRule method being called
|
||||
const ruleToDelete = {
|
||||
id: 0,
|
||||
pattern: '*.log',
|
||||
isDeleting: false,
|
||||
};
|
||||
wrapper.vm.ruleToDelete = ruleToDelete;
|
||||
|
||||
// Call deleteRule method directly
|
||||
await wrapper.vm.deleteRule();
|
||||
|
||||
expect(createAlert).toHaveBeenCalledWith({
|
||||
message: wrapper.vm.$options.i18n.ruleDeletedMessage,
|
||||
variant: 'info',
|
||||
});
|
||||
});
|
||||
|
||||
it('shows correct rule in delete modal', async () => {
|
||||
const deleteButton = findDeleteButtons().at(1);
|
||||
|
||||
// Spy on the confirmDeleteRule method
|
||||
const confirmDeleteRuleSpy = jest.spyOn(wrapper.vm, 'confirmDeleteRule');
|
||||
|
||||
await deleteButton.trigger('click');
|
||||
|
||||
expect(confirmDeleteRuleSpy).toHaveBeenCalledWith({
|
||||
id: 1,
|
||||
pattern: 'node_modules/',
|
||||
isDeleting: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('props watching', () => {
|
||||
it('updates internal rules when exclusionRules prop changes', async () => {
|
||||
const newRules = ['*.backup', 'cache/'];
|
||||
await wrapper.setProps({ exclusionRules: newRules });
|
||||
|
||||
expect(wrapper.vm.rules).toEqual(newRules);
|
||||
});
|
||||
});
|
||||
|
||||
describe('manage exclusions drawer', () => {
|
||||
it('renders the manage exclusions button', () => {
|
||||
const button = findManageExclusionsButton();
|
||||
|
||||
expect(button.exists()).toBe(true);
|
||||
expect(button.text()).toBe(wrapper.vm.$options.i18n.manageExclusions);
|
||||
});
|
||||
|
||||
it('renders the manage exclusions drawer', () => {
|
||||
const drawer = findManageExclusionsDrawer();
|
||||
|
||||
expect(drawer.exists()).toBe(true);
|
||||
expect(drawer.props('open')).toBe(false);
|
||||
expect(drawer.props('exclusionRules')).toEqual(defaultProps.exclusionRules);
|
||||
});
|
||||
|
||||
it('opens the drawer when manage exclusions button is clicked', async () => {
|
||||
const button = findManageExclusionsButton();
|
||||
|
||||
await button.trigger('click');
|
||||
|
||||
expect(wrapper.vm.isManageDrawerOpen).toBe(true);
|
||||
expect(findManageExclusionsDrawer().props('open')).toBe(true);
|
||||
});
|
||||
|
||||
it('closes the drawer when close event is emitted', async () => {
|
||||
// Open drawer first
|
||||
await findManageExclusionsButton().trigger('click');
|
||||
expect(wrapper.vm.isManageDrawerOpen).toBe(true);
|
||||
|
||||
// Close drawer
|
||||
const drawer = findManageExclusionsDrawer();
|
||||
await drawer.vm.$emit('close');
|
||||
|
||||
expect(wrapper.vm.isManageDrawerOpen).toBe(false);
|
||||
});
|
||||
|
||||
it('saves exclusion rules when save event is emitted from drawer', async () => {
|
||||
const newRules = ['*.tmp', 'build/', 'dist/'];
|
||||
const drawer = findManageExclusionsDrawer();
|
||||
|
||||
await drawer.vm.$emit('save', newRules);
|
||||
|
||||
expect(wrapper.vm.rules).toEqual(newRules);
|
||||
expect(wrapper.emitted('update')).toHaveLength(1);
|
||||
expect(wrapper.emitted('update')[0][0]).toEqual(newRules);
|
||||
});
|
||||
|
||||
it('closes drawer and shows success message when save is successful', async () => {
|
||||
const newRules = ['*.tmp', 'build/', 'dist/'];
|
||||
const drawer = findManageExclusionsDrawer();
|
||||
|
||||
// Open drawer first
|
||||
await findManageExclusionsButton().trigger('click');
|
||||
expect(wrapper.vm.isManageDrawerOpen).toBe(true);
|
||||
|
||||
// Save rules
|
||||
await drawer.vm.$emit('save', newRules);
|
||||
|
||||
expect(wrapper.vm.isManageDrawerOpen).toBe(false);
|
||||
});
|
||||
|
||||
it('passes updated rules to drawer when internal rules change', async () => {
|
||||
const newRules = ['*.backup', 'cache/'];
|
||||
await wrapper.setProps({ exclusionRules: newRules });
|
||||
|
||||
const drawer = findManageExclusionsDrawer();
|
||||
expect(drawer.props('exclusionRules')).toEqual(newRules);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -1,5 +1,7 @@
|
|||
import { nextTick } from 'vue';
|
||||
import { mountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import GitlabDuoSettings from '~/pages/projects/shared/permissions/components/gitlab_duo_settings.vue';
|
||||
import ExclusionSettings from '~/pages/projects/shared/permissions/components/exclusion_settings.vue';
|
||||
import { parseBoolean } from '~/lib/utils/common_utils';
|
||||
|
||||
const defaultProps = {
|
||||
|
|
@ -10,12 +12,15 @@ const defaultProps = {
|
|||
amazonQAutoReviewEnabled: false,
|
||||
duoFeaturesLocked: false,
|
||||
licensedAiFeaturesAvailable: true,
|
||||
duoContextExclusionSettings: {
|
||||
exclusion_rules: ['*.log', 'node_modules/'],
|
||||
},
|
||||
};
|
||||
|
||||
describe('GitlabDuoSettings', () => {
|
||||
let wrapper;
|
||||
|
||||
const mountComponent = (props = {}, mountFn = mountExtended) => {
|
||||
const mountComponent = (props = {}, provide = {}, mountFn = mountExtended) => {
|
||||
const propsData = {
|
||||
...defaultProps,
|
||||
...props,
|
||||
|
|
@ -23,6 +28,12 @@ describe('GitlabDuoSettings', () => {
|
|||
|
||||
return mountFn(GitlabDuoSettings, {
|
||||
propsData,
|
||||
provide: {
|
||||
glFeatures: {
|
||||
useDuoContextExclusion: true,
|
||||
...provide,
|
||||
},
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
|
|
@ -30,11 +41,23 @@ describe('GitlabDuoSettings', () => {
|
|||
const findSaveButton = () => wrapper.findByTestId('gitlab-duo-save-button');
|
||||
const findDuoSettings = () => wrapper.findByTestId('duo-settings');
|
||||
const findDuoCascadingLockIcon = () => wrapper.findByTestId('duo-cascading-lock-icon');
|
||||
const findExclusionSettings = () => wrapper.findComponent(ExclusionSettings);
|
||||
const findExclusionRulesHiddenInputs = () =>
|
||||
wrapper.findAll(
|
||||
'input[name="project[project_setting_attributes][duo_context_exclusion_settings][exclusion_rules][]"]',
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
wrapper = mountComponent();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (wrapper) {
|
||||
wrapper.destroy();
|
||||
wrapper = null;
|
||||
}
|
||||
});
|
||||
|
||||
it('renders the component correctly', () => {
|
||||
expect(findCard().exists()).toBe(true);
|
||||
expect(findSaveButton().exists()).toBe(true);
|
||||
|
|
@ -139,7 +162,7 @@ describe('GitlabDuoSettings', () => {
|
|||
});
|
||||
|
||||
describe('when areDuoSettingsLocked is true', () => {
|
||||
beforeEach(() => {
|
||||
it('shows CascadingLockIcon when cascadingSettingsData is provided', () => {
|
||||
wrapper = mountComponent({
|
||||
cascadingSettingsData: {
|
||||
lockedByAncestor: false,
|
||||
|
|
@ -148,13 +171,18 @@ describe('GitlabDuoSettings', () => {
|
|||
},
|
||||
duoFeaturesLocked: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('shows CascadingLockIcon when cascadingSettingsData is provided', () => {
|
||||
expect(findDuoCascadingLockIcon().exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('passes correct props to CascadingLockIcon', () => {
|
||||
wrapper = mountComponent({
|
||||
cascadingSettingsData: {
|
||||
lockedByAncestor: false,
|
||||
lockedByApplicationSetting: false,
|
||||
ancestorNamespace: null,
|
||||
},
|
||||
duoFeaturesLocked: true,
|
||||
});
|
||||
expect(findDuoCascadingLockIcon().props()).toMatchObject({
|
||||
isLockedByGroupAncestor: false,
|
||||
isLockedByApplicationSettings: false,
|
||||
|
|
@ -194,4 +222,149 @@ describe('GitlabDuoSettings', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('ExclusionSettings', () => {
|
||||
it('renders ExclusionSettings component when duo features are available', () => {
|
||||
wrapper = mountComponent(
|
||||
{ licensedAiFeaturesAvailable: true },
|
||||
{ useDuoContextExclusion: true },
|
||||
);
|
||||
|
||||
expect(findExclusionSettings().exists()).toBe(true);
|
||||
expect(findExclusionSettings().props('exclusionRules')).toEqual(['*.log', 'node_modules/']);
|
||||
});
|
||||
|
||||
it('does not render ExclusionSettings when duo features are not available', () => {
|
||||
wrapper = mountComponent(
|
||||
{ licensedAiFeaturesAvailable: false },
|
||||
{ useDuoContextExclusion: true },
|
||||
);
|
||||
|
||||
expect(findExclusionSettings().exists()).toBe(false);
|
||||
});
|
||||
|
||||
it('does not render ExclusionSettings when feature flag is disabled', () => {
|
||||
wrapper = mountComponent(
|
||||
{ licensedAiFeaturesAvailable: true },
|
||||
{ useDuoContextExclusion: false },
|
||||
);
|
||||
|
||||
expect(findExclusionSettings().exists()).toBe(false);
|
||||
});
|
||||
|
||||
it('updates exclusion rules when ExclusionSettings emits update', async () => {
|
||||
wrapper = mountComponent(
|
||||
{ licensedAiFeaturesAvailable: true },
|
||||
{ useDuoContextExclusion: true },
|
||||
);
|
||||
const newRules = ['*.log', 'node_modules/', '*.tmp'];
|
||||
|
||||
const exclusionSettings = findExclusionSettings();
|
||||
expect(exclusionSettings.exists()).toBe(true);
|
||||
|
||||
await exclusionSettings.vm.$emit('update', newRules);
|
||||
|
||||
expect(wrapper.vm.exclusionRules).toEqual(newRules);
|
||||
});
|
||||
|
||||
it('renders hidden inputs for exclusion rules form submission', () => {
|
||||
wrapper = mountComponent(
|
||||
{ licensedAiFeaturesAvailable: true },
|
||||
{ useDuoContextExclusion: true },
|
||||
);
|
||||
const hiddenInputs = findExclusionRulesHiddenInputs();
|
||||
|
||||
expect(hiddenInputs).toHaveLength(2);
|
||||
expect(hiddenInputs.at(0).attributes('value')).toBe('*.log');
|
||||
expect(hiddenInputs.at(1).attributes('value')).toBe('node_modules/');
|
||||
});
|
||||
|
||||
it('updates hidden inputs when exclusion rules change', async () => {
|
||||
wrapper = mountComponent(
|
||||
{ licensedAiFeaturesAvailable: true },
|
||||
{ useDuoContextExclusion: true },
|
||||
);
|
||||
const newRules = ['*.tmp', 'cache/'];
|
||||
|
||||
const exclusionSettings = findExclusionSettings();
|
||||
expect(exclusionSettings.exists()).toBe(true);
|
||||
|
||||
await exclusionSettings.vm.$emit('update', newRules);
|
||||
|
||||
const hiddenInputs = findExclusionRulesHiddenInputs();
|
||||
expect(hiddenInputs).toHaveLength(2);
|
||||
expect(hiddenInputs.at(0).attributes('value')).toBe('*.tmp');
|
||||
expect(hiddenInputs.at(1).attributes('value')).toBe('cache/');
|
||||
|
||||
const nullHiddenInput = wrapper.findByTestId('exclusion-rule-input-null');
|
||||
expect(nullHiddenInput.exists()).toBe(false);
|
||||
});
|
||||
|
||||
it('handles empty exclusion rules', () => {
|
||||
wrapper = mountComponent(
|
||||
{
|
||||
licensedAiFeaturesAvailable: true,
|
||||
duoContextExclusionSettings: { exclusion_rules: [] },
|
||||
},
|
||||
{ useDuoContextExclusion: true },
|
||||
);
|
||||
|
||||
expect(findExclusionSettings().exists()).toBe(true);
|
||||
expect(findExclusionSettings().props('exclusionRules')).toEqual([]);
|
||||
expect(findExclusionRulesHiddenInputs()).toHaveLength(0);
|
||||
|
||||
// Check that a null hidden input is created for empty exclusion rules
|
||||
const nullHiddenInput = wrapper.findByTestId('exclusion-rule-input-null');
|
||||
expect(nullHiddenInput.exists()).toBe(true);
|
||||
});
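// Illustrative sketch (assumption, not code from this diff): the hidden-input
// assertions imply one input per rule under the Rails array parameter, plus a single
// placeholder input (test id 'exclusion-rule-input-null') when no rules exist so the
// setting can still be cleared on submit. The placeholder's exact name is not visible
// in the diff, so the shape below is a guess.
function hiddenInputsFor(rules) {
  const arrayName =
    'project[project_setting_attributes][duo_context_exclusion_settings][exclusion_rules][]';
  if (rules.length === 0) {
    // assumed: the null input drops the trailing [] so it is not matched by the array selector
    return [{ testId: 'exclusion-rule-input-null', name: arrayName.replace(/\[\]$/, ''), value: '' }];
  }
  return rules.map((value) => ({ name: arrayName, value }));
}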
|
||||
|
||||
it('handles missing duo context exclusion settings', () => {
|
||||
wrapper = mountComponent(
|
||||
{
|
||||
licensedAiFeaturesAvailable: true,
|
||||
duoContextExclusionSettings: {},
|
||||
},
|
||||
{ useDuoContextExclusion: true },
|
||||
);
|
||||
|
||||
expect(findExclusionSettings().exists()).toBe(true);
|
||||
expect(findExclusionSettings().props('exclusionRules')).toEqual([]);
|
||||
});
|
||||
|
||||
it('submits form after DOM is updated when exclusion rules are updated', async () => {
|
||||
// Create a mock form element
|
||||
const mockForm = document.createElement('form');
|
||||
const mockSubmit = jest.fn();
|
||||
mockForm.submit = mockSubmit;
|
||||
|
||||
// Mock the closest method to return our mock form
|
||||
const mockClosest = jest.fn().mockReturnValue(mockForm);
|
||||
|
||||
wrapper = mountComponent(
|
||||
{ licensedAiFeaturesAvailable: true },
|
||||
{ useDuoContextExclusion: true },
|
||||
);
|
||||
|
||||
// Mock the $el.closest method
|
||||
wrapper.vm.$el.closest = mockClosest;
|
||||
|
||||
const newRules = ['*.log', 'node_modules/', '*.tmp'];
|
||||
const exclusionSettings = findExclusionSettings();
|
||||
|
||||
// Emit the update event
|
||||
await exclusionSettings.vm.$emit('update', newRules);
|
||||
|
||||
// Wait for nextTick to ensure DOM updates are processed
|
||||
await nextTick();
|
||||
|
||||
// Verify that closest was called with 'form'
|
||||
expect(mockClosest).toHaveBeenCalledWith('form');
|
||||
|
||||
// Verify that form.submit() was called
|
||||
expect(mockSubmit).toHaveBeenCalled();
|
||||
|
||||
// Verify that exclusion rules were updated
|
||||
expect(wrapper.vm.exclusionRules).toEqual(newRules);
|
||||
});
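// Illustrative sketch (assumption, not code from this diff): the spies on
// $el.closest('form') and form.submit() suggest an update handler of roughly this
// shape; the function name and free-standing form are hypothetical.
import { nextTick } from 'vue';

async function submitExclusionRulesUpdate(component, newRules) {
  component.exclusionRules = newRules;        // re-renders the hidden inputs
  await nextTick();                           // wait for the DOM to reflect the new rules
  const form = component.$el.closest('form'); // the surrounding project settings form
  if (form) form.submit();                    // then persist via a plain form submission
}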
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -0,0 +1,237 @@
|
|||
import { GlDrawer, GlAccordion, GlAccordionItem } from '@gitlab/ui';
|
||||
import { mountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import ManageExclusionsDrawer from '~/pages/projects/shared/permissions/components/manage_exclusions_drawer.vue';
|
||||
|
||||
const defaultProps = {
|
||||
open: true,
|
||||
exclusionRules: ['*.log', 'node_modules/', 'secrets.json'],
|
||||
};
|
||||
|
||||
describe('ManageExclusionsDrawer', () => {
|
||||
let wrapper;
|
||||
|
||||
const mountComponent = (props = {}, mountFn = mountExtended) => {
|
||||
const propsData = {
|
||||
...defaultProps,
|
||||
...props,
|
||||
};
|
||||
|
||||
return mountFn(ManageExclusionsDrawer, {
|
||||
propsData,
|
||||
});
|
||||
};
|
||||
|
||||
const findDrawer = () => wrapper.findComponent(GlDrawer);
|
||||
const findTextarea = () => wrapper.findByTestId('exclusion-rules-textarea');
|
||||
const findSaveButton = () => wrapper.findByTestId('save-exclusions-button');
|
||||
const findCancelButton = () => wrapper.findByTestId('cancel-button');
|
||||
const findAccordion = () => wrapper.findComponent(GlAccordion);
|
||||
const findAccordionItem = () => wrapper.findComponent(GlAccordionItem);
|
||||
|
||||
beforeEach(() => {
|
||||
wrapper = mountComponent();
|
||||
});
|
||||
|
||||
describe('rendering', () => {
|
||||
it('renders the drawer with correct props', () => {
|
||||
const drawer = findDrawer();
|
||||
|
||||
expect(drawer.exists()).toBe(true);
|
||||
expect(drawer.props('open')).toBe(true);
|
||||
});
|
||||
|
||||
it('renders the correct title', () => {
|
||||
expect(wrapper.text()).toContain('Manage Exclusions');
|
||||
});
|
||||
|
||||
it('renders the textarea with correct label', () => {
|
||||
const textarea = findTextarea();
|
||||
|
||||
expect(textarea.exists()).toBe(true);
|
||||
expect(wrapper.text()).toContain('Files or directories');
|
||||
});
|
||||
|
||||
it('renders save and cancel buttons', () => {
|
||||
expect(findSaveButton().exists()).toBe(true);
|
||||
expect(findCancelButton().exists()).toBe(true);
|
||||
expect(findSaveButton().text()).toBe('Save exclusions');
|
||||
expect(findCancelButton().text()).toBe('Cancel');
|
||||
});
|
||||
|
||||
it('renders the examples accordion', () => {
|
||||
const accordion = findAccordion();
|
||||
const accordionItem = findAccordionItem();
|
||||
|
||||
expect(accordion.exists()).toBe(true);
|
||||
expect(accordionItem.exists()).toBe(true);
|
||||
expect(accordionItem.props('title')).toBe('View examples of exclusions.');
|
||||
});
|
||||
});
|
||||
|
||||
describe('data initialization', () => {
|
||||
it('initializes local rules from props', () => {
|
||||
expect(wrapper.vm.localRules).toBe('*.log\nnode_modules/\nsecrets.json');
|
||||
});
|
||||
|
||||
it('handles empty exclusion rules', () => {
|
||||
wrapper = mountComponent({ exclusionRules: [] });
|
||||
expect(wrapper.vm.localRules).toBe('');
|
||||
});
|
||||
|
||||
it('updates local rules when exclusion rules prop changes', async () => {
|
||||
const newRules = ['*.tmp', 'build/'];
|
||||
await wrapper.setProps({ exclusionRules: newRules });
|
||||
|
||||
expect(wrapper.vm.localRules).toBe('*.tmp\nbuild/');
|
||||
});
|
||||
|
||||
it('resets local rules when drawer opens', async () => {
|
||||
// Change local rules
|
||||
wrapper.vm.localRules = 'modified content';
|
||||
|
||||
// Close and open drawer
|
||||
await wrapper.setProps({ open: false });
|
||||
await wrapper.setProps({ open: true });
|
||||
|
||||
expect(wrapper.vm.localRules).toBe('*.log\nnode_modules/\nsecrets.json');
|
||||
});
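// Illustrative sketch (assumption, not code from this diff): the initialization and
// reset tests imply the drawer keeps a newline-joined copy of the rules and rebuilds
// it whenever it (re)opens; the helper name is hypothetical.
function toLocalRules(exclusionRules) {
  return exclusionRules.join('\n');
}
// toLocalRules(['*.log', 'node_modules/', 'secrets.json']) === '*.log\nnode_modules/\nsecrets.json'
// A watcher on the `open` prop would reassign localRules from this whenever open becomes true.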
|
||||
});
|
||||
|
||||
describe('user interactions', () => {
|
||||
it('updates local rules when textarea content changes', async () => {
|
||||
const textarea = findTextarea();
|
||||
const newContent = '*.tmp\nbuild/\ndist/';
|
||||
|
||||
await textarea.setValue(newContent);
|
||||
|
||||
expect(wrapper.vm.localRules).toBe(newContent);
|
||||
});
|
||||
|
||||
it('emits save event with parsed rules when save button is clicked', async () => {
|
||||
const textarea = findTextarea();
|
||||
await textarea.setValue('*.tmp\n\nbuild/\n \ndist/\n');
|
||||
|
||||
await findSaveButton().trigger('click');
|
||||
|
||||
expect(wrapper.emitted('save')).toHaveLength(1);
|
||||
expect(wrapper.emitted('save')[0][0]).toEqual(['*.tmp', 'build/', 'dist/']);
|
||||
});
|
||||
|
||||
it('filters out empty lines and trims whitespace when saving', async () => {
|
||||
const textarea = findTextarea();
|
||||
await textarea.setValue(' *.tmp \n\n build/ \n\n \n dist/ \n');
|
||||
|
||||
await findSaveButton().trigger('click');
|
||||
|
||||
expect(wrapper.emitted('save')[0][0]).toEqual(['*.tmp', 'build/', 'dist/']);
|
||||
});
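// Illustrative sketch (assumption, not code from this diff): the save tests require
// whitespace to be trimmed and blank lines dropped before the 'save' event is emitted;
// the parsing they imply looks like this, with a hypothetical name:
function parseExclusionRules(localRules) {
  return localRules
    .split('\n')
    .map((line) => line.trim())
    .filter((line) => line.length > 0);
}
// parseExclusionRules(' *.tmp \n\n build/ \n\n  \n dist/ \n') => ['*.tmp', 'build/', 'dist/']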
|
||||
|
||||
it('emits close event when cancel button is clicked', async () => {
|
||||
await findCancelButton().trigger('click');
|
||||
|
||||
expect(wrapper.emitted('close')).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('resets local rules to original when cancel is clicked', async () => {
|
||||
const textarea = findTextarea();
|
||||
await textarea.setValue('modified content');
|
||||
|
||||
await findCancelButton().trigger('click');
|
||||
|
||||
expect(wrapper.vm.localRules).toBe('*.log\nnode_modules/\nsecrets.json');
|
||||
});
|
||||
|
||||
it('emits close event when drawer close event is triggered', async () => {
|
||||
const drawer = findDrawer();
|
||||
|
||||
await drawer.vm.$emit('close');
|
||||
|
||||
expect(wrapper.emitted('close')).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('examples accordion', () => {
|
||||
it('shows example patterns when accordion is expanded', () => {
|
||||
const accordionContent = wrapper.text();
|
||||
|
||||
// Check for example descriptions
|
||||
expect(accordionContent).toContain('Excludes all .env files');
|
||||
expect(accordionContent).toContain('Excludes entire secrets directory');
|
||||
expect(accordionContent).toContain('Excludes all .key files in any subdirectory');
|
||||
expect(accordionContent).toContain('Excludes the specified file');
|
||||
expect(accordionContent).toContain(
|
||||
'Allows the specified file in the specified directory, even if excluded by previous rules',
|
||||
);
|
||||
});
|
||||
|
||||
it('includes specific example patterns', () => {
|
||||
const accordionContent = wrapper.text();
|
||||
|
||||
// Check for specific examples
|
||||
expect(accordionContent).toContain('*.env');
|
||||
expect(accordionContent).toContain('secrets/');
|
||||
expect(accordionContent).toContain('**/*.key');
|
||||
expect(accordionContent).toContain('config/production.yml');
|
||||
expect(accordionContent).toContain('!secrets/file.json');
|
||||
});
|
||||
});
|
||||
|
||||
describe('drawer visibility', () => {
|
||||
it('shows drawer when open prop is true', () => {
|
||||
wrapper = mountComponent({ open: true });
|
||||
|
||||
expect(findDrawer().props('open')).toBe(true);
|
||||
});
|
||||
|
||||
it('hides drawer when open prop is false', () => {
|
||||
wrapper = mountComponent({ open: false });
|
||||
|
||||
expect(findDrawer().props('open')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('handles single rule correctly', () => {
|
||||
wrapper = mountComponent({ exclusionRules: ['single-rule.txt'] });
|
||||
|
||||
expect(wrapper.vm.localRules).toBe('single-rule.txt');
|
||||
});
|
||||
|
||||
it('handles rules with special characters', () => {
|
||||
const specialRules = ['*.log', 'path/with spaces/', 'file-with-dashes.txt'];
|
||||
wrapper = mountComponent({ exclusionRules: specialRules });
|
||||
|
||||
expect(wrapper.vm.localRules).toBe('*.log\npath/with spaces/\nfile-with-dashes.txt');
|
||||
});
|
||||
|
||||
it('saves empty rules array when textarea is empty', async () => {
|
||||
const textarea = findTextarea();
|
||||
await textarea.setValue('');
|
||||
|
||||
await findSaveButton().trigger('click');
|
||||
|
||||
expect(wrapper.emitted('save')[0][0]).toEqual([]);
|
||||
});
|
||||
|
||||
it('saves empty rules array when textarea contains only whitespace', async () => {
|
||||
const textarea = findTextarea();
|
||||
await textarea.setValue(' \n\n \n ');
|
||||
|
||||
await findSaveButton().trigger('click');
|
||||
|
||||
expect(wrapper.emitted('save')[0][0]).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('accessibility', () => {
|
||||
it('has proper form labels', () => {
|
||||
const label = wrapper.find('label[for="exclusion-rules-textarea"]');
|
||||
expect(label.exists()).toBe(true);
|
||||
expect(label.text()).toContain('Files or directories');
|
||||
});
|
||||
|
||||
it('provides help text for the textarea', () => {
|
||||
expect(wrapper.text()).toContain('Add each exclusion on a separate line.');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -40,6 +40,7 @@ describe('ExtendedDashboardPanel', () => {
|
|||
it('sets the default props for the dashboard panel', () => {
|
||||
expect(findDashboardPanel().props()).toStrictEqual({
|
||||
containerClass: 'grid-stack-item-content',
|
||||
bodyContentClass: '',
|
||||
borderColorClass: '',
|
||||
title: '',
|
||||
titleIcon: '',
|
||||
|
|
|
|||
|
|
@ -3,8 +3,8 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Auth::OAuth::OauthResourceOwnerRedirectResolver, feature_category: :system_access do
|
||||
let(:resolver) { described_class.new(namespace_path) }
|
||||
let(:namespace_path) { nil }
|
||||
let(:resolver) { described_class.new(namespace_id) }
|
||||
let(:namespace_id) { nil }
|
||||
let(:group) { create(:group) }
|
||||
|
||||
describe '#resolve_redirect_url' do
|
||||
|
|
@ -14,8 +14,8 @@ RSpec.describe Gitlab::Auth::OAuth::OauthResourceOwnerRedirectResolver, feature_
|
|||
allow(resolver).to receive(:new_user_session_url).and_return('/login')
|
||||
end
|
||||
|
||||
context 'with any namespace path' do
|
||||
let(:namespace_path) { group.full_path }
|
||||
context 'with any namespace id' do
|
||||
let(:namespace_id) { group.id }
|
||||
|
||||
it 'returns new_user_session_url' do
|
||||
expect(resolver).to receive(:new_user_session_url)
|
||||
|
|
@ -23,8 +23,8 @@ RSpec.describe Gitlab::Auth::OAuth::OauthResourceOwnerRedirectResolver, feature_
|
|||
end
|
||||
end
|
||||
|
||||
context 'with nil namespace path' do
|
||||
let(:namespace_path) { nil }
|
||||
context 'with nil namespace id' do
|
||||
let(:namespace_id) { nil }
|
||||
|
||||
it 'returns new_user_session_url' do
|
||||
expect(resolve_redirect_url).to eq('/login')
|
||||
|
|
|
|||
|
|
@ -0,0 +1,33 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
require_migration!
|
||||
|
||||
RSpec.describe RequeueBackfillBulkImportTrackersProjectId, feature_category: :importers do
|
||||
let!(:batched_migration) { described_class::MIGRATION }
|
||||
|
||||
it 'schedules a new batched migration' do
|
||||
reversible_migration do |migration|
|
||||
migration.before -> {
|
||||
expect(batched_migration).not_to have_scheduled_batched_migration
|
||||
}
|
||||
|
||||
migration.after -> {
|
||||
expect(batched_migration).to have_scheduled_batched_migration(
|
||||
table_name: :bulk_import_trackers,
|
||||
column_name: :id,
|
||||
interval: described_class::DELAY_INTERVAL,
|
||||
batch_size: described_class::BATCH_SIZE,
|
||||
sub_batch_size: described_class::SUB_BATCH_SIZE,
|
||||
gitlab_schema: :gitlab_main_cell,
|
||||
job_arguments: [
|
||||
:project_id,
|
||||
:bulk_import_entities,
|
||||
:project_id,
|
||||
:bulk_import_entity_id
|
||||
]
|
||||
)
|
||||
}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,33 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
require_migration!
|
||||
|
||||
RSpec.describe RequeueBackfillBulkImportTrackersNamespaceId, feature_category: :importers do
|
||||
let!(:batched_migration) { described_class::MIGRATION }
|
||||
|
||||
it 'schedules a new batched migration' do
|
||||
reversible_migration do |migration|
|
||||
migration.before -> {
|
||||
expect(batched_migration).not_to have_scheduled_batched_migration
|
||||
}
|
||||
|
||||
migration.after -> {
|
||||
expect(batched_migration).to have_scheduled_batched_migration(
|
||||
table_name: :bulk_import_trackers,
|
||||
column_name: :id,
|
||||
interval: described_class::DELAY_INTERVAL,
|
||||
batch_size: described_class::BATCH_SIZE,
|
||||
sub_batch_size: described_class::SUB_BATCH_SIZE,
|
||||
gitlab_schema: :gitlab_main_cell,
|
||||
job_arguments: [
|
||||
:namespace_id,
|
||||
:bulk_import_entities,
|
||||
:namespace_id,
|
||||
:bulk_import_entity_id
|
||||
]
|
||||
)
|
||||
}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,33 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
require_migration!
|
||||
|
||||
RSpec.describe RequeueBackfillBulkImportTrackersOrganizationId, feature_category: :importers do
|
||||
let!(:batched_migration) { described_class::MIGRATION }
|
||||
|
||||
it 'schedules a new batched migration' do
|
||||
reversible_migration do |migration|
|
||||
migration.before -> {
|
||||
expect(batched_migration).not_to have_scheduled_batched_migration
|
||||
}
|
||||
|
||||
migration.after -> {
|
||||
expect(batched_migration).to have_scheduled_batched_migration(
|
||||
table_name: :bulk_import_trackers,
|
||||
column_name: :id,
|
||||
interval: described_class::DELAY_INTERVAL,
|
||||
batch_size: described_class::BATCH_SIZE,
|
||||
sub_batch_size: described_class::SUB_BATCH_SIZE,
|
||||
gitlab_schema: :gitlab_main_cell,
|
||||
job_arguments: [
|
||||
:organization_id,
|
||||
:bulk_import_entities,
|
||||
:organization_id,
|
||||
:bulk_import_entity_id
|
||||
]
|
||||
)
|
||||
}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -31,6 +31,7 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
|
|||
subject(:lookup_path) do
|
||||
described_class.new(
|
||||
deployment: deployment,
|
||||
root_namespace_id: project.namespace.root_ancestor.id,
|
||||
trim_prefix: trim_prefix,
|
||||
access_control: access_control
|
||||
)
|
||||
|
|
@ -52,12 +53,6 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#top_level_namespace_path' do
|
||||
it 'returns the top level namespace path' do
|
||||
expect(lookup_path.top_level_namespace_path).to eq(group.path)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#access_control' do
|
||||
context 'when access control is enabled' do
|
||||
let(:access_control) { true }
|
||||
|
|
@ -85,7 +80,9 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
|
|||
end
|
||||
|
||||
describe '#https_only' do
|
||||
subject(:lookup_path) { described_class.new(deployment: deployment, domain: domain) }
|
||||
subject(:lookup_path) do
|
||||
described_class.new(deployment: deployment, root_namespace_id: project.namespace.root_ancestor.id, domain: domain)
|
||||
end
|
||||
|
||||
context 'when no domain provided' do
|
||||
let(:domain) { nil }
|
||||
|
|
|
|||
|
|
@ -0,0 +1,160 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe 'work item add children items', feature_category: :team_planning do
|
||||
include GraphqlHelpers
|
||||
|
||||
let_it_be(:group) { create(:group) }
|
||||
let_it_be(:project) { create(:project, group: group) }
|
||||
let_it_be(:author) { create(:user, reporter_of: group) }
|
||||
let_it_be(:guest) { create(:user, guest_of: group) }
|
||||
let_it_be(:planner) { create(:user, planner_of: group) }
|
||||
let_it_be(:work_item, refind: true) { create(:work_item, project: project, author: author) }
|
||||
|
||||
let_it_be(:valid_child1) { create(:work_item, :task, project: project, created_at: 5.minutes.ago) }
|
||||
let_it_be(:valid_child2) { create(:work_item, :task, project: project, created_at: 5.minutes.from_now) }
|
||||
let(:children_ids) { [valid_child1.to_global_id.to_s, valid_child2.to_global_id.to_s] }
|
||||
let(:input) { { 'childrenIds' => children_ids } }
|
||||
|
||||
let(:fields) do
|
||||
<<~FIELDS
|
||||
addedChildren {
|
||||
id
|
||||
}
|
||||
errors
|
||||
FIELDS
|
||||
end
|
||||
|
||||
let(:mutation_work_item) { work_item }
|
||||
let(:mutation) do
|
||||
graphql_mutation(:workItemHierarchyAddChildrenItems, input.merge('id' => mutation_work_item.to_gid.to_s), fields)
|
||||
end
|
||||
|
||||
let(:mutation_response) { graphql_mutation_response(:work_item_hierarchy_add_children_items) }
|
||||
let(:added_children_response) { mutation_response['addedChildren'] }
|
||||
|
||||
before_all do
|
||||
# Ensure support bot user is created so creation doesn't count towards query limit
|
||||
# and we don't try to obtain an exclusive lease within a transaction.
|
||||
# See https://gitlab.com/gitlab-org/gitlab/-/issues/509629
|
||||
Users::Internal.support_bot_id
|
||||
end
|
||||
|
||||
shared_examples 'request with error' do |message|
|
||||
it 'ignores update and returns an error' do
|
||||
post_graphql_mutation(mutation, current_user: current_user)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:success)
|
||||
expect(added_children_response).to be_empty
|
||||
expect(mutation_response['errors'].first).to include(message)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has permissions to update a work item' do
|
||||
let(:current_user) { planner }
|
||||
|
||||
context 'when updating children' do
|
||||
let_it_be(:invalid_child) { create(:work_item, project: project) }
|
||||
|
||||
let(:error) do
|
||||
"#{invalid_child.to_reference} cannot be added: it's not allowed to add this type of parent item"
|
||||
end
|
||||
|
||||
context 'when child work item type is invalid' do
|
||||
let(:children_ids) { [invalid_child.to_global_id.to_s] }
|
||||
|
||||
it 'returns response with errors' do
|
||||
post_graphql_mutation(mutation, current_user: current_user)
|
||||
|
||||
expect(added_children_response).to be_empty
|
||||
expect(mutation_response['errors']).to match_array([error])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there is a mix of existing and non existing work items' do
|
||||
let(:children_ids) { [valid_child1.to_global_id.to_s, "gid://gitlab/WorkItem/#{non_existing_record_id}"] }
|
||||
|
||||
it 'returns a top level error and does not add valid work item' do
|
||||
expect do
|
||||
post_graphql_mutation(mutation, current_user: current_user)
|
||||
work_item.reload
|
||||
end.not_to change { work_item.work_item_children.count }
|
||||
|
||||
expect(graphql_errors.first['message']).to include('No object found for `childrenIds')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when child work item type is valid' do
|
||||
it 'updates the work item children' do
|
||||
expect do
|
||||
post_graphql_mutation(mutation, current_user: current_user)
|
||||
work_item.reload
|
||||
end.to change { work_item.work_item_children.count }.by(2)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:success)
|
||||
expect(added_children_response).to match_array([
|
||||
{ 'id' => valid_child2.to_global_id.to_s },
|
||||
{ 'id' => valid_child1.to_global_id.to_s }
|
||||
])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when updating hierarchy for incident' do
|
||||
let_it_be(:incident) { create(:work_item, :incident, project: project) }
|
||||
let_it_be(:child_item) { create(:work_item, :task, project: project) }
|
||||
let(:mutation_work_item) { incident }
|
||||
|
||||
let(:input) do
|
||||
{ 'childrenIds' => [child_item.to_global_id.to_s] }
|
||||
end
|
||||
|
||||
context 'when user is a guest' do
|
||||
let(:current_user) { guest }
|
||||
|
||||
it 'returns an error and does not update' do
|
||||
expect do
|
||||
post_graphql_mutation(mutation, current_user: current_user)
|
||||
incident.reload
|
||||
end.not_to change { incident.work_item_children.count }
|
||||
|
||||
expect(added_children_response).to be_empty
|
||||
expect(mutation_response['errors']).to include(
|
||||
"No matching work item found. Make sure that you are adding a valid work item ID."
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user is an admin' do
|
||||
let_it_be(:admin) { create(:admin) }
|
||||
let(:current_user) { admin }
|
||||
|
||||
it 'successfully updates the incident hierarchy' do
|
||||
expect do
|
||||
post_graphql_mutation(mutation, current_user: current_user)
|
||||
incident.reload
|
||||
end.to change { incident.work_item_children.count }.by(1)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:success)
|
||||
expect(added_children_response).to match_array([{ 'id' => child_item.to_global_id.to_s }])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when guest updates hierarchy for non-incident work item' do
|
||||
let(:current_user) { guest }
|
||||
let(:mutation_work_item) { work_item } # regular work item, not incident
|
||||
|
||||
it 'successfully updates the work item hierarchy' do
|
||||
expect do
|
||||
post_graphql_mutation(mutation, current_user: current_user)
|
||||
work_item.reload
|
||||
end.to change { work_item.work_item_children.count }.by(1)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:success)
|
||||
expect(added_children_response).to match_array([{ 'id' => child_item.to_global_id.to_s }])
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -351,9 +351,10 @@ RSpec.describe API::ImportGithub, feature_category: :importers do
|
|||
|
||||
context 'when rate limit reached' do
|
||||
before do
|
||||
allow(Import::Github::GistsImportService)
|
||||
.to receive(:new).with(user, client, access_params)
|
||||
.and_raise(Gitlab::GithubImport::RateLimitError)
|
||||
allow_next_instance_of(Import::Github::GistsImportService) do |service|
|
||||
allow(service).to receive(:execute)
|
||||
.and_return({ status: :error, message: 'GitHub API rate limit exceeded', http_status: 429 })
|
||||
end
|
||||
end
|
||||
|
||||
it 'returns 429 error' do
|
||||
|
|
|
|||
|
|
@ -115,7 +115,7 @@ RSpec.describe API::Internal::Pages, feature_category: :pages do
|
|||
'unique_host' => nil,
|
||||
'root_directory' => deployment.root_directory,
|
||||
'primary_domain' => nil,
|
||||
'top_level_namespace_path' => project.namespace.root_ancestor.path
|
||||
'root_namespace_id' => project.namespace.root_ancestor.id
|
||||
}
|
||||
]
|
||||
)
|
||||
|
|
@ -186,7 +186,7 @@ RSpec.describe API::Internal::Pages, feature_category: :pages do
|
|||
'unique_host' => 'unique-domain.example.com',
|
||||
'root_directory' => 'public',
|
||||
'primary_domain' => nil,
|
||||
'top_level_namespace_path' => project.namespace.root_ancestor.path
|
||||
'root_namespace_id' => project.namespace.root_ancestor.id
|
||||
}
|
||||
]
|
||||
)
|
||||
|
|
@ -232,7 +232,7 @@ RSpec.describe API::Internal::Pages, feature_category: :pages do
|
|||
'unique_host' => 'unique-domain.example.com',
|
||||
'root_directory' => 'public',
|
||||
'primary_domain' => 'https://pages.io',
|
||||
'top_level_namespace_path' => project.namespace.root_ancestor.path
|
||||
'root_namespace_id' => project.namespace.root_ancestor.id
|
||||
}
|
||||
]
|
||||
)
|
||||
|
|
@ -284,7 +284,7 @@ RSpec.describe API::Internal::Pages, feature_category: :pages do
|
|||
'unique_host' => nil,
|
||||
'root_directory' => 'public',
|
||||
'primary_domain' => nil,
|
||||
'top_level_namespace_path' => project.namespace.root_ancestor.path
|
||||
'root_namespace_id' => project.namespace.root_ancestor.id
|
||||
}
|
||||
]
|
||||
)
|
||||
|
|
@ -444,7 +444,7 @@ RSpec.describe API::Internal::Pages, feature_category: :pages do
|
|||
'unique_host' => nil,
|
||||
'root_directory' => 'public',
|
||||
'primary_domain' => nil,
|
||||
'top_level_namespace_path' => project.namespace.root_ancestor.path
|
||||
'root_namespace_id' => project.namespace.root_ancestor.id
|
||||
}
|
||||
]
|
||||
)
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ RSpec.describe Import::Github::GistsImportService, feature_category: :importers
|
|||
|
||||
let_it_be(:user) { create(:user) }
|
||||
let(:params) { { github_access_token: 'token' } }
|
||||
let(:import_status) { instance_double('Gitlab::GithubGistsImport::Status') }
|
||||
let(:import_status) { instance_double(Gitlab::GithubGistsImport::Status) }
|
||||
let(:client) { Gitlab::GithubImport::Client.new(params[:github_access_token]) }
|
||||
let(:octokit_user) { { login: 'user_login' } }
|
||||
|
||||
|
|
@ -56,7 +56,7 @@ RSpec.describe Import::Github::GistsImportService, feature_category: :importers
|
|||
let(:expected_result) do
|
||||
{
|
||||
http_status: 401,
|
||||
message: 'Access denied to the GitHub account.',
|
||||
message: 'Access denied to the GitHub account',
|
||||
status: :error
|
||||
}
|
||||
end
|
||||
|
|
@ -65,5 +65,24 @@ RSpec.describe Import::Github::GistsImportService, feature_category: :importers
|
|||
expect(import.execute).to eq(expected_result)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the rate limit is exceeded' do
|
||||
before do
|
||||
allow(client.octokit).to receive(:user).and_raise(Octokit::TooManyRequests)
|
||||
allow(import_status).to receive(:started?).and_return(false)
|
||||
end
|
||||
|
||||
let(:expected_result) do
|
||||
{
|
||||
http_status: 429,
|
||||
message: 'GitHub API rate limit exceeded',
|
||||
status: :error
|
||||
}
|
||||
end
|
||||
|
||||
it 'returns 429 error' do
|
||||
expect(import.execute).to eq(expected_result)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
yarn.lock
|
|
@ -1338,21 +1338,13 @@
|
|||
resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-3.1.1.tgz#af3aea7f1e52ec916d8b5c9dcc0f09d4c060a3fc"
|
||||
integrity sha512-5DGmA8FTdB2XbDeEwc/5ZXBl6UbBAyBOOLlPuBnZ/N1SwdH9Ii+cOX3tBROlDgcTXxjOYnLMVoKk9+FXAw0CJw==
|
||||
|
||||
"@floating-ui/core@^1.7.1", "@floating-ui/core@^1.7.2":
|
||||
"@floating-ui/core@^1.7.2":
|
||||
version "1.7.2"
|
||||
resolved "https://registry.yarnpkg.com/@floating-ui/core/-/core-1.7.2.tgz#3d1c35263950b314b6d5a72c8bfb9e3c1551aefd"
|
||||
integrity sha512-wNB5ooIKHQc+Kui96jE/n69rHFWAVoxn5CAzL1Xdd8FG03cgY3MLO+GF9U3W737fYDSgPWA6MReKhBQBop6Pcw==
|
||||
dependencies:
|
||||
"@floating-ui/utils" "^0.2.10"
|
||||
|
||||
"@floating-ui/dom@1.7.1":
|
||||
version "1.7.1"
|
||||
resolved "https://registry.yarnpkg.com/@floating-ui/dom/-/dom-1.7.1.tgz#76a4e3cbf7a08edf40c34711cf64e0cc8053d912"
|
||||
integrity sha512-cwsmW/zyw5ltYTUeeYJ60CnQuPqmGwuGVhG9w0PRaRKkAyi38BT5CKrpIbb+jtahSwUl04cWzSx9ZOIxeS6RsQ==
|
||||
dependencies:
|
||||
"@floating-ui/core" "^1.7.1"
|
||||
"@floating-ui/utils" "^0.2.9"
|
||||
|
||||
"@floating-ui/dom@1.7.2", "@floating-ui/dom@^1.0.0", "@floating-ui/dom@^1.7.2":
|
||||
version "1.7.2"
|
||||
resolved "https://registry.yarnpkg.com/@floating-ui/dom/-/dom-1.7.2.tgz#3540b051cf5ce0d4f4db5fb2507a76e8ea5b4a45"
|
||||
|
|
@ -1368,7 +1360,7 @@
|
|||
dependencies:
|
||||
"@floating-ui/dom" "^1.0.0"
|
||||
|
||||
"@floating-ui/utils@^0.2.10", "@floating-ui/utils@^0.2.9":
|
||||
"@floating-ui/utils@^0.2.10":
|
||||
version "0.2.10"
|
||||
resolved "https://registry.yarnpkg.com/@floating-ui/utils/-/utils-0.2.10.tgz#a2a1e3812d14525f725d011a73eceb41fef5bc1c"
|
||||
integrity sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==
|
||||
|
|
@ -1464,13 +1456,14 @@
|
|||
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-3.138.0.tgz#5db6d76ceedcf3716e9ce624b272a58052d8d121"
|
||||
integrity sha512-Jzd7GhmKxsQdCTttOe6C4AjqGvq8L91N6uUYnAmwnLGeY3aRD12BKBSgId5FrTH6rvk2w36o1+AwIqP+YuHV4g==
|
||||
|
||||
"@gitlab/ui@115.0.1":
|
||||
version "115.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-115.0.1.tgz#a449a00b1b9352952d542a456cca3a210a02c706"
|
||||
integrity sha512-gZU8w2W1N36tqDjzzTsH1Mg3xjyVU+ki+2J3bHvw65ovQVF/+A7qRrn2CT07IV0L70zCNOP8RO9VKJKINxpp6A==
|
||||
"@gitlab/ui@115.4.0":
|
||||
version "115.4.0"
|
||||
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-115.4.0.tgz#f5fb265560c0d7bc98e5537c82ca701318be72f0"
|
||||
integrity sha512-IjeeXHFZdnXIUoGqACdwwAPXTDT7OZdJ2kVPDOTfvc66Yow4OIXRNsZr300Fd3dvItyyDy5f/hCqhbyLMWb9hQ==
|
||||
dependencies:
|
||||
"@floating-ui/dom" "1.7.1"
|
||||
"@floating-ui/dom" "1.7.2"
|
||||
echarts "^5.6.0"
|
||||
gridstack "^12.2.1"
|
||||
iframe-resizer "^4.4.5"
|
||||
lodash "^4.17.21"
|
||||
popper.js "^1.16.1"
|
||||