Add latest changes from gitlab-org/gitlab@master

parent aec24714b9
commit fc7097e255

@@ -1,9 +1,9 @@
<script>
import { GlLink } from '@gitlab/ui';
import { GlButton } from '@gitlab/ui';

export default {
components: {
GlLink,
GlButton,
},

props: {

@@ -26,5 +26,5 @@ export default {
};
</script>
<template>
<gl-link :href="historyPathWithId">{{ s__('BulkImport|Migration details') }} ></gl-link>
<gl-button :href="historyPathWithId">{{ s__('BulkImport|Migration details') }}</gl-button>
</template>

@@ -29,10 +29,14 @@ export default {

<template>
<div>
<gl-link :href="group.webUrl" target="_blank" class="gl-inline-flex gl-h-7 gl-items-center">
<gl-link
:href="group.webUrl"
target="_blank"
class="gl-inline-flex gl-h-7 gl-items-center gl-gap-2"
>
{{ group.fullPath }} <gl-icon name="external-link" class="gl-fill-icon-link" />
</gl-link>
<div v-if="group.flags.isFinished && fullLastImportPath" class="gl-text-sm">
<div v-if="group.flags.isFinished && fullLastImportPath" class="gl-text-sm gl-text-subtle">
<gl-sprintf :message="s__('BulkImport|Last imported to %{link}')">
<template #link>
<gl-link :href="absoluteLastImportPath" class="gl-text-sm" target="_blank">{{
|
|
|||
|
|
@ -145,14 +145,14 @@ export default {
|
|||
key: 'selected',
|
||||
label: '',
|
||||
thClass: 'gl-w-3 !gl-pr-3',
|
||||
tdClass: '!gl-pr-3',
|
||||
tdClass: '!gl-flex lg:!gl-table-cell lg:!gl-pr-3',
|
||||
},
|
||||
{
|
||||
key: 'webUrl',
|
||||
label: s__('BulkImport|Source group'),
|
||||
// eslint-disable-next-line @gitlab/require-i18n-strings
|
||||
thClass: '!gl-pl-0 gl-w-1/2',
|
||||
tdClass: '!gl-pl-0',
|
||||
thClass: 'lg:!gl-pl-0 gl-w-1/2',
|
||||
tdClass: 'lg:!gl-pl-0',
|
||||
},
|
||||
{
|
||||
key: 'importTarget',
|
||||
|
|
@ -162,11 +162,13 @@ export default {
|
|||
{
|
||||
key: 'progress',
|
||||
label: __('Status'),
|
||||
tdClass: '!gl-align-middle',
|
||||
tdAttr: { 'data-testid': 'import-status-indicator' },
|
||||
},
|
||||
{
|
||||
key: 'actions',
|
||||
label: '',
|
||||
tdClass: '!gl-flex lg:!gl-table-cell',
|
||||
},
|
||||
],
|
||||
|
||||
|
|
@ -342,12 +344,7 @@ export default {
|
|||
|
||||
methods: {
|
||||
rowClasses(groupTableItem) {
|
||||
const DEFAULT_CLASSES = [
|
||||
'gl-border-strong',
|
||||
'gl-border-0',
|
||||
'gl-border-b-1',
|
||||
'gl-border-solid',
|
||||
];
|
||||
const DEFAULT_CLASSES = ['gl-border-strong', 'gl-border-0', 'gl-border-b', 'gl-border-solid'];
|
||||
const result = [...DEFAULT_CLASSES];
|
||||
if (groupTableItem.flags.isUnselectable) {
|
||||
result.push('!gl-cursor-default');
|
||||
|
|
@ -675,24 +672,20 @@ export default {
|
|||
{{ s__('BulkImport|View import history') }}
|
||||
</gl-button>
|
||||
</template>
|
||||
<template #description
|
||||
><span>{{ s__('BulkImport|Select the groups and projects you want to import.') }}</span>
|
||||
<span>
|
||||
<gl-sprintf
|
||||
:message="
|
||||
s__(
|
||||
'BulkImport|Please note: importing projects is a %{docsLinkStart}beta%{docsLinkEnd} feature.',
|
||||
)
|
||||
"
|
||||
<template #description>
|
||||
{{ s__('BulkImport|Select the groups and projects you want to import.') }}
|
||||
<gl-sprintf
|
||||
:message="
|
||||
s__('BulkImport|Importing projects is a %{docsLinkStart}beta%{docsLinkEnd} feature.')
|
||||
"
|
||||
>
|
||||
<template #docsLink="{ content }"
|
||||
><gl-link :href="$options.betaFeatureHelpPath" target="_blank">{{
|
||||
content
|
||||
}}</gl-link></template
|
||||
>
|
||||
<template #docsLink="{ content }"
|
||||
><gl-link :href="$options.betaFeatureHelpPath" target="_blank">{{
|
||||
content
|
||||
}}</gl-link></template
|
||||
>
|
||||
</gl-sprintf>
|
||||
</span></template
|
||||
>
|
||||
</gl-sprintf>
|
||||
</template>
|
||||
</page-heading>
|
||||
|
||||
<gl-alert
|
||||
|
|
@ -728,7 +721,7 @@ export default {
|
|||
</template>
|
||||
</gl-sprintf>
|
||||
</gl-alert>
|
||||
<gl-alert variant="warning" :dismissible="false" class="mt-3">
|
||||
<gl-alert variant="warning" :dismissible="false">
|
||||
<gl-sprintf
|
||||
:message="
|
||||
s__(
|
||||
|
|
@ -742,13 +735,6 @@ export default {
|
|||
</gl-sprintf>
|
||||
</gl-alert>
|
||||
<div class="gl-border-0 gl-border-b-1 gl-border-solid gl-border-b-default gl-py-5">
|
||||
<gl-search-box-by-click
|
||||
class="gl-mb-5"
|
||||
data-testid="filter-groups"
|
||||
:placeholder="s__('BulkImport|Filter by source group')"
|
||||
@submit="filter = $event"
|
||||
@clear="filter = ''"
|
||||
/>
|
||||
<span v-if="!$apollo.loading && hasGroups">
|
||||
<gl-sprintf :message="statusMessage">
|
||||
<template #start>
|
||||
|
|
@ -784,6 +770,15 @@ export default {
|
|||
</help-popover>
|
||||
</span>
|
||||
</div>
|
||||
<div class="gl-flex gl-flex-col gl-gap-3 gl-bg-subtle gl-p-5 gl-pb-4">
|
||||
<gl-search-box-by-click
|
||||
data-testid="filter-groups"
|
||||
:placeholder="s__('BulkImport|Filter by source group')"
|
||||
@submit="filter = $event"
|
||||
@clear="filter = ''"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<gl-loading-icon v-if="$apollo.loading" size="lg" class="gl-mt-5" />
|
||||
<template v-else>
|
||||
<gl-empty-state
|
||||
|
|
@ -806,9 +801,9 @@ export default {
|
|||
</gl-empty-state>
|
||||
<template v-else>
|
||||
<div
|
||||
class="import-table-bar gl-sticky gl-z-3 gl-flex-col gl-bg-subtle gl-px-4 md:gl-flex md:gl-flex-row md:gl-items-center md:gl-justify-between"
|
||||
class="import-table-bar gl-sticky gl-z-3 gl-flex-col gl-bg-subtle gl-px-5 md:gl-flex md:gl-flex-row md:gl-items-center md:gl-justify-between"
|
||||
>
|
||||
<div class="gl-items-center gl-gap-4 gl-py-3 md:gl-flex">
|
||||
<div class="gl-flex gl-w-full gl-items-center gl-gap-4 gl-pb-4">
|
||||
<span data-test-id="selection-count">
|
||||
<gl-sprintf :message="__('%{count} selected')">
|
||||
<template #count>
|
||||
|
|
@ -834,7 +829,7 @@ export default {
|
|||
v-gl-tooltip
|
||||
:title="s__('BulkImport|Some groups will be imported without projects.')"
|
||||
name="warning"
|
||||
class="gl-text-orange-500"
|
||||
variant="warning"
|
||||
data-testid="import-projects-warning"
|
||||
/>
|
||||
</span>
|
||||
|
|
@ -869,6 +864,7 @@ export default {
|
|||
selectable
|
||||
select-mode="multi"
|
||||
selected-variant="primary"
|
||||
stacked="lg"
|
||||
@row-selected="preventSelectingAlreadyImportedGroups"
|
||||
>
|
||||
<template #head(selected)="{ selectAllRows, clearSelected }">
|
||||
|
|
@ -911,13 +907,13 @@ export default {
|
|||
/>
|
||||
</template>
|
||||
<template #cell(progress)="{ item: group }">
|
||||
<import-status-cell :status="group.visibleStatus" :has-failures="hasFailures(group)" />
|
||||
<import-history-link
|
||||
v-if="showHistoryLink(group)"
|
||||
:id="group.progress.id"
|
||||
:history-path="historyShowPath"
|
||||
class="gl-mt-2 gl-inline-block"
|
||||
/>
|
||||
<div class="gl-mt-3">
|
||||
<import-status-cell
|
||||
class="gl-items-end lg:gl-items-start"
|
||||
:status="group.visibleStatus"
|
||||
:has-failures="hasFailures(group)"
|
||||
/>
|
||||
</div>
|
||||
</template>
|
||||
<template #cell(actions)="{ item: group, index }">
|
||||
<import-actions-cell
|
||||
|
|
@ -928,6 +924,12 @@ export default {
|
|||
:is-project-creation-allowed="group.flags.isProjectCreationAllowed"
|
||||
@import-group="importGroup({ group, extraArgs: $event, index })"
|
||||
/>
|
||||
<import-history-link
|
||||
v-if="showHistoryLink(group)"
|
||||
:id="group.progress.id"
|
||||
:history-path="historyShowPath"
|
||||
class="gl-mt-3"
|
||||
/>
|
||||
</template>
|
||||
</gl-table>
|
||||
</template>
|
||||
|
|
|
|||
|
|
@@ -92,6 +92,16 @@ const transformOptions = (options = {}) => {

const installed = new WeakMap();

export const getMatchedComponents = (instance, path) => {
if (instance.getMatchedComponents) {
return instance.getMatchedComponents(path);
}

const route = path ? instance.resolve(path) : instance.currentRoute.value;

return route.matched.flatMap((record) => Object.values(record.components));
};
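The `getMatchedComponents` helper above papers over an API difference between Vue Router majors. A minimal usage sketch (not part of the commit; the router instances and the example path are assumptions for illustration):

```javascript
// Illustrative only. Assumes `router3` is a Vue Router 3 instance and `router4` a Vue Router 4 instance.

// Vue Router 3 still exposes getMatchedComponents(), so the helper simply delegates to it.
const fromV3 = getMatchedComponents(router3, '/groups/new');

// Vue Router 4 removed that method, so the helper resolves the path (or falls back to the
// current route) and flattens every named view out of the matched route records.
const fromV4 = getMatchedComponents(router4, '/groups/new');
```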

export default class VueRouterCompat {
constructor(options) {
// eslint-disable-next-line no-constructor-return
|
|
|||
|
|
@ -1,151 +0,0 @@
|
|||
<script>
|
||||
import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
|
||||
import RegistrySearch from '~/vue_shared/components/registry/registry_search.vue';
|
||||
import { GRAPHQL_PAGE_SIZE, LIST_KEY_CREATED_AT } from '~/ml/model_registry/constants';
|
||||
import { queryToObject, setUrlParams, updateHistory } from '~/lib/utils/url_utility';
|
||||
import { FILTERED_SEARCH_TERM } from '~/vue_shared/components/filtered_search_bar/constants';
|
||||
import LoadOrErrorOrShow from '~/ml/model_registry/components/load_or_error_or_show.vue';
|
||||
|
||||
export default {
|
||||
name: 'SearchableList',
|
||||
components: { RegistryList, RegistrySearch, LoadOrErrorOrShow },
|
||||
props: {
|
||||
items: {
|
||||
type: Array,
|
||||
required: true,
|
||||
},
|
||||
pageInfo: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
isLoading: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
errorMessage: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: '',
|
||||
},
|
||||
showSearch: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
sortableFields: {
|
||||
type: Array,
|
||||
required: false,
|
||||
default: () => [],
|
||||
},
|
||||
},
|
||||
data() {
|
||||
const query = queryToObject(window.location.search);
|
||||
|
||||
const filter = query.name ? [{ value: { data: query.name }, type: FILTERED_SEARCH_TERM }] : [];
|
||||
|
||||
const orderBy = query.orderBy || LIST_KEY_CREATED_AT;
|
||||
|
||||
return {
|
||||
filters: filter,
|
||||
sorting: {
|
||||
orderBy,
|
||||
sort: (query.sort || 'desc').toLowerCase(),
|
||||
},
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
isListEmpty() {
|
||||
return this.items.length === 0;
|
||||
},
|
||||
parsedQuery() {
|
||||
const name = this.filters
|
||||
.map((f) => f.value.data)
|
||||
.join(' ')
|
||||
.trim();
|
||||
|
||||
const filterByQuery = name === '' ? {} : { name };
|
||||
|
||||
return { ...filterByQuery, ...this.sorting };
|
||||
},
|
||||
},
|
||||
created() {
|
||||
this.nextPage();
|
||||
},
|
||||
methods: {
|
||||
prevPage() {
|
||||
const variables = {
|
||||
first: null,
|
||||
last: GRAPHQL_PAGE_SIZE,
|
||||
before: this.pageInfo.startCursor,
|
||||
...this.parsedQuery,
|
||||
};
|
||||
|
||||
this.fetchPage(variables);
|
||||
},
|
||||
nextPage() {
|
||||
const variables = {
|
||||
first: GRAPHQL_PAGE_SIZE,
|
||||
last: null,
|
||||
after: this.pageInfo.endCursor,
|
||||
...this.parsedQuery,
|
||||
};
|
||||
|
||||
this.fetchPage(variables);
|
||||
},
|
||||
fetchPage(variables) {
|
||||
updateHistory({
|
||||
url: setUrlParams(variables, window.location.href, true),
|
||||
title: document.title,
|
||||
replace: true,
|
||||
});
|
||||
|
||||
this.$emit('fetch-page', variables);
|
||||
},
|
||||
submitFilters() {
|
||||
this.fetchPage(this.parsedQuery);
|
||||
},
|
||||
updateFilters(newValue) {
|
||||
this.filters = newValue;
|
||||
},
|
||||
updateSorting(newValue) {
|
||||
this.sorting = { ...this.sorting, ...newValue };
|
||||
},
|
||||
updateSortingAndEmitUpdate(newValue) {
|
||||
this.updateSorting(newValue);
|
||||
this.submitFilters();
|
||||
},
|
||||
},
|
||||
};
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div>
|
||||
<registry-search
|
||||
v-if="showSearch"
|
||||
:filters="filters"
|
||||
:sorting="sorting"
|
||||
:sortable-fields="sortableFields"
|
||||
@sorting:changed="updateSortingAndEmitUpdate"
|
||||
@filter:changed="updateFilters"
|
||||
@filter:submit="submitFilters"
|
||||
@filter:clear="filters = []"
|
||||
/>
|
||||
<load-or-error-or-show :is-loading="isLoading" :error-message="errorMessage">
|
||||
<slot v-if="isListEmpty" name="empty-state"></slot>
|
||||
<registry-list
|
||||
v-else
|
||||
:hidden-delete="true"
|
||||
:is-loading="isLoading"
|
||||
:items="items"
|
||||
:pagination="pageInfo"
|
||||
@prev-page="prevPage"
|
||||
@next-page="nextPage"
|
||||
>
|
||||
<template #default="{ item }">
|
||||
<slot name="item" :item="item"></slot>
|
||||
</template>
|
||||
</registry-list>
|
||||
</load-or-error-or-show>
|
||||
</div>
|
||||
</template>
|
||||
|
|
@@ -9,11 +9,21 @@ import { getRefType } from './utils/ref_type';

Vue.use(VueRouter);

const normalizePathParam = (pathParam) => {
// Vue Router 4 when there's more than one `:path` segment
if (Array.isArray(pathParam)) {
return joinPaths(...pathParam);
}

// Vue Router 3, or when there's zero or one `:path` segments.
return pathParam?.replace(/^\//, '') || '/';
};

export default function createRouter(base, baseRef) {
const treePathRoute = {
component: TreePage,
props: (route) => ({
path: route.params.path?.replace(/^\//, '') || '/',
path: normalizePathParam(route.params.path),
refType: getRefType(route.query.ref_type || null),
}),
};

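The `normalizePathParam` helper introduced above exists because Vue Router 4 hands a repeatable `:path*` parameter to `props` as an array of segments, while Vue Router 3 passes a single string. A rough sketch of the shapes it normalizes (illustrative values only, not taken from the diff):

```javascript
// Illustrative only — the example arguments are assumptions.

// Vue Router 4: a multi-segment `:path*` match arrives as an array of segments.
normalizePathParam(['app', 'models', 'project.rb']); // => 'app/models/project.rb' (joined by joinPaths)

// Vue Router 3, or a zero/one segment match: a string that may carry a leading slash.
normalizePathParam('/README.md'); // => 'README.md'

// No path param at all falls back to the repository root.
normalizePathParam(undefined); // => '/'
```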
@@ -36,25 +46,25 @@ export default function createRouter(base, baseRef) {
{
name: 'treePathDecoded',
// Sometimes the ref needs decoding depending on how the backend sends it to us
path: `(/-)?/tree/${decodeURI(baseRef)}/:path*`,
path: `/:dash(-)?/tree/${decodeURI(baseRef)}/:path*`,
...treePathRoute,
},
{
name: 'treePath',
// Support without decoding as well just in case the ref doesn't need to be decoded
path: `(/-)?/tree/${escapeRegExp(baseRef)}/:path*`,
path: `/:dash(-)?/tree/${escapeRegExp(baseRef)}/:path*`,
...treePathRoute,
},
{
name: 'blobPathDecoded',
// Sometimes the ref needs decoding depending on how the backend sends it to us
path: `(/-)?/blob/${decodeURI(baseRef)}/:path*`,
path: `/:dash(-)?/blob/${decodeURI(baseRef)}/:path*`,
...blobPathRoute,
},
{
name: 'blobPath',
// Support without decoding as well just in case the ref doesn't need to be decoded
path: `(/-)?/blob/${escapeRegExp(baseRef)}/:path*`,
path: `/:dash(-)?/blob/${escapeRegExp(baseRef)}/:path*`,
...blobPathRoute,
},
{

@@ -80,7 +90,7 @@ export default function createRouter(base, baseRef) {
'edit',
decodeURI(baseRef),
'-',
to.params.path || '',
normalizePathParam(to.params.path),
needsClosingSlash && '/',
),
);

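The route `path` changes in the tree/blob definitions above swap an unnamed optional group for a named optional parameter. Vue Router 3's path-to-regexp syntax accepts `(/-)?`, while Vue Router 4's parser only supports custom patterns attached to a named parameter, which appears to be why the `:dash(-)?` form is used. Side by side (sketch only, not part of the commit):

```javascript
// Vue Router 3 (path-to-regexp): an anonymous optional group matches an optional "/-" prefix.
const v3Path = `(/-)?/tree/${baseRef}/:path*`;

// Vue Router 4: the same optional "-" segment is expressed as a named param with a custom pattern.
const v4Path = `/:dash(-)?/tree/${baseRef}/:path*`;
```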
@@ -1623,7 +1623,9 @@ class Project < ApplicationRecord
# - Relation import
# - Direct Transfer
def any_import_in_progress?
relation_import_trackers.last&.started? ||
last_relation_import_tracker = relation_import_trackers.last

(last_relation_import_tracker&.started? && !last_relation_import_tracker.stale?) ||
import_started? ||
BulkImports::Entity.with_status(:started).where(project_id: id).any?
end

@@ -10,6 +10,8 @@ module Projects
validates :relation, presence: true
validate :cannot_be_created_for_importing_project, on: :create

STALE_TIMEOUT = 24.hours

enum :relation, { issues: 0, merge_requests: 1, ci_pipelines: 2, milestones: 3 }

state_machine :status, initial: :created do

@@ -20,6 +22,7 @@ module Projects

event :start do
transition created: :started
transition started: :started
end

event :finish do

@@ -34,7 +37,7 @@ module Projects
def stale?
return false if finished? || failed?

created_at.before?(24.hours.ago)
created_at.before?(STALE_TIMEOUT.ago)
end

private

@@ -7,10 +7,10 @@
= render ::Layouts::PageHeadingComponent.new(_('Group members')) do |c|
- c.with_description do
= group_member_header_subtext(@group)
- c.with_actions do
- if current_appearance&.member_guidelines?
.gl-w-full.order-md-1
= brand_member_guidelines
- c.with_actions do
.js-invite-group-trigger{ data: { classes: 'md:gl-w-auto gl-w-full', display_text: _('Invite a group') } }
.js-invite-members-trigger{ data: { variant: 'confirm',
classes: 'md:gl-w-auto gl-w-full',

@@ -4,6 +4,7 @@ module Projects
module ImportExport
class RelationImportWorker
include ApplicationWorker
include Sidekiq::InterruptionsExhausted

sidekiq_options retry: 6

@@ -16,12 +17,26 @@ module Projects

attr_reader :tracker, :project, :current_user

sidekiq_retries_exhausted do |job, exception|
new.perform_failure(job['args'].first, exception)
end

sidekiq_interruptions_exhausted do |job|
new.perform_failure(job['args'].first,
::Import::Exceptions::SidekiqExhaustedInterruptionsError.new
)
end

def perform(tracker_id, user_id)
@current_user = User.find(user_id)
@tracker = ::Projects::ImportExport::RelationImportTracker.find(tracker_id)
@project = tracker.project

return unless tracker.can_start?
unless tracker.can_start?
::Import::Framework::Logger.info(message: 'Cannot start tracker', tracker_id: tracker.id,
tracker_status: tracker.status_name)
return
end

tracker.start!

@@ -31,20 +46,21 @@ module Projects

tracker.finish!
rescue StandardError => error
failure_service = Gitlab::ImportExport::ImportFailureService.new(project)
failure_service.log_import_failure(
source: 'RelationImportWorker#perform',
exception: error,
relation_key: tracker.relation
)

tracker.fail_op!
log_failure(error)

raise
ensure
remove_extracted_import
end

def perform_failure(tracker_id, exception)
@tracker = ::Projects::ImportExport::RelationImportTracker.find(tracker_id)
@project = tracker.project

log_failure(exception)
tracker.fail_op!
end

private

def extract_import_file

@@ -104,6 +120,15 @@ module Projects
def perform_post_import_tasks
project.reset_counters_and_iids
end

def log_failure(exception)
failure_service = Gitlab::ImportExport::ImportFailureService.new(project)
failure_service.log_import_failure(
source: 'RelationImportWorker#perform',
exception: exception,
relation_key: tracker.relation
)
end
end
end
end

@@ -0,0 +1,23 @@
- title: "`kpt`-based `agentk` is deprecated"
removal_milestone: "18.0"
announcement_milestone: "17.9"
breaking_change: true
window: 2
reporter: nagyv-gitlab
stage: deploy
issue_url: https://gitlab.com/gitlab-org/cluster-integration/gitlab-agent/-/issues/656
# Use the impact calculator https://gitlab-com.gitlab.io/gl-infra/breaking-change-impact-calculator/?
# https://gitlab-com.gitlab.io/gl-infra/breaking-change-impact-calculator/?usage=edge_case&migration_complexity=minor_manual&scope=project&identification_complexity=manual&additional_complexity=no&base_impact=major&pipeline_impact=none&compliance_impact=none&availability_impact=none&authorization_impact=none&API_impact=none
impact: low
scope: project
resolution_role: Maintainer
manual_task: true
body: | # (required) Don't change this line.
In GitLab 18.0, we'll remove support for the `kpt`-based installation of the agent for Kubernetes.
Instead, you should install the agent with one of the supported installation methods:

- Helm (recommended)
- GitLab CLI
- Flux

To migrate from `kpt` to Helm, follow [the agent installation documentation](https://docs.gitlab.com/ee/user/clusters/agent/install/) to overwrite your `kpt`-deployed `agentk` instance.

@@ -0,0 +1,23 @@
- title: "Support for project build as part of SpotBugs scans"
removal_milestone: "18.0"
announcement_milestone: "17.9"
breaking_change: false
window: 1 # Note: a change window is not applicable to a non-breaking change
reporter: thiagocsf
stage: application security testing
issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/513409
impact: low
scope: project
resolution_role: Developer
manual_task: true
body: | # (required) Don't change this line.
The SpotBugs [SAST analyzer](https://docs.gitlab.com/ee/user/application_security/sast/index.html#supported-languages-and-frameworks)
can perform a build when the artifacts to be scanned aren't present. While this usually works well for simple projects, it can fail on more complex builds.

From GitLab 18.0, to resolve SpotBugs analyzer build failures, you should:

1. [Pre-compile](https://docs.gitlab.com/ee/user/application_security/sast/#pre-compilation) the project.
1. Pass the artifacts you want to scan to the analyzer.
end_of_support_milestone: 18.0
tiers: [Free, Silver, Gold, Core, Premium, Ultimate]
documentation_url: https://docs.gitlab.com/ee/user/application_security/sast/troubleshooting.html#project-couldnt-be-built

|
@ -0,0 +1,8 @@
|
|||
---
|
||||
migration_job_name: BackfillBulkImportFailuresNamespaceId
|
||||
description: Backfills sharding key `bulk_import_failures.namespace_id` from `bulk_import_entities`.
|
||||
feature_category: importers
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180435
|
||||
milestone: '17.9'
|
||||
queued_migration_version: 20250205194756
|
||||
finalized_by: # version of the migration that finalized this BBM
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
---
|
||||
migration_job_name: BackfillBulkImportFailuresOrganizationId
|
||||
description: Backfills sharding key `bulk_import_failures.organization_id` from `bulk_import_entities`.
|
||||
feature_category: importers
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180435
|
||||
milestone: '17.9'
|
||||
queued_migration_version: 20250205194761
|
||||
finalized_by: # version of the migration that finalized this BBM
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
---
|
||||
migration_job_name: BackfillBulkImportFailuresProjectId
|
||||
description: Backfills sharding key `bulk_import_failures.project_id` from `bulk_import_entities`.
|
||||
feature_category: importers
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180435
|
||||
milestone: '17.9'
|
||||
queued_migration_version: 20250205194751
|
||||
finalized_by: # version of the migration that finalized this BBM
|
||||
|
|
@ -34,3 +34,7 @@ desired_sharding_key:
|
|||
table: bulk_import_entities
|
||||
sharding_key: organization_id
|
||||
belongs_to: entity
|
||||
desired_sharding_key_migration_job_name:
|
||||
- BackfillBulkImportFailuresProjectId
|
||||
- BackfillBulkImportFailuresNamespaceId
|
||||
- BackfillBulkImportFailuresOrganizationId
|
||||
|
|
|
|||
|
|
@ -0,0 +1,9 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddProjectIdToBulkImportFailures < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
|
||||
def change
|
||||
add_column :bulk_import_failures, :project_id, :bigint
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddNamespaceIdToBulkImportFailures < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
|
||||
def change
|
||||
add_column :bulk_import_failures, :namespace_id, :bigint
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddOrganizationIdToBulkImportFailures < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
|
||||
def change
|
||||
add_column :bulk_import_failures, :organization_id, :bigint
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class IndexBulkImportFailuresOnProjectId < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
disable_ddl_transaction!
|
||||
|
||||
INDEX_NAME = 'index_bulk_import_failures_on_project_id'
|
||||
|
||||
def up
|
||||
add_concurrent_index :bulk_import_failures, :project_id, name: INDEX_NAME
|
||||
end
|
||||
|
||||
def down
|
||||
remove_concurrent_index_by_name :bulk_import_failures, INDEX_NAME
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddBulkImportFailuresProjectIdFk < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
disable_ddl_transaction!
|
||||
|
||||
def up
|
||||
add_concurrent_foreign_key :bulk_import_failures, :projects, column: :project_id, on_delete: :cascade
|
||||
end
|
||||
|
||||
def down
|
||||
with_lock_retries do
|
||||
remove_foreign_key :bulk_import_failures, column: :project_id
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddBulkImportFailuresProjectIdTrigger < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
|
||||
def up
|
||||
install_sharding_key_assignment_trigger(
|
||||
table: :bulk_import_failures,
|
||||
sharding_key: :project_id,
|
||||
parent_table: :bulk_import_entities,
|
||||
parent_sharding_key: :project_id,
|
||||
foreign_key: :bulk_import_entity_id
|
||||
)
|
||||
end
|
||||
|
||||
def down
|
||||
remove_sharding_key_assignment_trigger(
|
||||
table: :bulk_import_failures,
|
||||
sharding_key: :project_id,
|
||||
parent_table: :bulk_import_entities,
|
||||
parent_sharding_key: :project_id,
|
||||
foreign_key: :bulk_import_entity_id
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class QueueBackfillBulkImportFailuresProjectId < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
|
||||
|
||||
MIGRATION = "BackfillBulkImportFailuresProjectId"
|
||||
DELAY_INTERVAL = 2.minutes
|
||||
BATCH_SIZE = 1000
|
||||
SUB_BATCH_SIZE = 100
|
||||
|
||||
def up
|
||||
queue_batched_background_migration(
|
||||
MIGRATION,
|
||||
:bulk_import_failures,
|
||||
:id,
|
||||
:project_id,
|
||||
:bulk_import_entities,
|
||||
:project_id,
|
||||
:bulk_import_entity_id,
|
||||
job_interval: DELAY_INTERVAL,
|
||||
batch_size: BATCH_SIZE,
|
||||
sub_batch_size: SUB_BATCH_SIZE
|
||||
)
|
||||
end
|
||||
|
||||
def down
|
||||
delete_batched_background_migration(
|
||||
MIGRATION,
|
||||
:bulk_import_failures,
|
||||
:id,
|
||||
[
|
||||
:project_id,
|
||||
:bulk_import_entities,
|
||||
:project_id,
|
||||
:bulk_import_entity_id
|
||||
]
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class IndexBulkImportFailuresOnNamespaceId < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
disable_ddl_transaction!
|
||||
|
||||
INDEX_NAME = 'index_bulk_import_failures_on_namespace_id'
|
||||
|
||||
def up
|
||||
add_concurrent_index :bulk_import_failures, :namespace_id, name: INDEX_NAME
|
||||
end
|
||||
|
||||
def down
|
||||
remove_concurrent_index_by_name :bulk_import_failures, INDEX_NAME
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddBulkImportFailuresNamespaceIdFk < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
disable_ddl_transaction!
|
||||
|
||||
def up
|
||||
add_concurrent_foreign_key :bulk_import_failures, :namespaces, column: :namespace_id, on_delete: :cascade
|
||||
end
|
||||
|
||||
def down
|
||||
with_lock_retries do
|
||||
remove_foreign_key :bulk_import_failures, column: :namespace_id
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddBulkImportFailuresNamespaceIdTrigger < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
|
||||
def up
|
||||
install_sharding_key_assignment_trigger(
|
||||
table: :bulk_import_failures,
|
||||
sharding_key: :namespace_id,
|
||||
parent_table: :bulk_import_entities,
|
||||
parent_sharding_key: :namespace_id,
|
||||
foreign_key: :bulk_import_entity_id
|
||||
)
|
||||
end
|
||||
|
||||
def down
|
||||
remove_sharding_key_assignment_trigger(
|
||||
table: :bulk_import_failures,
|
||||
sharding_key: :namespace_id,
|
||||
parent_table: :bulk_import_entities,
|
||||
parent_sharding_key: :namespace_id,
|
||||
foreign_key: :bulk_import_entity_id
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class QueueBackfillBulkImportFailuresNamespaceId < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
|
||||
|
||||
MIGRATION = "BackfillBulkImportFailuresNamespaceId"
|
||||
DELAY_INTERVAL = 2.minutes
|
||||
BATCH_SIZE = 1000
|
||||
SUB_BATCH_SIZE = 100
|
||||
|
||||
def up
|
||||
queue_batched_background_migration(
|
||||
MIGRATION,
|
||||
:bulk_import_failures,
|
||||
:id,
|
||||
:namespace_id,
|
||||
:bulk_import_entities,
|
||||
:namespace_id,
|
||||
:bulk_import_entity_id,
|
||||
job_interval: DELAY_INTERVAL,
|
||||
batch_size: BATCH_SIZE,
|
||||
sub_batch_size: SUB_BATCH_SIZE
|
||||
)
|
||||
end
|
||||
|
||||
def down
|
||||
delete_batched_background_migration(
|
||||
MIGRATION,
|
||||
:bulk_import_failures,
|
||||
:id,
|
||||
[
|
||||
:namespace_id,
|
||||
:bulk_import_entities,
|
||||
:namespace_id,
|
||||
:bulk_import_entity_id
|
||||
]
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class IndexBulkImportFailuresOnOrganizationId < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
disable_ddl_transaction!
|
||||
|
||||
INDEX_NAME = 'index_bulk_import_failures_on_organization_id'
|
||||
|
||||
def up
|
||||
add_concurrent_index :bulk_import_failures, :organization_id, name: INDEX_NAME
|
||||
end
|
||||
|
||||
def down
|
||||
remove_concurrent_index_by_name :bulk_import_failures, INDEX_NAME
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddBulkImportFailuresOrganizationIdFk < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
disable_ddl_transaction!
|
||||
|
||||
def up
|
||||
add_concurrent_foreign_key :bulk_import_failures, :organizations, column: :organization_id, on_delete: :cascade
|
||||
end
|
||||
|
||||
def down
|
||||
with_lock_retries do
|
||||
remove_foreign_key :bulk_import_failures, column: :organization_id
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddBulkImportFailuresOrganizationIdTrigger < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
|
||||
def up
|
||||
install_sharding_key_assignment_trigger(
|
||||
table: :bulk_import_failures,
|
||||
sharding_key: :organization_id,
|
||||
parent_table: :bulk_import_entities,
|
||||
parent_sharding_key: :organization_id,
|
||||
foreign_key: :bulk_import_entity_id
|
||||
)
|
||||
end
|
||||
|
||||
def down
|
||||
remove_sharding_key_assignment_trigger(
|
||||
table: :bulk_import_failures,
|
||||
sharding_key: :organization_id,
|
||||
parent_table: :bulk_import_entities,
|
||||
parent_sharding_key: :organization_id,
|
||||
foreign_key: :bulk_import_entity_id
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class QueueBackfillBulkImportFailuresOrganizationId < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
|
||||
|
||||
MIGRATION = "BackfillBulkImportFailuresOrganizationId"
|
||||
DELAY_INTERVAL = 2.minutes
|
||||
BATCH_SIZE = 1000
|
||||
SUB_BATCH_SIZE = 100
|
||||
|
||||
def up
|
||||
queue_batched_background_migration(
|
||||
MIGRATION,
|
||||
:bulk_import_failures,
|
||||
:id,
|
||||
:organization_id,
|
||||
:bulk_import_entities,
|
||||
:organization_id,
|
||||
:bulk_import_entity_id,
|
||||
job_interval: DELAY_INTERVAL,
|
||||
batch_size: BATCH_SIZE,
|
||||
sub_batch_size: SUB_BATCH_SIZE
|
||||
)
|
||||
end
|
||||
|
||||
def down
|
||||
delete_batched_background_migration(
|
||||
MIGRATION,
|
||||
:bulk_import_failures,
|
||||
:id,
|
||||
[
|
||||
:organization_id,
|
||||
:bulk_import_entities,
|
||||
:organization_id,
|
||||
:bulk_import_entity_id
|
||||
]
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1 @@
|
|||
52712f6673056c27341c0b8981fc6e924e28a11a047472f1f59ba10552afb0c9
|
||||
|
|
@ -0,0 +1 @@
|
|||
1826e690211f4de94c2c8c235fcd23ad9868469b277d475e175e8534d654d064
|
||||
|
|
@ -0,0 +1 @@
|
|||
5bd5f02ddfc2b28cb62879d42d1145823002976560e5e88bafb5391f8686f50c
|
||||
|
|
@ -0,0 +1 @@
|
|||
c07b8cf01fa5c903693ded23b53cace5868fef5c0200fa129f61bc94100dd89c
|
||||
|
|
@ -0,0 +1 @@
|
|||
fdf2ce5da9d2b550e3c32a2cef6ff8d4289720710dc6030a6c8b8eb2ae3c2be7
|
||||
|
|
@ -0,0 +1 @@
|
|||
67eb82e6774e8472e8e88e5c549a03747faa2c41d76c83668a04fbb2c506edca
|
||||
|
|
@ -0,0 +1 @@
|
|||
a13a7693c5a4f67bb590c1f05842b65d8b04d40e1c83a44eec8d1775531b5360
|
||||
|
|
@ -0,0 +1 @@
|
|||
289108e9ba6d848506b00d3566a3f1c9bff4773dd726e2f01ce8822e9aedb19d
|
||||
|
|
@ -0,0 +1 @@
|
|||
29d384cb9a5a1be7a91dafd1d94c94db0f4b869994ba5fb7755d5cf6f490f908
|
||||
|
|
@ -0,0 +1 @@
|
|||
ee612f8ca1b98df84b025491cdc1b918776eaeca56935da53d2a275c406c28e1
|
||||
|
|
@ -0,0 +1 @@
|
|||
06a8ce5524fd572a45e227c867e82a32a0ad0df5699905ce753679d5069bff84
|
||||
|
|
@ -0,0 +1 @@
|
|||
ccd7b40c4d897a0ba4554bd655dc5c951d4382293efd5bc1f690019d4d8c093c
|
||||
|
|
@ -0,0 +1 @@
|
|||
4a0bf49e38e6bd69c194150d3b524e64e362fd401e06bcb95b7d5ef3dcdade40
|
||||
|
|
@ -0,0 +1 @@
|
|||
0d6cf422a4cc20ec42cbf4651f7ae204c94e9f1a566abd3c3acec21a0c1707f5
|
||||
|
|
@ -0,0 +1 @@
|
|||
667b89da80348593027e2ba887e8df8abcbc1e30d62ed9f2081c6b0e2d7953ef
|
||||
|
|
@ -1755,6 +1755,22 @@ RETURN NEW;
|
|||
END
|
||||
$$;
|
||||
|
||||
CREATE FUNCTION trigger_36cb404f9a02() RETURNS trigger
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
BEGIN
|
||||
IF NEW."organization_id" IS NULL THEN
|
||||
SELECT "organization_id"
|
||||
INTO NEW."organization_id"
|
||||
FROM "bulk_import_entities"
|
||||
WHERE "bulk_import_entities"."id" = NEW."bulk_import_entity_id";
|
||||
END IF;
|
||||
|
||||
RETURN NEW;
|
||||
|
||||
END
|
||||
$$;
|
||||
|
||||
CREATE FUNCTION trigger_388de55cd36c() RETURNS trigger
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
|
|
@ -2475,6 +2491,22 @@ RETURN NEW;
|
|||
END
|
||||
$$;
|
||||
|
||||
CREATE FUNCTION trigger_7b21c87a1f91() RETURNS trigger
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
BEGIN
|
||||
IF NEW."project_id" IS NULL THEN
|
||||
SELECT "project_id"
|
||||
INTO NEW."project_id"
|
||||
FROM "bulk_import_entities"
|
||||
WHERE "bulk_import_entities"."id" = NEW."bulk_import_entity_id";
|
||||
END IF;
|
||||
|
||||
RETURN NEW;
|
||||
|
||||
END
|
||||
$$;
|
||||
|
||||
CREATE FUNCTION trigger_7b378a0c402b() RETURNS trigger
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
|
|
@ -2664,6 +2696,22 @@ RETURN NEW;
|
|||
END
|
||||
$$;
|
||||
|
||||
CREATE FUNCTION trigger_8cb8ad095bf6() RETURNS trigger
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
BEGIN
|
||||
IF NEW."namespace_id" IS NULL THEN
|
||||
SELECT "namespace_id"
|
||||
INTO NEW."namespace_id"
|
||||
FROM "bulk_import_entities"
|
||||
WHERE "bulk_import_entities"."id" = NEW."bulk_import_entity_id";
|
||||
END IF;
|
||||
|
||||
RETURN NEW;
|
||||
|
||||
END
|
||||
$$;
|
||||
|
||||
CREATE FUNCTION trigger_8d002f38bdef() RETURNS trigger
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
|
|
@ -9836,6 +9884,9 @@ CREATE TABLE bulk_import_failures (
|
|||
source_url text,
|
||||
source_title text,
|
||||
subrelation text,
|
||||
project_id bigint,
|
||||
namespace_id bigint,
|
||||
organization_id bigint,
|
||||
CONSTRAINT check_053d65c7a4 CHECK ((char_length(pipeline_class) <= 255)),
|
||||
CONSTRAINT check_6eca8f972e CHECK ((char_length(exception_message) <= 255)),
|
||||
CONSTRAINT check_721a422375 CHECK ((char_length(pipeline_step) <= 255)),
|
||||
|
|
@ -31690,6 +31741,12 @@ CREATE INDEX index_bulk_import_failures_on_bulk_import_entity_id ON bulk_import_
|
|||
|
||||
CREATE INDEX index_bulk_import_failures_on_correlation_id_value ON bulk_import_failures USING btree (correlation_id_value);
|
||||
|
||||
CREATE INDEX index_bulk_import_failures_on_namespace_id ON bulk_import_failures USING btree (namespace_id);
|
||||
|
||||
CREATE INDEX index_bulk_import_failures_on_organization_id ON bulk_import_failures USING btree (organization_id);
|
||||
|
||||
CREATE INDEX index_bulk_import_failures_on_project_id ON bulk_import_failures USING btree (project_id);
|
||||
|
||||
CREATE INDEX index_bulk_import_trackers_on_namespace_id ON bulk_import_trackers USING btree (namespace_id);
|
||||
|
||||
CREATE INDEX index_bulk_import_trackers_on_organization_id ON bulk_import_trackers USING btree (organization_id);
|
||||
|
|
@ -38146,6 +38203,8 @@ CREATE TRIGGER trigger_2dafd0d13605 BEFORE INSERT OR UPDATE ON pages_domain_acme
|
|||
|
||||
CREATE TRIGGER trigger_30209d0fba3e BEFORE INSERT OR UPDATE ON alert_management_alert_user_mentions FOR EACH ROW EXECUTE FUNCTION trigger_30209d0fba3e();
|
||||
|
||||
CREATE TRIGGER trigger_36cb404f9a02 BEFORE INSERT OR UPDATE ON bulk_import_failures FOR EACH ROW EXECUTE FUNCTION trigger_36cb404f9a02();
|
||||
|
||||
CREATE TRIGGER trigger_388de55cd36c BEFORE INSERT OR UPDATE ON ci_builds_runner_session FOR EACH ROW EXECUTE FUNCTION trigger_388de55cd36c();
|
||||
|
||||
CREATE TRIGGER trigger_38bfee591e40 BEFORE INSERT OR UPDATE ON dependency_proxy_blob_states FOR EACH ROW EXECUTE FUNCTION trigger_38bfee591e40();
|
||||
|
|
@ -38236,6 +38295,8 @@ CREATE TRIGGER trigger_7943cb549289 BEFORE INSERT OR UPDATE ON issuable_metric_i
|
|||
|
||||
CREATE TRIGGER trigger_7a8b08eed782 BEFORE INSERT OR UPDATE ON boards_epic_board_positions FOR EACH ROW EXECUTE FUNCTION trigger_7a8b08eed782();
|
||||
|
||||
CREATE TRIGGER trigger_7b21c87a1f91 BEFORE INSERT OR UPDATE ON bulk_import_failures FOR EACH ROW EXECUTE FUNCTION trigger_7b21c87a1f91();
|
||||
|
||||
CREATE TRIGGER trigger_7b378a0c402b BEFORE INSERT OR UPDATE ON issue_user_mentions FOR EACH ROW EXECUTE FUNCTION trigger_7b378a0c402b();
|
||||
|
||||
CREATE TRIGGER trigger_7de792ddbc05 BEFORE INSERT OR UPDATE ON dast_site_validations FOR EACH ROW EXECUTE FUNCTION trigger_7de792ddbc05();
|
||||
|
|
@ -38266,6 +38327,8 @@ CREATE TRIGGER trigger_8b39d532224c BEFORE INSERT OR UPDATE ON ci_secure_file_st
|
|||
|
||||
CREATE TRIGGER trigger_8ba074736a77 BEFORE INSERT OR UPDATE ON snippet_repository_storage_moves FOR EACH ROW EXECUTE FUNCTION trigger_8ba074736a77();
|
||||
|
||||
CREATE TRIGGER trigger_8cb8ad095bf6 BEFORE INSERT OR UPDATE ON bulk_import_failures FOR EACH ROW EXECUTE FUNCTION trigger_8cb8ad095bf6();
|
||||
|
||||
CREATE TRIGGER trigger_8d002f38bdef BEFORE INSERT OR UPDATE ON packages_debian_group_components FOR EACH ROW EXECUTE FUNCTION trigger_8d002f38bdef();
|
||||
|
||||
CREATE TRIGGER trigger_8d17725116fe BEFORE INSERT OR UPDATE ON merge_request_reviewers FOR EACH ROW EXECUTE FUNCTION trigger_8d17725116fe();
|
||||
|
|
@ -39281,6 +39344,9 @@ ALTER TABLE ONLY deploy_tokens
|
|||
ALTER TABLE ONLY oauth_openid_requests
|
||||
ADD CONSTRAINT fk_7092424b77 FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY bulk_import_failures
|
||||
ADD CONSTRAINT fk_70f30b02fd FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY protected_branch_push_access_levels
|
||||
ADD CONSTRAINT fk_7111b68cdb FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
|
||||
|
||||
|
|
@ -39500,6 +39566,9 @@ ALTER TABLE ONLY protected_branch_merge_access_levels
|
|||
ALTER TABLE ONLY work_item_dates_sources
|
||||
ADD CONSTRAINT fk_8a4948b668 FOREIGN KEY (start_date_sourcing_work_item_id) REFERENCES issues(id) ON DELETE SET NULL;
|
||||
|
||||
ALTER TABLE ONLY bulk_import_failures
|
||||
ADD CONSTRAINT fk_8c0911e763 FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY bulk_import_exports
|
||||
ADD CONSTRAINT fk_8c6f33cebe FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
|
||||
|
||||
|
|
@ -40124,6 +40193,9 @@ ALTER TABLE ONLY personal_access_tokens
|
|||
ALTER TABLE ONLY project_group_links
|
||||
ADD CONSTRAINT fk_daa8cee94c FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY bulk_import_failures
|
||||
ADD CONSTRAINT fk_dad28985ee FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY project_topics
|
||||
ADD CONSTRAINT fk_db13576296 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,19 @@
|
|||
---
|
||||
# Warning: gitlab_base.NonStandardSpaces
|
||||
#
|
||||
# Use only standard spaces. Do not use:
|
||||
#
|
||||
# U+202F : NARROW NO-BREAK SPACE [NNBSP]
|
||||
# U+00A0 : NO-BREAK SPACE [NBSP]
|
||||
# U+200B : ZERO WIDTH SPACE [ZWSP]
|
||||
#
|
||||
# For a list of all options, see https://vale.sh/docs/topics/styles/
|
||||
extends: existence
|
||||
message: "Use standard spaces only. Do not use no-break or zero width spaces."
|
||||
vocab: false
|
||||
level: error
|
||||
ignorecase: true
|
||||
link: https://docs.gitlab.com/ee/development/documentation/styleguide/index.html#punctuation
|
||||
scope: raw
|
||||
raw:
|
||||
- '[ ]'
|
||||
|
|
@@ -78,23 +78,7 @@ On the **primary** site:
## Reset verification for projects where verification has failed

Geo actively tries to correct verification failures marking the repository to
be resynced with a back-off period. If you want to reset them manually, this
Rake task marks projects where verification has failed or the checksum mismatch
to be resynced without the back-off period:

Run the appropriate commands on a **Rails node on the secondary** site.

For repositories:

```shell
sudo gitlab-rake geo:verification:repository:reset
```

For wikis:

```shell
sudo gitlab-rake geo:verification:wiki:reset
```
be resynced with a back-off period. You can also manually [resync and reverify individual components through the UI or the Rails console](../replication/troubleshooting/synchronization_verification.md#resync-and-reverify-individual-components).

## Reconcile differences with checksum mismatches

@@ -484,16 +484,6 @@ See the [PostgreSQL wiki for more details](https://wiki.postgresql.org/wiki/Loca

This section documents common error messages reported in the **Admin** area on the web interface, and how to fix them.

### Geo database configuration file is missing

GitLab cannot find or doesn't have permission to access the `database_geo.yml` configuration file.

In a Linux package installation, the file should be in `/var/opt/gitlab/gitlab-rails/etc`.
If it doesn't exist or inadvertent changes have been made to it, run `sudo gitlab-ctl reconfigure` to restore it to its correct state.

If this path is mounted on a remote volume, ensure your volume configuration
has the correct permissions.

### An existing tracking database cannot be reused

Geo cannot reuse an existing tracking database.

@@ -12,6 +12,13 @@ While existing content will be automatically updated, any new or modified docume

For the latest migration status, see [this issue](https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/-/issues/44).

## New project

The new Docs website is in the [`gitlab-org/technical-writing/docs-gitlab-com`](https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com) project.

After launch, all issues from the [original `gitlab-org/gitlab-docs` project](https://gitlab.com/gitlab-org/gitlab-docs)
will be moved over to the new one, or closed if they're no longer applicable.

## Formatting changes

### Page titles

@@ -120,7 +127,7 @@ With Hugo, these will no longer have any effect. They will render as plain text.

**Why:** Hugo uses the Goldmark Markdown rendering engine, not Kramdown.

**When:** After launch.
**When:** At this time, avoid adding new Kramdown tags. Support for these is dropped entirely after launch.

**Testing:** We are running an audit job on the CI pipeline for Kramdown tags ([example](https://gitlab.com/gitlab-org/technical-writing-group/gitlab-docs-hugo/-/jobs/8885163533)).
These tags will be manually removed as part of launch.

@@ -260,3 +260,21 @@ resources:
| Small | 2 vCPUs, 8 GB RAM | Single instance | 40 | Fixed deployment; no autoscaling. |
| Medium | AWS t3.2xlarge | Single instance | 160 | HPA based on CPU or latency thresholds. |
| Large | Multiple t3.2xlarge | Clustered instances | 160 per instance | HPA + node autoscaling for high demand. |

## Support multiple GitLab instances

You can deploy a single AI gateway to support multiple GitLab instances, or deploy separate AI gateways per instance or geographic region. To help decide which is appropriate, consider:

- Expected traffic of approximately seven requests per second per 1,000 billable users. For example, an installation with 5,000 billable users generates roughly 35 requests per second under this model.
- Resource requirements based on total concurrent requests across all instances.
- Best practice authentication configuration for each GitLab instance.

## Co-locate your AI gateway and instance

The AI gateway is available in multiple regions globally to ensure optimal performance for users regardless of location. Co-locating it with your GitLab instance provides:

- Improved response times for Duo features.
- Reduced latency for geographically distributed users.
- Compliance with data sovereignty requirements.

You should locate your AI gateway in the same geographic region as your GitLab instance to help provide a frictionless developer experience, particularly for latency-sensitive features like Code Suggestions.

@@ -67,6 +67,7 @@ This window takes place on April 28 - 30, 2025 from 09:00 UTC to 22:00 UTC.
| [`mergeTrainIndex` and `mergeTrainsCount` GraphQL fields deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/473759) | Low | Verify | Project |
| [RunnersRegistrationTokenReset GraphQL mutation is deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/505703) | High | Verify | Instance, group, project |
| [Behavior change for Upcoming and Started milestone filters](https://gitlab.com/gitlab-org/gitlab/-/issues/501294) | Low | Plan | Group, project |
| [`kpt`-based `agentk` is deprecated](https://gitlab.com/gitlab-org/cluster-integration/gitlab-agent/-/issues/656) | Low | Deploy | Project |

## Window 3

@ -1587,6 +1587,29 @@ SLES 15 SP6 for continued support.
|
|||
|
||||
</div>
|
||||
|
||||
<div class="deprecation " data-milestone="18.0">
|
||||
|
||||
### Support for project build as part of SpotBugs scans
|
||||
|
||||
<div class="deprecation-notes">
|
||||
|
||||
- Announced in GitLab <span class="milestone">17.9</span>
|
||||
- End of Support in GitLab <span class="milestone">18.0</span>
|
||||
- Removal in GitLab <span class="milestone">18.0</span>
|
||||
- To discuss this change or learn more, see the [deprecation issue](https://gitlab.com/gitlab-org/gitlab/-/issues/513409).
|
||||
|
||||
</div>
|
||||
|
||||
The SpotBugs [SAST analyzer](https://docs.gitlab.com/ee/user/application_security/sast/index.html#supported-languages-and-frameworks)
|
||||
can perform a build when the artifacts to be scanned aren't present. While this usually works well for simple projects, it can fail on more complex builds.
|
||||
|
||||
From GitLab 18.0, to resolve SpotBugs analyzer build failures, you should:
|
||||
|
||||
1. [Pre-compile](https://docs.gitlab.com/ee/user/application_security/sast/#pre-compilation) the project.
|
||||
1. Pass the artifacts you want to scan to the analyzer.
|
||||
|
||||
</div>
|
||||
|
||||
<div class="deprecation breaking-change" data-milestone="18.0">
|
||||
|
||||
### The GitLab legacy requirement IID is deprecated in favor of work item IID
|
||||
|
|
@ -1859,6 +1882,29 @@ For migration instructions, see
|
|||
|
||||
<div class="deprecation breaking-change" data-milestone="18.0">
|
||||
|
||||
### `kpt`-based `agentk` is deprecated
|
||||
|
||||
<div class="deprecation-notes">
|
||||
|
||||
- Announced in GitLab <span class="milestone">17.9</span>
|
||||
- Removal in GitLab <span class="milestone">18.0</span> ([breaking change](https://docs.gitlab.com/ee/update/terminology.html#breaking-change))
|
||||
- To discuss this change or learn more, see the [deprecation issue](https://gitlab.com/gitlab-org/cluster-integration/gitlab-agent/-/issues/656).
|
||||
|
||||
</div>
|
||||
|
||||
In GitLab 18.0, we'll remove support for the `kpt`-based installation of the agent for Kubernetes.
|
||||
Instead, you should install the agent with one of the supported installation methods:
|
||||
|
||||
- Helm (recommended)
|
||||
- GitLab CLI
|
||||
- Flux
|
||||
|
||||
To migrate from `kpt` to Helm, follow [the agent installation documentation](https://docs.gitlab.com/ee/user/clusters/agent/install/) to overwrite your `kpt`-deployed `agentk` instance.
|
||||
|
||||
</div>
|
||||
|
||||
<div class="deprecation breaking-change" data-milestone="18.0">
|
||||
|
||||
### `maxHoursBeforeTermination` GraphQL field is deprecated
|
||||
|
||||
<div class="deprecation-notes">
|
||||
|
|
|
|||
|
|
@@ -500,7 +500,7 @@ Automatic compilation can fail if:
- your project requires custom build configurations.
- you use language versions that aren't built into the analyzer.

To resolve these issues, you can skip the analyzer's compilation step and directly provide artifacts from an earlier stage in your pipeline instead.
To resolve these issues, you should skip the analyzer's compilation step and directly provide artifacts from an earlier stage in your pipeline instead.
This strategy is called _pre-compilation_.

To use pre-compilation:

@@ -121,13 +121,22 @@ For Maven builds, add the following to your `pom.xml` file:

### Project couldn't be built

If your job is failing at the build step with the message "Project couldn't be built", it's most likely because your job is asking SpotBugs to build with a tool that isn't part of its default tools. For a list of the SpotBugs default tools, see [SpotBugs' asdf dependencies](https://gitlab.com/gitlab-org/security-products/analyzers/spotbugs/-/blob/master/config/.gl-tool-versions).
If your `spotbugs-sast` job is failing at the build step with the message "Project couldn't be built", it's most likely because:

The solution is to use [pre-compilation](_index.md#pre-compilation). Pre-compilation ensures the images required by SpotBugs are available in the job's container.
- Your project is asking SpotBugs to build with a tool that isn't part of its default tools. For a list of the SpotBugs default tools, see [SpotBugs' asdf dependencies](https://gitlab.com/gitlab-org/security-products/analyzers/spotbugs/-/blob/master/config/.gl-tool-versions).
- Your build needs custom configurations or additional dependencies that the analyzer's automatic build process can't accommodate.

The SpotBugs-based analyzer is only used for scanning Groovy code, but it may trigger in other cases, such as [when all SAST jobs run unexpectedly](#sast-jobs-run-unexpectedly).

The solution depends on whether you need to scan Groovy code:

- If you don't have any Groovy code, or don't need to scan it, you should [disable the SpotBugs analyzer](analyzers.md#disable-specific-default-analyzers).
- If you do need to scan Groovy code, you should use [pre-compilation](_index.md#pre-compilation).
Pre-compilation avoids these failures by scanning an artifact you've already built in your pipeline, rather than trying to compile it in the `spotbugs-sast` job.

### Java out of memory error

When a SAST job is running you might get an error that states `java.lang.OutOfMemoryError`. This issue occurs when Java has run out of memory.
When a `spotbugs-sast` job is running you might get an error that states `java.lang.OutOfMemoryError`. This issue occurs when Java has run out of memory while scanning.

To try to resolve this issue you can:

@@ -27,10 +27,11 @@ For projects and groups the Vulnerability Report contains:

- Totals of vulnerabilities per severity level.
- Filters for common vulnerability attributes.
- Details of each vulnerability, presented in tabular layout. For some vulnerabilities, the details
include a link to the relevant file and line number, in the default branch.
- Details of each vulnerability, presented in a table.

For projects the Vulnerability Report also contains:
For some vulnerabilities, the details include a link to the relevant file and line number in the default branch. For CVE vulnerabilities, you can also view the KEV status and the CVSS and EPSS scores in the vulnerability report. For more details on the security scores, see [vulnerability risk assessment data](../vulnerabilities/risk_assessment_data.md).

For projects, the Vulnerability Report also contains:

- A time stamp showing when it was updated, including a link to the latest pipeline. This is updated only when a pipeline is run against the default branch.
- The number of failures that occurred in the most recent pipeline. Select the failure

@@ -174,7 +174,8 @@ To configure this URL, use one of these methods:
Create or edit the `.npmrc` file in your project root:

```plaintext
@scope:registry=https://gitlab.example.com/api/v4/projects/<project_id>/packages/npm/ //gitlab.example.com/api/v4/projects/<project_id>/packages/npm/:_authToken="${NPM_TOKEN}"
@scope:registry=https://gitlab.example.com/api/v4/projects/<project_id>/packages/npm/
//gitlab.example.com/api/v4/projects/<project_id>/packages/npm/:_authToken="${NPM_TOKEN}"
```

:::TabTitle `npm config`

@ -0,0 +1,10 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module BackgroundMigration
|
||||
class BackfillBulkImportFailuresNamespaceId < BackfillDesiredShardingKeyJob
|
||||
operation_name :backfill_bulk_import_failures_namespace_id
|
||||
feature_category :importers
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module BackgroundMigration
|
||||
class BackfillBulkImportFailuresOrganizationId < BackfillDesiredShardingKeyJob
|
||||
operation_name :backfill_bulk_import_failures_organization_id
|
||||
feature_category :importers
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module BackgroundMigration
|
||||
class BackfillBulkImportFailuresProjectId < BackfillDesiredShardingKeyJob
|
||||
operation_name :backfill_bulk_import_failures_project_id
|
||||
feature_category :importers
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -10768,6 +10768,9 @@ msgstr ""
|
|||
msgid "BulkImport|Import without projects"
|
||||
msgstr ""
|
||||
|
||||
msgid "BulkImport|Importing projects is a %{docsLinkStart}beta%{docsLinkEnd} feature."
|
||||
msgstr ""
|
||||
|
||||
msgid "BulkImport|Importing the group failed."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -10828,9 +10831,6 @@ msgstr ""
msgid "BulkImport|Placeholder reassignments completed with errors"
msgstr ""

msgid "BulkImport|Please note: importing projects is a %{docsLinkStart}beta%{docsLinkEnd} feature."
msgstr ""

msgid "BulkImport|Please select a parent group."
msgstr ""
@ -62,7 +62,7 @@
    "@gitlab/fonts": "^1.3.0",
    "@gitlab/query-language-rust": "0.3.2",
    "@gitlab/svgs": "3.123.0",
    "@gitlab/ui": "108.1.0",
    "@gitlab/ui": "108.2.0",
    "@gitlab/vue-router-vue3": "npm:vue-router@4.1.6",
    "@gitlab/vuex-vue3": "npm:vuex@4.0.0",
    "@gitlab/web-ide": "^0.0.1-dev-20250128095641",
@ -233,10 +233,8 @@ spec/frontend/ref/init_ambiguous_ref_modal_spec.js
spec/frontend/releases/components/app_edit_new_spec.js
spec/frontend/releases/components/asset_links_form_spec.js
spec/frontend/repository/components/header_area/blob_controls_spec.js
spec/frontend/repository/components/header_area/blob_overflow_menu_spec.js
spec/frontend/repository/components/table/index_spec.js
spec/frontend/repository/components/table/row_spec.js
spec/frontend/repository/router_spec.js
spec/frontend/search/sidebar/components/checkbox_filter_spec.js
spec/frontend/search/topbar/components/app_spec.js
spec/frontend/sessions/new/components/email_verification_spec.js
@ -36,22 +36,6 @@ then
  ((ERRORCODE++))
fi

# Test for non-standard spaces (NBSP, NNBSP, ZWSP) in documentation.
# shellcheck disable=2059
printf "${COLOR_GREEN}INFO: Checking for non-standard spaces...${COLOR_RESET}\n"
if grep --extended-regexp --binary-file=without-match --recursive '[ ]' doc/ >/dev/null 2>&1;
then
  # shellcheck disable=2059
  printf "${COLOR_RED}ERROR: Non-standard spaces (NBSP, NNBSP, ZWSP) should not be used in documentation!${COLOR_RESET}"
  printf " https://docs.gitlab.com/ee/development/documentation/styleguide/index.html#spaces-between-words\n"
  printf "Replace with standard spaces:\n" >&2
  # Find the spaces, then add color codes with sed to highlight each NBSP or NNBSP in the output.
  # shellcheck disable=SC1018
  grep --extended-regexp --binary-file=without-match --recursive --color=auto '[ ]' doc \
    | sed -e ''/ /s//"$(printf "\033[0;101m \033[0m")"/'' -e ''/ /s//"$(printf "\033[0;101m \033[0m")"/''
  ((ERRORCODE++))
fi

# Ensure that the CHANGELOG.md does not contain duplicate versions
DUPLICATE_CHANGELOG_VERSIONS=$(grep --extended-regexp '^## .+' CHANGELOG.md | sed -E 's| \(.+\)||' | sort -r | uniq -d)
# shellcheck disable=2059
@ -10,5 +10,13 @@ FactoryBot.define do
    trait :started do
      status { 1 }
    end

    trait :finished do
      status { 2 }
    end

    trait :stale do
      created_at { (Projects::ImportExport::RelationImportTracker::STALE_TIMEOUT + 1).ago }
    end
  end
end
@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
import { GlButton } from '@gitlab/ui';

import ImportHistoryLink from '~/import_entities/import_groups/components/import_history_link.vue';
@ -17,7 +17,7 @@ describe('import history link', () => {
    });
  };

  const findGlLink = () => wrapper.findComponent(GlLink);
  const findButton = () => wrapper.findComponent(GlButton);

  it('renders link with href', () => {
    const mockId = 174;
@ -28,7 +28,7 @@ describe('import history link', () => {
      },
    });

    expect(findGlLink().text()).toBe('Migration details >');
    expect(findGlLink().attributes('href')).toBe('/import/174/history');
    expect(findButton().text()).toBe('Migration details');
    expect(findButton().attributes('href')).toBe('/import/174/history');
  });
});
@ -1,226 +0,0 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SearchableList from '~/ml/model_registry/components/searchable_list.vue';
import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
import RegistrySearch from '~/vue_shared/components/registry/registry_search.vue';
import { BASE_SORT_FIELDS } from '~/ml/model_registry/constants';
import * as urlHelpers from '~/lib/utils/url_utility';
import LoadOrErrorOrShow from '~/ml/model_registry/components/load_or_error_or_show.vue';
import { defaultPageInfo } from '../mock_data';

describe('ml/model_registry/components/searchable_list.vue', () => {
  let wrapper;

  const findLoadOrErrorOrShow = () => wrapper.findComponent(LoadOrErrorOrShow);
  const findRegistryList = () => wrapper.findComponent(RegistryList);
  const findEmptyState = () => wrapper.findByTestId('empty-state-slot');
  const findFirstRow = () => wrapper.findByTestId('element');
  const findRows = () => wrapper.findAllByTestId('element');
  const findSearch = () => wrapper.findComponent(RegistrySearch);

  const expectedFirstPage = {
    after: 'eyJpZCI6IjIifQ',
    first: 30,
    last: null,
    orderBy: 'created_at',
    sort: 'desc',
  };

  const defaultProps = {
    items: ['a', 'b', 'c'],
    pageInfo: defaultPageInfo,
    isLoading: false,
    errorMessage: '',
    showSearch: false,
    sortableFields: [],
  };

  const mountComponent = (props = {}) => {
    wrapper = shallowMountExtended(SearchableList, {
      propsData: {
        ...defaultProps,
        ...props,
      },
      stubs: {
        RegistryList,
      },
      slots: {
        'empty-state': '<div data-testid="empty-state-slot">This is empty</div>',
        item: '<div data-testid="element"></div>',
      },
    });
  };

  describe('when list is loaded and has no data', () => {
    beforeEach(() => mountComponent({ items: [] }));

    it('shows empty state', () => {
      expect(findEmptyState().text()).toBe('This is empty');
    });

    it('does not display loader', () => {
      expect(findLoadOrErrorOrShow().props('isLoading')).toBe(false);
    });

    it('does not display rows', () => {
      expect(findFirstRow().exists()).toBe(false);
    });

    it('does not display registry list', () => {
      expect(findRegistryList().exists()).toBe(false);
    });

    it('Does not display error message', () => {
      expect(findLoadOrErrorOrShow().props('errorMessage')).toBe('');
    });
  });

  describe('if errorMessage', () => {
    beforeEach(() => mountComponent({ errorMessage: 'Failure!' }));

    it('shows error message', () => {
      expect(findLoadOrErrorOrShow().props('errorMessage')).toContain('Failure!');
    });
  });

  describe('if loading', () => {
    beforeEach(() => mountComponent({ isLoading: true }));

    it('shows loader', () => {
      expect(findLoadOrErrorOrShow().props('isLoading')).toBe(true);
    });
  });

  describe('when list is loaded with data', () => {
    beforeEach(() => mountComponent());

    it('displays package registry list', () => {
      expect(findRegistryList().exists()).toEqual(true);
    });

    it('binds the right props', () => {
      expect(findRegistryList().props()).toMatchObject({
        items: ['a', 'b', 'c'],
        isLoading: false,
        pagination: defaultPageInfo,
        hiddenDelete: true,
      });
    });

    it('displays package version rows', () => {
      expect(findRows().exists()).toEqual(true);
      expect(findRows()).toHaveLength(3);
    });

    it('does not display loader', () => {
      expect(findLoadOrErrorOrShow().props('isLoading')).toBe(false);
    });

    it('does not display empty state', () => {
      expect(findEmptyState().exists()).toBe(false);
    });
  });

  describe('when user interacts with pagination', () => {
    beforeEach(() => mountComponent());

    it('when it is created emits fetch-page to get first page', () => {
      mountComponent({ showSearch: true, sortableFields: BASE_SORT_FIELDS });

      expect(wrapper.emitted('fetch-page')).toEqual([[expectedFirstPage]]);
    });

    it('when list emits next-page emits fetchPage with correct pageInfo', () => {
      findRegistryList().vm.$emit('next-page');

      const expectedNewPageInfo = {
        after: 'eyJpZCI6IjIifQ',
        first: 30,
        last: null,
        orderBy: 'created_at',
        sort: 'desc',
      };

      expect(wrapper.emitted('fetch-page')).toEqual([[expectedFirstPage], [expectedNewPageInfo]]);
    });

    it('when list emits prev-page emits fetchPage with correct pageInfo', () => {
      findRegistryList().vm.$emit('prev-page');

      const expectedNewPageInfo = {
        before: 'eyJpZCI6IjE2In0',
        first: null,
        last: 30,
        orderBy: 'created_at',
        sort: 'desc',
      };

      expect(wrapper.emitted('fetch-page')).toEqual([[expectedFirstPage], [expectedNewPageInfo]]);
    });
  });

  describe('search', () => {
    beforeEach(() => {
      jest.spyOn(urlHelpers, 'updateHistory').mockImplementation(() => {});
    });

    it('does not show search bar when showSearch is false', () => {
      mountComponent({ showSearch: false });

      expect(findSearch().exists()).toBe(false);
    });

    it('mounts search correctly', () => {
      mountComponent({ showSearch: true, sortableFields: BASE_SORT_FIELDS });

      expect(findSearch().props()).toMatchObject({
        filters: [],
        sorting: {
          orderBy: 'created_at',
          sort: 'desc',
        },
        sortableFields: BASE_SORT_FIELDS,
      });
    });

    it('on search submit, emits fetch-page with correct variables', () => {
      mountComponent({ showSearch: true, sortableFields: BASE_SORT_FIELDS });

      findSearch().vm.$emit('filter:submit');

      const expectedVariables = {
        orderBy: 'created_at',
        sort: 'desc',
      };

      expect(wrapper.emitted('fetch-page')).toEqual([[expectedFirstPage], [expectedVariables]]);
    });

    it('on sorting changed, emits fetch-page with correct variables', () => {
      mountComponent({ showSearch: true, sortableFields: BASE_SORT_FIELDS });

      const orderBy = 'name';
      findSearch().vm.$emit('sorting:changed', { orderBy });

      const expectedVariables = {
        orderBy: 'name',
        sort: 'desc',
      };

      expect(wrapper.emitted('fetch-page')).toEqual([[expectedFirstPage], [expectedVariables]]);
    });

    it('on direction changed, emits fetch-page with correct variables', () => {
      mountComponent({ showSearch: true, sortableFields: BASE_SORT_FIELDS });

      const sort = 'asc';
      findSearch().vm.$emit('sorting:changed', { sort });

      const expectedVariables = {
        orderBy: 'created_at',
        sort: 'asc',
      };

      expect(wrapper.emitted('fetch-page')).toEqual([[expectedFirstPage], [expectedVariables]]);
    });
  });
});
@ -2,6 +2,7 @@ import BlobPage from '~/repository/pages/blob.vue';
import IndexPage from '~/repository/pages/index.vue';
import TreePage from '~/repository/pages/tree.vue';
import createRouter from '~/repository/router';
import { getMatchedComponents } from '~/lib/utils/vue3compat/vue_router';

describe('Repository router spec', () => {
  it.each`
@ -11,18 +12,13 @@ describe('Repository router spec', () => {
    ${'/tree/feat(test)'} | ${'feat(test)'} | ${TreePage} | ${'TreePage'}
    ${'/-/tree/main'} | ${'main'} | ${TreePage} | ${'TreePage'}
    ${'/-/tree/main/app/assets'} | ${'main'} | ${TreePage} | ${'TreePage'}
    ${'/-/tree/123/app/assets'} | ${'main'} | ${null} | ${'null'}
    ${'/-/blob/main/file.md'} | ${'main'} | ${BlobPage} | ${'BlobPage'}
  `('sets component as $componentName for path "$path"', ({ path, component, branch }) => {
    const router = createRouter('', branch);

    const componentsForRoute = router.getMatchedComponents(path);
    const componentsForRoute = getMatchedComponents(router, path);

    expect(componentsForRoute.length).toBe(component ? 1 : 0);

    if (component) {
      expect(componentsForRoute).toContain(component);
    }
    expect(componentsForRoute).toEqual([component]);
  });

  describe('Storing Web IDE path globally', () => {
@ -45,11 +41,14 @@ describe('Repository router spec', () => {
      ${'/-/tree/main'} | ${'main'} | ${`/-/ide/project/${proj}/edit/main/-/`}
      ${'/-/tree/main/app/assets'} | ${'main'} | ${`/-/ide/project/${proj}/edit/main/-/app/assets/`}
      ${'/-/blob/main/file.md'} | ${'main'} | ${`/-/ide/project/${proj}/edit/main/-/file.md`}
    `('generates the correct Web IDE url for $path', ({ path, branch, expectedPath } = {}) => {
      const router = createRouter(proj, branch);
    `(
      'generates the correct Web IDE url for $path',
      async ({ path, branch, expectedPath } = {}) => {
        const router = createRouter(proj, branch);

      router.push(path);
      expect(window.gl.webIDEPath).toBe(expectedPath);
    });
        await router.push(path);
        expect(window.gl.webIDEPath).toBe(expectedPath);
      },
    );
  });
});
@ -0,0 +1,15 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BackgroundMigration::BackfillBulkImportFailuresNamespaceId,
  feature_category: :importers,
  schema: 20250205194752 do
  include_examples 'desired sharding key backfill job' do
    let(:batch_table) { :bulk_import_failures }
    let(:backfill_column) { :namespace_id }
    let(:backfill_via_table) { :bulk_import_entities }
    let(:backfill_via_column) { :namespace_id }
    let(:backfill_via_foreign_key) { :bulk_import_entity_id }
  end
end
@ -0,0 +1,15 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BackgroundMigration::BackfillBulkImportFailuresOrganizationId,
  feature_category: :importers,
  schema: 20250205194757 do
  include_examples 'desired sharding key backfill job' do
    let(:batch_table) { :bulk_import_failures }
    let(:backfill_column) { :organization_id }
    let(:backfill_via_table) { :bulk_import_entities }
    let(:backfill_via_column) { :organization_id }
    let(:backfill_via_foreign_key) { :bulk_import_entity_id }
  end
end
@ -0,0 +1,15 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BackgroundMigration::BackfillBulkImportFailuresProjectId,
  feature_category: :importers,
  schema: 20250205194747 do
  include_examples 'desired sharding key backfill job' do
    let(:batch_table) { :bulk_import_failures }
    let(:backfill_column) { :project_id }
    let(:backfill_via_table) { :bulk_import_entities }
    let(:backfill_via_column) { :project_id }
    let(:backfill_via_foreign_key) { :bulk_import_entity_id }
  end
end
@ -192,6 +192,7 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
      # To add a table to this list, create an issue under https://gitlab.com/groups/gitlab-org/-/epics/11670.
      # Use https://gitlab.com/gitlab-org/gitlab/-/issues/476206 as an example.
      work_in_progress = {
        "bulk_import_failures" => "https://gitlab.com/gitlab-org/gitlab/-/issues/517824",
        "organization_users" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/476210',
        "push_rules" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/476212',
        "snippets" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/476216',
@ -0,0 +1,33 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe QueueBackfillBulkImportFailuresProjectId, feature_category: :importers do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      migration.before -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      migration.after -> {
        expect(batched_migration).to have_scheduled_batched_migration(
          table_name: :bulk_import_failures,
          column_name: :id,
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE,
          gitlab_schema: :gitlab_main_cell,
          job_arguments: [
            :project_id,
            :bulk_import_entities,
            :project_id,
            :bulk_import_entity_id
          ]
        )
      }
    end
  end
end
@ -0,0 +1,33 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe QueueBackfillBulkImportFailuresNamespaceId, feature_category: :importers do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      migration.before -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      migration.after -> {
        expect(batched_migration).to have_scheduled_batched_migration(
          table_name: :bulk_import_failures,
          column_name: :id,
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE,
          gitlab_schema: :gitlab_main_cell,
          job_arguments: [
            :namespace_id,
            :bulk_import_entities,
            :namespace_id,
            :bulk_import_entity_id
          ]
        )
      }
    end
  end
end
@ -0,0 +1,33 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe QueueBackfillBulkImportFailuresOrganizationId, feature_category: :importers do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      migration.before -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      migration.after -> {
        expect(batched_migration).to have_scheduled_batched_migration(
          table_name: :bulk_import_failures,
          column_name: :id,
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE,
          gitlab_schema: :gitlab_main_cell,
          job_arguments: [
            :organization_id,
            :bulk_import_entities,
            :organization_id,
            :bulk_import_entity_id
          ]
        )
      }
    end
  end
end
@ -4162,6 +4162,22 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
      it { is_expected.to be_truthy }
    end

    context 'when a relation import is in progress but it is stale' do
      before do
        create(:relation_import_tracker, :started, :stale, project: project)
      end

      it { is_expected.to be_falsey }
    end

    context 'when a relation import has finished' do
      before do
        create(:relation_import_tracker, :finished, :stale, project: project)
      end

      it { is_expected.to be_falsey }
    end

    context 'when direct transfer is in progress' do
      before do
        create(:bulk_import_entity, :project_entity, :started, project: project)
@ -44,12 +44,6 @@ RSpec.describe Projects::ImportExport::RelationImportWorker, feature_category: :
        allow(worker).to receive(:process_import).and_raise(StandardError, 'import_forced_to_fail')
      end

      it 'marks the tracker as failed' do
        expect { perform }
          .to raise_error(StandardError, 'import_forced_to_fail')
          .and change { tracker.reload.failed? }.from(false).to(true)
      end

      it 'creates a record of the failure' do
        expect { perform }
          .to raise_error(StandardError, 'import_forced_to_fail')
@ -60,6 +54,19 @@ RSpec.describe Projects::ImportExport::RelationImportWorker, feature_category: :
      end
    end

    context 'when tracker can not be started' do
      before do
        tracker.update!(status: 2)
      end

      it 'does not start the import process' do
        expect(Import::Framework::Logger).to receive(:info).with(message: 'Cannot start tracker', tracker_id: tracker.id,
          tracker_status: :finished)

        perform
      end
    end

    it_behaves_like 'an idempotent worker' do
      let(:job_args) { [tracker.id, user.id] }
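The `when tracker can not be started` context implies a guard clause at the top of the worker's `perform`. The worker source is not part of this excerpt; the following is a minimal sketch of such a guard under stated assumptions: `can_start?` and `status_name` are hypothetical method names, while the `Import::Framework::Logger` call and the `process_import` hook are the parts the spec actually references.

```ruby
# Sketch only, not the actual worker source from this commit.
def perform(tracker_id, user_id)
  tracker = Projects::ImportExport::RelationImportTracker.find(tracker_id)

  unless tracker.can_start? # hypothetical predicate; false once the tracker is already finished
    ::Import::Framework::Logger.info(
      message: 'Cannot start tracker',
      tracker_id: tracker.id,
      tracker_status: tracker.status_name # hypothetical; the spec expects :finished here
    )
    return
  end

  # process_import is stubbed in the spec; its exact signature is not shown in this diff.
  process_import
end
```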
@ -75,4 +82,30 @@ RSpec.describe Projects::ImportExport::RelationImportWorker, feature_category: :
    end
  end

  it_behaves_like 'worker with data consistency', described_class, data_consistency: :delayed

  describe '.sidekiq_retries_exhausted' do
    it 'marks the tracker as failed and creates a record of the failure' do
      job = { 'args' => [tracker.id, user.id] }

      expect { described_class.sidekiq_retries_exhausted_block.call(job, StandardError.new('Error!')) }.to change {
        tracker.reload.failed?
      }.from(false).to(true)

      failure = tracker.project.import_failures.last
      expect(failure.exception_message).to eq('Error!')
    end
  end

  describe '.sidekiq_interruptions_exhausted' do
    it 'marks the tracker as failed and creates a record of the failure' do
      job = { 'args' => [tracker.id, user.id] }

      expect { described_class.interruptions_exhausted_block.call(job) }.to change {
        tracker.reload.failed?
      }.from(false).to(true)

      failure = tracker.project.import_failures.last
      expect(failure.exception_message).to eq('Import process reached the maximum number of interruptions')
    end
  end
end
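These two describe blocks exercise class-level exhaustion hooks on the worker, which are not shown in this diff. A rough sketch of what they typically look like follows; it is not the actual worker source. `mark_tracker_as_failed` is a hypothetical helper, and the `sidekiq_interruptions_exhausted` DSL is assumed to mirror Sidekiq's `sidekiq_retries_exhausted` (which is what the spec's `interruptions_exhausted_block` accessor suggests). Only the job payload shape and the failure messages are taken from the spec.

```ruby
# Sketch only, not the actual worker source from this commit.
sidekiq_retries_exhausted do |job, exception|
  tracker = Projects::ImportExport::RelationImportTracker.find(job['args'].first)

  # hypothetical helper: flips the tracker to failed and records an ImportFailure on the project
  mark_tracker_as_failed(tracker, exception)
end

sidekiq_interruptions_exhausted do |job|
  tracker = Projects::ImportExport::RelationImportTracker.find(job['args'].first)

  mark_tracker_as_failed(
    tracker,
    StandardError.new('Import process reached the maximum number of interruptions')
  )
end
```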
@ -1436,10 +1436,10 @@
  resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-3.123.0.tgz#1fa3b1a709755ff7c8ef67e18c0442101655ebf0"
  integrity sha512-yjVn+utOTIKk8d9JlvGo6EgJ4TQ+CKpe3RddflAqtsQqQuL/2MlVdtaUePybxYzWIaumFuh5LouQ6BrWyw1niQ==

"@gitlab/ui@108.1.0":
  version "108.1.0"
  resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-108.1.0.tgz#9053a5bf131e973f581db49aeec5048ab0987aec"
  integrity sha512-aDUopu+Hwm/tr3GhsKCqQZxNhH3RMDANlxa0mDiqt0PWW11X/QzDaNkZUQaNDoT8SW53ds68RydZLrO2aUofng==
"@gitlab/ui@108.2.0":
  version "108.2.0"
  resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-108.2.0.tgz#45ecf0f839bee5bc88bb5a5a42385064fc209cc1"
  integrity sha512-IMhc17HnuWD/54rK6dPqL+gvgysVY5exoHGoomQg2aL6AG4P0tpSQ4S6hAq1p+rYpd04/iSuEGqSkhlhykOguw==
  dependencies:
    "@floating-ui/dom" "1.4.3"
    echarts "^5.3.2"