Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-02-28 18:08:32 +00:00
parent baed745d21
commit 36eff6e508
102 changed files with 3173 additions and 546 deletions

View File

@ -715,6 +715,7 @@ lib/gitlab/checks/** @proglottis @toon @zj-gitlab
/doc/development/distributed_tracing.md @msedlakjakubowski
/doc/development/documentation/ @sselhorn
/doc/development/elasticsearch.md @ashrafkhamis
/doc/development/search/advanced_search_migration_styleguide.md @ashrafkhamis
/doc/development/experiment_guide/ @phillipwells
/doc/development/export_csv.md @eread
/doc/development/fe_guide/content_editor.md @ashrafkhamis
@ -1431,4 +1432,4 @@ ee/lib/ee/api/entities/project.rb @gitlab-org/manage/manage-workspace/backend-ap
[Manage::Foundations]
/lib/sidebars/ @gitlab/ @gitlab-org/manage/foundations/engineering
/ee/lib/sidebars/ @gitlab-org/manage/foundations/engineering
/ee/lib/sidebars/ @gitlab-org/manage/foundations/engineering

View File

@ -10,10 +10,10 @@ const CLIPBOARD_ERROR_EVENT = 'clipboard-error';
const I18N_ERROR_MESSAGE = __('Copy failed. Please manually copy the value.');
function showTooltip(target, title) {
const { title: originalTitle } = target.dataset;
const { originalTitle } = target.dataset;
once('hidden', (tooltip) => {
if (tooltip.target === target) {
if (originalTitle && tooltip.target === target) {
target.setAttribute('title', originalTitle);
target.setAttribute('aria-label', originalTitle);
fixTitle(target);

View File

@ -0,0 +1,81 @@
<script>
import { GlButton } from '@gitlab/ui';
import { s__, sprintf } from '~/locale';
import { createAlert } from '~/flash';
import { getParameterByName } from '~/lib/utils/url_utility';
import { convertToGraphQLId } from '~/graphql_shared/utils';
import { TYPENAME_CI_RUNNER } from '~/graphql_shared/constants';
import runnerForRegistrationQuery from '../graphql/register/runner_for_registration.query.graphql';
import { I18N_FETCH_ERROR, PARAM_KEY_PLATFORM, DEFAULT_PLATFORM } from '../constants';
import RegistrationInstructions from '../components/registration/registration_instructions.vue';
import { captureException } from '../sentry_utils';

// Admin page shown after a runner is created: fetches the runner and renders
// platform-specific registration instructions for it.
export default {
  name: 'AdminRegisterRunnerApp',
  props: {
    // Numeric runner id in string form; converted to a GraphQL global id
    // before querying (see `apollo.runner.variables`).
    runnerId: {
      type: String,
      required: true,
    },
    // Path back to the runners list, used by the confirm button in the template.
    runnersPath: {
      type: String,
      required: true,
    },
  },
  components: {
    GlButton,
    RegistrationInstructions,
  },
  data() {
    return {
      // Platform comes from the URL query string; falls back to the default
      // platform when the parameter is absent.
      platform: getParameterByName(PARAM_KEY_PLATFORM) || DEFAULT_PLATFORM,
      runner: null,
    };
  },
  apollo: {
    runner: {
      query: runnerForRegistrationQuery,
      variables() {
        return {
          id: convertToGraphQLId(TYPENAME_CI_RUNNER, this.runnerId),
        };
      },
      error(error) {
        // Show a generic user-facing alert and report the details to Sentry.
        createAlert({ message: I18N_FETCH_ERROR });
        captureException({ error, component: this.$options.name });
      },
    },
  },
  computed: {
    description() {
      return this.runner?.description;
    },
    // Page heading; includes the runner description when one is available.
    heading() {
      if (this.description) {
        return sprintf(s__('Runners|Register "%{runnerDescription}" runner'), {
          runnerDescription: this.description,
        });
      }
      return s__('Runners|Register runner');
    },
    // Token passed to the instructions component; null until the query
    // resolves (or when the API no longer exposes it).
    ephemeralAuthenticationToken() {
      return this.runner?.ephemeralAuthenticationToken;
    },
  },
};
</script>
<template>
  <div>
    <h1 class="gl-font-size-h1">{{ heading }}</h1>
    <registration-instructions
      :loading="$apollo.queries.runner.loading"
      :platform="platform"
      :token="ephemeralAuthenticationToken"
    />
    <gl-button :href="runnersPath" variant="confirm">{{
      s__('Runners|Go to runners page')
    }}</gl-button>
  </div>
</template>

View File

@ -1,5 +1,36 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import { showAlertFromLocalStorage } from '../local_storage_alert/show_alert_from_local_storage';
import AdminRegisterRunnerApp from './admin_register_runner_app.vue';
export const initAdminRegisterRunner = () => {
Vue.use(VueApollo);
export const initAdminRegisterRunner = (selector = '#js-admin-register-runner') => {
showAlertFromLocalStorage();
const el = document.querySelector(selector);
if (!el) {
return null;
}
const { runnerId, runnersPath } = el.dataset;
const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(),
});
return new Vue({
el,
apolloProvider,
render(h) {
return h(AdminRegisterRunnerApp, {
props: {
runnerId,
runnersPath,
},
});
},
});
};

View File

@ -0,0 +1,42 @@
<script>
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';

// Renders a terminal-style command with an optional prompt and a button that
// copies the command text to the clipboard. The prompt is display-only and is
// excluded from the copied text.
export default {
  components: {
    ClipboardButton,
  },
  props: {
    // Shell prompt shown before the first line (e.g. "$" or ">").
    prompt: {
      type: String,
      required: false,
      default: '',
    },
    // Command to display: a single string or an array of lines.
    command: {
      type: [Array, String],
      required: false,
      default: '',
    },
  },
  computed: {
    // Normalizes `command` to an array of lines.
    lines() {
      if (typeof this.command === 'string') {
        return [this.command];
      }
      return this.command;
    },
    // Text placed on the clipboard: all lines concatenated with no separator.
    // NOTE(review): this relies on each line carrying its own leading
    // whitespace (as the register-command helper does) — confirm intended.
    clipboard() {
      return this.lines.join('');
    },
  },
};
</script>
<template>
  <div class="gl-display-flex gl-gap-3 gl-align-items-flex-start">
    <!-- eslint-disable vue/require-v-for-key-->
    <!-- Whitespace inside <pre> is significant; keep markup on one line. -->
    <pre
      class="gl-w-full"
    ><span v-if="prompt" class="gl-user-select-none">{{ prompt }} </span><template v-for="line in lines">{{ line }}<br class="gl-user-select-none"/></template></pre>
    <!-- eslint-enable vue/require-v-for-key-->
    <clipboard-button :text="clipboard" :title="__('Copy')" />
  </div>
</template>

View File

@ -0,0 +1,142 @@
<script>
import { GlIcon, GlLink, GlSprintf, GlSkeletonLoader } from '@gitlab/ui';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import { INSTALL_HELP_URL, EXECUTORS_HELP_URL, SERVICE_COMMANDS_HELP_URL } from '../../constants';
import CliCommand from './cli_command.vue';
import { commandPrompt, registerCommand, runCommand } from './utils';

// Step-by-step instructions for registering a runner on a given platform:
// install hint, the `register` command (with the ephemeral token), executor
// choice, and an optional verification step.
export default {
  components: {
    GlIcon,
    GlLink,
    GlSkeletonLoader,
    GlSprintf,
    ClipboardButton,
    CliCommand,
  },
  props: {
    // Target platform key (e.g. linux/macos/windows); selects prompt and
    // executable via the helpers in ./utils.
    platform: {
      type: String,
      required: true,
    },
    // When true, a skeleton loader replaces the register command (step 1).
    loading: {
      type: Boolean,
      required: false,
      default: false,
    },
    // Ephemeral registration token; interpolated into the register command.
    token: {
      type: String,
      required: false,
      default: null,
    },
  },
  computed: {
    commandPrompt() {
      return commandPrompt({ platform: this.platform });
    },
    registerCommand() {
      return registerCommand({ platform: this.platform, registrationToken: this.token });
    },
    runCommand() {
      return runCommand({ platform: this.platform });
    },
  },
  // Help URLs exposed to the template via $options.
  INSTALL_HELP_URL,
  EXECUTORS_HELP_URL,
  SERVICE_COMMANDS_HELP_URL,
};
</script>
<template>
  <div>
    <p>
      <gl-sprintf
        :message="
          s__(
            'Runners|GitLab Runner must be installed before you can register a runner. %{linkStart}How do I install GitLab Runner?%{linkEnd}',
          )
        "
      >
        <template #link="{ content }">
          <gl-link :href="$options.INSTALL_HELP_URL">{{ content }}</gl-link>
        </template>
      </gl-sprintf>
    </p>
    <section>
      <h2 class="gl-font-size-h2">{{ s__('Runners|Step 1') }}</h2>
      <p>
        {{
          s__(
            'Runners|Copy and paste the following command into your command line to register the runner.',
          )
        }}
      </p>
      <gl-skeleton-loader v-if="loading" />
      <template v-else>
        <cli-command :prompt="commandPrompt" :command="registerCommand" />
        <p>
          <gl-icon name="information-o" class="gl-text-blue-600!" />
          <gl-sprintf
            :message="
              s__(
                'Runners|The %{boldStart}runner token%{boldEnd} %{token} displays %{boldStart}only for a short time%{boldEnd}, and is stored in the %{codeStart}config.toml%{codeEnd} after you create the runner. It will not be visible once the runner is registered.',
              )
            "
          >
            <template #token>
              <code>{{ token }}</code>
              <clipboard-button
                :text="token"
                :title="__('Copy')"
                size="small"
                category="tertiary"
                class="gl-border-none!"
              />
            </template>
            <template #bold="{ content }"
              ><span class="gl-font-weight-bold">{{ content }}</span></template
            >
            <template #code="{ content }"
              ><code>{{ content }}</code></template
            >
          </gl-sprintf>
        </p>
      </template>
    </section>
    <section>
      <h2 class="gl-font-size-h2">{{ s__('Runners|Step 2') }}</h2>
      <p>
        <gl-sprintf
          :message="
            s__(
              'Runners|Choose an executor when prompted by the command line. Executors run builds in different environments. %{linkStart}Not sure which one to select?%{linkEnd}',
            )
          "
        >
          <template #link="{ content }">
            <gl-link :href="$options.EXECUTORS_HELP_URL">{{ content }}</gl-link>
          </template>
        </gl-sprintf>
      </p>
    </section>
    <section>
      <h2 class="gl-font-size-h2">{{ s__('Runners|Optional. Step 3') }}</h2>
      <p>{{ s__('Runners|Manually verify that the runner is available to pick up jobs.') }}</p>
      <cli-command :prompt="commandPrompt" :command="runCommand" />
      <p>
        <gl-sprintf
          :message="
            s__(
              'Runners|This may not be needed if you manage your runner as a %{linkStart}system or user service%{linkEnd}.',
            )
          "
        >
          <template #link="{ content }">
            <gl-link :href="$options.SERVICE_COMMANDS_HELP_URL">{{ content }}</gl-link>
          </template>
        </gl-sprintf>
      </p>
    </section>
  </div>
</template>

View File

@ -0,0 +1,43 @@
import {
  DEFAULT_PLATFORM,
  LINUX_PLATFORM,
  MACOS_PLATFORM,
  WINDOWS_PLATFORM,
} from '../../constants';

/* eslint-disable @gitlab/require-i18n-strings */

// Shell prompt and runner executable name for each supported platform.
const PLATFORM_CONFIG = {
  [LINUX_PLATFORM]: { commandPrompt: '$', executable: 'gitlab-runner' },
  [MACOS_PLATFORM]: { commandPrompt: '$', executable: 'gitlab-runner' },
  [WINDOWS_PLATFORM]: { commandPrompt: '>', executable: '.\\gitlab-runner.exe' },
};

// Resolves the config for a platform, falling back to the default platform.
const configFor = (platform) => PLATFORM_CONFIG[platform] ?? PLATFORM_CONFIG[DEFAULT_PLATFORM];

export const commandPrompt = ({ platform }) => configFor(platform).commandPrompt;

export const executable = ({ platform }) => configFor(platform).executable;

// Lines of the `register` command shown to the user. `url` defaults to the
// instance URL exposed on the global `gon` object.
export const registerCommand = ({ platform, url = gon.gitlab_url, registrationToken }) => {
  const runner = executable({ platform });

  return [`${runner} register`, `  --url ${url}`, `  --registration-token ${registrationToken}`];
};

// The command used to manually run the registered runner.
export const runCommand = ({ platform }) => `${executable({ platform })} run`;

/* eslint-enable @gitlab/require-i18n-strings */

View File

@ -188,5 +188,9 @@ export const DEFAULT_PLATFORM = LINUX_PLATFORM;
// Runner docs are in a separate repository and are not shipped with GitLab
// they are rendered as external URLs.
export const INSTALL_HELP_URL = 'https://docs.gitlab.com/runner/install';
export const EXECUTORS_HELP_URL = 'https://docs.gitlab.com/runner/executors/';
export const SERVICE_COMMANDS_HELP_URL =
'https://docs.gitlab.com/runner/commands/#service-related-commands';
export const DOCKER_HELP_URL = 'https://docs.gitlab.com/runner/install/docker.html';
export const KUBERNETES_HELP_URL = 'https://docs.gitlab.com/runner/install/kubernetes.html';

View File

@ -0,0 +1,7 @@
# Fetches the fields needed while registering a runner, including the
# ephemeral token that is only exposed for a short time after creation.
query getRunnerForRegistration($id: CiRunnerID!) {
  runner(id: $id) {
    id
    description
    # Short-lived token interpolated into the `register` command.
    ephemeralAuthenticationToken
  }
}

View File

@ -66,7 +66,7 @@ export default {
<template>
<div v-if="isFetchingMergeRequests || (!isFetchingMergeRequests && totalCount)">
<div class="card card-slim gl-mt-5 gl-mb-0">
<div class="card-header gl-bg-gray-10">
<div class="card-header gl-px-5 gl-py-4 gl-bg-white">
<div
class="card-title gl-relative gl-display-flex gl-align-items-center gl-line-height-20 gl-font-weight-bold gl-m-0"
>
@ -79,7 +79,7 @@ export default {
{{ __('Related merge requests') }}
</h3>
<template v-if="totalCount">
<gl-icon name="merge-request" class="gl-ml-5 gl-mr-2 gl-text-gray-500" />
<gl-icon name="merge-request" class="gl-ml-3 gl-mr-2 gl-text-gray-500" />
<span data-testid="count">{{ totalCount }}</span>
</template>
</div>
@ -90,7 +90,7 @@ export default {
label="Fetching related merge requests"
class="gl-py-3"
/>
<ul v-else class="content-list related-items-list">
<ul v-else class="content-list related-items-list gl-bg-gray-10">
<li v-for="mr in mergeRequests" :key="mr.id" class="list-item gl-m-0! gl-p-0!">
<related-issuable-item
:id-key="mr.id"

View File

@ -187,7 +187,7 @@ export default {
'gl-border-b-1': isOpen,
'gl-border-b-0': !isOpen,
}"
class="gl-display-flex gl-justify-content-space-between gl-line-height-24 gl-py-3 gl-px-5 gl-bg-gray-10 gl-border-b-solid gl-border-b-gray-100"
class="gl-display-flex gl-justify-content-space-between gl-line-height-24 gl-pl-5 gl-pr-4 gl-py-4 gl-bg-white gl-border-b-solid gl-border-b-gray-100"
>
<h3 class="card-title h5 gl-my-0 gl-display-flex gl-align-items-center gl-flex-grow-1">
<gl-link

View File

@ -126,7 +126,7 @@ export default {
<gl-disclosure-dropdown ref="dropdown">
<template #toggle>
<gl-button category="tertiary" icon="question-o" class="btn-with-notification">
<span v-if="showWhatsNewNotification" class="notification"></span>
<span v-if="showWhatsNewNotification" class="notification-dot-info"></span>
{{ $options.i18n.help }}
</gl-button>
</template>

View File

@ -9,6 +9,8 @@ import {
import SafeHtml from '~/vue_shared/directives/safe_html';
import { s__, __, sprintf } from '~/locale';
import NewNavToggle from '~/nav/components/new_nav_toggle.vue';
import Tracking from '~/tracking';
import PersistentUserCallout from '~/persistent_user_callout';
import UserNameGroup from './user_name_group.vue';
export default {
@ -18,13 +20,13 @@ export default {
badgeLabel: s__('NorthstarNavigation|Alpha'),
sectionTitle: s__('NorthstarNavigation|Navigation redesign'),
},
user: {
setStatus: s__('SetStatusModal|Set status'),
editStatus: s__('SetStatusModal|Edit status'),
editProfile: s__('CurrentUser|Edit profile'),
preferences: s__('CurrentUser|Preferences'),
gitlabNext: s__('CurrentUser|Switch to GitLab Next'),
},
setStatus: s__('SetStatusModal|Set status'),
editStatus: s__('SetStatusModal|Edit status'),
editProfile: s__('CurrentUser|Edit profile'),
preferences: s__('CurrentUser|Preferences'),
buyPipelineMinutes: s__('CurrentUser|Buy Pipeline minutes'),
oneOfGroupsRunningOutOfPipelineMinutes: s__('CurrentUser|One of your groups is running out'),
gitlabNext: s__('CurrentUser|Switch to GitLab Next'),
provideFeedback: s__('NorthstarNavigation|Provide feedback'),
startTrial: s__('CurrentUser|Start an Ultimate trial'),
signOut: __('Sign out'),
@ -41,6 +43,7 @@ export default {
directives: {
SafeHtml,
},
mixins: [Tracking.mixin()],
inject: ['toggleNewNavEndpoint'],
props: {
data: {
@ -56,7 +59,7 @@ export default {
const { busy, customized } = this.data.status;
const statusLabel =
busy || customized ? this.$options.i18n.user.editStatus : this.$options.i18n.user.setStatus;
busy || customized ? this.$options.i18n.editStatus : this.$options.i18n.setStatus;
return {
text: statusLabel,
@ -73,19 +76,32 @@ export default {
},
editProfileItem() {
return {
text: this.$options.i18n.user.editProfile,
text: this.$options.i18n.editProfile,
href: this.data.settings.profile_path,
};
},
preferencesItem() {
return {
text: this.$options.i18n.user.preferences,
text: this.$options.i18n.preferences,
href: this.data.settings.profile_preferences_path,
};
},
addBuyPipelineMinutesMenuItem() {
return this.data.pipeline_minutes?.show_buy_pipeline_minutes;
},
buyPipelineMinutesItem() {
return {
text: this.$options.i18n.buyPipelineMinutes,
warningText: this.$options.i18n.oneOfGroupsRunningOutOfPipelineMinutes,
href: this.data.pipeline_minutes?.buy_pipeline_minutes_path,
extraAttrs: {
class: 'js-follow-link',
},
};
},
gitlabNextItem() {
return {
text: this.$options.i18n.user.gitlabNext,
text: this.$options.i18n.gitlabNext,
href: this.data.canary_toggle_com_url,
};
},
@ -130,6 +146,38 @@ export default {
'data-current-clear-status-after': this.data.status.clear_after,
};
},
buyPipelineMinutesCalloutData() {
return this.showNotificationDot
? {
'data-feature-id': this.data.pipeline_minutes.callout_attrs.feature_id,
'data-dismiss-endpoint': this.data.pipeline_minutes.callout_attrs.dismiss_endpoint,
}
: {};
},
showNotificationDot() {
return this.data.pipeline_minutes?.show_notification_dot;
},
},
methods: {
onShow() {
this.trackEvents();
this.initCallout();
},
initCallout() {
if (this.showNotificationDot) {
PersistentUserCallout.factory(this.$refs?.buyPipelineMinutesNotificationCallout.$el);
}
},
trackEvents() {
if (this.addBuyPipelineMinutesMenuItem) {
const {
'track-action': trackAction,
'track-label': label,
'track-property': property,
} = this.data.pipeline_minutes.tracking_attrs;
this.track(trackAction, { label, property });
}
},
},
};
</script>
@ -140,9 +188,10 @@ export default {
placement="right"
data-testid="user-dropdown"
data-qa-selector="user_menu"
@shown="onShow"
>
<template #toggle>
<button class="user-bar-item">
<button class="user-bar-item btn-with-notification">
<span class="gl-sr-only">{{ toggleText }}</span>
<gl-avatar
:size="24"
@ -151,6 +200,13 @@ export default {
aria-hidden="true"
data-qa-selector="user_avatar_content"
/>
<span
v-if="showNotificationDot"
class="notification-dot-warning"
data-testid="buy-pipeline-minutes-notification-dot"
v-bind="data.pipeline_minutes.notification_dot_attrs"
>
</span>
</button>
</template>
@ -177,6 +233,25 @@ export default {
<gl-disclosure-dropdown-item :item="preferencesItem" data-testid="preferences-item" />
<gl-disclosure-dropdown-item
v-if="addBuyPipelineMinutesMenuItem"
ref="buyPipelineMinutesNotificationCallout"
:item="buyPipelineMinutesItem"
v-bind="buyPipelineMinutesCalloutData"
data-testid="buy-pipeline-minutes-item"
>
<template #list-item>
<span class="gl-display-flex gl-flex-direction-column">
<span>{{ buyPipelineMinutesItem.text }} <gl-emoji data-name="clock9" /></span>
<span
v-if="data.pipeline_minutes.show_with_subtext"
class="gl-font-sm small gl-pt-2 gl-text-orange-800"
>{{ buyPipelineMinutesItem.warningText }}</span
>
</span>
</template>
</gl-disclosure-dropdown-item>
<gl-disclosure-dropdown-item
v-if="data.gitlab_com_but_not_canary"
:item="gitlabNextItem"

View File

@ -313,7 +313,7 @@ export default {
:status="statusIconName"
:is-loading="isLoadingSummary"
:class="{ 'gl-cursor-pointer': isCollapsible }"
class="gl-p-5"
class="gl-pl-5 gl-pr-4 gl-py-4"
@mousedown="onRowMouseDown"
@mouseup="onRowMouseUp"
>
@ -381,7 +381,7 @@ export default {
v-else-if="hasFullData"
:items="fullData"
:min-item-size="32"
class="report-block-container gl-px-5 gl-py-0"
class="report-block-container gl-p-0"
>
<template #default="{ item, index, active }">
<dynamic-scroller-item :item="item" :active="active" :class="{ active }">
@ -389,7 +389,7 @@ export default {
:class="{
'gl-border-b-solid gl-border-b-1 gl-border-gray-100': index !== fullData.length - 1,
}"
class="gl-py-3 gl-pl-7"
class="gl-py-3 gl-pl-9"
data-testid="extension-list-item"
>
<gl-intersection-observer

View File

@ -287,7 +287,7 @@ export default {
<template>
<section class="media-section" data-testid="widget-extension">
<div class="gl-p-5 gl-align-items-center gl-display-flex">
<div class="gl-px-5 gl-py-4 gl-align-items-center gl-display-flex">
<status-icon
:level="1"
:name="widgetName"

View File

@ -149,7 +149,7 @@ export default {
>
<slot>
<button
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4 gl-mb-0"
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
type="button"
@click="openFileUpload"
>

View File

@ -44,7 +44,7 @@ export default {
<template>
<div class="gl-rounded-base gl-border-1 gl-border-solid gl-border-gray-100 gl-bg-gray-10 gl-mt-4">
<div
class="gl-px-5 gl-py-3 gl-display-flex gl-justify-content-space-between"
class="gl-pl-5 gl-pr-4 gl-py-4 gl-display-flex gl-justify-content-space-between gl-bg-white"
:class="{ 'gl-border-b-1 gl-border-b-solid gl-border-b-gray-100': isOpen }"
>
<div class="gl-display-flex gl-flex-grow-1">

View File

@ -6,6 +6,8 @@ $item-remove-button-space: 42px;
.related-items-list {
padding: $gl-padding-4;
padding-right: $gl-padding-6;
border-bottom-left-radius: $gl-border-size-3;
border-bottom-right-radius: $gl-border-size-3;
&,
.list-item:last-child {

View File

@ -11,6 +11,18 @@
}
}
// Small circular status indicator absolutely positioned over its parent
// (e.g. a sidebar button). $color: fill; $size: diameter; $top/$left:
// offsets relative to the positioned ancestor.
@mixin notification-dot($color, $size, $top, $left) {
  background-color: $color;
  border: 2px solid $gray-10; // Same as the sidebar's background color.
  position: absolute;
  height: $size;
  width: $size;
  top: $top;
  left: $left;
  border-radius: 50%;
  // Animate color changes (e.g. on hover, where the border is restyled).
  transition: background-color 100ms linear, border-color 100ms linear;
}
.super-sidebar {
@include gl-fixed;
@include gl-top-0;
@ -98,16 +110,12 @@
.btn-with-notification {
position: relative;
.notification {
background-color: $blue-500;
border: 2px solid $gray-10; // Same as the sidebar's background color.
position: absolute;
height: 9px;
width: 9px;
top: 5px;
left: 22px;
border-radius: 50%;
transition: background-color 100ms linear, border-color 100ms linear;
.notification-dot-info {
@include notification-dot($blue-500, 9px, 5px, 22px);
}
.notification-dot-warning {
@include notification-dot($orange-300, 12px, 1px, 19px);
}
&:hover,

View File

@ -908,6 +908,11 @@ Compare Branches
*/
$compare-branches-sticky-header-height: 68px;
/*
Board Swimlanes
*/
$board-swimlanes-headers-height: 64px;
/**
Bootstrap 4.2.0 introduced new icons for validating forms.
Our design system does not use those, so we are disabling them for now:

View File

@ -819,7 +819,7 @@ $tabs-holder-z-index: 250;
.mr-widget-body,
.mr-widget-content {
padding: $gl-padding;
padding: $gl-padding-12 $gl-padding;
}
.mr-widget-body-ready-merge {
@ -840,6 +840,11 @@ $tabs-holder-z-index: 250;
}
}
.mr-widget-grouped-section .report-block-container {
border-bottom-left-radius: $border-radius-default;
border-bottom-right-radius: $border-radius-default;
}
.mr-widget-extension {
border-top: 1px solid var(--border-color, $border-color);
background-color: var(--gray-10, $gray-10);
@ -916,7 +921,7 @@ $tabs-holder-z-index: 250;
border-left: 2px solid var(--border-color, $border-color);
position: absolute;
bottom: -17px;
left: calc(1rem - 1px);
left: 26px;
height: 16px;
}
}

View File

@ -0,0 +1,58 @@
# frozen_string_literal: true

module Mutations
  module Projects
    # GraphQL mutation that brings a fork's branch up to date with its
    # upstream (source) project by enqueueing a background sync job.
    class SyncFork < BaseMutation
      graphql_name 'ProjectSyncFork'

      include FindsProject

      # The current user must be able to push code to the fork.
      authorize :push_code

      argument :project_path, GraphQL::Types::ID,
        required: true,
        description: 'Full path of the project to initialize.'

      argument :target_branch, GraphQL::Types::String,
        required: true,
        description: 'Ref of the fork to fetch into.'

      field :details, Types::Projects::ForkDetailsType,
        null: true,
        description: 'Updated fork details.'

      # Entry point: resolves divergence details for +target_branch+ and, when
      # possible, enqueues the sync. Returns the mutation payload hash.
      def resolve(project_path:, target_branch:)
        project = authorized_find!(project_path)
        details_resolver = Resolvers::Projects::ForkDetailsResolver.new(object: project, context: context, field: nil)
        details = details_resolver.resolve(ref: target_branch)

        # The resolver returns nil when the branch cannot be compared with
        # upstream (e.g. the ref or fork source is unavailable).
        return respond(nil, ['This branch of this project cannot be updated from the upstream']) unless details

        enqueue_sync_fork(project, target_branch, details)
      end

      # Guards against no-op, conflicting, throttled, or concurrent syncs
      # before enqueueing the background worker.
      def enqueue_sync_fork(project, target_branch, details)
        # Nothing to merge when the fork is not behind upstream.
        return respond(details, []) if details.counts[:behind] == 0

        if details.has_conflicts?
          return respond(details, ['The synchronization cannot happen due to the merge conflict'])
        end

        return respond(details, ['This service has been called too many times.']) if rate_limit_throttled?(project)
        # The exclusive lease prevents two syncs of the same branch at once;
        # it is released by the worker/service when the sync finishes.
        return respond(details, ['Another fork sync is already in progress']) unless details.exclusive_lease.try_obtain

        ::Projects::Forks::SyncWorker.perform_async(project.id, current_user.id, target_branch) # rubocop:disable CodeReuse/Worker

        respond(details, [])
      end

      # True when the per-(project, user) rate limit for fork syncs is hit.
      def rate_limit_throttled?(project)
        Gitlab::ApplicationRateLimiter.throttled?(:project_fork_sync, scope: [project, current_user])
      end

      # Builds the GraphQL mutation payload.
      def respond(details, errors)
        { details: details, errors: errors }
      end
    end
  end
end

View File

@ -13,8 +13,17 @@ module Resolvers
def resolve(**args)
return unless project.forked?
return unless authorized_fork_source?
return unless project.repository.branch_exists?(args[:ref])
return unless Feature.enabled?(:fork_divergence_counts, project)
::Projects::Forks::DivergenceCounts.new(project, args[:ref]).counts
::Projects::Forks::Details.new(project, args[:ref])
end
private
def authorized_fork_source?
Ability.allowed?(current_user, :read_code, project.fork_source)
end
end
end

View File

@ -90,6 +90,7 @@ module Types
mount_mutation Mutations::Notes::Update::ImageDiffNote
mount_mutation Mutations::Notes::RepositionImageDiffNote
mount_mutation Mutations::Notes::Destroy
mount_mutation Mutations::Projects::SyncFork, calls_gitaly: true, alpha: { milestone: '15.9' }
mount_mutation Mutations::Releases::Create
mount_mutation Mutations::Releases::Update
mount_mutation Mutations::Releases::Delete

View File

@ -9,11 +9,37 @@ module Types
field :ahead, GraphQL::Types::Int,
null: true,
calls_gitaly: true,
method: :ahead,
description: 'Number of commits ahead of upstream.'
field :behind, GraphQL::Types::Int,
null: true,
calls_gitaly: true,
method: :behind,
description: 'Number of commits behind upstream.'
field :is_syncing, GraphQL::Types::Boolean,
null: true,
method: :syncing?,
description: 'Indicates if there is a synchronization in progress.'
field :has_conflicts, GraphQL::Types::Boolean,
null: true,
method: :has_conflicts?,
description: 'Indicates if the fork conflicts with its upstream project.'
def ahead
counts[:ahead]
end
def behind
counts[:behind]
end
def counts
@counts ||= object.counts
end
end
# rubocop: enable Graphql/AuthorizeTypes
end

View File

@ -9,15 +9,27 @@ module NavHelper
header_links.include?(link)
end
def page_has_sidebar?
defined?(@left_sidebar) && @left_sidebar
end
def page_has_collapsed_sidebar?
page_has_sidebar? && collapsed_sidebar?
end
def page_has_collapsed_super_sidebar?
page_has_sidebar? && collapsed_super_sidebar?
end
def page_with_sidebar_class
class_name = page_gutter_class
if show_super_sidebar?
class_name << 'page-with-super-sidebar' if defined?(@left_sidebar) && @left_sidebar
class_name << 'page-with-super-sidebar-collapsed' if collapsed_super_sidebar? && @left_sidebar
class_name << 'page-with-super-sidebar' if page_has_sidebar?
class_name << 'page-with-super-sidebar-collapsed' if page_has_collapsed_super_sidebar?
else
class_name << 'page-with-contextual-sidebar' if defined?(@left_sidebar) && @left_sidebar
class_name << 'page-with-icon-sidebar' if collapsed_sidebar? && @left_sidebar
class_name << 'page-with-contextual-sidebar' if page_has_sidebar?
class_name << 'page-with-icon-sidebar' if page_has_collapsed_sidebar?
end
class_name -= ['right-sidebar-expanded'] if defined?(@right_sidebar) && !@right_sidebar

View File

@ -11,6 +11,7 @@ module Users
UNFINISHED_TAG_CLEANUP_CALLOUT = 'unfinished_tag_cleanup_callout'
SECURITY_NEWSLETTER_CALLOUT = 'security_newsletter_callout'
MERGE_REQUEST_SETTINGS_MOVED_CALLOUT = 'merge_request_settings_moved_callout'
PAGES_MOVED_CALLOUT = 'pages_moved_callout'
REGISTRATION_ENABLED_CALLOUT_ALLOWED_CONTROLLER_PATHS = [/^root/, /^dashboard\S*/, /^admin\S*/].freeze
WEB_HOOK_DISABLED = 'web_hook_disabled'
ULTIMATE_FEATURE_REMOVAL_BANNER = 'ultimate_feature_removal_banner'
@ -76,6 +77,10 @@ module Users
!user_dismissed?(MERGE_REQUEST_SETTINGS_MOVED_CALLOUT) && project.merge_requests_enabled?
end
def show_pages_menu_callout?
!user_dismissed?(PAGES_MOVED_CALLOUT)
end
def ultimate_feature_removal_banner_dismissed?(project)
return false unless project

View File

@ -3,8 +3,11 @@
module Projects
module Forks
# Class for calculating the divergence of a fork with the source project
class DivergenceCounts
class Details
include Gitlab::Utils::StrongMemoize
LATEST_COMMITS_COUNT = 10
LEASE_TIMEOUT = 15.minutes.to_i
EXPIRATION_TIME = 8.hours
def initialize(project, ref)
@ -20,32 +23,55 @@ module Projects
{ ahead: ahead, behind: behind }
end
def exclusive_lease
key = ['project_details', project.id, ref].join(':')
uuid = Gitlab::ExclusiveLease.get_uuid(key)
Gitlab::ExclusiveLease.new(key, uuid: uuid, timeout: LEASE_TIMEOUT)
end
strong_memoize_attr :exclusive_lease
def syncing?
exclusive_lease.exists?
end
def has_conflicts?
!(attrs && attrs[:has_conflicts]).nil?
end
def update!(params)
Rails.cache.write(cache_key, params, expires_in: EXPIRATION_TIME)
@attrs = nil
end
private
attr_reader :project, :fork_repo, :source_repo, :ref
def cache_key
@cache_key ||= ['project_forks', project.id, ref, 'divergence_counts']
@cache_key ||= ['project_fork_details', project.id, ref].join(':')
end
def divergence_counts
fork_sha = fork_repo.commit(ref).sha
source_sha = source_repo.commit.sha
sha = fork_repo.commit(ref)&.sha
source_sha = source_repo.commit&.sha
cached_source_sha, cached_fork_sha, counts = Rails.cache.read(cache_key)
return counts if cached_source_sha == source_sha && cached_fork_sha == fork_sha
return if sha.blank? || source_sha.blank?
counts = calculate_divergence_counts(fork_sha, source_sha)
return attrs[:counts] if attrs.present? && attrs[:source_sha] == source_sha && attrs[:sha] == sha
Rails.cache.write(cache_key, [source_sha, fork_sha, counts], expires_in: EXPIRATION_TIME)
counts = calculate_divergence_counts(sha, source_sha)
update!({ sha: sha, source_sha: source_sha, counts: counts })
counts
end
def calculate_divergence_counts(fork_sha, source_sha)
def calculate_divergence_counts(sha, source_sha)
# If the upstream latest commit exists in the fork repo, then
# it's possible to calculate divergence counts within the fork repository.
return fork_repo.diverging_commit_count(fork_sha, source_sha) if fork_repo.commit(source_sha)
return fork_repo.diverging_commit_count(sha, source_sha) if fork_repo.commit(source_sha)
# Otherwise, we need to find a commit that exists both in the fork and upstream
# in order to use this commit as a base for calculating divergence counts.
@ -67,6 +93,10 @@ module Projects
[ahead, behind]
end
def attrs
@attrs ||= Rails.cache.read(cache_key)
end
end
end
end

View File

@ -162,10 +162,7 @@ module Notes
track_note_creation_usage_for_merge_requests(note) if note.for_merge_request?
track_incident_action(user, note.noteable, 'incident_comment') if note.for_issue?
track_note_creation_in_ipynb(note)
if Feature.enabled?(:notes_create_service_tracking, project)
Gitlab::Tracking.event('Notes::CreateService', 'execute', **tracking_data_for(note))
end
track_note_creation_visual_review(note)
if Feature.enabled?(:route_hll_to_snowplow_phase4, project&.namespace) && note.for_commit?
metric_key_path = 'counts.commit_comment'
@ -209,6 +206,10 @@ module Notes
Gitlab::UsageDataCounters::IpynbDiffActivityCounter.note_created(note)
end
def track_note_creation_visual_review(note)
Gitlab::Tracking.event('Notes::CreateService', 'execute', **tracking_data_for(note))
end
end
end

View File

@ -0,0 +1,113 @@
# frozen_string_literal: true

module Projects
  module Forks
    # A service for fetching upstream default branch and merging it to the fork's specified branch.
    #
    # project       - the fork being updated
    # user          - the user performing the sync (becomes the merge author)
    # target_branch - branch of the fork to fast-forward/merge into
    class SyncService < BaseService
      ONGOING_MERGE_ERROR = 'The synchronization did not happen due to another merge in progress'

      MergeError = Class.new(StandardError)

      def initialize(project, user, target_branch)
        super(project, user)

        @source_project = project.fork_source
        # NOTE(review): assumes +target_branch+ exists in the fork —
        # `repository.commit` returns nil otherwise; confirm callers guarantee this.
        @head_sha = project.repository.commit(target_branch).sha
        @target_branch = target_branch
        @details = Projects::Forks::Details.new(project, target_branch)
      end

      # Runs the sync, converting merge failures into an error ServiceResponse
      # and always releasing the exclusive lease taken by the caller.
      def execute
        execute_service

        ServiceResponse.success
      rescue MergeError => e
        Gitlab::ErrorTracking.log_exception(e, { project_id: project.id, user_id: current_user.id })

        ServiceResponse.error(message: e.message)
      ensure
        details.exclusive_lease.cancel
      end

      private

      attr_reader :source_project, :head_sha, :target_branch, :details

      # The method executes multiple steps:
      #
      # 1. Gitlab::Git::CrossRepo fetches upstream default branch into a temporary ref and returns new source sha.
      # 2. New divergence counts are calculated using the source sha.
      # 3. If the fork is not behind, there is nothing to merge -> exit.
      # 4. Otherwise, continue with the new source sha.
      # 5. If Gitlab::Git::CommandError is raised it means that merge couldn't happen due to a merge conflict. The
      #    details are updated to transfer this error to the user.
      def execute_service
        counts = []
        source_sha = source_project.commit.sha

        Gitlab::Git::CrossRepo.new(repository, source_project.repository)
          .execute(source_sha) do |cross_repo_source_sha|
            counts = repository.diverging_commit_count(head_sha, cross_repo_source_sha)
            ahead, behind = counts

            next if behind == 0

            execute_with_fetched_source(cross_repo_source_sha, ahead)
          end
      rescue Gitlab::Git::CommandError => e
        # Persist the conflict so the UI can show why the sync failed.
        details.update!({ sha: head_sha, source_sha: source_sha, counts: counts, has_conflicts: true })

        raise MergeError, e.message
      end

      def execute_with_fetched_source(cross_repo_source_sha, ahead)
        with_linked_lfs_pointers(cross_repo_source_sha) do
          merge_commit_id = perform_merge(cross_repo_source_sha, ahead)
          # A nil merge commit id means another merge held the branch lock.
          raise MergeError, ONGOING_MERGE_ERROR unless merge_commit_id
        end
      end

      # This method merges the upstream default branch to the fork specified branch.
      # Depending on whether the fork branch is ahead of upstream or not, a different type of
      # merge is performed.
      #
      # If the fork's branch is not ahead of the upstream (only behind), fast-forward merge is performed.
      # However, if the fork's branch contains commits that don't exist upstream, a merge commit is created.
      # In this case, a conflict may happen, which interrupts the merge and returns a message to the user.
      def perform_merge(cross_repo_source_sha, ahead)
        if ahead > 0
          message = "Merge branch #{source_project.path}:#{source_project.default_branch} into #{target_branch}"

          repository.merge_to_branch(current_user,
            source_sha: cross_repo_source_sha,
            target_branch: target_branch,
            target_sha: head_sha,
            message: message)
        else
          repository.ff_merge(current_user, cross_repo_source_sha, target_branch, target_sha: head_sha)
        end
      end

      # This method links the newly merged lfs objects (if any) with the existing ones upstream.
      # The LfsLinkService service has a limit and may raise an error if there are too many lfs objects to link.
      # This is the reason why the block is passed:
      #
      # 1. Verify that there are not too many lfs objects to link
      # 2. Execute the block (which basically performs the merge)
      # 3. Link lfs objects
      def with_linked_lfs_pointers(newrev, &block)
        return yield unless project.lfs_enabled?

        oldrev = head_sha
        new_lfs_oids =
          Gitlab::Git::LfsChanges
            .new(repository, newrev)
            .new_pointers(not_in: [oldrev])
            .map(&:lfs_oid)

        Projects::LfsPointers::LfsLinkService.new(project).execute(new_lfs_oids, &block)
      rescue Projects::LfsPointers::LfsLinkService::TooManyOidsError => e
        raise MergeError, e.message
      end
    end
  end
end

View File

@ -15,9 +15,9 @@ module Projects
def execute(oids)
return [] unless project&.lfs_enabled?
if oids.size > MAX_OIDS
raise TooManyOidsError, 'Too many LFS object ids to link, please push them manually'
end
validate!(oids)
yield if block_given?
# Search and link existing LFS Object
link_existing_lfs_objects(oids)
@ -25,6 +25,12 @@ module Projects
private
def validate!(oids)
return if oids.size <= MAX_OIDS
raise TooManyOidsError, 'Too many LFS object ids to link, please push them manually'
end
def link_existing_lfs_objects(oids)
linked_existing_objects = []
iterations = 0

View File

@ -1,4 +1,7 @@
- add_to_breadcrumbs _('Runners'), admin_runners_path
- runner_name = "##{@runner.id} (#{@runner.short_sha})"
- breadcrumb_title s_('Runners|Register')
- page_title s_('Runners|Register'), "##{@runner.id} (#{@runner.short_sha})"
- add_to_breadcrumbs _('Runners'), admin_runners_path
- add_to_breadcrumbs runner_name, register_admin_runner_path(@runner)
#js-admin-register-runner{ data: { runner_id: @runner.id, runners_path: admin_runners_path } }

View File

@ -7,6 +7,13 @@
= render_if_exists 'shared/ultimate_feature_removal_banner', project: @project
- if Feature.enabled?(:show_pages_in_deployments_menu, current_user, type: :experiment)
= render Pajamas::AlertComponent.new(variant: :info,
title: _('GitLab Pages has moved'),
alert_options: { class: 'gl-my-5', data: { feature_id: Users::CalloutsHelper::PAGES_MOVED_CALLOUT, dismiss_endpoint: callouts_path, defer_links: 'true' } }) do |c|
= c.body do
= _('To go to GitLab Pages, on the left sidebar, select %{pages_link}.').html_safe % {pages_link: link_to('Deployments > Pages', project_pages_path(@project)).html_safe}
%section.settings.general-settings.no-animate.expanded#js-general-settings
.settings-header
%h4.settings-title.js-settings-toggle.js-settings-toggle-trigger-only= _('Naming, topics, avatar')
@ -27,7 +34,6 @@
%input{ name: 'update_section', type: 'hidden', value: 'js-shared-permissions' }
%template.js-project-permissions-form-data{ type: "application/json" }= project_permissions_panel_data(@project).to_json.html_safe
.js-project-permissions-form{ data: visibility_confirm_modal_data(@project, reduce_visibility_form_id) }
- if show_merge_request_settings_callout?(@project)
%section.settings.expanded
= render Pajamas::AlertComponent.new(variant: :info,

View File

@ -3108,6 +3108,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: projects_forks_sync
:worker_name: Projects::Forks::SyncWorker
:feature_category: :source_code_management
:has_external_dependencies: false
:urgency: :high
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: projects_git_garbage_collect
:worker_name: Projects::GitGarbageCollectWorker
:feature_category: :gitaly

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true

module Projects
  module Forks
    # Background job that brings a fork branch up to date with its upstream
    # project's default branch by delegating to Projects::Forks::SyncService.
    class SyncWorker
      include ApplicationWorker

      data_consistency :sticky
      idempotent!
      urgency :high

      feature_category :source_code_management

      # @param project_id [Integer] id of the fork project to synchronize
      # @param user_id [Integer] id of the user who triggered the sync
      # @param ref [String] the fork branch to merge upstream into
      def perform(project_id, user_id, ref)
        fork_project = Project.find_by_id(project_id)
        requester = User.find_by_id(user_id)

        # Silently skip when either record has been removed in the meantime.
        ::Projects::Forks::SyncService.new(fork_project, requester, ref).execute if fork_project && requester
      end
    end
  end
end

View File

@ -1,8 +0,0 @@
---
name: notes_create_service_tracking
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/18890
rollout_issue_url:
milestone: '12.5'
type: development
group: group::pipeline insights
default_enabled: false

View File

@ -419,6 +419,8 @@
- 1
- - projects_finalize_project_statistics_refresh
- 1
- - projects_forks_sync
- 1
- - projects_git_garbage_collect
- 1
- - projects_import_export_parallel_project_export

View File

@ -7,4 +7,4 @@
stage: enablement
tiers: premium, ultimate
issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/359133
documentation_url: https://docs.gitlab.com/ee/development/elasticsearch.html#deleting-advanced-search-migrations-in-a-major-version-upgrade
documentation_url: https://docs.gitlab.com/ee/development/search/advanced_search_migration_styleguide.html#deleting-advanced-search-migrations-in-a-major-version-upgrade

View File

@ -0,0 +1,15 @@
- title: "Embedding Grafana panels in Markdown is removed"
announcement_milestone: "15.9"
announcement_date: "2023-02-22"
removal_milestone: "16.0"
removal_date: "2023-05-22"
breaking_change: true
reporter: abellucci
stage: monitor
issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/389477
body: |
The ability to add Grafana panels in GitLab Flavored Markdown is removed.
We intend to replace this feature with the ability to [embed charts](https://gitlab.com/groups/gitlab-org/opstrace/-/epics/33)
with the [GitLab Observability UI](https://gitlab.com/gitlab-org/opstrace/opstrace-ui).
tiers: [Free, Silver, Gold, Core, Premium, Ultimate]
documentation_url: https://docs.gitlab.com/ee/operations/metrics/embed_grafana.html#embed-grafana-panels-in-markdown-deprecated

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true

# Adds an index on merge_request_diffs.external_diff.
class AddSyncIndexOnMergeRequestDiffsExternalDiff < Gitlab::Database::Migration[2.1]
  INDEX_NAME = 'index_merge_request_diffs_on_external_diff'

  # Required so add_concurrent_index can run outside a DDL transaction
  # (concurrent index creation cannot run inside one).
  disable_ddl_transaction!

  def up
    add_concurrent_index :merge_request_diffs, :external_diff, name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :merge_request_diffs, INDEX_NAME
  end
end

View File

@ -0,0 +1 @@
df059ad89887390a792f292b7062a2f04d901a049c2acea7b8ddaff677b8c9d5

View File

@ -30748,6 +30748,8 @@ CREATE INDEX index_merge_request_diff_details_pending_verification ON merge_requ
CREATE INDEX index_merge_request_diffs_by_id_partial ON merge_request_diffs USING btree (id) WHERE ((files_count > 0) AND ((NOT stored_externally) OR (stored_externally IS NULL)));
CREATE INDEX index_merge_request_diffs_on_external_diff ON merge_request_diffs USING btree (external_diff);
CREATE INDEX index_merge_request_diffs_on_external_diff_store ON merge_request_diffs USING btree (external_diff_store);
CREATE INDEX index_merge_request_diffs_on_merge_request_id_and_id ON merge_request_diffs USING btree (merge_request_id, id);

View File

@ -367,6 +367,9 @@ former is ideal for replicating data belonging to a subset of users, while the
latter is more suited to progressively rolling out Geo to a large GitLab
instance.
NOTE:
Geo's synchronization logic is outlined in the [documentation](../index.md). Both the solution and the documentation are subject to change from time to time. You must independently determine your legal obligations in regard to privacy and cybersecurity laws, and applicable trade control law on an ongoing basis.
Selective synchronization:
1. Does not restrict permissions from **secondary** sites.

View File

@ -446,6 +446,7 @@ instance. For example, `cache` or `shared_state`.
| `gitlab_redis_client_exceptions_total` | Counter | 13.2 | Number of Redis client exceptions, broken down by exception class |
| `gitlab_redis_client_requests_total` | Counter | 13.2 | Number of Redis client requests |
| `gitlab_redis_client_requests_duration_seconds` | Histogram | 13.2 | Redis request latency, excluding blocking commands |
| `gitlab_redis_client_redirections_total` | Counter | 15.10 | Number of Redis Cluster MOVED/ASK redirections, broken down by redirection type |
## Metrics shared directory

View File

@ -4727,6 +4727,30 @@ Input type: `ProjectSetLockedInput`
| <a id="mutationprojectsetlockederrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationprojectsetlockedproject"></a>`project` | [`Project`](#project) | Project after mutation. |
### `Mutation.projectSyncFork`
WARNING:
**Introduced** in 15.9.
This feature is in Alpha. It can be changed or removed at any time.
Input type: `ProjectSyncForkInput`
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationprojectsyncforkclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationprojectsyncforkprojectpath"></a>`projectPath` | [`ID!`](#id) | Full path of the project to initialize. |
| <a id="mutationprojectsyncforktargetbranch"></a>`targetBranch` | [`String!`](#string) | Ref of the fork to fetch into. |
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationprojectsyncforkclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationprojectsyncforkdetails"></a>`details` | [`ForkDetails`](#forkdetails) | Updated fork details. |
| <a id="mutationprojectsyncforkerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
### `Mutation.prometheusIntegrationCreate`
Input type: `PrometheusIntegrationCreateInput`
@ -13744,6 +13768,8 @@ Details of the fork project compared to its upstream project.
| ---- | ---- | ----------- |
| <a id="forkdetailsahead"></a>`ahead` | [`Int`](#int) | Number of commits ahead of upstream. |
| <a id="forkdetailsbehind"></a>`behind` | [`Int`](#int) | Number of commits behind upstream. |
| <a id="forkdetailshasconflicts"></a>`hasConflicts` | [`Boolean`](#boolean) | Indicates if the fork conflicts with its upstream project. |
| <a id="forkdetailsissyncing"></a>`isSyncing` | [`Boolean`](#boolean) | Indicates if there is a synchronization in progress. |
### `GeoNode`

View File

@ -8,17 +8,15 @@ info: To determine the technical writer assigned to the Stage/Group associated w
**Valid access levels**
Currently, these levels are recognized:
These access levels are recognized:
```plaintext
0 => No access
30 => Developer access
40 => Maintainer access
```
- `0`: No access
- `30`: Developer role
- `40`: Maintainer role
## List protected tags
Gets a list of protected tags from a project.
Gets a list of [protected tags](../user/project/protected_tags.md) from a project.
This function takes pagination parameters `page` and `per_page` to restrict the list of protected tags.
```plaintext
@ -27,10 +25,11 @@ GET /projects/:id/protected_tags
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) owned by the authenticated user |
| `id` | integer or string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) owned by the authenticated user. |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/protected_tags"
curl --header "PRIVATE-TOKEN: <your_access_token>" \
"https://gitlab.example.com/api/v4/projects/5/protected_tags"
```
Example response:
@ -62,11 +61,12 @@ GET /projects/:id/protected_tags/:name
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) owned by the authenticated user |
| `name` | string | yes | The name of the tag or wildcard |
| `id` | integer or string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) owned by the authenticated user. |
| `name` | string | yes | The name of the tag or wildcard. |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/protected_tags/release-1-0"
curl --header "PRIVATE-TOKEN: <your_access_token>" \
"https://gitlab.example.com/api/v4/projects/5/protected_tags/release-1-0"
```
Example response:
@ -86,23 +86,35 @@ Example response:
## Protect repository tags
Protects a single repository tag or several project repository
tags using a wildcard protected tag.
Protects a single repository tag, or several project repository
tags, using a wildcard protected tag.
```plaintext
POST /projects/:id/protected_tags
```
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/protected_tags?name=*-stable&create_access_level=30"
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
"https://gitlab.example.com/api/v4/projects/5/protected_tags" -d '{
"allowed_to_create" : [
{
"user_id" : 1
},
{
"access_level" : 30
}
],
"create_access_level" : 30,
"name" : "*-stable"
}'
```
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) owned by the authenticated user |
| `name` | string | yes | The name of the tag or wildcard |
| `create_access_level` | string | no | Access levels allowed to create (defaults: `40`, Maintainer role) |
| `allowed_to_create` | array | no | Array of access levels allowed to create tags, with each described by a hash of the form `{user_id: integer}`, `{group_id: integer}`, or `{access_level: integer}` |
| `id` | integer or string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) owned by the authenticated user. |
| `name` | string | yes | The name of the tag or wildcard. |
| `allowed_to_create` | array | no | Array of access levels allowed to create tags, with each described by a hash of the form `{user_id: integer}`, `{group_id: integer}`, or `{access_level: integer}`. |
| `create_access_level` | string | no | Access levels allowed to create. Default: `40`, for Maintainer role. |
Example response:
@ -128,10 +140,17 @@ DELETE /projects/:id/protected_tags/:name
```
```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/protected_tags/*-stable"
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" \
"https://gitlab.example.com/api/v4/projects/5/protected_tags/*-stable"
```
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) owned by the authenticated user |
| `name` | string | yes | The name of the tag |
| `id` | integer or string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) owned by the authenticated user. |
| `name` | string | yes | The name of the tag. |
## Related topics
- [Tags API](tags.md) for all tags
- [Tags](../user/project/repository/tags/index.md) user documentation
- [Protected tags](../user/project/protected_tags.md) user documentation

View File

@ -4,16 +4,16 @@ group: Global Search
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Elasticsearch knowledge
# Advanced Search development
This area is to maintain a compendium of useful information when working with Elasticsearch.
This page includes information about developing and working with Elasticsearch.
Information on how to enable Elasticsearch and perform the initial indexing is in
the [Elasticsearch integration documentation](../integration/advanced_search/elasticsearch.md#enable-advanced-search).
## Deep Dive
In June 2019, Mario de la Ossa hosted a Deep Dive (GitLab team members only: `https://gitlab.com/gitlab-org/create-stage/issues/1`) on the GitLab [Elasticsearch integration](../integration/advanced_search/elasticsearch.md) to share his domain specific knowledge with anyone who may work in this part of the codebase in the future. You can find the <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [recording on YouTube](https://www.youtube.com/watch?v=vrvl-tN2EaA), and the slides on [Google Slides](https://docs.google.com/presentation/d/1H-pCzI_LNrgrL5pJAIQgvLX8Ji0-jIKOg1QeJQzChug/edit) and in [PDF](https://gitlab.com/gitlab-org/create-stage/uploads/c5aa32b6b07476fa8b597004899ec538/Elasticsearch_Deep_Dive.pdf). Everything covered in this deep dive was accurate as of GitLab 12.0, and while specific details may have changed since then, it should still serve as a good introduction.
In June 2019, Mario de la Ossa hosted a Deep Dive (GitLab team members only: `https://gitlab.com/gitlab-org/create-stage/issues/1`) on the GitLab [Elasticsearch integration](../integration/advanced_search/elasticsearch.md) to share his domain specific knowledge with anyone who may work in this part of the codebase in the future. You can find the <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [recording on YouTube](https://www.youtube.com/watch?v=vrvl-tN2EaA), and the slides on [Google Slides](https://docs.google.com/presentation/d/1H-pCzI_LNrgrL5pJAIQgvLX8Ji0-jIKOg1QeJQzChug/edit) and in [PDF](https://gitlab.com/gitlab-org/create-stage/uploads/c5aa32b6b07476fa8b597004899ec538/Elasticsearch_Deep_Dive.pdf). Everything covered in this deep dive was accurate as of GitLab 12.0, and while specific details might have changed, it should still serve as a good introduction.
In August 2020, a second Deep Dive was hosted, focusing on [GitLab-specific architecture for multi-indices support](#zero-downtime-reindexing-with-multiple-indices). The <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [recording on YouTube](https://www.youtube.com/watch?v=0WdPR9oB2fg) and the [slides](https://lulalala.gitlab.io/gitlab-elasticsearch-deepdive/) are available. Everything covered in this deep dive was accurate as of GitLab 13.3.
@ -184,305 +184,6 @@ If the current version is `v12p1`, and we need to create a new version for `v12p
1. Change the namespace for files under `v12p1` folder from `Latest` to `V12p1`
1. Make changes to files under the `latest` folder as needed
## Creating a new Advanced Search migration
> This functionality was introduced by [#234046](https://gitlab.com/gitlab-org/gitlab/-/issues/234046).
NOTE:
This is only supported for indices created with GitLab 13.0 or later.
In the [`ee/elastic/migrate/`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/ee/elastic/migrate) folder, create a new file with the filename format `YYYYMMDDHHMMSS_migration_name.rb`. This format is the same for Rails database migrations.
```ruby
# frozen_string_literal: true
class MigrationName < Elastic::Migration
# Important: Any updates to the Elastic index mappings must be replicated in the respective
# configuration files:
# - `Elastic::Latest::Config`, for the main index.
# - `Elastic::Latest::<Type>Config`, for standalone indices.
def migrate
end
# Check if the migration has completed
# Return true if completed, otherwise return false
def completed?
end
end
```
Applied migrations are stored in `gitlab-#{RAILS_ENV}-migrations` index. All migrations not executed
are applied by the [`Elastic::MigrationWorker`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/app/workers/elastic/migration_worker.rb)
cron worker sequentially.
To update Elastic index mappings, apply the configuration to the respective files:
- For the main index: [`Elastic::Latest::Config`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/elastic/latest/config.rb).
- For standalone indices: `Elastic::Latest::<Type>Config`.
Migrations can be built with a retry limit and have the ability to be [failed and marked as halted](https://gitlab.com/gitlab-org/gitlab/-/blob/66e899b6637372a4faf61cfd2f254cbdd2fb9f6d/ee/lib/elastic/migration.rb#L40).
Any data or index cleanup needed to support migration retries should be handled within the migration.
### Migration helpers
The following migration helpers are available in `ee/app/workers/concerns/elastic/`:
#### `Elastic::MigrationBackfillHelper`
Backfills a specific field in an index. In most cases, the mapping for the field should already be added.
Requires the `index_name` and `field_name` methods.
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationBackfillHelper
private
def index_name
Issue.__elasticsearch__.index_name
end
def field_name
:schema_version
end
end
```
#### `Elastic::MigrationUpdateMappingsHelper`
Updates a mapping in an index by calling `put_mapping` with the mapping specified.
Requires the `index_name` and `new_mappings` methods.
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationUpdateMappingsHelper
private
def index_name
Issue.__elasticsearch__.index_name
end
def new_mappings
{
schema_version: {
type: 'short'
}
}
end
end
```
#### `Elastic::MigrationRemoveFieldsHelper`
Removes specified fields from an index.
Requires the `index_name`, `document_type` methods. If there is one field to remove, add the `field_to_remove` method, otherwise add `fields_to_remove` with an array of fields.
Checks in batches if any documents that match `document_type` have the fields specified in Elasticsearch. If documents exist, uses a Painless script to perform `update_by_query`.
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationRemoveFieldsHelper
batched!
throttle_delay 1.minute
private
def index_name
User.__elasticsearch__.index_name
end
def document_type
'user'
end
def fields_to_remove
%w[two_factor_enabled has_projects]
end
end
```
The default batch size is `10_000`. You can override this value by specifying `BATCH_SIZE`:
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationRemoveFieldsHelper
batched!
BATCH_SIZE = 100
...
end
```
#### `Elastic::MigrationObsolete`
Marks a migration as obsolete when it's no longer required.
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationObsolete
end
```
#### `Elastic::MigrationHelper`
Contains methods you can use when a migration doesn't fit the previous examples.
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationHelper
def migrate
...
end
def completed?
...
end
end
```
### Migration options supported by the `Elastic::MigrationWorker`
[`Elastic::MigrationWorker`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/app/workers/elastic/migration_worker.rb) supports the following migration options:
- `batched!` - Allow the migration to run in batches. If set, the [`Elastic::MigrationWorker`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/app/workers/elastic/migration_worker.rb)
will re-enqueue itself with a delay which is set using the `throttle_delay` option described below. The batching
must be handled within the `migrate` method, this setting controls the re-enqueuing only.
- `batch_size` - Sets the number of documents modified during a `batched!` migration run. This size should be set to a value which allows the updates
enough time to finish. This can be tuned in combination with the `throttle_delay` option described below. The batching
must be handled within a custom `migrate` method or by using the [`Elastic::MigrationBackfillHelper`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/app/workers/concerns/elastic/migration_backfill_helper.rb)
`migrate` method which uses this setting. Default value is 1000 documents.
- `throttle_delay` - Sets the wait time in between batch runs. This time should be set high enough to allow each migration batch
enough time to finish. Additionally, the time should be less than 30 minutes since that is how often the
[`Elastic::MigrationWorker`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/app/workers/elastic/migration_worker.rb)
cron worker runs. Default value is 5 minutes.
- `pause_indexing!` - Pause indexing while the migration runs. This setting will record the indexing setting before
the migration runs and set it back to that value when the migration is completed.
- `space_requirements!` - Verify that enough free space is available in the cluster when the migration runs. This setting
will halt the migration if the storage required is not available when the migration runs. The migration must provide
the space required in bytes by defining a `space_required_bytes` method.
- `retry_on_failure` - Enable the retry on failure feature. By default, it retries
the migration 30 times. After it runs out of retries, the migration is marked as halted.
To customize the number of retries, pass the `max_attempts` argument:
`retry_on_failure max_attempts: 10`
```ruby
# frozen_string_literal: true
class BatchedMigrationName < Elastic::Migration
# Declares a migration should be run in batches
batched!
throttle_delay 10.minutes
pause_indexing!
space_requirements!
retry_on_failure
# ...
end
```
### Multi-version compatibility
These Advanced Search migrations, like any other GitLab changes, need to support the case where
[multiple versions of the application are running at the same time](multi_version_compatibility.md).
Depending on the order of deployment, it's possible that the migration
has started or finished and there's still a server running the application code from before the
migration. We need to take this into consideration until we can
[ensure all Advanced Search migrations start after the deployment has finished](https://gitlab.com/gitlab-org/gitlab/-/issues/321619).
### Reverting a migration
Because Elasticsearch does not support transactions, we always need to design our
migrations to accommodate a situation where the application
code is reverted after the migration has started or after it is finished.
For this reason we generally defer destructive actions (for example, deletions after
some data is moved) to a later merge request after the migrations have
completed successfully. To be safe, for self-managed customers we should also
defer it to another release if there is risk of important data loss.
### Best practices for Advanced Search migrations
Follow these best practices for best results:
- When working in batches, keep the batch size under 9,000 documents
and `throttle_delay` for at least 3 minutes. The bulk indexer is set to run
every 1 minute and process a batch of 10,000 documents. These limits
allow the bulk indexer time to process records before another migration
batch is attempted.
- To ensure that document counts are up to date, it is recommended to refresh
the index before checking if a migration is completed.
- Add logging statements to each migration when the migration starts, when a
completion check occurs, and when the migration is completed. These logs
are helpful when debugging issues with migrations.
- Pause indexing if you're using any Elasticsearch Reindex API operations.
- Consider adding a retry limit if there is potential for the migration to fail.
This ensures that migrations can be halted if an issue occurs.
## Deleting Advanced Search migrations in a major version upgrade
Since our Advanced Search migrations usually require us to support multiple
code paths for a long period of time, it's important to clean those up when we
safely can.
We choose to use GitLab major version upgrades as a safe time to remove
backwards compatibility for indices that have not been fully migrated. We
[document this in our upgrade documentation](../update/index.md#upgrading-to-a-new-major-version).
We also choose to replace the migration code with the halted migration
and remove tests so that:
- We don't need to maintain any code that is called from our Advanced Search
migrations.
- We don't waste CI time running tests for migrations that we don't support
anymore.
- Operators who have not run this migration and who upgrade directly to the
target version will see a message prompting them to reindex from scratch.
To be extra safe, we will not delete migrations that were created in the last
minor version before the major upgrade. So, if we are upgrading to `%14.0`,
we should not delete migrations that were only added in `%13.12`. This is an
extra safety net as we expect there are migrations that get merged that may
take multiple weeks to finish on GitLab.com. It would be bad if we upgraded
GitLab.com to `%14.0` before the migrations in `%13.12` were finished. Since
our deployments to GitLab.com are automated and we currently don't have
automated checks to prevent this, the extra precaution is warranted.
Additionally, even if we did have automated checks to prevent it, we wouldn't
actually want to hold up GitLab.com deployments on Advanced Search migrations,
as they may still have another week to go, and that's too long to block
deployments.
### Process for removing migrations
For every migration that was created 2 minor versions before the major version
being upgraded to, we do the following:
1. Confirm the migration has actually completed successfully for GitLab.com.
1. Replace the content of the migration with:
```ruby
include Elastic::MigrationObsolete
```
1. Delete any spec files to support this migration.
1. Remove any logic handling backwards compatibility for this migration. You
can find this by looking for
`Elastic::DataMigrationService.migration_has_finished?(:migration_name_in_lowercase)`.
1. Create a merge request with these changes. Note that we should not
accidentally merge this before the major release is started.
## Performance Monitoring
### Prometheus

View File

@ -94,7 +94,7 @@ EE: true
uses system fonts for all text."
- Any client-facing change to our REST and GraphQL APIs **must** have a changelog entry.
See the [complete list what comprises a GraphQL breaking change](api_graphql_styleguide.md#breaking-changes).
- Any change that introduces an [Advanced Search migration](elasticsearch.md#creating-a-new-advanced-search-migration)
- Any change that introduces an [Advanced Search migration](search/advanced_search_migration_styleguide.md#creating-a-new-advanced-search-migration)
**must** have a changelog entry.
- A fix for a regression introduced and then fixed in the same release (such as
fixing a bug introduced during a monthly release candidate) **should not**

View File

@ -79,7 +79,7 @@ Consult these topics for information on contributing to specific GitLab features
- [Adding a new Redis instance](redis/new_redis_instance.md)
- [Sidekiq guidelines](sidekiq/index.md) for working with Sidekiq workers
- [Working with Gitaly](gitaly.md)
- [Elasticsearch integration docs](elasticsearch.md)
- [Advanced Search integration docs](advanced_search.md)
- [Working with merge request diffs](diffs.md)
- [Approval Rules](merge_request_concepts/approval_rules.md)
- [Repository mirroring](repository_mirroring.md)

View File

@ -0,0 +1,306 @@
---
stage: Data Stores
group: Global Search
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Advanced Search migration style guide
## Creating a new Advanced Search migration
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/234046) in GitLab 13.6.
NOTE:
This functionality is only supported for indices created in GitLab 13.0 and later.
In the [`ee/elastic/migrate/`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/ee/elastic/migrate) folder, create a new file with the filename format `YYYYMMDDHHMMSS_migration_name.rb`. This format is the same for Rails database migrations.
```ruby
# frozen_string_literal: true
class MigrationName < Elastic::Migration
# Important: Any updates to the Elastic index mappings must be replicated in the respective
# configuration files:
# - `Elastic::Latest::Config`, for the main index.
# - `Elastic::Latest::<Type>Config`, for standalone indices.
def migrate
end
# Check if the migration has completed
# Return true if completed, otherwise return false
def completed?
end
end
```
Applied migrations are stored in `gitlab-#{RAILS_ENV}-migrations` index. All migrations not executed
are applied by the [`Elastic::MigrationWorker`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/app/workers/elastic/migration_worker.rb)
cron worker sequentially.
To update Elastic index mappings, apply the configuration to the respective files:
- For the main index: [`Elastic::Latest::Config`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/elastic/latest/config.rb).
- For standalone indices: `Elastic::Latest::<Type>Config`.
Migrations can be built with a retry limit and have the ability to be [failed and marked as halted](https://gitlab.com/gitlab-org/gitlab/-/blob/66e899b6637372a4faf61cfd2f254cbdd2fb9f6d/ee/lib/elastic/migration.rb#L40).
Any data or index cleanup needed to support migration retries should be handled in the migration.
### Migration helpers
The following migration helpers are available in `ee/app/workers/concerns/elastic/`:
#### `Elastic::MigrationBackfillHelper`
Backfills a specific field in an index. In most cases, the mapping for the field should already be added.
Requires the `index_name` and `field_name` methods.
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationBackfillHelper
private
def index_name
Issue.__elasticsearch__.index_name
end
def field_name
:schema_version
end
end
```
#### `Elastic::MigrationUpdateMappingsHelper`
Updates a mapping in an index by calling `put_mapping` with the mapping specified.
Requires the `index_name` and `new_mappings` methods.
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationUpdateMappingsHelper
private
def index_name
Issue.__elasticsearch__.index_name
end
def new_mappings
{
schema_version: {
type: 'short'
}
}
end
end
```
#### `Elastic::MigrationRemoveFieldsHelper`
Removes specified fields from an index.
Requires the `index_name` and `document_type` methods. If there is one field to remove, add the `field_to_remove` method. Otherwise, add `fields_to_remove` with an array of fields.
Checks in batches if any documents that match `document_type` have the fields specified in Elasticsearch. If documents exist, uses a Painless script to perform `update_by_query`.
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationRemoveFieldsHelper
batched!
throttle_delay 1.minute
private
def index_name
User.__elasticsearch__.index_name
end
def document_type
'user'
end
def fields_to_remove
%w[two_factor_enabled has_projects]
end
end
```
The default batch size is `10_000`. You can override this value by specifying `BATCH_SIZE`:
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationRemoveFieldsHelper
batched!
BATCH_SIZE = 100
...
end
```
#### `Elastic::MigrationObsolete`
Marks a migration as obsolete when it's no longer required.
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationObsolete
end
```
#### `Elastic::MigrationHelper`
Contains methods you can use when a migration doesn't fit the previous examples.
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationHelper
def migrate
...
end
def completed?
...
end
end
```
### Migration options supported by the `Elastic::MigrationWorker`
[`Elastic::MigrationWorker`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/app/workers/elastic/migration_worker.rb) supports the following migration options:
- `batched!` - Allow the migration to run in batches. If set, [`Elastic::MigrationWorker`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/app/workers/elastic/migration_worker.rb)
re-enqueues itself with a delay which is set using the `throttle_delay` option described below. The batching
must be handled in the `migrate` method. This setting controls the re-enqueuing only.
- `batch_size` - Sets the number of documents modified during a `batched!` migration run. This size should be set to a value which allows the updates
enough time to finish. This can be tuned in combination with the `throttle_delay` option described below. The batching
must be handled in a custom `migrate` method or by using the [`Elastic::MigrationBackfillHelper`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/app/workers/concerns/elastic/migration_backfill_helper.rb)
`migrate` method which uses this setting. Default value is 1000 documents.
- `throttle_delay` - Sets the wait time in between batch runs. This time should be set high enough to allow each migration batch
enough time to finish. Additionally, the time should be less than 30 minutes because that is how often the
[`Elastic::MigrationWorker`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/app/workers/elastic/migration_worker.rb)
cron worker runs. Default value is 5 minutes.
- `pause_indexing!` - Pause indexing while the migration runs. This setting records the indexing setting before
the migration runs and set it back to that value when the migration is completed.
- `space_requirements!` - Verify that enough free space is available in the cluster when the migration runs. This setting
halts the migration if the storage required is not available when the migration runs. The migration must provide
the space required in bytes by defining a `space_required_bytes` method.
- `retry_on_failure` - Enable the retry on failure feature. By default, it retries
the migration 30 times. After it runs out of retries, the migration is marked as halted.
To customize the number of retries, pass the `max_attempts` argument:
`retry_on_failure max_attempts: 10`
```ruby
# frozen_string_literal: true
class BatchedMigrationName < Elastic::Migration
# Declares a migration should be run in batches
batched!
throttle_delay 10.minutes
pause_indexing!
space_requirements!
retry_on_failure
# ...
end
```
### Multi-version compatibility
These Advanced Search migrations, like any other GitLab changes, need to support the case where
[multiple versions of the application are running at the same time](../multi_version_compatibility.md).
Depending on the order of deployment, it's possible that the migration
has started or finished and there's still a server running the application code from before the
migration. We need to take this into consideration until we can
[ensure all Advanced Search migrations start after the deployment has finished](https://gitlab.com/gitlab-org/gitlab/-/issues/321619).
### Reverting a migration
Because Elasticsearch does not support transactions, we always need to design our
migrations to accommodate a situation where the application
code is reverted after the migration has started or after it is finished.
For this reason we generally defer destructive actions (for example, deletions after
some data is moved) to a later merge request after the migrations have
completed successfully. To be safe, for self-managed customers we should also
defer it to another release if there is risk of important data loss.
### Best practices for Advanced Search migrations
Follow these best practices for best results:
- When working in batches, keep the batch size under 9,000 documents
and `throttle_delay` for at least 3 minutes. The bulk indexer is set to run
every 1 minute and process a batch of 10,000 documents. These limits
allow the bulk indexer time to process records before another migration
batch is attempted.
- To ensure that document counts are up to date, you should refresh
the index before checking if a migration is completed.
- Add logging statements to each migration when the migration starts, when a
completion check occurs, and when the migration is completed. These logs
are helpful when debugging issues with migrations.
- Pause indexing if you're using any Elasticsearch Reindex API operations.
- Consider adding a retry limit if there is potential for the migration to fail.
This ensures that migrations can be halted if an issue occurs.
## Deleting Advanced Search migrations in a major version upgrade
Because our Advanced Search migrations usually require us to support multiple
code paths for a long period of time, it's important to clean those up when we
safely can.
We choose to use GitLab major version upgrades as a safe time to remove
backwards compatibility for indices that have not been fully migrated. We
[document this in our upgrade documentation](../../update/index.md#upgrading-to-a-new-major-version).
We also choose to replace the migration code with the halted migration
and remove tests so that:
- We don't need to maintain any code that is called from our Advanced Search
migrations.
- We don't waste CI time running tests for migrations that we don't support
anymore.
- Operators who have not run this migration and who upgrade directly to the
target version see a message prompting them to reindex from scratch.
To be extra safe, we do not delete migrations that were created in the last
minor version before the major upgrade. So, if we are upgrading to `%14.0`,
we should not delete migrations that were only added in `%13.12`. This
extra safety net allows for migrations that might
take multiple weeks to finish on GitLab.com. It would be bad if we upgraded
GitLab.com to `%14.0` before the migrations in `%13.12` were finished. Because
our deployments to GitLab.com are automated and we don't have
automated checks to prevent this, the extra precaution is warranted.
Additionally, even if we did have automated checks to prevent it, we wouldn't
actually want to hold up GitLab.com deployments on Advanced Search migrations,
as they may still have another week to go, and that's too long to block
deployments.
### Process for removing migrations
For every migration that was created 2 minor versions before the major version
being upgraded to, we do the following:
1. Confirm the migration has actually completed successfully for GitLab.com.
1. Replace the content of the migration with:
```ruby
include Elastic::MigrationObsolete
```
1. Delete any spec files to support this migration.
1. Remove any logic handling backwards compatibility for this migration. You
can find this by looking for
`Elastic::DataMigrationService.migration_has_finished?(:migration_name_in_lowercase)`.
1. Create a merge request with these changes. Note that we should not
   accidentally merge this before the major release is started.

View File

@ -98,9 +98,9 @@ p.each do |project|
end
```
## `500 Whoops` when accessing a Jira issue in GitLab
## `500 We're sorry` when accessing a Jira issue in GitLab
When accessing a Jira issue in GitLab, you might get a `500 Whoops, something went wrong on our end` error.
When accessing a Jira issue in GitLab, you might get a `500 We're sorry. Something went wrong on our end` error.
Check [`production.log`](../../administration/logs/index.md#productionlog) to see if it contains the following exception:
```plaintext

View File

@ -34,6 +34,18 @@ For removal reviewers (Technical Writers only):
https://about.gitlab.com/handbook/marketing/blog/release-posts/#update-the-removals-doc
-->
## Removed in 16.0
### Embedding Grafana panels in Markdown is removed
WARNING:
This is a [breaking change](https://docs.gitlab.com/ee/development/deprecation_guidelines/).
Review the details carefully before upgrading.
The ability to add Grafana panels in GitLab Flavored Markdown is removed.
We intend to replace this feature with the ability to [embed charts](https://gitlab.com/groups/gitlab-org/opstrace/-/epics/33)
with the [GitLab Observability UI](https://gitlab.com/gitlab-org/opstrace/opstrace-ui).
## Removed in 15.9
### Live Preview no longer available in the Web IDE

View File

@ -173,10 +173,22 @@ To view group import history:
### Migrated group items
The [`import_export.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/import_export/group/import_export.yml)
file for groups lists many of the items imported when migrating groups by direct transfer. View this file in the branch
for your version of GitLab to see the list of items relevant to you. For example,
[`import_export.yml` on the `14-10-stable-ee` branch](https://gitlab.com/gitlab-org/gitlab/-/blob/14-10-stable-ee/lib/gitlab/import_export/group/import_export.yml).
The group items that are migrated depend on the version of GitLab you use on the destination. To determine if a
specific group item is migrated:
1. Check the [`groups/stage.rb`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/bulk_imports/groups/stage.rb)
file for all editions and the
[`groups/stage.rb`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/ee/bulk_imports/groups/stage.rb) file
for Enterprise Edition for your version on the destination. For example, for version 15.9:
- <https://gitlab.com/gitlab-org/gitlab/-/blob/15-9-stable-ee/lib/bulk_imports/groups/stage.rb> (all editions).
- <https://gitlab.com/gitlab-org/gitlab/-/blob/15-9-stable-ee/ee/lib/ee/bulk_imports/groups/stage.rb> (Enterprise
Edition).
1. Check the
[`group/import_export.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/import_export/group/import_export.yml)
file for groups for your version on the destination. For example, for version 15.9:
<https://gitlab.com/gitlab-org/gitlab/-/blob/15-9-stable-ee/lib/gitlab/import_export/group/import_export.yml>.
Any other group items are **not** migrated.
Group items that are migrated to the destination GitLab instance include:
@ -203,8 +215,6 @@ Group items that are migrated to the destination GitLab instance include:
- Subgroups ([Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/18938) in GitLab 13.7)
- Uploads ([Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/18938) in GitLab 13.7)
Any other items are **not** migrated.
### Migrated project items (beta)
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/267945) in GitLab 14.4 [with a flag](../../feature_flags.md) named `bulk_import_projects`. Disabled by default.
@ -215,10 +225,22 @@ On self-managed GitLab, migrating project resources when migrating groups is not
To make it available ask an administrator to [enable the feature flag](../../../administration/feature_flags.md) named
`bulk_import_projects`. On GitLab.com, groups are migrated with all their projects by default.
The [`import_export.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/import_export/project/import_export.yml)
file for projects lists many of the items imported when migrating projects using group migration. View this file in the branch
for your version of GitLab to see the list of items relevant to you. For example,
[`import_export.yml` on the `14-10-stable-ee` branch](https://gitlab.com/gitlab-org/gitlab/-/blob/14-10-stable-ee/lib/gitlab/import_export/project/import_export.yml).
The project items that are migrated depends on the version of GitLab you use on the destination. To determine if a
specific project item is migrated:
1. Check the [`projects/stage.rb`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/bulk_imports/projects/stage.rb)
file for all editions and the
[`projects/stage.rb`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/ee/bulk_imports/projects/stage.rb)
file for Enterprise Edition for your version on the destination. For example, for version 15.9:
- <https://gitlab.com/gitlab-org/gitlab/-/blob/15-9-stable-ee/lib/bulk_imports/projects/stage.rb> (all editions).
- <https://gitlab.com/gitlab-org/gitlab/-/blob/15-9-stable-ee/ee/lib/ee/bulk_imports/projects/stage.rb> (Enterprise
Edition).
1. Check the
[`project/import_export.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/import_export/project/import_export.yml)
file for projects for your version on the destination. For example, for version 15.9:
<https://gitlab.com/gitlab-org/gitlab/-/blob/15-9-stable-ee/lib/gitlab/import_export/project/import_export.yml>.
Any other project items are **not** migrated.
WARNING:
Migrating projects when migrating groups by direct transfer is in [Beta](../../../policy/alpha-beta-support.md#beta-features)
@ -390,7 +412,7 @@ For example:
The [`import_export.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/import_export/group/import_export.yml)
file for groups lists items exported and imported when migrating groups using file exports. View this file in the branch
for your version of GitLab to see the list of items relevant to you. For example,
for your version of GitLab to check which items can be imported to the destination GitLab instance. For example,
[`import_export.yml` on the `14-10-stable-ee` branch](https://gitlab.com/gitlab-org/gitlab/-/blob/14-10-stable-ee/lib/gitlab/import_export/group/import_export.yml).
Group items that are exported include:

View File

@ -55,6 +55,7 @@ module Gitlab
phone_verification_verify_code: { threshold: 10, interval: 10.minutes },
namespace_exists: { threshold: 20, interval: 1.minute },
fetch_google_ip_list: { threshold: 10, interval: 1.minute },
project_fork_sync: { threshold: 10, interval: 30.minutes },
jobs_index: { threshold: 600, interval: 1.minute },
bulk_import: { threshold: 6, interval: 1.minute },
projects_api_rate_limit_unauthenticated: {

View File

@ -0,0 +1,42 @@
# frozen_string_literal: true

module Gitlab
  module Ci
    module Components
      ##
      # Components::Header represents the full component specification that is
      # prepended as the first YAML document in a CI component file.
      #
      class Header
        attr_reader :errors

        # header - Hash parsed from the component's leading YAML document.
        def initialize(header)
          @header = header
          @errors = []
        end

        # True when the component does not declare any inputs.
        def empty?
          inputs_spec.to_h.empty?
        end

        # Builds the user-provided inputs matched against the component's
        # inputs specification.
        #
        # NOTE(review): the result is memoized on first use, so later calls
        # with different +args+ return the originally built object — confirm
        # callers never pass different arguments.
        def inputs(args)
          @input ||= Ci::Input::Inputs.new(inputs_spec, args)
        end

        # Returns an interpolation context built from the inputs.
        # Raises ArgumentError when the provided inputs are not valid.
        def context(args)
          input = inputs(args)
          raise ArgumentError unless input.valid?

          Ci::Interpolation::Context.new({ inputs: input.to_hash })
        end

        private

        # The `spec:inputs:` section of the header, or nil when absent.
        def inputs_spec
          @header.dig(:spec, :inputs)
        end
      end
    end
  end
end

View File

@ -0,0 +1,62 @@
# frozen_string_literal: true

module Gitlab
  module Ci
    module Input
      module Arguments
        ##
        # Input::Arguments::Base is the common abstraction for input arguments:
        # - required
        # - optional
        # - with a default value
        #
        class Base
          attr_reader :key, :value, :spec, :errors

          ArgumentNotValidError = Class.new(StandardError)

          # key   - hash key / argument name
          # spec  - configured specification for this argument
          # value - user-provided value
          def initialize(key, spec, value)
            @key = key
            @value = value
            @spec = spec
            @errors = []

            # Only strings (or an absent value) are supported as user input.
            if !value.nil? && !value.is_a?(String)
              @errors.push("unsupported value in input argument `#{key}`")
            end

            validate!
          end

          # True when no validation errors have been recorded.
          def valid?
            @errors.empty?
          end

          # Subclasses verify the argument against its specification.
          def validate!
            raise NotImplementedError
          end

          # Subclasses return the effective value of the argument.
          def to_value
            raise NotImplementedError
          end

          # Returns `{ key => value }`; raises when the argument is invalid.
          # The result is memoized after the first call.
          def to_hash
            raise ArgumentNotValidError unless valid?

            @output ||= { key => to_value }
          end

          # Subclasses decide whether a given specification belongs to them.
          def self.matches?(spec)
            raise NotImplementedError
          end

          private

          # Records a validation error prefixed with the argument name.
          def error(message)
            @errors.push("`#{@key}` input: #{message}")
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,44 @@
# frozen_string_literal: true

module Gitlab
  module Ci
    module Input
      module Arguments
        ##
        # Input::Arguments::Default class represents user-provided input argument that has a default value.
        #
        class Default < Input::Arguments::Base
          # A default-valued argument is only well-formed when the
          # specification actually carries a default.
          def validate!
            error('invalid specification') unless default.present?
          end

          ##
          # User-provided value needs to be specified, but it may be an empty string:
          #
          # ```yaml
          # inputs:
          #   env:
          #     default: development
          #
          # with:
          #   env: ""
          # ```
          #
          # The configuration above will result in `env` being an empty string.
          #
          def to_value
            return default if value.nil?

            value
          end

          # The default configured in the component's specification.
          def default
            spec[:default]
          end

          # Matches a spec made of a single `default:` entry.
          def self.matches?(spec)
            spec.count == 1 && spec.each_key.first == :default
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true

module Gitlab
  module Ci
    module Input
      module Arguments
        ##
        # Input::Arguments::Options class represents user-provided input argument that is an enum, and is only valid
        # when the value provided is listed as an acceptable one.
        #
        class Options < Input::Arguments::Base
          ##
          # An empty value is valid if it is allowlisted:
          #
          # ```yaml
          # inputs:
          #   run:
          #     - ""
          #     - tests
          #
          # with:
          #   run: ""
          # ```
          #
          # The configuration above will return an empty value.
          #
          def validate!
            return error('argument specification invalid') if options.to_a.empty?
            return error('argument not provided') if value.nil?

            error("argument value #{value} not allowlisted") unless options.include?(value)
          end

          def to_value
            value
          end

          # The list of acceptable values from the specification.
          def options
            spec[:options]
          end

          # Matches a spec made of a single `options:` entry.
          def self.matches?(spec)
            spec.count == 1 && spec.each_key.first == :options
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,46 @@
# frozen_string_literal: true

module Gitlab
  module Ci
    module Input
      module Arguments
        ##
        # Input::Arguments::Required class represents user-provided required input argument.
        #
        class Required < Input::Arguments::Base
          ##
          # The value has to be defined, but it may be empty.
          #
          def validate!
            return unless value.nil?

            error('required value has not been provided')
          end

          def to_value
            value
          end

          ##
          # Required arguments do not have nested configuration; the spec entry
          # has to be defined as a null value:
          #
          # ```yaml
          # spec:
          #   inputs:
          #     website:
          # ```
          #
          # An empty value that has no specification is also considered a
          # "required" input. We should never see that being used, though,
          # because it will be rejected by Ci::Config::Header validation.
          #
          # ```yaml
          # spec:
          #   inputs:
          #     website: ""
          # ```
          def self.matches?(spec)
            spec.to_s.empty?
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true

module Gitlab
  module Ci
    module Input
      module Arguments
        ##
        # Input::Arguments::Unknown object gets fabricated when we can't match an input argument entry with any known
        # specification. It is matched as the last one, and always returns an error.
        #
        class Unknown < Input::Arguments::Base
          # Always records an error; the message is more specific when the
          # spec is a single-entry hash whose key we can name.
          def validate!
            message =
              if spec.is_a?(Hash) && spec.count == 1
                "unrecognized input argument specification: `#{spec.each_key.first}`"
              else
                'unrecognized input argument definition'
              end

            error(message)
          end

          def to_value
            raise ArgumentError, 'unknown argument value'
          end

          # Catch-all: matches any specification; must stay last in the list.
          def self.matches?(*)
            true
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,73 @@
# frozen_string_literal: true

module Gitlab
  module Ci
    module Input
      ##
      # Inputs::Input class represents user-provided inputs, configured using `with:` keyword.
      #
      # Input arguments are only valid with an associated component's inputs specification from component's header.
      #
      class Inputs
        UnknownSpecArgumentError = Class.new(StandardError)

        # Argument classes, checked in order; the first one whose `matches?`
        # accepts a spec entry fabricates the argument for it.
        ARGUMENTS = [
          Input::Arguments::Required, # Input argument is required
          Input::Arguments::Default,  # Input argument has a default value
          Input::Arguments::Options,  # Input argument that needs to be allowlisted
          Input::Arguments::Unknown   # Input argument has not been recognized
        ].freeze

        # spec - inputs specification from the component's header
        # args - user-provided arguments (`with:` keyword)
        def initialize(spec, args)
          @spec = spec
          @args = args
          @inputs = []
          @errors = []

          validate!
          fabricate!
        end

        # Own errors first, then every fabricated argument's errors.
        def errors
          @errors + @inputs.flat_map(&:errors)
        end

        def valid?
          errors.none?
        end

        # Argument names provided by the user but absent from the spec.
        def unknown
          @args.keys - @spec.keys
        end

        def count
          @inputs.count
        end

        # Merges all arguments into a single hash; raises if any is invalid.
        def to_hash
          @inputs.each_with_object({}) do |argument, result|
            raise ArgumentError unless argument.valid?

            result.merge!(argument.to_hash)
          end
        end

        private

        def validate!
          @errors.push("unknown input arguments: #{unknown.inspect}") if unknown.any?
        end

        # Matches every spec entry to the first argument class accepting it.
        def fabricate!
          @spec.each do |name, argument_spec|
            klass = ARGUMENTS.find { |candidate| candidate.matches?(argument_spec) }

            raise UnknownSpecArgumentError if klass.nil?

            @inputs.push(klass.new(name, argument_spec, @args[name]))
          end
        end
      end
    end
  end
end

View File

@ -118,6 +118,14 @@ module Gitlab
@exception_counter.increment({ storage: storage_key, exception: ex.class.to_s })
end
def instance_count_cluster_redirection(ex)
# This metric is meant to give a client side view of how often are commands
# redirected to the right node, especially during resharding.
# This metric can be used for Redis alerting and service health monitoring.
@redirection_counter ||= Gitlab::Metrics.counter(:gitlab_redis_client_redirections_total, 'Client side Redis Cluster redirection count, per Redis node, per slot')
@redirection_counter.increment(decompose_redirection_message(ex.message).merge({ storage: storage_key }))
end
def instance_observe_duration(duration)
@request_latency_histogram ||= Gitlab::Metrics.histogram(
:gitlab_redis_client_requests_duration_seconds,
@ -166,6 +174,11 @@ module Gitlab
def build_key(namespace)
"#{storage_key}_#{namespace}"
end
def decompose_redirection_message(err_msg)
redirection_type, _, target_node_key = err_msg.split
{ redirection_type: redirection_type, target_node_key: target_node_key }
end
end
end
end

View File

@ -40,7 +40,12 @@ module Gitlab
yield
rescue ::Redis::BaseError => ex
instrumentation_class.instance_count_exception(ex)
if ex.message.start_with?('MOVED', 'ASK')
instrumentation_class.instance_count_cluster_redirection(ex)
else
instrumentation_class.instance_count_exception(ex)
end
instrumentation_class.log_exception(ex)
raise ex
ensure

View File

@ -4743,6 +4743,9 @@ msgstr ""
msgid "Analytics|Analytics dashboards"
msgstr ""
msgid "Analytics|Dashboards are created by editing the projects dashboard files."
msgstr ""
msgid "Analyze your dependencies for known vulnerabilities."
msgstr ""
@ -19001,6 +19004,9 @@ msgstr ""
msgid "GitLab Pages"
msgstr ""
msgid "GitLab Pages has moved"
msgstr ""
msgid "GitLab Shell"
msgstr ""
@ -32589,9 +32595,6 @@ msgstr ""
msgid "ProductAnalytics|An error occurred while fetching data. Refresh the page to try again."
msgstr ""
msgid "ProductAnalytics|Analytics dashboards"
msgstr ""
msgid "ProductAnalytics|Analyze your product with Product Analytics"
msgstr ""
@ -32652,9 +32655,6 @@ msgstr ""
msgid "ProductAnalytics|Creating your product analytics instance..."
msgstr ""
msgid "ProductAnalytics|Dashboards are created by editing the projects dashboard files."
msgstr ""
msgid "ProductAnalytics|Data"
msgstr ""
@ -37100,6 +37100,9 @@ msgstr ""
msgid "Runners|Checkbox"
msgstr ""
msgid "Runners|Choose an executor when prompted by the command line. Executors run builds in different environments. %{linkStart}Not sure which one to select?%{linkEnd}"
msgstr ""
msgid "Runners|Choose your preferred GitLab Runner"
msgstr ""
@ -37118,6 +37121,9 @@ msgstr ""
msgid "Runners|Containers"
msgstr ""
msgid "Runners|Copy and paste the following command into your command line to register the runner."
msgstr ""
msgid "Runners|Copy instructions"
msgstr ""
@ -37192,6 +37198,12 @@ msgstr ""
msgid "Runners|Get started with runners"
msgstr ""
msgid "Runners|GitLab Runner must be installed before you can register a runner. %{linkStart}How do I install GitLab Runner?%{linkEnd}"
msgstr ""
msgid "Runners|Go to runners page"
msgstr ""
msgid "Runners|Group"
msgstr ""
@ -37237,6 +37249,9 @@ msgstr ""
msgid "Runners|Maintenance note"
msgstr ""
msgid "Runners|Manually verify that the runner is available to pick up jobs."
msgstr ""
msgid "Runners|Maximum amount of time the runner can run before it terminates. If a project has a shorter job timeout period, the job timeout period of the instance runner is used instead."
msgstr ""
@ -37309,6 +37324,9 @@ msgstr ""
msgid "Runners|Operating systems"
msgstr ""
msgid "Runners|Optional. Step 3"
msgstr ""
msgid "Runners|Owner"
msgstr ""
@ -37344,6 +37362,9 @@ msgstr ""
msgid "Runners|Register"
msgstr ""
msgid "Runners|Register \"%{runnerDescription}\" runner"
msgstr ""
msgid "Runners|Register a group runner"
msgstr ""
@ -37359,6 +37380,9 @@ msgstr ""
msgid "Runners|Register as many runners as you want. You can register runners as separate users, on separate servers, and on your local machine."
msgstr ""
msgid "Runners|Register runner"
msgstr ""
msgid "Runners|Registration token"
msgstr ""
@ -37515,6 +37539,12 @@ msgstr ""
msgid "Runners|Status"
msgstr ""
msgid "Runners|Step 1"
msgstr ""
msgid "Runners|Step 2"
msgstr ""
msgid "Runners|Stop the runner from accepting new jobs."
msgstr ""
@ -37524,6 +37554,9 @@ msgstr ""
msgid "Runners|Tags control which type of jobs a runner can handle. By tagging a runner, you make sure shared runners only handle the jobs they are equipped to run."
msgstr ""
msgid "Runners|The %{boldStart}runner token%{boldEnd} %{token} displays %{boldStart}only for a short time%{boldEnd}, and is stored in the %{codeStart}config.toml%{codeEnd} after you create the runner. It will not be visible once the runner is registered."
msgstr ""
msgid "Runners|The project, group or instance where the runner was registered. Instance runners are always owned by Administrator."
msgstr ""
@ -37541,6 +37574,9 @@ msgstr[1] ""
msgid "Runners|This group currently has no stale runners."
msgstr ""
msgid "Runners|This may not be needed if you manage your runner as a %{linkStart}system or user service%{linkEnd}."
msgstr ""
msgid "Runners|This runner has not run any jobs."
msgstr ""
@ -44913,6 +44949,9 @@ msgstr ""
msgid "To get started, use the link below to confirm your account."
msgstr ""
msgid "To go to GitLab Pages, on the left sidebar, select %{pages_link}."
msgstr ""
msgid "To help improve GitLab, we would like to periodically %{docs_link}. This can be changed at any time in %{settings_link}."
msgstr ""

View File

@ -56,7 +56,7 @@
"@gitlab/favicon-overlay": "2.0.0",
"@gitlab/fonts": "^1.2.0",
"@gitlab/svgs": "3.21.0",
"@gitlab/ui": "56.1.2",
"@gitlab/ui": "56.2.0",
"@gitlab/visual-review-tools": "1.7.3",
"@gitlab/web-ide": "0.0.1-dev-20230223005157",
"@rails/actioncable": "6.1.4-7",

View File

@ -73,7 +73,7 @@
500
</h1>
<div class="container">
<h3>Whoops, something went wrong on our end.</h3>
<h3>We're sorry. Something went wrong on our end.</h3>
<hr />
<!-- REQUEST_ID -->
<p>Try refreshing the page, or going back and attempting the action again.</p>

View File

@ -73,7 +73,7 @@
502
</h1>
<div class="container">
<h3>Whoops, GitLab is taking too much time to respond.</h3>
<h3>We're sorry. GitLab is taking too much time to respond.</h3>
<hr />
<p>Try refreshing the page, or going back and attempting the action again.</p>
<p>Please contact your GitLab administrator if this problem persists.</p>

View File

@ -73,7 +73,7 @@
503
</h1>
<div class="container">
<h3>Whoops, GitLab is currently unavailable.</h3>
<h3>We're sorry. GitLab is currently unavailable.</h3>
<hr />
<p>Try refreshing the page, or going back and attempting the action again.</p>
<p>Please contact your GitLab administrator if this problem persists.</p>

View File

@ -15,7 +15,7 @@ module QA
let(:source_issue_comments) do
source_issue.comments.map do |note|
{ **note.except(:id, :noteable_id), author: note[:author].except(:web_url) }
{ **note.except(:id, :noteable_id, :project_id), author: note[:author].except(:web_url) }
end
end
@ -32,7 +32,7 @@ module QA
let(:imported_issue_comments) do
imported_issue.comments.map do |note|
{ **note.except(:id, :noteable_id), author: note[:author].except(:web_url) }
{ **note.except(:id, :noteable_id, :project_id), author: note[:author].except(:web_url) }
end
end

View File

@ -6,7 +6,7 @@ module QA
include_context 'with gitlab project migration'
# this spec is used as a sanity test for gitlab migration because it can run outside of orchestrated setup
context 'with import within same instance', orchestrated: false, import: false do
context 'with import within same instance', :reliable, orchestrated: false, import: false do
let!(:source_project_with_readme) { true }
let!(:source_gitlab_address) { Runtime::Scenario.gitlab_address }
let!(:source_admin_api_client) { admin_api_client }

View File

@ -31,7 +31,7 @@ describe('initCopyToClipboard', () => {
const defaultButtonAttributes = {
'data-clipboard-text': 'foo bar',
title,
'data-title': title,
'data-original-title': title,
};
const createButton = (attributes = {}) => {
const combinedAttributes = { ...defaultButtonAttributes, ...attributes };

View File

@ -0,0 +1,108 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { GlButton } from '@gitlab/ui';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import setWindowLocation from 'helpers/set_window_location_helper';
import { s__ } from '~/locale';
import runnerForRegistrationQuery from '~/ci/runner/graphql/register/runner_for_registration.query.graphql';
import { PARAM_KEY_PLATFORM, DEFAULT_PLATFORM, WINDOWS_PLATFORM } from '~/ci/runner/constants';
import AdminRegisterRunnerApp from '~/ci/runner/admin_register_runner/admin_register_runner_app.vue';
import RegistrationInstructions from '~/ci/runner/components/registration/registration_instructions.vue';
import { runnerForRegistration } from '../mock_data';

// Runner taken from the GraphQL fixture; reused by every scenario below.
const mockRunner = runnerForRegistration.data.runner;
// The app receives the plain numeric id (as a string), not the GraphQL global id.
const mockRunnerId = `${getIdFromGraphQLId(mockRunner.id)}`;
const mockRunnersPath = '/admin/runners';
const MOCK_TOKEN = 'MOCK_TOKEN';

Vue.use(VueApollo);

describe('AdminRegisterRunnerApp', () => {
  let wrapper;
  let mockRunnerQuery;

  const findRegistrationInstructions = () => wrapper.findComponent(RegistrationInstructions);
  const findBtn = () => wrapper.findComponent(GlButton);

  // Shallow-mounts the app with the runner query handler mocked out.
  const createComponent = () => {
    wrapper = shallowMountExtended(AdminRegisterRunnerApp, {
      apolloProvider: createMockApollo([[runnerForRegistrationQuery, mockRunnerQuery]]),
      propsData: {
        runnerId: mockRunnerId,
        runnersPath: mockRunnersPath,
      },
    });
  };

  beforeEach(() => {
    // Default handler: resolves with the fixture runner plus an ephemeral token.
    mockRunnerQuery = jest.fn().mockResolvedValue({
      data: {
        runner: { ...mockRunner, ephemeralAuthenticationToken: MOCK_TOKEN },
      },
    });
  });

  describe('When showing runner details', () => {
    beforeEach(async () => {
      createComponent();
      await waitForPromises();
    });

    it('loads runner', () => {
      expect(mockRunnerQuery).toHaveBeenCalledWith({ id: mockRunner.id });
    });

    it('shows heading', () => {
      expect(wrapper.find('h1').text()).toContain(mockRunner.description);
    });

    it('shows registration instructions', () => {
      expect(findRegistrationInstructions().props()).toEqual({
        loading: false,
        platform: DEFAULT_PLATFORM,
        token: MOCK_TOKEN,
      });
    });

    it('shows runner list button', () => {
      expect(findBtn().attributes('href')).toEqual(mockRunnersPath);
      expect(findBtn().props('variant')).toEqual('confirm');
    });
  });

  describe('When another platform has been selected', () => {
    beforeEach(async () => {
      // The selected platform is read from the query string (e.g. ?platform=windows).
      setWindowLocation(`?${PARAM_KEY_PLATFORM}=${WINDOWS_PLATFORM}`);

      createComponent();
      await waitForPromises();
    });

    it('shows registration instructions for the platform', () => {
      expect(findRegistrationInstructions().props('platform')).toEqual(WINDOWS_PLATFORM);
    });
  });

  describe('When runner is loading', () => {
    beforeEach(async () => {
      // Intentionally no waitForPromises(): the query must still be pending.
      createComponent();
    });

    it('shows heading', () => {
      expect(wrapper.find('h1').text()).toBe(s__('Runners|Register runner'));
    });

    it('shows registration instructions', () => {
      expect(findRegistrationInstructions().props()).toEqual({
        loading: true,
        token: null,
        platform: DEFAULT_PLATFORM,
      });
    });
  });
});

View File

@ -0,0 +1,61 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
// NOTE(review): auto-generated by Jest — presumably from the registration utils
// spec (snapshot names match it); regenerate with `jest --updateSnapshot`
// instead of editing by hand.
exports[`registration utils for "linux" platform commandPrompt matches snapshot 1`] = `"$"`;
exports[`registration utils for "linux" platform commandPrompt matches snapshot 2`] = `"$"`;
exports[`registration utils for "linux" platform registerCommand matches snapshot 1`] = `
Array [
  "gitlab-runner register",
  "  --url http://test.host",
  "  --registration-token REGISTRATION_TOKEN",
]
`;
exports[`registration utils for "linux" platform registerCommand matches snapshot 2`] = `
Array [
  "gitlab-runner register",
  "  --url http://test.host",
  "  --registration-token REGISTRATION_TOKEN",
]
`;
exports[`registration utils for "linux" platform runCommand matches snapshot 1`] = `"gitlab-runner run"`;
exports[`registration utils for "linux" platform runCommand matches snapshot 2`] = `"gitlab-runner run"`;
exports[`registration utils for "null" platform commandPrompt matches snapshot 1`] = `"$"`;
exports[`registration utils for "null" platform registerCommand matches snapshot 1`] = `
Array [
  "gitlab-runner register",
  "  --url http://test.host",
  "  --registration-token REGISTRATION_TOKEN",
]
`;
exports[`registration utils for "null" platform runCommand matches snapshot 1`] = `"gitlab-runner run"`;
exports[`registration utils for "osx" platform commandPrompt matches snapshot 1`] = `"$"`;
exports[`registration utils for "osx" platform registerCommand matches snapshot 1`] = `
Array [
  "gitlab-runner register",
  "  --url http://test.host",
  "  --registration-token REGISTRATION_TOKEN",
]
`;
exports[`registration utils for "osx" platform runCommand matches snapshot 1`] = `"gitlab-runner run"`;
exports[`registration utils for "windows" platform commandPrompt matches snapshot 1`] = `">"`;
exports[`registration utils for "windows" platform registerCommand matches snapshot 1`] = `
Array [
  ".\\\\gitlab-runner.exe register",
  "  --url http://test.host",
  "  --registration-token REGISTRATION_TOKEN",
]
`;
exports[`registration utils for "windows" platform runCommand matches snapshot 1`] = `".\\\\gitlab-runner.exe run"`;

View File

@ -0,0 +1,39 @@
import CliCommand from '~/ci/runner/components/registration/cli_command.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';

describe('CliCommand', () => {
  let wrapper;

  // Read the <pre> through .textContent (not .text()) so the whitespace a user
  // actually sees in the rendered command is preserved in the assertion.
  const renderedCommand = () => wrapper.find('pre').element.textContent;
  const clipboardValue = () => wrapper.findComponent(ClipboardButton).props('text');

  const mountComponent = (propsData) => {
    wrapper = shallowMountExtended(CliCommand, {
      propsData: { ...propsData },
    });
  };

  it('when rendering a command', () => {
    mountComponent({ prompt: '#', command: 'echo hi' });

    expect(renderedCommand()).toBe('# echo hi');
    expect(clipboardValue()).toBe('echo hi');
  });

  it('when rendering a multi-line command', () => {
    mountComponent({ prompt: '#', command: ['git', ' --version'] });

    expect(renderedCommand()).toBe('# git --version');
    expect(clipboardValue()).toBe('git --version');
  });
});

View File

@ -0,0 +1,94 @@
import { GlSprintf, GlLink, GlSkeletonLoader } from '@gitlab/ui';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { TEST_HOST } from 'helpers/test_constants';
import RegistrationInstructions from '~/ci/runner/components/registration/registration_instructions.vue';
import CliCommand from '~/ci/runner/components/registration/cli_command.vue';
import { DEFAULT_PLATFORM, INSTALL_HELP_URL, EXECUTORS_HELP_URL } from '~/ci/runner/constants';

const REGISTRATION_TOKEN = 'REGISTRATION_TOKEN';
// The component reads the instance URL from window.gon.gitlab_url.
const DUMMY_GON = {
  gitlab_url: TEST_HOST,
};

describe('RegistrationInstructions', () => {
  let wrapper;
  let originalGon;

  // Each instruction step is rendered as its own <section>, in order.
  const findStepAt = (i) => wrapper.findAll('section').at(i);
  // Finds links by href, optionally scoped to a single step's container.
  const findLink = (href, container = wrapper) =>
    container.findAllComponents(GlLink).filter((w) => w.attributes('href') === href);

  const createComponent = (props) => {
    wrapper = shallowMountExtended(RegistrationInstructions, {
      propsData: {
        platform: DEFAULT_PLATFORM,
        token: REGISTRATION_TOKEN,
        ...props,
      },
      stubs: {
        GlSprintf,
      },
    });
  };

  // Save and restore the global gon so this suite does not leak state.
  beforeAll(() => {
    originalGon = window.gon;
    window.gon = { ...DUMMY_GON };
  });

  afterAll(() => {
    window.gon = originalGon;
  });

  beforeEach(() => {
    createComponent();
  });

  it('renders legacy instructions', () => {
    expect(findLink(INSTALL_HELP_URL).exists()).toBe(true);
  });

  it('renders step 1', () => {
    const step1 = findStepAt(0);

    expect(step1.findComponent(CliCommand).props()).toEqual({
      command: [
        'gitlab-runner register',
        `  --url ${TEST_HOST}`,
        `  --registration-token ${REGISTRATION_TOKEN}`,
      ],
      prompt: '$',
    });
    expect(step1.find('code').text()).toBe(REGISTRATION_TOKEN);
    expect(step1.findComponent(ClipboardButton).props('text')).toBe(REGISTRATION_TOKEN);
  });

  it('renders step 1 in loading state', () => {
    createComponent({
      loading: true,
    });

    const step1 = findStepAt(0);

    // While loading, a skeleton replaces both the token and its copy button.
    expect(step1.findComponent(GlSkeletonLoader).exists()).toBe(true);
    expect(step1.find('code').exists()).toBe(false);
    expect(step1.findComponent(ClipboardButton).exists()).toBe(false);
  });

  it('renders step 2', () => {
    const step2 = findStepAt(1);

    expect(findLink(EXECUTORS_HELP_URL, step2).exists()).toBe(true);
  });

  it('renders step 3', () => {
    const step3 = findStepAt(2);

    expect(step3.findComponent(CliCommand).props()).toEqual({
      command: 'gitlab-runner run',
      prompt: '$',
    });
  });
});

View File

@ -0,0 +1,54 @@
import { TEST_HOST } from 'helpers/test_constants';
import {
  DEFAULT_PLATFORM,
  LINUX_PLATFORM,
  MACOS_PLATFORM,
  WINDOWS_PLATFORM,
} from '~/ci/runner/constants';
import {
  commandPrompt,
  registerCommand,
  runCommand,
} from '~/ci/runner/components/registration/utils';

const REGISTRATION_TOKEN = 'REGISTRATION_TOKEN';
// The utils read the instance URL from window.gon.gitlab_url.
const DUMMY_GON = {
  gitlab_url: TEST_HOST,
};

describe('registration utils', () => {
  let originalGon;

  // Save and restore the global gon so this suite does not leak state.
  beforeAll(() => {
    originalGon = window.gon;
    window.gon = { ...DUMMY_GON };
  });

  afterAll(() => {
    window.gon = originalGon;
  });

  // null exercises the fallback when no platform is selected; expected output
  // per platform lives in the adjacent __snapshots__ file.
  describe.each([DEFAULT_PLATFORM, LINUX_PLATFORM, MACOS_PLATFORM, WINDOWS_PLATFORM, null])(
    'for "%s" platform',
    (platform) => {
      describe('commandPrompt', () => {
        it('matches snapshot', () => {
          expect(commandPrompt({ platform })).toMatchSnapshot();
        });
      });

      describe('registerCommand', () => {
        it('matches snapshot', () => {
          expect(
            registerCommand({ platform, registrationToken: REGISTRATION_TOKEN }),
          ).toMatchSnapshot();
        });
      });

      describe('runCommand', () => {
        it('matches snapshot', () => {
          expect(runCommand({ platform })).toMatchSnapshot();
        });
      });
    },
  );
});

View File

@ -1,5 +1,8 @@
// Fixtures generated by: spec/frontend/fixtures/runner.rb
// Register runner queries
import runnerForRegistration from 'test_fixtures/graphql/ci/runner/register/runner_for_registration.query.graphql.json';
// Show runner queries
import runnerData from 'test_fixtures/graphql/ci/runner/show/runner.query.graphql.json';
import runnerWithGroupData from 'test_fixtures/graphql/ci/runner/show/runner.query.graphql.with_group.json';
@ -325,4 +328,5 @@ export {
runnerJobsData,
runnerFormData,
runnerCreateResult,
runnerForRegistration,
};

View File

@ -146,6 +146,22 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
end
describe 'runner_for_registration.query.graphql', :freeze_time, type: :request do
runner_for_registration_query = 'register/runner_for_registration.query.graphql'
let_it_be(:query) do
get_graphql_query_as_string("#{query_path}#{runner_for_registration_query}")
end
it "#{fixtures_path}#{runner_for_registration_query}.json" do
post_graphql(query, current_user: admin, variables: {
id: runner.to_global_id.to_s
})
expect_graphql_errors_to_be_empty
end
end
describe 'runner_create.mutation.graphql', type: :request do
runner_create_mutation = 'new/runner_create.mutation.graphql'

View File

@ -1,16 +1,20 @@
import { GlAvatar } from '@gitlab/ui';
import { GlAvatar, GlDisclosureDropdown } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import UserMenu from '~/super_sidebar/components/user_menu.vue';
import UserNameGroup from '~/super_sidebar/components/user_name_group.vue';
import NewNavToggle from '~/nav/components/new_nav_toggle.vue';
import invalidUrl from '~/lib/utils/invalid_url';
import { userMenuMockData, userMenuMockStatus } from '../mock_data';
import { mockTracking } from 'helpers/tracking_helper';
import PersistentUserCallout from '~/persistent_user_callout';
import { userMenuMockData, userMenuMockStatus, userMenuMockPipelineMinutes } from '../mock_data';
describe('UserMenu component', () => {
let wrapper;
let trackingSpy;
const GlEmoji = { template: '<img/>' };
const toggleNewNavEndpoint = invalidUrl;
const showDropdown = () => wrapper.findComponent(GlDisclosureDropdown).vm.$emit('shown');
const createWrapper = (userDataChanges = {}) => {
wrapper = mountExtended(UserMenu, {
@ -28,6 +32,8 @@ describe('UserMenu component', () => {
toggleNewNavEndpoint,
},
});
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
};
describe('Toggle button', () => {
@ -137,22 +143,151 @@ describe('UserMenu component', () => {
let item;
const setItem = ({ has_start_trial } = {}) => {
createWrapper({ status: { ...userMenuMockStatus, has_start_trial } });
createWrapper({ trial: { has_start_trial } });
item = wrapper.findByTestId('start-trial-item');
};
describe('When Ultimate trial is not suggested for the user', () => {
it('does not render the start triel menu item', () => {
it('does not render the start trial menu item', () => {
setItem();
expect(item.exists()).toBe(false);
});
});
describe('When Ultimate trial can be suggested for the user', () => {
it('does not render the status menu item', () => {
it('does render the start trial menu item', () => {
setItem({ has_start_trial: true });
expect(item.exists()).toBe(true);
});
});
});
describe('Buy Pipeline Minutes item', () => {
let item;
const setItem = ({
show_buy_pipeline_minutes,
show_with_subtext,
show_notification_dot,
} = {}) => {
createWrapper({
pipeline_minutes: {
...userMenuMockPipelineMinutes,
show_buy_pipeline_minutes,
show_with_subtext,
show_notification_dot,
},
});
item = wrapper.findByTestId('buy-pipeline-minutes-item');
};
describe('When does NOT meet the condition to buy CI minutes', () => {
beforeEach(() => {
setItem();
});
it('does NOT render the buy pipeline minutes item', () => {
expect(item.exists()).toBe(false);
});
it('does not track the Sentry event', () => {
showDropdown();
expect(trackingSpy).not.toHaveBeenCalled();
});
});
describe('When does meet the condition to buy CI minutes', () => {
it('does render the menu item', () => {
setItem({ show_buy_pipeline_minutes: true });
expect(item.exists()).toBe(true);
});
it('tracks the Sentry event', () => {
setItem({ show_buy_pipeline_minutes: true });
showDropdown();
expect(trackingSpy).toHaveBeenCalledWith(
undefined,
userMenuMockPipelineMinutes.tracking_attrs['track-action'],
{
label: userMenuMockPipelineMinutes.tracking_attrs['track-label'],
property: userMenuMockPipelineMinutes.tracking_attrs['track-property'],
},
);
});
describe('Callout & notification dot', () => {
let spyFactory;
beforeEach(() => {
spyFactory = jest.spyOn(PersistentUserCallout, 'factory');
});
describe('When `show_notification_dot` is `false`', () => {
beforeEach(() => {
setItem({ show_buy_pipeline_minutes: true, show_notification_dot: false });
showDropdown();
});
it('does not set callout attributes', () => {
expect(item.attributes()).not.toEqual(
expect.objectContaining({
'data-feature-id': userMenuMockPipelineMinutes.callout_attrs.feature_id,
'data-dismiss-endpoint': userMenuMockPipelineMinutes.callout_attrs.dismiss_endpoint,
}),
);
});
it('does not initialize the Persistent Callout', () => {
expect(spyFactory).not.toHaveBeenCalled();
});
it('does not render notification dot', () => {
expect(wrapper.findByTestId('buy-pipeline-minutes-notification-dot').exists()).toBe(
false,
);
});
});
describe('When `show_notification_dot` is `true`', () => {
beforeEach(() => {
setItem({ show_buy_pipeline_minutes: true, show_notification_dot: true });
showDropdown();
});
it('sets the callout data attributes', () => {
expect(item.attributes()).toEqual(
expect.objectContaining({
'data-feature-id': userMenuMockPipelineMinutes.callout_attrs.feature_id,
'data-dismiss-endpoint': userMenuMockPipelineMinutes.callout_attrs.dismiss_endpoint,
}),
);
});
it('initializes the Persistent Callout', () => {
expect(spyFactory).toHaveBeenCalled();
});
it('renders notification dot', () => {
expect(wrapper.findByTestId('buy-pipeline-minutes-notification-dot').exists()).toBe(
true,
);
});
});
});
describe('Warning message', () => {
it('does not display the warning message when `show_with_subtext` is `false`', () => {
setItem({ show_buy_pipeline_minutes: true });
expect(item.text()).not.toContain(UserMenu.i18n.oneOfGroupsRunningOutOfPipelineMinutes);
});
it('displays the text and warning message when `show_with_subtext` is true', () => {
setItem({ show_buy_pipeline_minutes: true, show_with_subtext: true });
expect(item.text()).toContain(UserMenu.i18n.oneOfGroupsRunningOutOfPipelineMinutes);
});
});
});
});
@ -160,7 +295,7 @@ describe('UserMenu component', () => {
it('should render a link to the profile page', () => {
createWrapper();
const item = wrapper.findByTestId('edit-profile-item');
expect(item.text()).toBe(UserMenu.i18n.user.editProfile);
expect(item.text()).toBe(UserMenu.i18n.editProfile);
expect(item.find('a').attributes('href')).toBe(userMenuMockData.settings.profile_path);
});
});
@ -169,7 +304,7 @@ describe('UserMenu component', () => {
it('should render a link to the profile page', () => {
createWrapper();
const item = wrapper.findByTestId('preferences-item');
expect(item.text()).toBe(UserMenu.i18n.user.preferences);
expect(item.text()).toBe(UserMenu.i18n.preferences);
expect(item.find('a').attributes('href')).toBe(
userMenuMockData.settings.profile_preferences_path,
);
@ -181,7 +316,7 @@ describe('UserMenu component', () => {
it('should render a link to switch to GitLab Next', () => {
createWrapper({ gitlab_com_but_not_canary: true });
const item = wrapper.findByTestId('gitlab-next-item');
expect(item.text()).toBe(UserMenu.i18n.user.gitlabNext);
expect(item.text()).toBe(UserMenu.i18n.gitlabNext);
expect(item.find('a').attributes('href')).toBe(userMenuMockData.canary_toggle_com_url);
});
});

View File

@ -93,6 +93,21 @@ export const userMenuMockStatus = {
clear_after: '2023-02-09 20:06:35 UTC',
};
export const userMenuMockPipelineMinutes = {
show_buy_pipeline_minutes: false,
show_notification_dot: false,
callout_attrs: {
feature_id: 'pipeline_minutes',
dismiss_endpoint: '/-/dismiss',
},
buy_pipeline_minutes_path: '/buy/pipeline_minutes',
tracking_attrs: {
'track-action': 'trackAction',
'track-label': 'label',
'track-property': 'property',
},
};
export const userMenuMockData = {
name: 'Orange Fox',
username: 'thefox',
@ -107,6 +122,7 @@ export const userMenuMockData = {
profile_path: invalidUrl,
profile_preferences_path: invalidUrl,
},
pipeline_minutes: userMenuMockPipelineMinutes,
can_sign_out: false,
sign_out_link: invalidUrl,
gitlab_com_but_not_canary: true,

View File

@ -60,103 +60,125 @@ function createComponent(options = {}) {
describe('LabelToken', () => {
let mock;
let wrapper;
const defaultLabels = OPTIONS_NONE_ANY;
beforeEach(() => {
mock = new MockAdapter(axios);
});
const findBaseToken = () => wrapper.findComponent(BaseToken);
const findSuggestions = () => wrapper.findAllComponents(GlFilteredSearchSuggestion);
const findTokenSegments = () => wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const triggerFetchLabels = (searchTerm = null) => {
findBaseToken().vm.$emit('fetch-suggestions', searchTerm);
return waitForPromises();
};
afterEach(() => {
mock.restore();
wrapper.destroy();
});
describe('methods', () => {
beforeEach(() => {
wrapper = createComponent();
});
describe('getActiveLabel', () => {
it('returns label object from labels array based on provided `currentValue` param', () => {
expect(wrapper.vm.getActiveLabel(mockLabels, 'Foo Label')).toEqual(mockRegularLabel);
wrapper = createComponent();
expect(findBaseToken().props('getActiveTokenValue')(mockLabels, 'Foo Label')).toEqual(
mockRegularLabel,
);
});
});
describe('getLabelName', () => {
it('returns value of `name` or `title` property present in provided label param', () => {
let mockLabel = {
title: 'foo',
};
it('returns value of `name` or `title` property present in provided label param', async () => {
const customMockLabels = [
{ title: 'Title with no name label' },
{ name: 'Name Label', title: 'Title with name label' },
];
expect(wrapper.vm.getLabelName(mockLabel)).toBe(mockLabel.title);
wrapper = createComponent({
active: true,
config: {
...mockLabelToken,
fetchLabels: jest.fn().mockResolvedValue({ data: customMockLabels }),
},
stubs: { Portal: true },
});
mockLabel = {
name: 'foo',
};
await waitForPromises();
expect(wrapper.vm.getLabelName(mockLabel)).toBe(mockLabel.name);
const suggestions = findSuggestions();
const indexWithTitle = defaultLabels.length;
const indexWithName = defaultLabels.length + 1;
expect(suggestions.at(indexWithTitle).text()).toBe(customMockLabels[0].title);
expect(suggestions.at(indexWithName).text()).toBe(customMockLabels[1].name);
});
});
describe('fetchLabels', () => {
it('calls `config.fetchLabels` with provided searchTerm param', () => {
jest.spyOn(wrapper.vm.config, 'fetchLabels');
describe('when request is successful', () => {
const searchTerm = 'foo';
wrapper.vm.fetchLabels('foo');
beforeEach(async () => {
wrapper = createComponent({
config: {
fetchLabels: jest.fn().mockResolvedValue({ data: mockLabels }),
},
});
await triggerFetchLabels(searchTerm);
});
expect(wrapper.vm.config.fetchLabels).toHaveBeenCalledWith('foo');
});
it('calls `config.fetchLabels` with provided searchTerm param', () => {
expect(findBaseToken().props('config').fetchLabels).toHaveBeenCalledWith(searchTerm);
});
it('sets response to `labels` when request is succesful', () => {
jest.spyOn(wrapper.vm.config, 'fetchLabels').mockResolvedValue(mockLabels);
it('sets response to `labels`', () => {
expect(findBaseToken().props('suggestions')).toEqual(mockLabels);
});
wrapper.vm.fetchLabels('foo');
return waitForPromises().then(() => {
expect(wrapper.vm.labels).toEqual(mockLabels);
it('sets `loading` to false when request completes', () => {
expect(findBaseToken().props('suggestionsLoading')).toBe(false);
});
});
it('calls `createAlert` with flash error message when request fails', () => {
jest.spyOn(wrapper.vm.config, 'fetchLabels').mockRejectedValue({});
describe('when request fails', () => {
beforeEach(async () => {
wrapper = createComponent({
config: {
fetchLabels: jest.fn().mockRejectedValue({}),
},
});
await triggerFetchLabels();
});
wrapper.vm.fetchLabels('foo');
return waitForPromises().then(() => {
it('calls `createAlert` with flash error message', () => {
expect(createAlert).toHaveBeenCalledWith({
message: 'There was a problem fetching labels.',
});
});
});
it('sets `loading` to false when request completes', () => {
jest.spyOn(wrapper.vm.config, 'fetchLabels').mockRejectedValue({});
wrapper.vm.fetchLabels('foo');
return waitForPromises().then(() => {
expect(wrapper.vm.loading).toBe(false);
it('sets `loading` to false when request completes', () => {
expect(findBaseToken().props('suggestionsLoading')).toBe(false);
});
});
});
});
describe('template', () => {
const defaultLabels = OPTIONS_NONE_ANY;
beforeEach(async () => {
wrapper = createComponent({ value: { data: `"${mockRegularLabel.title}"` } });
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
// eslint-disable-next-line no-restricted-syntax
wrapper.setData({
labels: mockLabels,
wrapper = createComponent({
value: { data: `"${mockRegularLabel.title}"` },
config: {
initialLabels: mockLabels,
},
});
await nextTick();
});
it('renders base-token component', () => {
const baseTokenEl = wrapper.findComponent(BaseToken);
const baseTokenEl = findBaseToken();
expect(baseTokenEl.exists()).toBe(true);
expect(baseTokenEl.props()).toMatchObject({
@ -166,7 +188,7 @@ describe('LabelToken', () => {
});
it('renders token item when value is selected', () => {
const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const tokenSegments = findTokenSegments();
expect(tokenSegments).toHaveLength(3); // Label, =, "Foo Label"
expect(tokenSegments.at(2).text()).toBe(`~${mockRegularLabel.title}`); // "Foo Label"
@ -181,12 +203,12 @@ describe('LabelToken', () => {
config: { ...mockLabelToken, defaultLabels },
stubs: { Portal: true },
});
const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const tokenSegments = findTokenSegments();
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
const suggestions = findSuggestions();
expect(suggestions).toHaveLength(defaultLabels.length);
defaultLabels.forEach((label, index) => {
@ -200,7 +222,7 @@ describe('LabelToken', () => {
config: { ...mockLabelToken, defaultLabels: [] },
stubs: { Portal: true },
});
const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const tokenSegments = findTokenSegments();
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
@ -215,11 +237,10 @@ describe('LabelToken', () => {
config: { ...mockLabelToken },
stubs: { Portal: true },
});
const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const tokenSegments = findTokenSegments();
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
const suggestions = findSuggestions();
expect(suggestions).toHaveLength(OPTIONS_NONE_ANY.length);
OPTIONS_NONE_ANY.forEach((label, index) => {
@ -234,7 +255,7 @@ describe('LabelToken', () => {
input: mockInput,
},
});
wrapper.findComponent(BaseToken).vm.$emit('input', [{ data: 'mockData', operator: '=' }]);
findBaseToken().vm.$emit('input', [{ data: 'mockData', operator: '=' }]);
expect(mockInput).toHaveBeenLastCalledWith([{ data: 'mockData', operator: '=' }]);
});

View File

@ -5,7 +5,7 @@ exports[`Upload dropzone component correctly overrides description and drop mess
class="gl-w-full gl-relative"
>
<button
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4 gl-mb-0"
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
type="button"
>
<div
@ -86,7 +86,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
class="gl-w-full gl-relative"
>
<button
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4 gl-mb-0"
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
type="button"
>
<div
@ -171,7 +171,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
class="gl-w-full gl-relative"
>
<button
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4 gl-mb-0"
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
type="button"
>
<div
@ -256,7 +256,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
class="gl-w-full gl-relative"
>
<button
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4 gl-mb-0"
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
type="button"
>
<div
@ -342,7 +342,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
class="gl-w-full gl-relative"
>
<button
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4 gl-mb-0"
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
type="button"
>
<div
@ -428,7 +428,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
class="gl-w-full gl-relative"
>
<button
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4 gl-mb-0"
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
type="button"
>
<div
@ -514,7 +514,7 @@ exports[`Upload dropzone component when no slot provided renders default dropzon
class="gl-w-full gl-relative"
>
<button
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4 gl-mb-0"
class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-px-5 gl-py-4 gl-mb-0"
type="button"
>
<div

View File

@ -9,6 +9,8 @@ RSpec.describe GitlabSchema.types['ForkDetails'], feature_category: :source_code
fields = %i[
ahead
behind
isSyncing
hasConflicts
]
expect(described_class).to have_graphql_fields(*fields)

View File

@ -62,9 +62,9 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
end
describe '#super_sidebar_context' do
let(:user) { build(:user) }
let(:group) { build(:group) }
let(:panel) { {} }
let_it_be(:user) { build(:user) }
let_it_be(:group) { build(:group) }
let_it_be(:panel) { {} }
subject do
helper.super_sidebar_context(user, group: group, project: nil, panel: panel)
@ -75,11 +75,11 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
allow(helper).to receive(:can?).and_return(true)
allow(panel).to receive(:super_sidebar_menu_items).and_return(nil)
allow(panel).to receive(:super_sidebar_context_header).and_return(nil)
Rails.cache.write(['users', user.id, 'assigned_open_issues_count'], 1)
Rails.cache.write(['users', user.id, 'assigned_open_merge_requests_count'], 4)
Rails.cache.write(['users', user.id, 'review_requested_open_merge_requests_count'], 0)
Rails.cache.write(['users', user.id, 'todos_pending_count'], 3)
Rails.cache.write(['users', user.id, 'total_merge_requests_count'], 4)
allow(user).to receive(:assigned_open_issues_count).and_return(1)
allow(user).to receive(:assigned_open_merge_requests_count).and_return(4)
allow(user).to receive(:review_requested_open_merge_requests_count).and_return(0)
allow(user).to receive(:todos_pending_count).and_return(3)
allow(user).to receive(:total_merge_requests_count).and_return(4)
end
it 'returns sidebar values from user', :use_clean_rails_memory_store_caching do

View File

@ -165,6 +165,26 @@ RSpec.describe Users::CalloutsHelper do
end
end
describe '.show_pages_menu_callout?' do
subject { helper.show_pages_menu_callout? }
before do
allow(helper).to receive(:user_dismissed?).with(described_class::PAGES_MOVED_CALLOUT) { dismissed }
end
context 'when user has not dismissed' do
let(:dismissed) { false }
it { is_expected.to be true }
end
context 'when user dismissed' do
let(:dismissed) { true }
it { is_expected.to be false }
end
end
describe '#web_hook_disabled_dismissed?' do
context 'without a project' do
it 'is false' do

View File

@ -0,0 +1,50 @@
# frozen_string_literal: true

require 'fast_spec_helper'

# Specs for Gitlab::Ci::Components::Header, which fabricates a component's
# `spec:` section and exposes its inputs and interpolation context.
RSpec.describe Gitlab::Ci::Components::Header, feature_category: :pipeline_composition do
  subject { described_class.new(spec) }

  context 'when spec is valid' do
    let(:spec) do
      {
        spec: {
          inputs: {
            website: nil,
            run: {
              options: %w[opt1 opt2]
            }
          }
        }
      }
    end

    it 'fabricates a spec from valid data' do
      expect(subject).not_to be_empty
    end

    describe '#inputs' do
      it 'fabricates input data' do
        # NOTE: fixture URL fixed — it previously read 'https//gitlab.com'
        # (missing colon), which was clearly a typo in the test data.
        input = subject.inputs({ website: 'https://gitlab.com', run: 'opt1' })

        expect(input.count).to eq 2
      end
    end

    describe '#context' do
      it 'fabricates interpolation context' do
        ctx = subject.context({ website: 'https://gitlab.com', run: 'opt1' })

        expect(ctx).to be_valid
      end
    end
  end

  context 'when spec is empty' do
    let(:spec) { { spec: {} } }

    it 'returns an empty header' do
      expect(subject).to be_empty
    end
  end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
require 'fast_spec_helper'
# Gitlab::Ci::Input::Arguments::Base is the abstract parent of the input
# argument types; subclasses implement `validate!` and `to_value`.
RSpec.describe Gitlab::Ci::Input::Arguments::Base, feature_category: :pipeline_composition do
# Minimal concrete subclass with no-op hooks, so only behavior defined on
# the base class itself is exercised here.
subject do
Class.new(described_class) do
def validate!; end
def to_value; end
end
end
it 'fabricates an invalid input argument if unknown value is provided' do
# Constructor signature appears to be (key, spec, value) — consistent with
# the sibling argument specs; the Array value triggers the error below.
argument = subject.new(:something, { spec: 123 }, [:a, :b])
expect(argument).not_to be_valid
expect(argument.errors.first).to eq 'unsupported value in input argument `something`'
end
end

View File

@ -0,0 +1,45 @@
# frozen_string_literal: true

require 'fast_spec_helper'

# Specs for Gitlab::Ci::Input::Arguments::Default — an input argument that
# falls back to a spec-provided default when the user passes no value.
RSpec.describe Gitlab::Ci::Input::Arguments::Default, feature_category: :pipeline_composition do
  it 'returns a user-provided value if it is present' do
    argument = described_class.new(:website, { default: 'https://gitlab.com' }, 'https://example.gitlab.com')

    expect(argument).to be_valid
    expect(argument.to_value).to eq 'https://example.gitlab.com'
    expect(argument.to_hash).to eq({ website: 'https://example.gitlab.com' })
  end

  # NOTE: fixed 'user-provider' -> 'user-provided' typos in the two example
  # descriptions below.
  it 'returns an empty value if user-provided input is empty' do
    # An explicit empty string counts as "provided" and wins over the default.
    argument = described_class.new(:website, { default: 'https://gitlab.com' }, '')

    expect(argument).to be_valid
    expect(argument.to_value).to eq ''
    expect(argument.to_hash).to eq({ website: '' })
  end

  it 'returns a default value if user-provided one is unknown' do
    # nil (nothing provided) falls back to the spec default.
    argument = described_class.new(:website, { default: 'https://gitlab.com' }, nil)

    expect(argument).to be_valid
    expect(argument.to_value).to eq 'https://gitlab.com'
    expect(argument.to_hash).to eq({ website: 'https://gitlab.com' })
  end

  it 'returns an error if the argument has not been fabricated correctly' do
    # `required:` is not a keyword this argument type understands.
    argument = described_class.new(:website, { required: 'https://gitlab.com' }, 'https://example.gitlab.com')

    expect(argument).not_to be_valid
  end

  describe '.matches?' do
    it 'matches specs with default configuration' do
      expect(described_class.matches?({ default: 'abc' })).to be true
    end

    it 'does not match specs with a different configuration keyword' do
      expect(described_class.matches?({ options: %w[a b] })).to be false
    end
  end
end

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true
require 'fast_spec_helper'
# Specs for Gitlab::Ci::Input::Arguments::Options — an input argument whose
# value must come from an allowlist declared under `options:`.
RSpec.describe Gitlab::Ci::Input::Arguments::Options, feature_category: :pipeline_composition do
it 'returns a user-provided value if it is an allowed one' do
argument = described_class.new(:run, { options: %w[opt1 opt2] }, 'opt1')
expect(argument).to be_valid
expect(argument.to_value).to eq 'opt1'
expect(argument.to_hash).to eq({ run: 'opt1' })
end
it 'returns an error if user-provided value is not allowlisted' do
argument = described_class.new(:run, { options: %w[opt1 opt2] }, 'opt3')
expect(argument).not_to be_valid
expect(argument.errors.first).to eq '`run` input: argument value opt3 not allowlisted'
end
it 'returns an error if specification is not correct' do
# `options: nil` is an invalid specification, not an empty allowlist.
argument = described_class.new(:website, { options: nil }, 'opt1')
expect(argument).not_to be_valid
expect(argument.errors.first).to eq '`website` input: argument specification invalid'
end
it 'returns an error if specification is using a hash' do
# A Hash allowlist contains no matching entry, so per the asserted message
# the value is simply treated as not allowlisted rather than a spec error.
argument = described_class.new(:website, { options: { a: 1 } }, 'opt1')
expect(argument).not_to be_valid
expect(argument.errors.first).to eq '`website` input: argument value opt1 not allowlisted'
end
it 'returns an empty value if it is allowlisted' do
argument = described_class.new(:run, { options: ['opt1', ''] }, '')
expect(argument).to be_valid
expect(argument.to_value).to be_empty
expect(argument.to_hash).to eq({ run: '' })
end
describe '.matches?' do
it 'matches specs with options configuration' do
expect(described_class.matches?({ options: %w[a b] })).to be true
end
it 'does not match specs different configuration keyword' do
expect(described_class.matches?({ default: 'abc' })).to be false
end
end
end

View File

@ -0,0 +1,41 @@
# frozen_string_literal: true

require 'fast_spec_helper'

# Specs for Gitlab::Ci::Input::Arguments::Required — an input argument with
# no configuration, meaning the caller must supply a value.
RSpec.describe Gitlab::Ci::Input::Arguments::Required, feature_category: :pipeline_composition do
  it 'returns a user-provided value if it is present' do
    argument = described_class.new(:website, nil, 'https://example.gitlab.com')

    expect(argument).to be_valid
    expect(argument.to_value).to eq 'https://example.gitlab.com'
    expect(argument.to_hash).to eq({ website: 'https://example.gitlab.com' })
  end

  # NOTE: fixed 'user-provider' -> 'user-provided' typo in the description.
  it 'returns an empty value if user-provided value is empty' do
    # An explicit empty string still satisfies the "required" constraint.
    argument = described_class.new(:website, nil, '')

    expect(argument).to be_valid
    expect(argument.to_hash).to eq(website: '')
  end

  it 'returns an error if user-provided value is unspecified' do
    argument = described_class.new(:website, nil, nil)

    expect(argument).not_to be_valid
    expect(argument.errors.first).to eq '`website` input: required value has not been provided'
  end

  describe '.matches?' do
    it 'matches specs without configuration' do
      expect(described_class.matches?(nil)).to be true
    end

    it 'matches specs with empty configuration' do
      expect(described_class.matches?('')).to be true
    end

    it 'does not match specs with configuration' do
      expect(described_class.matches?({ options: %w[a b] })).to be false
    end
  end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
require 'fast_spec_helper'
# Specs for Gitlab::Ci::Input::Arguments::Unknown — the catch-all argument
# type fabricated when no other type matches a specification.
RSpec.describe Gitlab::Ci::Input::Arguments::Unknown, feature_category: :pipeline_composition do
it 'raises an error when someone tries to evaluate the value' do
argument = described_class.new(:website, nil, 'https://example.gitlab.com')
expect(argument).not_to be_valid
# Unknown arguments are never evaluable; `to_value` must raise.
expect { argument.to_value }.to raise_error ArgumentError
end
describe '.matches?' do
# As the fallback type it matches any specification.
it 'always matches' do
expect(described_class.matches?('abc')).to be true
end
end
end

View File

@ -0,0 +1,126 @@
# frozen_string_literal: true

require 'fast_spec_helper'

# Specs for Gitlab::Ci::Input::Inputs — reconciles a component's input
# specification with user-provided arguments and surfaces any errors.
RSpec.describe Gitlab::Ci::Input::Inputs, feature_category: :pipeline_composition do
  describe '#valid?' do
    let(:spec) { { website: nil } }

    it 'describes user-provided inputs' do
      inputs = described_class.new(spec, { website: 'http://example.gitlab.com' })

      expect(inputs).to be_valid
    end
  end

  context 'when proper specification has been provided' do
    let(:spec) do
      {
        website: nil,
        env: { default: 'development' },
        run: { options: %w[tests spec e2e] }
      }
    end

    let(:args) { { website: 'https://gitlab.com', run: 'tests' } }

    it 'fabricates desired input arguments' do
      inputs = described_class.new(spec, args)

      expect(inputs).to be_valid
      expect(inputs.count).to eq 3
      # `env` was not provided by the user, so its default value is used.
      expect(inputs.to_hash).to eq(args.merge(env: 'development'))
    end
  end

  context 'when inputs and args are empty' do
    it 'is a valid use-case' do
      inputs = described_class.new({}, {})

      expect(inputs).to be_valid
      expect(inputs.to_hash).to be_empty
    end
  end

  # NOTE: fixed 'recoincilation' -> 'reconciliation' typo in the description.
  context 'when there are arguments reconciliation errors present' do
    context 'when required argument is missing' do
      let(:spec) { { website: nil } }

      it 'returns an error' do
        inputs = described_class.new(spec, {})

        expect(inputs).not_to be_valid
        expect(inputs.errors.first).to eq '`website` input: required value has not been provided'
      end
    end

    context 'when argument is not present but configured as allowlist' do
      let(:spec) do
        { run: { options: %w[opt1 opt2] } }
      end

      it 'returns an error' do
        inputs = described_class.new(spec, {})

        expect(inputs).not_to be_valid
        expect(inputs.errors.first).to eq '`run` input: argument not provided'
      end
    end
  end

  context 'when unknown specification argument has been used' do
    let(:spec) do
      {
        website: nil,
        env: { default: 'development' },
        run: { options: %w[tests spec e2e] },
        test: { unknown: 'something' }
      }
    end

    let(:args) { { website: 'https://gitlab.com', run: 'tests' } }

    it 'fabricates an unknown argument entry and returns an error' do
      inputs = described_class.new(spec, args)

      expect(inputs).not_to be_valid
      # The unrecognized `test` spec still yields an (invalid) entry.
      expect(inputs.count).to eq 4
      expect(inputs.errors.first).to eq '`test` input: unrecognized input argument specification: `unknown`'
    end
  end

  context 'when unknown arguments are being passed by a user' do
    let(:spec) do
      { env: { default: 'development' } }
    end

    let(:args) { { website: 'https://gitlab.com', run: 'tests' } }

    it 'returns an error with a list of unknown arguments' do
      inputs = described_class.new(spec, args)

      expect(inputs).not_to be_valid
      expect(inputs.errors.first).to eq 'unknown input arguments: [:website, :run]'
    end
  end

  context 'when composite specification is being used' do
    let(:spec) do
      {
        env: {
          default: 'dev',
          options: %w[test dev prod]
        }
      }
    end

    let(:args) { { env: 'dev' } }

    it 'returns an error describing an unknown specification' do
      # Combining `default:` and `options:` in one spec is not supported.
      inputs = described_class.new(spec, args)

      expect(inputs).not_to be_valid
      expect(inputs.errors.first).to eq '`env` input: unrecognized input argument definition'
    end
  end
end

View File

@ -64,18 +64,34 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
end
end
it 'counts exceptions' do
expect(instrumentation_class).to receive(:instance_count_exception)
.with(instance_of(Redis::CommandError)).and_call_original
expect(instrumentation_class).to receive(:log_exception)
.with(instance_of(Redis::CommandError)).and_call_original
expect(instrumentation_class).to receive(:instance_count_request).and_call_original
context 'when encountering exceptions' do
where(:case_name, :exception, :exception_counter) do
'generic exception' | Redis::CommandError | :instance_count_exception
'moved redirection' | Redis::CommandError.new("MOVED 123 127.0.0.1:6380") | :instance_count_cluster_redirection
'ask redirection' | Redis::CommandError.new("ASK 123 127.0.0.1:6380") | :instance_count_cluster_redirection
end
expect do
Gitlab::Redis::SharedState.with do |redis|
redis.call(:auth, 'foo', 'bar')
with_them do
before do
Gitlab::Redis::SharedState.with do |redis|
# We need to go 1 layer deeper to stub _client as we monkey-patch Redis::Client
# with the interceptor. Stubbing `redis` will skip the instrumentation_class.
allow(redis._client).to receive(:process).and_raise(exception)
end
end
end.to raise_exception(Redis::CommandError)
it 'counts exception' do
expect(instrumentation_class).to receive(exception_counter)
.with(instance_of(Redis::CommandError)).and_call_original
expect(instrumentation_class).to receive(:log_exception)
.with(instance_of(Redis::CommandError)).and_call_original
expect(instrumentation_class).to receive(:instance_count_request).and_call_original
expect do
Gitlab::Redis::SharedState.with { |redis| redis.call(:auth, 'foo', 'bar') }
end.to raise_exception(Redis::CommandError)
end
end
end
context 'in production environment' do

View File

@ -2,24 +2,29 @@
require 'spec_helper'
RSpec.describe Projects::Forks::DivergenceCounts, feature_category: :source_code_management do
RSpec.describe Projects::Forks::Details, feature_category: :source_code_management do
include ExclusiveLeaseHelpers
include ProjectForksHelper
let_it_be(:user) { create(:user) }
let_it_be(:source_repo) { create(:project, :repository, :public).repository }
let_it_be(:fork_repo) { fork_project(source_repo.project, user, { repository: true }).repository }
let(:fork_branch) { 'fork-branch' }
let(:cache_key) { ['project_fork_details', fork_repo.project.id, fork_branch].join(':') }
describe '#counts', :use_clean_rails_redis_caching do
let(:source_repo) { create(:project, :repository, :public).repository }
let(:fork_repo) { fork_project(source_repo.project, user, { repository: true }).repository }
let(:fork_branch) { 'fork-branch' }
let(:cache_key) { ['project_forks', fork_repo.project.id, fork_branch, 'divergence_counts'] }
def expect_cached_counts(value)
counts = described_class.new(fork_repo.project, fork_branch).counts
ahead, behind = value
expect(counts).to eq({ ahead: ahead, behind: behind })
cached_value = [source_repo.commit.sha, fork_repo.commit(fork_branch).sha, value]
cached_value = {
source_sha: source_repo.commit.sha,
sha: fork_repo.commit(fork_branch).sha,
counts: value
}
expect(Rails.cache.read(cache_key)).to eq(cached_value)
end
@ -72,6 +77,9 @@ RSpec.describe Projects::Forks::DivergenceCounts, feature_category: :source_code
end
context 'when counts calculated from a branch that exists upstream' do
let_it_be(:source_repo) { create(:project, :repository, :public).repository }
let_it_be(:fork_repo) { fork_project(source_repo.project, user, { repository: true }).repository }
let(:fork_branch) { 'feature' }
it 'compares the fork branch to upstream default branch' do
@ -94,5 +102,61 @@ RSpec.describe Projects::Forks::DivergenceCounts, feature_category: :source_code
expect_cached_counts([2, 30])
end
end
context 'when specified branch does not exist' do
it 'returns nils as counts' do
counts = described_class.new(fork_repo.project, 'non-existent-branch').counts
expect(counts).to eq({ ahead: nil, behind: nil })
end
end
end
describe '#update!', :use_clean_rails_redis_caching do
it 'updates the cache with the specified value' do
value = { source_sha: source_repo.commit.sha, sha: fork_repo.commit.sha, counts: [0, 0], has_conflicts: true }
described_class.new(fork_repo.project, fork_branch).update!(value)
expect(Rails.cache.read(cache_key)).to eq(value)
end
end
describe '#has_conflicts', :use_clean_rails_redis_caching do
it 'returns whether merge for the stored commits failed due to conflicts' do
details = described_class.new(fork_repo.project, fork_branch)
expect do
value = { source_sha: source_repo.commit.sha, sha: fork_repo.commit.sha, counts: [0, 0], has_conflicts: true }
details.update!(value)
end.to change { details.has_conflicts? }.from(false).to(true)
end
end
describe '#exclusive_lease' do
it 'returns exclusive lease to the details' do
key = ['project_details', fork_repo.project.id, fork_branch].join(':')
uuid = SecureRandom.uuid
details = described_class.new(fork_repo.project, fork_branch)
expect(Gitlab::ExclusiveLease).to receive(:get_uuid).with(key).and_return(uuid)
expect(Gitlab::ExclusiveLease).to receive(:new).with(
key, uuid: uuid, timeout: described_class::LEASE_TIMEOUT
).and_call_original
expect(details.exclusive_lease).to be_a(Gitlab::ExclusiveLease)
end
end
describe 'syncing?', :use_clean_rails_redis_caching do
it 'returns whether there is a sync in progress' do
details = described_class.new(fork_repo.project, fork_branch)
expect(details.exclusive_lease.try_obtain).to be_present
expect(details.syncing?).to eq(true)
details.exclusive_lease.cancel
expect(details.syncing?).to eq(false)
end
end
end

View File

@ -0,0 +1,113 @@
# frozen_string_literal: true
require 'spec_helper'
# Request spec for the `projectSyncFork` GraphQL mutation, which syncs a
# fork's branch with its upstream project.
RSpec.describe "Sync project fork", feature_category: :source_code_management do
include GraphqlHelpers
include ProjectForksHelper
include ExclusiveLeaseHelpers
let_it_be(:source_project) { create(:project, :repository, :public) }
let_it_be(:current_user) { create(:user, maintainer_projects: [source_project]) }
# refind: true re-finds the record for each example (test-prof let_it_be).
let_it_be(:project, refind: true) { fork_project(source_project, current_user, { repository: true }) }
let_it_be(:target_branch) { project.default_branch }
# Selection set requests divergence counts plus sync/conflict state.
let(:mutation) do
params = { project_path: project.full_path, target_branch: target_branch }
graphql_mutation(:project_sync_fork, params) do
<<-QL.strip_heredoc
details {
ahead
behind
isSyncing
hasConflicts
}
errors
QL
end
end
# Move upstream HEAD so the fork diverges from the source project.
before do
source_project.change_head('feature')
end
context 'when the user does not have permission' do
let_it_be(:current_user) { create(:user) }
it_behaves_like 'a mutation that returns a top-level access error'
it 'does not call the sync service' do
expect(::Projects::Forks::SyncWorker).not_to receive(:perform_async)
post_graphql_mutation(mutation, current_user: current_user)
end
end
context 'when the user has permission' do
context 'and the sync service executes successfully', :sidekiq_inline do
it 'calls the sync service' do
expect(::Projects::Forks::SyncWorker).to receive(:perform_async).and_call_original
post_graphql_mutation(mutation, current_user: current_user)
expect(graphql_mutation_response(:project_sync_fork)).to eq(
{
'details' => { 'ahead' => 30, 'behind' => 0, "hasConflicts" => false, "isSyncing" => false },
'errors' => []
})
end
end
context 'and the sync service fails to execute' do
let(:target_branch) { 'markdown' }
# Posts the mutation and asserts it fails with `message` without ever
# enqueuing the sync worker.
def expect_error_response(message)
expect(::Projects::Forks::SyncWorker).not_to receive(:perform_async)
post_graphql_mutation(mutation, current_user: current_user)
expect(graphql_mutation_response(:project_sync_fork)['errors']).to eq([message])
end
context 'when fork details cannot be resolved' do
# The source project itself is not a fork, so details cannot be resolved.
let_it_be(:project) { source_project }
it 'returns an error' do
expect_error_response('This branch of this project cannot be updated from the upstream')
end
end
context 'when the previous execution resulted in a conflict' do
it 'returns an error' do
expect_next_instance_of(::Projects::Forks::Details) do |instance|
expect(instance).to receive(:has_conflicts?).twice.and_return(true)
end
expect_error_response('The synchronization cannot happen due to the merge conflict')
expect(graphql_mutation_response(:project_sync_fork)['details']['hasConflicts']).to eq(true)
end
end
context 'when the request is rate limited' do
it 'returns an error' do
expect(Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
expect_error_response('This service has been called too many times.')
end
end
context 'when another fork sync is in progress' do
it 'returns an error' do
expect_next_instance_of(Projects::Forks::Details) do |instance|
# Lease exists but cannot be obtained => another sync holds it.
lease = instance_double(Gitlab::ExclusiveLease, try_obtain: false, exists?: true)
expect(instance).to receive(:exclusive_lease).twice.and_return(lease)
end
expect_error_response('Another fork sync is already in progress')
expect(graphql_mutation_response(:project_sync_fork)['details']['isSyncing']).to eq(true)
end
end
end
end
end

View File

@ -10,12 +10,13 @@ RSpec.describe 'getting project fork details', feature_category: :source_code_ma
let_it_be(:current_user) { create(:user, maintainer_projects: [project]) }
let_it_be(:forked_project) { fork_project(project, current_user, repository: true) }
let(:ref) { 'feature' }
let(:queried_project) { forked_project }
let(:query) do
graphql_query_for(:project,
{ full_path: queried_project.full_path }, <<~QUERY
forkDetails(ref: "feature"){
forkDetails(ref: "#{ref}"){
ahead
behind
}
@ -41,6 +42,38 @@ RSpec.describe 'getting project fork details', feature_category: :source_code_ma
end
end
context 'when project source is not visible' do
it 'does not return fork details' do
project.team.truncate
post_graphql(query, current_user: current_user)
expect(graphql_data['project']['forkDetails']).to be_nil
end
end
context 'when the specified ref does not exist' do
let(:ref) { 'non-existent-branch' }
it 'does not return fork details' do
post_graphql(query, current_user: current_user)
expect(graphql_data['project']['forkDetails']).to be_nil
end
end
context 'when fork_divergence_counts feature flag is disabled' do
before do
stub_feature_flags(fork_divergence_counts: false)
end
it 'does not return fork details' do
post_graphql(query, current_user: current_user)
expect(graphql_data['project']['forkDetails']).to be_nil
end
end
context 'when a user cannot read the code' do
let_it_be(:current_user) { create(:user) }

View File

@ -123,10 +123,6 @@ RSpec.describe Notes::CreateService, feature_category: :team_planning do
let(:execute_create_service) { described_class.new(project, user, opts).execute }
before do
stub_feature_flags(notes_create_service_tracking: false)
end
it 'tracks commit comment usage data', :clean_gitlab_redis_shared_state do
expect(counter).to receive(:count).with(:create, 'Commit').and_call_original

View File

@ -0,0 +1,185 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::Forks::SyncService, feature_category: :source_code_management do
include ProjectForksHelper
include RepoHelpers
let_it_be(:user) { create(:user) }
let_it_be(:source_project) { create(:project, :repository, :public) }
let_it_be(:project) { fork_project(source_project, user, { repository: true }) }
let(:fork_branch) { project.default_branch }
let(:service) { described_class.new(project, user, fork_branch) }
def details
Projects::Forks::Details.new(project, fork_branch)
end
def expect_to_cancel_exclusive_lease
expect(Gitlab::ExclusiveLease).to receive(:cancel)
end
describe '#execute' do
context 'when fork is up-to-date with the upstream' do
it 'does not perform merge' do
expect_to_cancel_exclusive_lease
expect(project.repository).not_to receive(:merge_to_branch)
expect(project.repository).not_to receive(:ff_merge)
expect(service.execute).to be_success
end
end
context 'when fork is behind the upstream' do
let_it_be(:base_commit) { source_project.commit.sha }
before_all do
source_project.repository.commit_files(
user,
branch_name: source_project.repository.root_ref, message: 'Commit to root ref',
actions: [{ action: :create, file_path: 'encoding/CHANGELOG', content: 'One more' }]
)
source_project.repository.commit_files(
user,
branch_name: source_project.repository.root_ref, message: 'Another commit to root ref',
actions: [{ action: :create, file_path: 'encoding/NEW-CHANGELOG', content: 'One more time' }]
)
end
before do
project.repository.create_branch(fork_branch, base_commit)
end
context 'when fork is not ahead of the upstream' do
let(:fork_branch) { 'fork-without-new-commits' }
it 'updates the fork using ff merge' do
expect_to_cancel_exclusive_lease
expect(project.commit(fork_branch).sha).to eq(base_commit)
expect(project.repository).to receive(:ff_merge)
.with(user, source_project.commit.sha, fork_branch, target_sha: base_commit)
.and_call_original
expect do
expect(service.execute).to be_success
end.to change { details.counts }.from({ ahead: 0, behind: 2 }).to({ ahead: 0, behind: 0 })
end
end
context 'when fork is ahead of the upstream' do
context 'and has conflicts with the upstream', :use_clean_rails_redis_caching do
let(:fork_branch) { 'fork-with-conflicts' }
it 'returns an error' do
project.repository.commit_files(
user,
branch_name: fork_branch, message: 'Committing something',
actions: [{ action: :create, file_path: 'encoding/CHANGELOG', content: 'New file' }]
)
expect_to_cancel_exclusive_lease
expect(details).not_to have_conflicts
expect do
result = service.execute
expect(result).to be_error
expect(result.message).to eq("9:merging commits: merge: there are conflicting files.")
end.not_to change { details.counts }
expect(details).to have_conflicts
end
end
context 'and does not have conflicts with the upstream' do
let(:fork_branch) { 'fork-with-new-commits' }
it 'updates the fork using merge' do
project.repository.commit_files(
user,
branch_name: fork_branch, message: 'Committing completely new changelog',
actions: [{ action: :create, file_path: 'encoding/COMPLETELY-NEW-CHANGELOG', content: 'New file' }]
)
commit_message = "Merge branch #{source_project.path}:#{source_project.default_branch} into #{fork_branch}"
expect(project.repository).to receive(:merge_to_branch).with(
user,
source_sha: source_project.commit.sha,
target_branch: fork_branch,
target_sha: project.commit(fork_branch).sha,
message: commit_message
).and_call_original
expect_to_cancel_exclusive_lease
expect do
expect(service.execute).to be_success
end.to change { details.counts }.from({ ahead: 1, behind: 2 }).to({ ahead: 2, behind: 0 })
commits = project.repository.commits_between(source_project.commit.sha, project.commit(fork_branch).sha)
expect(commits.map(&:message)).to eq([
"Committing completely new changelog",
commit_message
])
end
end
end
context 'when a merge cannot happen due to another ongoing merge' do
it 'does not merge' do
expect(service).to receive(:perform_merge).and_return(nil)
result = service.execute
expect(result).to be_error
expect(result.message).to eq(described_class::ONGOING_MERGE_ERROR)
end
end
context 'when upstream branch contains lfs reference' do
let(:source_project) { create(:project, :repository, :public) }
let(:project) { fork_project(source_project, user, { repository: true }) }
let(:fork_branch) { 'fork-fetches-lfs-pointers' }
before do
source_project.change_head('lfs')
allow(source_project).to receive(:lfs_enabled?).and_return(true)
allow(project).to receive(:lfs_enabled?).and_return(true)
create_file_in_repo(source_project, 'lfs', 'lfs', 'one.lfs', 'One')
create_file_in_repo(source_project, 'lfs', 'lfs', 'two.lfs', 'Two')
end
it 'links fetched lfs objects to the fork project', :aggregate_failures do
expect_to_cancel_exclusive_lease
expect do
expect(service.execute).to be_success
end.to change { project.reload.lfs_objects.size }.from(0).to(2)
.and change { details.counts }.from({ ahead: 0, behind: 3 }).to({ ahead: 0, behind: 0 })
expect(project.lfs_objects).to match_array(source_project.lfs_objects)
end
context 'and there are too many of them for a single sync' do
let(:fork_branch) { 'fork-too-many-lfs-pointers' }
it 'updates the fork successfully' do
expect_to_cancel_exclusive_lease
stub_const('Projects::LfsPointers::LfsLinkService::MAX_OIDS', 1)
expect do
result = service.execute
expect(result).to be_error
expect(result.message).to eq('Too many LFS object ids to link, please push them manually')
end.not_to change { details.counts }
end
end
end
end
end
end

View File

@ -1,9 +1,10 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::LfsPointers::LfsLinkService do
let!(:project) { create(:project, lfs_enabled: true) }
let!(:lfs_objects_project) { create_list(:lfs_objects_project, 2, project: project) }
RSpec.describe Projects::LfsPointers::LfsLinkService, feature_category: :source_code_management do
let_it_be(:project) { create(:project, lfs_enabled: true) }
let_it_be(:lfs_objects_project) { create_list(:lfs_objects_project, 2, project: project) }
let(:new_oids) { { 'oid1' => 123, 'oid2' => 125 } }
let(:all_oids) { LfsObject.pluck(:oid, :size).to_h.merge(new_oids) }
let(:new_lfs_object) { create(:lfs_object) }
@ -17,12 +18,26 @@ RSpec.describe Projects::LfsPointers::LfsLinkService do
describe '#execute' do
it 'raises an error when trying to link too many objects at once' do
stub_const("#{described_class}::MAX_OIDS", 5)
oids = Array.new(described_class::MAX_OIDS) { |i| "oid-#{i}" }
oids << 'the straw'
expect { subject.execute(oids) }.to raise_error(described_class::TooManyOidsError)
end
it 'executes a block after validation and before execution' do
block = instance_double(Proc)
expect(subject).to receive(:validate!).ordered
expect(block).to receive(:call).ordered
expect(subject).to receive(:link_existing_lfs_objects).ordered
subject.execute([]) do
block.call
end
end
it 'links existing lfs objects to the project' do
expect(project.lfs_objects.count).to eq 2

View File

@ -123,18 +123,6 @@ RSpec.shared_examples 'discussions API' do |parent_type, noteable_type, id_name,
expect_snowplow_event(category: 'Notes::CreateService', action: 'execute', label: 'note', value: anything)
end
context 'with notes_create_service_tracking feature flag disabled' do
before do
stub_feature_flags(notes_create_service_tracking: false)
end
it 'does not track Notes::CreateService events', :snowplow do
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions"), params: { body: 'hi!' }
expect_no_snowplow_event(category: 'Notes::CreateService', action: 'execute')
end
end
context 'when an admin or owner makes the request' do
it 'accepts the creation date to be set' do
creation_time = 2.weeks.ago

Some files were not shown because too many files have changed in this diff Show More