Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
5f5ff3b0a1
commit
a81ae33abe
|
|
@ -35,7 +35,7 @@ See [the general developer security guidelines](https://gitlab.com/gitlab-org/re
|
|||
- [ ] Assigned (_not_ as reviewer) to `@gitlab-release-tools-bot` with passing CI pipelines.
|
||||
- [ ] Correct `~severity::x` label is applied to this merge request and the related security issue.
|
||||
|
||||
/label ~security
|
||||
/label ~security ~"AppSecWorkType::VulnFixVerification"
|
||||
|
||||
[CHANGELOG entry]: https://docs.gitlab.com/ee/development/changelog.html#overview
|
||||
[Code Review process]: https://docs.gitlab.com/ee/development/code_review.html
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
0173c7eadd49ca64bdd04aa2cc2b47607b09988b
|
||||
521888eb6675473c716f8ad7ac81e3f348f863a2
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
6abfad134db84c4a09d714e6ebd0d5cdddcc650a
|
||||
8972105f4bbcf9086558bee95b35f18db335b4f0
|
||||
|
|
|
|||
|
|
@ -87,14 +87,21 @@ export default {
|
|||
</div>
|
||||
|
||||
<div class="gl-mt-1 gl-flex gl-items-center gl-justify-end gl-text-subtle lg:gl-justify-start">
|
||||
<div v-if="jobRef" class="gl-max-w-26 gl-truncate gl-rounded-base gl-bg-gray-50 gl-px-2">
|
||||
<div v-if="jobRef" class="gl-max-w-26 gl-truncate gl-rounded-base gl-bg-strong gl-px-2">
|
||||
<gl-icon
|
||||
v-if="createdByTag"
|
||||
variant="subtle"
|
||||
name="label"
|
||||
:size="$options.iconSize"
|
||||
data-testid="label-icon"
|
||||
/>
|
||||
<gl-icon v-else name="fork" :size="$options.iconSize" data-testid="fork-icon" />
|
||||
<gl-icon
|
||||
v-else
|
||||
name="fork"
|
||||
:size="$options.iconSize"
|
||||
variant="subtle"
|
||||
data-testid="fork-icon"
|
||||
/>
|
||||
<gl-link
|
||||
class="gl-text-sm gl-text-subtle gl-font-monospace hover:gl-text-subtle"
|
||||
:href="job.refPath"
|
||||
|
|
@ -103,8 +110,8 @@ export default {
|
|||
>
|
||||
</div>
|
||||
<span v-else>{{ __('none') }}</span>
|
||||
<div class="gl-ml-2 gl-flex gl-items-center gl-rounded-base gl-bg-gray-50 gl-px-2">
|
||||
<gl-icon class="gl-mx-2" name="commit" :size="$options.iconSize" />
|
||||
<div class="gl-ml-2 gl-flex gl-items-center gl-rounded-base gl-bg-strong gl-px-2">
|
||||
<gl-icon class="gl-mx-2" name="commit" :size="$options.iconSize" variant="subtle" />
|
||||
<gl-link
|
||||
class="gl-text-sm gl-text-subtle gl-font-monospace hover:gl-text-subtle"
|
||||
:href="job.commitPath"
|
||||
|
|
|
|||
|
|
@ -41,11 +41,21 @@ export default {
|
|||
<ci-icon :status="job.detailedStatus" show-status-text />
|
||||
<div class="gl-ml-1 gl-mt-2 gl-text-sm gl-text-subtle">
|
||||
<div v-if="duration" data-testid="job-duration">
|
||||
<gl-icon name="timer" :size="$options.iconSize" data-testid="duration-icon" />
|
||||
<gl-icon
|
||||
name="timer"
|
||||
:size="$options.iconSize"
|
||||
variant="subtle"
|
||||
data-testid="duration-icon"
|
||||
/>
|
||||
{{ durationFormatted }}
|
||||
</div>
|
||||
<div v-if="finishedTime" data-testid="job-finished-time">
|
||||
<gl-icon name="calendar" :size="$options.iconSize" data-testid="finished-time-icon" />
|
||||
<gl-icon
|
||||
name="calendar"
|
||||
:size="$options.iconSize"
|
||||
variant="subtle"
|
||||
data-testid="finished-time-icon"
|
||||
/>
|
||||
<time-ago-tooltip :time="finishedTime" />
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ export default {
|
|||
loadingAriaLabel: __('Loading'),
|
||||
},
|
||||
filterSearchBoxStyles:
|
||||
'gl-my-0 gl-p-5 gl-bg-gray-10 gl-text-default gl-border-b gl-border-default',
|
||||
'gl-my-0 gl-p-5 gl-bg-subtle gl-text-default gl-border-b gl-border-default',
|
||||
components: {
|
||||
GlAlert,
|
||||
GlKeysetPagination,
|
||||
|
|
|
|||
|
|
@ -36,7 +36,7 @@ export default {
|
|||
i18n: {
|
||||
commitStatSummary: __('Showing %{conflict}'),
|
||||
resolveInfo: __(
|
||||
'To resolve the conflicts, either use interactive mode to select %{use_ours} or %{use_theirs}, or edit the files inline. Commit these changes into %{branch_name}.',
|
||||
'Resolve source branch %{source_branch_name} conflicts using interactive mode to select %{use_ours} or %{use_theirs}, or manually using %{edit_inline}.',
|
||||
),
|
||||
},
|
||||
computed: {
|
||||
|
|
@ -76,6 +76,22 @@ export default {
|
|||
</script>
|
||||
<template>
|
||||
<div id="conflicts">
|
||||
<div data-testid="resolve-info">
|
||||
<gl-sprintf :message="$options.i18n.resolveInfo">
|
||||
<template #source_branch_name>
|
||||
<a class="ref-name" :href="sourceBranchPath">{{ conflictsData.sourceBranch }}</a>
|
||||
</template>
|
||||
<template #use_ours>
|
||||
<strong>{{ s__('MergeConflict|Use ours') }}</strong>
|
||||
</template>
|
||||
<template #use_theirs>
|
||||
<strong>{{ s__('MergeConflict|Use theirs') }}</strong>
|
||||
</template>
|
||||
<template #edit_inline>
|
||||
<strong>{{ s__('MergeConflict|Edit inline') }}</strong>
|
||||
</template>
|
||||
</gl-sprintf>
|
||||
</div>
|
||||
<gl-loading-icon v-if="isLoading" size="lg" data-testid="loading-spinner" />
|
||||
<div v-if="hasError" class="nothing-here-block">
|
||||
{{ conflictsData.errorMessage }}
|
||||
|
|
@ -175,24 +191,6 @@ export default {
|
|||
</div>
|
||||
<div class="resolve-conflicts-form gl-mt-6">
|
||||
<div class="form-group row">
|
||||
<div class="col-md-4">
|
||||
<h4 class="gl-mt-0">
|
||||
{{ __('Resolve conflicts on source branch') }}
|
||||
</h4>
|
||||
<div class="gl-mb-5" data-testid="resolve-info">
|
||||
<gl-sprintf :message="$options.i18n.resolveInfo">
|
||||
<template #use_ours>
|
||||
<strong>{{ s__('MergeConflict|Use ours') }}</strong>
|
||||
</template>
|
||||
<template #use_theirs>
|
||||
<strong>{{ s__('MergeConflict|Use theirs') }}</strong>
|
||||
</template>
|
||||
<template #branch_name>
|
||||
<a class="ref-name" :href="sourceBranchPath">{{ conflictsData.sourceBranch }}</a>
|
||||
</template>
|
||||
</gl-sprintf>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-8">
|
||||
<label class="label-bold" for="commit-message">
|
||||
{{ __('Commit message') }}
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ query getBlobSearchQuery(
|
|||
chunks {
|
||||
lines {
|
||||
lineNumber
|
||||
richText
|
||||
highlights
|
||||
text
|
||||
}
|
||||
matchCountInChunk
|
||||
|
|
@ -34,6 +34,7 @@ query getBlobSearchQuery(
|
|||
matchCountTotal
|
||||
path
|
||||
projectPath
|
||||
language
|
||||
}
|
||||
matchCount
|
||||
perPage
|
||||
|
|
|
|||
|
|
@ -99,6 +99,7 @@ export default {
|
|||
>
|
||||
<blob-chunks
|
||||
:chunk="chunk"
|
||||
:language="file.language"
|
||||
:blame-link="file.blameUrl"
|
||||
:file-url="file.fileUrl"
|
||||
:position="position"
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import { GlTooltipDirective, GlIcon, GlLink } from '@gitlab/ui';
|
|||
import GlSafeHtmlDirective from '~/vue_shared/directives/safe_html';
|
||||
import { s__ } from '~/locale';
|
||||
import { InternalEvents } from '~/tracking';
|
||||
import { initLineHighlight } from '~/search/results/utils';
|
||||
import {
|
||||
EVENT_CLICK_BLOB_RESULT_LINE,
|
||||
EVENT_CLICK_BLOB_RESULT_BLAME_LINE,
|
||||
|
|
@ -30,6 +31,10 @@ export default {
|
|||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
language: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
blameLink: {
|
||||
type: String,
|
||||
required: false,
|
||||
|
|
@ -45,14 +50,31 @@ export default {
|
|||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
lines: this.chunk.lines.map((line) => ({
|
||||
...line,
|
||||
richText: null,
|
||||
})),
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
codeTheme() {
|
||||
return gon.user_color_scheme || 'white';
|
||||
},
|
||||
},
|
||||
mounted() {
|
||||
this.chunk.lines.forEach(async (line, index) => {
|
||||
this.lines[index].richText = await this.codeHighlighting(line);
|
||||
});
|
||||
},
|
||||
methods: {
|
||||
highlightedRichText(richText) {
|
||||
return richText.replace('<b>', '<b class="hll">');
|
||||
codeHighlighting(line) {
|
||||
return initLineHighlight({
|
||||
line,
|
||||
fileUrl: this.fileUrl,
|
||||
language: this.language.toLowerCase(),
|
||||
});
|
||||
},
|
||||
trackLineClick(lineNumber) {
|
||||
this.trackEvent(EVENT_CLICK_BLOB_RESULT_LINE, {
|
||||
|
|
@ -78,7 +100,7 @@ export default {
|
|||
>
|
||||
<div class="blob-content">
|
||||
<div
|
||||
v-for="line in chunk.lines"
|
||||
v-for="line in lines"
|
||||
:key="line.lineNumber"
|
||||
class="line_holder code-search-line gl-flex"
|
||||
data-testid="search-blob-line"
|
||||
|
|
@ -109,9 +131,17 @@ export default {
|
|||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<pre class="code highlight gl-grow" data-testid="search-blob-line-code">
|
||||
<code class="!gl-inline">
|
||||
<span v-safe-html="highlightedRichText(line.richText)" class="line"></span>
|
||||
<pre
|
||||
v-if="line.richText"
|
||||
class="code highlight gl-grow"
|
||||
data-testid="search-blob-line-code-highlighted"
|
||||
>
|
||||
<code v-safe-html="line.richText" class="gl-leading-normal">
|
||||
</code>
|
||||
</pre>
|
||||
<pre v-else class="code gl-grow" data-testid="search-blob-line-code-non-highlighted">
|
||||
<code>
|
||||
<span v-safe-html="line.text" class="line"></span>
|
||||
</code>
|
||||
</pre>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -17,3 +17,8 @@ export const CODE_THEME_SOLARIZED_LIGHT = 'solarized-light';
|
|||
|
||||
export const BORDER_DARK = 'multimatch-divider-dark';
|
||||
export const BORDER_LIGHT = 'multimatch-divider-light';
|
||||
|
||||
export const HIGHLIGHT_MARK = '';
|
||||
export const HIGHLIGHT_MARK_REGEX = '\u200b';
|
||||
export const HIGHLIGHT_HTML_START = '<b class="hll">';
|
||||
export const HIGHLIGHT_HTML_END = '</b>';
|
||||
|
|
|
|||
|
|
@ -0,0 +1,68 @@
|
|||
import {
|
||||
LEGACY_FALLBACKS,
|
||||
ROUGE_TO_HLJS_LANGUAGE_MAP,
|
||||
} from '~/vue_shared/components/source_viewer/constants';
|
||||
import languageLoader from '~/content_editor/services/highlight_js_language_loader';
|
||||
import { highlight } from '~/vue_shared/components/source_viewer/workers/highlight_utils';
|
||||
import {
|
||||
HIGHLIGHT_MARK,
|
||||
HIGHLIGHT_MARK_REGEX,
|
||||
HIGHLIGHT_HTML_START,
|
||||
HIGHLIGHT_HTML_END,
|
||||
} from './constants';
|
||||
|
||||
export const isUnsupportedLanguage = (language) => {
|
||||
const mappedLanguage = ROUGE_TO_HLJS_LANGUAGE_MAP[language];
|
||||
const supportedLanguages = Object.keys(languageLoader);
|
||||
const isUnsupported = !supportedLanguages.includes(mappedLanguage);
|
||||
return LEGACY_FALLBACKS.includes(language) || isUnsupported;
|
||||
};
|
||||
|
||||
export const markSearchTerm = (str = '', highlights = []) => {
|
||||
const chars = str.split('');
|
||||
[...highlights].reverse().forEach((highligh) => {
|
||||
const [start, end] = highligh;
|
||||
chars.splice(end, 0, HIGHLIGHT_MARK);
|
||||
chars.splice(start, 0, HIGHLIGHT_MARK);
|
||||
});
|
||||
|
||||
return chars.join('');
|
||||
};
|
||||
|
||||
export const cleanLineAndMark = ({ text, highlights } = {}) => {
|
||||
const parsedText = highlights?.length > 0 ? markSearchTerm(text, highlights) : text;
|
||||
return parsedText.replace(/\r?\n/, '');
|
||||
};
|
||||
|
||||
export const highlightSearchTerm = (highlightedString) => {
|
||||
if (highlightedString.length === 0) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const pattern = new RegExp(`${HIGHLIGHT_MARK_REGEX}(.+?)${HIGHLIGHT_MARK_REGEX}`, 'g');
|
||||
|
||||
const result = highlightedString.replace(
|
||||
pattern,
|
||||
`${HIGHLIGHT_HTML_START}$1${HIGHLIGHT_HTML_END}`,
|
||||
);
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
export const initLineHighlight = async (linesData) => {
|
||||
const { line, fileUrl } = linesData;
|
||||
let { language } = linesData;
|
||||
|
||||
if (fileUrl.endsWith('.gleam')) {
|
||||
language = 'gleam';
|
||||
}
|
||||
|
||||
if (isUnsupportedLanguage(language)) {
|
||||
return line.text;
|
||||
}
|
||||
|
||||
const resultData = await highlight(null, cleanLineAndMark(line), language);
|
||||
|
||||
const withHighlightedSearchTerm = highlightSearchTerm(resultData[0].highlightedContent);
|
||||
return withHighlightedSearchTerm;
|
||||
};
|
||||
|
|
@ -0,0 +1,180 @@
|
|||
<script>
|
||||
import { GlModal, GlForm, GlFormFields, GlFormDate, GlAlert } from '@gitlab/ui';
|
||||
import { formValidators } from '@gitlab/ui/dist/utils';
|
||||
import { s__, __ } from '~/locale';
|
||||
import { isInPast, fallsBefore } from '~/lib/utils/datetime_utility';
|
||||
import { reportToSentry } from '~/ci/utils';
|
||||
import Tracking from '~/tracking';
|
||||
import { INSTRUMENT_TODO_ITEM_CLICK } from '~/todos/constants';
|
||||
import { snoozeTodo } from '../utils';
|
||||
|
||||
const FORM_ID = 'custom-snooze-form';
|
||||
const FORM_GROUPS_CLASSES = 'sm:gl-w-1/3';
|
||||
const DEFAULT_TIME = '09:00';
|
||||
const MODAL_ACTION_CLASSES = 'gl-w-full sm:gl-w-auto';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
GlModal,
|
||||
GlForm,
|
||||
GlFormFields,
|
||||
GlFormDate,
|
||||
GlAlert,
|
||||
},
|
||||
mixins: [Tracking.mixin()],
|
||||
props: {
|
||||
todo: {
|
||||
type: Object,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
fields: {
|
||||
date: {
|
||||
label: this.$options.i18n.snoozeUntil,
|
||||
groupAttrs: { 'data-testid': 'date-input', class: FORM_GROUPS_CLASSES },
|
||||
validators: [
|
||||
formValidators.required(this.$options.i18n.dateRequired),
|
||||
formValidators.factory(this.$options.i18n.dateInPast, (val) => {
|
||||
const [year, month, day] = val.split('-').map(Number);
|
||||
|
||||
const date = new Date();
|
||||
date.setDate(day);
|
||||
date.setMonth(month - 1);
|
||||
date.setFullYear(year);
|
||||
|
||||
return !isInPast(date);
|
||||
}),
|
||||
],
|
||||
},
|
||||
time: {
|
||||
label: this.$options.i18n.at,
|
||||
groupAttrs: { 'data-testid': 'time-input', class: FORM_GROUPS_CLASSES },
|
||||
inputAttrs: { type: 'time' },
|
||||
validators: [formValidators.required(this.$options.i18n.timeRequired)],
|
||||
},
|
||||
},
|
||||
formValues: {
|
||||
time: DEFAULT_TIME,
|
||||
date: '',
|
||||
},
|
||||
isLoading: false,
|
||||
hasError: false,
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
actionPrimary() {
|
||||
return {
|
||||
text: s__('Todos|Snooze'),
|
||||
attributes: {
|
||||
type: 'submit',
|
||||
variant: 'confirm',
|
||||
form: FORM_ID,
|
||||
loading: this.isLoading,
|
||||
class: MODAL_ACTION_CLASSES,
|
||||
},
|
||||
};
|
||||
},
|
||||
datetime() {
|
||||
if (!this.formValues?.time || !this.formValues?.date) {
|
||||
return null;
|
||||
}
|
||||
return new Date(`${this.formValues.date}T${this.formValues.time}`);
|
||||
},
|
||||
datetimeIsInPast() {
|
||||
if (this.datetime === null) {
|
||||
return false;
|
||||
}
|
||||
return fallsBefore(this.datetime, new Date());
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
show() {
|
||||
this.$refs.modal.$refs.modal.show();
|
||||
},
|
||||
onDateInputChanged(event, inputHandler, validator) {
|
||||
inputHandler(event);
|
||||
validator();
|
||||
},
|
||||
async handleSubmit() {
|
||||
this.hasError = false;
|
||||
if (this.datetimeIsInPast) {
|
||||
return;
|
||||
}
|
||||
this.$emit('submit');
|
||||
|
||||
this.track(INSTRUMENT_TODO_ITEM_CLICK, {
|
||||
label: 'snooze_until_a_specific_date_and_time',
|
||||
extra: {
|
||||
snooze_until: this.datetime.toISOString(),
|
||||
},
|
||||
});
|
||||
|
||||
this.isLoading = true;
|
||||
try {
|
||||
const { data } = await snoozeTodo(this.$apollo, this.todo, this.datetime);
|
||||
|
||||
if (data.errors?.length) {
|
||||
throw new Error(data.errors.join(', '));
|
||||
} else {
|
||||
this.$emit('snoozed');
|
||||
}
|
||||
} catch (error) {
|
||||
reportToSentry(this.$options.name, error);
|
||||
this.hasError = true;
|
||||
} finally {
|
||||
this.isLoading = false;
|
||||
}
|
||||
},
|
||||
},
|
||||
FORM_ID,
|
||||
i18n: {
|
||||
snooze: s__('Todos|Snooze'),
|
||||
snoozeUntil: s__('Todos|Snooze until'),
|
||||
at: s__('Todos|At'),
|
||||
dateRequired: s__('Todos|The date is required.'),
|
||||
dateInPast: s__("Todos|Snooze date can't be in the past."),
|
||||
timeRequired: s__('Todos|The time is required.'),
|
||||
datetimeInPastError: s__('Todos|The selected date and time cannot be in the past.'),
|
||||
snoozeError: s__('Todos|Failed to snooze todo. Try again later.'),
|
||||
},
|
||||
actionSecondary: {
|
||||
text: __('Cancel'),
|
||||
attributes: {
|
||||
variant: 'default',
|
||||
class: MODAL_ACTION_CLASSES,
|
||||
},
|
||||
},
|
||||
};
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<gl-modal
|
||||
ref="modal"
|
||||
modal-id="custom-snooze-todo-modal"
|
||||
:title="$options.i18n.snooze"
|
||||
:action-primary="actionPrimary"
|
||||
:action-secondary="$options.actionSecondary"
|
||||
@primary.prevent="$emit('primary')"
|
||||
>
|
||||
<gl-alert v-if="hasError" data-testid="snooze-error" variant="danger" :dismissible="false">{{
|
||||
$options.i18n.snoozeError
|
||||
}}</gl-alert>
|
||||
<gl-form :id="$options.FORM_ID" @submit.prevent>
|
||||
<gl-form-fields
|
||||
v-model="formValues"
|
||||
:fields="fields"
|
||||
:form-id="$options.FORM_ID"
|
||||
@submit="handleSubmit"
|
||||
>
|
||||
<template #input(date)="{ input, blur: validate }">
|
||||
<gl-form-date @change="onDateInputChanged($event, input, validate)" @blur="validate" />
|
||||
</template>
|
||||
</gl-form-fields>
|
||||
</gl-form>
|
||||
<div v-if="datetimeIsInPast" class="gl-text-danger" data-testid="datetime-in-past-error">
|
||||
{{ $options.i18n.datetimeInPastError }}
|
||||
</div>
|
||||
</gl-modal>
|
||||
</template>
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
<script>
|
||||
import { GlButton, GlDisclosureDropdown, GlTooltip } from '@gitlab/ui';
|
||||
import { GlButton, GlDisclosureDropdown, GlDisclosureDropdownGroup, GlTooltip } from '@gitlab/ui';
|
||||
import { uniqueId } from 'lodash';
|
||||
import dateFormat from '~/lib/dateformat';
|
||||
import { s__, sprintf } from '~/locale';
|
||||
|
|
@ -8,14 +8,17 @@ import { reportToSentry } from '~/ci/utils';
|
|||
import { localeDateFormat } from '~/lib/utils/datetime/locale_dateformat';
|
||||
import Tracking from '~/tracking';
|
||||
import { INSTRUMENT_TODO_ITEM_CLICK } from '~/todos/constants';
|
||||
import snoozeTodoMutation from './mutations/snooze_todo.mutation.graphql';
|
||||
import { snoozeTodo } from '../utils';
|
||||
import unSnoozeTodoMutation from './mutations/un_snooze_todo.mutation.graphql';
|
||||
import SnoozeTodoModal from './snooze_todo_modal.vue';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
GlButton,
|
||||
GlDisclosureDropdown,
|
||||
GlDisclosureDropdownGroup,
|
||||
GlTooltip,
|
||||
SnoozeTodoModal,
|
||||
},
|
||||
mixins: [Tracking.mixin()],
|
||||
inject: ['currentTime'],
|
||||
|
|
@ -101,29 +104,23 @@ export default {
|
|||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
items: [
|
||||
{
|
||||
text: s__('Todos|Until a specific time and date'),
|
||||
action: () => {
|
||||
this.$refs['custom-snooze-time-modal'].show();
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
async snooze(until) {
|
||||
try {
|
||||
const { data } = await this.$apollo.mutate({
|
||||
mutation: snoozeTodoMutation,
|
||||
variables: {
|
||||
todoId: this.todo.id,
|
||||
snoozeUntil: until,
|
||||
},
|
||||
optimisticResponse: {
|
||||
todoSnooze: {
|
||||
todo: {
|
||||
id: this.todo.id,
|
||||
snoozedUntil: until,
|
||||
__typename: 'Todo',
|
||||
},
|
||||
errors: [],
|
||||
},
|
||||
},
|
||||
});
|
||||
const { data } = await snoozeTodo(this.$apollo, this.todo, until);
|
||||
|
||||
if (data.errors?.length) {
|
||||
throw new Error(data.errors.join(', '));
|
||||
|
|
@ -207,17 +204,21 @@ export default {
|
|||
@shown="isOpen = true"
|
||||
@hidden="isOpen = false"
|
||||
>
|
||||
<template #list-item="{ item }">
|
||||
<div class="gl-flex gl-justify-between gl-gap-5 gl-whitespace-nowrap">
|
||||
<div>
|
||||
{{ item.text }}
|
||||
<gl-disclosure-dropdown-group :group="dropdownOptions[0]">
|
||||
<template #list-item="{ item }">
|
||||
<div class="gl-flex gl-justify-between gl-gap-5 gl-whitespace-nowrap">
|
||||
<div>
|
||||
{{ item.text }}
|
||||
</div>
|
||||
<div class="gl-text-right gl-text-secondary">{{ item.formattedDate }}</div>
|
||||
</div>
|
||||
<div class="gl-text-right gl-text-secondary">{{ item.formattedDate }}</div>
|
||||
</div>
|
||||
</template>
|
||||
</template>
|
||||
</gl-disclosure-dropdown-group>
|
||||
<gl-disclosure-dropdown-group bordered border-position="top" :group="dropdownOptions[1]" />
|
||||
</gl-disclosure-dropdown>
|
||||
<gl-tooltip v-if="!isOpen" :target="toggleId">
|
||||
{{ $options.i18n.snooze }}
|
||||
</gl-tooltip>
|
||||
<snooze-todo-modal ref="custom-snooze-time-modal" :todo="todo" />
|
||||
</span>
|
||||
</template>
|
||||
|
|
|
|||
|
|
@ -0,0 +1,21 @@
|
|||
import snoozeTodoMutation from './components/mutations/snooze_todo.mutation.graphql';
|
||||
|
||||
export function snoozeTodo(apolloClient, todo, until) {
|
||||
return apolloClient.mutate({
|
||||
mutation: snoozeTodoMutation,
|
||||
variables: {
|
||||
todoId: todo.id,
|
||||
snoozeUntil: until,
|
||||
},
|
||||
optimisticResponse: {
|
||||
todoSnooze: {
|
||||
todo: {
|
||||
id: todo.id,
|
||||
snoozedUntil: until,
|
||||
__typename: 'Todo',
|
||||
},
|
||||
errors: [],
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
|
@ -19,8 +19,10 @@ module Resolvers
|
|||
def resolve(**args)
|
||||
return if resource_parent.nil?
|
||||
|
||||
work_item_finder = finder(prepare_finder_params(args))
|
||||
|
||||
Gitlab::IssuablesCountForState.new(
|
||||
finder(args),
|
||||
work_item_finder,
|
||||
resource_parent,
|
||||
fast_fail: true,
|
||||
store_in_redis_cache: true
|
||||
|
|
|
|||
|
|
@ -11,7 +11,5 @@ module Doorkeeper # rubocop:disable Gitlab/BoundedContexts -- Override from a ge
|
|||
has_one :openid_request,
|
||||
class_name: 'Doorkeeper::OpenidConnect::Request',
|
||||
inverse_of: :access_grant
|
||||
|
||||
attribute :organization_id, default: -> { Organizations::Organization::DEFAULT_ORGANIZATION_ID }
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -10,7 +10,5 @@ module Doorkeeper # rubocop:disable Gitlab/BoundedContexts -- Override from a ge
|
|||
columns_changing_default :organization_id
|
||||
|
||||
belongs_to :organization, class_name: 'Organizations::Organization', optional: false
|
||||
|
||||
attribute :organization_id, default: -> { Organizations::Organization::DEFAULT_ORGANIZATION_ID }
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -9,8 +9,6 @@ module Doorkeeper # rubocop:disable Gitlab/BoundedContexts -- Override from a ge
|
|||
columns_changing_default :organization_id
|
||||
|
||||
belongs_to :organization, class_name: 'Organizations::Organization', optional: false
|
||||
|
||||
attribute :organization_id, default: -> { Organizations::Organization::DEFAULT_ORGANIZATION_ID }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -14,8 +14,6 @@ module Doorkeeper # rubocop:disable Gitlab/BoundedContexts -- Override from a ge
|
|||
inverse_of: :openid_request
|
||||
|
||||
belongs_to :organization, class_name: 'Organizations::Organization', optional: false
|
||||
|
||||
attribute :organization_id, default: -> { Organizations::Organization::DEFAULT_ORGANIZATION_ID }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -46,7 +46,7 @@
|
|||
id: 'syntax-highlighting-theme',
|
||||
options: { class: 'js-preferences-form' }) do |c|
|
||||
- c.with_description do
|
||||
= s_('Preferences|Customize the appearance of the syntax.')
|
||||
= s_('Preferences|Choose the syntax highlighting theme used when viewing or editing code in GitLab.')
|
||||
= succeed '.' do
|
||||
= link_to _('Learn more'), help_page_path('user/profile/preferences.md', anchor: 'change-the-syntax-highlighting-theme'), target: '_blank', rel: 'noopener noreferrer'
|
||||
- c.with_body do
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@
|
|||
- breadcrumb_title "##{@pipeline.id}"
|
||||
- page_title _('Pipeline')
|
||||
- pipeline_has_errors = @pipeline.builds.empty? && @pipeline.error_messages.any?
|
||||
- pipeline_has_deprecated_dependency_scanning_enabled = @pipeline.builds.any? { |build| build.name.starts_with?('gemnasium') }
|
||||
- add_page_specific_style 'page_bundles/pipeline'
|
||||
- add_page_specific_style 'page_bundles/reports'
|
||||
- add_page_specific_style 'page_bundles/ci_status'
|
||||
|
|
@ -27,13 +26,4 @@
|
|||
= s_("Pipelines|Go to the pipeline editor")
|
||||
|
||||
- else
|
||||
- if pipeline_has_deprecated_dependency_scanning_enabled
|
||||
= render Pajamas::AlertComponent.new(title: _('You are using a deprecated Dependency Scanning analyzer'),
|
||||
variant: :warning,
|
||||
dismissible: true) do |c|
|
||||
- c.with_body do
|
||||
- help_page_link = help_page_url('user/application_security/dependency_scanning/dependency_scanning_sbom/index.md')
|
||||
- learn_more_link = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: help_page_link }
|
||||
= html_escape(s_('The Gemnasium analyzer has been replaced with a new Dependency Scanning analyzer. %{learn_more_link_start}Learn more%{link_end}.')) % { learn_more_link_start: learn_more_link, link_end: '</a>'.html_safe }
|
||||
|
||||
#js-pipeline-tabs{ data: js_pipeline_tabs_data(@project, @pipeline, @current_user) }
|
||||
|
|
|
|||
|
|
@ -3,6 +3,6 @@ introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/110817
|
|||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/392874
|
||||
milestone: '15.10'
|
||||
type: development
|
||||
group: group::pipeline security
|
||||
group: group::pipeline authoring
|
||||
default_enabled: false
|
||||
|
||||
|
|
|
|||
|
|
@ -137,11 +137,6 @@ Doorkeeper.configure do
|
|||
|
||||
enable_dynamic_scopes
|
||||
# Following doorkeeper monkey patches are to identify the organization on best effort basis
|
||||
Doorkeeper::Server.class_eval do
|
||||
def parameters
|
||||
{ organization_id: Organizations::Organization::DEFAULT_ORGANIZATION_ID }.with_indifferent_access.merge(context.request.parameters)
|
||||
end
|
||||
end
|
||||
|
||||
Doorkeeper::OAuth::PasswordAccessTokenRequest.class_eval do
|
||||
private
|
||||
|
|
|
|||
|
|
@ -70,7 +70,7 @@ DOCUMENTATION_UPDATE_MISSING = <<~MSG
|
|||
|
||||
For more information, see:
|
||||
|
||||
- The Handbook page on [merge request types](https://about.gitlab.com/handbook/engineering/metrics/#work-type-classification).
|
||||
- The Handbook page on [merge request types](https://handbook.gitlab.com/handbook/product/groups/product-analysis/engineering/metrics/#work-type-classification).
|
||||
- The [definition of done](https://docs.gitlab.com/ee/development/contributing/merge_request_workflow.html#definition-of-done) documentation.
|
||||
MSG
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
view_name: project_design_management_routes_view
|
||||
classes: []
|
||||
feature_categories:
|
||||
- backup_restore
|
||||
description: Project design repository references and its related routes
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/179449
|
||||
milestone: '17.9'
|
||||
gitlab_schema: gitlab_main
|
||||
table_size: small
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddProjectDesignManagementRoutesView < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.9'
|
||||
|
||||
def up
|
||||
execute <<-SQL
|
||||
CREATE VIEW project_design_management_routes_view AS
|
||||
SELECT p.id,
|
||||
p.repository_storage as repository_storage,
|
||||
r.path as path_with_namespace,
|
||||
r.name as name_with_namespace
|
||||
FROM design_management_repositories dr
|
||||
JOIN projects p
|
||||
ON (dr.project_id = p.id)
|
||||
JOIN routes r
|
||||
ON (p.id = r.source_id AND source_type = 'Project')
|
||||
SQL
|
||||
end
|
||||
|
||||
def down
|
||||
execute <<-SQL
|
||||
DROP VIEW project_design_management_routes_view;
|
||||
SQL
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class RemoveIndexPackagesDependenciesOnNameVersionPattern < Gitlab::Database::Migration[2.2]
|
||||
disable_ddl_transaction!
|
||||
milestone '17.9'
|
||||
|
||||
INDEX_NAME = :tmp_idx_packages_dependencies_on_name_version_pattern
|
||||
|
||||
def up
|
||||
remove_concurrent_index_by_name(:packages_dependencies, INDEX_NAME)
|
||||
end
|
||||
|
||||
def down
|
||||
add_concurrent_index(
|
||||
:packages_dependencies,
|
||||
%i[name version_pattern],
|
||||
unique: true,
|
||||
name: INDEX_NAME,
|
||||
where: 'project_id IS NULL'
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class RemoveIndexPackagesDependenciesOnIdWithoutProjectId < Gitlab::Database::Migration[2.2]
|
||||
disable_ddl_transaction!
|
||||
milestone '17.9'
|
||||
|
||||
INDEX_NAME = :tmp_index_packages_dependencies_on_id_without_project_id
|
||||
|
||||
def up
|
||||
remove_concurrent_index_by_name(:packages_dependencies, INDEX_NAME)
|
||||
end
|
||||
|
||||
def down
|
||||
add_concurrent_index(:packages_dependencies, :id, name: INDEX_NAME, where: 'project_id IS NULL')
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1 @@
|
|||
196f9265b7377f3f8781b34ecaad2a0a7b673d4df07c95cf86d8004642d38de6
|
||||
|
|
@ -0,0 +1 @@
|
|||
559b77532572872a373d071af5480fd9f8c2714b7bce7ae2f4df627bbbca5c44
|
||||
|
|
@ -0,0 +1 @@
|
|||
43f76ed4f248945fdfe4bac4bb9dafefc73dd04a22690fdc97ed3d68727ae9cf
|
||||
|
|
@ -19102,6 +19102,27 @@ CREATE SEQUENCE project_deploy_tokens_id_seq
|
|||
|
||||
ALTER SEQUENCE project_deploy_tokens_id_seq OWNED BY project_deploy_tokens.id;
|
||||
|
||||
CREATE TABLE routes (
|
||||
id bigint NOT NULL,
|
||||
source_id bigint NOT NULL,
|
||||
source_type character varying NOT NULL,
|
||||
path character varying NOT NULL,
|
||||
created_at timestamp without time zone,
|
||||
updated_at timestamp without time zone,
|
||||
name character varying,
|
||||
namespace_id bigint,
|
||||
CONSTRAINT check_af84c6c93f CHECK ((namespace_id IS NOT NULL))
|
||||
);
|
||||
|
||||
CREATE VIEW project_design_management_routes_view AS
|
||||
SELECT p.id,
|
||||
p.repository_storage,
|
||||
r.path AS path_with_namespace,
|
||||
r.name AS name_with_namespace
|
||||
FROM ((design_management_repositories dr
|
||||
JOIN projects p ON ((dr.project_id = p.id)))
|
||||
JOIN routes r ON (((p.id = r.source_id) AND ((r.source_type)::text = 'Project'::text))));
|
||||
|
||||
CREATE TABLE project_error_tracking_settings (
|
||||
project_id bigint NOT NULL,
|
||||
enabled boolean DEFAULT false NOT NULL,
|
||||
|
|
@ -19360,18 +19381,6 @@ CREATE SEQUENCE project_repository_storage_moves_id_seq
|
|||
|
||||
ALTER SEQUENCE project_repository_storage_moves_id_seq OWNED BY project_repository_storage_moves.id;
|
||||
|
||||
CREATE TABLE routes (
|
||||
id bigint NOT NULL,
|
||||
source_id bigint NOT NULL,
|
||||
source_type character varying NOT NULL,
|
||||
path character varying NOT NULL,
|
||||
created_at timestamp without time zone,
|
||||
updated_at timestamp without time zone,
|
||||
name character varying,
|
||||
namespace_id bigint,
|
||||
CONSTRAINT check_af84c6c93f CHECK ((namespace_id IS NOT NULL))
|
||||
);
|
||||
|
||||
CREATE VIEW project_routes_view AS
|
||||
SELECT p.id,
|
||||
p.repository_storage,
|
||||
|
|
@ -35347,8 +35356,6 @@ CREATE INDEX tmp_idx_orphaned_approval_merge_request_rules ON approval_merge_req
|
|||
|
||||
CREATE INDEX tmp_idx_orphaned_approval_project_rules ON approval_project_rules USING btree (id) WHERE ((report_type = ANY (ARRAY[2, 4])) AND (security_orchestration_policy_configuration_id IS NULL));
|
||||
|
||||
CREATE UNIQUE INDEX tmp_idx_packages_dependencies_on_name_version_pattern ON packages_dependencies USING btree (name, version_pattern) WHERE (project_id IS NULL);
|
||||
|
||||
CREATE INDEX tmp_idx_packages_on_project_id_when_mvn_not_pending_destruction ON packages_packages USING btree (project_id) WHERE ((package_type = 1) AND (status <> 4));
|
||||
|
||||
CREATE INDEX tmp_index_ci_job_artifacts_on_expire_at_where_locked_unknown ON ci_job_artifacts USING btree (expire_at, job_id) WHERE ((locked = 2) AND (expire_at IS NOT NULL));
|
||||
|
|
@ -35361,8 +35368,6 @@ CREATE UNIQUE INDEX tmp_index_issues_on_tmp_epic_id ON issues USING btree (tmp_e
|
|||
|
||||
CREATE INDEX tmp_index_packages_conan_file_metadata_on_id_for_migration ON packages_conan_file_metadata USING btree (id) WHERE ((package_reference_id IS NULL) AND (conan_package_reference IS NOT NULL));
|
||||
|
||||
CREATE INDEX tmp_index_packages_dependencies_on_id_without_project_id ON packages_dependencies USING btree (id) WHERE (project_id IS NULL);
|
||||
|
||||
CREATE INDEX tmp_index_pats_on_notification_columns_and_expires_at ON personal_access_tokens USING btree (id) WHERE ((expire_notification_delivered IS TRUE) AND (seven_days_notification_sent_at IS NULL) AND (expires_at IS NOT NULL));
|
||||
|
||||
CREATE INDEX tmp_index_project_statistics_cont_registry_size ON project_statistics USING btree (project_id) WHERE (container_registry_size = 0);
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ This feature was [deprecated](https://gitlab.com/groups/gitlab-org/configure/-/e
|
|||
|
||||
## Discover certificate-based clusters
|
||||
|
||||
Gets certificate-based clusters that are registered to a group, subgroup, or project.
|
||||
Gets certificate-based clusters that are registered to a group, subgroup, or project. Disabled and enabled clusters are also returned.
|
||||
|
||||
```plaintext
|
||||
GET /discover-cert-based-clusters
|
||||
|
|
|
|||
|
|
@ -58,7 +58,7 @@ curl "https://gitlab.com/api/graphql" --header "Authorization: Bearer $GRAPHQL_T
|
|||
```
|
||||
|
||||
To nest strings in the query string,
|
||||
wrap the data in single quotes or escape the strings with `\\`:
|
||||
wrap the data in single quotes or escape the strings with <code>\\</code>:
|
||||
|
||||
```shell
|
||||
curl "https://gitlab.com/api/graphql" --header "Authorization: Bearer $GRAPHQL_TOKEN" \
|
||||
|
|
|
|||
|
|
@ -297,8 +297,8 @@ Use one of the following methods to determine the value for `DOCKER_AUTH_CONFIG`
|
|||
bXlfdXNlcm5hbWU6bXlfcGFzc3dvcmQ=
|
||||
```
|
||||
|
||||
NOTE:
|
||||
If your username includes special characters like `@`, you must escape them with a backslash (`\`) to prevent authentication problems.
|
||||
NOTE:
|
||||
If your username includes special characters like `@`, you must escape them with a backslash (<code>\</code>) to prevent authentication problems.
|
||||
|
||||
Create the Docker JSON configuration content as follows:
|
||||
|
||||
|
|
|
|||
|
|
@ -3441,7 +3441,7 @@ You must:
|
|||
|
||||
- Define [`artifacts`](#artifacts) with a path to the content directory, which is
|
||||
`public` by default.
|
||||
- Use [`publish`](#pagespublish) if want to use a different content directory.
|
||||
- Use [`pages.publish`](#pagespagespublish) if want to use a different content directory.
|
||||
|
||||
**Keyword type**: Job name.
|
||||
|
||||
|
|
@ -3463,18 +3463,20 @@ pages:
|
|||
This example renames the `my-html-content/` directory to `public/`.
|
||||
This directory is exported as an artifact and published with GitLab Pages.
|
||||
|
||||
#### `pages:publish`
|
||||
#### `pages:pages.publish`
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/415821) in GitLab 16.1.
|
||||
> - [Changed](https://gitlab.com/gitlab-org/gitlab/-/issues/500000) to allow variables when passed to `publish` property in GitLab 17.9.
|
||||
> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/428018) the `publish` property under the `pages` keyword in GitLab 17.9.
|
||||
|
||||
Use `publish` to configure the content directory of a [`pages` job](#pages).
|
||||
Use `pages.publish` to configure the content directory of a [`pages` job](#pages).
|
||||
The top-level `publish` keyword is deprecated as of GitLab 17.9 and must now be nested under the `pages` keyword.
|
||||
|
||||
**Keyword type**: Job keyword. You can use it only as part of a `pages` job.
|
||||
|
||||
**Supported values**: A path to a directory containing the Pages content.
|
||||
|
||||
**Example of `publish`**:
|
||||
**Example of `pages.publish`**:
|
||||
|
||||
```yaml
|
||||
pages:
|
||||
|
|
@ -3484,7 +3486,8 @@ pages:
|
|||
artifacts:
|
||||
paths:
|
||||
- dist
|
||||
publish: dist
|
||||
pages:
|
||||
publish: dist
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
environment: production
|
||||
|
|
@ -3494,7 +3497,7 @@ This example uses [Eleventy](https://www.11ty.dev) to generate a static website
|
|||
output the generated HTML files into a the `dist/` directory. This directory is exported
|
||||
as an artifact and published with GitLab Pages.
|
||||
|
||||
It is also possible to use variables in the `publish` field. For example:
|
||||
It is also possible to use variables in the `pages.publish` field. For example:
|
||||
|
||||
```yaml
|
||||
pages:
|
||||
|
|
@ -3505,7 +3508,8 @@ pages:
|
|||
artifacts:
|
||||
paths:
|
||||
- $CUSTOM_FOLDER/$CUSTOM_SUBFOLDER
|
||||
publish: $CUSTOM_FOLDER/$CUSTOM_SUBFOLDER
|
||||
pages:
|
||||
publish: $CUSTOM_FOLDER/$CUSTOM_SUBFOLDER
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
variables:
|
||||
|
|
|
|||
|
|
@ -1162,9 +1162,9 @@ Do not use **Self-Hosted** by itself.
|
|||
|
||||
## GitLab Duo Workflow
|
||||
|
||||
Use **GitLab Duo Workflow**. After first use, use **Duo Workflow**.
|
||||
Use **GitLab Duo Workflow**. After first use, use **Workflow**.
|
||||
|
||||
Do not use **Workflow** by itself.
|
||||
Do not use **Duo Workflow** by itself.
|
||||
|
||||
## GitLab Flavored Markdown
|
||||
|
||||
|
|
|
|||
|
|
@ -160,7 +160,7 @@ In most cases the anchors `\A` for beginning of text and `\z` for end of text sh
|
|||
|
||||
### Escape sequences in Go
|
||||
|
||||
When a character in a string literal or regular expression literal is preceded by a backslash, it is interpreted as part of an escape sequence. For example, the escape sequence `\n` in a string literal corresponds to a single `newline` character, and not the `\` and `n` characters.
|
||||
When a character in a string literal or regular expression literal is preceded by a backslash, it is interpreted as part of an escape sequence. For example, the escape sequence `\n` in a string literal corresponds to a single `newline` character, and not the <code>\</code> and `n` characters.
|
||||
|
||||
There are two Go escape sequences that could produce surprising results. First, `regexp.Compile("\a")` matches the bell character, whereas `regexp.Compile("\\A")` matches the start of text and `regexp.Compile("\\a")` is a Vim (but not Go) regular expression matching any alphabetic character. Second, `regexp.Compile("\b")` matches a backspace, whereas `regexp.Compile("\\b")` matches the start of a word. Confusing one for the other could lead to a regular expression passing or failing much more often than expected, with potential security consequences.
|
||||
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ E2E test framework utilizes administrator user for certain resource creation, li
|
|||
|
||||
- `GITLAB_ADMIN_USERNAME`
|
||||
- `GITLAB_ADMIN_PASSWORD`
|
||||
- `GITLAB_QA_ADMIN_ACCESS_TOKEN`
|
||||
- `GITLAB_QA_ADMIN_ACCESS_TOKEN`: this variable is optional and would be created via UI using administrator credentials when not set.
|
||||
|
||||
Administrator user can be accessed via global accessor method `QA::Runtime::User::Store.admin_user`.
|
||||
|
||||
|
|
@ -21,7 +21,11 @@ All tests running against one of the [test-pipelines](../test_pipelines.md) auto
|
|||
|
||||
### Using single user
|
||||
|
||||
It is advised to not run all tests using single user but certain environments impose limitations for generating new user for each test. In such case, `test user` is initialized using credentials from environments variables - `GITLAB_USERNAME` and `GITLAB_PASSWORD`. Additionally, to provide a pre-configured personal access token for test user, `GITLAB_QA_ACCESS_TOKEN` variable can be set.
|
||||
It is advised to not run all tests using single user but certain environments impose limitations for generating new user for each test. In order to forcefully disable unique test user creation, environment variable `QA_CREATE_UNIQUE_TEST_USERS` should be set to false. Example reason why unique user creation might be disabled:
|
||||
|
||||
- environment does have administrator user available and can create new users but it has only one top level group with ultimate license. In such case, a single user which is a member of this group has to be used due to new unique users not having access to the common group with ultimate license.
|
||||
|
||||
In such case, `test user` is initialized using credentials from environments variables - `GITLAB_USERNAME` and `GITLAB_PASSWORD`. Additionally, to provide a pre-configured personal access token for test user, `GITLAB_QA_ACCESS_TOKEN` variable can be set.
|
||||
|
||||
### No admin environments
|
||||
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ source: /doc/user/search/advanced_search.md
|
|||
| `+` | And | [`display +banner`](https://gitlab.com/search?group_id=9970&project_id=278964&repository_ref=&scope=blobs&search=display+%2Bbanner&snippets=) |
|
||||
| `-` | Exclude | [`display -banner`](https://gitlab.com/search?group_id=9970&project_id=278964&scope=blobs&search=display+-banner) |
|
||||
| `*` | Partial | [`bug error 50*`](https://gitlab.com/search?group_id=9970&project_id=278964&repository_ref=&scope=blobs&search=bug+error+50%2A&snippets=) |
|
||||
| `\` | Escape | [`\*md`](https://gitlab.com/search?snippets=&scope=blobs&repository_ref=&search=%5C*md&group_id=9970&project_id=278964) |
|
||||
| <code>\</code> | Escape | [`\*md`](https://gitlab.com/search?snippets=&scope=blobs&repository_ref=&search=%5C*md&group_id=9970&project_id=278964) |
|
||||
| `#` | Issue ID | [`#23456`](https://gitlab.com/search?snippets=&scope=issues&repository_ref=&search=%2323456&group_id=9970&project_id=278964) |
|
||||
| `!` | Merge request ID | [`!23456`](https://gitlab.com/search?snippets=&scope=merge_requests&repository_ref=&search=%2123456&group_id=9970&project_id=278964) |
|
||||
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@ subject to change or delay and remain at the sole discretion of GitLab Inc.
|
|||
|
||||
GitLab Duo Workflow is an AI-powered coding agent in the Visual Studio Code (VS Code) IDE.
|
||||
|
||||
Duo Workflow:
|
||||
Workflow:
|
||||
|
||||
- Is designed to help you solve junior-level coding tasks more quickly,
|
||||
such as drafting code for small features or bugs.
|
||||
|
|
@ -39,17 +39,17 @@ Duo Workflow:
|
|||
|
||||
For more information, see:
|
||||
|
||||
- [How to use Duo Workflow](#use-gitlab-duo-workflow-in-vs-code).
|
||||
- [How to use Workflow](#use-gitlab-duo-workflow-in-vs-code).
|
||||
- [How to get the best results](#how-to-get-the-best-results).
|
||||
|
||||
## Risks of Duo Workflow and AI Agents
|
||||
## Risks of Workflow and AI Agents
|
||||
|
||||
Duo Workflow is an experimental product and users should consider their
|
||||
circumstances before using this tool. Duo Workflow is an AI Agent that is given
|
||||
Workflow is an experimental product and users should consider their
|
||||
circumstances before using this tool. Workflow is an AI Agent that is given
|
||||
some ability to perform actions on the users behalf. AI tools based on LLMs are
|
||||
inherently unpredictable and you should take appropriate precautions.
|
||||
|
||||
Duo Workflow in VS Code runs workflows in a Docker container on your local
|
||||
Workflow in VS Code runs workflows in a Docker container on your local
|
||||
workstation. Running Duo Worklow inside of Docker is not a security measure but a
|
||||
convenience to reduce the amount of disruption to your normal development
|
||||
environment. All the documented risks should be considered before using this
|
||||
|
|
@ -59,14 +59,14 @@ product. The following risks are important to understand:
|
|||
Engine running on the host as this offers less isolation. Since Docker
|
||||
Engine is the most common way to run Docker on Linux we will likely not
|
||||
support many Linux setups by default, but instead we'll require them to
|
||||
install an additional Docker runtime to use Duo Workflow.
|
||||
install an additional Docker runtime to use Workflow.
|
||||
1. This VM running on your local workstation likely has access to your local
|
||||
network, unless you have created additional firewall rules to prevent it.
|
||||
Local network access may be an issue if you are running local development
|
||||
servers on your host that you would not want reachable by the workflow
|
||||
commands. Local network access may also be risky in a corporate intranet
|
||||
environment where you have internal resources that you do not want
|
||||
accessible by Duo Workflow.
|
||||
accessible by Workflow.
|
||||
1. The VM may be able to consume a lot of CPU, RAM and storage based on the
|
||||
limits configured with your Docker VM installation.
|
||||
1. Depending on the configuration of the VM in your Docker installation it may
|
||||
|
|
@ -75,37 +75,37 @@ product. The following risks are important to understand:
|
|||
eventually lead to code execution escaping the VM to the host or accessing
|
||||
resources on the host that you didn't intend.
|
||||
1. Each version of Docker has different ways of mounting directories into the
|
||||
containers. Duo Workflow only mounts the directory for the project you have
|
||||
containers. Workflow only mounts the directory for the project you have
|
||||
open in VS Code but depending on how your Docker installation works and
|
||||
whether or not you are running other containers there may still be some
|
||||
risks it could access other parts of your filesystem.
|
||||
1. Duo Workflow has access to the local filesystem of the
|
||||
project where you started running Duo Workflow. This may include access to
|
||||
1. Workflow has access to the local filesystem of the
|
||||
project where you started running Workflow. This may include access to
|
||||
any credentials that you have stored in files in this directory, even if they
|
||||
are not committed to the project (e.g. `.env` files)
|
||||
1. All your Docker containers usually run in a single VM. So this
|
||||
may mean that Duo Workflow containers are running in the same VM as other
|
||||
non Duo Workflow containers. While the containers are isolated to some
|
||||
may mean that Workflow containers are running in the same VM as other
|
||||
non Workflow containers. While the containers are isolated to some
|
||||
degree this isolation is not as strict as VM level isolation
|
||||
|
||||
Other risks to be aware of when using Duo Workflow:
|
||||
Other risks to be aware of when using Workflow:
|
||||
|
||||
1. Duo Workflow also gets access to a time-limited `ai_workflows` scoped GitLab
|
||||
1. Workflow also gets access to a time-limited `ai_workflows` scoped GitLab
|
||||
OAuth token with your user's identity. This token can be used to access
|
||||
certain GitLab APIs on your behalf. This token is limited to the duration of
|
||||
the workflow and only has access to certain APIs in GitLab but it can still,
|
||||
by design, perform write operations on the users behalf. You should consider
|
||||
what access your user has in GitLab before running workflows.
|
||||
1. You should not give Duo Workflow any additional credentials or secrets, in
|
||||
1. You should not give Workflow any additional credentials or secrets, in
|
||||
goals or messages, as there is a chance it might end up using those in code
|
||||
or other API calls.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before you can use GitLab Duo Workflow:
|
||||
Before you can use Workflow:
|
||||
|
||||
1. Ensure you have an account on GitLab.com.
|
||||
1. Ensure that the GitLab.com project you want to use with Duo Workflow meets these requirements:
|
||||
1. Ensure that the GitLab.com project you want to use with Workflow meets these requirements:
|
||||
- You must have at least the Developer role for the project.
|
||||
- Your project must belong to a [group namespace](../namespace/index.md)
|
||||
with an **Ultimate** subscription and [experimental features turned on](../gitlab_duo/turn_on_off.md#turn-on-beta-and-experimental-features).
|
||||
|
|
@ -117,7 +117,7 @@ Before you can use GitLab Duo Workflow:
|
|||
|
||||
### Install Docker and set the socket file path
|
||||
|
||||
GitLab Duo Workflow needs an execution platform like Docker where it can execute arbitrary code,
|
||||
Workflow needs an execution platform like Docker where it can execute arbitrary code,
|
||||
read and write files, and make API calls to GitLab.
|
||||
|
||||
If you are on macOS or Linux, you can either:
|
||||
|
|
@ -170,24 +170,24 @@ that get installed with the script.
|
|||
|
||||
## Use GitLab Duo Workflow in VS Code
|
||||
|
||||
To use GitLab Duo Workflow:
|
||||
To use Workflow:
|
||||
|
||||
1. In VS Code, open the Git repository folder for your GitLab project.
|
||||
- You must check out the branch for the code you would like to change.
|
||||
1. Open the command palette:
|
||||
- On macOS: <kbd>Cmd</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd>
|
||||
- On Windows and Linux: <kbd>Ctrl</kbd> + <kbd>P</kbd>.
|
||||
1. Type `Duo Workflow` and select **GitLab: Show Duo Workflow**.
|
||||
1. Type `GitLab Duo Workflow` and select **GitLab: Show Duo Workflow**.
|
||||
1. To create a workflow, select **New workflow**.
|
||||
1. For **Task description**, specify a junior-level code task in detail,
|
||||
and then select **Start**.
|
||||
|
||||
After you describe your task, Duo Workflow generates and executes on a plan to address it.
|
||||
After you describe your task, Workflow generates and executes on a plan to address it.
|
||||
While it executes, you can pause or ask it to adjust the plan.
|
||||
|
||||
### How to get the best results
|
||||
|
||||
When you describe your task to Duo Workflow, keep these tips in mind to get the best results:
|
||||
When you describe your task to Workflow, keep these tips in mind to get the best results:
|
||||
|
||||
- It works best within these conditions:
|
||||
- Code tasks on the level of a junior engineer.
|
||||
|
|
@ -198,11 +198,11 @@ When you describe your task to Duo Workflow, keep these tips in mind to get the
|
|||
- Try to add implementation examples, with commit or merge request IDs.
|
||||
- Mention files by their names, and GitLab references by their IDs.
|
||||
For example, project, issue, or merge request IDs.
|
||||
For more information, see [the context that it's aware of](#the-context-gitlab-duo-workflow-is-aware-of).
|
||||
For more information, see [the context that it's aware of](#the-context-workflow-is-aware-of).
|
||||
|
||||
## Supported languages
|
||||
|
||||
Duo Workflow officially supports the following languages:
|
||||
Workflow officially supports the following languages:
|
||||
|
||||
- CSS
|
||||
- Go
|
||||
|
|
@ -214,24 +214,24 @@ Duo Workflow officially supports the following languages:
|
|||
- Ruby
|
||||
- TypeScript
|
||||
|
||||
## The context GitLab Duo Workflow is aware of
|
||||
## The context Workflow is aware of
|
||||
|
||||
GitLab Duo Workflow is aware of the context you're working in, specifically:
|
||||
Workflow is aware of the context you're working in, specifically:
|
||||
|
||||
| Area | How to use GitLab Duo Workflow |
|
||||
| Area | How to use GitLab Workflow |
|
||||
|-------------------------------|--------------------------------|
|
||||
| Epics | Enter the epic ID and the name of the group the epic is in. The group must include a project that meets the [prerequisites](#prerequisites). |
|
||||
| Issues | Enter the issue ID if it's in the current project. In addition, enter the project ID if it is in a different project. The other project must also meet the [prerequisites](#prerequisites). |
|
||||
| Local files | Duo Workflow can access all files available to Git in the project you have open in your editor. Enter the file path to reference a specific file. |
|
||||
| Local files | Workflow can access all files available to Git in the project you have open in your editor. Enter the file path to reference a specific file. |
|
||||
| Merge requests | Enter the merge request ID if it's in the current project. In addition, enter the project ID if it's in a different project. The other project must also meet the [prerequisites](#prerequisites). |
|
||||
| Merge request pipelines | Enter the merge request ID that has the pipeline, if it's in the current project. In addition, enter the project ID if it's in a different project. The other project must also meet the [prerequisites](#prerequisites). |
|
||||
|
||||
Duo Workflow also has access to the GitLab [Search API](../../api/search.md) to find related issues or merge requests.
|
||||
Workflow also has access to the GitLab [Search API](../../api/search.md) to find related issues or merge requests.
|
||||
|
||||
## APIs that Duo Workflow has access to
|
||||
## APIs that Workflow has access to
|
||||
|
||||
To create solutions and understand the context of the problem,
|
||||
Duo Workflow accesses several GitLab APIs.
|
||||
Workflow accesses several GitLab APIs.
|
||||
|
||||
Specifically, an OAuth token with the `ai_workflows` scope has access
|
||||
to the following APIs:
|
||||
|
|
@ -248,12 +248,12 @@ to the following APIs:
|
|||
|
||||
## Current limitations
|
||||
|
||||
Duo Workflow has the following limitations:
|
||||
Workflow has the following limitations:
|
||||
|
||||
- Requires the workspace folder in VS Code to have a Git repository for a GitLab project.
|
||||
- Only runs workflows for the GitLab project that's open in VS Code.
|
||||
- Only accesses files in the current branch and project.
|
||||
- Only accesses GitLab references in the GitLab instance of your project. For example, if your project is in GitLab.com, Duo Workflow only accesses GitLab references in that instance. It cannot access external sources or the web.
|
||||
- Only accesses GitLab references in the GitLab instance of your project. For example, if your project is in GitLab.com, Workflow only accesses GitLab references in that instance. It cannot access external sources or the web.
|
||||
- Only reliably accesses GitLab references if provided with their IDs. For example, issue ID and not issue URL.
|
||||
- Can be slow or fail in large repositories.
|
||||
|
||||
|
|
@ -287,17 +287,17 @@ If you encounter issues:
|
|||
1. To open the logs in VS Code, select **View** > **Output**. In the output panel at the bottom, in the top-right corner, select **GitLab Workflow** or **GitLab Language Server** from the list.
|
||||
1. Review for errors, warnings, connection issues, or authentication problems.
|
||||
1. Check the executor logs:
|
||||
1. Use `docker ps -a | grep duo-workflow` to get the list of Duo Workflow containers and their ids.
|
||||
1. Use `docker ps -a | grep duo-workflow` to get the list of Workflow containers and their ids.
|
||||
1. Use `docker logs <container_id>` to view the logs for the specific container.
|
||||
1. Examine the [Duo Workflow Service production LangSmith trace](https://smith.langchain.com/o/477de7ad-583e-47b6-a1c4-c4a0300e7aca/projects/p/5409132b-2cf3-4df8-9f14-70204f90ed9b?timeModel=%7B%22duration%22%3A%227d%22%7D&tab=0).
|
||||
1. Examine the [Workflow Service production LangSmith trace](https://smith.langchain.com/o/477de7ad-583e-47b6-a1c4-c4a0300e7aca/projects/p/5409132b-2cf3-4df8-9f14-70204f90ed9b?timeModel=%7B%22duration%22%3A%227d%22%7D&tab=0).
|
||||
|
||||
## Audit log
|
||||
|
||||
An audit event is created for each API request done by Duo Workflow.
|
||||
An audit event is created for each API request done by Workflow.
|
||||
On your GitLab Self-Managed instance, you can view these events on the [instance audit events](../../administration/audit_event_reports.md#instance-audit-events) page.
|
||||
|
||||
## Give feedback
|
||||
|
||||
Duo Workflow is an experiment and your feedback is crucial to improve it for you and others.
|
||||
Workflow is an experiment and your feedback is crucial to improve it for you and others.
|
||||
To report issues or suggest improvements,
|
||||
[complete this survey](https://gitlab.fra1.qualtrics.com/jfe/form/SV_9GmCPTV7oH9KNuu).
|
||||
|
|
|
|||
|
|
@ -58,6 +58,8 @@ To improve your workflow while authoring code, try these features:
- [The IDE](../gitlab_duo_chat/examples.md#explain-selected-code).
- [A file](../../user/project/repository/code_explain.md).
- [A merge request](../../user/project/merge_requests/changes.md#explain-code-in-a-merge-request).
- <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [Watch overview](https://youtu.be/1izKaLmmaCA?si=O2HDokLLujRro_3O)
  <!-- Video published on 2023-11-18 -->
- [Test Generation](../gitlab_duo_chat/examples.md#write-tests-in-the-ide): Test your code by generating tests.
  <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [Watch overview](https://www.youtube.com/watch?v=zWhwuixUkYU)
- [Refactor Code](../gitlab_duo_chat/examples.md#refactor-code-in-the-ide): Improve or refactor the selected code.
|
|
|||
|
|
@@ -59,7 +59,7 @@ You can refine user search with `simple_query_string`.
| `+` | And | [`display +banner`](https://gitlab.com/search?group_id=9970&project_id=278964&repository_ref=&scope=blobs&search=display+%2Bbanner&snippets=) |
| `-` | Exclude | [`display -banner`](https://gitlab.com/search?group_id=9970&project_id=278964&scope=blobs&search=display+-banner) |
| `*` | Partial | [`bug error 50*`](https://gitlab.com/search?group_id=9970&project_id=278964&repository_ref=&scope=blobs&search=bug+error+50%2A&snippets=) |
| `\` | Escape | [`\*md`](https://gitlab.com/search?snippets=&scope=blobs&repository_ref=&search=%5C*md&group_id=9970&project_id=278964) |
| <code>\</code> | Escape | [`\*md`](https://gitlab.com/search?snippets=&scope=blobs&repository_ref=&search=%5C*md&group_id=9970&project_id=278964) |
| `#` | Issue ID | [`#23456`](https://gitlab.com/search?snippets=&scope=issues&repository_ref=&search=%2323456&group_id=9970&project_id=278964) |
| `!` | Merge request ID | [`!23456`](https://gitlab.com/search?snippets=&scope=merge_requests&repository_ref=&search=%2123456&group_id=9970&project_id=278964) |
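Each example link above is just the raw query, operators included, URL-encoded into the `search` parameter. A minimal sketch (illustrative only; the parameter names are taken from the example links, everything else is assumed):

```javascript
// Illustrative only: builds a blob-scoped search URL like the example links
// in the table above. The query string, operators included, is URL-encoded
// into the `search` parameter.
function buildBlobSearchUrl(query, { groupId, projectId }) {
  const params = new URLSearchParams({
    group_id: groupId,
    project_id: projectId,
    scope: 'blobs',
    search: query, // e.g. 'display +banner', 'display -banner', 'bug error 50*'
  });
  return `https://gitlab.com/search?${params.toString()}`;
}

// buildBlobSearchUrl('display +banner', { groupId: 9970, projectId: 278964 })
// => 'https://gitlab.com/search?group_id=9970&project_id=278964&scope=blobs&search=display+%2Bbanner'
```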
|
|
|||
|
|
@@ -167,7 +167,8 @@ You can snooze to-do items to temporarily hide them from your main To-Do List. T
To snooze a to-do item:

1. In your To-Do List, next to the to-do item you want to snooze, select **Snooze** (**{clock}**).
1. Choose one of the preset snooze durations:
1. To snooze the to-do item until a specific time and date, select the
   **Until a specific time and date** option. Otherwise, choose one of the preset snooze durations:
   - For one hour
   - Until later today (4 hours later)
   - Until tomorrow (tomorrow at 8 AM local time)
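The three presets above are plain local-time arithmetic. A rough sketch (illustrative only; the function name and shape are assumptions, not the code in this commit):

```javascript
// Illustrative only: computes the preset snooze times described above.
// "Until later today" is the current time plus 4 hours; "until tomorrow" is
// 08:00 local time on the next calendar day.
function snoozePresets(now = new Date()) {
  const oneHour = new Date(now.getTime() + 1 * 60 * 60 * 1000);
  const laterToday = new Date(now.getTime() + 4 * 60 * 60 * 1000);
  const tomorrow = new Date(now);
  tomorrow.setDate(tomorrow.getDate() + 1);
  tomorrow.setHours(8, 0, 0, 0); // 8 AM local time
  return { oneHour, laterToday, tomorrow };
}

// With the fake date the specs further down use (2024-12-18T13:24:00 local):
// oneHour    -> 2024-12-18 14:24
// laterToday -> 2024-12-18 17:24
// tomorrow   -> 2024-12-19 08:00
```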
|
|
|||
|
|
@ -4034,6 +4034,9 @@ msgstr ""
|
|||
msgid "AdminSelfHostedModels|Model uses an API token"
|
||||
msgstr ""
|
||||
|
||||
msgid "AdminSelfHostedModels|More models are available in beta. You can %{linkStart}turn on self-hosted model beta features%{linkEnd}."
|
||||
msgstr ""
|
||||
|
||||
msgid "AdminSelfHostedModels|Name"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -35526,6 +35529,9 @@ msgstr ""
|
|||
msgid "MergeConflict|Committing..."
|
||||
msgstr ""
|
||||
|
||||
msgid "MergeConflict|Edit inline"
|
||||
msgstr ""
|
||||
|
||||
msgid "MergeConflict|Use ours"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -43217,6 +43223,9 @@ msgstr ""
|
|||
msgid "Preferences|Choose the navigation theme."
|
||||
msgstr ""
|
||||
|
||||
msgid "Preferences|Choose the syntax highlighting theme used when viewing or editing code in GitLab."
|
||||
msgstr ""
|
||||
|
||||
msgid "Preferences|Choose what content you want to see by default on your homepage."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -43244,9 +43253,6 @@ msgstr ""
|
|||
msgid "Preferences|Customize integrations with third party services."
|
||||
msgstr ""
|
||||
|
||||
msgid "Preferences|Customize the appearance of the syntax."
|
||||
msgstr ""
|
||||
|
||||
msgid "Preferences|Customize the behavior of the system layout and default views."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -48301,10 +48307,10 @@ msgstr ""
|
|||
msgid "Resolve conflicts"
|
||||
msgstr ""
|
||||
|
||||
msgid "Resolve conflicts on source branch"
|
||||
msgid "Resolve locally"
|
||||
msgstr ""
|
||||
|
||||
msgid "Resolve locally"
|
||||
msgid "Resolve source branch %{source_branch_name} conflicts using interactive mode to select %{use_ours} or %{use_theirs}, or manually using %{edit_inline}."
|
||||
msgstr ""
|
||||
|
||||
msgid "Resolve thread"
|
||||
|
|
@ -57142,9 +57148,6 @@ msgstr ""
|
|||
msgid "The CSV export will be created in the background. Once finished, it will be sent to %{email} in an attachment."
|
||||
msgstr ""
|
||||
|
||||
msgid "The Gemnasium analyzer has been replaced with a new Dependency Scanning analyzer. %{learn_more_link_start}Learn more%{link_end}."
|
||||
msgstr ""
|
||||
|
||||
msgid "The GitLab subscription service (customers.gitlab.com) is currently experiencing an outage. You can monitor the status and get updates at %{linkStart}status.gitlab.com%{linkEnd}."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -59497,9 +59500,6 @@ msgstr ""
|
|||
msgid "To remove the read-only state, reduce git repository and git LFS storage."
|
||||
msgstr ""
|
||||
|
||||
msgid "To resolve the conflicts, either use interactive mode to select %{use_ours} or %{use_theirs}, or edit the files inline. Commit these changes into %{branch_name}."
|
||||
msgstr ""
|
||||
|
||||
msgid "To resolve the problem, refine your search criteria. Select a group or project or use double quotes for multiple keywords (for example, %{code_open}\"your search\"%{code_close})."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -59593,6 +59593,9 @@ msgstr ""
|
|||
msgid "Todos|Assigned"
|
||||
msgstr ""
|
||||
|
||||
msgid "Todos|At"
|
||||
msgstr ""
|
||||
|
||||
msgid "Todos|Build failed"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -59753,6 +59756,12 @@ msgstr ""
|
|||
msgid "Todos|Snooze"
|
||||
msgstr ""
|
||||
|
||||
msgid "Todos|Snooze date can't be in the past."
|
||||
msgstr ""
|
||||
|
||||
msgid "Todos|Snooze until"
|
||||
msgstr ""
|
||||
|
||||
msgid "Todos|Snooze..."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -59780,9 +59789,18 @@ msgstr ""
|
|||
msgid "Todos|Sorry, your filter produced no results"
|
||||
msgstr ""
|
||||
|
||||
msgid "Todos|The date is required."
|
||||
msgstr ""
|
||||
|
||||
msgid "Todos|The pipeline failed"
|
||||
msgstr ""
|
||||
|
||||
msgid "Todos|The selected date and time cannot be in the past."
|
||||
msgstr ""
|
||||
|
||||
msgid "Todos|The time is required."
|
||||
msgstr ""
|
||||
|
||||
msgid "Todos|There are no done to-do items yet."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -59798,6 +59816,9 @@ msgstr ""
|
|||
msgid "Todos|Unmergeable"
|
||||
msgstr ""
|
||||
|
||||
msgid "Todos|Until a specific time and date"
|
||||
msgstr ""
|
||||
|
||||
msgid "Todos|Until later today"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -65987,9 +66008,6 @@ msgstr ""
|
|||
msgid "You are trying to upload something other than an image. Please upload a .png, .jpg, .jpeg, .gif, .bmp, .tiff or .ico."
|
||||
msgstr ""
|
||||
|
||||
msgid "You are using a deprecated Dependency Scanning analyzer"
|
||||
msgstr ""
|
||||
|
||||
msgid "You are using a trial license. When you use a paid subscription, you'll be charged for %{trueUpLinkStart}users over license%{trueUpLinkEnd}."
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
ARG GDK_SHA=d2c7d9655805919511f234743ca2b3d8e178ac6f
|
||||
ARG GDK_SHA=53baf9d7be970535d97d5fc5ae264d8d6eabe221
|
||||
# Use tag prefix when running on 'stable' branch to make sure 'protected' image is used which is not deleted by registry cleanup
|
||||
ARG GDK_BASE_TAG_PREFIX
|
||||
|
||||
|
|
|
|||
|
|
@@ -680,6 +680,13 @@ module QA
|
|||
enabled?(ENV["QA_IGNORE_RUNTIME_DATA"], default: false)
|
||||
end
|
||||
|
||||
# Create unique test users for each test
|
||||
#
|
||||
# @return [Boolean]
|
||||
def create_unique_test_users?
|
||||
enabled?(ENV["QA_CREATE_UNIQUE_TEST_USERS"], default: true)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# GitLab host the tests are running against
|
||||
|
|
|
|||
|
|
@ -156,6 +156,8 @@ module QA
|
|||
#
|
||||
# @return [Boolean]
|
||||
def create_unique_test_user?
|
||||
return false unless Env.create_unique_test_users?
|
||||
|
||||
!Env.running_on_live_env? && !Env.personal_access_tokens_disabled? && admin_api_client
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -11,6 +11,10 @@ module QA
|
|||
end
|
||||
|
||||
it 'user unregisters a runner with authentication token',
|
||||
quarantine: {
|
||||
issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/513860',
|
||||
type: :stale
|
||||
},
|
||||
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/510652' do
|
||||
Flow::Login.sign_in
|
||||
|
||||
|
|
|
|||
|
|
@ -126,6 +126,8 @@ module QA
|
|||
|
||||
context "with invalid default admin user credentials" do
|
||||
before do
|
||||
mock_user_get(token: default_admin_token, code: 404, body: "error")
|
||||
|
||||
allow(Resource::PersonalAccessToken).to receive(:fabricate_via_browser_ui!).and_raise(
|
||||
Runtime::User::InvalidCredentialsError
|
||||
)
|
||||
|
|
@ -391,13 +393,41 @@ module QA
|
|||
describe "#test_user" do
|
||||
subject(:test_user) { described_class.test_user }
|
||||
|
||||
context "when running on live environment" do
|
||||
let(:username) { "username" }
|
||||
let(:password) { "password" }
|
||||
let(:username) { "username" }
|
||||
let(:password) { "password" }
|
||||
|
||||
before do
|
||||
stub_env("GITLAB_USERNAME", username)
|
||||
stub_env("GITLAB_PASSWORD", password)
|
||||
end
|
||||
|
||||
context "when unique test user creation is disabled" do
|
||||
before do
|
||||
stub_env("QA_CREATE_UNIQUE_TEST_USERS", false)
|
||||
end
|
||||
|
||||
context "with user variables set" do
|
||||
it "returns user with configured credentials" do
|
||||
expect(test_user.username).to eq(username)
|
||||
expect(test_user.password).to eq(password)
|
||||
end
|
||||
end
|
||||
|
||||
context "without user variables set" do
|
||||
let(:username) { nil }
|
||||
let(:password) { nil }
|
||||
|
||||
it "raises error" do
|
||||
expect { test_user }.to raise_error <<~ERR
|
||||
Missing global test user credentials,
|
||||
please set 'GITLAB_USERNAME' and 'GITLAB_PASSWORD' environment variables
|
||||
ERR
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context "when running on live environment" do
|
||||
before do
|
||||
stub_env("GITLAB_USERNAME", username)
|
||||
stub_env("GITLAB_PASSWORD", password)
|
||||
stub_env("GITLAB_QA_ACCESS_TOKEN", nil)
|
||||
|
||||
allow(Runtime::Env).to receive(:running_on_dot_com?).and_return(true)
|
||||
|
|
|
|||
|
|
@ -982,35 +982,43 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 1,
|
||||
richText:
|
||||
'var tests = Object.keys(window.__karma__.files).filter(function (file) {',
|
||||
highlights: [4, 9],
|
||||
text: 'var tests = Object.keys(window.__karma__.files).filter(function (file) {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 2,
|
||||
richText: ' return (/_spec\\.js$/.\u003cb\u003etest\u003c/b\u003e(file));',
|
||||
highlights: [4, 9],
|
||||
text: ' return (/_spec\\.js$/.test(file));',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 3,
|
||||
highlights: [4, 9],
|
||||
text: ' return (/_spec\\.js$/.test(file));',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{ lineNumber: 3, richText: '});', text: '});', __typename: 'SearchBlobLine' },
|
||||
],
|
||||
matchCountInChunk: 1,
|
||||
__typename: 'SearchBlobChunk',
|
||||
},
|
||||
{
|
||||
lines: [
|
||||
{ lineNumber: 11, richText: '', text: '', __typename: 'SearchBlobLine' },
|
||||
{
|
||||
lineNumber: 11,
|
||||
highlights: [4, 9],
|
||||
text: ' return (/_spec\\.js$/.test(file));',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 12,
|
||||
richText:
|
||||
' // start \u003cb\u003etest\u003c/b\u003e run, once Require.js is done',
|
||||
highlights: [4, 9],
|
||||
text: ' // start test run, once Require.js is done',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 13,
|
||||
richText: ' callback: window.__karma__.start',
|
||||
highlights: [4, 9],
|
||||
text: ' callback: window.__karma__.start',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1022,6 +1030,7 @@ export const mockGetBlobSearchQuery = {
|
|||
fileUrl: 'http://127.0.0.1:3000/flightjs/Flight/-/blob/master/test/test-main.js',
|
||||
matchCount: 3,
|
||||
matchCountTotal: 3,
|
||||
language: 'Javascript',
|
||||
path: 'test/test-main.js',
|
||||
projectPath: 'flightjs/Flight',
|
||||
__typename: 'SearchBlobFileType',
|
||||
|
|
@ -1033,20 +1042,19 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 5,
|
||||
richText: ' var Component = (function () {',
|
||||
highlights: [4, 9],
|
||||
text: ' var Component = (function () {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 6,
|
||||
richText:
|
||||
' return defineComponent(function fn\u003cb\u003eTest\u003c/b\u003e() {',
|
||||
highlights: [4, 9],
|
||||
text: ' return defineComponent(function fnTest() {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 7,
|
||||
richText: ' });',
|
||||
highlights: null,
|
||||
text: ' });',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1058,18 +1066,22 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 16,
|
||||
richText:
|
||||
' it(\'should call the "before" function before the base function and return the base function\', function () {',
|
||||
highlights: [4, 9],
|
||||
text: ' it(\'should call the "before" function before the base function and return the base function\', function () {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 17,
|
||||
richText: ' var \u003cb\u003etest\u003c/b\u003e1 = \u0026quot;\u0026quot;;',
|
||||
highlights: [4, 9],
|
||||
text: ' var test1 = "";',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{ lineNumber: 18, richText: '', text: '', __typename: 'SearchBlobLine' },
|
||||
{
|
||||
lineNumber: 18,
|
||||
highlights: [4, 9],
|
||||
text: ' return (/_spec\\.js$/.test(file));',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
],
|
||||
matchCountInChunk: 1,
|
||||
__typename: 'SearchBlobChunk',
|
||||
|
|
@ -1078,20 +1090,19 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 19,
|
||||
richText: ' function base(arg) {',
|
||||
highlights: [4, 9],
|
||||
text: ' function base(arg) {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 20,
|
||||
richText:
|
||||
' \u003cb\u003etest\u003c/b\u003e1 += \u0026#39;Base: \u0026#39; + arg;',
|
||||
highlights: [4, 9],
|
||||
text: " test1 += 'Base: ' + arg;",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 21,
|
||||
richText: " return 'base';",
|
||||
highlights: null,
|
||||
text: " return 'base';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1103,20 +1114,19 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 24,
|
||||
richText: ' var advised = advice.before(base, function (arg) {',
|
||||
highlights: [4, 9],
|
||||
text: ' var advised = advice.before(base, function (arg) {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 25,
|
||||
richText:
|
||||
' \u003cb\u003etest\u003c/b\u003e1 += \u0026quot;Before: \u0026quot; + arg + \u0026#39;, \u0026#39;;',
|
||||
highlights: [4, 9],
|
||||
text: ' test1 += "Before: " + arg + \', \';',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 26,
|
||||
richText: " return 'before';",
|
||||
highlights: null,
|
||||
text: " return 'before';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1128,20 +1138,19 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 29,
|
||||
richText: " expect(advised('Dan')).toBe('base');",
|
||||
highlights: null,
|
||||
text: " expect(advised('Dan')).toBe('base');",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 30,
|
||||
richText:
|
||||
' expect(\u003cb\u003etest\u003c/b\u003e1).toBe(\u0026#39;Before: Dan, Base: Dan\u0026#39;);',
|
||||
highlights: [4, 9],
|
||||
text: " expect(test1).toBe('Before: Dan, Base: Dan');",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 31,
|
||||
richText: ' });',
|
||||
highlights: null,
|
||||
text: ' });',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1153,18 +1162,22 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 33,
|
||||
richText:
|
||||
' it(\'should call the "after" function after the base function, but return the base function\', function () {',
|
||||
highlights: [4, 9],
|
||||
text: ' it(\'should call the "after" function after the base function, but return the base function\', function () {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 34,
|
||||
richText: ' var \u003cb\u003etest\u003c/b\u003e1 = \u0026quot;\u0026quot;;',
|
||||
highlights: [4, 9],
|
||||
text: ' var test1 = "";',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{ lineNumber: 35, richText: '', text: '', __typename: 'SearchBlobLine' },
|
||||
{
|
||||
lineNumber: 35,
|
||||
highlights: [4, 9],
|
||||
text: ' return (/_spec\\.js$/.test(file));',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
],
|
||||
matchCountInChunk: 1,
|
||||
__typename: 'SearchBlobChunk',
|
||||
|
|
@ -1173,20 +1186,19 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 36,
|
||||
richText: ' function base(arg) {',
|
||||
highlights: null,
|
||||
text: ' function base(arg) {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 37,
|
||||
richText:
|
||||
' \u003cb\u003etest\u003c/b\u003e1 += \u0026#39;Base: \u0026#39; + arg;',
|
||||
highlights: [4, 9],
|
||||
text: " test1 += 'Base: ' + arg;",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 38,
|
||||
richText: " return 'base';",
|
||||
highlights: null,
|
||||
text: " return 'base';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1198,20 +1210,19 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 41,
|
||||
richText: ' var advised = advice.after(base, function (arg) {',
|
||||
highlights: [4, 9],
|
||||
text: ' var advised = advice.after(base, function (arg) {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 42,
|
||||
richText:
|
||||
' \u003cb\u003etest\u003c/b\u003e1 += \u0026quot;, After: \u0026quot; + arg;',
|
||||
highlights: [4, 9],
|
||||
text: ' test1 += ", After: " + arg;',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 43,
|
||||
richText: " return 'after';",
|
||||
highlights: null,
|
||||
text: " return 'after';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1223,20 +1234,19 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 46,
|
||||
richText: " expect(advised('Dan')).toBe('base');",
|
||||
highlights: [4, 9],
|
||||
text: " expect(advised('Dan')).toBe('base');",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 47,
|
||||
richText:
|
||||
' expect(\u003cb\u003etest\u003c/b\u003e1).toBe(\u0026#39;Base: Dan, After: Dan\u0026#39;);',
|
||||
highlights: [4, 9],
|
||||
text: " expect(test1).toBe('Base: Dan, After: Dan');",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 48,
|
||||
richText: ' });',
|
||||
highlights: null,
|
||||
text: ' });',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1248,18 +1258,22 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 50,
|
||||
richText:
|
||||
' it(\'should wrap the the first "around" argument with the second argument\', function () {',
|
||||
highlights: [4, 9],
|
||||
text: ' it(\'should wrap the the first "around" argument with the second argument\', function () {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 51,
|
||||
richText: ' var \u003cb\u003etest\u003c/b\u003e1 = \u0026quot;\u0026quot;;',
|
||||
highlights: null,
|
||||
text: ' var test1 = "";',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{ lineNumber: 52, richText: '', text: '', __typename: 'SearchBlobLine' },
|
||||
{
|
||||
lineNumber: 52,
|
||||
highlights: [4, 9],
|
||||
text: ' return (/_spec\\.js$/.test(file));',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
],
|
||||
matchCountInChunk: 1,
|
||||
__typename: 'SearchBlobChunk',
|
||||
|
|
@ -1268,20 +1282,19 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 53,
|
||||
richText: ' function base(arg) {',
|
||||
highlights: [4, 9],
|
||||
text: ' function base(arg) {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 54,
|
||||
richText:
|
||||
' \u003cb\u003etest\u003c/b\u003e1 += \u0026#39;Base: \u0026#39; + arg;',
|
||||
highlights: [4, 9],
|
||||
text: " test1 += 'Base: ' + arg;",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 55,
|
||||
richText: " return 'base';",
|
||||
highlights: null,
|
||||
text: " return 'base';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1293,31 +1306,31 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 58,
|
||||
richText: ' var advised = advice.around(base, function (orig, arg) {',
|
||||
highlights: [4, 9],
|
||||
text: ' var advised = advice.around(base, function (orig, arg) {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 59,
|
||||
richText: ' \u003cb\u003etest\u003c/b\u003e1 += \u0026#39;|\u0026#39;;',
|
||||
highlights: [4, 9],
|
||||
text: " test1 += '|';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 60,
|
||||
richText: ' orig(arg);',
|
||||
highlights: null,
|
||||
text: ' orig(arg);',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 61,
|
||||
richText: ' \u003cb\u003etest\u003c/b\u003e1 += \u0026#39;|\u0026#39;;',
|
||||
highlights: [4, 9],
|
||||
text: " test1 += '|';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 62,
|
||||
richText: " return 'around';",
|
||||
highlights: null,
|
||||
text: " return 'around';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1329,20 +1342,19 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 65,
|
||||
richText: " expect(advised('Dan')).toBe('around');",
|
||||
highlights: [4, 9],
|
||||
text: " expect(advised('Dan')).toBe('around');",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 66,
|
||||
richText:
|
||||
' expect(\u003cb\u003etest\u003c/b\u003e1).toBe(\u0026#39;|Base: Dan|\u0026#39;);',
|
||||
highlights: [4, 9],
|
||||
text: " expect(test1).toBe('|Base: Dan|');",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 67,
|
||||
richText: ' });',
|
||||
highlights: null,
|
||||
text: ' });',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1354,44 +1366,43 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 71,
|
||||
richText: ' var subject = {',
|
||||
highlights: [4, 9],
|
||||
text: ' var subject = {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 72,
|
||||
richText: ' \u003cb\u003etest\u003c/b\u003ea: \u0026#39;\u0026#39;,',
|
||||
highlights: [4, 9],
|
||||
text: " testa: '',",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 73,
|
||||
richText: ' \u003cb\u003etest\u003c/b\u003eb: \u0026#39;\u0026#39;,',
|
||||
highlights: [4, 9],
|
||||
text: " testb: '',",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 74,
|
||||
richText: ' \u003cb\u003etest\u003c/b\u003ec: \u0026#39;\u0026#39;,',
|
||||
highlights: [4, 9],
|
||||
text: " testc: '',",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 75,
|
||||
richText: ' a: function () {',
|
||||
highlights: null,
|
||||
text: ' a: function () {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 76,
|
||||
richText:
|
||||
' this.\u003cb\u003etest\u003c/b\u003ea += \u0026#39;A!\u0026#39;;',
|
||||
highlights: [4, 9],
|
||||
text: " this.testa += 'A!';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 77,
|
||||
richText: ' },',
|
||||
highlights: null,
|
||||
text: ' },',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1403,20 +1414,19 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 78,
|
||||
richText: ' b: function () {',
|
||||
highlights: [4, 9],
|
||||
text: ' b: function () {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 79,
|
||||
richText:
|
||||
' this.\u003cb\u003etest\u003c/b\u003eb += \u0026#39;B!\u0026#39;;',
|
||||
highlights: [4, 9],
|
||||
text: " this.testb += 'B!';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 80,
|
||||
richText: ' },',
|
||||
highlights: null,
|
||||
text: ' },',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1428,20 +1438,19 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 81,
|
||||
richText: ' c: function () {',
|
||||
highlights: null,
|
||||
text: ' c: function () {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 82,
|
||||
richText:
|
||||
' this.\u003cb\u003etest\u003c/b\u003ec += \u0026#39;C!\u0026#39;;',
|
||||
highlights: [4, 9],
|
||||
text: " this.testc += 'C!';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 83,
|
||||
richText: ' }',
|
||||
highlights: null,
|
||||
text: ' }',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1453,20 +1462,19 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 88,
|
||||
richText: " subject.before('a', function () {",
|
||||
highlights: null,
|
||||
text: " subject.before('a', function () {",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 89,
|
||||
richText:
|
||||
' this.\u003cb\u003etest\u003c/b\u003ea += \u0026#39;BEFORE!\u0026#39;;',
|
||||
highlights: [4, 9],
|
||||
text: " this.testa += 'BEFORE!';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 90,
|
||||
richText: ' });',
|
||||
highlights: null,
|
||||
text: ' });',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1478,20 +1486,19 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 92,
|
||||
richText: " subject.after('b', function () {",
|
||||
highlights: null,
|
||||
text: " subject.after('b', function () {",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 93,
|
||||
richText:
|
||||
' this.\u003cb\u003etest\u003c/b\u003eb += \u0026#39;AFTER!\u0026#39;;',
|
||||
highlights: [4, 9],
|
||||
text: " this.testb += 'AFTER!';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 94,
|
||||
richText: ' });',
|
||||
highlights: null,
|
||||
text: ' });',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1503,33 +1510,31 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 96,
|
||||
richText: " subject.around('c', function (orig) {",
|
||||
highlights: [4, 9],
|
||||
text: " subject.around('c', function (orig) {",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 97,
|
||||
richText:
|
||||
' this.\u003cb\u003etest\u003c/b\u003ec += \u0026#39;|\u0026#39;;',
|
||||
highlights: [4, 9],
|
||||
text: " this.testc += '|';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 98,
|
||||
richText: ' orig.call(subject);',
|
||||
highlights: null,
|
||||
text: ' orig.call(subject);',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 99,
|
||||
richText:
|
||||
' this.\u003cb\u003etest\u003c/b\u003ec += \u0026#39;|\u0026#39;;',
|
||||
highlights: [4, 9],
|
||||
text: " this.testc += '|';",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 100,
|
||||
richText: ' });',
|
||||
highlights: [4, 9],
|
||||
text: ' });',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1541,18 +1546,22 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 102,
|
||||
richText: ' subject.a();',
|
||||
highlights: [4, 9],
|
||||
text: ' subject.a();',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 103,
|
||||
richText:
|
||||
' expect(subject.\u003cb\u003etest\u003c/b\u003ea).toBe(\u0026#39;BEFORE!A!\u0026#39;);',
|
||||
highlights: [4, 9],
|
||||
text: " expect(subject.testa).toBe('BEFORE!A!');",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{ lineNumber: 104, richText: '', text: '', __typename: 'SearchBlobLine' },
|
||||
{
|
||||
lineNumber: 104,
|
||||
highlights: [4, 9],
|
||||
text: ' return (/_spec\\.js$/.test(file));',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
],
|
||||
matchCountInChunk: 1,
|
||||
__typename: 'SearchBlobChunk',
|
||||
|
|
@ -1561,18 +1570,22 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 105,
|
||||
richText: ' subject.b();',
|
||||
highlights: [4, 9],
|
||||
text: ' subject.b();',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 106,
|
||||
richText:
|
||||
' expect(subject.\u003cb\u003etest\u003c/b\u003eb).toBe(\u0026#39;B!AFTER!\u0026#39;);',
|
||||
highlights: [4, 9],
|
||||
text: " expect(subject.testb).toBe('B!AFTER!');",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{ lineNumber: 107, richText: '', text: '', __typename: 'SearchBlobLine' },
|
||||
{
|
||||
lineNumber: 107,
|
||||
highlights: [4, 9],
|
||||
text: ' return (/_spec\\.js$/.test(file));',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
],
|
||||
matchCountInChunk: 1,
|
||||
__typename: 'SearchBlobChunk',
|
||||
|
|
@ -1581,20 +1594,19 @@ export const mockGetBlobSearchQuery = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 108,
|
||||
richText: ' subject.c();',
|
||||
highlights: [4, 9],
|
||||
text: ' subject.c();',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 109,
|
||||
richText:
|
||||
' expect(subject.\u003cb\u003etest\u003c/b\u003ec).toBe(\u0026#39;|C!|\u0026#39;);',
|
||||
highlights: [4, 9],
|
||||
text: " expect(subject.testc).toBe('|C!|');",
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 110,
|
||||
richText: ' });',
|
||||
highlights: [4, 9],
|
||||
text: ' });',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1606,6 +1618,7 @@ export const mockGetBlobSearchQuery = {
|
|||
fileUrl: 'http://127.0.0.1:3000/flightjs/Flight/-/blob/master/test/spec/fn_spec.js',
|
||||
matchCount: 27,
|
||||
matchCountTotal: 27,
|
||||
language: 'Javascript',
|
||||
path: 'test/spec/fn_spec.js',
|
||||
projectPath: 'flightjs/Flight',
|
||||
__typename: 'SearchBlobFileType',
|
||||
|
|
@ -1616,6 +1629,7 @@ export const mockGetBlobSearchQuery = {
|
|||
fileUrl: 'http://127.0.0.1:3000/flightjs/Flight/-/blob/master/test/spec/utils_spec.js',
|
||||
matchCount: 1,
|
||||
matchCountTotal: 1,
|
||||
language: 'Javascript',
|
||||
path: 'test/spec/test_utils_spec.js',
|
||||
projectPath: 'flightjs/Flight',
|
||||
__typename: 'SearchBlobFileType',
|
||||
|
|
@ -1636,17 +1650,22 @@ export const mockDataForBlobBody = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 1,
|
||||
richText: '',
|
||||
highlights: [4, 9],
|
||||
text: '',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 2,
|
||||
richText: 'test1',
|
||||
highlights: [4, 9],
|
||||
text: 'test1',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{ lineNumber: 3, richText: '', text: '', __typename: 'SearchBlobLine' },
|
||||
{
|
||||
lineNumber: 3,
|
||||
highlights: [4, 9],
|
||||
text: ' return (/_spec\\.js$/.test(file));',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
],
|
||||
matchCountInChunk: 1,
|
||||
__typename: 'SearchBlobChunk',
|
||||
|
|
@ -1655,19 +1674,19 @@ export const mockDataForBlobBody = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 11,
|
||||
richText: '',
|
||||
highlights: [4, 9],
|
||||
text: '',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 12,
|
||||
richText: 'test2',
|
||||
highlights: [4, 9],
|
||||
text: 'test2',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 13,
|
||||
richText: '',
|
||||
highlights: [4, 9],
|
||||
text: '',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1679,19 +1698,19 @@ export const mockDataForBlobBody = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 11,
|
||||
richText: '',
|
||||
highlights: [4, 9],
|
||||
text: '',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 12,
|
||||
richText: 'test3',
|
||||
highlights: [4, 9],
|
||||
text: 'test3',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 13,
|
||||
richText: '',
|
||||
highlights: [4, 9],
|
||||
text: '',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1703,19 +1722,19 @@ export const mockDataForBlobBody = {
|
|||
lines: [
|
||||
{
|
||||
lineNumber: 11,
|
||||
richText: '',
|
||||
highlights: [4, 9],
|
||||
text: '',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 12,
|
||||
richText: 'test4',
|
||||
highlights: [4, 9],
|
||||
text: 'test4',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: 13,
|
||||
richText: '',
|
||||
highlights: [4, 9],
|
||||
text: '',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
|
|
@ -1729,9 +1748,47 @@ export const mockDataForBlobBody = {
|
|||
matchCountTotal: 2,
|
||||
path: 'file/test.js',
|
||||
projectPath: 'Testjs/Test',
|
||||
language: 'Javascript',
|
||||
__typename: 'SearchBlobFileType',
|
||||
};
|
||||
|
||||
export const mockDataForBlobChunk = {
|
||||
chunk: {
|
||||
lines: [
|
||||
{
|
||||
lineNumber: '1',
|
||||
highlights: [[6, 10]],
|
||||
text: 'const test = 1;',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: '2',
|
||||
highlights: [[9, 13]],
|
||||
text: 'function test() {',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: '3',
|
||||
highlights: [[13, 17]],
|
||||
text: 'console.log("test")',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: '4',
|
||||
highlights: [[]],
|
||||
text: '}',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
],
|
||||
matchCountInChunk: 1,
|
||||
__typename: 'SearchBlobChunk',
|
||||
},
|
||||
blameLink: 'https://gitlab.com/blame/test.js',
|
||||
fileUrl: 'https://gitlab.com/file/test.js',
|
||||
position: 1,
|
||||
language: 'Javascript',
|
||||
};
|
||||
|
||||
export const mockSourceBranches = [
|
||||
{
|
||||
text: 'Master Item',
|
||||
|
|
|
|||
|
|
@ -1,11 +1,13 @@
|
|||
import { GlIcon, GlLink } from '@gitlab/ui';
|
||||
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import BlobChunks from '~/search/results/components/blob_chunks.vue';
|
||||
import { useMockInternalEventsTracking } from 'helpers/tracking_internal_events_helper';
|
||||
import {
|
||||
EVENT_CLICK_BLOB_RESULT_BLAME_LINE,
|
||||
EVENT_CLICK_BLOB_RESULT_LINE,
|
||||
} from '~/search/results/tracking';
|
||||
import { mockDataForBlobChunk } from '../../mock_data';
|
||||
|
||||
describe('BlobChunks', () => {
|
||||
const { bindInternalEventDocument } = useMockInternalEventsTracking();
|
||||
|
|
@ -26,8 +28,10 @@ describe('BlobChunks', () => {
|
|||
const findGlLink = () => wrapper.findAllComponents(GlLink);
|
||||
const findLine = () => wrapper.findAllByTestId('search-blob-line');
|
||||
const findLineNumbers = () => wrapper.findAllByTestId('search-blob-line-numbers');
|
||||
const findLineCode = () => wrapper.findAllByTestId('search-blob-line-code');
|
||||
const findRootElement = () => wrapper.find('#search-blob-content');
|
||||
const findNonHighlightedLineCode = () =>
|
||||
wrapper.findAllByTestId('search-blob-line-code-non-highlighted');
|
||||
const findHighlightedLineCode = () =>
|
||||
wrapper.findAllByTestId('search-blob-line-code-highlighted');
|
||||
const findBlameLink = () =>
|
||||
findGlLink().wrappers.filter(
|
||||
(w) => w.attributes('data-testid') === 'search-blob-line-blame-link',
|
||||
|
|
@ -35,48 +39,21 @@ describe('BlobChunks', () => {
|
|||
const findLineLink = () =>
|
||||
findGlLink().wrappers.filter((w) => w.attributes('data-testid') === 'search-blob-line-link');
|
||||
|
||||
describe('component basics', () => {
|
||||
describe('when initial render', () => {
|
||||
beforeEach(() => {
|
||||
createComponent({
|
||||
chunk: {
|
||||
lines: [
|
||||
{
|
||||
lineNumber: '1',
|
||||
richText: '',
|
||||
text: '',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{
|
||||
lineNumber: '2',
|
||||
richText: '<b>test1</b>',
|
||||
text: 'test1',
|
||||
__typename: 'SearchBlobLine',
|
||||
},
|
||||
{ lineNumber: 3, richText: '', text: '', __typename: 'SearchBlobLine' },
|
||||
],
|
||||
matchCountInChunk: 1,
|
||||
__typename: 'SearchBlobChunk',
|
||||
},
|
||||
blameLink: 'https://gitlab.com/blame/test.js',
|
||||
fileUrl: 'https://gitlab.com/file/test.js',
|
||||
position: 1,
|
||||
});
|
||||
createComponent(mockDataForBlobChunk);
|
||||
});
|
||||
|
||||
it(`renders default state`, () => {
|
||||
expect(findLine()).toHaveLength(3);
|
||||
expect(findLineNumbers()).toHaveLength(3);
|
||||
expect(findLineCode()).toHaveLength(3);
|
||||
expect(findGlLink()).toHaveLength(6);
|
||||
expect(findGlIcon()).toHaveLength(3);
|
||||
it('renders default state', () => {
|
||||
expect(findLine()).toHaveLength(4);
|
||||
expect(findLineNumbers()).toHaveLength(4);
|
||||
expect(findNonHighlightedLineCode()).toHaveLength(4);
|
||||
expect(findHighlightedLineCode()).toHaveLength(0);
|
||||
expect(findGlLink()).toHaveLength(8);
|
||||
expect(findGlIcon()).toHaveLength(4);
|
||||
});
|
||||
|
||||
it(`renders proper colors`, () => {
|
||||
expect(findRootElement().classes('white')).toBe(true);
|
||||
expect(findLineCode().at(1).find('b').classes('hll')).toBe(true);
|
||||
});
|
||||
|
||||
it(`renders links correctly`, () => {
|
||||
it('renders links correctly', () => {
|
||||
expect(findGlLink().at(0).attributes('href')).toBe('https://gitlab.com/blame/test.js#L1');
|
||||
expect(findGlLink().at(0).attributes('title')).toBe('View blame');
|
||||
expect(findGlLink().at(0).findComponent(GlIcon).exists()).toBe(true);
|
||||
|
|
@ -98,4 +75,16 @@ describe('BlobChunks', () => {
|
|||
expect(trackEventSpy).toHaveBeenCalledWith(event, { property: '1', value: 1 }, undefined);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when frontend highlighting', () => {
|
||||
beforeEach(async () => {
|
||||
createComponent(mockDataForBlobChunk);
|
||||
await waitForPromises();
|
||||
});
|
||||
|
||||
it('renders proper colors', () => {
|
||||
expect(findHighlightedLineCode().exists()).toBe(true);
|
||||
expect(findHighlightedLineCode().at(2).text()).toBe('console.log("test")');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,135 @@
|
|||
import { highlight } from '~/vue_shared/components/source_viewer/workers/highlight_utils';
|
||||
import {
|
||||
HIGHLIGHT_MARK,
|
||||
HIGHLIGHT_HTML_START,
|
||||
HIGHLIGHT_HTML_END,
|
||||
} from '~/search/results/constants';
|
||||
|
||||
import {
|
||||
initLineHighlight,
|
||||
cleanLineAndMark,
|
||||
isUnsupportedLanguage,
|
||||
highlightSearchTerm,
|
||||
markSearchTerm,
|
||||
} from '~/search/results/utils';
|
||||
|
||||
jest.mock('~/vue_shared/components/source_viewer/workers/highlight_utils', () => ({
|
||||
highlight: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('Global Search Results Utils', () => {
|
||||
beforeEach(() => {
|
||||
highlight.mockResolvedValue([{ highlightedContent: 'const highlighted = true;' }]);
|
||||
});
|
||||
|
||||
describe('isUnsupportedLanguage', () => {
|
||||
it.each([
|
||||
['javascript', false],
|
||||
['unknownLanguage', true],
|
||||
])('correctly identifies if %s language is unsupported', (language, expected) => {
|
||||
expect(isUnsupportedLanguage(language)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('initLineHighlight', () => {
|
||||
it('returns original line for unsupported language', async () => {
|
||||
highlight.mockClear();
|
||||
|
||||
const result = await initLineHighlight({
|
||||
line: { text: 'const test = true;', highlights: [[6, 9]] },
|
||||
language: 'txt',
|
||||
fileUrl: 'test.txt',
|
||||
});
|
||||
|
||||
expect(result).toBe('const test = true;');
|
||||
expect(highlight).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('handles gleam files correctly', async () => {
|
||||
await initLineHighlight({
|
||||
line: { text: 'const test = true;', highlights: [] },
|
||||
language: 'javascript',
|
||||
fileUrl: 'test.gleam',
|
||||
});
|
||||
|
||||
expect(highlight).toHaveBeenCalledWith(null, 'const test = true;', 'gleam');
|
||||
});
|
||||
|
||||
describe('when initLineHighlight returns highlight', () => {
|
||||
beforeEach(() => {
|
||||
highlight.mockImplementation((_, input) =>
|
||||
Promise.resolve([{ highlightedContent: input }]),
|
||||
);
|
||||
});
|
||||
|
||||
it('calls highlight with correct parameters', async () => {
|
||||
const result = await initLineHighlight({
|
||||
line: { text: 'const test = true;', highlights: [[6, 10]] },
|
||||
language: 'javascript',
|
||||
fileUrl: 'test.js',
|
||||
});
|
||||
|
||||
expect(result).toBe('const <b class="hll">test</b> = true;');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('highlightSearchTerm', () => {
|
||||
it('returns empty string for empty input', () => {
|
||||
expect(highlightSearchTerm('')).toBe('');
|
||||
});
|
||||
|
||||
it('replaces highlight marks with HTML tags', () => {
|
||||
const input = `console${HIGHLIGHT_MARK}log${HIGHLIGHT_MARK}(true);`;
|
||||
const expected = `console${HIGHLIGHT_HTML_START}log${HIGHLIGHT_HTML_END}(true);`;
|
||||
|
||||
expect(highlightSearchTerm(input)).toBe(expected);
|
||||
});
|
||||
|
||||
it('handles multiple highlights', () => {
|
||||
const input = `${HIGHLIGHT_MARK}const${HIGHLIGHT_MARK} test = ${HIGHLIGHT_MARK}true${HIGHLIGHT_MARK};`;
|
||||
const expected = `${HIGHLIGHT_HTML_START}const${HIGHLIGHT_HTML_END} test = ${HIGHLIGHT_HTML_START}true${HIGHLIGHT_HTML_END};`;
|
||||
|
||||
expect(highlightSearchTerm(input)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('markSearchTerm', () => {
|
||||
it('adds highlight marks at correct positions', () => {
|
||||
const text = 'foobar test foobar test';
|
||||
const highlights = [
|
||||
[7, 11],
|
||||
[19, 23],
|
||||
];
|
||||
|
||||
const result = cleanLineAndMark({ text, highlights });
|
||||
const expected = `foobar ${HIGHLIGHT_MARK}test${HIGHLIGHT_MARK} foobar ${HIGHLIGHT_MARK}test${HIGHLIGHT_MARK}`;
|
||||
|
||||
expect([...result].map((c) => c.charCodeAt(0))).toEqual(
|
||||
[...expected].map((c) => c.charCodeAt(0)),
|
||||
);
|
||||
});
|
||||
|
||||
it('adds single highlight mark at correct position', () => {
|
||||
const text = 'const testValue = true;\n';
|
||||
const highlights = [[6, 15]];
|
||||
|
||||
const result = cleanLineAndMark({ text, highlights });
|
||||
const expected = `const ${HIGHLIGHT_MARK}testValue${HIGHLIGHT_MARK} = true;`;
|
||||
|
||||
expect([...result].map((c) => c.charCodeAt(0))).toEqual(
|
||||
[...expected].map((c) => c.charCodeAt(0)),
|
||||
);
|
||||
});
|
||||
|
||||
it('returns empty string for empty input', () => {
|
||||
expect(markSearchTerm()).toBe('');
|
||||
});
|
||||
|
||||
it('handles empty highlights array', () => {
|
||||
const str = 'const test = true;';
|
||||
|
||||
expect(markSearchTerm(str, [])).toBe(str);
|
||||
});
|
||||
});
|
||||
});
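The expectations above pin down a mark-then-replace pipeline: `cleanLineAndMark`/`markSearchTerm` wrap each `[start, end)` highlight range in a sentinel character, and `highlightSearchTerm` later swaps those sentinels for HTML tags. A minimal sketch consistent with these specs (the sentinel value and function bodies are assumptions; the real implementations live in `~/search/results/utils`):

```javascript
// Sketch only -- not the implementation under test. Assumes HIGHLIGHT_MARK is
// a single sentinel character and each highlight is a [start, end) index pair.
const HIGHLIGHT_MARK = '\u0091';
const HIGHLIGHT_HTML_START = '<b class="hll">';
const HIGHLIGHT_HTML_END = '</b>';

// Wrap every highlighted range in sentinel marks, working right to left so
// earlier offsets stay valid.
function markSearchTerm(text = '', highlights = []) {
  if (!text) return '';
  let marked = text;
  [...highlights]
    .sort((a, b) => b[0] - a[0])
    .forEach(([start, end]) => {
      marked = `${marked.slice(0, start)}${HIGHLIGHT_MARK}${marked.slice(start, end)}${HIGHLIGHT_MARK}${marked.slice(end)}`;
    });
  return marked;
}

// Replace sentinel pairs with real HTML once the surrounding text is safe to
// render (for example, after syntax highlighting).
function highlightSearchTerm(text = '') {
  if (!text) return '';
  let isOpen = true;
  return text.replace(new RegExp(HIGHLIGHT_MARK, 'g'), () => {
    const tag = isOpen ? HIGHLIGHT_HTML_START : HIGHLIGHT_HTML_END;
    isOpen = !isOpen;
    return tag;
  });
}
```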
|
||||
|
|
@@ -0,0 +1,199 @@
|
|||
import Vue, { nextTick } from 'vue';
|
||||
import VueApollo from 'vue-apollo';
|
||||
import { GlModal, GlFormInput, GlFormFields, GlFormDate } from '@gitlab/ui';
|
||||
import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import { TODO_STATE_PENDING } from '~/todos/constants';
|
||||
import createMockApollo from 'helpers/mock_apollo_helper';
|
||||
import snoozeTodoMutation from '~/todos/components/mutations/snooze_todo.mutation.graphql';
|
||||
import { useFakeDate } from 'helpers/fake_date';
|
||||
import SnoozeTodoModal from '~/todos/components/snooze_todo_modal.vue';
|
||||
import { stubComponent } from 'helpers/stub_component';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { mockTracking, unmockTracking } from 'jest/__helpers__/tracking_helper';
|
||||
|
||||
Vue.use(VueApollo);
|
||||
|
||||
describe('SnoozeTodoModal', () => {
|
||||
let wrapper;
|
||||
const mockTodo = {
|
||||
id: 'gid://gitlab/Todo/1',
|
||||
state: TODO_STATE_PENDING,
|
||||
};
|
||||
const mockCurrentTime = new Date('2024-12-18T13:24:00');
|
||||
|
||||
useFakeDate(mockCurrentTime);
|
||||
|
||||
const snoozeTodoMutationSuccessHandler = jest.fn().mockResolvedValue({
|
||||
data: {
|
||||
todoSnooze: {
|
||||
todo: {
|
||||
...mockTodo,
|
||||
snoozedUntil: mockCurrentTime,
|
||||
},
|
||||
errors: [],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const findTimeInput = () => wrapper.findByTestId('time-input');
|
||||
const findDateInput = () => wrapper.findByTestId('date-input');
|
||||
const findDatetimeInPastError = () => wrapper.findByTestId('datetime-in-past-error');
|
||||
const findSnoozeErrorAlert = () => wrapper.findByTestId('snooze-error');
|
||||
|
||||
const setTime = (time) => {
|
||||
findTimeInput().findComponent(GlFormInput).vm.$emit('input', time);
|
||||
findTimeInput().findComponent(GlFormInput).vm.$emit('blur');
|
||||
return nextTick();
|
||||
};
|
||||
const setDate = (date) => {
|
||||
findDateInput().findComponent(GlFormInput).vm.$emit('change', date);
|
||||
findDateInput().findComponent(GlFormInput).vm.$emit('blur');
|
||||
return nextTick();
|
||||
};
|
||||
const submitForm = () => {
|
||||
wrapper.findComponent(GlFormFields).vm.$emit('submit');
|
||||
return nextTick();
|
||||
};
|
||||
|
||||
const createComponent = ({
|
||||
mountFn = shallowMountExtended,
|
||||
props = {},
|
||||
snoozeTodoMutationHandler = snoozeTodoMutationSuccessHandler,
|
||||
} = {}) => {
|
||||
const mockApollo = createMockApollo();
|
||||
|
||||
mockApollo.defaultClient.setRequestHandler(snoozeTodoMutation, snoozeTodoMutationHandler);
|
||||
|
||||
wrapper = mountFn(SnoozeTodoModal, {
|
||||
apolloProvider: mockApollo,
|
||||
propsData: {
|
||||
todo: mockTodo,
|
||||
...props,
|
||||
},
|
||||
stubs: {
|
||||
GlModal: stubComponent(GlModal, {
|
||||
template: '<div><slot /></div>',
|
||||
}),
|
||||
GlFormFields,
|
||||
GlFormDate,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
it('renders the time and date inputs', () => {
|
||||
createComponent();
|
||||
|
||||
expect(findTimeInput().exists()).toBe(true);
|
||||
expect(findDateInput().exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('the time input defaults to 9:00AM', () => {
|
||||
createComponent({ mountFn: mountExtended });
|
||||
|
||||
expect(findTimeInput().findComponent(GlFormInput).vm.$el.value).toBe('09:00');
|
||||
});
|
||||
|
||||
it('shows an error message if the selected date and time are in the past', async () => {
|
||||
createComponent();
|
||||
await setTime('13:24');
|
||||
await setDate('2024-12-18');
|
||||
|
||||
expect(findDatetimeInPastError().exists()).toBe(false);
|
||||
|
||||
await setTime('13:23');
|
||||
|
||||
expect(findDatetimeInPastError().exists()).toBe(true);
|
||||
expect(findDatetimeInPastError().text()).toBe(
|
||||
'The selected date and time cannot be in the past.',
|
||||
);
|
||||
});
|
||||
|
||||
describe('form validators', () => {
|
||||
beforeEach(() => {
|
||||
createComponent({ mountFn: mountExtended });
|
||||
});
|
||||
|
||||
it('shows an error message if no time is provided', async () => {
|
||||
expect(wrapper.findByText('The time is required.').exists()).toBe(false);
|
||||
|
||||
await setTime('');
|
||||
|
||||
expect(wrapper.findByText('The time is required.').exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('shows an error message if no date is provided', async () => {
|
||||
expect(wrapper.findByText('The date is required.').exists()).toBe(false);
|
||||
|
||||
await setDate('');
|
||||
|
||||
expect(wrapper.findByText('The date is required.').exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('shows an error message if the selected datetime is in the past', async () => {
|
||||
await setTime('15:00');
|
||||
await setDate('2024-12-01');
|
||||
|
||||
expect(wrapper.findByText("Snooze date can't be in the past.").exists()).toBe(true);
|
||||
|
||||
await setDate('2025-01-01');
|
||||
|
||||
expect(wrapper.findByText("Snooze date can't be in the past.").exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
it('triggers the snooze mutation and tracks an event when submitting the form', async () => {
|
||||
createComponent();
|
||||
const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
|
||||
const time = '15:00';
|
||||
const date = '2025-01-01';
|
||||
await setTime(time);
|
||||
await setDate(date);
|
||||
submitForm();
|
||||
|
||||
expect(snoozeTodoMutationSuccessHandler).toHaveBeenCalledWith({
|
||||
snoozeUntil: new Date(`${date}T${time}`),
|
||||
todoId: mockTodo.id,
|
||||
});
|
||||
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_todo_item_action', {
|
||||
label: 'snooze_until_a_specific_date_and_time',
|
||||
extra: {
|
||||
snooze_until: '2025-01-01T15:00:00.000Z',
|
||||
},
|
||||
});
|
||||
|
||||
unmockTracking();
|
||||
});
|
||||
|
||||
it('shows an error when the snooze mutation returns some errors', async () => {
|
||||
createComponent({
|
||||
snoozeTodoMutationHandler: jest.fn().mockResolvedValue({
|
||||
data: {
|
||||
todoSnooze: {
|
||||
todo: mockTodo,
|
||||
errors: ['Could not snooze todo-item.'],
|
||||
},
|
||||
},
|
||||
}),
|
||||
});
|
||||
await setTime('15:00');
|
||||
await setDate('2025-01-01');
|
||||
wrapper.findComponent(GlFormFields).vm.$emit('submit');
|
||||
await waitForPromises();
|
||||
|
||||
expect(findSnoozeErrorAlert().exists()).toBe(true);
|
||||
expect(findSnoozeErrorAlert().text()).toBe('Failed to snooze todo. Try again later.');
|
||||
});
|
||||
|
||||
it('shows an error when the snooze mutation fails', async () => {
|
||||
createComponent({
|
||||
snoozeTodoMutationHandler: jest.fn().mockRejectedValue(),
|
||||
});
|
||||
await setTime('15:00');
|
||||
await setDate('2025-01-01');
|
||||
wrapper.findComponent(GlFormFields).vm.$emit('submit');
|
||||
await waitForPromises();
|
||||
|
||||
expect(findSnoozeErrorAlert().exists()).toBe(true);
|
||||
expect(findSnoozeErrorAlert().text()).toBe('Failed to snooze todo. Try again later.');
|
||||
});
|
||||
});
|
||||
|
|
@ -11,6 +11,8 @@ import waitForPromises from 'helpers/wait_for_promises';
|
|||
import { useFakeDate } from 'helpers/fake_date';
|
||||
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
|
||||
import { mockTracking, unmockTracking } from 'jest/__helpers__/tracking_helper';
|
||||
import { stubComponent } from 'helpers/stub_component';
|
||||
import SnoozeTodoModal from '~/todos/components/snooze_todo_modal.vue';
|
||||
|
||||
Vue.use(VueApollo);
|
||||
|
||||
|
|
@ -22,6 +24,11 @@ describe('ToggleSnoozedStatus', () => {
|
|||
};
|
||||
const mockCurrentTime = new Date('2024-12-18T13:24:00');
|
||||
const mockToastShow = jest.fn();
|
||||
const SnoozeTodoModalStub = stubComponent(SnoozeTodoModal, {
|
||||
methods: {
|
||||
show: jest.fn(),
|
||||
},
|
||||
});
|
||||
|
||||
useFakeDate(mockCurrentTime);
|
||||
|
||||
|
|
@ -78,6 +85,9 @@ describe('ToggleSnoozedStatus', () => {
|
|||
show: mockToastShow,
|
||||
},
|
||||
},
|
||||
stubs: {
|
||||
SnoozeTodoModal: SnoozeTodoModalStub,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
|
|
@ -124,6 +134,13 @@ describe('ToggleSnoozedStatus', () => {
|
|||
],
|
||||
name: 'Snooze',
|
||||
},
|
||||
{
|
||||
items: [
|
||||
expect.objectContaining({
|
||||
text: 'Until a specific time and date',
|
||||
}),
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
|
|
@ -164,6 +181,16 @@ describe('ToggleSnoozedStatus', () => {
|
|||
},
|
||||
);
|
||||
|
||||
it('opens the custom snooze todo modal when clicking on the `Until a specific time and date` option', () => {
|
||||
createComponent({ props: { isSnoozed: false, isPending: true } });
|
||||
|
||||
expect(SnoozeTodoModalStub.methods.show).not.toHaveBeenCalled();
|
||||
|
||||
findSnoozeDropdown().props('items')[1].items[0].action();
|
||||
|
||||
expect(SnoozeTodoModalStub.methods.show).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('shows an error when the snooze mutation returns some errors', async () => {
|
||||
createComponent({
|
||||
props: { isSnoozed: false, isPending: true },
|
||||
|
|
|
|||
|
|
@ -69,6 +69,22 @@ describe('Tracking', () => {
|
|||
maxLocalStorageQueueSize: MAX_LOCAL_STORAGE_QUEUE_SIZE,
|
||||
});
|
||||
});
|
||||
|
||||
it('does not initialize tracking if not enabled', () => {
|
||||
jest.spyOn(Tracking, 'enabled').mockReturnValue(false);
|
||||
|
||||
initUserTracking();
|
||||
|
||||
expect(snowplowSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('dispatches SnowplowInitialized event after initializing', () => {
|
||||
const dispatchEventSpy = jest.spyOn(document, 'dispatchEvent');
|
||||
|
||||
initUserTracking();
|
||||
|
||||
expect(dispatchEventSpy).toHaveBeenCalledWith(new Event('SnowplowInitialized'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('initDefaultTrackers', () => {
|
||||
|
|
@ -170,5 +186,50 @@ describe('Tracking', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('does not initialize default trackers if not enabled', () => {
|
||||
jest.spyOn(Tracking, 'enabled').mockReturnValue(false);
|
||||
|
||||
initDefaultTrackers();
|
||||
|
||||
expect(snowplowSpy).not.toHaveBeenCalled();
|
||||
expect(bindDocumentSpy).not.toHaveBeenCalled();
|
||||
expect(trackLoadEventsSpy).not.toHaveBeenCalled();
|
||||
expect(enableFormTracking).not.toHaveBeenCalled();
|
||||
expect(setAnonymousUrlsSpy).not.toHaveBeenCalled();
|
||||
expect(bindInternalEventDocumentSpy).not.toHaveBeenCalled();
|
||||
expect(trackInternalLoadEventsSpy).not.toHaveBeenCalled();
|
||||
expect(initBrowserSDKSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('flushes pending events before other tracking methods', () => {
|
||||
const flushPendingEventsSpy = jest.spyOn(Tracking, 'flushPendingEvents').mockImplementation();
|
||||
|
||||
initDefaultTrackers();
|
||||
|
||||
expect(flushPendingEventsSpy.mock.invocationCallOrder[0]).toBeLessThan(
|
||||
bindDocumentSpy.mock.invocationCallOrder[0],
|
||||
);
|
||||
expect(flushPendingEventsSpy.mock.invocationCallOrder[0]).toBeLessThan(
|
||||
trackLoadEventsSpy.mock.invocationCallOrder[0],
|
||||
);
|
||||
expect(flushPendingEventsSpy.mock.invocationCallOrder[0]).toBeLessThan(
|
||||
bindInternalEventDocumentSpy.mock.invocationCallOrder[0],
|
||||
);
|
||||
expect(flushPendingEventsSpy.mock.invocationCallOrder[0]).toBeLessThan(
|
||||
trackInternalLoadEventsSpy.mock.invocationCallOrder[0],
|
||||
);
|
||||
expect(flushPendingEventsSpy.mock.invocationCallOrder[0]).toBeLessThan(
|
||||
initBrowserSDKSpy.mock.invocationCallOrder[0],
|
||||
);
|
||||
});
|
||||
|
||||
it('calls setAnonymousUrls before initializing trackers', () => {
|
||||
initDefaultTrackers();
|
||||
|
||||
expect(setAnonymousUrlsSpy.mock.invocationCallOrder[0]).toBeLessThan(
|
||||
snowplowSpy.mock.invocationCallOrder[0],
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@@ -8,10 +8,17 @@ RSpec.describe 'getting Work Item counts by state', feature_category: :portfolio
|
|||
|
||||
let_it_be(:current_user) { create(:user) }
|
||||
let_it_be(:group) { create(:group, :private) }
|
||||
let_it_be(:work_item_opened1) { create(:work_item, namespace: group) }
|
||||
let_it_be(:work_item_opened2) { create(:work_item, namespace: group, author: current_user) }
|
||||
let_it_be(:work_item_closed1) { create(:work_item, :closed, namespace: group) }
|
||||
let_it_be(:work_item_closed2) { create(:work_item, :closed, namespace: group) }
|
||||
let_it_be(:milestone) { create(:milestone, group: group) }
|
||||
let_it_be(:label) { create(:group_label, group: group) }
|
||||
let_it_be(:work_item_opened1) { create(:work_item, namespace: group, milestone_id: milestone.id, labels: [label]) }
|
||||
let_it_be(:work_item_opened2) { create(:work_item, :confidential, namespace: group, author: current_user) }
|
||||
let_it_be(:work_item_closed1) do
|
||||
create(:work_item, :closed, :confidential, namespace: group, milestone_id: milestone.id)
|
||||
end
|
||||
|
||||
let_it_be(:work_item_closed2) do
|
||||
create(:work_item, :epic, :closed, namespace: group, assignees: [current_user], labels: [label])
|
||||
end
|
||||
|
||||
let(:params) { {} }
|
||||
|
||||
|
|
@@ -56,6 +63,96 @@ RSpec.describe 'getting Work Item counts by state', feature_category: :portfolio
|
|||
end
|
||||
end
|
||||
|
||||
context 'when filtering by assignee usernames' do
|
||||
let(:params) { { 'assigneeUsernames' => [current_user.username] } }
|
||||
|
||||
it 'returns the correct counts for each state' do
|
||||
query_counts
|
||||
|
||||
expect(work_item_counts).to eq(
|
||||
'all' => 1,
|
||||
'opened' => 0,
|
||||
'closed' => 1
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when filtering by confidential' do
|
||||
let(:params) { { 'confidential' => true } }
|
||||
|
||||
it 'returns the correct counts for each state' do
|
||||
query_counts
|
||||
|
||||
expect(work_item_counts).to eq(
|
||||
'all' => 2,
|
||||
'opened' => 1,
|
||||
'closed' => 1
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when filtering by label name' do
|
||||
let(:params) { { 'labelName' => [label.name] } }
|
||||
|
||||
it 'returns the correct counts for each state' do
|
||||
query_counts
|
||||
|
||||
expect(work_item_counts).to eq(
|
||||
'all' => 2,
|
||||
'opened' => 1,
|
||||
'closed' => 1
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when filtering by milestone title' do
|
||||
let(:params) { { 'milestoneTitle' => [milestone.title] } }
|
||||
|
||||
it 'returns the correct counts for each state' do
|
||||
query_counts
|
||||
|
||||
expect(work_item_counts).to eq(
|
||||
'all' => 2,
|
||||
'opened' => 1,
|
||||
'closed' => 1
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when filtering by reaction emoji' do
|
||||
before_all do
|
||||
create(:award_emoji, :upvote, user: current_user, awardable: work_item_opened1)
|
||||
create(:award_emoji, :upvote, user: current_user, awardable: work_item_opened2)
|
||||
create(:award_emoji, :downvote, user: current_user, awardable: work_item_closed2)
|
||||
end
|
||||
|
||||
let(:params) { { 'myReactionEmoji' => AwardEmoji::THUMBS_UP } }
|
||||
|
||||
it 'returns the correct counts for each state' do
|
||||
query_counts
|
||||
|
||||
expect(work_item_counts).to eq(
|
||||
'all' => 2,
|
||||
'opened' => 2,
|
||||
'closed' => 0
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when filtering by type' do
|
||||
let(:params) { { 'types' => [:ISSUE] } }
|
||||
|
||||
it 'returns the correct counts for each state' do
|
||||
query_counts
|
||||
|
||||
expect(work_item_counts).to eq(
|
||||
'all' => 3,
|
||||
'opened' => 2,
|
||||
'closed' => 1
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when filtering by search' do
|
||||
let(:params) { { search: 'foo', in: [:TITLE] } }
|
||||
|
||||
|
|
|
|||
|
|
@ -8,9 +8,19 @@ RSpec.describe 'getting Work Item counts by state', feature_category: :portfolio
let_it_be(:current_user) { create(:user) }
let_it_be(:group) { create(:group, :private) }
let_it_be(:project) { create(:project, :repository, :private, group: group) }
let_it_be(:work_item_opened1) { create(:work_item, project: project, title: 'Foo') }
let_it_be(:work_item_opened2) { create(:work_item, project: project, author: current_user) }
let_it_be(:work_item_closed) { create(:work_item, :closed, project: project, description: 'Bar') }
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:label) { create(:label, project: project) }
let_it_be(:work_item_opened1) do
create(:work_item, project: project, milestone_id: milestone.id, title: 'Foo', labels: [label])
end

let_it_be(:work_item_opened2) do
create(:work_item, project: project, author: current_user, assignees: [current_user], milestone_id: milestone.id)
end

let_it_be(:work_item_closed) do
create(:work_item, :closed, :confidential, project: project, description: 'Bar', labels: [label])
end

let(:params) { {} }

@ -69,6 +79,81 @@ RSpec.describe 'getting Work Item counts by state', feature_category: :portfolio
end
end

context 'when filtering by assignee usernames' do
let(:params) { { 'assigneeUsernames' => [current_user.username] } }

it 'returns the correct counts for each state' do
query_counts

expect(work_item_counts).to eq(
'all' => 1,
'opened' => 1,
'closed' => 0
)
end
end

context 'when filtering by confidential' do
let(:params) { { 'confidential' => true } }

it 'returns the correct counts for each state' do
query_counts

expect(work_item_counts).to eq(
'all' => 1,
'opened' => 0,
'closed' => 1
)
end
end

context 'when filtering by label name' do
let(:params) { { 'labelName' => [label.name] } }

it 'returns the correct counts for each state' do
query_counts

expect(work_item_counts).to eq(
'all' => 2,
'opened' => 1,
'closed' => 1
)
end
end

context 'when filtering by milestone title' do
let(:params) { { 'milestoneTitle' => [milestone.title] } }

it 'returns the correct counts for each state' do
query_counts

expect(work_item_counts).to eq(
'all' => 2,
'opened' => 2,
'closed' => 0
)
end
end

context 'when filtering by reaction emoji' do
before_all do
create(:award_emoji, :upvote, user: current_user, awardable: work_item_opened1)
create(:award_emoji, :upvote, user: current_user, awardable: work_item_closed)
end

let(:params) { { 'myReactionEmoji' => AwardEmoji::THUMBS_UP } }

it 'returns the correct counts for each state' do
query_counts

expect(work_item_counts).to eq(
'all' => 2,
'opened' => 1,
'closed' => 1
)
end
end

context 'when searching in title' do
let(:params) { { search: 'Foo', in: [:TITLE] } }

@ -63,57 +63,5 @@ RSpec.describe 'projects/pipelines/show', feature_category: :pipeline_compositio

expect(rendered).to have_selector('#js-pipeline-tabs')
end

context 'when pipeline uses dependency scanning' do
let(:build_name) { nil }
let(:build) { create(:ci_build, name: build_name) }
let(:pipeline) { create(:ci_pipeline, project: project, builds: [build]) }

shared_examples 'pipeline with deprecated dependency scanning job' do
it 'shows deprecation warning' do
render

expect(rendered).to have_content('You are using a deprecated Dependency Scanning analyzer')
expect(rendered).to have_content(
'The Gemnasium analyzer has been replaced with a new Dependency Scanning analyzer')
end
end

context 'when gemnasium job is defined' do
let(:build_name) { 'gemnasium' }

it_behaves_like 'pipeline with deprecated dependency scanning job'
end

context 'when gemnasium-maven job is defined' do
let(:build_name) { 'gemnasium-maven' }

it_behaves_like 'pipeline with deprecated dependency scanning job'
end

context 'when gemnasium-python job is defined' do
let(:build_name) { 'gemnasium-python' }

it_behaves_like 'pipeline with deprecated dependency scanning job'
end

context 'when a gemnasium job is defined using parallel:matrix' do
let(:build_name) { 'gemnasium: [variable]' }

it_behaves_like 'pipeline with deprecated dependency scanning job'
end

context 'when a custom dependency scanning job is defined' do
let(:build_name) { 'custom-govulncheck-dependency-scanning-job' }

it 'shows deprecation warning' do
render

expect(rendered).not_to have_content('You are using a deprecated Dependency Scanning analyzer')
expect(rendered).not_to have_content(
'The Gemnasium analyzer has been replaced with a new Dependency Scanning analyzer')
end
end
end
end
end

@ -5,6 +5,7 @@ import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"

@ -33,6 +34,8 @@ const (
geoProxyEndpointPath = "/api/v4/geo/proxy"
)

var errResponseLimit = fmt.Errorf("response body exceeded maximum buffer size (%d bytes)", failureResponseLimit)

// API represents a client for interacting with an external API.
type API struct {
Client *http.Client

@ -462,9 +465,19 @@ func passResponseBack(httpResponse *http.Response, w http.ResponseWriter, r *htt
// the entire response body in memory before sending it on.
responseBody, err := bufferResponse(httpResponse.Body)
if err != nil {
fail.Request(w, r, err)
// A user can issue a git clone command against a URL that doesn't
// get handled by the info refs endpoint, resulting in a full-fledged 404
// response (i.e. like the one returned in a browser) that's going to exceed
// the response error limit, eventually making Workhorse return a 500.
// Here we intercept such 404s and just return the response code without a body.
if errors.Is(err, errResponseLimit) && httpResponse.StatusCode == 404 {
fail.Request(w, r, err, fail.WithStatus(httpResponse.StatusCode))
} else {
fail.Request(w, r, err)
}
return
}

if err = httpResponse.Body.Close(); err != nil {
fmt.Printf("Error closing response body: %s", err)
}

@ -492,7 +505,7 @@ func bufferResponse(r io.Reader) (*bytes.Buffer, error) {
}

if n == failureResponseLimit {
return nil, fmt.Errorf("response body exceeded maximum buffer size (%d bytes)", failureResponseLimit)
return nil, errResponseLimit
}

return responseBody, nil

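For readers skimming this hunk: the change replaces an inline fmt.Errorf with the package-level sentinel errResponseLimit, so passResponseBack can branch on the "body too large" condition with errors.Is and pass an upstream 404 through instead of converting it into a 500. The following standalone sketch illustrates that sentinel-error pattern only; the names maxBody, errBodyTooLarge, and bufferBody are illustrative and are not Workhorse's actual identifiers or values.

package main

import (
    "bytes"
    "errors"
    "fmt"
    "io"
    "strings"
)

// maxBody stands in for failureResponseLimit; the value here is illustrative.
const maxBody = 32 * 1024

// A package-level sentinel lets callers branch on the condition with
// errors.Is instead of inspecting the formatted error message.
var errBodyTooLarge = fmt.Errorf("response body exceeded maximum buffer size (%d bytes)", maxBody)

// bufferBody reads at most maxBody bytes; hitting the limit is reported via
// the sentinel, mirroring the n == failureResponseLimit check in the hunk above.
func bufferBody(r io.Reader) (*bytes.Buffer, error) {
    buf := &bytes.Buffer{}
    n, err := io.CopyN(buf, r, maxBody)
    if err != nil && !errors.Is(err, io.EOF) {
        return nil, err
    }
    if n == maxBody {
        return nil, errBodyTooLarge
    }
    return buf, nil
}

func main() {
    // An oversized "response body": one byte more than the limit.
    _, err := bufferBody(strings.NewReader(strings.Repeat("a", maxBody+1)))

    // A caller can now special-case the limit, e.g. return the upstream
    // status code without a body instead of failing with a 500.
    fmt.Println(errors.Is(err, errBodyTooLarge)) // prints: true
}

The design point is that comparing against a sentinel with errors.Is stays correct even if the error is later wrapped, whereas matching on the error string would not.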
@ -9,6 +9,7 @@ import (
"net/http/httptest"
"net/url"
"regexp"
"strings"
"testing"

"github.com/stretchr/testify/assert"

@ -108,6 +109,27 @@ func TestPreAuthorizeFixedPath_Unauthorized(t *testing.T) {
require.ErrorAs(t, err, &preAuthError)
}

func TestPreAuthorizeHandler_NotFound(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
w.WriteHeader(http.StatusNotFound)
w.Header().Set("Content-Type", "text/plain; charset=utf-8")
io.WriteString(w, strings.Repeat("a", failureResponseLimit+100))
}))
defer ts.Close()

req, err := http.NewRequest("GET", "/original/request/path", nil)
require.NoError(t, err)

api := NewAPI(helper.URLMustParse(ts.URL), "123", http.DefaultTransport)

handler := api.PreAuthorizeHandler(func(_ http.ResponseWriter, _ *http.Request, _ *Response) {}, "/api/v4/internal/authorized_request")

rr := httptest.NewRecorder()
handler.ServeHTTP(rr, req)

require.Equal(t, http.StatusNotFound, rr.Code)
}

func getGeoProxyDataGivenResponse(t *testing.T, givenInternalAPIResponse string) (*GeoProxyData, error) {
t.Helper()
ts := testRailsServer(regexp.MustCompile(`/api/v4/geo/proxy`), 200, givenInternalAPIResponse)