Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-08-17 18:10:35 +00:00
parent 20dbd96076
commit 4484c85231
94 changed files with 1003 additions and 2480 deletions

View File

@@ -2,6 +2,17 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 14.1.3 (2021-08-17)
### Fixed (2 changes)
- [Geo 2.0 Regression - Add ability to remove primary](gitlab-org/gitlab@1635f3d07d421edd2a83be109d7c54635aa4f58c) ([merge request](gitlab-org/gitlab!68383)) **GitLab Enterprise Edition**
- [[RUN AS-IF-FOSS] AS Fix SAML SSO login redirects not working](gitlab-org/gitlab@7b551e3d2a4ba6127549c613ee95e2c12c014b90) ([merge request](gitlab-org/gitlab!68383)) **GitLab Enterprise Edition**
### Changed (1 change)
- [Resolve "operator does not exist: integer[] || bigint in...](gitlab-org/gitlab@99e6457b6d9d39805dc7758c47091cf6ad0f2bdd) ([merge request](gitlab-org/gitlab!68383))
## 14.1.2 (2021-08-03)
### Security (19 changes)

View File

@@ -0,0 +1,78 @@
import { Node } from '@tiptap/core';
export default Node.create({
name: 'reference',
inline: true,
group: 'inline',
atom: true,
addAttributes() {
return {
className: {
default: null,
parseHTML: (element) => {
return {
className: element.className,
};
},
},
referenceType: {
default: null,
parseHTML: (element) => {
return {
referenceType: element.dataset.referenceType,
};
},
},
originalText: {
default: null,
parseHTML: (element) => {
return {
originalText: element.dataset.original,
};
},
},
href: {
default: null,
parseHTML: (element) => {
return {
href: element.getAttribute('href'),
};
},
},
text: {
default: null,
parseHTML: (element) => {
return {
text: element.textContent,
};
},
},
};
},
parseHTML() {
return [
{
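// Priority 51 is one step above ProseMirror's default parse-rule
// priority of 50, so this rule is tried before more generic anchor
// rules (an assumption worth verifying against the link extension).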
tag: 'a.gfm:not([data-link=true])',
priority: 51,
},
];
},
renderHTML({ node }) {
return [
'a',
{
class: node.attrs.className,
href: node.attrs.href,
'data-reference-type': node.attrs.referenceType,
'data-original': node.attrs.originalText,
},
node.attrs.text,
];
},
});
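For context, here is a minimal sketch of the kind of GFM anchor the `a.gfm:not([data-link=true])` rule above matches, and the attributes the `parseHTML` callbacks would read from it. The sample markup and values are illustrative, not taken from the codebase:

```javascript
// A hypothetical issue reference as rendered in GitLab-flavored Markdown HTML.
const template = document.createElement('template');
template.innerHTML =
  '<a class="gfm gfm-issue" href="/gitlab-org/gitlab/-/issues/1" ' +
  'data-reference-type="issue" data-original="#1">#1</a>';
const element = template.content.firstElementChild;

// Each attribute mirrors one of the parseHTML callbacks above.
const attrs = {
  className: element.className, // 'gfm gfm-issue'
  referenceType: element.dataset.referenceType, // 'issue'
  originalText: element.dataset.original, // '#1'
  href: element.getAttribute('href'), // '/gitlab-org/gitlab/-/issues/1'
  text: element.textContent, // '#1'
};
```

`renderHTML` then writes those attributes straight back out, so the reference round-trips through the editor unchanged.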

View File

@@ -23,6 +23,7 @@ import ListItem from '../extensions/list_item';
import Loading from '../extensions/loading';
import OrderedList from '../extensions/ordered_list';
import Paragraph from '../extensions/paragraph';
import Reference from '../extensions/reference';
import Strike from '../extensions/strike';
import Subscript from '../extensions/subscript';
import Superscript from '../extensions/superscript';
@@ -82,6 +83,7 @@ export const createContentEditor = ({
Loading,
OrderedList,
Paragraph,
Reference,
Strike,
Subscript,
Superscript,

View File

@@ -19,6 +19,7 @@ import Link from '../extensions/link';
import ListItem from '../extensions/list_item';
import OrderedList from '../extensions/ordered_list';
import Paragraph from '../extensions/paragraph';
import Reference from '../extensions/reference';
import Strike from '../extensions/strike';
import Subscript from '../extensions/subscript';
import Superscript from '../extensions/superscript';
@@ -91,6 +92,9 @@ const defaultSerializerConfig = {
[ListItem.name]: defaultMarkdownSerializer.nodes.list_item,
[OrderedList.name]: defaultMarkdownSerializer.nodes.ordered_list,
[Paragraph.name]: defaultMarkdownSerializer.nodes.paragraph,
[Reference.name]: (state, node) => {
state.write(node.attrs.originalText || node.attrs.text);
},
[Table.name]: (state, node) => {
state.renderContent(node);
},
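A quick sketch of what the `Reference` serializer rule above produces; the node shape is illustrative and the plain object stands in for the prosemirror-markdown serializer state:

```javascript
// A reference node parsed from '<a class="gfm" data-original="#1">#1</a>'
// serializes back to its original Markdown source text.
const node = { attrs: { originalText: '#1', text: '#1' } };

const state = {
  out: '',
  write(text) {
    this.out += text;
  },
};

state.write(node.attrs.originalText || node.attrs.text);
console.log(state.out); // '#1'
```

Falling back to `node.attrs.text` covers references whose `data-original` attribute was missing from the parsed HTML.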

View File

@@ -1,269 +0,0 @@
<script>
import { GlLoadingIcon } from '@gitlab/ui';
import { escape, capitalize } from 'lodash';
import GraphBundleMixin from '../../mixins/graph_pipeline_bundle_mixin';
import { reportToSentry } from '../../utils';
import { UPSTREAM, DOWNSTREAM, MAIN } from './constants';
import LinkedPipelinesColumnLegacy from './linked_pipelines_column_legacy.vue';
import StageColumnComponentLegacy from './stage_column_component_legacy.vue';
export default {
name: 'PipelineGraphLegacy',
components: {
GlLoadingIcon,
LinkedPipelinesColumnLegacy,
StageColumnComponentLegacy,
},
mixins: [GraphBundleMixin],
props: {
isLoading: {
type: Boolean,
required: true,
},
pipeline: {
type: Object,
required: true,
},
isLinkedPipeline: {
type: Boolean,
required: false,
default: false,
},
mediator: {
type: Object,
required: true,
},
type: {
type: String,
required: false,
default: MAIN,
},
},
upstream: UPSTREAM,
downstream: DOWNSTREAM,
data() {
return {
downstreamMarginTop: null,
jobName: null,
pipelineExpanded: {
jobName: '',
expanded: false,
},
};
},
computed: {
graph() {
return this.pipeline.details?.stages;
},
hasUpstream() {
return (
this.type !== this.$options.downstream &&
this.upstreamPipelines &&
this.pipeline.triggered_by !== null
);
},
upstreamPipelines() {
return this.pipeline.triggered_by;
},
hasDownstream() {
return (
this.type !== this.$options.upstream &&
this.downstreamPipelines &&
this.pipeline.triggered.length > 0
);
},
downstreamPipelines() {
return this.pipeline.triggered;
},
expandedUpstream() {
return (
this.pipeline.triggered_by &&
Array.isArray(this.pipeline.triggered_by) &&
this.pipeline.triggered_by.find((el) => el.isExpanded)
);
},
expandedDownstream() {
return this.pipeline.triggered && this.pipeline.triggered.find((el) => el.isExpanded);
},
pipelineTypeUpstream() {
return this.type !== this.$options.downstream && this.expandedUpstream;
},
pipelineTypeDownstream() {
return this.type !== this.$options.upstream && this.expandedDownstream;
},
pipelineProjectId() {
return this.pipeline.project.id;
},
},
errorCaptured(err, _vm, info) {
reportToSentry(this.$options.name, `error: ${err}, info: ${info}`);
},
methods: {
capitalizeStageName(name) {
const escapedName = escape(name);
return capitalize(escapedName);
},
isFirstColumn(index) {
return index === 0;
},
stageConnectorClass(index, stage) {
let className;
// If it's the first stage column and only has one job
if (this.isFirstColumn(index) && stage.groups.length === 1) {
className = 'no-margin';
} else if (index > 0) {
// If it is not the first column
className = 'left-margin';
}
return className;
},
refreshPipelineGraph() {
this.$emit('refreshPipelineGraph');
},
/**
* A right-margin CSS class is applied to every stage column
* except the last one in the pipeline graph.
*
* @param {number} index
* @returns {boolean}
*/
shouldAddRightMargin(index) {
return !(index === this.graph.length - 1);
},
handleClickedDownstream(pipeline, clickedIndex, downstreamNode) {
/**
* Calculates the margin-top of the clicked downstream pipeline by
* subtracting its parent's offsetTop from its own offsetTop,
* then subtracting a further 15 pixels.
*/
this.downstreamMarginTop = this.calculateMarginTop(downstreamNode, 15);
/**
* If the expanded trigger is defined and the id is different than the
* pipeline we clicked, then it means we clicked on a sibling downstream link
* and we want to reset the pipeline store. Triggering the reset without
* this condition would mean not allowing downstreams of downstreams to expand
*/
if (this.expandedDownstream?.id !== pipeline.id) {
this.$emit('onResetDownstream', this.pipeline, pipeline);
}
this.$emit('onClickDownstreamPipeline', pipeline);
},
calculateMarginTop(downstreamNode, pixelDiff) {
return `${downstreamNode.offsetTop - downstreamNode.offsetParent.offsetTop - pixelDiff}px`;
},
hasOnlyOneJob(stage) {
return stage.groups.length === 1;
},
hasUpstreamColumn(index) {
return index === 0 && this.hasUpstream;
},
setJob(jobName) {
this.jobName = jobName;
},
setPipelineExpanded(jobName, expanded) {
if (expanded) {
this.pipelineExpanded = {
jobName,
expanded,
};
} else {
this.pipelineExpanded = {
expanded,
jobName: '',
};
}
},
},
};
</script>
<template>
<div class="build-content middle-block js-pipeline-graph">
<div
class="pipeline-visualization pipeline-graph"
:class="{ 'pipeline-tab-content': !isLinkedPipeline }"
>
<div class="gl-w-full">
<div class="container-fluid container-limited">
<gl-loading-icon v-if="isLoading" class="m-auto" size="lg" />
<pipeline-graph-legacy
v-if="pipelineTypeUpstream"
:type="$options.upstream"
class="d-inline-block upstream-pipeline"
:class="`js-upstream-pipeline-${expandedUpstream.id}`"
:is-loading="false"
:pipeline="expandedUpstream"
:is-linked-pipeline="true"
:mediator="mediator"
@onClickUpstreamPipeline="clickUpstreamPipeline"
@refreshPipelineGraph="requestRefreshPipelineGraph"
/>
<linked-pipelines-column-legacy
v-if="hasUpstream"
:type="$options.upstream"
:linked-pipelines="upstreamPipelines"
:column-title="__('Upstream')"
:project-id="pipelineProjectId"
@linkedPipelineClick="$emit('onClickUpstreamPipeline', $event)"
/>
<ul
v-if="!isLoading"
:class="{
'inline js-has-linked-pipelines': hasDownstream || hasUpstream,
}"
class="stage-column-list align-top"
>
<stage-column-component-legacy
v-for="(stage, index) in graph"
:key="stage.name"
:class="{
'has-upstream gl-ml-11': hasUpstreamColumn(index),
'has-only-one-job': hasOnlyOneJob(stage),
'gl-mr-26': shouldAddRightMargin(index),
}"
:title="capitalizeStageName(stage.name)"
:groups="stage.groups"
:stage-connector-class="stageConnectorClass(index, stage)"
:is-first-column="isFirstColumn(index)"
:has-upstream="hasUpstream"
:action="stage.status.action"
:job-hovered="jobName"
:pipeline-expanded="pipelineExpanded"
@refreshPipelineGraph="refreshPipelineGraph"
/>
</ul>
<linked-pipelines-column-legacy
v-if="hasDownstream"
:type="$options.downstream"
:linked-pipelines="downstreamPipelines"
:column-title="__('Downstream')"
:project-id="pipelineProjectId"
@linkedPipelineClick="handleClickedDownstream"
@downstreamHovered="setJob"
@pipelineExpandToggle="setPipelineExpanded"
/>
<pipeline-graph-legacy
v-if="pipelineTypeDownstream"
:type="$options.downstream"
class="d-inline-block"
:class="`js-downstream-pipeline-${expandedDownstream.id}`"
:is-loading="false"
:pipeline="expandedDownstream"
:is-linked-pipeline="true"
:style="{ 'margin-top': downstreamMarginTop }"
:mediator="mediator"
@onClickDownstreamPipeline="clickDownstreamPipeline"
@refreshPipelineGraph="requestRefreshPipelineGraph"
/>
</div>
</div>
</div>
</div>
</template>

View File

@@ -1,91 +0,0 @@
<script>
import { reportToSentry } from '../../utils';
import { UPSTREAM } from './constants';
import LinkedPipeline from './linked_pipeline.vue';
export default {
components: {
LinkedPipeline,
},
props: {
columnTitle: {
type: String,
required: true,
},
linkedPipelines: {
type: Array,
required: true,
},
type: {
type: String,
required: true,
},
projectId: {
type: Number,
required: true,
},
},
computed: {
columnClass() {
const positionValues = {
right: 'gl-ml-11',
left: 'gl-mr-7',
};
return `graph-position-${this.graphPosition} ${positionValues[this.graphPosition]}`;
},
graphPosition() {
return this.isUpstream ? 'left' : 'right';
},
isExpanded() {
return this.pipeline?.isExpanded || false;
},
isUpstream() {
return this.type === UPSTREAM;
},
},
errorCaptured(err, _vm, info) {
reportToSentry('linked_pipelines_column_legacy', `error: ${err}, info: ${info}`);
},
methods: {
onPipelineClick(downstreamNode, pipeline, index) {
this.$emit('linkedPipelineClick', pipeline, index, downstreamNode);
},
onDownstreamHovered(jobName) {
this.$emit('downstreamHovered', jobName);
},
onPipelineExpandToggle(jobName, expanded) {
// Highlighting only applies to downstream pipelines
if (this.isUpstream) {
return;
}
this.$emit('pipelineExpandToggle', jobName, expanded);
},
},
};
</script>
<template>
<div :class="columnClass" class="stage-column linked-pipelines-column">
<div class="stage-name linked-pipelines-column-title">{{ columnTitle }}</div>
<div v-if="isUpstream" class="cross-project-triangle"></div>
<ul>
<li v-for="(pipeline, index) in linkedPipelines" :key="pipeline.id">
<linked-pipeline
:class="{
active: pipeline.isExpanded,
'left-connector': pipeline.isExpanded && graphPosition === 'left',
}"
:pipeline="pipeline"
:column-title="columnTitle"
:project-id="projectId"
:type="type"
:expanded="isExpanded"
@pipelineClicked="onPipelineClick($event, pipeline, index)"
@downstreamHovered="onDownstreamHovered"
@pipelineExpandToggle="onPipelineExpandToggle"
/>
</li>
</ul>
</div>
</template>

View File

@@ -1,112 +0,0 @@
<script>
import { isEmpty, escape } from 'lodash';
import stageColumnMixin from '../../mixins/stage_column_mixin';
import { reportToSentry } from '../../utils';
import ActionComponent from '../jobs_shared/action_component.vue';
import JobGroupDropdown from './job_group_dropdown.vue';
import JobItem from './job_item.vue';
export default {
components: {
JobItem,
JobGroupDropdown,
ActionComponent,
},
mixins: [stageColumnMixin],
props: {
title: {
type: String,
required: true,
},
groups: {
type: Array,
required: true,
},
isFirstColumn: {
type: Boolean,
required: false,
default: false,
},
stageConnectorClass: {
type: String,
required: false,
default: '',
},
action: {
type: Object,
required: false,
default: () => ({}),
},
jobHovered: {
type: String,
required: false,
default: '',
},
pipelineExpanded: {
type: Object,
required: false,
default: () => ({}),
},
},
computed: {
hasAction() {
return !isEmpty(this.action);
},
},
errorCaptured(err, _vm, info) {
reportToSentry('stage_column_component_legacy', `error: ${err}, info: ${info}`);
},
methods: {
groupId(group) {
return `ci-badge-${escape(group.name)}`;
},
pipelineActionRequestComplete() {
this.$emit('refreshPipelineGraph');
},
},
};
</script>
<template>
<li :class="stageConnectorClass" class="stage-column">
<div class="stage-name position-relative" data-testid="stage-column-title">
{{ title }}
<action-component
v-if="hasAction"
:action-icon="action.icon"
:tooltip-text="action.title"
:link="action.path"
class="js-stage-action stage-action rounded"
@pipelineActionRequestComplete="pipelineActionRequestComplete"
/>
</div>
<div class="builds-container">
<ul>
<li
v-for="(group, index) in groups"
:id="groupId(group)"
:key="group.id"
:class="buildConnnectorClass(index)"
class="build"
>
<div class="curve"></div>
<job-item
v-if="group.size === 1"
:job="group.jobs[0]"
:job-hovered="jobHovered"
:pipeline-expanded="pipelineExpanded"
css-class-job-name="build-content"
@pipelineActionRequestComplete="pipelineActionRequestComplete"
/>
<job-group-dropdown
v-if="group.size > 1"
:group="group"
@pipelineActionRequestComplete="pipelineActionRequestComplete"
/>
</li>
</ul>
</div>
</li>
</template>

View File

@@ -4,6 +4,7 @@ import { map } from 'lodash';
import { s__ } from '~/locale';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import PipelineBranchNameToken from './tokens/pipeline_branch_name_token.vue';
import PipelineSourceToken from './tokens/pipeline_source_token.vue';
import PipelineStatusToken from './tokens/pipeline_status_token.vue';
import PipelineTagNameToken from './tokens/pipeline_tag_name_token.vue';
import PipelineTriggerAuthorToken from './tokens/pipeline_trigger_author_token.vue';
@@ -13,6 +14,7 @@ export default {
branchType: 'ref',
tagType: 'tag',
statusType: 'status',
sourceType: 'source',
defaultTokensLength: 1,
components: {
GlFilteredSearch,
@@ -37,7 +39,7 @@ export default {
return this.value.map((i) => i.type);
},
tokens() {
return [
const tokens = [
{
type: this.$options.userType,
icon: 'user',
@@ -76,6 +78,19 @@ export default {
operators: OPERATOR_IS_ONLY,
},
];
if (gon.features.pipelineSourceFilter) {
tokens.push({
type: this.$options.sourceType,
icon: 'trigger-source',
title: s__('Pipeline|Source'),
unique: true,
token: PipelineSourceToken,
operators: OPERATOR_IS_ONLY,
});
}
return tokens;
},
parsedParams() {
return map(this.params, (val, key) => ({

View File

@@ -0,0 +1,106 @@
<script>
import { GlFilteredSearchToken, GlFilteredSearchSuggestion } from '@gitlab/ui';
import { s__ } from '~/locale';
export default {
components: {
GlFilteredSearchToken,
GlFilteredSearchSuggestion,
},
props: {
config: {
type: Object,
required: true,
},
value: {
type: Object,
required: true,
},
},
computed: {
sources() {
return [
{
text: s__('Pipeline|Source|Push'),
value: 'push',
},
{
text: s__('Pipeline|Source|Web'),
value: 'web',
},
{
text: s__('Pipeline|Source|Trigger'),
value: 'trigger',
},
{
text: s__('Pipeline|Source|Schedule'),
value: 'schedule',
},
{
text: s__('Pipeline|Source|API'),
value: 'api',
},
{
text: s__('Pipeline|Source|External'),
value: 'external',
},
{
text: s__('Pipeline|Source|Pipeline'),
value: 'pipeline',
},
{
text: s__('Pipeline|Source|Chat'),
value: 'chat',
},
{
text: s__('Pipeline|Source|Web IDE'),
value: 'webide',
},
{
text: s__('Pipeline|Source|Merge Request'),
value: 'merge_request_event',
},
{
text: s__('Pipeline|Source|External Pull Request'),
value: 'external_pull_request_event',
},
{
text: s__('Pipeline|Source|Parent Pipeline'),
value: 'parent_pipeline',
},
{
text: s__('Pipeline|Source|On-Demand DAST Scan'),
value: 'ondemand_dast_scan',
},
{
text: s__('Pipeline|Source|On-Demand DAST Validation'),
value: 'ondemand_dast_validation',
},
];
},
findActiveSource() {
return this.sources.find((source) => source.value === this.value.data);
},
},
};
</script>
<template>
<gl-filtered-search-token v-bind="{ ...$props, ...$attrs }" v-on="$listeners">
<template #view>
<div class="gl-display-flex gl-align-items-center">
<span>{{ findActiveSource.text }}</span>
</div>
</template>
<template #suggestions>
<gl-filtered-search-suggestion
v-for="source in sources"
:key="source.value"
:value="source.value"
>
{{ source.text }}
</gl-filtered-search-suggestion>
</template>
</gl-filtered-search-token>
</template>

View File

@@ -4,7 +4,7 @@ export const CANCEL_REQUEST = 'CANCEL_REQUEST';
export const LAYOUT_CHANGE_DELAY = 300;
export const FILTER_PIPELINES_SEARCH_DELAY = 200;
export const ANY_TRIGGER_AUTHOR = 'Any';
export const SUPPORTED_FILTER_PARAMETERS = ['username', 'ref', 'status'];
export const SUPPORTED_FILTER_PARAMETERS = ['username', 'ref', 'status', 'source'];
export const FILTER_TAG_IDENTIFIER = 'tag';
export const SCHEDULE_ORIGIN = 'schedule';

View File

@@ -1,65 +0,0 @@
import createFlash from '~/flash';
import { __ } from '~/locale';
export default {
methods: {
getExpandedPipelines(pipeline) {
this.mediator.service
.getPipeline(this.mediator.getExpandedParameters())
.then((response) => {
this.mediator.store.toggleLoading(pipeline);
this.mediator.store.storePipeline(response.data);
this.mediator.poll.enable({ data: this.mediator.getExpandedParameters() });
})
.catch(() => {
this.mediator.store.toggleLoading(pipeline);
createFlash({
message: __('An error occurred while fetching the pipeline.'),
});
});
},
/**
* Called when a linked pipeline is clicked.
*
* If the pipeline is collapsed we will start polling it & we will reset the other pipelines.
* If the pipeline is expanded we will close it.
*
* @param {Object} pipeline The clicked pipeline
* @param {String} openMethod Store method used to expand the pipeline
* @param {String} closeMethod Store method used to collapse the pipeline
*/
clickPipeline(pipeline, openMethod, closeMethod) {
if (!pipeline.isExpanded) {
this.mediator.store[openMethod](pipeline);
this.mediator.store.toggleLoading(pipeline);
this.mediator.poll.stop();
this.getExpandedPipelines(pipeline);
} else {
this.mediator.store[closeMethod](pipeline);
this.mediator.poll.stop();
this.mediator.poll.enable({ data: this.mediator.getExpandedParameters() });
}
},
resetDownstreamPipelines(parentPipeline, pipeline) {
this.mediator.store.resetTriggeredPipelines(parentPipeline, pipeline);
},
clickUpstreamPipeline(pipeline) {
this.clickPipeline(pipeline, 'openPipeline', 'closePipeline');
},
clickDownstreamPipeline(pipeline) {
this.clickPipeline(pipeline, 'openPipeline', 'closePipeline');
},
requestRefreshPipelineGraph() {
// When an action is clicked (whether in the dropdown or in the
// main nodes), we refresh the whole graph.
this.mediator.refreshPipeline().catch(() =>
createFlash({
message: __('An error occurred while making the request.'),
}),
);
},
},
};

View File

@@ -3,15 +3,12 @@ import createFlash from '~/flash';
import { parseBoolean } from '~/lib/utils/common_utils';
import { __ } from '~/locale';
import Translate from '~/vue_shared/translate';
import PipelineGraphLegacy from './components/graph/graph_component_legacy.vue';
import TestReports from './components/test_reports/test_reports.vue';
import GraphBundleMixin from './mixins/graph_pipeline_bundle_mixin';
import createDagApp from './pipeline_details_dag';
import { createPipelinesDetailApp } from './pipeline_details_graph';
import { createPipelineHeaderApp } from './pipeline_details_header';
import { apolloProvider } from './pipeline_shared_client';
import createTestReportsStore from './stores/test_reports';
import { reportToSentry } from './utils';
Vue.use(Translate);
@@ -22,44 +19,6 @@ const SELECTORS = {
PIPELINE_TESTS: '#js-pipeline-tests-detail',
};
const createLegacyPipelinesDetailApp = (mediator) => {
if (!document.querySelector(SELECTORS.PIPELINE_GRAPH)) {
return;
}
// eslint-disable-next-line no-new
new Vue({
el: SELECTORS.PIPELINE_GRAPH,
components: {
PipelineGraphLegacy,
},
mixins: [GraphBundleMixin],
data() {
return {
mediator,
};
},
errorCaptured(err, _vm, info) {
reportToSentry('pipeline_details_bundle_legacy_details', `error: ${err}, info: ${info}`);
},
render(createElement) {
return createElement('pipeline-graph-legacy', {
props: {
isLoading: this.mediator.state.isLoading,
pipeline: this.mediator.store.state.pipeline,
mediator: this.mediator,
},
on: {
refreshPipelineGraph: this.requestRefreshPipelineGraph,
onResetDownstream: (parentPipeline, pipeline) =>
this.resetDownstreamPipelines(parentPipeline, pipeline),
onClickUpstreamPipeline: (pipeline) => this.clickUpstreamPipeline(pipeline),
onClickDownstreamPipeline: (pipeline) => this.clickDownstreamPipeline(pipeline),
},
});
},
});
};
const createTestDetails = () => {
const el = document.querySelector(SELECTORS.PIPELINE_TESTS);
const { blobPath, emptyStateImagePath, hasTestReport, summaryEndpoint, suiteEndpoint } =
@@ -88,9 +47,6 @@ const createTestDetails = () => {
};
export default async function initPipelineDetailsBundle() {
const canShowNewPipelineDetails =
gon.features.graphqlPipelineDetails || gon.features.graphqlPipelineDetailsUsers;
const { dataset } = document.querySelector(SELECTORS.PIPELINE_DETAILS);
try {
@@ -101,22 +57,12 @@ export default async function initPipelineDetailsBundle() {
});
}
if (canShowNewPipelineDetails) {
try {
createPipelinesDetailApp(SELECTORS.PIPELINE_GRAPH, apolloProvider, dataset);
} catch {
createFlash({
message: __('An error occurred while loading the pipeline.'),
});
}
} else {
const { default: PipelinesMediator } = await import(
/* webpackChunkName: 'PipelinesMediator' */ './pipeline_details_mediator'
);
const mediator = new PipelinesMediator({ endpoint: dataset.endpoint });
mediator.fetchPipeline();
createLegacyPipelinesDetailApp(mediator);
try {
createPipelinesDetailApp(SELECTORS.PIPELINE_GRAPH, apolloProvider, dataset);
} catch {
createFlash({
message: __('An error occurred while loading the pipeline.'),
});
}
createDagApp(apolloProvider);

View File

@ -1,81 +0,0 @@
import Visibility from 'visibilityjs';
import createFlash from '~/flash';
import Poll from '../lib/utils/poll';
import { __ } from '../locale';
import PipelineService from './services/pipeline_service';
import PipelineStore from './stores/pipeline_store';
export default class pipelinesMediator {
constructor(options = {}) {
this.options = options;
this.store = new PipelineStore();
this.service = new PipelineService(options.endpoint);
this.state = {};
this.state.isLoading = false;
}
fetchPipeline() {
this.poll = new Poll({
resource: this.service,
method: 'getPipeline',
data: this.store.state.expandedPipelines ? this.getExpandedParameters() : undefined,
successCallback: this.successCallback.bind(this),
errorCallback: this.errorCallback.bind(this),
});
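// Poll only while the tab is visible: make the first request now if the
// tab is visible, otherwise do a single refresh, and toggle polling on
// visibility changes below.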
if (!Visibility.hidden()) {
this.state.isLoading = true;
this.poll.makeRequest();
} else {
this.refreshPipeline();
}
Visibility.change(() => {
if (!Visibility.hidden()) {
this.poll.restart();
} else {
this.stopPipelinePoll();
}
});
}
successCallback(response) {
this.state.isLoading = false;
this.store.storePipeline(response.data);
}
errorCallback() {
this.state.isLoading = false;
createFlash({
message: __('An error occurred while fetching the pipeline.'),
});
}
refreshPipeline() {
this.stopPipelinePoll();
return this.service
.getPipeline()
.then((response) => this.successCallback(response))
.catch(() => this.errorCallback())
.finally(() =>
this.poll.restart(
this.store.state.expandedPipelines ? this.getExpandedParameters() : undefined,
),
);
}
stopPipelinePoll() {
this.poll.stop();
}
/**
* Backend expects parameters in the following format: `expanded[]=id&expanded[]=id`
*/
getExpandedParameters() {
return {
expanded: this.store.state.expandedPipelines,
};
}
}
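As a rough illustration of the `expanded[]=id&expanded[]=id` format mentioned above: assuming axios' default params serializer (which, to my knowledge, GitLab's `axios_utils` wrapper does not override), the object returned by `getExpandedParameters` is encoded with bracketed array keys. The endpoint here is illustrative:

```javascript
import axios from 'axios';

// { expanded: [101, 102] } becomes 'expanded[]=101&expanded[]=102'
// on the query string, which is the format the backend expects.
axios.get('/some/pipeline.json', { params: { expanded: [101, 102] } });
```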

View File

@@ -1,21 +0,0 @@
import axios from '../../lib/utils/axios_utils';
export default class PipelineService {
constructor(endpoint) {
this.pipeline = endpoint;
}
getPipeline(params) {
return axios.get(this.pipeline, { params });
}
// eslint-disable-next-line class-methods-use-this
deleteAction(endpoint) {
return axios.delete(`${endpoint}.json`);
}
// eslint-disable-next-line class-methods-use-this
postAction(endpoint) {
return axios.post(`${endpoint}.json`);
}
}

View File

@@ -1,206 +0,0 @@
import Vue from 'vue';
export default class PipelineStore {
constructor() {
this.state = {};
this.state.pipeline = {};
this.state.expandedPipelines = [];
}
/**
* For each triggered pipeline, adds the `isExpanded` key.
*
* For the triggered_by pipeline, adds the `isExpanded` key
* and wraps it in an array.
*
* @param {Object} pipeline
*/
storePipeline(pipeline = {}) {
const pipelineCopy = { ...pipeline };
if (pipelineCopy.triggered_by) {
pipelineCopy.triggered_by = [pipelineCopy.triggered_by];
const oldTriggeredBy =
this.state.pipeline &&
this.state.pipeline.triggered_by &&
this.state.pipeline.triggered_by[0];
this.parseTriggeredByPipelines(oldTriggeredBy, pipelineCopy.triggered_by[0]);
}
if (pipelineCopy.triggered && pipelineCopy.triggered.length) {
pipelineCopy.triggered.forEach((el) => {
const oldPipeline =
this.state.pipeline &&
this.state.pipeline.triggered &&
this.state.pipeline.triggered.find((element) => element.id === el.id);
this.parseTriggeredPipelines(oldPipeline, el);
});
}
this.state.pipeline = pipelineCopy;
}
/**
* Recursively parses the triggered_by pipelines.
*
* Sets triggered_by as an array; there is always only 1 triggered_by pipeline.
* Adds the `isExpanded` key
* and keeps its old value across polling updates.
*
* @param {Object} oldPipeline
* @param {Object} newPipeline
*/
parseTriggeredByPipelines(oldPipeline = {}, newPipeline) {
// keep old value in case it's opened because we're polling
Vue.set(newPipeline, 'isExpanded', oldPipeline.isExpanded || false);
// add isLoading property
Vue.set(newPipeline, 'isLoading', false);
// Because there can only ever be one `triggered_by` for any given pipeline,
// the API returns an object for the value instead of an Array. However,
// it's easier to deal with an array in the FE so we convert it.
if (newPipeline.triggered_by) {
if (!Array.isArray(newPipeline.triggered_by)) {
Object.assign(newPipeline, { triggered_by: [newPipeline.triggered_by] });
}
if (newPipeline.triggered_by?.length > 0) {
newPipeline.triggered_by.forEach((el) => {
const oldTriggeredBy = oldPipeline.triggered_by?.find((element) => element.id === el.id);
this.parseTriggeredPipelines(oldTriggeredBy, el);
});
}
}
}
/**
* Recursively parses the triggered pipelines
* @param {Object} oldPipeline
* @param {Object} newPipeline
*/
parseTriggeredPipelines(oldPipeline = {}, newPipeline) {
// keep old value in case it's opened because we're polling
Vue.set(newPipeline, 'isExpanded', oldPipeline.isExpanded || false);
// add isLoading property
Vue.set(newPipeline, 'isLoading', false);
if (newPipeline.triggered && newPipeline.triggered.length > 0) {
newPipeline.triggered.forEach((el) => {
const oldTriggered =
oldPipeline.triggered && oldPipeline.triggered.find((element) => element.id === el.id);
this.parseTriggeredPipelines(oldTriggered, el);
});
}
}
/**
* Recursively resets all triggered_by pipelines
*
* @param {Object} parentPipeline
* @param {Object} pipeline
*/
resetTriggeredByPipeline(parentPipeline, pipeline) {
parentPipeline.triggered_by.forEach((el) => this.closePipeline(el));
if (pipeline.triggered_by && pipeline.triggered_by.length) {
this.resetTriggeredByPipeline(pipeline, pipeline.triggered_by);
}
}
/**
* Opens the clicked pipeline and closes all other ones.
* @param {Object} pipeline
*/
openTriggeredByPipeline(parentPipeline, pipeline) {
// first we need to reset all triggeredBy pipelines
this.resetTriggeredByPipeline(parentPipeline, pipeline);
this.openPipeline(pipeline);
}
/**
* On click, will close the given pipeline and all nested triggered by pipelines
*
* @param {Object} pipeline
*/
closeTriggeredByPipeline(pipeline) {
this.closePipeline(pipeline);
if (pipeline.triggered_by && pipeline.triggered_by.length) {
pipeline.triggered_by.forEach((triggeredBy) => this.closeTriggeredByPipeline(triggeredBy));
}
}
/**
* Recursively closes all triggered pipelines for the given one.
*
* @param {Object} pipeline
*/
resetTriggeredPipelines(parentPipeline, pipeline) {
parentPipeline.triggered.forEach((el) => this.closePipeline(el));
if (pipeline.triggered && pipeline.triggered.length) {
pipeline.triggered.forEach((el) => this.resetTriggeredPipelines(pipeline, el));
}
}
/**
* Opens the clicked triggered pipeline and closes all other ones.
*
* @param {Object} pipeline
*/
openTriggeredPipeline(parentPipeline, pipeline) {
this.resetTriggeredPipelines(parentPipeline, pipeline);
this.openPipeline(pipeline);
}
/**
* On click, will close the given pipeline and all the nested triggered ones
* @param {Object} pipeline
*/
closeTriggeredPipeline(pipeline) {
this.closePipeline(pipeline);
if (pipeline.triggered && pipeline.triggered.length) {
pipeline.triggered.forEach((triggered) => this.closeTriggeredPipeline(triggered));
}
}
/**
* Utility function, Closes the given pipeline
* @param {Object} pipeline
*/
closePipeline(pipeline) {
Vue.set(pipeline, 'isExpanded', false);
// remove the pipeline from the parameters
this.removeExpandedPipelineToRequestData(pipeline.id);
}
/**
* Utility function, Opens the given pipeline
* @param {Object} pipeline
*/
openPipeline(pipeline) {
Vue.set(pipeline, 'isExpanded', true);
// add the pipeline to the parameters
this.addExpandedPipelineToRequestData(pipeline.id);
}
// eslint-disable-next-line class-methods-use-this
toggleLoading(pipeline) {
Vue.set(pipeline, 'isLoading', !pipeline.isLoading);
}
addExpandedPipelineToRequestData(id) {
this.state.expandedPipelines.push(id);
}
removeExpandedPipelineToRequestData(id) {
this.state.expandedPipelines.splice(
this.state.expandedPipelines.findIndex((el) => el === id),
1,
);
}
}
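A small usage sketch of the parsing behavior described in the comments above; the payload shape is illustrative:

```javascript
const store = new PipelineStore();

store.storePipeline({
  id: 1,
  triggered_by: { id: 2 }, // single object from the API
  triggered: [{ id: 3 }],
});

// triggered_by is normalized to an array, and reactive UI-state keys
// are added to every linked pipeline:
store.state.pipeline.triggered_by; // [{ id: 2, isExpanded: false, isLoading: false }]
store.state.pipeline.triggered[0].isExpanded; // false
```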

View File

@@ -37,12 +37,10 @@ export default {
mr: {
type: Object,
required: true,
default: () => ({}),
},
service: {
type: Object,
required: true,
default: () => ({}),
},
},
data() {

View File

@@ -14,7 +14,6 @@ export default {
mr: {
type: Object,
required: true,
default: () => ({}),
},
},
};

View File

@@ -45,7 +45,6 @@ export default {
mr: {
type: Object,
required: true,
default: () => ({}),
},
},
data() {

View File

@@ -17,7 +17,6 @@ export default {
mr: {
type: Object,
required: true,
default: () => ({}),
},
},

View File

@@ -25,12 +25,10 @@ export default {
mr: {
type: Object,
required: true,
default: () => ({}),
},
service: {
type: Object,
required: true,
default: () => ({}),
},
},
data() {

View File

@@ -11,7 +11,6 @@ export default {
mr: {
type: Object,
required: true,
default: () => ({}),
},
},
data() {

View File

@@ -188,13 +188,6 @@ export default {
return this.mr.preferredAutoMergeStrategy;
},
isSHAMismatch() {
if (this.glFeatures.mergeRequestWidgetGraphql) {
return this.mr.sha !== this.state.diffHeadSha;
}
return this.mr.isSHAMismatch;
},
squashIsSelected() {
if (this.glFeatures.mergeRequestWidgetGraphql) {
return this.isSquashReadOnly ? this.state.squashOnMerge : this.state.squash;
@@ -573,21 +566,6 @@ export default {
</div>
</template>
</div>
<div v-if="isSHAMismatch" class="d-flex align-items-center mt-2 js-sha-mismatch">
<gl-icon name="warning-solid" class="text-warning mr-1" />
<span class="text-warning">
<gl-sprintf
:message="
__('New changes were added. %{linkStart}Reload the page to review them%{linkEnd}')
"
>
<template #link="{ content }">
<gl-link :href="mr.mergeRequestDiffsPath">{{ content }}</gl-link>
</template>
</gl-sprintf>
</span>
</div>
<div
v-if="showDangerMessageForMergeTrain"
class="gl-mt-5 gl-text-gray-500"

View File

@@ -1,24 +1,42 @@
<script>
import { GlButton } from '@gitlab/ui';
import { I18N_SHA_MISMATCH } from '../../i18n';
import statusIcon from '../mr_widget_status_icon.vue';
export default {
name: 'ShaMismatch',
components: {
statusIcon,
GlButton,
},
i18n: {
I18N_SHA_MISMATCH,
},
props: {
mr: {
type: Object,
required: true,
},
},
};
</script>
<template>
<div class="mr-widget-body media">
<status-icon :show-disabled-button="true" status="warning" />
<div class="media-body space-children">
<span class="bold" data-qa-selector="head_mismatch_content">
{{
s__(`mrWidget|The source branch HEAD has recently changed.
Please reload the page and review the changes before merging`)
}}
<status-icon :show-disabled-button="false" status="warning" />
<div class="media-body">
<span class="gl-font-weight-bold" data-qa-selector="head_mismatch_content">
{{ $options.i18n.I18N_SHA_MISMATCH.warningMessage }}
</span>
<gl-button
class="gl-ml-3"
data-testid="action-button"
size="small"
category="primary"
variant="confirm"
:href="mr.mergeRequestDiffsPath"
>{{ $options.i18n.I18N_SHA_MISMATCH.actionButtonLabel }}</gl-button
>
</div>
</div>
</template>

View File

@@ -5,3 +5,8 @@ export const SQUASH_BEFORE_MERGE = {
checkboxLabel: __('Squash commits'),
helpLabel: __('What is squashing?'),
};
export const I18N_SHA_MISMATCH = {
warningMessage: __('Merge blocked: new changes were just added.'),
actionButtonLabel: __('Review changes'),
};

View File

@@ -38,6 +38,7 @@ import RebaseState from './components/states/mr_widget_rebase.vue';
import NothingToMergeState from './components/states/nothing_to_merge.vue';
import PipelineFailedState from './components/states/pipeline_failed.vue';
import ReadyToMergeState from './components/states/ready_to_merge.vue';
import ShaMismatch from './components/states/sha_mismatch.vue';
import UnresolvedDiscussionsState from './components/states/unresolved_discussions.vue';
import WorkInProgressState from './components/states/work_in_progress.vue';
// import ExtensionsContainer from './components/extensions/container';
@@ -72,7 +73,7 @@ export default {
'mr-widget-not-allowed': NotAllowedState,
'mr-widget-missing-branch': MissingBranchState,
'mr-widget-ready-to-merge': ReadyToMergeState,
'sha-mismatch': ReadyToMergeState,
'sha-mismatch': ShaMismatch,
'mr-widget-checking': CheckingState,
'mr-widget-unresolved-discussions': UnresolvedDiscussionsState,
'mr-widget-pipeline-blocked': PipelineBlockedState,

View File

@@ -4,6 +4,6 @@ class Import::AvailableNamespacesController < ApplicationController
feature_category :importers
def index
render json: NamespaceSerializer.new.represent(current_user.manageable_groups_with_routes)
render json: NamespaceSerializer.new.represent(current_user.manageable_groups_with_routes(include_groups_with_developer_maintainer_access: true))
end
end

View File

@@ -11,8 +11,7 @@ class Import::BaseController < ApplicationController
format.json do
render json: { imported_projects: serialized_imported_projects,
provider_repos: serialized_provider_repos,
incompatible_repos: serialized_incompatible_repos,
namespaces: serialized_namespaces }
incompatible_repos: serialized_incompatible_repos }
end
format.html
end
@@ -74,14 +73,6 @@ class Import::BaseController < ApplicationController
@already_added_projects ||= filtered(find_already_added_projects(provider_name))
end
def serialized_namespaces
NamespaceSerializer.new.represent(namespaces)
end
def namespaces
current_user.manageable_groups_with_routes
end
# rubocop: disable CodeReuse/ActiveRecord
def find_already_added_projects(import_type)
current_user.created_projects.where(import_type: import_type).with_import_state

View File

@@ -12,12 +12,12 @@ class Projects::PipelinesController < Projects::ApplicationController
before_action :authorize_read_ci_cd_analytics!, only: [:charts]
before_action :authorize_create_pipeline!, only: [:new, :create, :config_variables]
before_action :authorize_update_pipeline!, only: [:retry, :cancel]
before_action do
push_frontend_feature_flag(:graphql_pipeline_details, project, type: :development, default_enabled: :yaml)
push_frontend_feature_flag(:graphql_pipeline_details_users, current_user, type: :development, default_enabled: :yaml)
end
before_action :ensure_pipeline, only: [:show, :downloadable_artifacts]
before_action do
push_frontend_feature_flag(:pipeline_source_filter, project, type: :development, default_enabled: :yaml)
end
# Will be removed with https://gitlab.com/gitlab-org/gitlab/-/issues/225596
before_action :redirect_for_legacy_scope_filter, only: [:index], if: -> { request.format.html? }

View File

@@ -33,7 +33,7 @@ module Repositories
end
def authenticate_user
@authentication_result = Gitlab::Auth::Result.new
@authentication_result = Gitlab::Auth::Result::EMPTY
if allow_basic_auth? && basic_auth_provided?
login, password = user_name_and_password(request)

View File

@@ -13,7 +13,7 @@ class ApplicationExperiment < Gitlab::Experiment # rubocop:disable Gitlab/Namesp
super
publish_to_client
publish_to_database
publish_to_database if @record
end
def publish_to_client
@@ -25,7 +25,6 @@ end
end
def publish_to_database
return unless @record
return unless should_track?
# if the context contains a namespace, group, project, user, or actor

View File

@@ -0,0 +1,27 @@
# frozen_string_literal: true
module Projects
class CiFeatureUsage < ApplicationRecord
self.table_name = 'project_ci_feature_usages'
belongs_to :project
validates :project, :feature, presence: true
enum feature: {
code_coverage: 1,
security_report: 2
}
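# `insert` with `unique_by:` issues INSERT ... ON CONFLICT DO NOTHING, so a
# duplicate (project_id, feature, default_branch) row is skipped silently
# instead of raising.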
def self.insert_usage(project_id:, feature:, default_branch:)
insert(
{
project_id: project_id,
feature: feature,
default_branch: default_branch
},
unique_by: 'index_project_ci_feature_usages_unique_columns'
)
end
end
end

View File

@@ -3,7 +3,13 @@
module Ci
class DailyBuildGroupReportResultService
def execute(pipeline)
DailyBuildGroupReportResult.upsert_reports(coverage_reports(pipeline))
if DailyBuildGroupReportResult.upsert_reports(coverage_reports(pipeline))
Projects::CiFeatureUsage.insert_usage(
project_id: pipeline.project_id,
feature: :code_coverage,
default_branch: pipeline.default_branch?
)
end
end
private

View File

@@ -5,9 +5,7 @@
- add_page_specific_style 'page_bundles/pipeline'
- add_page_specific_style 'page_bundles/reports'
- add_page_specific_style 'page_bundles/ci_status'
- if Feature.enabled?(:graphql_pipeline_details, @project, default_enabled: :yaml) || Feature.enabled?(:graphql_pipeline_details_users, @current_user, default_enabled: :yaml)
- add_page_startup_graphql_call('pipelines/get_pipeline_details', { projectPath: @project.full_path, iid: @pipeline.iid })
- add_page_startup_graphql_call('pipelines/get_pipeline_details', { projectPath: @project.full_path, iid: @pipeline.iid })
.js-pipeline-container{ data: { controller_action: "#{controller.action_name}" } }
#js-pipeline-header-vue.pipeline-header-container{ data: { full_path: @project.full_path, pipeline_iid: @pipeline.iid, pipeline_id: @pipeline.id, pipelines_path: project_pipelines_path(@project) } }

View File

@@ -1,8 +0,0 @@
---
name: graphql_pipeline_details
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/46380
rollout_issue_url:
type: development
group: group::pipeline authoring
default_enabled: true
milestone: '13.6'

View File

@@ -1,8 +0,0 @@
---
name: graphql_pipeline_details_users
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/52092
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/299112
milestone: '13.9'
type: development
group: group::pipeline authoring
default_enabled: false

View File

@@ -0,0 +1,12 @@
# frozen_string_literal: true
class CreateProjectCiFeatureUsages < ActiveRecord::Migration[6.1]
def change
create_table :project_ci_feature_usages do |t|
t.references :project, index: false, foreign_key: { on_delete: :cascade }, null: false
t.integer :feature, null: false, limit: 2
t.boolean :default_branch, default: false, null: false
t.index [:project_id, :feature, :default_branch], unique: true, name: 'index_project_ci_feature_usages_unique_columns'
end
end
end

View File

@@ -0,0 +1,18 @@
# frozen_string_literal: true
class RemoveIndexContainingFaultyRegex < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
INDEX_NAME = "tmp_index_merge_requests_draft_and_status"
disable_ddl_transaction!
def up
remove_concurrent_index_by_name :merge_requests, INDEX_NAME
end
def down
# noop
#
end
end

View File

@@ -1,32 +1,13 @@
# frozen_string_literal: true
class ScheduleBackfillDraftStatusOnMergeRequests < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
INDEX_NAME = "tmp_index_merge_requests_draft_and_status"
MIGRATION = 'BackfillDraftStatusOnMergeRequests'
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 100
disable_ddl_transaction!
def up
add_concurrent_index :merge_requests, :id,
where: "draft = false AND state_id = 1 AND ((title)::text ~* '^\\[draft\\]|\\(draft\\)|draft:|draft|\\[WIP\\]|WIP:|WIP'::text)",
name: INDEX_NAME
eligible_mrs = Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests::MergeRequest.eligible
queue_background_migration_jobs_by_range_at_intervals(
eligible_mrs,
MIGRATION,
DELAY_INTERVAL,
track_jobs: true,
batch_size: BATCH_SIZE
)
# noop
#
end
def down
remove_concurrent_index_by_name :merge_requests, INDEX_NAME
# noop
#
end
end

View File

@@ -1,24 +1,9 @@
# frozen_string_literal: true
class ScheduleBackfillDraftColumnOnMergeRequestsRerun < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
MIGRATION = 'BackfillDraftStatusOnMergeRequests'
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 50
disable_ddl_transaction!
def up
eligible_mrs = Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests::MergeRequest.eligible
queue_background_migration_jobs_by_range_at_intervals(
eligible_mrs,
MIGRATION,
DELAY_INTERVAL,
track_jobs: true,
batch_size: BATCH_SIZE
)
# noop
#
end
def down

View File

@@ -0,0 +1 @@
7c62c47ebad110a343c1f9834ae34bd0fa2bad763025da06f911e127a7380542

View File

@@ -0,0 +1 @@
1e4d0b062c8e43b1af37c6cf869f9c173248d7bf5451b4aa5468d48c1004b97c

View File

@@ -16935,6 +16935,22 @@ CREATE SEQUENCE project_ci_cd_settings_id_seq
ALTER SEQUENCE project_ci_cd_settings_id_seq OWNED BY project_ci_cd_settings.id;
CREATE TABLE project_ci_feature_usages (
id bigint NOT NULL,
project_id bigint NOT NULL,
feature smallint NOT NULL,
default_branch boolean DEFAULT false NOT NULL
);
CREATE SEQUENCE project_ci_feature_usages_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE project_ci_feature_usages_id_seq OWNED BY project_ci_feature_usages.id;
CREATE TABLE project_compliance_framework_settings (
project_id bigint NOT NULL,
framework_id bigint,
@@ -20649,6 +20665,8 @@ ALTER TABLE ONLY project_auto_devops ALTER COLUMN id SET DEFAULT nextval('projec
ALTER TABLE ONLY project_ci_cd_settings ALTER COLUMN id SET DEFAULT nextval('project_ci_cd_settings_id_seq'::regclass);
ALTER TABLE ONLY project_ci_feature_usages ALTER COLUMN id SET DEFAULT nextval('project_ci_feature_usages_id_seq'::regclass);
ALTER TABLE ONLY project_compliance_framework_settings ALTER COLUMN project_id SET DEFAULT nextval('project_compliance_framework_settings_project_id_seq'::regclass);
ALTER TABLE ONLY project_custom_attributes ALTER COLUMN id SET DEFAULT nextval('project_custom_attributes_id_seq'::regclass);
@@ -22239,6 +22257,9 @@ ALTER TABLE ONLY project_auto_devops
ALTER TABLE ONLY project_ci_cd_settings
ADD CONSTRAINT project_ci_cd_settings_pkey PRIMARY KEY (id);
ALTER TABLE ONLY project_ci_feature_usages
ADD CONSTRAINT project_ci_feature_usages_pkey PRIMARY KEY (id);
ALTER TABLE ONLY project_compliance_framework_settings
ADD CONSTRAINT project_compliance_framework_settings_pkey PRIMARY KEY (project_id);
@@ -24806,6 +24827,8 @@ CREATE UNIQUE INDEX index_project_auto_devops_on_project_id ON project_auto_devo
CREATE UNIQUE INDEX index_project_ci_cd_settings_on_project_id ON project_ci_cd_settings USING btree (project_id);
CREATE UNIQUE INDEX index_project_ci_feature_usages_unique_columns ON project_ci_feature_usages USING btree (project_id, feature, default_branch);
CREATE INDEX index_project_compliance_framework_settings_on_framework_id ON project_compliance_framework_settings USING btree (framework_id);
CREATE INDEX index_project_compliance_framework_settings_on_project_id ON project_compliance_framework_settings USING btree (project_id);
@@ -25720,8 +25743,6 @@ CREATE INDEX tmp_idx_on_namespaces_delayed_project_removal ON namespaces USING b
CREATE INDEX tmp_index_approval_project_rules_scanners ON approval_project_rules USING gin (scanners) WHERE (scanners @> '{cluster_image_scanning}'::text[]);
CREATE INDEX tmp_index_merge_requests_draft_and_status ON merge_requests USING btree (id) WHERE ((draft = false) AND (state_id = 1) AND ((title)::text ~* '^\[draft\]|\(draft\)|draft:|draft|\[WIP\]|WIP:|WIP'::text));
CREATE INDEX tmp_index_namespaces_empty_traversal_ids_with_child_namespaces ON namespaces USING btree (id) WHERE ((parent_id IS NOT NULL) AND (traversal_ids = '{}'::integer[]));
CREATE INDEX tmp_index_namespaces_empty_traversal_ids_with_root_namespaces ON namespaces USING btree (id) WHERE ((parent_id IS NULL) AND (traversal_ids = '{}'::integer[]));
@@ -27056,6 +27077,9 @@ ALTER TABLE ONLY epic_user_mentions
ALTER TABLE ONLY approver_groups
ADD CONSTRAINT fk_rails_1cdcbd7723 FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY project_ci_feature_usages
ADD CONSTRAINT fk_rails_1deedbf64b FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY packages_tags
ADD CONSTRAINT fk_rails_1dfc868911 FOREIGN KEY (package_id) REFERENCES packages_packages(id) ON DELETE CASCADE;

View File

@@ -193,11 +193,8 @@ This list of limitations only reflects the latest version of GitLab. If you are
- The **primary** site has to be online for OAuth login to happen. Existing sessions and Git are not affected. Support for the **secondary** site to use an OAuth provider independent from the primary is [being planned](https://gitlab.com/gitlab-org/gitlab/-/issues/208465).
- The installation takes multiple manual steps that together can take about an hour depending on circumstances. We are working on improving this experience. See [Omnibus GitLab issue #2978](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/2978) for details.
- Real-time updates of issues/merge requests (for example, via long polling) doesn't work on the **secondary** site.
- [Selective synchronization](replication/configuration.md#selective-synchronization) applies only to files and repositories. Other datasets are replicated to the **secondary** site in full, making it inappropriate for use as an access control mechanism.
- Object pools for forked project deduplication work only on the **primary** site, and are duplicated on the **secondary** site.
- GitLab Runners cannot register with a **secondary** site. Support for this is [planned for the future](https://gitlab.com/gitlab-org/gitlab/-/issues/3294).
- Configuring Geo **secondary** sites to [use high-availability configurations of PostgreSQL](https://gitlab.com/groups/gitlab-org/-/epics/2536) is currently in **alpha** support.
- [Selective synchronization](replication/configuration.md#selective-synchronization) only limits what repositories are replicated. The entire PostgreSQL data is still replicated. Selective synchronization is not built to accommodate compliance / export control use cases.
- [Selective synchronization](replication/configuration.md#selective-synchronization) only limits what repositories and files are replicated. The entire PostgreSQL data is still replicated. Selective synchronization is not built to accommodate compliance / export control use cases.
### Limitations on replication/verification

View File

@@ -32,7 +32,6 @@ verification methods:
| Git | Project repository | Geo with Gitaly | Gitaly Checksum |
| Git | Project wiki repository | Geo with Gitaly | Gitaly Checksum |
| Git | Project designs repository | Geo with Gitaly | Gitaly Checksum |
| Git | Object pools for forked project deduplication | Geo with Gitaly | _Not implemented_ |
| Git | Project Snippets | Geo with Gitaly | Gitaly Checksum |
| Git | Personal Snippets | Geo with Gitaly | Gitaly Checksum |
| Git | Group wiki repository | Geo with Gitaly | _Not implemented_ |
@@ -69,6 +68,8 @@ or using LVM.
It requires no special file system and can work with NFS or a mounted Storage Appliance (there may be
performance limitations when using a remote file system).
Geo will trigger garbage collection in Gitaly to [deduplicate forked repositories](../../../development/git_object_deduplication.md#git-object-deduplication-and-gitlab-geo) on Geo secondary sites.
Communication is done via Gitaly's own gRPC API. There are three possible ways of synchronization:
- Using regular Git clone/fetch from one Geo site to another (with special authentication).
@@ -186,7 +187,6 @@ successfully, you must replicate their data using some other means.
|[CI job artifacts (other than Job Logs)](../../../ci/pipelines/job_artifacts.md) | **Yes** (10.4) | [No](https://gitlab.com/gitlab-org/gitlab/-/issues/8923) | Via Object Storage provider if supported. Native Geo support (Beta). | Verified only manually using [Integrity Check Rake Task](../../raketasks/check.md) on both sites and comparing the output between them. |
|[CI Pipeline Artifacts](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/models/ci/pipeline_artifact.rb) | [**Yes** (13.11)](https://gitlab.com/gitlab-org/gitlab/-/issues/238464) | [**Yes** (13.11)](https://gitlab.com/gitlab-org/gitlab/-/issues/238464) | Via Object Storage provider if supported. Native Geo support (Beta). | Persists additional artifacts after a pipeline completes |
|[Job logs](../../job_logs.md) | **Yes** (10.4) | [No](https://gitlab.com/gitlab-org/gitlab/-/issues/8923) | Via Object Storage provider if supported. Native Geo support (Beta). | Verified only on transfer or manually using [Integrity Check Rake Task](../../raketasks/check.md) on both sites and comparing the output between them. |
|[Object pools for forked project deduplication](../../../development/git_object_deduplication.md) | **Yes** | No | No | |
|[Container Registry](../../packages/container_registry.md) | **Yes** (12.3) | No | No | Disabled by default. See [instructions](docker_registry.md) to enable. |
|[Content in object storage (beta)](object_storage.md) | **Yes** (12.4) | [No](https://gitlab.com/gitlab-org/gitlab/-/issues/13845) | No | |
|[Infrastructure Registry for Terraform Module](../../../user/packages/terraform_module_registry/index.md) | **Yes** (14.0) | [**Yes**](#limitation-of-verification-for-files-in-object-storage) (14.0) | Via Object Storage provider if supported. Native Geo support (Beta). | Behind feature flag `geo_package_file_replication`, enabled by default. |

View File

@@ -168,7 +168,7 @@ for a single run of the manual job.
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/21767) in GitLab 11.4.
When you do not want to run a job immediately, you can use the [`when:delayed`](../yaml/index.md#whendelayed) keyword to
When you do not want to run a job immediately, you can use the [`when:delayed`](../jobs/job_control.md#run-a-job-after-a-delay) keyword to
delay a job's execution for a certain period.
This is especially useful for timed incremental rollout where new code is rolled out gradually.

View File

@@ -497,6 +497,120 @@ test:
- "README.md"
```
## Create a job that must be run manually
You can require that a job doesn't run unless a user starts it. This is called a **manual job**.
You might want to use a manual job for something like deploying to production.
To specify a job as manual, add [`when: manual`](../yaml/index.md#when) to the job
in the `.gitlab-ci.yml` file.
By default, manual jobs display as skipped when the pipeline starts.
You can use [protected branches](../../user/project/protected_branches.md) to more strictly
[protect manual deployments](#protect-manual-jobs) from being run by unauthorized users.
### Types of manual jobs
Manual jobs can be either optional or blocking:
- **Optional**: The default setting for manual jobs.
- They have [`allow_failure: true`](../yaml/index.md#allow_failure) by default.
- The status does not contribute to the overall pipeline status. A pipeline can
succeed even if all of its manual jobs fail.
- **Blocking**: An optional setting for manual jobs.
- Add `allow_failure: false` to the job configuration.
- The pipeline stops at the stage where the job is defined. To let the pipeline
continue running, [run the manual job](#run-a-manual-job).
- Merge requests in projects with [merge when pipeline succeeds](../../user/project/merge_requests/merge_when_pipeline_succeeds.md)
enabled can't be merged with a blocked pipeline. Blocked pipelines show a status
of **blocked**.
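For example, a minimal sketch contrasting the two kinds (job names and scripts are illustrative):

```yaml
deploy-review:
  stage: deploy
  script: ./deploy review
  when: manual              # optional: allow_failure defaults to true

deploy-production:
  stage: deploy
  script: ./deploy production
  when: manual
  allow_failure: false      # blocking: the pipeline waits at this stage
```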
### Run a manual job
To run a manual job, you must have permission to merge to the assigned branch.
To run a manual job:
1. Go to the pipeline, job, [environment](../environments/index.md#configure-manual-deployments),
or deployment view.
1. Next to the manual job, select **Play** (**{play}**).
### Protect manual jobs **(PREMIUM)**
Use [protected environments](../environments/protected_environments.md)
to define a list of users authorized to run a manual job. You can authorize only
the users associated with a protected environment to trigger manual jobs, which can:
- More precisely limit who can deploy to an environment.
- Block a pipeline until an approved user "approves" it.
To protect a manual job:
1. Add an `environment` to the job. For example:
```yaml
deploy_prod:
stage: deploy
script:
- echo "Deploy to production server"
environment:
name: production
url: https://example.com
when: manual
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
```
1. In the [protected environments settings](../environments/protected_environments.md#protecting-environments),
select the environment (`production` in this example) and add the users, roles or groups
that are authorized to trigger the manual job to the **Allowed to Deploy** list. Only users in
this list, plus GitLab administrators (who can always use protected environments),
can trigger this manual job.
You can use protected environments with blocking manual jobs to have a list of users
allowed to approve later pipeline stages. Add `allow_failure: false` to the protected
manual job and the pipeline's next stages only run after the manual job is triggered
by authorized users.
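Building on the example above, a sketch of such a blocking protected manual job needs only one extra line:

```yaml
deploy_prod:
  stage: deploy
  script:
    - echo "Deploy to production server"
  environment:
    name: production
    url: https://example.com
  when: manual
  allow_failure: false  # later stages wait until an authorized user runs this job
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
```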
## Run a job after a delay
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/51352) in GitLab 11.4.
Use [`when: delayed`](../yaml/index.md#when) to execute scripts after a waiting period, or if you want to avoid
jobs immediately entering the `pending` state.
You can set the period with the `start_in` keyword. The value of `start_in` is an elapsed time in seconds, unless a unit is
provided. `start_in` must be less than or equal to one week. Examples of valid values include:
- `'5'` (a value with no unit must be surrounded by single quotes)
- `5 seconds`
- `30 minutes`
- `1 day`
- `1 week`
When a stage includes a delayed job, the pipeline doesn't progress until the delayed job finishes.
You can use this keyword to insert delays between different stages.
The timer of a delayed job starts immediately after the previous stage completes.
As with other types of jobs, a delayed job's timer doesn't start unless the previous stage passed.
The following example creates a job named `timed rollout 10%` that is executed 30 minutes after the previous stage completes:
```yaml
timed rollout 10%:
stage: deploy
script: echo 'Rolling out 10% ...'
when: delayed
start_in: 30 minutes
```
To stop the active timer of a delayed job, select **Unschedule** (**{time-out}**).
The job can no longer be scheduled to run automatically, but you can still execute it manually.
To start a delayed job immediately, select **Play** (**{play}**).
GitLab Runner picks the job up shortly afterwards.
## Use predefined CI/CD variables to run jobs only in specific pipeline types
You can use [predefined CI/CD variables](../variables/predefined_variables.md) to choose

View File

@@ -188,7 +188,7 @@ release-branch-workflow:
- testing
```
Example of the same workflow using [`when: manual`](../yaml/index.md#whenmanual) in GitLab CI/CD:
Example of the same workflow using [`when: manual`](../jobs/job_control.md#create-a-job-that-must-be-run-manually) in GitLab CI/CD:
```yaml
deploy_prod:

View File

@ -108,8 +108,8 @@ There are some high level differences between the products worth mentioning:
- The `.gitlab-ci.yml` file is checked in to the root of your repository, much like a Jenkinsfile, but
is in the YAML format (see [complete reference](../yaml/index.md)) instead of a Groovy DSL. It's most
analogous to the declarative Jenkinsfile format.
- Manual approvals or gates can be set up as [`when:manual` jobs](../yaml/index.md#whenmanual). These can
also leverage [`protected environments`](../yaml/index.md#protecting-manual-jobs)
- Manual approvals or gates can be set up as [`when:manual` jobs](../jobs/job_control.md#create-a-job-that-must-be-run-manually). These can
also leverage [`protected environments`](../jobs/job_control.md#protect-manual-jobs)
to control who is able to approve them.
- GitLab comes with a [container registry](../../user/packages/container_registry/index.md), and we recommend using
container images to set up your build environment. For example, set up one pipeline that builds your build environment

View File

@ -206,7 +206,7 @@ For each `var` or `file_var`, a key and value are required.
### Add manual interaction to your pipeline
Manual actions, configured using the [`when:manual`](../yaml/index.md#whenmanual) keyword,
[Manual jobs](../jobs/job_control.md#create-a-job-that-must-be-run-manually),
allow you to require manual interaction before moving forward in the pipeline.
You can do this straight from the pipeline graph. Just click the play button

View File

@ -1887,8 +1887,8 @@ variables:
### `allow_failure`
Use `allow_failure` when you want to let a job fail without impacting the rest of the CI
suite. The default value is `false`, except for [manual](#whenmanual) jobs that use
the `when: manual` syntax.
suite. The default value is `false`, except for [manual](../jobs/job_control.md#create-a-job-that-must-be-run-manually) jobs that use
the [`when: manual`](#when) syntax.
In jobs that use [`rules:`](#rules), all jobs default to `allow_failure: false`,
*including* `when: manual` jobs.
@ -1952,28 +1952,23 @@ test_job_2:
### `when`
Use `when` to implement jobs that run in case of failure or despite the
failure.
Use `when` to configure the conditions for when jobs run. If not defined in a job,
the default value is `when: on_success`.
The valid values of `when` are:
**Keyword type**: Job keyword. You can use it only as part of a job.
1. `on_success` (default) - Execute job only when all jobs in earlier stages succeed,
or are considered successful because they have `allow_failure: true`.
1. `on_failure` - Execute job only when at least one job in an earlier stage fails.
1. `always` - Execute job regardless of the status of jobs in earlier stages.
1. `manual` - Execute job [manually](#whenmanual).
1. `delayed` - [Delay the execution of a job](#whendelayed) for a specified duration.
Added in GitLab 11.4.
1. `never`:
- With job [`rules`](#rules), don't execute job.
- With [`workflow:rules`](#workflow), don't run pipeline.
**Possible inputs**:
In the following example, the script:
- `on_success` (default): Run the job only when all jobs in earlier stages succeed
or have `allow_failure: true`.
- `manual`: Run the job only when [triggered manually](../jobs/job_control.md#create-a-job-that-must-be-run-manually).
- `always`: Run the job regardless of the status of jobs in earlier stages.
- `on_failure`: Run the job only when at least one job in an earlier stage fails.
- `delayed`: [Delay the execution of a job](../jobs/job_control.md#run-a-job-after-a-delay)
for a specified duration.
- `never`: Don't run the job.
1. Executes `cleanup_build_job` only when `build_job` fails.
1. Always executes `cleanup_job` as the last step in pipeline regardless of
success or failure.
1. Executes `deploy_job` when you run it manually in the GitLab UI.
**Example of `when`**:
```yaml
stages:
@ -2012,116 +2007,26 @@ cleanup_job:
when: always
```
#### `when:manual`
In this example, the script:
A manual job is a type of job that is not executed automatically and must be explicitly
started by a user. You might want to use manual jobs for things like deploying to production.
1. Executes `cleanup_build_job` only when `build_job` fails.
1. Always executes `cleanup_job` as the last step in pipeline regardless of
success or failure.
1. Executes `deploy_job` when you run it manually in the GitLab UI.
To make a job manual, add `when: manual` to its configuration.
**Additional details**:
When the pipeline starts, manual jobs display as skipped and do not run automatically.
They can be started from the pipeline, job, [environment](../environments/index.md#configure-manual-deployments),
and deployment views.
- In [GitLab 13.5](https://gitlab.com/gitlab-org/gitlab/-/issues/201938) and later, you
can use `when:manual` in the same job as [`trigger`](#trigger). In GitLab 13.4 and
earlier, using them together causes the error `jobs:#{job-name} when should be on_success, on_failure or always`.
- The default behavior of `allow_failure` changes to `true` with `when: manual`.
However, if you use `when: manual` with [`rules`](#rules), `allow_failure` defaults
to `false`.
Manual jobs can be either optional or blocking:
**Related topics**:
- **Optional**: Manual jobs have [`allow_failure: true`](#allow_failure) set by default
and are considered optional. The status of an optional manual job does not contribute
to the overall pipeline status. A pipeline can succeed even if all its manual jobs fail.
- **Blocking**: To make a blocking manual job, add `allow_failure: false` to its configuration.
Blocking manual jobs stop further execution of the pipeline at the stage where the
job is defined. To let the pipeline continue running, click **{play}** (play) on
the blocking manual job.
Merge requests in projects with [merge when pipeline succeeds](../../user/project/merge_requests/merge_when_pipeline_succeeds.md)
enabled can't be merged with a blocked pipeline. Blocked pipelines show a status
of **blocked**.
When you use [`rules:`](#rules), `allow_failure` defaults to `false`, including for manual jobs.
To trigger a manual job, a user must have permission to merge to the assigned branch.
You can use [protected branches](../../user/project/protected_branches.md) to more strictly
[protect manual deployments](#protecting-manual-jobs) from being run by unauthorized users.
In [GitLab 13.5](https://gitlab.com/gitlab-org/gitlab/-/issues/201938) and later, you
can use `when:manual` in the same job as [`trigger`](#trigger). In GitLab 13.4 and
earlier, using them together causes the error `jobs:#{job-name} when should be on_success, on_failure or always`.
##### Protecting manual jobs **(PREMIUM)**
Use [protected environments](../environments/protected_environments.md)
to define a list of users authorized to run a manual job. You can authorize only
the users associated with a protected environment to trigger manual jobs, which can:
- More precisely limit who can deploy to an environment.
- Block a pipeline until an approved user "approves" it.
To protect a manual job:
1. Add an `environment` to the job. For example:
```yaml
deploy_prod:
stage: deploy
script:
- echo "Deploy to production server"
environment:
name: production
url: https://example.com
when: manual
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
```
1. In the [protected environments settings](../environments/protected_environments.md#protecting-environments),
select the environment (`production` in this example) and add the users, roles or groups
that are authorized to trigger the manual job to the **Allowed to Deploy** list. Only those in
this list can trigger this manual job, as well as GitLab administrators
who are always able to use protected environments.
You can use protected environments with blocking manual jobs to have a list of users
allowed to approve later pipeline stages. Add `allow_failure: false` to the protected
manual job and the pipeline's next stages only run after the manual job is triggered
by authorized users.
#### `when:delayed`
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/51352) in GitLab 11.4.
Use `when: delayed` to execute scripts after a waiting period, or if you want to avoid
jobs immediately entering the `pending` state.
You can set the period with `start_in` keyword. The value of `start_in` is an elapsed time in seconds, unless a unit is
provided. `start_in` must be less than or equal to one week. Examples of valid values include:
- `'5'`
- `5 seconds`
- `30 minutes`
- `1 day`
- `1 week`
When a stage includes a delayed job, the pipeline doesn't progress until the delayed job finishes.
You can use this keyword to insert delays between different stages.
The timer of a delayed job starts immediately after the previous stage completes.
Similar to other types of jobs, a delayed job's timer doesn't start unless the previous stage passes.
The following example creates a job named `timed rollout 10%` that is executed 30 minutes after the previous stage completes:
```yaml
timed rollout 10%:
stage: deploy
script: echo 'Rolling out 10% ...'
when: delayed
start_in: 30 minutes
```
To stop the active timer of a delayed job, click the **{time-out}** (**Unschedule**) button.
This job can no longer be scheduled to run automatically. You can, however, execute the job manually.
To start a delayed job immediately, click the **Play** button.
Soon GitLab Runner picks up and starts the job.
- `when` can be used with [`rules`](#rules) for more dynamic job control.
- `when` can be used with [`workflow`](#workflow) to control when a pipeline can start.
### `environment`
@ -2249,7 +2154,7 @@ In the above example, the `review_app` job deploys to the `review`
environment. A new `stop_review_app` job is listed under `on_stop`.
After the `review_app` job is finished, it triggers the
`stop_review_app` job based on what is defined under `when`. In this case,
it is set to `manual`, so it needs a [manual action](#whenmanual) from
it is set to `manual`, so it needs a [manual action](../jobs/job_control.md#create-a-job-that-must-be-run-manually) from
the GitLab UI to run.
Also in the example, `GIT_STRATEGY` is set to `none`. If the
@ -3697,7 +3602,7 @@ view which job triggered a downstream pipeline. In the [pipeline graph](../pipel
hover over the downstream pipeline job.
In [GitLab 13.5](https://gitlab.com/gitlab-org/gitlab/-/issues/201938) and later, you
can use [`when:manual`](#whenmanual) in the same job as `trigger`. In GitLab 13.4 and
can use [`when:manual`](#when) in the same job as `trigger`. In GitLab 13.4 and
earlier, using them together causes the error `jobs:#{job-name} when should be on_success, on_failure or always`.
You [cannot start `manual` trigger jobs with the API](https://gitlab.com/gitlab-org/gitlab/-/issues/284086).
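As a sketch, a manual trigger job in GitLab 13.5 or later might look like this (the downstream project path is hypothetical):

```yaml
deploy_downstream:
  trigger: my-group/my-deployment-project   # hypothetical downstream project
  when: manual
```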

View File

@ -79,7 +79,7 @@ case the runner downloads them using a dedicated API endpoint.
Artifacts are stored in object storage, while metadata is kept in the database. An important example of artifacts
are reports (like JUnit, SAST, and DAST) which are parsed and rendered in the merge request.
Job status transitions are not all automated. A user may run [manual jobs](../../ci/yaml/index.md#whenmanual), cancel a pipeline, retry
Job status transitions are not all automated. A user may run [manual jobs](../../ci/jobs/job_control.md#create-a-job-that-must-be-run-manually), cancel a pipeline, retry
specific failed jobs or the entire pipeline. Anything that
causes a job to change status triggers `ProcessPipelineService`, as it's responsible for
tracking the status of the entire pipeline.

View File

@ -469,7 +469,7 @@ If you want to know the in-depth details, here's what's really happening:
The following GitLab features are used among others:
- [Manual actions](../../ci/yaml/index.md#whenmanual)
- [Manual jobs](../../ci/jobs/job_control.md#create-a-job-that-must-be-run-manually)
- [Multi project pipelines](../../ci/pipelines/multi_project_pipelines.md)
- [Review Apps](../../ci/review_apps/index.md)
- [Artifacts](../../ci/yaml/index.md#artifacts)

View File

@ -119,9 +119,16 @@ sudo docker logs -f gitlab
After starting a container you can visit `gitlab.example.com` (or
`http://192.168.59.103` if you used boot2docker on macOS). It might take a while
before the Docker container starts to respond to queries.
The very first time you visit GitLab, you will be asked to set up the admin
password. After you change it, you can log in with username `root` and the
password you set up.
Visit the GitLab URL, and log in with username `root`
and the password from the following command:
```shell
sudo docker exec -it gitlab grep 'Password:' /etc/gitlab/initial_root_password
```
NOTE:
The password file will be automatically deleted in the first reconfigure run after 24 hours.
### Install GitLab using Docker Compose

View File

@ -19,7 +19,7 @@ in your GitLab project with any of your projects in Jira.
### Jira integration
This integration connects one or more GitLab project to a Jira instance. The Jira instance
This integration connects one or more GitLab projects to a Jira instance. The Jira instance
can be hosted by you or in [Atlassian cloud](https://www.atlassian.com/cloud).
The supported Jira versions are `v6.x`, `v7.x`, and `v8.x`.
@ -83,26 +83,31 @@ If these features do not work as expected, it is likely due to a problem with th
### GitLab is unable to comment on a Jira issue
Make sure that the Jira user you set up for the integration has the
correct access permission to post comments on a Jira issue and also to transition
the issue, if you'd like GitLab to also be able to do so.
If GitLab cannot comment on Jira issues, make sure the Jira user you
set up for the integration has permission to:
- Post comments on a Jira issue.
- Transition the Jira issue.
Jira issue references and update comments do not work if the GitLab issue tracker is disabled.
### GitLab is unable to close a Jira issue
Make sure the `Transition ID` you set within the Jira settings matches the one
Make sure the `Transition ID` you set in the Jira settings matches the one
your project needs to close an issue.
Make sure that the Jira issue is not already marked as resolved; that is,
the Jira issue resolution field is not set. (It should not be struck through in
Jira lists.)
Make sure that the Jira issue is not already marked as resolved. That is,
the Jira issue resolution field is not set, and the issue is not struck through in
Jira lists.
### CAPTCHA
CAPTCHA may be triggered after several consecutive failed login attempts
CAPTCHA may be triggered after several consecutive failed login attempts,
which may lead to a `401 unauthorized` error when testing your Jira integration.
If CAPTCHA has been triggered, you can't use Jira's REST API to
authenticate with the Jira site. You need to log in to your Jira instance
authenticate with the Jira site.
To fix this error, sign in to your Jira instance
and complete the CAPTCHA.
## Third-party Jira integrations

View File

@ -18,9 +18,13 @@ is created, based on the user's access permissions:
- Public projects can be selected by any signed-in user as a template for a new project,
if all enabled [project features](../project/settings/index.md#sharing-and-permissions)
except for GitLab Pages are set to **Everyone With Access**.
except for **GitLab Pages** and **Security & Compliance** are set to **Everyone With Access**.
The same applies to internal projects.
- Private projects can be selected only by users who are members of the projects.
The **Metrics Dashboard** is set to **Only Project Members** when you create a new project. Make
sure you change it to **Everyone With Access** before making it a project template.
Repository and database information that are copied over to each new project are
identical to the data exported with the [GitLab Project Import/Export](../project/settings/import_export.md).

View File

@ -49,6 +49,17 @@ If you have need of this, please explain why by filling out the survey [here](ht
## Supported languages and package managers
Dependency Scanning automatically detects the languages used in the repository. All analyzers
matching the detected languages are run. There is usually no need to customize the selection of
analyzers. We recommend not specifying the analyzers so you automatically use the full selection
for best coverage, avoiding the need to make adjustments when there are deprecations or removals.
However, you can override the selection using the variable `DS_EXCLUDED_ANALYZERS`.
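For example, a minimal sketch that keeps the default analyzer selection except for one exclusion (the excluded analyzer name is illustrative):

```yaml
include:
  - template: Security/Dependency-Scanning.gitlab-ci.yml

variables:
  DS_EXCLUDED_ANALYZERS: "gemnasium-maven"   # illustrative analyzer name
```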
The language detection relies on CI job [`rules`](../../../ci/yaml/index.md#rules) and searches a
maximum of two directory levels from the repository's root. For example, the
`gemnasium-dependency_scanning` job is enabled if a repository contains either a `Gemfile` or
`api/Gemfile` file, but not if the only supported dependency file is `api/client/Gemfile`.
The following languages and dependency managers are supported:
<style>

View File

@ -35,7 +35,6 @@ The following resources are migrated to the target instance:
- The user already exists in the target GitLab instance and
- The user has a public email in the source GitLab instance that matches a
confirmed email in the target GitLab instance
- Epics ([Introduced in 13.7](https://gitlab.com/gitlab-org/gitlab/-/issues/250281))
- title
- description
@ -78,8 +77,7 @@ Any other items are **not** migrated.
## Enable or disable GitLab Group Migration
Support for GitLab Group Migration is under development and not ready for production use. It is
deployed behind a feature flag that is **disabled by default**.
GitLab Migration is deployed behind a feature flag that is **disabled by default**.
[GitLab administrators with access to the GitLab Rails console](../../../administration/feature_flags.md) can enable it.
To enable it:

View File

@ -59,7 +59,7 @@ specific environment, there are a lot of use cases. To name a few:
- You want to promote what's running in staging, to production. You go to the
environments list, verify that what's running in staging is what you think is
running, then click on the [manual action](../../ci/yaml/index.md#whenmanual) to deploy to production.
running, then click on the [manual job](../../ci/jobs/job_control.md#create-a-job-that-must-be-run-manually) to deploy to production.
- You trigger a deploy, and you have many containers to upgrade so you know
this takes a while (you've also throttled your deploy to only take down X
containers at a time). But you need to tell someone when it's deployed, so you

View File

@ -119,7 +119,7 @@ For a software developer working in a team:
1. Pushes a commit with their final review.
1. [Approves the merge request](approvals/index.md).
1. Sets it to [merge when pipeline succeeds](merge_when_pipeline_succeeds.md).
1. Your changes get deployed to production with [manual actions](../../../ci/yaml/index.md#whenmanual) for GitLab CI/CD.
1. Your changes get deployed to production with [manual jobs](../../../ci/jobs/job_control.md#create-a-job-that-must-be-run-manually) for GitLab CI/CD.
1. Your implementations were successfully shipped to your customer.
For a web developer writing a webpage for your company's website:

View File

@ -53,7 +53,7 @@ module Gitlab
personal_access_token_check(password, project) ||
deploy_token_check(login, password, project) ||
user_with_password_for_git(login, password) ||
Gitlab::Auth::Result.new
Gitlab::Auth::Result::EMPTY
rate_limit!(rate_limiter, success: result.success?, login: login)
look_to_limit_user(result.actor)

View File

@ -2,8 +2,17 @@
module Gitlab
module Auth
Result = Struct.new(:actor, :project, :type, :authentication_abilities) do
self::EMPTY = self.new(nil, nil, nil, nil).freeze
class Result
attr_reader :actor, :project, :type, :authentication_abilities
def initialize(actor, project, type, authentication_abilities)
@actor = actor
@project = project
@type = type
@authentication_abilities = authentication_abilities
end
EMPTY = self.new(nil, nil, nil, nil).freeze
def ci?(for_project)
type == :ci &&
@ -27,6 +36,7 @@ module Gitlab
def auth_user
actor.is_a?(User) ? actor : nil
end
alias_method :user, :auth_user
def deploy_token
actor.is_a?(DeployToken) ? actor : nil

View File

@ -3644,9 +3644,6 @@ msgstr ""
msgid "An error occurred while fetching the latest pipeline."
msgstr ""
msgid "An error occurred while fetching the pipeline."
msgstr ""
msgid "An error occurred while fetching the releases. Please try again."
msgstr ""
@ -20913,6 +20910,9 @@ msgstr ""
msgid "Merge automatically (%{strategy})"
msgstr ""
msgid "Merge blocked: new changes were just added."
msgstr ""
msgid "Merge blocked: the source branch must be rebased onto the target branch."
msgstr ""
@ -22328,9 +22328,6 @@ msgstr ""
msgid "New branch unavailable"
msgstr ""
msgid "New changes were added. %{linkStart}Reload the page to review them%{linkEnd}"
msgstr ""
msgid "New confidential epic title "
msgstr ""
@ -24795,6 +24792,51 @@ msgstr ""
msgid "Pipeline|Skipped"
msgstr ""
msgid "Pipeline|Source"
msgstr ""
msgid "Pipeline|Source|API"
msgstr ""
msgid "Pipeline|Source|Chat"
msgstr ""
msgid "Pipeline|Source|External"
msgstr ""
msgid "Pipeline|Source|External Pull Request"
msgstr ""
msgid "Pipeline|Source|Merge Request"
msgstr ""
msgid "Pipeline|Source|On-Demand DAST Scan"
msgstr ""
msgid "Pipeline|Source|On-Demand DAST Validation"
msgstr ""
msgid "Pipeline|Source|Parent Pipeline"
msgstr ""
msgid "Pipeline|Source|Pipeline"
msgstr ""
msgid "Pipeline|Source|Push"
msgstr ""
msgid "Pipeline|Source|Schedule"
msgstr ""
msgid "Pipeline|Source|Trigger"
msgstr ""
msgid "Pipeline|Source|Web"
msgstr ""
msgid "Pipeline|Source|Web IDE"
msgstr ""
msgid "Pipeline|Specify variable values to be used in this run. The values specified in %{linkStart}CI/CD settings%{linkEnd} will be used by default."
msgstr ""
@ -28548,6 +28590,9 @@ msgstr ""
msgid "Review App|View latest app"
msgstr ""
msgid "Review changes"
msgstr ""
msgid "Review requested from %{name}"
msgstr ""
@ -39845,9 +39890,6 @@ msgstr ""
msgid "mrWidget|The pipeline for this merge request did not complete. Push a new commit to fix the failure, or check the %{linkStart}troubleshooting documentation%{linkEnd} to see other possible actions."
msgstr ""
msgid "mrWidget|The source branch HEAD has recently changed. Please reload the page and review the changes before merging"
msgstr ""
msgid "mrWidget|The source branch has been deleted"
msgstr ""

View File

@ -115,7 +115,7 @@
"codesandbox-api": "0.0.23",
"compression-webpack-plugin": "^5.0.2",
"copy-webpack-plugin": "^6.4.1",
"core-js": "^3.16.1",
"core-js": "^3.16.2",
"cron-validator": "^1.1.1",
"cropper": "^2.3.0",
"css-loader": "^2.1.1",

View File

@ -4,26 +4,94 @@ require 'spec_helper'
RSpec.describe Import::AvailableNamespacesController do
let_it_be(:user) { create(:user) }
let_it_be(:manageable_groups) { [create(:group), create(:group)] }
before do
sign_in(user)
manageable_groups.each { |group| group.add_maintainer(user) }
end
describe "GET index" do
it "returns list of available namespaces" do
unrelated_group = create(:group)
context "when having group with role never allowed to create projects" do
using RSpec::Parameterized::TableSyntax
get :index
where(
role: [:guest, :reporter],
default_project_creation_access: [::Gitlab::Access::MAINTAINER_PROJECT_ACCESS, ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS],
group_project_creation_level: [nil, ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS, ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS])
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_kind_of(Array)
with_them do
before do
stub_application_setting(default_project_creation: default_project_creation_access)
end
response_ids = json_response.map { |n| n["id"] }
it "does not include group with access level #{params[:role]} in list" do
group = create(:group, project_creation_level: group_project_creation_level)
group.add_user(user, role)
get :index
expect(response_ids).not_to include(unrelated_group.id)
expect(response_ids).to contain_exactly(*manageable_groups.map(&:id))
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).not_to include({
'id' => group.id,
'full_path' => group.full_path
})
end
end
end
context "when having group with role always allowed to create projects" do
using RSpec::Parameterized::TableSyntax
where(
role: [:maintainer, :owner],
default_project_creation_access: [::Gitlab::Access::MAINTAINER_PROJECT_ACCESS, ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS],
group_project_creation_level: [nil, ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS, ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS])
with_them do
before do
stub_application_setting(default_project_creation: default_project_creation_access)
end
it "does not include group with access level #{params[:role]} in list" do
group = create(:group, project_creation_level: group_project_creation_level)
group.add_user(user, role)
get :index
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include({
'id' => group.id,
'full_path' => group.full_path
})
end
end
end
context "when having developer role" do
using RSpec::Parameterized::TableSyntax
where(:default_project_creation_access, :project_creation_level, :is_visible) do
::Gitlab::Access::MAINTAINER_PROJECT_ACCESS | nil | false
::Gitlab::Access::MAINTAINER_PROJECT_ACCESS | ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS | true
::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS | nil | true
::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS | ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS | false
end
with_them do
before do
stub_application_setting(default_project_creation: default_project_creation_access)
end
it "#{params[:is_visible] ? 'includes' : 'does not include'} group with access level #{params[:role]} in list" do
group = create(:group, project_creation_level: project_creation_level)
group.add_user(user, :developer)
get :index
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).send(is_visible ? 'to' : 'not_to', include({
'id' => group.id,
'full_path' => group.full_path
}))
end
end
end
context "with an anonymous user" do

View File

@ -74,7 +74,6 @@ RSpec.describe Import::ManifestController, :clean_gitlab_redis_shared_state do
expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo1[:id])
expect(json_response.dig("provider_repos", 1, "id")).to eq(repo2[:id])
expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
end
it "does not show already added project" do

View File

@ -80,6 +80,8 @@ RSpec.describe ApplicationExperiment, :experiment do
end
it "publishes to the database if we've opted for that" do
subject.record!
expect(subject).to receive(:publish_to_database)
subject.publish
@ -121,6 +123,8 @@ RSpec.describe ApplicationExperiment, :experiment do
end
describe '#publish_to_database' do
using RSpec::Parameterized::TableSyntax
shared_examples 'does not record to the database' do
it 'does not create an experiment record' do
expect { subject.publish_to_database }.not_to change(Experiment, :count)
@ -131,55 +135,43 @@ RSpec.describe ApplicationExperiment, :experiment do
end
end
context 'when we explicitly request to record' do
using RSpec::Parameterized::TableSyntax
context 'when there is a usable subject' do
let(:context) { { context_key => context_value } }
before do
subject.record!
where(:context_key, :context_value, :object_type) do
:namespace | build(:namespace) | :namespace
:group | build(:namespace) | :namespace
:project | build(:project) | :project
:user | build(:user) | :user
:actor | build(:user) | :user
end
context 'when there is a usable subject' do
let(:context) { { context_key => context_value } }
with_them do
it 'creates an experiment and experiment subject record' do
expect { subject.publish_to_database }.to change(Experiment, :count).by(1)
where(:context_key, :context_value, :object_type) do
:namespace | build(:namespace) | :namespace
:group | build(:namespace) | :namespace
:project | build(:project) | :project
:user | build(:user) | :user
:actor | build(:user) | :user
end
with_them do
it 'creates an experiment and experiment subject record' do
expect { subject.publish_to_database }.to change(Experiment, :count).by(1)
expect(Experiment.last.name).to eq('namespaced/stub')
expect(ExperimentSubject.last.send(object_type)).to eq(context[context_key])
end
expect(Experiment.last.name).to eq('namespaced/stub')
expect(ExperimentSubject.last.send(object_type)).to eq(context[context_key])
end
end
end
context 'when there is not a usable subject' do
let(:context) { { context_key => context_value } }
context 'when there is not a usable subject' do
let(:context) { { context_key => context_value } }
where(:context_key, :context_value) do
:namespace | nil
:foo | :bar
end
with_them do
include_examples 'does not record to the database'
end
where(:context_key, :context_value) do
:namespace | nil
:foo | :bar
end
context 'but we should not track' do
let(:should_track) { false }
with_them do
include_examples 'does not record to the database'
end
end
context 'when we have not explicitly requested to record' do
context 'but we should not track' do
let(:should_track) { false }
include_examples 'does not record to the database'
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
FactoryBot.define do
factory :project_ci_feature_usage, class: 'Projects::CiFeatureUsage' do
project factory: :project
feature { :code_coverage } # rubocop: disable RSpec/EmptyExampleGroup
default_branch { false }
end
end

View File

@ -778,61 +778,6 @@ RSpec.describe 'Pipeline', :js do
describe 'GET /:project/-/pipelines/:id' do
subject { visit project_pipeline_path(project, pipeline) }
# remove when :graphql_pipeline_details flag is removed
# https://gitlab.com/gitlab-org/gitlab/-/issues/299112
context 'when :graphql_pipeline_details flag is off' do
before do
stub_feature_flags(graphql_pipeline_details: false)
stub_feature_flags(graphql_pipeline_details_users: false)
end
it 'shows deploy job as created' do
subject
within('.js-pipeline-header-container') do
expect(page).to have_content('pending')
end
within('.js-pipeline-graph') do
within '.stage-column:nth-child(1)' do
expect(page).to have_content('test')
expect(page).to have_css('.ci-status-icon-pending')
end
within '.stage-column:nth-child(2)' do
expect(page).to have_content('deploy')
expect(page).to have_css('.ci-status-icon-created')
end
end
end
context 'when test job succeeded' do
before do
test_job.success!
end
it 'shows deploy job as pending' do
subject
within('.js-pipeline-header-container') do
expect(page).to have_content('running')
end
within('.pipeline-graph') do
within '.stage-column:nth-child(1)' do
expect(page).to have_content('test')
expect(page).to have_css('.ci-status-icon-success')
end
within '.stage-column:nth-child(2)' do
expect(page).to have_content('deploy')
expect(page).to have_css('.ci-status-icon-pending')
end
end
end
end
end
it 'shows deploy job as created' do
subject
@ -902,29 +847,6 @@ RSpec.describe 'Pipeline', :js do
end
end
# remove when :graphql_pipeline_details flag is removed
# https://gitlab.com/gitlab-org/gitlab/-/issues/299112
context 'when :graphql_pipeline_details flag is off' do
before do
stub_feature_flags(graphql_pipeline_details: false)
stub_feature_flags(graphql_pipeline_details_users: false)
end
it 'shows deploy job as waiting for resource' do
subject
within('.js-pipeline-header-container') do
expect(page).to have_content('waiting')
end
within('.pipeline-graph') do
within '.stage-column:nth-child(2)' do
expect(page).to have_content('deploy')
expect(page).to have_css('.ci-status-icon-waiting-for-resource')
end
end
end
end
context 'when resource is released from another job' do
before do
another_job.success!
@ -944,29 +866,6 @@ RSpec.describe 'Pipeline', :js do
end
end
end
# remove when :graphql_pipeline_details flag is removed
# https://gitlab.com/gitlab-org/gitlab/-/issues/299112
context 'when :graphql_pipeline_details flag is off' do
before do
stub_feature_flags(graphql_pipeline_details: false)
stub_feature_flags(graphql_pipeline_details_users: false)
end
it 'shows deploy job as pending' do
subject
within('.js-pipeline-header-container') do
expect(page).to have_content('running')
end
within('.pipeline-graph') do
within '.stage-column:nth-child(2)' do
expect(page).to have_content('deploy')
expect(page).to have_css('.ci-status-icon-pending')
end
end
end
end
end
context 'when deploy job is a bridge to trigger a downstream pipeline' do
@ -1234,23 +1133,6 @@ RSpec.describe 'Pipeline', :js do
expect(page).not_to have_content('Failed Jobs')
expect(page).to have_selector('.js-pipeline-graph')
end
# remove when :graphql_pipeline_details flag is removed
# https://gitlab.com/gitlab-org/gitlab/-/issues/299112
context 'when :graphql_pipeline_details flag is off' do
before do
stub_feature_flags(graphql_pipeline_details: false)
stub_feature_flags(graphql_pipeline_details_users: false)
end
it 'displays the pipeline graph' do
subject
expect(current_path).to eq(pipeline_path(pipeline))
expect(page).not_to have_content('Failed Jobs')
expect(page).to have_selector('.pipeline-visualization')
end
end
end
end

View File

@ -12,8 +12,6 @@ RSpec.describe 'Pipelines', :js do
before do
sign_in(user)
stub_feature_flags(graphql_pipeline_details: false)
stub_feature_flags(graphql_pipeline_details_users: false)
project.add_developer(user)
project.update!(auto_devops_attributes: { enabled: false })

View File

@ -7,12 +7,17 @@ RSpec.describe API::MergeRequests, '(JavaScript fixtures)', type: :request do
include WikiHelpers
include JavaScriptFixturesHelpers
let_it_be(:user) { create(:user) }
let_it_be(:user) { create(:user, username: 'gitlab') }
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, :repository, group: group) }
let_it_be(:project_wiki) { create(:project_wiki, user: user) }
let_it_be(:label) { create(:label, project: project, title: 'bug') }
let_it_be(:milestone) { create(:milestone, project: project, title: '1.1') }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
let_it_be(:project_wiki) { create(:project_wiki, project: project, user: user) }
let(:project_wiki_page) { create(:wiki_page, wiki: project_wiki) }

View File

@ -112,3 +112,7 @@
| cell | cell | cell |
- name: emoji
markdown: ':sparkles: :heart: :100:'
- name: reference
context: project_wiki
markdown: |-
Hi @gitlab - thank you for reporting this ~bug (#1) we hope to fix it in %1.1 as part of !1

View File

@ -20,6 +20,7 @@ describe('Pipelines filtered search', () => {
const findTagToken = () => getSearchToken('tag');
const findUserToken = () => getSearchToken('username');
const findStatusToken = () => getSearchToken('status');
const findSourceToken = () => getSearchToken('source');
const createComponent = (params = {}) => {
wrapper = mount(PipelinesFilteredSearch, {
@ -32,6 +33,8 @@ describe('Pipelines filtered search', () => {
};
beforeEach(() => {
window.gon = { features: { pipelineSourceFilter: true } };
mock = new MockAdapter(axios);
jest.spyOn(Api, 'projectUsers').mockResolvedValue(users);
@ -70,6 +73,14 @@ describe('Pipelines filtered search', () => {
operators: OPERATOR_IS_ONLY,
});
expect(findSourceToken()).toMatchObject({
type: 'source',
icon: 'trigger-source',
title: 'Source',
unique: true,
operators: OPERATOR_IS_ONLY,
});
expect(findStatusToken()).toMatchObject({
type: 'status',
icon: 'status',

View File

@ -1,300 +0,0 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { setHTMLFixture } from 'helpers/fixtures';
import GraphComponentLegacy from '~/pipelines/components/graph/graph_component_legacy.vue';
import LinkedPipelinesColumnLegacy from '~/pipelines/components/graph/linked_pipelines_column_legacy.vue';
import StageColumnComponentLegacy from '~/pipelines/components/graph/stage_column_component_legacy.vue';
import PipelinesMediator from '~/pipelines/pipeline_details_mediator';
import PipelineStore from '~/pipelines/stores/pipeline_store';
import linkedPipelineJSON from './linked_pipelines_mock_data';
import graphJSON from './mock_data_legacy';
describe('graph component', () => {
let store;
let mediator;
let wrapper;
const findExpandPipelineBtn = () => wrapper.find('[data-testid="expand-pipeline-button"]');
const findAllExpandPipelineBtns = () => wrapper.findAll('[data-testid="expand-pipeline-button"]');
const findStageColumns = () => wrapper.findAll(StageColumnComponentLegacy);
const findStageColumnAt = (i) => findStageColumns().at(i);
beforeEach(() => {
mediator = new PipelinesMediator({ endpoint: '' });
store = new PipelineStore();
store.storePipeline(linkedPipelineJSON);
setHTMLFixture('<div class="layout-page"></div>');
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
describe('while is loading', () => {
it('should render a loading icon', () => {
wrapper = mount(GraphComponentLegacy, {
propsData: {
isLoading: true,
pipeline: {},
mediator,
},
});
expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
});
});
describe('with data', () => {
beforeEach(() => {
wrapper = mount(GraphComponentLegacy, {
propsData: {
isLoading: false,
pipeline: graphJSON,
mediator,
},
});
});
it('renders the graph', () => {
expect(wrapper.find('.js-pipeline-graph').exists()).toBe(true);
expect(wrapper.find('.loading-icon').exists()).toBe(false);
expect(wrapper.find('.stage-column-list').exists()).toBe(true);
});
it('renders columns in the graph', () => {
expect(findStageColumns()).toHaveLength(graphJSON.details.stages.length);
});
});
describe('when linked pipelines are present', () => {
beforeEach(() => {
wrapper = mount(GraphComponentLegacy, {
propsData: {
isLoading: false,
pipeline: store.state.pipeline,
mediator,
},
});
});
describe('rendered output', () => {
it('should include the pipelines graph', () => {
expect(wrapper.find('.js-pipeline-graph').exists()).toBe(true);
});
it('should not include the loading icon', () => {
expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
});
it('should include the stage column', () => {
expect(findStageColumnAt(0).exists()).toBe(true);
});
it('stage column should have no-margin, gl-mr-26, has-only-one-job classes if there is only one job', () => {
expect(findStageColumnAt(0).classes()).toEqual(
expect.arrayContaining(['no-margin', 'gl-mr-26', 'has-only-one-job']),
);
});
it('should include the left-margin class on the second child', () => {
expect(findStageColumnAt(1).classes('left-margin')).toBe(true);
});
it('should include the left-connector class in the build of the second child', () => {
expect(findStageColumnAt(1).find('.build:nth-child(1)').classes('left-connector')).toBe(
true,
);
});
it('should include the js-has-linked-pipelines flag', () => {
expect(wrapper.find('.js-has-linked-pipelines').exists()).toBe(true);
});
});
describe('computeds and methods', () => {
describe('capitalizeStageName', () => {
it('it capitalizes the stage name', () => {
expect(wrapper.findAll('.stage-column .stage-name').at(1).text()).toBe('Prebuild');
});
});
describe('stageConnectorClass', () => {
it('it returns left-margin when there is a triggerer', () => {
expect(findStageColumnAt(1).classes('left-margin')).toBe(true);
});
});
});
describe('linked pipelines components', () => {
beforeEach(() => {
wrapper = mount(GraphComponentLegacy, {
propsData: {
isLoading: false,
pipeline: store.state.pipeline,
mediator,
},
});
});
it('should render an upstream pipelines column at first position', () => {
expect(wrapper.find(LinkedPipelinesColumnLegacy).exists()).toBe(true);
expect(wrapper.find('.stage-column .stage-name').text()).toBe('Upstream');
});
it('should render a downstream pipelines column at last position', () => {
const stageColumnNames = wrapper.findAll('.stage-column .stage-name');
expect(wrapper.find(LinkedPipelinesColumnLegacy).exists()).toBe(true);
expect(stageColumnNames.at(stageColumnNames.length - 1).text()).toBe('Downstream');
});
describe('triggered by', () => {
describe('on click', () => {
it('should emit `onClickUpstreamPipeline` when triggered by linked pipeline is clicked', async () => {
const btnWrapper = findExpandPipelineBtn();
btnWrapper.trigger('click');
await nextTick();
expect(wrapper.emitted().onClickUpstreamPipeline).toEqual([
store.state.pipeline.triggered_by,
]);
});
});
describe('with expanded pipeline', () => {
it('should render expanded pipeline', async () => {
// expand the pipeline
store.state.pipeline.triggered_by[0].isExpanded = true;
wrapper = mount(GraphComponentLegacy, {
propsData: {
isLoading: false,
pipeline: store.state.pipeline,
mediator,
},
});
await nextTick();
expect(wrapper.find('.js-upstream-pipeline-12').exists()).toBe(true);
});
});
});
describe('triggered', () => {
describe('on click', () => {
// We have to mock this property of HTMLElement since component relies on it
let offsetParentDescriptor;
beforeAll(() => {
offsetParentDescriptor = Object.getOwnPropertyDescriptor(
HTMLElement.prototype,
'offsetParent',
);
Object.defineProperty(HTMLElement.prototype, 'offsetParent', {
get() {
return this.parentNode;
},
});
});
afterAll(() => {
Object.defineProperty(HTMLElement.prototype, offsetParentDescriptor);
});
it('should emit `onClickDownstreamPipeline`', async () => {
const btnWrappers = findAllExpandPipelineBtns();
const downstreamBtnWrapper = btnWrappers.at(btnWrappers.length - 1);
downstreamBtnWrapper.trigger('click');
await nextTick();
expect(wrapper.emitted().onClickDownstreamPipeline).toEqual([
[store.state.pipeline.triggered[1]],
]);
});
});
describe('with expanded pipeline', () => {
it('should render expanded pipeline', async () => {
// expand the pipeline
store.state.pipeline.triggered[0].isExpanded = true;
wrapper = mount(GraphComponentLegacy, {
propsData: {
isLoading: false,
pipeline: store.state.pipeline,
mediator,
},
});
await nextTick();
expect(wrapper.find('.js-downstream-pipeline-34993051')).not.toBeNull();
});
});
describe('when column requests a refresh', () => {
beforeEach(() => {
findStageColumnAt(0).vm.$emit('refreshPipelineGraph');
});
it('refreshPipelineGraph is emitted', () => {
expect(wrapper.emitted().refreshPipelineGraph).toHaveLength(1);
});
});
});
});
});
describe('when linked pipelines are not present', () => {
beforeEach(() => {
const pipeline = Object.assign(linkedPipelineJSON, { triggered: null, triggered_by: null });
wrapper = mount(GraphComponentLegacy, {
propsData: {
isLoading: false,
pipeline,
mediator,
},
});
});
describe('rendered output', () => {
it('should include the first column with a no margin', () => {
const firstColumn = wrapper.find('.stage-column');
expect(firstColumn.classes('no-margin')).toBe(true);
});
it('should not render a linked pipelines column', () => {
expect(wrapper.find('.linked-pipelines-column').exists()).toBe(false);
});
});
describe('stageConnectorClass', () => {
it('it returns no-margin when no triggerer and there is one job', () => {
expect(findStageColumnAt(0).classes('no-margin')).toBe(true);
});
it('it returns left-margin when no triggerer and not the first stage', () => {
expect(findStageColumnAt(1).classes('left-margin')).toBe(true);
});
});
});
describe('capitalizeStageName', () => {
it('capitalizes and escapes stage name', () => {
wrapper = mount(GraphComponentLegacy, {
propsData: {
isLoading: false,
pipeline: graphJSON,
mediator,
},
});
expect(findStageColumnAt(1).props('title')).toEqual(
'Deploy &lt;img src=x onerror=alert(document.domain)&gt;',
);
});
});
});

View File

@ -1,40 +0,0 @@
import { shallowMount } from '@vue/test-utils';
import { UPSTREAM } from '~/pipelines/components/graph/constants';
import LinkedPipeline from '~/pipelines/components/graph/linked_pipeline.vue';
import LinkedPipelinesColumnLegacy from '~/pipelines/components/graph/linked_pipelines_column_legacy.vue';
import mockData from './linked_pipelines_mock_data';
describe('Linked Pipelines Column', () => {
const propsData = {
columnTitle: 'Upstream',
linkedPipelines: mockData.triggered,
graphPosition: 'right',
projectId: 19,
type: UPSTREAM,
};
let wrapper;
beforeEach(() => {
wrapper = shallowMount(LinkedPipelinesColumnLegacy, { propsData });
});
afterEach(() => {
wrapper.destroy();
});
it('renders the pipeline orientation', () => {
const titleElement = wrapper.find('.linked-pipelines-column-title');
expect(titleElement.text()).toBe(propsData.columnTitle);
});
it('renders the correct number of linked pipelines', () => {
const linkedPipelineElements = wrapper.findAll(LinkedPipeline);
expect(linkedPipelineElements.length).toBe(propsData.linkedPipelines.length);
});
it('renders cross project triangle when column is upstream', () => {
expect(wrapper.find('.cross-project-triangle').exists()).toBe(true);
});
});

View File

@ -1,261 +0,0 @@
export default {
id: 123,
user: {
name: 'Root',
username: 'root',
id: 1,
state: 'active',
avatar_url: null,
web_url: 'http://localhost:3000/root',
},
active: false,
coverage: null,
path: '/root/ci-mock/pipelines/123',
details: {
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
has_details: true,
details_path: '/root/ci-mock/pipelines/123',
favicon:
'/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
},
duration: 9,
finished_at: '2017-04-19T14:30:27.542Z',
stages: [
{
name: 'test',
title: 'test: passed',
groups: [
{
name: 'test',
size: 1,
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
has_details: true,
details_path: '/root/ci-mock/builds/4153',
favicon:
'/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
action: {
icon: 'retry',
title: 'Retry',
path: '/root/ci-mock/builds/4153/retry',
method: 'post',
},
},
jobs: [
{
id: 4153,
name: 'test',
build_path: '/root/ci-mock/builds/4153',
retry_path: '/root/ci-mock/builds/4153/retry',
playable: false,
created_at: '2017-04-13T09:25:18.959Z',
updated_at: '2017-04-13T09:25:23.118Z',
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
has_details: true,
details_path: '/root/ci-mock/builds/4153',
favicon:
'/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
action: {
icon: 'retry',
title: 'Retry',
path: '/root/ci-mock/builds/4153/retry',
method: 'post',
},
},
},
],
},
],
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
has_details: true,
details_path: '/root/ci-mock/pipelines/123#test',
favicon:
'/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
},
path: '/root/ci-mock/pipelines/123#test',
dropdown_path: '/root/ci-mock/pipelines/123/stage.json?stage=test',
},
{
name: 'deploy <img src=x onerror=alert(document.domain)>',
title: 'deploy: passed',
groups: [
{
name: 'deploy to production',
size: 1,
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
has_details: true,
details_path: '/root/ci-mock/builds/4166',
favicon:
'/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
action: {
icon: 'retry',
title: 'Retry',
path: '/root/ci-mock/builds/4166/retry',
method: 'post',
},
},
jobs: [
{
id: 4166,
name: 'deploy to production',
build_path: '/root/ci-mock/builds/4166',
retry_path: '/root/ci-mock/builds/4166/retry',
playable: false,
created_at: '2017-04-19T14:29:46.463Z',
updated_at: '2017-04-19T14:30:27.498Z',
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
has_details: true,
details_path: '/root/ci-mock/builds/4166',
favicon:
'/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
action: {
icon: 'retry',
title: 'Retry',
path: '/root/ci-mock/builds/4166/retry',
method: 'post',
},
},
},
],
},
{
name: 'deploy to staging',
size: 1,
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
has_details: true,
details_path: '/root/ci-mock/builds/4159',
favicon:
'/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
action: {
icon: 'retry',
title: 'Retry',
path: '/root/ci-mock/builds/4159/retry',
method: 'post',
},
},
jobs: [
{
id: 4159,
name: 'deploy to staging',
build_path: '/root/ci-mock/builds/4159',
retry_path: '/root/ci-mock/builds/4159/retry',
playable: false,
created_at: '2017-04-18T16:32:08.420Z',
updated_at: '2017-04-18T16:32:12.631Z',
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
has_details: true,
details_path: '/root/ci-mock/builds/4159',
favicon:
'/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
action: {
icon: 'retry',
title: 'Retry',
path: '/root/ci-mock/builds/4159/retry',
method: 'post',
},
},
},
],
},
],
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
has_details: true,
details_path: '/root/ci-mock/pipelines/123#deploy',
favicon:
'/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
},
path: '/root/ci-mock/pipelines/123#deploy',
dropdown_path: '/root/ci-mock/pipelines/123/stage.json?stage=deploy',
},
],
artifacts: [],
manual_actions: [
{
name: 'deploy to production',
path: '/root/ci-mock/builds/4166/play',
playable: false,
},
],
},
flags: {
latest: true,
triggered: false,
stuck: false,
yaml_errors: false,
retryable: false,
cancelable: false,
},
ref: {
name: 'main',
path: '/root/ci-mock/tree/main',
tag: false,
branch: true,
},
commit: {
id: '798e5f902592192afaba73f4668ae30e56eae492',
short_id: '798e5f90',
title: "Merge branch 'new-branch' into 'main'\r",
created_at: '2017-04-13T10:25:17.000+01:00',
parent_ids: [
'54d483b1ed156fbbf618886ddf7ab023e24f8738',
'c8e2d38a6c538822e81c57022a6e3a0cfedebbcc',
],
message:
"Merge branch 'new-branch' into 'main'\r\n\r\nAdd new file\r\n\r\nSee merge request !1",
author_name: 'Root',
author_email: 'admin@example.com',
authored_date: '2017-04-13T10:25:17.000+01:00',
committer_name: 'Root',
committer_email: 'admin@example.com',
committed_date: '2017-04-13T10:25:17.000+01:00',
author: {
name: 'Root',
username: 'root',
id: 1,
state: 'active',
avatar_url: null,
web_url: 'http://localhost:3000/root',
},
author_gravatar_url: null,
commit_url:
'http://localhost:3000/root/ci-mock/commit/798e5f902592192afaba73f4668ae30e56eae492',
commit_path: '/root/ci-mock/commit/798e5f902592192afaba73f4668ae30e56eae492',
},
created_at: '2017-04-13T09:25:18.881Z',
updated_at: '2017-04-19T14:30:27.561Z',
};

View File

@ -1,130 +0,0 @@
import { shallowMount } from '@vue/test-utils';
import StageColumnComponentLegacy from '~/pipelines/components/graph/stage_column_component_legacy.vue';
describe('stage column component', () => {
const mockJob = {
id: 4250,
name: 'test',
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
details_path: '/root/ci-mock/builds/4250',
action: {
icon: 'retry',
title: 'Retry',
path: '/root/ci-mock/builds/4250/retry',
method: 'post',
},
},
};
let wrapper;
beforeEach(() => {
const mockGroups = [];
for (let i = 0; i < 3; i += 1) {
const mockedJob = { ...mockJob };
mockedJob.id += i;
mockGroups.push(mockedJob);
}
wrapper = shallowMount(StageColumnComponentLegacy, {
propsData: {
title: 'foo',
groups: mockGroups,
hasTriggeredBy: false,
},
});
});
it('should render provided title', () => {
expect(wrapper.find('.stage-name').text().trim()).toBe('foo');
});
it('should render the provided groups', () => {
expect(wrapper.findAll('.builds-container > ul > li').length).toBe(
wrapper.props('groups').length,
);
});
describe('jobId', () => {
it('escapes job name', () => {
wrapper = shallowMount(StageColumnComponentLegacy, {
propsData: {
groups: [
{
id: 4259,
name: '<img src=x onerror=alert(document.domain)>',
status: {
icon: 'status_success',
label: 'success',
tooltip: '<img src=x onerror=alert(document.domain)>',
},
},
],
title: 'test',
hasTriggeredBy: false,
},
});
expect(wrapper.find('.builds-container li').attributes('id')).toBe(
'ci-badge-&lt;img src=x onerror=alert(document.domain)&gt;',
);
});
});
describe('with action', () => {
it('renders action button', () => {
wrapper = shallowMount(StageColumnComponentLegacy, {
propsData: {
groups: [
{
id: 4259,
name: '<img src=x onerror=alert(document.domain)>',
status: {
icon: 'status_success',
label: 'success',
tooltip: '<img src=x onerror=alert(document.domain)>',
},
},
],
title: 'test',
hasTriggeredBy: false,
action: {
icon: 'play',
title: 'Play all',
path: 'action',
},
},
});
expect(wrapper.find('.js-stage-action').exists()).toBe(true);
});
});
describe('without action', () => {
it('does not render action button', () => {
wrapper = shallowMount(StageColumnComponentLegacy, {
propsData: {
groups: [
{
id: 4259,
name: '<img src=x onerror=alert(document.domain)>',
status: {
icon: 'status_success',
label: 'success',
tooltip: '<img src=x onerror=alert(document.domain)>',
},
},
],
title: 'test',
hasTriggeredBy: false,
},
});
expect(wrapper.find('.js-stage-action').exists()).toBe(false);
});
});
});

View File

@ -1,36 +0,0 @@
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import PipelineMediator from '~/pipelines/pipeline_details_mediator';
describe('PipelineMdediator', () => {
let mediator;
let mock;
beforeEach(() => {
mock = new MockAdapter(axios);
mediator = new PipelineMediator({ endpoint: 'foo.json' });
});
afterEach(() => {
mock.restore();
});
it('should set defaults', () => {
expect(mediator.options).toEqual({ endpoint: 'foo.json' });
expect(mediator.state.isLoading).toEqual(false);
expect(mediator.store).toBeDefined();
expect(mediator.service).toBeDefined();
});
describe('request and store data', () => {
it('should store received data', () => {
mock.onGet('foo.json').reply(200, { id: '121123' });
mediator.fetchPipeline();
return waitForPromises().then(() => {
expect(mediator.store.state.pipeline).toEqual({ id: '121123' });
});
});
});
});

View File

@ -1,27 +0,0 @@
import PipelineStore from '~/pipelines/stores/pipeline_store';
describe('Pipeline Store', () => {
let store;
beforeEach(() => {
store = new PipelineStore();
});
it('should set defaults', () => {
expect(store.state.pipeline).toEqual({});
});
describe('storePipeline', () => {
it('should store empty object if none is provided', () => {
store.storePipeline();
expect(store.state.pipeline).toEqual({});
});
it('should store received object', () => {
store.storePipeline({ foo: 'bar' });
expect(store.state.pipeline).toEqual({ foo: 'bar' });
});
});
});

View File

@ -105,6 +105,8 @@ describe('Pipelines', () => {
});
beforeEach(() => {
window.gon = { features: { pipelineSourceFilter: true } };
mock = new MockAdapter(axios);
jest.spyOn(window.history, 'pushState');

View File

@ -1,135 +0,0 @@
import PipelineStore from '~/pipelines/stores/pipeline_store';
import LinkedPipelines from '../linked_pipelines_mock.json';
describe('EE Pipeline store', () => {
let store;
let data;
beforeEach(() => {
store = new PipelineStore();
data = { ...LinkedPipelines };
store.storePipeline(data);
});
describe('storePipeline', () => {
describe('triggered_by', () => {
it('sets triggered_by as an array', () => {
expect(store.state.pipeline.triggered_by.length).toEqual(1);
});
it('adds isExpanding & isLoading keys set to false', () => {
expect(store.state.pipeline.triggered_by[0].isExpanded).toEqual(false);
expect(store.state.pipeline.triggered_by[0].isLoading).toEqual(false);
});
it('parses nested triggered_by', () => {
expect(store.state.pipeline.triggered_by[0].triggered_by.length).toEqual(1);
expect(store.state.pipeline.triggered_by[0].triggered_by[0].isExpanded).toEqual(false);
expect(store.state.pipeline.triggered_by[0].triggered_by[0].isLoading).toEqual(false);
});
});
describe('triggered', () => {
it('adds isExpanding & isLoading keys set to false for each triggered pipeline', () => {
store.state.pipeline.triggered.forEach((pipeline) => {
expect(pipeline.isExpanded).toEqual(false);
expect(pipeline.isLoading).toEqual(false);
});
});
it('parses nested triggered pipelines', () => {
store.state.pipeline.triggered[1].triggered.forEach((pipeline) => {
expect(pipeline.isExpanded).toEqual(false);
expect(pipeline.isLoading).toEqual(false);
});
});
});
});
describe('resetTriggeredByPipeline', () => {
it('closes the pipeline & nested ones', () => {
store.state.pipeline.triggered_by[0].isExpanded = true;
store.state.pipeline.triggered_by[0].triggered_by[0].isExpanded = true;
store.resetTriggeredByPipeline(store.state.pipeline, store.state.pipeline.triggered_by[0]);
expect(store.state.pipeline.triggered_by[0].isExpanded).toEqual(false);
expect(store.state.pipeline.triggered_by[0].triggered_by[0].isExpanded).toEqual(false);
});
});
describe('openTriggeredByPipeline', () => {
it('opens the given pipeline', () => {
store.openTriggeredByPipeline(store.state.pipeline, store.state.pipeline.triggered_by[0]);
expect(store.state.pipeline.triggered_by[0].isExpanded).toEqual(true);
});
});
describe('closeTriggeredByPipeline', () => {
it('closes the given pipeline', () => {
// open it first
store.openTriggeredByPipeline(store.state.pipeline, store.state.pipeline.triggered_by[0]);
store.closeTriggeredByPipeline(store.state.pipeline, store.state.pipeline.triggered_by[0]);
expect(store.state.pipeline.triggered_by[0].isExpanded).toEqual(false);
});
});
describe('resetTriggeredPipelines', () => {
it('closes the pipeline & nested ones', () => {
store.state.pipeline.triggered[0].isExpanded = true;
store.state.pipeline.triggered[0].triggered[0].isExpanded = true;
store.resetTriggeredPipelines(store.state.pipeline, store.state.pipeline.triggered[0]);
expect(store.state.pipeline.triggered[0].isExpanded).toEqual(false);
expect(store.state.pipeline.triggered[0].triggered[0].isExpanded).toEqual(false);
});
});
describe('openTriggeredPipeline', () => {
it('opens the given pipeline', () => {
store.openTriggeredPipeline(store.state.pipeline, store.state.pipeline.triggered[0]);
expect(store.state.pipeline.triggered[0].isExpanded).toEqual(true);
});
});
describe('closeTriggeredPipeline', () => {
it('closes the given pipeline', () => {
// open it first
store.openTriggeredPipeline(store.state.pipeline, store.state.pipeline.triggered[0]);
store.closeTriggeredPipeline(store.state.pipeline, store.state.pipeline.triggered[0]);
expect(store.state.pipeline.triggered[0].isExpanded).toEqual(false);
});
});
describe('toggleLoading', () => {
it('toggles the isLoading property for the given pipeline', () => {
store.toggleLoading(store.state.pipeline.triggered[0]);
expect(store.state.pipeline.triggered[0].isLoading).toEqual(true);
});
});
describe('addExpandedPipelineToRequestData', () => {
it('pushes the given id to expandedPipelines array', () => {
store.addExpandedPipelineToRequestData('213231');
expect(store.state.expandedPipelines).toEqual(['213231']);
});
});
describe('removeExpandedPipelineToRequestData', () => {
it('removes the given id from the expandedPipelines array', () => {
store.removeExpandedPipelineToRequestData('213231');
expect(store.state.expandedPipelines).toEqual([]);
});
});
});

View File

@ -0,0 +1,50 @@
import { GlFilteredSearchToken, GlFilteredSearchSuggestion } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component';
import PipelineSourceToken from '~/pipelines/components/pipelines_list/tokens/pipeline_source_token.vue';
describe('Pipeline Source Token', () => {
let wrapper;
const findFilteredSearchToken = () => wrapper.find(GlFilteredSearchToken);
const findAllFilteredSearchSuggestions = () => wrapper.findAll(GlFilteredSearchSuggestion);
const defaultProps = {
config: {
type: 'source',
icon: 'trigger-source',
title: 'Source',
unique: true,
},
value: {
data: '',
},
};
const createComponent = () => {
wrapper = shallowMount(PipelineSourceToken, {
propsData: {
...defaultProps,
},
stubs: {
GlFilteredSearchToken: stubComponent(GlFilteredSearchToken, {
template: `<div><slot name="suggestions"></slot></div>`,
}),
},
});
};
beforeEach(() => {
createComponent();
});
it('passes config correctly', () => {
expect(findFilteredSearchToken().props('config')).toEqual(defaultProps.config);
});
describe('shows sources correctly', () => {
it('renders all pipeline sources available', () => {
expect(findAllFilteredSearchSuggestions()).toHaveLength(wrapper.vm.sources.length);
});
});
});
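A note on the stubbing pattern above: `stubComponent` swaps a child component for an inert stand-in, and passing a `template` that re-renders a named slot keeps the slot content mountable under `shallowMount`. Below is a minimal self-contained sketch of the idea; the `Parent` and `Child` components are illustrative, not from the GitLab codebase.
import { shallowMount } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component';

const Child = {
  name: 'Child',
  template: '<div class="real-child"><slot name="suggestions"></slot></div>',
};

const Parent = {
  components: { Child },
  template: '<child><template #suggestions><span class="hit">match</span></template></child>',
};

describe('stubbing a child while keeping a slot', () => {
  it('still renders parent-provided slot content', () => {
    const wrapper = shallowMount(Parent, {
      stubs: {
        // The stub drops Child's own markup and behavior but re-renders the
        // "suggestions" slot, so the parent-provided content stays queryable.
        Child: stubComponent(Child, {
          template: '<div><slot name="suggestions"></slot></div>',
        }),
      },
    });

    expect(wrapper.find('.hit').exists()).toBe(true);
  });
});
Rendering only the slot keeps the test focused on the suggestions the parent supplies rather than on the child component's internals.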

View File

@ -1,3 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`ReadyToMerge with a mismatched SHA warns the user to refresh to review 1`] = `"<gl-sprintf-stub message=\\"New changes were added. %{linkStart}Reload the page to review them%{linkEnd}\\"></gl-sprintf-stub>"`;

View File

@ -1,4 +1,3 @@
import { GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import simplePoll from '~/lib/utils/simple_poll';
@ -782,26 +781,4 @@ describe('ReadyToMerge', () => {
});
});
});
describe('with a mismatched SHA', () => {
const findMismatchShaBlock = () => wrapper.find('.js-sha-mismatch');
const findMismatchShaTextBlock = () => findMismatchShaBlock().find(GlSprintf);
beforeEach(() => {
createComponent({
mr: {
isSHAMismatch: true,
mergeRequestDiffsPath: '/merge_requests/1/diffs',
},
});
});
it('displays a warning message', () => {
expect(findMismatchShaBlock().exists()).toBe(true);
});
it('warns the user to refresh to review', () => {
expect(findMismatchShaTextBlock().element.outerHTML).toMatchSnapshot();
});
});
});

View File

@ -1,25 +1,42 @@
import Vue from 'vue';
import { removeBreakLine } from 'helpers/text_helper';
import mountComponent from 'helpers/vue_mount_component_helper';
import { mount } from '@vue/test-utils';
import ShaMismatch from '~/vue_merge_request_widget/components/states/sha_mismatch.vue';
import { I18N_SHA_MISMATCH } from '~/vue_merge_request_widget/i18n';
function createComponent({ path = '' } = {}) {
return mount(ShaMismatch, {
propsData: {
mr: {
mergeRequestDiffsPath: path,
},
},
});
}
describe('ShaMismatch', () => {
let vm;
let wrapper;
const findActionButton = () => wrapper.find('[data-testid="action-button"]');
beforeEach(() => {
const Component = Vue.extend(ShaMismatch);
vm = mountComponent(Component);
wrapper = createComponent();
});
afterEach(() => {
vm.$destroy();
wrapper.destroy();
});
it('should render information message', () => {
expect(vm.$el.querySelector('button').disabled).toEqual(true);
expect(removeBreakLine(vm.$el.textContent).trim()).toContain(
'The source branch HEAD has recently changed. Please reload the page and review the changes before merging',
);
});
it('should render warning message', () => {
expect(wrapper.element.innerText).toContain(I18N_SHA_MISMATCH.warningMessage);
});
it('action button should have correct label', () => {
expect(findActionButton().text()).toBe(I18N_SHA_MISMATCH.actionButtonLabel);
});
it('action button should link to the diff path', () => {
const DIFF_PATH = '/gitlab-org/gitlab-test/-/merge_requests/6/diffs';
wrapper = createComponent({ path: DIFF_PATH });
expect(findActionButton().attributes('href')).toBe(DIFF_PATH);
});
});

View File

@ -80,14 +80,15 @@ describe('MrWidgetOptions', () => {
describe('computed', () => {
describe('componentName', () => {
it('should return merged component', () => {
expect(wrapper.vm.componentName).toEqual('mr-widget-merged');
});
it('should return conflicts component', () => {
wrapper.vm.mr.state = 'conflicts';
expect(wrapper.vm.componentName).toEqual('mr-widget-conflicts');
});
it.each`
state | componentName
${'merged'} | ${'mr-widget-merged'}
${'conflicts'} | ${'mr-widget-conflicts'}
${'shaMismatch'} | ${'sha-mismatch'}
`('should translate $state into $componentName', ({ state, componentName }) => {
wrapper.vm.mr.state = state;
expect(wrapper.vm.componentName).toEqual(componentName);
});
});
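A minimal sketch of the `it.each` tagged-template form adopted above: each table row becomes one generated test, with the header names bound as fields of the case object. The `toComponentName` mapper here is illustrative only.
const toComponentName = (state) => `mr-widget-${state}`; // illustrative mapper

describe('toComponentName', () => {
  it.each`
    state          | componentName
    ${'merged'}    | ${'mr-widget-merged'}
    ${'conflicts'} | ${'mr-widget-conflicts'}
  `('maps $state to $componentName', ({ state, componentName }) => {
    expect(toComponentName(state)).toBe(componentName);
  });
});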

View File

@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
let_it_be(:project) { create(:project) }
let(:auth_failure) { { actor: nil, project: nil, type: nil, authentication_abilities: nil } }
let(:gl_auth) { described_class }
describe 'constants' do
@ -159,26 +160,26 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
let(:project) { build.project }
it 'recognises user-less build' do
expect(subject).to eq(Gitlab::Auth::Result.new(nil, build.project, :ci, described_class.build_authentication_abilities))
expect(subject).to have_attributes(actor: nil, project: build.project, type: :ci, authentication_abilities: described_class.build_authentication_abilities)
end
it 'recognises user token' do
build.update(user: create(:user))
expect(subject).to eq(Gitlab::Auth::Result.new(build.user, build.project, :build, described_class.build_authentication_abilities))
expect(subject).to have_attributes(actor: build.user, project: build.project, type: :build, authentication_abilities: described_class.build_authentication_abilities)
end
it 'fails with blocked user token' do
build.update(user: create(:user, :blocked))
expect(subject).to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
expect(subject).to have_attributes(auth_failure)
end
context 'username is not gitlab-ci-token' do
let(:username) { 'another_username' }
it 'fails to authenticate' do
expect(subject).to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
expect(subject).to have_attributes(auth_failure)
end
end
end
@ -189,7 +190,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
let(:project) { build.project }
it 'denies authentication' do
expect(subject).to eq(Gitlab::Auth::Result.new)
expect(subject).to have_attributes(auth_failure)
end
end
end
@ -199,20 +200,20 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
project.create_drone_ci_integration(active: true)
project.drone_ci_integration.update(token: 'token')
expect(gl_auth.find_for_git_client('drone-ci-token', 'token', project: project, ip: 'ip')).to eq(Gitlab::Auth::Result.new(nil, project, :ci, described_class.build_authentication_abilities))
expect(gl_auth.find_for_git_client('drone-ci-token', 'token', project: project, ip: 'ip')).to have_attributes(actor: nil, project: project, type: :ci, authentication_abilities: described_class.build_authentication_abilities)
end
it 'recognizes master passwords' do
user = create(:user, password: 'password')
expect(gl_auth.find_for_git_client(user.username, 'password', project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(user, nil, :gitlab_or_ldap, described_class.full_authentication_abilities))
expect(gl_auth.find_for_git_client(user.username, 'password', project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
end
include_examples 'user login operation with unique ip limit' do
let(:user) { create(:user, password: 'password') }
def operation
expect(gl_auth.find_for_git_client(user.username, 'password', project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(user, nil, :gitlab_or_ldap, described_class.full_authentication_abilities))
expect(gl_auth.find_for_git_client(user.username, 'password', project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
end
end
@ -221,14 +222,14 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
user = create(:user)
token = Gitlab::LfsToken.new(user).token
expect(gl_auth.find_for_git_client(user.username, token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(user, nil, :lfs_token, described_class.read_write_project_authentication_abilities))
expect(gl_auth.find_for_git_client(user.username, token, project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :lfs_token, authentication_abilities: described_class.read_write_project_authentication_abilities)
end
it 'recognizes deploy key lfs tokens' do
key = create(:deploy_key)
token = Gitlab::LfsToken.new(key).token
expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(key, nil, :lfs_deploy_token, described_class.read_only_authentication_abilities))
expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: nil, ip: 'ip')).to have_attributes(actor: key, project: nil, type: :lfs_deploy_token, authentication_abilities: described_class.read_only_authentication_abilities)
end
it 'does not try password auth before oauth' do
@ -245,14 +246,14 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
create(:deploy_keys_project, :write_access, deploy_key: key, project: project)
token = Gitlab::LfsToken.new(key).token
expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: project, ip: 'ip')).to eq(Gitlab::Auth::Result.new(key, nil, :lfs_deploy_token, described_class.read_write_authentication_abilities))
expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: project, ip: 'ip')).to have_attributes(actor: key, project: nil, type: :lfs_deploy_token, authentication_abilities: described_class.read_write_authentication_abilities)
end
it 'does not grant deploy key write permissions' do
key = create(:deploy_key)
token = Gitlab::LfsToken.new(key).token
expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: project, ip: 'ip')).to eq(Gitlab::Auth::Result.new(key, nil, :lfs_deploy_token, described_class.read_only_authentication_abilities))
expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: project, ip: 'ip')).to have_attributes(actor: key, project: nil, type: :lfs_deploy_token, authentication_abilities: described_class.read_only_authentication_abilities)
end
end
@ -264,18 +265,18 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
shared_examples 'an oauth failure' do
it 'fails' do
expect(gl_auth.find_for_git_client("oauth2", token_w_api_scope.token, project: nil, ip: 'ip'))
.to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
.to have_attributes(auth_failure)
end
end
it 'succeeds for OAuth tokens with the `api` scope' do
expect(gl_auth.find_for_git_client("oauth2", token_w_api_scope.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(user, nil, :oauth, described_class.full_authentication_abilities))
expect(gl_auth.find_for_git_client("oauth2", token_w_api_scope.token, project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :oauth, authentication_abilities: described_class.full_authentication_abilities)
end
it 'fails for OAuth tokens with other scopes' do
token = Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: 'read_user')
expect(gl_auth.find_for_git_client("oauth2", token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(nil, nil))
expect(gl_auth.find_for_git_client("oauth2", token.token, project: nil, ip: 'ip')).to have_attributes(auth_failure)
end
it 'does not try password auth before oauth' do
@ -342,7 +343,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
impersonation_token = create(:personal_access_token, :impersonation, scopes: ['api'])
expect(gl_auth.find_for_git_client('', impersonation_token.token, project: nil, ip: 'ip'))
.to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
.to have_attributes(auth_failure)
end
it 'limits abilities based on scope' do
@ -365,7 +366,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'fails if user is blocked' do
expect(gl_auth.find_for_git_client('', personal_access_token.token, project: nil, ip: 'ip'))
.to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
.to have_attributes(auth_failure)
end
end
@ -373,19 +374,19 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
shared_examples 'with a valid access token' do
it 'successfully authenticates the project bot' do
expect(gl_auth.find_for_git_client(project_bot_user.username, access_token.token, project: project, ip: 'ip'))
.to eq(Gitlab::Auth::Result.new(project_bot_user, nil, :personal_access_token, described_class.full_authentication_abilities))
.to have_attributes(actor: project_bot_user, project: nil, type: :personal_access_token, authentication_abilities: described_class.full_authentication_abilities)
end
it 'successfully authenticates the project bot with a nil project' do
expect(gl_auth.find_for_git_client(project_bot_user.username, access_token.token, project: nil, ip: 'ip'))
.to eq(Gitlab::Auth::Result.new(project_bot_user, nil, :personal_access_token, described_class.full_authentication_abilities))
.to have_attributes(actor: project_bot_user, project: nil, type: :personal_access_token, authentication_abilities: described_class.full_authentication_abilities)
end
end
shared_examples 'with an invalid access token' do
it 'fails for a non-member' do
expect(gl_auth.find_for_git_client(project_bot_user.username, access_token.token, project: project, ip: 'ip'))
.to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
.to have_attributes(auth_failure)
end
context 'when project bot user is blocked' do
@ -395,7 +396,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'fails for a blocked project bot' do
expect(gl_auth.find_for_git_client(project_bot_user.username, access_token.token, project: project, ip: 'ip'))
.to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
.to have_attributes(auth_failure)
end
end
end
@ -462,7 +463,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
)
expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip'))
.to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
.to have_attributes(auth_failure)
end
it 'goes through lfs authentication' do
@ -473,7 +474,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
)
expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip'))
.to eq(Gitlab::Auth::Result.new(user, nil, :gitlab_or_ldap, described_class.full_authentication_abilities))
.to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
end
it 'goes through oauth authentication when the username is oauth2' do
@ -484,14 +485,14 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
)
expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip'))
.to eq(Gitlab::Auth::Result.new(user, nil, :gitlab_or_ldap, described_class.full_authentication_abilities))
.to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
end
end
it 'returns double nil for invalid credentials' do
login = 'foo'
expect(gl_auth.find_for_git_client(login, 'bar', project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new)
expect(gl_auth.find_for_git_client(login, 'bar', project: nil, ip: 'ip')).to have_attributes(auth_failure)
end
it 'throws an error suggesting user create a PAT when internal auth is disabled' do
@ -501,27 +502,25 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
context 'while using deploy tokens' do
let(:auth_failure) { Gitlab::Auth::Result.new(nil, nil) }
shared_examples 'registry token scope' do
it 'fails when login is not valid' do
expect(gl_auth.find_for_git_client('random_login', deploy_token.token, project: project, ip: 'ip'))
.to eq(auth_failure)
.to have_attributes(auth_failure)
end
it 'fails when token is not valid' do
expect(gl_auth.find_for_git_client(login, '123123', project: project, ip: 'ip'))
.to eq(auth_failure)
.to have_attributes(auth_failure)
end
it 'fails if token is nil' do
expect(gl_auth.find_for_git_client(login, nil, project: nil, ip: 'ip'))
.to eq(auth_failure)
.to have_attributes(auth_failure)
end
it 'fails if token is not related to project' do
expect(gl_auth.find_for_git_client(login, 'abcdef', project: nil, ip: 'ip'))
.to eq(auth_failure)
.to have_attributes(auth_failure)
end
it 'fails if token has been revoked' do
@ -529,7 +528,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
expect(deploy_token.revoked?).to be_truthy
expect(gl_auth.find_for_git_client('deploy-token', deploy_token.token, project: nil, ip: 'ip'))
.to eq(auth_failure)
.to have_attributes(auth_failure)
end
end
@ -541,7 +540,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'fails when login and token are valid' do
expect(gl_auth.find_for_git_client(login, deploy_token.token, project: nil, ip: 'ip'))
.to eq(auth_failure)
.to have_attributes(auth_failure)
end
end
@ -550,7 +549,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'fails when login and token are valid' do
expect(gl_auth.find_for_git_client(login, deploy_token.token, project: project, ip: 'ip'))
.to eq(auth_failure)
.to have_attributes(auth_failure)
end
end
end
@ -561,17 +560,17 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
let(:deploy_token) { create(:deploy_token, username: username, read_registry: false, projects: [project]) }
it 'succeeds for the token' do
auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:download_code])
auth_success = { actor: deploy_token, project: project, type: :deploy_token, authentication_abilities: [:download_code] }
expect(gl_auth.find_for_git_client(username, deploy_token.token, project: project, ip: 'ip'))
.to eq(auth_success)
.to have_attributes(auth_success)
end
it 'succeeds for the user' do
auth_success = Gitlab::Auth::Result.new(user, nil, :gitlab_or_ldap, described_class.full_authentication_abilities)
auth_success = { actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities }
expect(gl_auth.find_for_git_client(username, 'my-secret', project: project, ip: 'ip'))
.to eq(auth_success)
.to have_attributes(auth_success)
end
end
@ -579,16 +578,16 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
context 'and belong to the same project' do
let!(:read_registry) { create(:deploy_token, username: 'deployer', read_repository: false, projects: [project]) }
let!(:read_repository) { create(:deploy_token, username: read_registry.username, read_registry: false, projects: [project]) }
let(:auth_success) { Gitlab::Auth::Result.new(read_repository, project, :deploy_token, [:download_code]) }
let(:auth_success) { { actor: read_repository, project: project, type: :deploy_token, authentication_abilities: [:download_code] } }
it 'succeeds for the right token' do
expect(gl_auth.find_for_git_client('deployer', read_repository.token, project: project, ip: 'ip'))
.to eq(auth_success)
.to have_attributes(auth_success)
end
it 'fails for the wrong token' do
expect(gl_auth.find_for_git_client('deployer', read_registry.token, project: project, ip: 'ip'))
.not_to eq(auth_success)
.not_to have_attributes(auth_success)
end
end
@ -597,16 +596,16 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
let!(:read_registry) { create(:deploy_token, username: 'deployer', read_repository: false, projects: [project]) }
let!(:read_repository) { create(:deploy_token, username: read_registry.username, read_registry: false, projects: [other_project]) }
let(:auth_success) { Gitlab::Auth::Result.new(read_repository, other_project, :deploy_token, [:download_code]) }
let(:auth_success) { { actor: read_repository, project: other_project, type: :deploy_token, authentication_abilities: [:download_code] } }
it 'succeeds for the right token' do
expect(gl_auth.find_for_git_client('deployer', read_repository.token, project: other_project, ip: 'ip'))
.to eq(auth_success)
.to have_attributes(auth_success)
end
it 'fails for the wrong token' do
expect(gl_auth.find_for_git_client('deployer', read_registry.token, project: other_project, ip: 'ip'))
.not_to eq(auth_success)
.not_to have_attributes(auth_success)
end
end
end
@ -616,18 +615,18 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
let(:login) { deploy_token.username }
it 'succeeds when login and token are valid' do
auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:download_code])
auth_success = { actor: deploy_token, project: project, type: :deploy_token, authentication_abilities: [:download_code] }
expect(gl_auth.find_for_git_client(login, deploy_token.token, project: project, ip: 'ip'))
.to eq(auth_success)
.to have_attributes(auth_success)
end
it 'succeeds when custom login and token are valid' do
deploy_token = create(:deploy_token, username: 'deployer', read_registry: false, projects: [project])
auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:download_code])
auth_success = { actor: deploy_token, project: project, type: :deploy_token, authentication_abilities: [:download_code] }
expect(gl_auth.find_for_git_client('deployer', deploy_token.token, project: project, ip: 'ip'))
.to eq(auth_success)
.to have_attributes(auth_success)
end
it 'does not attempt to rate limit unique IPs for a deploy token' do
@ -638,23 +637,23 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'fails when login is not valid' do
expect(gl_auth.find_for_git_client('random_login', deploy_token.token, project: project, ip: 'ip'))
.to eq(auth_failure)
.to have_attributes(auth_failure)
end
it 'fails when token is not valid' do
expect(gl_auth.find_for_git_client(login, '123123', project: project, ip: 'ip'))
.to eq(auth_failure)
.to have_attributes(auth_failure)
end
it 'fails if token is nil' do
expect(gl_auth.find_for_git_client(login, nil, project: project, ip: 'ip'))
.to eq(auth_failure)
.to have_attributes(auth_failure)
end
it 'fails if token is not related to project' do
another_deploy_token = create(:deploy_token)
expect(gl_auth.find_for_git_client(another_deploy_token.username, another_deploy_token.token, project: project, ip: 'ip'))
.to eq(auth_failure)
.to have_attributes(auth_failure)
end
it 'fails if token has been revoked' do
@ -662,7 +661,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
expect(deploy_token.revoked?).to be_truthy
expect(gl_auth.find_for_git_client('deploy-token', deploy_token.token, project: project, ip: 'ip'))
.to eq(auth_failure)
.to have_attributes(auth_failure)
end
end
@ -674,16 +673,16 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
subject { gl_auth.find_for_git_client(login, deploy_token.token, project: project_with_group, ip: 'ip') }
it 'succeeds when login and a group deploy token are valid' do
auth_success = Gitlab::Auth::Result.new(deploy_token, project_with_group, :deploy_token, [:download_code, :read_container_image])
auth_success = { actor: deploy_token, project: project_with_group, type: :deploy_token, authentication_abilities: [:download_code, :read_container_image] }
expect(subject).to eq(auth_success)
expect(subject).to have_attributes(auth_success)
end
it 'fails if token is not related to group' do
another_deploy_token = create(:deploy_token, :group, read_repository: true)
expect(gl_auth.find_for_git_client(another_deploy_token.username, another_deploy_token.token, project: project_with_group, ip: 'ip'))
.to eq(auth_failure)
.to have_attributes(auth_failure)
end
end
@ -697,10 +696,10 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
it 'succeeds when login and a project token are valid' do
auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:read_container_image])
auth_success = { actor: deploy_token, project: project, type: :deploy_token, authentication_abilities: [:read_container_image] }
expect(gl_auth.find_for_git_client(login, deploy_token.token, project: project, ip: 'ip'))
.to eq(auth_success)
.to have_attributes(auth_success)
end
it_behaves_like 'registry token scope'
@ -719,10 +718,10 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
it 'succeeds when login and a project token are valid' do
auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:create_container_image])
auth_success = { actor: deploy_token, project: project, type: :deploy_token, authentication_abilities: [:create_container_image] }
expect(gl_auth.find_for_git_client(login, deploy_token.token, project: project, ip: 'ip'))
.to eq(auth_success)
.to have_attributes(auth_success)
end
it_behaves_like 'registry token scope'
@ -932,6 +931,6 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
def expect_results_with_abilities(personal_access_token, abilities, success = true)
expect(gl_auth.find_for_git_client('', personal_access_token&.token, project: nil, ip: 'ip'))
.to eq(Gitlab::Auth::Result.new(personal_access_token&.user, nil, personal_access_token.nil? ? nil : :personal_access_token, abilities))
.to have_attributes(actor: personal_access_token&.user, project: nil, type: personal_access_token.nil? ? nil : :personal_access_token, authentication_abilities: abilities)
end
end
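The recurring swap in this spec, from `eq(Gitlab::Auth::Result.new(...))` to `have_attributes(...)`, replaces whole-value equality with per-attribute comparison, so a failure reports exactly which reader diverged instead of dumping two full result objects. A minimal runnable sketch of the difference, using a stand-in Struct rather than the real Gitlab::Auth::Result:
# frozen_string_literal: true
require 'rspec/autorun'

# Stand-in for Gitlab::Auth::Result; only the attribute readers matter here.
Result = Struct.new(:actor, :project, :type, :authentication_abilities)

RSpec.describe 'have_attributes vs eq' do
  it 'compares one named reader at a time' do
    result = Result.new(nil, nil, :ci, [:download_code])

    # eq requires constructing a fully equal value object; have_attributes
    # calls each named reader and reports per-attribute diffs on failure.
    expect(result).to have_attributes(type: :ci, authentication_abilities: [:download_code])
  end
end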

View File

@ -37,22 +37,20 @@ RSpec.describe Gitlab::Database::WithLockRetriesOutsideTransaction do
context 'when lock retry is enabled' do
let(:lock_fiber) do
Fiber.new do
configuration = ActiveRecordSecond.configurations.find_db_config(Rails.env).configuration_hash
# Initiating a second DB connection for the lock
conn = ActiveRecordSecond.establish_connection(configuration).connection
# Initiating a separate DB connection for the lock
conn = ActiveRecord::Base.connection_pool.checkout
conn.transaction do
conn.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
Fiber.yield
end
ActiveRecordSecond.remove_connection # force disconnect
# Releasing the connection we requested
ActiveRecord::Base.connection_pool.checkin(conn)
end
end
before do
stub_const('ActiveRecordSecond', Class.new(ActiveRecord::Base))
lock_fiber.resume # start the transaction and lock the table
end

View File

@ -37,22 +37,19 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when lock retry is enabled' do
let(:lock_fiber) do
Fiber.new do
configuration = ActiveRecordSecond.configurations.find_db_config(Rails.env).configuration_hash
# Initiating a second DB connection for the lock
conn = ActiveRecordSecond.establish_connection(configuration).connection
# Initiating a separate DB connection for the lock
conn = ActiveRecord::Base.connection_pool.checkout
conn.transaction do
conn.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
Fiber.yield
end
ActiveRecordSecond.remove_connection # force disconnect
# Releasing the connection we requested
ActiveRecord::Base.connection_pool.checkin(conn)
end
end
before do
stub_const('ActiveRecordSecond', Class.new(ActiveRecord::Base))
lock_fiber.resume # start the transaction and lock the table
end
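Both lock-retry specs above lean on the same Fiber technique: the fiber checks out a dedicated connection, opens a transaction, takes an exclusive table lock, then yields, so the lock stays held while the test body provokes lock timeouts. A condensed sketch of that control flow, assuming an already-established ActiveRecord connection and an illustrative table name:
# The fiber pauses mid-transaction at Fiber.yield, keeping the exclusive
# lock held on its dedicated connection until it is resumed again.
lock_fiber = Fiber.new do
  conn = ActiveRecord::Base.connection_pool.checkout
  conn.transaction do
    conn.execute('LOCK TABLE projects IN EXCLUSIVE MODE') # table name illustrative
    Fiber.yield
  end
  ActiveRecord::Base.connection_pool.checkin(conn) # release the checked-out connection
end

lock_fiber.resume # take the lock before exercising the retry logic
# ... run the code that is expected to hit lock_timeout and retry ...
lock_fiber.resume # finish the transaction, releasing the lock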

View File

@ -1,59 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe ScheduleBackfillDraftStatusOnMergeRequests, :sidekiq do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:merge_requests) { table(:merge_requests) }
let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
let(:project) { projects.create!(namespace_id: group.id) }
let(:draft_prefixes) { ["[Draft]", "(Draft)", "Draft:", "Draft", "[WIP]", "WIP:", "WIP"] }
def create_merge_request(params)
common_params = {
target_project_id: project.id,
target_branch: 'feature1',
source_branch: 'master'
}
merge_requests.create!(common_params.merge(params))
end
before do
draft_prefixes.each do |prefix|
(1..4).each do |n|
create_merge_request(
title: "#{prefix} This is a title",
draft: false,
state_id: n
)
end
end
stub_const("#{described_class}::BATCH_SIZE", 1)
end
it 'schedules BackfillDraftStatusOnMergeRequests background jobs' do
Sidekiq::Testing.fake! do
draft_mrs = Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests::MergeRequest.eligible
first_mr_id = draft_mrs.first.id
second_mr_id = draft_mrs.second.id
freeze_time do
migrate!
expect(BackgroundMigrationWorker.jobs.size).to eq(7)
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(2.minutes, first_mr_id, first_mr_id)
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(4.minutes, second_mr_id, second_mr_id)
end
end
end
end

View File

@ -0,0 +1,44 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::CiFeatureUsage, type: :model do
describe 'associations' do
it { is_expected.to belong_to(:project) }
end
it_behaves_like 'having unique enum values'
describe 'validations' do
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_presence_of(:feature) }
end
describe '.insert_usage' do
let_it_be(:project) { create(:project) }
context 'when data is not a duplicate' do
it 'creates a new record' do
expect { described_class.insert_usage(project_id: project.id, default_branch: false, feature: :code_coverage) }
.to change { described_class.count }
expect(described_class.first).to have_attributes(
project_id: project.id,
default_branch: false,
feature: 'code_coverage'
)
end
end
context 'when data is a duplicate' do
before do
create(:project_ci_feature_usage, project: project, default_branch: false, feature: :code_coverage)
end
it 'does not create a new record' do
expect { described_class.insert_usage(project_id: project.id, default_branch: false, feature: :code_coverage) }
.not_to change { described_class.count }
end
end
end
end
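The duplicate-safe behavior exercised above points at an upsert guarded by a unique index. The implementation is not part of this diff, so the sketch below is an assumption: it presumes a unique index over (project_id, feature, default_branch) and uses Rails' `insert_all`, which skips rows that conflict with a unique index instead of raising.
# Hypothetical sketch; the real Projects::CiFeatureUsage.insert_usage is not
# shown in this diff. Table name, enum values, and unique index are assumed.
module Projects
  class CiFeatureUsage < ApplicationRecord
    self.table_name = 'project_ci_feature_usages'

    belongs_to :project
    enum feature: { code_coverage: 1 }

    def self.insert_usage(project_id:, feature:, default_branch:)
      # insert_all bypasses validations and enum casting, so the enum value is
      # mapped explicitly; rows conflicting with the unique index are skipped.
      insert_all(
        [{ project_id: project_id, feature: features[feature], default_branch: default_branch }],
        unique_by: %i[project_id feature default_branch]
      )
    end
  end
end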

View File

@ -41,6 +41,17 @@ RSpec.describe Ci::DailyBuildGroupReportResultService, '#execute' do
expect(Ci::DailyBuildGroupReportResult.find_by(group_name: 'extra')).to be_nil
end
it 'creates a project_ci_feature_usage record for the pipeline project' do
described_class.new.execute(pipeline)
expect(Projects::CiFeatureUsage.count).to eq(1)
expect(Projects::CiFeatureUsage.first).to have_attributes(
project_id: pipeline.project.id,
feature: 'code_coverage',
default_branch: false
)
end
context 'when there are multiple builds with the same group name that report coverage' do
let!(:test_job_1) { create(:ci_build, pipeline: pipeline, name: 'test 1/2', coverage: 70) }
let!(:test_job_2) { create(:ci_build, pipeline: pipeline, name: 'test 2/2', coverage: 80) }
@ -99,6 +110,16 @@ RSpec.describe Ci::DailyBuildGroupReportResultService, '#execute' do
data: { 'coverage' => new_karma_job.coverage }
)
end
it 'does not create a new project_ci_feature_usage record for the pipeline project' do
expect { described_class.new.execute(pipeline) }.not_to change { Projects::CiFeatureUsage.count }
expect(Projects::CiFeatureUsage.first).to have_attributes(
project_id: pipeline.project.id,
feature: 'code_coverage',
default_branch: false
)
end
end
context 'when the ID of the pipeline is older than the last_pipeline_id' do
@ -161,6 +182,8 @@ RSpec.describe Ci::DailyBuildGroupReportResultService, '#execute' do
it 'does nothing' do
expect { described_class.new.execute(new_pipeline) }.not_to raise_error
expect(Ci::DailyBuildGroupReportResult.count).to eq(0)
expect(Projects::CiFeatureUsage.count).to eq(0)
end
end
@ -178,6 +201,17 @@ RSpec.describe Ci::DailyBuildGroupReportResultService, '#execute' do
expect(coverage.default_branch).to be_truthy
end
end
it 'creates a project_ci_feature_usage record for the pipeline project for default branch' do
described_class.new.execute(pipeline)
expect(Projects::CiFeatureUsage.count).to eq(1)
expect(Projects::CiFeatureUsage.first).to have_attributes(
project_id: pipeline.project.id,
feature: 'code_coverage',
default_branch: true
)
end
end
context 'when pipeline ref_path is not the project default branch' do

View File

@ -80,7 +80,6 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo.id)
expect(json_response.dig("provider_repos", 1, "id")).to eq(org_repo.id)
expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
end
it "does not show already added project" do
@ -156,7 +155,6 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
expect(json_response.dig("imported_projects").count).to eq(0)
expect(json_response.dig("provider_repos").count).to eq(1)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo_2.id)
expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
end
it 'filters the list, ignoring the case of the name' do
@ -166,7 +164,6 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
expect(json_response.dig("imported_projects").count).to eq(0)
expect(json_response.dig("provider_repos").count).to eq(1)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo_2.id)
expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
end
context 'when user input contains html' do

View File

@ -18,7 +18,6 @@ RSpec.shared_examples 'import controller status' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo_id)
expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
end
it "does not show already added project" do

View File

@ -3687,10 +3687,10 @@ core-js-pure@^3.0.0:
resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.6.5.tgz#c79e75f5e38dbc85a662d91eea52b8256d53b813"
integrity sha512-lacdXOimsiD0QyNf9BC/mxivNJ/ybBGJXQFKzRekp1WTHoVUWsUHEn+2T8GJAzzIhyOuXA+gOxCVN3l+5PLPUA==
core-js@^3.1.3, core-js@^3.16.1:
version "3.16.1"
resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.16.1.tgz#f4485ce5c9f3c6a7cb18fa80488e08d362097249"
integrity sha512-AAkP8i35EbefU+JddyWi12AWE9f2N/qr/pwnDtWz4nyUIBGMJPX99ANFFRSw6FefM374lDujdtLDyhN2A/btHw==
core-js@^3.1.3, core-js@^3.16.2:
version "3.16.2"
resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.16.2.tgz#3f485822889c7fc48ef463e35be5cc2a4a01a1f4"
integrity sha512-P0KPukO6OjMpjBtHSceAZEWlDD1M2Cpzpg6dBbrjFqFhBHe/BwhxaP820xKOjRn/lZRQirrCusIpLS/n2sgXLQ==
core-js@~2.3.0:
version "2.3.0"