Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-12-13 21:09:54 +00:00
parent 46e1fdb8bb
commit 0fea036ba7
75 changed files with 1135 additions and 407 deletions

View File

@ -1 +1 @@
2893c46338b2d070bfd8559d09e6c781131fb27f
144dca53fcf99bd9064cb2f6e2631f6367f0eafe

View File

@ -407,7 +407,7 @@ export default {
</span>
</span>
</div>
<div class="board-card-assignee gl-display-flex gl-gap-3 gl-mb-n2">
<div class="board-card-assignee gl-display-flex gl-mb-n2">
<user-avatar-link
v-for="assignee in cappedAssignees"
:key="assignee.id"
@ -428,7 +428,7 @@ export default {
v-if="shouldRenderCounter"
v-gl-tooltip
:title="assigneeCounterTooltip"
class="avatar-counter gl-bg-gray-400 gl-cursor-help gl-font-weight-bold gl-ml-n4 gl-border-0 gl-line-height-24"
class="avatar-counter gl-bg-gray-100 gl-text-gray-900 gl-cursor-help gl-font-weight-bold gl-border-0 gl-line-height-24 gl-ml-n3"
data-placement="bottom"
>{{ assigneeCounterLabel }}</span
>

View File

@ -1,5 +1,5 @@
<script>
import { GlBadge, GlDrawer, GlIcon, GlLink } from '@gitlab/ui';
import { GlBadge, GlDrawer, GlLink, GlButton, GlIcon } from '@gitlab/ui';
import { __, s__ } from '~/locale';
import { DRAWER_Z_INDEX } from '~/lib/utils/constants';
import { getSeverity } from '~/ci/reports/utils';
@ -27,7 +27,7 @@ export const codeQuality = 'codeQuality';
export default {
i18n,
codeQuality,
components: { GlBadge, GlDrawer, GlIcon, GlLink, DrawerItem },
components: { GlBadge, GlDrawer, GlLink, GlButton, GlIcon, DrawerItem },
props: {
drawer: {
type: Object,
@ -39,22 +39,50 @@ export default {
default: () => {},
},
},
data() {
return {
activeIndex: 0,
};
},
computed: {
getDrawerHeaderHeight() {
return getContentWrapperHeight();
},
isCodeQuality() {
return this.drawer.scale === this.$options.codeQuality;
return this.activeElement.scale === this.$options.codeQuality;
},
activeElement() {
return this.drawer.findings[this.activeIndex];
},
findingsStatus() {
return this.drawer.state === SAST_FINDING_DISMISSED ? 'muted' : 'warning';
return this.activeElement.state === SAST_FINDING_DISMISSED ? 'muted' : 'warning';
},
},
DRAWER_Z_INDEX,
watch: {
drawer(newVal) {
this.activeIndex = newVal.index;
},
},
methods: {
getSeverity,
prev() {
if (this.activeIndex === 0) {
this.activeIndex = this.drawer.findings.length - 1;
} else {
this.activeIndex -= 1;
}
},
next() {
if (this.activeIndex === this.drawer.findings.length - 1) {
this.activeIndex = 0;
} else {
this.activeIndex += 1;
}
},
concatIdentifierName(name, index) {
return name + (index !== this.drawer.identifiers.length - 1 ? ', ' : '');
return name + (index !== this.activeElement.identifiers.length - 1 ? ', ' : '');
},
},
};
@ -68,38 +96,51 @@ export default {
@close="$emit('close')"
>
<template #title>
<h2 class="drawer-heading gl-font-base gl-mt-0 gl-mb-0">
<h2 class="drawer-heading gl-font-base gl-mt-0 gl-mb-0 gl-w-28">
<gl-icon
:size="12"
:name="getSeverity(drawer).name"
:class="getSeverity(drawer).class"
:name="getSeverity(activeElement).name"
:class="getSeverity(activeElement).class"
class="inline-findings-severity-icon gl-vertical-align-baseline!"
/>
<span class="drawer-heading-severity">{{ drawer.severity }}</span>
<span class="drawer-heading-severity">{{ activeElement.severity }}</span>
{{ isCodeQuality ? $options.i18n.codeQualityFinding : $options.i18n.sastFinding }}
</h2>
<div v-if="drawer.findings.length > 1">
<gl-button data-testid="findings-drawer-prev-button" class="gl-p-1!" @click="prev">
<gl-icon :size="24" name="chevron-left" />
</gl-button>
<gl-button class="gl-p-1!" @click="next">
<gl-icon data-testid="findings-drawer-next-button" :size="24" name="chevron-right" />
</gl-button>
</div>
</template>
<template #default>
<ul class="gl-list-style-none gl-border-b-initial gl-mb-0 gl-pb-0!">
<drawer-item v-if="drawer.title" :description="$options.i18n.name" :value="drawer.title" />
<drawer-item
v-if="activeElement.title"
:description="$options.i18n.name"
:value="activeElement.title"
data-testid="findings-drawer-title"
/>
<drawer-item v-if="drawer.state" :description="$options.i18n.status">
<drawer-item v-if="activeElement.state" :description="$options.i18n.status">
<template #value>
<gl-badge :variant="findingsStatus" class="text-capitalize">{{
drawer.state
activeElement.state
}}</gl-badge>
</template>
</drawer-item>
<drawer-item
v-if="drawer.description"
v-if="activeElement.description"
:description="$options.i18n.description"
:value="drawer.description"
:value="activeElement.description"
/>
<drawer-item
v-if="project && drawer.scale !== $options.codeQuality"
v-if="project && activeElement.scale !== $options.codeQuality"
:description="$options.i18n.project"
>
<template #value>
@ -107,23 +148,31 @@ export default {
</template>
</drawer-item>
<drawer-item v-if="drawer.location || drawer.webUrl" :description="$options.i18n.file">
<drawer-item
v-if="activeElement.location || activeElement.webUrl"
:description="$options.i18n.file"
>
<template #value>
<span v-if="drawer.webUrl && drawer.filePath && drawer.line">
<gl-link :href="drawer.webUrl">{{ drawer.filePath }}:{{ drawer.line }}</gl-link>
<span v-if="activeElement.webUrl && activeElement.filePath && activeElement.line">
<gl-link :href="activeElement.webUrl"
>{{ activeElement.filePath }}:{{ activeElement.line }}</gl-link
>
</span>
<span v-else-if="drawer.location">
{{ drawer.location.file }}:{{ drawer.location.startLine }}
<span v-else-if="activeElement.location">
{{ activeElement.location.file }}:{{ activeElement.location.startLine }}
</span>
</template>
</drawer-item>
<drawer-item
v-if="drawer.identifiers && drawer.identifiers.length"
v-if="activeElement.identifiers && activeElement.identifiers.length"
:description="$options.i18n.identifiers"
>
<template #value>
<span v-for="(identifier, index) in drawer.identifiers" :key="identifier.externalId">
<span
v-for="(identifier, index) in activeElement.identifiers"
:key="identifier.externalId"
>
<gl-link v-if="identifier.url" :href="identifier.url">
{{ concatIdentifierName(identifier.name, index) }}
</gl-link>
@ -135,15 +184,15 @@ export default {
</drawer-item>
<drawer-item
v-if="drawer.scale"
v-if="activeElement.scale"
:description="$options.i18n.tool"
:value="isCodeQuality ? $options.i18n.codeQuality : $options.i18n.sast"
/>
<drawer-item
v-if="drawer.engineName"
v-if="activeElement.engineName"
:description="$options.i18n.engine"
:value="drawer.engineName"
:value="activeElement.engineName"
/>
</ul>
</template>

View File

@ -117,6 +117,9 @@
"name": {
"$ref": "#/definitions/workflowName"
},
"auto_cancel": {
"$ref": "#/definitions/workflowAutoCancel"
},
"rules": {
"type": "array",
"items": {
@ -935,6 +938,21 @@
"minLength": 1,
"maxLength": 255
},
"workflowAutoCancel": {
"type": "object",
"markdownDescription": "Define the rules for when a pipeline should be automatically cancelled.",
"properties": {
"on_job_failure": {
"markdownDescription": "Define which jobs to stop after a job fails.",
"default": "none",
"type": "string",
"enum": [
"none",
"all"
]
}
}
},
"globalVariables": {
"markdownDescription": "Defines default variables for all jobs. Job level property overrides global variables. [Learn More](https://docs.gitlab.com/ee/ci/yaml/#variables).",
"type": "object",

View File

@ -1,5 +1,5 @@
<script>
import { GlSkeletonLoader, GlTabs, GlTab, GlBadge } from '@gitlab/ui';
import { GlSkeletonLoader, GlTabs, GlTab, GlBadge, GlPagination } from '@gitlab/ui';
import { __, s__ } from '~/locale';
import folderQuery from '../graphql/queries/folder.query.graphql';
import environmentToDeleteQuery from '../graphql/queries/environment_to_delete.query.graphql';
@ -15,6 +15,7 @@ import { ENVIRONMENTS_SCOPE } from '../constants';
export default {
components: {
GlPagination,
GlBadge,
GlTabs,
GlTab,
@ -39,6 +40,10 @@ export default {
required: true,
default: ENVIRONMENTS_SCOPE.ACTIVE,
},
page: {
type: Number,
required: true,
},
},
data() {
return {
@ -47,6 +52,7 @@ export default {
environmentToStop: {},
environmentToChangeCanary: {},
weight: 0,
lastRowCount: 3,
};
},
apollo: {
@ -57,7 +63,8 @@ export default {
environment: this.environmentQueryData,
scope: this.scope,
search: '',
perPage: 10,
perPage: this.$options.perPage,
page: this.page,
};
},
pollInterval: 3000,
@ -97,6 +104,70 @@ export default {
activeTab() {
return this.scope === ENVIRONMENTS_SCOPE.ACTIVE ? 0 : 1;
},
totalItems() {
const environmentsCount =
this.scope === ENVIRONMENTS_SCOPE.ACTIVE
? this.folder?.activeCount
: this.folder?.stoppedCount;
return Number(environmentsCount);
},
totalPages() {
return Math.ceil(this.totalItems / this.$options.perPage);
},
hasNextPage() {
return this.page !== this.totalPages;
},
hasPreviousPage() {
return this.page > 1;
},
pageNumber: {
get() {
return this.page;
},
set(newPageNumber) {
if (newPageNumber !== this.page) {
const query = { ...this.$route.query, page: newPageNumber };
this.$router.push({ query });
}
},
},
},
watch: {
environments(newEnvironments) {
if (newEnvironments?.length) {
this.lastRowCount = newEnvironments.length;
}
// When we load a page, if there's next and/or previous pages existing,
// we'll load their data as well to improve perceived performance.
// The page data is cached by apollo client and is immediately accessible
// and won't trigger additional requests
if (this.hasNextPage) {
this.$apollo.query({
query: folderQuery,
variables: {
environment: this.environmentQueryData,
scope: this.scope,
search: '',
perPage: this.$options.perPage,
page: this.page + 1,
},
});
}
if (this.hasPreviousPage) {
this.$apollo.query({
query: folderQuery,
variables: {
environment: this.environmentQueryData,
scope: this.scope,
search: '',
perPage: this.$options.perPage,
page: this.page - 1,
},
});
}
},
},
methods: {
setScope(scope) {
@ -110,6 +181,7 @@ export default {
active: __('Active'),
stopped: __('Stopped'),
},
perPage: 20,
ENVIRONMENTS_SCOPE,
};
</script>
@ -149,7 +221,7 @@ export default {
</gl-tabs>
<div v-if="isLoading">
<div
v-for="n in 3"
v-for="n in lastRowCount"
:key="`skeleton-box-${n}`"
class="gl-border-gray-100 gl-border-t-solid gl-border-1 gl-py-5 gl-md-pl-7"
>
@ -157,14 +229,28 @@ export default {
</div>
</div>
<div v-else>
<!--
We assign each element's key as index intentionally here.
Creation and destruction of "environments-item" component is quite taxing and leads
to noticeable blocking rendering times for lists of more than 10 items.
By assigning indexes we avoid destroying and re-creating the components when page changes,
thus getting a much better performance.
Correct component state is ensured by deep data-binding of "environment" prop
-->
<environment-item
v-for="environment in environments"
v-for="(environment, index) in environments"
:id="environment.name"
:key="environment.name"
:key="index"
:environment="environment"
class="gl-border-gray-100 gl-border-t-solid gl-border-1 gl-pt-3"
in-folder
/>
</div>
<gl-pagination
v-model="pageNumber"
:per-page="$options.perPage"
:total-items="totalItems"
align="center"
/>
</div>
</template>

View File

@ -35,6 +35,7 @@ export default () => {
component: EnvironmentsFolderApp,
props: (route) => ({
scope: route.query.scope,
page: Number(route.query.page || '1'),
folderName,
folderPath,
}),

View File

@ -3,8 +3,10 @@ query getEnvironmentFolder(
$scope: String
$search: String
$perPage: Int
$page: Int
) {
folder(environment: $environment, scope: $scope, search: $search, perPage: $perPage) @client {
folder(environment: $environment, scope: $scope, search: $search, perPage: $perPage, page: $page)
@client {
activeCount
environments
stoppedCount

View File

@ -59,15 +59,18 @@ export const baseQueries = (endpoint) => ({
};
});
},
folder(_, { environment: { folderPath }, scope, search, perPage }) {
folder(_, { environment: { folderPath }, scope, search, perPage, page }) {
// eslint-disable-next-line camelcase
const per_page = perPage || 3;
return axios.get(folderPath, { params: { scope, search, per_page } }).then((res) => ({
activeCount: res.data.active_count,
environments: res.data.environments.map(mapEnvironment),
stoppedCount: res.data.stopped_count,
__typename: 'LocalEnvironmentFolder',
}));
const pageNumber = page || 1;
return axios
.get(folderPath, { params: { scope, search, per_page, page: pageNumber } })
.then((res) => ({
activeCount: res.data.active_count,
environments: res.data.environments.map(mapEnvironment),
stoppedCount: res.data.stopped_count,
__typename: 'LocalEnvironmentFolder',
}));
},
isLastDeployment(_, { environment }) {
return environment?.lastDeployment?.isLast;

View File

@ -131,6 +131,7 @@
height: $gl-padding-24;
border-radius: $gl-padding-24;
font-size: $gl-font-size-xs;
position: relative;
@include media-breakpoint-down(md) {
min-width: auto;
@ -140,15 +141,9 @@
}
}
.user-avatar-link:not(:only-child) {
margin-left: -$gl-padding;
&:nth-of-type(1) {
z-index: 2;
}
&:nth-of-type(2) {
z-index: 1;
.user-avatar-link {
&:not(:last-of-type) {
@include gl-mr-n3;
}
}

View File

@ -729,16 +729,12 @@ module Ci
end
def artifacts_public?
return true if Feature.disabled?(:non_public_artifacts, project, type: :development)
return true if job_artifacts_archive.nil? # To backward compatibility return true if no artifacts found
job_artifacts_archive.public_access?
end
def artifact_is_public_in_config?
return true if Feature.disabled?(:non_public_artifacts, project, type: :development)
artifacts_public = options.dig(:artifacts, :public)
return true if artifacts_public.nil? # Default artifacts:public to true

View File

@ -362,8 +362,6 @@ module Ci
end
def public_access?
return true unless Feature.enabled?(:non_public_artifacts, project, type: :development)
public_accessibility?
end

View File

@ -97,11 +97,11 @@ module Ci
has_many :build_trace_chunks, class_name: 'Ci::BuildTraceChunk', through: :builds, source: :trace_chunks
has_many :trigger_requests, dependent: :destroy, foreign_key: :commit_id, inverse_of: :pipeline # rubocop:disable Cop/ActiveRecordDependent
has_many :variables, class_name: 'Ci::PipelineVariable'
has_many :latest_builds, ->(pipeline) { in_partition(pipeline).latest.with_project_and_metadata(pipeline.project) }, foreign_key: :commit_id, inverse_of: :pipeline, class_name: 'Ci::Build'
has_many :latest_builds, ->(pipeline) { in_partition(pipeline).latest.with_project_and_metadata }, foreign_key: :commit_id, inverse_of: :pipeline, class_name: 'Ci::Build'
has_many :downloadable_artifacts, -> do
not_expired.or(where_exists(Ci::Pipeline.artifacts_locked.where("#{Ci::Pipeline.quoted_table_name}.id = #{Ci::Build.quoted_table_name}.commit_id"))).downloadable.with_job
end, through: :latest_builds, source: :job_artifacts
has_many :latest_successful_jobs, ->(pipeline) { in_partition(pipeline).latest.success.with_project_and_metadata(pipeline.project) }, foreign_key: :commit_id, inverse_of: :pipeline, class_name: 'Ci::Processable'
has_many :latest_successful_jobs, ->(pipeline) { in_partition(pipeline).latest.success.with_project_and_metadata }, foreign_key: :commit_id, inverse_of: :pipeline, class_name: 'Ci::Processable'
has_many :messages, class_name: 'Ci::PipelineMessage', inverse_of: :pipeline
@ -151,7 +151,7 @@ module Ci
accepts_nested_attributes_for :variables, reject_if: :persisted?
delegate :full_path, to: :project, prefix: true
delegate :name, to: :pipeline_metadata, allow_nil: true
delegate :name, :auto_cancel_on_job_failure, to: :pipeline_metadata, allow_nil: true
validates :sha, presence: { unless: :importing? }
validates :ref, presence: { unless: :importing? }

View File

@ -10,6 +10,11 @@ module Ci
disabled: 2
}, _prefix: true
enum auto_cancel_on_job_failure: {
none: 0,
all: 1
}, _prefix: true
belongs_to :pipeline, class_name: "Ci::Pipeline", inverse_of: :pipeline_metadata
belongs_to :project, class_name: "Project", inverse_of: :pipeline_metadata

View File

@ -27,10 +27,8 @@ module Ci
before_validation :ensure_metadata, on: :create
scope :with_project_and_metadata, ->(project) do
if Feature.enabled?(:non_public_artifacts, project, type: :development)
joins(:metadata).includes(:metadata).preload(:project)
end
scope :with_project_and_metadata, -> do
joins(:metadata).includes(:metadata).preload(:project)
end
end

View File

@ -5,12 +5,10 @@ module Ci
include RequestAwareEntity
expose :artifacts do |pipeline, options|
artifacts = pipeline.downloadable_artifacts
downloadable_artifacts = pipeline.downloadable_artifacts
project = pipeline.project
if Feature.enabled?(:non_public_artifacts, project)
artifacts = artifacts.select { |artifact| can?(request.current_user, :read_job_artifacts, artifact) }
end
artifacts = downloadable_artifacts.select { |artifact| can?(request.current_user, :read_job_artifacts, artifact) }
BuildArtifactEntity.represent(artifacts, options.merge(project: project))
end

View File

@ -27,11 +27,9 @@ class MergeRequests::PipelineEntity < Grape::Entity
rel = pipeline.downloadable_artifacts
project = pipeline.project
if Feature.enabled?(:non_public_artifacts, project, type: :development)
rel = rel.select { |artifact| can?(request.current_user, :read_job_artifacts, artifact) }
end
allowed_to_read_artifacts = rel.select { |artifact| can?(request.current_user, :read_job_artifacts, artifact) }
BuildArtifactEntity.represent(rel, options.merge(project: project))
BuildArtifactEntity.represent(allowed_to_read_artifacts, options.merge(project: project))
end
expose :detailed_status, as: :status, with: DetailedStatusEntity do |pipeline|

View File

@ -128,8 +128,6 @@ module Ci
def accessibility(params)
accessibility = params[:accessibility]
return :public if Feature.disabled?(:non_public_artifacts, project, type: :development)
return accessibility if accessibility.present?
job.artifact_is_public_in_config? ? :public : :private

View File

@ -21,6 +21,7 @@
- content_for :before_content do
= render 'projects/invite_members_modal', project: @project
= dispensable_render_if_exists "projects/importing_alert", project: @project
= dispensable_render_if_exists "shared/web_hooks/web_hook_disabled_alert"
= dispensable_render_if_exists "projects/free_user_cap_alert", project: @project
= dispensable_render_if_exists 'shared/unlimited_members_during_trial_alert', resource: @project

View File

@ -99,7 +99,7 @@ module ClickHouse
strong_memoize_attr :last_event_id_in_postgresql
def enabled?
ClickHouse::Client.configuration.databases[:main].present? && Feature.enabled?(:event_sync_worker_for_click_house)
ClickHouse::Client.database_configured?(:main) && Feature.enabled?(:event_sync_worker_for_click_house)
end
def next_batch

View File

@ -1,8 +1,8 @@
---
name: explain_vulnerability
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/117472
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/407282
milestone: '15.11'
name: auto_cancel_pipeline_on_job_failure
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/137815
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/433163
milestone: '16.7'
type: development
group: group::threat insights
group: group::pipeline execution
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: non_public_artifacts
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/49775
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/294503
milestone: '13.8'
type: development
group: group::pipeline security
default_enabled: false

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
# ClickHouse migration: creates the `ci_used_minutes` table, which stores
# pre-aggregated CI build counts and total durations bucketed by finished-at
# day, project, status and runner type. It is populated by the
# `ci_used_minutes_mv` materialized view (added in a separate migration).
class CreateCiUsedMinutes < ClickHouse::Migration
  def up
    # AggregatingMergeTree merges rows sharing the same ORDER BY key by
    # combining their aggregate-function states:
    # - count_builds holds an intermediate `count` state (AggregateFunction),
    # - total_duration holds a running sum (SimpleAggregateFunction).
    execute <<~SQL
      CREATE TABLE IF NOT EXISTS ci_used_minutes
      (
        project_id UInt64 DEFAULT 0,
        status LowCardinality(String) DEFAULT '',
        runner_type UInt8 DEFAULT 0,
        finished_at_bucket DateTime64(6, 'UTC') DEFAULT now64(),
        count_builds AggregateFunction(count),
        total_duration SimpleAggregateFunction(sum, Int64)
      )
      ENGINE = AggregatingMergeTree()
      ORDER BY (finished_at_bucket, project_id, status, runner_type)
    SQL
  end

  def down
    # NOTE(review): no IF EXISTS guard here, unlike `up` — rolling back twice
    # would raise; confirm this matches the project's migration conventions.
    execute <<~SQL
      DROP TABLE ci_used_minutes
    SQL
  end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
# ClickHouse migration: creates `ci_used_minutes_mv`, a materialized view
# that aggregates rows from `ci_finished_builds` into 1-day buckets and
# writes the aggregate states into the `ci_used_minutes` table (the `TO`
# target, created in a separate migration).
class CreateCiUsedMinutesMv < ClickHouse::Migration
  def up
    # countState()/sumSimpleState() emit intermediate aggregate states, as
    # required by the AggregatingMergeTree engine of the target table;
    # readers must finalize them (e.g. countMerge) when querying.
    execute <<~SQL
      CREATE MATERIALIZED VIEW IF NOT EXISTS ci_used_minutes_mv
      TO ci_used_minutes
      AS
      SELECT
        project_id,
        status,
        runner_type,
        toStartOfInterval(finished_at, INTERVAL 1 day) AS finished_at_bucket,
        countState() AS count_builds,
        sumSimpleState(duration) AS total_duration
      FROM ci_finished_builds
      GROUP BY project_id, status, runner_type, finished_at_bucket
    SQL
  end

  def down
    # Dropping the view stops ingestion only; already-aggregated rows remain
    # in `ci_used_minutes`.
    execute <<~SQL
      DROP VIEW ci_used_minutes_mv
    SQL
  end
end

View File

@ -0,0 +1,9 @@
---
migration_job_name: BackfillVsCodeSettingsUuid
description: Backfills the uuid column of existing vs_code_settings records
feature_category: web_ide
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/138355
milestone: '16.7'
queued_migration_version: 20231130140901
finalize_after: '2023-12-17'
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
# Adds the `auto_cancel_on_job_failure` column backing the
# Ci::PipelineMetadata enum (0 = none, 1 = all), which controls whether a
# pipeline is automatically cancelled when one of its jobs fails.
class AddAutoCancelOnJobFailureToCiPipelineMetadata < Gitlab::Database::Migration[2.2]
  milestone '16.7'

  # Wrap the DDL in lock retries to avoid blocking on busy tables.
  enable_lock_retries!

  def change
    # smallint with a NOT NULL default of 0 maps to the enum's `none` value,
    # so existing rows keep the current (no auto-cancel) behavior.
    add_column :ci_pipeline_metadata, :auto_cancel_on_job_failure, :smallint, default: 0, null: false
  end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
# Enqueues the `BackfillVsCodeSettingsUuid` batched background migration,
# which backfills the uuid column of existing vs_code_settings records
# (per the accompanying BBM dictionary entry).
class QueueBackfillVsCodeSettingsUuid < Gitlab::Database::Migration[2.2]
  milestone "16.7"

  MIGRATION = "BackfillVsCodeSettingsUuid"
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 1000
  SUB_BATCH_SIZE = 100

  # The BBM touches only the main database schema.
  restrict_gitlab_migration gitlab_schema: :gitlab_main

  def up
    # Batches vs_code_settings by primary key; each batch of 1000 rows is
    # processed in sub-batches of 100, with 2 minutes between batches.
    queue_batched_background_migration(
      MIGRATION,
      :vs_code_settings,
      :id,
      job_interval: DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    # Removes the queued BBM so the backfill no longer runs on rollback.
    delete_batched_background_migration(MIGRATION, :vs_code_settings, :id, [])
  end
end

View File

@ -0,0 +1 @@
97e91312cdf33db897b64f45ec14dc748be91fa6741a05bd119357a3c4d17017

View File

@ -0,0 +1 @@
4e9b26432f7c6cfbcd2486a3867665ba50d66ca9bd49f7d70a349f222a136277

View File

@ -14296,6 +14296,7 @@ CREATE TABLE ci_pipeline_metadata (
pipeline_id bigint NOT NULL,
name text,
auto_cancel_on_new_commit smallint DEFAULT 0 NOT NULL,
auto_cancel_on_job_failure smallint DEFAULT 0 NOT NULL,
CONSTRAINT check_9d3665463c CHECK ((char_length(name) <= 255))
);

View File

@ -19756,6 +19756,7 @@ four standard [pagination arguments](#connection-pagination-arguments):
| <a id="groupiterationsiid"></a>`iid` | [`ID`](#id) | Internal ID of the Iteration to look up. |
| <a id="groupiterationsin"></a>`in` | [`[IterationSearchableField!]`](#iterationsearchablefield) | Fields in which the fuzzy-search should be performed with the query given in the argument `search`. Defaults to `[title]`. |
| <a id="groupiterationsincludeancestors"></a>`includeAncestors` | [`Boolean`](#boolean) | Whether to include ancestor iterations. Defaults to true. |
| <a id="groupiterationsincludedescendants"></a>`includeDescendants` | [`Boolean`](#boolean) | Whether to include descendant iterations. |
| <a id="groupiterationsiterationcadenceids"></a>`iterationCadenceIds` | [`[IterationsCadenceID!]`](#iterationscadenceid) | Global iteration cadence IDs by which to look up the iterations. |
| <a id="groupiterationssearch"></a>`search` | [`String`](#string) | Query used for fuzzy-searching in the fields selected in the argument `in`. Returns all iterations if empty. |
| <a id="groupiterationssort"></a>`sort` | [`IterationSort`](#iterationsort) | List iterations by sort order. If unspecified, an arbitrary order (subject to change) is used. |
@ -24886,6 +24887,7 @@ four standard [pagination arguments](#connection-pagination-arguments):
| <a id="projectiterationsiid"></a>`iid` | [`ID`](#id) | Internal ID of the Iteration to look up. |
| <a id="projectiterationsin"></a>`in` | [`[IterationSearchableField!]`](#iterationsearchablefield) | Fields in which the fuzzy-search should be performed with the query given in the argument `search`. Defaults to `[title]`. |
| <a id="projectiterationsincludeancestors"></a>`includeAncestors` | [`Boolean`](#boolean) | Whether to include ancestor iterations. Defaults to true. |
| <a id="projectiterationsincludedescendants"></a>`includeDescendants` | [`Boolean`](#boolean) | Whether to include descendant iterations. |
| <a id="projectiterationsiterationcadenceids"></a>`iterationCadenceIds` | [`[IterationsCadenceID!]`](#iterationscadenceid) | Global iteration cadence IDs by which to look up the iterations. |
| <a id="projectiterationssearch"></a>`search` | [`String`](#string) | Query used for fuzzy-searching in the fields selected in the argument `in`. Returns all iterations if empty. |
| <a id="projectiterationssort"></a>`sort` | [`IterationSort`](#iterationsort) | List iterations by sort order. If unspecified, an arbitrary order (subject to change) is used. |

View File

@ -26,18 +26,20 @@ GET /groups/:id/iterations?state=opened
GET /groups/:id/iterations?state=closed
GET /groups/:id/iterations?search=version
GET /groups/:id/iterations?include_ancestors=false
GET /groups/:id/iterations?include_descendants=true
GET /groups/:id/iterations?updated_before=2013-10-02T09%3A24%3A18Z
GET /groups/:id/iterations?updated_after=2013-10-02T09%3A24%3A18Z
```
| Attribute | Type | Required | Description |
| ------------------- | ------- | -------- | ----------- |
| `state` | string | no | 'Return `opened`, `upcoming`, `current`, `closed`, or `all` iterations.' |
| `search` | string | no | Return only iterations with a title matching the provided string. |
| `in` | array of strings | no | Fields in which fuzzy search should be performed with the query given in the argument `search`. The available options are `title` and `cadence_title`. Default is `[title]`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/350991) in GitLab 16.2. |
| `include_ancestors` | boolean | no | Include iterations from parent group and its ancestors. Defaults to `true`. |
| `updated_before` | datetime | no | Return only iterations updated before the given datetime. Expected in ISO 8601 format (`2019-03-15T08:00:00Z`). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/378662) in GitLab 15.10. |
| `updated_after` | datetime | no | Return only iterations updated after the given datetime. Expected in ISO 8601 format (`2019-03-15T08:00:00Z`). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/378662) in GitLab 15.10. |
| Attribute | Type | Required | Description |
| --------------------- | -------- | -------- | ----------- |
| `state`               | string   | no       | Return `opened`, `upcoming`, `current`, `closed`, or `all` iterations. |
| `search` | string | no | Return only iterations with a title matching the provided string. |
| `in` | array of strings | no | Fields in which fuzzy search should be performed with the query given in the argument `search`. The available options are `title` and `cadence_title`. Default is `[title]`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/350991) in GitLab 16.2. |
| `include_ancestors` | boolean | no | Include iterations for group and its ancestors. Defaults to `true`. |
| `include_descendants` | boolean | no | Include iterations for group and its descendants. Defaults to `false`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135764) in GitLab 16.7. |
| `updated_before` | datetime | no | Return only iterations updated before the given datetime. Expected in ISO 8601 format (`2019-03-15T08:00:00Z`). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/378662) in GitLab 15.10. |
| `updated_after` | datetime | no | Return only iterations updated after the given datetime. Expected in ISO 8601 format (`2019-03-15T08:00:00Z`). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/378662) in GitLab 15.10. |
Example request:

View File

@ -28,18 +28,20 @@ GET /projects/:id/iterations?state=opened
GET /projects/:id/iterations?state=closed
GET /projects/:id/iterations?search=version
GET /projects/:id/iterations?include_ancestors=false
GET /projects/:id/iterations?include_descendants=true
GET /projects/:id/iterations?updated_before=2013-10-02T09%3A24%3A18Z
GET /projects/:id/iterations?updated_after=2013-10-02T09%3A24%3A18Z
```
| Attribute | Type | Required | Description |
| ------------------- | ------- | -------- | ----------- |
| `state` | string | no | 'Return `opened`, `upcoming`, `current`, `closed`, or `all` iterations.' |
| `search` | string | no | Return only iterations with a title matching the provided string. |
| `in` | array of strings | no | Fields in which fuzzy search should be performed with the query given in the argument `search`. The available options are `title` and `cadence_title`. Default is `[title]`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/350991) in GitLab 16.2. |
| `include_ancestors` | boolean | no | Include iterations from parent group and its ancestors. Defaults to `true`. |
| `updated_before` | datetime | no | Return only iterations updated before the given datetime. Expected in ISO 8601 format (`2019-03-15T08:00:00Z`). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/378662) in GitLab 15.10. |
| `updated_after` | datetime | no | Return only iterations updated after the given datetime. Expected in ISO 8601 format (`2019-03-15T08:00:00Z`). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/378662) in GitLab 15.10. |
| Attribute | Type | Required | Description |
| --------------------- | -------- | -------- | ----------- |
| `state`               | string   | no       | Return `opened`, `upcoming`, `current`, `closed`, or `all` iterations. |
| `search` | string | no | Return only iterations with a title matching the provided string. |
| `in` | array of strings | no | Fields in which fuzzy search should be performed with the query given in the argument `search`. The available options are `title` and `cadence_title`. Default is `[title]`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/350991) in GitLab 16.2. |
| `include_ancestors` | boolean | no | Include iterations for parent group and its ancestors. Defaults to `true`. |
| `include_descendants` | boolean | no | Include iterations for parent group and its descendants. Defaults to `false`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135764) in GitLab 16.7. |
| `updated_before` | datetime | no | Return only iterations updated before the given datetime. Expected in ISO 8601 format (`2019-03-15T08:00:00Z`). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/378662) in GitLab 15.10. |
| `updated_after` | datetime | no | Return only iterations updated after the given datetime. Expected in ISO 8601 format (`2019-03-15T08:00:00Z`). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/378662) in GitLab 15.10. |
Example request:

View File

@ -259,19 +259,25 @@ Sometimes, a job hangs with the message `Waiting for resource: <resource_group>`
first check that the resource group is working correctly:
1. Go to the job details page.
1. Select **View job currently using resource**.
1. Check the job status:
- If the status is `running` or `pending`, the feature is working correctly. Wait until the job finishes and releases the resource.
- If the status is `created` and the [process mode](#process-modes) is either **Oldest first** or **Newest first**, the feature is working correctly.
Visit the pipeline page of the job and check which upstream stage or job is blocking the execution.
- If none of the above conditions are met, the feature might not be working correctly.
[Open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) with the following information:
- The job ID.
- The job status.
- How often the problem occurs.
- Steps to reproduce the problem.
1. If the resource is assigned to a job, select **View job currently using resource** and check the job status.
You can also get job information from the GraphQL API. You should use the GraphQL API if you use [pipeline-level concurrency control with cross-project/parent-child pipelines](#pipeline-level-concurrency-control-with-cross-projectparent-child-pipelines) because the trigger jobs are not accessible from the UI.
- If the status is `running` or `pending`, the feature is working correctly. Wait until the job finishes and releases the resource.
- If the status is `created` and the [process mode](#process-modes) is either **Oldest first** or **Newest first**, the feature is working correctly.
Visit the pipeline page of the job and check which upstream stage or job is blocking the execution.
- If none of the above conditions are met, the feature might not be working correctly. [Report the issue to GitLab](#report-an-issue).
1. If **View job currently using resource** is not available, the resource is not assigned to a job. Instead, check the resource's upcoming jobs.
1. Get the resource's upcoming jobs with the [REST API](../../api/resource_groups.md#list-upcoming-jobs-for-a-specific-resource-group).
1. Verify that the job's [process mode](#process-modes) is **Oldest first**.
1. Find the first job in the list of upcoming jobs, and get the job details [with GraphQL](#get-job-details-through-graphql).
1. If the first job's pipeline is an older pipeline, try to cancel the pipeline or the job itself.
1. Optional. Repeat this process if the next upcoming job is still in an older pipeline that should no longer run.
1. If the problem persists, [report the issue to GitLab](#report-an-issue).
#### Get job details through GraphQL
You can get job information from the GraphQL API. You should use the GraphQL API if you use [pipeline-level concurrency control with cross-project/parent-child pipelines](#pipeline-level-concurrency-control-with-cross-projectparent-child-pipelines) because the trigger jobs are not accessible from the UI.
To get job information from the GraphQL API:
@ -284,8 +290,9 @@ To get job information from the GraphQL API:
{
project(fullPath: "<fullpath-to-your-project>") {
name
job(id: "gid://gitlab/Ci::Bridge/<job-id>") {
job(id: "gid://gitlab/Ci::Build/<job-id>") {
name
status
detailedStatus {
action {
path
@ -305,7 +312,7 @@ To get job information from the GraphQL API:
{
project(fullPath: "<fullpath-to-your-project>") {
name
job(id: "gid://gitlab/Ci::Bridge/<job-id-currently-using-the-resource>") {
job(id: "gid://gitlab/Ci::Build/<job-id-currently-using-the-resource>") {
name
status
pipeline {
@ -316,4 +323,13 @@ To get job information from the GraphQL API:
}
```
If the status is not `running` or `pending`, [open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) and [contact support](https://about.gitlab.com/support/#contact-support) so they can apply the correct labels to the issue.
### Report an issue
[Open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) with the following information:
- The ID of the affected job.
- The job status.
- How often the problem occurs.
- Steps to reproduce the problem.
You can also [contact support](https://about.gitlab.com/support/#contact-support) for further assistance, or to get in touch with the development team.

View File

@ -1251,6 +1251,7 @@ job:
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/223273) in GitLab 13.8 [with a flag](../../user/feature_flags.md) named `non_public_artifacts`, disabled by default.
> - [Updated](https://gitlab.com/gitlab-org/gitlab/-/issues/322454) in GitLab 15.10. Artifacts created with `artifacts:public` before 15.10 are not guaranteed to remain private after this update.
> - [Updated](https://gitlab.com/gitlab-org/gitlab/-/issues/294503) in GitLab 16.7. Rolled out and removed a feature flag named `non_public_artifacts`
WARNING:
On self-managed GitLab, by default this feature is not available. To make it available,

View File

@ -66,17 +66,15 @@ The following migration helpers are available in `ee/app/workers/concerns/elasti
Backfills a specific field in an index. In most cases, the mapping for the field should already be added.
Requires the `index_name` and `field_name` methods to backfill a single field.
Requires the `field_name` method and `DOCUMENT_TYPE` constant to backfill a single field.
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationBackfillHelper
private
DOCUMENT_TYPE = Issue
def index_name
Issue.__elasticsearch__.index_name
end
private
def field_name
:schema_version
@ -84,17 +82,15 @@ class MigrationName < Elastic::Migration
end
```
Requires the `index_name` and `field_names` methods to backfill multiple fields if any field is null.
Requires the `field_names` method and `DOCUMENT_TYPE` constant to backfill multiple fields if any field is null.
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationBackfillHelper
private
DOCUMENT_TYPE = Issue
def index_name
Issue.__elasticsearch__.index_name
end
private
def field_names
%w[schema_version visibility_level]
@ -106,17 +102,15 @@ end
Updates a mapping in an index by calling `put_mapping` with the mapping specified.
Requires the `index_name` and `new_mappings` methods.
Requires the `new_mappings` method and `DOCUMENT_TYPE` constant.
```ruby
class MigrationName < Elastic::Migration
include Elastic::MigrationUpdateMappingsHelper
private
DOCUMENT_TYPE = Issue
def index_name
Issue.__elasticsearch__.index_name
end
private
def new_mappings
{

View File

@ -252,7 +252,7 @@ were introduced by the changes made in the merge request.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available, an administrator can [enable the feature flag](../../../administration/feature_flags.md) named `sast_reports_in_inline_diff`.
On GitLab.com, this feature is not available.
On GitLab.com, this feature is available.
SAST results display in the merge request **Changes** view. Lines containing SAST
issues are marked by a symbol beside the gutter. Select the symbol to see the list of issues, then select an issue to see its details.

View File

@ -121,6 +121,7 @@ The following table lists project permissions available for each role:
| [License Scanning](compliance/license_scanning_of_cyclonedx_files/index.md):<br>View License Compliance reports | ✓ (1) | ✓ | ✓ | ✓ | ✓ |
| [License Scanning](compliance/license_scanning_of_cyclonedx_files/index.md):<br>View License list | | ✓ | ✓ | ✓ | ✓ |
| [License approval policies](../user/compliance/license_approval_policies.md):<br>Manage license policy | | | | ✓ | ✓ |
| [Merge requests](project/merge_requests/index.md):<br>View a merge request | ✓ (1) | ✓ | ✓ | ✓ | ✓ |
| [Merge requests](project/merge_requests/index.md):<br>Assign reviewer | | | ✓ | ✓ | ✓ |
| [Merge requests](project/merge_requests/index.md):<br>View list | (25) | ✓ | ✓ | ✓ | ✓ |
| [Merge requests](project/merge_requests/index.md):<br>Apply code change suggestions | | | ✓ | ✓ | ✓ |

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

View File

@ -82,7 +82,7 @@ Prerequisites:
To add a time entry using the user interface:
1. In the **Time tracking** section of the sidebar, select **Add time entry** (**{plus}**). A modal opens.
1. In the **Time tracking** section of the sidebar, select **Add time entry** (**{plus}**). A dialog opens.
1. Enter:
- The amount of time spent.
@ -154,11 +154,9 @@ To delete all the time spent at once, use the `/remove_time_spent` [quick action
## View a time tracking report
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/271409) in GitLab 13.12.
### For an issue or merge request
You can view a breakdown of time spent on an issue or merge request.
To view a time tracking report:
To view a time tracking report of time spent on an issue or merge request:
1. Go to an issue or a merge request.
1. In the right sidebar, select **Time tracking report**.
@ -167,22 +165,46 @@ To view a time tracking report:
The breakdown of spent time displayed is limited to a maximum of 100 entries.
### Global time tracking report **(EXPERIMENT)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/344002) in GitLab 15.11 [with a flag](../../administration/feature_flags.md) named `global_time_tracking_report`. Disabled by default.
> - Enabled on GitLab.com in GitLab 16.5.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available, an administrator can [enable the feature flag](../../administration/feature_flags.md) named `global_time_tracking_report`.
On GitLab.com, this feature is available.
This feature is not ready for production use.
View a report of time spent in issues and merge requests across all of GitLab.
This feature is an [Experiment](../../policy/experiment-beta-support.md).
If you find a bug, let us know in the [feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/435222).
To view the global time tracking report:
1. In your browser, enter the global report's URL:
- For self-managed, add `/-/timelogs` to your base URL. For example, `https://gitlab.example.com/-/timelogs`.
- For GitLab.com, go to <https://gitlab.com/-/timelogs>.
1. Optional. To filter by a specific user, enter their username without the `@` symbol.
1. Select start and end dates.
1. Select **Run report**.
![global time tracking report](img/global_time_report_v16_5.png)
## Available time units
The following time units are available:
| Time unit | What to type | Conversion rate |
| --------- | --------------------------- | --------------- |
| Month | `mo`, `month`, or `months` | 4 w (160 h) |
| Week | `w`, `week`, or `weeks` | 5 d (40 h) |
| Day | `d`, `day`, or `days` | 8 h |
| Hour | `h`, `hour`, or `hours` | 60 m |
| Month | `mo`, `month`, or `months` | 4 w (160 h) |
| Week | `w`, `week`, or `weeks` | 5 d (40 h) |
| Day | `d`, `day`, or `days` | 8 h |
| Hour | `h`, `hour`, or `hours` | 60 m |
| Minute | `m`, `minute`, or `minutes` | |
### Limit displayed units to hours **(FREE SELF)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/29469/) in GitLab 12.1.
In GitLab self-managed instances, you can limit the display of time units to hours.
To do so:

View File

@ -30,6 +30,10 @@ module ClickHouse
DatabaseError = Class.new(Error)
QueryError = Class.new(Error)
# Whether a connection configuration exists for the given ClickHouse
# database key. Returns a strict boolean regardless of what the
# configuration stores for that key.
def self.database_configured?(database, configuration = self.configuration)
  configuration.databases[database] ? true : false
end
# Executes a SELECT database query
def self.select(query, database, configuration = self.configuration)
instrumented_execute(query, database, configuration) do |response, instrument|

View File

@ -78,7 +78,9 @@ module CsvBuilder
def row(object)
attributes.map do |attribute|
if attribute.respond_to?(:call)
if object.is_a?(Hash)
excel_sanitize(object[attribute])
elsif attribute.respond_to?(:call)
excel_sanitize(attribute.call(object))
else
excel_sanitize(object.public_send(attribute)) # rubocop:disable GitlabSecurity/PublicSend

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # Batched background migration that assigns a freshly generated UUID to
    # every vs_code_settings row whose `uuid` column is still NULL. Rows that
    # already have a uuid are excluded by the scope and left untouched.
    class BackfillVsCodeSettingsUuid < BatchedMigrationJob
      operation_name :backfill_vs_code_settings_uuid
      scope_to ->(relation) { relation.where(uuid: nil) }
      feature_category :web_ide

      def perform
        each_sub_batch do |sub_batch|
          # Build full attribute rows so upsert_all updates the batch in one
          # statement instead of one UPDATE per record.
          rows = sub_batch.map { |setting| setting.attributes.merge(uuid: SecureRandom.uuid) }

          VsCode::Settings::VsCodeSetting.upsert_all(rows)
        end
      end
    end
  end
end

View File

@ -8,8 +8,9 @@ module Gitlab
include ::Gitlab::Config::Entry::Attributable
include ::Gitlab::Config::Entry::Validatable
ALLOWED_KEYS = %i[on_new_commit].freeze
ALLOWED_KEYS = %i[on_new_commit on_job_failure].freeze
ALLOWED_ON_NEW_COMMIT_OPTIONS = ::Ci::PipelineMetadata.auto_cancel_on_new_commits.keys.freeze
ALLOWED_ON_JOB_FAILURE_OPTIONS = ::Ci::PipelineMetadata.auto_cancel_on_job_failures.keys.freeze
attributes ALLOWED_KEYS
@ -19,6 +20,10 @@ module Gitlab
in: ALLOWED_ON_NEW_COMMIT_OPTIONS,
message: format(_("must be one of: %{values}"), values: ALLOWED_ON_NEW_COMMIT_OPTIONS.join(', '))
}
validates :on_job_failure, allow_nil: true, type: String, inclusion: {
in: ALLOWED_ON_JOB_FAILURE_OPTIONS,
message: format(_("must be one of: %{values}"), values: ALLOWED_ON_JOB_FAILURE_OPTIONS.join(', '))
}
end
end
end

View File

@ -36,13 +36,31 @@ module Gitlab
def set_auto_cancel
auto_cancel = @command.yaml_processor_result.workflow_auto_cancel
auto_cancel_on_new_commit = auto_cancel&.dig(:on_new_commit)
return if auto_cancel.blank?
set_auto_cancel_on_new_commit(auto_cancel)
set_auto_cancel_on_job_failure(auto_cancel)
end
def set_auto_cancel_on_new_commit(auto_cancel)
auto_cancel_on_new_commit = auto_cancel[:on_new_commit]
return if auto_cancel_on_new_commit.blank?
assign_to_metadata(auto_cancel_on_new_commit: auto_cancel_on_new_commit)
end
def set_auto_cancel_on_job_failure(auto_cancel)
return if Feature.disabled?(:auto_cancel_pipeline_on_job_failure, pipeline.project)
auto_cancel_on_job_failure = auto_cancel[:on_job_failure]
return if auto_cancel_on_job_failure.blank?
assign_to_metadata(auto_cancel_on_job_failure: auto_cancel_on_job_failure)
end
def global_context
Gitlab::Ci::Build::Context::Global.new(
pipeline, yaml_variables: @command.pipeline_seed.root_variables)

View File

@ -25005,6 +25005,9 @@ msgstr ""
msgid "Import|There is not a valid Git repository at this URL. If your HTTP repository is not publicly accessible, verify your credentials."
msgstr ""
msgid "Import|This project is being imported. Do not make any changes to the project until the import is complete."
msgstr ""
msgid "Import|Timeout for decompressing archived files (seconds)"
msgstr ""

View File

@ -119,7 +119,7 @@ module QA
def with_retry_on_too_many_requests
response = nil
Support::Retrier.retry_until(log: false) do
Support::Retrier.retry_until(log: false, message: "Retrying upon receiving 429 HTTP status") do
response = yield
if response.code == HTTP_STATUS_TOO_MANY_REQUESTS

View File

@ -263,7 +263,7 @@ module QA
rows: group_specs.map do |name, result|
[
name_column(name: name, file: result[:file], link: result[:link],
exceptions_and_job_urls: result[:exceptions_and_job_urls], markdown: markdown),
exceptions_and_related_urls: result[:exceptions_and_related_urls], markdown: markdown),
*table_params(result.values)
]
end
@ -345,12 +345,13 @@ module QA
# @param [String] name
# @param [String] file
# @param [String] link
# @param [Hash] exceptions_and_job_urls
# @param [Hash] exceptions_and_related_urls
# @param [Boolean] markdown
# @return [String]
def name_column(name:, file:, link:, exceptions_and_job_urls:, markdown: false)
def name_column(name:, file:, link:, exceptions_and_related_urls:, markdown: false)
if markdown
return "**Name**: #{name}<br>**File**: [#{file}](#{link})#{exceptions_markdown(exceptions_and_job_urls)}"
return "**Name**: #{name}<br>**File**: " \
"[#{file}](#{link})#{exceptions_markdown(exceptions_and_related_urls)}"
end
wrapped_name = name.length > 150 ? "#{name} ".scan(/.{1,150} /).map(&:strip).join("\n") : name
@ -359,13 +360,13 @@ module QA
# Formatted exceptions with link to job url
#
# @param [Hash] exceptions_and_job_urls
# @param [Hash] exceptions_and_related_urls
# @return [String]
def exceptions_markdown(exceptions_and_job_urls)
return '' if exceptions_and_job_urls.empty?
def exceptions_markdown(exceptions_and_related_urls)
return '' if exceptions_and_related_urls.empty?
"<br>**Exceptions**:#{exceptions_and_job_urls.keys.map do |e|
"<br>- [`#{e.truncate(250).tr('`', "'")}`](#{exceptions_and_job_urls[e]})"
"<br>**Exceptions**:#{exceptions_and_related_urls.keys.map do |e|
"<br>- [`#{e.truncate(250).tr('`', "'")}`](#{exceptions_and_related_urls[e]})"
end.join('')}"
end
@ -398,20 +399,13 @@ module QA
failure_rate = (failed.to_f / runs) * 100
records_with_exception = records.reject { |r| !r.values["failure_exception"] }
# Since exception is the key in the below hash, only one instance of an occurrence is kept
exceptions_and_job_urls = records_with_exception.to_h do |r|
[r.values["failure_exception"], r.values["job_url"]]
end
result[stage][product_group] ||= {}
result[stage][product_group][name] = {
file: file,
link: link,
runs: runs,
failed: failed,
exceptions_and_job_urls: exceptions_and_job_urls,
exceptions_and_related_urls: exceptions_and_related_urls(records),
failure_rate: failure_rate == 0 ? failure_rate.round(0) : failure_rate.round(2)
}
end
@ -419,6 +413,19 @@ module QA
# rubocop:enable Metrics/AbcSize
# Return hash of exceptions as key and failure_issue or job_url urls as value
#
# Records without a failure exception are skipped. Because the exception
# text is the hash key, only the last occurrence of a repeated exception is
# kept. The failure issue url is preferred over the job url when both exist.
#
# @param [Array<InfluxDB2::FluxRecord>] records
# @return [Hash]
def exceptions_and_related_urls(records)
  # select with a positive predicate instead of reject-with-negation
  records_with_exception = records.select { |r| r.values["failure_exception"] }

  records_with_exception.to_h do |r|
    [r.values["failure_exception"], r.values["failure_issue"] || r.values["job_url"]]
  end
end
# Check if failure is allowed
#
# @param [String] failure_exception
@ -465,7 +472,8 @@ module QA
)
|> filter(fn: (r) => r["_field"] == "job_url" or
r["_field"] == "failure_exception" or
r["_field"] == "id"
r["_field"] == "id" or
r["_field"] == "failure_issue"
)
|> pivot(rowKey: ["_time"], columnKey: ["_field"], valueColumn: "_value")
|> group(columns: ["name"])

View File

@ -4,3 +4,9 @@ require_relative '../qa'
require_relative 'scenario_shared_examples'
require_relative('../../jh/qa/spec/spec_helper') if GitlabEdition.jh?
RSpec.configure do |config|
  config.expect_with :rspec do |expectations|
    # Disable rspec-expectations' truncation of failure output (nil means
    # unlimited) so full object diffs are visible when an expectation fails.
    expectations.max_formatted_output_length = nil
  end
end

View File

@ -118,7 +118,8 @@ describe QA::Tools::ReliableReport do
)
|> filter(fn: (r) => r["_field"] == "job_url" or
r["_field"] == "failure_exception" or
r["_field"] == "id"
r["_field"] == "id" or
r["_field"] == "failure_issue"
)
|> pivot(rowKey: ["_time"], columnKey: ["_field"], valueColumn: "_value")
|> group(columns: ["name"])
@ -151,12 +152,12 @@ describe QA::Tools::ReliableReport do
)
end
def name_column(spec_name, exceptions_and_job_urls = {})
"**Name**: #{spec_name}<br>**File**: [spec.rb](https://gitlab.com/gitlab-org/gitlab/-/blob/master/qa/qa/specs/features/some/spec.rb)#{exceptions_markdown(exceptions_and_job_urls)}"
def name_column(spec_name, exceptions_and_related_urls = {})
"**Name**: #{spec_name}<br>**File**: [spec.rb](https://gitlab.com/gitlab-org/gitlab/-/blob/master/qa/qa/specs/features/some/spec.rb)#{exceptions_markdown(exceptions_and_related_urls)}"
end
def exceptions_markdown(exceptions_and_job_urls)
exceptions_and_job_urls.empty? ? '' : "<br>**Exceptions**:<br>- [`#{failure_message}`](https://job/url)"
def exceptions_markdown(exceptions_and_related_urls)
exceptions_and_related_urls.empty? ? '' : "<br>**Exceptions**:<br>- [`#{failure_message}`](https://job/url)"
end
before do
@ -373,4 +374,59 @@ describe QA::Tools::ReliableReport do
%q([Unable to find css "[data-testid=\"user_action_dropdown\"]"]))).to be false
end
end
describe "#exceptions_and_related_urls" do
subject(:reliable_report) { described_class.new(14) }
let(:failure_message) { "This is a failure exception" }
let(:job_url) { "https://example.com/job/url" }
let(:failure_issue_url) { "https://example.com/failure/issue" }
let(:records) do
[instance_double("InfluxDB2::FluxRecord", values: values)]
end
context "without failure_exception" do
let(:values) do
{
"failure_exception" => nil,
"job_url" => job_url,
"failure_issue" => failure_issue_url
}
end
it "returns an empty hash" do
expect(reliable_report.send(:exceptions_and_related_urls, records)).to be_empty
end
context "with failure_exception" do
context "without failure_issue" do
let(:values) do
{
"failure_exception" => failure_message,
"job_url" => job_url
}
end
it "returns job_url as value" do
expect(reliable_report.send(:exceptions_and_related_urls, records).values).to eq([job_url])
end
end
context "with failure_issue and job_url" do
let(:values) do
{
"failure_exception" => failure_message,
"failure_issue" => failure_issue_url,
"job_url" => job_url
}
end
it "returns failure_issue as value" do
expect(reliable_report.send(:exceptions_and_related_urls, records).values).to eq([failure_issue_url])
end
end
end
end
end
end

View File

@ -99,6 +99,30 @@ RSpec.describe 'Environments Folder page', :js, feature_category: :environment_m
expect(page).not_to have_content(get_env_name(stopped_env))
end
end
describe 'pagination' do
# rubocop:disable RSpec/FactoryBot/ExcessiveCreateList -- need >20 items to test pagination
let!(:envs) { create_list(:environment, 25, :with_folders, project: project, folder: folder_name) }
# rubocop:enable RSpec/FactoryBot/ExcessiveCreateList
it 'shows pagination' do
pagination = find('.pagination')
expect(pagination).to have_content('2')
end
it 'can navigate to the next page and updates the url' do
pagination = find('.pagination')
pagination.scroll_to(:bottom)
within(pagination) do
click_link 'Next'
end
wait_for_requests
expect(current_url).to include('page=2')
end
end
end
describe 'legacy folders page' do

View File

@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`FindingsDrawer matches the snapshot with detected badge 1`] = `
exports[`FindingsDrawer General Rendering matches the snapshot with detected badge 1`] = `
<transition-stub
class="findings-drawer"
name="gl-drawer"
@ -16,7 +16,7 @@ exports[`FindingsDrawer matches the snapshot with detected badge 1`] = `
class="gl-drawer-title"
>
<h2
class="drawer-heading gl-font-base gl-mb-0 gl-mt-0"
class="drawer-heading gl-font-base gl-mb-0 gl-mt-0 gl-w-28"
>
<svg
aria-hidden="true"
@ -61,6 +61,7 @@ exports[`FindingsDrawer matches the snapshot with detected badge 1`] = `
>
<li
class="gl-mb-4"
data-testid="findings-drawer-title"
>
<p
class="gl-line-height-20"
@ -219,7 +220,7 @@ exports[`FindingsDrawer matches the snapshot with detected badge 1`] = `
</transition-stub>
`;
exports[`FindingsDrawer matches the snapshot with dismissed badge 1`] = `
exports[`FindingsDrawer General Rendering matches the snapshot with dismissed badge 1`] = `
<transition-stub
class="findings-drawer"
name="gl-drawer"
@ -235,7 +236,7 @@ exports[`FindingsDrawer matches the snapshot with dismissed badge 1`] = `
class="gl-drawer-title"
>
<h2
class="drawer-heading gl-font-base gl-mb-0 gl-mt-0"
class="drawer-heading gl-font-base gl-mb-0 gl-mt-0 gl-w-28"
>
<svg
aria-hidden="true"
@ -280,6 +281,7 @@ exports[`FindingsDrawer matches the snapshot with dismissed badge 1`] = `
>
<li
class="gl-mb-4"
data-testid="findings-drawer-title"
>
<p
class="gl-line-height-20"
@ -310,9 +312,9 @@ exports[`FindingsDrawer matches the snapshot with dismissed badge 1`] = `
Status
</span>
<span
class="badge badge-muted badge-pill gl-badge md text-capitalize"
class="badge badge-pill badge-warning gl-badge md text-capitalize"
>
dismissed
detected
</span>
</p>
</li>

View File

@ -1,3 +1,4 @@
import { nextTick } from 'vue';
import { GlDrawer } from '@gitlab/ui';
import FindingsDrawer from '~/diffs/components/shared/findings_drawer.vue';
import { mountExtended } from 'helpers/vue_test_utils_helper';
@ -5,41 +6,101 @@ import {
mockFindingDismissed,
mockFindingDetected,
mockProject,
mockFindingsMultiple,
} from '../../mock_data/findings_drawer';
let wrapper;
const getDrawer = () => wrapper.findComponent(GlDrawer);
const closeEvent = 'close';
const createWrapper = (finding = mockFindingDismissed) => {
return mountExtended(FindingsDrawer, {
propsData: {
drawer: finding,
project: mockProject,
},
});
};
describe('FindingsDrawer', () => {
it('renders without errors', () => {
wrapper = createWrapper();
expect(wrapper.exists()).toBe(true);
let wrapper;
const findPreviousButton = () => wrapper.findByTestId('findings-drawer-prev-button');
const findNextButton = () => wrapper.findByTestId('findings-drawer-next-button');
const findTitle = () => wrapper.findByTestId('findings-drawer-title');
const createWrapper = (
drawer = { findings: [mockFindingDetected], index: 0 },
project = mockProject,
) => {
return mountExtended(FindingsDrawer, {
propsData: {
drawer,
project,
},
});
};
describe('General Rendering', () => {
beforeEach(() => {
wrapper = createWrapper();
});
it('renders without errors', () => {
expect(wrapper.exists()).toBe(true);
});
it('emits close event when gl-drawer emits close event', () => {
wrapper.findComponent(GlDrawer).vm.$emit('close');
expect(wrapper.emitted('close')).toHaveLength(1);
});
it('matches the snapshot with dismissed badge', () => {
expect(wrapper.element).toMatchSnapshot();
});
it('matches the snapshot with detected badge', () => {
expect(wrapper.element).toMatchSnapshot();
});
});
it('emits close event when gl-drawer emits close event', () => {
wrapper = createWrapper();
describe('Prev/Next Buttons with Multiple Items', () => {
it('renders prev/next buttons when there are multiple items', () => {
wrapper = createWrapper({ findings: mockFindingsMultiple, index: 0 });
expect(findPreviousButton().exists()).toBe(true);
expect(findNextButton().exists()).toBe(true);
});
getDrawer().vm.$emit(closeEvent);
expect(wrapper.emitted(closeEvent)).toHaveLength(1);
it('does not render prev/next buttons when there is only one item', () => {
wrapper = createWrapper({ findings: [mockFindingDismissed], index: 0 });
expect(findPreviousButton().exists()).toBe(false);
expect(findNextButton().exists()).toBe(false);
});
it('calls prev method on prev button click and loops correct activeIndex', async () => {
wrapper = createWrapper({ findings: mockFindingsMultiple, index: 0 });
expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[0].title}`);
await findPreviousButton().trigger('click');
await nextTick();
expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[2].title}`);
await findPreviousButton().trigger('click');
await nextTick();
expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[1].title}`);
});
it('calls next method on next button click', async () => {
wrapper = createWrapper({ findings: mockFindingsMultiple, index: 0 });
expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[0].title}`);
await findNextButton().trigger('click');
await nextTick();
expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[1].title}`);
await findNextButton().trigger('click');
await nextTick();
expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[2].title}`);
await findNextButton().trigger('click');
await nextTick();
expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[0].title}`);
});
});
it('matches the snapshot with dismissed badge', () => {
wrapper = createWrapper();
expect(wrapper.element).toMatchSnapshot();
});
describe('Active Index Handling', () => {
it('watcher sets active index on drawer prop change', async () => {
wrapper = createWrapper();
const newFinding = { findings: mockFindingsMultiple, index: 2 };
it('matches the snapshot with detected badge', () => {
wrapper = createWrapper(mockFindingDetected);
expect(wrapper.element).toMatchSnapshot();
await wrapper.setProps({ drawer: newFinding });
await nextTick();
expect(findTitle().text()).toBe(`Name ${mockFindingsMultiple[2].title}`);
});
});
});

View File

@ -31,3 +31,45 @@ export const mockProject = {
nameWithNamespace: 'testname',
fullPath: 'testpath',
};
// Three distinct findings (one dismissed, two detected) used to exercise
// multi-finding behavior in the findings drawer, such as prev/next
// navigation. Each entry overrides the base mock's title, severity,
// engineName, and first identifier so tests can tell the items apart.
export const mockFindingsMultiple = [
  {
    ...mockFindingDismissed,
    title: 'Finding 1',
    severity: 'critical',
    engineName: 'Engine 1',
    identifiers: [
      {
        ...mockFindingDismissed.identifiers[0],
        name: 'identifier 1',
        url: 'https://example.com/identifier1',
      },
    ],
  },
  {
    ...mockFindingDetected,
    title: 'Finding 2',
    severity: 'medium',
    engineName: 'Engine 2',
    identifiers: [
      {
        ...mockFindingDetected.identifiers[0],
        name: 'identifier 2',
        url: 'https://example.com/identifier2',
      },
    ],
  },
  {
    ...mockFindingDetected,
    title: 'Finding 3',
    severity: 'medium',
    engineName: 'Engine 3',
    identifiers: [
      {
        ...mockFindingDetected.identifiers[0],
        name: 'identifier 3',
        url: 'https://example.com/identifier3',
      },
    ],
  },
];

View File

@ -38,6 +38,8 @@ import SecretsYaml from './yaml_tests/positive_tests/secrets.yml';
import ServicesYaml from './yaml_tests/positive_tests/services.yml';
import NeedsParallelMatrixYaml from './yaml_tests/positive_tests/needs_parallel_matrix.yml';
import ScriptYaml from './yaml_tests/positive_tests/script.yml';
import AutoCancelPipelineOnJobFailureAllYaml from './yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/all.yml';
import AutoCancelPipelineOnJobFailureNoneYaml from './yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml';
// YAML NEGATIVE TEST
import ArtifactsNegativeYaml from './yaml_tests/negative_tests/artifacts.yml';
@ -64,6 +66,7 @@ import NeedsParallelMatrixNumericYaml from './yaml_tests/negative_tests/needs/pa
import NeedsParallelMatrixWrongParallelValueYaml from './yaml_tests/negative_tests/needs/parallel_matrix/wrong_parallel_value.yml';
import NeedsParallelMatrixWrongMatrixValueYaml from './yaml_tests/negative_tests/needs/parallel_matrix/wrong_matrix_value.yml';
import ScriptNegativeYaml from './yaml_tests/negative_tests/script.yml';
import AutoCancelPipelineNegativeYaml from './yaml_tests/negative_tests/auto_cancel_pipeline.yml';
const ajv = new Ajv({
strictTypes: false,
@ -107,6 +110,8 @@ describe('positive tests', () => {
SecretsYaml,
NeedsParallelMatrixYaml,
ScriptYaml,
AutoCancelPipelineOnJobFailureAllYaml,
AutoCancelPipelineOnJobFailureNoneYaml,
}),
)('schema validates %s', (_, input) => {
// We construct a new "JSON" from each main key that is inside a
@ -152,6 +157,7 @@ describe('negative tests', () => {
NeedsParallelMatrixWrongParallelValueYaml,
NeedsParallelMatrixWrongMatrixValueYaml,
ScriptNegativeYaml,
AutoCancelPipelineNegativeYaml,
}),
)('schema validates %s', (_, input) => {
// We construct a new "JSON" from each main key that is inside a

View File

@ -0,0 +1,4 @@
# invalid workflow:auto-cancel:on-job-failure
workflow:
auto_cancel:
on_job_failure: unexpected_value

View File

@ -0,0 +1,4 @@
# valid workflow:auto-cancel:on-job-failure
workflow:
auto_cancel:
on_job_failure: all

View File

@ -0,0 +1,4 @@
# valid workflow:auto-cancel:on-job-failure
workflow:
auto_cancel:
on_job_failure: none

View File

@ -1,6 +1,6 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { GlSkeletonLoader, GlTab } from '@gitlab/ui';
import { GlSkeletonLoader, GlTab, GlPagination } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import EnvironmentsFolderAppComponent from '~/environments/folder/environments_folder_app.vue';
import EnvironmentItem from '~/environments/components/new_environment_item.vue';
@ -62,6 +62,7 @@ describe('EnvironmentsFolderAppComponent', () => {
folderName: mockFolderName,
folderPath: '/gitlab-org/test-project/-/environments/folder/dev',
scope: 'active',
page: 1,
},
});
};
@ -119,5 +120,12 @@ describe('EnvironmentsFolderAppComponent', () => {
expect(modal.props().environment).toEqual(resolvedEnvironment);
expect(modal.props().weight).toBe(1);
});
it('should render pagination component', () => {
const pagination = wrapper.findComponent(GlPagination);
expect(pagination.props().perPage).toBe(20);
expect(pagination.props().totalItems).toBe(2);
});
});
});

View File

@ -131,13 +131,14 @@ describe('~/frontend/environments/graphql/resolvers', () => {
describe('folder', () => {
it('should fetch the folder url passed to it', async () => {
mock
.onGet(ENDPOINT, { params: { per_page: 3, scope: 'available', search: '' } })
.onGet(ENDPOINT, { params: { per_page: 3, scope: 'available', search: '', page: 1 } })
.reply(HTTP_STATUS_OK, folder);
const environmentFolder = await mockResolvers.Query.folder(null, {
environment: { folderPath: ENDPOINT },
scope: 'available',
search: '',
page: 1,
});
expect(environmentFolder).toEqual(resolvedFolder);

View File

@ -0,0 +1,74 @@
# frozen_string_literal: true

require 'spec_helper'

# Spec for the batched migration that backfills NULL `uuid` values on
# vs_code_settings rows.
RSpec.describe Gitlab::BackgroundMigration::BackfillVsCodeSettingsUuid, schema: 20231130140901, feature_category: :web_ide do
  let!(:vs_code_settings) { table(:vs_code_settings) }
  let!(:users) { table(:users) }

  let!(:user) do
    users.create!(
      email: "test1@example.com",
      username: "test1",
      notification_email: "test@example.com",
      name: "test",
      state: "active",
      projects_limit: 10)
  end

  subject(:migration) do
    described_class.new(
      start_id: vs_code_setting_one.id,
      end_id: vs_code_setting_two.id,
      batch_table: :vs_code_settings,
      batch_column: :id,
      sub_batch_size: 100,
      pause_ms: 0,
      connection: ActiveRecord::Base.connection
    )
  end

  describe "#perform" do
    context 'when it finds vs_code_setting rows with empty uuid' do
      let(:vs_code_setting_one) do
        vs_code_settings.create!(user_id: user.id, setting_type: 'profiles', content: '{}')
      end

      let(:vs_code_setting_two) do
        vs_code_settings.create!(user_id: user.id, setting_type: 'tasks', content: '{}')
      end

      it 'populates uuid column with a generated uuid' do
        expect(vs_code_setting_one.uuid).to be_nil
        expect(vs_code_setting_two.uuid).to be_nil

        migration.perform

        expect(vs_code_setting_one.reload.uuid).not_to be_nil
        expect(vs_code_setting_two.reload.uuid).not_to be_nil
      end
    end

    context 'when it finds vs_code_setting rows with non-empty uuid' do
      let(:vs_code_setting_one) do
        vs_code_settings.create!(user_id: user.id, setting_type: 'profiles', content: '{}', uuid: SecureRandom.uuid)
      end

      let(:vs_code_setting_two) do
        vs_code_settings.create!(user_id: user.id, setting_type: 'tasks', content: '{}')
      end

      # Description fixed: the copy-pasted "populates uuid column with a
      # generated uuid" did not describe this example, which verifies that
      # pre-existing uuids are preserved.
      it 'keeps the existing uuid and backfills only rows with a missing uuid' do
        expect(vs_code_setting_one.uuid).not_to be_nil
        expect(vs_code_setting_two.uuid).to be_nil

        previous_uuid = vs_code_setting_one.uuid

        migration.perform

        expect(vs_code_setting_one.reload.uuid).to eq(previous_uuid)
        expect(vs_code_setting_two.reload.uuid).not_to be_nil
      end
    end
  end
end

View File

@ -30,6 +30,33 @@ RSpec.describe Gitlab::Ci::Config::Entry::AutoCancel, feature_category: :pipelin
end
end
context 'with on_job_failure' do
['all', 'none', nil].each do |value|
context 'when the `on_job_failure` value is valid' do
let(:config_hash) { { on_job_failure: value } }
it { is_expected.to be_valid }
it 'returns value correctly' do
expect(config.value).to eq(on_job_failure: value)
end
end
end
context 'when on_job_failure is invalid' do
let(:config_hash) do
{ on_job_failure: 'invalid' }
end
it { is_expected.not_to be_valid }
it 'returns errors' do
expect(config.errors)
.to include('auto cancel on job failure must be one of: none, all')
end
end
end
context 'with invalid key' do
let(:config_hash) do
{ invalid: 'interruptible' }

View File

@ -123,7 +123,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow, feature_category: :pipeline_
let(:workflow_hash) do
{
auto_cancel: {
on_new_commit: 'interruptible'
on_new_commit: 'interruptible',
on_job_failure: 'none'
}
}
end

View File

@ -133,9 +133,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata, feature_category:
end
context 'with auto_cancel' do
let(:config) do
{ workflow: { auto_cancel: { on_new_commit: 'interruptible' } }, rspec: { script: 'rspec' } }
end
let(:on_new_commit) { 'interruptible' }
let(:on_job_failure) { 'all' }
let(:auto_cancel) { { on_new_commit: on_new_commit, on_job_failure: on_job_failure } }
let(:config) { { workflow: { auto_cancel: auto_cancel }, rspec: { script: 'rspec' } } }
it_behaves_like 'not breaking the chain'
@ -143,6 +144,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata, feature_category:
run_chain
expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
expect(pipeline.pipeline_metadata).not_to be_persisted
end
@ -155,20 +157,89 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata, feature_category:
end
context 'with auto_cancel: nil' do
let(:config) do
{ workflow: { auto_cancel: nil }, rspec: { script: 'rspec' } }
end
let(:auto_cancel) { nil }
it_behaves_like 'not saving pipeline metadata'
end
context 'with auto_cancel_on_new_commit: nil' do
let(:config) do
{ workflow: { auto_cancel: { on_new_commit: nil } }, rspec: { script: 'rspec' } }
context 'with auto_cancel_on_new_commit and no auto_cancel_on_job_failure' do
let(:auto_cancel) { { on_new_commit: on_new_commit } }
it 'builds pipeline_metadata' do
run_chain
expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
expect(pipeline.pipeline_metadata).not_to be_persisted
end
end
context 'with auto_cancel_on_job_failure and no auto_cancel_on_new_commit' do
let(:auto_cancel) { { on_job_failure: on_job_failure } }
it 'builds pipeline_metadata' do
run_chain
expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('conservative')
expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
expect(pipeline.pipeline_metadata).not_to be_persisted
end
end
context 'with auto_cancel_on_new_commit: nil and auto_cancel_on_job_failure: nil' do
let(:on_new_commit) { nil }
let(:on_job_failure) { nil }
it_behaves_like 'not saving pipeline metadata'
end
context 'with auto_cancel_on_new_commit valid and auto_cancel_on_job_failure: nil' do
let(:on_job_failure) { nil }
it 'builds pipeline_metadata' do
run_chain
expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
expect(pipeline.pipeline_metadata).not_to be_persisted
end
end
context 'with auto_cancel_on_new_commit: nil and auto_cancel_on_job_failure valid' do
let(:on_new_commit) { nil }
it 'builds pipeline_metadata' do
run_chain
expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('conservative')
expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
expect(pipeline.pipeline_metadata).not_to be_persisted
end
end
context 'when auto_cancel_on_job_failure: none' do
let(:on_job_failure) { 'none' }
it 'builds pipeline_metadata' do
run_chain
expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
expect(pipeline.pipeline_metadata).not_to be_persisted
end
end
context 'when auto_cancel_pipeline_on_job_failure feature is disabled' do
before do
stub_feature_flags(auto_cancel_pipeline_on_job_failure: false)
end
it 'ignores the auto_cancel_on_job_failure value' do
run_chain
expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
expect(pipeline.pipeline_metadata).not_to be_persisted
end
end
end
context 'with both pipeline name and auto_cancel' do
@ -176,7 +247,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata, feature_category:
{
workflow: {
name: 'Pipeline name',
auto_cancel: { on_new_commit: 'interruptible' }
auto_cancel: {
on_new_commit: 'interruptible',
on_job_failure: 'none'
}
},
rspec: { script: 'rspec' }
}
@ -189,6 +263,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata, feature_category:
expect(pipeline.pipeline_metadata.name).to eq('Pipeline name')
expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
expect(pipeline.pipeline_metadata).not_to be_persisted
end
end

View File

@ -502,6 +502,7 @@ module Gitlab
workflow:
auto_cancel:
on_new_commit: interruptible
on_job_failure: all
hello:
script: echo world
@ -509,7 +510,10 @@ module Gitlab
end
it 'parses the workflow:auto_cancel as workflow_auto_cancel' do
expect(subject.workflow_auto_cancel).to eq(on_new_commit: 'interruptible')
expect(subject.workflow_auto_cancel).to eq({
on_new_commit: 'interruptible',
on_job_failure: 'all'
})
end
end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

# Verifies that the migration schedules the BackfillVsCodeSettingsUuid batched
# background migration on up, and that the scheduling is reversible on down.
RSpec.describe QueueBackfillVsCodeSettingsUuid, feature_category: :web_ide do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      # Nothing should be enqueued before the migration runs (or after rollback).
      migration.before -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      # After running, a batched migration over vs_code_settings.id must be
      # scheduled with the batching parameters declared on the migration class.
      migration.after -> {
        expect(batched_migration).to have_scheduled_batched_migration(
          table_name: :vs_code_settings,
          column_name: :id,
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE
        )
      }
    end
  end
end

View File

@ -987,24 +987,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
describe '#artifacts_public?' do
subject { build.artifacts_public? }
context 'when non_public_artifacts flag is disabled' do
before do
stub_feature_flags(non_public_artifacts: false)
end
context 'artifacts with defaults - public' do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
it { is_expected.to be_truthy }
end
context 'non public artifacts' do
let(:build) { create(:ci_build, :private_artifacts, pipeline: pipeline) }
it { is_expected.to be_truthy }
end
end
context 'artifacts with defaults - public' do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
@ -1027,30 +1009,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
describe '#artifact_is_public_in_config?' do
subject { build.artifact_is_public_in_config? }
context 'when non_public_artifacts flag is disabled' do
before do
stub_feature_flags(non_public_artifacts: false)
end
context 'artifacts with defaults' do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
it { is_expected.to be_truthy }
end
context 'non public artifacts' do
let(:build) { create(:ci_build, :with_private_artifacts_config, pipeline: pipeline) }
it { is_expected.to be_truthy }
end
context 'public artifacts' do
let(:build) { create(:ci_build, :with_public_artifacts_config, pipeline: pipeline) }
it { is_expected.to be_truthy }
end
end
context 'artifacts with defaults' do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
@ -5270,7 +5228,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
describe '.with_project_and_metadata' do
it 'does not join across databases' do
with_cross_joins_prevented do
::Ci::Build.with_project_and_metadata(project).to_a
::Ci::Build.with_project_and_metadata.to_a
end
end
end

View File

@ -176,16 +176,6 @@ RSpec.describe Ci::JobArtifact, feature_category: :build_artifacts do
let!(:artifact) { build(:ci_job_artifact, :private) }
it { is_expected.to be_falsey }
context 'and the non_public_artifacts feature flag is disabled' do
let!(:artifact) { build(:ci_job_artifact, :private) }
before do
stub_feature_flags(non_public_artifacts: false)
end
it { is_expected.to be_truthy }
end
end
end

View File

@ -18,5 +18,13 @@ RSpec.describe Ci::PipelineMetadata, feature_category: :pipeline_composition do
conservative: 0, interruptible: 1, disabled: 2
).with_prefix
end
it do
is_expected.to define_enum_for(
:auto_cancel_on_job_failure
).with_values(
none: 0, all: 1
).with_prefix
end
end
end

View File

@ -86,6 +86,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
it { is_expected.to respond_to :short_sha }
it { is_expected.to delegate_method(:full_path).to(:project).with_prefix }
it { is_expected.to delegate_method(:name).to(:pipeline_metadata).allow_nil }
it { is_expected.to delegate_method(:auto_cancel_on_job_failure).to(:pipeline_metadata).allow_nil }
describe 'validations' do
it { is_expected.to validate_presence_of(:sha) }

View File

@ -197,21 +197,6 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
expect(response).to have_gitlab_http_status(:forbidden)
end
context 'with the non_public_artifacts feature flag disabled' do
before do
stub_feature_flags(non_public_artifacts: false)
end
it 'allows access to artifacts' do
project.update_column(:visibility_level, Gitlab::VisibilityLevel::PUBLIC)
project.update_column(:public_builds, true)
get_artifact_file(artifact)
expect(response).to have_gitlab_http_status(:ok)
end
end
end
context 'when project is public with builds access disabled' do
@ -445,17 +430,6 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
it 'rejects access and hides existence of artifacts' do
expect(response).to have_gitlab_http_status(:forbidden)
end
context 'with the non_public_artifacts feature flag disabled' do
before do
stub_feature_flags(non_public_artifacts: false)
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
it 'allows access to artifacts' do
expect(response).to have_gitlab_http_status(:ok)
end
end
end
it 'does not return job artifacts if not uploaded' do
@ -651,18 +625,6 @@ RSpec.describe API::Ci::JobArtifacts, feature_category: :build_artifacts do
expect(response.headers.to_h)
.not_to include('Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
end
context 'with the non_public_artifacts feature flag disabled' do
before do
stub_feature_flags(non_public_artifacts: false)
end
it 'allows access to artifacts', :sidekiq_might_not_need_inline do
get_artifact_file(artifact)
expect(response).to have_gitlab_http_status(:ok)
end
end
end
context 'when project is private' do

View File

@ -29,13 +29,5 @@ RSpec.describe 'merge request content spec', feature_category: :code_review_work
describe 'GET cached_widget' do
it_behaves_like 'cached widget request'
context 'with non_public_artifacts disabled' do
before do
stub_feature_flags(non_public_artifacts: false)
end
it_behaves_like 'cached widget request'
end
end
end

View File

@ -286,16 +286,6 @@ RSpec.describe BuildDetailsEntity do
it 'does not expose non public artifacts' do
expect(subject.keys).not_to include(:artifact)
end
context 'with the non_public_artifacts feature flag disabled' do
before do
stub_feature_flags(non_public_artifacts: false)
end
it 'exposes artifact details' do
expect(subject[:artifact].keys).to include(:download_path, :browse_path, :locked)
end
end
end
end

View File

@ -14,49 +14,156 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
stub_ci_pipeline_yaml_file(config)
end
context 'when on_new_commit is set to interruptible' do
let(:config) do
<<~YAML
workflow:
auto_cancel:
on_new_commit: interruptible
describe 'on_new_commit' do
context 'when is set to interruptible' do
let(:config) do
<<~YAML
workflow:
auto_cancel:
on_new_commit: interruptible
test1:
script: exit 0
YAML
test1:
script: exit 0
YAML
end
before do
stub_ci_pipeline_yaml_file(config)
end
it 'creates a pipeline with on_new_commit' do
expect(pipeline).to be_persisted
expect(pipeline.errors).to be_empty
expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
end
end
before do
stub_ci_pipeline_yaml_file(config)
end
context 'when is set to invalid' do
let(:config) do
<<~YAML
workflow:
auto_cancel:
on_new_commit: invalid
it 'creates a pipeline with on_new_commit' do
expect(pipeline).to be_persisted
expect(pipeline.errors).to be_empty
expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
test1:
script: exit 0
YAML
end
before do
stub_ci_pipeline_yaml_file(config)
end
it 'creates a pipeline with errors' do
expect(pipeline).to be_persisted
expect(pipeline.errors.full_messages).to include(
'workflow:auto_cancel on new commit must be one of: conservative, interruptible, disabled')
end
end
end
context 'when on_new_commit is set to invalid' do
let(:config) do
<<~YAML
workflow:
auto_cancel:
on_new_commit: invalid
describe 'on_job_failure' do
context 'when is set to none' do
let(:config) do
<<~YAML
workflow:
auto_cancel:
on_job_failure: none
test1:
script: exit 0
YAML
test1:
script: exit 0
YAML
end
before do
stub_ci_pipeline_yaml_file(config)
end
it 'creates a pipeline with on_job_failure' do
expect(pipeline).to be_persisted
expect(pipeline.errors).to be_empty
expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
end
end
before do
stub_ci_pipeline_yaml_file(config)
context 'when is set to all' do
let(:config) do
<<~YAML
workflow:
auto_cancel:
on_job_failure: all
test1:
script: exit 0
YAML
end
before do
stub_ci_pipeline_yaml_file(config)
end
it 'creates a pipeline with on_job_failure' do
expect(pipeline).to be_persisted
expect(pipeline.errors).to be_empty
expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
end
context 'when auto_cancel_pipeline_on_job_failure feature flag is disabled' do
before do
stub_feature_flags(auto_cancel_pipeline_on_job_failure: false)
end
context 'when there are no other metadata settings present' do
it 'creates a pipeline without metadata' do
expect(pipeline).to be_persisted
expect(pipeline.errors).to be_empty
expect(pipeline.pipeline_metadata).to be_nil
end
end
context 'when other metadata settings are present' do
let(:config) do
<<~YAML
workflow:
name: pipeline_name
auto_cancel:
on_job_failure: all
test1:
script: exit 0
YAML
end
it 'creates a pipeline with on_job_failure' do
expect(pipeline).to be_persisted
expect(pipeline.errors).to be_empty
expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
end
end
end
end
it 'creates a pipeline with errors' do
expect(pipeline).to be_persisted
expect(pipeline.errors.full_messages).to include(
'workflow:auto_cancel on new commit must be one of: conservative, interruptible, disabled')
context 'when on_job_failure is set to invalid' do
let(:config) do
<<~YAML
workflow:
auto_cancel:
on_job_failure: invalid
test1:
script: exit 0
YAML
end
before do
stub_ci_pipeline_yaml_file(config)
end
it 'creates a pipeline with errors' do
expect(pipeline).to be_persisted
expect(pipeline.errors.full_messages).to include(
'workflow:auto_cancel on job failure must be one of: none, all')
end
end
end
end

View File

@ -155,52 +155,6 @@ RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state
end
end
context 'when non_public_artifacts flag is disabled' do
before do
stub_feature_flags(non_public_artifacts: false)
end
it_behaves_like 'public accessibility'
end
context 'when non_public_artifacts flag is enabled' do
context 'and accessibility is defined in the params' do
context 'and is passed as private' do
before do
params.merge!('accessibility' => 'private')
end
it_behaves_like 'private accessibility'
end
context 'and is passed as public' do
before do
params.merge!('accessibility' => 'public')
end
it_behaves_like 'public accessibility'
end
end
context 'and accessibility is not defined in the params' do
context 'and job has no public artifacts defined in its CI config' do
it_behaves_like 'public accessibility'
end
context 'and job artifacts defined as private in the CI config' do
let(:job) { create(:ci_build, :with_private_artifacts_config, project: project) }
it_behaves_like 'private accessibility'
end
context 'and job artifacts defined as public in the CI config' do
let(:job) { create(:ci_build, :with_public_artifacts_config, project: project) }
it_behaves_like 'public accessibility'
end
end
end
context 'when accessibility passed as invalid value' do
before do
params.merge!('accessibility' => 'foo')

View File

@ -151,7 +151,7 @@ RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_man
context 'when clickhouse is not configured' do
before do
allow(ClickHouse::Client.configuration).to receive(:databases).and_return({})
allow(ClickHouse::Client).to receive(:database_configured?).and_return(false)
end
it 'skips execution' do
@ -165,7 +165,7 @@ RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_man
context 'when exclusive lease error happens' do
it 'skips execution' do
stub_feature_flags(event_sync_worker_for_click_house: true)
allow(ClickHouse::Client.configuration).to receive(:databases).and_return({ main: :some_db })
allow(ClickHouse::Client).to receive(:database_configured?).with(:main).and_return(true)
expect(worker).to receive(:in_lock).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :skipped })