Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-07-07 06:08:10 +00:00
parent 896eadaa13
commit f23de8014c
32 changed files with 798 additions and 64 deletions

View File

@ -8,17 +8,22 @@ import {
GlFormGroup,
GlFormInput,
GlFormTextarea,
GlLoadingIcon,
} from '@gitlab/ui';
import { __, s__ } from '~/locale';
import { createAlert } from '~/alert';
import { visitUrl } from '~/lib/utils/url_utility';
import { visitUrl, queryToObject } from '~/lib/utils/url_utility';
import { REF_TYPE_BRANCHES, REF_TYPE_TAGS } from '~/ref/constants';
import RefSelector from '~/ref/components/ref_selector.vue';
import TimezoneDropdown from '~/vue_shared/components/timezone_dropdown/timezone_dropdown.vue';
import IntervalPatternInput from '~/pages/projects/pipeline_schedules/shared/components/interval_pattern_input.vue';
import createPipelineScheduleMutation from '../graphql/mutations/create_pipeline_schedule.mutation.graphql';
import updatePipelineScheduleMutation from '../graphql/mutations/update_pipeline_schedule.mutation.graphql';
import getPipelineSchedulesQuery from '../graphql/queries/get_pipeline_schedules.query.graphql';
import { VARIABLE_TYPE, FILE_TYPE } from '../constants';
const scheduleId = queryToObject(window.location.search).id;
export default {
components: {
GlButton,
@ -29,20 +34,12 @@ export default {
GlFormGroup,
GlFormInput,
GlFormTextarea,
GlLoadingIcon,
RefSelector,
TimezoneDropdown,
IntervalPatternInput,
},
inject: [
'fullPath',
'projectId',
'defaultBranch',
'cron',
'cronTimezone',
'dailyLimit',
'settingsLink',
'schedulesPath',
],
inject: ['fullPath', 'projectId', 'defaultBranch', 'dailyLimit', 'settingsLink', 'schedulesPath'],
props: {
timezoneData: {
type: Array,
@ -58,24 +55,74 @@ export default {
required: true,
},
},
apollo: {
schedule: {
query: getPipelineSchedulesQuery,
variables() {
return {
projectPath: this.fullPath,
ids: scheduleId,
};
},
update(data) {
return data.project?.pipelineSchedules?.nodes[0] || {};
},
result({ data }) {
if (data) {
const {
project: {
pipelineSchedules: { nodes },
},
} = data;
const schedule = nodes[0];
const variables = schedule.variables?.nodes || [];
this.description = schedule.description;
this.cron = schedule.cron;
this.cronTimezone = schedule.cronTimezone;
this.scheduleRef = schedule.ref;
this.variables = variables.map((variable) => {
return {
id: variable.id,
variableType: variable.variableType,
key: variable.key,
value: variable.value,
destroy: false,
};
});
this.addEmptyVariable();
this.activated = schedule.active;
}
},
skip() {
return !this.editing;
},
error() {
createAlert({ message: this.$options.i18n.scheduleFetchError });
},
},
},
data() {
return {
cronValue: this.cron,
cron: '',
description: '',
scheduleRef: this.defaultBranch,
activated: true,
timezone: this.cronTimezone,
cronTimezone: '',
variables: [],
schedule: {},
};
},
i18n: {
activated: __('Activated'),
cronTimezone: s__('PipelineSchedules|Cron timezone'),
cronTimezoneText: s__('PipelineSchedules|Cron timezone'),
description: s__('PipelineSchedules|Description'),
shortDescriptionPipeline: s__(
'PipelineSchedules|Provide a short description for this pipeline',
),
savePipelineSchedule: s__('PipelineSchedules|Save pipeline schedule'),
editScheduleBtnText: s__('PipelineSchedules|Edit pipeline schedule'),
createScheduleBtnText: s__('PipelineSchedules|Create pipeline schedule'),
cancel: __('Cancel'),
targetBranchTag: __('Select target branch or tag'),
intervalPattern: s__('PipelineSchedules|Interval Pattern'),
@ -87,6 +134,12 @@ export default {
scheduleCreateError: s__(
'PipelineSchedules|An error occurred while creating the pipeline schedule.',
),
scheduleUpdateError: s__(
'PipelineSchedules|An error occurred while updating the pipeline schedule.',
),
scheduleFetchError: s__(
'PipelineSchedules|An error occurred while trying to fetch the pipeline schedule.',
),
},
typeOptions: {
[VARIABLE_TYPE]: __('Variable'),
@ -114,9 +167,26 @@ export default {
getEnabledRefTypes() {
return [REF_TYPE_BRANCHES, REF_TYPE_TAGS];
},
preparedVariables() {
preparedVariablesUpdate() {
return this.variables.filter((variable) => variable.key !== '');
},
preparedVariablesCreate() {
return this.preparedVariablesUpdate.map((variable) => {
return {
key: variable.key,
value: variable.value,
variableType: variable.variableType,
};
});
},
loading() {
return this.$apollo.queries.schedule.loading;
},
buttonText() {
return this.editing
? this.$options.i18n.editScheduleBtnText
: this.$options.i18n.createScheduleBtnText;
},
},
created() {
this.addEmptyVariable();
@ -133,6 +203,7 @@ export default {
variableType: VARIABLE_TYPE,
key: '',
value: '',
destroy: false,
});
},
setVariableAttribute(key, attribute, value) {
@ -140,16 +211,11 @@ export default {
variable[attribute] = value;
},
removeVariable(index) {
this.variables.splice(index, 1);
this.variables[index].destroy = true;
},
canRemove(index) {
return index < this.variables.length - 1;
},
scheduleHandler() {
if (!this.editing) {
this.createPipelineSchedule();
}
},
async createPipelineSchedule() {
try {
const {
@ -161,10 +227,10 @@ export default {
variables: {
input: {
description: this.description,
cron: this.cronValue,
cronTimezone: this.timezone,
cron: this.cron,
cronTimezone: this.cronTimezone,
ref: this.scheduleRef,
variables: this.preparedVariables,
variables: this.preparedVariablesCreate,
active: this.activated,
projectPath: this.fullPath,
},
@ -180,11 +246,48 @@ export default {
createAlert({ message: this.$options.i18n.scheduleCreateError });
}
},
async updatePipelineSchedule() {
try {
const {
data: {
pipelineScheduleUpdate: { errors },
},
} = await this.$apollo.mutate({
mutation: updatePipelineScheduleMutation,
variables: {
input: {
id: this.schedule.id,
description: this.description,
cron: this.cron,
cronTimezone: this.cronTimezone,
ref: this.scheduleRef,
variables: this.preparedVariablesUpdate,
active: this.activated,
},
},
});
if (errors.length > 0) {
createAlert({ message: errors[0] });
} else {
visitUrl(this.schedulesPath);
}
} catch {
createAlert({ message: this.$options.i18n.scheduleUpdateError });
}
},
scheduleHandler() {
if (this.editing) {
this.updatePipelineSchedule();
} else {
this.createPipelineSchedule();
}
},
setCronValue(cron) {
this.cronValue = cron;
this.cron = cron;
},
setTimezone(timezone) {
this.timezone = timezone.identifier || '';
this.cronTimezone = timezone.identifier || '';
},
},
};
@ -192,7 +295,8 @@ export default {
<template>
<div class="col-lg-8 gl-pl-0">
<gl-form>
<gl-loading-icon v-if="loading && editing" size="lg" />
<gl-form v-else>
<!--Description-->
<gl-form-group :label="$options.i18n.description" label-for="schedule-description">
<gl-form-input
@ -215,10 +319,10 @@ export default {
/>
</gl-form-group>
<!--Timezone-->
<gl-form-group :label="$options.i18n.cronTimezone" label-for="schedule-timezone">
<gl-form-group :label="$options.i18n.cronTimezoneText" label-for="schedule-timezone">
<timezone-dropdown
id="schedule-timezone"
:value="timezone"
:value="cronTimezone"
:timezone-data="timezoneData"
name="schedule-timezone"
@input="setTimezone"
@ -242,12 +346,12 @@ export default {
<div
v-for="(variable, index) in variables"
:key="`var-${index}`"
class="gl-mb-3 gl-pb-2"
data-testid="ci-variable-row"
data-qa-selector="ci_variable_row_container"
>
<div
class="gl-display-flex gl-align-items-stretch gl-flex-direction-column gl-md-flex-direction-row"
v-if="!variable.destroy"
class="gl-display-flex gl-align-items-stretch gl-flex-direction-column gl-md-flex-direction-row gl-mb-3 gl-pb-2"
data-testid="ci-variable-row"
>
<gl-dropdown
:text="$options.typeOptions[variable.variableType]"
@ -308,7 +412,7 @@ export default {
</gl-form-checkbox>
<gl-button variant="confirm" data-testid="schedule-submit-button" @click="scheduleHandler">
{{ $options.i18n.savePipelineSchedule }}
{{ buttonText }}
</gl-button>
<gl-button :href="schedulesPath" data-testid="schedule-cancel-button">
{{ $options.i18n.cancel }}

View File

@ -0,0 +1,6 @@
mutation updatePipelineSchedule($input: PipelineScheduleUpdateInput!) {
pipelineScheduleUpdate(input: $input) {
clientMutationId
errors
}
}

View File

@ -1,15 +1,22 @@
query getPipelineSchedulesQuery($projectPath: ID!, $status: PipelineScheduleStatus) {
query getPipelineSchedulesQuery(
$projectPath: ID!
$status: PipelineScheduleStatus
$ids: [ID!] = null
) {
currentUser {
id
username
}
project(fullPath: $projectPath) {
id
pipelineSchedules(status: $status) {
pipelineSchedules(status: $status, ids: $ids) {
count
nodes {
id
description
cron
cronTimezone
ref
forTag
editPath
refPath
@ -35,6 +42,14 @@ query getPipelineSchedulesQuery($projectPath: ID!, $status: PipelineScheduleStat
name
webPath
}
variables {
nodes {
id
variableType
key
value
}
}
userPermissions {
playPipelineSchedule
updatePipelineSchedule

View File

@ -18,10 +18,8 @@ export default (selector, editing = false) => {
const {
fullPath,
cron,
dailyLimit,
timezoneData,
cronTimezone,
projectId,
defaultBranch,
settingsLink,
@ -37,8 +35,6 @@ export default (selector, editing = false) => {
projectId,
defaultBranch,
dailyLimit: dailyLimit ?? '',
cronTimezone: cronTimezone ?? '',
cron: cron ?? '',
settingsLink,
schedulesPath,
},

View File

@ -43,7 +43,7 @@ module Mutations
def resolve(id:, variables: [], **pipeline_schedule_attrs)
schedule = authorized_find!(id: id)
params = pipeline_schedule_attrs.merge(variables_attributes: variables.map(&:to_h))
params = pipeline_schedule_attrs.merge(variables_attributes: variable_attributes_for(variables))
service_response = ::Ci::PipelineSchedules::UpdateService
.new(schedule, current_user, params)
@ -54,6 +54,18 @@ module Mutations
errors: service_response.errors
}
end
private
def variable_attributes_for(variables)
variables.map do |variable|
variable.to_h.tap do |hash|
hash[:id] = GlobalID::Locator.locate(hash[:id]).id if hash[:id]
hash[:_destroy] = hash.delete(:destroy)
end
end
end
end
end
end

View File

@ -8,11 +8,18 @@ module Mutations
description 'Attributes for the pipeline schedule variable.'
PipelineScheduleVariableID = ::Types::GlobalIDType[::Ci::PipelineScheduleVariable]
argument :id, PipelineScheduleVariableID, required: false, description: 'ID of the variable to mutate.'
argument :key, GraphQL::Types::String, required: true, description: 'Name of the variable.'
argument :value, GraphQL::Types::String, required: true, description: 'Value of the variable.'
argument :variable_type, Types::Ci::VariableTypeEnum, required: true, description: 'Type of the variable.'
argument :destroy, GraphQL::Types::Boolean, required: false,
description: 'Boolean option to destroy the variable.'
end
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
module Ci
module PipelineSchedulesHelper
def js_pipeline_schedules_form_data(project, schedule)
{
full_path: project.full_path,
daily_limit: schedule.daily_limit,
timezone_data: timezone_data.to_json,
project_id: project.id,
default_branch: project.default_branch,
settings_link: project_settings_ci_cd_path(project),
schedules_path: pipeline_schedules_path(project)
}
end
end
end
Ci::PipelineSchedulesHelper.prepend_mod_with('Ci::PipelineSchedulesHelper')

View File

@ -5,9 +5,8 @@
%h1.page-title.gl-font-size-h-display
= _("Edit Pipeline Schedule")
%hr
- if Feature.enabled?(:pipeline_schedules_vue, @project)
#pipeline-schedules-form-edit{ data: { full_path: @project.full_path } }
#pipeline-schedules-form-edit{ data: js_pipeline_schedules_form_data(@project, @schedule) }
- else
= render "form"

View File

@ -9,6 +9,6 @@
= _("Schedule a new pipeline")
- if Feature.enabled?(:pipeline_schedules_vue, @project)
#pipeline-schedules-form-new{ data: { full_path: @project.full_path, cron: @schedule.cron, daily_limit: @schedule.daily_limit, timezone_data: timezone_data.to_json, cron_timezone: @schedule.cron_timezone, project_id: @project.id, default_branch: @project.default_branch, settings_link: project_settings_ci_cd_path(@project), schedules_path: pipeline_schedules_path(@project) } }
#pipeline-schedules-form-new{ data: js_pipeline_schedules_form_data(@project, @schedule) }
- else
= render "form"

View File

@ -0,0 +1,6 @@
---
migration_job_name: BackfillMissingCiCdSettings
description: Backfills ci_cd_settings for projects that do not have them
feature_category: source_code_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/issues/393502
milestone: 16.2

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
class AddRelayStateAllowlistApplicationSettings < Gitlab::Database::Migration[2.1]
def change
add_column :application_settings, :relay_state_domain_allowlist,
:text,
array: true,
default: [],
null: false
end
end

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
class AddRelayStateAllowlistSamlProviders < Gitlab::Database::Migration[2.1]
def change
add_column :saml_providers, :relay_state_domain_allowlist,
:text,
array: true,
default: [],
null: false
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class QueueBackfillMissingCiCdSettings < Gitlab::Database::Migration[2.1]
MIGRATION = "BackfillMissingCiCdSettings"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 10_000
SUB_BATCH_SIZE = 500
restrict_gitlab_migration gitlab_schema: :gitlab_main
def up
queue_batched_background_migration(
MIGRATION,
:projects,
:id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(MIGRATION, :projects, :id, [])
end
end

View File

@ -0,0 +1 @@
59e4b358359514dbb49b2b73c829a99f646100442f02aa36287935d6e8fa76ab

View File

@ -0,0 +1 @@
8a16b05cd573528b6e8baa2d86e761a2b431584c026918e3eda9a630b30ec727

View File

@ -0,0 +1 @@
149cdb7863460246fb89d02d3c8e1709bdb1d38378304d44c9a916c4bd4ee4ed

View File

@ -11771,6 +11771,7 @@ CREATE TABLE application_settings (
gitlab_shell_operation_limit integer DEFAULT 600,
elasticsearch_requeue_workers boolean DEFAULT false NOT NULL,
elasticsearch_worker_number_of_shards integer DEFAULT 2 NOT NULL,
relay_state_domain_allowlist text[] DEFAULT '{}'::text[] NOT NULL,
CONSTRAINT app_settings_container_reg_cleanup_tags_max_list_size_positive CHECK ((container_registry_cleanup_tags_service_max_list_size >= 0)),
CONSTRAINT app_settings_container_registry_pre_import_tags_rate_positive CHECK ((container_registry_pre_import_tags_rate >= (0)::numeric)),
CONSTRAINT app_settings_dep_proxy_ttl_policies_worker_capacity_positive CHECK ((dependency_proxy_ttl_group_policy_worker_capacity >= 0)),
@ -22230,7 +22231,8 @@ CREATE TABLE saml_providers (
enforced_group_managed_accounts boolean DEFAULT false NOT NULL,
prohibited_outer_forks boolean DEFAULT true NOT NULL,
default_membership_role smallint DEFAULT 10 NOT NULL,
git_check_enforced boolean DEFAULT false NOT NULL
git_check_enforced boolean DEFAULT false NOT NULL,
relay_state_domain_allowlist text[] DEFAULT '{}'::text[] NOT NULL
);
CREATE SEQUENCE saml_providers_id_seq

View File

@ -27181,6 +27181,12 @@ A `CiPipelineScheduleID` is a global ID. It is encoded as a string.
An example `CiPipelineScheduleID` is: `"gid://gitlab/Ci::PipelineSchedule/1"`.
### `CiPipelineScheduleVariableID`
A `CiPipelineScheduleVariableID` is a global ID. It is encoded as a string.
An example `CiPipelineScheduleVariableID` is: `"gid://gitlab/Ci::PipelineScheduleVariable/1"`.
### `CiRunnerID`
A `CiRunnerID` is a global ID. It is encoded as a string.
@ -29032,6 +29038,8 @@ Attributes for the pipeline schedule variable.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="pipelineschedulevariableinputdestroy"></a>`destroy` | [`Boolean`](#boolean) | Boolean option to destroy the variable. |
| <a id="pipelineschedulevariableinputid"></a>`id` | [`CiPipelineScheduleVariableID`](#cipipelineschedulevariableid) | ID of the variable to mutate. |
| <a id="pipelineschedulevariableinputkey"></a>`key` | [`String!`](#string) | Name of the variable. |
| <a id="pipelineschedulevariableinputvalue"></a>`value` | [`String!`](#string) | Value of the variable. |
| <a id="pipelineschedulevariableinputvariabletype"></a>`variableType` | [`CiVariableType!`](#civariabletype) | Type of the variable. |
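To make the new arguments concrete, here is a minimal sketch of a `pipelineScheduleUpdate` call that updates one variable in place and flags another for removal. The global IDs, keys, and values below are placeholders for illustration only, not values taken from this change:

```graphql
mutation {
  pipelineScheduleUpdate(
    input: {
      id: "gid://gitlab/Ci::PipelineSchedule/1"
      variables: [
        # Update an existing variable in place by passing its global ID.
        {
          id: "gid://gitlab/Ci::PipelineScheduleVariable/1"
          key: "DEPLOY_ENV"
          value: "staging"
          variableType: ENV_VAR
        }
        # Flag a second variable for removal with the new `destroy` argument.
        {
          id: "gid://gitlab/Ci::PipelineScheduleVariable/2"
          key: "OLD_VAR"
          value: "unused"
          variableType: ENV_VAR
          destroy: true
        }
      ]
    }
  ) {
    errors
  }
}
```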

View File

@ -83,6 +83,7 @@ GitLab CI/CD features, grouped by DevOps stage, include:
| [Connect to cloud services](cloud_services/index.md) | Connect to cloud providers using OpenID Connect (OIDC) to retrieve temporary credentials to access services or secrets. |
| **Verify** | |
| [CI services](services/index.md) | Link Docker containers with your base image. |
| [Code Quality](testing/code_quality.md) | Analyze your source code quality. |
| [GitLab CI/CD for external repositories](ci_cd_for_external_repos/index.md) | Get the benefits of GitLab CI/CD combined with repositories in GitHub and Bitbucket Cloud. |
| [Interactive Web Terminals](interactive_web_terminal/index.md) | Open an interactive web terminal to debug the running jobs. |
| [Review Apps](review_apps/index.md) | Configure GitLab CI/CD to preview code changes. |
@ -98,11 +99,19 @@ GitLab CI/CD features, grouped by DevOps stage, include:
| [GitLab Releases](../user/project/releases/index.md) | Add release notes to Git tags. |
| [Cloud deployment](cloud_deployment/index.md) | Deploy your application to a main cloud provider. |
| **Secure** | |
| [Code Quality](testing/code_quality.md) | Analyze your source code quality. |
| [Container Scanning](../user/application_security/container_scanning/index.md) | Check your Docker containers for known vulnerabilities. |
| [Container Scanning](../user/application_security/container_scanning/index.md) | Scan your container images for known vulnerabilities. |
| [Coverage-guided fuzz testing](../user/application_security/coverage_fuzzing/index.md) | Test your application's behavior by providing randomized input. |
| [Dynamic Application Security Testing](../user/application_security/dast/index.md) | Test your application's runtime behavior for vulnerabilities. |
| [Dependency Scanning](../user/application_security/dependency_scanning/index.md) | Analyze your dependencies for known vulnerabilities. |
| [Infrastructure as Code scanning](../user/application_security/iac_scanning/index.md) | Scan your IaC configuration files for known vulnerabilities. |
| [License Compliance](../user/compliance/license_compliance/index.md) | Search your project dependencies for their licenses. |
| [Security Test reports](../user/application_security/index.md) | Check for app vulnerabilities. |
| [Secret Detection](../user/application_security/secret_detection/index.md) | Search your application's source code for secrets. |
| [Static Application Security Testing](../user/application_security/sast/index.md) | Test your application's source code for known vulnerabilities. |
| [Web API fuzz testing](../user/application_security/api_fuzzing/index.md) | Test your application's API behavior by providing randomized input. |
| **Govern** | |
| [Compliance frameworks](../user/group/compliance_frameworks.md) | Enforce a GitLab CI/CD configuration on all projects in a group. |
| [Scan execution policies](../user/application_security/policies/scan-execution-policies.md) | Enforce security scans run on a specified schedule or with the project pipeline. |
| [Scan results policies](../user/application_security/policies/scan-result-policies.md) | Enforce action based on results of a pipeline security scan. |
## Examples

View File

@ -5,9 +5,9 @@ info: To determine the technical writer assigned to the Stage/Group associated w
type: reference
---
# The scope of runners **(FREE)**
# Manage runners
Runners are available based on who you want to have access:
GitLab has the following types of runners, which are available based on who you want to have access:
- [Shared runners](#shared-runners) are available to all groups and projects in a GitLab instance.
- [Group runners](#group-runners) are available to all projects and subgroups in a group.

View File

@ -18,6 +18,32 @@ most to least severe:
- `Info`
- `Unknown`
GitLab analyzers make an effort to match the severity descriptions below, but they may not always be accurate. Analyzers and scanners provided by third-party vendors may not follow the same classification.
## Critical severity
Vulnerabilities identified at the Critical severity level should be investigated immediately. Vulnerabilities at this level assume that exploitation of the flaw could lead to full system or data compromise. Examples of critical severity flaws are Command/Code Injection and SQL Injection. Typically these flaws are rated with CVSS 3.1 between 9.0 and 10.0.
## High severity
High severity vulnerabilities can be characterized as flaws that may give an attacker access to application resources or lead to unintended exposure of data. Examples of high severity flaws are XML External Entity Injection (XXE), Server-Side Request Forgery (SSRF), Local File Inclusion/Path Traversal, and certain forms of Cross-Site Scripting (XSS). Typically these flaws are rated with CVSS 3.1 between 7.0 and 8.9.
## Medium severity
Medium severity vulnerabilities usually arise from misconfiguration of systems or a lack of security controls. Exploiting these vulnerabilities may give access to a restricted amount of data, or they may be combined with other flaws to gain unintended access to systems or resources. Examples of medium severity flaws are reflected XSS, incorrect HTTP session handling, and missing security controls. Typically these flaws are rated with CVSS 3.1 between 4.0 and 6.9.
## Low severity
Low severity vulnerabilities contain flaws that may not be directly exploitable but introduce unnecessary weakness into an application or system. These flaws are usually due to missing security controls or unnecessary disclosure of information about the application environment. Examples of low severity vulnerabilities are missing cookie security directives and verbose error or exception messages. Typically these flaws are rated with CVSS 3.1 between 1.0 and 3.9.
## Info severity
Info level severity vulnerabilities contain information that may have value but is not necessarily associated with a particular flaw or weakness. Typically these issues do not have a CVSS rating.
## Unknown severity
Issues identified at this level do not have enough context to clearly demonstrate severity.
Most GitLab vulnerability analyzers are wrappers around popular open source scanning tools. Each
open source scanning tool provides its own native vulnerability severity level value. These values
can be one of the following:

View File

@ -0,0 +1,39 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Backfills missing `project_ci_cd_settings` records for existing projects
class BackfillMissingCiCdSettings < BatchedMigrationJob
# Migration-local model for the `project_ci_cd_settings` table
class ProjectCiCdSetting < ::ApplicationRecord
self.table_name = 'project_ci_cd_settings'
end
operation_name :backfill_missing_ci_cd_settings
feature_category :source_code_management
def perform
each_sub_batch do |sub_batch|
sub_batch = sub_batch.where(%{
NOT EXISTS (
SELECT 1
FROM project_ci_cd_settings
WHERE project_ci_cd_settings.project_id = projects.id
)
})
next unless sub_batch.present?
ci_cd_attributes = sub_batch.map do |project|
{
project_id: project.id,
default_git_depth: 20,
forward_deployment_enabled: true
}
end
ProjectCiCdSetting.insert_all(ci_cd_attributes)
end
end
end
end
end

View File

@ -33395,6 +33395,12 @@ msgstr ""
msgid "PipelineSchedules|An error occurred while creating the pipeline schedule."
msgstr ""
msgid "PipelineSchedules|An error occurred while trying to fetch the pipeline schedule."
msgstr ""
msgid "PipelineSchedules|An error occurred while updating the pipeline schedule."
msgstr ""
msgid "PipelineSchedules|Are you sure you want to delete this pipeline schedule?"
msgstr ""
@ -33404,6 +33410,9 @@ msgstr ""
msgid "PipelineSchedules|Create a new pipeline schedule"
msgstr ""
msgid "PipelineSchedules|Create pipeline schedule"
msgstr ""
msgid "PipelineSchedules|Cron timezone"
msgstr ""
@ -33461,9 +33470,6 @@ msgstr ""
msgid "PipelineSchedules|Runs with the same project permissions as the schedule owner."
msgstr ""
msgid "PipelineSchedules|Save pipeline schedule"
msgstr ""
msgid "PipelineSchedules|Successfully scheduled a pipeline to run. Go to the %{linkStart}Pipelines page%{linkEnd} for details. "
msgstr ""

View File

@ -1,5 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
import { GlForm } from '@gitlab/ui';
import { GlForm, GlLoadingIcon } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
@ -14,8 +14,14 @@ import { REF_TYPE_BRANCHES, REF_TYPE_TAGS } from '~/ref/constants';
import TimezoneDropdown from '~/vue_shared/components/timezone_dropdown/timezone_dropdown.vue';
import IntervalPatternInput from '~/pages/projects/pipeline_schedules/shared/components/interval_pattern_input.vue';
import createPipelineScheduleMutation from '~/ci/pipeline_schedules/graphql/mutations/create_pipeline_schedule.mutation.graphql';
import updatePipelineScheduleMutation from '~/ci/pipeline_schedules/graphql/mutations/update_pipeline_schedule.mutation.graphql';
import getPipelineSchedulesQuery from '~/ci/pipeline_schedules/graphql/queries/get_pipeline_schedules.query.graphql';
import { timezoneDataFixture } from '../../../vue_shared/components/timezone_dropdown/helpers';
import { createScheduleMutationResponse } from '../mock_data';
import {
createScheduleMutationResponse,
updateScheduleMutationResponse,
mockSinglePipelineScheduleNode,
} from '../mock_data';
Vue.use(VueApollo);
@ -23,8 +29,20 @@ jest.mock('~/alert');
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn(),
joinPaths: jest.fn().mockReturnValue(''),
queryToObject: jest.fn().mockReturnValue({ id: '1' }),
}));
const {
data: {
project: {
pipelineSchedules: { nodes },
},
},
} = mockSinglePipelineScheduleNode;
const schedule = nodes[0];
const variables = schedule.variables.nodes;
describe('Pipeline schedules form', () => {
let wrapper;
const defaultBranch = 'main';
@ -32,8 +50,13 @@ describe('Pipeline schedules form', () => {
const cron = '';
const dailyLimit = '';
const querySuccessHandler = jest.fn().mockResolvedValue(mockSinglePipelineScheduleNode);
const queryFailedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
const createMutationHandlerSuccess = jest.fn().mockResolvedValue(createScheduleMutationResponse);
const createMutationHandlerFailed = jest.fn().mockRejectedValue(new Error('GraphQL error'));
const updateMutationHandlerSuccess = jest.fn().mockResolvedValue(updateScheduleMutationResponse);
const updateMutationHandlerFailed = jest.fn().mockRejectedValue(new Error('GraphQL error'));
const createMockApolloProvider = (
requestHandlers = [[createPipelineScheduleMutation, createMutationHandlerSuccess]],
@ -52,8 +75,6 @@ describe('Pipeline schedules form', () => {
fullPath: 'gitlab-org/gitlab',
projectId,
defaultBranch,
cron,
cronTimezone: '',
dailyLimit,
settingsLink: '',
schedulesPath: '/root/ci-project/-/pipeline_schedules',
@ -69,6 +90,7 @@ describe('Pipeline schedules form', () => {
const findRefSelector = () => wrapper.findComponent(RefSelector);
const findSubmitButton = () => wrapper.findByTestId('schedule-submit-button');
const findCancelButton = () => wrapper.findByTestId('schedule-cancel-button');
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
// Variables
const findVariableRows = () => wrapper.findAllByTestId('ci-variable-row');
const findKeyInputs = () => wrapper.findAllByTestId('pipeline-form-ci-variable-key');
@ -187,7 +209,38 @@ describe('Pipeline schedules form', () => {
});
});
describe('schedule creation', () => {
describe('Button text', () => {
it.each`
editing | expectedText
${true} | ${'Edit pipeline schedule'}
${false} | ${'Create pipeline schedule'}
`(
'button text is $expectedText when editing is $editing',
async ({ editing, expectedText }) => {
createComponent(shallowMountExtended, editing, [
[getPipelineSchedulesQuery, querySuccessHandler],
]);
await waitForPromises();
expect(findSubmitButton().text()).toBe(expectedText);
},
);
});
describe('Schedule creation', () => {
it('when creating a schedule the query is not called', () => {
createComponent();
expect(querySuccessHandler).not.toHaveBeenCalled();
});
it('does not show loading state when creating new schedule', () => {
createComponent();
expect(findLoadingIcon().exists()).toBe(false);
});
describe('schedule creation success', () => {
let mock;
@ -259,4 +312,125 @@ describe('Pipeline schedules form', () => {
});
});
});
describe('Schedule editing', () => {
let mock;
beforeEach(() => {
mock = new MockAdapter(axios);
});
afterEach(() => {
mock.restore();
});
it('shows loading state when editing', async () => {
createComponent(shallowMountExtended, true, [
[getPipelineSchedulesQuery, querySuccessHandler],
]);
expect(findLoadingIcon().exists()).toBe(true);
await waitForPromises();
expect(findLoadingIcon().exists()).toBe(false);
});
describe('schedule fetch success', () => {
it('fetches schedule and sets form data correctly', async () => {
createComponent(mountExtended, true, [[getPipelineSchedulesQuery, querySuccessHandler]]);
expect(querySuccessHandler).toHaveBeenCalled();
await waitForPromises();
expect(findDescription().element.value).toBe(schedule.description);
expect(findIntervalComponent().props('initialCronInterval')).toBe(schedule.cron);
expect(findTimezoneDropdown().props('value')).toBe(schedule.cronTimezone);
expect(findRefSelector().props('value')).toBe(schedule.ref);
expect(findVariableRows()).toHaveLength(3);
expect(findKeyInputs().at(0).element.value).toBe(variables[0].key);
expect(findKeyInputs().at(1).element.value).toBe(variables[1].key);
expect(findValueInputs().at(0).element.value).toBe(variables[0].value);
expect(findValueInputs().at(1).element.value).toBe(variables[1].value);
});
});
it('schedule fetch failure', async () => {
createComponent(shallowMountExtended, true, [
[getPipelineSchedulesQuery, queryFailedHandler],
]);
await waitForPromises();
expect(createAlert).toHaveBeenCalledWith({
message: 'An error occurred while trying to fetch the pipeline schedule.',
});
});
it('edit schedule success', async () => {
createComponent(mountExtended, true, [
[getPipelineSchedulesQuery, querySuccessHandler],
[updatePipelineScheduleMutation, updateMutationHandlerSuccess],
]);
await waitForPromises();
findDescription().element.value = 'Updated schedule';
findDescription().trigger('change');
findIntervalComponent().vm.$emit('cronValue', '0 22 16 * *');
// Ensures variable is sent with destroy property set true
findRemoveIcons().at(0).vm.$emit('click');
findSubmitButton().vm.$emit('click');
await waitForPromises();
expect(updateMutationHandlerSuccess).toHaveBeenCalledWith({
input: {
active: schedule.active,
cron: '0 22 16 * *',
cronTimezone: schedule.cronTimezone,
id: schedule.id,
ref: schedule.ref,
description: 'Updated schedule',
variables: [
{
destroy: true,
id: variables[0].id,
key: variables[0].key,
value: variables[0].value,
variableType: variables[0].variableType,
},
{
destroy: false,
id: variables[1].id,
key: variables[1].key,
value: variables[1].value,
variableType: variables[1].variableType,
},
],
},
});
});
it('edit schedule failure', async () => {
createComponent(shallowMountExtended, true, [
[getPipelineSchedulesQuery, querySuccessHandler],
[updatePipelineScheduleMutation, updateMutationHandlerFailed],
]);
await waitForPromises();
findSubmitButton().vm.$emit('click');
await waitForPromises();
expect(createAlert).toHaveBeenCalledWith({
message: 'An error occurred while updating the pipeline schedule.',
});
});
});
});

View File

@ -2,6 +2,7 @@
import mockGetPipelineSchedulesGraphQLResponse from 'test_fixtures/graphql/pipeline_schedules/get_pipeline_schedules.query.graphql.json';
import mockGetPipelineSchedulesAsGuestGraphQLResponse from 'test_fixtures/graphql/pipeline_schedules/get_pipeline_schedules.query.graphql.as_guest.json';
import mockGetPipelineSchedulesTakeOwnershipGraphQLResponse from 'test_fixtures/graphql/pipeline_schedules/get_pipeline_schedules.query.graphql.take_ownership.json';
import mockGetSinglePipelineScheduleGraphQLResponse from 'test_fixtures/graphql/pipeline_schedules/get_pipeline_schedules.query.graphql.single.json';
const {
data: {
@ -30,10 +31,10 @@ const {
export const mockPipelineScheduleNodes = nodes;
export const mockPipelineScheduleCurrentUser = currentUser;
export const mockPipelineScheduleAsGuestNodes = guestNodes;
export const mockTakeOwnershipNodes = takeOwnershipNodes;
export const mockSinglePipelineScheduleNode = mockGetSinglePipelineScheduleGraphQLResponse;
export const emptyPipelineSchedulesResponse = {
data: {
project: {
@ -89,4 +90,14 @@ export const createScheduleMutationResponse = {
},
};
export const updateScheduleMutationResponse = {
data: {
pipelineScheduleUpdate: {
clientMutationId: null,
errors: [],
__typename: 'PipelineScheduleUpdatePayload',
},
},
};
export { mockGetPipelineSchedulesGraphQLResponse };

View File

@ -63,6 +63,12 @@ RSpec.describe 'Pipeline schedules (JavaScript fixtures)' do
expect_graphql_errors_to_be_empty
end
it "#{fixtures_path}#{get_pipeline_schedules_query}.single.json" do
post_graphql(query, current_user: user, variables: { projectPath: project.full_path, ids: pipeline_schedule_populated.id })
expect_graphql_errors_to_be_empty
end
it "#{fixtures_path}#{get_pipeline_schedules_query}.as_guest.json" do
guest = create(:user)
project.add_guest(user)

View File

@ -5,5 +5,5 @@ require 'spec_helper'
RSpec.describe Mutations::Ci::PipelineSchedule::VariableInputType, feature_category: :continuous_integration do
specify { expect(described_class.graphql_name).to eq('PipelineScheduleVariableInput') }
it { expect(described_class.arguments.keys).to match_array(%w[key value variableType]) }
it { expect(described_class.arguments.keys).to match_array(%w[id key value variableType destroy]) }
end

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::PipelineSchedulesHelper, feature_category: :continuous_integration do
let_it_be(:project) { build_stubbed(:project) }
let_it_be(:user) { build_stubbed(:user) }
let_it_be(:pipeline_schedule) { build_stubbed(:ci_pipeline_schedule, project: project, owner: user) }
let_it_be(:timezones) { [{ identifier: "Pacific/Honolulu", name: "Hawaii" }] }
let_it_be(:pipeline_schedule_variable) do
build_stubbed(:ci_pipeline_schedule_variable, key: 'foo', value: 'foovalue', pipeline_schedule: pipeline_schedule)
end
describe '#js_pipeline_schedules_form_data' do
before do
allow(helper).to receive(:timezone_data).and_return(timezones)
end
it 'returns pipeline schedule form data' do
expect(helper.js_pipeline_schedules_form_data(project, pipeline_schedule)).to include({
full_path: project.full_path,
daily_limit: nil,
project_id: project.id,
schedules_path: pipeline_schedules_path(project),
settings_link: project_settings_ci_cd_path(project),
timezone_data: timezones.to_json
})
end
end
end

View File

@ -0,0 +1,98 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillMissingCiCdSettings, schema: 20230628023103, feature_category: :source_code_management do # rubocop:disable Layout/LineLength
let(:projects_table) { table(:projects) }
let(:namespaces_table) { table(:namespaces) }
let(:ci_cd_settings_table) { table(:project_ci_cd_settings) }
let(:namespace_1) { namespaces_table.create!(name: 'namespace', path: 'namespace-path-1') }
let(:project_namespace_2) { namespaces_table.create!(name: 'namespace', path: 'namespace-path-2', type: 'Project') }
let(:project_namespace_3) { namespaces_table.create!(name: 'namespace', path: 'namespace-path-3', type: 'Project') }
let(:project_namespace_4) { namespaces_table.create!(name: 'namespace', path: 'namespace-path-4', type: 'Project') }
let(:project_namespace_5) { namespaces_table.create!(name: 'namespace', path: 'namespace-path-5', type: 'Project') }
let!(:project_1) do
projects_table
.create!(
name: 'project1',
path: 'path1',
namespace_id: namespace_1.id,
project_namespace_id: project_namespace_2.id,
visibility_level: 0
)
end
let!(:project_2) do
projects_table
.create!(
name: 'project2',
path: 'path2',
namespace_id: namespace_1.id,
project_namespace_id: project_namespace_3.id,
visibility_level: 0
)
end
let!(:project_3) do
projects_table
.create!(
name: 'project3',
path: 'path3',
namespace_id: namespace_1.id,
project_namespace_id: project_namespace_4.id,
visibility_level: 0
)
end
let!(:ci_cd_settings_3) do
ci_cd_settings_table.create!(project_id: project_3.id)
end
let!(:project_4) do
projects_table
.create!(
name: 'project4',
path: 'path4',
namespace_id: namespace_1.id,
project_namespace_id: project_namespace_5.id,
visibility_level: 0
)
end
subject(:perform_migration) do
described_class.new(start_id: projects_table.minimum(:id),
end_id: projects_table.maximum(:id),
batch_table: :projects,
batch_column: :id,
sub_batch_size: 2,
pause_ms: 0,
connection: projects_table.connection)
.perform
end
it 'creates ci_cd_settings for projects without ci_cd_settings' do
expect { subject }.to change { ci_cd_settings_table.count }.from(1).to(4)
end
it 'creates ci_cd_settings with default values' do
perform_migration
ci_cd_settings_table.where.not(project_id: ci_cd_settings_3.project_id).each do |ci_cd_setting|
expect(ci_cd_setting.attributes.except('id', 'project_id')).to eq({
"group_runners_enabled" => true,
"merge_pipelines_enabled" => nil,
"default_git_depth" => 20,
"forward_deployment_enabled" => true,
"merge_trains_enabled" => false,
"auto_rollback_enabled" => false,
"keep_latest_artifact" => false,
"restrict_user_defined_variables" => false,
"job_token_scope_enabled" => false,
"runner_token_expiration_interval" => nil,
"separated_caches" => true,
"allow_fork_pipelines_to_run_in_parent_project" => true,
"inbound_job_token_scope_enabled" => true
})
end
end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillMissingCiCdSettings, feature_category: :source_code_management do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :projects,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE
)
}
end
end
end

View File

@ -9,6 +9,14 @@ RSpec.describe 'PipelineScheduleUpdate', feature_category: :continuous_integrati
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }
let_it_be(:variable_one) do
create(:ci_pipeline_schedule_variable, key: 'foo', value: 'foovalue', pipeline_schedule: pipeline_schedule)
end
let_it_be(:variable_two) do
create(:ci_pipeline_schedule_variable, key: 'bar', value: 'barvalue', pipeline_schedule: pipeline_schedule)
end
let(:mutation) do
variables = {
id: pipeline_schedule.to_global_id.to_s,
@ -30,6 +38,7 @@ RSpec.describe 'PipelineScheduleUpdate', feature_category: :continuous_integrati
nodes {
key
value
variableType
}
}
}
@ -88,8 +97,37 @@ RSpec.describe 'PipelineScheduleUpdate', feature_category: :continuous_integrati
expect(mutation_response['pipelineSchedule']['refForDisplay']).to eq(pipeline_schedule_parameters[:ref])
expect(mutation_response['pipelineSchedule']['variables']['nodes'][0]['key']).to eq('AAA')
expect(mutation_response['pipelineSchedule']['variables']['nodes'][0]['value']).to eq('AAA123')
expect(mutation_response['pipelineSchedule']['variables']['nodes'][2]['key']).to eq('AAA')
expect(mutation_response['pipelineSchedule']['variables']['nodes'][2]['value']).to eq('AAA123')
end
end
context 'when updating and removing variables' do
let(:pipeline_schedule_parameters) do
{
variables: [
{ key: 'ABC', value: "ABC123", variableType: 'ENV_VAR', destroy: false },
{ id: variable_one.to_global_id.to_s,
key: 'foo', value: "foovalue",
variableType: 'ENV_VAR',
destroy: true },
{ id: variable_two.to_global_id.to_s, key: 'newbar', value: "newbarvalue", variableType: 'ENV_VAR' }
]
}
end
it 'processes variables correctly' do
post_graphql_mutation(mutation, current_user: user)
expect(response).to have_gitlab_http_status(:success)
expect(mutation_response['pipelineSchedule']['variables']['nodes'])
.to match_array(
[
{ "key" => 'newbar', "value" => 'newbarvalue', "variableType" => 'ENV_VAR' },
{ "key" => 'ABC', "value" => "ABC123", "variableType" => 'ENV_VAR' }
]
)
end
end

View File

@ -8,9 +8,16 @@ RSpec.describe Ci::PipelineSchedules::UpdateService, feature_category: :continuo
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }
let_it_be(:pipeline_schedule_variable) do
create(:ci_pipeline_schedule_variable,
key: 'foo', value: 'foovalue', pipeline_schedule: pipeline_schedule)
end
before_all do
project.add_maintainer(user)
project.add_reporter(reporter)
pipeline_schedule.reload
end
describe "execute" do
@ -35,7 +42,10 @@ RSpec.describe Ci::PipelineSchedules::UpdateService, feature_category: :continuo
description: 'updated_desc',
ref: 'patch-x',
active: false,
cron: '*/1 * * * *'
cron: '*/1 * * * *',
variables_attributes: [
{ id: pipeline_schedule_variable.id, key: 'bar', secret_value: 'barvalue' }
]
}
end
@ -47,6 +57,42 @@ RSpec.describe Ci::PipelineSchedules::UpdateService, feature_category: :continuo
.and change { pipeline_schedule.ref }.from('master').to('patch-x')
.and change { pipeline_schedule.active }.from(true).to(false)
.and change { pipeline_schedule.cron }.from('0 1 * * *').to('*/1 * * * *')
.and change { pipeline_schedule.variables.last.key }.from('foo').to('bar')
.and change { pipeline_schedule.variables.last.value }.from('foovalue').to('barvalue')
end
context 'when creating a variable' do
let(:params) do
{
variables_attributes: [
{ key: 'ABC', secret_value: 'ABC123' }
]
}
end
it 'creates the new variable' do
expect { service.execute }.to change { Ci::PipelineScheduleVariable.count }.by(1)
expect(pipeline_schedule.variables.last.key).to eq('ABC')
expect(pipeline_schedule.variables.last.value).to eq('ABC123')
end
end
context 'when deleting a variable' do
let(:params) do
{
variables_attributes: [
{
id: pipeline_schedule_variable.id,
_destroy: true
}
]
}
end
it 'deletes the existing variable' do
expect { service.execute }.to change { Ci::PipelineScheduleVariable.count }.by(-1)
end
end
it 'returns ServiceResponse.success' do