Add latest changes from gitlab-org/gitlab@master

parent 37439afe69
commit 16cdacff02
@@ -49,7 +49,7 @@ AllCops:
    - 'db/ci_migrate/*.rb' # since `db/ci_migrate` is symlinked to `db/migrate`
  # Use absolute path to avoid orphan directories with changed workspace root.
  CacheRootDirectory: <%= Dir.getwd %>/tmp
- MaxFilesInCache: 35000
+ MaxFilesInCache: 1_000_000
  NewCops: disable
  SuggestExtensions: false
@@ -3639,7 +3639,6 @@ Layout/LineLength:
    - 'scripts/perf/query_limiting_report.rb'
    - 'scripts/pipeline_test_report_builder.rb'
    - 'scripts/review_apps/automated_cleanup.rb'
    - 'scripts/rubocop-max-files-in-cache-check'
    - 'scripts/security-harness'
    - 'scripts/static-analysis'
    - 'scripts/trigger-build.rb'
.yamllint
@@ -23,15 +23,18 @@ ignore: |
  node_modules/
  tmp/

# Why disabling all of those rules?
# In CI some YAML files are linted using different rules.
# See `.gitlab/ci/yaml.gitlab-ci.yml`.
#
# For the scope of https://gitlab.com/gitlab-org/gitlab/-/issues/359968,
# we would like to catch syntax errors as soon as possible.
# Style "errors" are not as important right now, but they should ideally be added later on.
#
# Please consider enabling a rule, and fixing the issues you'll see in an MR.
# https://gitlab.com/gitlab-org/gitlab/-/issues/385693 tracks to enable all
# rules below:
rules:
-  braces: disable
+  braces:
+    min-spaces-inside: 1
+    max-spaces-inside: 1
+    min-spaces-inside-empty: 0
+    max-spaces-inside-empty: 0

  colons: disable
  comments-indentation: disable
  comments: disable
@@ -1 +1 @@
-c624c31af05ffd1605524970972cad69b3a7fd8c
+f2b896476395d3071f59b72cbc7d4d5a6d947194
@ -1,3 +1,12 @@
|
|||
import { useNewFonts } from '~/lib/utils/common_utils';
|
||||
import { getCssVariable } from '~/lib/utils/css_utils';
|
||||
|
||||
const fontOptions = {};
|
||||
|
||||
if (useNewFonts()) {
|
||||
fontOptions.fontFamily = getCssVariable('--code-editor-font');
|
||||
}
|
||||
|
||||
export const defaultEditorOptions = {
|
||||
model: null,
|
||||
readOnly: false,
|
||||
|
|
@ -9,6 +18,7 @@ export const defaultEditorOptions = {
|
|||
wordWrap: 'on',
|
||||
glyphMargin: true,
|
||||
automaticLayout: true,
|
||||
...fontOptions,
|
||||
};
|
||||
|
||||
export const defaultDiffOptions = {
|
||||
|
|
|
|||
|
|
@ -715,3 +715,16 @@ export const getFirstPropertyValue = (data) => {
|
|||
|
||||
return data[key];
|
||||
};
|
||||
|
||||
// TODO: remove when FF `new_fonts` is removed https://gitlab.com/gitlab-org/gitlab/-/issues/379147
|
||||
/**
|
||||
* This method checks the FF `new_fonts`
|
||||
* as well as a query parameter `new_fonts`.
|
||||
* If either of them is enabled, new fonts will be applied.
|
||||
*
|
||||
* @returns Boolean Whether to apply new fonts
|
||||
*/
|
||||
export const useNewFonts = () => {
|
||||
const hasQueryParam = new URLSearchParams(window.location.search).has('new_fonts');
|
||||
return window?.gon.features?.newFonts || hasQueryParam;
|
||||
};
|
||||
|
|
|
|||
|
|
@ -82,7 +82,7 @@ export default {
|
|||
|
||||
<template>
|
||||
<div>
|
||||
<div class="gl-card-body gl-relative gl-pb-0 gl-px-0" data-qa-selector="terms_content">
|
||||
<div class="gl-relative gl-pb-0 gl-px-0" data-qa-selector="terms_content">
|
||||
<div
|
||||
class="terms-fade gl-absolute gl-left-5 gl-right-5 gl-bottom-0 gl-h-11 gl-pointer-events-none"
|
||||
></div>
|
||||
|
|
@ -97,7 +97,7 @@ export default {
|
|||
</gl-intersection-observer>
|
||||
</div>
|
||||
</div>
|
||||
<div v-if="isLoggedIn" class="gl-card-footer gl-display-flex gl-justify-content-end">
|
||||
<div v-if="isLoggedIn" class="gl-display-flex gl-justify-content-end">
|
||||
<form v-if="permissions.canDecline" method="post" :action="paths.decline">
|
||||
<gl-button type="submit">{{ $options.i18n.decline }}</gl-button>
|
||||
<input :value="$options.csrf.token" type="hidden" name="authenticity_token" />
|
||||
|
|
|
|||
|
|
@ -25,6 +25,7 @@ import {
|
|||
WIDGET_TYPE_DESCRIPTION,
|
||||
WIDGET_TYPE_START_AND_DUE_DATE,
|
||||
WIDGET_TYPE_WEIGHT,
|
||||
WIDGET_TYPE_PROGRESS,
|
||||
WIDGET_TYPE_HIERARCHY,
|
||||
WIDGET_TYPE_MILESTONE,
|
||||
WIDGET_TYPE_ITERATION,
|
||||
|
|
@ -73,6 +74,7 @@ export default {
|
|||
WorkItemTitle,
|
||||
WorkItemState,
|
||||
WorkItemWeight: () => import('ee_component/work_items/components/work_item_weight.vue'),
|
||||
WorkItemProgress: () => import('ee_component/work_items/components/work_item_progress.vue'),
|
||||
WorkItemTypeIcon,
|
||||
WorkItemIteration: () => import('ee_component/work_items/components/work_item_iteration.vue'),
|
||||
WorkItemMilestone,
|
||||
|
|
@ -252,6 +254,9 @@ export default {
|
|||
workItemWeight() {
|
||||
return this.isWidgetPresent(WIDGET_TYPE_WEIGHT);
|
||||
},
|
||||
workItemProgress() {
|
||||
return this.isWidgetPresent(WIDGET_TYPE_PROGRESS);
|
||||
},
|
||||
workItemHierarchy() {
|
||||
return this.isWidgetPresent(WIDGET_TYPE_HIERARCHY);
|
||||
},
|
||||
|
|
@ -564,6 +569,17 @@ export default {
|
|||
:query-variables="queryVariables"
|
||||
@error="updateError = $event"
|
||||
/>
|
||||
<work-item-progress
|
||||
v-if="workItemProgress"
|
||||
class="gl-mb-5"
|
||||
:can-update="canUpdate"
|
||||
:progress="workItemProgress.progress"
|
||||
:work-item-id="workItem.id"
|
||||
:work-item-type="workItemType"
|
||||
:fetch-by-iid="fetchByIid"
|
||||
:query-variables="queryVariables"
|
||||
@error="updateError = $event"
|
||||
/>
|
||||
<work-item-iteration
|
||||
v-if="workItemIteration"
|
||||
class="gl-mb-5"
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ export const WIDGET_TYPE_DESCRIPTION = 'DESCRIPTION';
|
|||
export const WIDGET_TYPE_LABELS = 'LABELS';
|
||||
export const WIDGET_TYPE_START_AND_DUE_DATE = 'START_AND_DUE_DATE';
|
||||
export const WIDGET_TYPE_WEIGHT = 'WEIGHT';
|
||||
export const WIDGET_TYPE_PROGRESS = 'PROGRESS';
|
||||
export const WIDGET_TYPE_HIERARCHY = 'HIERARCHY';
|
||||
export const WIDGET_TYPE_MILESTONE = 'MILESTONE';
|
||||
export const WIDGET_TYPE_ITERATION = 'ITERATION';
|
||||
|
|
|
|||
|
|
@ -6,7 +6,13 @@ import { createRouter } from './router';
|
|||
|
||||
export const initWorkItemsRoot = () => {
|
||||
const el = document.querySelector('#js-work-items');
|
||||
const { fullPath, hasIssueWeightsFeature, issuesListPath, hasIterationsFeature } = el.dataset;
|
||||
const {
|
||||
fullPath,
|
||||
hasIssueWeightsFeature,
|
||||
issuesListPath,
|
||||
hasIterationsFeature,
|
||||
hasOkrsFeature,
|
||||
} = el.dataset;
|
||||
|
||||
return new Vue({
|
||||
el,
|
||||
|
|
@ -17,6 +23,7 @@ export const initWorkItemsRoot = () => {
|
|||
fullPath,
|
||||
projectPath: fullPath,
|
||||
hasIssueWeightsFeature: parseBoolean(hasIssueWeightsFeature),
|
||||
hasOkrsFeature: parseBoolean(hasOkrsFeature),
|
||||
issuesListPath,
|
||||
hasIterationsFeature: parseBoolean(hasIterationsFeature),
|
||||
},
|
||||
|
|
|
|||
|
|
@ -14,6 +14,28 @@ input[type='text'].danger {
|
|||
text-shadow: 0 1px 1px $white;
|
||||
}
|
||||
|
||||
/**
|
||||
* When form input type is number, Firefox & Safari show the up/down arrows
|
||||
* on the right side of the input persistently, while Chrome shows it only
|
||||
* on hover or focus, this fix allows us to hide the arrows in all browsers.
|
||||
* You can conditionally add/remove `hide-spinners` class to have consistent
|
||||
* behaviour across browsers.
|
||||
*/
|
||||
|
||||
/* stylelint-disable property-no-vendor-prefix */
|
||||
input[type='number'].hide-spinners {
|
||||
-moz-appearance: textfield;
|
||||
appearance: textfield;
|
||||
|
||||
&::-webkit-inner-spin-button,
|
||||
&::-webkit-outer-spin-button {
|
||||
-webkit-appearance: none;
|
||||
appearance: none;
|
||||
margin: 0;
|
||||
}
|
||||
}
|
||||
/* stylelint-enable property-no-vendor-prefix */
|
||||
|
||||
.datetime-controls {
|
||||
select {
|
||||
width: 100px;
|
||||
|
|
|
|||
|
|
@ -772,3 +772,8 @@ textarea {
|
|||
wbr {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
// The font used in Monaco editor - Web IDE, Snippets, single file editor
|
||||
:root {
|
||||
--code-editor-font: #{$monospace-font};
|
||||
}
|
||||
|
|
|
|||
|
|
@ -47,6 +47,7 @@ class Admin::PlanLimitsController < Admin::ApplicationController
|
|||
ci_needs_size_limit
|
||||
ci_registered_group_runners
|
||||
ci_registered_project_runners
|
||||
pipeline_hierarchy_size
|
||||
])
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -24,11 +24,18 @@ module Ci
    # NOTE: This is a concurrency-safe method; the subquery in the `UPDATE`
    # works as explicit locking.
    def assign_resource_to(processable)
-      resources.free.limit(1).update_all(build_id: processable.id) > 0
+      attrs = {
+        build_id: processable.id,
+        partition_id: processable.partition_id
+      }
+
+      resources.free.limit(1).update_all(attrs) > 0
    end

    def release_resource_from(processable)
-      resources.retained_by(processable).update_all(build_id: nil) > 0
+      attrs = { build_id: nil, partition_id: nil }
+
+      resources.retained_by(processable).update_all(attrs) > 0
    end

    def upcoming_processables
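As a rough sketch of how the two methods above are exercised (the `production` resource group key, the build lookup, and `run_the_build` are assumed for illustration, not part of this commit): because the claim is a single `UPDATE ... LIMIT 1` against the free resources, two concurrent callers cannot both take the last free row, and the release now also clears `partition_id`.

```ruby
# Sketch only — names and records are assumed.
resource_group = Ci::ResourceGroup.find_by!(key: 'production')
build = Ci::Build.find(build_id) # hypothetical build id

if resource_group.assign_resource_to(build)
  # A free resource row now carries build.id and build.partition_id.
  run_the_build(build) # placeholder for whatever processes the build
  resource_group.release_resource_from(build) # sets build_id and partition_id back to nil
else
  # No free resource: another processable currently holds the resource group.
end
```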
@@ -11,7 +11,6 @@ module Ci
      DuplicateDownstreamPipelineError = Class.new(StandardError)

      MAX_NESTED_CHILDREN = 2
-      MAX_HIERARCHY_SIZE = 1000

      def execute(bridge)
        @bridge = bridge

@@ -156,7 +155,13 @@ module Ci
        return false unless @bridge.triggers_downstream_pipeline?

-        # Applies to the entire pipeline tree across all projects
-        @bridge.pipeline.complete_hierarchy_count >= MAX_HIERARCHY_SIZE
+        # A pipeline tree can be shared between multiple namespaces (customers); the limit used here
+        # is the limit of the namespace that added a downstream pipeline to the pipeline tree.
+        @bridge.project.actual_limits.exceeded?(:pipeline_hierarchy_size, complete_hierarchy_count)
      end

+      def complete_hierarchy_count
+        @bridge.pipeline.complete_hierarchy_count
+      end
+
      def config_checksum(pipeline)
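A small illustration of the new limit lookup (values and console context assumed): `actual_limits` resolves the plan limits of the project's namespace, so each namespace that attaches a downstream pipeline is checked against its own `pipeline_hierarchy_size`, which defaults to 1000 via the migration added in this commit.

```ruby
# Hedged sketch — assumes the project's plan keeps the default limit of 1000.
limits = project.actual_limits

limits.pipeline_hierarchy_size                    # => 1000
limits.exceeded?(:pipeline_hierarchy_size, 999)   # => false
limits.exceeded?(:pipeline_hierarchy_size, 1000)  # => true, mirroring the old `>= MAX_HIERARCHY_SIZE` check
```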
@ -101,4 +101,7 @@
|
|||
.form-group
|
||||
= f.label :ci_registered_project_runners, s_('AdminSettings|Maximum number of runners registered per project')
|
||||
= f.number_field :ci_registered_project_runners, class: 'form-control gl-form-input'
|
||||
.form-group
|
||||
= f.label :pipeline_hierarchy_size, s_("AdminSettings|Maximum number of downstream pipelines in a pipeline's hierarchy tree")
|
||||
= f.number_field :pipeline_hierarchy_size, class: 'form-control gl-form-input'
|
||||
= f.submit s_('AdminSettings|Save %{name} limits').html_safe % { name: plan.name.capitalize }, pajamas_button: true
|
||||
|
|
|
|||
|
|
@ -62,6 +62,6 @@
|
|||
= f.datetime_select :ends_at, {}, class: 'form-control form-control-inline'
|
||||
.form-actions
|
||||
- if @broadcast_message.persisted?
|
||||
= f.submit _("Update broadcast message"), class: "btn gl-button btn-confirm"
|
||||
= f.submit _("Update broadcast message"), pajamas_button: true
|
||||
- else
|
||||
= f.submit _("Add broadcast message"), class: "btn gl-button btn-confirm"
|
||||
= f.submit _("Add broadcast message"), pajamas_button: true
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,8 @@
---
name: ci_refactoring_external_mapper
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/106408
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/385179
milestone: '15.7'
type: development
group: group::pipeline authoring
default_enabled: false
|
@ -1058,7 +1058,7 @@ production: &base
|
|||
# disable_ssl_verification: false,
|
||||
# login_url: '/cas/login',
|
||||
# service_validate_url: '/cas/p3/serviceValidate',
|
||||
# logout_url: '/cas/logout'} }
|
||||
# logout_url: '/cas/logout' } }
|
||||
# - { name: 'authentiq',
|
||||
# # for client credentials (client ID and secret), go to https://www.authentiq.com/developers
|
||||
# app_id: 'YOUR_CLIENT_ID',
|
||||
|
|
@ -1567,7 +1567,7 @@ test:
|
|||
disable_ssl_verification: false,
|
||||
login_url: '/cas/login',
|
||||
service_validate_url: '/cas/p3/serviceValidate',
|
||||
logout_url: '/cas/logout'} }
|
||||
logout_url: '/cas/logout' } }
|
||||
- { name: 'github',
|
||||
app_id: 'YOUR_APP_ID',
|
||||
app_secret: 'YOUR_APP_SECRET',
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,7 @@
# frozen_string_literal: true

class AddPipelineHierarchySizeToPlanLimits < Gitlab::Database::Migration[2.1]
  def change
    add_column(:plan_limits, :pipeline_hierarchy_size, :integer, default: 1000, null: false)
  end
end

@@ -0,0 +1,16 @@
# frozen_string_literal: true

class RemoveConstraintsFromCiResourcesForPartitionId < Gitlab::Database::Migration[2.1]
  enable_lock_retries!

  def up
    change_column_null :ci_resources, :partition_id, true
  end

  def down
    # no-op
    # Adding back the not null constraint requires a long exclusive lock.
    # Also depending on when it gets called, it might not even be possible to
    # execute because the application could have inserted null values.
  end
end

@@ -0,0 +1,9 @@
# frozen_string_literal: true

class ChangeDefaultPartitionIdOnCiResources < Gitlab::Database::Migration[2.1]
  enable_lock_retries!

  def change
    change_column_default :ci_resources, :partition_id, from: 100, to: nil
  end
end
@@ -0,0 +1 @@
72063c052e88d9351dbf7aedc373dadedb685f63cfbbadc992ddf322c546579b
@@ -0,0 +1 @@
e205d116057a4e6770b8e8b7e49a87a180fb470087a4394d1a4e529ff1dba631
@@ -0,0 +1 @@
0677f23100c5a4b010c2601d64c29116150b51735c7b920fa2c87a95de293176
@ -13374,7 +13374,7 @@ CREATE TABLE ci_resources (
|
|||
updated_at timestamp with time zone NOT NULL,
|
||||
resource_group_id bigint NOT NULL,
|
||||
build_id bigint,
|
||||
partition_id bigint DEFAULT 100 NOT NULL
|
||||
partition_id bigint
|
||||
);
|
||||
|
||||
CREATE SEQUENCE ci_resources_id_seq
|
||||
|
|
@ -19533,7 +19533,8 @@ CREATE TABLE plan_limits (
|
|||
group_ci_variables integer DEFAULT 200 NOT NULL,
|
||||
ci_max_artifact_size_cyclonedx integer DEFAULT 1 NOT NULL,
|
||||
rpm_max_file_size bigint DEFAULT '5368709120'::bigint NOT NULL,
|
||||
ci_max_artifact_size_requirements_v2 integer DEFAULT 0 NOT NULL
|
||||
ci_max_artifact_size_requirements_v2 integer DEFAULT 0 NOT NULL,
|
||||
pipeline_hierarchy_size integer DEFAULT 1000 NOT NULL
|
||||
);
|
||||
|
||||
CREATE SEQUENCE plan_limits_id_seq
|
||||
|
|
|
|||
|
|
@ -18057,12 +18057,14 @@ Returns [`Requirement`](#requirement).
|
|||
| Name | Type | Description |
|
||||
| ---- | ---- | ----------- |
|
||||
| <a id="projectrequirementauthorusername"></a>`authorUsername` | [`[String!]`](#string) | Filter requirements by author username. |
|
||||
| <a id="projectrequirementiid"></a>`iid` | [`ID`](#id) | IID of the requirement, e.g., "1". |
|
||||
| <a id="projectrequirementiids"></a>`iids` | [`[ID!]`](#id) | List of IIDs of requirements, e.g., `[1, 2]`. |
|
||||
| <a id="projectrequirementiid"></a>`iid` | [`ID`](#id) | IID of the requirement, for example, "1". |
|
||||
| <a id="projectrequirementiids"></a>`iids` | [`[ID!]`](#id) | List of IIDs of requirements, for example, `[1, 2]`. |
|
||||
| <a id="projectrequirementlasttestreportstate"></a>`lastTestReportState` | [`RequirementStatusFilter`](#requirementstatusfilter) | State of latest requirement test report. |
|
||||
| <a id="projectrequirementsearch"></a>`search` | [`String`](#string) | Search query for requirement title. |
|
||||
| <a id="projectrequirementsort"></a>`sort` | [`Sort`](#sort) | List requirements by sort order. |
|
||||
| <a id="projectrequirementstate"></a>`state` | [`RequirementState`](#requirementstate) | Filter requirements by state. |
|
||||
| <a id="projectrequirementworkitemiid"></a>`workItemIid` | [`ID`](#id) | IID of the requirement work item, for example, "1". |
|
||||
| <a id="projectrequirementworkitemiids"></a>`workItemIids` | [`[ID!]`](#id) | List of IIDs of requirement work items, for example, `[1, 2]`. |
|
||||
|
||||
##### `Project.requirements`
|
||||
|
||||
|
|
@ -18079,12 +18081,14 @@ four standard [pagination arguments](#connection-pagination-arguments):
|
|||
| Name | Type | Description |
|
||||
| ---- | ---- | ----------- |
|
||||
| <a id="projectrequirementsauthorusername"></a>`authorUsername` | [`[String!]`](#string) | Filter requirements by author username. |
|
||||
| <a id="projectrequirementsiid"></a>`iid` | [`ID`](#id) | IID of the requirement, e.g., "1". |
|
||||
| <a id="projectrequirementsiids"></a>`iids` | [`[ID!]`](#id) | List of IIDs of requirements, e.g., `[1, 2]`. |
|
||||
| <a id="projectrequirementsiid"></a>`iid` | [`ID`](#id) | IID of the requirement, for example, "1". |
|
||||
| <a id="projectrequirementsiids"></a>`iids` | [`[ID!]`](#id) | List of IIDs of requirements, for example, `[1, 2]`. |
|
||||
| <a id="projectrequirementslasttestreportstate"></a>`lastTestReportState` | [`RequirementStatusFilter`](#requirementstatusfilter) | State of latest requirement test report. |
|
||||
| <a id="projectrequirementssearch"></a>`search` | [`String`](#string) | Search query for requirement title. |
|
||||
| <a id="projectrequirementssort"></a>`sort` | [`Sort`](#sort) | List requirements by sort order. |
|
||||
| <a id="projectrequirementsstate"></a>`state` | [`RequirementState`](#requirementstate) | Filter requirements by state. |
|
||||
| <a id="projectrequirementsworkitemiid"></a>`workItemIid` | [`ID`](#id) | IID of the requirement work item, for example, "1". |
|
||||
| <a id="projectrequirementsworkitemiids"></a>`workItemIids` | [`[ID!]`](#id) | List of IIDs of requirement work items, for example, `[1, 2]`. |
|
||||
|
||||
##### `Project.runners`
|
||||
|
||||
|
|
|
|||
Two binary files added (images: 106 KiB and 96 KiB), contents not shown.
|
|
@ -0,0 +1,315 @@
|
|||
---
|
||||
status: proposed
|
||||
creation-date: "2022-11-15"
|
||||
authors: [ "@vtak" ]
|
||||
coach: "@grzesiek"
|
||||
approvers: [ "@ericschurter", "@oregand" ]
|
||||
owning-stage: "~devops::create"
|
||||
participating-stages: []
|
||||
---
|
||||
|
||||
# Remote Development
|
||||
|
||||
## Summary
|
||||
|
||||
Remote Development is a new architecture for our software-as-a-service platform that provides a more consistent user experience writing code hosted in GitLab. It may also provide additional features in the future, such as a purely browser-based workspace and the ability to connect to an already running VM/Container or to use a GitLab-hosted VM/Container.
|
||||
|
||||
## Web IDE and Remote Development
|
||||
|
||||
It is important to note that `Remote Development !== Web IDE`, and this is something we want to be explicit about in this document as the terms can become conflated when they shouldn't. Our new Web IDE is a separate ongoing effort that is running in parallel to Remote Development.
|
||||
|
||||
These two separate categories do have some overlap as it is a goal to allow a user to connect a running workspace to the Web IDE, **but** this does not mean the two are dependent on one another.
|
||||
|
||||
You can use the [Web IDE](../../../user/project/web_ide/index.md) to commit changes to a project directly from your web browser without installing any dependencies or cloning any repositories. The Web IDE, however, lacks a native runtime environment on which you would compile code, run tests, or generate real-time feedback in the IDE. For a more complete IDE experience, you can pair the Web IDE with a Remote Development workspace that has been properly configured to run as a host.
|
||||
|
||||

|
||||
|
||||
## Long-term vision
|
||||
|
||||
As a [new Software Developer to a team such as Sasha](https://about.gitlab.com/handbook/product/personas/#sasha-software-developer) with no local development environment, I should be able to:
|
||||
|
||||
- Navigate to a repository on GitLab.com or self-managed.
|
||||
- Click a button that will provide a list of current workspaces for this repository.
|
||||
- Click a button that will create a new workspace or select an existing workspace from a list.
|
||||
- Go through a configuration wizard that will let me select various options for my workspace (memory/CPU).
|
||||
- Start up a workspace from the Web IDE and within a minute have a fully interactive terminal panel at my disposal.
|
||||
- Make code changes, run tests, troubleshoot based on the terminal output, and commit new changes.
|
||||
- Submit MRs of any kind without having to clone the repository locally or to manually update a local development environment.
|
||||
|
||||
## User Flow Diagram
|
||||
|
||||

|
||||
|
||||
## Terminology
|
||||
|
||||
We use the following terms to describe components and properties of the Remote Development architecture.
|
||||
|
||||
### Remote Development
|
||||
|
||||
Remote Development allows you to use a secure development environment in the cloud that you can connect to from your local machine through a web browser or a client-based solution with the purpose of developing a software product there.
|
||||
|
||||
#### Remote Development properties
|
||||
|
||||
- Separate your development environment to avoid impacting your local machine configuration.
|
||||
- Make it easy for new contributors to get started and keep everyone on a consistent environment.
|
||||
- Use tools or runtimes not available on your local OS or manage multiple versions of them.
|
||||
- Access an existing development environment from multiple machines or locations.
|
||||
|
||||
Discouraged synonyms: VS Code for web, Remote Development Extension, browser-only WebIDE, Client only WebIDE
|
||||
|
||||
### Workspace
|
||||
|
||||
Container/VM-based developer machines providing all the tools and dependencies needed to code, build, test, run, and debug applications.
|
||||
|
||||
#### Workspace properties
|
||||
|
||||
- Workspaces should be isolated from each other by default and are responsible for managing the lifecycle of their components. This isolation can be multi-layered: namespace isolation, network isolation, resources isolation, node isolation, sandboxing containers, etc. ([reference](https://kubernetes.io/docs/concepts/security/multi-tenancy/)).
|
||||
- A workspace should contain project components as well as editor components.
|
||||
- A workspace should be a combination of resources that support cloud-based development environment.
|
||||
- Workspaces are constrained by the amount of resources provided to them.
|
||||
|
||||
### Legacy Web IDE
|
||||
|
||||
The current production [Web IDE](../../../user/project/web_ide/index.md).
|
||||
|
||||
#### Legacy Web IDE properties
|
||||
|
||||
An advanced editor with commit staging that currently supports:
|
||||
|
||||
- [Live Preview](../../../user/project/web_ide/index.md#live-preview)
|
||||
- [Interactive Web Terminals](../../../user/project/web_ide/index.md#interactive-web-terminals-for-the-web-ide)
|
||||
|
||||
### Web IDE
|
||||
|
||||
VS Code for web - replacement of our current legacy Web IDE.
|
||||
|
||||
#### Web IDE properties
|
||||
|
||||
A package for bootstrapping GitLab context-aware Web IDE that:
|
||||
|
||||
- Is built on top of Microsoft's VS Code. We customize and add VS Code features in the [GitLab fork of the VS Code project](https://gitlab.com/gitlab-org/gitlab-web-ide-vscode-fork).
|
||||
- Can be configured in a way that it connects to the workspace rather than only using the browser. When connected to a workspace, a user should be able to do the following from the Web IDE:
|
||||
- Edit, build, or debug on a different OS than they are running locally.
|
||||
- Make use of larger or more specialized hardware than their local machine for development.
|
||||
- Separate developer environments to avoid conflicts, improve security, and speed up onboarding.
|
||||
|
||||
### Remote Development Extension for Desktop
|
||||
|
||||
Something that plugs into the desktop IDE and connects you to the workspace.
|
||||
|
||||
#### Remote Development Extension for Desktop properties
|
||||
|
||||
- Allows you to open any folder in a workspace.
|
||||
- Should be desktop IDE agnostic.
|
||||
- Should have access to local files or APIs.
|
||||
|
||||
## Goals
|
||||
|
||||
### A consistent experience
|
||||
|
||||
Organizations should have the same user experience on our SaaS platform as they do on a self-managed GitLab instance. We want to abstract away the user's development environment to avoid impacting their local machine configuration. We also want to provide support for developing on the same operating system you deploy to or use larger or more specialized hardware.
|
||||
|
||||
A major goal is that each member of a development team should have the same development experience minus any specialized local configuration. This will also make it easy for new contributors to get started and keep everyone on a consistent environment.
|
||||
|
||||
### Increased availability
|
||||
|
||||
A workspace should allow access to an existing development environment from multiple machines and locations across a single or multiple teams. It should also allow a user to make use of tools or runtimes not available on their local OS or manage multiple versions of them.
|
||||
|
||||
Additionally, Remote Development workspaces could provide a way to implement disaster recovery if we are able to leverage the capabilities of [Pods](../../../architecture/blueprints/pods/index.md).
|
||||
|
||||
### Scalability
|
||||
|
||||
As an organization begins to scale, they quickly realize the need to support additional types of projects that might require extensive workflows. Remote Development workspaces aim to solve that issue by abstracting away the burden of complex machine configuration, dependency management, and possible data-seeding issues.
|
||||
|
||||
To facilitate working on different features across different projects, Remote Development should allow each user to provision multiple workspaces to enable quick context switching.
|
||||
|
||||
Eventually, we should be able to allow users to vertically scale their workspaces with more compute cores, memory, and other resources. If a user is currently working against a 2 CPU and 4 GB RAM workspace but comes to find they need more CPU, they should be able to upgrade their compute layer to something more suitable with a click or CLI command within the workspace.
|
||||
|
||||
### Provide built-in security and enterprise readiness
|
||||
|
||||
As Remote Development becomes a viable replacement for Virtual Desktop Infrastructure solutions, they must be secure and support enterprise requirements, such as role-based access control and the ability to remove all source code from developer machines.
|
||||
|
||||
### Accelerate project and developer onboarding
|
||||
|
||||
As a zero-install development environment that runs in your browser, Remote Development makes it easy for anyone to join your team and contribute to a project.
|
||||
|
||||
### Regions
|
||||
|
||||
GitLab.com is only hosted within the United States of America. Organizations located in other regions have voiced demand for local SaaS offerings. BYO infrastructure helps work in conjunction with [GitLab Regions](https://gitlab.com/groups/gitlab-org/-/epics/6037) because a user's workspace may be deployed within different geographies. The ability to deploy workspaces to different geographies might also help to solve data residency and compliance problems.
|
||||
|
||||
## High-level architecture problems to solve
|
||||
|
||||
A number of technical issues need to be resolved to implement a stable Remote Development offering. This section will be expanded.
|
||||
|
||||
- Who is our main persona for BYO infrastructure?
|
||||
- How do users authenticate?
|
||||
- How do we support more than one IDE?
|
||||
- How are workspaces provisioned?
|
||||
- How can workspaces implement disaster recovery capabilities?
|
||||
- If we cannot use SSH, what are the viable alternatives for establishing a secure WebSocket connection?
|
||||
- Are we running into any limitations in functionality with the Web IDE by not having it running in the container itself? For example, are we going to get code completion, linting, and language server type features to work with our approach?
|
||||
- How will our environments be provisioned, managed, created, destroyed, etc.?
|
||||
- To what extent do we need to provide the user with a UI to interact with the provisioned environments?
|
||||
- How will the files inside the workspace get live updated based on changes in the Web IDE? Are we going to use a [CRDT](https://en.wikipedia.org/wiki/Conflict-free_replicated_data_type)-like setup to patch files in a container? Are we going to generate a diff and send it though a WebSocket connection?
|
||||
|
||||
## Iteration plan
|
||||
|
||||
We can't ship the entire Remote Development architecture in one go - it is too large. Instead, we are adopting an iteration plan that provides value along the way.
|
||||
|
||||
- Use GitLab Agent for Kubernetes Remote Development Module.
|
||||
- Integrate Remote Development with the UI and Web IDE.
|
||||
- Improve security and usability.
|
||||
|
||||
### High-level approach
|
||||
|
||||
The nuts and bolts are being worked out at [Remote Development GA4K Architecture](https://gitlab.com/gitlab-org/remote-development/gitlab-remote-development-docs/-/blob/main/doc/architecture.md) to keep a SSoT. Once we have hammered out the details, we'll replace this section with the diagram in the above repository.
|
||||
|
||||
### Iteration 0: [GitLab Agent for Kubernetes Remote Development Module (plumbing)](https://gitlab.com/groups/gitlab-org/-/epics/9138)
|
||||
|
||||
#### Goals
|
||||
|
||||
- Use the [GitLab Agent](../../../user/clusters/agent/index.md) integration.
|
||||
- Create a workspace in a Kubernetes cluster based on a `devfile` in a public repository.
|
||||
- Install the IDE and dependencies as defined.
|
||||
- Report the status of the environment (via the terminal or through an endpoint).
|
||||
- Connect to an IDE in the workspace.
|
||||
|
||||
#### Requirements
|
||||
|
||||
- Remote environment running on a Kubernetes cluster based on a `devfile` in a repo.
|
||||
|
||||
These are **not** part of Iteration 0:
|
||||
|
||||
- Authentication/authorization with GitLab and a user.
|
||||
- Integration of Remote Development with the GitLab UI and Web IDE.
|
||||
- Using GA4K instead of an Ingress controller.
|
||||
|
||||
#### Assumptions
|
||||
|
||||
- We will use [`devworkspace-operator` v0.17.0 (latest version)](https://github.com/devfile/devworkspace-operator/releases/tag/v0.17.0). A prerequisite is [`cert-manager`](https://github.com/devfile/devworkspace-operator#with-yaml-resources).
|
||||
- We have an Ingress controller ([Ingress-NGINX](https://github.com/kubernetes/ingress-nginx)), which is accessible over the network.
|
||||
- The initial server is stubbed.
|
||||
|
||||
#### Success criteria
|
||||
|
||||
- Using GA4K to communicate with the Kubernetes API from the `remote_dev` agent module.
|
||||
- All calls to the Kubernetes API are done through GA4K.
|
||||
- A workspace in a Kubernetes cluster created using DevWorkspace Operator.
|
||||
|
||||
### Iteration 1: [Rails endpoints, authentication, and authorization](https://gitlab.com/groups/gitlab-org/-/epics/9323)
|
||||
|
||||
#### Goals
|
||||
|
||||
- Add endpoints in Rails to accept work from a user.
|
||||
- Poll Rails for work from KAS.
|
||||
- Add authentication and authorization to the workspaces created in the Kubernetes cluster.
|
||||
- Extend the GA4K `remote_dev` agent module to accept more types of work (get details of a workspace, list workspaces for a user, etc).
|
||||
- Build an editor injector for the GitLab fork of VS Code.
|
||||
|
||||
#### Requirements
|
||||
|
||||
- [GitLab Agent for Kubernetes Remote Development Module (plumbing)](https://gitlab.com/groups/gitlab-org/-/epics/9138) is complete.
|
||||
|
||||
These are **not** part of Iteration 1:
|
||||
|
||||
- Integration of Remote Development with the GitLab UI and Web IDE.
|
||||
- Using GA4K instead of an Ingress controller.
|
||||
|
||||
#### Assumptions
|
||||
|
||||
- TBA
|
||||
|
||||
#### Success criteria
|
||||
|
||||
- Poll Rails for work from KAS.
|
||||
- Rails endpoints to create/delete/get/list workspaces.
|
||||
- All requests are correctly authenticated and authorized except where the user has requested the traffic to be public (for example, opening a server while developing and making it public).
|
||||
- A user can create a workspace, start a server on that workspace, and have that traffic become private/internal/public.
|
||||
- We are using the GitLab fork of VS Code as an editor.
|
||||
|
||||
### Iteration 2: [Integrate Remote Development with the UI and Web IDE](https://gitlab.com/groups/gitlab-org/-/epics/9169)
|
||||
|
||||
#### Goals
|
||||
|
||||
- Allow users full control of their workspaces via the GitLab UI.
|
||||
|
||||
#### Requirements
|
||||
|
||||
- [GitLab Agent for Kubernetes Remote Development Module](https://gitlab.com/groups/gitlab-org/-/epics/9138).
|
||||
|
||||
These are **not** part of Iteration 2:
|
||||
|
||||
- Usability improvements
|
||||
- Security improvements
|
||||
|
||||
#### Success criteria
|
||||
|
||||
- Be able to list/create/delete/stop/start/restart workspaces from the UI.
|
||||
- Be able to create workspaces for the user in the Web IDE.
|
||||
- Allow the Web IDE terminal to connect to different containers in the workspace.
|
||||
- Configure DevWorkspace Operator for user-expected configuration (30-minute workspace timeout, a separate persistent volume for each workspace that is deleted when the workspace is deleted, etc.).
|
||||
|
||||
### Iteration 3: [Improve security and usability](https://gitlab.com/groups/gitlab-org/-/epics/9170)
|
||||
|
||||
#### Goals
|
||||
|
||||
- Improve security and usability of our Remote Development solution.
|
||||
|
||||
#### Requirements
|
||||
|
||||
- [Integrate Remote Development with the UI and Web IDE](https://gitlab.com/groups/gitlab-org/-/epics/9169) is complete.
|
||||
|
||||
#### Assumptions
|
||||
|
||||
- We are allowing for internal feedback and closed/early customer feedback that can be iterated on.
|
||||
- We have explored or are exploring the feasibility of using GA4K with Ingresses in [Solving Ingress problems for Remote Development](https://gitlab.com/gitlab-org/gitlab/-/issues/378998).
|
||||
- We have explored or are exploring Kata containers for providing root access to workspace users in [Investigate Kata Containers / Firecracker / gVisor](https://gitlab.com/gitlab-org/gitlab/-/issues/367043).
|
||||
- We have explored or are exploring how Ingress/Egress requests cannot be misused from [resources within or outside the cluster](https://gitlab.com/gitlab-org/remote-development/gitlab-remote-development-docs/-/blob/main/doc/securing-the-workspace.md) (security hardening).
|
||||
|
||||
#### Success criteria
|
||||
|
||||
Add options to:
|
||||
|
||||
- Create different classes of workspaces (1gb-2cpu, 4gb-8cpu, etc.).
|
||||
- Vertically scale up workspace resources.
|
||||
- Inject secrets from a GitLab user/group/repository.
|
||||
- Configure timeouts of workspaces at multiple levels.
|
||||
- Allow users to expose endpoints in their workspace (for example, not allow anyone in the organization to expose any endpoint publicly).
|
||||
|
||||
## Market analysis
|
||||
|
||||
We have conducted a market analysis to understand the broader market and what others can offer us by way of open-source libraries, integrations, or partnership opportunities. We have broken down the effort into a set of issues where we investigate each potential competitor/pathway/partnership as a spike.
|
||||
|
||||
- [Market analysis](https://gitlab.com/groups/gitlab-org/-/epics/8131)
|
||||
- [YouTube results](https://www.youtube.com/playlist?list=PL05JrBw4t0KrRQhnSYRNh1s1mEUypx67-)
|
||||
|
||||
### Next Steps
|
||||
|
||||
While our spike proved fruitful, we have paused this investigation until we reach our goals in [Viable Maturity](https://gitlab.com/groups/gitlab-org/-/epics/9190).
|
||||
|
||||
## Che versus a custom-built solution
|
||||
|
||||
After an investigation into using [Che](https://gitlab.com/gitlab-org/gitlab/-/issues/366052) as our backend to accelerate Remote Development, we ultimately opted to [write our own custom-built solution](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97449#note_1131215629).
|
||||
|
||||
Some advantages of us opting to write our own custom-built solution are:
|
||||
|
||||
- We can still use the core DevWorkspace Operator and build on top of it.
|
||||
- It is easier to add support for other configurations apart from `devfile` in the future if the need arises.
|
||||
- We have the ability to choose which tech stack to use (for example, instead of using Traefik which is used in Che, explore NGINX itself or use GitLab Agent for Kubernetes).
|
||||
|
||||
## Links
|
||||
|
||||
- [Remote Development presentation](https://docs.google.com/presentation/d/1XHH_ZilZPufQoWVWViv3evipI-BnAvRQrdvzlhBuumw/edit#slide=id.g131f2bb72e4_0_8)
|
||||
- [Category Strategy epic](https://gitlab.com/groups/gitlab-org/-/epics/7419)
|
||||
- [Minimal Maturity epic](https://gitlab.com/groups/gitlab-org/-/epics/9189)
|
||||
- [Viable Maturity epic](https://gitlab.com/groups/gitlab-org/-/epics/9190)
|
||||
- [Complete Maturity epic](https://gitlab.com/groups/gitlab-org/-/epics/9191)
|
||||
- [Bi-weekly sync](https://docs.google.com/document/d/1hWVvksIc7VzZjG-0iSlzBnLpyr-OjwBVCYMxsBB3h_E/edit#)
|
||||
- [Market analysis and architecture](https://gitlab.com/groups/gitlab-org/-/epics/8131)
|
||||
- [GA4K Architecture](https://gitlab.com/gitlab-org/remote-development/gitlab-remote-development-docs/-/blob/main/doc/architecture.md)
|
||||
- [BYO infrastructure](https://gitlab.com/groups/gitlab-org/-/epics/8290)
|
||||
- [Browser runtime](https://gitlab.com/groups/gitlab-org/-/epics/8291)
|
||||
- [GitLab-hosted infrastructure](https://gitlab.com/groups/gitlab-org/-/epics/8292)
|
||||
- [Browser runtime spike](https://gitlab.com/gitlab-org/gitlab-web-ide/-/merge_requests/58).
|
||||
- [Remote Development direction](https://about.gitlab.com/direction/create/editor/remote_development)
|
||||
- [Ideal user journey](https://about.gitlab.com/direction/create/editor/remote_development/#ideal-user-journey)
|
||||
|
|
@ -425,7 +425,7 @@ A configuration with different pipeline names depending on the pipeline conditio
|
|||
|
||||
```yaml
|
||||
variables:
|
||||
PIPELINE_NAME: 'Default pipeline name'
|
||||
PIPELINE_NAME: 'Default pipeline name' # A default is not required.
|
||||
|
||||
workflow:
|
||||
name: '$PIPELINE_NAME'
|
||||
|
|
@ -438,6 +438,11 @@ workflow:
|
|||
PIPELINE_NAME: 'Ruby 3 pipeline'
|
||||
```
|
||||
|
||||
**Additional details**:
|
||||
|
||||
- If the name is an empty string, the pipeline is not assigned a name. A name consisting
|
||||
of only CI/CD variables could evaluate to an empty string if all the variables are also empty.
|
||||
|
||||
#### `workflow:rules`
|
||||
|
||||
The `rules` keyword in `workflow` is similar to [`rules` defined in jobs](#rules),
|
||||
|
|
|
|||
|
|
@ -1467,11 +1467,51 @@ GitLab uses [`factory_bot`](https://github.com/thoughtbot/factory_bot) as a test
|
|||
resulting record to pass validation.
|
||||
- When instantiating from a factory, don't supply attributes that aren't
|
||||
required by the test.
|
||||
- Prefer [implicit](https://github.com/thoughtbot/factory_bot/blob/master/GETTING_STARTED.md#implicit-definition),
|
||||
- Use [implicit](https://github.com/thoughtbot/factory_bot/blob/master/GETTING_STARTED.md#implicit-definition),
|
||||
[explicit](https://github.com/thoughtbot/factory_bot/blob/master/GETTING_STARTED.md#explicit-definition), or
|
||||
[inline](https://github.com/thoughtbot/factory_bot/blob/master/GETTING_STARTED.md#inline-definition) associations
|
||||
over `create` / `build` for association setup in callbacks.
|
||||
instead of `create` / `build` for association setup in callbacks.
|
||||
See [issue #262624](https://gitlab.com/gitlab-org/gitlab/-/issues/262624) for further context.
|
||||
|
||||
When creating factories with a [`has_many`](https://github.com/thoughtbot/factory_bot/blob/master/GETTING_STARTED.md#has_many-associations) and `belongs_to` association, use the `instance` method to refer to the object being built.
|
||||
This prevents [creation of unnecessary records](https://gitlab.com/gitlab-org/gitlab/-/issues/378183) by using [interconnected associations](https://github.com/thoughtbot/factory_bot/blob/master/GETTING_STARTED.md#interconnected-associations).
|
||||
|
||||
For example, if we have the following classes:
|
||||
|
||||
```ruby
|
||||
class Car < ApplicationRecord
|
||||
has_many :wheels, inverse_of: :car, foreign_key: :car_id
|
||||
end
|
||||
|
||||
class Wheel < ApplicationRecord
|
||||
belongs_to :car, foreign_key: :car_id, inverse_of: :wheel, optional: false
|
||||
end
|
||||
```
|
||||
|
||||
We can create the following factories:
|
||||
|
||||
```ruby
|
||||
FactoryBot.define do
|
||||
factory :car do
|
||||
transient do
|
||||
wheels_count { 2 }
|
||||
end
|
||||
|
||||
wheels do
|
||||
Array.new(wheels_count) do
|
||||
association(:wheel, car: instance)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
FactoryBot.define do
|
||||
factory :wheel do
|
||||
car { association :car }
|
||||
end
|
||||
end
|
||||
```
|
||||
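A short usage sketch of the factories above (counts and spec context assumed): overriding the transient `wheels_count` controls how many wheels are built, and because each wheel is associated with `instance`, no extra `Car` records are created.

```ruby
# Sketch: exercising the :car factory defined above inside a spec.
car = create(:car, wheels_count: 4)

car.wheels.size                            # => 4
car.wheels.map(&:car_id).uniq == [car.id]  # => true — all wheels point at the same car
Car.count                                  # => 1, no throwaway Car created for the wheels
```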
|
||||
- Factories don't have to be limited to `ActiveRecord` objects.
|
||||
[See example](https://gitlab.com/gitlab-org/gitlab-foss/commit/0b8cefd3b2385a21cfed779bd659978c0402766d).
|
||||
- Factories and their traits should produce valid objects that are [verified by specs](https://gitlab.com/gitlab-org/gitlab/-/blob/master/spec/models/factories_spec.rb).
|
||||
|
|
|
|||
|
|
@ -29,11 +29,11 @@ The GitLab Operator does not include the GitLab Runner. To install and manage a
|
|||
|
||||
## Unsupported GitLab features
|
||||
|
||||
### Secure and Protect
|
||||
### Secure
|
||||
|
||||
- License Compliance
|
||||
- Code Quality scanning
|
||||
- Cluster Image Scanning
|
||||
- [License Compliance](../../user/compliance/license_compliance/index.md)
|
||||
- [Code Quality scanning](../../ci/testing/code_quality.md)
|
||||
- [Operational Container Scanning](../../user/clusters/agent/vulnerabilities.md) (Note: Pipeline [Container Scanning](../../user/application_security/container_scanning/index.md) is supported)
|
||||
|
||||
### Docker-in-Docker
|
||||
|
||||
|
|
|
|||
|
|
@ -105,6 +105,7 @@ Check the [SAST direction page](https://about.gitlab.com/direction/secure/static
|
|||
| React | [Semgrep](https://gitlab.com/gitlab-org/security-products/analyzers/semgrep) with GitLab-managed rules | 13.10 |
|
||||
| Ruby | [brakeman](https://gitlab.com/gitlab-org/security-products/analyzers/brakeman) | 13.9 |
|
||||
| Ruby on Rails | [brakeman](https://gitlab.com/gitlab-org/security-products/analyzers/brakeman) | 10.3 |
|
||||
| Scala (any build system) | [Semgrep](https://gitlab.com/gitlab-org/security-products/analyzers/semgrep) with GitLab-managed rules | 15.7 |
|
||||
| Scala<sup>2</sup> | [SpotBugs](https://gitlab.com/gitlab-org/security-products/analyzers/spotbugs) with the find-sec-bugs plugin | 11.0 (SBT) & 11.9 (Gradle, Maven) |
|
||||
| Swift (iOS) | [MobSF (beta)](https://gitlab.com/gitlab-org/security-products/analyzers/mobsf) | 13.5 |
|
||||
| TypeScript<sup>3</sup> | [ESLint security plugin](https://gitlab.com/gitlab-org/security-products/analyzers/eslint) | 11.9, [merged](https://gitlab.com/gitlab-org/gitlab/-/issues/36059) with ESLint in 13.2 |
|
||||
|
|
|
|||
|
|
@ -10,8 +10,9 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
> - [Disabled by default for GitLab personal access tokens](https://gitlab.com/gitlab-org/gitlab/-/issues/371658) in GitLab 15.6 [with a flag](../../../administration/feature_flags.md) named `gitlab_pat_auto_revocation`. Available to GitLab.com only.
|
||||
|
||||
FLAG:
|
||||
By default, auto revocation of GitLab personal access tokens is not available. To opt-in on GitLab.com,
|
||||
please reach out to GitLab support.
|
||||
By default, auto revocation of GitLab personal access tokens is not available. To opt-in on GitLab.com
|
||||
during the [Beta period](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga), please
|
||||
[let us know by completing this form](https://docs.google.com/forms/d/e/1FAIpQLSdRbFhvA5jvI-Rt_Qnl1PQ1znOXKK8m6lRtmM0uva4upetKvQ/viewform).
|
||||
|
||||
GitLab supports running post-processing hooks after detecting a secret. These
|
||||
hooks can perform actions, like notifying the cloud service that issued the secret.
|
||||
|
|
|
|||
|
|
@ -83,11 +83,6 @@ once.
|
|||
|
||||
A finding that doesn't exist but is incorrectly reported as existing.
|
||||
|
||||
### Feedback
|
||||
|
||||
Feedback the user provides about a finding. Types of feedback include dismissal, creating an issue,
|
||||
or creating a merge request.
|
||||
|
||||
### Finding
|
||||
|
||||
An asset that has the potential to be vulnerable, identified in a project by an analyzer. Assets
|
||||
|
|
@ -96,6 +91,11 @@ applications, and infrastructure.
|
|||
|
||||
Findings are all potential vulnerability items scanners identify in MRs/feature branches. Only after merging to default does a finding become a [vulnerability](#vulnerability).
|
||||
|
||||
You can interact with vulnerability findings in two ways.
|
||||
|
||||
1. You can open an issue or merge request for the vulnerability finding.
|
||||
1. You can dismiss the vulnerability finding. Dismissing the finding hides it from the default views.
|
||||
|
||||
### Grouping
|
||||
|
||||
A flexible and non-destructive way to visually organize vulnerabilities in groups when there are multiple findings
|
||||
|
|
|
|||
|
|
@ -21,6 +21,13 @@ For a demo of Group Sync using Azure, see [Demo: SAML Group Sync](https://youtu.
|
|||
|
||||
## Configure SAML Group Sync
|
||||
|
||||
NOTE:
|
||||
You must include the SAML configuration block on all Sidekiq nodes in addition to Rails application nodes if you:
|
||||
|
||||
- Use SAML Group Sync.
|
||||
- Have multiple GitLab nodes, for example in a distributed or highly available architecture.
|
||||
|
||||
WARNING:
|
||||
To prevent users from being accidentally removed from the GitLab group, follow these instructions closely before
enabling Group Sync in GitLab.
|
||||
|
||||
|
|
@ -182,4 +189,4 @@ Because of a [known issue with Azure AD](https://support.esri.com/en/technical-a
|
|||
in the user's SAML assertion.
|
||||
|
||||
To work around this issue, allow more than 150 group IDs to be sent in SAML token using configuration steps in the
|
||||
[Azure AD documentation](https://support.esri.com/en/technical-article/000022190).
|
||||
[Azure AD documentation](https://support.esri.com/en/technical-article/000022190).
|
||||
|
|
|
|||
|
|
@ -204,7 +204,7 @@ The following table lists project permissions available for each role:
|
|||
| [Security dashboard](application_security/security_dashboard/index.md):<br>Use security dashboard | | | ✓ | ✓ | ✓ |
|
||||
| [Security dashboard](application_security/security_dashboard/index.md):<br>View vulnerability | | | ✓ | ✓ | ✓ |
|
||||
| [Security dashboard](application_security/security_dashboard/index.md):<br>View vulnerability findings in [dependency list](application_security/dependency_list/index.md) | | | ✓ | ✓ | ✓ |
|
||||
| [Tasks](tasks.md):<br>Create (*17*) | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| [Tasks](tasks.md):<br>Create (*17*) | | ✓ | ✓ | ✓ | ✓ |
|
||||
| [Tasks](tasks.md):<br>Edit | | ✓ | ✓ | ✓ | ✓ |
|
||||
| [Tasks](tasks.md):<br>Remove from issue | | ✓ | ✓ | ✓ | ✓ |
|
||||
| [Tasks](tasks.md):<br>Delete (*21*) | | | | | ✓ |
|
||||
|
|
|
|||
|
|
@ -49,7 +49,7 @@ the task opens in a full-page view.
|
|||
|
||||
Prerequisites:
|
||||
|
||||
- You must have at least the Guest role for the project, or the project must be public.
|
||||
- You must have at least the Reporter role for the project, or the project must be public.
|
||||
|
||||
To create a task:
|
||||
|
||||
|
|
|
|||
|
|
@ -70,6 +70,8 @@ module API
|
|||
optional :terraform_module_max_file_size, type: Integer,
|
||||
desc: 'Maximum Terraform Module package file size in bytes'
|
||||
optional :storage_size_limit, type: Integer, desc: 'Maximum storage size for the root namespace in megabytes'
|
||||
optional :pipeline_hierarchy_size, type: Integer,
|
||||
desc: "Maximum number of downstream pipelines in a pipeline's hierarchy tree"
|
||||
end
|
||||
put "application/plan_limits" do
|
||||
params = declared_params(include_missing: false)
|
||||
|
|
|
|||
|
|
@ -17,6 +17,7 @@ module API
|
|||
expose :maven_max_file_size, documentation: { type: 'integer', example: 3221225472 }
|
||||
expose :npm_max_file_size, documentation: { type: 'integer', example: 524288000 }
|
||||
expose :nuget_max_file_size, documentation: { type: 'integer', example: 524288000 }
|
||||
expose :pipeline_hierarchy_size, documentation: { type: 'integer', example: 1000 }
|
||||
expose :pypi_max_file_size, documentation: { type: 'integer', example: 3221225472 }
|
||||
expose :terraform_module_max_file_size, documentation: { type: 'integer', example: 1073741824 }
|
||||
expose :storage_size_limit, documentation: { type: 'integer', example: 15000 }
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ module Gitlab
|
|||
end
|
||||
|
||||
def validate!
|
||||
validate_execution_time!
|
||||
context.check_execution_time! if ::Feature.disabled?(:ci_refactoring_external_mapper, context.project)
|
||||
validate_location!
|
||||
validate_context! if valid?
|
||||
fetch_and_validate_content! if valid?
|
||||
|
|
@ -87,10 +87,6 @@ module Gitlab
|
|||
nil
|
||||
end
|
||||
|
||||
def validate_execution_time!
|
||||
context.check_execution_time!
|
||||
end
|
||||
|
||||
def validate_location!
|
||||
if invalid_location_type?
|
||||
errors.push("Included file `#{masked_location}` needs to be a string")
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ module Gitlab
|
|||
class Mapper
|
||||
include Gitlab::Utils::StrongMemoize
|
||||
|
||||
# Will be removed with FF ci_refactoring_external_mapper
|
||||
FILE_CLASSES = [
|
||||
External::File::Local,
|
||||
External::File::Project,
|
||||
|
|
@ -15,6 +16,7 @@ module Gitlab
|
|||
External::File::Artifact
|
||||
].freeze
|
||||
|
||||
# Will be removed with FF ci_refactoring_external_mapper
|
||||
FILE_SUBKEYS = FILE_CLASSES.map { |f| f.name.demodulize.downcase }.freeze
|
||||
|
||||
Error = Class.new(StandardError)
|
||||
|
|
@ -22,27 +24,43 @@ module Gitlab
|
|||
TooManyIncludesError = Class.new(Error)
|
||||
|
||||
def initialize(values, context)
|
||||
@locations = Array.wrap(values.fetch(:include, []))
|
||||
@locations = Array.wrap(values.fetch(:include, [])).compact
|
||||
@context = context
|
||||
end
|
||||
|
||||
def process
|
||||
return [] if locations.empty?
|
||||
return [] if @locations.empty?
|
||||
|
||||
logger.instrument(:config_mapper_process) do
|
||||
process_without_instrumentation
|
||||
context.logger.instrument(:config_mapper_process) do
|
||||
if ::Feature.enabled?(:ci_refactoring_external_mapper, context.project)
|
||||
process_without_instrumentation
|
||||
else
|
||||
legacy_process_without_instrumentation
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :locations, :context
|
||||
attr_reader :context
|
||||
|
||||
delegate :expandset, :logger, to: :context
|
||||
|
||||
def process_without_instrumentation
|
||||
locations
|
||||
.compact
|
||||
locations = Normalizer.new(context).process(@locations)
|
||||
locations = Filter.new(context).process(locations)
|
||||
locations = LocationExpander.new(context).process(locations)
|
||||
locations = VariablesExpander.new(context).process(locations)
|
||||
|
||||
files = Matcher.new(context).process(locations)
|
||||
Verifier.new(context).process(files)
|
||||
|
||||
files
|
||||
end
|
||||
|
||||
# This and the following methods will be removed with FF ci_refactoring_external_mapper
|
||||
def legacy_process_without_instrumentation
|
||||
@locations
|
||||
.map(&method(:normalize_location))
|
||||
.filter_map(&method(:verify_rules))
|
||||
.flat_map(&method(:expand_project_files))
|
||||
|
|
|
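Taken together, the new `process` implementation is a pipeline of the step classes introduced in the files below (all under `Gitlab::Ci::Config::External::Mapper`). A rough sketch of the flow, with the `context` and an example `include:` list assumed:

```ruby
# Hedged sketch of how include entries travel through the new mapper steps.
locations = ['configs/*.yml', { project: 'group/proj', file: ['a.yml', 'b.yml'], ref: '$CI_DEFAULT_BRANCH' }]

locations = Normalizer.new(context).process(locations)        # strings become { local: ... } or { remote: ... }
locations = Filter.new(context).process(locations)            # drops entries whose `rules:` do not pass
locations = LocationExpander.new(context).process(locations)  # expands wildcards and multi-file project includes
locations = VariablesExpander.new(context).process(locations) # expands CI variables in the remaining values

files = Matcher.new(context).process(locations)               # picks the matching External::File::* class per entry
Verifier.new(context).process(files)                          # fetches content, enforces limits, validates each file
```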
|||
|
|
@@ -0,0 +1,36 @@
# frozen_string_literal: true

module Gitlab
  module Ci
    class Config
      module External
        class Mapper
          # Base class for mapper classes
          class Base
            def initialize(context)
              @context = context
            end

            def process(*args)
              context.logger.instrument(mapper_instrumentation_key) do
                process_without_instrumentation(*args)
              end
            end

            private

            attr_reader :context

            def process_without_instrumentation
              raise NotImplementedError
            end

            def mapper_instrumentation_key
              "config_mapper_#{self.class.name.demodulize.downcase}".to_sym
            end
          end
        end
      end
    end
  end
end
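A hypothetical example of what a new step built on this base class could look like (the `Deduplicator` name and behaviour are invented for illustration, not part of this commit): only `#process_without_instrumentation` needs to be implemented, and instrumentation is recorded automatically under `:config_mapper_deduplicator`.

```ruby
# Hypothetical subclass — illustrative only.
module Gitlab
  module Ci
    class Config
      module External
        class Mapper
          # Removes duplicate include locations (made-up example step).
          class Deduplicator < Base
            private

            def process_without_instrumentation(locations)
              locations.uniq
            end
          end
        end
      end
    end
  end
end
```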
|
@ -0,0 +1,22 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Ci
|
||||
class Config
|
||||
module External
|
||||
class Mapper
|
||||
# Filters locations according to rules
|
||||
class Filter < Base
|
||||
private
|
||||
|
||||
def process_without_instrumentation(locations)
|
||||
locations.select do |location|
|
||||
Rules.new(location[:rules]).evaluate(context).pass?
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
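A hedged example of the filtering behaviour (variable values assumed): entries keep or lose their place based on how their `rules:` evaluate against the context's variables.

```ruby
# Sketch — assumes the context exposes CI_PROJECT_NAME=gitlab through its variables.
filter = Gitlab::Ci::Config::External::Mapper::Filter.new(context)

filter.process([
  { local: 'ci/common.yml', rules: [{ if: '$CI_PROJECT_NAME == "gitlab"' }] }, # kept
  { local: 'ci/other.yml',  rules: [{ if: '$CI_PROJECT_NAME == "other"' }] }   # dropped
])
# => [{ local: 'ci/common.yml', rules: [...] }]
```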
|
@ -0,0 +1,42 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Ci
|
||||
class Config
|
||||
module External
|
||||
class Mapper
|
||||
# Expands locations to include all files matching the pattern
|
||||
class LocationExpander < Base
|
||||
private
|
||||
|
||||
def process_without_instrumentation(locations)
|
||||
locations.flat_map do |location|
|
||||
if location[:project]
|
||||
expand_project_files(location)
|
||||
elsif location[:local]
|
||||
expand_wildcard_paths(location)
|
||||
else
|
||||
location
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def expand_project_files(location)
|
||||
Array.wrap(location[:file]).map do |file|
|
||||
location.merge(file: file)
|
||||
end
|
||||
end
|
||||
|
||||
def expand_wildcard_paths(location)
|
||||
return location unless location[:local].include?('*')
|
||||
|
||||
context.project.repository.search_files_by_wildcard_path(location[:local], context.sha).map do |path|
|
||||
{ local: path }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
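A sketch of the two expansions above (repository contents, project path, and the resulting file lists are assumed): a wildcard `local:` include becomes one location per matching file at the pipeline's SHA, and a `project:` include with a `file:` array becomes one location per listed file.

```ruby
# Hedged sketch — file names are made up for illustration.
expander = Gitlab::Ci::Config::External::Mapper::LocationExpander.new(context)

expander.process([{ local: 'configs/*.yml' }])
# => [{ local: 'configs/build.yml' }, { local: 'configs/deploy.yml' }]

expander.process([{ project: 'group/proj', file: ['a.yml', 'b.yml'] }])
# => [{ project: 'group/proj', file: 'a.yml' }, { project: 'group/proj', file: 'b.yml' }]
```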
|
@ -0,0 +1,49 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Ci
|
||||
class Config
|
||||
module External
|
||||
class Mapper
|
||||
# Matches the first file type that matches the given location
|
||||
class Matcher < Base
|
||||
FILE_CLASSES = [
|
||||
External::File::Local,
|
||||
External::File::Project,
|
||||
External::File::Remote,
|
||||
External::File::Template,
|
||||
External::File::Artifact
|
||||
].freeze
|
||||
|
||||
FILE_SUBKEYS = FILE_CLASSES.map { |f| f.name.demodulize.downcase }.freeze
|
||||
|
||||
private
|
||||
|
||||
def process_without_instrumentation(locations)
|
||||
locations.map do |location|
|
||||
matching = FILE_CLASSES.map do |file_class|
|
||||
file_class.new(location, context)
|
||||
end.select(&:matching?)
|
||||
|
||||
if matching.one?
|
||||
matching.first
|
||||
elsif matching.empty?
|
||||
raise Mapper::AmbigiousSpecificationError,
|
||||
"`#{masked_location(location.to_json)}` does not have a valid subkey for include. " \
|
||||
"Valid subkeys are: `#{FILE_SUBKEYS.join('`, `')}`"
|
||||
else
|
||||
raise Mapper::AmbigiousSpecificationError,
|
||||
"Each include must use only one of: `#{FILE_SUBKEYS.join('`, `')}`"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def masked_location(location)
|
||||
context.mask_variables_from(location)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
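Roughly, the matching works like this (the context and output are simplified): exactly one `External::File::*` class must consider itself responsible for a location, otherwise the include is rejected as ambiguous or unrecognised.

```ruby
# Hedged sketch of the behaviour described above.
matcher = Gitlab::Ci::Config::External::Mapper::Matcher.new(context)

matcher.process([{ local: 'ci/common.yml' }])
# => [#<Gitlab::Ci::Config::External::File::Local ...>]

matcher.process([{ local: 'ci/common.yml', remote: 'https://example.com/x.yml' }])
# raises Mapper::AmbigiousSpecificationError: "Each include must use only one of: ..."
```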
|
@@ -0,0 +1,46 @@
# frozen_string_literal: true

module Gitlab
  module Ci
    class Config
      module External
        class Mapper
          # Converts locations to canonical form (local:/remote:) if String
          class Normalizer < Base
            def initialize(context)
              super

              @variables_expander = VariablesExpander.new(context)
            end

            private

            attr_reader :variables_expander

            def process_without_instrumentation(locations)
              locations.map do |location|
                if location.is_a?(String)
                  # We need to expand before normalizing because the information of
                  # whether it's a remote or local path may be hidden inside the variable.
                  location = variables_expander.expand(location)

                  normalize_location_string(location)
                else
                  location.deep_symbolize_keys
                end
              end
            end

            def normalize_location_string(location)
              if ::Gitlab::UrlSanitizer.valid?(location)
                { remote: location }
              else
                { local: location }
              end
            end
          end
        end
      end
    end
  end
end

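A usage sketch (illustrative only; it mirrors the Normalizer spec added later in this commit, which assumes VARIABLE1 expands to "config" in the given context):

normalizer = Gitlab::Ci::Config::External::Mapper::Normalizer.new(context)
normalizer.process(['https://example.com/.gitlab-ci.yml', '$VARIABLE1/.gitlab-ci.yml'])
# => [{ remote: 'https://example.com/.gitlab-ci.yml' }, { local: 'config/.gitlab-ci.yml' }]
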
@@ -0,0 +1,49 @@
# frozen_string_literal: true

module Gitlab
  module Ci
    class Config
      module External
        class Mapper
          # Handles variable expansion
          class VariablesExpander < Base
            def expand(data)
              if data.is_a?(String)
                expand_variable(data)
              else
                transform_and_expand_variable(data)
              end
            end

            private

            def process_without_instrumentation(locations)
              locations.map { |location| expand(location) }
            end

            def transform_and_expand_variable(data)
              data.transform_values do |values|
                case values
                when Array
                  values.map { |value| expand_variable(value.to_s) }
                when String
                  expand_variable(values)
                else
                  values
                end
              end
            end

            def expand_variable(data)
              ExpandVariables.expand(data, -> { variables })
            end

            def variables
              @variables ||= context.variables_hash
            end
          end
        end
      end
    end
  end
end

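A usage sketch (illustrative only; it mirrors the VariablesExpander spec added later in this commit, which assumes VARIABLE1 expands to "hello"):

expander = Gitlab::Ci::Config::External::Mapper::VariablesExpander.new(context)
expander.expand('$VARIABLE1.gitlab-ci.yml')                 # => "hello.gitlab-ci.yml"
expander.process([{ local: '$VARIABLE1.gitlab-ci.yml' }])   # => [{ local: "hello.gitlab-ci.yml" }]
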
@@ -0,0 +1,37 @@
# frozen_string_literal: true

module Gitlab
  module Ci
    class Config
      module External
        class Mapper
          # Fetches file contents and verifies them
          class Verifier < Base
            private

            def process_without_instrumentation(files)
              files.select do |file|
                verify_max_includes!
                verify_execution_time!

                file.validate!

                context.expandset.add(file)
              end
            end

            def verify_max_includes!
              return if context.expandset.count < context.max_includes

              raise Mapper::TooManyIncludesError, "Maximum of #{context.max_includes} nested includes are allowed!"
            end

            def verify_execution_time!
              context.check_execution_time!
            end
          end
        end
      end
    end
  end
end

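A usage sketch (illustrative only; the file objects would normally come from the earlier Matcher step, as in the Verifier spec added later in this commit):

verifier = Gitlab::Ci::Config::External::Mapper::Verifier.new(context)
verifier.process(files)
# Each file is validated and added to context.expandset; exceeding
# context.max_includes raises Mapper::TooManyIncludesError, and long-running
# expansion is cut off by context.check_execution_time!.
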
@@ -238,6 +238,8 @@ semgrep-sast:
- '**/*.java'
- '**/*.cs'
- '**/*.html'
- '**/*.scala'
- '**/*.sc'

sobelow-sast:
  extends: .sast-analyzer

@@ -299,6 +299,8 @@ semgrep-sast:
- '**/*.java'
- '**/*.html'
- '**/*.cs'
- '**/*.scala'
- '**/*.sc'
- if: $CI_OPEN_MERGE_REQUESTS # Don't add it to a *branch* pipeline if it's already in a merge request pipeline.
  when: never
- if: $CI_COMMIT_BRANCH # If there's no open merge request, add it to a *branch* pipeline instead.

@@ -313,6 +315,8 @@ semgrep-sast:
- '**/*.java'
- '**/*.html'
- '**/*.cs'
- '**/*.scala'
- '**/*.sc'

sobelow-sast:
  extends: .sast-analyzer

@@ -68,6 +68,7 @@ module Gitlab
      push_frontend_feature_flag(:vscode_web_ide, current_user)
      push_frontend_feature_flag(:integration_slack_app_notifications)
      push_frontend_feature_flag(:vue_group_select)
      push_frontend_feature_flag(:new_fonts, current_user)
    end

    # Exposes the state of a feature flag to the frontend code.

@@ -154,7 +154,9 @@ namespace :gitlab do

    desc 'GitLab | Assets | Check that scss mixins do not introduce any side effects'
    task :check_page_bundle_mixins_css_for_sideeffects do
      system('./scripts/frontend/check_page_bundle_mixins_css_for_sideeffects.js')
      unless system('./scripts/frontend/check_page_bundle_mixins_css_for_sideeffects.js')
        abort 'Error: At least one CSS change introduces an unwanted side effect'.color(:red)
      end
    end
  end
end

@@ -2917,6 +2917,9 @@ msgstr ""
msgid "AdminSettings|Maximum number of custom domains per project"
msgstr ""

msgid "AdminSettings|Maximum number of downstream pipelines in a pipeline's hierarchy tree"
msgstr ""

msgid "AdminSettings|Maximum number of jobs in a single pipeline"
msgstr ""

@@ -59,7 +59,7 @@
    "@gitlab/svgs": "3.13.0",
    "@gitlab/ui": "52.3.0",
    "@gitlab/visual-review-tools": "1.7.3",
    "@gitlab/web-ide": "0.0.1-dev-20221212205235",
    "@gitlab/web-ide": "0.0.1-dev-20221214231216",
    "@rails/actioncable": "6.1.4-7",
    "@rails/ujs": "6.1.4-7",
    "@sourcegraph/code-host-integration": "0.0.84",

@@ -1,28 +0,0 @@
#!/usr/bin/env ruby
# frozen_string_literal: true

require_relative '../config/bundler_setup'
require 'rubocop'

MINIMUM_MAX_FILES_IN_CACHE_MARGIN = 1.05
RECOMMENDED_MAX_FILES_IN_CACHE_MARGIN = 1.25
RUBOCOP_LIST_TARGET_FILES_COMMAND = 'bundle exec rubocop --list-target-files | wc -l'

RuboCopMaxFilesInCacheIsTooSmall = Class.new(StandardError)

rubocop_target_files_count = `#{RUBOCOP_LIST_TARGET_FILES_COMMAND}`.strip.to_i

raise Error, "#{RUBOCOP_LIST_TARGET_FILES_COMMAND} failed with status #{$?}!" if rubocop_target_files_count == 0

rubocop_target_files_count = rubocop_target_files_count.to_i
rubocop_current_max_files_in_cache = RuboCop::ConfigLoader.load_yaml_configuration(File.expand_path('../.rubocop.yml', __dir__)).dig('AllCops', 'MaxFilesInCache').to_i
minimum_max_files_in_cache = (rubocop_target_files_count * MINIMUM_MAX_FILES_IN_CACHE_MARGIN).round(-3)

# We want AllCops.MaxFilesInCache to be at least 5% above the actual files count at any time to give us enough time to increase it accordingly
if rubocop_current_max_files_in_cache <= minimum_max_files_in_cache
  recommended_max_files_in_cache = (rubocop_target_files_count * RECOMMENDED_MAX_FILES_IN_CACHE_MARGIN).round(-3)
  raise RuboCopMaxFilesInCacheIsTooSmall, "Current count of RuboCop target file is #{rubocop_target_files_count} but AllCops.MaxFilesInCache is set to #{rubocop_current_max_files_in_cache}. We recommend to increase it to #{recommended_max_files_in_cache}."
else
  puts "Current count of RuboCop target file is #{rubocop_target_files_count} and AllCops.MaxFilesInCache is set to #{rubocop_current_max_files_in_cache}. All good."
  exit(0)
end

@@ -50,7 +50,6 @@ class StaticAnalysis
      Task.new(%w[bin/rake gettext:lint], 105),
      Task.new(%W[scripts/license-check.sh #{project_path}], 200),
      Task.new(%w[bin/rake lint:static_verification], 40),
      Task.new(%w[scripts/rubocop-max-files-in-cache-check], 25),
      Task.new(%w[bin/rake config_lint], 10),
      Task.new(%w[bin/rake gitlab:sidekiq:all_queues_yml:check], 15),
      (Gitlab.ee? ? Task.new(%w[bin/rake gitlab:sidekiq:sidekiq_queues_yml:check], 11) : nil),

@ -29,6 +29,26 @@ RSpec.describe Admin::PlanLimitsController do
|
|||
end
|
||||
end
|
||||
|
||||
context "when pipeline_hierarchy_size is passed in params" do
|
||||
let(:params) do
|
||||
{
|
||||
plan_limits: {
|
||||
plan_id: plan.id,
|
||||
pipeline_hierarchy_size: 200, id: plan_limits.id
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
it "updates the pipeline_hierarchy_size plan limit" do
|
||||
sign_in(create(:admin))
|
||||
|
||||
post :create, params: params
|
||||
|
||||
expect(response).to redirect_to(general_admin_application_settings_path)
|
||||
expect(plan_limits.reload.pipeline_hierarchy_size).to eq(params[:plan_limits][:pipeline_hierarchy_size])
|
||||
end
|
||||
end
|
||||
|
||||
context 'without admin access' do
|
||||
let(:file_size) { 1.megabytes }
|
||||
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ FactoryBot.define do
|
|||
|
||||
trait(:retained) do
|
||||
processable factory: :ci_build
|
||||
partition_id { processable.partition_id }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import * as commonUtils from '~/lib/utils/common_utils';
|
||||
import setWindowLocation from 'helpers/set_window_location_helper';
|
||||
|
||||
describe('common_utils', () => {
|
||||
describe('getPagePath', () => {
|
||||
|
|
@ -1069,4 +1070,35 @@ describe('common_utils', () => {
|
|||
expect(result).toEqual([{ hello: '' }, { helloWorld: '' }]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('useNewFonts', () => {
|
||||
let beforeGon;
|
||||
const beforeLocation = window.location.href;
|
||||
|
||||
beforeEach(() => {
|
||||
window.gon = window.gon || {};
|
||||
beforeGon = { ...window.gon };
|
||||
});
|
||||
|
||||
describe.each`
|
||||
featureFlag | queryParameter | fontEnabled
|
||||
${false} | ${false} | ${false}
|
||||
${true} | ${false} | ${true}
|
||||
${false} | ${true} | ${true}
|
||||
`('new font', ({ featureFlag, queryParameter, fontEnabled }) => {
|
||||
it(`will ${fontEnabled ? '' : 'NOT '}be applied when feature flag is ${
|
||||
featureFlag ? '' : 'NOT '
|
||||
}set and query parameter is ${queryParameter ? '' : 'NOT '}present`, () => {
|
||||
const search = queryParameter ? `?new_fonts` : '';
|
||||
setWindowLocation(search);
|
||||
window.gon = { features: { newFonts: featureFlag } };
|
||||
expect(commonUtils.useNewFonts()).toBe(fontEnabled);
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
window.gon = beforeGon;
|
||||
setWindowLocation(beforeLocation);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -121,6 +121,7 @@ describe('WorkItemDetail component', () => {
|
|||
},
|
||||
hasIssueWeightsFeature: true,
|
||||
hasIterationsFeature: true,
|
||||
hasOkrsFeature: true,
|
||||
projectNamespace: 'namespace',
|
||||
fullPath: 'group/project',
|
||||
},
|
||||
|
|
|
|||
|
|
@ -254,6 +254,7 @@ export const workItemResponseFactory = ({
|
|||
datesWidgetPresent = true,
|
||||
labelsWidgetPresent = true,
|
||||
weightWidgetPresent = true,
|
||||
progressWidgetPresent = true,
|
||||
milestoneWidgetPresent = true,
|
||||
iterationWidgetPresent = true,
|
||||
confidential = false,
|
||||
|
|
@ -347,6 +348,13 @@ export const workItemResponseFactory = ({
|
|||
},
|
||||
}
|
||||
: { type: 'MOCK TYPE' },
|
||||
progressWidgetPresent
|
||||
? {
|
||||
__typename: 'WorkItemWidgetProgress',
|
||||
type: 'PROGRESS',
|
||||
progress: 0,
|
||||
}
|
||||
: { type: 'MOCK TYPE' },
|
||||
milestoneWidgetPresent
|
||||
? {
|
||||
__typename: 'WorkItemWidgetMilestone',
|
||||
|
|
|
|||
|
|
@ -66,6 +66,7 @@ describe('Work items router', () => {
|
|||
issuesListPath: 'full-path/-/issues',
|
||||
hasIssueWeightsFeature: false,
|
||||
hasIterationsFeature: false,
|
||||
hasOkrsFeature: false,
|
||||
},
|
||||
stubs: {
|
||||
WorkItemWeight: true,
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Resolvers::NamespaceProjectsResolver do
|
||||
RSpec.describe Resolvers::NamespaceProjectsResolver, feature_category: :subgroups do
|
||||
include GraphqlHelpers
|
||||
|
||||
let(:current_user) { create(:user) }
|
||||
|
|
|
|||
|
|
@ -25,7 +25,8 @@ RSpec.describe API::Entities::PlanLimit do
|
|||
:nuget_max_file_size,
|
||||
:pypi_max_file_size,
|
||||
:terraform_module_max_file_size,
|
||||
:storage_size_limit
|
||||
:storage_size_limit,
|
||||
:pipeline_hierarchy_size
|
||||
)
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
require 'spec_helper'
|
||||
require 'ffaker'
|
||||
|
||||
RSpec.describe Banzai::Filter::CommitTrailersFilter do
|
||||
RSpec.describe Banzai::Filter::CommitTrailersFilter, feature_category: :source_code_management do
|
||||
include FilterSpecHelper
|
||||
include CommitTrailersSpecHelper
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,40 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Ci::Config::External::Mapper::Base, feature_category: :pipeline_authoring do
|
||||
let(:test_class) do
|
||||
Class.new(described_class) do
|
||||
def self.name
|
||||
'TestClass'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
let(:context) { Gitlab::Ci::Config::External::Context.new }
|
||||
let(:mapper) { test_class.new(context) }
|
||||
|
||||
describe '#process' do
|
||||
subject(:process) { mapper.process }
|
||||
|
||||
context 'when the method is not implemented' do
|
||||
it 'raises NotImplementedError' do
|
||||
expect { process }.to raise_error(NotImplementedError)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the method is implemented' do
|
||||
before do
|
||||
test_class.class_eval do
|
||||
def process_without_instrumentation
|
||||
'test'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
it 'calls the method' do
|
||||
expect(process).to eq('test')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Ci::Config::External::Mapper::Filter, feature_category: :pipeline_authoring do
|
||||
let_it_be(:variables) do
|
||||
Gitlab::Ci::Variables::Collection.new.tap do |variables|
|
||||
variables.append(key: 'VARIABLE1', value: 'hello')
|
||||
end
|
||||
end
|
||||
|
||||
let_it_be(:context) do
|
||||
Gitlab::Ci::Config::External::Context.new(variables: variables)
|
||||
end
|
||||
|
||||
subject(:filter) { described_class.new(context) }
|
||||
|
||||
describe '#process' do
|
||||
let(:locations) do
|
||||
[{ local: 'config/.gitlab-ci.yml', rules: [{ if: '$VARIABLE1' }] },
|
||||
{ remote: 'https://example.com/.gitlab-ci.yml', rules: [{ if: '$VARIABLE2' }] }]
|
||||
end
|
||||
|
||||
subject(:process) { filter.process(locations) }
|
||||
|
||||
it 'filters locations according to rules' do
|
||||
is_expected.to eq(
|
||||
[{ local: 'config/.gitlab-ci.yml', rules: [{ if: '$VARIABLE1' }] }]
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Ci::Config::External::Mapper::LocationExpander, feature_category: :pipeline_authoring do
|
||||
include RepoHelpers
|
||||
|
||||
let_it_be(:project) { create(:project, :repository) }
|
||||
let_it_be(:user) { project.owner }
|
||||
|
||||
let(:sha) { project.commit.sha }
|
||||
|
||||
let(:context) do
|
||||
Gitlab::Ci::Config::External::Context.new(project: project, user: user, sha: sha)
|
||||
end
|
||||
|
||||
subject(:location_expander) { described_class.new(context) }
|
||||
|
||||
describe '#process' do
|
||||
subject(:process) { location_expander.process(locations) }
|
||||
|
||||
context 'when there are project files' do
|
||||
let(:locations) do
|
||||
[{ project: 'gitlab-org/gitlab-1', file: ['builds.yml', 'tests.yml'] },
|
||||
{ project: 'gitlab-org/gitlab-2', file: 'deploy.yml' }]
|
||||
end
|
||||
|
||||
it 'returns expanded locations' do
|
||||
is_expected.to eq(
|
||||
[{ project: 'gitlab-org/gitlab-1', file: 'builds.yml' },
|
||||
{ project: 'gitlab-org/gitlab-1', file: 'tests.yml' },
|
||||
{ project: 'gitlab-org/gitlab-2', file: 'deploy.yml' }]
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there are local files' do
|
||||
let(:locations) do
|
||||
[{ local: 'builds/*.yml' },
|
||||
{ local: 'tests.yml' }]
|
||||
end
|
||||
|
||||
let(:project_files) do
|
||||
{ 'builds/1.yml' => 'a', 'builds/2.yml' => 'b', 'tests.yml' => 'c' }
|
||||
end
|
||||
|
||||
around do |example|
|
||||
create_and_delete_files(project, project_files) do
|
||||
example.run
|
||||
end
|
||||
end
|
||||
|
||||
it 'returns expanded locations' do
|
||||
is_expected.to eq(
|
||||
[{ local: 'builds/1.yml' },
|
||||
{ local: 'builds/2.yml' },
|
||||
{ local: 'tests.yml' }]
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there are other files' do
|
||||
let(:locations) do
|
||||
[{ remote: 'https://gitlab.com/gitlab-org/gitlab-ce/raw/master/.gitlab-ci.yml' }]
|
||||
end
|
||||
|
||||
it 'returns the same location' do
|
||||
is_expected.to eq(locations)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,74 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Ci::Config::External::Mapper::Matcher, feature_category: :pipeline_authoring do
|
||||
let_it_be(:variables) do
|
||||
Gitlab::Ci::Variables::Collection.new.tap do |variables|
|
||||
variables.append(key: 'A_MASKED_VAR', value: 'this-is-secret', masked: true)
|
||||
end
|
||||
end
|
||||
|
||||
let_it_be(:context) do
|
||||
Gitlab::Ci::Config::External::Context.new(variables: variables)
|
||||
end
|
||||
|
||||
subject(:matcher) { described_class.new(context) }
|
||||
|
||||
describe '#process' do
|
||||
let(:locations) do
|
||||
[{ local: 'file.yml' },
|
||||
{ file: 'file.yml', project: 'namespace/project' },
|
||||
{ remote: 'https://example.com/.gitlab-ci.yml' },
|
||||
{ template: 'file.yml' },
|
||||
{ artifact: 'generated.yml', job: 'test' }]
|
||||
end
|
||||
|
||||
subject(:process) { matcher.process(locations) }
|
||||
|
||||
it 'returns an array of file objects' do
|
||||
is_expected.to contain_exactly(
|
||||
an_instance_of(Gitlab::Ci::Config::External::File::Local),
|
||||
an_instance_of(Gitlab::Ci::Config::External::File::Project),
|
||||
an_instance_of(Gitlab::Ci::Config::External::File::Remote),
|
||||
an_instance_of(Gitlab::Ci::Config::External::File::Template),
|
||||
an_instance_of(Gitlab::Ci::Config::External::File::Artifact)
|
||||
)
|
||||
end
|
||||
|
||||
context 'when a location is not valid' do
|
||||
let(:locations) { [{ invalid: 'file.yml' }] }
|
||||
|
||||
it 'raises an error' do
|
||||
expect { process }.to raise_error(
|
||||
Gitlab::Ci::Config::External::Mapper::AmbigiousSpecificationError,
|
||||
'`{"invalid":"file.yml"}` does not have a valid subkey for include. ' \
|
||||
'Valid subkeys are: `local`, `project`, `remote`, `template`, `artifact`'
|
||||
)
|
||||
end
|
||||
|
||||
context 'when the invalid location includes a masked variable' do
|
||||
let(:locations) { [{ invalid: 'this-is-secret.yml' }] }
|
||||
|
||||
it 'raises an error with a masked sentence' do
|
||||
expect { process }.to raise_error(
|
||||
Gitlab::Ci::Config::External::Mapper::AmbigiousSpecificationError,
|
||||
'`{"invalid":"xxxxxxxxxxxxxx.yml"}` does not have a valid subkey for include. ' \
|
||||
'Valid subkeys are: `local`, `project`, `remote`, `template`, `artifact`'
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when a location is ambiguous' do
|
||||
let(:locations) { [{ local: 'file.yml', remote: 'https://example.com/.gitlab-ci.yml' }] }
|
||||
|
||||
it 'raises an error' do
|
||||
expect { process }.to raise_error(
|
||||
Gitlab::Ci::Config::External::Mapper::AmbigiousSpecificationError,
|
||||
"Each include must use only one of: `local`, `project`, `remote`, `template`, `artifact`"
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,44 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Ci::Config::External::Mapper::Normalizer, feature_category: :pipeline_authoring do
|
||||
let_it_be(:variables) do
|
||||
Gitlab::Ci::Variables::Collection.new.tap do |variables|
|
||||
variables.append(key: 'VARIABLE1', value: 'config')
|
||||
variables.append(key: 'VARIABLE2', value: 'https://example.com')
|
||||
end
|
||||
end
|
||||
|
||||
let_it_be(:context) do
|
||||
Gitlab::Ci::Config::External::Context.new(variables: variables)
|
||||
end
|
||||
|
||||
subject(:normalizer) { described_class.new(context) }
|
||||
|
||||
describe '#process' do
|
||||
let(:locations) do
|
||||
['https://example.com/.gitlab-ci.yml',
|
||||
'config/.gitlab-ci.yml',
|
||||
{ local: 'config/.gitlab-ci.yml' },
|
||||
{ remote: 'https://example.com/.gitlab-ci.yml' },
|
||||
{ template: 'Template.gitlab-ci.yml' },
|
||||
'$VARIABLE1/.gitlab-ci.yml',
|
||||
'$VARIABLE2/.gitlab-ci.yml']
|
||||
end
|
||||
|
||||
subject(:process) { normalizer.process(locations) }
|
||||
|
||||
it 'converts locations to canonical form' do
|
||||
is_expected.to eq(
|
||||
[{ remote: 'https://example.com/.gitlab-ci.yml' },
|
||||
{ local: 'config/.gitlab-ci.yml' },
|
||||
{ local: 'config/.gitlab-ci.yml' },
|
||||
{ remote: 'https://example.com/.gitlab-ci.yml' },
|
||||
{ template: 'Template.gitlab-ci.yml' },
|
||||
{ local: 'config/.gitlab-ci.yml' },
|
||||
{ remote: 'https://example.com/.gitlab-ci.yml' }]
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Ci::Config::External::Mapper::VariablesExpander, feature_category: :pipeline_authoring do
|
||||
let_it_be(:variables) do
|
||||
Gitlab::Ci::Variables::Collection.new.tap do |variables|
|
||||
variables.append(key: 'VARIABLE1', value: 'hello')
|
||||
end
|
||||
end
|
||||
|
||||
let_it_be(:context) do
|
||||
Gitlab::Ci::Config::External::Context.new(variables: variables)
|
||||
end
|
||||
|
||||
subject(:variables_expander) { described_class.new(context) }
|
||||
|
||||
describe '#process' do
|
||||
subject(:process) { variables_expander.process(locations) }
|
||||
|
||||
context 'when locations are strings' do
|
||||
let(:locations) { ['$VARIABLE1.gitlab-ci.yml'] }
|
||||
|
||||
it 'expands variables' do
|
||||
is_expected.to eq(['hello.gitlab-ci.yml'])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when locations are hashes' do
|
||||
let(:locations) { [{ local: '$VARIABLE1.gitlab-ci.yml' }] }
|
||||
|
||||
it 'expands variables' do
|
||||
is_expected.to eq([{ local: 'hello.gitlab-ci.yml' }])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when locations are arrays' do
|
||||
let(:locations) { [{ local: ['$VARIABLE1.gitlab-ci.yml'] }] }
|
||||
|
||||
it 'expands variables' do
|
||||
is_expected.to eq([{ local: ['hello.gitlab-ci.yml'] }])
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,137 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Ci::Config::External::Mapper::Verifier, feature_category: :pipeline_authoring do
|
||||
include RepoHelpers
|
||||
include StubRequests
|
||||
|
||||
let_it_be(:project) { create(:project, :repository) }
|
||||
let_it_be(:user) { project.owner }
|
||||
|
||||
let(:context) do
|
||||
Gitlab::Ci::Config::External::Context.new(project: project, user: user, sha: project.commit.id)
|
||||
end
|
||||
|
||||
let(:remote_url) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
|
||||
|
||||
let(:project_files) do
|
||||
{
|
||||
'myfolder/file1.yml' => <<~YAML,
|
||||
my_build:
|
||||
script: echo Hello World
|
||||
YAML
|
||||
'myfolder/file2.yml' => <<~YAML,
|
||||
my_test:
|
||||
script: echo Hello World
|
||||
YAML
|
||||
'nested_configs.yml' => <<~YAML
|
||||
include:
|
||||
- local: myfolder/file1.yml
|
||||
- local: myfolder/file2.yml
|
||||
- remote: #{remote_url}
|
||||
YAML
|
||||
}
|
||||
end
|
||||
|
||||
around(:all) do |example|
|
||||
create_and_delete_files(project, project_files) do
|
||||
example.run
|
||||
end
|
||||
end
|
||||
|
||||
before do
|
||||
stub_full_request(remote_url).to_return(
|
||||
body: <<~YAML
|
||||
remote_test:
|
||||
script: echo Hello World
|
||||
YAML
|
||||
)
|
||||
end
|
||||
|
||||
subject(:verifier) { described_class.new(context) }
|
||||
|
||||
describe '#process' do
|
||||
subject(:process) { verifier.process(files) }
|
||||
|
||||
context 'when files are local' do
|
||||
let(:files) do
|
||||
[
|
||||
Gitlab::Ci::Config::External::File::Local.new({ local: 'myfolder/file1.yml' }, context),
|
||||
Gitlab::Ci::Config::External::File::Local.new({ local: 'myfolder/file2.yml' }, context)
|
||||
]
|
||||
end
|
||||
|
||||
it 'returns an array of file objects' do
|
||||
expect(process.map(&:location)).to contain_exactly('myfolder/file1.yml', 'myfolder/file2.yml')
|
||||
end
|
||||
|
||||
it 'adds files to the expandset' do
|
||||
expect { process }.to change { context.expandset.count }.by(2)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when a file includes other files' do
|
||||
let(:files) do
|
||||
[
|
||||
Gitlab::Ci::Config::External::File::Local.new({ local: 'nested_configs.yml' }, context)
|
||||
]
|
||||
end
|
||||
|
||||
it 'returns an array of file objects with combined hash' do
|
||||
expect(process.map(&:to_hash)).to contain_exactly(
|
||||
{ my_build: { script: 'echo Hello World' },
|
||||
my_test: { script: 'echo Hello World' },
|
||||
remote_test: { script: 'echo Hello World' } }
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there is an invalid file' do
|
||||
let(:files) do
|
||||
[
|
||||
Gitlab::Ci::Config::External::File::Local.new({ local: 'myfolder/invalid.yml' }, context)
|
||||
]
|
||||
end
|
||||
|
||||
it 'adds an error to the file' do
|
||||
expect(process.first.errors).to include("Local file `myfolder/invalid.yml` does not exist!")
|
||||
end
|
||||
end
|
||||
|
||||
context 'when max_includes is exceeded' do
|
||||
context 'when files are nested' do
|
||||
let(:files) do
|
||||
[
|
||||
Gitlab::Ci::Config::External::File::Local.new({ local: 'nested_configs.yml' }, context)
|
||||
]
|
||||
end
|
||||
|
||||
before do
|
||||
allow(context).to receive(:max_includes).and_return(1)
|
||||
end
|
||||
|
||||
it 'raises Processor::IncludeError' do
|
||||
expect { process }.to raise_error(Gitlab::Ci::Config::External::Processor::IncludeError)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when files are not nested' do
|
||||
let(:files) do
|
||||
[
|
||||
Gitlab::Ci::Config::External::File::Local.new({ local: 'myfolder/file1.yml' }, context),
|
||||
Gitlab::Ci::Config::External::File::Local.new({ local: 'myfolder/file2.yml' }, context)
|
||||
]
|
||||
end
|
||||
|
||||
before do
|
||||
allow(context).to receive(:max_includes).and_return(1)
|
||||
end
|
||||
|
||||
it 'raises Mapper::TooManyIncludesError' do
|
||||
expect { process }.to raise_error(Gitlab::Ci::Config::External::Mapper::TooManyIncludesError)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -2,7 +2,8 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline_authoring do
|
||||
# This will be removed with the FF ci_refactoring_external_mapper and moved below.
|
||||
RSpec.shared_context 'gitlab_ci_config_external_mapper' do
|
||||
include StubRequests
|
||||
include RepoHelpers
|
||||
|
||||
|
|
@ -39,7 +40,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline
|
|||
it 'propagates the pipeline logger' do
|
||||
process
|
||||
|
||||
fetch_content_log_count = mapper
|
||||
fetch_content_log_count = context
|
||||
.logger
|
||||
.observations_hash
|
||||
.dig(key, 'count')
|
||||
|
|
@ -232,7 +233,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline
|
|||
|
||||
it 'has expandset with one' do
|
||||
process
|
||||
expect(mapper.expandset.size).to eq(1)
|
||||
expect(context.expandset.size).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -457,8 +458,20 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline
|
|||
|
||||
it 'has expandset with two' do
|
||||
process
|
||||
expect(mapper.expandset.size).to eq(2)
|
||||
expect(context.expandset.size).to eq(2)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline_authoring do
|
||||
it_behaves_like 'gitlab_ci_config_external_mapper'
|
||||
|
||||
context 'when the FF ci_refactoring_external_mapper is disabled' do
|
||||
before do
|
||||
stub_feature_flags(ci_refactoring_external_mapper: false)
|
||||
end
|
||||
|
||||
it_behaves_like 'gitlab_ci_config_external_mapper'
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Database::Reindexing do
|
||||
RSpec.describe Gitlab::Database::Reindexing, feature_category: :database do
|
||||
include ExclusiveLeaseHelpers
|
||||
include Database::DatabaseHelpers
|
||||
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Database::Transaction::Context do
|
||||
RSpec.describe Gitlab::Database::Transaction::Context, feature_category: :database do
|
||||
subject { described_class.new }
|
||||
|
||||
let(:data) { subject.context }
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ require 'spec_helper'
|
|||
require 'json'
|
||||
require 'tempfile'
|
||||
|
||||
RSpec.describe Gitlab::Git::RuggedImpl::UseRugged do
|
||||
RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, feature_category: :gitaly do
|
||||
let(:project) { create(:project, :repository) }
|
||||
let(:repository) { project.repository }
|
||||
let(:feature_flag_name) { wrapper.rugged_feature_keys.first }
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::ImportExport::Group::TreeRestorer do
|
||||
RSpec.describe Gitlab::ImportExport::Group::TreeRestorer, feature_category: :subgroups do
|
||||
include ImportExport::CommonUtil
|
||||
|
||||
shared_examples 'group restoration' do
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Metrics::Exporter::BaseExporter do
|
||||
RSpec.describe Gitlab::Metrics::Exporter::BaseExporter, feature_category: :application_performance do
|
||||
let(:settings) { double('settings') }
|
||||
let(:log_enabled) { false }
|
||||
let(:exporter) { described_class.new(settings, log_enabled: log_enabled, log_file: 'test_exporter.log') }
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require_relative '../../../lib/gitlab/process_supervisor'
|
||||
|
||||
RSpec.describe Gitlab::ProcessSupervisor do
|
||||
RSpec.describe Gitlab::ProcessSupervisor, feature_category: :application_performance do
|
||||
let(:health_check_interval_seconds) { 0.1 }
|
||||
let(:check_terminate_interval_seconds) { 1 }
|
||||
let(:forwarded_signals) { [] }
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Redis::MultiStore do
|
||||
RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
||||
let_it_be(:redis_store_class) do
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::SlashCommands::Deploy do
|
||||
RSpec.describe Gitlab::SlashCommands::Deploy, feature_category: :team_planning do
|
||||
describe '#execute' do
|
||||
let(:project) { create(:project, :repository) }
|
||||
let(:user) { create(:user) }
|
||||
|
|
|
|||
|
|
@ -33,7 +33,13 @@ RSpec.describe Ci::ResourceGroup do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#assign_resource_to' do
|
||||
describe '#assign_resource_to', :ci_partitionable do
|
||||
include Ci::PartitioningHelpers
|
||||
|
||||
before do
|
||||
stub_current_partition_id
|
||||
end
|
||||
|
||||
subject { resource_group.assign_resource_to(build) }
|
||||
|
||||
let(:build) { create(:ci_build) }
|
||||
|
|
@ -41,10 +47,12 @@ RSpec.describe Ci::ResourceGroup do
|
|||
|
||||
it 'retains resource for the processable' do
|
||||
expect(resource_group.resources.first.processable).to be_nil
|
||||
expect(resource_group.resources.first.partition_id).to be_nil
|
||||
|
||||
is_expected.to eq(true)
|
||||
|
||||
expect(resource_group.resources.first.processable).to eq(build)
|
||||
expect(resource_group.resources.first.partition_id).to eq(build.partition_id)
|
||||
end
|
||||
|
||||
context 'when there are no free resources' do
|
||||
|
|
@ -66,7 +74,13 @@ RSpec.describe Ci::ResourceGroup do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#release_resource_from' do
|
||||
describe '#release_resource_from', :ci_partitionable do
|
||||
include Ci::PartitioningHelpers
|
||||
|
||||
before do
|
||||
stub_current_partition_id
|
||||
end
|
||||
|
||||
subject { resource_group.release_resource_from(build) }
|
||||
|
||||
let(:build) { create(:ci_build) }
|
||||
|
|
@ -79,10 +93,12 @@ RSpec.describe Ci::ResourceGroup do
|
|||
|
||||
it 'releases resource from the build' do
|
||||
expect(resource_group.resources.first.processable).to eq(build)
|
||||
expect(resource_group.resources.first.partition_id).to eq(build.partition_id)
|
||||
|
||||
is_expected.to eq(true)
|
||||
|
||||
expect(resource_group.resources.first.processable).to be_nil
|
||||
expect(resource_group.resources.first.partition_id).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe JiraImportState do
|
||||
RSpec.describe JiraImportState, feature_category: :integrations do
|
||||
describe "associations" do
|
||||
it { is_expected.to belong_to(:project) }
|
||||
it { is_expected.to belong_to(:user) }
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe NotificationRecipient do
|
||||
RSpec.describe NotificationRecipient, feature_category: :team_planning do
|
||||
let(:user) { create(:user) }
|
||||
let(:project) { create(:project, namespace: user.namespace) }
|
||||
let(:target) { create(:issue, project: project) }
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe PagesDeployment do
|
||||
RSpec.describe PagesDeployment, feature_category: :pages do
|
||||
let_it_be(:project) { create(:project) }
|
||||
|
||||
describe 'associations' do
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
|
||||
RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache, feature_category: :release_orchestration do
|
||||
let(:fixture_dir_glob) { Dir.glob(File.join(Rails.root, 'spec', 'fixtures', 'whats_new', '*.yml')).grep(/\d*_(\d*_\d*)\.yml$/) }
|
||||
|
||||
before do
|
||||
|
|
|
|||
|
|
@ -40,6 +40,7 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits', feature_category: :not_owne
|
|||
expect(json_response['pypi_max_file_size']).to eq(Plan.default.actual_limits.pypi_max_file_size)
|
||||
expect(json_response['terraform_module_max_file_size']).to eq(Plan.default.actual_limits.terraform_module_max_file_size)
|
||||
expect(json_response['storage_size_limit']).to eq(Plan.default.actual_limits.storage_size_limit)
|
||||
expect(json_response['pipeline_hierarchy_size']).to eq(Plan.default.actual_limits.pipeline_hierarchy_size)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -70,6 +71,7 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits', feature_category: :not_owne
|
|||
expect(json_response['pypi_max_file_size']).to eq(Plan.default.actual_limits.pypi_max_file_size)
|
||||
expect(json_response['terraform_module_max_file_size']).to eq(Plan.default.actual_limits.terraform_module_max_file_size)
|
||||
expect(json_response['storage_size_limit']).to eq(Plan.default.actual_limits.storage_size_limit)
|
||||
expect(json_response['pipeline_hierarchy_size']).to eq(Plan.default.actual_limits.pipeline_hierarchy_size)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -118,7 +120,8 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits', feature_category: :not_owne
|
|||
'nuget_max_file_size': 50,
|
||||
'pypi_max_file_size': 60,
|
||||
'terraform_module_max_file_size': 70,
|
||||
'storage_size_limit': 80
|
||||
'storage_size_limit': 80,
|
||||
'pipeline_hierarchy_size': 250
|
||||
}
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
|
|
@ -140,6 +143,7 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits', feature_category: :not_owne
|
|||
expect(json_response['pypi_max_file_size']).to eq(60)
|
||||
expect(json_response['terraform_module_max_file_size']).to eq(70)
|
||||
expect(json_response['storage_size_limit']).to eq(80)
|
||||
expect(json_response['pipeline_hierarchy_size']).to eq(250)
|
||||
end
|
||||
|
||||
it 'updates single plan limits' do
|
||||
|
|
@ -183,7 +187,8 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits', feature_category: :not_owne
|
|||
'nuget_max_file_size': 'e',
|
||||
'pypi_max_file_size': 'f',
|
||||
'terraform_module_max_file_size': 'g',
|
||||
'storage_size_limit': 'j'
|
||||
'storage_size_limit': 'j',
|
||||
'pipeline_hierarchy_size': 'r'
|
||||
}
|
||||
|
||||
expect(response).to have_gitlab_http_status(:bad_request)
|
||||
|
|
@ -204,7 +209,8 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits', feature_category: :not_owne
|
|||
'nuget_max_file_size is invalid',
|
||||
'pypi_max_file_size is invalid',
|
||||
'terraform_module_max_file_size is invalid',
|
||||
'storage_size_limit is invalid'
|
||||
'storage_size_limit is invalid',
|
||||
'pipeline_hierarchy_size is invalid'
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -948,25 +948,47 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category
|
|||
let_it_be(:child) { create(:ci_pipeline, child_of: parent) }
|
||||
let_it_be(:sibling) { create(:ci_pipeline, child_of: parent) }
|
||||
|
||||
before do
|
||||
stub_const("#{described_class}::MAX_HIERARCHY_SIZE", 3)
|
||||
end
|
||||
|
||||
let(:project) { build(:project, :repository) }
|
||||
let(:bridge) do
|
||||
create(:ci_bridge, status: :pending, user: user, options: trigger, pipeline: child)
|
||||
create(:ci_bridge, status: :pending, user: user, options: trigger, pipeline: child, project: project)
|
||||
end
|
||||
|
||||
it 'does not create a new pipeline' do
|
||||
expect { subject }.not_to change { Ci::Pipeline.count }
|
||||
expect(subject).to be_error
|
||||
expect(subject.message).to eq("Pre-conditions not met")
|
||||
context 'when limit was specified by admin' do
|
||||
before do
|
||||
project.actual_limits.update!(pipeline_hierarchy_size: 3)
|
||||
end
|
||||
|
||||
it 'does not create a new pipeline' do
|
||||
expect { subject }.not_to change { Ci::Pipeline.count }
|
||||
end
|
||||
|
||||
it 'drops the trigger job with an explanatory reason' do
|
||||
subject
|
||||
|
||||
expect(bridge.reload).to be_failed
|
||||
expect(bridge.failure_reason).to eq('reached_max_pipeline_hierarchy_size')
|
||||
end
|
||||
end
|
||||
|
||||
it 'drops the trigger job with an explanatory reason' do
|
||||
subject
|
||||
context 'when there was no limit specified by admin' do
|
||||
before do
|
||||
allow(bridge.pipeline).to receive(:complete_hierarchy_count).and_return(1000)
|
||||
end
|
||||
|
||||
expect(bridge.reload).to be_failed
|
||||
expect(bridge.failure_reason).to eq('reached_max_pipeline_hierarchy_size')
|
||||
context 'when pipeline count reaches the default limit of 1000' do
|
||||
it 'does not create a new pipeline' do
|
||||
expect { subject }.not_to change { Ci::Pipeline.count }
|
||||
expect(subject).to be_error
|
||||
expect(subject.message).to eq("Pre-conditions not met")
|
||||
end
|
||||
|
||||
it 'drops the trigger job with an explanatory reason' do
|
||||
subject
|
||||
|
||||
expect(bridge.reload).to be_failed
|
||||
expect(bridge.failure_reason).to eq('reached_max_pipeline_hierarchy_size')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'with :ci_limit_complete_hierarchy_size disabled' do
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Ci::PipelineProcessing::AtomicProcessingService do
|
||||
RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category: :continuous_integration do
|
||||
describe 'Pipeline Processing Service Tests With Yaml' do
|
||||
let_it_be(:project) { create(:project, :repository) }
|
||||
let_it_be(:user) { project.first_owner }
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Database::ConsistencyCheckService do
|
||||
RSpec.describe Database::ConsistencyCheckService, feature_category: :database do
|
||||
let(:batch_size) { 5 }
|
||||
let(:max_batches) { 2 }
|
||||
|
||||
|
|
|
|||
|
|
@ -2,8 +2,8 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe GoogleCloud::FetchGoogleIpListService,
|
||||
:use_clean_rails_memory_store_caching, :clean_gitlab_redis_rate_limiting do
|
||||
RSpec.describe GoogleCloud::FetchGoogleIpListService, :use_clean_rails_memory_store_caching,
|
||||
:clean_gitlab_redis_rate_limiting, feature_category: :continuous_integration do
|
||||
include StubRequests
|
||||
|
||||
let(:google_cloud_ips) { File.read(Rails.root.join('spec/fixtures/cdn/google_cloud.json')) }
|
||||
|
|
|
|||
|
|
@ -14,7 +14,8 @@ RSpec.describe 'admin/application_settings/_ci_cd' do
|
|||
ci_pipeline_schedules: 40,
|
||||
ci_needs_size_limit: 50,
|
||||
ci_registered_group_runners: 60,
|
||||
ci_registered_project_runners: 70
|
||||
ci_registered_project_runners: 70,
|
||||
pipeline_hierarchy_size: 300
|
||||
}
|
||||
end
|
||||
|
||||
|
|
@ -58,6 +59,11 @@ RSpec.describe 'admin/application_settings/_ci_cd' do
|
|||
|
||||
expect(rendered).to have_field('Maximum number of runners registered per project', type: 'number')
|
||||
expect(page.find_field('Maximum number of runners registered per project').value).to eq('70')
|
||||
|
||||
expect(rendered).to have_field("Maximum number of downstream pipelines in a pipeline's hierarchy tree",
|
||||
type: 'number')
|
||||
expect(page.find_field("Maximum number of downstream pipelines in a pipeline's hierarchy tree").value)
|
||||
.to eq('300')
|
||||
end
|
||||
|
||||
it 'does not display the plan name when there is only one plan' do
|
||||
|
|
|
|||
|
|
@@ -1155,10 +1155,10 @@
  resolved "https://registry.yarnpkg.com/@gitlab/visual-review-tools/-/visual-review-tools-1.7.3.tgz#9ea641146436da388ffbad25d7f2abe0df52c235"
  integrity sha512-NMV++7Ew1FSBDN1xiZaauU9tfeSfgDHcOLpn+8bGpP+O5orUPm2Eu66R5eC5gkjBPaXosNAxNWtriee+aFk4+g==

"@gitlab/web-ide@0.0.1-dev-20221212205235":
  version "0.0.1-dev-20221212205235"
  resolved "https://registry.yarnpkg.com/@gitlab/web-ide/-/web-ide-0.0.1-dev-20221212205235.tgz#c35ada9c72080df5e92a6899cc334f66d673249c"
  integrity sha512-F2Lod0oeRVdlgotqmTyRY6SyhaaPftbN82K94gymxOJt//HJQ2mQIQSwhCaWD9TgFdQVkYClh+VUtHQI/SoqwA==
"@gitlab/web-ide@0.0.1-dev-20221214231216":
  version "0.0.1-dev-20221214231216"
  resolved "https://registry.yarnpkg.com/@gitlab/web-ide/-/web-ide-0.0.1-dev-20221214231216.tgz#d48c7c8b49d1e4a2f711b8a7b159ded92bac31df"
  integrity sha512-PtaBlsc60hIrWXWBkyX0OUsg7b1PN74DrtzyG2d7qH55rG9wqfuRw71ieAObv1ApSDqZ+kcf66YkYrd9l8/u5g==

"@graphql-eslint/eslint-plugin@3.12.0":
  version "3.12.0"