Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
95e18e3283
commit
abe11a6a2c
2
Gemfile
2
Gemfile
|
|
@ -455,7 +455,7 @@ group :ed25519 do
|
|||
end
|
||||
|
||||
# Gitaly GRPC protocol definitions
|
||||
gem 'gitaly', '~> 12.9.0.pre.rc4'
|
||||
gem 'gitaly', '~> 13.0.0.pre.rc1'
|
||||
|
||||
gem 'grpc', '~> 1.24.0'
|
||||
|
||||
|
|
|
|||
|
|
@ -378,7 +378,7 @@ GEM
|
|||
po_to_json (>= 1.0.0)
|
||||
rails (>= 3.2.0)
|
||||
git (1.5.0)
|
||||
gitaly (12.9.0.pre.rc4)
|
||||
gitaly (13.0.0.pre.rc1)
|
||||
grpc (~> 1.0)
|
||||
github-markup (1.7.0)
|
||||
gitlab-chronic (0.10.5)
|
||||
|
|
@ -1236,7 +1236,7 @@ DEPENDENCIES
|
|||
gettext (~> 3.2.2)
|
||||
gettext_i18n_rails (~> 1.8.0)
|
||||
gettext_i18n_rails_js (~> 1.3)
|
||||
gitaly (~> 12.9.0.pre.rc4)
|
||||
gitaly (~> 13.0.0.pre.rc1)
|
||||
github-markup (~> 1.7.0)
|
||||
gitlab-chronic (~> 0.10.5)
|
||||
gitlab-labkit (= 0.12.0)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import Vue from 'vue';
|
||||
import canaryCalloutMixin from 'ee_else_ce/environments/mixins/canary_callout_mixin';
|
||||
import canaryCalloutMixin from '../mixins/canary_callout_mixin';
|
||||
import environmentsFolderApp from './environments_folder_view.vue';
|
||||
import { parseBoolean } from '../../lib/utils/common_utils';
|
||||
import Translate from '../../vue_shared/translate';
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
<script>
|
||||
import folderMixin from 'ee_else_ce/environments/mixins/environments_folder_view_mixin';
|
||||
import environmentsMixin from '../mixins/environments_mixin';
|
||||
import CIPaginationMixin from '../../vue_shared/mixins/ci_pagination_api_mixin';
|
||||
import StopEnvironmentModal from '../components/stop_environment_modal.vue';
|
||||
|
|
@ -11,7 +10,7 @@ export default {
|
|||
DeleteEnvironmentModal,
|
||||
},
|
||||
|
||||
mixins: [environmentsMixin, CIPaginationMixin, folderMixin],
|
||||
mixins: [environmentsMixin, CIPaginationMixin],
|
||||
|
||||
props: {
|
||||
endpoint: {
|
||||
|
|
@ -30,6 +29,31 @@ export default {
|
|||
type: Boolean,
|
||||
required: true,
|
||||
},
|
||||
canaryDeploymentFeatureId: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: '',
|
||||
},
|
||||
showCanaryDeploymentCallout: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
userCalloutsPath: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: '',
|
||||
},
|
||||
lockPromotionSvgPath: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: '',
|
||||
},
|
||||
helpCanaryDeploymentsPath: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: '',
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
successCallback(resp) {
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import Vue from 'vue';
|
||||
import canaryCalloutMixin from 'ee_else_ce/environments/mixins/canary_callout_mixin';
|
||||
import canaryCalloutMixin from './mixins/canary_callout_mixin';
|
||||
import environmentsComponent from './components/environments_app.vue';
|
||||
import { parseBoolean } from '../lib/utils/common_utils';
|
||||
import Translate from '../vue_shared/translate';
|
||||
|
|
|
|||
|
|
@ -1,5 +1,26 @@
|
|||
import { parseBoolean } from '~/lib/utils/common_utils';
|
||||
|
||||
export default {
|
||||
data() {
|
||||
const data = document.querySelector(this.$options.el).dataset;
|
||||
|
||||
return {
|
||||
canaryDeploymentFeatureId: data.environmentsDataCanaryDeploymentFeatureId,
|
||||
showCanaryDeploymentCallout: parseBoolean(data.environmentsDataShowCanaryDeploymentCallout),
|
||||
userCalloutsPath: data.environmentsDataUserCalloutsPath,
|
||||
lockPromotionSvgPath: data.environmentsDataLockPromotionSvgPath,
|
||||
helpCanaryDeploymentsPath: data.environmentsDataHelpCanaryDeploymentsPath,
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
canaryCalloutProps() {},
|
||||
canaryCalloutProps() {
|
||||
return {
|
||||
canaryDeploymentFeatureId: this.canaryDeploymentFeatureId,
|
||||
showCanaryDeploymentCallout: this.showCanaryDeploymentCallout,
|
||||
userCalloutsPath: this.userCalloutsPath,
|
||||
lockPromotionSvgPath: this.lockPromotionSvgPath,
|
||||
helpCanaryDeploymentsPath: this.helpCanaryDeploymentsPath,
|
||||
};
|
||||
},
|
||||
},
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,29 +0,0 @@
|
|||
export default {
|
||||
props: {
|
||||
canaryDeploymentFeatureId: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: '',
|
||||
},
|
||||
showCanaryDeploymentCallout: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: false,
|
||||
},
|
||||
userCalloutsPath: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: '',
|
||||
},
|
||||
lockPromotionSvgPath: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: '',
|
||||
},
|
||||
helpCanaryDeploymentsPath: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: '',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
|
@ -15,6 +15,8 @@ module Mutations
|
|||
end
|
||||
|
||||
def authorized_resource?(snippet)
|
||||
return false if snippet.nil?
|
||||
|
||||
Ability.allowed?(context[:current_user], ability_for(snippet), snippet)
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -106,7 +106,23 @@ class RemoteMirror < ApplicationRecord
|
|||
update_status == 'started'
|
||||
end
|
||||
|
||||
def update_repository(options)
|
||||
def update_repository
|
||||
Gitlab::Git::RemoteMirror.new(
|
||||
project.repository.raw,
|
||||
remote_name,
|
||||
**options_for_update
|
||||
).update
|
||||
end
|
||||
|
||||
def options_for_update
|
||||
options = {
|
||||
keep_divergent_refs: keep_divergent_refs?
|
||||
}
|
||||
|
||||
if only_protected_branches?
|
||||
options[:only_branches_matching] = project.protected_branches.pluck(:name)
|
||||
end
|
||||
|
||||
if ssh_mirror_url?
|
||||
if ssh_key_auth? && ssh_private_key.present?
|
||||
options[:ssh_key] = ssh_private_key
|
||||
|
|
@ -117,13 +133,7 @@ class RemoteMirror < ApplicationRecord
|
|||
end
|
||||
end
|
||||
|
||||
options[:keep_divergent_refs] = keep_divergent_refs?
|
||||
|
||||
Gitlab::Git::RemoteMirror.new(
|
||||
project.repository.raw,
|
||||
remote_name,
|
||||
**options
|
||||
).update
|
||||
options
|
||||
end
|
||||
|
||||
def sync?
|
||||
|
|
|
|||
|
|
@ -29,14 +29,16 @@ module Projects
|
|||
remote_mirror.ensure_remote!
|
||||
repository.fetch_remote(remote_mirror.remote_name, ssh_auth: remote_mirror, no_tags: true)
|
||||
|
||||
opts = {}
|
||||
if remote_mirror.only_protected_branches?
|
||||
opts[:only_branches_matching] = project.protected_branches.select(:name).map(&:name)
|
||||
response = remote_mirror.update_repository
|
||||
|
||||
if response.divergent_refs.any?
|
||||
message = "Some refs have diverged and have not been updated on the remote:"
|
||||
message += "\n\n#{response.divergent_refs.join("\n")}"
|
||||
|
||||
remote_mirror.mark_as_failed!(message)
|
||||
else
|
||||
remote_mirror.update_finish!
|
||||
end
|
||||
|
||||
remote_mirror.update_repository(opts)
|
||||
|
||||
remote_mirror.update_finish!
|
||||
end
|
||||
|
||||
def retry_or_fail(mirror, message, tries)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Add snippet repository backfilling migration
|
||||
merge_request: 29927
|
||||
author:
|
||||
type: other
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Fix 500 error for non-existing snippet on graphql mutations
|
||||
merge_request: 30632
|
||||
author: Sashi Kumar
|
||||
type: fixed
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class BackfillSnippetRepositories < ActiveRecord::Migration[6.0]
|
||||
include Gitlab::Database::MigrationHelpers
|
||||
|
||||
DOWNTIME = false
|
||||
INTERVAL = 3.minutes
|
||||
BATCH_SIZE = 100
|
||||
MIGRATION = 'BackfillSnippetRepositories'
|
||||
|
||||
disable_ddl_transaction!
|
||||
|
||||
class Snippet < ActiveRecord::Base
|
||||
include EachBatch
|
||||
|
||||
self.table_name = 'snippets'
|
||||
self.inheritance_column = :_type_disabled
|
||||
end
|
||||
|
||||
def up
|
||||
queue_background_migration_jobs_by_range_at_intervals(Snippet,
|
||||
MIGRATION,
|
||||
INTERVAL,
|
||||
batch_size: BATCH_SIZE)
|
||||
end
|
||||
|
||||
def down
|
||||
# no-op
|
||||
end
|
||||
end
|
||||
|
|
@ -13545,6 +13545,7 @@ COPY "schema_migrations" (version) FROM STDIN;
|
|||
20200417044453
|
||||
20200417145946
|
||||
20200420092011
|
||||
20200420094444
|
||||
20200420104303
|
||||
20200420104323
|
||||
20200420162730
|
||||
|
|
|
|||
|
|
@ -138,6 +138,7 @@ initializer
|
|||
initializers
|
||||
interdependencies
|
||||
interdependency
|
||||
interruptible
|
||||
Irker
|
||||
Istio
|
||||
jasmine-jquery
|
||||
|
|
@ -294,6 +295,7 @@ Splunk
|
|||
SSH
|
||||
storable
|
||||
strace
|
||||
subpath
|
||||
subfolder
|
||||
subfolders
|
||||
sublicense
|
||||
|
|
@ -309,6 +311,8 @@ subqueried
|
|||
subqueries
|
||||
subquery
|
||||
subquerying
|
||||
substring
|
||||
substrings
|
||||
syslog
|
||||
Tiller
|
||||
todos
|
||||
|
|
@ -363,6 +367,7 @@ unreferenced
|
|||
unresolve
|
||||
unresolved
|
||||
unresolving
|
||||
unschedule
|
||||
unstage
|
||||
unstaged
|
||||
unstages
|
||||
|
|
|
|||
|
|
@ -65,7 +65,7 @@ project namespace. For example, `https://gitlab.example.com/gitlab-org/project-1
|
|||
### Unavailable names for jobs
|
||||
|
||||
Each job must have a unique name, but there are a few **reserved `keywords` that
|
||||
cannot be used as job names**:
|
||||
can't be used as job names**:
|
||||
|
||||
- `image`
|
||||
- `services`
|
||||
|
|
@ -100,9 +100,9 @@ The following table lists available parameters for jobs:
|
|||
| [`stage`](#stage) | Defines a job stage (default: `test`). |
|
||||
| [`only`](#onlyexcept-basic) | Limit when jobs are created. Also available: [`only:refs`, `only:kubernetes`, `only:variables`, and `only:changes`](#onlyexcept-advanced). |
|
||||
| [`except`](#onlyexcept-basic) | Limit when jobs are not created. Also available: [`except:refs`, `except:kubernetes`, `except:variables`, and `except:changes`](#onlyexcept-advanced). |
|
||||
| [`rules`](#rules) | List of conditions to evaluate and determine selected attributes of a job, and whether or not it is created. May not be used alongside `only`/`except`. |
|
||||
| [`rules`](#rules) | List of conditions to evaluate and determine selected attributes of a job, and whether or not it's created. May not be used alongside `only`/`except`. |
|
||||
| [`tags`](#tags) | List of tags which are used to select Runner. |
|
||||
| [`allow_failure`](#allow_failure) | Allow job to fail. Failed job doesn't contribute to commit status. |
|
||||
| [`allow_failure`](#allow_failure) | Allow job to fail. Failed job does not contribute to commit status. |
|
||||
| [`when`](#when) | When to run job. Also available: `when:manual` and `when:delayed`. |
|
||||
| [`environment`](#environment) | Name of an environment to which the job deploys. Also available: `environment:name`, `environment:url`, `environment:on_stop`, `environment:auto_stop_in` and `environment:action`. |
|
||||
| [`cache`](#cache) | List of files that should be cached between subsequent runs. Also available: `cache:paths`, `cache:key`, `cache:untracked`, and `cache:policy`. |
|
||||
|
|
@ -201,13 +201,13 @@ In the example below:
|
|||
- **will** inherit: Nothing.
|
||||
- `rspec`:
|
||||
- **will** inherit: the default `image` and the `WEBHOOK_URL` variable.
|
||||
- **will not** inherit: the default `before_script` and the `DOMAIN` variable.
|
||||
- will **not** inherit: the default `before_script` and the `DOMAIN` variable.
|
||||
- `capybara`:
|
||||
- **will** inherit: the default `before_script` and `image`.
|
||||
- **will not** inherit: the `DOMAIN` and `WEBHOOK_URL` variables.
|
||||
- will **not** inherit: the `DOMAIN` and `WEBHOOK_URL` variables.
|
||||
- `karma`:
|
||||
- **will** inherit: the default `image` and `before_script`, and the `DOMAIN` variable.
|
||||
- **will not** inherit: `WEBHOOK_URL` variable.
|
||||
- will **not** inherit: `WEBHOOK_URL` variable.
|
||||
|
||||
```yaml
|
||||
default:
|
||||
|
|
@ -275,7 +275,7 @@ There are also two edge cases worth mentioning:
|
|||
|
||||
1. If no `stages` are defined in `.gitlab-ci.yml`, then the `build`,
|
||||
`test` and `deploy` are allowed to be used as job's stage by default.
|
||||
1. If a job doesn't specify a `stage`, the job is assigned the `test` stage.
|
||||
1. If a job does not specify a `stage`, the job is assigned the `test` stage.
|
||||
|
||||
### `workflow:rules`
|
||||
|
||||
|
|
@ -308,15 +308,15 @@ workflow:
|
|||
|
||||
> - Introduced in [GitLab Premium](https://about.gitlab.com/pricing/) 10.5.
|
||||
> - Available for Starter, Premium and Ultimate since 10.6.
|
||||
> - [Moved](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/21603) to GitLab Core in 11.4.
|
||||
> - [Moved](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/42861) to GitLab Core in 11.4.
|
||||
|
||||
Using the `include` keyword allows the inclusion of external YAML files. This helps
|
||||
to break down the CI/CD configuration into multiple files and increases readability for long configuration files.
|
||||
It is also possible to have template files stored in a central repository and projects include their
|
||||
It's also possible to have template files stored in a central repository and projects include their
|
||||
configuration files. This helps avoid duplicated configuration, for example, global default variables for all projects.
|
||||
|
||||
`include` requires the external YAML file to have the extensions `.yml` or `.yaml`,
|
||||
otherwise the external file will not be included.
|
||||
otherwise the external file won't be included.
|
||||
|
||||
`include` supports the following inclusion methods:
|
||||
|
||||
|
|
@ -330,7 +330,7 @@ otherwise the external file will not be included.
|
|||
NOTE: **Note:**
|
||||
`.gitlab-ci.yml` configuration included by all methods is evaluated at pipeline creation.
|
||||
The configuration is a snapshot in time and persisted in the database. Any changes to
|
||||
referenced `.gitlab-ci.yml` configuration will not be reflected in GitLab until the next pipeline is created.
|
||||
referenced `.gitlab-ci.yml` configuration won't be reflected in GitLab until the next pipeline is created.
|
||||
|
||||
The files defined in `include` are:
|
||||
|
||||
|
|
@ -357,7 +357,7 @@ your configuration file is on. In other words, when using a `include:local`, mak
|
|||
sure that both `.gitlab-ci.yml` and the local file are on the same branch.
|
||||
|
||||
All [nested includes](#nested-includes) will be executed in the scope of the same project,
|
||||
so it is possible to use local, project, remote, or template includes.
|
||||
so it's possible to use local, project, remote, or template includes.
|
||||
|
||||
NOTE: **Note:**
|
||||
Including local files through Git submodules paths is not supported.
|
||||
|
|
@ -410,7 +410,7 @@ include:
|
|||
```
|
||||
|
||||
All [nested includes](#nested-includes) will be executed in the scope of the target project,
|
||||
so it is possible to use local (relative to target project), project, remote
|
||||
so it's possible to use local (relative to target project), project, remote
|
||||
or template includes.
|
||||
|
||||
#### `include:remote`
|
||||
|
|
@ -452,7 +452,7 @@ include:
|
|||
```
|
||||
|
||||
All [nested includes](#nested-includes) will be executed only with the permission of the user,
|
||||
so it is possible to use project, remote or template includes.
|
||||
so it's possible to use project, remote or template includes.
|
||||
|
||||
#### Nested includes
|
||||
|
||||
|
|
@ -557,7 +557,7 @@ a "key: value" pair. Be careful when using special characters:
|
|||
`:`, `{`, `}`, `[`, `]`, `,`, `&`, `*`, `#`, `?`, `|`, `-`, `<`, `>`, `=`, `!`, `%`, `@`, `` ` ``.
|
||||
|
||||
If any of the script commands return an exit code different from zero, the job
|
||||
will fail and further commands will not be executed. This behavior can be avoided by
|
||||
will fail and further commands won't be executed. This behavior can be avoided by
|
||||
storing the exit code in a variable:
|
||||
|
||||
```yaml
|
||||
|
|
@ -591,7 +591,7 @@ Scripts specified in `after_script` are executed in a new shell, separate from a
|
|||
software installed by a `before_script` or `script` script.
|
||||
- Have a separate timeout, which is hard coded to 5 minutes. See
|
||||
[related issue](https://gitlab.com/gitlab-org/gitlab-runner/issues/2716) for details.
|
||||
- Do not affect the job's exit code. If the `script` section succeeds and the
|
||||
- Don't affect the job's exit code. If the `script` section succeeds and the
|
||||
`after_script` times out or fails, the job will exit with code `0` (`Job Succeeded`).
|
||||
|
||||
It's possible to overwrite a globally defined `before_script` or `after_script`
|
||||
|
|
@ -672,7 +672,7 @@ The following stages are available to every pipeline:
|
|||
|
||||
User-defined stages are executed after `.pre` and before `.post`.
|
||||
|
||||
The order of `.pre` and `.post` cannot be changed, even if defined out of order in `.gitlab-ci.yml`.
|
||||
The order of `.pre` and `.post` can't be changed, even if defined out of order in `.gitlab-ci.yml`.
|
||||
For example, the following are equivalent configuration:
|
||||
|
||||
- Configured in order:
|
||||
|
|
@ -704,7 +704,7 @@ For example, the following are equivalent configuration:
|
|||
```
|
||||
|
||||
NOTE: **Note:**
|
||||
A pipeline will not be created if it only contains jobs in `.pre` or `.post` stages.
|
||||
A pipeline won't be created if it only contains jobs in `.pre` or `.post` stages.
|
||||
|
||||
### `extends`
|
||||
|
||||
|
|
@ -713,7 +713,7 @@ A pipeline will not be created if it only contains jobs in `.pre` or `.post` sta
|
|||
`extends` defines entry names that a job that uses `extends` is going to
|
||||
inherit from.
|
||||
|
||||
It is an alternative to using [YAML anchors](#anchors) and is a little
|
||||
It's an alternative to using [YAML anchors](#anchors) and is a little
|
||||
more flexible and readable:
|
||||
|
||||
```yaml
|
||||
|
|
@ -759,7 +759,7 @@ If you do want to include the `rake test`, see [`before_script` and `after_scrip
|
|||
`.tests` in this example is a [hidden key](#hide-jobs), but it's
|
||||
possible to inherit from regular jobs as well.
|
||||
|
||||
`extends` supports multi-level inheritance, however it is not recommended to
|
||||
`extends` supports multi-level inheritance, however it's not recommended to
|
||||
use more than three levels. The maximum nesting level that is supported is 10.
|
||||
The following example has two levels of inheritance:
|
||||
|
||||
|
|
@ -854,11 +854,11 @@ the `.template` job, and uses the `alpine` Docker image as defined in the local
|
|||
|
||||
### `rules`
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/29011) in GitLab 12.3.
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/27863) in GitLab 12.3.
|
||||
|
||||
`rules` allows for a list of individual rule objects to be evaluated
|
||||
*in order*, until one matches and dynamically provides attributes to the job.
|
||||
Note that `rules` cannot be used in combination with `only/except` since it is intended
|
||||
Note that `rules` can't be used in combination with `only/except` since it's intended
|
||||
to replace that functionality. If you attempt to do this the linter will return a
|
||||
`key may not be used with rules` error.
|
||||
|
||||
|
|
@ -888,7 +888,7 @@ In this example, if the first rule:
|
|||
- Matches, the job will be given the `when:always` attribute.
|
||||
- Does not match, the second and third rules will be evaluated sequentially
|
||||
until a match is found. That is, the job will be given either the:
|
||||
- `when: manual` attribute if the second rule matches. **The stage will not complete until this manual job is triggered and completes successfully.**
|
||||
- `when: manual` attribute if the second rule matches. **The stage won't complete until this manual job is triggered and completes successfully.**
|
||||
- `when: on_success` attribute if the second rule does not match. The third
|
||||
rule will always match when reached because it has no conditional clauses.
|
||||
|
||||
|
|
@ -944,7 +944,7 @@ In this example, a job either set to:
|
|||
|
||||
#### `rules:exists`
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/16574) in GitLab 12.4.
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/24021) in GitLab 12.4.
|
||||
|
||||
`exists` accepts an array of paths and will match if any of these paths exist
|
||||
as files in the repository.
|
||||
|
|
@ -1002,11 +1002,11 @@ Jobs with `rules:` can cause two pipelines to be created unexpectedly:
|
|||
- One pipeline from pushing a commit to a branch.
|
||||
- A second ["detached" pipeline for a merge request](../merge_request_pipelines/index.md).
|
||||
|
||||
`only` and `except` jobs do not trigger merge request pipelines by default, but this
|
||||
`only` and `except` jobs don't trigger merge request pipelines by default, but this
|
||||
is not the case for jobs with `rules:`, which may be surprising if migrating from `only`
|
||||
and `except` to `rules:`.
|
||||
|
||||
If you are using `rules:` and you see two pipelines for commits to branches that have
|
||||
If you're using `rules:` and you see two pipelines for commits to branches that have
|
||||
a merge request, you have two options:
|
||||
|
||||
- Individually exclude each job that uses `rules:` from merge request pipelines. The
|
||||
|
|
@ -1045,7 +1045,7 @@ In the following example:
|
|||
|
||||
- We run the job manually if `Dockerfile` or any file in `docker/scripts/`
|
||||
has changed AND `$VAR == "string value"`.
|
||||
- Otherwise, the job will not be included in the pipeline.
|
||||
- Otherwise, the job won't be included in the pipeline.
|
||||
|
||||
```yaml
|
||||
docker build:
|
||||
|
|
@ -1095,7 +1095,7 @@ docker build:
|
|||
```
|
||||
|
||||
Additional job configuration may be added to rules in the future. If something
|
||||
useful isn't available, please
|
||||
useful is not available, please
|
||||
[open an issue](https://gitlab.com/gitlab-org/gitlab/issues).
|
||||
|
||||
### `only`/`except` (basic)
|
||||
|
|
@ -1191,7 +1191,7 @@ The above example will run `job` for all branches on `gitlab-org/gitlab`,
|
|||
except `master` and those with names prefixed with `release/`.
|
||||
|
||||
If a job does not have an `only` rule, `only: ['branches', 'tags']` is set by
|
||||
default. If it doesn't have an `except` rule, it is empty.
|
||||
default. If it does not have an `except` rule, it's empty.
|
||||
|
||||
For example,
|
||||
|
||||
|
|
@ -1254,7 +1254,7 @@ Feature.enable(:allow_unsafe_ruby_regexp)
|
|||
### `only`/`except` (advanced)
|
||||
|
||||
CAUTION: **Warning:**
|
||||
This is an _alpha_ feature, and it is subject to change at any time without
|
||||
This is an _alpha_ feature, and is subject to change at any time without
|
||||
prior notice!
|
||||
|
||||
GitLab supports both simple and complex strategies, so it's possible to use an
|
||||
|
|
@ -1306,7 +1306,7 @@ This means the keys are treated as if joined by an OR. This relationship could b
|
|||
In the example below, the `test` job will **not** be created when **any** of the following are true:
|
||||
|
||||
- The pipeline runs for the `master`.
|
||||
- There are changes to the `README.md` file in the root directory of the repo.
|
||||
- There are changes to the `README.md` file in the root directory of the repository.
|
||||
|
||||
```yaml
|
||||
test:
|
||||
|
|
@ -1400,7 +1400,7 @@ This means the `only:changes` policy is useful for pipelines where:
|
|||
|
||||
If there is no Git push event, such as for pipelines with
|
||||
[sources other than the three above](../variables/predefined_variables.md#variables-reference),
|
||||
`changes` cannot determine if a given file is new or old, and will always
|
||||
`changes` can't determine if a given file is new or old, and will always
|
||||
return true.
|
||||
|
||||
A basic example of using `only: changes`:
|
||||
|
|
@ -1427,10 +1427,10 @@ commits contains changes to any of the following:
|
|||
|
||||
CAUTION: **Warning:**
|
||||
If using `only:changes` with [only allow merge requests to be merged if the pipeline succeeds](../../user/project/merge_requests/merge_when_pipeline_succeeds.md#only-allow-merge-requests-to-be-merged-if-the-pipeline-succeeds),
|
||||
undesired behavior could result if you do not [also use `only:merge_requests`](#using-onlychanges-with-pipelines-for-merge-requests).
|
||||
undesired behavior could result if you don't [also use `only:merge_requests`](#using-onlychanges-with-pipelines-for-merge-requests).
|
||||
|
||||
You can also use glob patterns to match multiple files in either the root directory
|
||||
of the repo, or in _any_ directory within the repo, but they must be wrapped
|
||||
of the repository, or in _any_ directory within the repository, but they must be wrapped
|
||||
in double quotes or GitLab will fail to parse the `.gitlab-ci.yml`. For example:
|
||||
|
||||
```yaml
|
||||
|
|
@ -1443,7 +1443,7 @@ test:
|
|||
```
|
||||
|
||||
The following example will skip the `build` job if a change is detected in any file
|
||||
in the root directory of the repo with a `.md` extension:
|
||||
in the root directory of the repository with a `.md` extension:
|
||||
|
||||
```yaml
|
||||
build:
|
||||
|
|
@ -1464,7 +1464,7 @@ There are some points to be aware of when
|
|||
##### Using `only:changes` with pipelines for merge requests
|
||||
|
||||
With [pipelines for merge requests](../merge_request_pipelines/index.md),
|
||||
it is possible to define a job to be created based on files modified
|
||||
it's possible to define a job to be created based on files modified
|
||||
in a merge request.
|
||||
|
||||
In order to deduce the correct base SHA of the source branch, we recommend combining
|
||||
|
|
@ -1506,7 +1506,7 @@ docker build service one:
|
|||
|
||||
In the example above, a pipeline could fail due to changes to a file in `service-one/**/*`.
|
||||
A later commit could then be pushed that does not include any changes to this file,
|
||||
but includes changes to the `Dockerfile`, and this pipeline could pass because it is only
|
||||
but includes changes to the `Dockerfile`, and this pipeline could pass because it's only
|
||||
testing the changes to the `Dockerfile`. GitLab checks the **most recent pipeline**,
|
||||
that **passed**, and will show the merge request as mergeable, despite the earlier
|
||||
failed pipeline caused by a change that was not yet corrected.
|
||||
|
|
@ -1595,8 +1595,7 @@ This example creates four paths of execution:
|
|||
- If `needs:` is set to point to a job that is not instantiated
|
||||
because of `only/except` rules or otherwise does not exist, the
|
||||
pipeline will be created with YAML error.
|
||||
- We are temporarily limiting the maximum number of jobs that a single job can
|
||||
need in the `needs:` array:
|
||||
- The maximum number of jobs that a single job can need in the `needs:` array is limited:
|
||||
- For GitLab.com, the limit is ten. For more information, see our
|
||||
[infrastructure issue](https://gitlab.com/gitlab-com/gl-infra/infrastructure/issues/7541).
|
||||
- For self-managed instances, the limit is:
|
||||
|
|
@ -1605,7 +1604,7 @@ This example creates four paths of execution:
|
|||
- If `needs:` refers to a job that is marked as `parallel:`.
|
||||
the current job will depend on all parallel jobs created.
|
||||
- `needs:` is similar to `dependencies:` in that it needs to use jobs from prior stages,
|
||||
meaning it is impossible to create circular dependencies. Depending on jobs in the
|
||||
meaning it's impossible to create circular dependencies. Depending on jobs in the
|
||||
current stage is not possible either, but support [is planned](https://gitlab.com/gitlab-org/gitlab/issues/30632).
|
||||
- Related to the above, stages must be explicitly defined for all jobs
|
||||
that have the keyword `needs:` or are referred to by one.
|
||||
|
|
@ -1635,7 +1634,7 @@ When using `needs`, artifact downloads are controlled with `artifacts: true` or
|
|||
The `dependencies` keyword should not be used with `needs`, as this is deprecated since GitLab 12.6.
|
||||
|
||||
In the example below, the `rspec` job will download the `build_job` artifacts, while the
|
||||
`rubocop` job will not:
|
||||
`rubocop` job won't:
|
||||
|
||||
```yaml
|
||||
build_job:
|
||||
|
|
@ -1775,7 +1774,7 @@ show the same orange warning. However, the associated commit will be marked
|
|||
"passed", without warnings.
|
||||
|
||||
In the example below, `job1` and `job2` will run in parallel, but if `job1`
|
||||
fails, it will not stop the next stage from running, since it's marked with
|
||||
fails, it won't stop the next stage from running, since it's marked with
|
||||
`allow_failure: true`:
|
||||
|
||||
```yaml
|
||||
|
|
@ -1877,14 +1876,14 @@ block the execution of the pipeline at the stage this action is defined in. It's
|
|||
possible to resume execution of the pipeline when someone executes a blocking
|
||||
manual action by clicking a _play_ button.
|
||||
|
||||
When a pipeline is blocked, it will not be merged if Merge When Pipeline Succeeds
|
||||
When a pipeline is blocked, it won't be merged if Merge When Pipeline Succeeds
|
||||
is set. Blocked pipelines also do have a special status, called _manual_.
|
||||
When the `when:manual` syntax is used, manual actions are non-blocking by
|
||||
default. If you want to make manual action blocking, it is necessary to add
|
||||
default. If you want to make manual action blocking, it's necessary to add
|
||||
`allow_failure: false` to the job's definition in `.gitlab-ci.yml`.
|
||||
|
||||
Optional manual actions have `allow_failure: true` set by default and their
|
||||
Statuses do not contribute to the overall pipeline status. So, if a manual
|
||||
Statuses don't contribute to the overall pipeline status. So, if a manual
|
||||
action fails, the pipeline will eventually succeed.
|
||||
|
||||
NOTE: **Note:**
|
||||
|
|
@ -1894,7 +1893,7 @@ Manual actions are considered to be write actions, so permissions for
|
|||
[protected branches](../../user/project/protected_branches.md) are used when
|
||||
a user wants to trigger an action. In other words, in order to trigger a manual
|
||||
action assigned to a branch that the pipeline is running for, the user needs to
|
||||
have the ability to merge to this branch. It is possible to use protected environments
|
||||
have the ability to merge to this branch. It's possible to use protected environments
|
||||
to more strictly [protect manual deployments](#protecting-manual-jobs-premium) from being
|
||||
run by unauthorized users.
|
||||
|
||||
|
|
@ -1907,7 +1906,7 @@ being used.
|
|||
|
||||
It's possible to use [protected environments](../environments/protected_environments.md)
|
||||
to define a precise list of users authorized to run a manual job. By allowing only
|
||||
users associated with a protected environment to trigger manual jobs, it is possible
|
||||
users associated with a protected environment to trigger manual jobs, it's possible
|
||||
to implement some special use cases, such as:
|
||||
|
||||
- More precisely limiting who can deploy to an environment.
|
||||
|
|
@ -1937,13 +1936,13 @@ To do this, you must:
|
|||
who are always able to use protected environments.
|
||||
|
||||
Additionally, if a manual job is defined as blocking by adding `allow_failure: false`,
|
||||
the next stages of the pipeline will not run until the manual job is triggered. This
|
||||
the next stages of the pipeline won't run until the manual job is triggered. This
|
||||
can be used as a way to have a defined list of users allowed to "approve" later pipeline
|
||||
stages by triggering the blocking manual job.
|
||||
|
||||
#### `when:delayed`
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/21767) in GitLab 11.4.
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/51352) in GitLab 11.4.
|
||||
|
||||
Delayed job are for executing scripts after a certain period.
|
||||
This is useful if you want to avoid jobs entering `pending` state immediately.
|
||||
|
|
@ -1957,11 +1956,11 @@ provided. `start_in` key must be less than or equal to one week. Examples of val
|
|||
- `1 day`
|
||||
- `1 week`
|
||||
|
||||
When there is a delayed job in a stage, the pipeline will not progress until the delayed job has finished.
|
||||
When there is a delayed job in a stage, the pipeline won't progress until the delayed job has finished.
|
||||
This means this keyword can also be used for inserting delays between different stages.
|
||||
|
||||
The timer of a delayed job starts immediately after the previous stage has completed.
|
||||
Similar to other types of jobs, a delayed job's timer will not start unless the previous stage passed.
|
||||
Similar to other types of jobs, a delayed job's timer won't start unless the previous stage passed.
|
||||
|
||||
The following example creates a job named `timed rollout 10%` that is executed 30 minutes after the previous stage has completed:
|
||||
|
||||
|
|
@ -1973,7 +1972,7 @@ timed rollout 10%:
|
|||
start_in: 30 minutes
|
||||
```
|
||||
|
||||
You can stop the active timer of a delayed job by clicking the **Unschedule** button.
|
||||
You can stop the active timer of a delayed job by clicking the **{time-out}** (**Unschedule**) button.
|
||||
This job will never be executed in the future unless you execute the job manually.
|
||||
|
||||
You can start a delayed job immediately by clicking the **Play** button.
|
||||
|
|
@ -2009,7 +2008,7 @@ deployment to the `production` environment.
|
|||
> `name` keyword.
|
||||
> - The `name` parameter can use any of the defined CI variables,
|
||||
> including predefined, secure variables and `.gitlab-ci.yml` [`variables`](#variables).
|
||||
> You however cannot use variables defined under `script`.
|
||||
> You however can't use variables defined under `script`.
|
||||
|
||||
The `environment` name can contain:
|
||||
|
||||
|
|
@ -2027,7 +2026,7 @@ Common names are `qa`, `staging`, and `production`, but you can use whatever
|
|||
name works with your workflow.
|
||||
|
||||
Instead of defining the name of the environment right after the `environment`
|
||||
keyword, it is also possible to define it as a separate value. For that, use
|
||||
keyword, it's also possible to define it as a separate value. For that, use
|
||||
the `name` keyword under `environment`:
|
||||
|
||||
```yaml
|
||||
|
|
@ -2045,7 +2044,7 @@ deploy to production:
|
|||
> recommended way now is to define it in `.gitlab-ci.yml`.
|
||||
> - The `url` parameter can use any of the defined CI variables,
|
||||
> including predefined, secure variables and `.gitlab-ci.yml` [`variables`](#variables).
|
||||
> You however cannot use variables defined under `script`.
|
||||
> You however can't use variables defined under `script`.
|
||||
|
||||
This is an optional value that when set, it exposes buttons in various places
|
||||
in GitLab which when clicked take you to the defined URL.
|
||||
|
|
@ -2065,7 +2064,7 @@ deploy to production:
|
|||
|
||||
#### `environment:on_stop`
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/6669) in GitLab 8.13.
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/22191) in GitLab 8.13.
|
||||
> - Starting with GitLab 8.14, when you have an environment that has a stop action
|
||||
> defined, GitLab will automatically trigger a stop action when the associated
|
||||
> branch is deleted.
|
||||
|
|
@ -2078,7 +2077,7 @@ Read the `environment:action` section for an example.
|
|||
|
||||
#### `environment:action`
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/6669) in GitLab 8.13.
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/22191) in GitLab 8.13.
|
||||
|
||||
The `action` keyword is to be used in conjunction with `on_stop` and is defined
|
||||
in the job that is called to close the environment.
|
||||
|
|
@ -2185,11 +2184,11 @@ To follow progress on support for GitLab-managed clusters, see the
|
|||
|
||||
#### Dynamic environments
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/6323) in GitLab 8.12 and GitLab Runner 1.6.
|
||||
> - The `$CI_ENVIRONMENT_SLUG` was [introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/7983) in GitLab 8.15.
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/21971) in GitLab 8.12 and GitLab Runner 1.6.
|
||||
> - The `$CI_ENVIRONMENT_SLUG` was [introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/22864) in GitLab 8.15.
|
||||
> - The `name` and `url` parameters can use any of the defined CI variables,
|
||||
> including predefined, secure variables and `.gitlab-ci.yml` [`variables`](#variables).
|
||||
> You however cannot use variables defined under `script`.
|
||||
> You however can't use variables defined under `script`.
|
||||
|
||||
For example:
|
||||
|
||||
|
|
@ -2233,13 +2232,13 @@ Read how caching works and find out some good practices in the
|
|||
cached between jobs. You can only use paths that are within the local working
|
||||
copy.
|
||||
|
||||
If `cache` is defined outside the scope of jobs, it means it is set
|
||||
If `cache` is defined outside the scope of jobs, it means it's set
|
||||
globally and all jobs will use that definition.
|
||||
|
||||
#### `cache:paths`
|
||||
|
||||
Use the `paths` directive to choose which files or directories will be cached. Paths
|
||||
are relative to the project directory (`$CI_PROJECT_DIR`) and cannot directly link outside it.
|
||||
are relative to the project directory (`$CI_PROJECT_DIR`) and can't directly link outside it.
|
||||
Wildcards can be used that follow the [glob](https://en.wikipedia.org/wiki/Glob_(programming))
|
||||
patterns and [`filepath.Match`](https://golang.org/pkg/path/filepath/#Match).
|
||||
|
||||
|
|
@ -2293,7 +2292,7 @@ set, is just literal `default` which means everything is shared between
|
|||
pipelines and jobs by default, starting from GitLab 9.0.
|
||||
|
||||
NOTE: **Note:**
|
||||
The `cache:key` variable cannot contain the `/` character, or the equivalent
|
||||
The `cache:key` variable can't contain the `/` character, or the equivalent
|
||||
URI-encoded `%2F`; a value made only of dots (`.`, `%2E`) is also forbidden.
|
||||
|
||||
For example, to enable per-branch caching:
|
||||
|
|
@ -2339,7 +2338,7 @@ cache:
|
|||
- node_modules
|
||||
```
|
||||
|
||||
In this example we are creating a cache for Ruby and Node.js dependencies that
|
||||
In this example we're creating a cache for Ruby and Node.js dependencies that
|
||||
is tied to current versions of the `Gemfile.lock` and `package.json` files. Whenever one of
|
||||
these files changes, a new cache key is computed and a new cache is created. Any future
|
||||
job runs using the same `Gemfile.lock` and `package.json` with `cache:key:files` will
|
||||
|
|
@ -2415,7 +2414,7 @@ execution, and to re-upload them at the end. This allows any changes made by the
|
|||
job to be persisted for future runs, and is known as the `pull-push` cache
|
||||
policy.
|
||||
|
||||
If you know the job doesn't alter the cached files, you can skip the upload step
|
||||
If you know the job does not alter the cached files, you can skip the upload step
|
||||
by setting `policy: pull` in the job specification. Typically, this would be
|
||||
twinned with an ordinary cache job at an earlier stage to ensure the cache
|
||||
is updated from time to time:
|
||||
|
|
@ -2471,7 +2470,7 @@ be available for download in the GitLab UI.
|
|||
|
||||
#### `artifacts:paths`
|
||||
|
||||
Paths are relative to the project directory (`$CI_PROJECT_DIR`) and cannot directly
|
||||
Paths are relative to the project directory (`$CI_PROJECT_DIR`) and can't directly
|
||||
link outside it. Wildcards can be used that follow the [glob](https://en.wikipedia.org/wiki/Glob_(programming))
|
||||
patterns and [`filepath.Match`](https://golang.org/pkg/path/filepath/#Match).
|
||||
|
||||
|
|
@ -2498,7 +2497,7 @@ job:
|
|||
You may want to create artifacts only for tagged releases to avoid filling the
|
||||
build server storage with temporary build artifacts.
|
||||
|
||||
Create artifacts only for tags (`default-job` will not create artifacts):
|
||||
Create artifacts only for tags (`default-job` won't create artifacts):
|
||||
|
||||
```yaml
|
||||
default-job:
|
||||
|
|
@ -2579,7 +2578,7 @@ The default name is `artifacts`, which becomes `artifacts.zip` when downloaded.
|
|||
|
||||
NOTE: **Note:**
|
||||
If your branch-name contains forward slashes
|
||||
(e.g. `feature/my-feature`) it is advised to use `$CI_COMMIT_REF_SLUG`
|
||||
(for example `feature/my-feature`) it's advised to use `$CI_COMMIT_REF_SLUG`
|
||||
instead of `$CI_COMMIT_REF_NAME` for proper naming of the artifact.
|
||||
|
||||
To create an archive with a name of the current job:
|
||||
|
|
@ -2710,15 +2709,15 @@ After their expiry, artifacts are deleted hourly by default (via a cron job),
|
|||
and are not accessible anymore.
|
||||
|
||||
The value of `expire_in` is an elapsed time in seconds, unless a unit is
|
||||
provided. Examples of parsable values:
|
||||
provided. Examples of valid values:
|
||||
|
||||
- '42'
|
||||
- '3 mins 4 sec'
|
||||
- '2 hrs 20 min'
|
||||
- '2h20min'
|
||||
- '6 mos 1 day'
|
||||
- '47 yrs 6 mos and 4d'
|
||||
- '3 weeks and 2 days'
|
||||
- `42`
|
||||
- `3 mins 4 sec`
|
||||
- `2 hrs 20 min`
|
||||
- `2h20min`
|
||||
- `6 mos 1 day`
|
||||
- `47 yrs 6 mos and 4d`
|
||||
- `3 weeks and 2 days`
|
||||
|
||||
To expire artifacts 1 week after being uploaded:
|
||||
|
||||
|
|
@ -2770,7 +2769,7 @@ You can only define jobs from stages that are executed before the current one.
|
|||
An error will be shown if you define jobs from the current stage or next ones.
|
||||
Defining an empty array will skip downloading any artifacts for that job.
|
||||
The status of the previous job is not considered when using `dependencies`, so
|
||||
if it failed or it is a manual job that was not run, no error occurs.
|
||||
if it failed or it's a manual job that was not run, no error occurs.
|
||||
|
||||
In the following example, we define two jobs with artifacts, `build:osx` and
|
||||
`build:linux`. When the `test:osx` is executed, the artifacts from `build:osx`
|
||||
|
|
@ -2828,7 +2827,7 @@ and bring back the old behavior.
|
|||
|
||||
### `coverage`
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/7447) in GitLab 8.17.
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/20428) in GitLab 8.17.
|
||||
|
||||
`coverage` allows you to configure how code coverage will be extracted from the
|
||||
job output.
|
||||
|
|
@ -2848,13 +2847,13 @@ job1:
|
|||
|
||||
### `retry`
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/12909) in GitLab 9.5.
|
||||
> - [Behavior expanded](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/21758) in GitLab 11.5 to control on which failures to retry.
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/3442) in GitLab 9.5.
|
||||
> - [Behavior expanded](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/3515) in GitLab 11.5 to control on which failures to retry.
|
||||
|
||||
`retry` allows you to configure how many times a job is going to be retried in
|
||||
case of a failure.
|
||||
|
||||
When a job fails and has `retry` configured, it is going to be processed again
|
||||
When a job fails and has `retry` configured, it's going to be processed again
|
||||
up to the amount of times specified by the `retry` keyword.
|
||||
|
||||
If `retry` is set to 2, and a job succeeds in a second run (first retry), it won't be retried
|
||||
|
|
@ -2906,7 +2905,7 @@ Possible values for `when` are:
|
|||
Please make sure to update `RETRY_WHEN_IN_DOCUMENTATION` array in
|
||||
`spec/lib/gitlab/ci/config/entry/retry_spec.rb` if you change any of
|
||||
the documented values below. The test there makes sure that all documented
|
||||
values are really valid as a config option and therefore should always
|
||||
values are really valid as a configuration option and therefore should always
|
||||
stay in sync with this documentation.
|
||||
-->
|
||||
|
||||
|
|
@ -2915,12 +2914,12 @@ Possible values for `when` are:
|
|||
- `script_failure`: Retry when the script failed.
|
||||
- `api_failure`: Retry on API failure.
|
||||
- `stuck_or_timeout_failure`: Retry when the job got stuck or timed out.
|
||||
- `runner_system_failure`: Retry if there was a runner system failure (e.g. setting up the job failed).
|
||||
- `runner_system_failure`: Retry if there was a runner system failure (for example, job setup failed).
|
||||
- `missing_dependency_failure`: Retry if a dependency was missing.
|
||||
- `runner_unsupported`: Retry if the runner was unsupported.
|
||||
- `stale_schedule`: Retry if a delayed job could not be executed.
|
||||
- `job_execution_timeout`: Retry if the script exceeded the maximum execution time set for the job.
|
||||
- `archived_failure`: Retry if the job is archived and cannot be run.
|
||||
- `archived_failure`: Retry if the job is archived and can't be run.
|
||||
- `unmet_prerequisites`: Retry if the job failed to complete prerequisite tasks.
|
||||
- `scheduler_failure`: Retry if the scheduler failed to assign the job to a runner.
|
||||
- `data_integrity_failure`: Retry if there was a structural integrity problem detected.
|
||||
|
|
@ -2942,17 +2941,17 @@ test:
|
|||
```
|
||||
|
||||
The job-level timeout can exceed the
|
||||
[project-level timeout](../pipelines/settings.md#timeout) but can not
|
||||
[project-level timeout](../pipelines/settings.md#timeout) but can't
|
||||
exceed the Runner-specific timeout.
|
||||
|
||||
### `parallel`
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/22631) in GitLab 11.5.
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/21480) in GitLab 11.5.
|
||||
|
||||
`parallel` allows you to configure how many instances of a job to run in
|
||||
parallel. This value has to be greater than or equal to two (2) and less than or equal to 50.
|
||||
|
||||
This creates N instances of the same job that run in parallel. They're named
|
||||
This creates N instances of the same job that run in parallel. They are named
|
||||
sequentially from `job_name 1/N` to `job_name N/N`.
|
||||
|
||||
For every job, `CI_NODE_INDEX` and `CI_NODE_TOTAL` [environment variables](../variables/README.md#predefined-environment-variables) are set.
|
||||
|
|
@ -3030,7 +3029,7 @@ staging:
|
|||
|
||||
#### Complex `trigger` syntax for multi-project pipelines
|
||||
|
||||
It is possible to configure a branch name that GitLab will use to create
|
||||
It's possible to configure a branch name that GitLab will use to create
|
||||
a downstream pipeline with:
|
||||
|
||||
```yaml
|
||||
|
|
@ -3045,7 +3044,7 @@ staging:
|
|||
branch: stable
|
||||
```
|
||||
|
||||
It is possible to mirror the status from a triggered pipeline:
|
||||
It's possible to mirror the status from a triggered pipeline:
|
||||
|
||||
```yaml
|
||||
trigger_job:
|
||||
|
|
@ -3054,7 +3053,7 @@ trigger_job:
|
|||
strategy: depend
|
||||
```
|
||||
|
||||
It is possible to mirror the status from an upstream pipeline:
|
||||
It's possible to mirror the status from an upstream pipeline:
|
||||
|
||||
```yaml
|
||||
upstream_bridge:
|
||||
|
|
@ -3077,7 +3076,7 @@ trigger_job:
|
|||
```
|
||||
|
||||
Similar to [multi-project pipelines](../multi_project_pipelines.md#mirroring-status-from-triggered-pipeline),
|
||||
it is possible to mirror the status from a triggered pipeline:
|
||||
it's possible to mirror the status from a triggered pipeline:
|
||||
|
||||
```yaml
|
||||
trigger_job:
|
||||
|
|
@ -3144,7 +3143,7 @@ Not to be confused with the [`trigger`](#trigger) parameter.
|
|||
|
||||
### `interruptible`
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/23464) in GitLab 12.3.
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/32022) in GitLab 12.3.
|
||||
|
||||
`interruptible` is used to indicate that a job should be canceled if made redundant by a newer pipeline run. Defaults to `false`.
|
||||
This value will only be used if the [automatic cancellation of redundant pipelines feature](../pipelines/settings.md#auto-cancel-pending-pipelines)
|
||||
|
|
@ -3152,8 +3151,8 @@ is enabled.
|
|||
|
||||
When enabled, a pipeline on the same branch will be canceled when:
|
||||
|
||||
- It is made redundant by a newer pipeline run.
|
||||
- Either all jobs are set as interruptible, or any uninterruptible jobs have not started.
|
||||
- It's made redundant by a newer pipeline run.
|
||||
- Either all jobs are set as interruptible, or any uninterruptible jobs haven't started.
|
||||
|
||||
Pending jobs are always considered interruptible.
|
||||
|
||||
|
|
@ -3202,7 +3201,7 @@ Sometimes running multiples jobs or pipelines at the same time in an environment
|
|||
can lead to errors during the deployment.
|
||||
|
||||
To avoid these errors, the `resource_group` attribute can be used to ensure that
|
||||
the Runner will not run certain jobs simultaneously.
|
||||
the Runner won't run certain jobs simultaneously.
|
||||
|
||||
When the `resource_group` key is defined for a job in `.gitlab-ci.yml`,
|
||||
job executions are mutually exclusive across different pipelines for the same project.
|
||||
|
|
@ -3219,7 +3218,7 @@ deploy-to-production:
|
|||
```
|
||||
|
||||
In this case, if a `deploy-to-production` job is running in a pipeline, and a new
|
||||
`deploy-to-production` job is created in a different pipeline, it will not run until
|
||||
`deploy-to-production` job is created in a different pipeline, it won't run until
|
||||
the currently running/pending `deploy-to-production` job is finished. As a result,
|
||||
you can ensure that concurrent deployments will never happen to the production environment.
|
||||
|
||||
|
|
@ -3228,7 +3227,8 @@ is when deploying to physical devices. You may have more than one physical devic
|
|||
one can be deployed to, but there can be only one deployment per device at any given time.
|
||||
|
||||
NOTE: **Note:**
|
||||
This key can only contain letters, digits, `-`, `_`, `/`, `$`, `{`, `}`, `.`, and spaces, but it cannot start or end with `/`.
|
||||
This key can only contain letters, digits, `-`, `_`, `/`, `$`, `{`, `}`, `.`, and spaces.
|
||||
It can't start or end with `/`.
|
||||
|
||||
### `pages`
|
||||
|
||||
|
|
@ -3240,7 +3240,7 @@ requirements below must be met:
|
|||
- `artifacts` with a path to the `public/` directory must be defined.
|
||||
|
||||
The example below simply moves all files from the root of the project to the
|
||||
`public/` directory. The `.public` workaround is so `cp` doesn't also copy
|
||||
`public/` directory. The `.public` workaround is so `cp` does not also copy
|
||||
`public/` to itself in an infinite loop:
|
||||
|
||||
```yaml
|
||||
|
|
@ -3265,7 +3265,7 @@ Read more on [GitLab Pages user documentation](../../user/project/pages/index.md
|
|||
|
||||
NOTE: **Note:**
|
||||
Integers (as well as strings) are legal both for variable's name and value.
|
||||
Floats are not legal and cannot be used.
|
||||
Floats are not legal and can't be used.
|
||||
|
||||
GitLab CI/CD allows you to define variables inside `.gitlab-ci.yml` that are
|
||||
then passed in the job environment. They can be set globally and per-job.
|
||||
|
|
@ -3319,7 +3319,7 @@ variables:
|
|||
```
|
||||
|
||||
`fetch` is faster as it re-uses the local working copy (falling back to `clone`
|
||||
if it doesn't exist). `git clean` is used to undo any changes made by the last
|
||||
if it does not exist). `git clean` is used to undo any changes made by the last
|
||||
job, and `git fetch` is used to retrieve commits made since the last job ran.
|
||||
|
||||
```yaml
|
||||
|
|
@ -3328,9 +3328,9 @@ variables:
|
|||
```
|
||||
|
||||
`none` also re-uses the local working copy, but skips all Git operations
|
||||
(including GitLab Runner's pre-clone script, if present). It is mostly useful
|
||||
for jobs that operate exclusively on artifacts (e.g., `deploy`). Git repository
|
||||
data may be present, but it is certain to be out of date, so you should only
|
||||
(including GitLab Runner's pre-clone script, if present). It's mostly useful
|
||||
for jobs that operate exclusively on artifacts (for example, `deploy`). Git repository
|
||||
data may be present, but it's certain to be out of date, so you should only
|
||||
rely on files brought into the local working copy from cache or artifacts.
|
||||
|
||||
```yaml
|
||||
|
|
@ -3353,10 +3353,10 @@ globally or per-job in the [`variables`](#variables) section.
|
|||
|
||||
There are three possible values: `none`, `normal`, and `recursive`:
|
||||
|
||||
- `none` means that submodules will not be included when fetching the project
|
||||
- `none` means that submodules won't be included when fetching the project
|
||||
code. This is the default, which matches the pre-v1.10 behavior.
|
||||
|
||||
- `normal` means that only the top-level submodules will be included. It is
|
||||
- `normal` means that only the top-level submodules will be included. It's
|
||||
equivalent to:
|
||||
|
||||
```shell
|
||||
|
|
@ -3367,7 +3367,7 @@ There are three possible values: `none`, `normal`, and `recursive`:
|
|||
- `recursive` means that all submodules (including submodules of submodules)
|
||||
will be included. This feature needs Git v1.8.1 and later. When using a
|
||||
GitLab Runner with an executor not based on Docker, make sure the Git version
|
||||
meets that requirement. It is equivalent to:
|
||||
meets that requirement. It's equivalent to:
|
||||
|
||||
```shell
|
||||
git submodule sync --recursive
|
||||
|
|
@ -3480,7 +3480,7 @@ jobs, jobs may fail.
|
|||
|
||||
Since Git fetching and cloning is based on a ref, such as a branch name, Runners
|
||||
can't clone a specific commit SHA. If there are multiple jobs in the queue, or
|
||||
you are retrying an old job, the commit to be tested needs to be within the
|
||||
you're retrying an old job, the commit to be tested needs to be within the
|
||||
Git history that is cloned. Setting too small a value for `GIT_DEPTH` can make
|
||||
it impossible to run these old commits. You will see `unresolved reference` in
|
||||
job logs. You should then reconsider changing `GIT_DEPTH` to a higher value.
|
||||
|
|
@ -3499,7 +3499,7 @@ You can set it globally or per-job in the [`variables`](#variables) section.
|
|||
|
||||
### Custom build directories
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/1267) in GitLab Runner 11.10
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2211) in GitLab Runner 11.10
|
||||
|
||||
NOTE: **Note:**
|
||||
This can only be used when `custom_build_dir` is enabled in the [Runner's
|
||||
|
|
@ -3530,7 +3530,7 @@ setting.
|
|||
An executor using a concurrency greater than `1` might lead
|
||||
to failures because multiple jobs might be working on the same directory if the `builds_dir`
|
||||
is shared between jobs.
|
||||
GitLab Runner does not try to prevent this situation. It is up to the administrator
|
||||
GitLab Runner does not try to prevent this situation. It's up to the administrator
|
||||
and developers to comply with the requirements of Runner configuration.
|
||||
|
||||
To avoid this scenario, you can use a unique path within `$CI_BUILDS_DIR`, because Runner
|
||||
|
|
@ -3566,7 +3566,7 @@ test:
|
|||
#### Nested paths
|
||||
|
||||
The value of `GIT_CLONE_PATH` is expanded once and nesting variables
|
||||
within it is not supported.
|
||||
within is not supported.
|
||||
|
||||
For example, you define both the variables below in your
|
||||
`.gitlab-ci.yml` file:
|
||||
|
|
@ -3797,7 +3797,7 @@ lines where the job is defined:
|
|||
# - run test
|
||||
```
|
||||
|
||||
you can instead start its name with a dot (`.`) and it will not be processed by
|
||||
you can instead start its name with a dot (`.`) and it won't be processed by
|
||||
GitLab CI/CD. In the following example, `.hidden_job` will be ignored:
|
||||
|
||||
```yaml
|
||||
|
|
|
|||
|
|
@ -77,13 +77,24 @@ whether the move is necessary), and ensure that a technical writer reviews this
|
|||
change prior to merging.
|
||||
|
||||
If you indeed need to change a document's location, do not remove the old
|
||||
document, but instead replace all of its content with a new line:
|
||||
document, but instead replace all of its content with the following:
|
||||
|
||||
```md
|
||||
This document was moved to [another location](path/to/new_doc.md).
|
||||
---
|
||||
redirect_to: '../path/to/file/index.md'
|
||||
---
|
||||
|
||||
This document was moved to [another location](../path/to/file/index.md).
|
||||
```
|
||||
|
||||
where `path/to/new_doc.md` is the relative path to the root directory `doc/`.
|
||||
Where `../path/to/file/index.md` is usually the relative path to the old document.
|
||||
|
||||
The `redirect_to` variable supports both full and relative URLs, for example
|
||||
`https://docs.gitlab.com/ee/path/to/file.html`, `../path/to/file.html`, `path/to/file.md`.
|
||||
It ensures that the redirect will work for <https://docs.gitlab.com> and any `*.md` paths
|
||||
will be compiled to `*.html`.
|
||||
The new line underneath the frontmatter informs the user that the document
|
||||
changed location and is useful for someone that browses that file from the repository.
|
||||
|
||||
For example, if you move `doc/workflow/lfs/index.md` to
|
||||
`doc/administration/lfs.md`, then the steps would be:
|
||||
|
|
@ -92,12 +103,16 @@ For example, if you move `doc/workflow/lfs/index.md` to
|
|||
1. Replace the contents of `doc/workflow/lfs/index.md` with:
|
||||
|
||||
```md
|
||||
---
|
||||
redirect_to: '../../administration/lfs.md'
|
||||
---
|
||||
|
||||
This document was moved to [another location](../../administration/lfs.md).
|
||||
```
|
||||
|
||||
1. Find and replace any occurrences of the old location with the new one.
|
||||
A quick way to find them is to use `git grep`. First go to the root directory
|
||||
where you cloned the `gitlab` repository and then do:
|
||||
A quick way to find them is to use `git grep` on the repository you changed
|
||||
the file from:
|
||||
|
||||
```shell
|
||||
git grep -n "workflow/lfs/lfs_administration"
|
||||
|
|
@ -124,24 +139,6 @@ Things to note:
|
|||
built-in help page, that's why we omit it in `git grep`.
|
||||
- Use the checklist on the "Change documentation location" MR description template.
|
||||
|
||||
### Alternative redirection method
|
||||
|
||||
You can also replace the content
|
||||
of the old file with a frontmatter containing a redirect link:
|
||||
|
||||
```yaml
|
||||
---
|
||||
redirect_to: '../path/to/file/README.md'
|
||||
---
|
||||
```
|
||||
|
||||
It supports both full and relative URLs, e.g. `https://docs.gitlab.com/ee/path/to/file.html`, `../path/to/file.html`, `path/to/file.md`. Note that any `*.md` paths will be compiled to `*.html`.
|
||||
|
||||
NOTE: **Note:**
|
||||
This redirection method will not provide a redirect fallback on GitLab `/help`. When using
|
||||
it, make sure to add a link to the new page on the doc, otherwise it's a dead end for users that
|
||||
land on the doc via `/help`.
|
||||
|
||||
### Redirections for pages with Disqus comments
|
||||
|
||||
If the documentation page being relocated already has Disqus comments,
|
||||
|
|
|
|||
Binary file not shown.
|
After Width: | Height: | Size: 100 KiB |
|
|
@ -2,14 +2,23 @@
|
|||
|
||||
Integrating a security scanner into GitLab consists of providing end users
|
||||
with a [CI job definition](../../ci/yaml/README.md#introduction)
|
||||
they can add to their CI configuration files, to scan their GitLab projects.
|
||||
they can add to their CI configuration files to scan their GitLab projects.
|
||||
This CI job should then output its results in a GitLab-specified format. These results are then
|
||||
automatically presented in various places in GitLab, such as the Pipeline view, Merge Request
|
||||
widget, and Security Dashboard.
|
||||
|
||||
The scanning job is usually based on a [Docker image](https://docs.docker.com/)
|
||||
that contains the scanner and all its dependencies in a self-contained environment.
|
||||
This page documents requirements and guidelines for writing CI jobs implementing a security scanner,
|
||||
as well as requirements and guidelines for the Docker image itself.
|
||||
|
||||
This page documents requirements and guidelines for writing CI jobs that implement a security
|
||||
scanner, as well as requirements and guidelines for the Docker image.
|
||||
|
||||
## Job definition
|
||||
|
||||
This section describes several important fields to add to the security scanner's job
|
||||
definition file. Full documentation on these and other available fields can be viewed
|
||||
in the [CI documentation](../../ci/yaml/README.md#image).
|
||||
|
||||
### Name
|
||||
|
||||
For consistency, scanning jobs should be named after the scanner, in lower case.
|
||||
|
|
@ -26,8 +35,8 @@ containing the security scanner.
|
|||
### Script
|
||||
|
||||
The [`script`](../../ci/yaml/README.md#script) keyword
|
||||
is used to specify the command that the job runs.
|
||||
Because the `script` cannot be left empty, it must be set to the command that performs the scan.
|
||||
is used to specify the commands to run the scanner.
|
||||
Because the `script` entry can't be left empty, it must be set to the command that performs the scan.
|
||||
It is not possible to rely on the predefined `ENTRYPOINT` and `CMD` of the Docker image
|
||||
to perform the scan automatically, without passing any command.
|
||||
|
||||
|
|
@ -60,37 +69,34 @@ For example, here is the definition of a SAST job that generates a file named `g
|
|||
and uploads it as a SAST report:
|
||||
|
||||
```yaml
|
||||
mysec_dependency_scanning:
|
||||
mysec_sast_scanning:
|
||||
image: registry.gitlab.com/secure/mysec
|
||||
artifacts:
|
||||
reports:
|
||||
sast: gl-sast-report.json
|
||||
```
|
||||
|
||||
`gl-sast-report.json` is an example file path. See [the Output file section](#output-file) for more details.
|
||||
It is processed as a SAST report because it is declared as such in the job definition.
|
||||
Note that `gl-sast-report.json` is an example file path but any other file name can be used. See
|
||||
[the Output file section](#output-file) for more details. It's processed as a SAST report because
|
||||
it's declared under the `reports:sast` key in the job definition, not because of the file name.
|
||||
|
||||
### Policies
|
||||
|
||||
Scanning jobs should be skipped unless the corresponding feature is listed
|
||||
in the `GITLAB_FEATURES` variable (comma-separated list of values).
|
||||
So Dependency Scanning, Container Scanning, SAST, and DAST should be skipped
|
||||
unless `GITLAB_FEATURES` contains `dependency_scanning`, `container_scanning`, `sast`, and `dast`, respectively.
|
||||
See [GitLab CI/CD predefined variables](../../ci/variables/predefined_variables.md).
|
||||
Certain GitLab workflows, such as [AutoDevOps](../../topics/autodevops/customize.md#disable-jobs),
|
||||
define variables to indicate that given scans should be disabled. You can check for this by looking
|
||||
for variables such as `DEPENDENCY_SCANNING_DISABLED`, `CONTAINER_SCANNING_DISABLED`,
|
||||
`SAST_DISABLED`, and `DAST_DISABLED`. If appropriate based on the scanner type, you should then
|
||||
disable running the custom scanner.
|
||||
|
||||
Also, scanning jobs should be skipped when the corresponding variable prefixed with `_DISABLED` is present.
|
||||
See `DEPENDENCY_SCANNING_DISABLED`, `CONTAINER_SCANNING_DISABLED`, `SAST_DISABLED`, and `DAST_DISABLED`
|
||||
in [Auto DevOps documentation](../../topics/autodevops/customize.md#disable-jobs).
|
||||
|
||||
Finally, SAST and Dependency Scanning job definitions should use
|
||||
`CI_PROJECT_REPOSITORY_LANGUAGES` (comma-separated list of values)
|
||||
in order to skip the job when the language or technology is not supported.
|
||||
GitLab also defines a `CI_PROJECT_REPOSITORY_LANGUAGES` variable, which provides the list of
|
||||
languages in the repo. Depending on this value, your scanner may or may not do something different.
|
||||
Language detection currently relies on the [`linguist`](https://github.com/github/linguist) Ruby gem.
|
||||
See [GitLab CI/CD predefined variables](../../ci/variables/predefined_variables.md#variables-reference).
|
||||
|
||||
For instance, here is how to skip the Dependency Scanning job `mysec_dependency_scanning`
|
||||
unless the project repository contains Java source code,
|
||||
and the `dependency_scanning` feature is enabled:
|
||||
#### Policy checking example
|
||||
|
||||
This example shows how to skip a custom Dependency Scanning job, `mysec_dependency_scanning`, unless
|
||||
the project repository contains Java source code and the `dependency_scanning` feature is enabled:
|
||||
|
||||
```yaml
|
||||
mysec_dependency_scanning:
|
||||
|
|
@ -111,6 +117,8 @@ for a particular branch or when a particular set of files changes.
|
|||
|
||||
The Docker image is a self-contained environment that combines
|
||||
the scanner with all the libraries and tools it depends on.
|
||||
Packaging your scanner into a Docker image makes its dependencies and configuration always present,
|
||||
regardless of the individual machine the scanner runs on.
|
||||
|
||||
### Image size
|
||||
|
||||
|
|
@ -144,7 +152,7 @@ It also generates text output on the standard output and standard error streams,
|
|||
All CI variables are passed to the scanner as environment variables.
|
||||
The scanned project is described by the [predefined CI variables](../../ci/variables/README.md).
|
||||
|
||||
#### SAST, Dependency Scanning
|
||||
#### SAST and Dependency Scanning
|
||||
|
||||
SAST and Dependency Scanning scanners must scan the files in the project directory, given by the `CI_PROJECT_DIR` variable.
|
||||
|
||||
|
|
@ -223,11 +231,8 @@ The DAST variant of the report JSON format is not documented at the moment.
|
|||
|
||||
### Version
|
||||
|
||||
The documentation of
|
||||
[SAST](../../user/application_security/sast/index.md#reports-json-format),
|
||||
[Dependency Scanning](../../user/application_security/dependency_scanning/index.md#reports-json-format),
|
||||
and [Container Scanning](../../user/application_security/container_scanning/index.md#reports-json-format)
|
||||
describes the Secure report format version.
|
||||
This field specifies the version of the report schema you are using. Please reference individual scanner
|
||||
pages for the specific versions to use.
|
||||
|
||||
### Vulnerabilities
|
||||
|
||||
|
|
@ -251,12 +256,17 @@ The `id` should not collide with any other scanner another integrator would prov
|
|||
|
||||
#### Name, message, and description
|
||||
|
||||
The `name` and `message` fields contain a short description of the vulnerability,
|
||||
whereas the `description` field provides more details.
|
||||
The `name` and `message` fields contain a short description of the vulnerability.
|
||||
The `description` field provides more details.
|
||||
|
||||
The `name` is context-free and contains no information on where the vulnerability has been found,
|
||||
The `name` field is context-free and contains no information on where the vulnerability has been found,
|
||||
whereas the `message` may repeat the location.
|
||||
|
||||
As a visual example, this screenshot highlights where these fields are used when viewing a
|
||||
vulnerability as part of a pipeline view.
|
||||
|
||||

|
||||
|
||||
For instance, a `message` for a vulnerability
|
||||
reported by Dependency Scanning gives information on the vulnerable dependency,
|
||||
which is redundant with the `location` field of the vulnerability.
|
||||
|
|
@ -288,21 +298,17 @@ It should not repeat the other fields of the vulnerability object.
|
|||
In particular, the `description` should not repeat the `location` (what is affected)
|
||||
or the `solution` (how to mitigate the risk).
|
||||
|
||||
There is a proposal to remove either the `name` or the `message`, to remove ambiguities.
|
||||
See [issue #36779](https://gitlab.com/gitlab-org/gitlab/issues/36779).
|
||||
|
||||
#### Solution
|
||||
|
||||
The `solution` field may contain instructions users should follow to fix the vulnerability or to mitigate the risk.
|
||||
It is intended for users whereas the `remediations` objects are processed automatically by GitLab.
|
||||
You can use the `solution` field to instruct users how to fix the identified vulnerability or to mitigate
|
||||
the risk. End-users interact with this field, whereas GitLab automatically processes the
|
||||
`remediations` objects.
|
||||
|
||||
#### Identifiers
|
||||
|
||||
The `identifiers` array describes the vulnerability flaw that has been detected.
|
||||
An identifier object has a `type` and a `value`;
|
||||
these technical fields are used to tell if two identifiers are the same.
|
||||
It also has a `name` and a `url`;
|
||||
these fields are used to display the identifier in the user interface.
|
||||
The `identifiers` array describes the detected vulnerability. An identifier object's `type` and
|
||||
`value` fields are used to tell if two identifiers are the same. The user interface uses the
|
||||
object's `name` and `url` fields to display the identifier.
|
||||
|
||||
It is recommended to reuse the identifiers the GitLab scanners already define:
|
||||
|
||||
|
|
@ -316,18 +322,15 @@ It is recommended to reuse the identifiers the GitLab scanners already define:
|
|||
| [RHSA](https://access.redhat.com/errata/#/) | `rhsa` | RHSA-2020:0111 |
|
||||
| [ELSA](https://linux.oracle.com/security/) | `elsa` | ELSA-2020-0085 |
|
||||
|
||||
The generic identifiers listed above are defined in the [common library](https://gitlab.com/gitlab-org/security-products/analyzers/common);
|
||||
this library is shared by the analyzers maintained by GitLab,
|
||||
and this is where you can [contribute](https://gitlab.com/gitlab-org/security-products/analyzers/common/blob/master/issue/identifier.go) new generic identifiers.
|
||||
Analyzers may also produce vendor-specific or product-specific identifiers;
|
||||
these do not belong to the [common library](https://gitlab.com/gitlab-org/security-products/analyzers/common).
|
||||
The generic identifiers listed above are defined in the [common library](https://gitlab.com/gitlab-org/security-products/analyzers/common),
|
||||
which is shared by the analyzers that GitLab maintains. You can [contribute](https://gitlab.com/gitlab-org/security-products/analyzers/common/blob/master/issue/identifier.go)
|
||||
new generic identifiers to if needed. Analyzers may also produce vendor-specific or product-specific
|
||||
identifiers, which don't belong in the [common library](https://gitlab.com/gitlab-org/security-products/analyzers/common).
|
||||
|
||||
The first item of the `identifiers` array is called the primary identifier.
|
||||
The primary identifier is particularly important, because it is used to
|
||||
[track vulnerabilities](#tracking-merging-vulnerabilities)
|
||||
as new commits are pushed to the repository.
|
||||
|
||||
Identifiers are used to [merge duplicate vulnerabilities](#tracking-merging-vulnerabilities)
|
||||
[track vulnerabilities](#tracking-and-merging-vulnerabilities) as new commits are pushed to the repository.
|
||||
Identifiers are also used to [merge duplicate vulnerabilities](#tracking-and-merging-vulnerabilities)
|
||||
reported for the same commit, except for `CWE` and `WASC`.
|
||||
|
||||
### Location
|
||||
|
|
@ -336,7 +339,7 @@ The `location` indicates where the vulnerability has been detected.
|
|||
The format of the location depends on the type of scanning.
|
||||
|
||||
Internally GitLab extracts some attributes of the `location` to generate the **location fingerprint**,
|
||||
which is used to [track vulnerabilities](#tracking-merging-vulnerabilities)
|
||||
which is used to track vulnerabilities
|
||||
as new commits are pushed to the repository.
|
||||
The attributes used to generate the location fingerprint also depend on the type of scanning.
|
||||
|
||||
|
|
@ -426,12 +429,12 @@ combines `file`, `start_line`, and `end_line`,
|
|||
so these attributes are mandatory.
|
||||
All other attributes are optional.
|
||||
|
||||
### Tracking, merging vulnerabilities
|
||||
### Tracking and merging vulnerabilities
|
||||
|
||||
Users may give feedback on a vulnerability:
|
||||
|
||||
- they may dismiss a vulnerability if it does not apply to their projects
|
||||
- or they may create an issue for a vulnerability, if there is a possible threat
|
||||
- They may dismiss a vulnerability if it doesn't apply to their projects
|
||||
- They may create an issue for a vulnerability if there's a possible threat
|
||||
|
||||
GitLab tracks vulnerabilities so that user feedback is not lost
|
||||
when new Git commits are pushed to the repository.
|
||||
|
|
|
|||
|
|
@ -44,16 +44,19 @@ module Gitlab
|
|||
create_commit(snippet)
|
||||
end
|
||||
|
||||
# Removing the db record
|
||||
def destroy_snippet_repository(snippet)
|
||||
# Removing the db record
|
||||
snippet.snippet_repository&.destroy
|
||||
snippet.snippet_repository&.delete
|
||||
rescue => e
|
||||
logger.error(message: "Snippet Migration: error destroying snippet repository. Reason: #{e.message}", snippet: snippet.id)
|
||||
end
|
||||
|
||||
# Removing the repository in disk
|
||||
def delete_repository(snippet)
|
||||
# Removing the repository in disk
|
||||
snippet.repository.remove if snippet.repository_exists?
|
||||
return unless snippet.repository_exists?
|
||||
|
||||
snippet.repository.remove
|
||||
snippet.repository.expire_exists_cache
|
||||
rescue => e
|
||||
logger.error(message: "Snippet Migration: error deleting repository. Reason: #{e.message}", snippet: snippet.id)
|
||||
end
|
||||
|
|
@ -82,7 +85,24 @@ module Gitlab
|
|||
end
|
||||
|
||||
def create_commit(snippet)
|
||||
snippet.snippet_repository.multi_files_action(snippet.author, snippet_action(snippet), commit_attrs)
|
||||
snippet.snippet_repository.multi_files_action(commit_author(snippet), snippet_action(snippet), commit_attrs)
|
||||
end
|
||||
|
||||
# If the user is not allowed to access git or update the snippet
|
||||
# because it is blocked, internal, ghost, ... we cannot commit
|
||||
# files because these users are not allowed to, but we need to
|
||||
# migrate their snippets as well.
|
||||
# In this scenario an admin user will be the one that will commit the files.
|
||||
def commit_author(snippet)
|
||||
if Gitlab::UserAccessSnippet.new(snippet.author, snippet: snippet).can_do_action?(:update_snippet)
|
||||
snippet.author
|
||||
else
|
||||
admin_user
|
||||
end
|
||||
end
|
||||
|
||||
def admin_user
|
||||
@admin_user ||= User.admins.active.first
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -21041,6 +21041,9 @@ msgstr ""
|
|||
msgid "They can be managed using the %{link}."
|
||||
msgstr ""
|
||||
|
||||
msgid "Third Party Advisory Link"
|
||||
msgstr ""
|
||||
|
||||
msgid "Third party offers"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@ -4,5 +4,10 @@ FactoryBot.define do
|
|||
factory :remote_mirror, class: 'RemoteMirror' do
|
||||
association :project, :repository
|
||||
url { "http://foo:bar@test.com" }
|
||||
|
||||
trait :ssh do
|
||||
url { 'ssh://git@test.com:foo/bar.git' }
|
||||
auth_method { 'ssh_public_key' }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,10 @@
|
|||
/* eslint-disable class-methods-use-this */
|
||||
export default class WebWorkerMock {
|
||||
addEventListener() {}
|
||||
|
||||
removeEventListener() {}
|
||||
|
||||
terminate() {}
|
||||
|
||||
postMessage() {}
|
||||
}
|
||||
|
|
@ -26,7 +26,7 @@ describe('IDE activity bar', () => {
|
|||
|
||||
describe('updateActivityBarView', () => {
|
||||
beforeEach(() => {
|
||||
spyOn(vm, 'updateActivityBarView');
|
||||
jest.spyOn(vm, 'updateActivityBarView').mockImplementation(() => {});
|
||||
|
||||
vm.$mount();
|
||||
});
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
import Vue from 'vue';
|
||||
import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
|
||||
import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
|
||||
import { createStore } from '~/ide/stores';
|
||||
import Bar from '~/ide/components/file_templates/bar.vue';
|
||||
import { resetStore, file } from '../../helpers';
|
||||
|
|
@ -35,7 +35,7 @@ describe('IDE file templates bar component', () => {
|
|||
});
|
||||
|
||||
it('calls setSelectedTemplateType when clicking item', () => {
|
||||
spyOn(vm, 'setSelectedTemplateType').and.stub();
|
||||
jest.spyOn(vm, 'setSelectedTemplateType').mockImplementation();
|
||||
|
||||
vm.$el.querySelector('.dropdown-content button').click();
|
||||
|
||||
|
|
@ -66,7 +66,7 @@ describe('IDE file templates bar component', () => {
|
|||
});
|
||||
|
||||
it('calls fetchTemplate on click', () => {
|
||||
spyOn(vm, 'fetchTemplate').and.stub();
|
||||
jest.spyOn(vm, 'fetchTemplate').mockImplementation();
|
||||
|
||||
vm.$el
|
||||
.querySelectorAll('.dropdown-content')[1]
|
||||
|
|
@ -90,7 +90,7 @@ describe('IDE file templates bar component', () => {
|
|||
});
|
||||
|
||||
it('calls undoFileTemplate when clicking undo button', () => {
|
||||
spyOn(vm, 'undoFileTemplate').and.stub();
|
||||
jest.spyOn(vm, 'undoFileTemplate').mockImplementation();
|
||||
|
||||
vm.$el.querySelector('.btn-default').click();
|
||||
|
||||
|
|
@ -100,7 +100,7 @@ describe('IDE file templates bar component', () => {
|
|||
it('calls setSelectedTemplateType if activeFile name matches a template', done => {
|
||||
const fileName = '.gitlab-ci.yml';
|
||||
|
||||
spyOn(vm, 'setSelectedTemplateType');
|
||||
jest.spyOn(vm, 'setSelectedTemplateType').mockImplementation(() => {});
|
||||
vm.$store.state.openFiles[0].name = fileName;
|
||||
|
||||
vm.setInitialType();
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
import Vue from 'vue';
|
||||
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
|
||||
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
|
||||
import store from '~/ide/stores';
|
||||
import ideSidebar from '~/ide/components/ide_side_bar.vue';
|
||||
import { leftSidebarViews } from '~/ide/constants';
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
import Vue from 'vue';
|
||||
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
|
||||
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
|
||||
import store from '~/ide/stores';
|
||||
import ide from '~/ide/components/ide.vue';
|
||||
import { file, resetStore } from '../helpers';
|
||||
|
|
@ -35,7 +35,7 @@ describe('IDE tree list', () => {
|
|||
beforeEach(() => {
|
||||
bootstrapWithTree();
|
||||
|
||||
spyOn(vm, 'updateViewer').and.callThrough();
|
||||
jest.spyOn(vm, 'updateViewer');
|
||||
|
||||
vm.$mount();
|
||||
});
|
||||
|
|
@ -64,7 +64,7 @@ describe('IDE tree list', () => {
|
|||
beforeEach(() => {
|
||||
bootstrapWithTree(emptyBranchTree);
|
||||
|
||||
spyOn(vm, 'updateViewer').and.callThrough();
|
||||
jest.spyOn(vm, 'updateViewer');
|
||||
|
||||
vm.$mount();
|
||||
});
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
import Vue from 'vue';
|
||||
import { trimText } from 'spec/helpers/text_helper';
|
||||
import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
|
||||
import { trimText } from 'helpers/text_helper';
|
||||
import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
|
||||
import NavDropdownButton from '~/ide/components/nav_dropdown_button.vue';
|
||||
import { createStore } from '~/ide/stores';
|
||||
|
||||
|
|
@ -0,0 +1,102 @@
|
|||
import $ from 'jquery';
|
||||
import { mount } from '@vue/test-utils';
|
||||
import { createStore } from '~/ide/stores';
|
||||
import NavDropdown from '~/ide/components/nav_dropdown.vue';
|
||||
import { PERMISSION_READ_MR } from '~/ide/constants';
|
||||
|
||||
const TEST_PROJECT_ID = 'lorem-ipsum';
|
||||
|
||||
describe('IDE NavDropdown', () => {
|
||||
let store;
|
||||
let wrapper;
|
||||
|
||||
beforeEach(() => {
|
||||
store = createStore();
|
||||
Object.assign(store.state, {
|
||||
currentProjectId: TEST_PROJECT_ID,
|
||||
currentBranchId: 'master',
|
||||
projects: {
|
||||
[TEST_PROJECT_ID]: {
|
||||
userPermissions: {
|
||||
[PERMISSION_READ_MR]: true,
|
||||
},
|
||||
branches: {
|
||||
master: { id: 'master' },
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
jest.spyOn(store, 'dispatch').mockImplementation(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
wrapper.destroy();
|
||||
});
|
||||
|
||||
const createComponent = () => {
|
||||
wrapper = mount(NavDropdown, {
|
||||
store,
|
||||
});
|
||||
};
|
||||
|
||||
const findIcon = name => wrapper.find(`.ic-${name}`);
|
||||
const findMRIcon = () => findIcon('merge-request');
|
||||
const findNavForm = () => wrapper.find('.ide-nav-form');
|
||||
const showDropdown = () => {
|
||||
$(wrapper.vm.$el).trigger('show.bs.dropdown');
|
||||
};
|
||||
const hideDropdown = () => {
|
||||
$(wrapper.vm.$el).trigger('hide.bs.dropdown');
|
||||
};
|
||||
|
||||
describe('default', () => {
|
||||
beforeEach(() => {
|
||||
createComponent();
|
||||
});
|
||||
|
||||
it('renders nothing initially', () => {
|
||||
expect(findNavForm().exists()).toBe(false);
|
||||
});
|
||||
|
||||
it('renders nav form when show.bs.dropdown', done => {
|
||||
showDropdown();
|
||||
|
||||
wrapper.vm
|
||||
.$nextTick()
|
||||
.then(() => {
|
||||
expect(findNavForm().exists()).toBe(true);
|
||||
})
|
||||
.then(done)
|
||||
.catch(done.fail);
|
||||
});
|
||||
|
||||
it('destroys nav form when closed', done => {
|
||||
showDropdown();
|
||||
hideDropdown();
|
||||
|
||||
wrapper.vm
|
||||
.$nextTick()
|
||||
.then(() => {
|
||||
expect(findNavForm().exists()).toBe(false);
|
||||
})
|
||||
.then(done)
|
||||
.catch(done.fail);
|
||||
});
|
||||
|
||||
it('renders merge request icon', () => {
|
||||
expect(findMRIcon().exists()).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when user cannot read merge requests', () => {
|
||||
beforeEach(() => {
|
||||
store.state.projects[TEST_PROJECT_ID].userPermissions = {};
|
||||
|
||||
createComponent();
|
||||
});
|
||||
|
||||
it('does not render merge requests', () => {
|
||||
expect(findMRIcon().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
import Vue from 'vue';
|
||||
import mountComponent from 'spec/helpers/vue_mount_component_helper';
|
||||
import mountComponent from 'helpers/vue_mount_component_helper';
|
||||
import Button from '~/ide/components/new_dropdown/button.vue';
|
||||
|
||||
describe('IDE new entry dropdown button component', () => {
|
||||
|
|
@ -16,7 +16,7 @@ describe('IDE new entry dropdown button component', () => {
|
|||
icon: 'doc-new',
|
||||
});
|
||||
|
||||
spyOn(vm, '$emit');
|
||||
jest.spyOn(vm, '$emit').mockImplementation(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
import Vue from 'vue';
|
||||
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
|
||||
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
|
||||
import store from '~/ide/stores';
|
||||
import newDropdown from '~/ide/components/new_dropdown/index.vue';
|
||||
import { resetStore } from '../../helpers';
|
||||
|
|
@ -23,7 +23,7 @@ describe('new dropdown component', () => {
|
|||
tree: [],
|
||||
};
|
||||
|
||||
spyOn(vm, 'openNewEntryModal');
|
||||
jest.spyOn(vm, 'openNewEntryModal').mockImplementation(() => {});
|
||||
|
||||
vm.$mount();
|
||||
});
|
||||
|
|
@ -58,11 +58,11 @@ describe('new dropdown component', () => {
|
|||
|
||||
describe('isOpen', () => {
|
||||
it('scrolls dropdown into view', done => {
|
||||
spyOn(vm.$refs.dropdownMenu, 'scrollIntoView');
|
||||
jest.spyOn(vm.$refs.dropdownMenu, 'scrollIntoView').mockImplementation(() => {});
|
||||
|
||||
vm.isOpen = true;
|
||||
|
||||
setTimeout(() => {
|
||||
setImmediate(() => {
|
||||
expect(vm.$refs.dropdownMenu.scrollIntoView).toHaveBeenCalledWith({
|
||||
block: 'nearest',
|
||||
});
|
||||
|
|
@ -74,7 +74,7 @@ describe('new dropdown component', () => {
|
|||
|
||||
describe('delete entry', () => {
|
||||
it('calls delete action', () => {
|
||||
spyOn(vm, 'deleteEntry');
|
||||
jest.spyOn(vm, 'deleteEntry').mockImplementation(() => {});
|
||||
|
||||
vm.$el.querySelectorAll('.dropdown-menu button')[4].click();
|
||||
|
||||
|
|
@ -1,7 +1,10 @@
|
|||
import Vue from 'vue';
|
||||
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
|
||||
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
|
||||
import { createStore } from '~/ide/stores';
|
||||
import modal from '~/ide/components/new_dropdown/modal.vue';
|
||||
import createFlash from '~/flash';
|
||||
|
||||
jest.mock('~/flash');
|
||||
|
||||
describe('new file modal component', () => {
|
||||
const Component = Vue.extend(modal);
|
||||
|
|
@ -11,47 +14,45 @@ describe('new file modal component', () => {
|
|||
vm.$destroy();
|
||||
});
|
||||
|
||||
['tree', 'blob'].forEach(type => {
|
||||
describe(type, () => {
|
||||
beforeEach(() => {
|
||||
const store = createStore();
|
||||
store.state.entryModal = {
|
||||
type,
|
||||
describe.each(['tree', 'blob'])('%s', type => {
|
||||
beforeEach(() => {
|
||||
const store = createStore();
|
||||
store.state.entryModal = {
|
||||
type,
|
||||
path: '',
|
||||
entry: {
|
||||
path: '',
|
||||
entry: {
|
||||
path: '',
|
||||
},
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
vm = createComponentWithStore(Component, store).$mount();
|
||||
vm = createComponentWithStore(Component, store).$mount();
|
||||
|
||||
vm.name = 'testing';
|
||||
});
|
||||
vm.name = 'testing';
|
||||
});
|
||||
|
||||
it(`sets modal title as ${type}`, () => {
|
||||
const title = type === 'tree' ? 'directory' : 'file';
|
||||
it(`sets modal title as ${type}`, () => {
|
||||
const title = type === 'tree' ? 'directory' : 'file';
|
||||
|
||||
expect(vm.$el.querySelector('.modal-title').textContent.trim()).toBe(`Create new ${title}`);
|
||||
});
|
||||
expect(vm.$el.querySelector('.modal-title').textContent.trim()).toBe(`Create new ${title}`);
|
||||
});
|
||||
|
||||
it(`sets button label as ${type}`, () => {
|
||||
const title = type === 'tree' ? 'directory' : 'file';
|
||||
it(`sets button label as ${type}`, () => {
|
||||
const title = type === 'tree' ? 'directory' : 'file';
|
||||
|
||||
expect(vm.$el.querySelector('.btn-success').textContent.trim()).toBe(`Create ${title}`);
|
||||
});
|
||||
expect(vm.$el.querySelector('.btn-success').textContent.trim()).toBe(`Create ${title}`);
|
||||
});
|
||||
|
||||
it(`sets form label as ${type}`, () => {
|
||||
expect(vm.$el.querySelector('.label-bold').textContent.trim()).toBe('Name');
|
||||
});
|
||||
it(`sets form label as ${type}`, () => {
|
||||
expect(vm.$el.querySelector('.label-bold').textContent.trim()).toBe('Name');
|
||||
});
|
||||
|
||||
it(`${type === 'tree' ? 'does not show' : 'shows'} file templates`, () => {
|
||||
const templateFilesEl = vm.$el.querySelector('.file-templates');
|
||||
if (type === 'tree') {
|
||||
expect(templateFilesEl).toBeNull();
|
||||
} else {
|
||||
expect(templateFilesEl instanceof Element).toBeTruthy();
|
||||
}
|
||||
});
|
||||
it(`${type === 'tree' ? 'does not show' : 'shows'} file templates`, () => {
|
||||
const templateFilesEl = vm.$el.querySelector('.file-templates');
|
||||
if (type === 'tree') {
|
||||
expect(templateFilesEl).toBeNull();
|
||||
} else {
|
||||
expect(templateFilesEl instanceof Element).toBeTruthy();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@ -131,16 +132,15 @@ describe('new file modal component', () => {
|
|||
};
|
||||
|
||||
vm = createComponentWithStore(Component, store).$mount();
|
||||
const flashSpy = spyOnDependency(modal, 'flash');
|
||||
|
||||
expect(flashSpy).not.toHaveBeenCalled();
|
||||
expect(createFlash).not.toHaveBeenCalled();
|
||||
|
||||
vm.submitForm();
|
||||
|
||||
expect(flashSpy).toHaveBeenCalledWith(
|
||||
expect(createFlash).toHaveBeenCalledWith(
|
||||
'The name "test-path/test" is already taken in this directory.',
|
||||
'alert',
|
||||
jasmine.anything(),
|
||||
expect.anything(),
|
||||
null,
|
||||
false,
|
||||
true,
|
||||
|
|
@ -17,7 +17,7 @@ describe('RepoTab', () => {
|
|||
}
|
||||
|
||||
beforeEach(() => {
|
||||
spyOn(router, 'push');
|
||||
jest.spyOn(router, 'push').mockImplementation(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
|
|
@ -47,7 +47,7 @@ describe('RepoTab', () => {
|
|||
},
|
||||
});
|
||||
|
||||
spyOn(vm, 'openPendingTab');
|
||||
jest.spyOn(vm, 'openPendingTab').mockImplementation(() => {});
|
||||
|
||||
vm.$el.click();
|
||||
|
||||
|
|
@ -63,7 +63,7 @@ describe('RepoTab', () => {
|
|||
tab: file(),
|
||||
});
|
||||
|
||||
spyOn(vm, 'clickFile');
|
||||
jest.spyOn(vm, 'clickFile').mockImplementation(() => {});
|
||||
|
||||
vm.$el.click();
|
||||
|
||||
|
|
@ -75,7 +75,7 @@ describe('RepoTab', () => {
|
|||
tab: file(),
|
||||
});
|
||||
|
||||
spyOn(vm, 'closeFile');
|
||||
jest.spyOn(vm, 'closeFile').mockImplementation(() => {});
|
||||
|
||||
vm.$el.querySelector('.multi-file-tab-close').click();
|
||||
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
import Vue from 'vue';
|
||||
import mountComponent from 'spec/helpers/vue_mount_component_helper';
|
||||
import mountComponent from 'helpers/vue_mount_component_helper';
|
||||
import TokenedInput from '~/ide/components/shared/tokened_input.vue';
|
||||
|
||||
const TEST_PLACEHOLDER = 'Searching in test';
|
||||
|
|
@ -36,7 +36,7 @@ describe('IDE shared/TokenedInput', () => {
|
|||
value: TEST_VALUE,
|
||||
});
|
||||
|
||||
spyOn(vm, '$emit');
|
||||
jest.spyOn(vm, '$emit').mockImplementation(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
|
|
@ -72,7 +72,7 @@ describe('IDE shared/TokenedInput', () => {
|
|||
});
|
||||
|
||||
it('when input triggers backspace event, it calls "onBackspace"', () => {
|
||||
spyOn(vm, 'onBackspace');
|
||||
jest.spyOn(vm, 'onBackspace').mockImplementation(() => {});
|
||||
|
||||
vm.$refs.input.dispatchEvent(createBackspaceEvent());
|
||||
vm.$refs.input.dispatchEvent(createBackspaceEvent());
|
||||
|
|
@ -28,7 +28,7 @@ describe('Multi-file editor library model manager', () => {
|
|||
});
|
||||
|
||||
it('adds model into disposable', () => {
|
||||
spyOn(instance.disposable, 'add').and.callThrough();
|
||||
jest.spyOn(instance.disposable, 'add');
|
||||
|
||||
instance.addModel(file());
|
||||
|
||||
|
|
@ -36,7 +36,7 @@ describe('Multi-file editor library model manager', () => {
|
|||
});
|
||||
|
||||
it('returns cached model', () => {
|
||||
spyOn(instance.models, 'get').and.callThrough();
|
||||
jest.spyOn(instance.models, 'get');
|
||||
|
||||
instance.addModel(file());
|
||||
instance.addModel(file());
|
||||
|
|
@ -46,13 +46,13 @@ describe('Multi-file editor library model manager', () => {
|
|||
|
||||
it('adds eventHub listener', () => {
|
||||
const f = file();
|
||||
spyOn(eventHub, '$on').and.callThrough();
|
||||
jest.spyOn(eventHub, '$on');
|
||||
|
||||
instance.addModel(f);
|
||||
|
||||
expect(eventHub.$on).toHaveBeenCalledWith(
|
||||
`editor.update.model.dispose.${f.key}`,
|
||||
jasmine.anything(),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
|
@ -95,13 +95,13 @@ describe('Multi-file editor library model manager', () => {
|
|||
});
|
||||
|
||||
it('removes eventHub listener', () => {
|
||||
spyOn(eventHub, '$off').and.callThrough();
|
||||
jest.spyOn(eventHub, '$off');
|
||||
|
||||
instance.removeCachedModel(f);
|
||||
|
||||
expect(eventHub.$off).toHaveBeenCalledWith(
|
||||
`editor.update.model.dispose.${f.key}`,
|
||||
jasmine.anything(),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
|
@ -116,7 +116,7 @@ describe('Multi-file editor library model manager', () => {
|
|||
});
|
||||
|
||||
it('calls disposable dispose', () => {
|
||||
spyOn(instance.disposable, 'dispose').and.callThrough();
|
||||
jest.spyOn(instance.disposable, 'dispose');
|
||||
|
||||
instance.dispose();
|
||||
|
||||
|
|
@ -6,7 +6,7 @@ describe('Multi-file editor library model', () => {
|
|||
let model;
|
||||
|
||||
beforeEach(() => {
|
||||
spyOn(eventHub, '$on').and.callThrough();
|
||||
jest.spyOn(eventHub, '$on');
|
||||
|
||||
const f = file('path');
|
||||
f.mrChange = { diff: 'ABC' };
|
||||
|
|
@ -44,7 +44,7 @@ describe('Multi-file editor library model', () => {
|
|||
it('adds eventHub listener', () => {
|
||||
expect(eventHub.$on).toHaveBeenCalledWith(
|
||||
`editor.update.model.dispose.${model.file.key}`,
|
||||
jasmine.anything(),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
|
|
@ -82,13 +82,13 @@ describe('Multi-file editor library model', () => {
|
|||
|
||||
describe('onChange', () => {
|
||||
it('calls callback on change', done => {
|
||||
const spy = jasmine.createSpy();
|
||||
const spy = jest.fn();
|
||||
model.onChange(spy);
|
||||
|
||||
model.getModel().setValue('123');
|
||||
|
||||
setTimeout(() => {
|
||||
expect(spy).toHaveBeenCalledWith(model, jasmine.anything());
|
||||
setImmediate(() => {
|
||||
expect(spy).toHaveBeenCalledWith(model, expect.anything());
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
|
@ -96,7 +96,7 @@ describe('Multi-file editor library model', () => {
|
|||
|
||||
describe('dispose', () => {
|
||||
it('calls disposable dispose', () => {
|
||||
spyOn(model.disposable, 'dispose').and.callThrough();
|
||||
jest.spyOn(model.disposable, 'dispose');
|
||||
|
||||
model.dispose();
|
||||
|
||||
|
|
@ -114,18 +114,18 @@ describe('Multi-file editor library model', () => {
|
|||
});
|
||||
|
||||
it('removes eventHub listener', () => {
|
||||
spyOn(eventHub, '$off').and.callThrough();
|
||||
jest.spyOn(eventHub, '$off');
|
||||
|
||||
model.dispose();
|
||||
|
||||
expect(eventHub.$off).toHaveBeenCalledWith(
|
||||
`editor.update.model.dispose.${model.file.key}`,
|
||||
jasmine.anything(),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('calls onDispose callback', () => {
|
||||
const disposeSpy = jasmine.createSpy();
|
||||
const disposeSpy = jest.fn();
|
||||
|
||||
model.onDispose(disposeSpy);
|
||||
|
||||
|
|
@ -60,7 +60,7 @@ describe('Multi-file editor library decorations controller', () => {
|
|||
});
|
||||
|
||||
it('calls decorate method', () => {
|
||||
spyOn(controller, 'decorate');
|
||||
jest.spyOn(controller, 'decorate').mockImplementation(() => {});
|
||||
|
||||
controller.addDecorations(model, 'key', [{ decoration: 'decorationValue' }]);
|
||||
|
||||
|
|
@ -70,7 +70,7 @@ describe('Multi-file editor library decorations controller', () => {
|
|||
|
||||
describe('decorate', () => {
|
||||
it('sets decorations on editor instance', () => {
|
||||
spyOn(controller.editor.instance, 'deltaDecorations');
|
||||
jest.spyOn(controller.editor.instance, 'deltaDecorations').mockImplementation(() => {});
|
||||
|
||||
controller.decorate(model);
|
||||
|
||||
|
|
@ -78,7 +78,7 @@ describe('Multi-file editor library decorations controller', () => {
|
|||
});
|
||||
|
||||
it('caches decorations', () => {
|
||||
spyOn(controller.editor.instance, 'deltaDecorations').and.returnValue([]);
|
||||
jest.spyOn(controller.editor.instance, 'deltaDecorations').mockReturnValue([]);
|
||||
|
||||
controller.decorate(model);
|
||||
|
||||
|
|
@ -86,7 +86,7 @@ describe('Multi-file editor library decorations controller', () => {
|
|||
});
|
||||
|
||||
it('caches decorations by model URL', () => {
|
||||
spyOn(controller.editor.instance, 'deltaDecorations').and.returnValue([]);
|
||||
jest.spyOn(controller.editor.instance, 'deltaDecorations').mockReturnValue([]);
|
||||
|
||||
controller.decorate(model);
|
||||
|
||||
|
|
@ -75,7 +75,7 @@ describe('Multi-file editor library dirty diff controller', () => {
|
|||
|
||||
describe('attachModel', () => {
|
||||
it('adds change event callback', () => {
|
||||
spyOn(model, 'onChange');
|
||||
jest.spyOn(model, 'onChange').mockImplementation(() => {});
|
||||
|
||||
controller.attachModel(model);
|
||||
|
||||
|
|
@ -83,7 +83,7 @@ describe('Multi-file editor library dirty diff controller', () => {
|
|||
});
|
||||
|
||||
it('adds dispose event callback', () => {
|
||||
spyOn(model, 'onDispose');
|
||||
jest.spyOn(model, 'onDispose').mockImplementation(() => {});
|
||||
|
||||
controller.attachModel(model);
|
||||
|
||||
|
|
@ -91,7 +91,7 @@ describe('Multi-file editor library dirty diff controller', () => {
|
|||
});
|
||||
|
||||
it('calls throttledComputeDiff on change', () => {
|
||||
spyOn(controller, 'throttledComputeDiff');
|
||||
jest.spyOn(controller, 'throttledComputeDiff').mockImplementation(() => {});
|
||||
|
||||
controller.attachModel(model);
|
||||
|
||||
|
|
@ -109,7 +109,7 @@ describe('Multi-file editor library dirty diff controller', () => {
|
|||
|
||||
describe('computeDiff', () => {
|
||||
it('posts to worker', () => {
|
||||
spyOn(controller.dirtyDiffWorker, 'postMessage');
|
||||
jest.spyOn(controller.dirtyDiffWorker, 'postMessage').mockImplementation(() => {});
|
||||
|
||||
controller.computeDiff(model);
|
||||
|
||||
|
|
@ -123,7 +123,7 @@ describe('Multi-file editor library dirty diff controller', () => {
|
|||
|
||||
describe('reDecorate', () => {
|
||||
it('calls computeDiff when no decorations are cached', () => {
|
||||
spyOn(controller, 'computeDiff');
|
||||
jest.spyOn(controller, 'computeDiff').mockImplementation(() => {});
|
||||
|
||||
controller.reDecorate(model);
|
||||
|
||||
|
|
@ -131,7 +131,7 @@ describe('Multi-file editor library dirty diff controller', () => {
|
|||
});
|
||||
|
||||
it('calls decorate when decorations are cached', () => {
|
||||
spyOn(controller.decorationsController, 'decorate');
|
||||
jest.spyOn(controller.decorationsController, 'decorate').mockImplementation(() => {});
|
||||
|
||||
controller.decorationsController.decorations.set(model.url, 'test');
|
||||
|
||||
|
|
@ -143,19 +143,19 @@ describe('Multi-file editor library dirty diff controller', () => {
|
|||
|
||||
describe('decorate', () => {
|
||||
it('adds decorations into decorations controller', () => {
|
||||
spyOn(controller.decorationsController, 'addDecorations');
|
||||
jest.spyOn(controller.decorationsController, 'addDecorations').mockImplementation(() => {});
|
||||
|
||||
controller.decorate({ data: { changes: [], path: model.path } });
|
||||
|
||||
expect(controller.decorationsController.addDecorations).toHaveBeenCalledWith(
|
||||
model,
|
||||
'dirtyDiff',
|
||||
jasmine.anything(),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('adds decorations into editor', () => {
|
||||
const spy = spyOn(controller.decorationsController.editor.instance, 'deltaDecorations');
|
||||
const spy = jest.spyOn(controller.decorationsController.editor.instance, 'deltaDecorations');
|
||||
|
||||
controller.decorate({
|
||||
data: { changes: computeDiff('123', '1234'), path: model.path },
|
||||
|
|
@ -178,7 +178,7 @@ describe('Multi-file editor library dirty diff controller', () => {
|
|||
|
||||
describe('dispose', () => {
|
||||
it('calls disposable dispose', () => {
|
||||
spyOn(controller.disposable, 'dispose').and.callThrough();
|
||||
jest.spyOn(controller.disposable, 'dispose');
|
||||
|
||||
controller.dispose();
|
||||
|
||||
|
|
@ -186,7 +186,7 @@ describe('Multi-file editor library dirty diff controller', () => {
|
|||
});
|
||||
|
||||
it('terminates worker', () => {
|
||||
spyOn(controller.dirtyDiffWorker, 'terminate').and.callThrough();
|
||||
jest.spyOn(controller.dirtyDiffWorker, 'terminate');
|
||||
|
||||
controller.dispose();
|
||||
|
||||
|
|
@ -194,13 +194,13 @@ describe('Multi-file editor library dirty diff controller', () => {
|
|||
});
|
||||
|
||||
it('removes worker event listener', () => {
|
||||
spyOn(controller.dirtyDiffWorker, 'removeEventListener').and.callThrough();
|
||||
jest.spyOn(controller.dirtyDiffWorker, 'removeEventListener');
|
||||
|
||||
controller.dispose();
|
||||
|
||||
expect(controller.dirtyDiffWorker.removeEventListener).toHaveBeenCalledWith(
|
||||
'message',
|
||||
jasmine.anything(),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
|
|
@ -1,5 +1,6 @@
|
|||
import { editor as monacoEditor } from 'monaco-editor';
|
||||
import Editor from '~/ide/lib/editor';
|
||||
import { defaultEditorOptions } from '~/ide/lib/editor_options';
|
||||
import { file } from '../helpers';
|
||||
|
||||
describe('Multi-file editor library', () => {
|
||||
|
|
@ -7,6 +8,14 @@ describe('Multi-file editor library', () => {
|
|||
let el;
|
||||
let holder;
|
||||
|
||||
const setNodeOffsetWidth = val => {
|
||||
Object.defineProperty(instance.instance.getDomNode(), 'offsetWidth', {
|
||||
get() {
|
||||
return val;
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
el = document.createElement('div');
|
||||
holder = document.createElement('div');
|
||||
|
|
@ -18,7 +27,9 @@ describe('Multi-file editor library', () => {
|
|||
});
|
||||
|
||||
afterEach(() => {
|
||||
instance.modelManager.dispose();
|
||||
instance.dispose();
|
||||
Editor.editorInstance = null;
|
||||
|
||||
el.remove();
|
||||
});
|
||||
|
|
@ -33,7 +44,7 @@ describe('Multi-file editor library', () => {
|
|||
|
||||
describe('createInstance', () => {
|
||||
it('creates editor instance', () => {
|
||||
spyOn(monacoEditor, 'create').and.callThrough();
|
||||
jest.spyOn(monacoEditor, 'create');
|
||||
|
||||
instance.createInstance(holder);
|
||||
|
||||
|
|
@ -55,33 +66,25 @@ describe('Multi-file editor library', () => {
|
|||
|
||||
describe('createDiffInstance', () => {
|
||||
it('creates editor instance', () => {
|
||||
spyOn(monacoEditor, 'createDiffEditor').and.callThrough();
|
||||
jest.spyOn(monacoEditor, 'createDiffEditor');
|
||||
|
||||
instance.createDiffInstance(holder);
|
||||
|
||||
expect(monacoEditor.createDiffEditor).toHaveBeenCalledWith(holder, {
|
||||
model: null,
|
||||
contextmenu: true,
|
||||
minimap: {
|
||||
enabled: false,
|
||||
},
|
||||
readOnly: true,
|
||||
scrollBeyondLastLine: false,
|
||||
renderWhitespace: 'none',
|
||||
...defaultEditorOptions,
|
||||
quickSuggestions: false,
|
||||
occurrencesHighlight: false,
|
||||
wordWrap: 'on',
|
||||
renderSideBySide: true,
|
||||
renderSideBySide: false,
|
||||
readOnly: true,
|
||||
renderLineHighlight: 'all',
|
||||
hideCursorInOverviewRuler: false,
|
||||
theme: 'vs white',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('createModel', () => {
|
||||
it('calls model manager addModel', () => {
|
||||
spyOn(instance.modelManager, 'addModel');
|
||||
jest.spyOn(instance.modelManager, 'addModel').mockImplementation(() => {});
|
||||
|
||||
instance.createModel('FILE');
|
||||
|
||||
|
|
@ -105,7 +108,7 @@ describe('Multi-file editor library', () => {
|
|||
});
|
||||
|
||||
it('attaches the model to the current instance', () => {
|
||||
spyOn(instance.instance, 'setModel');
|
||||
jest.spyOn(instance.instance, 'setModel').mockImplementation(() => {});
|
||||
|
||||
instance.attachModel(model);
|
||||
|
||||
|
|
@ -113,8 +116,8 @@ describe('Multi-file editor library', () => {
|
|||
});
|
||||
|
||||
it('sets original & modified when diff editor', () => {
|
||||
spyOn(instance.instance, 'getEditorType').and.returnValue('vs.editor.IDiffEditor');
|
||||
spyOn(instance.instance, 'setModel');
|
||||
jest.spyOn(instance.instance, 'getEditorType').mockReturnValue('vs.editor.IDiffEditor');
|
||||
jest.spyOn(instance.instance, 'setModel').mockImplementation(() => {});
|
||||
|
||||
instance.attachModel(model);
|
||||
|
||||
|
|
@ -125,7 +128,7 @@ describe('Multi-file editor library', () => {
|
|||
});
|
||||
|
||||
it('attaches the model to the dirty diff controller', () => {
|
||||
spyOn(instance.dirtyDiffController, 'attachModel');
|
||||
jest.spyOn(instance.dirtyDiffController, 'attachModel').mockImplementation(() => {});
|
||||
|
||||
instance.attachModel(model);
|
||||
|
||||
|
|
@ -133,7 +136,7 @@ describe('Multi-file editor library', () => {
|
|||
});
|
||||
|
||||
it('re-decorates with the dirty diff controller', () => {
|
||||
spyOn(instance.dirtyDiffController, 'reDecorate');
|
||||
jest.spyOn(instance.dirtyDiffController, 'reDecorate').mockImplementation(() => {});
|
||||
|
||||
instance.attachModel(model);
|
||||
|
||||
|
|
@ -155,7 +158,7 @@ describe('Multi-file editor library', () => {
|
|||
});
|
||||
|
||||
it('sets original & modified', () => {
|
||||
spyOn(instance.instance, 'setModel');
|
||||
jest.spyOn(instance.instance, 'setModel').mockImplementation(() => {});
|
||||
|
||||
instance.attachMergeRequestModel(model);
|
||||
|
||||
|
|
@ -170,7 +173,7 @@ describe('Multi-file editor library', () => {
|
|||
it('resets the editor model', () => {
|
||||
instance.createInstance(document.createElement('div'));
|
||||
|
||||
spyOn(instance.instance, 'setModel');
|
||||
jest.spyOn(instance.instance, 'setModel').mockImplementation(() => {});
|
||||
|
||||
instance.clearEditor();
|
||||
|
||||
|
|
@ -180,7 +183,7 @@ describe('Multi-file editor library', () => {
|
|||
|
||||
describe('dispose', () => {
|
||||
it('calls disposble dispose method', () => {
|
||||
spyOn(instance.disposable, 'dispose').and.callThrough();
|
||||
jest.spyOn(instance.disposable, 'dispose');
|
||||
|
||||
instance.dispose();
|
||||
|
||||
|
|
@ -198,7 +201,7 @@ describe('Multi-file editor library', () => {
|
|||
});
|
||||
|
||||
it('does not dispose modelManager', () => {
|
||||
spyOn(instance.modelManager, 'dispose');
|
||||
jest.spyOn(instance.modelManager, 'dispose').mockImplementation(() => {});
|
||||
|
||||
instance.dispose();
|
||||
|
||||
|
|
@ -206,7 +209,7 @@ describe('Multi-file editor library', () => {
|
|||
});
|
||||
|
||||
it('does not dispose decorationsController', () => {
|
||||
spyOn(instance.decorationsController, 'dispose');
|
||||
jest.spyOn(instance.decorationsController, 'dispose').mockImplementation(() => {});
|
||||
|
||||
instance.dispose();
|
||||
|
||||
|
|
@ -219,7 +222,7 @@ describe('Multi-file editor library', () => {
|
|||
it('does not update options', () => {
|
||||
instance.createInstance(holder);
|
||||
|
||||
spyOn(instance.instance, 'updateOptions');
|
||||
jest.spyOn(instance.instance, 'updateOptions').mockImplementation(() => {});
|
||||
|
||||
instance.updateDiffView();
|
||||
|
||||
|
|
@ -231,11 +234,11 @@ describe('Multi-file editor library', () => {
|
|||
beforeEach(() => {
|
||||
instance.createDiffInstance(holder);
|
||||
|
||||
spyOn(instance.instance, 'updateOptions').and.callThrough();
|
||||
jest.spyOn(instance.instance, 'updateOptions');
|
||||
});
|
||||
|
||||
it('sets renderSideBySide to false if el is less than 700 pixels', () => {
|
||||
spyOnProperty(instance.instance.getDomNode(), 'offsetWidth').and.returnValue(600);
|
||||
setNodeOffsetWidth(600);
|
||||
|
||||
expect(instance.instance.updateOptions).not.toHaveBeenCalledWith({
|
||||
renderSideBySide: false,
|
||||
|
|
@ -243,7 +246,7 @@ describe('Multi-file editor library', () => {
|
|||
});
|
||||
|
||||
it('sets renderSideBySide to false if el is more than 700 pixels', () => {
|
||||
spyOnProperty(instance.instance.getDomNode(), 'offsetWidth').and.returnValue(800);
|
||||
setNodeOffsetWidth(800);
|
||||
|
||||
expect(instance.instance.updateOptions).not.toHaveBeenCalledWith({
|
||||
renderSideBySide: true,
|
||||
|
|
@ -269,7 +272,7 @@ describe('Multi-file editor library', () => {
|
|||
it('sets quickSuggestions to false when language is markdown', () => {
|
||||
instance.createInstance(holder);
|
||||
|
||||
spyOn(instance.instance, 'updateOptions').and.callThrough();
|
||||
jest.spyOn(instance.instance, 'updateOptions');
|
||||
|
||||
const model = instance.createModel({
|
||||
...file(),
|
||||
|
|
@ -1,8 +1 @@
|
|||
/* eslint-disable class-methods-use-this */
|
||||
export default class TreeWorkerMock {
|
||||
addEventListener() {}
|
||||
|
||||
terminate() {}
|
||||
|
||||
postMessage() {}
|
||||
}
|
||||
export { default } from 'helpers/web_worker_mock';
|
||||
|
|
|
|||
|
|
@ -0,0 +1 @@
|
|||
export { default } from 'helpers/web_worker_mock';
|
||||
|
|
@ -1,80 +0,0 @@
|
|||
import $ from 'jquery';
|
||||
import Vue from 'vue';
|
||||
import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
|
||||
import store from '~/ide/stores';
|
||||
import NavDropdown from '~/ide/components/nav_dropdown.vue';
|
||||
import { PERMISSION_READ_MR } from '~/ide/constants';
|
||||
|
||||
const TEST_PROJECT_ID = 'lorem-ipsum';
|
||||
|
||||
describe('IDE NavDropdown', () => {
|
||||
const Component = Vue.extend(NavDropdown);
|
||||
let vm;
|
||||
let $dropdown;
|
||||
|
||||
beforeEach(() => {
|
||||
store.state.currentProjectId = TEST_PROJECT_ID;
|
||||
Vue.set(store.state.projects, TEST_PROJECT_ID, {
|
||||
userPermissions: {
|
||||
[PERMISSION_READ_MR]: true,
|
||||
},
|
||||
});
|
||||
vm = mountComponentWithStore(Component, { store });
|
||||
$dropdown = $(vm.$el);
|
||||
|
||||
// block dispatch from doing anything
|
||||
spyOn(vm.$store, 'dispatch');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vm.$destroy();
|
||||
});
|
||||
|
||||
const findIcon = name => vm.$el.querySelector(`.ic-${name}`);
|
||||
const findMRIcon = () => findIcon('merge-request');
|
||||
|
||||
it('renders nothing initially', () => {
|
||||
expect(vm.$el).not.toContainElement('.ide-nav-form');
|
||||
});
|
||||
|
||||
it('renders nav form when show.bs.dropdown', done => {
|
||||
$dropdown.trigger('show.bs.dropdown');
|
||||
|
||||
vm.$nextTick()
|
||||
.then(() => {
|
||||
expect(vm.$el).toContainElement('.ide-nav-form');
|
||||
})
|
||||
.then(done)
|
||||
.catch(done.fail);
|
||||
});
|
||||
|
||||
it('destroys nav form when closed', done => {
|
||||
$dropdown.trigger('show.bs.dropdown');
|
||||
$dropdown.trigger('hide.bs.dropdown');
|
||||
|
||||
vm.$nextTick()
|
||||
.then(() => {
|
||||
expect(vm.$el).not.toContainElement('.ide-nav-form');
|
||||
})
|
||||
.then(done)
|
||||
.catch(done.fail);
|
||||
});
|
||||
|
||||
it('renders merge request icon', () => {
|
||||
expect(findMRIcon()).not.toBeNull();
|
||||
});
|
||||
|
||||
describe('when user cannot read merge requests', () => {
|
||||
beforeEach(done => {
|
||||
store.state.projects[TEST_PROJECT_ID].userPermissions = {};
|
||||
|
||||
vm.$nextTick()
|
||||
.then(done)
|
||||
.catch(done.fail);
|
||||
});
|
||||
|
||||
it('does not render merge requests', () => {
|
||||
expect(findMRIcon()).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -2,13 +2,30 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 2020_02_26_162723 do
|
||||
describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 2020_04_20_094444 do
|
||||
let(:gitlab_shell) { Gitlab::Shell.new }
|
||||
let(:users) { table(:users) }
|
||||
let(:snippets) { table(:snippets) }
|
||||
let(:snippet_repositories) { table(:snippet_repositories) }
|
||||
|
||||
let(:user) { users.create(id: 1, email: 'user@example.com', projects_limit: 10, username: 'test', name: 'Test') }
|
||||
let(:user_state) { 'active' }
|
||||
let(:ghost) { false }
|
||||
let(:user_type) { nil }
|
||||
|
||||
let!(:user) do
|
||||
users.create(id: 1,
|
||||
email: 'user@example.com',
|
||||
projects_limit: 10,
|
||||
username: 'test',
|
||||
name: 'Test',
|
||||
state: user_state,
|
||||
ghost: ghost,
|
||||
last_activity_on: 1.minute.ago,
|
||||
user_type: user_type,
|
||||
confirmed_at: 1.day.ago)
|
||||
end
|
||||
|
||||
let!(:admin) { users.create(id: 2, email: 'admin@example.com', projects_limit: 10, username: 'admin', name: 'Admin', admin: true, state: 'active') }
|
||||
let!(:snippet_with_repo) { snippets.create(id: 1, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
|
||||
let!(:snippet_with_empty_repo) { snippets.create(id: 2, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
|
||||
let!(:snippet_without_repo) { snippets.create(id: 3, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
|
||||
|
|
@ -54,14 +71,51 @@ describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, s
|
|||
end
|
||||
|
||||
shared_examples 'commits the file to the repository' do
|
||||
it do
|
||||
subject
|
||||
context 'when author can update snippet and use git' do
|
||||
it 'creates the repository and commit the file' do
|
||||
subject
|
||||
|
||||
blob = blob_at(snippet, file_name)
|
||||
blob = blob_at(snippet, file_name)
|
||||
last_commit = raw_repository(snippet).commit
|
||||
|
||||
aggregate_failures do
|
||||
expect(blob).to be
|
||||
expect(blob.data).to eq content
|
||||
aggregate_failures do
|
||||
expect(blob).to be
|
||||
expect(blob.data).to eq content
|
||||
expect(last_commit.author_name).to eq user.name
|
||||
expect(last_commit.author_email).to eq user.email
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when author cannot update snippet or use git' do
|
||||
shared_examples 'admin user commits files' do
|
||||
it do
|
||||
subject
|
||||
|
||||
last_commit = raw_repository(snippet).commit
|
||||
|
||||
expect(last_commit.author_name).to eq admin.name
|
||||
expect(last_commit.author_email).to eq admin.email
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user is blocked' do
|
||||
let(:user_state) { 'blocked' }
|
||||
|
||||
it_behaves_like 'admin user commits files'
|
||||
end
|
||||
|
||||
context 'when user is deactivated' do
|
||||
let(:user_state) { 'deactivated' }
|
||||
|
||||
it_behaves_like 'admin user commits files'
|
||||
end
|
||||
|
||||
context 'when user is a ghost' do
|
||||
let(:ghost) { true }
|
||||
let(:user_type) { 'ghost' }
|
||||
|
||||
it_behaves_like 'admin user commits files'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,44 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
require Rails.root.join('db', 'post_migrate', '20200420094444_backfill_snippet_repositories.rb')
|
||||
|
||||
describe BackfillSnippetRepositories do
|
||||
let(:users) { table(:users) }
|
||||
let(:snippets) { table(:snippets) }
|
||||
let(:user) { users.create(id: 1, email: 'user@example.com', projects_limit: 10, username: 'test', name: 'Test', state: 'active') }
|
||||
|
||||
def create_snippet(id)
|
||||
params = {
|
||||
id: id,
|
||||
type: 'PersonalSnippet',
|
||||
author_id: user.id,
|
||||
file_name: 'foo',
|
||||
content: 'bar'
|
||||
}
|
||||
|
||||
snippets.create!(params)
|
||||
end
|
||||
|
||||
it 'correctly schedules background migrations' do
|
||||
create_snippet(1)
|
||||
create_snippet(2)
|
||||
create_snippet(3)
|
||||
|
||||
stub_const("#{described_class.name}::BATCH_SIZE", 2)
|
||||
|
||||
Sidekiq::Testing.fake! do
|
||||
Timecop.freeze do
|
||||
migrate!
|
||||
|
||||
expect(described_class::MIGRATION)
|
||||
.to be_scheduled_delayed_migration(3.minutes, 1, 2)
|
||||
|
||||
expect(described_class::MIGRATION)
|
||||
.to be_scheduled_delayed_migration(6.minutes, 3, 3)
|
||||
|
||||
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -143,22 +143,54 @@ describe RemoteMirror, :mailer do
|
|||
end
|
||||
|
||||
describe '#update_repository' do
|
||||
let(:git_remote_mirror) { spy }
|
||||
it 'performs update including options' do
|
||||
git_remote_mirror = stub_const('Gitlab::Git::RemoteMirror', spy)
|
||||
mirror = build(:remote_mirror)
|
||||
|
||||
before do
|
||||
stub_const('Gitlab::Git::RemoteMirror', git_remote_mirror)
|
||||
end
|
||||
|
||||
it 'includes the `keep_divergent_refs` setting' do
|
||||
mirror = build_stubbed(:remote_mirror, keep_divergent_refs: true)
|
||||
|
||||
mirror.update_repository({})
|
||||
expect(mirror).to receive(:options_for_update).and_return(options: true)
|
||||
mirror.update_repository
|
||||
|
||||
expect(git_remote_mirror).to have_received(:new).with(
|
||||
anything,
|
||||
mirror.project.repository.raw,
|
||||
mirror.remote_name,
|
||||
hash_including(keep_divergent_refs: true)
|
||||
options: true
|
||||
)
|
||||
expect(git_remote_mirror).to have_received(:update)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#options_for_update' do
|
||||
it 'includes the `keep_divergent_refs` option' do
|
||||
mirror = build_stubbed(:remote_mirror, keep_divergent_refs: true)
|
||||
|
||||
options = mirror.options_for_update
|
||||
|
||||
expect(options).to include(keep_divergent_refs: true)
|
||||
end
|
||||
|
||||
it 'includes the `only_branches_matching` option' do
|
||||
branch = create(:protected_branch)
|
||||
mirror = build_stubbed(:remote_mirror, project: branch.project, only_protected_branches: true)
|
||||
|
||||
options = mirror.options_for_update
|
||||
|
||||
expect(options).to include(only_branches_matching: [branch.name])
|
||||
end
|
||||
|
||||
it 'includes the `ssh_key` option' do
|
||||
mirror = build(:remote_mirror, :ssh, ssh_private_key: 'private-key')
|
||||
|
||||
options = mirror.options_for_update
|
||||
|
||||
expect(options).to include(ssh_key: 'private-key')
|
||||
end
|
||||
|
||||
it 'includes the `known_hosts` option' do
|
||||
mirror = build(:remote_mirror, :ssh, ssh_known_hosts: 'known-hosts')
|
||||
|
||||
options = mirror.options_for_update
|
||||
|
||||
expect(options).to include(known_hosts: 'known-hosts')
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -6,9 +6,10 @@ describe 'Destroying a Snippet' do
|
|||
include GraphqlHelpers
|
||||
|
||||
let(:current_user) { snippet.author }
|
||||
let(:snippet_gid) { snippet.to_global_id.to_s }
|
||||
let(:mutation) do
|
||||
variables = {
|
||||
id: snippet.to_global_id.to_s
|
||||
id: snippet_gid
|
||||
}
|
||||
|
||||
graphql_mutation(:destroy_snippet, variables)
|
||||
|
|
@ -49,9 +50,11 @@ describe 'Destroying a Snippet' do
|
|||
end
|
||||
|
||||
describe 'PersonalSnippet' do
|
||||
it_behaves_like 'graphql delete actions' do
|
||||
let_it_be(:snippet) { create(:personal_snippet) }
|
||||
end
|
||||
let_it_be(:snippet) { create(:personal_snippet) }
|
||||
|
||||
it_behaves_like 'graphql delete actions'
|
||||
|
||||
it_behaves_like 'when the snippet is not found'
|
||||
end
|
||||
|
||||
describe 'ProjectSnippet' do
|
||||
|
|
@ -85,5 +88,7 @@ describe 'Destroying a Snippet' do
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
it_behaves_like 'when the snippet is not found'
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -10,9 +10,11 @@ describe 'Mark snippet as spam', :do_not_mock_admin_mode do
|
|||
let_it_be(:snippet) { create(:personal_snippet) }
|
||||
let_it_be(:user_agent_detail) { create(:user_agent_detail, subject: snippet) }
|
||||
let(:current_user) { snippet.author }
|
||||
|
||||
let(:snippet_gid) { snippet.to_global_id.to_s }
|
||||
let(:mutation) do
|
||||
variables = {
|
||||
id: snippet.to_global_id.to_s
|
||||
id: snippet_gid
|
||||
}
|
||||
|
||||
graphql_mutation(:mark_as_spam_snippet, variables)
|
||||
|
|
@ -30,6 +32,8 @@ describe 'Mark snippet as spam', :do_not_mock_admin_mode do
|
|||
end
|
||||
end
|
||||
|
||||
it_behaves_like 'when the snippet is not found'
|
||||
|
||||
context 'when the user does not have permission' do
|
||||
let(:current_user) { other_user }
|
||||
|
||||
|
|
|
|||
|
|
@ -15,9 +15,10 @@ describe 'Updating a Snippet' do
|
|||
let(:updated_file_name) { 'Updated file_name' }
|
||||
let(:current_user) { snippet.author }
|
||||
|
||||
let(:snippet_gid) { GitlabSchema.id_from_object(snippet).to_s }
|
||||
let(:mutation) do
|
||||
variables = {
|
||||
id: GitlabSchema.id_from_object(snippet).to_s,
|
||||
id: snippet_gid,
|
||||
content: updated_content,
|
||||
description: updated_description,
|
||||
visibility_level: 'public',
|
||||
|
|
@ -90,16 +91,18 @@ describe 'Updating a Snippet' do
|
|||
end
|
||||
|
||||
describe 'PersonalSnippet' do
|
||||
it_behaves_like 'graphql update actions' do
|
||||
let(:snippet) do
|
||||
create(:personal_snippet,
|
||||
:private,
|
||||
file_name: original_file_name,
|
||||
title: original_title,
|
||||
content: original_content,
|
||||
description: original_description)
|
||||
end
|
||||
let(:snippet) do
|
||||
create(:personal_snippet,
|
||||
:private,
|
||||
file_name: original_file_name,
|
||||
title: original_title,
|
||||
content: original_content,
|
||||
description: original_description)
|
||||
end
|
||||
|
||||
it_behaves_like 'graphql update actions'
|
||||
|
||||
it_behaves_like 'when the snippet is not found'
|
||||
end
|
||||
|
||||
describe 'ProjectSnippet' do
|
||||
|
|
@ -142,5 +145,7 @@ describe 'Updating a Snippet' do
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
it_behaves_like 'when the snippet is not found'
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ require 'spec_helper'
|
|||
describe Projects::UpdateRemoteMirrorService do
|
||||
let(:project) { create(:project, :repository) }
|
||||
let(:remote_project) { create(:forked_project_with_submodules) }
|
||||
let(:remote_mirror) { project.remote_mirrors.create!(url: remote_project.http_url_to_repo, enabled: true, only_protected_branches: false) }
|
||||
let(:remote_mirror) { create(:remote_mirror, project: project, enabled: true) }
|
||||
let(:remote_name) { remote_mirror.remote_name }
|
||||
|
||||
subject(:service) { described_class.new(project, project.creator) }
|
||||
|
|
@ -16,7 +16,9 @@ describe Projects::UpdateRemoteMirrorService do
|
|||
before do
|
||||
project.repository.add_branch(project.owner, 'existing-branch', 'master')
|
||||
|
||||
allow(remote_mirror).to receive(:update_repository).and_return(true)
|
||||
allow(remote_mirror)
|
||||
.to receive(:update_repository)
|
||||
.and_return(double(divergent_refs: []))
|
||||
end
|
||||
|
||||
it 'ensures the remote exists' do
|
||||
|
|
@ -53,7 +55,7 @@ describe Projects::UpdateRemoteMirrorService do
|
|||
it 'marks the mirror as failed and raises the error when an unexpected error occurs' do
|
||||
allow(project.repository).to receive(:fetch_remote).and_raise('Badly broken')
|
||||
|
||||
expect { execute! }.to raise_error /Badly broken/
|
||||
expect { execute! }.to raise_error(/Badly broken/)
|
||||
|
||||
expect(remote_mirror).to be_failed
|
||||
expect(remote_mirror.last_error).to include('Badly broken')
|
||||
|
|
@ -83,32 +85,21 @@ describe Projects::UpdateRemoteMirrorService do
|
|||
end
|
||||
end
|
||||
|
||||
context 'when syncing all branches' do
|
||||
it 'push all the branches the first time' do
|
||||
context 'when there are divergent refs' do
|
||||
before do
|
||||
stub_fetch_remote(project, remote_name: remote_name, ssh_auth: remote_mirror)
|
||||
|
||||
expect(remote_mirror).to receive(:update_repository).with({})
|
||||
|
||||
execute!
|
||||
end
|
||||
end
|
||||
|
||||
context 'when only syncing protected branches' do
|
||||
it 'sync updated protected branches' do
|
||||
stub_fetch_remote(project, remote_name: remote_name, ssh_auth: remote_mirror)
|
||||
protected_branch = create_protected_branch(project)
|
||||
remote_mirror.only_protected_branches = true
|
||||
|
||||
expect(remote_mirror)
|
||||
.to receive(:update_repository)
|
||||
.with(only_branches_matching: [protected_branch.name])
|
||||
|
||||
execute!
|
||||
end
|
||||
|
||||
def create_protected_branch(project)
|
||||
branch_name = project.repository.branch_names.find { |n| n != 'existing-branch' }
|
||||
create(:protected_branch, project: project, name: branch_name)
|
||||
it 'marks the mirror as failed and sets an error message' do
|
||||
response = double(divergent_refs: %w[refs/heads/master refs/heads/develop])
|
||||
expect(remote_mirror).to receive(:update_repository).and_return(response)
|
||||
|
||||
execute!
|
||||
|
||||
expect(remote_mirror).to be_failed
|
||||
expect(remote_mirror.last_error).to include("Some refs have diverged")
|
||||
expect(remote_mirror.last_error).to include("refs/heads/master\n")
|
||||
expect(remote_mirror.last_error).to include("refs/heads/develop")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,10 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
RSpec.shared_examples 'when the snippet is not found' do
|
||||
let(:snippet_gid) do
|
||||
"gid://gitlab/#{snippet.class.name}/#{non_existing_record_id}"
|
||||
end
|
||||
|
||||
it_behaves_like 'a mutation that returns top-level errors',
|
||||
errors: [Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR]
|
||||
end
|
||||
Loading…
Reference in New Issue