Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-01-26 21:11:52 +00:00
parent cea01cb817
commit e04b8c1e60
68 changed files with 628 additions and 291 deletions

View File

@@ -1 +1 @@
6d228addeac5e159eaf89ce54aa523f71e336a29
4a014f5227b3defe785d9cec776abe2b1a944826

View File

@@ -16,15 +16,23 @@ import {
import * as Sentry from '@sentry/browser';
import { uniqueId } from 'lodash';
import Vue from 'vue';
import { fetchPolicies } from '~/lib/graphql';
import SafeHtml from '~/vue_shared/directives/safe_html';
import { redirectTo } from '~/lib/utils/url_utility';
import { s__, __, n__ } from '~/locale';
import { VARIABLE_TYPE, FILE_TYPE, CC_VALIDATION_REQUIRED_ERROR } from '../constants';
import {
CC_VALIDATION_REQUIRED_ERROR,
CONFIG_VARIABLES_TIMEOUT,
FILE_TYPE,
VARIABLE_TYPE,
} from '../constants';
import createPipelineMutation from '../graphql/mutations/create_pipeline.mutation.graphql';
import ciConfigVariablesQuery from '../graphql/queries/ci_config_variables.graphql';
import filterVariables from '../utils/filter_variables';
import RefsDropdown from './refs_dropdown.vue';
let pollTimeout;
export const POLLING_INTERVAL = 2000;
const i18n = {
variablesDescription: s__(
'Pipeline|Specify variable values to be used in this run. The values specified in %{linkStart}CI/CD settings%{linkEnd} will be used by default.',
@@ -115,10 +123,11 @@ export default {
// https://gitlab.com/gitlab-org/gitlab/-/issues/287815
fullName: this.refParam === this.defaultBranch ? `refs/heads/${this.refParam}` : undefined,
},
configVariablesWithDescription: {},
form: {},
errorTitle: null,
error: null,
predefinedValueOptions: {},
predefinedVariables: null,
warnings: [],
totalWarnings: 0,
isWarningDismissed: false,
@@ -128,6 +137,7 @@
},
apollo: {
ciConfigVariables: {
fetchPolicy: fetchPolicies.NO_CACHE,
query: ciConfigVariablesQuery,
// Skip when variables already cached in `form`
skip() {
@@ -140,46 +150,40 @@
};
},
update({ project }) {
return project?.ciConfigVariables || [];
return project?.ciConfigVariables;
},
result({ data }) {
const predefinedVars = data?.project?.ciConfigVariables || [];
const params = {};
const descriptions = {};
this.predefinedVariables = data?.project?.ciConfigVariables;
predefinedVars.forEach(({ description, key, value, valueOptions }) => {
if (description) {
params[key] = value;
descriptions[key] = description;
this.predefinedValueOptions[key] = valueOptions;
}
});
Vue.set(this.form, this.refFullName, { descriptions, variables: [] });
// Add default variables from yml
this.setVariableParams(this.refFullName, VARIABLE_TYPE, params);
// Add/update variables, e.g. from query string
if (this.variableParams) {
this.setVariableParams(this.refFullName, VARIABLE_TYPE, this.variableParams);
// API cache is empty when predefinedVariables === null, so we need to
// poll while cache values are being populated in the backend.
// After CONFIG_VARIABLES_TIMEOUT ms have passed, we stop polling
// and populate the form regardless.
if (this.isFetchingCiConfigVariables && !pollTimeout) {
pollTimeout = setTimeout(() => {
this.predefinedVariables = [];
this.clearPolling();
this.populateForm();
}, CONFIG_VARIABLES_TIMEOUT);
}
if (this.fileParams) {
this.setVariableParams(this.refFullName, FILE_TYPE, this.fileParams);
if (!this.isFetchingCiConfigVariables) {
this.clearPolling();
this.populateForm();
}
// Adds empty var at the end of the form
this.addEmptyVariable(this.refFullName);
},
error(error) {
Sentry.captureException(error);
},
pollInterval: POLLING_INTERVAL,
},
},
computed: {
isFetchingCiConfigVariables() {
return this.predefinedVariables === null;
},
isLoading() {
return this.$apollo.queries.ciConfigVariables.loading;
return this.$apollo.queries.ciConfigVariables.loading || this.isFetchingCiConfigVariables;
},
overMaxWarningsLimit() {
return this.totalWarnings > this.maxWarnings;
@@ -228,6 +232,48 @@ export default {
value: '',
});
},
clearPolling() {
clearTimeout(pollTimeout);
this.$apollo.queries.ciConfigVariables.stopPolling();
},
populateForm() {
this.configVariablesWithDescription = this.predefinedVariables.reduce(
(accumulator, { description, key, value, valueOptions }) => {
if (description) {
accumulator.descriptions[key] = description;
accumulator.values[key] = value;
accumulator.options[key] = valueOptions;
}
return accumulator;
},
{ descriptions: {}, values: {}, options: {} },
);
Vue.set(this.form, this.refFullName, {
descriptions: this.configVariablesWithDescription.descriptions,
variables: [],
});
// Add default variables from yml
this.setVariableParams(
this.refFullName,
VARIABLE_TYPE,
this.configVariablesWithDescription.values,
);
// Add/update variables, e.g. from query string
if (this.variableParams) {
this.setVariableParams(this.refFullName, VARIABLE_TYPE, this.variableParams);
}
if (this.fileParams) {
this.setVariableParams(this.refFullName, FILE_TYPE, this.fileParams);
}
// Adds empty var at the end of the form
this.addEmptyVariable(this.refFullName);
},
setVariable(refValue, type, key, value) {
const { variables } = this.form[refValue];
@@ -255,7 +301,7 @@
});
},
shouldShowValuesDropdown(key) {
return this.predefinedValueOptions[key]?.length > 1;
return this.configVariablesWithDescription.options[key]?.length > 1;
},
removeVariable(index) {
this.variables.splice(index, 1);
@@ -403,13 +449,13 @@ export default {
data-qa-selector="ci_variable_value_dropdown"
>
<gl-dropdown-item
v-for="value in predefinedValueOptions[variable.key]"
:key="value"
v-for="option in configVariablesWithDescription.options[variable.key]"
:key="option"
data-testid="pipeline-form-ci-variable-value-dropdown-items"
data-qa-selector="ci_variable_value_dropdown_item"
@click="setVariableAttribute(variable.key, 'value', value)"
@click="setVariableAttribute(variable.key, 'value', option)"
>
{{ value }}
{{ option }}
</gl-dropdown-item>
</gl-dropdown>
<gl-form-textarea
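Editor's note: the hunks above replace the one-shot `ciConfigVariables` query with polling. A `null` result means the backend cache is still being populated, so Apollo re-runs the query every `POLLING_INTERVAL` ms, while a `setTimeout` fallback stops polling and populates the form once `CONFIG_VARIABLES_TIMEOUT` ms have passed regardless. A minimal standalone sketch of that poll-with-deadline pattern (the `fetchOnce` callback and the timeout value are assumptions, not the component's actual wiring):

```javascript
// Poll `fetchOnce` until it returns a non-null value (cache populated),
// or give up after the deadline and fall back to an empty result.
const POLLING_INTERVAL = 2000; // ms, matches the constant exported above
const CONFIG_VARIABLES_TIMEOUT = 10000; // hypothetical deadline in ms

async function pollForConfigVariables(fetchOnce) {
  const deadline = Date.now() + CONFIG_VARIABLES_TIMEOUT;
  for (;;) {
    const variables = await fetchOnce();
    if (variables !== null) return variables; // backend cache is ready
    if (Date.now() >= deadline) return []; // stop polling, populate anyway
    await new Promise((resolve) => setTimeout(resolve, POLLING_INTERVAL));
  }
}
```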

View File

@@ -134,8 +134,16 @@ export default {
</div>
</template>
<template #left-secondary>
<div v-if="!errorStatusRow" class="gl-display-flex" data-testid="left-secondary-infos">
<span>{{ packageEntity.version }}</span>
<div
v-if="!errorStatusRow"
class="gl-display-flex gl-align-items-center"
data-testid="left-secondary-infos"
>
<gl-truncate
class="gl-max-w-15 gl-md-max-w-26"
:text="packageEntity.version"
:with-tooltip="true"
/>
<div v-if="pipelineUser" class="gl-display-none gl-sm-display-flex gl-ml-2">
<gl-sprintf :message="s__('PackageRegistry|published by %{author}')">

View File

@@ -36,7 +36,7 @@ export default () => {
const attachMainComponent = () =>
new Vue({
el,
name: 'PackageRegistery',
name: 'PackageRegistry',
router,
apolloProvider,
provide: {

View File

@@ -161,7 +161,7 @@ export default {
<gl-sprintf
:message="
__(
'Releases are based on Git tags. We recommend tags that use semantic versioning, for example %{codeStart}v1.0.0%{codeEnd}, %{codeStart}v2.1.0-pre%{codeEnd}.',
'Releases are based on Git tags. We recommend tags that use semantic versioning, for example %{codeStart}1.0.0%{codeEnd}, %{codeStart}2.1.0-pre%{codeEnd}.',
)
"
>

View File

@@ -126,10 +126,9 @@ export default {
<slot name="right-action"></slot>
</div>
</div>
<div class="gl-display-flex">
<div v-if="isDetailsShown" class="gl-display-flex">
<div class="gl-w-7"></div>
<div
v-if="isDetailsShown"
class="gl-display-flex gl-flex-direction-column gl-flex-grow-1 gl-bg-gray-10 gl-rounded-base gl-inset-border-1-gray-100 gl-mb-3"
>
<div

View File

@@ -27,6 +27,16 @@ $border-radius-medium: 3px;
}
}
.search-max-w-inherit {
max-width: inherit;
}
.search-wrap-f-md-down {
@include gl-media-breakpoint-down(md) {
white-space: normal !important;
}
}
.search {
margin: 0 8px;

View File

@@ -63,10 +63,13 @@ class Analytics::CycleAnalytics::Aggregation < ApplicationRecord
group = group_or_project_namespace.is_a?(Group) ? group_or_project_namespace : group_or_project_namespace.parent
top_level_group = group.root_ancestor
aggregation = find_by(group_id: top_level_group.id)
return aggregation if aggregation.present?
return aggregation if aggregation&.enabled?
insert({ group_id: top_level_group.id }, unique_by: :group_id)
find_by(group_id: top_level_group.id)
# At this point we're sure that the group is licensed, we can always enable the aggregation.
# This re-enables the aggregation in case the group downgraded and later upgraded the license.
upsert({ group_id: top_level_group.id, enabled: true })
find(top_level_group.id)
end
private
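Editor's note: for readers outside Rails, the change above swaps a plain insert for an `upsert` keyed on `group_id`, so a row left behind by a license downgrade is flipped back to `enabled: true` in one statement. A rough JavaScript sketch of the same find-or-upsert flow (the `db` helper and table name are invented for illustration):

```javascript
// Return the aggregation if it is already enabled; otherwise insert or
// update the row with enabled: true, then re-read it.
async function safeCreateAggregation(db, groupId) {
  const existing = await db.findBy('aggregations', { group_id: groupId });
  if (existing && existing.enabled) return existing;
  // Re-enables aggregation for groups that downgraded and upgraded again.
  await db.upsert('aggregations', { group_id: groupId, enabled: true }, { uniqueBy: 'group_id' });
  return db.findBy('aggregations', { group_id: groupId });
}
```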

View File

@@ -70,33 +70,52 @@ module BulkImports
)
bulk_import.create_configuration!(credentials.slice(:url, :access_token))
Array.wrap(params).each do |entity|
track_access_level(entity)
Array.wrap(params).each do |entity_params|
track_access_level(entity_params)
validate_destination_full_path(entity_params)
BulkImports::Entity.create!(
bulk_import: bulk_import,
source_type: entity[:source_type],
source_full_path: entity[:source_full_path],
destination_slug: entity[:destination_slug],
destination_namespace: entity[:destination_namespace],
migrate_projects: Gitlab::Utils.to_boolean(entity[:migrate_projects], default: true)
source_type: entity_params[:source_type],
source_full_path: entity_params[:source_full_path],
destination_slug: entity_params[:destination_slug],
destination_namespace: entity_params[:destination_namespace],
migrate_projects: Gitlab::Utils.to_boolean(entity_params[:migrate_projects], default: true)
)
end
bulk_import
end
end
def track_access_level(entity)
def track_access_level(entity_params)
Gitlab::Tracking.event(
self.class.name,
'create',
label: 'import_access_level',
user: current_user,
extra: { user_role: user_role(entity[:destination_namespace]), import_type: 'bulk_import_group' }
extra: { user_role: user_role(entity_params[:destination_namespace]), import_type: 'bulk_import_group' }
)
end
def validate_destination_full_path(entity_params)
source_type = entity_params[:source_type]
full_path = [
entity_params[:destination_namespace],
entity_params[:destination_slug]
].reject(&:blank?).join('/')
case source_type
when 'group_entity'
return if Namespace.find_by_full_path(full_path).nil?
when 'project_entity'
return if Project.find_by_full_path(full_path).nil?
end
raise BulkImports::Error.destination_full_path_validation_failure(full_path)
end
def user_role(destination_namespace)
namespace = Namespace.find_by_full_path(destination_namespace)
# if there is no parent namespace we assume user will be group creator/owner
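Editor's note: the new `validate_destination_full_path` guard joins the destination namespace and slug (skipping blanks) and aborts the import when a group or project already occupies that path. A hedged sketch of the same check in JavaScript, where `lookupByFullPath` stands in for the `Namespace`/`Project` finders:

```javascript
// Reject an import whose destination path is already taken.
function validateDestinationFullPath(entityParams, lookupByFullPath) {
  const fullPath = [entityParams.destination_namespace, entityParams.destination_slug]
    .filter((part) => part && part.length > 0)
    .join('/');
  if (lookupByFullPath(entityParams.source_type, fullPath)) {
    throw new Error(`Import aborted as '${fullPath}' already exists. Change the destination and try again.`);
  }
}
```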

View File

@@ -1,12 +1,12 @@
.search-results-status
.gl-display-flex.gl-flex-direction-column
.gl-p-5.gl-display-flex
.gl-md-display-flex.gl-text-left.gl-align-items-center.gl-flex-grow-1
.gl-p-5.gl-display-flex.gl-max-w-full.gl-sm-flex-direction-column
.gl-md-display-flex.gl-text-left.gl-align-items-center.gl-flex-grow-1.gl-text-truncate
- unless @search_service_presenter.without_count?
= search_entries_info(@search_objects, @scope, @search_term)
- unless @search_service_presenter.show_snippets?
- if @project
- link_to_project = link_to(@project.full_name, @project, class: 'ml-md-1')
- link_to_project = link_to(@project.full_name, @project, class: 'ml-md-1 gl-text-truncate search-wrap-f-md-down')
- if @scope == 'blobs'
= _("in")
.mx-md-1

View File

@@ -3,11 +3,11 @@
- noteable_identifier = note.noteable.try(:iid) || note.noteable.try(:id)
.search-result-row
%h5.note-search-caption.str-truncated
= sprite_icon('comment', css_class: 'gl-vertical-align-text-bottom')
= link_to_member(project, note.author, avatar: false)
- link_to_project = link_to(project.full_name, project)
= _("commented on %{link_to_project}").html_safe % { link_to_project: link_to_project }
%h5.note-search-caption.gl-max-w-full
%span.gl-display-inline-block.gl-text-truncate.search-max-w-inherit.gl-vertical-align-bottom
= sprite_icon('comment', css_class: 'gl-vertical-align-text-bottom')
= link_to_member(project, note.author, avatar: false)
= _("commented on %{link_to_project}").html_safe % { link_to_project: link_to(project.full_name, project) }
&middot;
- if note.for_commit?

View File

@@ -714,7 +714,7 @@ Customer master keys (CMKs) and SSE-C encryption are
Setting a default encryption on an S3 bucket is the easiest way to
enable encryption, but you may want to
[set a bucket policy to ensure only encrypted objects are uploaded](https://aws.amazon.com/premiumsupport/knowledge-center/s3-bucket-store-kms-encrypted-objects/).
[set a bucket policy to ensure only encrypted objects are uploaded](https://repost.aws/knowledge-center/s3-bucket-store-kms-encrypted-objects).
To do this, you must configure GitLab to send the proper encryption headers
in the `storage_options` configuration section:

View File

@@ -70,7 +70,7 @@ All trademarks, materials, documentation, and other intellectual property remain
### Trademark Requirements
Use of GitLab Trademarks must be in compliance with the standards set forth in [our guidelines](https://about.gitlab.com/handbook/marketing/corporate-marketing/brand-activation/trademark-guidelines/) (as updated from time to time).
Use of GitLab Trademarks must be in compliance with the standards set forth in [our guidelines](https://about.gitlab.com/handbook/marketing/brand-and-product-marketing/brand/brand-activation/trademark-guidelines/) (as updated from time to time).
CHEF® and all Chef marks are owned by Progress Software Corporation and must be used in accordance with the [Progress Software Trademark Usage Policy](https://www.progress.com/legal/trademarks).
When using a GitLab or 3rd party trademark in documentation, include the (R) symbol in the first instance, for example, "Chef(R) is used for configuring...." You may omit the symbol in subsequent instances.

View File

@@ -475,7 +475,7 @@ To configure the `s3` storage driver in Omnibus:
`bucket_name.host/object`. [Set to false for AWS S3](https://aws.amazon.com/blogs/aws/amazon-s3-path-deprecation-plan-the-rest-of-the-story/).
You can set a rate limit on connections to S3 to avoid 503 errors from the S3 API. To do this,
set `maxrequestspersecond` to a number within the [S3 request rate threshold](https://aws.amazon.com/premiumsupport/knowledge-center/s3-503-within-request-rate-prefix/):
set `maxrequestspersecond` to a number within the [S3 request rate threshold](https://repost.aws/knowledge-center/s3-503-within-request-rate-prefix):
```ruby
registry['storage'] = {
@@ -552,7 +552,7 @@ you can pull from the Container Registry, but you cannot push.
NOTE:
If you have a lot of data, you may be able to improve performance by
[running parallel sync operations](https://aws.amazon.com/premiumsupport/knowledge-center/s3-improve-transfer-sync-command/).
[running parallel sync operations](https://repost.aws/knowledge-center/s3-improve-transfer-sync-command).
1. To perform the final data sync,
[put the Container Registry in `read-only` mode](#performing-garbage-collection-without-downtime) and

View File

@@ -16,7 +16,7 @@ Depending on your workflow, the following recommended reference architectures
may need to be adapted accordingly. Your workload is influenced by factors
including how active your users are, how much automation you use, mirroring,
and repository/change size. Additionally, the displayed memory values are
provided by [GCP machine types](https://cloud.google.com/compute/docs/machine-types).
provided by [GCP machine types](https://cloud.google.com/compute/docs/machine-resource).
For different cloud vendors, attempt to select options that best match the
provided architecture.

View File

@@ -148,7 +148,7 @@ POST /projects/:id/feature_flags
| `description` | string | no | The description of the feature flag. |
| `active` | boolean | no | The active state of the flag. Defaults to true. [Supported](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/38350) in GitLab 13.3 and later. |
| `strategies` | JSON | no | The feature flag [strategies](../operations/feature_flags.md#feature-flag-strategies). |
| `strategies:name` | JSON | no | The strategy name. Can be `default`, `gradualRolloutUserId`, `userWithId`, or `gitlabUserList`. In [GitLab 13.5](https://gitlab.com/gitlab-org/gitlab/-/issues/36380) and later, can be [`flexibleRollout`](https://docs.getunleash.io/user_guide/activation_strategy#gradual-rollout). |
| `strategies:name` | JSON | no | The strategy name. Can be `default`, `gradualRolloutUserId`, `userWithId`, or `gitlabUserList`. In [GitLab 13.5](https://gitlab.com/gitlab-org/gitlab/-/issues/36380) and later, can be [`flexibleRollout`](https://docs.getunleash.io/user_guide/activation_strategy/#gradual-rollout). |
| `strategies:parameters` | JSON | no | The strategy parameters. |
| `strategies:scopes` | JSON | no | The scopes for the strategy. |
| `strategies:scopes:environment_scope` | string | no | The environment scope of the scope. |
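Editor's note: a hypothetical request body for this endpoint using the `flexibleRollout` strategy named in the table above. Parameter names follow the Unleash gradual-rollout convention; the concrete values are invented:

```javascript
// Hypothetical POST /projects/:id/feature_flags payload.
const body = {
  name: 'my_feature_flag',
  strategies: [
    {
      name: 'flexibleRollout',
      parameters: { rollout: '50', groupId: 'default', stickiness: 'DEFAULT' },
      scopes: [{ environment_scope: 'production' }],
    },
  ],
};
```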

View File

@@ -316,7 +316,7 @@ of commits, GitLab generates a changelog for all commits that use a particular
a new Markdown-formatted section to a changelog file in the Git repository of
the project. The output format can be customized.
For user-facing documentation, see [Changelogs](../user/project/changelog.md).
For user-facing documentation, see [Changelogs](../user/project/changelogs.md).
```plaintext
POST /projects/:id/repository/changelog
@@ -446,5 +446,5 @@ Example Response:
## Related topics
- User documentation for [changelogs](../user/project/changelog.md)
- User documentation for [changelogs](../user/project/changelogs.md)
- Developer documentation for [changelog entries](../development/changelog.md) in GitLab.
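Editor's note: a hypothetical invocation of the endpoint above; the host, project ID, and token are placeholders, and `version` is the release the changelog is generated for:

```javascript
// Generate changelog data for version 1.2.3 and commit it to the
// project's changelog file.
await fetch('https://gitlab.example.com/api/v4/projects/42/repository/changelog', {
  method: 'POST',
  headers: { 'PRIVATE-TOKEN': '<your_access_token>', 'Content-Type': 'application/json' },
  body: JSON.stringify({ version: '1.2.3' }),
});
```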

View File

@@ -8,7 +8,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/98354) in GitLab 15.5.
GitLab provides an SCIM API that both implements [the RFC7644 protocol](https://tools.ietf.org/html/rfc7644)
GitLab provides an SCIM API that both implements [the RFC7644 protocol](https://www.rfc-editor.org/rfc/rfc7644)
and provides the `/Users` endpoint. The base URL is `/api/scim/v2/groups/:group_path/Users/`.
To use this API, [Group SSO](../user/group/saml_sso/index.md) must be enabled for the group.
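Editor's note: a hedged example of calling the base URL described above. The group path and token are placeholders, and the bearer-token header is an assumption about how SCIM requests are authenticated here:

```javascript
// List SCIM-provisioned users for a group (hypothetical values).
const response = await fetch(
  'https://gitlab.example.com/api/scim/v2/groups/my-group/Users',
  { headers: { Authorization: 'Bearer <scim_token>' } },
);
const users = await response.json();
```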

View File

@@ -175,7 +175,7 @@ export const fuzzify = (id) => { /* ... */ };
#### Dependency Injection
[Dependency Injection](https://en.wikipedia.org/wiki/Dependency_injection) is an approach which breaks
coupling by declaring a module's dependencies to be injected from outside the module (for example, through constructor parameters, a bona-fide Dependency Injection framework, and even Vue's `provide/inject`).
coupling by declaring a module's dependencies to be injected from outside the module (for example, through constructor parameters, a bona-fide Dependency Injection framework, and even in Vue `provide/inject`).
```javascript
// bad - Vue component coupled to Singleton

View File

@@ -96,7 +96,7 @@ mysec_sast:
sast: gl-sast-report.json
```
Note that `gl-sast-report.json` is an example file path but any other filename can be used. See
`gl-sast-report.json` is an example file path but any other filename can be used. See
[the Output file section](#output-file) for more details. It's processed as a SAST report because
it's declared under the `reports:sast` key in the job definition, not because of the filename.
@@ -150,7 +150,7 @@ regardless of the individual machine the scanner runs on.
Depending on the CI infrastructure,
the CI may have to fetch the Docker image every time the job runs.
For the scanning job to run fast and avoid wasting bandwidth, Docker images should be as small as
possible. You should aim for 50MB or smaller. If that isn't possible, try to keep it below 1.46 GB,
possible. You should aim for 50 MB or smaller. If that isn't possible, try to keep it below 1.46 GB,
which is the size of a DVD-ROM.
If the scanner requires a fully functional Linux environment,
@@ -199,7 +199,7 @@ SAST and Dependency Scanning scanners must scan the files in the project directo
#### Container Scanning
In order to be consistent with the official Container Scanning for GitLab,
To be consistent with the official Container Scanning for GitLab,
scanners must scan the Docker image whose name and tag are given by
`CI_APPLICATION_REPOSITORY` and `CI_APPLICATION_TAG`, respectively. If the `DOCKER_IMAGE`
CI/CD variable is provided, then the `CI_APPLICATION_REPOSITORY` and `CI_APPLICATION_TAG` variables
@@ -225,7 +225,7 @@ If you use the `CIS_KUBECONFIG` CI/CD variable, then the
the `CIS_KUBECONFIG` CI/CD variable, the value defaults to the value of
`$KUBECONFIG`. `$KUBECONFIG` is a predefined CI/CD variable configured when the project is assigned to a
Kubernetes cluster. When multiple contexts are provided in the `KUBECONFIG` variable, the context
selected as `current-context` will be used to fetch vulnerabilities.
selected as `current-context` is used to fetch vulnerabilities.
#### Configuration files
@@ -440,7 +440,7 @@ Even when the [`Vulnerabilities`](#vulnerabilities) array for a given scan may b
should contain the complete list of potential identifiers to inform the Rails application of which
rules were executed.
When populated, the Rails application will automatically resolve previously detected vulnerabilities as no
When populated, the Rails application automatically resolves previously detected vulnerabilities as no
longer relevant when their primary identifier is not included.
##### Name, message, and description
@@ -526,7 +526,7 @@ Not all vulnerabilities have CVEs, and a CVE can be identified multiple times. A
isn't a stable identifier and you shouldn't assume it as such when tracking vulnerabilities.
The maximum number of identifiers for a vulnerability is set as 20. If a vulnerability has more than 20 identifiers,
the system saves only the first 20 of them. Note that vulnerabilities in the [Pipeline Security](../../user/application_security/vulnerability_report/pipeline.md#view-vulnerabilities-in-a-pipeline)
the system saves only the first 20 of them. The vulnerabilities in the [Pipeline Security](../../user/application_security/vulnerability_report/pipeline.md#view-vulnerabilities-in-a-pipeline)
tab do not enforce this limit and all identifiers present in the report artifact are displayed.
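Editor's note: a small sketch of the 20-identifier cap described above, with the primary identifier in first position. Field names follow the secure report schema; the values are invented:

```javascript
// Ingestion persists at most the first 20 identifiers; the Pipeline
// Security tab still displays everything in the report artifact.
const MAX_IDENTIFIERS = 20;
const vulnerability = {
  identifiers: [
    { type: 'semgrep_id', name: 'rule-id', value: 'rule-id' }, // primary
    { type: 'cwe', name: 'CWE-79', value: '79' }, // secondary metadata
  ],
};
const persisted = vulnerability.identifiers.slice(0, MAX_IDENTIFIERS);
```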
#### Details

View File

@@ -85,7 +85,7 @@ a critical component to both describing and tracking vulnerabilities.
In most other cases, the `identifiers` collection is unordered, where the remaining secondary identifiers act as metadata for grouping vulnerabilities
(see [Analyzer vulnerability translation](#analyzer-vulnerability-translation) below for the exception).
Any time the primary identifier changes and a project pipeline is re-run, ingestion of the new report will “orphan” the previous DB record.
Any time the primary identifier changes and a project pipeline is re-run, ingestion of the new report will "orphan" the previous DB record.
Because our processing logic relies on generating a delta of two different vulnerabilities, it can end up looking rather confusing. For example:
![Screenshot of primary identifier mismatch in MR widget](img/primary_identifier_changed_v15_6.png)
@@ -95,14 +95,14 @@ After being [merged](../integrations/secure.md#tracking-and-merging-vulnerabilit
### Guiding principles for ensuring primary identifier stability
- A primary identifier should never change unless we have a compelling reason.
- Analyzer supporting vulnerability translation must include the legacy primary identifiers in a secondary position to prevent “orphaning” of results.
- Analyzer supporting vulnerability translation must include the legacy primary identifiers in a secondary position to prevent "orphaning" of results.
- Beyond the primary identifier, the order of secondary identifiers does not matter.
- The identifier is unique based on a combination of the `Type` and `Value` fields (see [identifier fingerprint](https://gitlab.com/gitlab-org/gitlab/-/blob/v15.5.1-ee/lib/gitlab/ci/reports/security/identifier.rb#L63)).
- If we change the primary identifier, rolling back analyzers to previous versions will not fix the orphaned results. The data previously ingested into our database is an artifact of previous jobs with few ways of automating data migrations.
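Editor's note: as the list above says, identifier uniqueness derives from the `Type` and `Value` fields. A simplified sketch of that fingerprint idea (the linked implementation hashes these fields in Ruby; this is illustrative only):

```javascript
// Two identifiers collide exactly when their type and value match.
const identifierFingerprint = ({ type, value }) => `${type}:${value}`;

identifierFingerprint({ type: 'cwe', value: '79' }); // => 'cwe:79'
```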
### Analyzer vulnerability translation
In the case of the SAST Semgrep analyzer, there is a secondary identifier of particular importance: the identifier linking the reports vulnerability
In the case of the SAST Semgrep analyzer, there is a secondary identifier of particular importance: the identifier linking the report's vulnerability
to the legacy analyzer (that is, bandit or ESLint).
To [enable vulnerability translation](../../user/application_security/sast/analyzers.md#vulnerability-translation)

View File

@@ -12,7 +12,7 @@ description: "GitLab's development guidelines for Wikis"
## Overview
The wiki functionality in GitLab is based on [Gollum 4.x](https://github.com/gollum/gollum/).
It's used in [Gitaly's](gitaly.md) Ruby service, and accessed from the Rails app through Gitaly RPC calls.
It's used in the [Gitaly](gitaly.md) Ruby service, and accessed from the Rails app through Gitaly RPC calls.
Wikis use Git repositories as storage backend, and can be accessed through:
@@ -40,9 +40,9 @@ We only use Gollum as a storage abstraction layer, to handle the mapping between
When rendering wiki pages, we don't use Gollum at all and instead go through a
[custom Banzai pipeline](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/banzai/pipeline/wiki_pipeline.rb).
This adds some [wiki-specific markup](../user/markdown.md#wiki-specific-markdown), such as Gollum's `[[link]]` syntax.
This adds some [wiki-specific markup](../user/markdown.md#wiki-specific-markdown), such as the Gollum `[[link]]` syntax.
Because we do not make use of most of Gollum's features, we plan to move away from it entirely at some point.
Because we do not make use of most of the Gollum features, we plan to move away from it entirely at some point.
[See this epic](https://gitlab.com/groups/gitlab-org/-/epics/2381) for reference.
## Model classes

View File

@@ -237,3 +237,14 @@
- scan: secret_detection
- scan: container_scanning
```
## Avoiding duplicate scans
Scan execution policies can cause the same type of scanner to run more than once if developers include scan jobs in the project's
`.gitlab-ci.yml` file. This behavior is intentional as scanners can run more than once with different variables and settings. For example, a
developer may want to try running a SAST scan with different variables than the one enforced by the security and compliance team. In
this case, two SAST jobs run in the pipeline, one with the developer's variables and one with the security and compliance team's variables.
If you want to avoid running duplicate scans, you can either remove the scans from the project's `.gitlab-ci.yml` file or disable your
local jobs by setting `SAST_DISABLED: true`. Disabling jobs this way does not prevent the security jobs defined by scan execution
policies from running.

View File

@@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Operational Container Scanning **(ULTIMATE)**
> - [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/6346) in GitLab 14.8.
> - [Deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/368828) the starboard directive in GitLab 15.4. The starboard directive will be removed in GitLab 16.0.
> - [Deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/368828) the starboard directive in GitLab 15.4. The starboard directive is scheduled for removal in GitLab 16.0.
To view cluster vulnerabilities, you can view the [vulnerability report](../../application_security/vulnerabilities/index.md).
You can also configure your agent so the vulnerabilities are displayed with other agent information in GitLab.
@@ -24,7 +24,7 @@ In GitLab 15.0 and later, you do not need to install Starboard operator in the K
### Enable via agent configuration
To enable scanning of all images within your Kubernetes cluster via the agent configuration, add a `container_scanning` configuration block to your agent
configuration with a `cadence` field containing a [CRON expression](https://docs.oracle.com/cd/E12058_01/doc/doc.1014/e12030/cron_expressions.htm) for when the scans will be run.
configuration with a `cadence` field containing a [CRON expression](https://docs.oracle.com/cd/E12058_01/doc/doc.1014/e12030/cron_expressions.htm) for when the scans are run.
```yaml
container_scanning:
@@ -42,7 +42,7 @@ Other elements of the [CRON syntax](https://docs.oracle.com/cd/E12058_01/doc/doc
NOTE:
The CRON expression is evaluated in [UTC](https://www.timeanddate.com/worldclock/timezone/utc) using the system-time of the Kubernetes-agent pod.
By default, operational container scanning will attempt to scan the workloads in all
By default, operational container scanning attempts to scan the workloads in all
namespaces for vulnerabilities. You can set the `vulnerability_report` block with the `namespaces`
field which can be used to restrict which namespaces are scanned. For example,
if you would like to scan only the `default`, `kube-system` namespaces, you can use this configuration:
@@ -60,10 +60,10 @@ container_scanning:
To enable scanning of all images within your Kubernetes cluster via scan execution policies, we can use the
[scan execution policy editor](../../application_security/policies/scan-execution-policies.md#scan-execution-policy-editor)
in order to create a new schedule rule.
To create a new schedule rule.
NOTE:
The Kubernetes agent must be running in your cluster in order to scan running container images
The Kubernetes agent must be running in your cluster to scan running container images
Here is an example of a policy which enables operational container scanning within the cluster the Kubernetes agent is attached to:
@@ -84,9 +84,9 @@ Here is an example of a policy which enables operational container scanning with
The keys for a schedule rule are:
- `cadence` (required): a [CRON expression](https://docs.oracle.com/cd/E12058_01/doc/doc.1014/e12030/cron_expressions.htm) for when the scans will be run
- `cadence` (required): a [CRON expression](https://docs.oracle.com/cd/E12058_01/doc/doc.1014/e12030/cron_expressions.htm) for when the scans are run
- `agents:<agent-name>` (required): The name of the agent to use for scanning
- `agents:<agent-name>:namespaces` (optional): The Kubernetes namespaces to scan. If omitted, all namespaces will be scanned
- `agents:<agent-name>:namespaces` (optional): The Kubernetes namespaces to scan. If omitted, all namespaces are scanned
NOTE:
Other elements of the [CRON syntax](https://docs.oracle.com/cd/E12058_01/doc/doc.1014/e12030/cron_expressions.htm) may work in the cadence field if supported by the [cron](https://github.com/robfig/cron) we are using in our implementation, however, GitLab does not officially test or support them.

View File

@@ -77,8 +77,7 @@ From there you can create a new iteration or select an iteration to get a more d
NOTE:
If a project has issue tracking
[turned off](../../project/settings/index.md#configure-project-visibility-features-and-permissions),
you can view the iterations list
by going to its URL. To do so, add: `/-/cadences` to your project or group URL.
to view the iterations list, enter its URL. To do so, add: `/-/cadences` to your project or group URL.
For example `https://gitlab.com/gitlab-org/sample-data-templates/sample-gitlab-project/-/cadences`.
This is tracked in [issue 339009](https://gitlab.com/gitlab-org/gitlab/-/issues/339009).

View File

@@ -14,5 +14,9 @@ module BulkImports
def self.invalid_url
self.new("Import aborted as it was not possible to connect to the provided GitLab instance URL.")
end
def self.destination_full_path_validation_failure(full_path)
self.new("Import aborted as '#{full_path}' already exists. Change the destination and try again.")
end
end
end

View File

@@ -35088,7 +35088,7 @@ msgstr ""
msgid "Releases are based on Git tags and mark specific points in a project's development history. They can contain information about the type of changes and can also deliver binaries, like compiled versions of your software."
msgstr ""
msgid "Releases are based on Git tags. We recommend tags that use semantic versioning, for example %{codeStart}v1.0.0%{codeEnd}, %{codeStart}v2.1.0-pre%{codeEnd}."
msgid "Releases are based on Git tags. We recommend tags that use semantic versioning, for example %{codeStart}1.0.0%{codeEnd}, %{codeStart}2.1.0-pre%{codeEnd}."
msgstr ""
msgid "Releases documentation"

View File

@@ -8,7 +8,7 @@ FactoryBot.define do
sequence(:source_full_path) { |n| "source-path-#{n}" }
sequence(:destination_namespace) { |n| "destination-path-#{n}" }
destination_name { 'Imported Entity' }
destination_name { 'imported-entity' }
sequence(:source_xid)
migrate_projects { true }

View File

@@ -41,7 +41,7 @@ RSpec.describe 'User edits Release', :js, feature_category: :continuous_delivery
end
it 'renders the edit Release form' do
expect(page).to have_content('Releases are based on Git tags. We recommend tags that use semantic versioning, for example v1.0.0, v2.1.0-pre.')
expect(page).to have_content('Releases are based on Git tags. We recommend tags that use semantic versioning, for example 1.0.0, 2.1.0-pre.')
expect(find_field('Tag name', disabled: true).value).to eq(release.tag)
expect(find_field('Release title').value).to eq(release.name)

View File

@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import testActionFn from './vuex_action_helper';
const testActionFnWithOptionsArg = (...args) => {
@@ -102,7 +102,7 @@ describe.each([testActionFn, testActionFnWithOptionsArg])(
};
it('returns original data of successful promise while checking actions/mutations', async () => {
mock.onGet(TEST_HOST).replyOnce(200, 42);
mock.onGet(TEST_HOST).replyOnce(HTTP_STATUS_OK, 42);
assertion = { mutations: [{ type: 'SUCCESS' }], actions: [{ type: 'ACTION' }] };
@@ -138,7 +138,7 @@
});
};
mock.onGet(TEST_HOST).replyOnce(200, 42);
mock.onGet(TEST_HOST).replyOnce(HTTP_STATUS_OK, 42);
assertion = { mutations: [{ type: 'SUCCESS' }], actions: [{ type: 'ACTION' }] };
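Editor's note: this hunk is one instance of a change repeated across many specs in this commit: numeric status literals passed to `axios-mock-adapter` become named constants. A condensed sketch of the pattern, with the constant value mirroring `~/lib/utils/http_status`:

```javascript
import MockAdapter from 'axios-mock-adapter';
import axios from 'axios';

const HTTP_STATUS_OK = 200; // named constant instead of a bare 200

const mock = new MockAdapter(axios);
// The status argument now reads as intent, not a magic number.
mock.onGet('/endpoint').replyOnce(HTTP_STATUS_OK, { answer: 42 });
```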

View File

@@ -5,7 +5,7 @@ import * as actions from '~/admin/statistics_panel/store/actions';
import * as types from '~/admin/statistics_panel/store/mutation_types';
import getInitialState from '~/admin/statistics_panel/store/state';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import mockStatistics from '../mock_data';
describe('Admin statistics panel actions', () => {
@@ -20,7 +20,7 @@
describe('fetchStatistics', () => {
describe('success', () => {
beforeEach(() => {
mock.onGet(/api\/(.*)\/application\/statistics/).replyOnce(200, mockStatistics);
mock.onGet(/api\/(.*)\/application\/statistics/).replyOnce(HTTP_STATUS_OK, mockStatistics);
});
it('dispatches success with received data', () => {

View File

@@ -55,7 +55,9 @@ describe('GroupsApi', () => {
const params = { page: 1 };
const expectedUrl = `${mockUrlRoot}/api/${mockApiVersion}/groups/${mockGroupId}/transfer_locations`;
mock.onGet(expectedUrl).replyOnce(200, { data: getGroupTransferLocationsResponse });
mock
.onGet(expectedUrl)
.replyOnce(HTTP_STATUS_OK, { data: getGroupTransferLocationsResponse });
await expect(getGroupTransferLocations(mockGroupId, params)).resolves.toMatchObject({
data: { data: getGroupTransferLocationsResponse },

View File

@@ -3,6 +3,7 @@ import getTransferLocationsResponse from 'test_fixtures/api/projects/transfer_lo
import * as projectsApi from '~/api/projects_api';
import { DEFAULT_PER_PAGE } from '~/api';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
describe('~/api/projects_api.js', () => {
let mock;
@@ -34,7 +35,7 @@
const query = '';
const options = {};
mock.onGet(expectedUrl).reply(200, { data: expectedProjects });
mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, { data: expectedProjects });
return projectsApi.getProjects(query, options).then(({ data }) => {
expect(axios.get).toHaveBeenCalledWith(expectedUrl, expectedParams);
@@ -53,7 +54,7 @@
const expectedUrl = '/api/v7/projects/1/import_project_members/2';
const expectedMessage = 'Successfully imported';
mock.onPost(expectedUrl).replyOnce(200, expectedMessage);
mock.onPost(expectedUrl).replyOnce(HTTP_STATUS_OK, expectedMessage);
return projectsApi.importProjectMembers(projectId, targetId).then(({ data }) => {
expect(axios.post).toHaveBeenCalledWith(expectedUrl);
@@ -71,7 +72,7 @@
const params = { page: 1 };
const expectedUrl = '/api/v7/projects/1/transfer_locations';
mock.onGet(expectedUrl).replyOnce(200, { data: getTransferLocationsResponse });
mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK, { data: getTransferLocationsResponse });
await expect(projectsApi.getTransferLocations(projectId, params)).resolves.toMatchObject({
data: { data: getTransferLocationsResponse },

View File

@@ -2,6 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import { followUser, unfollowUser, associationsCount, updateUserStatus } from '~/api/user_api';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import {
associationsCount as associationsCountData,
userStatus as mockUserStatus,
@@ -31,7 +32,7 @@ describe('~/api/user_api', () => {
const expectedUrl = '/api/v4/users/1/follow';
const expectedResponse = { message: 'Success' };
axiosMock.onPost(expectedUrl).replyOnce(200, expectedResponse);
axiosMock.onPost(expectedUrl).replyOnce(HTTP_STATUS_OK, expectedResponse);
await expect(followUser(1)).resolves.toEqual(
expect.objectContaining({ data: expectedResponse }),
@@ -45,7 +46,7 @@
const expectedUrl = '/api/v4/users/1/unfollow';
const expectedResponse = { message: 'Success' };
axiosMock.onPost(expectedUrl).replyOnce(200, expectedResponse);
axiosMock.onPost(expectedUrl).replyOnce(HTTP_STATUS_OK, expectedResponse);
await expect(unfollowUser(1)).resolves.toEqual(
expect.objectContaining({ data: expectedResponse }),
@@ -59,7 +60,7 @@
const expectedUrl = '/api/v4/users/1/associations_count';
const expectedResponse = { data: associationsCountData };
axiosMock.onGet(expectedUrl).replyOnce(200, expectedResponse);
axiosMock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK, expectedResponse);
await expect(associationsCount(1)).resolves.toEqual(
expect.objectContaining({ data: expectedResponse }),
@@ -79,7 +80,7 @@
};
const expectedResponse = { data: mockUserStatus };
axiosMock.onPatch(expectedUrl).replyOnce(200, expectedResponse);
axiosMock.onPatch(expectedUrl).replyOnce(HTTP_STATUS_OK, expectedResponse);
await expect(
updateUserStatus({

View File

@@ -14,7 +16,9 @@ import {
HTTP_STATUS_OK,
} from '~/lib/utils/http_status';
import { redirectTo } from '~/lib/utils/url_utility';
import PipelineNewForm from '~/ci/pipeline_new/components/pipeline_new_form.vue';
import PipelineNewForm, {
POLLING_INTERVAL,
} from '~/ci/pipeline_new/components/pipeline_new_form.vue';
import ciConfigVariablesQuery from '~/ci/pipeline_new/graphql/queries/ci_config_variables.graphql';
import { resolvers } from '~/ci/pipeline_new/graphql/resolvers';
import RefsDropdown from '~/ci/pipeline_new/components/refs_dropdown.vue';
@@ -24,6 +26,7 @@ import {
mockCiConfigVariablesResponseWithoutDesc,
mockEmptyCiConfigVariablesResponse,
mockError,
mockNoCachedCiConfigVariablesResponse,
mockQueryParams,
mockPostParams,
mockProjectId,
@@ -69,6 +72,10 @@ describe('Pipeline New Form', () => {
const findCCAlert = () => wrapper.findComponent(CreditCardValidationRequiredAlert);
const getFormPostParams = () => JSON.parse(mock.history.post[0].data);
const advanceToNextFetch = (milliseconds) => {
jest.advanceTimersByTime(milliseconds);
};
const selectBranch = async (branch) => {
// Select a branch in the dropdown
findRefsDropdown().vm.$emit('input', {
@@ -266,17 +273,98 @@
});
});
describe('when yml defines a variable', () => {
it('loading icon is shown when content is requested and hidden when received', async () => {
mockCiConfigVariables.mockResolvedValue(mockEmptyCiConfigVariablesResponse);
createComponentWithApollo({ props: mockQueryParams, method: mountExtended });
describe('When there are no variables in the API cache', () => {
beforeEach(async () => {
mockCiConfigVariables.mockResolvedValue(mockNoCachedCiConfigVariablesResponse);
createComponentWithApollo({ method: mountExtended });
await waitForPromises();
});
it('stops polling after CONFIG_VARIABLES_TIMEOUT ms have passed', async () => {
advanceToNextFetch(POLLING_INTERVAL);
await waitForPromises();
advanceToNextFetch(POLLING_INTERVAL);
await waitForPromises();
expect(mockCiConfigVariables).toHaveBeenCalledTimes(3);
advanceToNextFetch(POLLING_INTERVAL);
await waitForPromises();
expect(mockCiConfigVariables).toHaveBeenCalledTimes(3);
});
it('shows loading icon while query polls for updated values', async () => {
expect(findLoadingIcon().exists()).toBe(true);
expect(mockCiConfigVariables).toHaveBeenCalledTimes(1);
advanceToNextFetch(POLLING_INTERVAL);
await waitForPromises();
expect(findLoadingIcon().exists()).toBe(true);
expect(mockCiConfigVariables).toHaveBeenCalledTimes(2);
});
it('hides loading icon and stops polling after query fetches the updated values', async () => {
expect(findLoadingIcon().exists()).toBe(true);
mockCiConfigVariables.mockResolvedValue(mockCiConfigVariablesResponse);
advanceToNextFetch(POLLING_INTERVAL);
await waitForPromises();
expect(findLoadingIcon().exists()).toBe(false);
expect(mockCiConfigVariables).toHaveBeenCalledTimes(2);
advanceToNextFetch(POLLING_INTERVAL);
await waitForPromises();
expect(mockCiConfigVariables).toHaveBeenCalledTimes(2);
});
});
const testBehaviorWhenCacheIsPopulated = (queryResponse) => {
beforeEach(async () => {
mockCiConfigVariables.mockResolvedValue(queryResponse);
createComponentWithApollo({ method: mountExtended });
});
it('does not poll for new values', async () => {
await waitForPromises();
expect(mockCiConfigVariables).toHaveBeenCalledTimes(1);
advanceToNextFetch(POLLING_INTERVAL);
await waitForPromises();
expect(mockCiConfigVariables).toHaveBeenCalledTimes(1);
});
it('loading icon is shown when content is requested and hidden when received', async () => {
expect(findLoadingIcon().exists()).toBe(true);
await waitForPromises();
expect(findLoadingIcon().exists()).toBe(false);
});
};
describe('When no variables are defined in the CI configuration and the cache is updated', () => {
testBehaviorWhenCacheIsPopulated(mockEmptyCiConfigVariablesResponse);
it('displays an empty form', async () => {
mockCiConfigVariables.mockResolvedValue(mockEmptyCiConfigVariablesResponse);
createComponentWithApollo({ method: mountExtended });
await waitForPromises();
expect(findKeyInputs().at(0).element.value).toBe('');
expect(findValueInputs().at(0).element.value).toBe('');
expect(findVariableTypes().at(0).props('text')).toBe('Variable');
});
});
describe('When CI configuration has defined variables and they are stored in the cache', () => {
testBehaviorWhenCacheIsPopulated(mockCiConfigVariablesResponse);
describe('with different predefined values', () => {
beforeEach(async () => {
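Editor's note: the new tests above drive the polling behavior with Jest fake timers: advance the mocked clock by one `POLLING_INTERVAL`, flush pending promises, then assert how many times the query handler fired. A bare-bones sketch of that technique, with the handler and interval as stand-ins:

```javascript
jest.useFakeTimers();

const handler = jest.fn();
setInterval(handler, 2000); // stand-in for the component's polling

jest.advanceTimersByTime(2000); // one poll elapses
expect(handler).toHaveBeenCalledTimes(1);

jest.advanceTimersByTime(4000); // two more polls
expect(handler).toHaveBeenCalledTimes(3);
```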

View File

@@ -132,3 +132,4 @@ export const mockEmptyCiConfigVariablesResponse = mockCiConfigVariablesQueryResp
export const mockCiConfigVariablesResponseWithoutDesc = mockCiConfigVariablesQueryResponse(
mockYamlVariablesWithoutDesc,
);
export const mockNoCachedCiConfigVariablesResponse = mockCiConfigVariablesQueryResponse(null);

View File

@@ -4,7 +4,7 @@ import testAction from 'helpers/vuex_action_helper';
import actions from '~/code_navigation/store/actions';
import { setCurrentHoverElement, addInteractionClass } from '~/code_navigation/utils';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
jest.mock('~/code_navigation/utils');
@@ -46,7 +46,7 @@ describe('Code navigation actions', () => {
describe('success', () => {
beforeEach(() => {
mock.onGet(codeNavigationPath).replyOnce(200, [
mock.onGet(codeNavigationPath).replyOnce(HTTP_STATUS_OK, [
{
start_line: 0,
start_char: 0,

View File

@@ -3,7 +3,7 @@ import { nextTick } from 'vue';
import { GlButton, GlFormCheckbox, GlFormInput, GlFormInputGroup, GlDatepicker } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { TEST_HOST } from 'helpers/test_constants';
import NewDeployToken from '~/deploy_tokens/components/new_deploy_token.vue';
import waitForPromises from 'helpers/wait_for_promises';
@@ -184,7 +184,7 @@ describe('New Deploy Token', () => {
write_package_registry: true,
},
})
.replyOnce(200, { username: 'test token username', token: 'test token' });
.replyOnce(HTTP_STATUS_OK, { username: 'test token username', token: 'test token' });
return submitTokenThenCheck();
});
@@ -217,7 +217,7 @@
write_package_registry: true,
},
})
.replyOnce(200, { username: 'test token username', token: 'test token' });
.replyOnce(HTTP_STATUS_OK, { username: 'test token username', token: 'test token' });
return submitTokenThenCheck();
});

View File

@@ -20,6 +20,7 @@ import {
HTTP_STATUS_BAD_REQUEST,
HTTP_STATUS_INTERNAL_SERVER_ERROR,
HTTP_STATUS_NOT_FOUND,
HTTP_STATUS_OK,
} from '~/lib/utils/http_status';
import { mergeUrlParams } from '~/lib/utils/url_utility';
import eventHub from '~/notes/event_hub';
@@ -147,7 +148,7 @@ describe('DiffsStoreActions', () => {
endpointBatch,
),
)
.reply(200, res1)
.reply(HTTP_STATUS_OK, res1)
.onGet(
mergeUrlParams(
{
@@ -159,7 +160,7 @@
endpointBatch,
),
)
.reply(200, res2);
.reply(HTTP_STATUS_OK, res2);
return testAction(
diffActions.fetchDiffFilesBatch,
@@ -191,7 +192,7 @@
});
it('should fetch diff meta information', () => {
mock.onGet(endpointMetadata).reply(200, diffMetadata);
mock.onGet(endpointMetadata).reply(HTTP_STATUS_OK, diffMetadata);
return testAction(
diffActions.fetchDiffFilesMeta,
@@ -253,7 +254,7 @@
it('should commit SET_COVERAGE_DATA with received response', () => {
const data = { files: { 'app.js': { 1: 0, 2: 1 } } };
mock.onGet(endpointCoverage).reply(200, { data });
mock.onGet(endpointCoverage).reply(HTTP_STATUS_OK, { data });
return testAction(
diffActions.fetchCoverageFiles,
@@ -550,7 +551,7 @@
const nextLineNumbers = {};
const options = { endpoint, params, lineNumbers, fileHash, isExpandDown, nextLineNumbers };
const contextLines = { contextLines: [{ lineCode: 6 }] };
mock.onGet(endpoint).reply(200, contextLines);
mock.onGet(endpoint).reply(HTTP_STATUS_OK, contextLines);
return testAction(
diffActions.loadMoreLines,
@@ -573,7 +574,7 @@
const file = { hash: 123, load_collapsed_diff_url: '/load/collapsed/diff/url' };
const data = { hash: 123, parallelDiffLines: [{ lineCode: 1 }] };
const commit = jest.fn();
mock.onGet(file.loadCollapsedDiffUrl).reply(200, data);
mock.onGet(file.loadCollapsedDiffUrl).reply(HTTP_STATUS_OK, data);
return diffActions
.loadCollapsedDiff({ commit, getters: { commitId: null }, state }, file)
@@ -1012,7 +1013,7 @@
putSpy = jest.spyOn(axios, 'put');
gon = window.gon;
mock.onPut(endpointUpdateUser).reply(200, {});
mock.onPut(endpointUpdateUser).reply(HTTP_STATUS_OK, {});
jest.spyOn(eventHub, '$emit').mockImplementation();
});
@@ -1089,7 +1090,7 @@
describe('fetchFullDiff', () => {
describe('success', () => {
beforeEach(() => {
mock.onGet(`${TEST_HOST}/context`).replyOnce(200, ['test']);
mock.onGet(`${TEST_HOST}/context`).replyOnce(HTTP_STATUS_OK, ['test']);
});
it('commits the success and dispatches an action to expand the new lines', () => {
@@ -1174,7 +1175,7 @@
describe('success', () => {
beforeEach(() => {
renamedFile = { ...testFile, context_lines_path: SUCCESS_URL };
mock.onGet(SUCCESS_URL).replyOnce(200, testData);
mock.onGet(SUCCESS_URL).replyOnce(HTTP_STATUS_OK, testData);
});
it.each`
@@ -1274,7 +1275,7 @@
describe('setSuggestPopoverDismissed', () => {
it('commits SET_SHOW_SUGGEST_POPOVER', async () => {
const state = { dismissEndpoint: `${TEST_HOST}/-/user_callouts` };
mock.onPost(state.dismissEndpoint).reply(200, {});
mock.onPost(state.dismissEndpoint).reply(HTTP_STATUS_OK, {});
jest.spyOn(axios, 'post');
@@ -1449,7 +1450,7 @@
beforeEach(() => {
putSpy = jest.spyOn(axios, 'put');
mock.onPut(updateUserEndpoint).reply(200, {});
mock.onPut(updateUserEndpoint).reply(HTTP_STATUS_OK, {});
});
it.each`

View File

@@ -10,6 +10,7 @@ import EditFeatureFlag from '~/feature_flags/components/edit_feature_flag.vue';
import Form from '~/feature_flags/components/form.vue';
import createStore from '~/feature_flags/store/edit';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
Vue.use(Vuex);
@@ -35,7 +36,7 @@ describe('Edit feature flag form', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
mock.onGet(`${TEST_HOST}/feature_flags.json`).replyOnce(200, {
mock.onGet(`${TEST_HOST}/feature_flags.json`).replyOnce(HTTP_STATUS_OK, {
id: 21,
iid: 5,
active: true,

View File

@@ -11,7 +11,7 @@ import FeatureFlagsComponent from '~/feature_flags/components/feature_flags.vue'
import FeatureFlagsTable from '~/feature_flags/components/feature_flags_table.vue';
import createStore from '~/feature_flags/store/index';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue';
import { getRequestData } from '../mock_data';
@@ -75,7 +75,7 @@ describe('Feature flags', () => {
beforeEach(() => {
mock
.onGet(`${TEST_HOST}/endpoint.json`, { params: { page: '1' } })
.reply(200, getRequestData, {});
.reply(HTTP_STATUS_OK, getRequestData, {});
factory(provideData);
return waitForPromises();
});
@@ -120,7 +120,7 @@
beforeEach(() => {
mock
.onGet(`${TEST_HOST}/endpoint.json`, { params: { page: '1' } })
.reply(200, getRequestData, {});
.reply(HTTP_STATUS_OK, getRequestData, {});
factory(provideData);
return waitForPromises();
});
@@ -142,7 +142,7 @@
it('renders a loading icon', () => {
mock
.onGet(`${TEST_HOST}/endpoint.json`, { params: { page: '1' } })
.replyOnce(200, getRequestData, {});
.replyOnce(HTTP_STATUS_OK, getRequestData, {});
factory();
@@ -159,7 +159,7 @@
beforeEach(async () => {
mock.onGet(mockState.endpoint, { params: { page: '1' } }).reply(
200,
HTTP_STATUS_OK,
{
feature_flags: [],
count: {
@@ -204,14 +204,16 @@ describe('Feature flags', () => {
describe('with paginated feature flags', () => {
beforeEach(() => {
mock.onGet(mockState.endpoint, { params: { page: '1' } }).replyOnce(200, getRequestData, {
'x-next-page': '2',
'x-page': '1',
'X-Per-Page': '2',
'X-Prev-Page': '',
'X-TOTAL': '37',
'X-Total-Pages': '5',
});
mock
.onGet(mockState.endpoint, { params: { page: '1' } })
.replyOnce(HTTP_STATUS_OK, getRequestData, {
'x-next-page': '2',
'x-page': '1',
'X-Per-Page': '2',
'X-Prev-Page': '',
'X-TOTAL': '37',
'X-Total-Pages': '5',
});
factory();
jest.spyOn(store, 'dispatch');
@@ -306,7 +308,7 @@
beforeEach(() => {
mock
.onGet(`${TEST_HOST}/endpoint.json`, { params: { page: '1' } })
.reply(200, getRequestData, {});
.reply(HTTP_STATUS_OK, getRequestData, {});
factory();
return waitForPromises();
});

View File

@@ -20,7 +20,7 @@ import {
import * as types from '~/feature_flags/store/index/mutation_types';
import state from '~/feature_flags/store/index/state';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { getRequestData, rotateData, featureFlag } from '../../mock_data';
jest.mock('~/api.js');
@@ -58,7 +58,7 @@ describe('Feature flags actions', () => {
describe('success', () => {
it('dispatches requestFeatureFlags and receiveFeatureFlagsSuccess', () => {
mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(200, getRequestData, {});
mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(HTTP_STATUS_OK, getRequestData, {});
return testAction(
fetchFeatureFlags,
@@ -157,7 +157,7 @@
describe('success', () => {
it('dispatches requestRotateInstanceId and receiveRotateInstanceIdSuccess', () => {
mock.onPost(`${TEST_HOST}/endpoint.json`).replyOnce(200, rotateData, {});
mock.onPost(`${TEST_HOST}/endpoint.json`).replyOnce(HTTP_STATUS_OK, rotateData, {});
return testAction(
rotateInstanceId,
@@ -257,7 +257,7 @@
});
describe('success', () => {
it('dispatches updateFeatureFlag and receiveUpdateFeatureFlagSuccess', () => {
mock.onPut(featureFlag.update_path).replyOnce(200, featureFlag, {});
mock.onPut(featureFlag.update_path).replyOnce(HTTP_STATUS_OK, featureFlag, {});
return testAction(
toggleFeatureFlag,

View File

@@ -12,6 +12,7 @@ import eventHub from '~/frequent_items/event_hub';
import { createStore } from '~/frequent_items/store';
import { getTopFrequentItems } from '~/frequent_items/utils';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { currentSession, mockFrequentProjects, mockSearchedProjects } from '../mock_data';
Vue.use(Vuex);
@@ -115,7 +116,9 @@
});
it('should render searched projects list', async () => {
mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(200, mockSearchedProjects.data);
mock
.onGet(/\/api\/v4\/projects.json(.*)$/)
.replyOnce(HTTP_STATUS_OK, mockSearchedProjects.data);
setSearch('gitlab');
await nextTick();

View File

@@ -5,7 +5,7 @@ import * as types from '~/frequent_items/store/mutation_types';
import state from '~/frequent_items/store/state';
import AccessorUtilities from '~/lib/utils/accessor';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import {
mockNamespace,
@@ -174,7 +174,9 @@
});
it('should dispatch `receiveSearchedItemsSuccess`', () => {
mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(200, mockSearchedProjects, {});
mock
.onGet(/\/api\/v4\/projects.json(.*)$/)
.replyOnce(HTTP_STATUS_OK, mockSearchedProjects, {});
return testAction(
actions.fetchSearchedItems,

View File

@@ -7,6 +7,7 @@ import { createStore } from '~/ide/stores';
import * as actions from '~/ide/stores/actions/file';
import * as types from '~/ide/stores/mutation_types';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { stubPerformanceWebAPI } from 'helpers/performance';
import { file, createTriggerRenameAction, createTriggerUpdatePayload } from '../../helpers';
@@ -243,7 +244,7 @@ describe('IDE store file actions', () => {
describe('success', () => {
beforeEach(() => {
mock.onGet(`${RELATIVE_URL_ROOT}/test/test/-/7297abc/${localFile.path}`).replyOnce(
200,
HTTP_STATUS_OK,
{
raw_path: 'raw_path',
},
@@ -320,7 +321,7 @@
store.state.entries[localFile.path] = localFile;
mock.onGet(`${RELATIVE_URL_ROOT}/test/test/-/7297abc/old-dull-file`).replyOnce(
200,
HTTP_STATUS_OK,
{
raw_path: 'raw_path',
},
@@ -377,7 +378,7 @@ describe('IDE store file actions', () => {
describe('success', () => {
beforeEach(() => {
mock.onGet(/(.*)/).replyOnce(200, 'raw');
mock.onGet(/(.*)/).replyOnce(HTTP_STATUS_OK, 'raw');
});
it('calls getRawFileData service method', () => {
@ -470,7 +471,7 @@ describe('IDE store file actions', () => {
describe('return JSON', () => {
beforeEach(() => {
mock.onGet(/(.*)/).replyOnce(200, JSON.stringify({ test: '123' }));
mock.onGet(/(.*)/).replyOnce(HTTP_STATUS_OK, JSON.stringify({ test: '123' }));
});
it('does not parse returned JSON', () => {

View File

@ -8,7 +8,7 @@ import { createStore } from '~/ide/stores';
import { showTreeEntry, getFiles, setDirectoryData } from '~/ide/stores/actions/tree';
import * as types from '~/ide/stores/mutation_types';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { file, createEntriesFromPaths } from '../../helpers';
describe('Multi-file store tree actions', () => {
@ -53,7 +53,7 @@ describe('Multi-file store tree actions', () => {
mock
.onGet(/(.*)/)
.replyOnce(200, [
.replyOnce(HTTP_STATUS_OK, [
'file.txt',
'folder/fileinfolder.js',
'folder/subfolder/fileinsubfolder.js',

View File

@ -10,7 +10,7 @@ import {
import * as types from '~/ide/stores/modules/branches/mutation_types';
import state from '~/ide/stores/modules/branches/state';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { branches, projectData } from '../../../mock_data';
describe('IDE branches actions', () => {
@ -95,7 +95,9 @@ describe('IDE branches actions', () => {
describe('success', () => {
beforeEach(() => {
mock.onGet(/\/api\/v4\/projects\/\d+\/repository\/branches(.*)$/).replyOnce(200, branches);
mock
.onGet(/\/api\/v4\/projects\/\d+\/repository\/branches(.*)$/)
.replyOnce(HTTP_STATUS_OK, branches);
});
it('calls API with params', () => {

View File

@ -4,7 +4,7 @@ import * as actions from '~/ide/stores/modules/file_templates/actions';
import * as types from '~/ide/stores/modules/file_templates/mutation_types';
import createState from '~/ide/stores/modules/file_templates/state';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
describe('IDE file templates actions', () => {
let state;
@ -75,7 +75,7 @@ describe('IDE file templates actions', () => {
const page = pages[pageNum - 1];
const hasNextPage = pageNum < pages.length;
return [200, page, hasNextPage ? { 'X-NEXT-PAGE': pageNum + 1 } : {}];
return [HTTP_STATUS_OK, page, hasNextPage ? { 'X-NEXT-PAGE': pageNum + 1 } : {}];
});
});
@ -200,10 +200,10 @@ describe('IDE file templates actions', () => {
beforeEach(() => {
mock
.onGet(/api\/(.*)\/templates\/licenses\/mit/)
.replyOnce(200, { content: 'MIT content' });
.replyOnce(HTTP_STATUS_OK, { content: 'MIT content' });
mock
.onGet(/api\/(.*)\/templates\/licenses\/testing/)
.replyOnce(200, { content: 'testing content' });
.replyOnce(HTTP_STATUS_OK, { content: 'testing content' });
});
it('dispatches setFileTemplate if template already has content', () => {

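Editor's note: the file-templates hunk above also touches axios-mock-adapter's reply-function form, which that spec uses to fake pagination. A sketch of the form, with illustrative page data and the same mock/import setup assumed as in the earlier sketch:

it('pages through results', async () => {
  const pages = [['mit'], ['testing']];

  mock.onGet(/templates/).reply((config) => {
    const pageNum = Number(config.params?.page) || 1;
    const hasNextPage = pageNum < pages.length;

    // A reply callback returns a [status, body, headers] tuple, so pagination
    // headers such as X-NEXT-PAGE can be derived from the requested page.
    return [HTTP_STATUS_OK, pages[pageNum - 1], hasNextPage ? { 'X-NEXT-PAGE': pageNum + 1 } : {}];
  });

  const first = await axios.get('/templates', { params: { page: 1 } });

  expect(first.data).toEqual(['mit']);
});
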
View File

@ -10,7 +10,7 @@ import {
import * as types from '~/ide/stores/modules/merge_requests/mutation_types';
import state from '~/ide/stores/modules/merge_requests/state';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { mergeRequests } from '../../../mock_data';
describe('IDE merge requests actions', () => {
@ -81,7 +81,7 @@ describe('IDE merge requests actions', () => {
describe('success', () => {
beforeEach(() => {
mock.onGet(/\/api\/v4\/merge_requests\/?/).replyOnce(200, mergeRequests);
mock.onGet(/\/api\/v4\/merge_requests\/?/).replyOnce(HTTP_STATUS_OK, mergeRequests);
});
it('calls API with params', () => {
@ -133,7 +133,9 @@ describe('IDE merge requests actions', () => {
describe('success without type', () => {
beforeEach(() => {
mock.onGet(/\/api\/v4\/projects\/.+\/merge_requests\/?$/).replyOnce(200, mergeRequests);
mock
.onGet(/\/api\/v4\/projects\/.+\/merge_requests\/?$/)
.replyOnce(HTTP_STATUS_OK, mergeRequests);
});
it('calls API with project', () => {

View File

@ -25,7 +25,11 @@ import {
import * as types from '~/ide/stores/modules/pipelines/mutation_types';
import state from '~/ide/stores/modules/pipelines/state';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_NOT_FOUND } from '~/lib/utils/http_status';
import {
HTTP_STATUS_INTERNAL_SERVER_ERROR,
HTTP_STATUS_NOT_FOUND,
HTTP_STATUS_OK,
} from '~/lib/utils/http_status';
import waitForPromises from 'helpers/wait_for_promises';
import { pipelines, jobs } from '../../../mock_data';
@ -119,7 +123,7 @@ describe('IDE pipelines actions', () => {
beforeEach(() => {
mock
.onGet('/abc/def/commit/abc123def456ghi789jkl/pipelines')
.reply(200, { data: { foo: 'bar' } }, { 'poll-interval': '10000' });
.reply(HTTP_STATUS_OK, { data: { foo: 'bar' } }, { 'poll-interval': '10000' });
});
it('dispatches request', async () => {
@ -241,7 +245,7 @@ describe('IDE pipelines actions', () => {
describe('success', () => {
beforeEach(() => {
mock.onGet(stage.dropdownPath).replyOnce(200, jobs);
mock.onGet(stage.dropdownPath).replyOnce(HTTP_STATUS_OK, jobs);
});
it('dispatches request', () => {
@ -370,7 +374,7 @@ describe('IDE pipelines actions', () => {
describe('success', () => {
beforeEach(() => {
jest.spyOn(axios, 'get');
mock.onGet(`${TEST_HOST}/project/builds/trace`).replyOnce(200, { html: 'html' });
mock.onGet(`${TEST_HOST}/project/builds/trace`).replyOnce(HTTP_STATUS_OK, { html: 'html' });
});
it('dispatches request', () => {

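Editor's note: one detail in the pipelines spec worth calling out is that `.reply` answers every matching request, while `.replyOnce` consumes itself, which matters once the code under test polls. A sketch under the same setup assumptions:

it('keeps answering while the store polls', async () => {
  // Persistent stub: unlike replyOnce, every poll cycle gets the same
  // payload and headers.
  mock
    .onGet('/abc/def/commit/abc123def456ghi789jkl/pipelines')
    .reply(HTTP_STATUS_OK, { data: { foo: 'bar' } }, { 'poll-interval': '10000' });

  const { headers } = await axios.get('/abc/def/commit/abc123def456ghi789jkl/pipelines');

  // The third argument to reply() surfaces as response headers.
  expect(headers['poll-interval']).toBe('10000');
});
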
View File

@ -3,6 +3,7 @@ import fuzzaldrinPlus from 'fuzzaldrin-plus';
import { AutocompleteCache } from '~/issues/dashboard/utils';
import { MAX_LIST_SIZE } from '~/issues/list/constants';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
describe('AutocompleteCache', () => {
let autocompleteCache;
@ -42,7 +43,7 @@ describe('AutocompleteCache', () => {
let response;
beforeEach(async () => {
axiosMock.onGet(url).replyOnce(200, data);
axiosMock.onGet(url).replyOnce(HTTP_STATUS_OK, data);
response = await autocompleteCache.fetch({ url, cacheName, searchProperty });
});
@ -59,7 +60,7 @@ describe('AutocompleteCache', () => {
let response;
beforeEach(async () => {
axiosMock.onGet(url).replyOnce(200, data);
axiosMock.onGet(url).replyOnce(HTTP_STATUS_OK, data);
jest.spyOn(fuzzaldrinPlus, 'filter');
// Populate cache
await autocompleteCache.fetch({ url, cacheName, searchProperty });

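Editor's note: the caching behaviour the AutocompleteCache spec relies on (its "Populate cache" step) can also be asserted through the adapter's request history. A sketch, assuming the cache keeps serving a given cacheName from memory after the first fetch:

it('serves repeat fetches from the cache', async () => {
  axiosMock.onGet(url).replyOnce(HTTP_STATUS_OK, data);

  // The first call populates the cache; the second must not hit the network,
  // since the already-consumed replyOnce stub would otherwise 404 it.
  await autocompleteCache.fetch({ url, cacheName, searchProperty });
  await autocompleteCache.fetch({ url, cacheName, searchProperty });

  expect(axiosMock.history.get).toHaveLength(1);
});
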
View File

@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';
import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import * as actions from '~/issues/related_merge_requests/store/actions';
import * as types from '~/issues/related_merge_requests/store/mutation_types';
@ -73,7 +73,9 @@ describe('RelatedMergeRequest store actions', () => {
describe('for a successful request', () => {
it('should dispatch success action', () => {
const data = { a: 1 };
mock.onGet(`${state.apiEndpoint}?per_page=100`).replyOnce(200, data, { 'x-total': 2 });
mock
.onGet(`${state.apiEndpoint}?per_page=100`)
.replyOnce(HTTP_STATUS_OK, data, { 'x-total': 2 });
return testAction(
actions.fetchMergeRequests,

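Editor's note: for reference, the testAction helper (from helpers/vuex_action_helper) used throughout these hunks dispatches a Vuex action against a given state and asserts the mutations and follow-up actions it produces, returning a promise. A sketch mirroring the x-total header handling above; the action names and payload shape are placeholders, not the store's real ones:

it('should dispatch success action', () => {
  mock
    .onGet(`${state.apiEndpoint}?per_page=100`)
    .replyOnce(HTTP_STATUS_OK, [{ a: 1 }], { 'x-total': 2 });

  return testAction(
    actions.fetchMergeRequests,
    null, // action payload
    state, // module state the action runs against
    [], // expected committed mutations
    [
      // expected dispatched actions (names and payload shape are placeholders)
      { type: 'requestData' },
      { type: 'receiveDataSuccess', payload: { data: [{ a: 1 }], total: 2 } },
    ],
  );
});
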
View File

@ -15,6 +15,7 @@ import StuckBlock from '~/jobs/components/job/stuck_block.vue';
import UnmetPrerequisitesBlock from '~/jobs/components/job/unmet_prerequisites_block.vue';
import createStore from '~/jobs/store';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import job from '../../mock_data';
describe('Job App', () => {
@ -48,8 +49,8 @@ describe('Job App', () => {
};
const setupAndMount = async ({ jobData = {}, jobLogData = {} } = {}) => {
mock.onGet(initSettings.endpoint).replyOnce(200, { ...job, ...jobData });
mock.onGet(`${initSettings.pagePath}/trace.json`).reply(200, jobLogData);
mock.onGet(initSettings.endpoint).replyOnce(HTTP_STATUS_OK, { ...job, ...jobData });
mock.onGet(`${initSettings.pagePath}/trace.json`).reply(HTTP_STATUS_OK, jobLogData);
const asyncInit = store.dispatch('init', initSettings);

View File

@ -30,7 +30,7 @@ import {
import * as types from '~/jobs/store/mutation_types';
import state from '~/jobs/store/state';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
describe('Job State actions', () => {
let mockedState;
@ -113,7 +113,9 @@ describe('Job State actions', () => {
describe('success', () => {
it('dispatches requestJob and receiveJobSuccess', () => {
mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(200, { id: 121212, name: 'karma' });
mock
.onGet(`${TEST_HOST}/endpoint.json`)
.replyOnce(HTTP_STATUS_OK, { id: 121212, name: 'karma' });
return testAction(
fetchJob,
@ -215,7 +217,7 @@ describe('Job State actions', () => {
describe('success', () => {
it('dispatches requestJobLog, receiveJobLogSuccess and stopPollingJobLog when job is complete', () => {
mock.onGet(`${TEST_HOST}/endpoint/trace.json`).replyOnce(200, {
mock.onGet(`${TEST_HOST}/endpoint/trace.json`).replyOnce(HTTP_STATUS_OK, {
html: 'I, [2018-08-17T22:57:45.707325 #1841] INFO -- :',
complete: true,
});
@ -253,7 +255,7 @@ describe('Job State actions', () => {
complete: false,
};
mock.onGet(`${TEST_HOST}/endpoint/trace.json`).replyOnce(200, jobLogPayload);
mock.onGet(`${TEST_HOST}/endpoint/trace.json`).replyOnce(HTTP_STATUS_OK, jobLogPayload);
});
it('dispatches startPollingJobLog', () => {
@ -425,9 +427,10 @@ describe('Job State actions', () => {
describe('success', () => {
it('dispatches requestJobsForStage and receiveJobsForStageSuccess', () => {
mock
.onGet(`${TEST_HOST}/jobs.json`)
.replyOnce(200, { latest_statuses: [{ id: 121212, name: 'build' }], retried: [] });
mock.onGet(`${TEST_HOST}/jobs.json`).replyOnce(HTTP_STATUS_OK, {
latest_statuses: [{ id: 121212, name: 'build' }],
retried: [],
});
return testAction(
fetchJobsForStage,

View File

@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import { setFaviconOverlay, resetFavicon } from '~/lib/utils/favicon';
import { setCiStatusFavicon } from '~/lib/utils/favicon_ci';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
jest.mock('~/lib/utils/favicon');
@ -29,7 +29,7 @@ describe('~/lib/utils/favicon_ci', () => {
`(
'with response=$response',
async ({ response, setFaviconOverlayCalls, resetFaviconCalls }) => {
mock.onGet(TEST_URL).replyOnce(200, response);
mock.onGet(TEST_URL).replyOnce(HTTP_STATUS_OK, response);
expect(setFaviconOverlay).not.toHaveBeenCalled();
expect(resetFavicon).not.toHaveBeenCalled();

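Editor's note: the favicon spec drives the stubbed response through Jest's tagged-template `.each` table. A sketch of that pattern; the response shape and expected call counts are illustrative, and the favicon helpers are assumed to be jest-mocked as in the spec above:

it.each`
  response            | setFaviconOverlayCalls
  ${{ favicon: 'x' }} | ${1}
  ${{}}               | ${0}
`('with response=$response', async ({ response, setFaviconOverlayCalls }) => {
  mock.onGet(TEST_URL).replyOnce(HTTP_STATUS_OK, response);

  await setCiStatusFavicon(TEST_URL);

  // Each table row becomes one test case with its own expectations.
  expect(setFaviconOverlay).toHaveBeenCalledTimes(setFaviconOverlayCalls);
});
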
View File

@ -101,18 +101,6 @@ exports[`packages_list_row renders 1`] = `
</div>
</div>
<div
class="gl-display-flex"
>
<div
class="gl-w-7"
/>
<!---->
<div
class="gl-w-9"
/>
</div>
<!---->
</div>
`;

View File

@ -87,18 +87,6 @@ exports[`VersionRow renders 1`] = `
<!---->
</div>
<div
class="gl-display-flex"
>
<div
class="gl-w-7"
/>
<!---->
<div
class="gl-w-9"
/>
</div>
<!---->
</div>
`;

View File

@ -54,12 +54,15 @@ exports[`packages_list_row renders 1`] = `
class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-min-h-6 gl-min-w-0 gl-flex-grow-1"
>
<div
class="gl-display-flex"
class="gl-display-flex gl-align-items-center"
data-testid="left-secondary-infos"
>
<span>
1.0.0
</span>
<gl-truncate-stub
class="gl-max-w-15 gl-md-max-w-26"
position="end"
text="1.0.0"
withtooltip="true"
/>
<!---->
@ -135,18 +138,6 @@ exports[`packages_list_row renders 1`] = `
</div>
</div>
<div
class="gl-display-flex"
>
<div
class="gl-w-7"
/>
<!---->
<div
class="gl-w-9"
/>
</div>
<!---->
</div>
`;

View File

@ -43,6 +43,7 @@ describe('packages_list_row', () => {
const findPackageLink = () => wrapper.findByTestId('details-link');
const findWarningIcon = () => wrapper.findByTestId('warning-icon');
const findLeftSecondaryInfos = () => wrapper.findByTestId('left-secondary-infos');
const findPackageVersion = () => findLeftSecondaryInfos().findComponent(GlTruncate);
const findPublishMethod = () => wrapper.findComponent(PublishMethod);
const findCreatedDateText = () => wrapper.findByTestId('created-date');
const findTimeAgoTooltip = () => wrapper.findComponent(TimeagoTooltip);
@ -213,7 +214,10 @@ describe('packages_list_row', () => {
it('has the package version', () => {
mountComponent();
expect(findLeftSecondaryInfos().text()).toContain(packageWithoutTags.version);
expect(findPackageVersion().props()).toMatchObject({
text: packageWithoutTags.version,
withTooltip: true,
});
});
it('if the pipeline exists show the author message', () => {

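Editor's note: the assertion style introduced in this hunk checks the stubbed GlTruncate component's props instead of rendered text, since shallow rendering leaves the stub in place. A condensed sketch, reusing the spec's wrapper helpers; the version string is illustrative:

it('renders the version as a GlTruncate prop', () => {
  mountComponent();

  const findPackageVersion = () =>
    wrapper.findByTestId('left-secondary-infos').findComponent(GlTruncate);

  // With a stub in place, the version string is a prop rather than visible text.
  expect(findPackageVersion().props()).toMatchObject({
    text: '1.0.0',
    position: 'end',
    withTooltip: true,
  });
});
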
View File

@ -3,6 +3,7 @@ import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import waitForPromises from 'helpers/wait_for_promises';
import '~/lib/utils/common_utils';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { addDelimiter } from '~/lib/utils/text_utility';
import Todos from '~/pages/dashboard/todos/index/todos';
@ -41,7 +42,7 @@ describe('Todos', () => {
// Arrange
mock
.onDelete(path)
.replyOnce(200, { count: TEST_COUNT_BIG, done_count: TEST_DONE_COUNT_BIG });
.replyOnce(HTTP_STATUS_OK, { count: TEST_COUNT_BIG, done_count: TEST_DONE_COUNT_BIG });
onToggleSpy = jest.fn();
document.addEventListener('todo:toggle', onToggleSpy);

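Editor's note: the todos spec observes a custom DOM event rather than a callback. The listener/spy wiring can be exercised in isolation like this; the event name comes from the spec, the detail payload is illustrative:

it('observes the todo:toggle event', () => {
  const onToggleSpy = jest.fn();
  document.addEventListener('todo:toggle', onToggleSpy);

  // jsdom implements CustomEvent, so the event can be dispatched directly.
  document.dispatchEvent(new CustomEvent('todo:toggle', { detail: { count: 5 } }));

  expect(onToggleSpy).toHaveBeenCalledTimes(1);
  expect(onToggleSpy.mock.calls[0][0].detail).toEqual({ count: 5 });
});
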
View File

@ -5,7 +5,7 @@ import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import PipelineMultiActions, {
i18n,
} from '~/pipelines/components/pipelines_list/pipeline_multi_actions.vue';
@ -80,7 +80,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
describe('Artifacts', () => {
it('should fetch artifacts and show search box on dropdown click', async () => {
const endpoint = artifactsEndpoint.replace(artifactsEndpointPlaceholder, pipelineId);
mockAxios.onGet(endpoint).replyOnce(200, { artifacts });
mockAxios.onGet(endpoint).replyOnce(HTTP_STATUS_OK, { artifacts });
createComponent();
findDropdown().vm.$emit('show');
await waitForPromises();

View File

@ -125,7 +125,7 @@ describe('Release edit/new component', () => {
it('renders the description text at the top of the page', () => {
expect(wrapper.find('.js-subtitle-text').text()).toBe(
'Releases are based on Git tags. We recommend tags that use semantic versioning, for example v1.0.0, v2.1.0-pre.',
'Releases are based on Git tags. We recommend tags that use semantic versioning, for example 1.0.0, 2.1.0-pre.',
);
});

View File

@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';
import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import SingleFileDiff from '~/single_file_diff';
describe('SingleFileDiff', () => {
@ -11,7 +11,9 @@ describe('SingleFileDiff', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
mock.onGet(blobDiffPath).replyOnce(200, { html: `<div class="diff-content">MOCKED</div>` });
mock
.onGet(blobDiffPath)
.replyOnce(HTTP_STATUS_OK, { html: `<div class="diff-content">MOCKED</div>` });
});
afterEach(() => {

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe BulkImports::Groups::Loaders::GroupLoader do
RSpec.describe BulkImports::Groups::Loaders::GroupLoader, feature_category: :importers do
describe '#load' do
let_it_be(:user) { create(:user) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
@ -29,19 +29,9 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader do
end
end
context 'when group exists' do
it 'raises an error' do
group1 = create(:group)
group2 = create(:group, parent: group1)
entity.update!(destination_namespace: group1.full_path)
data = { 'path' => group2.path }
expect { subject.load(context, data) }.to raise_error(described_class::GroupCreationError, 'Group exists')
end
end
context 'when there are other group errors' do
it 'raises an error with those errors' do
entity.update!(destination_namespace: '')
group = ::Group.new
group.validate
expected_errors = group.errors.full_messages.to_sentence

View File

@ -158,6 +158,16 @@ RSpec.describe Analytics::CycleAnalytics::Aggregation, type: :model, feature_cat
end.not_to change { described_class.count }
end
end
context 'when the aggregation was disabled for some reason' do
it 're-enables the aggregation' do
create(:cycle_analytics_aggregation, enabled: false, namespace: group)
aggregation = described_class.safe_create_for_namespace(group)
expect(aggregation).to be_enabled
end
end
end
describe '#load_batch' do

View File

@ -45,6 +45,21 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
expect(entity).to be_valid
end
it 'is invalid when destination_namespace is nil' do
entity = build(:bulk_import_entity, :group_entity, group: build(:group), project: nil, destination_namespace: nil)
expect(entity).not_to be_valid
end
it 'is invalid when destination_slug is empty' do
entity = build(:bulk_import_entity, :group_entity, group: build(:group), project: nil, destination_slug: '')
expect(entity).not_to be_valid
end
it 'is invalid when destination_slug is nil' do
entity = build(:bulk_import_entity, :group_entity, group: build(:group), project: nil, destination_slug: nil)
expect(entity).not_to be_valid
end
it 'is invalid as a project_entity' do
stub_feature_flags(bulk_import_projects: true)

View File

@ -13,22 +13,21 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group1',
destination_slug: 'destination group 1',
destination_slug: 'destination-group-1',
destination_namespace: 'parent-group',
migrate_projects: migrate_projects
},
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group2',
destination_slug: 'destination group 2',
destination_slug: 'destination-group-2',
destination_namespace: 'parent-group',
migrate_projects: migrate_projects
},
{
source_type: 'project_entity',
source_full_path: 'full/path/to/project1',
destination_slug: 'destination project 1',
destination_slug: 'destination-project-1',
destination_namespace: 'parent-group',
migrate_projects: migrate_projects
}
@ -267,56 +266,188 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do
extra: { user_role: 'Not a member', import_type: 'bulk_import_group' }
)
end
end
context 'when there is a destination_namespace but no parent_namespace' do
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group1',
destination_slug: 'destination-group-1',
destination_namespace: 'destination1'
}
]
context 'when there is a destination_namespace but no parent_namespace' do
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group1',
destination_slug: 'destination-group-1',
destination_namespace: 'destination1'
}
]
end
it 'defines access_level from destination_namespace' do
destination_group.add_developer(user)
subject.execute
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'import_access_level',
user: user,
extra: { user_role: 'Developer', import_type: 'bulk_import_group' }
)
end
end
it 'defines access_level from destination_namespace' do
destination_group.add_developer(user)
subject.execute
context 'when there is no destination_namespace or parent_namespace' do
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group1',
destination_slug: 'destinationational-mcdestiny',
destination_namespace: 'destinational-mcdestiny'
}
]
end
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'import_access_level',
user: user,
extra: { user_role: 'Developer', import_type: 'bulk_import_group' }
)
it 'defines access_level as owner' do
subject.execute
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'import_access_level',
user: user,
extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
)
end
end
end
context 'when there is no destination_namespace or parent_namespace' do
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group1',
destination_slug: 'destinationational mcdestiny',
destination_namespace: 'destinational-mcdestiny'
}
]
describe '.validate_destination_full_path' do
context 'when the source_type is a group' do
context 'when the provided destination_slug already exists in the destination_namespace' do
let_it_be(:existing_subgroup) { create(:group, path: 'existing-subgroup', parent_id: parent_group.id) }
let_it_be(:existing_subgroup_2) { create(:group, path: 'existing-subgroup_2', parent_id: parent_group.id) }
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/source',
destination_slug: existing_subgroup.path,
destination_namespace: parent_group.path,
migrate_projects: migrate_projects
}
]
end
it 'returns ServiceResponse with an error message' do
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq(
"Import aborted as 'parent-group/existing-subgroup' already exists. " \
"Change the destination and try again."
)
end
end
context 'when the destination_slug conflicts with an existing top-level namespace' do
let_it_be(:existing_top_level_group) { create(:group, path: 'top-level-group') }
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/source',
destination_slug: existing_top_level_group.path,
destination_namespace: '',
migrate_projects: migrate_projects
}
]
end
it 'returns ServiceResponse with an error message' do
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq(
"Import aborted as 'top-level-group' already exists. " \
"Change the destination and try again."
)
end
end
context 'when the destination_slug does not conflict with an existing top-level namespace' do
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/source',
destination_slug: 'new-group',
destination_namespace: parent_group.path,
migrate_projects: migrate_projects
}
]
end
it 'returns success ServiceResponse' do
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_success
end
end
end
it 'defines access_level as owner' do
subject.execute
context 'when the source_type is a project' do
context 'when the provided destination_slug already exists in the destination_namespace' do
let_it_be(:existing_group) { create(:group, path: 'existing-group') }
let_it_be(:existing_project) { create(:project, path: 'existing-project', parent_id: existing_group.id) }
let(:params) do
[
{
source_type: 'project_entity',
source_full_path: 'full/path/to/source',
destination_slug: existing_project.path,
destination_namespace: existing_group.path,
migrate_projects: migrate_projects
}
]
end
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'import_access_level',
user: user,
extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
)
it 'returns ServiceResponse with an error message' do
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq(
"Import aborted as 'existing-group/existing-project' already exists. " \
"Change the destination and try again."
)
end
end
context 'when the destination_slug does not conflict with an existing project' do
let_it_be(:existing_group) { create(:group, path: 'existing-group') }
let(:params) do
[
{
source_type: 'project_entity',
source_full_path: 'full/path/to/source',
destination_slug: 'new-project',
destination_namespace: 'existing-group',
migrate_projects: migrate_projects
}
]
end
it 'returns success ServiceResponse' do
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_success
end
end
end
end
end