Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-04-26 12:12:07 +00:00
parent 5e7fac8adf
commit 1cdda1cbfb
73 changed files with 563 additions and 334 deletions

View File

@ -1,9 +0,0 @@
---
# Cop supports --autocorrect.
Lint/AmbiguousRange:
Exclude:
- 'app/models/ci/runner.rb'
- 'app/models/ci/runner_manager.rb'
- 'app/services/clusters/agent_tokens/track_usage_service.rb'
- 'lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder.rb'
- 'spec/lib/gitlab/database/reindexing/reindex_concurrently_spec.rb'

View File

@ -1,4 +1,3 @@
import Vue from 'vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { formatMedianValues } from '../utils';
import { PAGINATION_SORT_DIRECTION_DESC, PAGINATION_SORT_FIELD_DURATION } from '../constants';
@ -15,11 +14,11 @@ export default {
state.createdAfter = createdAfter;
state.features = features;
Vue.set(state, 'pagination', {
state.pagination = {
page: pagination.page ?? state.pagination.page,
sort: pagination.sort ?? state.pagination.sort,
direction: pagination.direction ?? state.pagination.direction,
});
};
},
[types.SET_LOADING](state, loadingState) {
state.isLoading = loadingState;
@ -38,12 +37,12 @@ export default {
state.predefinedDateRange = predefinedDateRange;
},
[types.SET_PAGINATION](state, { page, hasNextPage, sort, direction }) {
Vue.set(state, 'pagination', {
state.pagination = {
page,
hasNextPage,
sort: sort || PAGINATION_SORT_FIELD_DURATION,
direction: direction || PAGINATION_SORT_DIRECTION_DESC,
});
};
},
[types.SET_NO_ACCESS_ERROR](state) {
state.hasNoAccessError = true;

View File

@ -42,7 +42,7 @@ export default {
},
bodyText() {
return this.issueType.toLowerCase() === TYPE_EPIC
? __('Delete this epic and all descendants?')
? __('Delete this epic and release all child items?')
: sprintf(__('%{issuableType} will be removed! Are you sure?'), {
issuableType: capitalizeFirstCharacter(this.issueType),
});

View File

@ -22,6 +22,7 @@ import {
sprintfWorkItem,
I18N_WORK_ITEM_DELETE,
I18N_WORK_ITEM_ARE_YOU_SURE_DELETE,
I18N_WORK_ITEM_ARE_YOU_SURE_DELETE_HIERARCHY,
TEST_ID_CONFIDENTIALITY_TOGGLE_ACTION,
TEST_ID_NOTIFICATIONS_TOGGLE_FORM,
TEST_ID_DELETE_ACTION,
@ -157,6 +158,11 @@ export default {
required: false,
default: false,
},
hasChildren: {
type: Boolean,
required: false,
default: false,
},
},
data() {
return {
@ -184,7 +190,6 @@ export default {
i18n() {
return {
deleteWorkItem: sprintfWorkItem(I18N_WORK_ITEM_DELETE, this.workItemType),
areYouSureDelete: sprintfWorkItem(I18N_WORK_ITEM_ARE_YOU_SURE_DELETE, this.workItemType),
convertError: sprintfWorkItem(I18N_WORK_ITEM_ERROR_CONVERTING, this.workItemType),
copyCreateNoteEmail: sprintfWorkItem(
I18N_WORK_ITEM_COPY_CREATE_NOTE_EMAIL,
@ -197,6 +202,11 @@ export default {
),
};
},
areYouSureDeleteMessage() {
return this.hasChildren
? sprintfWorkItem(I18N_WORK_ITEM_ARE_YOU_SURE_DELETE_HIERARCHY, this.workItemType)
: sprintfWorkItem(I18N_WORK_ITEM_ARE_YOU_SURE_DELETE, this.workItemType);
},
canLockWorkItem() {
return this.canUpdate && this.glFeatures.workItemsBeta;
},
@ -463,7 +473,7 @@ export default {
@ok="handleDeleteWorkItem"
@hide="handleCancelDeleteWorkItem"
>
{{ i18n.areYouSureDelete }}
{{ areYouSureDeleteMessage }}
</gl-modal>
</div>
</template>

View File

@ -233,6 +233,9 @@ export default {
children() {
return this.workItem ? findHierarchyWidgetChildren(this.workItem) : [];
},
hasChildren() {
return !isEmpty(this.children);
},
workItemBodyClass() {
return {
'gl-pt-5': !this.updateError && !this.isModal,
@ -500,6 +503,7 @@ export default {
:work-item-create-note-email="workItem.createNoteEmail"
:is-modal="isModal"
:work-item-state="workItem.state"
:has-children="hasChildren"
@deleteWorkItem="$emit('deleteWorkItem', { workItemType, workItemId: workItem.id })"
@toggleWorkItemConfidentiality="toggleConfidentiality"
@error="updateError = $event"

View File

@ -79,6 +79,9 @@ export const I18N_WORK_ITEM_DELETE = s__('WorkItem|Delete %{workItemType}');
export const I18N_WORK_ITEM_ARE_YOU_SURE_DELETE = s__(
'WorkItem|Are you sure you want to delete the %{workItemType}? This action cannot be reversed.',
);
export const I18N_WORK_ITEM_ARE_YOU_SURE_DELETE_HIERARCHY = s__(
'WorkItem|Delete this %{workItemType} and release all child items? This action cannot be reversed.',
);
export const I18N_WORK_ITEM_DELETED = s__('WorkItem|%{workItemType} deleted');
export const I18N_WORK_ITEM_FETCH_ITERATIONS_ERROR = s__(

View File

@ -2,7 +2,7 @@
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import IssueCardStatistics from 'ee_else_ce/issues/list/components/issue_card_statistics.vue';
import IssueCardTimeInfo from 'ee_else_ce/issues/list/components/issue_card_time_info.vue';
import { STATUS_OPEN } from '~/issues/constants';
import { STATUS_ALL, STATUS_CLOSED, STATUS_OPEN } from '~/issues/constants';
import setSortPreferenceMutation from '~/issues/list/queries/set_sort_preference.mutation.graphql';
import { deriveSortKey } from '~/issues/list/utils';
import { __, s__ } from '~/locale';
@ -27,6 +27,7 @@ export default {
searchTokens: [],
sortKey: deriveSortKey({ sort: this.initialSort, sortMap: urlSortParams }),
state: STATUS_OPEN,
tabCounts: {},
workItems: [],
};
},
@ -43,6 +44,14 @@ export default {
update(data) {
return data.group.workItems.nodes ?? [];
},
result({ data }) {
const { all, closed, opened } = data?.group.workItemStateCounts ?? {};
this.tabCounts = {
[STATUS_OPEN]: opened,
[STATUS_CLOSED]: closed,
[STATUS_ALL]: all,
};
},
error(error) {
this.error = s__(
'WorkItem|Something went wrong when fetching work items. Please try again.',
@ -104,6 +113,7 @@ export default {
:search-tokens="searchTokens"
show-work-item-type-icon
:sort-options="$options.sortOptions"
:tab-counts="tabCounts"
:tabs="$options.issuableListTabs"
@click-tab="handleClickTab"
@dismiss-alert="error = undefined"

View File

@ -3,6 +3,11 @@
query getWorkItems($fullPath: ID!, $sort: WorkItemSort, $state: IssuableState) {
group(fullPath: $fullPath) {
id
workItemStateCounts(sort: $sort, state: $state, types: EPIC) {
all
closed
opened
}
workItems(sort: $sort, state: $state, types: EPIC) {
nodes {
id

View File

@ -60,7 +60,7 @@ module Ci
RUNNER_QUEUE_EXPIRY_TIME = 1.hour
# The `UPDATE_CONTACT_COLUMN_EVERY` defines how often the Runner DB entry can be updated
UPDATE_CONTACT_COLUMN_EVERY = (40.minutes..55.minutes)
UPDATE_CONTACT_COLUMN_EVERY = ((40.minutes)..(55.minutes))
# The `STALE_TIMEOUT` constant defines how far past the last contact or creation date a runner will be considered stale
STALE_TIMEOUT = 3.months

View File

@ -4,7 +4,7 @@ module Clusters
module AgentTokens
class TrackUsageService
# The `UPDATE_USED_COLUMN_EVERY` defines how often the token DB entry can be updated
UPDATE_USED_COLUMN_EVERY = (40.minutes..55.minutes)
UPDATE_USED_COLUMN_EVERY = ((40.minutes)..(55.minutes))
delegate :agent, to: :token

View File

@ -11,3 +11,4 @@ module WorkItems
end
end
end
WorkItems::Callbacks::Base.prepend_mod

View File

@ -3,15 +3,25 @@
module WorkItems
module Callbacks
class Description < Base
include Gitlab::Utils::StrongMemoize
def after_initialize
params[:description] = nil if excluded_in_new_type?
return unless params.present? && params.key?(:description)
return unless has_permission?(:update_work_item)
return unless update_description?
work_item.description = params[:description]
work_item.assign_attributes(last_edited_at: Time.current, last_edited_by: current_user)
end
private
def update_description?
params.present? && params.key?(:description) && has_permission?(:update_work_item)
end
strong_memoize_attr :update_description?
end
end
end
WorkItems::Callbacks::Description.prepend_mod

View File

@ -3,14 +3,24 @@
module WorkItems
module Callbacks
class StartAndDueDate < Base
include Gitlab::Utils::StrongMemoize
def before_update
return work_item.assign_attributes({ start_date: nil, due_date: nil }) if excluded_in_new_type?
return if params.blank?
return unless has_permission?(:set_work_item_metadata)
return unless update_start_and_due_date?
work_item.assign_attributes(params.slice(:start_date, :due_date))
end
private
def update_start_and_due_date?
params.present? && has_permission?(:set_work_item_metadata)
end
strong_memoize_attr :update_start_and_due_date?
end
end
end
WorkItems::Callbacks::StartAndDueDate.prepend_mod

View File

@ -1,16 +1,26 @@
- add_page_specific_style 'page_bundles/labels'
- if labels.any?
= render Pajamas::CardComponent.new(card_options: { class: 'gl-new-card' }, body_options: { class: 'gl-new-card-body gl-px-0' }) do |c|
- c.with_body do
%ul.manage-labels-list.gl-px-0.gl-mb-0
- labels.each do |label|
- options = { milestone_title: @milestone.title, label_name: label.title }
- open_issues = milestone_issues_by_label_count(@milestone, label, state: :opened)
- closed_issues = milestone_issues_by_label_count(@milestone, label, state: :closed)
%ul.bordered-list.manage-labels-list
- labels.each do |label|
- options = { milestone_title: @milestone.title, label_name: label.title }
%li.no-border
= render_label(label, tooltip: false, link: milestones_issues_path(options))
%span.prepend-description-left
= markdown_field(label, :description)
.gl-float-right.d-none.d-lg-block
= link_button_to milestones_issues_path(options.merge(state: 'opened')), category: :tertiary do
= n_('open issue', 'open issues', milestone_issues_by_label_count(@milestone, label, state: :opened))
= link_button_to milestones_issues_path(options.merge(state: 'closed')), category: :tertiary do
= n_('closed issue', 'closed issues', milestone_issues_by_label_count(@milestone, label, state: :closed))
%li.gl-list-style-none.gl-border-b.gl-last-of-type-border-b-0.gl-md-display-flex.gl-px-5.gl-py-4.gl-gap-5.gl-align-items-baseline
.gl-md-w-20.gl-flex-shrink-0.gl-flex-grow-1.gl-mb-4.gl-md-mb-0
= render_label(label, tooltip: false, link: milestones_issues_path(options))
- if markdown_field(label, :description).present?
.gl-w-full.gl-mb-4.gl-md-mb-0
= markdown_field(label, :description)
= render Pajamas::ButtonComponent.new(variant: :link, disabled: open_issues == 0, href: milestones_issues_path(options.merge(state: 'opened'))) do
= open_issues
= n_('Open issue', 'Open issues', open_issues)
= render Pajamas::ButtonComponent.new(variant: :link, disabled: closed_issues == 0, href: milestones_issues_path(options.merge(state: 'closed'))) do
= closed_issues
= n_('Closed issue', 'Closed issues', closed_issues)
- else
= render Pajamas::EmptyStateComponent.new(svg_path: 'illustrations/empty-state/empty-labels-md.svg',
title: s_('Milestones|No labels found')) do |c|
- c.with_description do
= s_('Milestones|Labels from issues in this milestone will appear here.')

View File

@ -14,3 +14,4 @@ Grape::Validations.register_validator(:project_portable, ::API::Validations::Val
Grape::Validations.register_validator(:destination_namespace_path, ::API::Validations::Validators::BulkImports::DestinationNamespacePath) # rubocop: disable Layout/LineLength
Grape::Validations.register_validator(:destination_slug_path, ::API::Validations::Validators::BulkImports::DestinationSlugPath) # rubocop: disable Layout/LineLength
Grape::Validations.register_validator(:source_full_path, ::API::Validations::Validators::BulkImports::SourceFullPath)
Grape::Validations.register_validator(:limit, ::API::Validations::Validators::Limit)

View File

@ -1,11 +1,11 @@
---
key_path: counts_monthly.aggregated_metrics.xmau_plan
description: Unique users interacting with Plan features
description: Removed as duplicate of counts_monthly.aggregated_metrics.users_work_items
product_section: dev
product_stage: plan
product_group: project_management
value_type: number
status: active
status: removed
milestone: '14.9'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/81336
time_frame: 28d
@ -36,3 +36,5 @@ tier:
- free
- premium
- ultimate
milestone_removed: '17.0'
removed_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/150643

View File

@ -1,11 +1,11 @@
---
key_path: counts_monthly.aggregated_metrics.xmau_project_management
description: Unique users interacting with Project Management features
description: Removed as duplicate of counts_monthly.aggregated_metrics.users_work_items
product_section: dev
product_stage: plan
product_group: project_management
value_type: number
status: active
status: removed
milestone: '14.9'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/81336
time_frame: 28d
@ -36,3 +36,5 @@ tier:
- free
- premium
- ultimate
milestone_removed: '17.0'
removed_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/150643

View File

@ -1,11 +1,11 @@
---
key_path: counts_weekly.aggregated_metrics.xmau_plan
description: Unique users interacting with Plan features
description: Removed as duplicate of counts_weekly.aggregated_metrics.users_work_items
product_section: dev
product_stage: plan
product_group: project_management
value_type: number
status: active
status: removed
milestone: '14.9'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/81336
instrumentation_class: RedisHLLMetric
@ -36,3 +36,5 @@ tier:
- free
- premium
- ultimate
milestone_removed: '17.0'
removed_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/150643

View File

@ -1,11 +1,11 @@
---
key_path: counts_weekly.aggregated_metrics.xmau_project_management
description: Unique users interacting with Project Management features
description: Removed as duplicate of counts_weekly.aggregated_metrics.users_work_items
product_section: dev
product_stage: plan
product_group: project_management
value_type: number
status: active
status: removed
milestone: '14.9'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/81336
time_frame: 7d
@ -36,3 +36,5 @@ tier:
- free
- premium
- ultimate
milestone_removed: '17.0'
removed_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/150643

View File

@ -3,7 +3,7 @@ table_name: group_merge_request_approval_settings
classes:
- GroupMergeRequestApprovalSetting
feature_categories:
- security_policy_management
- code_review_workflow
description: Keeps merge request approval settings per group
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/50256
milestone: '13.8'

View File

@ -10,7 +10,8 @@ class PrepareTmpBackfillIndexForPipelineIdsToVulnerabilityOccurrences < Gitlab::
INITIAL_PIPELINE_COLUMNS = [:id, :initial_pipeline_id]
LATEST_PIPELINE_COLUMNS = [:id, :latest_pipeline_id]
# TODO: Index to be created synchronously in https://gitlab.com/gitlab-org/gitlab/-/work_items/454239
# Index created synchronously in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/148514
# TODO remove tmp index in https://gitlab.com/gitlab-org/gitlab/-/issues/454243
def up
prepare_async_index TABLE_NAME, INITIAL_PIPELINE_COLUMNS, name: INITIAL_PIPELINE_INDEX,
where: 'initial_pipeline_id IS NULL'

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
class AddTmpBackfillIndexForPipelineIdsToVulnerabilityOccurrences < Gitlab::Database::Migration[2.2]
milestone '17.0'
TABLE_NAME = :vulnerability_occurrences
INITIAL_PIPELINE_INDEX = 'tmp_index_vulnerability_occurrences_id_and_initial_pipline_id'
LATEST_PIPELINE_INDEX = 'tmp_index_vulnerability_occurrences_id_and_latest_pipeline_id'
INITIAL_PIPELINE_COLUMNS = [:id, :initial_pipeline_id]
LATEST_PIPELINE_COLUMNS = [:id, :latest_pipeline_id]
disable_ddl_transaction!
# TODO remove in https://gitlab.com/gitlab-org/gitlab/-/issues/454243
def up
add_concurrent_index TABLE_NAME, INITIAL_PIPELINE_COLUMNS, name: INITIAL_PIPELINE_INDEX,
where: 'initial_pipeline_id IS NULL'
add_concurrent_index TABLE_NAME, LATEST_PIPELINE_COLUMNS, name: LATEST_PIPELINE_INDEX,
where: 'latest_pipeline_id IS NULL'
end
def down
remove_concurrent_index_by_name TABLE_NAME, name: INITIAL_PIPELINE_INDEX,
where: 'initial_pipeline_id IS NULL'
remove_concurrent_index_by_name TABLE_NAME, name: LATEST_PIPELINE_INDEX,
where: 'latest_pipeline_id IS NULL'
end
end

View File

@ -0,0 +1,50 @@
# frozen_string_literal: true
class PrepareAsyncIndexForBuildsPart6 < Gitlab::Database::Migration[2.2]
include Gitlab::Database::PartitioningMigrationHelpers
milestone '17.0'
INDEXES = [
{
name: :p_ci_builds_user_id_created_at_idx_bigint,
columns: [:user_id_convert_to_bigint, :created_at],
options: { where: "type::text = 'Ci::Build'::text" }
},
{
name: :p_ci_builds_user_id_idx_bigint,
columns: [:user_id_convert_to_bigint]
},
{
name: :p_ci_builds_user_id_name_created_at_idx_bigint,
columns: [:user_id_convert_to_bigint, :name, :created_at],
options: { where: "type::text = 'Ci::Build'::text AND (name::text = ANY (ARRAY['container_scanning'::character varying::text, 'dast'::character varying::text, 'dependency_scanning'::character varying::text, 'license_management'::character varying::text, 'license_scanning'::character varying::text, 'sast'::character varying::text, 'coverage_fuzzing'::character varying::text, 'apifuzzer_fuzz'::character varying::text, 'apifuzzer_fuzz_dnd'::character varying::text, 'secret_detection'::character varying::text]))" } # rubocop:disable Layout/LineLength -- just too long
},
{
name: :p_ci_builds_user_id_name_idx_bigint,
columns: [:user_id_convert_to_bigint, :name],
options: { where: "type::text = 'Ci::Build'::text AND (name::text = ANY (ARRAY['container_scanning'::character varying::text, 'dast'::character varying::text, 'dependency_scanning'::character varying::text, 'license_management'::character varying::text, 'license_scanning'::character varying::text, 'sast'::character varying::text, 'coverage_fuzzing'::character varying::text, 'secret_detection'::character varying::text]))" } # rubocop:disable Layout/LineLength -- just too long
}
]
TABLE_NAME = :p_ci_builds
def up
Gitlab::Database::PostgresPartitionedTable.each_partition(TABLE_NAME) do |partition|
INDEXES.each do |definition|
name, columns, options = definition.values_at(:name, :columns, :options)
index_name = generated_index_name(partition.identifier, name)
prepare_async_index partition.identifier, columns, name: index_name, **(options || {})
end
end
end
def down
Gitlab::Database::PostgresPartitionedTable.each_partition(TABLE_NAME) do |partition|
INDEXES.each do |definition|
name, columns, options = definition.values_at(:name, :columns, :options)
index_name = generated_index_name(partition.identifier, name)
unprepare_async_index partition.identifier, columns, name: index_name, **(options || {})
end
end
end
end

View File

@ -0,0 +1 @@
ccf5e6094224d22b8e943b8ca28d6cd5f424179646f4e0a8dc9ede441fd011e1

View File

@ -0,0 +1 @@
41f770f5e8be56173ffbe67be15148454d371120ac1271c395950a81b50cb438

View File

@ -28175,6 +28175,10 @@ CREATE INDEX tmp_index_project_statistics_updated_at ON project_statistics USING
CREATE INDEX tmp_index_vulnerability_dismissal_info ON vulnerabilities USING btree (id) WHERE ((state = 2) AND ((dismissed_at IS NULL) OR (dismissed_by_id IS NULL)));
CREATE INDEX tmp_index_vulnerability_occurrences_id_and_initial_pipline_id ON vulnerability_occurrences USING btree (id, initial_pipeline_id) WHERE (initial_pipeline_id IS NULL);
CREATE INDEX tmp_index_vulnerability_occurrences_id_and_latest_pipeline_id ON vulnerability_occurrences USING btree (id, latest_pipeline_id) WHERE (latest_pipeline_id IS NULL);
CREATE INDEX tmp_index_vulnerability_overlong_title_html ON vulnerabilities USING btree (id) WHERE (length(title_html) > 800);
CREATE UNIQUE INDEX u_project_compliance_standards_adherence_for_reporting ON project_compliance_standards_adherence USING btree (project_id, check_name, standard);

View File

@ -74,6 +74,7 @@ this method only supports replies, and not the other features of [incoming email
> - Accepting `Received` headers [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/81489) in GitLab 14.9.
> - Accepting `Cc` headers [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/348572) in GitLab 16.5.
> - Accepting `X-Original-To` headers [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/149874) in GitLab 17.0.
Email is processed correctly when a configured email address is present in one of the following headers
(sorted in the order they are checked):
@ -82,6 +83,7 @@ Email is processed correctly when a configured email address is present in one o
- `Delivered-To`
- `Envelope-To` or `X-Envelope-To`
- `Received`
- `X-Original-To`
- `Cc`
The `References` header is also accepted; however, it is used specifically to relate email responses to existing discussion threads. It is not used for creating issues by email.
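For context, a minimal sketch of where that configured address comes from in a `gitlab.yml`-style setup; the values shown are placeholders for illustration, not a recommended configuration:

```yaml
# Illustrative sketch only: the address configured here (with the %{key}
# placeholder) is what GitLab looks for in the headers listed above.
incoming_email:
  enabled: true
  address: "incoming+%{key}@gitlab.example.com"
```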
@ -92,7 +94,7 @@ also checks accepted headers.
Usually, the "To" field contains the email address of the primary receiver.
However, it might not include the configured GitLab email address if:
- The address is in the "BCC" field.
- The address is in the `BCC` field.
- The email was forwarded.
The `Received` header can contain multiple email addresses. These are checked in the order that they appear.

View File

@ -11,7 +11,11 @@ DETAILS:
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
Every API call to [project](../user/project/index.md) statistics must be authenticated.
Retrieving these statistics requires write access to the repository.
Retrieving these statistics requires read access to the repository.
For use with a [personal access token](../user/profile/personal_access_tokens.md),
use a token with `read_api` scope. For a [group access token](../user/group/settings/group_access_tokens.md),
you can use the Reporter role and `read_api` scope.
This API retrieves the number of times the project is either cloned or pulled
with the HTTP method. SSH fetches are not included.
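As an illustration only, a sketch of querying the endpoint from a scheduled CI job; `STATS_READ_TOKEN` is an assumed masked CI/CD variable holding a personal access token with `read_api` scope, and the path follows the project statistics endpoint described here:

```yaml
# Hypothetical job, not part of this change: reads fetch counts for the
# current project using GitLab's predefined CI_API_V4_URL and CI_PROJECT_ID.
fetch_statistics:
  image: curlimages/curl:latest
  script:
    - 'curl --header "PRIVATE-TOKEN: $STATS_READ_TOKEN" "$CI_API_V4_URL/projects/$CI_PROJECT_ID/statistics"'
  rules:
    - if: '$CI_PIPELINE_SOURCE == "schedule"'
```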

View File

@ -59,7 +59,7 @@ listed here that also do not work properly in FIPS mode:
- [Container Scanning](../user/application_security/container_scanning/index.md) support for scanning images in repositories that require authentication.
- [Code Quality](../ci/testing/code_quality.md) does not support operating in FIPS-compliant mode.
- [Dependency scanning](../user/application_security/dependency_scanning/index.md) support for Gradle.
- [Dynamic Application Security Testing (DAST)](../user/application_security/dast/proxy-based.md) supports a reduced set of analyzers. The proxy-based analyzer and on-demand scanning is not available in FIPS mode today, however browser-based DAST, DAST API, and DAST API Fuzzing images are available.
- [Dynamic Application Security Testing (DAST)](../user/application_security/dast/proxy-based.md) supports a reduced set of analyzers. The proxy-based analyzer and on-demand scanning are not available in FIPS mode today; however, browser-based DAST, API security testing, and DAST API Fuzzing images are available.
- [Solutions for vulnerabilities](../user/application_security/vulnerabilities/index.md#resolve-a-vulnerability)
for yarn projects.
- [Static Application Security Testing (SAST)](../user/application_security/sast/index.md)

View File

@ -4,13 +4,15 @@ group: Dynamic Analysis
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# DAST API vulnerability checks
# API security testing vulnerability checks
DETAILS:
**Tier:** Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
[DAST API](../index.md) provides vulnerability checks that are used to
> - [Renamed](https://gitlab.com/gitlab-org/gitlab/-/issues/457449) from **DAST API vulnerability checks** to **API security testing vulnerability checks** in GitLab 17.0.
[API security testing](../index.md) provides vulnerability checks that are used to
scan for vulnerabilities in the API under test.
## Passive checks
@ -45,7 +47,7 @@ scan for vulnerabilities in the API under test.
| [XML external entity](xml_external_entity.md) | High | Active | Active Full, Full |
| [XML injection](xml_injection_check.md) | Medium | Active | Active-Quick, Active Full, Quick, Full |
## DAST API checks by profile
## API security testing checks by profile
### Passive-Quick

View File

@ -51,17 +51,16 @@ If you do not want to Base64-encode the password (or if you are using GitLab 15.
Bearer tokens are used by several different authentication mechanisms, including OAuth2 and JSON Web
Tokens (JWT). Bearer tokens are transmitted using the `Authorization` HTTP header. To use Bearer
tokens with DAST API, you need one of the following:
tokens with API security testing, you need one of the following:
- A token that doesn't expire.
- A way to generate a token that lasts the length of testing.
- A Python script that DAST API can call to generate the token.
- A Python script that API security testing can call to generate the token.
#### Token doesn't expire
If the Bearer token doesn't expire, use the `DAST_API_OVERRIDES_ENV` variable to provide it. This
variable's content is a JSON snippet that provides headers and cookies to add to DAST API's
outgoing HTTP requests.
variable's content is a JSON snippet that provides headers and cookies to add to outgoing HTTP requests for API security testing.
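For example, a minimal sketch of providing such a token inline; the variable name `TEST_API_BEARERAUTH` matches the snippet further down, and the JSON shape (a top-level `headers` object) is the assumed overrides format:

```yaml
# Sketch only: in practice the token value would be a masked project or group
# CI/CD variable rather than a literal in .gitlab-ci.yml.
variables:
  TEST_API_BEARERAUTH: '{"headers": {"Authorization": "Bearer dXNlcm5hbWU6cGFzc3dvcmQ="}}'
  DAST_API_OVERRIDES_ENV: $TEST_API_BEARERAUTH
```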
Follow these steps to provide the Bearer token with `DAST_API_OVERRIDES_ENV`:
@ -90,15 +89,15 @@ Follow these steps to provide the Bearer token with `DAST_API_OVERRIDES_ENV`:
DAST_API_OVERRIDES_ENV: $TEST_API_BEARERAUTH
```
1. To validate that authentication is working, run a DAST API test and review the job logs
1. To validate that authentication is working, run API security testing and review the job logs
and the test API's application logs.
#### Token generated at test runtime
If the Bearer token must be generated and doesn't expire during testing, you can provide DAST API with a file that has the token. A prior stage and job, or part of the DAST API job, can
If the Bearer token must be generated and doesn't expire during testing, you can provide API security testing with a file that has the token. A prior stage and job, or part of the API security testing job, can
generate this file.
DAST API expects to receive a JSON file with the following structure:
API security testing expects to receive a JSON file with the following structure:
```json
{
@ -108,7 +107,7 @@ DAST API expects to receive a JSON file with the following structure:
}
```
This file can be generated by a prior stage and provided to DAST API through the
This file can be generated by a prior stage and provided to API security testing through the
`DAST_API_OVERRIDES_FILE` CI/CD variable.
Set `DAST_API_OVERRIDES_FILE` in your `.gitlab-ci.yml` file:
@ -127,13 +126,13 @@ variables:
DAST_API_OVERRIDES_FILE: dast-api-overrides.json
```
To validate that authentication is working, run a DAST API test and review the job logs and
To validate that authentication is working, run API security testing and review the job logs and
the test API's application logs.
#### Token has short expiration
If the Bearer token must be generated and expires prior to the scan's completion, you can provide a
program or script for the DAST API scanner to execute on a provided interval. The provided script runs in
program or script for the API security testing scanner to execute on a provided interval. The provided script runs in
an Alpine Linux container that has Python 3 and Bash installed. If the Python script requires
additional packages, it must detect this and install the packages at runtime.
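As a sketch of what such a configuration could look like (the script name `renew-token.py` and its invocation are assumptions; the overrides file and interval variables are the ones shown in the snippet below):

```yaml
# Illustrative only: renew-token.py is a hypothetical user-supplied script that
# writes a fresh bearer token into the overrides file; the scanner re-runs it
# every DAST_API_OVERRIDES_INTERVAL seconds.
variables:
  DAST_API_OVERRIDES_FILE: dast-api-overrides.json
  DAST_API_OVERRIDES_CMD: renew-token.py
  DAST_API_OVERRIDES_INTERVAL: 300
```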
@ -171,11 +170,11 @@ variables:
DAST_API_OVERRIDES_INTERVAL: 300
```
To validate that authentication is working, run an DAST API test and review the job logs and the test API's application logs. See the [overrides section](#overrides) for more information about override commands.
To validate that authentication is working, run API security testing and review the job logs and the test API's application logs. See the [overrides section](#overrides) for more information about override commands.
## Overrides
DAST API provides a method to add or override specific items in your request, for example:
API security testing provides a method to add or override specific items in your request, for example:
- Headers
- Cookies
@ -392,7 +391,7 @@ variables:
### Using a command
If the value must be generated or regenerated on expiration, you can provide a program or script for
the DAST API scanner to execute on a specified interval. The provided command runs in an Alpine Linux
the API security testing scanner to execute on a specified interval. The provided command runs in an Alpine Linux
container that has Python 3 and Bash installed.
You have to set the environment variable `DAST_API_OVERRIDES_CMD` to the program or script you would like

View File

@ -16,10 +16,10 @@ You can specify the API you want to scan by using:
## OpenAPI Specification
The [OpenAPI Specification](https://www.openapis.org/) (formerly the Swagger Specification) is an API description format for REST APIs.
This section shows you how to configure DAST API scanning using an OpenAPI Specification to provide information about the target API to test.
This section shows you how to configure API security testing scanning using an OpenAPI Specification to provide information about the target API to test.
OpenAPI Specifications are provided as a file system resource or URL. Both JSON and YAML OpenAPI formats are supported.
DAST API uses an OpenAPI document to generate the request body. When a request body is required,
API security testing uses an OpenAPI document to generate the request body. When a request body is required,
the body generation is limited to these body types:
- `application/x-www-form-urlencoded`
@ -30,24 +30,24 @@ the body generation is limited to these body types:
## OpenAPI and media types
A media type (formerly known as MIME type) is an identifier for file formats and format contents transmitted. An OpenAPI document lets you specify that a given operation can accept different media types, so a given request can send data in different formats. For example, a `PUT /user` operation to update user data could accept data in either XML (media type `application/xml`) or JSON (media type `application/json`) format.
OpenAPI 2.x lets you specify the accepted media types globally or per operation, and OpenAPI 3.x lets you specify the accepted media types per operation. DAST API will check the listed media types, and try to produce sample data for each supported media type.
OpenAPI 2.x lets you specify the accepted media types globally or per operation, and OpenAPI 3.x lets you specify the accepted media types per operation. API security testing will check the listed media types, and try to produce sample data for each supported media type.
- The default behavior is to select one of the supported media types to use. The first supported media type is chosen from the list. This behavior is configurable.
Testing the same operation (for example, `POST /user`) using different media types (for example, `application/json` and `application/xml`) is not always desirable.
For example, if the target application executes the same code regardless of the request content type, it will take longer to finish the test session, and it may report duplicated vulnerabilities related to the request body depending on the target app.
The environment variable `DAST_API_OPENAPI_ALL_MEDIA_TYPES` lets you specify whether or not to use all supported media types instead of one when generating requests for a given operation. When the environment variable `DAST_API_OPENAPI_ALL_MEDIA_TYPES` is set to any value, DAST API tries to generate requests for all supported media types instead of one in a given operation. This will cause testing to take longer as testing is repeated for each provided media type.
The environment variable `DAST_API_OPENAPI_ALL_MEDIA_TYPES` lets you specify whether or not to use all supported media types instead of one when generating requests for a given operation. When the environment variable `DAST_API_OPENAPI_ALL_MEDIA_TYPES` is set to any value, API security testing tries to generate requests for all supported media types instead of one in a given operation. This will cause testing to take longer as testing is repeated for each provided media type.
Alternatively, the variable `DAST_API_OPENAPI_MEDIA_TYPES` is used to provide a list of media types that will each be tested. Providing more than one media type causes testing to take longer, as testing is performed for each media type selected. When the environment variable `DAST_API_OPENAPI_MEDIA_TYPES` is set to a list of media types, only the listed media types are included when creating requests.
Multiple media types in `DAST_API_OPENAPI_MEDIA_TYPES` are separated by a colon (`:`). For example, to limit request generation to the media types `application/x-www-form-urlencoded` and `multipart/form-data`, set the environment variable `DAST_API_OPENAPI_MEDIA_TYPES` to `application/x-www-form-urlencoded:multipart/form-data`. Only supported media types in this list are included when creating requests, though non-supported media types are always skipped. A media type text may contain different sections. For example, `application/vnd.api+json; charset=UTF-8`, is a compound of `type "/" [tree "."] subtype ["+" suffix]* [";" parameter]`. Parameters are not taken into account when performing the filtering media types on request generation.
The environment variables `DAST_API_OPENAPI_ALL_MEDIA_TYPES` and `DAST_API_OPENAPI_MEDIA_TYPES` allow you to decide how to handle media types. These settings are mutually exclusive. If both are enabled, DAST API reports an error.
The environment variables `DAST_API_OPENAPI_ALL_MEDIA_TYPES` and `DAST_API_OPENAPI_MEDIA_TYPES` allow you to decide how to handle media types. These settings are mutually exclusive. If both are enabled, API security testing reports an error.
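For example, a minimal sketch limiting request generation to the two media types mentioned above (colon-separated, and mutually exclusive with `DAST_API_OPENAPI_ALL_MEDIA_TYPES`):

```yaml
# Sketch only: requests are generated for each listed media type, so testing
# takes proportionally longer with every entry added here.
variables:
  DAST_API_OPENAPI_MEDIA_TYPES: "application/x-www-form-urlencoded:multipart/form-data"
```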
### Configure DAST API with an OpenAPI Specification
### Configure API security testing with an OpenAPI Specification
To configure DAST API scanning with an OpenAPI Specification:
To configure API security testing scanning with an OpenAPI Specification:
1. [Include](../../../../ci/yaml/index.md#includetemplate)
the [`DAST-API.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Security/DAST-API.gitlab-ci.yml) in your `.gitlab-ci.yml` file.
@ -63,8 +63,8 @@ To configure DAST API scanning with an OpenAPI Specification:
variable or an `environment_url.txt` file.
Adding the URL in an `environment_url.txt` file at your project's root is great for testing in
dynamic environments. To run DAST API against an app dynamically created during a GitLab CI/CD
pipeline, have the app persist its URL in an `environment_url.txt` file. DAST API
dynamic environments. To run API security testing against an app dynamically created during a GitLab CI/CD
pipeline, have the app persist its URL in an `environment_url.txt` file. API security testing
automatically parses that file to find its scan target. You can see an
[example of this in our Auto DevOps CI YAML](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml).
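As a rough sketch of that pattern, assuming a deploy job that knows the review app URL (`deploy-review-app.sh` and `REVIEW_APP_URL` are placeholders, not part of the template):

```yaml
# Illustrative deploy job: persists the dynamically created app's URL so the
# analyzer can pick up its scan target from environment_url.txt.
deploy_review:
  stage: deploy
  script:
    - ./deploy-review-app.sh                          # assumed deployment step
    - echo "$REVIEW_APP_URL" > environment_url.txt    # assumed URL variable
  artifacts:
    paths:
      - environment_url.txt
```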
@ -83,7 +83,7 @@ variables:
DAST_API_TARGET_URL: http://test-deployment/
```
This is a minimal configuration for DAST API. From here you can:
This is a minimal configuration for API security testing. From here you can:
- [Run your first scan](#running-your-first-scan).
- [Add authentication](customizing_analyzer_settings.md#authentication).
@ -92,8 +92,8 @@ This is a minimal configuration for DAST API. From here you can:
## HTTP Archive (HAR)
The [HTTP Archive format (HAR)](../../api_fuzzing/create_har_files.md) is an archive file format for
logging HTTP transactions. When used with the GitLab DAST API scanner, the HAR file must contain
records of calling the web API to test. The DAST API scanner extracts all of the requests and uses them
logging HTTP transactions. When used with the GitLab API security testing scanner, the HAR file must contain
records of calling the web API to test. The API security testing scanner extracts all of the requests and uses them
to perform testing.
You can use various tools to generate HAR files:
@ -108,9 +108,9 @@ WARNING:
HAR files may contain sensitive information such as authentication tokens, API keys, and session
cookies. We recommend that you review the HAR file contents before adding them to a repository.
### DAST API scanning with a HAR file
### API security testing scanning with a HAR file
To configure DAST API to use a HAR file that provides information about the target API to test:
To configure API security testing to use a HAR file that provides information about the target API to test:
1. [Include](../../../../ci/yaml/index.md#includetemplate)
the [`DAST-API.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Security/DAST-API.gitlab-ci.yml) in your `.gitlab-ci.yml` file.
@ -127,8 +127,8 @@ To configure DAST API to use a HAR file that provides information about the targ
variable or an `environment_url.txt` file.
Adding the URL in an `environment_url.txt` file at your project's root is great for testing in
dynamic environments. To run DAST API against an app dynamically created during a GitLab CI/CD
pipeline, have the app persist its URL in an `environment_url.txt` file. DAST API
dynamic environments. To run API security testing against an app dynamically created during a GitLab CI/CD
pipeline, have the app persist its URL in an `environment_url.txt` file. API security testing
automatically parses that file to find its scan target. You can see an
[example of this in our Auto DevOps CI YAML](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml).
@ -147,7 +147,7 @@ variables:
DAST_API_TARGET_URL: http://test-deployment/
```
This example is a minimal configuration for DAST API. From here you can:
This example is a minimal configuration for API security testing. From here you can:
- [Run your first scan](#running-your-first-scan).
- [Add authentication](customizing_analyzer_settings.md#authentication).
@ -158,25 +158,25 @@ This example is a minimal configuration for DAST API. From here you can:
> - Support for GraphQL Schema was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/352780) in GitLab 15.4.
GraphQL is a query language for your API and an alternative to REST APIs.
DAST API supports testing GraphQL endpoints multiple ways:
API security testing supports testing GraphQL endpoints multiple ways:
- Test using the GraphQL Schema. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/352780) in GitLab 15.4.
- Test using a recording (HAR) of GraphQL queries.
- Test using a Postman Collection containing GraphQL queries.
This section documents how to test using a GraphQL schema. The GraphQL schema support in
DAST API is able to query the schema from endpoints that support [introspection](https://graphql.org/learn/introspection/).
API security testing is able to query the schema from endpoints that support [introspection](https://graphql.org/learn/introspection/).
Introspection is enabled by default to allow tools like GraphiQL to work.
For details on how to enable introspection, see your GraphQL framework documentation.
### DAST API scanning with a GraphQL endpoint URL
### API security testing scanning with a GraphQL endpoint URL
The GraphQL support in DAST API is able to query a GraphQL endpoint for the schema.
The GraphQL support in API security testing is able to query a GraphQL endpoint for the schema.
NOTE:
The GraphQL endpoint must support introspection queries for this method to work correctly.
To configure DAST API to use a GraphQL endpoint URL that provides information about the target API to test:
To configure API security testing to use a GraphQL endpoint URL that provides information about the target API to test:
1. [Include](../../../../ci/yaml/index.md#includetemplate)
the [`DAST-API.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Security/DAST-API.gitlab-ci.yml) in your `.gitlab-ci.yml` file.
@ -204,17 +204,17 @@ dast_api:
DAST_API_TARGET_URL: http://test-deployment/
```
This example is a minimal configuration for DAST API. From here you can:
This example is a minimal configuration for API security testing. From here you can:
- [Run your first scan](#running-your-first-scan).
- [Add authentication](customizing_analyzer_settings.md#authentication).
- Learn how to [handle false positives](#handling-false-positives).
### DAST API scanning with a GraphQL Schema file
### API security testing scanning with a GraphQL Schema file
DAST API can use a GraphQL schema file to understand and test a GraphQL endpoint that has introspection disabled. To use a GraphQL schema file, it must be in the introspection JSON format. A GraphQL schema can be converted to the introspection JSON format using an online 3rd party tool: [https://transform.tools/graphql-to-introspection-json](https://transform.tools/graphql-to-introspection-json).
API security testing can use a GraphQL schema file to understand and test a GraphQL endpoint that has introspection disabled. To use a GraphQL schema file, it must be in the introspection JSON format. A GraphQL schema can be converted to the introspection JSON format using an online 3rd party tool: [https://transform.tools/graphql-to-introspection-json](https://transform.tools/graphql-to-introspection-json).
To configure DAST API to use a GraphQL schema file that provides information about the target API to test:
To configure API security testing to use a GraphQL schema file that provides information about the target API to test:
1. [Include](../../../../ci/yaml/index.md#includetemplate)
the [`DAST-API.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Security/DAST-API.gitlab-ci.yml) in your `.gitlab-ci.yml` file.
@ -262,7 +262,7 @@ dast_api:
DAST_API_TARGET_URL: http://test-deployment/
```
This example is a minimal configuration for DAST API. From here you can:
This example is a minimal configuration for API security testing. From here you can:
- [Run your first scan](#running-your-first-scan).
- [Add authentication](customizing_analyzer_settings.md#authentication).
@ -273,11 +273,11 @@ This example is a minimal configuration for DAST API. From here you can:
The [Postman API Client](https://www.postman.com/product/api-client/) is a popular tool that
developers and testers use to call various types of APIs. The API definitions
[can be exported as a Postman Collection file](https://learning.postman.com/docs/getting-started/importing-and-exporting-data/#exporting-postman-data)
for use with DAST API. When exporting, make sure to select a supported version of Postman
for use with API security testing. When exporting, make sure to select a supported version of Postman
Collection: v2.0 or v2.1.
When used with the GitLab DAST API scanner, Postman Collections must contain definitions of the web API to
test with valid data. The DAST API scanner extracts all the API definitions and uses them to perform
When used with the GitLab API security testing scanner, Postman Collections must contain definitions of the web API to
test with valid data. The API security testing scanner extracts all the API definitions and uses them to perform
testing.
WARNING:
@ -285,9 +285,9 @@ Postman Collection files may contain sensitive information such as authenticatio
and session cookies. We recommend that you review the Postman Collection file contents before adding
them to a repository.
### DAST API scanning with a Postman Collection file
### API security testing scanning with a Postman Collection file
To configure DAST API to use a Postman Collection file that provides information about the target
To configure API security testing to use a Postman Collection file that provides information about the target
API to test:
1. [Include](../../../../ci/yaml/index.md#includetemplate)
@ -304,8 +304,8 @@ API to test:
variable or an `environment_url.txt` file.
Adding the URL in an `environment_url.txt` file at your project's root is great for testing in
dynamic environments. To run DAST API against an app dynamically created during a GitLab CI/CD
pipeline, have the app persist its URL in an `environment_url.txt` file. DAST API
dynamic environments. To run API security testing against an app dynamically created during a GitLab CI/CD
pipeline, have the app persist its URL in an `environment_url.txt` file. API security testing
automatically parses that file to find its scan target. You can see an
[example of this in our Auto DevOps CI YAML](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml).
@ -324,7 +324,7 @@ variables:
DAST_API_TARGET_URL: http://test-deployment/
```
This is a minimal configuration for DAST API. From here you can:
This is a minimal configuration for API security testing. From here you can:
- [Run your first scan](#running-your-first-scan).
- [Add authentication](customizing_analyzer_settings.md#authentication).
@ -363,31 +363,31 @@ As mentioned above, there are different variable scopes, and each of them has a
> If a variable with the same name is declared in two different scopes, the value stored in the variable with narrowest scope is used. For example, if there is a global variable named `username` and a local variable named `username`, the local value is used when the request runs.
The following is a summary of the variable scopes supported by the Postman Client and DAST API:
The following is a summary of the variable scopes supported by the Postman Client and API security testing:
- **Global Environment (Global) scope** is a special pre-defined environment that is available throughout a workspace. We can also refer to the _global environment_ scope as the _global_ scope. The Postman Client allows exporting the global environment into a JSON file, which can be used with DAST API.
- **Global Environment (Global) scope** is a special pre-defined environment that is available throughout a workspace. We can also refer to the _global environment_ scope as the _global_ scope. The Postman Client allows exporting the global environment into a JSON file, which can be used with API security testing.
- **Environment scope** is a named group of variables created by a user in the Postman Client.
The Postman Client supports a single active environment along with the global environment. The variables defined in an active user-created environment take precedence over variables defined in the global environment. The Postman Client allows exporting your environment into a JSON file, which can be used with DAST API.
The Postman Client supports a single active environment along with the global environment. The variables defined in an active user-created environment take precedence over variables defined in the global environment. The Postman Client allows exporting your environment into a JSON file, which can be used with API security testing.
- **Collection scope** is a group of variables declared in a given collection. The collection variables are available to the collection where they have been declared and the nested requests or collections. Variables defined in the collection scope take precedence over the _global environment_ scope and also the _environment_ scope.
The Postman Client can export one or more collections into a JSON file; this JSON file contains selected collections, requests, and collection variables.
- **DAST API Scope** is a new scope added by DAST API to allow users to provide extra variables, or override variables defined in other supported scopes. This scope is not supported by Postman. The _DAST API Scope_ variables are provided using a [custom JSON file format](#dast-api-scope-custom-json-file-format).
- **API security testing scope** is a new scope added by API security testing to allow users to provide extra variables, or override variables defined in other supported scopes. This scope is not supported by Postman. The _API security testing scope_ variables are provided using a [custom JSON file format](#api-security-testing-scope-custom-json-file-format).
- Override values defined in the environment or collection
- Defining variables from scripts
- Define a single row of data from the unsupported _data scope_
- **Data scope** is a group of variables whose names and values come from JSON or CSV files. A Postman collection runner like [Newman](https://learning.postman.com/docs/running-collections/using-newman-cli/command-line-integration-with-newman/) or [Postman Collection Runner](https://learning.postman.com/docs/running-collections/intro-to-collection-runs/) executes the requests in a collection as many times as there are entries in the JSON or CSV file. A good use case for these variables is to automate tests using scripts in Postman.
DAST API does **not** support reading data from a CSV or JSON file.
- **Local scope** are variables that are defined in Postman scripts. DAST API does **not** support Postman scripts and by extension, variables defined in scripts. You can still provide values for the script-defined variables by defining them in one of the supported scopes, or our custom JSON format.
API security testing does **not** support reading data from a CSV or JSON file.
- **Local scope** are variables that are defined in Postman scripts. API security testing does **not** support Postman scripts and by extension, variables defined in scripts. You can still provide values for the script-defined variables by defining them in one of the supported scopes, or our custom JSON format.
Not all scopes are supported by DAST API and variables defined in scripts are not supported. The following table is sorted by broadest scope to narrowest scope.
Not all scopes are supported by API security testing and variables defined in scripts are not supported. The following table is sorted by broadest scope to narrowest scope.
| Scope |Postman | DAST API | Comment |
| ------------------ |:---------:|:------------:| :--------|
| Global Environment | Yes | Yes | Special pre-defined environment |
| Environment | Yes | Yes | Named environments |
| Collection | Yes | Yes | Defined in your postman collection |
| DAST API Scope | No | Yes | Custom scope added by DAST API |
| Data | Yes | No | External files in CSV or JSON format |
| Local | Yes | No | Variables defined in scripts |
| Scope | Postman | API security testing | Comment |
|----------------------------|:-------:|:--------------------:|:-------------------------------------------|
| Global Environment | Yes | Yes | Special pre-defined environment |
| Environment | Yes | Yes | Named environments |
| Collection | Yes | Yes | Defined in your postman collection |
| API security testing scope | No | Yes | Custom scope added by API security testing |
| Data | Yes | No | External files in CSV or JSON format |
| Local | Yes | No | Variables defined in scripts |
For more details on how to define variables and export variables in different scopes, see:
@ -408,11 +408,11 @@ For more details on exporting variables in different supported scopes, see:
- [Exporting environments](https://learning.postman.com/docs/getting-started/importing-and-exporting-data/#exporting-environments)
- [Downloading global environments](https://learning.postman.com/docs/sending-requests/variables/#downloading-global-environments)
#### DAST API Scope, custom JSON file format
#### API security testing scope, custom JSON file format
Our custom JSON file format is a JSON object where each object property represents a variable name and the property value represents the variable value. This file can be created using your favorite text editor, or it can be produced by an earlier job in your pipeline.
This example defines two variables `base_url` and `token` in the DAST API scope:
This example defines two variables `base_url` and `token` in the API security testing scope:
```json
{
@ -421,18 +421,18 @@ This example defines two variables `base_url` and `token` in the DAST API scope:
}
```
#### Using scopes with DAST API
#### Using scopes with API security testing
The scopes: _global_, _environment_, _collection_, and _GitLab DAST API_ are supported in [GitLab 15.1 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/356312). GitLab 15.0 and earlier, supports only the _collection_, and _GitLab DAST API_ scopes.
The scopes _global_, _environment_, _collection_, and _GitLab API security testing_ are supported in [GitLab 15.1 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/356312). GitLab 15.0 and earlier supports only the _collection_ and _GitLab API security testing_ scopes.
The following table provides a quick reference for mapping scope files/URLs to DAST API configuration variables:
The following table provides a quick reference for mapping scope files/URLs to API security testing configuration variables:
| Scope | How to Provide |
| ------------------ | --------------- |
| Global Environment | DAST_API_POSTMAN_COLLECTION_VARIABLES |
| Global environment | DAST_API_POSTMAN_COLLECTION_VARIABLES |
| Environment | DAST_API_POSTMAN_COLLECTION_VARIABLES |
| Collection | DAST_API_POSTMAN_COLLECTION |
| DAST API Scope | DAST_API_POSTMAN_COLLECTION_VARIABLES |
| API security testing scope | DAST_API_POSTMAN_COLLECTION_VARIABLES |
| Data | Not supported |
| Local | Not supported |
@ -444,17 +444,17 @@ The configuration variable `DAST_API_POSTMAN_COLLECTION_VARIABLES` can be set to
- [Exported Global environment](https://learning.postman.com/docs/sending-requests/variables/#downloading-global-environments)
- [Exported environments](https://learning.postman.com/docs/getting-started/importing-and-exporting-data/#exporting-environments)
- [DAST API Custom JSON format](#dast-api-scope-custom-json-file-format)
- [API security testing Custom JSON format](#api-security-testing-scope-custom-json-file-format)
#### Undefined Postman variables
There is a chance that DAST API engine does not find all variables references that your Postman collection file is using. Some cases can be:
There is a chance that the API security testing engine does not find all variable references that your Postman collection file is using. Some cases can be:
- You are using _data_ or _local_ scoped variables, and as stated previously these scopes are not supported by DAST API. Thus, assuming the values for these variables have not been provided through [the DAST API scope](#dast-api-scope-custom-json-file-format), then the values of the _data_ and _local_ scoped variables are undefined.
- You are using _data_ or _local_ scoped variables, and as stated previously these scopes are not supported by API security testing. Thus, assuming the values for these variables have not been provided through [the API security testing scope](#api-security-testing-scope-custom-json-file-format), then the values of the _data_ and _local_ scoped variables are undefined.
- A variable name was typed incorrectly, and the name does not match the defined variable.
- Postman Client supports a new dynamic variable that is not supported by DAST API.
- Postman Client supports a new dynamic variable that is not supported by API security testing.
When possible, DAST API follows the same behavior as the Postman Client does when dealing with undefined variables. The text of the variable reference remains the same, and there is no text substitution. The same behavior also applies to any unsupported dynamic variables.
When possible, API security testing follows the same behavior as the Postman Client does when dealing with undefined variables. The text of the variable reference remains the same, and there is no text substitution. The same behavior also applies to any unsupported dynamic variables.
For example, if a request definition in the Postman Collection references the variable `{{full_url}}` and the variable is not found it is left unchanged with the value `{{full_url}}`.
@ -462,7 +462,7 @@ For example, if a request definition in the Postman Collection references the va
In addition to variables that a user can define at various scope levels, Postman has a set of pre-defined variables called _dynamic_ variables. The [_dynamic_ variables](https://learning.postman.com/docs/writing-scripts/script-references/variables-list/) are already defined and their name is prefixed with a dollar sign (`$`), for instance, `$guid`. _Dynamic_ variables can be used like any other variable, and in the Postman Client, they produce random values during the request/collection run.
An important difference between DAST API and Postman is that DAST API returns the same value for each usage of the same dynamic variables. This differs from the Postman Client behavior which returns a random value on each use of the same dynamic variable. In other words, DAST API uses static values for dynamic variables while Postman uses random values.
An important difference between API security testing and Postman is that API security testing returns the same value for each usage of the same dynamic variable. This differs from the Postman Client behavior, which returns a random value on each use of the same dynamic variable. In other words, API security testing uses static values for dynamic variables while Postman uses random values.
The supported dynamic variables during the scanning process are:
@ -589,7 +589,7 @@ The supported dynamic variables during the scanning process are:
#### Example: Global Scope
In this example, [the _global_ scope is exported](https://learning.postman.com/docs/sending-requests/variables/#downloading-global-environments) from the Postman Client as `global-scope.json` and provided to DAST API through the `DAST_API_POSTMAN_COLLECTION_VARIABLES` configuration variable.
In this example, [the _global_ scope is exported](https://learning.postman.com/docs/sending-requests/variables/#downloading-global-environments) from the Postman Client as `global-scope.json` and provided to API security testing through the `DAST_API_POSTMAN_COLLECTION_VARIABLES` configuration variable.
Here is an example of using `DAST_API_POSTMAN_COLLECTION_VARIABLES`:
@ -609,7 +609,7 @@ variables:
#### Example: Environment Scope
In this example, [the _environment_ scope is exported](https://learning.postman.com/docs/getting-started/importing-and-exporting-data/#exporting-environments) from the Postman Client as `environment-scope.json` and provided to DAST API through the `DAST_API_POSTMAN_COLLECTION_VARIABLES` configuration variable.
In this example, [the _environment_ scope is exported](https://learning.postman.com/docs/getting-started/importing-and-exporting-data/#exporting-environments) from the Postman Client as `environment-scope.json` and provided to API security testing through the `DAST_API_POSTMAN_COLLECTION_VARIABLES` configuration variable.
Here is an example of using `DAST_API_POSTMAN_COLLECTION_VARIABLES`:
@ -646,9 +646,9 @@ variables:
DAST_API_TARGET_URL: http://test-deployment/
```
#### Example: DAST API Scope
#### Example: API security testing scope
The DAST API Scope is used for two main purposes, defining _data_ and _local_ scope variables that are not supported by DAST API, and changing the value of an existing variable defined in another scope. The DAST API Scope is provided through the `DAST_API_POSTMAN_COLLECTION_VARIABLES` configuration variable.
The API security testing scope is used for two main purposes: defining _data_ and _local_ scope variables that are not supported by API security testing, and changing the value of an existing variable defined in another scope. The API security testing scope is provided through the `DAST_API_POSTMAN_COLLECTION_VARIABLES` configuration variable.
Here is an example of using `DAST_API_POSTMAN_COLLECTION_VARIABLES`:
@ -666,7 +666,7 @@ variables:
DAST_API_TARGET_URL: http://test-deployment/
```
The file `dast-api-scope.json` uses our [custom JSON file format](#dast-api-scope-custom-json-file-format). This JSON is an object with key-value pairs for properties. The keys are the variables' names, and the values are the variables'
The file `dast-api-scope.json` uses our [custom JSON file format](#api-security-testing-scope-custom-json-file-format). This JSON is an object with key-value pairs for properties. The keys are the variables' names, and the values are the variables'
values. For example:
```json
@ -684,7 +684,7 @@ In this example, a _global_ scope, _environment_ scope, and _collection_ scope a
- [Export the _environment_ scope](https://learning.postman.com/docs/getting-started/importing-and-exporting-data/#exporting-environments) as `environment-scope.json`
- Export the Postman Collection which includes the _collection_ scope as `postman-collection.json`
The Postman Collection is provided using the `DAST_API_POSTMAN_COLLECTION` variable, while the other scopes are provided using the `DAST_API_POSTMAN_COLLECTION_VARIABLES`. DAST API can identify which scope the provided files match using data provided in each file.
The Postman Collection is provided using the `DAST_API_POSTMAN_COLLECTION` variable, while the other scopes are provided using the `DAST_API_POSTMAN_COLLECTION_VARIABLES`. API security testing can identify which scope the provided files match using data provided in each file.
```yaml
stages:
@ -702,12 +702,12 @@ variables:
#### Example: Changing a Variable's Value
When using exported scopes, it's often the case that the value of a variable must be changed for use with DAST API. For example, a _collection_ scoped variable might contain a variable named `api_version` with a value of `v2`, while your test needs a value of `v1`. Instead of modifying the exported collection to change the value, the DAST API scope can be used to change its value. This works because the _DAST API_ scope takes precedence over all other scopes.
When using exported scopes, it's often the case that the value of a variable must be changed for use with API security testing. For example, the _collection_ scope might contain a variable named `api_version` with a value of `v2`, while your test needs a value of `v1`. Instead of modifying the exported collection to change the value, the API security testing scope can be used to change its value. This works because the _API security testing_ scope takes precedence over all other scopes.
The _collection_ scope variables are included in the exported Postman Collection file and provided through the `DAST_API_POSTMAN_COLLECTION` configuration variable.
The DAST API Scope is provided through the `DAST_API_POSTMAN_COLLECTION_VARIABLES` configuration variable, but first, we must create the file.
The file `dast-api-scope.json` uses our [custom JSON file format](#dast-api-scope-custom-json-file-format). This JSON is an object with key-value pairs for properties. The keys are the variables' names, and the values are the variables'
The API security testing scope is provided through the `DAST_API_POSTMAN_COLLECTION_VARIABLES` configuration variable, but first, we must create the file.
The file `dast-api-scope.json` uses our [custom JSON file format](#api-security-testing-scope-custom-json-file-format). This JSON is an object with key-value pairs for properties. The keys are the variables' names, and the values are the variables'
values. For example:
```json
@ -734,15 +734,15 @@ variables:
#### Example: Changing a Variable's Value with Multiple Scopes
When using exported scopes, it's often the case that the value of a variable must be changed for use with DAST API. For example, an _environment_ scope might contain a variable named `api_version` with a value of `v2`, while your test needs a value of `v1`. Instead of modifying the exported file to change the value, the DAST API scope can be used. This works because the _DAST API_ scope takes precedence over all other scopes.
When using exported scopes, it's often the case that the value of a variable must be changed for use with API security testing. For example, an _environment_ scope might contain a variable named `api_version` with a value of `v2`, while your test needs a value of `v1`. Instead of modifying the exported file to change the value, the API security testing scope can be used. This works because the _API security testing_ scope takes precedence over all other scopes.
In this example, a _global_ scope, _environment_ scope, _collection_ scope, and _DAST API_ scope are configured. The first step is to export and create our various scopes.
In this example, a _global_ scope, _environment_ scope, _collection_ scope, and _API security testing_ scope are configured. The first step is to export and create our various scopes.
- [Export the _global_ scope](https://learning.postman.com/docs/sending-requests/variables/#downloading-global-environments) as `global-scope.json`
- [Export the _environment_ scope](https://learning.postman.com/docs/getting-started/importing-and-exporting-data/#exporting-environments) as `environment-scope.json`
- Export the Postman Collection which includes the _collection_ scope as `postman-collection.json`
The DAST API scope is used by creating a file `dast-api-scope.json` using our [custom JSON file format](#dast-api-scope-custom-json-file-format). This JSON is an object with key-value pairs for properties. The keys are the variables' names, and the values are the variables'
The API security testing scope is used by creating a file `dast-api-scope.json` using our [custom JSON file format](#api-security-testing-scope-custom-json-file-format). This JSON is an object with key-value pairs for properties. The keys are the variables' names, and the values are the variables'
values. For example:
```json
@ -751,7 +751,7 @@ values. For example:
}
```
The Postman Collection is provided using the `DAST_API_POSTMAN_COLLECTION` variable, while the other scopes are provided using the `DAST_API_POSTMAN_COLLECTION_VARIABLES`. DAST API can identify which scope the provided files match using data provided in each file.
The Postman Collection is provided using the `DAST_API_POSTMAN_COLLECTION` variable, while the other scopes are provided using the `DAST_API_POSTMAN_COLLECTION_VARIABLES`. API security testing can identify which scope the provided files match using data provided in each file.
```yaml
stages:
@ -771,18 +771,18 @@ variables:
When configured correctly, a CI/CD pipeline contains a `dast` stage and a `dast_api` job. The job only fails when an invalid configuration is provided. During typical operation, the job always succeeds even if vulnerabilities are identified during testing.
Vulnerabilities are displayed on the **Security** pipeline tab with the suite name. When testing against the repositories default branch, the DAST API vulnerabilities are also shown on the Security and Compliance's Vulnerability Report page.
Vulnerabilities are displayed on the **Security** pipeline tab with the suite name. When testing against the repository's default branch, the API security testing vulnerabilities are also shown on the Security and Compliance's Vulnerability Report page.
To prevent an excessive number of reported vulnerabilities, the DAST API scanner limits the number of vulnerabilities it reports per operation.
To prevent an excessive number of reported vulnerabilities, the API security testing scanner limits the number of vulnerabilities it reports per operation.
## Viewing DAST API vulnerabilities
## Viewing API security testing vulnerabilities
The DAST API analyzer produces a JSON report that is collected and used
[to populate the vulnerabilities into GitLab vulnerability screens](#view-details-of-a-dast-api-vulnerability).
The API security testing analyzer produces a JSON report that is collected and used
[to populate the vulnerabilities into GitLab vulnerability screens](#view-details-of-an-api-security-testing-vulnerability).
See [handling false positives](#handling-false-positives) for information about configuration changes you can make to limit the number of false positives reported.
### View details of a DAST API vulnerability
### View details of an API security testing vulnerability
Follow these steps to view details of a vulnerability:
@ -791,7 +791,7 @@ Follow these steps to view details of a vulnerability:
- In a project, go to the project's **Secure > Vulnerability report**
page. This page shows all vulnerabilities from the default branch only.
- In a merge request, go to the merge request's **Security** section and select the **Expand**
button. DAST API vulnerabilities are available in a section labeled
button. API security testing vulnerabilities are available in a section labeled
**DAST detected N potential vulnerabilities**. Select the title to display the vulnerability
details.
@ -807,7 +807,7 @@ Follow these steps to view details of a vulnerability:
| Unmodified Response | Response from an unmodified request. This is what a typical working response looks like. |
| Actual Response | Response received from test request. |
| Evidence | How we determined a vulnerability occurred. |
| Identifiers | The DAST API check used to find this vulnerability. |
| Identifiers | The API security testing check used to find this vulnerability. |
| Severity | Severity of the vulnerability. |
| Scanner Type | Scanner used to perform testing. |
@ -827,7 +827,7 @@ False positives can be handled in several ways:
- Dismiss the vulnerability.
- Some checks have several methods of detecting when a vulnerability is identified, called _Assertions_.
Assertions can also be turned off and configured. For example, the DAST API scanner by default uses HTTP
Assertions can also be turned off and configured. For example, the API security testing scanner by default uses HTTP
status codes to help identify when something is a real issue. If an API returns a 500 error during
testing, this creates a vulnerability. This isn't always desired, as some frameworks return 500 errors often.
- Turn off the Check producing the false positive. This prevents the check from generating any

View File

@ -6,19 +6,19 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Offline configuration
For self-managed GitLab instances in an environment with limited, restricted, or intermittent access to external resources through the internet, some adjustments are required for the DAST API testing job to successfully run.
For self-managed GitLab instances in an environment with limited, restricted, or intermittent access to external resources through the internet, some adjustments are required for the API security testing job to successfully run.
Steps:
1. Host the Docker image in a local container registry.
1. Set the `SECURE_ANALYZERS_PREFIX` to the local container registry.
The Docker image for DAST API must be pulled (downloaded) from the public registry and then pushed (imported) into a local registry. The GitLab container registry can be used to locally host the Docker image. This process can be performed using a special template. See [loading Docker images onto your offline host](../../offline_deployments/index.md#loading-docker-images-onto-your-offline-host) for instructions.
The Docker image for API security testing must be pulled (downloaded) from the public registry and then pushed (imported) into a local registry. The GitLab container registry can be used to locally host the Docker image. This process can be performed using a special template. See [loading Docker images onto your offline host](../../offline_deployments/index.md#loading-docker-images-onto-your-offline-host) for instructions.
Once the Docker image is hosted locally, the `SECURE_ANALYZERS_PREFIX` variable is set with the location of the local registry. The variable must be set such that concatenating `/api-security:2` results in a valid image location.
NOTE:
DAST API and API Fuzzing both use the same underlying Docker image `api-security:2`.
API security testing and API Fuzzing both use the same underlying Docker image `api-security:2`.
For example, the following line sets a registry for the image `registry.gitlab.com/security-products/api-security:2`:

View File

@ -5,7 +5,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
type: reference, howto
---
# Overriding DAST API jobs
# Overriding API security testing jobs
To override a job definition (for example, to change properties like `variables`, `dependencies`, or [`rules`](../../../../ci/yaml/index.md#rules)),
declare a job with the same name as the DAST job to override. Place this new job after the template

View File

@ -33,7 +33,7 @@ type: reference, howto
## Recommendations
- Configure runners to use the [always pull policy](https://docs.gitlab.com/runner/executors/docker.html#using-the-always-pull-policy) to run the latest versions of the analyzers.
- By default, DAST API downloads all artifacts defined by previous jobs in the pipeline. If
- By default, API security testing downloads all artifacts defined by previous jobs in the pipeline. If
your DAST job does not rely on `environment_url.txt` to define the URL under test or any other files created
in previous jobs, we recommend you don't download artifacts. To avoid downloading
artifacts, extend the analyzer CI/CD job to specify no dependencies. For example, for the DAST proxy-based analyzer add the following to your `.gitlab-ci.yml` file:
@ -45,10 +45,10 @@ type: reference, howto
## Application deployment options
DAST API requires a deployed application to be available to scan.
API security testing requires a deployed application to be available to scan.
Depending on the complexity of the target application, there are a few options for how to deploy and configure
the DAST API template.
the API security testing template.
### Review Apps

View File

@ -11,13 +11,13 @@ info: To determine the technical writer assigned to the Stage/Group associated w
| CI/CD variable | Description |
|------------------------------------------------------|--------------------|
| `SECURE_ANALYZERS_PREFIX` | Specify the Docker registry base address from which to download the analyzer. |
| `DAST_API_DISABLED` | Set to 'true' or '1' to disable DAST API scanning. |
| `DAST_API_DISABLED_FOR_DEFAULT_BRANCH` | Set to 'true' or '1' to disable DAST API scanning for only the default (production) branch. |
| `DAST_API_VERSION` | Specify DAST API container version. Defaults to `3`. |
| `DAST_API_DISABLED` | Set to 'true' or '1' to disable API security testing. |
| `DAST_API_DISABLED_FOR_DEFAULT_BRANCH` | Set to 'true' or '1' to disable API security testing for only the default (production) branch. |
| `DAST_API_VERSION` | Specify API security testing container version. Defaults to `3`. |
| `DAST_API_IMAGE_SUFFIX` | Specify a container image suffix. Defaults to none. |
| `DAST_API_API_PORT` | Specify the communication port number used by DAST API engine. Defaults to `5500`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/367734) in GitLab 15.5. |
| `DAST_API_API_PORT` | Specify the communication port number used by the API security testing engine. Defaults to `5500`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/367734) in GitLab 15.5. |
| `DAST_API_TARGET_URL` | Base URL of API testing target. |
|[`DAST_API_CONFIG`](#configuration-files) | DAST API configuration file. Defaults to `.gitlab-dast-api.yml`. |
|[`DAST_API_CONFIG`](#configuration-files) | API security testing configuration file. Defaults to `.gitlab-dast-api.yml`. |
|[`DAST_API_PROFILE`](#configuration-files) | Configuration profile to use during testing. Defaults to `Quick`. |
|[`DAST_API_EXCLUDE_PATHS`](customizing_analyzer_settings.md#exclude-paths) | Exclude API URL paths from testing. |
|[`DAST_API_EXCLUDE_URLS`](customizing_analyzer_settings.md#exclude-urls) | Exclude API URL from testing. |

View File

@ -4,47 +4,48 @@ group: Dynamic Analysis
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# DAST API analyzer
# API security testing analyzer
DETAILS:
**Tier:** Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
> DAST API analyzer [became the default analyzer for on-demand DAST API scans](https://gitlab.com/groups/gitlab-org/-/epics/4254) in GitLab 15.6.
> - API security testing analyzer [became the default analyzer for on-demand API security testing scans](https://gitlab.com/groups/gitlab-org/-/epics/4254) in GitLab 15.6.
> - [Renamed](https://gitlab.com/gitlab-org/gitlab/-/issues/457449) from **DAST API analyzer** to **API security testing analyzer** in GitLab 17.0.
Perform Dynamic Application Security Testing (DAST) of web APIs to help discover bugs and potential
security issues that other QA processes may miss. Use DAST API tests in addition to
security issues that other QA processes may miss. Use API security testing in addition to
other [GitLab Secure](../index.md) security scanners and your own test processes. You can run DAST
API tests either as part of your CI/CD workflow, [on-demand](../dast/on-demand_scan.md), or both.
WARNING:
Do not run DAST API testing against a production server. Not only can it perform _any_ function that
Do not run API security testing against a production server. Not only can it perform _any_ function that
the API can, it may also trigger bugs in the API. This includes actions like modifying and deleting
data. Only run DAST API against a test server.
data. Only run API security testing against a test server.
DAST API can test the following web API types:
API security testing can test the following web API types:
- REST API
- SOAP
- GraphQL
- Form bodies, JSON, or XML
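For orientation, a minimal pipeline sketch that enables the analyzer against an OpenAPI specification looks roughly like the following; the template include is the standard one referenced later in this documentation, while the specification file name and target URL are placeholders:

```yaml
stages:
  - dast

include:
  - template: DAST-API.gitlab-ci.yml

variables:
  DAST_API_OPENAPI: test-api-specification.json   # placeholder OpenAPI document
  DAST_API_TARGET_URL: http://test-deployment/    # placeholder test deployment URL
```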
## When DAST API scans run
## When API security testing scans run
When run in your CI/CD pipeline, DAST API scanning runs in the `dast` stage by default. To ensure
DAST API scanning examines the latest code, ensure your CI/CD pipeline deploys changes to a test
When run in your CI/CD pipeline, API security testing runs in the `dast` stage by default. To ensure
API security testing examines the latest code, ensure your CI/CD pipeline deploys changes to a test
environment in a stage before the `dast` stage.
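A sketch of the stage ordering this implies, with illustrative stage names other than `dast`:

```yaml
stages:
  - build
  - test
  - deploy   # the test environment must be deployed in a stage before dast
  - dast     # API security testing runs here by default
```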
If your pipeline is configured to deploy to the same web server on each run, running a pipeline
while another is still running could cause a race condition in which one pipeline overwrites the
code from another. The API to be scanned should be excluded from changes for the duration of a
DAST API scan. The only changes to the API should be from the DAST API scanner. Changes made to the
API security testing scan. The only changes to the API should be from the API security testing scanner. Changes made to the
API (for example, by users, scheduled tasks, database changes, code changes, other pipelines, or
other scanners) during a scan could cause inaccurate results.
## Example DAST API scanning configurations
## Example API security testing scanning configurations
The following projects demonstrate DAST API scanning:
The following projects demonstrate API security testing:
- [Example OpenAPI v2 Specification project](https://gitlab.com/gitlab-org/security-products/demos/api-dast/openapi-example)
- [Example HTTP Archive (HAR) project](https://gitlab.com/gitlab-org/security-products/demos/api-dast/har-example)
@ -57,8 +58,8 @@ The following projects demonstrate DAST API scanning:
To get support for your particular problem, use the [getting help channels](https://about.gitlab.com/get-help/).
The [GitLab issue tracker on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues) is the right place for bugs and feature proposals about API Security and DAST API.
Use `~"Category:API Security"` [label](../../../development/labels/index.md) when opening a new issue regarding DAST API to ensure it is quickly reviewed by the right people. Refer to our [review response SLO](https://handbook.gitlab.com/handbook/engineering/workflow/code-review/#review-response-slo) to understand when you should receive a response.
The [GitLab issue tracker on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues) is the right place for bugs and feature proposals about API Security and API security testing.
Use `~"Category:API Security"` [label](../../../development/labels/index.md) when opening a new issue regarding API security testing to ensure it is quickly reviewed by the right people. Refer to our [review response SLO](https://handbook.gitlab.com/handbook/engineering/workflow/code-review/#review-response-slo) to understand when you should receive a response.
[Search the issue tracker](https://gitlab.com/gitlab-org/gitlab/-/issues) for similar entries before submitting your own; there's a good chance somebody else had the same issue or feature proposal. Show your support with an emoji reaction or join the discussion.
@ -79,6 +80,6 @@ WARNING:
and Status Code are common Assertions used together by checks. Checks with multiple Assertions
allow them to be turned on and off.
- Check: Performs a specific type of test, or performs a check for a type of vulnerability. For
example, the SQL Injection Check performs DAST testing for SQL Injection vulnerabilities. The DAST API scanner is comprised of several checks. Checks can be turned on and off in a profile.
example, the SQL Injection Check performs DAST testing for SQL Injection vulnerabilities. The API security testing scanner is composed of several checks. Checks can be turned on and off in a profile.
- Profile: A configuration file has one or more testing profiles, or sub-configurations. You may
have a profile for feature branches and another with extra testing for a main branch.

View File

@ -6,7 +6,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Performance tuning and testing speed
Security tools that perform dynamic analysis testing, such as DAST API, perform testing by sending requests to an instance of your running application. The requests are engineered to test for specific vulnerabilities that might exist in your application. The speed of a dynamic analysis test depends on the following:
Security tools that perform dynamic analysis testing, such as API security testing, perform testing by sending requests to an instance of your running application. The requests are engineered to test for specific vulnerabilities that might exist in your application. The speed of a dynamic analysis test depends on the following:
- How many requests per second can be sent to your application by our tooling
- How fast your application responds to requests
@ -14,13 +14,13 @@ Security tools that perform dynamic analysis testing, such as DAST API, perform
- How many operations your API is comprised of
- How many fields are in each operation (think JSON bodies, headers, query string, cookies, etc.)
If the DAST API testing job still takes longer than expected reach after following the advice in this performance guide, reach out to support for further assistance.
If the API security testing job still takes longer than expected after following the advice in this performance guide, reach out to support for further assistance.
## Diagnosing performance issues
The first step to resolving performance issues is to understand what is contributing to the slower-than-expected testing time. Some common issues we see are:
- DAST API is running on a low-vCPU runner
- API security testing is running on a low-vCPU runner
- The application is deployed to a slow or single-CPU instance and is not able to keep up with the testing load
- The application contains a slow operation that impacts the overall test speed (> 1/2 second)
- The application contains an operation that returns a large amount of data (> 500K+)
@ -28,7 +28,7 @@ The first step to resolving performance issues is to understand what is contribu
### The application contains a slow operation that impacts the overall test speed (> 1/2 second)
The DAST API job output contains helpful information about how fast we are testing, how fast each operation being tested responds, and summary information. Let's take a look at some sample output to see how it can be used in tracking down performance issues:
The API security testing job output contains helpful information about how fast we are testing, how fast each operation being tested responds, and summary information. Let's take a look at some sample output to see how it can be used in tracking down performance issues:
```shell
DAST API: Loaded 10 operations from: assets/har-large-response/large_responses.har
@ -45,22 +45,22 @@ DAST API: - Average call time: 2 seconds and 82.69 milliseconds (2.082693 secon
DAST API: - Time to complete: 14 minutes, 8 seconds and 788.36 milliseconds (848.788358 seconds)
```
This job console output snippet starts by telling us how many operations were found (10), followed by notifications that testing has started on a specific operation and a summary of the operation has been completed. The summary is the most interesting part of this log output. In the summary, we can see that it took DAST API 767 requests to fully test this operation and its related fields. We can also see that the average response time was 2 seconds and the time to complete was 14 minutes for this one operation.
This job console output snippet starts by telling us how many operations were found (10), followed by notifications that testing has started on a specific operation and a summary once testing of that operation has completed. The summary is the most interesting part of this log output. In the summary, we can see that it took API security testing 767 requests to fully test this operation and its related fields. We can also see that the average response time was 2 seconds and the time to complete was 14 minutes for this one operation.
An average response time of 2 seconds is a good initial indicator that this specific operation takes a long time to test. Further, we can see that the response body size is quite large. The large body size is the culprit here; transferring that much data on each request is what takes the majority of that 2 seconds.
For this issue, the team might decide to:
- Use a runner with more vCPUs, as this allows DAST API to parallelize the work being performed. This helps lower the test time, but getting the test down under 10 minutes might still be problematic without moving to a high CPU machine due to how long the operation takes to test. While larger runners are more costly, you also pay for less minutes if the job executions are quicker.
- [Exclude this operation](#excluding-slow-operations) from the DAST API test. While this is the simplest, it has the downside of a gap in security test coverage.
- [Exclude the operation from feature branch DAST API tests, but include it in the default branch test](#excluding-operations-in-feature-branches-but-not-default-branch).
- [Split up the DAST API testing into multiple jobs](#splitting-a-test-into-multiple-jobs).
- Use a runner with more vCPUs, as this allows API security testing to parallelize the work being performed. This helps lower the test time, but getting the test down under 10 minutes might still be problematic without moving to a high-CPU machine due to how long the operation takes to test. While larger runners are more costly, you also pay for fewer minutes if the job executions are quicker.
- [Exclude this operation](#excluding-slow-operations) from API security testing. While this is the simplest, it has the downside of a gap in security test coverage.
- [Exclude the operation from feature branch API security testing, but include it in the default branch test](#excluding-operations-in-feature-branches-but-not-default-branch).
- [Split up API security testing into multiple jobs](#splitting-a-test-into-multiple-jobs).
The likely fix is a combination of these approaches to reach an acceptable test time, assuming your team's requirements are in the 5-7 minute range.
## Addressing performance issues
The following sections document various options for addressing performance issues for DAST API:
The following sections document various options for addressing performance issues for API security testing:
- [Using a larger runner](#using-a-larger-runner)
- [Excluding slow operations](#excluding-slow-operations)
@ -69,7 +69,7 @@ The following sections document various options for addressing performance issue
### Using a larger runner
One of the easiest performance boosts can be achieved using a [larger runner](../../../ci/runners/hosted_runners/linux.md#machine-types-available-for-linux-x86-64) with DAST API. This table shows statistics collected during benchmarking of a Java Spring Boot REST API. In this benchmark, the target and DAST API share a single runner instance.
One of the easiest performance boosts can be achieved using a [larger runner](../../../ci/runners/hosted_runners/linux.md#machine-types-available-for-linux-x86-64) with API security testing. This table shows statistics collected during benchmarking of a Java Spring Boot REST API. In this benchmark, the target and API security testing share a single runner instance.
| Hosted runner on Linux tag | Requests per Second |
|------------------------------------|-----------|
@ -78,7 +78,7 @@ One of the easiest performance boosts can be achieved using a [larger runner](..
As we can see from this table, increasing the size of the runner and vCPU count can have a large impact on testing speed/performance.
Here is an example job definition for DAST API that adds a `tags` section to use the medium SaaS runner on Linux. The job extends the job definition included through the DAST API template.
Here is an example job definition for API security testing that adds a `tags` section to use the medium SaaS runner on Linux. The job extends the job definition included through the API security testing template.
```yaml
dast_api:
@ -98,7 +98,7 @@ In the case of one or two slow operations, the team might decide to skip testing
In this example, we have an operation that returns a large amount of data. The operation is `GET http://target:7777/api/large_response_json`. To exclude it, we provide the `DAST_API_EXCLUDE_PATHS` configuration variable with the path portion of our operation URL, `/api/large_response_json`.
To verify the operation is excluded, run the DAST API job and review the job console output. It includes a list of included and excluded operations at the end of the test.
To verify the operation is excluded, run the API security testing job and review the job console output. It includes a list of included and excluded operations at the end of the test.
```yaml
dast_api:
@ -111,9 +111,9 @@ Excluding operations from testing could allow some vulnerabilities to go undetec
### Splitting a test into multiple jobs
Splitting a test into multiple jobs is supported by DAST API through the use of [`DAST_API_EXCLUDE_PATHS`](configuration/customizing_analyzer_settings.md#exclude-paths) and [`DAST_API_EXCLUDE_URLS`](configuration/customizing_analyzer_settings.md#exclude-urls). When splitting a test up, a good pattern is to disable the `dast_api` job and replace it with two jobs with identifying names. In this example we have two jobs, each job is testing a version of the API, so our names reflect that. However, this technique can be applied to any situation, not just with versions of an API.
Splitting a test into multiple jobs is supported by API security testing through the use of [`DAST_API_EXCLUDE_PATHS`](configuration/customizing_analyzer_settings.md#exclude-paths) and [`DAST_API_EXCLUDE_URLS`](configuration/customizing_analyzer_settings.md#exclude-urls). When splitting a test up, a good pattern is to disable the `dast_api` job and replace it with two jobs with identifying names. In this example, we have two jobs; each job tests a version of the API, so our names reflect that. However, this technique can be applied to any situation, not just with versions of an API.
The rules we are using in the `dast_api_v1` and `dast_api_v2` jobs are copied from the [DAST API template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Security/DAST-API.gitlab-ci.yml).
The rules we are using in the `dast_api_v1` and `dast_api_v2` jobs are copied from the [API security testing template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Security/DAST-API.gitlab-ci.yml).
```yaml
# Disable the main dast_api job
@ -166,7 +166,7 @@ In the case of one or two slow operations, the team might decide to skip testing
In this example, we have an operation that returns a large amount of data. The operation is `GET http://target:7777/api/large_response_json`. To exclude it, we provide the `DAST_API_EXCLUDE_PATHS` configuration variable with the path portion of our operation URL, `/api/large_response_json`. Our configuration disables the main `dast_api` job and creates two new jobs, `dast_api_main` and `dast_api_branch`. The `dast_api_branch` job is set up to exclude the long operation and only run on non-default branches (for example, feature branches). The `dast_api_main` job is set up to only execute on the default branch (`main` in this example). The `dast_api_branch` job runs faster, allowing for quick development cycles, while the `dast_api_main` job, which only runs on default branch builds, takes longer to run.
To verify the operation is excluded, run the DAST API job and review the job console output. It includes a list of included and excluded operations at the end of the test.
To verify the operation is excluded, run the API security testing job and review the job console output. It includes a list of included and excluded operations at the end of the test.
```yaml
# Disable the main job so we can create two jobs with
@ -176,7 +176,7 @@ dast_api:
- if: $CI_COMMIT_BRANCH
when: never
# DAST API for feature branch work, excludes /api/large_response_json
# API security testing for feature branch work, excludes /api/large_response_json
dast_api_branch:
extends: dast_api
variables:
@ -198,7 +198,7 @@ dast_api_branch:
when: never
- if: $CI_COMMIT_BRANCH
# DAST API for default branch (main in our case)
# API security testing for default branch (main in our case)
# Includes the long running operations
dast_api_main:
extends: dast_api

View File

@ -6,9 +6,9 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Troubleshooting
## DAST API job times out after N hours
## API security testing job times out after N hours
For larger repositories, the DAST API job could time out on the [small hosted runner on Linux](../../../ci/runners/hosted_runners/linux.md#machine-types-available-for-linux-x86-64), which is set per default. If this happens in your jobs, you should scale up to a [larger runner](performance.md#using-a-larger-runner).
For larger repositories, the API security testing job could time out on the [small hosted runner on Linux](../../../ci/runners/hosted_runners/linux.md#machine-types-available-for-linux-x86-64), which is used by default. If this happens in your jobs, you should scale up to a [larger runner](performance.md#using-a-larger-runner).
See the following documentation sections for assistance:
@ -17,13 +17,13 @@ See the following documentation sections for assistance:
- [Excluding operations by path](configuration/customizing_analyzer_settings.md#exclude-paths)
- [Excluding slow operations](performance.md#excluding-slow-operations)
## DAST API job takes too long to complete
## API security testing job takes too long to complete
See [Performance Tuning and Testing Speed](performance.md#performance-tuning-and-testing-speed)
## Error: `Error waiting for DAST API 'http://127.0.0.1:5000' to become available`
A bug exists in versions of the DAST API analyzer prior to v1.6.196 that can cause a background process to fail under certain conditions. The solution is to update to a newer version of the DAST API analyzer.
A bug exists in versions of the API security testing analyzer prior to v1.6.196 that can cause a background process to fail under certain conditions. The solution is to update to a newer version of the API security testing analyzer.
The version information can be found in the job details for the `dast_api` job.
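A quick thing to check is whether your `.gitlab-ci.yml` pins the analyzer through `DAST_API_VERSION`; a pinned value can keep the job on an affected build. This is only a sketch, and the value shown is illustrative:

```yaml
dast_api:
  variables:
    # Illustrative older pin; remove or update it so the job pulls an analyzer
    # image that includes the fix (v1.6.196 or later).
    DAST_API_VERSION: "2"
```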
@ -41,7 +41,7 @@ If the issue is occurring with versions v1.6.196 or greater, contact Support and
## `Failed to start scanner session (version header not found)`
The DAST API engine outputs an error message when it cannot establish a connection with the scanner application component. The error message is shown in the job output window of the `dast_api` job. A common cause of this issue is changing the `DAST_API_API` variable from its default.
The API security testing engine outputs an error message when it cannot establish a connection with the scanner application component. The error message is shown in the job output window of the `dast_api` job. A common cause of this issue is changing the `DAST_API_API` variable from its default.
**Error message**
@ -49,12 +49,12 @@ The DAST API engine outputs an error message when it cannot establish a connecti
**Solution**
- Remove the `DAST_API_API` variable from the `.gitlab-ci.yml` file. The value inherits from the DAST API CI/CD template. We recommend this method instead of manually setting a value.
- If removing the variable is not possible, check to see if this value has changed in the latest version of the [DAST API CI/CD template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Security/DAST-API.gitlab-ci.yml). If so, update the value in the `.gitlab-ci.yml` file.
- Remove the `DAST_API_API` variable from the `.gitlab-ci.yml` file. The value inherits from the API security testing CI/CD template. We recommend this method instead of manually setting a value.
- If removing the variable is not possible, check to see if this value has changed in the latest version of the [API security testing CI/CD template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Security/DAST-API.gitlab-ci.yml). If so, update the value in the `.gitlab-ci.yml` file.
## `Failed to start session with scanner. Please retry, and if the problem persists reach out to support.`
The DAST API engine outputs an error message when it cannot establish a connection with the scanner application component. The error message is shown in the job output window of the `dast_api` job. A common cause for this issue is that the background component cannot use the selected port as it's already in use. This error can occur intermittently if timing plays a part (race condition). This issue occurs most often with Kubernetes environments when other services are mapped into the container causing port conflicts.
The API security testing engine outputs an error message when it cannot establish a connection with the scanner application component. The error message is shown in the job output window of the `dast_api` job. A common cause for this issue is that the background component cannot use the selected port as it's already in use. This error can occur intermittently if timing plays a part (race condition). This issue occurs most often with Kubernetes environments when other services are mapped into the container causing port conflicts.
Before proceeding with a solution, it is important to confirm that the error message was produced because the port was already taken. To confirm this was the cause:
@ -91,13 +91,13 @@ Once you have confirmed the issue was produced because the port was already take
## `Application cannot determine the base URL for the target API`
The DAST API engine outputs an error message when it cannot determine the target API after inspecting the OpenAPI document. This error message is shown when the target API has not been set in the `.gitlab-ci.yml` file, it is not available in the `environment_url.txt` file, and it could not be computed using the OpenAPI document.
The API security testing engine outputs an error message when it cannot determine the target API after inspecting the OpenAPI document. This error message is shown when the target API has not been set in the `.gitlab-ci.yml` file, it is not available in the `environment_url.txt` file, and it could not be computed using the OpenAPI document.
There is a order of precedence in which the DAST API engine tries to get the target API when checking the different sources. First, it tries to use the `DAST_API_TARGET_URL`. If the environment variable has not been set, then the DAST API engine attempts to use the `environment_url.txt` file. If there is no file `environment_url.txt`, then the DAST API engine uses the OpenAPI document contents and the URL provided in `DAST_API_OPENAPI` (if a URL is provided) to try to compute the target API.
There is an order of precedence in which the API security testing engine tries to get the target API when checking the different sources. First, it tries to use the `DAST_API_TARGET_URL`. If the environment variable has not been set, then the API security testing engine attempts to use the `environment_url.txt` file. If there is no file `environment_url.txt`, then the API security testing engine uses the OpenAPI document contents and the URL provided in `DAST_API_OPENAPI` (if a URL is provided) to try to compute the target API.
The best-suited solution depends on whether your target API changes for each deployment. In static environments, the target API is the same for each deployment; in this case, refer to the [static environment solution](#static-environment-solution). If the target API changes for each deployment, a [dynamic environment solution](#dynamic-environment-solutions) should be applied.
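As a minimal sketch of the static case (the URL is a placeholder), the target can be set explicitly so the first source in that precedence order always wins:

```yaml
variables:
  DAST_API_TARGET_URL: http://test-deployment/   # placeholder; use your test environment's base URL
```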
## DAST API job excludes some paths from operations
## API security testing job excludes some paths from operations
If you find that some paths are being excluded from operations, ensure that the `consumes` array is defined and has a valid type in the target definition JSON file. This is required.
@ -129,7 +129,7 @@ In a dynamic environment your target API changes for each different deployment.
**Use environment_url.txt**
To support dynamic environments in which the target API URL changes during each pipeline, DAST API engine supports the use of an `environment_url.txt` file that contains the URL to use. This file is not checked into the repository, instead it's created during the pipeline by the job that deploys the test target and collected as an artifact that can be used by later jobs in the pipeline. The job that creates the `environment_url.txt` file must run before the DAST API engine job.
To support dynamic environments in which the target API URL changes during each pipeline, the API security testing engine supports the use of an `environment_url.txt` file that contains the URL to use. This file is not checked into the repository; instead, it's created during the pipeline by the job that deploys the test target and collected as an artifact that can be used by later jobs in the pipeline. The job that creates the `environment_url.txt` file must run before the API security testing engine job.
1. Modify the test target deployment job adding the base URL in an `environment_url.txt` file at the root of your project.
1. Modify the test target deployment job collecting the `environment_url.txt` as an artifact.
@ -150,7 +150,7 @@ deploy-test-target:
## Use OpenAPI with an invalid schema
There are cases where the document is autogenerated with an invalid schema or cannot be edited manually in a timely manner. In those scenarios, the DAST API is able to perform a relaxed validation by setting the variable `DAST_API_OPENAPI_RELAXED_VALIDATION`. We recommend providing a fully compliant OpenAPI document to prevent unexpected behaviors.
There are cases where the document is autogenerated with an invalid schema or cannot be edited manually in a timely manner. In those scenarios, API security testing is able to perform a relaxed validation by setting the variable `DAST_API_OPENAPI_RELAXED_VALIDATION`. We recommend providing a fully compliant OpenAPI document to prevent unexpected behaviors.
### Edit a non-compliant OpenAPI file
@ -167,7 +167,7 @@ If your OpenAPI document is generated manually, load your document in the editor
Relaxed validation is meant for cases when the OpenAPI document cannot meet OpenAPI specifications, but it still has enough content to be consumed by different tools. Validation is still performed, but less strictly with regard to the document schema.
DAST API can still try to consume an OpenAPI document that does not fully comply with OpenAPI specifications. To instruct DAST API to perform a relaxed validation, set the variable `DAST_API_OPENAPI_RELAXED_VALIDATION` to any value, for example:
API security testing can still try to consume an OpenAPI document that does not fully comply with OpenAPI specifications. To instruct API security testing to perform a relaxed validation, set the variable `DAST_API_OPENAPI_RELAXED_VALIDATION` to any value, for example:
```yaml
stages:
@ -185,7 +185,7 @@ variables:
## `No operation in the OpenAPI document is consuming any supported media type`
DAST API uses the specified media types in the OpenAPI document to generate requests. If no request can be created due to the lack of supported media types, then an error is thrown.
API security testing uses the specified media types in the OpenAPI document to generate requests. If no request can be created due to the lack of supported media types, then an error is thrown.
**Error message**
@ -198,8 +198,8 @@ DAST API uses the specified media types in the OpenAPI document to generate requ
## ``Error, error occurred trying to download `<URL>`: There was an error when retrieving content from Uri:' <URL>'. Error:The SSL connection could not be established, see inner exception.``
DAST API is compatible with a broad range of TLS configurations, including outdated protocols and ciphers.
Despite broad support, you might encounter connection errors. This error occurs because DAST API could not establish a secure connection with the server at the given URL.
API security testing is compatible with a broad range of TLS configurations, including outdated protocols and ciphers.
Despite broad support, you might encounter connection errors. This error occurs because API security testing could not establish a secure connection with the server at the given URL.
To resolve the issue:

View File

@ -76,11 +76,11 @@ When available, the vendor severity level takes precedence and is used by the an
|------------------------------------------------------------------------------------------|------------------------------|----------------------------|-------------------------------------|
| [`Browser-based DAST`](../dast/browser/index.md) | **{check-circle}** Yes | String | `HIGH`, `MEDIUM`, `LOW`, `INFO` |
## DAST API
## API security testing
| GitLab analyzer | Outputs severity levels? | Native severity level type | Native severity level example |
|------------------------------------------------------------------------------------------|------------------------------|----------------------------|-------------------------------------|
| [`DAST API`](../dast_api/index.md) | **{check-circle}** Yes | String | `HIGH`, `MEDIUM`, `LOW` |
| [`API security testing`](../dast_api/index.md) | **{check-circle}** Yes | String | `HIGH`, `MEDIUM`, `LOW` |
## Dependency Scanning

View File

@ -213,6 +213,14 @@ requirements_confirmation:
requirements: tmp/requirements.json
```
Because requirements and [test cases](../../../ci/test_cases/index.md) are being
[migrated to work items](https://gitlab.com/groups/gitlab-org/-/epics/5171), if you have enabled work items
in a project, you must replace `requirements` in the above configurations with `requirements_v2`:
```yaml
requirements_v2: tmp/requirements.json
```
## Import requirements from a CSV file
You must have at least the Reporter role.

View File

@ -1,7 +1,5 @@
# frozen_string_literal: true
require_dependency 'api/validations/validators/limit'
module API
module Terraform
class State < ::API::Base

View File

@ -50,6 +50,7 @@ module Gitlab
delivered_to: delivered_to.map(&:value),
envelope_to: envelope_to.map(&:value),
x_envelope_to: x_envelope_to.map(&:value),
x_original_to: x_original_to.map(&:value),
cc_address: cc,
# reduced down to what looks like an email in the received headers
received_recipients: recipients_from_received_headers,
@ -112,6 +113,7 @@ module Gitlab
find_first_key_from(envelope_to) ||
find_first_key_from(x_envelope_to) ||
find_first_key_from(recipients_from_received_headers) ||
find_first_key_from(x_original_to) ||
find_first_key_from(cc)
end
@ -163,6 +165,10 @@ module Gitlab
Array(mail[:received])
end
def x_original_to
Array(mail[:x_original_to])
end
def recipients_from_received_headers
strong_memoize :emails_from_received_headers do
received.filter_map { |header| header.value[RECEIVED_HEADER_REGEX, 1] }

View File

@ -146,7 +146,7 @@ module Gitlab
if finished_at
job_finished_at = Random.rand(job_started_at..finished_at)
elsif job_status == 'running'
job_finished_at = job_started_at + Random.rand(1 * 60..PIPELINE_FINISH_RANGE_MAX_IN_SECONDS)
job_finished_at = job_started_at + Random.rand((1 * 60)..PIPELINE_FINISH_RANGE_MAX_IN_SECONDS)
end
# Do not use the first 2 runner tags ('runner-fleet', "#{registration_prefix}runner").

View File

@ -7,12 +7,9 @@ module SystemCheck
def multi_check
active_users = User.active.count
if active_users > 0
$stdout.puts active_users.to_s.color(:green)
else
$stdout.puts active_users.to_s.color(:red)
end
color_status = :red
color_status = :green if active_users > 0
$stdout.puts Rainbow(active_users.to_s).color(color_status)
end
end
end

View File

@ -17,9 +17,9 @@ module SystemCheck
$stdout.print sanitized_message(project)
if project.namespace
$stdout.puts 'yes'.color(:green)
$stdout.puts Rainbow('yes').green
else
$stdout.puts 'no'.color(:red)
$stdout.puts Rainbow('no').red
show_error
end
end

View File

@ -12,7 +12,7 @@ namespace :gettext do
"--output-dir", Rails.root.join('app/assets/javascripts/locale').to_s
]
abort 'Error: Unable to convert gettext files to js.'.color(:red) unless Kernel.system(*command)
abort Rainbow('Error: Unable to convert gettext files to js.').red unless Kernel.system(*command)
end
desc 'Regenerate gitlab.pot file'

View File

@ -93,7 +93,7 @@ namespace :gitlab do
cmd += '> /dev/null 2>&1' if args[:silent].present?
unless system(cmd)
abort 'Error: Unable to build Tailwind CSS bundle.'.color(:red)
abort Rainbow('Error: Unable to build Tailwind CSS bundle.').red
end
end
@ -125,7 +125,7 @@ namespace :gitlab do
puts "You can inspect the webpack log here: #{ENV['CI_JOB_URL']}/artifacts/file/#{log_path}" if log_path && ENV['CI_JOB_URL']
unless system(cmd)
abort 'Error: Unable to compile webpack production bundle.'.color(:red)
abort Rainbow('Error: Unable to compile webpack production bundle.').red
end
Gitlab::TaskHelpers.invoke_and_time_task('gitlab:assets:fix_urls')
@ -173,14 +173,14 @@ namespace :gitlab do
desc 'GitLab | Assets | Compile vendor assets'
task :vendor do
unless system('yarn webpack-vendor')
abort 'Error: Unable to compile webpack DLL.'.color(:red)
abort Rainbow('Error: Unable to compile webpack DLL.').red
end
end
desc 'GitLab | Assets | Check that scss mixins do not introduce any sideffects'
task :check_page_bundle_mixins_css_for_sideeffects do
unless system('./scripts/frontend/check_page_bundle_mixins_css_for_sideeffects.js')
abort 'Error: At least one CSS changes introduces an unwanted sideeffect'.color(:red)
abort Rainbow('Error: At least one CSS changes introduces an unwanted sideeffect').red
end
end
end

View File

@ -7,7 +7,7 @@ namespace :gitlab do
desc 'Synchronously finish executing a batched background migration'
task :finalize, [:job_class_name, :table_name, :column_name, :job_arguments] => :environment do |_, args|
if Gitlab::Database.db_config_names(with_schema: :gitlab_shared).size > 1
puts "Please specify the database".color(:red)
puts Rainbow("Please specify the database").red
exit 1
end
@ -77,7 +77,7 @@ namespace :gitlab do
connection: connection
)
puts "Done.".color(:green)
puts Rainbow("Done.").green
end
def display_migration_status(database_name, connection)
@ -104,7 +104,7 @@ namespace :gitlab do
def validate_finalization_arguments!(args)
[:job_class_name, :table_name, :column_name, :job_arguments].each do |argument|
unless args[argument]
puts "Must specify #{argument} as an argument".color(:red)
puts Rainbow("Must specify #{argument} as an argument").red
exit 1
end
end

View File

@ -68,7 +68,7 @@ module Tasks
yield
ensure
backup_progress.puts(
"#{Time.current} " + '-- Deleting backup and restore PID file ... '.color(:blue) + 'done'.color(:green)
"#{Time.current} #{Rainbow('-- Deleting backup and restore PID file ...').blue} #{Rainbow('done').green}"
)
File.delete(PID_FILE)
end
@ -77,7 +77,7 @@ module Tasks
def self.read_pid(file_content)
Process.getpgid(file_content.to_i)
backup_progress.puts(<<~MESSAGE.color(:red))
backup_progress.puts(Rainbow(<<~MESSAGE).red)
Backup and restore in progress:
There is a backup and restore task in progress (PID #{file_content}).
Try to run the current task once the previous one ends.
@ -85,7 +85,7 @@ module Tasks
exit 1
rescue Errno::ESRCH
backup_progress.puts(<<~MESSAGE.color(:blue))
backup_progress.puts(Rainbow(<<~MESSAGE).blue)
The PID file #{PID_FILE} exists and contains #{file_content}, but the process is not running.
The PID file will be rewritten with the current process ID #{PID}.
MESSAGE

View File

@ -7,14 +7,14 @@ namespace :gitlab do
task :bump_cell_sequences, [:increase_by] => :environment do |_t, args|
# We do not want to run this on production environment, even accidentally.
unless Gitlab.dev_or_test_env?
puts 'This rake task cannot be run in production environment'.color(:red)
puts Rainbow('This rake task cannot be run in production environment').red
exit 1
end
increase_by = args.increase_by.to_i
if increase_by < 1
puts 'Please specify a positive integer `increase_by` value'.color(:red)
puts 'Example: rake gitlab:db:cells:bump_cell_sequences[100000]'.color(:green)
puts Rainbow('Please specify a positive integer `increase_by` value').red
puts Rainbow('Example: rake gitlab:db:cells:bump_cell_sequences[100000]').green
exit 1
end

View File

@ -8,8 +8,8 @@ namespace :gitlab do
task :bump_ci_sequences, [:increase_by] => :environment do |_t, args|
increase_by = args.increase_by.to_i
if increase_by < 1
puts 'Please specify a positive integer `increase_by` value'.color(:red)
puts 'Example: rake gitlab:db:decomposition:rollback:bump_ci_sequences[100000]'.color(:green)
puts Rainbow('Please specify a positive integer `increase_by` value').red
puts Rainbow('Example: rake gitlab:db:decomposition:rollback:bump_ci_sequences[100000]').green
exit 1
end

View File

@ -189,14 +189,14 @@ namespace :tw do
File.write(codeowners_path, new_codeowners_content)
if current_codeowners_content == new_codeowners_content
puts "~ CODEOWNERS already up to date".color(:yellow)
puts Rainbow("~ CODEOWNERS already up to date").yellow
else
puts "✓ CODEOWNERS updated".color(:green)
puts Rainbow("✓ CODEOWNERS updated").green
end
if errors.present?
puts ""
puts "✘ Files with missing metadata found:".color(:red)
puts Rainbow("✘ Files with missing metadata found:").red
errors.map { |file| puts file }
end
end

View File

@ -2,7 +2,7 @@
desc "GitLab | Build internal ids for issues and merge requests"
task migrate_iids: :environment do
puts 'Issues'.color(:yellow)
puts Rainbow('Issues').yellow
Issue.where(iid: nil).find_each(batch_size: 100) do |issue|
issue.set_iid
@ -16,7 +16,7 @@ task migrate_iids: :environment do
end
puts 'done'
puts 'Merge Requests'.color(:yellow)
puts Rainbow('Merge Requests').yellow
MergeRequest.where(iid: nil).find_each(batch_size: 100) do |mr|
mr.set_iid
@ -30,7 +30,7 @@ task migrate_iids: :environment do
end
puts 'done'
puts 'Milestones'.color(:yellow)
puts Rainbow('Milestones').yellow
Milestone.where(iid: nil).find_each(batch_size: 100) do |m|
m.set_iid

View File

@ -5,8 +5,8 @@ namespace :yarn do
task :available do
unless system('yarn --version', out: File::NULL)
warn(
'Error: Yarn executable was not detected in the system.'.color(:red),
'Download Yarn at https://yarnpkg.com/en/docs/install'.color(:green)
Rainbow('Error: Yarn executable was not detected in the system.').red,
Rainbow('Download Yarn at https://yarnpkg.com/en/docs/install').green
)
abort
end
@ -16,8 +16,8 @@ namespace :yarn do
task check: ['yarn:available'] do
unless system('yarn check --ignore-engines', out: File::NULL)
warn(
'Error: You have unmet dependencies. (`yarn check` command failed)'.color(:red),
'Run `yarn install` to install missing modules.'.color(:green)
Rainbow('Error: You have unmet dependencies. (`yarn check` command failed)').red,
Rainbow('Run `yarn install` to install missing modules.').green
)
abort
end
@ -26,13 +26,13 @@ namespace :yarn do
desc 'Install Node dependencies with Yarn'
task install: ['yarn:available'] do
unless system('yarn install --pure-lockfile --ignore-engines --prefer-offline')
abort 'Error: Unable to install node modules.'.color(:red)
abort Rainbow('Error: Unable to install node modules.').red
end
end
desc 'Remove Node dependencies'
task :clobber do
warn 'Purging ./node_modules directory'.color(:red)
warn Rainbow('Purging ./node_modules directory').red
FileUtils.rm_rf 'node_modules'
end
end

View File

@ -11495,6 +11495,11 @@ msgstr ""
msgid "Closed date"
msgstr ""
msgid "Closed issue"
msgid_plural "Closed issues"
msgstr[0] ""
msgstr[1] ""
msgid "Closed issues"
msgstr ""
@ -17153,7 +17158,7 @@ msgstr ""
msgid "Delete this attachment"
msgstr ""
msgid "Delete this epic and all descendants?"
msgid "Delete this epic and release all child items?"
msgstr ""
msgid "Delete this project"
@ -32587,9 +32592,15 @@ msgstr ""
msgid "Milestones|Group Milestone"
msgstr ""
msgid "Milestones|Labels from issues in this milestone will appear here."
msgstr ""
msgid "Milestones|Milestone %{milestoneTitle} was not found"
msgstr ""
msgid "Milestones|No labels found"
msgstr ""
msgid "Milestones|Ongoing Issues (open and assigned)"
msgstr ""
@ -35749,6 +35760,11 @@ msgstr ""
msgid "Open in your IDE"
msgstr ""
msgid "Open issue"
msgid_plural "Open issues"
msgstr[0] ""
msgstr[1] ""
msgid "Open new window"
msgstr ""
@ -46172,6 +46188,9 @@ msgstr ""
msgid "SecurityConfiguration|Vulnerability details and statistics in the merge request"
msgstr ""
msgid "SecurityOrchestraation|(Formerly Scan result policy)"
msgstr ""
msgid "SecurityOrchestration| and "
msgstr ""
@ -46767,6 +46786,12 @@ msgstr ""
msgid "SecurityOrchestration|Summary of syntax changes:"
msgstr ""
msgid "SecurityOrchestration|The %{oldNameStart}Scan result policy%{oldNameEnd} is now called the %{newNameStart}Merge request approval policy%{newNameEnd} to better align with its purpose. For more details, see %{linkStart}the release notes%{linkEnd}."
msgstr ""
msgid "SecurityOrchestration|The Scan result policy is now called the Merge request approval policy to better align with its purpose. For more details, see %{linkStart}the release notes%{linkEnd}."
msgstr ""
msgid "SecurityOrchestration|The following branches do not exist on this development project: %{branches}. Please review all protected branches to ensure the values are accurate before updating this policy."
msgstr ""
@ -46839,6 +46864,9 @@ msgstr ""
msgid "SecurityOrchestration|Update via merge request"
msgstr ""
msgid "SecurityOrchestration|Updated policy name"
msgstr ""
msgid "SecurityOrchestration|Use a merge request approval policy to create rules that check for security vulnerabilities and license compliance before merging a merge request."
msgstr ""
@ -58441,6 +58469,9 @@ msgstr ""
msgid "WorkItem|Delete %{workItemType}"
msgstr ""
msgid "WorkItem|Delete this %{workItemType} and release all child items? This action cannot be reversed."
msgstr ""
msgid "WorkItem|Discard changes"
msgstr ""
@ -60774,11 +60805,6 @@ msgstr ""
msgid "closed %{timeago}"
msgstr ""
msgid "closed issue"
msgid_plural "closed issues"
msgstr[0] ""
msgstr[1] ""
msgid "comment"
msgstr ""
@ -61939,11 +61965,6 @@ msgstr ""
msgid "only supports valid HTTP(S) URLs"
msgstr ""
msgid "open issue"
msgid_plural "open issues"
msgstr[0] ""
msgstr[1] ""
msgid "or"
msgstr ""
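The gitlab.pot hunks above add capitalized "Closed issue"/"Open issue" plural entries and drop the old lowercase ones. Plural entries come in msgid/msgid_plural pairs and are looked up through a pluralization helper (n_ in GitLab's Ruby gettext tooling). A rough sketch with a stand-in n_ so it runs outside Rails; the real helper consults the compiled locale files instead of this inline ternary:

# Stand-in for the gettext n_() helper, for illustration only.
def n_(singular, plural, count)
  count == 1 ? singular : plural
end

puts n_('Open issue', 'Open issues', 1)  # => Open issue
puts n_('Open issue', 'Open issues', 2)  # => Open issues

The singular and plural source strings must match the msgid and msgid_plural lines exactly, so once the UI switches to the capitalized form the lowercase entries are removed.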

View File

@ -100,7 +100,7 @@ describe('DeleteIssueModal component', () => {
it('renders', () => {
wrapper = mountComponent({ issueType: 'epic' });
expect(findForm().text()).toBe('Delete this epic and all descendants?');
expect(findForm().text()).toBe('Delete this epic and release all child items?');
});
});
});

View File

@ -120,6 +120,7 @@ describe('WorkItemActions component', () => {
workItemReference = mockWorkItemReference,
workItemCreateNoteEmail = mockWorkItemCreateNoteEmail,
hideSubscribe = undefined,
hasChildren = false,
} = {}) => {
wrapper = shallowMountExtended(WorkItemActions, {
isLoggedIn: isLoggedIn(),
@ -136,6 +137,7 @@ describe('WorkItemActions component', () => {
workItemState: STATE_OPEN,
fullPath: 'gitlab-org/gitlab-test',
workItemId: 'gid://gitlab/WorkItem/1',
workItemIid: '1',
canUpdate,
canDelete,
isConfidential,
@ -146,6 +148,7 @@ describe('WorkItemActions component', () => {
workItemReference,
workItemCreateNoteEmail,
hideSubscribe,
hasChildren,
},
provide: {
isGroup: false,
@ -310,12 +313,25 @@ describe('WorkItemActions component', () => {
});
describe('delete action', () => {
it('shows confirm modal when clicked', () => {
it('shows confirm modal with delete confirmation message when clicked', () => {
createComponent();
findDeleteButton().vm.$emit('action');
expect(modalShowSpy).toHaveBeenCalled();
expect(findModal().text()).toBe(
'Are you sure you want to delete the task? This action cannot be reversed.',
);
});
it('shows confirm modal with delete hierarchy confirmation message when clicked', () => {
createComponent({ hasChildren: true });
findDeleteButton().vm.$emit('action');
expect(findModal().text()).toBe(
'Delete this task and release all child items? This action cannot be reversed.',
);
});
it('emits event when clicking OK button', () => {

View File

@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
import { cloneDeep } from 'lodash';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
@ -69,6 +70,17 @@ describe('WorkItemsListApp component', () => {
});
});
it('renders tab counts', async () => {
mountComponent();
await waitForPromises();
expect(cloneDeep(findIssuableList().props('tabCounts'))).toEqual({
all: 3,
closed: 1,
opened: 2,
});
});
it('renders IssueCardStatistics component', () => {
mountComponent();

View File

@ -3868,6 +3868,11 @@ export const groupWorkItemsQueryResponse = {
data: {
group: {
id: 'gid://gitlab/Group/3',
workItemStateCounts: {
all: 3,
closed: 1,
opened: 2,
},
workItems: {
nodes: [
{

View File

@ -119,7 +119,7 @@ RSpec.describe Gitlab::Database::Reindexing::ReindexConcurrently, '#perform' do
end
def iname(name, suffix = '')
"#{name[0...63 - suffix.size]}#{suffix}"
"#{name[0...(63 - suffix.size)]}#{suffix}"
end
def expect_to_execute_in_order(*queries)
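The extra parentheses above only make the range boundary explicit (the form RuboCop's Lint/AmbiguousRange cop prefers); behaviour is unchanged because Ruby's range operator binds more loosely than subtraction. A quick check, with an illustrative name and suffix:

suffix = '_ccnew'
name = 'x' * 70

with_parens    = "#{name[0...(63 - suffix.size)]}#{suffix}"
without_parens = "#{name[0...63 - suffix.size]}#{suffix}"

# Both slices truncate the name to 63 - suffix.size characters before appending the suffix.
raise 'mismatch' unless with_parens == without_parens
puts with_parens.length # => 63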

View File

@ -21,6 +21,10 @@ RSpec.describe Gitlab::Database::Triggers::AssignDesiredShardingKey, feature_cat
}
end
before do
connection.schema_cache.clear!
end
describe '#create' do
let(:model) { Class.new(ActiveRecord::Base) }

View File

@ -9,6 +9,10 @@ RSpec.describe Gitlab::Database::UnidirectionalCopyTrigger do
let(:connection) { ActiveRecord::Base.connection }
let(:copy_trigger) { described_class.on_table(table_name, connection: connection) }
before do
connection.schema_cache.clear!
end
describe '#name' do
context 'when a single column name is given' do
subject(:trigger_name) { copy_trigger.name('id', 'other_id') }
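Both database specs above now clear ActiveRecord's schema cache up front, because they create and drop tables at runtime and a column or trigger lookup memoized by an earlier example would otherwise go stale. A minimal sketch of the effect, assuming the activerecord and sqlite3 gems are available (the table name is illustrative):

require 'active_record'

ActiveRecord::Base.establish_connection(adapter: 'sqlite3', database: ':memory:')
conn = ActiveRecord::Base.connection

conn.create_table(:widgets) { |t| t.string :name }
conn.schema_cache.columns('widgets') # memoizes the column list
conn.drop_table(:widgets)

conn.schema_cache.clear!             # forget the memoized metadata
# A fresh schema_cache lookup now reflects the dropped table instead of the stale columns.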

View File

@ -33,7 +33,7 @@ RSpec.describe Gitlab::Email::Receiver, feature_category: :shared do
metadata = receiver.mail_metadata
expect(metadata.keys).to match_array(%i[mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to meta received_recipients cc_address])
expect(metadata.keys).to match_array(%i[mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to meta received_recipients cc_address x_original_to])
expect(metadata[:meta]).to include(client_id: client_id, project: project.full_path)
expect(metadata[meta_key]).to eq(meta_value)
end
@ -57,6 +57,9 @@ RSpec.describe Gitlab::Email::Receiver, feature_category: :shared do
end
context 'when the email contains a valid email address in a header' do
let(:incoming_email) { "incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com" }
let(:meta_value) { [incoming_email] }
before do
stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.example.com")
end
@ -64,7 +67,7 @@ RSpec.describe Gitlab::Email::Receiver, feature_category: :shared do
context 'when in a Delivered-To header' do
let(:email_raw) { fixture_file('emails/forwarded_new_issue.eml') }
let(:meta_key) { :delivered_to }
let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com", "support@example.com"] }
let(:meta_value) { [incoming_email, "support@example.com"] }
it_behaves_like 'successful receive'
end
@ -72,7 +75,6 @@ RSpec.describe Gitlab::Email::Receiver, feature_category: :shared do
context 'when in an Envelope-To header' do
let(:email_raw) { fixture_file('emails/envelope_to_header.eml') }
let(:meta_key) { :envelope_to }
let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"] }
it_behaves_like 'successful receive'
end
@ -80,7 +82,6 @@ RSpec.describe Gitlab::Email::Receiver, feature_category: :shared do
context 'when in an X-Envelope-To header' do
let(:email_raw) { fixture_file('emails/x_envelope_to_header.eml') }
let(:meta_key) { :x_envelope_to }
let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"] }
it_behaves_like 'successful receive'
end
@ -88,7 +89,7 @@ RSpec.describe Gitlab::Email::Receiver, feature_category: :shared do
context 'when enclosed with angle brackets in an Envelope-To header' do
let(:email_raw) { fixture_file('emails/envelope_to_header_with_angle_brackets.eml') }
let(:meta_key) { :envelope_to }
let(:meta_value) { ["<incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com>"] }
let(:meta_value) { ["<#{incoming_email}>"] }
it_behaves_like 'successful receive'
end
@ -106,7 +107,7 @@ RSpec.describe Gitlab::Email::Receiver, feature_category: :shared do
context 'when all other headers are missing' do
let(:email_raw) { fixture_file('emails/missing_delivered_to_header.eml') }
let(:meta_key) { :received_recipients }
let(:meta_value) { ['incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com', 'incoming+gitlabhq/gitlabhq@example.com'] }
let(:meta_value) { [incoming_email, 'incoming+gitlabhq/gitlabhq@example.com'] }
describe 'it uses receive headers to find the key' do
it_behaves_like 'successful receive'
@ -118,7 +119,7 @@ RSpec.describe Gitlab::Email::Receiver, feature_category: :shared do
<<~EMAIL
From: jake@example.com
To: to@example.com
Cc: incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com
Cc: #{incoming_email}
Subject: Issue title
Issue description
@ -126,7 +127,23 @@ RSpec.describe Gitlab::Email::Receiver, feature_category: :shared do
end
let(:meta_key) { :cc_address }
let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"] }
it_behaves_like 'successful receive'
end
context 'when in a X-Original-To header' do
let(:email_raw) do
<<~EMAIL
From: jake@example.com
To: to@example.com
X-Original-To: #{incoming_email}
Subject: Issue title
Issue description
EMAIL
end
let(:meta_key) { :x_original_to }
it_behaves_like 'successful receive'
end
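The new context adds X-Original-To to the set of headers the receiver inspects for the ingestable address. A hedged sketch of pulling that header out of a raw message with the mail gem (the address mirrors the fixture value above; the calls are plain Mail API, not GitLab code):

require 'mail'

raw = <<~EMAIL
  From: jake@example.com
  To: to@example.com
  X-Original-To: incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com
  Subject: Issue title

  Issue description
EMAIL

mail = Mail.read_from_string(raw)
# Non-standard headers are addressable by name; #value returns the raw field body.
puts mail.header['X-Original-To'].value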

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Email::ServiceDeskReceiver do
RSpec.describe Gitlab::Email::ServiceDeskReceiver, feature_category: :service_desk do
let(:email) { fixture_file('emails/service_desk_custom_address.eml') }
let(:receiver) { described_class.new(email) }
@ -31,6 +31,8 @@ RSpec.describe Gitlab::Email::ServiceDeskReceiver do
end
context 'when the email contains a valid email address in a header' do
let(:service_desk_email) { "support+project_slug-project_key@example.com" }
context 'when in a Delivered-To header' do
let(:email) { fixture_file('emails/service_desk_custom_address_reply.eml') }
@ -49,12 +51,27 @@ RSpec.describe Gitlab::Email::ServiceDeskReceiver do
it_behaves_like 'received successfully'
end
context 'when in a X-Original-To header' do
let(:email) do
<<~EMAIL
From: from@example.com
To: to@example.com
X-Original-To: #{service_desk_email}
Subject: Issue title
Issue description
EMAIL
end
it_behaves_like 'received successfully'
end
context 'when in a Cc header' do
let(:email) do
<<~EMAIL
From: from@example.com
To: to@example.com
Cc: support+project_slug-project_key@example.com
Cc: #{service_desk_email}
Subject: Issue title
Issue description

View File

@ -5783,7 +5783,6 @@
- './spec/lib/gitlab/database/transaction/observer_spec.rb'
- './spec/lib/gitlab/database/type/color_spec.rb'
- './spec/lib/gitlab/database/type/json_pg_safe_spec.rb'
- './spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb'
- './spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb'
- './spec/lib/gitlab/database/with_lock_retries_spec.rb'
- './spec/lib/gitlab/data_builder/alert_spec.rb'

View File

@ -4,11 +4,7 @@ RSpec.shared_context 'group integration activation' do
include_context 'instance and group integration activation'
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
before_all do
group.add_owner(user)
end
let_it_be(:user) { create(:user, owner_of: group) }
before do
sign_in(user)

View File

@ -18,7 +18,7 @@ RSpec.shared_context 'MergeRequestsFinder multiple projects with merge requests
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:project1, reload: true) do
allow_gitaly_n_plus_1 { create(:project, :public, group: group) }
allow_gitaly_n_plus_1 { create(:project, :public, group: group, maintainers: user) }
end
# We cannot use `let_it_be` here otherwise we get:
# Failure/Error: allow(RepositoryForkWorker).to receive(:perform_async).and_return(true)
@ -39,15 +39,15 @@ RSpec.shared_context 'MergeRequestsFinder multiple projects with merge requests
end
let_it_be(:project4, reload: true) do
allow_gitaly_n_plus_1 { create(:project, :repository, group: subgroup) }
allow_gitaly_n_plus_1 { create(:project, :repository, group: subgroup, developers: user) }
end
let_it_be(:project5, reload: true) do
allow_gitaly_n_plus_1 { create(:project, group: subgroup) }
allow_gitaly_n_plus_1 { create(:project, group: subgroup, developers: user) }
end
let_it_be(:project6, reload: true) do
allow_gitaly_n_plus_1 { create(:project, group: subgroup) }
allow_gitaly_n_plus_1 { create(:project, group: subgroup, developers: user) }
end
let_it_be(:label) { create(:label, project: project1) }
@ -97,13 +97,6 @@ RSpec.shared_context 'MergeRequestsFinder multiple projects with merge requests
let!(:label_link) { create(:label_link, label: label, target: merge_request2) }
let!(:label_link2) { create(:label_link, label: label2, target: merge_request3) }
before_all do
project1.add_maintainer(user)
project4.add_developer(user)
project5.add_developer(user)
project6.add_developer(user)
end
before do
project2.add_developer(user)
project3.add_developer(user)
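The let_it_be blocks above now grant memberships through the project factory's role keywords (maintainers:, developers:) instead of a separate before_all with add_maintainer/add_developer calls, so membership setup happens once alongside record creation. A rough RSpec sketch of the pattern, assuming GitLab's spec_helper, FactoryBot factories and test-prof's let_it_be are loaded (the describe label and expectations are illustrative):

# frozen_string_literal: true
require 'spec_helper'

RSpec.describe 'project membership via factory keywords' do
  let_it_be(:user) { create(:user) }
  # Users passed to these keywords are added as members while the project
  # is created, replacing `before_all { project.add_maintainer(user) }`.
  let_it_be(:project)       { create(:project, :public, maintainers: user) }
  let_it_be(:other_project) { create(:project, developers: user) }

  it 'adds the memberships at creation time' do
    expect(project.team.maintainers).to include(user)
    expect(other_project.team.developers).to include(user)
  end
end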

View File

@ -1,21 +1,21 @@
# frozen_string_literal: true
RSpec.shared_context 'GroupPolicy context' do
let_it_be(:guest) { create(:user) }
let_it_be(:reporter) { create(:user) }
let_it_be(:developer) { create(:user) }
let_it_be(:maintainer) { create(:user) }
let_it_be(:owner) { create(:user) }
let_it_be(:admin) { create(:admin, :without_default_org) }
let_it_be(:non_group_member) { create(:user) }
let_it_be(:organization) { create(:organization) }
let_it_be(:organization_owner) { create(:organization_user, :owner, organization: organization).user }
let_it_be(:group, refind: true) do
create(:group, :private, :owner_subgroup_creation_only, organization: organization)
end
let_it_be(:guest) { create(:user, guest_of: group) }
let_it_be(:reporter) { create(:user, reporter_of: group) }
let_it_be(:developer) { create(:user, developer_of: group) }
let_it_be(:maintainer) { create(:user, maintainer_of: group) }
let_it_be(:owner) { create(:user, owner_of: group) }
let_it_be(:admin) { create(:admin, :without_default_org) }
let_it_be(:non_group_member) { create(:user) }
let_it_be(:organization_owner) { create(:organization_user, :owner, organization: organization).user }
let(:public_permissions) do
%i[
read_group read_counts read_issue read_namespace
@ -93,13 +93,5 @@ RSpec.shared_context 'GroupPolicy context' do
let(:admin_permissions) { %i[read_confidential_issues read_internal_note] }
before_all do
group.add_guest(guest)
group.add_reporter(reporter)
group.add_developer(developer)
group.add_maintainer(maintainer)
group.add_owner(owner)
end
subject { described_class.new(current_user, group) }
end
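The group-side equivalent appears here: membership is encoded in the user factory itself (guest_of:, reporter_of:, developer_of:, maintainer_of:, owner_of:), which is what lets the before_all block of group.add_* calls go away. A condensed sketch under the same assumptions as the previous example:

# frozen_string_literal: true
require 'spec_helper'

RSpec.describe 'group membership via user factory keywords' do
  let_it_be(:group)     { create(:group, :private) }
  # Each keyword adds the freshly created user to `group` at that access level.
  let_it_be(:guest)     { create(:user, guest_of: group) }
  let_it_be(:developer) { create(:user, developer_of: group) }
  let_it_be(:owner)     { create(:user, owner_of: group) }

  it 'records the expected access level' do
    expect(group.max_member_access_for_user(developer)).to eq(Gitlab::Access::DEVELOPER)
  end
end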

View File

@ -3,8 +3,8 @@
RSpec.shared_examples 'with cross-reference system notes' do
let_it_be(:user) { create(:user) }
let_it_be(:pat) { create(:personal_access_token, user: user) }
let_it_be(:project) { create(:project, :small_repo) }
let_it_be(:project2) { create(:project, :small_repo) }
let_it_be(:project) { create(:project, :small_repo, developers: user) }
let_it_be(:project2) { create(:project, :small_repo, developers: user) }
let_it_be(:project3) { create(:project, :small_repo) }
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
@ -21,11 +21,6 @@ RSpec.shared_examples 'with cross-reference system notes' do
let(:hidden_cross_reference) { "test commit #{hidden_commit.to_reference(project)}" }
let(:hidden_commit) { hidden_merge_request.project.commit }
before_all do
project.add_developer(user)
project2.add_developer(user)
end
it 'returns only the note that the user should see' do
get api(url, user, personal_access_token: pat)

View File

@ -2,7 +2,7 @@
RSpec.shared_context 'for auto_merge strategy context' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository, maintainers: user) }
let(:mr_merge_if_green_enabled) do
create(:merge_request,
@ -17,10 +17,6 @@ RSpec.shared_context 'for auto_merge strategy context' do
let(:service) { described_class.new(project, user, commit_message: 'Awesome message') }
before_all do
project.add_maintainer(user)
end
before do
allow(MergeWorker).to receive(:with_status).and_return(MergeWorker)
end

View File

@ -5,19 +5,14 @@
RSpec.shared_examples 'issuable record that supports quick actions' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:assignee) { create(:user) }
let_it_be(:user) { create(:user, maintainer_of: project) }
let_it_be(:assignee) { create(:user, maintainer_of: project) }
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:labels) { create_list(:label, 3, project: project) }
let(:base_params) { { title: 'My issuable title' } }
let(:params) { base_params.merge(defined?(default_params) ? default_params : {}).merge(example_params) }
before_all do
project.add_maintainer(user)
project.add_maintainer(assignee)
end
before do
issuable.reload
end

View File

@ -12,6 +12,8 @@ RSpec.describe 'Database::WithoutCheckConstraint' do
let(:model) { table(table_name) }
before do
connection.schema_cache.clear!
# Drop test table in case it's left from a previous execution.
connection.exec_query("DROP TABLE IF EXISTS #{table_name}")
# Model has an attribute called 'name' that can't be NULL.