Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent ec4891efa7
commit fea86fb8bf
@@ -118,3 +118,23 @@ package_hunter-bundler:
    - .reports:rules:package_hunter-bundler
  variables:
    PACKAGE_MANAGER: bundler

xray_scan:
  extends:
    - .default-retry
    - .reports:rules:x-ray
  stage: lint
  needs: []
  image: registry.gitlab.com/gitlab-org/code-creation/repository-x-ray:rc
  variables:
    OUTPUT_DIR: reports
  allow_failure: true
  script:
    - x-ray-scan -p "$CI_PROJECT_DIR" -o "$OUTPUT_DIR"
  artifacts:
    # this line uses xray_scan job output as source for GitLab Rails code gen feature
    reports:
      repository_xray: "$OUTPUT_DIR/*/*.json"
    # this line saves xray_scan job output in raw form for inspection for testing purposes
    paths:
      - "$OUTPUT_DIR/*/*.json"

@@ -2652,6 +2652,18 @@
    - <<: *if-merge-request
      changes: ["Gemfile.lock"]

.reports:rules:x-ray:
  rules:
    - <<: *if-default-branch-refs
      changes: *dependency-patterns
    - <<: *if-merge-request
      changes: *dependency-patterns
      when: never
    - <<: *if-merge-request
      changes: [".gitlab/ci/reports.gitlab-ci.yml"]
      when: manual
    - when: never

################
# Review rules #
################

@@ -7,15 +7,7 @@ import { helpPagePath } from '~/helpers/help_page_helper';
import {
  REPORT_TYPE_SAST,
  REPORT_TYPE_SAST_IAC,
  REPORT_TYPE_DAST,
  REPORT_TYPE_DAST_PROFILES,
  REPORT_TYPE_BREACH_AND_ATTACK_SIMULATION,
  REPORT_TYPE_SECRET_DETECTION,
  REPORT_TYPE_DEPENDENCY_SCANNING,
  REPORT_TYPE_CONTAINER_SCANNING,
  REPORT_TYPE_COVERAGE_FUZZING,
  REPORT_TYPE_CORPUS_MANAGEMENT,
  REPORT_TYPE_API_FUZZING,
} from '~/vue_shared/security_reports/constants';

import configureSastMutation from './graphql/configure_sast.mutation.graphql';

@@ -23,132 +15,35 @@ import configureSastIacMutation from './graphql/configure_iac.mutation.graphql';
import configureSecretDetectionMutation from './graphql/configure_secret_detection.mutation.graphql';

/**
 * Translations & helpPagePaths for Security Configuration Page
 * Translations for Security Configuration Page
 * Make sure to add new scanner translations to the SCANNER_NAMES_MAP below.
 */

export const SAST_NAME = __('Static Application Security Testing (SAST)');
export const SAST_SHORT_NAME = s__('ciReport|SAST');
export const SAST_DESCRIPTION = __('Analyze your source code for known vulnerabilities.');
export const SAST_HELP_PATH = helpPagePath('user/application_security/sast/index');
export const SAST_CONFIG_HELP_PATH = helpPagePath('user/application_security/sast/index', {
  anchor: 'configuration',
});

export const SAST_IAC_NAME = __('Infrastructure as Code (IaC) Scanning');
export const SAST_IAC_SHORT_NAME = s__('ciReport|SAST IaC');
export const SAST_IAC_DESCRIPTION = __(
  'Analyze your infrastructure as code configuration files for known vulnerabilities.',
);
export const SAST_IAC_HELP_PATH = helpPagePath('user/application_security/iac_scanning/index');
export const SAST_IAC_CONFIG_HELP_PATH = helpPagePath(
  'user/application_security/iac_scanning/index',
  {
    anchor: 'configuration',
  },
);

export const DAST_NAME = __('Dynamic Application Security Testing (DAST)');
export const DAST_SHORT_NAME = s__('ciReport|DAST');
export const DAST_DESCRIPTION = s__(
  'ciReport|Analyze a deployed version of your web application for known vulnerabilities by examining it from the outside in. DAST works by simulating external attacks on your application while it is running.',
);
export const DAST_HELP_PATH = helpPagePath('user/application_security/dast/index');
export const DAST_CONFIG_HELP_PATH = helpPagePath('user/application_security/dast/index', {
  anchor: 'enable-automatic-dast-run',
});
export const DAST_BADGE_TEXT = __('Available on demand');
export const DAST_BADGE_TOOLTIP = __(
  'On-demand scans run outside of the DevOps cycle and find vulnerabilities in your projects',
);

export const DAST_PROFILES_NAME = __('DAST profiles');
export const DAST_PROFILES_DESCRIPTION = s__(
  'SecurityConfiguration|Manage profiles for use by DAST scans.',
);
export const DAST_PROFILES_CONFIG_TEXT = s__('SecurityConfiguration|Manage profiles');
export const DAST_HELP_PATH = helpPagePath('user/application_security/dast/index');

export const BAS_BADGE_TEXT = s__('SecurityConfiguration|Incubating feature');
export const BAS_BADGE_TOOLTIP = s__(
  'SecurityConfiguration|Breach and Attack Simulation is an incubating feature extending existing security testing by simulating adversary activity.',
);
export const BAS_DESCRIPTION = s__(
  'SecurityConfiguration|Simulate breach and attack scenarios against your running application by attempting to detect and exploit known vulnerabilities.',
);
export const BAS_HELP_PATH = helpPagePath(
  'user/application_security/breach_and_attack_simulation/index',
);
export const BAS_NAME = s__('SecurityConfiguration|Breach and Attack Simulation (BAS)');
export const BAS_SHORT_NAME = s__('SecurityConfiguration|BAS');

export const BAS_DAST_FEATURE_FLAG_DESCRIPTION = s__(
  'SecurityConfiguration|Enable incubating Breach and Attack Simulation focused features such as callback attacks in your DAST scans.',
);
export const BAS_DAST_FEATURE_FLAG_HELP_PATH = helpPagePath(
  'user/application_security/breach_and_attack_simulation/index',
  { anchor: 'extend-dynamic-application-security-testing-dast' },
);
export const BAS_DAST_FEATURE_FLAG_NAME = s__(
  'SecurityConfiguration|Out-of-Band Application Security Testing (OAST)',
);

export const SECRET_DETECTION_NAME = __('Secret Detection');
export const SECRET_DETECTION_DESCRIPTION = __(
  'Analyze your source code and git history for secrets.',
);
export const SECRET_DETECTION_HELP_PATH = helpPagePath(
  'user/application_security/secret_detection/index',
);
export const SECRET_DETECTION_CONFIG_HELP_PATH = helpPagePath(
  'user/application_security/secret_detection/index',
  { anchor: 'configuration' },
);

export const DEPENDENCY_SCANNING_NAME = __('Dependency Scanning');
export const DEPENDENCY_SCANNING_DESCRIPTION = __(
  'Analyze your dependencies for known vulnerabilities.',
);
export const DEPENDENCY_SCANNING_HELP_PATH = helpPagePath(
  'user/application_security/dependency_scanning/index',
);
export const DEPENDENCY_SCANNING_CONFIG_HELP_PATH = helpPagePath(
  'user/application_security/dependency_scanning/index',
  { anchor: 'configuration' },
);

export const CONTAINER_SCANNING_NAME = __('Container Scanning');
export const CONTAINER_SCANNING_DESCRIPTION = __(
  'Check your Docker images for known vulnerabilities.',
);
export const CONTAINER_SCANNING_HELP_PATH = helpPagePath(
  'user/application_security/container_scanning/index',
);
export const CONTAINER_SCANNING_CONFIG_HELP_PATH = helpPagePath(
  'user/application_security/container_scanning/index',
  { anchor: 'configuration' },
);

export const COVERAGE_FUZZING_NAME = __('Coverage Fuzzing');
export const COVERAGE_FUZZING_DESCRIPTION = __(
  'Find bugs in your code with coverage-guided fuzzing.',
);
export const COVERAGE_FUZZING_HELP_PATH = helpPagePath(
  'user/application_security/coverage_fuzzing/index',
);
export const COVERAGE_FUZZING_CONFIG_HELP_PATH = helpPagePath(
  'user/application_security/coverage_fuzzing/index',
  { anchor: 'enable-coverage-guided-fuzz-testing' },
);

export const CORPUS_MANAGEMENT_NAME = __('Corpus Management');
export const CORPUS_MANAGEMENT_DESCRIPTION = s__(
  'SecurityConfiguration|Manage corpus files used as seed inputs with coverage-guided fuzzing.',
);
export const CORPUS_MANAGEMENT_CONFIG_TEXT = s__('SecurityConfiguration|Manage corpus');

export const API_FUZZING_NAME = __('API Fuzzing');
export const API_FUZZING_DESCRIPTION = __('Find bugs in your code with API fuzzing.');
export const API_FUZZING_HELP_PATH = helpPagePath('user/application_security/api_fuzzing/index');

export const CLUSTER_IMAGE_SCANNING_NAME = s__('ciReport|Cluster Image Scanning');

@@ -166,105 +61,6 @@ export const SCANNER_NAMES_MAP = {
  GENERIC: s__('ciReport|Manually added'),
};

export const securityFeatures = [
  {
    name: SAST_NAME,
    shortName: SAST_SHORT_NAME,
    description: SAST_DESCRIPTION,
    helpPath: SAST_HELP_PATH,
    configurationHelpPath: SAST_CONFIG_HELP_PATH,
    type: REPORT_TYPE_SAST,
  },
  {
    name: SAST_IAC_NAME,
    shortName: SAST_IAC_SHORT_NAME,
    description: SAST_IAC_DESCRIPTION,
    helpPath: SAST_IAC_HELP_PATH,
    configurationHelpPath: SAST_IAC_CONFIG_HELP_PATH,
    type: REPORT_TYPE_SAST_IAC,
  },
  {
    badge: {
      text: DAST_BADGE_TEXT,
      tooltipText: DAST_BADGE_TOOLTIP,
      variant: 'info',
    },
    secondary: {
      type: REPORT_TYPE_DAST_PROFILES,
      name: DAST_PROFILES_NAME,
      description: DAST_PROFILES_DESCRIPTION,
      configurationText: DAST_PROFILES_CONFIG_TEXT,
    },
    name: DAST_NAME,
    shortName: DAST_SHORT_NAME,
    description: DAST_DESCRIPTION,
    helpPath: DAST_HELP_PATH,
    configurationHelpPath: DAST_CONFIG_HELP_PATH,
    type: REPORT_TYPE_DAST,
    anchor: 'dast',
  },
  {
    name: DEPENDENCY_SCANNING_NAME,
    description: DEPENDENCY_SCANNING_DESCRIPTION,
    helpPath: DEPENDENCY_SCANNING_HELP_PATH,
    configurationHelpPath: DEPENDENCY_SCANNING_CONFIG_HELP_PATH,
    type: REPORT_TYPE_DEPENDENCY_SCANNING,
    anchor: 'dependency-scanning',
  },
  {
    name: CONTAINER_SCANNING_NAME,
    description: CONTAINER_SCANNING_DESCRIPTION,
    helpPath: CONTAINER_SCANNING_HELP_PATH,
    configurationHelpPath: CONTAINER_SCANNING_CONFIG_HELP_PATH,
    type: REPORT_TYPE_CONTAINER_SCANNING,
  },
  {
    name: SECRET_DETECTION_NAME,
    description: SECRET_DETECTION_DESCRIPTION,
    helpPath: SECRET_DETECTION_HELP_PATH,
    configurationHelpPath: SECRET_DETECTION_CONFIG_HELP_PATH,
    type: REPORT_TYPE_SECRET_DETECTION,
  },
  {
    name: API_FUZZING_NAME,
    description: API_FUZZING_DESCRIPTION,
    helpPath: API_FUZZING_HELP_PATH,
    type: REPORT_TYPE_API_FUZZING,
  },
  {
    name: COVERAGE_FUZZING_NAME,
    description: COVERAGE_FUZZING_DESCRIPTION,
    helpPath: COVERAGE_FUZZING_HELP_PATH,
    configurationHelpPath: COVERAGE_FUZZING_CONFIG_HELP_PATH,
    type: REPORT_TYPE_COVERAGE_FUZZING,
    secondary: {
      type: REPORT_TYPE_CORPUS_MANAGEMENT,
      name: CORPUS_MANAGEMENT_NAME,
      description: CORPUS_MANAGEMENT_DESCRIPTION,
      configurationText: CORPUS_MANAGEMENT_CONFIG_TEXT,
    },
  },
  {
    anchor: 'bas',
    badge: {
      alwaysDisplay: true,
      text: BAS_BADGE_TEXT,
      tooltipText: BAS_BADGE_TOOLTIP,
      variant: 'info',
    },
    description: BAS_DESCRIPTION,
    name: BAS_NAME,
    helpPath: BAS_HELP_PATH,
    secondary: {
      configurationHelpPath: BAS_DAST_FEATURE_FLAG_HELP_PATH,
      description: BAS_DAST_FEATURE_FLAG_DESCRIPTION,
      name: BAS_DAST_FEATURE_FLAG_NAME,
    },
    shortName: BAS_SHORT_NAME,
    type: REPORT_TYPE_BREACH_AND_ATTACK_SIMULATION,
  },
];

export const featureToMutationMap = {
  [REPORT_TYPE_SAST]: {
    mutationId: 'configureSast',

@@ -3,7 +3,6 @@ import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import { parseBooleanDataAttributes } from '~/lib/utils/dom_utils';
import SecurityConfigurationApp from './components/app.vue';
import { securityFeatures } from './constants';
import { augmentFeatures } from './utils';

export const initSecurityConfiguration = (el) => {

@@ -28,10 +27,7 @@ export const initSecurityConfiguration = (el) => {
    vulnerabilityTrainingDocsPath,
  } = el.dataset;

  const { augmentedSecurityFeatures } = augmentFeatures(
    securityFeatures,
    features ? JSON.parse(features) : [],
  );
  const { augmentedSecurityFeatures } = augmentFeatures(features ? JSON.parse(features) : []);

  return new Vue({
    el,

@@ -1,33 +1,41 @@
import { isEmpty } from 'lodash';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { SCANNER_NAMES_MAP } from '~/security_configuration/constants';
import { REPORT_TYPE_DAST } from '~/vue_shared/security_reports/constants';

/**
 * This function takes in 3 arrays of objects, securityFeatures and features.
 * securityFeatures are static arrays living in the constants.
 * This function takes in an array of features.
 * features is dynamic and coming from the backend.
 * This function builds a superset of those arrays.
 * It looks for matching keys within the dynamic and the static arrays
 * and will enrich the objects with the available static data.
 * @param [{}] securityFeatures
 * securityFeatures is nested in features and is a static array living in backend constants.
 * This function takes the nested securityFeatures config and flattens it to the top-level object.
 * It then filters out any scanner features that lack a security config for rendering in the UI.
 * @param [{}] features
 * @returns {Object} Object with enriched features from constants divided into Security and Compliance Features
 */

export const augmentFeatures = (securityFeatures, features = []) => {
export const augmentFeatures = (features = []) => {
  const featuresByType = features.reduce((acc, feature) => {
    acc[feature.type] = convertObjectPropsToCamelCase(feature, { deep: true });
    return acc;
  }, {});

  /**
   * Track feature configs that are used as nested elements in the UI
   * so they aren't rendered at the top level as a separate card.
   */
  const secondaryFeatures = [];

  // Modify each feature
  const augmentFeature = (feature) => {
    const augmented = {
      ...feature,
      ...featuresByType[feature.type],
    };

    // Secondary layer copies some values from the first layer
    if (augmented.secondary) {
      augmented.secondary = { ...augmented.secondary, ...featuresByType[feature.secondary.type] };
      secondaryFeatures.push(feature.secondary.type);
    }

    if (augmented.type === REPORT_TYPE_DAST && !augmented.onDemandAvailable) {

@@ -41,8 +49,20 @@ export const augmentFeatures = (securityFeatures, features = []) => {
    return augmented;
  };

  // Filter out any features that lack a security feature definition or are used as a nested UI element
  const filterFeatures = (feature) => {
    return !secondaryFeatures.includes(feature.type) && !isEmpty(feature.securityFeatures || {});
  };

  // Convert backend-provided properties to camelCase, and spread the nested security config to the root
  // level for UI rendering.
  const flattenFeatures = (feature) => {
    const flattenedFeature = convertObjectPropsToCamelCase(feature, { deep: true });
    return augmentFeature({ ...flattenedFeature, ...flattenedFeature.securityFeatures });
  };

  return {
    augmentedSecurityFeatures: securityFeatures.map((feature) => augmentFeature(feature)),
    augmentedSecurityFeatures: features.map(flattenFeatures).filter(filterFeatures),
  };
};

@@ -2,24 +2,41 @@
= content_for :head do
  = stylesheet_link_tag 'mailers/highlighted_diff_email'

%table{ border: "0", cellpadding: "0", cellspacing: "0", style: "width:100%;margin:0 auto;border-collapse:separate;border-spacing:0;" }
  %tbody
    %tr
      %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;background-color:#ffffff;text-align:left;overflow:hidden;" }
        %table{ border: "0", cellpadding: "0", cellspacing: "0", style: "width:100%;border-collapse:separate;border-spacing:0;" }
          %tbody
            %tr
              %td{ style: "color:#333333;border-bottom:1px solid #ededed;font-weight:bold;line-height:1.4;padding: 20px 0;" }
                - mr_link = link_to(@merge_request.to_reference(@project), project_merge_request_url(@project, @merge_request))
                - mr_author_link = link_to(@author_name, user_url(@author))
                = _('Merge request %{mr_link} was reviewed by %{mr_author}').html_safe % { mr_link: mr_link, mr_author: mr_author_link }
            %tr
              %td{ style: "overflow:hidden;line-height:1.4;display:grid;" }
                - @notes.each do |note|
                  -# Get preloaded note discussion
                  - discussion = @discussions[note.discussion_id] if note.part_of_discussion?
                  -# Preload project for discussion's first note
                  - discussion.first_note.project = @project if discussion&.first_note
                  - target_url = project_merge_request_url(@project, @merge_request, anchor: "note_#{note.id}")
                  = render 'note_email', note: note, diff_limit: 3, target_url: target_url, note_style: "border-bottom:1px solid #ededed; padding-bottom: 1em;", include_stylesheet_link: false, discussion: discussion, author: @author
        = render_if_exists 'notify/review_summary'
- if Feature.enabled?(:enhanced_review_email, @project, type: :gitlab_com_derisk)
  %div{ style: "color:#333333;border-bottom:8px solid #ededed;font-weight:bold;line-height:1.4;padding: 20px 0;" }
    - mr_link = link_to(@merge_request.to_reference(@project), project_merge_request_url(@project, @merge_request))
    - mr_author_link = link_to(@author_name, user_url(@author))
    = _('Merge request %{mr_link} was reviewed by %{mr_author}').html_safe % { mr_link: mr_link, mr_author: mr_author_link }

  - @notes.each do |note|
    -# Get preloaded note discussion
    - discussion = @discussions[note.discussion_id] if note.part_of_discussion?
    -# Preload project for discussion's first note
    - discussion.first_note.project = @project if discussion&.first_note
    - target_url = project_merge_request_url(@project, @merge_request, anchor: "note_#{note.id}")
    = render 'note_email', note: note, diff_limit: 3, target_url: target_url, note_style: "border-bottom:4px solid #ededed; padding-bottom: 1em;", include_stylesheet_link: false, discussion: discussion, author: @author
  = render_if_exists 'notify/review_summary'

- else

  %table{ border: "0", cellpadding: "0", cellspacing: "0", style: "width:100%;margin:0 auto;border-collapse:separate;border-spacing:0;" }
    %tbody
      %tr
        %td{ style: "font-family:'Helvetica Neue',Helvetica,Arial,sans-serif;background-color:#ffffff;text-align:left;overflow:hidden;" }
          %table{ border: "0", cellpadding: "0", cellspacing: "0", style: "width:100%;border-collapse:separate;border-spacing:0;" }
            %tbody
              %tr
                %td{ style: "color:#333333;border-bottom:1px solid #ededed;font-weight:bold;line-height:1.4;padding: 20px 0;" }
                  - mr_link = link_to(@merge_request.to_reference(@project), project_merge_request_url(@project, @merge_request))
                  - mr_author_link = link_to(@author_name, user_url(@author))
                  = _('Merge request %{mr_link} was reviewed by %{mr_author}').html_safe % { mr_link: mr_link, mr_author: mr_author_link }
              %tr
                %td{ style: "overflow:hidden;line-height:1.4;display:grid;" }
                  - @notes.each do |note|
                    -# Get preloaded note discussion
                    - discussion = @discussions[note.discussion_id] if note.part_of_discussion?
                    -# Preload project for discussion's first note
                    - discussion.first_note.project = @project if discussion&.first_note
                    - target_url = project_merge_request_url(@project, @merge_request, anchor: "note_#{note.id}")
                    = render 'note_email', note: note, diff_limit: 3, target_url: target_url, note_style: "border-bottom:1px solid #ededed; padding-bottom: 1em;", include_stylesheet_link: false, discussion: discussion, author: @author
          = render_if_exists 'notify/review_summary'

@@ -0,0 +1,9 @@
---
name: enhanced_review_email
feature_issue_url:
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141187
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/437582
milestone: '16.8'
group: group::code review
type: gitlab_com_derisk
default_enabled: false

@@ -0,0 +1,52 @@
# frozen_string_literal: true

# We force require this to trigger the autoload and so that our monkeypatch will
# be applied in the correct order, which is only after the class is loaded.
require 'fog/storage/google_json/requests/list_objects'

#
# Monkey patching list_objects to support the match_glob parameter
# See https://github.com/fog/fog-google/issues/614
#
module Fog
  module Storage
    class GoogleJSON
      class Real
        # This is an identical copy of
        # https://github.com/fog/fog-google/blob/v1.19.0/lib/fog/storage/google_json/requests/list_objects.rb
        # with just match_glob added to the allowed_opts
        def list_objects(bucket, options = {})
          # rubocop: disable Style/PercentLiteralDelimiters -- this is an exact copy of the original method, just with match_glob added here.
          allowed_opts = %i(
            delimiter
            match_glob
            max_results
            page_token
            prefix
            projection
            versions
          )
          # rubocop: enable Style/PercentLiteralDelimiters

          # rubocop: disable Gitlab/ModuleWithInstanceVariables -- this is an exact copy of the original method
          @storage_json.list_objects(
            bucket,
            **options.select { |k, _| allowed_opts.include? k }
          )
          # rubocop: enable Gitlab/ModuleWithInstanceVariables
        end
      end
    end
  end
end

# We just need to add the match_glob attribute support here
module Fog
  module Storage
    class GoogleJSON
      class Files < Fog::Collection
        attribute :match_glob, aliases: "matchGlob"
      end
    end
  end
end

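With the patch applied, callers can push the `@final` glob filter down to the GCS API instead of listing every object and filtering client-side. A minimal sketch, assuming placeholder credentials and bucket name (the real cleanup task wires these in from GitLab's object storage configuration):

```ruby
require 'fog/google'

# Placeholder connection settings, not GitLab's actual configuration.
storage = Fog::Storage::Google.new(
  google_project: 'example-project',
  google_json_key_location: '/path/to/key.json'
)

# With match_glob now in allowed_opts, the API itself returns only objects
# under a third-level `@final/` directory.
objects = storage.list_objects(
  'example-artifacts-bucket',
  match_glob: '*/*/*/@final/**',
  max_results: 200
)

Array(objects.items).each { |object| puts object.name }
```
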
@@ -876,27 +876,27 @@ POST /projects/:id/merge_requests/:merge_request_iid/discussions

Parameters for all comments:

| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `body` | string | yes | The content of the thread. |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding). |
| `merge_request_iid` | integer | yes | The IID of a merge request. |
| `position[base_sha]` | string | yes | Base commit SHA in the source branch. |
| `position[head_sha]` | string | yes | SHA referencing HEAD of this merge request. |
| `position[start_sha]` | string | yes | SHA referencing commit in target branch. |
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `body` | string | yes | The content of the thread. |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding). |
| `merge_request_iid` | integer | yes | The IID of a merge request. |
| `position[base_sha]` | string | yes (if `position*` is supplied) | Base commit SHA in the source branch. |
| `position[head_sha]` | string | yes (if `position*` is supplied) | SHA referencing HEAD of this merge request. |
| `position[start_sha]` | string | yes (if `position*` is supplied) | SHA referencing commit in target branch. |
| `position[new_path]` | string | yes (if the position type is `text`) | File path after change. |
| `position[old_path]` | string | yes (if the position type is `text`) | File path before change. |
| `position[position_type]` | string | yes | Type of the position reference. Allowed values: `text` or `image`. |
| `commit_id` | string | no | SHA referencing commit to start this thread on. |
| `created_at` | string | no | Date time string, ISO 8601 formatted, such as `2016-03-11T03:45:40Z`. Requires administrator or project/group owner rights. |
| `position` | hash | no | Position when creating a diff note. |
| `position[new_line]` | integer | no | For `text` diff notes, the line number after change. |
| `position[old_line]` | integer | no | For `text` diff notes, the line number before change. |
| `position[line_range]` | hash | no | Line range for a multi-line diff note. |
| `position[width]` | integer | no | For `image` diff notes, width of the image. |
| `position[height]` | integer | no | For `image` diff notes, height of the image. |
| `position[x]` | float | no | For `image` diff notes, X coordinate. |
| `position[y]` | float | no | For `image` diff notes, Y coordinate. |
| `position[position_type]` | string | yes (if `position*` is supplied) | Type of the position reference. Allowed values: `text` or `image`. |
| `commit_id` | string | no | SHA referencing commit to start this thread on. |
| `created_at` | string | no | Date time string, ISO 8601 formatted, such as `2016-03-11T03:45:40Z`. Requires administrator or project/group owner rights. |
| `position` | hash | no | Position when creating a diff note. |
| `position[new_line]` | integer | no | For `text` diff notes, the line number after change. |
| `position[old_line]` | integer | no | For `text` diff notes, the line number before change. |
| `position[line_range]` | hash | no | Line range for a multi-line diff note. |
| `position[width]` | integer | no | For `image` diff notes, width of the image. |
| `position[height]` | integer | no | For `image` diff notes, height of the image. |
| `position[x]` | float | no | For `image` diff notes, X coordinate. |
| `position[y]` | float | no | For `image` diff notes, Y coordinate. |

#### Create a new thread on the overview page

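Per the parameter table above, the `position*` fields travel together: if any is supplied, the SHAs and position type become required. A minimal sketch of creating a positioned `text` thread with only documented parameters (host, token, IDs, paths, and SHAs are placeholders):

```ruby
require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/42/merge_requests/7/discussions')

params = {
  'body' => 'This line needs a guard clause.',
  'position[position_type]' => 'text',
  'position[base_sha]' => '1a2b3c4d',   # placeholder SHAs
  'position[head_sha]' => '5e6f7a8b',
  'position[start_sha]' => '1a2b3c4d',
  'position[new_path]' => 'app/models/user.rb',
  'position[old_path]' => 'app/models/user.rb',
  'position[new_line]' => 5
}

# Authenticate with a personal access token via the PRIVATE-TOKEN header.
response = Net::HTTP.post(uri, URI.encode_www_form(params), 'PRIVATE-TOKEN' => ENV.fetch('GITLAB_TOKEN'))
puts response.code # expect 201 on success
```
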
@@ -1287,17 +1287,17 @@ POST /projects/:id/repository/commits/:commit_id/discussions

Parameters:

| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `body` | string | yes | The content of the thread. |
| `commit_id` | string | yes | The SHA of a commit. |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding). |
| `position[base_sha]` | string | yes | SHA of the parent commit. |
| `position[head_sha]` | string | yes | The SHA of this commit. Same as `commit_id`. |
| `position[start_sha]` | string | yes | SHA of the parent commit. |
| `position[position_type]` | string | yes | Type of the position reference. Allowed values: `text` or `image`. |
| `created_at` | string | no | Date time string, ISO 8601 formatted, such as `2016-03-11T03:45:40Z`. Requires administrator or project/group owner rights. |
| `position` | hash | no | Position when creating a diff note. |
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `body` | string | yes | The content of the thread. |
| `commit_id` | string | yes | The SHA of a commit. |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding). |
| `position[base_sha]` | string | yes (if `position*` is supplied) | SHA of the parent commit. |
| `position[head_sha]` | string | yes (if `position*` is supplied) | The SHA of this commit. Same as `commit_id`. |
| `position[start_sha]` | string | yes (if `position*` is supplied) | SHA of the parent commit. |
| `position[position_type]` | string | yes (if `position*` is supplied) | Type of the position reference. Allowed values: `text` or `image`. |
| `created_at` | string | no | Date time string, ISO 8601 formatted, such as `2016-03-11T03:45:40Z`. Requires administrator or project/group owner rights. |
| `position` | hash | no | Position when creating a diff note. |
| `position[new_path]` | string | no | File path after change. |
| `position[new_line]` | integer | no | Line number after change. |

@@ -263,7 +263,7 @@ including a large number of false positives.
| `CS_DISABLE_LANGUAGE_VULNERABILITY_SCAN` | `"true"` | Disable scanning for language-specific packages installed in the scanned image. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/345434) in GitLab 14.6. | All |
| `CS_DOCKER_INSECURE` | `"false"` | Allow access to secure Docker registries using HTTPS without validating the certificates. | All |
| `CS_DOCKERFILE_PATH` | `Dockerfile` | The path to the `Dockerfile` to use for generating remediations. By default, the scanner looks for a file named `Dockerfile` in the root directory of the project. You should configure this variable only if your `Dockerfile` is in a non-standard location, such as a subdirectory. See [Solutions for vulnerabilities](#solutions-for-vulnerabilities-auto-remediation) for more details. | All |
| `CS_IGNORE_STATUSES` | `""` | Force the analyzer to ignore vulnerability findings with specified statuses in a comma-delimited list. For `trivy`, the following values are allowed: `unknown,not_affected,affected,fixed,under_investigation,will_not_fix,fix_deferred,end_of_life`. For `grype`, the following values are allowed: `fixed,not-fixed,unknown,wont-fix` | All |
| `CS_IGNORE_STATUSES`<sup><b><a href="#notes-regarding-cs-ignore-statuses">1</a></b></sup> | `""` | Force the analyzer to ignore vulnerability findings with specified statuses in a comma-delimited list. For `trivy`, the following values are allowed: `unknown,not_affected,affected,fixed,under_investigation,will_not_fix,fix_deferred,end_of_life`. For `grype`, the following values are allowed: `fixed,not-fixed,unknown,wont-fix` | All |
| `CS_IGNORE_UNFIXED` | `"false"` | Ignore vulnerabilities that are not fixed. | All |
| `CS_IMAGE` | `$CI_APPLICATION_REPOSITORY:$CI_APPLICATION_TAG` | The Docker image to be scanned. If set, this variable overrides the `$CI_APPLICATION_REPOSITORY` and `$CI_APPLICATION_TAG` variables. | All |
| `CS_IMAGE_SUFFIX` | `""` | Suffix added to `CS_ANALYZER_IMAGE`. If set to `-fips`, `FIPS-enabled` image is used for scan. See [FIPS-enabled images](#fips-enabled-images) for more details. [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/7630) in GitLab 14.10. | All |

@@ -275,6 +275,15 @@ including a large number of false positives.
| `CS_TRIVY_JAVA_DB` | `"ghcr.io/aquasecurity/trivy-java-db"` | Specify an alternate location for the [trivy-java-db](https://github.com/aquasecurity/trivy-java-db) vulnerability database. | Trivy |
| `SECURE_LOG_LEVEL` | `info` | Set the minimum logging level. Messages of this logging level or higher are output. From highest to lowest severity, the logging levels are: `fatal`, `error`, `warn`, `info`, `debug`. | All |

<ol>
  <li>
    <a id="notes-regarding-cs-ignore-statuses"></a>
    <p>
      Fix status information is highly dependent on accurate fix availability data from the software vendor and container image operating system package metadata. It is also subject to interpretation by individual container scanners. In cases where a container scanner misreports the availability of a fixed package for a vulnerability, using `CS_IGNORE_STATUSES` can lead to false positive or false negative filtering of findings when this setting is enabled.
    </p>
  </li>
</ol>

### Supported distributions

Support depends on which scanner is used:

@@ -398,3 +398,78 @@ scan_execution_policy:
    script:
      - echo "Hello World"
```

### Security policy scopes

> The `policy_scope` field was [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135398) in GitLab 16.7 [with a flag](../../../administration/feature_flags.md) named `security_policies_policy_scope`. Disabled by default.

FLAG:
On self-managed GitLab, by default this feature is not available. To make it available,
an administrator can [enable the feature flag](../../../administration/feature_flags.md)
named `security_policies_policy_scope`.
On GitLab.com, this feature is not available.

Security policy enforcement depends first on establishing a link between the group, subgroup, or
project on which you want to enforce policies, and the security policy project that contains the
policies. For example, if you are linking policies to a group, a group owner must create the link to
the security policy project. Then, all policies in the security policy project are inherited by all
projects in the group.

You can refine a security policy's scope to:

- _Include_ only projects containing a compliance framework label.
- _Include_ or _exclude_ selected projects from enforcement.

#### Policy scope schema

| Field | Type | Required | Possible values | Description |
|-------|------|----------|-----------------|-------------|
| `policy_scope` | `object` | false | `compliance_frameworks`, `projects` | Scopes the policy based on compliance framework labels or projects you define. |

#### `policy_scope` scope type

| Field | Type | Possible values | Description |
|-------|------|-----------------|-------------|
| `compliance_frameworks` | `object` | `ids` | List of IDs of the compliance frameworks in scope of enforcement, in an `ids` array. |
| `projects` | `object` | `including`, `excluding` | Use `excluding:` or `including:` then list the IDs of the projects you wish to include or exclude, in an `ids` array. |

#### Example `policy.yml` with security policy scopes

```yaml
---
scan_execution_policy:
- name: Enforce DAST in every release pipeline
  description: This policy enforces pipeline configuration to have a job with DAST scan for release branches
  enabled: true
  rules:
  - type: pipeline
    branches:
    - release/*
  actions:
  - scan: dast
    scanner_profile: Scanner Profile A
    site_profile: Site Profile B
  policy_scope:
    compliance_frameworks:
      ids:
      - 2
      - 11
- name: Enforce Secret Detection and Container Scanning in every default branch pipeline
  description: This policy enforces pipeline configuration to have a job with Secret Detection and Container Scanning scans for the default branch
  enabled: true
  rules:
  - type: pipeline
    branches:
    - main
  actions:
  - scan: secret_detection
  - scan: sast
    variables:
      SAST_EXCLUDED_ANALYZERS: brakeman
  policy_scope:
    projects:
      excluding:
        ids:
        - 24
        - 27
```

@@ -362,6 +362,78 @@ We have identified in [epic 11020](https://gitlab.com/groups/gitlab-org/-/epics/
- Findings or errors that cause approval to be required on a scan result policy may not be evident in the Security MR Widget. By using `merge base` in [issue 428518](https://gitlab.com/gitlab-org/gitlab/-/issues/428518) some cases will be addressed. We will additionally be [displaying more granular details](https://gitlab.com/groups/gitlab-org/-/epics/11185) about what caused security policy violations.
- Security policy violations are distinct compared to findings displayed in the MR widgets. Some violations may not be present in the MR widget. We are working to harmonize our features in [epic 11020](https://gitlab.com/groups/gitlab-org/-/epics/11020) and to display policy violations explicitly in merge requests in [epic 11185](https://gitlab.com/groups/gitlab-org/-/epics/11185).

## Experimental features **(EXPERIMENT)**

### Security policy scopes

> The `policy_scope` field was [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135398) in GitLab 16.7 [with a flag](../../../administration/feature_flags.md) named `security_policies_policy_scope`. Disabled by default.

FLAG:
On self-managed GitLab, by default this feature is not available. To make it available,
an administrator can [enable the feature flag](../../../administration/feature_flags.md)
named `security_policies_policy_scope`.
On GitLab.com, this feature is not available.

Security policy enforcement depends first on establishing a link between the group, subgroup, or
project on which you want to enforce policies, and the security policy project that contains the
policies. For example, if you are linking policies to a group, a group owner must create the link to
the security policy project. Then, all policies in the security policy project are inherited by all
projects in the group.

You can refine a security policy's scope to:

- _Include_ only projects containing a compliance framework label.
- _Include_ or _exclude_ selected projects from enforcement.

#### Policy scope schema

| Field | Type | Required | Possible values | Description |
|-------|------|----------|-----------------|-------------|
| `policy_scope` | `object` | false | `compliance_frameworks`, `projects` | Scopes the policy based on compliance framework labels or projects you define. |

#### `policy_scope` scope type

| Field | Type | Possible values | Description |
|-------|------|-----------------|-------------|
| `compliance_frameworks` | `object` | `ids` | List of IDs of the compliance frameworks in scope of enforcement, in an `ids` array. |
| `projects` | `object` | `including`, `excluding` | Use `excluding:` or `including:` then list the IDs of the projects you wish to include or exclude, in an `ids` array. |

#### Example `policy.yml` with security policy scopes

```yaml
---
scan_result_policy:
- name: critical vulnerability CS approvals
  description: critical severity level only for container scanning
  enabled: true
  rules:
  - type: scan_finding
    branches:
    - main
    scanners:
    - container_scanning
    vulnerabilities_allowed: 1
    severity_levels:
    - critical
    vulnerability_states:
    - newly_detected
  actions:
  - type: require_approval
    approvals_required: 1
    user_approvers:
    - adalberto.dare
  policy_scope:
    compliance_frameworks:
      ids:
      - 2
      - 11
    projects:
      including:
        ids:
        - 24
        - 27
```

## Troubleshooting

### Merge request rules widget shows a scan result policy is invalid or duplicated **(ULTIMATE SELF)**

@@ -40,12 +40,11 @@ shows a total of 15 months for the chart in the GitLab.org group.

## Enhanced issue analytics **(ULTIMATE ALL)**

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/233905/) in GitLab 16.4 [with a flag](../../../administration/feature_flags.md) named `issues_completed_analytics_feature_flag`. Disabled by default.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/233905/) in GitLab 16.3 [with a flag](../../../administration/feature_flags.md) named `issues_completed_analytics_feature_flag`. Disabled by default.
> - [Enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/437542) in GitLab 16.8.

FLAG:
On self-managed GitLab, by default this feature is not available. To make it available, an administrator can
[enable the feature flag](../../../administration/feature_flags.md) named `issues_completed_analytics_feature_flag`. On GitLab.com, this feature is not
available. This feature is not ready for production use.
On self-managed GitLab, by default this feature is available. To hide the feature, an administrator can [disable the feature flag](../../../administration/feature_flags.md) named `issues_completed_analytics_feature_flag`. On GitLab.com, this feature is available.

Enhanced issue analytics display the additional metric "Issues closed", which represents the total number of resolved issues in your group over a selected period.
You can use this metric to improve the overall turn-around time and value delivered to your customers.

@@ -0,0 +1,62 @@
# frozen_string_literal: true

module Gitlab
  module Cleanup
    module OrphanJobArtifactFinalObjects
      class JobArtifactObject
        include Gitlab::Utils::StrongMemoize

        attr_reader :path, :size

        def initialize(fog_file, bucket_prefix: nil)
          @fog_file = fog_file
          @path = fog_file.key
          @size = fog_file.content_length
          @bucket_prefix = bucket_prefix
        end

        def in_final_location?
          path.include?('/@final/')
        end

        def orphan?
          !job_artifact_record_exists? && !pending_direct_upload?
        end

        def delete
          fog_file.destroy
        end

        private

        attr_reader :fog_file, :bucket_prefix

        def job_artifact_record_exists?
          ::Ci::JobArtifact.exists?(file_final_path: path_without_bucket_prefix) # rubocop:disable CodeReuse/ActiveRecord -- too simple and specific for this usecase to be its own AR method
        end

        def pending_direct_upload?
          ::ObjectStorage::PendingDirectUpload.exists?(:artifacts, path_without_bucket_prefix) # rubocop:disable CodeReuse/ActiveRecord -- `exists?` here is not the same as the AR method
        end

        def path_without_bucket_prefix
          # `path` contains the fog file's key. It is the object path relative to the artifacts bucket, for example:
          # aa/bb/abc123/@final/12/34/def12345
          #
          # But if the instance is configured to only use a single bucket combined with bucket prefixes,
          # for example if the `bucket_prefix` is "my/artifacts", the `path` would then look like:
          # my/artifacts/aa/bb/abc123/@final/12/34/def12345
          #
          # For `orphan?` to function properly, we need to strip the bucket_prefix
          # off of the `path` because we need this to match the correct job artifact record by
          # its `file_final_path` column, or the pending direct upload redis entry, which both contain
          # the object's path without `bucket_prefix`.
          #
          # If bucket_prefix is not present, this will just return the original path.
          Pathname.new(path).relative_path_from(bucket_prefix.to_s).to_s
        end
        strong_memoize_attr :path_without_bucket_prefix
      end
    end
  end
end

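The prefix stripping leans on `Pathname#relative_path_from` (string arguments need Ruby 2.7+). A small sketch of the behavior the comments above describe, using their example paths:

```ruby
require 'pathname'

# With a bucket prefix configured, only the object path relative to the
# prefix remains, which is what `file_final_path` stores.
Pathname.new('my/artifacts/aa/bb/abc123/@final/12/34/def12345')
  .relative_path_from('my/artifacts').to_s
# => "aa/bb/abc123/@final/12/34/def12345"

# With no prefix, `bucket_prefix.to_s` is "" and the path comes back unchanged.
Pathname.new('aa/bb/abc123/@final/12/34/def12345').relative_path_from('').to_s
# => "aa/bb/abc123/@final/12/34/def12345"
```
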
@@ -0,0 +1,27 @@
# frozen_string_literal: true

module Gitlab
  module Cleanup
    module OrphanJobArtifactFinalObjects
      module Paginators
        class Aws < BasePaginator
          def page_marker_filter_key
            :marker
          end

          def max_results_filter_key
            :max_keys
          end

          def last_page?(batch)
            batch.empty?
          end

          def get_next_marker(batch)
            batch.last.key
          end
        end
      end
    end
  end
end

@@ -0,0 +1,49 @@
# frozen_string_literal: true

module Gitlab
  module Cleanup
    module OrphanJobArtifactFinalObjects
      module Paginators
        class BasePaginator
          BATCH_SIZE = Rails.env.development? ? 5 : 200

          def initialize(bucket_prefix: nil)
            @bucket_prefix = bucket_prefix
          end

          def filters(marker)
            {
              page_marker_filter_key => marker,
              max_results_filter_key => BATCH_SIZE,
              prefix: bucket_prefix
            }
          end

          def last_page?(batch)
            # Fog providers have different indicators of last page, so we want to delegate this
            # knowledge to the specific provider implementation.
            raise NotImplementedError, "Subclasses must define `last_page?(batch)` instance method"
          end

          def get_next_marker(batch)
            # Fog providers have different ways to get the next marker, so we want to delegate this
            # knowledge to the specific provider implementation.
            raise NotImplementedError, "Subclasses must define `get_next_marker(batch)` instance method"
          end

          private

          attr_reader :bucket_prefix

          def page_marker_filter_key
            raise NotImplementedError, "Subclasses must define `page_marker_filter_key` instance method"
          end

          def max_results_filter_key
            raise NotImplementedError, "Subclasses must define `max_results_filter_key` instance method"
          end
        end
      end
    end
  end
end

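A condensed sketch of how a caller drives this contract; the cleaner later in this commit does exactly this loop, with Redis checkpointing added. Here `files` stands in for a Fog directory's file collection:

```ruby
# Walk every page of a bucket listing using a paginator subclass.
def each_batch(paginator, files)
  marker = nil

  loop do
    # For the AWS paginator, filters(nil) is { marker: nil, max_keys: 200, prefix: nil }.
    batch = files.all(paginator.filters(marker))
    yield batch

    break if paginator.last_page?(batch)

    marker = paginator.get_next_marker(batch)
  end
end
```
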
@@ -0,0 +1,32 @@
# frozen_string_literal: true

module Gitlab
  module Cleanup
    module OrphanJobArtifactFinalObjects
      module Paginators
        class Google < BasePaginator
          def filters(marker)
            pattern = [bucket_prefix, '*/*/*/@final/**'].compact.join('/')
            super.merge(match_glob: pattern)
          end

          def page_marker_filter_key
            :page_token
          end

          def max_results_filter_key
            :max_results
          end

          def last_page?(batch)
            batch.next_page_token.nil?
          end

          def get_next_marker(batch)
            batch.next_page_token
          end
        end
      end
    end
  end
end

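The `compact.join('/')` in `filters` keeps the glob correct whether or not a bucket prefix is configured; a quick illustration:

```ruby
# No bucket prefix configured: `compact` drops the nil.
[nil, '*/*/*/@final/**'].compact.join('/')
# => "*/*/*/@final/**"

# Single-bucket instance with a prefix such as "my/artifacts":
['my/artifacts', '*/*/*/@final/**'].compact.join('/')
# => "my/artifacts/*/*/*/@final/**"
```
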
@@ -0,0 +1,161 @@
# frozen_string_literal: true

module Gitlab
  module Cleanup
    class OrphanJobArtifactFinalObjectsCleaner
      include Gitlab::Utils::StrongMemoize

      UnsupportedProviderError = Class.new(StandardError)

      PAGINATORS = {
        google: Gitlab::Cleanup::OrphanJobArtifactFinalObjects::Paginators::Google,
        aws: Gitlab::Cleanup::OrphanJobArtifactFinalObjects::Paginators::Aws
      }.freeze

      LAST_PAGE_MARKER_REDIS_KEY = 'orphan-job-artifact-objects-cleanup-last-page-marker'

      def initialize(provider: nil, dry_run: true, force_restart: false, logger: Gitlab::AppLogger)
        @paginator = determine_paginator!(provider)
        @dry_run = dry_run
        @force_restart = force_restart
        @logger = logger
      end

      def run!
        log_info('Looking for orphan job artifact objects under the `@final` directories')

        each_final_object do |object|
          next unless object.orphan?

          object.delete unless dry_run
          log_info("Delete #{object.path} (#{object.size} bytes)")
        end

        log_info("Done.")
      end

      private

      attr_reader :paginator, :dry_run, :force_restart, :logger

      def determine_paginator!(provided_provider)
        # provider can be nil if the user didn't specify it when running the cleanup task.
        # In this case, we automatically determine the provider based on the object storage configuration.
        provider = provided_provider
        provider ||= configuration.connection.provider
        klass = PAGINATORS.fetch(provider.downcase.to_sym)
        klass.new(bucket_prefix: bucket_prefix)
      rescue KeyError
        msg = if provided_provider.present?
                "The provided provider is unsupported. Please select from #{PAGINATORS.keys.join(', ')}."
              else
                <<-MSG.strip_heredoc
                  The provider found in the object storage configuration is unsupported.
                  Please re-run the task and specify a provider from #{PAGINATORS.keys.join(', ')},
                  whichever is compatible with your provider's object storage API.
                MSG
              end

        raise UnsupportedProviderError, msg
      end

      def each_final_object
        each_batch do |files|
          files.each_file_this_page do |fog_file|
            object = ::Gitlab::Cleanup::OrphanJobArtifactFinalObjects::JobArtifactObject.new(
              fog_file,
              bucket_prefix: bucket_prefix
            )

            # We still need to check here if the object is in the final location because
            # if the provider does not support filtering objects by glob pattern, we will
            # then receive all job artifact objects here, even the ones not in the @final directory.
            yield object if object.in_final_location?
          end
        end
      end

      def each_batch
        next_marker = resume_from_last_page_marker

        loop do
          batch = fetch_batch(next_marker)
          yield batch

          break if paginator.last_page?(batch)

          next_marker = paginator.get_next_marker(batch)
          save_last_page_marker(next_marker)
        end

        clear_last_page_marker
      end

      def fetch_batch(marker)
        page_name = marker ? "marker: #{marker}" : "first page"
        log_info("Loading page (#{page_name})")

        # We are using files.all instead of files.each because we want to track the
        # current page token so that we can resume from it if ever the task is abruptly interrupted.
        artifacts_directory.files.all(
          paginator.filters(marker)
        )
      end

      def resume_from_last_page_marker
        if force_restart
          log_info("Force restarted. Will not resume from last known page marker.")
          nil
        else
          get_last_page_marker
        end
      end

      def get_last_page_marker
        Gitlab::Redis::SharedState.with do |redis|
          marker = redis.get(LAST_PAGE_MARKER_REDIS_KEY)
          log_info("Resuming from last page marker: #{marker}") if marker
          marker
        end
      end

      def save_last_page_marker(marker)
        Gitlab::Redis::SharedState.with do |redis|
          # Set TTL to 1 day (86400 seconds)
          redis.set(LAST_PAGE_MARKER_REDIS_KEY, marker, ex: 86400)
        end
      end

      def clear_last_page_marker
        Gitlab::Redis::SharedState.with do |redis|
          redis.del(LAST_PAGE_MARKER_REDIS_KEY)
        end
      end

      def connection
        ::Fog::Storage.new(configuration['connection'].symbolize_keys)
      end

      def configuration
        Gitlab.config.artifacts.object_store
      end

      def bucket
        configuration.remote_directory
      end

      def bucket_prefix
        configuration.bucket_prefix
      end

      def artifacts_directory
        connection.directories.new(key: bucket)
      end
      strong_memoize_attr :artifacts_directory

      def log_info(msg)
        logger.info("#{'[DRY RUN] ' if dry_run}#{msg}")
      end
    end
  end
end

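A minimal sketch of invoking the cleaner directly, based on the `initialize` signature above; the Rake task wrapper is not part of this hunk, so this call site is an assumption:

```ruby
# Dry run first: logs each orphan object it would delete without touching storage.
cleaner = Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner.new(
  provider: 'google',   # nil auto-detects from the object storage configuration
  dry_run: true,        # pass false to actually delete the orphan objects
  force_restart: false  # true ignores the saved Redis page marker and starts over
)

cleaner.run!
```
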
@@ -12,16 +12,16 @@ module Gitlab
        short_name: _('SAST'),
        description: _('Analyze your source code for known vulnerabilities.'),
        help_path: Gitlab::Routing.url_helpers.help_page_path('user/application_security/sast/index'),
        config_help_path: Gitlab::Routing.url_helpers.help_page_path('user/application_security/sast/index',
        configuration_help_path: Gitlab::Routing.url_helpers.help_page_path('user/application_security/sast/index',
          anchor: 'configuration'),
        type: 'sast'
      },
      sast_iac: {
        name: _('Infrastructure as Code (IaC) Scanning'),
        short_name: _('ciReport|SAST IaC'),
        short_name: s_('ciReport|SAST IaC'),
        description: _('Analyze your infrastructure as code configuration files for known vulnerabilities.'),
        help_path: Gitlab::Routing.url_helpers.help_page_path('user/application_security/iac_scanning/index'),
        config_help_path: Gitlab::Routing.url_helpers.help_page_path(
        configuration_help_path: Gitlab::Routing.url_helpers.help_page_path(
          'user/application_security/iac_scanning/index',
          anchor: 'configuration'),
        type: 'sast_iac'

@@ -36,16 +36,16 @@ module Gitlab
        secondary: {
          type: 'dast_profiles',
          name: _('DAST profiles'),
          description: _('SecurityConfiguration|Manage profiles for use by DAST scans.'),
          configuration_text: _('SecurityConfiguration|Manage profiles')
          description: s_('SecurityConfiguration|Manage profiles for use by DAST scans.'),
          configuration_text: s_('SecurityConfiguration|Manage profiles')
        },
        name: _('Dynamic Application Security Testing (DAST)'),
        short_name: _('ciReport|DAST'),
        description: _('ciReport|Analyze a deployed version of your web application for known vulnerabilities by ' \
          'examining it from the outside in. DAST works by simulating external attacks ' \
          'on your application while it is running.'),
        short_name: s_('ciReport|DAST'),
        description: s_('ciReport|Analyze a deployed version of your web application for known ' \
          'vulnerabilities by examining it from the outside in. DAST works ' \
          'by simulating external attacks on your application while it is running.'),
        help_path: Gitlab::Routing.url_helpers.help_page_path('user/application_security/dast/index'),
        config_help_path: Gitlab::Routing.url_helpers.help_page_path('user/application_security/dast/index',
        configuration_help_path: Gitlab::Routing.url_helpers.help_page_path('user/application_security/dast/index',
          anchor: 'enable-automatic-dast-run'),
        type: 'dast',
        anchor: 'dast'

@@ -55,7 +55,7 @@ module Gitlab
        description: _('Analyze your dependencies for known vulnerabilities.'),
        help_path: Gitlab::Routing.url_helpers.help_page_path(
          'user/application_security/dependency_scanning/index'),
        config_help_path: Gitlab::Routing.url_helpers.help_page_path(
        configuration_help_path: Gitlab::Routing.url_helpers.help_page_path(
          'user/application_security/dependency_scanning/index', anchor: 'configuration'),
        type: 'dependency_scanning',
        anchor: 'dependency-scanning'

@@ -65,7 +65,7 @@ module Gitlab
        description: _('Check your Docker images for known vulnerabilities.'),
        help_path: Gitlab::Routing.url_helpers.help_page_path(
          'user/application_security/container_scanning/index'),
        config_help_path: Gitlab::Routing.url_helpers.help_page_path(
        configuration_help_path: Gitlab::Routing.url_helpers.help_page_path(
          'user/application_security/container_scanning/index', anchor: 'configuration'),
        type: 'container_scanning'
      },

@@ -74,7 +74,7 @@ module Gitlab
        description: _('Analyze your source code and git history for secrets.'),
        help_path: Gitlab::Routing.url_helpers.help_page_path(
          'user/application_security/secret_detection/index'),
        config_help_path: Gitlab::Routing.url_helpers.help_page_path(
        configuration_help_path: Gitlab::Routing.url_helpers.help_page_path(
          'user/application_security/secret_detection/index', anchor: 'configuration'),
        type: 'secret_detection'
      },

@ -90,40 +90,40 @@ module Gitlab
|
|||
description: _('Find bugs in your code with coverage-guided fuzzing.'),
|
||||
help_path: Gitlab::Routing.url_helpers.help_page_path(
|
||||
'user/application_security/coverage_fuzzing/index'),
|
||||
config_help_path: Gitlab::Routing.url_helpers.help_page_path(
|
||||
configuration_help_path: Gitlab::Routing.url_helpers.help_page_path(
|
||||
'user/application_security/coverage_fuzzing/index', anchor: 'enable-coverage-guided-fuzz-testing'),
|
||||
type: 'coverage_fuzzing',
|
||||
secondary: {
|
||||
type: 'corpus_management',
|
||||
name: _('Corpus Management'),
|
||||
description: _('SecurityConfiguration|Manage corpus files used as seed ' \
|
||||
'inputs with coverage-guided fuzzing.'),
|
||||
configuration_text: _('SecurityConfiguration|Manage corpus')
|
||||
description: s_('SecurityConfiguration|Manage corpus files used as seed ' \
|
||||
'inputs with coverage-guided fuzzing.'),
|
||||
configuration_text: s_('SecurityConfiguration|Manage corpus')
|
||||
}
|
||||
},
|
||||
breach_and_attack_simulation: {
|
||||
anchor: 'bas',
|
||||
badge: {
|
||||
always_display: true,
|
||||
text: _('SecurityConfiguration|Incubating feature'),
|
||||
tooltip_text: _('SecurityConfiguration|Breach and Attack Simulation is an incubating ' \
|
||||
'feature extending existing security testing by simulating adversary activity.'),
|
||||
text: s_('SecurityConfiguration|Incubating feature'),
|
||||
tooltip_text: s_('SecurityConfiguration|Breach and Attack Simulation is an incubating ' \
|
||||
'feature extending existing security testing by simulating adversary activity.'),
|
||||
variant: 'info'
|
||||
},
|
||||
description: _('SecurityConfiguration|Simulate breach and attack scenarios against your ' \
|
||||
'running application by attempting to detect and exploit known vulnerabilities.'),
|
||||
name: _('SecurityConfiguration|Breach and Attack Simulation (BAS)'),
|
||||
description: s_('SecurityConfiguration|Simulate breach and attack scenarios against your ' \
|
||||
'running application by attempting to detect and exploit known vulnerabilities.'),
|
||||
name: s_('SecurityConfiguration|Breach and Attack Simulation (BAS)'),
|
||||
help_path: Gitlab::Routing.url_helpers.help_page_path(
|
||||
'user/application_security/breach_and_attack_simulation/index'),
|
||||
secondary: {
|
||||
config_help_path: Gitlab::Routing.url_helpers.help_page_path(
|
||||
configuration_help_path: Gitlab::Routing.url_helpers.help_page_path(
|
||||
'user/application_security/breach_and_attack_simulation/index',
|
||||
anchor: 'extend-dynamic-application-security-testing-dast'),
|
||||
description: _('SecurityConfiguration|Enable incubating Breach and Attack Simulation focused ' \
|
||||
'features such as callback attacks in your DAST scans.'),
|
||||
name: _('SecurityConfiguration|Out-of-Band Application Security Testing (OAST)')
|
||||
description: s_('SecurityConfiguration|Enable incubating Breach and Attack Simulation focused ' \
|
||||
'features such as callback attacks in your DAST scans.'),
|
||||
name: s_('SecurityConfiguration|Out-of-Band Application Security Testing (OAST)')
|
||||
},
|
||||
short_name: _('SecurityConfiguration|BAS'),
|
||||
short_name: s_('SecurityConfiguration|BAS'),
|
||||
type: 'breach_and_attack_simulation'
|
||||
}
|
||||
}.freeze
|
||||
|
|
|
|||
|
|
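
Every hunk in this file follows the same two patterns: `config_help_path` keys are renamed to `configuration_help_path`, and namespaced UI strings move from `_` to `s_`. The second matters because, with the gettext helpers used here, an untranslated msgid passed to `_` is returned verbatim, `Namespace|` prefix included, while `s_` strips the prefix. A minimal sketch of the difference (return values assume no translation is installed for the msgid):

    _('ciReport|SAST IaC')  # => "ciReport|SAST IaC" -- namespace leaks into the UI
    s_('ciReport|SAST IaC') # => "SAST IaC"          -- namespace prefix stripped
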
@@ -51,7 +51,7 @@ namespace :gitlab do
       end
     end

-    desc 'GitLab | Cleanup | Clean orphan job artifact files'
+    desc 'GitLab | Cleanup | Clean orphan job artifact files in local storage'
     task orphan_job_artifact_files: :gitlab_environment do
       warn_user_is_not_gitlab

@@ -63,6 +63,31 @@ namespace :gitlab do
       end
     end

+    desc 'GitLab | Cleanup | Clean orphan job artifact files stored in the @final directory in object storage'
+    task :orphan_job_artifact_final_objects, [:provider] => :gitlab_environment do |_, args|
+      warn_user_is_not_gitlab
+
+      force_restart = ENV['FORCE_RESTART'].present?
+
+      begin
+        cleaner = Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner.new(
+          provider: args.provider,
+          force_restart: force_restart,
+          dry_run: dry_run?,
+          logger: logger
+        )
+
+        cleaner.run!
+
+        if dry_run?
+          logger.info "To clean up all orphan files that were found, run this command with DRY_RUN=false".color(:yellow)
+        end
+      rescue Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner::UnsupportedProviderError => e
+        abort %(#{e.message}
+Usage: rake "gitlab:cleanup:orphan_job_artifact_final_objects[provider]")
+      end
+    end
+
     desc 'GitLab | Cleanup | Clean orphan LFS file references'
     task orphan_lfs_file_references: :gitlab_environment do
       warn_user_is_not_gitlab
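
The new task is driven by two environment variables visible above: `DRY_RUN` (defaults to true, per the specs further below) and `FORCE_RESTART`. For orientation, a minimal sketch of the equivalent direct invocation from a Rails console, under the assumption that the console provides the same environment the `:gitlab_environment` dependency prepares ('aws' is an illustrative provider; nil falls back to the provider in the object store configuration):

    # Mirrors what the rake task wires together; a sketch, not the task itself.
    cleaner = Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner.new(
      provider: 'aws',
      force_restart: false, # FORCE_RESTART=1 discards the saved page marker
      dry_run: true,        # DRY_RUN=false performs the actual deletions
      logger: Gitlab::AppLogger
    )
    cleaner.run!
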
@@ -1,7 +1,7 @@
 import { GlIcon } from '@gitlab/ui';
 import { mount } from '@vue/test-utils';
 import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import { securityFeatures } from '~/security_configuration/constants';
+import { securityFeatures } from 'jest/security_configuration/mock_data';
 import FeatureCard from '~/security_configuration/components/feature_card.vue';
 import FeatureCardBadge from '~/security_configuration/components/feature_card_badge.vue';
 import ManageViaMr from '~/vue_shared/security_configuration/components/manage_via_mr.vue';

@@ -1,11 +1,17 @@
-import {
-  SAST_NAME,
-  SAST_SHORT_NAME,
-  SAST_DESCRIPTION,
-  SAST_HELP_PATH,
-  SAST_CONFIG_HELP_PATH,
-  SAST_IAC_NAME,
-  SAST_IAC_SHORT_NAME,
-} from '~/security_configuration/constants';
-import { REPORT_TYPE_SAST } from '~/vue_shared/security_reports/constants';
+import { __, s__ } from '~/locale';
+import { helpPagePath } from '~/helpers/help_page_helper';
+
+import {
+  REPORT_TYPE_SAST,
+  REPORT_TYPE_BREACH_AND_ATTACK_SIMULATION,
+  REPORT_TYPE_SAST_IAC,
+} from '~/vue_shared/security_reports/constants';

 export const testProjectPath = 'foo/bar';
 export const testProviderIds = [101, 102, 103];
@@ -16,6 +22,71 @@ export const testTrainingUrls = [
   'https://www.vendornamethree.com/url',
 ];

+const SAST_DESCRIPTION = __('Analyze your source code for known vulnerabilities.');
+const SAST_HELP_PATH = helpPagePath('user/application_security/sast/index');
+const SAST_CONFIG_HELP_PATH = helpPagePath('user/application_security/sast/index', {
+  anchor: 'configuration',
+});
+
+const BAS_BADGE_TEXT = s__('SecurityConfiguration|Incubating feature');
+const BAS_BADGE_TOOLTIP = s__(
+  'SecurityConfiguration|Breach and Attack Simulation is an incubating feature extending existing security testing by simulating adversary activity.',
+);
+const BAS_DESCRIPTION = s__(
+  'SecurityConfiguration|Simulate breach and attack scenarios against your running application by attempting to detect and exploit known vulnerabilities.',
+);
+const BAS_HELP_PATH = helpPagePath('user/application_security/breach_and_attack_simulation/index');
+const BAS_NAME = s__('SecurityConfiguration|Breach and Attack Simulation (BAS)');
+const BAS_SHORT_NAME = s__('SecurityConfiguration|BAS');
+const BAS_DAST_FEATURE_FLAG_DESCRIPTION = s__(
+  'SecurityConfiguration|Enable incubating Breach and Attack Simulation focused features such as callback attacks in your DAST scans.',
+);
+const BAS_DAST_FEATURE_FLAG_HELP_PATH = helpPagePath(
+  'user/application_security/breach_and_attack_simulation/index',
+  { anchor: 'extend-dynamic-application-security-testing-dast' },
+);
+const BAS_DAST_FEATURE_FLAG_NAME = s__(
+  'SecurityConfiguration|Out-of-Band Application Security Testing (OAST)',
+);
+
+const SAST_IAC_DESCRIPTION = __(
+  'Analyze your infrastructure as code configuration files for known vulnerabilities.',
+);
+const SAST_IAC_HELP_PATH = helpPagePath('user/application_security/iac_scanning/index');
+const SAST_IAC_CONFIG_HELP_PATH = helpPagePath('user/application_security/iac_scanning/index', {
+  anchor: 'configuration',
+});
+
+export const securityFeatures = [
+  {
+    anchor: 'bas',
+    badge: {
+      alwaysDisplay: true,
+      text: BAS_BADGE_TEXT,
+      tooltipText: BAS_BADGE_TOOLTIP,
+      variant: 'info',
+    },
+    description: BAS_DESCRIPTION,
+    name: BAS_NAME,
+    helpPath: BAS_HELP_PATH,
+    secondary: {
+      configurationHelpPath: BAS_DAST_FEATURE_FLAG_HELP_PATH,
+      description: BAS_DAST_FEATURE_FLAG_DESCRIPTION,
+      name: BAS_DAST_FEATURE_FLAG_NAME,
+    },
+    shortName: BAS_SHORT_NAME,
+    type: REPORT_TYPE_BREACH_AND_ATTACK_SIMULATION,
+  },
+  {
+    name: SAST_IAC_NAME,
+    shortName: SAST_IAC_SHORT_NAME,
+    description: SAST_IAC_DESCRIPTION,
+    helpPath: SAST_IAC_HELP_PATH,
+    configurationHelpPath: SAST_IAC_CONFIG_HELP_PATH,
+    type: REPORT_TYPE_SAST_IAC,
+  },
+];
+
 const createSecurityTrainingProviders = ({ providerOverrides = {} }) => [
   {
     id: testProviderIds[0],

@@ -6,6 +6,46 @@ describe('augmentFeatures', () => {
   {
     name: 'SAST',
     type: 'SAST',
+    security_features: {
+      type: 'SAST',
+    },
   },
 ];

+const expectedMockSecurityFeatures = [
+  {
+    name: 'SAST',
+    type: 'SAST',
+    securityFeatures: {
+      type: 'SAST',
+    },
+  },
+];
+
+const expectedInvalidMockSecurityFeatures = [
+  {
+    foo: 'bar',
+    name: 'SAST',
+    type: 'SAST',
+    securityFeatures: {
+      type: 'SAST',
+    },
+  },
+];
+
+const expectedSecondarymockSecurityFeatures = [
+  {
+    name: 'DAST',
+    type: 'DAST',
+    helpPath: '/help/user/application_security/dast/index',
+    secondary: {
+      type: 'DAST PROFILES',
+      name: 'DAST PROFILES',
+    },
+    securityFeatures: {
+      type: 'DAST',
+      helpPath: '/help/user/application_security/dast/index',
+    },
+  },
+];
@@ -17,6 +57,10 @@ describe('augmentFeatures', () => {
     type: 'DAST PROFILES',
     name: 'DAST PROFILES',
   },
+  security_features: {
+    type: 'DAST',
+    help_path: '/help/user/application_security/dast/index',
+  },
 },
];
@@ -31,6 +75,9 @@ describe('augmentFeatures', () => {
   name: 'SAST',
   type: 'SAST',
   customField: 'customvalue',
+  securityFeatures: {
+    type: 'SAST',
+  },
 },
];
@@ -38,6 +85,9 @@ describe('augmentFeatures', () => {
 {
   name: 'DAST',
   type: 'dast',
+  security_features: {
+    type: 'DAST',
+  },
 },
];
@@ -48,6 +98,9 @@ describe('augmentFeatures', () => {
   customField: 'customvalue',
   onDemandAvailable: false,
   badge: {},
+  security_features: {
+    type: 'dast',
+  },
 },
];
@@ -58,6 +111,9 @@ describe('augmentFeatures', () => {
   customField: 'customvalue',
   onDemandAvailable: true,
   badge: {},
+  security_features: {
+    type: 'dast',
+  },
 },
];
@@ -70,11 +126,15 @@ describe('augmentFeatures', () => {
 ];

 const expectedOutputDefault = {
-  augmentedSecurityFeatures: mockSecurityFeatures,
+  augmentedSecurityFeatures: expectedMockSecurityFeatures,
 };

+const expectedInvalidOutputDefault = {
+  augmentedSecurityFeatures: expectedInvalidMockSecurityFeatures,
+};
+
 const expectedOutputSecondary = {
-  augmentedSecurityFeatures: mockSecurityFeatures,
+  augmentedSecurityFeatures: expectedSecondarymockSecurityFeatures,
 };

 const expectedOutputCustomFeature = {
@@ -88,6 +148,9 @@ describe('augmentFeatures', () => {
       type: 'dast',
       customField: 'customvalue',
       onDemandAvailable: false,
+      securityFeatures: {
+        type: 'dast',
+      },
     },
   ],
 };
@@ -100,52 +163,62 @@ describe('augmentFeatures', () => {
       customField: 'customvalue',
       onDemandAvailable: true,
       badge: {},
+      securityFeatures: {
+        type: 'dast',
+      },
     },
   ],
 };

 describe('returns an object with augmentedSecurityFeatures when', () => {
-  it('given an empty array', () => {
-    expect(augmentFeatures(mockSecurityFeatures, [])).toEqual(expectedOutputDefault);
+  it('given a properly formatted array', () => {
+    expect(augmentFeatures(mockSecurityFeatures)).toEqual(expectedOutputDefault);
   });

   it('given an invalid populated array', () => {
-    expect(augmentFeatures(mockSecurityFeatures, mockInvalidCustomFeature)).toEqual(
-      expectedOutputDefault,
-    );
+    expect(
+      augmentFeatures([{ ...mockSecurityFeatures[0], ...mockInvalidCustomFeature[0] }]),
+    ).toEqual(expectedInvalidOutputDefault);
   });

   it('features have secondary key', () => {
-    expect(augmentFeatures(mockSecurityFeatures, mockFeaturesWithSecondary, [])).toEqual(
-      expectedOutputSecondary,
-    );
+    expect(
+      augmentFeatures([{ ...mockSecurityFeatures[0], ...mockFeaturesWithSecondary[0] }]),
+    ).toEqual(expectedOutputSecondary);
   });

   it('given a valid populated array', () => {
-    expect(augmentFeatures(mockSecurityFeatures, mockValidCustomFeature)).toEqual(
-      expectedOutputCustomFeature,
-    );
+    expect(
+      augmentFeatures([{ ...mockSecurityFeatures[0], ...mockValidCustomFeature[0] }]),
+    ).toEqual(expectedOutputCustomFeature);
   });
 });

 describe('returns an object with camelcased keys', () => {
   it('given a customfeature in snakecase', () => {
-    expect(augmentFeatures(mockSecurityFeatures, mockValidCustomFeatureSnakeCase)).toEqual(
-      expectedOutputCustomFeature,
-    );
+    expect(
+      augmentFeatures([{ ...mockSecurityFeatures[0], ...mockValidCustomFeatureSnakeCase[0] }]),
+    ).toEqual(expectedOutputCustomFeature);
   });
 });

 describe('follows onDemandAvailable', () => {
   it('deletes badge when false', () => {
     expect(
-      augmentFeatures(mockSecurityFeaturesDast, mockValidCustomFeatureWithOnDemandAvailableFalse),
+      augmentFeatures([
+        {
+          ...mockSecurityFeaturesDast[0],
+          ...mockValidCustomFeatureWithOnDemandAvailableFalse[0],
+        },
+      ]),
     ).toEqual(expectedOutputCustomFeatureWithOnDemandAvailableFalse);
   });

   it('keeps badge when true', () => {
     expect(
-      augmentFeatures(mockSecurityFeaturesDast, mockValidCustomFeatureWithOnDemandAvailableTrue),
+      augmentFeatures([
+        { ...mockSecurityFeaturesDast[0], ...mockValidCustomFeatureWithOnDemandAvailableTrue[0] },
+      ]),
    ).toEqual(expectedOutputCustomFeatureWithOnDemandAvailableTrue);
   });
 });

@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cleanup::OrphanJobArtifactFinalObjects::JobArtifactObject, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+  let(:job_artifact_object) do
+    described_class.new(
+      fog_file,
+      bucket_prefix: bucket_prefix
+    )
+  end
+
+  # rubocop:disable RSpec/VerifiedDoubles -- For some reason it can't see Fog::AWS::Storage::File
+  let(:fog_file) { double(key: fog_file_key, content_length: 145) }
+  # rubocop:enable RSpec/VerifiedDoubles
+
+  let(:fog_file_key) { 'aaa/bbb/123' }
+  let(:bucket_prefix) { nil }
+
+  describe '#path' do
+    subject { job_artifact_object.path }
+
+    it { is_expected.to eq(fog_file.key) }
+  end
+
+  describe '#size' do
+    subject { job_artifact_object.size }
+
+    it { is_expected.to eq(fog_file.content_length) }
+  end
+
+  describe '#in_final_location?' do
+    subject { job_artifact_object.in_final_location? }
+
+    context 'when path has @final in it' do
+      let(:fog_file_key) { 'aaa/bbb/@final/123/ccc' }
+
+      it { is_expected.to eq(true) }
+    end
+
+    context 'when path has no @final in it' do
+      let(:fog_file_key) { 'aaa/bbb/ccc' }
+
+      it { is_expected.to eq(false) }
+    end
+  end
+
+  describe '#orphan?' do
+    shared_examples_for 'identifying orphan object' do
+      let(:artifact_final_path) { 'aaa/@final/bbb' }
+      let(:fog_file_key) { File.join([bucket_prefix, artifact_final_path].compact) }
+
+      subject { job_artifact_object.orphan? }
+
+      context 'when there is a job artifact record with a file_final_path that matches the object path' do
+        before do
+          # We never store the bucket_prefix in the file_final_path
+          create(:ci_job_artifact, file_final_path: artifact_final_path)
+        end
+
+        it { is_expected.to eq(false) }
+      end
+
+      context 'when there are no job artifact records with a file_final_path that matches the object path' do
+        context 'and there is a pending direct upload entry that matches the object path' do
+          before do
+            # We never store the bucket_prefix in the pending direct upload entry
+            ObjectStorage::PendingDirectUpload.prepare(:artifacts, artifact_final_path)
+          end
+
+          it { is_expected.to eq(false) }
+        end
+
+        context 'and there are no pending direct upload entries that match the object path' do
+          it { is_expected.to eq(true) }
+        end
+      end
+    end
+
+    context 'when bucket prefix is not present' do
+      it_behaves_like 'identifying orphan object'
+    end
+
+    context 'when bucket prefix is present' do
+      let(:bucket_prefix) { 'my/prefix' }
+
+      it_behaves_like 'identifying orphan object'
+    end
+  end
+end
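
The `#in_final_location?` and `#orphan?` examples above pin down a simple contract. A hedged sketch of that contract in plain Ruby (illustrative only, not the class's actual implementation):

    # An object is in the final location when its path contains the @final segment.
    'aaa/bbb/@final/123/ccc'.include?('@final') # => true
    'aaa/bbb/ccc'.include?('@final')            # => false

It then counts as an orphan only when, after the bucket prefix is stripped, neither a `ci_job_artifacts` row (via `file_final_path`) nor a pending direct upload entry points at its path.
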
@@ -0,0 +1,263 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner, :orphan_final_artifacts_cleanup, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+  describe '#run!' do
+    let(:cleaner) do
+      described_class.new(
+        provider: specified_provider,
+        force_restart: force_restart,
+        dry_run: dry_run
+      )
+    end
+
+    let(:dry_run) { true }
+    let(:force_restart) { false }
+    let(:remote_directory) { 'artifacts' }
+    let(:bucket_prefix) { nil }
+
+    subject(:run) { cleaner.run! }
+
+    before do
+      stub_const('Gitlab::Cleanup::OrphanJobArtifactFinalObjects::Paginators::BasePaginator::BATCH_SIZE', 2)
+
+      Rake.application.rake_require 'tasks/gitlab/cleanup'
+
+      Gitlab.config.artifacts.object_store.tap do |config|
+        config[:remote_directory] = remote_directory
+        config[:bucket_prefix] = bucket_prefix
+      end
+
+      allow(Gitlab::AppLogger).to receive(:info)
+    end
+
+    shared_examples_for 'cleaning up orphan final job artifact objects' do
+      let(:fog_connection) do
+        stub_object_storage_uploader(
+          config: Gitlab.config.artifacts.object_store,
+          uploader: JobArtifactUploader,
+          direct_upload: true
+        )
+      end
+
+      let!(:orphan_final_object_1) { create_fog_file }
+      let!(:orphan_final_object_2) { create_fog_file }
+      let!(:orphan_non_final_object) { create_fog_file(final: false) }
+
+      let!(:non_orphan_final_object_1) do
+        create_fog_file.tap do |file|
+          create(:ci_job_artifact, file_final_path: path_without_bucket_prefix(file.key))
+        end
+      end
+
+      let!(:non_orphan_final_object_2) do
+        create_fog_file.tap do |file|
+          create(:ci_job_artifact, file_final_path: path_without_bucket_prefix(file.key))
+        end
+      end
+
+      shared_context 'when resuming from marker' do
+        let(:dummy_error) { Class.new(StandardError) }
+
+        before do
+          fetch_counter = 0
+
+          allow(cleaner).to receive(:fetch_batch).and_wrap_original do |m, *args|
+            raise dummy_error if fetch_counter == 1
+
+            fetch_counter += 1
+            m.call(*args)
+          end
+        end
+      end
+
+      shared_examples_for 'handling dry run mode' do
+        context 'when on dry run (which is the default)' do
+          it 'logs orphan objects to delete but does not delete them' do
+            run
+
+            expect_start_log_message
+            expect_first_page_loading_log_message
+            expect_page_loading_via_marker_log_message(times: 3)
+            expect_delete_log_message(orphan_final_object_1)
+            expect_delete_log_message(orphan_final_object_2)
+            expect_no_delete_log_message(orphan_non_final_object)
+            expect_no_delete_log_message(non_orphan_final_object_1)
+            expect_no_delete_log_message(non_orphan_final_object_2)
+            expect_done_log_message
+
+            expect_object_to_exist(orphan_final_object_1)
+            expect_object_to_exist(orphan_final_object_2)
+            expect_object_to_exist(orphan_non_final_object)
+            expect_object_to_exist(non_orphan_final_object_1)
+            expect_object_to_exist(non_orphan_final_object_2)
+          end
+
+          context 'when interrupted in the middle of processing pages' do
+            include_context 'when resuming from marker'
+
+            it 'resumes from last known page marker on the next run' do
+              expect { cleaner.run! }.to raise_error(dummy_error)
+              saved_marker = fetch_saved_marker
+
+              new_cleaner = described_class.new(
+                provider: specified_provider,
+                force_restart: false,
+                dry_run: true
+              )
+
+              new_cleaner.run!
+
+              expect_resuming_from_marker_log_message(saved_marker)
+
+              # Given we can't guarantee the order of the objects because of random
+              # path generation, we can't tell which page they will fall in, so we
+              # just ensure that they were all logged in the end.
+              expect_delete_log_message(orphan_final_object_1)
+              expect_delete_log_message(orphan_final_object_2)
+
+              # Ensure that they were not deleted because this is just a dry run.
+              expect_object_to_exist(orphan_final_object_1)
+              expect_object_to_exist(orphan_final_object_2)
+            end
+
+            context 'and force_restart is true' do
+              it 'starts from the first page on the next run' do
+                expect { cleaner.run! }.to raise_error(dummy_error)
+
+                new_cleaner = described_class.new(
+                  provider: specified_provider,
+                  force_restart: true,
+                  dry_run: true
+                )
+
+                new_cleaner.run!
+
+                expect_no_resuming_from_marker_log_message
+
+                # Ensure that they were not deleted because this is just a dry run.
+                expect_object_to_exist(orphan_final_object_1)
+                expect_object_to_exist(orphan_final_object_2)
+              end
+            end
+          end
+        end
+
+        context 'when dry run is set to false' do
+          let(:dry_run) { false }
+
+          it 'logs orphan objects to delete and deletes them' do
+            expect_object_to_exist(orphan_final_object_1)
+            expect_object_to_exist(orphan_final_object_2)
+
+            run
+
+            expect_start_log_message
+            expect_first_page_loading_log_message
+            expect_page_loading_via_marker_log_message(times: 3)
+            expect_delete_log_message(orphan_final_object_1)
+            expect_delete_log_message(orphan_final_object_2)
+            expect_no_delete_log_message(orphan_non_final_object)
+            expect_no_delete_log_message(non_orphan_final_object_1)
+            expect_no_delete_log_message(non_orphan_final_object_2)
+            expect_done_log_message
+
+            expect_object_to_be_deleted(orphan_final_object_1)
+            expect_object_to_be_deleted(orphan_final_object_2)
+            expect_object_to_exist(orphan_non_final_object)
+            expect_object_to_exist(non_orphan_final_object_1)
+            expect_object_to_exist(non_orphan_final_object_2)
+          end
+
+          context 'when interrupted in the middle of processing pages' do
+            include_context 'when resuming from marker'
+
+            it 'resumes from last known page marker on the next run' do
+              expect { cleaner.run! }.to raise_error(dummy_error)
+              saved_marker = fetch_saved_marker
+
+              new_cleaner = described_class.new(
+                provider: specified_provider,
+                force_restart: false,
+                dry_run: false
+              )
+
+              new_cleaner.run!
+
+              expect_resuming_from_marker_log_message(saved_marker)
+
+              # Given we can't guarantee the order of the objects because of random
+              # path generation, we can't tell which page they will fall in, so we
+              # just ensure that they were all logged in the end.
+              expect_delete_log_message(orphan_final_object_1)
+              expect_delete_log_message(orphan_final_object_2)
+
+              # Ensure that they were deleted because this is not a dry run.
+              expect_object_to_be_deleted(orphan_final_object_1)
+              expect_object_to_be_deleted(orphan_final_object_2)
+            end
+
+            context 'and force_restart is true' do
+              it 'starts from the first page on the next run' do
+                expect { cleaner.run! }.to raise_error(dummy_error)
+
+                new_cleaner = described_class.new(
+                  provider: specified_provider,
+                  force_restart: true,
+                  dry_run: false
+                )
+
+                new_cleaner.run!
+
+                expect_no_resuming_from_marker_log_message
+
+                # Ensure that they were deleted because this is not a dry run.
+                expect_object_to_be_deleted(orphan_final_object_1)
+                expect_object_to_be_deleted(orphan_final_object_2)
+              end
+            end
+          end
+        end
+      end
+
+      context 'when not configured to use bucket_prefix' do
+        let(:remote_directory) { 'artifacts' }
+        let(:bucket_prefix) { nil }
+
+        it_behaves_like 'handling dry run mode'
+      end
+
+      context 'when configured to use bucket_prefix' do
+        let(:remote_directory) { 'main-bucket' }
+        let(:bucket_prefix) { 'my/artifacts' }
+
+        it_behaves_like 'handling dry run mode'
+      end
+    end
+
+    context 'when defaulting to provider in the object store configuration' do
+      let(:specified_provider) { nil }
+
+      it_behaves_like 'cleaning up orphan final job artifact objects'
+    end
+
+    context 'when provider is specified' do
+      context 'and provider is supported' do
+        let(:specified_provider) { 'aws' }
+
+        it_behaves_like 'cleaning up orphan final job artifact objects'
+      end
+
+      context 'and provider is not supported' do
+        let(:specified_provider) { 'somethingelse' }
+
+        it 'raises an error' do
+          expect { run }.to raise_error(described_class::UnsupportedProviderError)
+        end
+      end
+    end
+  end
+end
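
One mechanic worth calling out from the resume examples above: the page marker outlives a crashed run because it is persisted in Redis shared state. A sketch of reading it back, assuming the key constant lives on the cleaner class as the helper module further below implies:

    Gitlab::Redis::SharedState.with do |redis|
      redis.get(Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner::LAST_PAGE_MARKER_REDIS_KEY)
    end

`force_restart: true` simply makes the next run ignore (and overwrite) that saved marker.
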
@@ -97,13 +97,13 @@ RSpec.describe ::Gitlab::Security::ScanConfiguration do
                  short_name: "SAST",
                  description: "Analyze your source code for known vulnerabilities.",
                  help_path: "/help/user/application_security/sast/index",
-                 config_help_path: "/help/user/application_security/sast/index#configuration",
+                 configuration_help_path: "/help/user/application_security/sast/index#configuration",
                  type: "sast" }
      :sast_iac | { name: "Infrastructure as Code (IaC) Scanning",
-                 short_name: "ciReport|SAST IaC",
+                 short_name: "SAST IaC",
                  description: "Analyze your infrastructure as code configuration files for known vulnerabilities.",
                  help_path: "/help/user/application_security/iac_scanning/index",
-                 config_help_path: "/help/user/application_security/iac_scanning/index#configuration",
+                 configuration_help_path: "/help/user/application_security/iac_scanning/index#configuration",
                  type: "sast_iac" }
      :dast | {
                 badge: { text: "Available on demand",
@@ -113,34 +113,34 @@ RSpec.describe ::Gitlab::Security::ScanConfiguration do
                 secondary: {
                   type: "dast_profiles",
                   name: "DAST profiles",
-                  description: "SecurityConfiguration|Manage profiles for use by DAST scans.",
-                  configuration_text: "SecurityConfiguration|Manage profiles"
+                  description: "Manage profiles for use by DAST scans.",
+                  configuration_text: "Manage profiles"
                 },
                 name: "Dynamic Application Security Testing (DAST)",
-                short_name: "ciReport|DAST",
-                description: "ciReport|Analyze a deployed version of your web application for known " \
+                short_name: "DAST",
+                description: "Analyze a deployed version of your web application for known " \
                   "vulnerabilities by examining it from the outside in. DAST works by simulating " \
                   "external attacks on your application while it is running.",
                 help_path: "/help/user/application_security/dast/index",
-                config_help_path: "/help/user/application_security/dast/index#enable-automatic-dast-run",
+                configuration_help_path: "/help/user/application_security/dast/index#enable-automatic-dast-run",
                 type: "dast",
                 anchor: "dast"
              }
      :dependency_scanning | { name: "Dependency Scanning",
                  description: "Analyze your dependencies for known vulnerabilities.",
                  help_path: "/help/user/application_security/dependency_scanning/index",
-                 config_help_path: "/help/user/application_security/dependency_scanning/index#configuration",
+                 configuration_help_path: "/help/user/application_security/dependency_scanning/index#configuration",
                  type: "dependency_scanning",
                  anchor: "dependency-scanning" }
      :container_scanning | { name: "Container Scanning",
                  description: "Check your Docker images for known vulnerabilities.",
                  help_path: "/help/user/application_security/container_scanning/index",
-                 config_help_path: "/help/user/application_security/container_scanning/index#configuration",
+                 configuration_help_path: "/help/user/application_security/container_scanning/index#configuration",
                  type: "container_scanning" }
      :secret_detection | { name: "Secret Detection",
                  description: "Analyze your source code and git history for secrets.",
                  help_path: "/help/user/application_security/secret_detection/index",
-                 config_help_path: "/help/user/application_security/secret_detection/index#configuration",
+                 configuration_help_path: "/help/user/application_security/secret_detection/index#configuration",
                  type: "secret_detection" }
      :api_fuzzing | { name: "API Fuzzing",
                  description: "Find bugs in your code with API fuzzing.",
@@ -149,32 +149,33 @@ RSpec.describe ::Gitlab::Security::ScanConfiguration do
      :coverage_fuzzing | { name: "Coverage Fuzzing",
                  description: "Find bugs in your code with coverage-guided fuzzing.",
                  help_path: "/help/user/application_security/coverage_fuzzing/index",
-                 config_help_path: "/help/user/application_security/coverage_fuzzing/index#enable-coverage-guided-fuzz-testing",
+                 configuration_help_path: \
+                   "/help/user/application_security/coverage_fuzzing/index#enable-coverage-guided-fuzz-testing",
                  type: "coverage_fuzzing",
                  secondary: { type: "corpus_management",
                      name: "Corpus Management",
-                     description: "SecurityConfiguration|Manage corpus files used as " \
+                     description: "Manage corpus files used as " \
                        "seed inputs with coverage-guided fuzzing.",
-                     configuration_text: "SecurityConfiguration|Manage corpus" } }
+                     configuration_text: "Manage corpus" } }
      :breach_and_attack_simulation | { anchor: "bas",
                  badge: { always_display: true,
-                    text: "SecurityConfiguration|Incubating feature",
-                    tooltip_text: "SecurityConfiguration|Breach and Attack Simulation is an incubating feature " \
+                    text: "Incubating feature",
+                    tooltip_text: "Breach and Attack Simulation is an incubating feature " \
                      "extending existing security " \
                      "testing by simulating adversary activity.",
                    variant: "info" },
-                 description: "SecurityConfiguration|Simulate breach and attack scenarios against your running " \
+                 description: "Simulate breach and attack scenarios against your running " \
                    "application by attempting to detect " \
                    "and exploit known vulnerabilities.",
-                 name: "SecurityConfiguration|Breach and Attack Simulation (BAS)",
+                 name: "Breach and Attack Simulation (BAS)",
                  help_path: "/help/user/application_security/breach_and_attack_simulation/index",
-                 secondary: { config_help_path: "/help/user/application_security/breach_and_attack_simulation/" \
-                     "index#extend-dynamic-application-security-testing-dast",
-                   description: "SecurityConfiguration|Enable incubating Breach and " \
+                 secondary: { configuration_help_path: "/help/user/application_security/breach_and_attack_simulation/" \
+                     "index#extend-dynamic-application-security-testing-dast",
+                   description: "Enable incubating Breach and " \
                     "Attack Simulation focused features " \
                     "such as callback attacks in your DAST scans.",
-                   name: "SecurityConfiguration|Out-of-Band Application Security Testing (OAST)" },
-                 short_name: "SecurityConfiguration|BAS",
+                   name: "Out-of-Band Application Security Testing (OAST)" },
+                 short_name: "BAS",
                  type: "breach_and_attack_simulation" }
      :invalid | {}
    end

@@ -204,6 +204,7 @@ RSpec.configure do |config|
   config.include LabelsHelper, type: :feature
   config.include UnlockPipelinesHelpers, :unlock_pipelines
   config.include UserWithNamespaceShim
+  config.include OrphanFinalArtifactsCleanupHelpers, :orphan_final_artifacts_cleanup

   config.include_context 'when rendered has no HTML escapes', type: :view
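
The metadata symbol in the added line is what keeps the new helpers out of unrelated specs: RSpec mixes a module passed to `config.include` together with a symbol only into example groups tagged with that symbol. A small sketch of the opt-in:

    # Helper methods are available here because of the tag...
    RSpec.describe Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner, :orphan_final_artifacts_cleanup do
    end

    # ...but not in an untagged group.
    RSpec.describe 'something else' do
    end
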
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+module OrphanFinalArtifactsCleanupHelpers
+  def create_fog_file(final: true)
+    path = if final
+             JobArtifactUploader.generate_final_store_path(root_id: 123)
+           else
+             JobArtifactUploader.generate_remote_id
+           end
+
+    fog_connection.directories.new(key: remote_directory)
+      .files
+      .create( # rubocop:disable Rails/SaveBang -- not the AR method
+        key: path_with_bucket_prefix(path),
+        body: 'content'
+      )
+  end
+
+  def path_without_bucket_prefix(path)
+    Pathname.new(path).relative_path_from(bucket_prefix.to_s).to_s
+  end
+
+  def path_with_bucket_prefix(path)
+    File.join([bucket_prefix, path].compact)
+  end
+
+  def expect_object_to_exist(fog_file)
+    expect { fog_connection.get_object(remote_directory, fog_file.key) }.not_to raise_error
+  end
+
+  def expect_object_to_be_deleted(fog_file)
+    expect { fog_connection.get_object(remote_directory, fog_file.key) }.to raise_error(Excon::Error::NotFound)
+  end
+
+  def expect_start_log_message
+    expect_log_message("Looking for orphan job artifact objects")
+  end
+
+  def expect_done_log_message
+    expect_log_message("Done")
+  end
+
+  def expect_first_page_loading_log_message
+    expect_log_message("Loading page (first page)", times: 1)
+  end
+
+  def expect_page_loading_via_marker_log_message(times:)
+    expect_log_message("Loading page (marker:", times: times)
+  end
+
+  def expect_resuming_from_marker_log_message(marker)
+    expect_log_message("Resuming from last page marker: #{marker}", times: 1)
+  end
+
+  def expect_no_resuming_from_marker_log_message
+    expect(Gitlab::AppLogger).not_to have_received(:info).with(a_string_including("Resuming"))
+  end
+
+  def expect_delete_log_message(fog_file)
+    expect_log_message("Delete #{fog_file.key} (#{fog_file.content_length} bytes)")
+  end
+
+  def expect_no_delete_log_message(fog_file)
+    expect_no_log_message("Delete #{fog_file.key} (#{fog_file.content_length} bytes)")
+  end
+
+  def expect_log_message(message, times: 1)
+    message = "[DRY RUN] #{message}" if dry_run
+    expect(Gitlab::AppLogger).to have_received(:info).with(a_string_including(message)).exactly(times).times
+  end
+
+  def expect_no_log_message(message)
+    message = "[DRY RUN] #{message}" if dry_run
+    expect(Gitlab::AppLogger).not_to have_received(:info).with(a_string_including(message))
+  end
+
+  def fetch_saved_marker
+    Gitlab::Redis::SharedState.with do |redis|
+      redis.get(described_class::LAST_PAGE_MARKER_REDIS_KEY)
+    end
+  end
+end
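
The two path helpers above are inverses of each other once a bucket prefix is configured. A quick worked example with illustrative values, assuming `bucket_prefix = 'my/prefix'`:

    File.join(['my/prefix', 'aaa/@final/bbb'].compact)
    # => "my/prefix/aaa/@final/bbb"

    Pathname.new('my/prefix/aaa/@final/bbb').relative_path_from('my/prefix').to_s
    # => "aaa/@final/bbb"

With `bucket_prefix = nil`, `compact` drops the nil entry and both helpers leave the path effectively unchanged.
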
@@ -383,4 +383,88 @@ RSpec.describe 'gitlab:cleanup rake tasks', :silence_stdout do
       end
     end
   end
+
+  describe 'cleanup:orphan_job_artifact_final_objects' do
+    subject(:rake_task) { run_rake_task('gitlab:cleanup:orphan_job_artifact_final_objects', provider) }
+
+    before do
+      stub_artifacts_object_storage
+    end
+
+    shared_examples_for 'running the cleaner' do
+      it 'runs the task without errors' do
+        expect(Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner)
+          .to receive(:new)
+          .with(
+            dry_run: true,
+            force_restart: false,
+            provider: provider,
+            logger: anything
+          )
+          .and_call_original
+
+        expect { rake_task }.not_to raise_error
+      end
+
+      context 'with FORCE_RESTART defined' do
+        before do
+          stub_env('FORCE_RESTART', '1')
+        end
+
+        it 'passes force_restart correctly' do
+          expect(Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner)
+            .to receive(:new)
+            .with(
+              dry_run: true,
+              force_restart: true,
+              provider: provider,
+              logger: anything
+            )
+            .and_call_original
+
+          expect { rake_task }.not_to raise_error
+        end
+      end
+
+      context 'with DRY_RUN set to false' do
+        before do
+          stub_env('DRY_RUN', 'false')
+        end
+
+        it 'passes dry_run correctly' do
+          expect(Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner)
+            .to receive(:new)
+            .with(
+              dry_run: false,
+              force_restart: false,
+              provider: provider,
+              logger: anything
+            )
+            .and_call_original
+
+          expect { rake_task }.not_to raise_error
+        end
+      end
+    end
+
+    context 'when provider is not specified' do
+      let(:provider) { nil }
+
+      it_behaves_like 'running the cleaner'
+    end
+
+    context 'when provider is specified' do
+      let(:provider) { 'aws' }
+
+      it_behaves_like 'running the cleaner'
+    end
+
+    context 'when unsupported provider is given' do
+      let(:provider) { 'somethingelse' }
+
+      it 'exits with error' do
+        expect { rake_task }.to raise_error(SystemExit)
+      end
+    end
+  end
 end