Add latest changes from gitlab-org/gitlab@master
parent ee9e3d0135
commit bf76d94f5a

@@ -25,7 +25,7 @@ config:
  no-emphasis-as-heading: false # MD036
  no-inline-html: false # MD033
  no-trailing-punctuation: # MD026
-    punctuation: ".,;:!。,;:!?"
+    punctuation: ".,;:!。,;:!"
  no-trailing-spaces: false # MD009
  ol-prefix: # MD029
    style: "one"

@@ -1 +1 @@
-40017b4d1cf9940aa7a8013a8fdeb1376959281d
+2a1fbd44ddcb02aa5a4ed3a43bdd8f9364565919

Gemfile (2 changes)

@@ -759,4 +759,4 @@ gem 'paper_trail', '~> 15.0', feature_category: :shared

gem "i18n_data", "~> 0.13.1", feature_category: :system_access

-gem "gitlab-cloud-connector", "~> 1.5", require: 'gitlab/cloud_connector', feature_category: :cloud_connector
+gem "gitlab-cloud-connector", "~> 1.11", require: 'gitlab/cloud_connector', feature_category: :cloud_connector

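The only change in this hunk is the pessimistic version constraint on gitlab-cloud-connector. As an illustrative note (standard Bundler semantics, not part of the commit), "~> 1.11" is shorthand for a pair of bounds:

    # Illustrative only: an equivalent spelling of the "~> 1.11" requirement above.
    # It allows any release at or above 1.11 but below 2.0.
    gem "gitlab-cloud-connector", ">= 1.11", "< 2.0",
        require: 'gitlab/cloud_connector', feature_category: :cloud_connector
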
@@ -2162,7 +2162,7 @@ DEPENDENCIES
  gitlab-active-context!
  gitlab-backup-cli!
  gitlab-chronic (~> 0.10.5)
-  gitlab-cloud-connector (~> 1.5)
+  gitlab-cloud-connector (~> 1.11)
  gitlab-dangerfiles (~> 4.9.0)
  gitlab-duo-workflow-service-client (~> 0.1)!
  gitlab-experiment (~> 0.9.1)

@@ -2162,7 +2162,7 @@ DEPENDENCIES
  gitlab-active-context!
  gitlab-backup-cli!
  gitlab-chronic (~> 0.10.5)
-  gitlab-cloud-connector (~> 1.5)
+  gitlab-cloud-connector (~> 1.11)
  gitlab-dangerfiles (~> 4.9.0)
  gitlab-duo-workflow-service-client (~> 0.1)!
  gitlab-experiment (~> 0.9.1)

@@ -97,3 +97,6 @@ export const getSharedGroups = (groupId, params = {}) => {

  return axios.get(url, { params: { ...defaultParams, ...params } });
};
+
+// no-op: See EE code for implementation
+export const fetchGroupEnterpriseUsers = () => {};

@@ -1,7 +1,7 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "$id": "https://gitlab.com/.gitlab-ci.yml",
-  "markdownDescription": "Gitlab has a built-in solution for doing CI called Gitlab CI. It is configured by supplying a file called `.gitlab-ci.yml`, which will list all the jobs that are going to run for the project. A full list of all options can be found [here](https://docs.gitlab.com/ci/yaml/). [Learn More](https://docs.gitlab.com/ci/).",
+  "markdownDescription": "GitLab has a built-in solution for doing CI called GitLab CI. It is configured by supplying a file called `.gitlab-ci.yml`, which will list all the jobs that are going to run for the project. A full list of all options can be found [here](https://docs.gitlab.com/ci/yaml/). [Learn More](https://docs.gitlab.com/ci/).",
  "type": "object",
  "properties": {
    "$schema": {

@@ -132,7 +132,7 @@
    },
    "pages": {
      "$ref": "#/definitions/job",
-      "markdownDescription": "A special job used to upload static sites to Gitlab pages. Requires a `public/` directory with `artifacts.path` pointing to it. [Learn More](https://docs.gitlab.com/ci/yaml/#pages)."
+      "markdownDescription": "A special job used to upload static sites to GitLab pages. Requires a `public/` directory with `artifacts.path` pointing to it. [Learn More](https://docs.gitlab.com/ci/yaml/#pages)."
    },
    "workflow": {
      "type": "object",

@@ -210,7 +210,7 @@
        "object",
        "null"
      ],
-      "markdownDescription": "Used to specify a list of files and directories that should be attached to the job if it succeeds. Artifacts are sent to Gitlab where they can be downloaded. [Learn More](https://docs.gitlab.com/ci/yaml/#artifacts).",
+      "markdownDescription": "Used to specify a list of files and directories that should be attached to the job if it succeeds. Artifacts are sent to GitLab where they can be downloaded. [Learn More](https://docs.gitlab.com/ci/yaml/#artifacts).",
      "additionalProperties": false,
      "properties": {
        "paths": {

@@ -269,7 +269,7 @@
        },
        "reports": {
          "type": "object",
-          "markdownDescription": "Reports will be uploaded as artifacts, and often displayed in the Gitlab UI, such as in merge requests. [Learn More](https://docs.gitlab.com/ci/yaml/#artifactsreports).",
+          "markdownDescription": "Reports will be uploaded as artifacts, and often displayed in the GitLab UI, such as in merge requests. [Learn More](https://docs.gitlab.com/ci/yaml/#artifactsreports).",
          "additionalProperties": false,
          "properties": {
            "annotations": {

@@ -715,7 +715,7 @@
    },
    {
      "type": "object",
-      "description": "Specifies the docker image to use for the job or globally for all jobs. Job configuration takes precedence over global setting. Requires a certain kind of Gitlab runner executor.",
+      "description": "Specifies the docker image to use for the job or globally for all jobs. Job configuration takes precedence over global setting. Requires a certain kind of GitLab runner executor.",
      "additionalProperties": false,
      "properties": {
        "name": {

@@ -792,7 +792,7 @@
        ]
      }
    ],
-    "markdownDescription": "Specifies the docker image to use for the job or globally for all jobs. Job configuration takes precedence over global setting. Requires a certain kind of Gitlab runner executor. [Learn More](https://docs.gitlab.com/ci/yaml/#image)."
+    "markdownDescription": "Specifies the docker image to use for the job or globally for all jobs. Job configuration takes precedence over global setting. Requires a certain kind of GitLab runner executor. [Learn More](https://docs.gitlab.com/ci/yaml/#image)."
  },
  "services": {
    "type": "array",

@@ -1748,7 +1748,7 @@
        "enum": [
          "external"
        ],
-        "description": "When using CI services other than Gitlab"
+        "description": "When using CI services other than GitLab"
      },
      {
        "enum": [

@@ -1778,7 +1778,7 @@
        "enum": [
          "web"
        ],
-        "description": "For pipelines created using *Run pipeline* button in Gitlab UI (under your project's *Pipelines*)."
+        "description": "For pipelines created using *Run pipeline* button in GitLab UI (under your project's *Pipelines*)."
      }
    ]
  },

@@ -2220,7 +2220,7 @@
        },
        "url": {
          "type": "string",
-          "description": "When set, this will expose buttons in various places for the current environment in Gitlab, that will take you to the defined URL.",
+          "description": "When set, this will expose buttons in various places for the current environment in GitLab, that will take you to the defined URL.",
          "format": "uri",
          "pattern": "^(https?://.+|\\$[A-Za-z]+)"
        },

@@ -2241,11 +2241,11 @@
        },
        "auto_stop_in": {
          "type": "string",
-          "description": "The amount of time it should take before Gitlab will automatically stop the environment. Supports a wide variety of formats, e.g. '1 week', '3 mins 4 sec', '2 hrs 20 min', '2h20min', '6 mos 1 day', '47 yrs 6 mos and 4d', '3 weeks and 2 days'."
+          "description": "The amount of time it should take before GitLab will automatically stop the environment. Supports a wide variety of formats, e.g. '1 week', '3 mins 4 sec', '2 hrs 20 min', '2h20min', '6 mos 1 day', '47 yrs 6 mos and 4d', '3 weeks and 2 days'."
        },
        "kubernetes": {
          "type": "object",
-          "description": "Used to configure the kubernetes deployment for this environment. This is currently not supported for kubernetes clusters that are managed by Gitlab.",
+          "description": "Used to configure the kubernetes deployment for this environment. This is currently not supported for kubernetes clusters that are managed by GitLab.",
          "properties": {
            "namespace": {
              "type": "string",

@@ -2254,7 +2254,7 @@
        },
        "agent": {
          "type": "string",
-          "description": "Specifies the Gitlab Agent for Kubernetes. The format is `path/to/agent/project:agent-name`."
+          "description": "Specifies the GitLab Agent for Kubernetes. The format is `path/to/agent/project:agent-name`."
        },
        "flux_resource_path": {
          "type": "string",

@@ -40,6 +40,7 @@ export const initMembersApp = (el, context, options) => {
    namespaceUserLimit,
    availableRoles,
    reassignmentCsvPath,
+    restrictReassignmentToEnterprise,
    allowInactivePlaceholderReassignment,
    ...vuexStoreAttributes
  } = parseDataAttributes(el);

@@ -86,6 +87,7 @@ export const initMembersApp = (el, context, options) => {
    availableRoles,
    context,
    reassignmentCsvPath,
+    restrictReassignmentToEnterprise,
    allowInactivePlaceholderReassignment: parseBoolean(allowInactivePlaceholderReassignment),
    group: {
      id: isGroup ? sourceId : null,

@@ -2,11 +2,15 @@
import produce from 'immer';
import { debounce, isEmpty, isNull } from 'lodash';
import { GlAvatarLabeled, GlButton, GlCollapsibleListbox } from '@gitlab/ui';
+import {
+  getFirstPropertyValue,
+  normalizeHeaders,
+  parseIntPagination,
+} from '~/lib/utils/common_utils';
import { __, s__ } from '~/locale';
import { createAlert } from '~/alert';
-import { getFirstPropertyValue } from '~/lib/utils/common_utils';

import searchUsersQuery from '~/graphql_shared/queries/users_search_all_paginated.query.graphql';
+import { fetchGroupEnterpriseUsers } from 'ee_else_ce/api/groups_api';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import {
  PLACEHOLDER_STATUS_AWAITING_APPROVAL,

@@ -34,6 +38,12 @@ export default {
    GlCollapsibleListbox,
  },
  inject: {
+    group: {
+      default: {},
+    },
+    restrictReassignmentToEnterprise: {
+      default: false,
+    },
    allowInactivePlaceholderReassignment: {
      default: false,
    },

@@ -51,11 +61,17 @@ export default {
      isConfirmLoading: false,
      isCancelLoading: false,
      isNotifyLoading: false,
-      isLoadingInitial: true,
-      isLoadingMore: false,
+      apolloIsLoadingInitial: true,
+      apolloIsLoadingMore: false,
      isValidated: false,
      search: '',
      selectedUserToReassign: null,
+      enterpriseUsers: [],
+      enterpriseUsersPageInfo: {
+        nextPage: null,
+      },
+      enterpriseUsersIsLoadingInitial: false,
+      enterpriseUsersIsLoadingMore: false,
    };
  },

@@ -69,7 +85,10 @@ export default {
        };
      },
      result() {
-        this.isLoadingInitial = false;
+        this.apolloIsLoadingInitial = false;
      },
+      skip() {
+        return this.restrictReassignmentToEnterprise;
+      },
      error() {
        this.onError();

@@ -90,11 +109,35 @@ export default {
    },

    hasNextPage() {
+      if (this.restrictReassignmentToEnterprise) {
+        return Boolean(this.enterpriseUsersPageInfo.nextPage);
+      }
+
      return this.users?.pageInfo?.hasNextPage;
    },

    isLoading() {
-      return this.$apollo.queries.users.loading && !this.isLoadingMore;
+      if (this.restrictReassignmentToEnterprise) {
+        return this.enterpriseUsersIsLoadingInitial;
+      }
+
+      return this.$apollo.queries.users.loading && !this.apolloIsLoadingMore;
    },

+    isLoadingMore() {
+      if (this.restrictReassignmentToEnterprise) {
+        return this.enterpriseUsersIsLoadingMore;
+      }
+
+      return this.apolloIsLoadingMore;
+    },
+
+    isLoadingInitial() {
+      if (this.restrictReassignmentToEnterprise) {
+        return this.enterpriseUsersIsLoadingInitial;
+      }
+
+      return this.apolloIsLoadingInitial;
+    },
+
    userSelectInvalid() {

@@ -102,6 +145,10 @@ export default {
    },

    userItems() {
+      if (this.restrictReassignmentToEnterprise) {
+        return this.enterpriseUsers?.map((user) => this.createUserObjectFromEnterprise(user));
+      }
+
      return this.users?.nodes?.map((user) => createUserObject(user));
    },

@@ -146,10 +193,45 @@ export default {
    },

  methods: {
+    async fetchEnterpriseUsers(page) {
+      try {
+        const { data, headers } = await fetchGroupEnterpriseUsers(this.group.id, {
+          page,
+          per_page: USERS_PER_PAGE,
+          search: this.search,
+        });
+
+        this.enterpriseUsersPageInfo = parseIntPagination(normalizeHeaders(headers));
+        this.enterpriseUsers.push(...data);
+      } catch (error) {
+        this.onError();
+      }
+    },
+    async loadInitialEnterpriseUsers() {
+      if (!this.restrictReassignmentToEnterprise || this.enterpriseUsers.length > 0) {
+        return;
+      }
+
+      this.enterpriseUsersIsLoadingInitial = true;
+      await this.fetchEnterpriseUsers(1);
+      this.enterpriseUsersIsLoadingInitial = false;
+    },
+
+    async loadMoreEnterpriseUsers() {
+      this.enterpriseUsersIsLoadingMore = true;
+      await this.fetchEnterpriseUsers(this.enterpriseUsersPageInfo.nextPage);
+      this.enterpriseUsersIsLoadingMore = false;
+    },
+
    async loadMoreUsers() {
      if (!this.hasNextPage) return;

-      this.isLoadingMore = true;
+      if (this.restrictReassignmentToEnterprise) {
+        this.loadMoreEnterpriseUsers();
+        return;
+      }
+
+      this.apolloIsLoadingMore = true;

      try {
        await this.$apollo.queries.users.fetchMore({

@@ -166,7 +248,7 @@ export default {
      } catch (error) {
        this.onError();
      } finally {
-        this.isLoadingMore = false;
+        this.apolloIsLoadingMore = false;
      }
    },

@@ -178,6 +260,32 @@ export default {

    setSearch(searchTerm) {
      this.search = searchTerm;
+
+      if (this.restrictReassignmentToEnterprise) {
+        this.enterpriseUsers = [];
+        this.loadInitialEnterpriseUsers();
+      }
    },

+    createUserObjectFromEnterprise({
+      id,
+      username,
+      web_url: webUrl,
+      web_path: webPath,
+      avatar_url: avatarUrl,
+      name,
+    }) {
+      const gid = `gid://gitlab/User/${id}`;
+
+      return {
+        username,
+        webUrl,
+        webPath,
+        avatarUrl,
+        id: gid,
+        text: name,
+        value: gid,
+      };
+    },
+
    onSelect(value) {

@@ -301,6 +409,7 @@
      :searching="isLoading"
      infinite-scroll
      :infinite-scroll-loading="isLoadingMore"
+      @shown="loadInitialEnterpriseUsers"
      @search="debouncedSetSearch"
      @select="onSelect"
      @bottom-reached="loadMoreUsers"

@@ -7,4 +7,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/537556
milestone: '18.0'
group: group::code review
type: beta
-default_enabled: false
+default_enabled: true

@@ -175,6 +175,10 @@ production: &base
    # Number of seconds to wait for HTTP response after sending webhook HTTP POST request (default: 10)
    # webhook_timeout: 10

+    ## Session cookie settings
+    # signed_cookie_salt: 'signed cookie'
+    # authenticated_encrypted_cookie_salt: 'authenticated encrypted cookie'
+
    ## HTTP client settings
    http_client:
      # Filename of HTTP client pem

@@ -1160,24 +1160,12 @@ Gitlab.ee do

  Settings.duo_workflow.reverse_merge!(
    secure: true,
    service_url: nil, # service_url is constructued in Gitlab::DuoWorkflow::Client
    debug: false,
    executor_binary_url: "https://gitlab.com/api/v4/projects/58711783/packages/generic/duo-workflow-executor/#{executor_version}/duo-workflow-executor.tar.gz",
    executor_binary_urls: executor_binary_urls,
    executor_version: executor_version
  )

-  # Default to proxy via Cloud Connector
-  unless Settings.duo_workflow['service_url'].present?
-    cloud_connector_uri = URI.parse(Settings.cloud_connector.base_url)
-
-    # Cloudflare has been disabled untill
-    # gets resolved https://gitlab.com/gitlab-org/gitlab/-/issues/509586
-    # Settings.duo_workflow['service_url'] = "#{cloud_connector_uri.host}:#{cloud_connector_uri.port}"
-
-    service_url = "duo-workflow#{cloud_connector_uri.host.include?('staging') ? '.staging' : ''}.runway.gitlab.net:#{cloud_connector_uri.port}"
-    Settings.duo_workflow['service_url'] = service_url
-    Settings.duo_workflow['secure'] = cloud_connector_uri.scheme == 'https'
-  end
end

#

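With the Cloud Connector fallback block removed, only the reverse_merge! defaults above apply at boot, and the final service_url is resolved later in Gitlab::DuoWorkflow::Client (per the inline comment). A minimal sketch of what reverse_merge! does, using ActiveSupport's Hash#reverse_merge! with illustrative values:

    # Illustrative only, not part of the commit.
    require 'active_support/core_ext/hash/reverse_merge'

    settings = { 'secure' => false }
    settings.reverse_merge!('secure' => true, 'debug' => false)
    settings # => { 'secure' => false, 'debug' => false }
    # Existing keys win; reverse_merge! only fills in values that are missing.
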
@@ -47,4 +47,7 @@ session_store_class, options = Gitlab::Sessions::StoreBuilder.new(cookie_key, se
Rails.application.configure do
  config.session_store(session_store_class, **options)
  config.middleware.insert_after session_store_class, Gitlab::Middleware::UnauthenticatedSessionExpiry
+  config.action_dispatch.signed_cookie_salt = Settings['gitlab']['signed_cookie_salt'] || 'signed cookie'
+  config.action_dispatch.authenticated_encrypted_cookie_salt =
+    Settings['gitlab']['authenticated_encrypted_cookie_salt'] || 'authenticated encrypted cookie'
end

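A hedged note on the fallback these added lines implement: the literal strings 'signed cookie' and 'authenticated encrypted cookie' are Rails' own default salts, so instances that never set the new gitlab.yml keys keep the salts they were already using and existing cookies stay valid. Sketch with illustrative values:

    # Illustrative only, not part of the commit.
    overrides = Settings['gitlab'] || {}
    signed_salt    = overrides['signed_cookie_salt'] || 'signed cookie'
    encrypted_salt = overrides['authenticated_encrypted_cookie_salt'] || 'authenticated encrypted cookie'
    # With no overrides configured, both salts equal the Rails defaults above.
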
@@ -1,8 +0,0 @@
----
-migration_job_name: ResolveVulnerabilitiesForRemovedAnalyzers
-description: Resolves all detected vulnerabilities for removed analyzers.
-feature_category: static_application_security_testing
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/162691
-milestone: '17.4'
-queued_migration_version: 20240814085540
-finalized_by: 20241015185528

@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddNamespaceIdToSentNotifications < Gitlab::Database::Migration[2.3]
+  milestone '18.0'
+
+  def change
+    add_column :sent_notifications, :namespace_id, :bigint, null: false, default: 0 # rubocop:disable Migration/PreventAddingColumns -- Sharding key is a permitted exception
+  end
+end

@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-class QueueResolveVulnerabilitiesForRemovedAnalyzers < Gitlab::Database::Migration[2.2]
-  milestone '17.3'
-
-  restrict_gitlab_migration gitlab_schema: :gitlab_main
-
-  MIGRATION = "ResolveVulnerabilitiesForRemovedAnalyzers"
-  DELAY_INTERVAL = 2.minutes
-  BATCH_SIZE = 10_000
-  SUB_BATCH_SIZE = 100
-
-  def up
-    # no-op because there was a bug in the original migration, which has been
-    # fixed in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/162527
-  end
-
-  def down
-    # no-op because there was a bug in the original migration, which has been
-    # fixed in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/162527
-  end
-end

@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-class RequeueResolveVulnerabilitiesForRemovedAnalyzers < Gitlab::Database::Migration[2.2]
-  milestone '17.4'
-
-  restrict_gitlab_migration gitlab_schema: :gitlab_main
-
-  MIGRATION = "ResolveVulnerabilitiesForRemovedAnalyzers"
-  DELAY_INTERVAL = 2.minutes
-  BATCH_SIZE = 10_000
-  SUB_BATCH_SIZE = 100
-
-  def up
-    Gitlab::Database::QueryAnalyzers::Base.suppress_schema_issues_for_decomposed_tables do
-      # Clear previous background migration execution from QueueResolveVulnerabilitiesForRemovedAnalyzers
-      delete_batched_background_migration(MIGRATION, :vulnerability_reads, :id, [])
-
-      queue_batched_background_migration(
-        MIGRATION,
-        :vulnerability_reads,
-        :id,
-        job_interval: DELAY_INTERVAL,
-        batch_size: BATCH_SIZE,
-        sub_batch_size: SUB_BATCH_SIZE
-      )
-    end
-  end
-
-  def down
-    Gitlab::Database::QueryAnalyzers::Base.suppress_schema_issues_for_decomposed_tables do
-      delete_batched_background_migration(MIGRATION, :vulnerability_reads, :id, [])
-    end
-  end
-end

@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-class FinalizeResolveVulnerabilitiesForRemovedAnalyzers < Gitlab::Database::Migration[2.2]
-  milestone '17.6'
-
-  disable_ddl_transaction!
-
-  restrict_gitlab_migration gitlab_schema: :gitlab_main
-
-  def up
-    ensure_batched_background_migration_is_finished(
-      job_class_name: 'ResolveVulnerabilitiesForRemovedAnalyzers',
-      table_name: 'vulnerability_reads',
-      column_name: 'id',
-      job_arguments: [],
-      finalize: true
-    )
-  end
-
-  def down
-    # no-op
-  end
-end

@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class PrepareAsyncIndexSentNotificationsNamespaceId < Gitlab::Database::Migration[2.3]
+  milestone '18.0'
+
+  INDEX_NAME = 'index_sent_notifications_on_namespace_id'
+
+  # TODO: Index to be created synchronously in https://gitlab.com/gitlab-org/gitlab/-/work_items/541120
+  def up
+    prepare_async_index :sent_notifications, :namespace_id, name: INDEX_NAME # rubocop:disable Migration/PreventIndexCreation -- Necessary for sharding key
+  end
+
+  def down
+    unprepare_async_index :sent_notifications, :namespace_id, name: INDEX_NAME
+  end
+end

@@ -1 +0,0 @@
-f58efcd84729c16298463f6b77e7a804439bd16c951458eebbb00fa4831a961b

@@ -1 +0,0 @@
-df83bdb3424f3256eacceaacfcd767b6d08905b6c278c9a5824b3d04c9c26394

@@ -1 +0,0 @@
-fb2e6ff53e5e5e6f0ddc8c975222b7acce5f2a32de5efe1b23457c7d6b1359f9

@@ -0,0 +1 @@
+2e93d0470c76f2a6c84447aa36492ce0b0f58f04c180e0a4d8a51662cb712f20

@@ -0,0 +1 @@
+7df199b4913e1d7f7a9cf58082c5d66c9c8808a2372f9036617dacf42754c46b

@@ -22781,7 +22781,8 @@ CREATE TABLE sent_notifications (
    in_reply_to_discussion_id character varying,
    id bigint NOT NULL,
    issue_email_participant_id bigint,
-    created_at timestamp with time zone NOT NULL
+    created_at timestamp with time zone NOT NULL,
+    namespace_id bigint DEFAULT 0 NOT NULL
);

CREATE SEQUENCE sent_notifications_id_seq

@@ -0,0 +1,17 @@
+---
+name: gitlab_base.NonStandardHyphens
+description: |
+  Do not use non-standard dashes or hyphens. Use standard hyphen ("minus"), separate sentences, or commas instead:
+  - U+2010: HYPHEN
+  - U+2011: NON-BREAKING HYPHEN
+  - U+2013: EN DASH
+  - U+2014: EM DASH
+extends: existence
+message: "Do not use non-standard dashes or hyphens. Use standard hyphen ('minus'), separate sentences, or commas instead"
+vocab: false
+nonword: true
+level: warning
+link: https://docs.gitlab.com/development/documentation/styleguide/#punctuation
+scope: text
+raw:
+  - '[\u2010\u2011\u2013\u2014]'

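A quick way to see what the raw pattern in this new Vale rule flags, sketched in Ruby (the rule itself runs under Vale; the strings are illustrative, and the documentation hunks below replace exactly these characters):

    # Illustrative only, not part of the commit.
    NON_STANDARD_DASHES = /[\u2010\u2011\u2013\u2014]/
    'load‐balancing'.match?(NON_STANDARD_DASHES) # => true  (U+2010 hyphen, flagged)
    'load-balancing'.match?(NON_STANDARD_DASHES) # => false (ASCII hyphen-minus, allowed)
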
@@ -51,14 +51,14 @@ from [owasp.org](https://owasp.org/).

## End-Users

-### Who are the application's end‐users?
+### Who are the application's end-users?

- **Secondary** sites are created in regions that are distant (in terms of
  Internet latency) from the main GitLab installation (the **primary** site). They are
  intended to be used by anyone who would ordinarily use the **primary** site, who finds
  that the **secondary** site is closer to them (in terms of Internet latency).

-### How do the end‐users interact with the application?
+### How do the end-users interact with the application?

- **Secondary** sites provide all the interfaces a **primary** site does
  (notably a HTTP/HTTPS web application, and HTTP/HTTPS or SSH Git repository

@@ -67,7 +67,7 @@ from [owasp.org](https://owasp.org/).
  **primary** site, but end-users may use the GitLab web interface to view information like projects,
  issues, merge requests, and snippets.

-### What security expectations do the end‐users have?
+### What security expectations do the end-users have?

- The replication process must be secure. It would typically be unacceptable to
  transmit the entire database contents or all files and repositories across the

@@ -98,7 +98,7 @@ from [owasp.org](https://owasp.org/).

## Network

-### What details regarding routing, switching, firewalling, and load‐balancing have been defined?
+### What details regarding routing, switching, firewalling, and load-balancing have been defined?

- Geo requires the **primary** site and **secondary** site to be able to communicate with each
  other across a TCP/IP network. In particular, the **secondary** sites must be able to

@@ -130,7 +130,7 @@ from [owasp.org](https://owasp.org/).
  [GitLab installation](https://about.gitlab.com/install/) page for more
  details), however we recommend using the operating systems listed in the [Geo documentation](../_index.md#requirements-for-running-geo).

-### What details regarding required OS components and lock‐down needs have been defined?
+### What details regarding required OS components and lock-down needs have been defined?

- The supported Linux package installation method packages most components itself.
- There are significant dependencies on the system-installed OpenSSH daemon (Geo

@@ -42355,6 +42355,7 @@ The category of the additional context.
| <a id="aiadditionalcontextcategoryissue"></a>`ISSUE` | Issue content category. |
| <a id="aiadditionalcontextcategorylocal_git"></a>`LOCAL_GIT` | Local_git content category. |
| <a id="aiadditionalcontextcategorymerge_request"></a>`MERGE_REQUEST` | Merge_request content category. |
| <a id="aiadditionalcontextcategoryrepository"></a>`REPOSITORY` | Repository content category. |
| <a id="aiadditionalcontextcategorysnippet"></a>`SNIPPET` | Snippet content category. |
+| <a id="aiadditionalcontextcategoryterminal"></a>`TERMINAL` | Terminal content category. |

@@ -177,6 +177,7 @@ unit_primitives:
  - include_merge_request_context
  - include_snippet_context
+  - include_terminal_context
  - include_repository_context
  - refactor_code
  - write_tests
```

@@ -122,6 +122,10 @@ has an existing membership in the destination namespace with a [higher role](../
the one being mapped, the membership is mapped as a direct membership instead. This ensures the member does not get
elevated permissions.

+[In GitLab 18.0 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/510673),
+for top-level groups with at least one enterprise user, you can map
+contributions and memberships only to [enterprise users](../../enterprise_user/_index.md).
+
{{< alert type="note" >}}

There is a [known issue](_index.md#known-issues) affecting the mapping of shared memberships.
|
||||
|
|
|
|||
|
|
@ -1,420 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# rubocop:disable Metrics/ClassLength -- we need to keep the logic in a single class
|
||||
# rubocop:disable Gitlab/BulkInsert -- we want to use ApplicationRecord.legacy_bulk_insert so we execute raw SQL
|
||||
module Gitlab
|
||||
module BackgroundMigration
|
||||
class ResolveVulnerabilitiesForRemovedAnalyzers < BatchedMigrationJob
|
||||
operation_name :resolve_vulnerabilities_for_removed_analyzers
|
||||
feature_category :static_application_security_testing
|
||||
|
||||
RESOLVED_VULNERABILITY_COMMENT =
|
||||
'This vulnerability was automatically resolved because it was created by an analyzer that has ' \
|
||||
'been removed from GitLab SAST.'
|
||||
REMOVED_SCANNERS = %w[
|
||||
eslint
|
||||
gosec
|
||||
bandit
|
||||
security_code_scan
|
||||
brakeman
|
||||
flawfinder
|
||||
mobsf
|
||||
njsscan
|
||||
nodejs-scan
|
||||
nodejs_scan
|
||||
phpcs_security_audit
|
||||
].index_with { true }
|
||||
|
||||
module Migratable
|
||||
module Enums
|
||||
module Vulnerability
|
||||
VULNERABILITY_STATES = {
|
||||
detected: 1,
|
||||
confirmed: 4,
|
||||
resolved: 3,
|
||||
dismissed: 2
|
||||
}.freeze
|
||||
|
||||
SEVERITY_LEVELS = {
|
||||
info: 1,
|
||||
unknown: 2,
|
||||
low: 4,
|
||||
medium: 5,
|
||||
high: 6,
|
||||
critical: 7
|
||||
}.freeze
|
||||
|
||||
def self.severity_levels
|
||||
SEVERITY_LEVELS
|
||||
end
|
||||
|
||||
def self.vulnerability_states
|
||||
VULNERABILITY_STATES
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
module Vulnerabilities
|
||||
class Feedback < ApplicationRecord
|
||||
self.table_name = "vulnerability_feedback"
|
||||
|
||||
enum :feedback_type, { dismissal: 0, issue: 1, merge_request: 2 }, prefix: :for
|
||||
end
|
||||
|
||||
class Read < ApplicationRecord
|
||||
self.table_name = "vulnerability_reads"
|
||||
end
|
||||
|
||||
class Statistic < ApplicationRecord
|
||||
self.table_name = 'vulnerability_statistics'
|
||||
|
||||
enum :letter_grade, { a: 0, b: 1, c: 2, d: 3, f: 4 }
|
||||
end
|
||||
|
||||
module Statistics
|
||||
class UpdateService
|
||||
# subtract the severity counts for the number of vulnerabilities
|
||||
# being resolved from the existing severity counts, and use that
|
||||
# to determine the letter grade.
|
||||
LETTER_GRADE_SQL = <<~SQL.freeze
|
||||
CASE
|
||||
WHEN critical - %{critical} > 0 THEN
|
||||
#{Migratable::Vulnerabilities::Statistic.letter_grades[:f]}
|
||||
-- high is high + unknown
|
||||
-- see https://gitlab.com/gitlab-org/gitlab/blob/18dc5fe8566e/ee/app/services/vulnerabilities/statistics/update_service.rb#L10
|
||||
WHEN high + unknown - (%{high} + %{unknown}) > 0 THEN
|
||||
#{Migratable::Vulnerabilities::Statistic.letter_grades[:d]}
|
||||
WHEN medium - %{medium} > 0 THEN
|
||||
#{Migratable::Vulnerabilities::Statistic.letter_grades[:c]}
|
||||
WHEN low - %{low} > 0 THEN
|
||||
#{Migratable::Vulnerabilities::Statistic.letter_grades[:b]}
|
||||
ELSE
|
||||
#{Migratable::Vulnerabilities::Statistic.letter_grades[:a]}
|
||||
END
|
||||
SQL
|
||||
|
||||
# this implementation differs from https://gitlab.com/gitlab-org/gitlab/blob/18dc5fe8566e/ee/app/services/vulnerabilities/statistics/update_service.rb#L21-27
|
||||
# in that we only update records here, we don't insert new vulnerability_statistics records.
|
||||
#
|
||||
# The reason why we only update records is the following:
|
||||
#
|
||||
# 1. The original Vulnerabilities::Statistics::UpdateService code is called for new
|
||||
# projects that don't yet have any vulnerabilities, and therefore doesn't have any
|
||||
# vulnerability_statistics records.
|
||||
#
|
||||
# However, in our case, we're resolving vulnerabilities that already exist
|
||||
# for a given project, so we can assume that a vulnerability_statistics record
|
||||
# must also exist, because a vulnerability_statistics record is created when
|
||||
# a vulnerability is created. I've also verified this fact on production data.
|
||||
#
|
||||
# 2. Even if we were to create a new vulnerability_statistics record, it wouldn't
|
||||
# make sense, because if we're resolving 20 critical vulnerabilities, we can't
|
||||
# create a vulnerability_statistics record with `critical: -20` since statistics
|
||||
# shouldn't be negative. At best, we could initialize the vulnerability_statistics
|
||||
# record to contain zero for every severity level.
|
||||
UPDATE_SQL = <<~SQL
|
||||
UPDATE vulnerability_statistics
|
||||
SET %{update_values}, letter_grade = (%{letter_grade}), updated_at = now()
|
||||
WHERE project_id = %{project_id}
|
||||
SQL
|
||||
|
||||
def self.update_for(vulnerability_tuples)
|
||||
new(vulnerability_tuples).execute
|
||||
end
|
||||
|
||||
def initialize(vulnerability_tuples)
|
||||
self.changes_by_project = group_changes_by_project(vulnerability_tuples)
|
||||
end
|
||||
|
||||
# Groups severity count changes by project and executes a single update statement
|
||||
# for each project. In the worst case, where every severity count change belongs
|
||||
# to a different project, we'll end up executing SUB_BATCH_SIZE updates (currently
|
||||
# set to 100 in QueueResolveVulnerabilitiesForRemovedAnalyzers::SUB_BATCH_SIZE) and
|
||||
# in the best case, where all 100 changes belong to the same project, we'll execute
|
||||
# a single update statement.
|
||||
def execute
|
||||
changes_by_project.each do |changes|
|
||||
connection.execute(update_sql(changes))
|
||||
end
|
||||
end
|
||||
|
||||
# Groups vulnerability changes by project and aggregates the severity counts for each project.
|
||||
#
|
||||
# This method takes an array of vulnerability tuples and returns an array of hashes,
|
||||
# where each hash contains a project_id and counts of severities grouped by severity level.
|
||||
#
|
||||
# @param vulnerability_tuples [Array<Hash>] An array of vulnerability tuples.
|
||||
#
|
||||
# Each tuple is a hash with keys:
|
||||
#
|
||||
# - :vulnerability_id [Integer]
|
||||
# - :project_id [Integer]
|
||||
# - :namespace_id [Integer]
|
||||
# - :severity [Integer]
|
||||
# - :uuid [String]
|
||||
#
|
||||
# @return [Array<Hash>] an array of hashes, where each hash represents a project with its ID and a
|
||||
# hash of severity counts.
|
||||
#
|
||||
# The format of the returned array of hashes is:
|
||||
# [
|
||||
# {
|
||||
# project_id: Integer,
|
||||
# severity_counts: {
|
||||
# info: Integer,
|
||||
# unknown: Integer,
|
||||
# low: Integer,
|
||||
# medium: Integer,
|
||||
# high: Integer,
|
||||
# critical: Integer,
|
||||
# total: Integer
|
||||
# }
|
||||
# },
|
||||
# ...
|
||||
# ]
|
||||
#
|
||||
# Keys for zero-value severity counts will be omitted
|
||||
#
|
||||
# @example
|
||||
# vulnerability_tuples = [
|
||||
# { vulnerability_id: 145, project_id: 10, namespace_id: 19, severity: 7, uuid: 'abc-1234' },
|
||||
# { vulnerability_id: 146, project_id: 10, namespace_id: 19, severity: 7, uuid: 'abc-1234' },
|
||||
# { vulnerability_id: 147, project_id: 10, namespace_id: 19, severity: 4, uuid: 'abc-1234' },
|
||||
# { vulnerability_id: 148, project_id: 11, namespace_id: 19, severity: 7, uuid: 'abc-1234' },
|
||||
# { vulnerability_id: 149, project_id: 11, namespace_id: 19, severity: 7, uuid: 'abc-1234' },
|
||||
# { vulnerability_id: 150, project_id: 12, namespace_id: 19, severity: 4, uuid: 'abc-1234' },
|
||||
# { vulnerability_id: 151, project_id: 12, namespace_id: 19, severity: 5, uuid: 'abc-1234' },
|
||||
# { vulnerability_id: 152, project_id: 12, namespace_id: 19, severity: 6, uuid: 'abc-1234' }
|
||||
# ]
|
||||
#
|
||||
# group_changes_by_project(vulnerability_tuples)
|
||||
# => [
|
||||
# {
|
||||
# project_id: 10,
|
||||
# severity_counts: { critical: 2, low: 1, total: 3 }
|
||||
# },
|
||||
# {
|
||||
# project_id: 11,
|
||||
# severity_counts: { critical: 2, total: 2 }
|
||||
# },
|
||||
# {
|
||||
# project_id: 12,
|
||||
# severity_counts: { high: 1, medium: 1, low: 1, total: 3 }
|
||||
# }
|
||||
# ]
|
||||
def group_changes_by_project(vulnerability_tuples)
|
||||
severity_levels = Migratable::Enums::Vulnerability.severity_levels
|
||||
|
||||
vulnerability_tuples.group_by { |tuple| tuple[:project_id] }.map do |project_id, tuples|
|
||||
changes_hash = tuples.each_with_object(Hash.new(0)) do |tuple, counts|
|
||||
severity = severity_levels.key(tuple[:severity])
|
||||
counts[severity] += 1 if severity
|
||||
end
|
||||
changes_hash[:total] = changes_hash.values.sum
|
||||
{ project_id: project_id, severity_counts: changes_hash }
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_accessor :changes_by_project
|
||||
|
||||
delegate :connection, to: Migratable::Vulnerabilities::Statistic, private: true
|
||||
delegate :quote, :quote_column_name, to: :connection, private: true
|
||||
|
||||
def update_sql(changes)
|
||||
format(
|
||||
UPDATE_SQL,
|
||||
project_id: changes[:project_id],
|
||||
letter_grade: letter_grade(changes[:severity_counts]),
|
||||
update_values: update_values(changes[:severity_counts])
|
||||
)
|
||||
end
|
||||
|
||||
def letter_grade(severity_counts)
|
||||
format(
|
||||
LETTER_GRADE_SQL,
|
||||
critical: severity_counts[:critical],
|
||||
high: severity_counts[:high],
|
||||
unknown: severity_counts[:unknown],
|
||||
medium: severity_counts[:medium],
|
||||
low: severity_counts[:low]
|
||||
)
|
||||
end
|
||||
|
||||
# when vulnerabilities are resolved, they're no longer considered a threat,
|
||||
# so we want to decrement the number of vulnerabilities matching the severity
|
||||
# level from the vulnerability_statistics table, as well as the total number of
|
||||
# vulnerabilities. We use GREATEST to ensure that we don't end up with a
|
||||
# negative value for any of these counts.
|
||||
#
|
||||
# For example, if we have the following vulnerability_statistics record:
|
||||
#
|
||||
# { project_id: 1, total: 11, critical: 4, medium: 6, low: 1 }
|
||||
#
|
||||
# and the following severity_counts
|
||||
#
|
||||
# { total: 9, critical: 4, medium: 5 }
|
||||
#
|
||||
# then we'll subtract the above severity_counts from the vulnerability_statistics
|
||||
# record and will end up with the following:
|
||||
#
|
||||
# { project_id: 1, total: 2, critical: 0, medium: 1, low: 1 }
|
||||
def update_values(severity_counts)
|
||||
severity_counts.map do |severity, count|
|
||||
column_name = quote_column_name(severity)
|
||||
quoted_value = quote(count)
|
||||
"#{column_name} = GREATEST(#{column_name} - #{quoted_value}, 0)"
|
||||
end.join(', ')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
scope_to ->(relation) do
|
||||
relation.where(state: [Migratable::Enums::Vulnerability.vulnerability_states[:detected]])
|
||||
end
|
||||
|
||||
def perform
|
||||
user_id = Users::Internal.security_bot.id
|
||||
|
||||
each_sub_batch do |sub_batch|
|
||||
cte = Gitlab::SQL::CTE.new(:batched_relation, sub_batch.limit(100))
|
||||
|
||||
filtered_batch = cte
|
||||
.apply_to(Migratable::Vulnerabilities::Read.all)
|
||||
.joins('INNER JOIN vulnerability_scanners ON vulnerability_scanners.id = vulnerability_reads.scanner_id')
|
||||
.where('vulnerability_scanners.external_id': REMOVED_SCANNERS.keys)
|
||||
|
||||
vulnerability_tuples = values_for_fields(
|
||||
filtered_batch, :vulnerability_id, 'vulnerability_reads.project_id', :namespace_id, :severity, :uuid
|
||||
)
|
||||
|
||||
connection.transaction do
|
||||
perform_bulk_writes(user_id, vulnerability_tuples)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def values_for_fields(relation, *field_names)
|
||||
relation.select(*field_names).map do |field|
|
||||
field.attributes.except('id').with_indifferent_access
|
||||
end
|
||||
end
|
||||
|
||||
def perform_bulk_writes(user_id, vulnerability_tuples)
|
||||
return if vulnerability_tuples.empty?
|
||||
|
||||
vulnerability_ids = vulnerability_tuples.pluck(:vulnerability_id)
|
||||
|
||||
bulk_resolve(vulnerability_ids, user_id)
|
||||
bulk_create_state_transitions(vulnerability_ids, user_id)
|
||||
bulk_remove_dismissal_reason(vulnerability_ids)
|
||||
bulk_create_system_note_with_metadata(vulnerability_tuples, user_id)
|
||||
bulk_update_vulnerability_statistics(vulnerability_tuples)
|
||||
bulk_destroy_dismissal_feedback(vulnerability_tuples)
|
||||
end
|
||||
|
||||
# https://gitlab.com/gitlab-org/gitlab/blob/18dc5fe8566e/ee/app/services/vulnerabilities/base_service.rb#L26
|
||||
def bulk_resolve(vulnerability_ids, user_id)
|
||||
connection.execute(<<~SQL)
|
||||
UPDATE vulnerabilities SET
|
||||
state = #{Migratable::Enums::Vulnerability.vulnerability_states[:resolved]},
|
||||
resolved_by_id = #{user_id},
|
||||
resolved_at = now()
|
||||
WHERE vulnerabilities.id in (#{vulnerability_ids.join(',')})
|
||||
SQL
|
||||
end
|
||||
|
||||
# https://gitlab.com/gitlab-org/gitlab/blob/18dc5fe8566e/ee/app/services/vulnerabilities/base_state_transition_service.rb#L15
|
||||
def bulk_create_state_transitions(vulnerability_ids, user_id)
|
||||
current_time = Time.current
|
||||
|
||||
rows = vulnerability_ids.map do |vulnerability_id|
|
||||
{
|
||||
vulnerability_id: vulnerability_id,
|
||||
from_state: Migratable::Enums::Vulnerability.vulnerability_states[:detected],
|
||||
to_state: Migratable::Enums::Vulnerability.vulnerability_states[:resolved],
|
||||
created_at: current_time,
|
||||
updated_at: current_time,
|
||||
author_id: user_id,
|
||||
comment: RESOLVED_VULNERABILITY_COMMENT
|
||||
}
|
||||
end
|
||||
|
||||
ApplicationRecord.legacy_bulk_insert('vulnerability_state_transitions', rows)
|
||||
end
|
||||
|
||||
# https://gitlab.com/gitlab-org/gitlab/blob/18dc5fe8566e/ee/app/services/vulnerabilities/base_state_transition_service.rb#L37
|
||||
def bulk_remove_dismissal_reason(vulnerability_ids)
|
||||
connection.execute(<<~SQL)
|
||||
UPDATE vulnerability_reads SET
|
||||
dismissal_reason = NULL
|
||||
WHERE vulnerability_reads.vulnerability_id in (#{vulnerability_ids.join(',')})
|
||||
SQL
|
||||
end
|
||||
|
||||
# https://gitlab.com/gitlab-org/gitlab/blob/18dc5fe8566e/app/models/discussion.rb#L71-71
|
||||
def discussion_id(vulnerability_id)
|
||||
# rubocop:disable Fips/SHA1 -- disable this cop to maintain parity with app/models/discussion.rb
|
||||
# a valid discussion_id is required for responding to vulnerability comments
|
||||
Digest::SHA1.hexdigest("discussion-vulnerability-#{vulnerability_id}-#{SecureRandom.hex}")
|
||||
# rubocop:enable Fips/SHA1
|
||||
end
|
||||
|
||||
# https://gitlab.com/gitlab-org/gitlab/blob/18dc5fe8566e/app/services/system_notes/base_service.rb#L19
|
||||
def bulk_create_system_note_with_metadata(vulnerability_tuples, user_id)
|
||||
current_time = Time.current
|
||||
|
||||
system_notes_rows = vulnerability_tuples.map do |vulnerability_id_tuple|
|
||||
{
|
||||
note: RESOLVED_VULNERABILITY_COMMENT,
|
||||
noteable_type: 'Vulnerability',
|
||||
author_id: user_id,
|
||||
created_at: current_time,
|
||||
updated_at: current_time,
|
||||
project_id: vulnerability_id_tuple[:project_id],
|
||||
noteable_id: vulnerability_id_tuple[:vulnerability_id],
|
||||
system: 'TRUE',
|
||||
discussion_id: discussion_id(vulnerability_id_tuple[:vulnerability_id]),
|
||||
namespace_id: vulnerability_id_tuple[:namespace_id]
|
||||
}
|
||||
end
|
||||
|
||||
system_note_ids = ApplicationRecord.legacy_bulk_insert('notes', system_notes_rows, return_ids: true)
|
||||
|
||||
system_note_metadata_rows = system_note_ids.map do |system_note_id|
|
||||
{
|
||||
action: 'vulnerability_resolved',
|
||||
created_at: current_time,
|
||||
updated_at: current_time,
|
||||
note_id: system_note_id
|
||||
}
|
||||
end
|
||||
|
||||
ApplicationRecord.legacy_bulk_insert('system_note_metadata', system_note_metadata_rows)
|
||||
end
|
||||
|
||||
# https://gitlab.com/gitlab-org/gitlab/blob/18dc5fe8566e/ee/app/services/vulnerabilities/base_service.rb#L22
|
||||
def bulk_update_vulnerability_statistics(vulnerability_tuples)
|
||||
Migratable::Vulnerabilities::Statistics::UpdateService.update_for(vulnerability_tuples)
|
||||
end
|
||||
|
||||
# https://gitlab.com/gitlab-org/gitlab/blob/18dc5fe8566e/ee/app/services/vulnerabilities/resolve_service.rb#L11
|
||||
def bulk_destroy_dismissal_feedback(vulnerability_tuples)
|
||||
uuid_values = vulnerability_tuples.pluck(:uuid).map { |uuid| connection.quote(uuid) }.join(',')
|
||||
|
||||
connection.execute(<<~SQL)
|
||||
DELETE FROM vulnerability_feedback
|
||||
WHERE feedback_type = #{Migratable::Vulnerabilities::Feedback.feedback_types[:dismissal]}
|
||||
AND finding_uuid IN (#{uuid_values})
|
||||
SQL
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
# rubocop:enable Metrics/ClassLength
|
||||
# rubocop:enable Gitlab/BulkInsert
|
||||
|
|
@@ -10,7 +10,7 @@ module Gitlab
      attach_to :active_record

      DB_COUNTERS = %i[count write_count cached_count txn_count].freeze
-      SQL_COMMANDS_WITH_COMMENTS_REGEX = %r{\A(/\*.*\*/\s)?((?!(.*[^\w'"](DELETE|UPDATE|INSERT INTO)[^\w'"])))(WITH.*)?(SELECT)((?!(FOR UPDATE|FOR SHARE)).)*$}i
+      SQL_COMMANDS_WITH_COMMENTS_REGEX = %r{\A(?:/\*.*\*/\s)?(?!.*[^\w'"](?:DELETE|UPDATE|INSERT INTO)[^\w'"])(?:WITH.*)?SELECT(?!.*(?:FOR UPDATE|FOR SHARE))}i

      SQL_DURATION_BUCKET = [0.05, 0.1, 0.25].freeze
      TRANSACTION_DURATION_BUCKET = [0.1, 0.25, 1].freeze

@@ -19,7 +19,7 @@ module Gitlab
      DB_LOAD_BALANCING_COUNTERS = %i[txn_count count write_count cached_count wal_count wal_cached_count].freeze
      DB_LOAD_BALANCING_DURATIONS = %i[txn_max_duration_s txn_duration_s duration_s].freeze

-      SQL_WAL_LOCATION_REGEX = /(pg_current_wal_insert_lsn\(\)::text|pg_last_wal_replay_lsn\(\)::text)/
+      SQL_WAL_LOCATION_REGEX = /pg_current_wal_insert_lsn\(\)::text|pg_last_wal_replay_lsn\(\)::text/

      # This event is published from ActiveRecordBaseTransactionMetrics and
      # used to record a database transaction duration when calling

@@ -103,7 +103,7 @@ module Gitlab
      private

      def wal_command?(payload)
-        payload[:sql].match(SQL_WAL_LOCATION_REGEX)
+        payload[:sql].match?(SQL_WAL_LOCATION_REGEX)
      end

      def increment_db_role_counters(db_role, payload, cached_query:, select_sql_command:)

@@ -143,7 +143,7 @@ module Gitlab
      end

      def select_sql_command?(payload)
-        payload[:sql].match(SQL_COMMANDS_WITH_COMMENTS_REGEX)
+        payload[:sql].match?(SQL_COMMANDS_WITH_COMMENTS_REGEX)
      end

      def increment(counter, db_config_name:, db_role: nil)
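Both select_sql_command? and wal_command? only need a boolean, so switching from match to match? avoids allocating a MatchData object on every SQL notification. A minimal sketch of the difference (standard Ruby semantics, illustrative input):

    # Illustrative only, not part of the commit.
    sql = 'SELECT id FROM projects'
    sql.match(/\ASELECT/i)  # => #<MatchData "SELECT"> (allocates MatchData, sets $~)
    sql.match?(/\ASELECT/i) # => true (boolean only, no allocation)
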
|
|||
|
|
@ -0,0 +1,70 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# Usage examples:
|
||||
#
|
||||
# For a specific file:
|
||||
# ```
|
||||
# bundle exec rake "product_usage_data:format[log/product_usage_data.log]"
|
||||
# ```
|
||||
#
|
||||
# For standard input (pipe data to it):
|
||||
# ```
|
||||
# cat log/product_usage_data.log | bundle exec rake product_usage_data:format
|
||||
# ```
|
||||
#
|
||||
# For continuous monitoring:
|
||||
# ```
|
||||
# tail -f log/product_usage_data.log | bundle exec rake product_usage_data:format
|
||||
# ```
|
||||
#
|
||||
# Motivation:
|
||||
# This task formats GitLab product usage data event logs for better readability.
|
||||
# It pretty-prints the JSON and decodes the base64 encoded "cx" field in the payload.
|
||||
# The task reads from files provided as arguments or from standard input.
|
||||
#
|
||||
namespace :product_usage_data do
|
||||
desc 'Format GitLab product usage data event logs for better readability'
|
||||
task :format, :file_path do |_t, args|
|
||||
# Handle file path argument or use standard input
|
||||
if args[:file_path]
|
||||
input = File.open(args[:file_path], 'r')
|
||||
else
|
||||
puts "No file specified, reading from standard input. Press Ctrl+D when finished."
|
||||
input = $stdin
|
||||
end
|
||||
|
||||
# rubocop:disable Gitlab/Json -- Speed of loading the full environment isn't worthwhile
|
||||
input.each_line do |line|
|
||||
# Parse the outer JSON
|
||||
data = JSON.parse(line.strip)
|
||||
|
||||
# Parse the payload JSON string
|
||||
if data["payload"] && data["payload"].start_with?('{')
|
||||
payload = JSON.parse(data["payload"])
|
||||
|
||||
# Decode the cx field if it exists
|
||||
if payload["cx"]
|
||||
begin
|
||||
decoded_cx = JSON.parse(Base64.decode64(payload["cx"]))
|
||||
payload["cx"] = decoded_cx
|
||||
rescue StandardError
|
||||
# Ignore the error and use the original value
|
||||
end
|
||||
end
|
||||
|
||||
# Replace the original payload with the parsed version
|
||||
data["payload"] = payload
|
||||
end
|
||||
|
||||
# Pretty print the result
|
||||
puts JSON.pretty_generate(data)
|
||||
rescue StandardError => e
|
||||
puts "Error processing line: #{e.message}"
|
||||
puts line
|
||||
end
|
||||
# rubocop:enable Gitlab/Json
|
||||
|
||||
# Close the file if we opened one
|
||||
input.close if args[:file_path]
|
||||
end
|
||||
end
|
||||
|
|
@@ -66060,6 +66060,9 @@ msgstr ""
msgid "UserMapping|You can assign active users with regular, auditor, or administrator access only."
msgstr ""

+msgid "UserMapping|You can assign only enterprise users in the top-level group you're importing to."
+msgstr ""
+
msgid "UserMapping|You can assign only users with linked SAML and SCIM identities. Ensure the user has signed into GitLab through your SAML SSO provider and has an active SCIM identity for this group."
msgstr ""

@@ -72557,6 +72560,9 @@ msgstr ""
msgid "math|Too many expansions. Consider using multiple math blocks."
msgstr ""

+msgid "max_resources_per_workspace must be an empty hash if shared_namespace is specified"
+msgstr ""
+
msgid "member"
msgid_plural "members"
msgstr[0] ""

@@ -180,7 +180,7 @@ RSpec.describe 'Database schema',
    repository_languages: %w[programming_language_id],
    routes: %w[source_id],
    security_findings: %w[project_id],
-    sent_notifications: %w[project_id noteable_id recipient_id commit_id in_reply_to_discussion_id],
+    sent_notifications: %w[project_id noteable_id recipient_id commit_id in_reply_to_discussion_id namespace_id], # namespace_id FK will be added after index creation
    slack_integrations: %w[team_id user_id bot_user_id], # these are external Slack IDs
    snippets: %w[author_id],
    spam_logs: %w[user_id],

@@ -61,4 +61,29 @@ RSpec.describe 'Session initializer for GitLab' do
      end
    end
  end
+
+  describe 'cookie salt settings' do
+    context 'with default settings' do
+      it 'sets signed_cookie_salt and authenticated_encrypted_cookie_salt to default value' do
+        load_session_store
+        expect(subject.action_dispatch.signed_cookie_salt).to eql('signed cookie')
+        expect(subject.action_dispatch.authenticated_encrypted_cookie_salt).to eql('authenticated encrypted cookie')
+      end
+    end
+
+    context 'with custom settings' do
+      before do
+        allow(Settings).to receive(:[]).with('gitlab').and_return({
+          'signed_cookie_salt' => 'custom signed salt',
+          'authenticated_encrypted_cookie_salt' => 'custom encrypted salt'
+        })
+      end
+
+      it 'sets signed_cookie_salt and authenticated_encrypted_cookie_salt to custom values' do
+        load_session_store
+        expect(subject.action_dispatch.signed_cookie_salt).to eql('custom signed salt')
+        expect(subject.action_dispatch.authenticated_encrypted_cookie_salt).to eql('custom encrypted salt')
+      end
+    end
+  end
end
|
||||
|
|
|
|||
|
|
@ -1,390 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::BackgroundMigration::ResolveVulnerabilitiesForRemovedAnalyzers,
|
||||
schema: 20241015185528,
|
||||
feature_category: :static_application_security_testing do
|
||||
before(:all) do
|
||||
# This migration will not work if a sec database is configured. It should be finalized and removed prior to
|
||||
# sec db rollout.
|
||||
# Consult https://gitlab.com/gitlab-org/gitlab/-/merge_requests/171707 for more info.
|
||||
skip_if_multiple_databases_are_setup(:sec)
|
||||
end
|
||||
|
||||
let(:namespaces) { table(:namespaces) }
|
||||
let(:projects) { table(:projects) }
|
||||
let(:scanners) { table(:vulnerability_scanners) }
|
||||
let(:scanner) { scanners.create!(project_id: project.id, external_id: 'external_id', name: 'Test Scanner') }
|
||||
let(:vulnerabilities) { table(:vulnerabilities) }
|
||||
let(:vulnerability_reads) { table(:vulnerability_reads) }
|
||||
let(:vulnerability_feedback) { table(:vulnerability_feedback) }
|
||||
let(:vulnerability_state_transitions) { table(:vulnerability_state_transitions) }
|
||||
let(:vulnerability_statistics) { table(:vulnerability_statistics) }
|
||||
let(:notes) { table(:notes) }
|
||||
let(:system_note_metadata) { table(:system_note_metadata) }
|
||||
|
||||
let(:organization) { table(:organizations).create!(name: 'organization', path: 'organization') }
|
||||
let(:namespace) { namespaces.create!(name: 'user', path: 'user', organization_id: organization.id) }
|
||||
let(:project) do
|
||||
projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id, organization_id: organization.id)
|
||||
end
|
||||
|
||||
let(:vulnerability_resolved_by_user) { Users::Internal.security_bot }
|
||||
let(:vulnerability_created_by_user) do
|
||||
table(:users).create!(username: 'john_doe', email: 'johndoe@gitlab.com', projects_limit: 10)
|
||||
end
|
||||
|
||||
let(:mitigating_control_dismissal_reason) { 2 }
|
||||
let(:detected_state) { described_class::Migratable::Enums::Vulnerability.vulnerability_states[:detected] }
|
||||
let(:resolved_state) { described_class::Migratable::Enums::Vulnerability.vulnerability_states[:resolved] }
|
||||
|
||||
let(:sub_batch_size) { vulnerability_reads.count }
|
||||
let(:num_vulnerabilities) { vulnerabilities_to_resolve.length + vulnerabilities_not_to_resolve.length }
|
||||
|
||||
let(:removed_scanners) do
|
||||
%w[
|
||||
eslint
|
||||
gosec
|
||||
bandit
|
||||
security_code_scan
|
||||
brakeman
|
||||
flawfinder
|
||||
mobsf
|
||||
njsscan
|
||||
nodejs-scan
|
||||
nodejs_scan
|
||||
phpcs_security_audit
|
||||
]
|
||||
end
|
||||
|
||||
let(:active_scanners) do
|
||||
%w[
|
||||
semgrep
|
||||
gemnasium
|
||||
trivy
|
||||
gemnasium-maven
|
||||
]
|
||||
end
|
||||
|
||||
shared_context 'with vulnerability data' do
|
||||
let!(:vulnerabilities_to_resolve) do
|
||||
removed_scanners.map do |external_id|
|
||||
create_vulnerability(project_id: project.id, external_id: external_id)
|
||||
end
|
||||
end
|
||||
|
||||
let!(:vulnerabilities_not_to_resolve) do
|
||||
vulns = active_scanners.map do |external_id|
|
||||
create_vulnerability(project_id: project.id, external_id: external_id, severity: :medium)
|
||||
end
|
||||
|
||||
# append a removed scanner with a dismissed state, so it won't be processed
|
||||
vulns + [create_vulnerability(project_id: project.id, external_id: removed_scanners.first,
|
||||
severity: :medium, state: :dismissed)]
|
||||
end
|
||||
end
|
||||
|
||||
# use a method instead of a subject to avoid rspec memoization
|
||||
def perform_migration
|
||||
described_class.new(
|
||||
start_id: vulnerability_reads.minimum(:id),
|
||||
end_id: vulnerability_reads.maximum(:id),
|
||||
batch_table: :vulnerability_reads,
|
||||
batch_column: :id,
|
||||
sub_batch_size: sub_batch_size,
|
||||
pause_ms: 0,
|
||||
connection: ActiveRecord::Base.connection
|
||||
).perform
|
||||
end
|
||||
|
||||
describe "#perform", feature_category: :static_application_security_testing do
|
||||
include_context 'with vulnerability data'
|
||||
|
||||
context 'for vulnerability resolution' do
|
||||
it 'resolves vulnerabilities and vulnerability_reads for removed scanners' do
|
||||
count = vulnerabilities_to_resolve.length
|
||||
expect { perform_migration }.to change {
|
||||
vulnerabilities_to_resolve.map { |v| v[:vulnerability].reload.state }
|
||||
}
|
||||
.from([detected_state] * count).to([resolved_state] * count)
|
||||
.and change {
|
||||
vulnerabilities_to_resolve.map { |v| v[:vulnerability_read].reload.state }
|
||||
}
|
||||
.from([detected_state] * count).to([resolved_state] * count)
|
||||
|
||||
common_expected_attributes = {
|
||||
state: resolved_state,
|
||||
resolved_by_id: vulnerability_resolved_by_user.id,
|
||||
resolved_at: be_a_kind_of(Time)
|
||||
}
|
||||
|
||||
expected_vulnerabilities = vulnerabilities_to_resolve.map do
|
||||
have_attributes(**common_expected_attributes)
|
||||
end
|
||||
|
||||
expect(vulnerabilities.where(id: vulnerabilities_to_resolve.map { |v| v[:vulnerability].id }))
|
||||
.to contain_exactly(*expected_vulnerabilities)
|
||||
end
|
||||
|
      it 'does not resolve vulnerabilities or vulnerability_reads for active scanners' do
        expect { perform_migration }.to not_change {
          vulnerabilities_not_to_resolve.map { |v| v[:vulnerability].reload.state }
        }
          .and not_change { vulnerabilities_not_to_resolve.map { |v| v[:vulnerability_read].reload.state } }
      end

      context 'when the sub_batch size is 1' do
        let(:sub_batch_size) { 1 }

        it 'does not raise an exception' do
          expect { perform_migration }.not_to raise_error
        end
      end
    end

    context 'for vulnerability state transitions' do
      it 'creates vulnerability state transitions for the resolved vulnerabilities' do
        expect { perform_migration }.to change { vulnerability_state_transitions.count }
          .from(0).to(vulnerabilities_to_resolve.count)

        common_expected_attributes = {
          comment: described_class::RESOLVED_VULNERABILITY_COMMENT,
          from_state: detected_state,
          to_state: resolved_state,
          author_id: vulnerability_resolved_by_user.id,
          project_id: project.id,
          dismissal_reason: be_nil,
          created_at: be_a_kind_of(Time),
          updated_at: be_a_kind_of(Time)
        }

        expected_state_transitions = vulnerabilities_to_resolve.map do |vulnerability|
          have_attributes(**common_expected_attributes, vulnerability_id: vulnerability[:vulnerability].id)
        end

        expect(vulnerability_state_transitions.all).to contain_exactly(*expected_state_transitions)
      end
    end

    context 'for system notes' do
      it 'creates system notes for the resolved vulnerabilities' do
        expect { perform_migration }.to change { notes.count }
          .from(0).to(vulnerabilities_to_resolve.count)

        common_expected_attributes = {
          note: described_class::RESOLVED_VULNERABILITY_COMMENT,
          noteable_type: 'Vulnerability',
          author_id: vulnerability_resolved_by_user.id,
          created_at: be_a_kind_of(Time),
          updated_at: be_a_kind_of(Time),
          project_id: project.id,
          system: be_truthy,
          namespace_id: namespace.id,
          discussion_id: /[a-f0-9]{40}/
        }

        expected_notes = vulnerabilities_to_resolve.map do |vulnerability|
          have_attributes(**common_expected_attributes, noteable_id: vulnerability[:vulnerability].id)
        end

        expect(notes.all).to contain_exactly(*expected_notes)
      end

      it 'creates system note metadata for the resolved vulnerabilities' do
        expect { perform_migration }.to change { system_note_metadata.count }
          .from(0).to(vulnerabilities_to_resolve.count)

        common_expected_attributes = {
          action: 'vulnerability_resolved',
          created_at: be_a_kind_of(Time),
          updated_at: be_a_kind_of(Time)
        }

        expected_system_note_metadata = notes.all.map do |note|
          have_attributes(**common_expected_attributes, note_id: note.id)
        end

        expect(system_note_metadata.all).to contain_exactly(*expected_system_note_metadata)
      end
    end

    context 'for vulnerability_read dismissal_reason' do
      it 'nullifies the dismissal_reason of vulnerability_reads for removed scanners' do
        count = vulnerabilities_to_resolve.length
        expect { perform_migration }.to change {
          vulnerabilities_to_resolve.map { |v| v[:vulnerability_read].reload.dismissal_reason }
        }
          .from([mitigating_control_dismissal_reason] * count).to([nil] * count)
      end

      it 'does not alter the dismissal_reason of vulnerability_reads for active scanners' do
        count = vulnerabilities_not_to_resolve.length
        expect { perform_migration }.to not_change {
          vulnerabilities_not_to_resolve.map { |v| v[:vulnerability_read].reload.dismissal_reason }
        }
          .from([mitigating_control_dismissal_reason] * count)
      end
    end

    context 'for vulnerability_statistics' do
      context 'when there are no vulnerability_statistics records' do
        it 'does not create a vulnerability_statistics record' do
          expect { perform_migration }.not_to change { vulnerability_statistics.count }.from(0)
        end
      end

      context 'when there are vulnerability_statistics records' do
        before do
          vulnerability_statistics.create!(
            project_id: project.id,
            critical: vulnerabilities_to_resolve.length,
            medium: vulnerabilities_not_to_resolve.length,
            total: num_vulnerabilities,
            letter_grade: described_class::Migratable::Vulnerabilities::Statistic.letter_grades[:f]
          )
        end

        it 'subtracts the number of resolved vulnerabilities from the total number of vulnerabilities' do
          expect { perform_migration }.to change { vulnerability_statistics.first.reload.total }
            .from(num_vulnerabilities).to(vulnerabilities_not_to_resolve.length)
        end

        it 'subtracts the num of resolved vulnerabilities from the num of vulnerabilities for the severity level' do
          expect { perform_migration }.to change { vulnerability_statistics.first.reload.critical }
            .from(vulnerabilities_to_resolve.length).to(0)
        end

        it 'adjusts the letter_grade to reflect the current vulnerabilities' do
          expect { perform_migration }.to change { vulnerability_statistics.first.reload.letter_grade }
            .from(described_class::Migratable::Vulnerabilities::Statistic.letter_grades[:f])
            .to(described_class::Migratable::Vulnerabilities::Statistic.letter_grades[:c])
        end
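
        # The QueryRecorder comparisons below check that statistics are updated once
        # per project rather than once per resolved vulnerability.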
        context 'and the vulnerabilities to remove all belong to the same project' do
          it 'updates the vulnerability_statistics table in a single operation' do
            # warm the cache
            perform_migration

            removed_scanners.take(1).map do |external_id|
              create_vulnerability(project_id: project.id, external_id: external_id)
            end

            control = ActiveRecord::QueryRecorder.new { perform_migration }

            removed_scanners.map do |external_id|
              create_vulnerability(project_id: project.id, external_id: external_id)
            end

            expect(ActiveRecord::QueryRecorder.new { perform_migration }.count).to eq(control.count)
          end
        end

        context 'and the vulnerabilities to remove all belong to different projects' do
          it 'updates the vulnerability_statistics table in a separate operation for each project' do
            # warm the cache
            perform_migration

            removed_scanners.map do |external_id|
              create_vulnerability(project_id: project.id, external_id: external_id)
            end

            control = ActiveRecord::QueryRecorder.new { perform_migration }

            removed_scanners.map do |external_id|
              new_namespace = namespaces.create!(name: 'user', path: 'user', organization_id: organization.id)

              new_project = projects.create!(
                namespace_id: new_namespace.id,
                project_namespace_id: new_namespace.id,
                organization_id: organization.id
              )
              create_vulnerability(project_id: new_project.id, external_id: external_id)
            end

            expect(ActiveRecord::QueryRecorder.new { perform_migration }.count)
              .to eq(control.count + removed_scanners.count - 1)
          end
        end
      end
    end

    context 'for vulnerability_feedback' do
      it 'deletes dismissed vulnerability_feedback for removed scanners' do
        expect { perform_migration }.to change { vulnerability_feedback.count }
          .from(vulnerabilities_to_resolve.count + vulnerabilities_not_to_resolve.count)
          .to(vulnerabilities_not_to_resolve.count)
      end
    end
  end

  private
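
  # Seeds a single vulnerability for the given scanner external_id: a scanner row,
  # identifier, finding, dismissed feedback record, the vulnerability itself, and
  # its matching vulnerability_read.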
  def create_vulnerability(project_id:, external_id:, severity: :critical, state: :detected)
    scanner = scanners.where(project_id: project_id, external_id: external_id,
      name: "Scanner #{external_id}").first_or_create!
    severity_level = described_class::Migratable::Enums::Vulnerability.severity_levels[severity]
    vulnerability_state = described_class::Migratable::Enums::Vulnerability.vulnerability_states[state]

    uuid = SecureRandom.uuid
    project_fingerprint = SecureRandom.hex(20)

    identifier = table(:vulnerability_identifiers).create!(
      project_id: project_id,
      external_id: "CVE-2018-1234",
      external_type: "CVE",
      name: "CVE-2018-1234",
      fingerprint: SecureRandom.hex(20)
    )

    finding = table(:vulnerability_occurrences).create!(
      project_id: project_id,
      scanner_id: scanner.id,
      severity: severity_level,
      report_type: 99, # generic
      primary_identifier_id: identifier.id,
      project_fingerprint: project_fingerprint,
      location_fingerprint: SecureRandom.hex(20),
      uuid: uuid,
      name: "CVE-2018-1234",
      raw_metadata: "{}",
      metadata_version: "test:1.0"
    )

    vulnerability_feedback.create!(
      feedback_type: described_class::Migratable::Vulnerabilities::Feedback.feedback_types[:dismissal],
      project_id: project_id,
      author_id: vulnerability_created_by_user.id,
      project_fingerprint: project_fingerprint,
      category: 0, # sast
      finding_uuid: uuid
    )

    vulnerability = vulnerabilities.create!(
      project_id: project_id,
      author_id: vulnerability_created_by_user.id,
      title: 'Vulnerability 1',
      severity: severity_level,
      confidence: 1,
      report_type: 1,
      state: vulnerability_state,
      finding_id: finding.id
    )

    vulnerability_read = vulnerability_reads.create!(
      dismissal_reason: mitigating_control_dismissal_reason,
      vulnerability_id: vulnerability.id,
      namespace_id: project.namespace_id,
      project_id: project_id,
      scanner_id: scanner.id,
      report_type: 1,
      severity: severity_level,
      state: vulnerability_state,
      uuid: uuid,
      archived: false,
      traversal_ids: []
    )

    { vulnerability: vulnerability, vulnerability_read: vulnerability_read }
  end
end

@ -1,20 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe QueueResolveVulnerabilitiesForRemovedAnalyzers, feature_category: :static_application_security_testing do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'does not schedule a new batched migration' do
    reversible_migration do |migration|
      migration.before -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      migration.after -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }
    end
  end
end

@ -1,26 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe RequeueResolveVulnerabilitiesForRemovedAnalyzers, feature_category: :static_application_security_testing do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      migration.before -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      migration.after -> {
        expect(batched_migration).to have_scheduled_batched_migration(
          table_name: :vulnerability_reads,
          column_name: :id,
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE
        )
      }
    end
  end
end