Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-01-11 12:09:29 +00:00
parent 6f5be4b446
commit 97feef1f73
91 changed files with 1164 additions and 192 deletions

View File

@ -425,7 +425,7 @@ group :development, :test do
gem 'awesome_print', require: false # rubocop:todo Gemfile/MissingFeatureCategory
gem 'database_cleaner-active_record', '~> 2.1.0', feature_category: :database
gem 'factory_bot_rails', '~> 6.2.0' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'factory_bot_rails', '~> 6.4.3' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'rspec-rails', '~> 6.1.0', feature_category: :shared
# Prevent occasions where minitest is not bundled in packaged versions of ruby (see #3826)

View File

@ -155,8 +155,8 @@
{"name":"expgen","version":"0.1.1","platform":"ruby","checksum":"4e6a0f65b210a201d6045debb3e62a24e33251a49f81a11b067d303a60d3a239"},
{"name":"expression_parser","version":"0.9.0","platform":"ruby","checksum":"2b56db3cffc48c3337f4f29f5bc2374c86e7ba29acb40269c74bb55af9f868a4"},
{"name":"extended-markdown-filter","version":"0.7.0","platform":"ruby","checksum":"c8eeef7409fbae18c6b407cd3e4eeb5d25c35cb08fe1ac06f375df3db2d4f138"},
{"name":"factory_bot","version":"6.2.0","platform":"ruby","checksum":"d181902cdda531cf6cef036001b3a700a7b5e04bac63976864530120b2ac7d13"},
{"name":"factory_bot_rails","version":"6.2.0","platform":"ruby","checksum":"278b969666b078e76e1c972c501da9b1fac15e5b0ff328cc7ce400366164d0a1"},
{"name":"factory_bot","version":"6.4.5","platform":"ruby","checksum":"d71dd29bc95f0ec2bf27e3dd9b1b4d557bd534caca744663cb7db4bacf3198be"},
{"name":"factory_bot_rails","version":"6.4.3","platform":"ruby","checksum":"ea73ceac1c0ff3dc11fff390bf2ea8a2604066525ed8ecd3b3bc2c267226dcc8"},
{"name":"faraday","version":"1.10.0","platform":"ruby","checksum":"a42158d5c1932c16fd483c512f7e0797b4916096bcf0eb5fb927a1c915a7ea02"},
{"name":"faraday-em_http","version":"1.0.0","platform":"ruby","checksum":"7a3d4c7079789121054f57e08cd4ef7e40ad1549b63101f38c7093a9d6c59689"},
{"name":"faraday-em_synchrony","version":"1.0.0","platform":"ruby","checksum":"460dad1c30cc692d6e77d4c391ccadb4eca4854b315632cd7e560f74275cf9ed"},

View File

@ -553,10 +553,10 @@ GEM
expression_parser (0.9.0)
extended-markdown-filter (0.7.0)
html-pipeline (~> 2.9)
factory_bot (6.2.0)
factory_bot (6.4.5)
activesupport (>= 5.0.0)
factory_bot_rails (6.2.0)
factory_bot (~> 6.2.0)
factory_bot_rails (6.4.3)
factory_bot (~> 6.4)
railties (>= 5.0.0)
faraday (1.10.0)
faraday-em_http (~> 1.0)
@ -1870,7 +1870,7 @@ DEPENDENCIES
email_reply_trimmer (~> 0.1)
email_spec (~> 2.2.0)
error_tracking_open_api!
factory_bot_rails (~> 6.2.0)
factory_bot_rails (~> 6.4.3)
faraday (~> 1.0)
faraday_middleware-aws-sigv4 (~> 0.3.0)
fast_blank (~> 1.0.1)

View File

@ -235,7 +235,9 @@ export default {
:work-item-id="workItemId"
:work-item-state="workItemState"
:work-item-type="workItemType"
:has-comment="!!commentText.length"
can-update
@submit-comment="$emit('submitForm', { commentText, isNoteInternal })"
@error="$emit('error', $event)"
/>
<gl-button

View File

@ -38,6 +38,11 @@ export default {
required: false,
default: false,
},
hasComment: {
type: Boolean,
required: false,
default: false,
},
},
data() {
return {
@ -49,9 +54,15 @@ export default {
return this.workItemState === STATE_OPEN;
},
toggleWorkItemStateText() {
const baseText = this.isWorkItemOpen
let baseText = this.isWorkItemOpen
? __('Close %{workItemType}')
: __('Reopen %{workItemType}');
if (this.hasComment) {
baseText = this.isWorkItemOpen
? __('Comment & close %{workItemType}')
: __('Comment & reopen %{workItemType}');
}
return sprintfWorkItem(baseText, this.workItemType);
},
tracking() {
@ -96,6 +107,10 @@ export default {
Sentry.captureException(error);
}
if (this.hasComment) {
this.$emit('submit-comment');
}
this.updateInProgress = false;
},
},

View File

@ -44,7 +44,7 @@ module Admin
end
def variable_params_attributes
%i[id variable_type key secret_value protected masked raw _destroy]
%i[id variable_type key description secret_value protected masked raw _destroy]
end
end
end

View File

@ -50,7 +50,7 @@ module Groups
end
def variable_params_attributes
%i[id variable_type key secret_value protected masked raw _destroy]
%i[id variable_type key description secret_value protected masked raw _destroy]
end
def authorize_admin_build!

View File

@ -47,6 +47,6 @@ class Projects::VariablesController < Projects::ApplicationController
end
def variable_params_attributes
%i[id variable_type key secret_value protected masked raw environment_scope _destroy]
%i[id variable_type key description secret_value protected masked raw environment_scope _destroy]
end
end
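All three controllers feed `variable_params_attributes` into a strong-parameters permit list, so adding `description` here is what lets the new attribute through. A minimal sketch of the assumed consumption pattern (the `variables_params` helper and the request shape are illustrative, not part of this commit):

```ruby
# Sketch only: how a permit list like variable_params_attributes is
# typically consumed by these controllers' strong parameters.
def variables_params
  params.permit(variables_attributes: variable_params_attributes)
end

# A request payload may now carry the new attribute, for example:
#   variables_attributes: [
#     { key: 'MY_VAR', secret_value: 'v1', description: 'Token for the deploy job' }
#   ]
```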

View File

@ -15,6 +15,10 @@ module Types
null: true,
description: 'Name of the variable.'
field :description, GraphQL::Types::String,
null: true,
description: 'Description of the variable.'
field :raw, GraphQL::Types::Boolean,
null: true,
description: 'Indicates whether the variable is raw.'

View File

@ -13,6 +13,10 @@ module Types
null: false,
description: 'ID of the variable.'
field :description, GraphQL::Types::String,
null: true,
description: 'Description of the variable.'
field :environment_scope, GraphQL::Types::String,
null: true,
deprecated: {

View File

@ -150,9 +150,9 @@ class BulkImports::Entity < ApplicationRecord
File.join(base_resource_path, 'export_relations')
end
def export_relations_url_path(batched: false)
if batched && bulk_import.supports_batched_export?
Gitlab::Utils.add_url_parameters(export_relations_url_path_base, batched: batched)
def export_relations_url_path
if bulk_import.supports_batched_export?
Gitlab::Utils.add_url_parameters(export_relations_url_path_base, batched: true)
else
export_relations_url_path_base
end

View File

@ -13,6 +13,7 @@ module Ci
alias_attribute :secret_value, :value
validates :description, length: { maximum: 255 }, allow_blank: true
validates :key, uniqueness: {
message: -> (object, data) { _("(%{value}) has already been taken") }
}
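A quick console-style check of the new length validation; a sketch that assumes the class in this hunk is `Ci::InstanceVariable` (the class name sits outside the visible lines):

```ruby
# Sketch: exercising the new description validation (class name assumed).
variable = Ci::InstanceVariable.new(key: 'MY_KEY', secret_value: 'value')
variable.description = 'a' * 256
variable.valid? # => false, description exceeds the 255-character limit
variable.description = 'Token used by the deploy job'
variable.valid? # => true, subject to the remaining validations
```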

View File

@ -4,6 +4,7 @@
module Ci
class PipelineArtifact < Ci::ApplicationRecord
include Ci::Partitionable
include UpdateProjectStatistics
include Artifactable
include FileStoreMounter
@ -31,6 +32,8 @@ module Ci
validates :size, presence: true, numericality: { less_than_or_equal_to: FILE_SIZE_LIMIT }
validates :file_type, presence: true
partitionable scope: :pipeline
mount_file_store_uploader Ci::PipelineArtifactUploader
update_project_statistics project_statistics_name: :pipeline_artifacts_size

View File

@ -2,6 +2,9 @@
module Ci
class PipelineMetadata < Ci::ApplicationRecord
include Ci::Partitionable
include Importable
self.primary_key = :pipeline_id
enum auto_cancel_on_new_commit: {
@ -21,5 +24,7 @@ module Ci
validates :pipeline, presence: true
validates :project, presence: true
validates :name, length: { minimum: 1, maximum: 255 }, allow_nil: true
partitionable scope: :pipeline
end
end

View File

@ -21,7 +21,9 @@ module Ci
Ci::PendingBuild
Ci::RunningBuild
Ci::RunnerManagerBuild
Ci::PipelineArtifact
Ci::PipelineChatData
Ci::PipelineMetadata
Ci::PipelineVariable
Ci::Sources::Pipeline
Ci::Stage

View File

@ -5,6 +5,7 @@ module Ci
expose :id
expose :key
expose :value
expose :description
expose :variable_type
expose :protected?, as: :protected

View File

@ -75,7 +75,7 @@ module BulkImports
::GlobalID.parse(response.dig(*entity_query.data_path, 'id')).model_id
rescue StandardError => e
log_exception(e, message: 'Failed to fetch source entity id')
log_warning(e, message: 'Failed to fetch source entity id')
nil
end
@ -92,14 +92,21 @@ module BulkImports
@logger ||= Logger.build.with_entity(entity)
end
def log_exception(exception, payload)
def build_payload(exception, payload)
Gitlab::ExceptionLogFormatter.format!(exception, payload)
structured_payload(payload)
end
logger.error(structured_payload(payload))
def log_warning(exception, payload)
logger.warn(build_payload(exception, payload))
end
def log_error(exception, payload)
logger.error(build_payload(exception, payload))
end
def log_and_fail(exception)
log_exception(exception, message: "Request to export #{entity.source_type} failed")
log_error(exception, message: "Request to export #{entity.source_type} failed")
BulkImports::Failure.create(failure_attributes(exception))
@ -107,7 +114,7 @@ module BulkImports
end
def export_url
entity.export_relations_url_path(batched: Feature.enabled?(:bulk_imports_batched_import_export))
entity.export_relations_url_path
end
end
end

View File

@ -103,14 +103,14 @@ module ClickHouse
def delete_records_from_click_house(ids)
query = ClickHouse::Client::Query.new(
raw_query: "DELETE FROM events WHERE author_id IN ({author_ids:Array(UInt64)})",
raw_query: "ALTER TABLE events DELETE WHERE author_id IN ({author_ids:Array(UInt64)})",
placeholders: { author_ids: ids.to_json }
)
connection.execute(query)
query = ClickHouse::Client::Query.new(
raw_query: "DELETE FROM event_authors WHERE author_id IN ({author_ids:Array(UInt64)})",
raw_query: "ALTER TABLE event_authors DELETE WHERE author_id IN ({author_ids:Array(UInt64)})",
placeholders: { author_ids: ids.to_json }
)

View File

@ -53,9 +53,7 @@ module Gitlab
importer_class.new(object, project, client).execute
if increment_object_counter?(object)
Gitlab::GithubImport::ObjectCounter.increment(project, object_type, :imported)
end
increment_object_counter(object, project) if increment_object_counter?(object)
info(project.id, message: 'importer finished')
rescue ActiveRecord::RecordInvalid, NotRetriableError, NoMethodError => e
@ -73,6 +71,10 @@ module Gitlab
true
end
def increment_object_counter(_object, project)
Gitlab::GithubImport::ObjectCounter.increment(project, object_type, :imported)
end
def object_type
raise NotImplementedError
end

View File

@ -16,6 +16,16 @@ module Gitlab
def object_type
:issue_event
end
def increment_object_counter(object, project)
counter_type = importer_class::EVENT_COUNTER_MAP[object[:event]] if import_settings.extended_events?
counter_type ||= object_type
Gitlab::GithubImport::ObjectCounter.increment(project, counter_type, :imported)
end
def import_settings
@import_settings ||= Gitlab::GithubImport::Settings.new(project)
end
end
end
end

View File

@ -0,0 +1,9 @@
---
name: container_scanning_continuous_vulnerability_scans
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/435435
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141023
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/437162
milestone: '16.8'
group: group::composition analysis
type: beta
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: bulk_imports_batched_import_export
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/124434
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/406559
milestone: '16.2'
type: development
group: group::import and integrate
default_enabled: true

View File

@ -0,0 +1,9 @@
---
migration_job_name: BackfillPartitionIdCiPipelineArtifact
description: Fixes incorrect values for ci_pipeline_artifacts being in the wrong partition
feature_category: continuous_integration
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141345
milestone: '16.8'
queued_migration_version: 20240109090354
finalize_after: '2024-01-22'
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,9 @@
---
migration_job_name: BackfillPartitionIdCiPipelineMetadata
description: Fixes incorrect values for ci_pipeline_metadata being in the wrong partition
feature_category: continuous_integration
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141078
milestone: '16.8'
queued_migration_version: 20240108082419
finalize_after: '2024-01-15'
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true
class AddDescriptionToCiInstanceVariables < Gitlab::Database::Migration[2.2]
milestone '16.8'
enable_lock_retries!
# rubocop:disable Migration/AddLimitToTextColumns -- text limit is added in 20231222072237_add_text_limit_to_ci_instance_variables_description.rb migration
def change
add_column(:ci_instance_variables, :description, :text)
end
# rubocop:enable Migration/AddLimitToTextColumns
end

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
class AddPartitionIdToPipelineMetadata < Gitlab::Database::Migration[2.2]
milestone '16.8'
enable_lock_retries!
def change
add_column(:ci_pipeline_metadata, :partition_id, :bigint, default: 100, null: false)
end
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
class MigrateZoektIndexedNamespacesToZoektEnabledNamespaces < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
restrict_gitlab_migration gitlab_schema: :gitlab_main
milestone '16.8'
INSERTED_COLUMNS = %w[
root_namespace_id
search
created_at
updated_at
].join(',')
def up
connection.execute(<<~SQL)
INSERT INTO zoekt_enabled_namespaces (#{INSERTED_COLUMNS})
(SELECT DISTINCT ON (namespace_id) namespace_id, search, created_at, updated_at
FROM zoekt_indexed_namespaces ORDER BY namespace_id, search)
SQL
end
def down
connection.execute(<<~SQL)
DELETE FROM zoekt_enabled_namespaces
SQL
end
end

View File

@ -0,0 +1,46 @@
# frozen_string_literal: true
class MigrateZoektIndexedNamespacesToZoektIndices < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
restrict_gitlab_migration gitlab_schema: :gitlab_main
milestone '16.8'
INSERTED_COLUMNS = %w[
zoekt_enabled_namespace_id
namespace_id
zoekt_node_id
state
created_at
updated_at
].join(',')
STATE_READY = 10
def up
connection.execute(<<~SQL)
WITH indexed_namespaces AS (
(SELECT DISTINCT ON (namespace_id) namespace_id, search, zoekt_node_id
FROM zoekt_indexed_namespaces ORDER BY namespace_id, search)
)
INSERT INTO zoekt_indices (#{INSERTED_COLUMNS})
SELECT
zoekt_enabled_namespaces.id,
indexed_namespaces.namespace_id,
indexed_namespaces.zoekt_node_id,
#{STATE_READY},
NOW(),
NOW()
FROM zoekt_enabled_namespaces
JOIN indexed_namespaces ON indexed_namespaces.namespace_id = zoekt_enabled_namespaces.root_namespace_id
SQL
end
def down
connection.execute(<<~SQL)
DELETE FROM zoekt_indices
SQL
end
end

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
class AddPartitionIdToPipelineArtifact < Gitlab::Database::Migration[2.2]
milestone '16.8'
enable_lock_retries!
def change
add_column(:ci_pipeline_artifacts, :partition_id, :bigint, default: 100, null: false)
end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
class AddTextLimitToCiInstanceVariablesDescription < Gitlab::Database::Migration[2.2]
milestone '16.8'
disable_ddl_transaction!
TABLE_NAME = :ci_instance_variables
COLUMN_NAME = :description
def up
add_text_limit(TABLE_NAME, COLUMN_NAME, 255)
end
def down
remove_text_limit(TABLE_NAME, COLUMN_NAME)
end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
class QueueBackfillPartitionIdCiPipelineMetadata < Gitlab::Database::Migration[2.2]
milestone '16.8'
restrict_gitlab_migration gitlab_schema: :gitlab_ci
MIGRATION = 'BackfillPartitionIdCiPipelineMetadata'
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 250
def up
queue_batched_background_migration(
MIGRATION,
:ci_pipeline_metadata,
:pipeline_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(MIGRATION, :ci_pipeline_metadata, :pipeline_id, [])
end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
class QueueBackfillPartitionIdCiPipelineArtifact < Gitlab::Database::Migration[2.2]
milestone '16.8'
restrict_gitlab_migration gitlab_schema: :gitlab_ci
MIGRATION = 'BackfillPartitionIdCiPipelineArtifact'
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:ci_pipeline_artifacts,
:id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(MIGRATION, :ci_pipeline_artifacts, :id, [])
end
end

View File

@ -0,0 +1 @@
187b2c6e79e0f9e4636923f646ef9b5b5c609cb1797dac6265a1aa12e2a46f31

View File

@ -0,0 +1 @@
c92fb56354f4471103cb34f0a49294961a221dde5ecea72b0f132fdd1c813095

View File

@ -0,0 +1 @@
e6829ae7a671ff2c13c78721c9304bcbaa7738aea01d8e83aeac79d0da2d2a47

View File

@ -0,0 +1 @@
92f7eddc8ba255987adc42e1140b47256c62235762f431ba8047561f120004dc

View File

@ -0,0 +1 @@
eb3a7d14833470fe74420874ed883428d54a4d1a24a7f131f2049bf2f7efa929

View File

@ -0,0 +1 @@
273a951feb99c6064529c3a4403210ec0ff4a0be41867b7b90809b89fd767f5d

View File

@ -0,0 +1 @@
b264b391bc50ede33db98df9e1e54a074c850e2e4af0c5bd380ce1f32646c95b

View File

@ -0,0 +1 @@
7420ebb45a2ef0f82527b7427ec741bf8807efb966c317a9fedfda5c04fb4947

View File

@ -14344,9 +14344,11 @@ CREATE TABLE ci_instance_variables (
encrypted_value text,
encrypted_value_iv text,
raw boolean DEFAULT false NOT NULL,
description text,
CONSTRAINT check_07a45a5bcb CHECK ((char_length(encrypted_value_iv) <= 255)),
CONSTRAINT check_5aede12208 CHECK ((char_length(key) <= 255)),
CONSTRAINT check_956afd70f1 CHECK ((char_length(encrypted_value) <= 13579))
CONSTRAINT check_956afd70f1 CHECK ((char_length(encrypted_value) <= 13579)),
CONSTRAINT check_a0a9762afa CHECK ((char_length(description) <= 255))
);
CREATE SEQUENCE ci_instance_variables_id_seq
@ -14554,6 +14556,7 @@ CREATE TABLE ci_pipeline_artifacts (
verification_checksum bytea,
verification_failure text,
locked smallint DEFAULT 2,
partition_id bigint DEFAULT 100 NOT NULL,
CONSTRAINT check_191b5850ec CHECK ((char_length(file) <= 255)),
CONSTRAINT check_abeeb71caf CHECK ((file IS NOT NULL)),
CONSTRAINT ci_pipeline_artifacts_verification_failure_text_limit CHECK ((char_length(verification_failure) <= 255))
@ -14608,6 +14611,7 @@ CREATE TABLE ci_pipeline_metadata (
name text,
auto_cancel_on_new_commit smallint DEFAULT 0 NOT NULL,
auto_cancel_on_job_failure smallint DEFAULT 0 NOT NULL,
partition_id bigint DEFAULT 100 NOT NULL,
CONSTRAINT check_9d3665463c CHECK ((char_length(name) <= 255))
);

View File

@ -1889,6 +1889,15 @@ Updates to example must be made at:
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['backup_upload_remote_directory'] = "<gcp-backups-state-bucket-name>"
gitlab_rails['ci_secure_files_object_store_enabled'] = true
gitlab_rails['ci_secure_files_object_store_remote_directory'] = "gcp-ci_secure_files-bucket-name"
gitlab_rails['ci_secure_files_object_store_connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
```
1. Copy the `/etc/gitlab/gitlab-secrets.json` file from the first Linux package node you configured and add or replace
@ -2027,6 +2036,14 @@ On each node perform the following:
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['backup_upload_remote_directory'] = "<gcp-backups-state-bucket-name>"
gitlab_rails['ci_secure_files_object_store_enabled'] = true
gitlab_rails['ci_secure_files_object_store_remote_directory'] = "gcp-ci_secure_files-bucket-name"
gitlab_rails['ci_secure_files_object_store_connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
```
1. If you're using [Gitaly with TLS support](#gitaly-cluster-tls-support), make sure the

View File

@ -1897,6 +1897,15 @@ Updates to example must be made at:
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['backup_upload_remote_directory'] = "<gcp-backups-state-bucket-name>"
gitlab_rails['ci_secure_files_object_store_enabled'] = true
gitlab_rails['ci_secure_files_object_store_remote_directory'] = "gcp-ci_secure_files-bucket-name"
gitlab_rails['ci_secure_files_object_store_connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
```
1. Copy the `/etc/gitlab/gitlab-secrets.json` file from the first Linux package node you configured and add or replace
@ -2037,6 +2046,14 @@ On each node perform the following:
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['backup_upload_remote_directory'] = "<gcp-backups-state-bucket-name>"
gitlab_rails['ci_secure_files_object_store_enabled'] = true
gitlab_rails['ci_secure_files_object_store_remote_directory'] = "gcp-ci_secure_files-bucket-name"
gitlab_rails['ci_secure_files_object_store_connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
```
1. If you're using [Gitaly with TLS support](#gitaly-cluster-tls-support), make sure the

View File

@ -694,6 +694,14 @@ Updates to example must be made at:
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['backup_upload_remote_directory'] = "<gcp-backups-state-bucket-name>"
gitlab_rails['ci_secure_files_object_store_enabled'] = true
gitlab_rails['ci_secure_files_object_store_remote_directory'] = "gcp-ci_secure_files-bucket-name"
gitlab_rails['ci_secure_files_object_store_connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
```
1. Copy the `/etc/gitlab/gitlab-secrets.json` file from the first Linux package node you configured and add or replace
@ -822,6 +830,15 @@ On each node perform the following:
}
gitlab_rails['backup_upload_remote_directory'] = "<gcp-backups-state-bucket-name>"
gitlab_rails['ci_secure_files_object_store_enabled'] = true
gitlab_rails['ci_secure_files_object_store_remote_directory'] = "gcp-ci_secure_files-bucket-name"
gitlab_rails['ci_secure_files_object_store_connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
## Uncomment and edit the following options if you have set up NFS
##
## Prevent GitLab from starting if NFS data mounts are not available

View File

@ -1812,6 +1812,15 @@ Updates to example must be made at:
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['backup_upload_remote_directory'] = "<gcp-backups-state-bucket-name>"
gitlab_rails['ci_secure_files_object_store_enabled'] = true
gitlab_rails['ci_secure_files_object_store_remote_directory'] = "gcp-ci_secure_files-bucket-name"
gitlab_rails['ci_secure_files_object_store_connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
```
1. Copy the `/etc/gitlab/gitlab-secrets.json` file from the first Linux package node you configured and add or replace
@ -1976,6 +1985,14 @@ On each node perform the following:
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['backup_upload_remote_directory'] = "<gcp-backups-state-bucket-name>"
gitlab_rails['ci_secure_files_object_store_enabled'] = true
gitlab_rails['ci_secure_files_object_store_remote_directory'] = "gcp-ci_secure_files-bucket-name"
gitlab_rails['ci_secure_files_object_store_connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
```
1. If you're using [Gitaly with TLS support](#gitaly-cluster-tls-support), make sure the

View File

@ -1904,6 +1904,15 @@ Updates to example must be made at:
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['backup_upload_remote_directory'] = "<gcp-backups-state-bucket-name>"
gitlab_rails['ci_secure_files_object_store_enabled'] = true
gitlab_rails['ci_secure_files_object_store_remote_directory'] = "gcp-ci_secure_files-bucket-name"
gitlab_rails['ci_secure_files_object_store_connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
```
1. Copy the `/etc/gitlab/gitlab-secrets.json` file from the first Linux package node you configured and add or replace
@ -2050,7 +2059,16 @@ On each node perform the following:
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['backup_upload_remote_directory'] = "<gcp-backups-state-bucket-name>"
gitlab_rails['ci_secure_files_object_store_enabled'] = true
gitlab_rails['ci_secure_files_object_store_remote_directory'] = "gcp-ci_secure_files-bucket-name"
gitlab_rails['ci_secure_files_object_store_connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
```
1. If you're using [Gitaly with TLS support](#gitaly-cluster-tls-support), make sure the

View File

@ -1803,6 +1803,15 @@ Updates to example must be made at:
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['backup_upload_remote_directory'] = "<gcp-backups-state-bucket-name>"
gitlab_rails['ci_secure_files_object_store_enabled'] = true
gitlab_rails['ci_secure_files_object_store_remote_directory'] = "gcp-ci_secure_files-bucket-name"
gitlab_rails['ci_secure_files_object_store_connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
```
1. Copy the `/etc/gitlab/gitlab-secrets.json` file from the first Linux package node you configured and add or replace
@ -1960,7 +1969,15 @@ On each node perform the following:
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
gitlab_rails['backup_upload_remote_directory'] = "<gcp-backups-state-bucket-name>"
gitlab_rails['ci_secure_files_object_store_enabled'] = true
gitlab_rails['ci_secure_files_object_store_remote_directory'] = "gcp-ci_secure_files-bucket-name"
gitlab_rails['ci_secure_files_object_store_connection'] = {
'provider' => 'Google',
'google_project' => '<gcp-project-name>',
'google_json_key_location' => '<path-to-gcp-service-account-key>'
}
## Uncomment and edit the following options if you have set up NFS
##
## Prevent GitLab from starting if NFS data mounts are not available

View File

@ -15750,6 +15750,7 @@ CI/CD variables for a GitLab instance.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="ciinstancevariabledescription"></a>`description` | [`String`](#string) | Description of the variable. |
| <a id="ciinstancevariableenvironmentscope"></a>`environmentScope` **{warning-solid}** | [`String`](#string) | **Deprecated** in 15.3. No longer used, only available for GroupVariableType and ProjectVariableType. |
| <a id="ciinstancevariableid"></a>`id` | [`ID!`](#id) | ID of the variable. |
| <a id="ciinstancevariablekey"></a>`key` | [`String`](#string) | Name of the variable. |
@ -16853,6 +16854,7 @@ The currently authenticated GitLab user.
| <a id="currentusercommitemail"></a>`commitEmail` | [`String`](#string) | User's default commit email. |
| <a id="currentusercreatedat"></a>`createdAt` | [`Time`](#time) | Timestamp of when the user was created. |
| <a id="currentuserdiscord"></a>`discord` | [`String`](#string) | Discord ID of the user. |
| <a id="currentuserduochatavailable"></a>`duoChatAvailable` **{warning-solid}** | [`Boolean!`](#boolean) | **Introduced** in 16.8. This feature is an Experiment. It can be changed or removed at any time. User access to AI chat feature. |
| <a id="currentuseremail"></a>`email` **{warning-solid}** | [`String`](#string) | **Deprecated** in 13.7. This was renamed. Use: [`User.publicEmail`](#userpublicemail). |
| <a id="currentuseremails"></a>`emails` | [`EmailConnection`](#emailconnection) | User's email addresses. (see [Connections](#connections)) |
| <a id="currentusergitpodenabled"></a>`gitpodEnabled` | [`Boolean`](#boolean) | Whether Gitpod is enabled at the user level. |
@ -20817,6 +20819,7 @@ CI/CD variables a project inherits from its parent group and ancestors.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="inheritedcivariabledescription"></a>`description` | [`String`](#string) | Description of the variable. |
| <a id="inheritedcivariableenvironmentscope"></a>`environmentScope` | [`String`](#string) | Scope defining the environments that can use the variable. |
| <a id="inheritedcivariablegroupcicdsettingspath"></a>`groupCiCdSettingsPath` | [`String`](#string) | Indicates the path to the CI/CD settings of the group the variable belongs to. |
| <a id="inheritedcivariablegroupname"></a>`groupName` | [`String`](#string) | Indicates group the variable belongs to. |

View File

@ -22,6 +22,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/a
[
{
"key": "TEST_VARIABLE_1",
"description": null,
"variable_type": "env_var",
"value": "TEST_1",
"protected": false,
@ -30,6 +31,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/a
},
{
"key": "TEST_VARIABLE_2",
"description": null,
"variable_type": "env_var",
"value": "TEST_2",
"protected": false,
@ -58,6 +60,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/a
```json
{
"key": "TEST_VARIABLE_1",
"description": null,
"variable_type": "env_var",
"value": "TEST_1",
"protected": false,
@ -93,6 +96,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
```json
{
"key": "NEW_VARIABLE",
"description": null,
"value": "new value",
"variable_type": "env_var",
"protected": false,
@ -126,6 +130,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
```json
{
"key": "NEW_VARIABLE",
"description": null,
"value": "updated value",
"variable_type": "env_var",
"protected": true,

View File

@ -696,71 +696,6 @@ Get the GitHub integration settings for a project.
GET /projects/:id/integrations/github
```
## Slack notifications
### Set up Slack notifications
Set up Slack notifications for a project.
```plaintext
PUT /projects/:id/integrations/slack
```
Parameters:
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `webhook` | string | true | Slack notifications webhook (for example, `https://hooks.slack.com/services/...`). |
| `username` | string | false | Slack notifications username. |
| `channel` | string | false | Default channel to use if no other channel is configured. |
| `notify_only_broken_pipelines` | boolean | false | Send notifications for broken pipelines. |
| `notify_only_default_branch` | boolean | false | **Deprecated:** This parameter has been replaced with `branches_to_be_notified`. |
| `branches_to_be_notified` | string | false | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `labels_to_be_notified` | string | false | Labels to send notifications for. Leave blank to receive notifications for all events. |
| `labels_to_be_notified_behavior` | string | false | Labels to be notified for. Valid options are `match_any` and `match_all`. The default value is `match_any`. |
| `alert_channel` | string | false | The name of the channel to receive notifications for alert events. |
| `alert_events` | boolean | false | Enable notifications for alert events. |
| `commit_events` | boolean | false | Enable notifications for commit events. |
| `confidential_issue_channel` | string | false | The name of the channel to receive notifications for confidential issue events. |
| `confidential_issues_events` | boolean | false | Enable notifications for confidential issue events. |
| `confidential_note_channel` | string | false | The name of the channel to receive notifications for confidential note events. |
| `confidential_note_events` | boolean | false | Enable notifications for confidential note events. |
| `deployment_channel` | string | false | The name of the channel to receive notifications for deployment events. |
| `deployment_events` | boolean | false | Enable notifications for deployment events. |
| `incident_channel` | string | false | The name of the channel to receive notifications for incident events. |
| `incidents_events` | boolean | false | Enable notifications for incident events. |
| `issue_channel` | string | false | The name of the channel to receive notifications for issue events. |
| `issues_events` | boolean | false | Enable notifications for issue events. |
| `job_events` | boolean | false | Enable notifications for job events. |
| `merge_request_channel` | string | false | The name of the channel to receive notifications for merge request events. |
| `merge_requests_events` | boolean | false | Enable notifications for merge request events. |
| `note_channel` | string | false | The name of the channel to receive notifications for note events. |
| `note_events` | boolean | false | Enable notifications for note events. |
| `pipeline_channel` | string | false | The name of the channel to receive notifications for pipeline events. |
| `pipeline_events` | boolean | false | Enable notifications for pipeline events. |
| `push_channel` | string | false | The name of the channel to receive notifications for push events. |
| `push_events` | boolean | false | Enable notifications for push events. |
| `tag_push_channel` | string | false | The name of the channel to receive notifications for tag push events. |
| `tag_push_events` | boolean | false | Enable notifications for tag push events. |
| `wiki_page_channel` | string | false | The name of the channel to receive notifications for wiki page events. |
| `wiki_page_events` | boolean | false | Enable notifications for wiki page events. |
### Disable Slack notifications
Disable Slack notifications for a project. Integration settings are reset.
```plaintext
DELETE /projects/:id/integrations/slack
```
### Get Slack notifications settings
Get the Slack notifications settings for a project.
```plaintext
GET /projects/:id/integrations/slack
```
## Google Chat
### Set up Google Chat
@ -1423,6 +1358,71 @@ Get the Redmine integration settings for a project.
GET /projects/:id/integrations/redmine
```
## Slack notifications
### Set up Slack notifications
Set up Slack notifications for a project.
```plaintext
PUT /projects/:id/integrations/slack
```
Parameters:
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `webhook` | string | true | Slack notifications webhook (for example, `https://hooks.slack.com/services/...`). |
| `username` | string | false | Slack notifications username. |
| `channel` | string | false | Default channel to use if no other channel is configured. |
| `notify_only_broken_pipelines` | boolean | false | Send notifications for broken pipelines. |
| `notify_only_default_branch` | boolean | false | **Deprecated:** This parameter has been replaced with `branches_to_be_notified`. |
| `branches_to_be_notified` | string | false | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `labels_to_be_notified` | string | false | Labels to send notifications for. Leave blank to receive notifications for all events. |
| `labels_to_be_notified_behavior` | string | false | Labels to be notified for. Valid options are `match_any` and `match_all`. The default value is `match_any`. |
| `alert_channel` | string | false | The name of the channel to receive notifications for alert events. |
| `alert_events` | boolean | false | Enable notifications for alert events. |
| `commit_events` | boolean | false | Enable notifications for commit events. |
| `confidential_issue_channel` | string | false | The name of the channel to receive notifications for confidential issue events. |
| `confidential_issues_events` | boolean | false | Enable notifications for confidential issue events. |
| `confidential_note_channel` | string | false | The name of the channel to receive notifications for confidential note events. |
| `confidential_note_events` | boolean | false | Enable notifications for confidential note events. |
| `deployment_channel` | string | false | The name of the channel to receive notifications for deployment events. |
| `deployment_events` | boolean | false | Enable notifications for deployment events. |
| `incident_channel` | string | false | The name of the channel to receive notifications for incident events. |
| `incidents_events` | boolean | false | Enable notifications for incident events. |
| `issue_channel` | string | false | The name of the channel to receive notifications for issue events. |
| `issues_events` | boolean | false | Enable notifications for issue events. |
| `job_events` | boolean | false | Enable notifications for job events. |
| `merge_request_channel` | string | false | The name of the channel to receive notifications for merge request events. |
| `merge_requests_events` | boolean | false | Enable notifications for merge request events. |
| `note_channel` | string | false | The name of the channel to receive notifications for note events. |
| `note_events` | boolean | false | Enable notifications for note events. |
| `pipeline_channel` | string | false | The name of the channel to receive notifications for pipeline events. |
| `pipeline_events` | boolean | false | Enable notifications for pipeline events. |
| `push_channel` | string | false | The name of the channel to receive notifications for push events. |
| `push_events` | boolean | false | Enable notifications for push events. |
| `tag_push_channel` | string | false | The name of the channel to receive notifications for tag push events. |
| `tag_push_events` | boolean | false | Enable notifications for tag push events. |
| `wiki_page_channel` | string | false | The name of the channel to receive notifications for wiki page events. |
| `wiki_page_events` | boolean | false | Enable notifications for wiki page events. |
### Disable Slack notifications
Disable Slack notifications for a project. Integration settings are reset.
```plaintext
DELETE /projects/:id/integrations/slack
```
### Get Slack notifications settings
Get the Slack notifications settings for a project.
```plaintext
GET /projects/:id/integrations/slack
```
## Slack slash commands
### Set up Slack slash commands

View File

@ -527,7 +527,7 @@ listed in the descriptions of the relevant settings.
| `protected_ci_variables` | boolean | no | CI/CD variables are protected by default. |
| `disable_overriding_approvers_per_merge_request` | boolean | no | Prevent editing approval rules in projects and merge requests |
| `prevent_merge_requests_author_approval` | boolean | no | Prevent approval by author |
| `prevent_merge_requests_committers_approval` | boolean | no | Prevent editing approval rules in projects and merge requests |
| `prevent_merge_requests_committers_approval` | boolean | no | Prevent approval by committers to merge requests |
| `push_event_activities_limit` | integer | no | Maximum number of changes (branches or tags) in a single push above which a [bulk push event is created](../administration/settings/push_event_activities_limit.md). Setting to `0` does not disable throttling. |
| `push_event_hooks_limit` | integer | no | Maximum number of changes (branches or tags) in a single push above which webhooks and integrations are not triggered. Setting to `0` does not disable throttling. |
| `rate_limiting_response_text` | string | no | When rate limiting is enabled via the `throttle_*` settings, send this plain text response when a rate limit is exceeded. 'Retry later' is sent if this is blank. |

Binary file not shown (image changed: 38 KiB before, 340 KiB after).

View File

@ -31,12 +31,28 @@ To view the runner fleet dashboard:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. Select **Runners**.
1. Click **Fleet dashboard**.
1. Select **Fleet dashboard**.
Most of the dashboard works without any additional actions, with the
exception of the **Wait time to pick a job** chart and the features proposed in [epic 11183](https://gitlab.com/groups/gitlab-org/-/epics/11183).
These features require [setting up additional infrastructure](#enable-more-ci-analytics-features-with-clickhouse).
## Export compute minutes used by instance runners
Prerequisites:
- You must be an administrator.
To analyze runner usage, you can export a CSV file that contains the number of jobs and the runner minutes used. The
CSV file shows the runner type and job status for each project, and is sent to your email when the export is complete.
To export compute minutes used by instance runners:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. Select **Runners**.
1. Select **Fleet dashboard**.
1. Select **Export CSV**.
## Enable more CI analytics features with ClickHouse **(ULTIMATE EXPERIMENT)**
> - [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/11180) in GitLab 16.7 with the [flags](../../administration/feature_flags.md) named `ci_data_ingestion_to_click_house` and `clickhouse_ci_analytics`. Disabled by default.

View File

@ -632,3 +632,73 @@ class AsyncPrepareTableConstraintsForListPartitioning < Gitlab::Database::Migrat
end
end
```
### Step 7 - Re-point foreign keys to parent table
The tables that reference the initial partition must now be updated to point to
the parent table. Without this change, records in those tables cannot locate
rows in later partitions, because their foreign keys still look for them in the
initial partition.
Steps (a consolidated sketch follows this list):
- Add the foreign key to the partitioned table and validate it asynchronously,
[for example](https://gitlab.com/gitlab-org/gitlab/-/blob/65d63f6a00196c3a7d59f15191920f271ab2b145/db/post_migrate/20230524135543_replace_ci_build_pending_states_foreign_key.rb).
- Validate it synchronously after the asynchronously validation was completed on GitLab.com,
[for example](https://gitlab.com/gitlab-org/gitlab/-/blob/65d63f6a00196c3a7d59f15191920f271ab2b145/db/post_migrate/20230530140456_validate_fk_ci_build_pending_states_p_ci_builds.rb).
- Remove the old foreign key and rename the new one to the old name,
[for example](https://gitlab.com/gitlab-org/gitlab/-/blob/65d63f6a00196c3a7d59f15191920f271ab2b145/db/post_migrate/20230615083713_replace_old_fk_ci_build_pending_states_to_builds.rb#L9).
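A consolidated sketch of the first two bullets, using illustrative table, column, and constraint names; the linked migrations are the authoritative versions:

```ruby
# Sketch only: add the replacement FK against the partitioned parent table
# without validating it, then queue the validation to run asynchronously.
class ReplaceExampleForeignKey < Gitlab::Database::Migration[2.2]
  disable_ddl_transaction!

  FK_NAME = :temp_fk_example_p # illustrative constraint name

  def up
    add_concurrent_foreign_key(
      :ci_build_pending_states, :p_ci_builds,
      name: FK_NAME,
      column: [:partition_id, :build_id],
      target_column: [:partition_id, :id],
      on_delete: :cascade,
      validate: false,
      reverse_lock_order: true
    )

    prepare_async_foreign_key_validation(:ci_build_pending_states, name: FK_NAME)
  end

  def down
    remove_foreign_key_if_exists(:ci_build_pending_states, name: FK_NAME)
  end
end
```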
### Step 8 - Ensure ID uniqueness across partitions
All uniqueness constraints must include the partitioning key, which means
duplicate IDs are possible across partitions. To solve this, we enforce that
only the database can set ID values, using a sequence to generate them, because
sequences are guaranteed to generate unique values.
For example:
```ruby
class EnsureIdUniquenessForPCiBuilds < Gitlab::Database::Migration[2.1]
include Gitlab::Database::PartitioningMigrationHelpers::UniquenessHelpers
enable_lock_retries!
TABLE_NAME = :p_ci_builds
FUNCTION_NAME = :assign_p_ci_builds_id_value
def up
ensure_unique_id(TABLE_NAME)
end
def down
execute(<<~SQL.squish)
ALTER TABLE #{TABLE_NAME}
ALTER COLUMN id SET DEFAULT nextval('ci_builds_id_seq'::regclass);
DROP FUNCTION IF EXISTS #{FUNCTION_NAME} CASCADE;
SQL
end
```
### Step 9 - Analyze the partitioned table and create new partitions
The autovacuum daemon does not process partitioned tables. It is necessary to
periodically run a manual `ANALYZE` to keep the statistics of the table hierarchy
up to date.
Models that implement `Ci::Partitionable` with the `partitioned: true` option are
analyzed by default on a weekly basis. To enable this and create new partitions,
register the model in the [PostgreSQL initializer](https://gitlab.com/gitlab-org/gitlab/-/blob/b7f0e3f1bcd2ffc220768bbc373364151775ca8e/config/initializers/postgres_partitioning.rb), as in the sketch below.
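A minimal sketch of that registration; `MyPartitionedModel` is a placeholder name:

```ruby
# config/initializers/postgres_partitioning.rb (sketch; model name illustrative)
Gitlab::Database::Partitioning.register_models([
  MyPartitionedModel # includes Ci::Partitionable with partitioned: true
])
```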
### Step 10 - Update the application to use the partitioned table
Now that the parent table is ready, we can update the application to use it:
```ruby
class Model < ApplicationRecord
self.table_name = :partitioned_table
end
```
Depending on the model, it might be safer to use a [change management issue](https://gitlab.com/gitlab-com/gl-infra/production/-/issues/16387).

View File

@ -308,22 +308,22 @@ the project first.
When a supported dependency file is detected, all dependencies, including transitive dependencies,
are analyzed. There is no limit to the depth of nested or transitive dependencies that are analyzed.
### Dependency analyzers
### Analyzers
Dependency Scanning supports the following official analyzers:
Dependency Scanning supports the following official
[Gemnasium-based](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium) analyzers:
- `gemnasium`
- `gemnasium-maven`
- `gemnasium-python`
Each of these supported Gemnasium-based Dependency Scanning analyzers exist in the following project:
- [`gemnasium`](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium)
The analyzers are published as Docker images, which Dependency Scanning uses
to launch dedicated containers for each analysis. You can also integrate a custom
The analyzers are published as Docker images, which Dependency Scanning uses to launch dedicated
containers for each analysis. You can also integrate a custom
[security scanner](../../../development/integrations/secure.md).
Each analyzer is updated as new versions of Gemnasium are released. For more information, see the
analyzer [Release Process documentation](../../../development/sec/analyzer_development_guide.md#versioning-and-release-process).
### How analyzers obtain dependency information
GitLab analyzers obtain dependency information using one of the following two methods:
@ -807,7 +807,7 @@ The following variables allow configuration of global dependency scanning settin
| CI/CD variables | Description |
| ----------------------------|------------ |
| `ADDITIONAL_CA_CERT_BUNDLE` | Bundle of CA certs to trust. The bundle of certificates provided here is also used by other tools during the scanning process, such as `git`, `yarn`, or `npm`. For more details, see [Using a custom SSL CA certificate authority](#using-a-custom-ssl-ca-certificate-authority). |
| `DS_EXCLUDED_ANALYZERS` | Specify the analyzers (by name) to exclude from Dependency Scanning. For more information, see [Dependency Scanning analyzers](#dependency-analyzers). |
| `DS_EXCLUDED_ANALYZERS` | Specify the analyzers (by name) to exclude from Dependency Scanning. For more information, see [Analyzers](#analyzers). |
| `DS_EXCLUDED_PATHS` | Exclude files and directories from the scan based on the paths. A comma-separated list of patterns. Patterns can be globs (see [`doublestar.Match`](https://pkg.go.dev/github.com/bmatcuk/doublestar/v4@v4.0.2#Match) for supported patterns), or file or folder paths (for example, `doc,spec`). Parent directories also match patterns. Default: `"spec, test, tests, tmp"`. |
| `DS_IMAGE_SUFFIX` | Suffix added to the image name. (GitLab team members can view more information in this confidential issue: `https://gitlab.com/gitlab-org/gitlab/-/issues/354796`). Automatically set to `"-fips"` when FIPS mode is enabled. |
| `DS_MAX_DEPTH` | Defines how many directory levels deep that the analyzer should search for supported files to scan. A value of `-1` scans all directories regardless of depth. Default: `2`. |
@ -1022,10 +1022,6 @@ merge cyclonedx sboms:
- gl-sbom-all.cdx.json
```
## Versioning and release process
Check the [Release Process documentation](../../../development/sec/analyzer_development_guide.md#versioning-and-release-process).
## Contributing to the vulnerability database
To find a vulnerability, you can search the [`GitLab Advisory Database`](https://advisories.gitlab.com/).

View File

@ -59,6 +59,7 @@ Prerequisites:
- You must have the Owner or Maintainer role.
- [Group membership lock](../../group/access_and_permissions.md#prevent-members-from-being-added-to-projects-in-a-group) must be disabled.
- If [sign-up is disabled](../../../administration/settings/sign_up_restrictions.md#disable-new-sign-ups), an administrator must add the user by email first.
To add a user to a project:

View File

@ -54,6 +54,10 @@ module API
type: String,
desc: 'The key of the variable. Max 255 characters'
optional :description,
type: String,
desc: 'The description of the variable'
requires :value,
type: String,
desc: 'The value of a variable'
@ -98,6 +102,10 @@ module API
type: String,
desc: 'The key of a variable'
optional :description,
type: String,
desc: 'The description of the variable'
optional :value,
type: String,
desc: 'The value of a variable'

View File

@ -6,7 +6,7 @@ module API
class EntityFailure < Grape::Entity
expose :relation, documentation: { type: 'string', example: 'label' }
expose :exception_message, documentation: { type: 'string', example: 'error message' } do |failure|
::Projects::ImportErrorFilter.filter_message(failure.exception_message.truncate(72))
::Projects::ImportErrorFilter.filter_message(failure.exception_message).truncate(255)
end
expose :exception_class, documentation: { type: 'string', example: 'Exception' }
expose :correlation_id_value, documentation: { type: 'string', example: 'dfcf583058ed4508e4c7c617bd7f0edd' }

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillPartitionIdCiPipelineArtifact < BatchedMigrationJob
operation_name :update_all
feature_category :continuous_integration
def perform
return unless uses_multiple_partitions?
each_sub_batch do |sub_batch|
sub_batch
.where('ci_pipeline_artifacts.pipeline_id = ci_pipelines.id')
.update_all('partition_id = ci_pipelines.partition_id FROM ci_pipelines')
end
end
private
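# Guard: a row in p_ci_builds with partition_id = 101 means a second
# partition is in use and partition_id values can diverge; otherwise every
# row still carries the default partition (100) and the backfill can be skipped.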
def uses_multiple_partitions?
!!connection.select_value(<<~SQL)
SELECT true FROM p_ci_builds WHERE partition_id = 101 LIMIT 1
SQL
end
end
end
end

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillPartitionIdCiPipelineMetadata < BatchedMigrationJob
operation_name :update_all
feature_category :continuous_integration
def perform
return unless uses_multiple_partitions?
each_sub_batch do |sub_batch|
sub_batch
.where('ci_pipeline_metadata.pipeline_id = ci_pipelines.id')
.update_all('partition_id = ci_pipelines.partition_id FROM ci_pipelines')
end
end
private
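# Guard: a row in p_ci_builds with partition_id = 101 means a second
# partition is in use and partition_id values can diverge; otherwise every
# row still carries the default partition (100) and the backfill can be skipped.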
def uses_multiple_partitions?
!!connection.select_value(<<~SQL)
SELECT true FROM p_ci_builds WHERE partition_id = 101 LIMIT 1
SQL
end
end
end
end

View File

@ -86,7 +86,7 @@ module Gitlab
end
return unless self.in_transaction?
return if in_factory_bot_create?
return if Thread.current[:factory_bot_objects] && Thread.current[:factory_bot_objects] > 0
# PgQuery might fail in some cases due to limited nesting:
# https://github.com/pganalyze/pg_query/issues/209
@ -192,20 +192,6 @@ module Gitlab
def self.in_transaction?
context[:transaction_depth_by_db].values.any?(&:positive?)
end
# We ignore execution in the #create method from FactoryBot
# because it is not representative of real code we run in
# production. There are far too many false positives caused
# by instantiating objects in different `gitlab_schema` in a
# FactoryBot `create`.
def self.in_factory_bot_create?
Rails.env.test? && caller_locations.any? do |l|
l.path.end_with?('lib/factory_bot/evaluation.rb') && l.label == 'create' ||
l.path.end_with?('lib/factory_bot/strategy/create.rb') ||
l.path.end_with?('lib/factory_bot/strategy/build.rb') ||
l.path.end_with?('shoulda/matchers/active_record/validate_uniqueness_of_matcher.rb') && l.label == 'create_existing_record'
end
end
end
end
end
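The counterpart that increments `Thread.current[:factory_bot_objects]` is not visible in these hunks; a minimal sketch of the assumed pattern, where factory invocations are wrapped in a counter so the analyzer ignores SQL issued while any factory object is being built:

```ruby
# Sketch only — the real increment/decrement hook lives outside these hunks.
def with_factory_bot_object_tracking
  Thread.current[:factory_bot_objects] = (Thread.current[:factory_bot_objects] || 0) + 1
  yield
ensure
  Thread.current[:factory_bot_objects] -= 1
end

# Hypothetical usage: wrap a FactoryBot strategy invocation so statements
# executed during object creation are skipped by the analyzer.
# with_factory_bot_object_tracking { create(:project) }
```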

View File

@ -27,6 +27,12 @@ module Gitlab
reviewed
].freeze
EVENT_COUNTER_MAP = {
'commented' => 'note',
'reviewed' => 'pull_request_review',
'merged' => 'pull_request_merged_by'
}.freeze
# issue_event - An instance of `Gitlab::GithubImport::Representation::IssueEvent`.
# project - An instance of `Project`.
# client - An instance of `Gitlab::GithubImport::Client`.

View File

@ -34,7 +34,7 @@ module Gitlab
cache_event(parent_record, associated)
Gitlab::GithubImport::ObjectCounter.increment(project, object_type, :fetched)
increment_object_counter(associated[:event])
pull_request = parent_record.is_a? MergeRequest
associated[:issue] = { number: parent_record.iid, pull_request: pull_request }
@ -66,6 +66,12 @@ module Gitlab
:issue_event
end
def increment_object_counter(event_name)
counter_type = importer_class::EVENT_COUNTER_MAP[event_name] if import_settings.extended_events?
counter_type ||= object_type
Gitlab::GithubImport::ObjectCounter.increment(project, counter_type, :fetched)
end
def collection_method
:issue_timeline
end

View File

@ -8081,13 +8081,13 @@ msgstr ""
msgid "Billing|An error occurred while loading billable members list."
msgstr ""
msgid "Billing|An error occurred while loading details for the Code Suggestions add-on. If the problem persists, please %{supportLinkStart}contact support%{supportLinkEnd}."
msgid "Billing|An error occurred while loading details for the Duo Pro add-on. If the problem persists, please %{supportLinkStart}contact support%{supportLinkEnd}."
msgstr ""
msgid "Billing|An error occurred while loading pending members list"
msgstr ""
msgid "Billing|An error occurred while loading users of the Code Suggestions add-on. If the problem persists, please %{supportLinkStart}contact support%{supportLinkEnd}."
msgid "Billing|An error occurred while loading users of the Duo Pro add-on. If the problem persists, please %{supportLinkStart}contact support%{supportLinkEnd}."
msgstr ""
msgid "Billing|An error occurred while removing a billable member."
@ -8105,10 +8105,10 @@ msgstr ""
msgid "Billing|Enter at least three characters to search."
msgstr ""
msgid "Billing|Error assigning Code Suggestions add-on"
msgid "Billing|Error assigning Duo Pro add-on"
msgstr ""
msgid "Billing|Error un-assigning Code Suggestions add-on"
msgid "Billing|Error un-assigning Duo Pro add-on"
msgstr ""
msgid "Billing|Explore paid plans"
@ -8173,7 +8173,7 @@ msgstr ""
msgid "Billing|You can upgrade to a paid tier to get access to more features."
msgstr ""
msgid "Billing|You have assigned all available Code Suggestions add-on seats. Please %{salesLinkStart}contact sales%{salesLinkEnd} if you would like to purchase more seats."
msgid "Billing|You have assigned all available Duo Pro add-on seats. Please %{salesLinkStart}contact sales%{salesLinkEnd} if you would like to purchase more seats."
msgstr ""
msgid "Billing|Your group recently changed to use the Free plan. %{over_limit_message} You can free up space for new members by removing those who no longer need access or toggling them to over-limit. To get an unlimited number of members, you can %{link_start}upgrade%{link_end} to a paid tier."
@ -12286,6 +12286,12 @@ msgstr ""
msgid "Comment"
msgstr ""
msgid "Comment & close %{workItemType}"
msgstr ""
msgid "Comment & reopen %{workItemType}"
msgstr ""
msgid "Comment '%{label}' position"
msgstr ""

View File

@ -48,7 +48,9 @@ RSpec.describe 'Database schema', feature_category: :database do
chat_teams: %w[team_id],
ci_builds: %w[project_id runner_id user_id erased_by_id trigger_request_id partition_id auto_canceled_by_partition_id],
ci_namespace_monthly_usages: %w[namespace_id],
ci_pipeline_artifacts: %w[partition_id],
ci_pipeline_chat_data: %w[partition_id],
ci_pipeline_metadata: %w[partition_id],
ci_pipeline_variables: %w[partition_id],
ci_pipelines: %w[partition_id],
ci_runner_projects: %w[runner_id],

View File

@ -11,6 +11,12 @@
"id": {
"type": "integer"
},
"description": {
"type": [
"string",
"null"
]
},
"key": {
"type": "string"
},
@ -35,4 +41,4 @@
}
},
"additionalProperties": false
}
}

View File

@ -208,6 +208,20 @@ describe('Work item comment form component', () => {
['Something went wrong while updating the task. Please try again.'],
]);
});
it('emits `submitForm` event on closing of work item', async () => {
createComponent({
isNewDiscussion: true,
});
findWorkItemToggleStateButton().vm.$emit('submit-comment');
await waitForPromises();
expect(wrapper.emitted('submitForm')).toEqual([
[{ commentText: draftComment, isNoteInternal: false }],
]);
});
});
describe('internal note', () => {
@ -239,6 +253,17 @@ describe('Work item comment form component', () => {
expect(findConfirmButton().text()).toBe(WorkItemCommentForm.i18n.addInternalNote);
});
it('emits `submitForm` event on closing of work item', async () => {
findInternalNoteCheckbox().vm.$emit('input', true);
findWorkItemToggleStateButton().vm.$emit('submit-comment');
await waitForPromises();
expect(wrapper.emitted('submitForm')).toEqual([
[{ commentText: draftComment, isNoteInternal: true }],
]);
});
});
});
});

View File

@ -32,6 +32,7 @@ describe('Work Item State toggle button component', () => {
canUpdate = true,
workItemState = STATE_OPEN,
workItemType = 'Task',
hasComment = false,
} = {}) => {
wrapper = shallowMount(WorkItemStateToggle, {
apolloProvider: createMockApollo([[updateWorkItemMutation, mutationHandler]]),
@ -40,6 +41,7 @@ describe('Work Item State toggle button component', () => {
workItemState,
workItemType,
canUpdate,
hasComment,
},
});
};
@@ -61,6 +63,23 @@
expect(findStateToggleButton().text()).toBe(buttonText);
},
);
it.each`
workItemState | workItemType | buttonText
${STATE_OPEN} | ${'Task'} | ${'Comment & close task'}
${STATE_CLOSED} | ${'Task'} | ${'Comment & reopen task'}
${STATE_OPEN} | ${'Objective'} | ${'Comment & close objective'}
${STATE_CLOSED} | ${'Objective'} | ${'Comment & reopen objective'}
${STATE_OPEN} | ${'Key result'} | ${'Comment & close key result'}
${STATE_CLOSED} | ${'Key result'} | ${'Comment & reopen key result'}
`(
'is "$buttonText" when "$workItemType" state is "$workItemState" and hasComment is true',
({ workItemState, workItemType, buttonText }) => {
createComponent({ workItemState, workItemType, hasComment: true });
expect(findStateToggleButton().text()).toBe(buttonText);
},
);
});
describe('when updating the state', () => {
@@ -92,6 +111,15 @@
});
});
it('emits `submit-comment` when hasComment is true', async () => {
createComponent({ hasComment: true });
findStateToggleButton().vm.$emit('click');
await waitForPromises();
expect(wrapper.emitted('submit-comment')).toBeDefined();
});
it('emits an error message when the mutation was unsuccessful', async () => {
createComponent({ mutationHandler: jest.fn().mockRejectedValue('Error!') });

View File

@@ -7,13 +7,14 @@ RSpec.describe GitlabSchema.types['InheritedCiVariable'], feature_category: :sec
expect(described_class).to have_graphql_fields(
:id,
:key,
:raw,
:variable_type,
:description,
:environment_scope,
:group_name,
:group_ci_cd_settings_path,
:masked,
:protected,
:group_name,
:group_ci_cd_settings_path
:raw,
:variable_type
).at_least
end
end

View File

@@ -5,5 +5,7 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['CiInstanceVariable'] do
specify { expect(described_class.interfaces).to contain_exactly(Types::Ci::VariableInterface) }
specify { expect(described_class).to have_graphql_fields(:environment_scope, :masked, :protected).at_least }
specify do
expect(described_class).to have_graphql_fields(:environment_scope, :masked, :protected, :description).at_least
end
end

View File

@@ -19,10 +19,10 @@ RSpec.describe API::Entities::BulkImports::EntityFailure, feature_category: :imp
end
describe 'exception message' do
it 'truncates exception message to 72 characters' do
failure.update!(exception_message: 'a' * 100)
it 'truncates exception message to 255 characters' do
failure.update!(exception_message: 'a' * 500)
expect(subject[:exception_message].length).to eq(72)
expect(subject[:exception_message].length).to eq(255)
end
it 'removes paths from the message' do
@@ -30,5 +30,13 @@ RSpec.describe API::Entities::BulkImports::EntityFailure, feature_category: :imp
expect(subject[:exception_message]).to eq('Test [FILTERED]')
end
it 'removes long paths before truncating the message' do
exception_message = "Test #{'/abc' * 300} #{'a' * 500}"
failure.update!(exception_message: exception_message)
filtered_message = "Test [FILTERED] #{'a' * 500}"
expect(subject[:exception_message]).to eq(filtered_message.truncate(255))
end
end
end
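The two expectations above pin down an ordering detail: paths are filtered out of the exception message first, and only the filtered result is truncated to 255 characters, so a long path can no longer consume the whole budget. A minimal sketch of that behaviour — the method name and the path regex are hypothetical stand-ins, not taken from the entity:

require 'active_support/core_ext/string/filters'

# Hypothetical helper mirroring what the specs assert: filter paths first,
# then truncate the filtered string to 255 characters.
def sanitized_exception_message(raw_message)
  filtered = raw_message.gsub(%r{\S*/\S+}, '[FILTERED]') # stand-in path filter
  filtered.truncate(255)
end

sanitized_exception_message("Test #{'/abc' * 300} #{'a' * 500}")
# => "Test [FILTERED] aaa..." (255 characters in total)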

View File

@@ -0,0 +1,94 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineArtifact,
feature_category: :continuous_integration do
let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
let(:ci_pipeline_artifacts_table) { table(:ci_pipeline_artifacts, database: :ci) }
let!(:pipeline_100) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
let!(:pipeline_101) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
let!(:pipeline_102) { ci_pipelines_table.create!(id: 3, partition_id: 101) }
let!(:ci_pipeline_artifact_100) do
ci_pipeline_artifacts_table.create!(
id: 1,
pipeline_id: pipeline_100.id,
project_id: 1,
size: 1.megabyte,
file_type: 1,
file_format: 1,
file: fixture_file_upload(
Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json'
),
partition_id: pipeline_100.partition_id
)
end
let!(:ci_pipeline_artifact_101) do
ci_pipeline_artifacts_table.create!(
id: 2,
pipeline_id: pipeline_101.id,
project_id: 1,
size: 1.megabyte,
file_type: 1,
file_format: 1,
file: fixture_file_upload(
Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json'
),
partition_id: pipeline_101.partition_id
)
end
let!(:invalid_ci_pipeline_artifact) do
ci_pipeline_artifacts_table.create!(
id: 3,
pipeline_id: pipeline_102.id,
project_id: 1,
size: 1.megabyte,
file_type: 1,
file_format: 1,
file: fixture_file_upload(
Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json'
),
partition_id: pipeline_100.partition_id
)
end
let(:migration_attrs) do
{
start_id: ci_pipeline_artifacts_table.minimum(:pipeline_id),
end_id: ci_pipeline_artifacts_table.maximum(:pipeline_id),
batch_table: :ci_pipeline_artifacts,
batch_column: :id,
sub_batch_size: 1,
pause_ms: 0,
connection: Ci::ApplicationRecord.connection
}
end
let!(:migration) { described_class.new(**migration_attrs) }
describe '#perform' do
context 'when second partition does not exist' do
it 'does not execute the migration' do
expect { migration.perform }
.not_to change { invalid_ci_pipeline_artifact.reload.partition_id }
end
end
context 'when second partition exists' do
before do
allow(migration).to receive(:uses_multiple_partitions?).and_return(true)
end
it 'fixes invalid records in the wrong partition' do
expect { migration.perform }
.to not_change { ci_pipeline_artifact_100.reload.partition_id }
.and not_change { ci_pipeline_artifact_101.reload.partition_id }
.and change { invalid_ci_pipeline_artifact.reload.partition_id }
.from(100)
.to(101)
end
end
end
end
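These examples only stub uses_multiple_partitions?, so the migration body itself is out of frame. A rough sketch of what #perform plausibly does — inferred from the expectations above, not copied from the migration class, and assuming the usual BatchedMigrationJob helpers (each_sub_batch, connection):

# Inferred sketch: repoint each artifact's partition_id at its pipeline's
# partition_id, doing nothing unless more than one partition is in use.
def perform
  return unless uses_multiple_partitions?

  each_sub_batch do |sub_batch|
    connection.execute(<<~SQL)
      UPDATE ci_pipeline_artifacts
      SET partition_id = ci_pipelines.partition_id
      FROM ci_pipelines
      WHERE ci_pipelines.id = ci_pipeline_artifacts.pipeline_id
        AND ci_pipeline_artifacts.id IN (#{sub_batch.select(:id).to_sql})
    SQL
  end
end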

View File

@@ -0,0 +1,73 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineMetadata,
feature_category: :continuous_integration do
let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
let(:ci_pipeline_metadata_table) { table(:ci_pipeline_metadata, database: :ci) }
let!(:pipeline_100) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
let!(:pipeline_101) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
let!(:pipeline_102) { ci_pipelines_table.create!(id: 3, partition_id: 101) }
let!(:ci_pipeline_metadata_100) do
ci_pipeline_metadata_table.create!(
pipeline_id: pipeline_100.id,
project_id: 1,
partition_id: pipeline_100.partition_id
)
end
let!(:ci_pipeline_metadata_101) do
ci_pipeline_metadata_table.create!(
pipeline_id: pipeline_101.id,
project_id: 1,
partition_id: pipeline_101.partition_id
)
end
let!(:invalid_ci_pipeline_metadata) do
ci_pipeline_metadata_table.create!(
pipeline_id: pipeline_102.id,
project_id: 1,
partition_id: pipeline_100.partition_id
)
end
let(:migration_attrs) do
{
start_id: ci_pipeline_metadata_table.minimum(:pipeline_id),
end_id: ci_pipeline_metadata_table.maximum(:pipeline_id),
batch_table: :ci_pipeline_metadata,
batch_column: :pipeline_id,
sub_batch_size: 1,
pause_ms: 0,
connection: Ci::ApplicationRecord.connection
}
end
let!(:migration) { described_class.new(**migration_attrs) }
describe '#perform' do
context 'when second partition does not exist' do
it 'does not execute the migration' do
expect { migration.perform }
.not_to change { invalid_ci_pipeline_metadata.reload.partition_id }
end
end
context 'when second partition exists' do
before do
allow(migration).to receive(:uses_multiple_partitions?).and_return(true)
end
it 'fixes invalid records in the wrong partition' do
expect { migration.perform }
.to not_change { ci_pipeline_metadata_100.reload.partition_id }
.and not_change { ci_pipeline_metadata_101.reload.partition_id }
.and change { invalid_ci_pipeline_metadata.reload.partition_id }
.from(100)
.to(101)
end
end
end
end

View File

@@ -239,7 +239,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
end
end
context 'when uniquiness validation is tested', type: :model do
context 'when uniqueness validation is tested', type: :model do
subject { build(:ci_variable) }
it 'does not raise exceptions' do

View File

@@ -208,6 +208,38 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
end
end
describe 'increment object counter' do
it 'increments counter' do
expect(Gitlab::GithubImport::ObjectCounter).to receive(:increment).with(project, :issue_event, :fetched)
subject.each_object_to_import { |event| event }
end
context 'when event should increment a mapped fetched counter' do
before do
stub_const('Gitlab::GithubImport::Importer::IssueEventImporter::EVENT_COUNTER_MAP', {
'closed' => 'custom_type'
})
end
it 'increments the mapped fetched counter' do
expect(Gitlab::GithubImport::ObjectCounter).to receive(:increment).with(project, 'custom_type', :fetched)
subject.each_object_to_import { |event| event }
end
context 'when extended_events is disabled' do
let(:extended_events) { false }
it 'increments the issue_event fetched counter' do
expect(Gitlab::GithubImport::ObjectCounter).to receive(:increment).with(project, :issue_event, :fetched)
subject.each_object_to_import { |event| event }
end
end
end
end
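Read together, these examples describe a small lookup: the fetched counter type comes from EVENT_COUNTER_MAP when extended events are enabled, and falls back to :issue_event otherwise. A hedged sketch of that branch — the method and attribute names are illustrative, not the importer's actual code:

def increment_fetched_counter(project, event)
  mapped = Gitlab::GithubImport::Importer::IssueEventImporter::EVENT_COUNTER_MAP[event.event]
  counter_type = extended_events? && mapped ? mapped : :issue_event

  Gitlab::GithubImport::ObjectCounter.increment(project, counter_type, :fetched)
end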
describe 'save events' do
shared_examples 'saves event' do
it 'saves event' do

View File

@@ -0,0 +1,56 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillPartitionIdCiPipelineMetadata, migration: :gitlab_ci, feature_category: :continuous_integration do
let!(:batched_migrations) { table(:batched_background_migrations) }
let!(:migration) { described_class::MIGRATION }
describe '#up' do
context 'with migration present' do
let!(:ci_backfill_partition_id_ci_pipeline_metadata_migration) do
batched_migrations.create!(
job_class_name: 'QueueBackfillPartitionIdCiPipelineMetadata',
table_name: :ci_pipeline_metadata,
column_name: :pipeline_id,
job_arguments: [],
interval: 2.minutes,
min_value: 1,
max_value: 2,
batch_size: 1000,
sub_batch_size: 100,
gitlab_schema: :gitlab_ci,
status: 3 # finished
)
end
context 'when migration finished successfully' do
it 'does not raise exception' do
expect { migrate! }.not_to raise_error
end
it 'schedules background jobs for each batch of ci_pipeline_metadata' do
migrate!
expect(migration).to have_scheduled_batched_migration(
gitlab_schema: :gitlab_ci,
table_name: :ci_pipeline_metadata,
column_name: :pipeline_id,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE
)
end
end
end
end
describe '#down' do
it 'deletes all batched migration records' do
migrate!
schema_migrate_down!
expect(migration).not_to have_scheduled_batched_migration
end
end
end

View File

@@ -0,0 +1,56 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillPartitionIdCiPipelineArtifact, migration: :gitlab_ci, feature_category: :continuous_integration do
let!(:batched_migrations) { table(:batched_background_migrations) }
let!(:migration) { described_class::MIGRATION }
describe '#up' do
context 'with migration present' do
let!(:ci_backfill_partition_id_ci_pipeline_artifact_migration) do
batched_migrations.create!(
job_class_name: 'QueueBackfillPartitionIdCiPipelineArtifact',
table_name: :ci_pipeline_artifacts,
column_name: :id,
job_arguments: [],
interval: 2.minutes,
min_value: 1,
max_value: 2,
batch_size: 1000,
sub_batch_size: 100,
gitlab_schema: :gitlab_ci,
status: 3 # finished
)
end
context 'when migration finished successfully' do
it 'does not raise exception' do
expect { migrate! }.not_to raise_error
end
it 'schedules background jobs for each batch of ci_pipeline_artifacts' do
migrate!
expect(migration).to have_scheduled_batched_migration(
gitlab_schema: :gitlab_ci,
table_name: :ci_pipeline_artifacts,
column_name: :id,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE
)
end
end
end
end
describe '#down' do
it 'deletes all batched migration records' do
migrate!
schema_migrate_down!
expect(migration).not_to have_scheduled_batched_migration
end
end
end

View File

@@ -307,8 +307,7 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
import = build(:bulk_import, source_version: '16.2.0')
entity = build(:bulk_import_entity, :project_entity, bulk_import: import)
expect(entity.export_relations_url_path(batched: true))
.to eq("/projects/#{entity.source_xid}/export_relations?batched=true")
expect(entity.export_relations_url_path).to eq("/projects/#{entity.source_xid}/export_relations?batched=true")
end
end
@@ -316,8 +315,7 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
it 'returns export relations url' do
entity = build(:bulk_import_entity)
expect(entity.export_relations_url_path(batched: true))
.to eq("/groups/#{entity.source_xid}/export_relations")
expect(entity.export_relations_url_path).to eq("/groups/#{entity.source_xid}/export_relations")
end
end
end

View File

@@ -113,4 +113,10 @@ RSpec.describe Ci::InstanceVariable do
end
end
end
describe "description" do
it { is_expected.to allow_values('').for(:description) }
it { is_expected.to allow_values(nil).for(:description) }
it { is_expected.to validate_length_of(:description).is_at_most(255) }
end
end
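The model change these three examples imply — an inference from the specs, not the model diff itself — is a capped, optional description:

# Hypothetical validation satisfying all three examples above: blank and nil
# values pass, anything longer than 255 characters fails.
validates :description, length: { maximum: 255 }, allow_blank: true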

View File

@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Ci::PipelineArtifact, type: :model do
RSpec.describe Ci::PipelineArtifact, type: :model, feature_category: :build_artifacts do
let(:coverage_report) { create(:ci_pipeline_artifact, :with_coverage_report) }
describe 'associations' do
@@ -309,4 +309,19 @@ RSpec.describe Ci::PipelineArtifact, type: :model do
let!(:model) { create(:ci_pipeline_artifact, project: parent) }
end
end
describe 'partitioning', :ci_partitionable do
include Ci::PartitioningHelpers
let(:pipeline) { create(:ci_pipeline) }
let(:pipeline_artifact) { create(:ci_pipeline_artifact, pipeline: pipeline) }
before do
stub_current_partition_id
end
it 'assigns the same partition id as the one the pipeline has' do
expect(pipeline_artifact.partition_id).to eq(ci_testing_partition_id)
end
end
end
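The spec stubs the current partition and asserts the artifact inherits the pipeline's partition_id. In model terms that usually means the Ci::Partitionable concern scoped to the pipeline; a sketch of what the model presumably declares (assumed, since the model diff is not part of this excerpt):

class Ci::PipelineArtifact < Ci::ApplicationRecord
  include Ci::Partitionable

  # Presumed declaration: derive partition_id from the owning pipeline.
  partitionable scope: :pipeline
end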

View File

@@ -27,4 +27,19 @@ RSpec.describe Ci::PipelineMetadata, feature_category: :pipeline_composition do
).with_prefix
end
end
describe 'partitioning', :ci_partitionable do
include Ci::PartitioningHelpers
let(:pipeline) { create(:ci_pipeline) }
let(:pipeline_metadata) { create(:ci_pipeline_metadata, pipeline: pipeline) }
before do
stub_current_partition_id
end
it 'assigns the same partition id as the one the pipeline has' do
expect(pipeline_metadata.partition_id).to eq(ci_testing_partition_id)
end
end
end

View File

@@ -12,6 +12,7 @@ RSpec.describe 'Query.ciVariables', feature_category: :secrets_management do
nodes {
id
key
description
value
variableType
protected
@@ -36,6 +37,7 @@ RSpec.describe 'Query.ciVariables', feature_category: :secrets_management do
expect(graphql_data.dig('ciVariables', 'nodes')).to contain_exactly({
'id' => variable.to_global_id.to_s,
'key' => 'TEST_VAR',
'description' => nil,
'value' => 'test',
'variableType' => 'ENV_VAR',
'masked' => false,

View File

@@ -11,7 +11,7 @@ RSpec.describe Ci::GroupVariableEntity do
it 'contains required fields' do
expect(subject.keys).to contain_exactly(
:id, :key, :value, :protected, :variable_type, :environment_scope, :raw, :masked
:id, :key, :description, :value, :protected, :variable_type, :environment_scope, :raw, :masked
)
end
end

View File

@@ -11,7 +11,7 @@ RSpec.describe Ci::VariableEntity do
it 'contains required fields' do
expect(subject.keys).to contain_exactly(
:id, :key, :value, :protected, :environment_scope, :variable_type, :raw, :masked
:id, :key, :description, :value, :protected, :environment_scope, :variable_type, :raw, :masked
)
end
end

View File

@@ -68,4 +68,11 @@ RSpec.configure do |config|
::ApplicationRecord.gitlab_transactions_stack.clear
end
config.before(:suite) do
ActiveSupport::Notifications.subscribe("factory_bot.run_factory") do |_name, _start, _finish, _id, payload|
strategy = payload[:strategy]
Thread.current[:factory_bot_objects] -= 1 if strategy == :create
end
end
end

View File

@@ -4,13 +4,14 @@ FactoryBot.define do
after(:build) do |object, _|
next unless object.respond_to?(:factory_bot_built=)
# This helps PreventCrossDatabaseModification temporarily allow
# the object's table when it's saved later.
object.factory_bot_built = true
end
before(:create) do |object, _|
next unless object.respond_to?(:factory_bot_built=)
object.factory_bot_built = false
before(:create) do |_object, _|
Thread.current[:factory_bot_objects] ||= 0
Thread.current[:factory_bot_objects] += 1
end
end
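Taken together with the spec_helper subscription above, the net effect is a per-thread gauge of factory create runs in flight: before(:create) increments it, and the factory_bot.run_factory notification decrements it once a run finishes. A toy trace of the counter through one nested create, assuming the notification fires once per factory run, associations included:

Thread.current[:factory_bot_objects] = 0

Thread.current[:factory_bot_objects] += 1 # before(:create), outer factory
Thread.current[:factory_bot_objects] += 1 # before(:create), nested association
Thread.current[:factory_bot_objects] -= 1 # run_factory notification, inner create done
Thread.current[:factory_bot_objects] -= 1 # run_factory notification, outer create done

Thread.current[:factory_bot_objects] # => 0 after a balanced run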

View File

@@ -82,7 +82,7 @@ RSpec.shared_examples 'PATCH #update updates variables' do
context 'with valid new variable parameters' do
let(:variables_attributes) do
[
variable_attributes.merge(secret_value: 'other_value'),
variable_attributes.merge(secret_value: 'other_value', description: 'other_description'),
new_variable_attributes
]
end
@@ -94,6 +94,7 @@
variable.reload
expect(variable.value).to eq('other_value')
expect(variable.description).to eq('other_description')
expect(variable.raw?).not_to be(old_raw)
end

View File

@@ -252,6 +252,8 @@ RSpec.describe Tooling::Danger::ProjectHelper, feature_category: :tooling do
[:backend, :analytics_instrumentation] | '+ foo_count(User.active)' | ['lib/gitlab/usage_data.rb']
[:backend] | '+ count(User.active)' | ['user.rb']
[:database, :backend] | '+ User.upsert({ name: "blah" })' | ['app/foo/bar.rb']
[:database, :backend] | '+ User.upsert(' | ['app/foo/bar.rb']
[:database, :backend] | '+ Organizations::OrganizationUser.upsert({' | ['app/foo/bar.rb']
[:database, :backend] | '+ upsert({ name: "blah" })' | ['app/foo/bar.rb']
[:database, :backend] | '+ .upsert({ name: "blah" })' | ['app/foo/bar.rb']
[:database, :backend] | '+ .delete_all' | ['app/foo/bar.rb']

View File

@@ -62,7 +62,7 @@ RSpec.describe BulkImports::ExportRequestWorker, feature_category: :importers do
context 'when something goes wrong during source id fetch' do
let(:entity_source_id) { 'invalid' }
it 'logs the error & requests relations export using full path url' do
it 'logs the exception as a warning & requests relations export using full path url' do
allow(BulkImports::EntityWorker).to receive(:perform_async)
expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
@@ -74,7 +74,7 @@ RSpec.describe BulkImports::ExportRequestWorker, feature_category: :importers do
expect_next_instance_of(BulkImports::Logger) do |logger|
expect(logger).to receive(:with_entity).with(entity).and_call_original
expect(logger).to receive(:error).with(
expect(logger).to receive(:warn).with(
a_hash_including(
'exception.backtrace' => anything,
'exception.class' => 'NoMethodError',
@@ -123,20 +123,6 @@ RSpec.describe BulkImports::ExportRequestWorker, feature_category: :importers do
described_class.new.perform(entity.id)
end
context 'when bulk_imports_batched_import_export feature flag is disabled' do
it 'requests relation export without batched param' do
stub_feature_flags(bulk_imports_batched_import_export: false)
expected_url = "/projects/#{entity.source_xid}/export_relations"
expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
expect(client).to receive(:post).with(expected_url)
end
described_class.new.perform(entity.id)
end
end
end
end

View File

@@ -5,16 +5,13 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::ImportIssueEventWorker, feature_category: :importers do
subject(:worker) { described_class.new }
describe '#import' do
let(:import_state) { create(:import_state, :started) }
let(:project) do
instance_double('Project', full_path: 'foo/bar', id: 1, import_state: import_state)
describe '#execute' do
let_it_be(:project) do
create(:project, import_url: 'https://github.com/foo/bar.git', import_state: create(:import_state, :started))
end
let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:importer) { instance_double('Gitlab::GithubImport::Importer::IssueEventImporter') }
let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:extended_events) { true }
let(:event_hash) do
{
'id' => 6501124486,
@@ -29,23 +26,55 @@ RSpec.describe Gitlab::GithubImport::ImportIssueEventWorker, feature_category: :
}
end
it 'imports an issue event' do
expect(Gitlab::GithubImport::Importer::IssueEventImporter)
.to receive(:new)
.with(
an_instance_of(Gitlab::GithubImport::Representation::IssueEvent),
project,
client
)
.and_return(importer)
before do
allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
allow(setting).to receive(:extended_events?).and_return(extended_events)
end
end
expect(importer).to receive(:execute)
it 'imports an issue event and increase importer counter' do
expect_next_instance_of(Gitlab::GithubImport::Importer::IssueEventImporter,
an_instance_of(Gitlab::GithubImport::Representation::IssueEvent),
project,
client
) do |importer|
expect(importer).to receive(:execute)
end
expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment)
.with(project, :issue_event, :imported)
.and_call_original
worker.import(project, client, event_hash)
end
context 'when event should increment a mapped importer counter' do
before do
stub_const('Gitlab::GithubImport::Importer::IssueEventImporter::EVENT_COUNTER_MAP', {
'closed' => 'custom_type'
})
allow_next_instance_of(Gitlab::GithubImport::Importer::IssueEventImporter) do |importer|
allow(importer).to receive(:execute)
end
end
it 'increments the mapped importer counter' do
expect(Gitlab::GithubImport::ObjectCounter).to receive(:increment).with(project, 'custom_type', :imported)
worker.import(project, client, event_hash)
end
context 'when extended_events is disabled' do
let(:extended_events) { false }
it 'increments the issue_event importer counter' do
expect(Gitlab::GithubImport::ObjectCounter).to receive(:increment).with(project, :issue_event, :imported)
worker.import(project, client, event_hash)
end
end
end
end
end

View File

@@ -105,7 +105,7 @@ module Tooling
%r{\A(app/models/project_authorization|app/services/users/refresh_authorized_projects_service)(/|\.rb)} => [:database, :backend],
%r{\A((ee|jh)/)?app/finders/} => [:database, :backend],
%r{\Arubocop/cop/migration(/|\.rb)} => :database,
[%r{\A((ee|jh)/)?(app|lib)/.+\.rb}, %r{\A\+\s+(\w*\.)?(update_all|upsert|upsert_all|delete_all|destroy_all)(\(.*\))?\s*\z}] => [:database, :backend],
[%r{\A((ee|jh)/)?(app|lib)/.+\.rb}, %r{\A\+\s+(\S*\.)?(update_all|upsert|upsert_all|delete_all|destroy_all)(\(.*\)|\(|\(.*)?\s*\z}] => [:database, :backend],
%r{\Alib/gitlab/ci/templates} => :ci_template,
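A quick sanity check of the widened pattern, assuming (as the spec table does) that it is applied line by line to diff additions: the new \( and \(.* alternates let a multi-line call whose closing parenthesis sits on a later line still match.

pattern = %r{\A\+\s+(\S*\.)?(update_all|upsert|upsert_all|delete_all|destroy_all)(\(.*\)|\(|\(.*)?\s*\z}

[
  '+ User.upsert({ name: "blah" })',
  '+ User.upsert(',
  '+ Organizations::OrganizationUser.upsert({',
  '+ .delete_all'
].each do |line|
  puts pattern.match?(line) # => true for every line
end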