Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-09-17 15:07:18 +00:00
parent c394f78c5a
commit 19a229c898
48 changed files with 518 additions and 111 deletions

View File

@ -216,7 +216,7 @@ graphql-schema-dump:
- .default-retry
- .ruby-cache
- .default-before_script
- .frontend:rules:default-frontend-jobs-with-docs-changes
- .frontend:rules:default-frontend-jobs
stage: fixtures
needs: []
script:

View File

@ -1311,7 +1311,7 @@
rules:
- <<: *if-not-dot-com-gitlab-org-default-branch
when: never
- !reference [.frontend:rules:default-frontend-jobs-with-docs-changes, rules]
- !reference [.frontend:rules:default-frontend-jobs, rules]
# .frontend:rules:default-frontend-jobs, with additional rules when MR is not approved
.frontend:rules:frontend_fixture:

View File

@ -277,7 +277,7 @@
{"name":"grape-entity","version":"1.0.1","platform":"ruby","checksum":"e00f9e94e407aff77aa2945d741f544d07e48501927942988799913151d02634"},
{"name":"grape-path-helpers","version":"2.0.1","platform":"ruby","checksum":"ad5216e52c6e796738a9118087352ab4c962900dbad1d8f8c0f96e093c6702d7"},
{"name":"grape-swagger","version":"2.1.0","platform":"ruby","checksum":"b64b310101628c697f7d3297dbf454af1125c970289e0f3f9101a97c00cd211e"},
{"name":"grape-swagger-entity","version":"0.5.4","platform":"ruby","checksum":"34c1644de6523c64cee922988bad3d1057634224f26dd48b9b5c1f90709bb571"},
{"name":"grape-swagger-entity","version":"0.5.5","platform":"ruby","checksum":"a2a0eb28964b1a56775a3571358a9f0a300b703dbaee1ee535adb2a7bed7ece6"},
{"name":"grape_logging","version":"1.8.4","platform":"ruby","checksum":"efcc3e322dbd5d620a68f078733b7db043cf12680144cd03c982f14115c792d1"},
{"name":"graphiql-rails","version":"1.10.0","platform":"ruby","checksum":"b557f989a737c8b9e985142609bec52fb1e9393a701eb50e02a7c14422891040"},
{"name":"graphlient","version":"0.8.0","platform":"ruby","checksum":"98c408da1d083454e9f5e274f3b0b6261e2a0c2b5f2ed7b3ef9441d46f8e7cb1"},

View File

@ -895,7 +895,7 @@ GEM
grape-swagger (2.1.0)
grape (>= 1.7, < 3.0)
rack-test (~> 2)
grape-swagger-entity (0.5.4)
grape-swagger-entity (0.5.5)
grape-entity (~> 1)
grape-swagger (~> 2)
grape_logging (1.8.4)

View File

@ -277,7 +277,7 @@
{"name":"grape-entity","version":"1.0.1","platform":"ruby","checksum":"e00f9e94e407aff77aa2945d741f544d07e48501927942988799913151d02634"},
{"name":"grape-path-helpers","version":"2.0.1","platform":"ruby","checksum":"ad5216e52c6e796738a9118087352ab4c962900dbad1d8f8c0f96e093c6702d7"},
{"name":"grape-swagger","version":"2.1.0","platform":"ruby","checksum":"b64b310101628c697f7d3297dbf454af1125c970289e0f3f9101a97c00cd211e"},
{"name":"grape-swagger-entity","version":"0.5.4","platform":"ruby","checksum":"34c1644de6523c64cee922988bad3d1057634224f26dd48b9b5c1f90709bb571"},
{"name":"grape-swagger-entity","version":"0.5.5","platform":"ruby","checksum":"a2a0eb28964b1a56775a3571358a9f0a300b703dbaee1ee535adb2a7bed7ece6"},
{"name":"grape_logging","version":"1.8.4","platform":"ruby","checksum":"efcc3e322dbd5d620a68f078733b7db043cf12680144cd03c982f14115c792d1"},
{"name":"graphiql-rails","version":"1.10.0","platform":"ruby","checksum":"b557f989a737c8b9e985142609bec52fb1e9393a701eb50e02a7c14422891040"},
{"name":"graphlient","version":"0.8.0","platform":"ruby","checksum":"98c408da1d083454e9f5e274f3b0b6261e2a0c2b5f2ed7b3ef9441d46f8e7cb1"},

View File

@ -905,7 +905,7 @@ GEM
grape-swagger (2.1.0)
grape (>= 1.7, < 3.0)
rack-test (~> 2)
grape-swagger-entity (0.5.4)
grape-swagger-entity (0.5.5)
grape-entity (~> 1)
grape-swagger (~> 2)
grape_logging (1.8.4)

View File

@ -6,7 +6,6 @@ import { createAlert } from '~/alert';
import Api from '~/api';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import { STATUS_ALL, STATUS_CLOSED, STATUS_OPEN, STATUS_MERGED } from '~/issues/constants';
import axios from '~/lib/utils/axios_utils';
import { fetchPolicies } from '~/lib/graphql';
import { isPositiveInteger } from '~/lib/utils/number_utils';
import { scrollUp } from '~/lib/utils/scroll_utils';
@ -43,6 +42,7 @@ import {
TOKEN_TITLE_RELEASE,
TOKEN_TYPE_RELEASE,
} from '~/vue_shared/components/filtered_search_bar/constants';
import { AutocompleteCache } from '~/issues/dashboard/utils';
import {
convertToApiParams,
convertToSearchQuery,
@ -401,6 +401,7 @@ export default {
},
created() {
this.updateData(this.initialSort);
this.autocompleteCache = new AutocompleteCache();
},
methods: {
fetchBranches(search) {
@ -414,8 +415,13 @@ export default {
});
});
},
fetchEmojis() {
return axios.get(this.autocompleteAwardEmojisPath);
fetchEmojis(search) {
return this.autocompleteCache.fetch({
url: this.autocompleteAwardEmojisPath,
cacheName: 'emojis',
searchProperty: 'name',
search,
});
},
fetchLabelsWithFetchPolicy(search, fetchPolicy = fetchPolicies.CACHE_FIRST) {
return this.$apollo

View File

@ -636,6 +636,13 @@ vulnerability_scanners:
- table: projects
column: project_id
on_delete: async_delete
vulnerability_state_transitions:
- table: projects
column: project_id
on_delete: async_delete
- table: users
column: author_id
on_delete: async_nullify
vulnerability_statistics:
- table: ci_pipelines
column: latest_pipeline_id

View File

@ -0,0 +1,9 @@
---
migration_job_name: BackfillCiJobArtifactStatesProjectId
description: Backfills sharding key `ci_job_artifact_states.project_id` from `p_ci_job_artifacts`.
feature_category: geo_replication
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165940
milestone: '17.5'
queued_migration_version: 20240912122440
finalize_after: '2024-10-22'
finalized_by: # version of the migration that finalized this BBM

View File

@ -17,3 +17,4 @@ desired_sharding_key:
table: p_ci_job_artifacts
sharding_key: project_id
belongs_to: job_artifact
desired_sharding_key_migration_job_name: BackfillCiJobArtifactStatesProjectId

View File

@ -7,7 +7,7 @@ feature_categories:
description: Stores state transitions of a Vulnerability
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87957
milestone: '15.1'
gitlab_schema: gitlab_main_cell
gitlab_schema: gitlab_sec
allow_cross_foreign_keys:
- gitlab_main_clusterwide
desired_sharding_key:

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
# Adds a nullable bigint `project_id` column to `ci_job_artifact_states`.
# This is the table's desired sharding key; it is populated by a sync
# trigger and backfilled from `p_ci_job_artifacts` by a follow-up
# batched background migration introduced in the same milestone.
class AddProjectIdToCiJobArtifactStates < Gitlab::Database::Migration[2.2]
milestone '17.5'
# Reversible: Rails can automatically roll back `add_column`.
def change
add_column :ci_job_artifact_states, :project_id, :bigint
end
end

View File

@ -10,31 +10,35 @@ class QueueBackfillVulnerabilityStateTransitionsProjectId < Gitlab::Database::Mi
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:vulnerability_state_transitions,
:id,
:project_id,
:vulnerabilities,
:project_id,
:vulnerability_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(
MIGRATION,
:vulnerability_state_transitions,
:id,
[
Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas.with_suppressed do
queue_batched_background_migration(
MIGRATION,
:vulnerability_state_transitions,
:id,
:project_id,
:vulnerabilities,
:project_id,
:vulnerability_id
]
)
:vulnerability_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
end
def down
Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas.with_suppressed do
delete_batched_background_migration(
MIGRATION,
:vulnerability_state_transitions,
:id,
[
:project_id,
:vulnerabilities,
:project_id,
:vulnerability_id
]
)
end
end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
# Removes the foreign key from `vulnerability_state_transitions.project_id`
# to `projects(id)`. The table's gitlab_schema changes to `gitlab_sec` in
# this commit, so the hard FK is replaced by a loose foreign key entry
# (async_delete on the projects table).
class RemoveProjectsVulnerabilityStateTransitionsProjectIdFk < Gitlab::Database::Migration[2.2]
milestone '17.5'
# Lock-retry/concurrent FK helpers must run outside a DDL transaction.
disable_ddl_transaction!
FOREIGN_KEY_NAME = "fk_d3ede71c58"
def up
with_lock_retries do
remove_foreign_key_if_exists(:vulnerability_state_transitions, :projects,
name: FOREIGN_KEY_NAME, reverse_lock_order: true)
end
end
# Restores the FK (ON DELETE CASCADE) on rollback.
def down
add_concurrent_foreign_key(:vulnerability_state_transitions, :projects,
name: FOREIGN_KEY_NAME, column: :project_id,
target_column: :id, on_delete: :cascade)
end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
# Removes the foreign key from `vulnerability_state_transitions.author_id`
# to `users(id)`. The table's gitlab_schema changes to `gitlab_sec` in this
# commit, so the hard FK is replaced by a loose foreign key entry
# (async_nullify on the users table).
class RemoveUsersVulnerabilityStateTransitionsAuthorIdFk < Gitlab::Database::Migration[2.2]
milestone '17.5'
# Lock-retry/concurrent FK helpers must run outside a DDL transaction.
disable_ddl_transaction!
FOREIGN_KEY_NAME = "fk_e719dc63df"
def up
with_lock_retries do
remove_foreign_key_if_exists(:vulnerability_state_transitions, :users,
name: FOREIGN_KEY_NAME, reverse_lock_order: true)
end
end
# Restores the FK (ON DELETE SET NULL) on rollback.
def down
add_concurrent_foreign_key(:vulnerability_state_transitions, :users,
name: FOREIGN_KEY_NAME, column: :author_id,
target_column: :id, on_delete: :nullify)
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
# Registers an asynchronous index build on `ci_job_artifact_states.project_id`
# so the index can be created outside the deployment (presumably to be added
# synchronously by a later migration once built — standard async-index flow).
class PrepareIndexCiJobArtifactStatesOnProjectId < Gitlab::Database::Migration[2.2]
milestone '17.5'
disable_ddl_transaction!
INDEX_NAME = 'index_ci_job_artifact_states_on_project_id'
def up
prepare_async_index :ci_job_artifact_states, :project_id, name: INDEX_NAME
end
# Cancels the pending async index request on rollback.
def down
unprepare_async_index :ci_job_artifact_states, INDEX_NAME
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
# Installs a BEFORE INSERT OR UPDATE trigger on `ci_job_artifact_states`
# that fills in `project_id` from the parent `p_ci_job_artifacts` row
# (matched via `job_artifact_id`) whenever it is NULL — see
# trigger_a465de38164e in structure.sql.
class AddCiJobArtifactStatesProjectIdTrigger < Gitlab::Database::Migration[2.2]
milestone '17.5'
def up
install_sharding_key_assignment_trigger(
table: :ci_job_artifact_states,
sharding_key: :project_id,
parent_table: :p_ci_job_artifacts,
parent_sharding_key: :project_id,
foreign_key: :job_artifact_id
)
end
# Drops the trigger; arguments must mirror `up` so the helper can locate it.
def down
remove_sharding_key_assignment_trigger(
table: :ci_job_artifact_states,
sharding_key: :project_id,
parent_table: :p_ci_job_artifacts,
parent_sharding_key: :project_id,
foreign_key: :job_artifact_id
)
end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
# Enqueues the BackfillCiJobArtifactStatesProjectId batched background
# migration, which copies `project_id` from `p_ci_job_artifacts` into
# existing `ci_job_artifact_states` rows (joined on `job_artifact_id`).
class QueueBackfillCiJobArtifactStatesProjectId < Gitlab::Database::Migration[2.2]
milestone '17.5'
# Both tables live in the CI database.
restrict_gitlab_migration gitlab_schema: :gitlab_ci
MIGRATION = "BackfillCiJobArtifactStatesProjectId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 10000
SUB_BATCH_SIZE = 1000
def up
queue_batched_background_migration(
MIGRATION,
:ci_job_artifact_states,
:job_artifact_id,
:project_id,
:p_ci_job_artifacts,
:project_id,
:job_artifact_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
# Deletes the queued migration; the job-argument array must match `up`
# exactly for the record to be found.
def down
delete_batched_background_migration(
MIGRATION,
:ci_job_artifact_states,
:job_artifact_id,
[
:project_id,
:p_ci_job_artifacts,
:project_id,
:job_artifact_id
]
)
end
end

View File

@ -0,0 +1 @@
7edf2325bcefee99911664e25e3942ec27267e29e6fe5aba9c8d97fc379e192a

View File

@ -0,0 +1 @@
d25c8eda6de9efe4ed47ff23646934bcc31818ccd882ce78f4822fd9e91d0b8b

View File

@ -0,0 +1 @@
63488d8393568e7e93915f351710ba917bee9dae7b73c1bfae0a5a85ce42ec3d

View File

@ -0,0 +1 @@
92484a5cbcf38847998b38a95ce28102252b4ba25fd56cb6b092b00b0e4c48bf

View File

@ -0,0 +1 @@
26df32dffad29ac5ce904b46ea706d36c8f1c15dd76d85a0fa9a19228f6d8c94

View File

@ -0,0 +1 @@
4f8d4d2a2dc5e118366c044b82d477602b9a842c3eaae68072b7e4b3c13be8fd

View File

@ -1833,6 +1833,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_a465de38164e() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."project_id" IS NULL THEN
SELECT "project_id"
INTO NEW."project_id"
FROM "p_ci_job_artifacts"
WHERE "p_ci_job_artifacts"."id" = NEW."job_artifact_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_a4e4fb2451d9() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -8100,6 +8116,7 @@ CREATE TABLE ci_job_artifact_states (
verification_checksum bytea,
verification_failure text,
partition_id bigint NOT NULL,
project_id bigint,
CONSTRAINT check_df832b66ea CHECK ((char_length(verification_failure) <= 255))
);
@ -33042,6 +33059,8 @@ CREATE TRIGGER trigger_a1bc7c70cbdf BEFORE INSERT OR UPDATE ON vulnerability_use
CREATE TRIGGER trigger_a253cb3cacdf BEFORE INSERT OR UPDATE ON dora_daily_metrics FOR EACH ROW EXECUTE FUNCTION trigger_a253cb3cacdf();
CREATE TRIGGER trigger_a465de38164e BEFORE INSERT OR UPDATE ON ci_job_artifact_states FOR EACH ROW EXECUTE FUNCTION trigger_a465de38164e();
CREATE TRIGGER trigger_a4e4fb2451d9 BEFORE INSERT OR UPDATE ON epic_user_mentions FOR EACH ROW EXECUTE FUNCTION trigger_a4e4fb2451d9();
CREATE TRIGGER trigger_a7e0fb195210 BEFORE INSERT OR UPDATE ON vulnerability_finding_evidences FOR EACH ROW EXECUTE FUNCTION trigger_a7e0fb195210();
@ -34390,9 +34409,6 @@ ALTER TABLE ONLY ci_builds
ALTER TABLE ONLY boards_epic_user_preferences
ADD CONSTRAINT fk_d32c3d693c FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY vulnerability_state_transitions
ADD CONSTRAINT fk_d3ede71c58 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY ci_sources_pipelines
ADD CONSTRAINT fk_d4e29af7d7_p FOREIGN KEY (source_partition_id, source_pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
@ -34498,9 +34514,6 @@ ALTER TABLE ONLY packages_debian_group_components
ALTER TABLE ONLY merge_requests
ADD CONSTRAINT fk_e719a85f8a FOREIGN KEY (author_id) REFERENCES users(id) ON DELETE SET NULL;
ALTER TABLE ONLY vulnerability_state_transitions
ADD CONSTRAINT fk_e719dc63df FOREIGN KEY (author_id) REFERENCES users(id) ON DELETE SET NULL;
ALTER TABLE ONLY issue_links
ADD CONSTRAINT fk_e71bb44f1f FOREIGN KEY (target_id) REFERENCES issues(id) ON DELETE CASCADE;

View File

@ -34156,6 +34156,7 @@ Represents a vulnerability.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="vulnerabilityairesolutionavailable"></a>`aiResolutionAvailable` | [`Boolean`](#boolean) | Indicates whether this type of vulnerability can be resolved with AI. |
| <a id="vulnerabilityairesolutionenabled"></a>`aiResolutionEnabled` | [`Boolean`](#boolean) | Indicates whether this specific vulnerability can be resolved with AI. |
| <a id="vulnerabilitycommenters"></a>`commenters` | [`UserCoreConnection!`](#usercoreconnection) | All commenters on this noteable. (see [Connections](#connections)) |
| <a id="vulnerabilityconfirmedat"></a>`confirmedAt` | [`Time`](#time) | Timestamp of when the vulnerability state was changed to confirmed. |
| <a id="vulnerabilityconfirmedby"></a>`confirmedBy` | [`UserCore`](#usercore) | User that confirmed the vulnerability. |

View File

@ -14,6 +14,7 @@ module Gitlab
autoload :BackupExecutor, 'gitlab/backup/cli/backup_executor'
autoload :Commands, 'gitlab/backup/cli/commands'
autoload :Dependencies, 'gitlab/backup/cli/dependencies'
autoload :Errors, 'gitlab/backup/cli/errors'
autoload :GitlabConfig, 'gitlab/backup/cli/gitlab_config'
autoload :Metadata, 'gitlab/backup/cli/metadata'
autoload :Output, 'gitlab/backup/cli/output'

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true
module Gitlab
module Backup
module Cli
# Namespace for gitlab-backup-cli error classes. Each constant is lazily
# loaded on first reference via Module#autoload, mirroring the autoload
# list in the gem's root module.
module Errors
autoload :DatabaseBackupError, 'gitlab/backup/cli/errors/database_backup_error'
autoload :FileBackupError, 'gitlab/backup/cli/errors/file_backup_error'
end
end
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true

module Gitlab
  module Backup
    module Cli
      module Errors
        # Raised when a compressed database dump cannot be created — e.g. when
        # the database connection cannot be established (see the rescue of
        # ActiveRecord::ConnectionNotEstablished in the database backup task).
        class DatabaseBackupError < Error
          attr_reader :config, :db_file_name

          # @param config [Hash] ActiveRecord connection settings (reads :host, :port, :database)
          # @param db_file_name [String] path of the compressed dump file that failed
          def initialize(config, db_file_name)
            @config = config
            @db_file_name = db_file_name
          end

          # Detailed, human-readable description including the connection
          # settings that were in use when the backup failed.
          def message
            format(
              "Failed to create compressed file '%{file}' " \
              "when trying to backup the main database:\n" \
              " - host: '%{host}'\n" \
              " - port: '%{port}'\n" \
              " - database: '%{database}'",
              file: db_file_name,
              host: config[:host],
              port: config[:port],
              database: config[:database]
            )
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true

module Gitlab
  module Backup
    module Cli
      module Errors
        # Raised when compressing application file paths into a backup
        # tarball fails.
        class FileBackupError < StandardError
          attr_reader :storage_path, :backup_tarball

          # @param app_files_dir [String] directory whose contents were being backed up
          # @param backup_tarball [String] destination archive that could not be created
          def initialize(app_files_dir, backup_tarball)
            @storage_path = app_files_dir
            @backup_tarball = backup_tarball
          end

          # Human-readable description of the failed backup attempt.
          # NOTE: the trailing space is part of the original message format.
          def message
            format(
              "Failed to create compressed file '%{tarball}' " \
              "when trying to backup the following paths: '%{path}' ",
              tarball: backup_tarball,
              path: storage_path
            )
          end
        end
      end
    end
  end
end

View File

@ -70,7 +70,7 @@ module Gitlab
backup_connection = ::Backup::DatabaseConnection.new(database_connection_name)
backup_connection.restore_timeouts!
rescue ActiveRecord::ConnectionNotEstablished
raise ::Backup::DatabaseBackupError.new(
raise DatabaseBackupError.new(
backup_connection.database_configuration.activerecord_variables,
file_name(destination_dir, database_connection_name)
)
@ -245,7 +245,7 @@ module Gitlab
# Trigger a transaction snapshot export that will be used by pg_dump later on
backup_connection.export_snapshot!
rescue ActiveRecord::ConnectionNotEstablished
raise ::Backup::DatabaseBackupError.new(
raise DatabaseBackupError.new(
backup_connection.database_configuration.activerecord_variables,
file_name(destination_dir, database_connection_name)
)

View File

@ -0,0 +1,42 @@
# frozen_string_literal: true
# Unit specs for Gitlab::Backup::Cli::Errors::DatabaseBackupError:
# verifies attribute assignment and the exact formatted error message.
RSpec.describe Gitlab::Backup::Cli::Errors::DatabaseBackupError do
let(:config) do
{
host: 'localhost',
port: 5432,
database: 'gitlab_db'
}
end
let(:db_file_name) { 'gitlab_backup.sql.gz' }
subject(:error) { described_class.new(config, db_file_name) }
describe '#initialize' do
it 'sets the config and db_file_name attributes' do
expect(error.config).to eq(config)
expect(error.db_file_name).to eq(db_file_name)
end
end
describe '#message' do
# Pins the full message format, including the multi-line connection details.
it 'returns a formatted error message' do
expected_message = "Failed to create compressed file 'gitlab_backup.sql.gz' " \
"when trying to backup the main database:\n - host: " \
"'localhost'\n - port: '5432'\n - database: 'gitlab_db'"
expect(error.message).to eq(expected_message)
end
it 'includes the correct database information in the message' do
message = error.message
expect(message).to include("host: '#{config[:host]}'")
expect(message).to include("port: '#{config[:port]}'")
expect(message).to include("database: '#{config[:database]}'")
end
it 'includes the correct db_file_name in the message' do
expect(error.message).to include("'#{db_file_name}'")
end
end
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
# Unit specs for Gitlab::Backup::Cli::Errors::FileBackupError:
# verifies attribute assignment and the exact formatted error message.
RSpec.describe Gitlab::Backup::Cli::Errors::FileBackupError do
let(:app_files_dir) { '/path/to/app/files' }
let(:backup_tarball) { '/path/to/backup.tar.gz' }
let(:error) { described_class.new(app_files_dir, backup_tarball) }
describe '#initialize' do
it 'sets the storage_path attribute' do
expect(error.storage_path).to eq(app_files_dir)
end
it 'sets the backup_tarball attribute' do
expect(error.backup_tarball).to eq(backup_tarball)
end
end
describe '#message' do
# Pins the full message format (note the intentional trailing space).
it 'returns a formatted error message' do
expected_message = "Failed to create compressed file '/path/to/backup.tar.gz' " \
"when trying to backup the following paths: '/path/to/app/files' "
expect(error.message).to eq(expected_message)
end
it 'includes the backup_tarball in the message' do
expect(error.message).to include(backup_tarball)
end
it 'includes the storage_path in the message' do
expect(error.message).to include(app_files_dir)
end
end
end

View File

@ -58,7 +58,7 @@ RSpec.describe 'gitlab-backup-cli backup subcommand', type: :thor do
expect { cli.start(%w[backup all]) }.to output(expected_backup_output).to_stdout
end
it 'displays an error message when a Backup::Error is raised' do
it 'displays an error message when an error is raised' do
backup_error = Gitlab::Backup::Cli::Error.new('Custom error message')
# Simulate an error during execution

View File

@ -56,7 +56,7 @@ RSpec.describe 'gitlab-backup-cli restore subcommand', type: :thor do
expect { cli.start(%W[restore all #{backup_id}]) }.to output(expected_backup_output).to_stdout
end
it 'displays an error message when a Backup::Error is raised' do
it 'displays an error message when an error is raised' do
backup_error = Gitlab::Backup::Cli::Error.new('Custom error message')
# Simulate an error during execution

View File

@ -15,9 +15,7 @@ module API
todo_target_class(todo.target_type).represent(todo.target, todo_options)
end
expose :target_url do |todo, options|
todo_target_url(todo)
end
expose :target_url
expose :body
expose :state
@ -30,18 +28,6 @@ module API
::API::Entities.const_get(target_type, false)
end
def todo_target_url(todo)
return design_todo_target_url(todo) if todo.for_design?
return todo.access_request_url if todo.member_access_requested?
target_type = todo.target_type.gsub('::', '_').underscore
target_url = "#{todo.resource_parent.class.to_s.underscore}_#{target_type}_url"
Gitlab::Routing
.url_helpers
.public_send(target_url, todo.resource_parent, todo.target, anchor: todo_target_anchor(todo)) # rubocop:disable GitlabSecurity/PublicSend
end
def todo_target_anchor(todo)
"note_#{todo.note_id}" if todo.note_id?
end

View File

@ -66,7 +66,7 @@ module BulkImports
object.save!
end
push_placeholder_references(object, original_users_map) if context.importer_user_mapping_enabled?
push_placeholder_references(original_users_map) if context.importer_user_mapping_enabled?
end
def deep_transform_relation!(relation_hash, relation_key, relation_definition, &block)
@ -190,57 +190,29 @@ module BulkImports
end
end
# Method recursively scans through the relationships of an object based
# on the relation_definition and places all objects into a flattened list
# of objects.
# Pushes a placeholder reference for each source_user_identifier contained in
# the original_users_map.
#
# For example, if the relation_definition is: { "notes" => { "events" => {} } }
# The `original_users_map` is a hash where the key is an object built by the
# RelationFactory, and the value is another hash. This second hash maps
# attributes that reference user IDs to the user IDs from the source instance,
# essentially the information present in the NDJSON file.
#
# and the relation_object a merge_request with the following notes:
# For example, below is an example of `original_users_map`:
#
# event1 = Event.new
# note1 = Note.new(events: [event1])
# event2 = Event.new
# note2 = Note.new(events: [event2])
# merge_request = MergeRequest.new(notes:[note1, note2])
#
# the flatten_objects list will contain:
# [note1, event1, note2, event2]
#
# rubocop:disable GitlabSecurity/PublicSend -- only methods in the relation_definition are called
def scan_objects(relation_definition, relation_object, flatten_objects)
relation_definition.each_key do |definition|
subrelation = relation_object.public_send(definition)
association = relation_object.class.reflect_on_association(definition)
next if subrelation.nil? || association.nil?
if association.collection?
subrelation.records.each do |record|
flatten_objects << record
scan_objects(relation_definition[definition], record, flatten_objects)
end
else
flatten_objects << subrelation
end
end
end
# rubocop:enable GitlabSecurity/PublicSend
def push_placeholder_references(object, original_users_map)
flatten_objects = [object]
scan_objects(relation_definition, object, flatten_objects)
flatten_objects.each do |object|
# {
# #<Issue:0x0001: {"author_id"=>1, "updated_by_id"=>2, "last_edited_by_id"=>2, "closed_by_id"=>2 },
# #<ResourceStateEvent:0x0002: {"user_id"=>1"]},
# #<ResourceStateEvent:0x0003: {"user_id"=>2"]},
# #<ResourceStateEvent:0x0004: {"user_id"=>2"]},
# #<Note:0x0005: {"author_id"=>1"]},
# #<Note:0x0006: {"author_id"=>2"]}
# }
def push_placeholder_references(original_users_map)
original_users_map.each do |object, user_references|
next unless object.persisted?
original_users = original_users_map[object]
next unless original_users
original_users.each do |attribute, source_user_identifier|
user_references.each do |attribute, source_user_identifier|
source_user = source_user_mapper.find_source_user(source_user_identifier)
# Do not create a reference if the object is already associated

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Batched background migration that backfills the desired sharding key
# `ci_job_artifact_states.project_id` from `p_ci_job_artifacts`. All
# batching and update logic is inherited from BackfillDesiredShardingKeyJob;
# the job arguments are supplied when the migration is queued.
class BackfillCiJobArtifactStatesProjectId < BackfillDesiredShardingKeyJob
operation_name :backfill_ci_job_artifact_states_project_id
feature_category :geo_replication
end
end
end

View File

@ -43,7 +43,7 @@ module Gitlab
# The `expanded_environment_name` method uses `metadata&.expanded_environment_name` first to check
# but we don't need it here because `metadata.expanded_environment_name` is only set in
# `app/services/environments/create_for_job_service.rb` which is after the pipeline creation.
ExpandVariables.expand(attributes[:environment], -> { simple_variables })
ExpandVariables.expand(attributes[:environment], -> { simple_variables.sort_and_expand_all })
end
# Copied from `app/models/concerns/ci/deployable.rb#expanded_kubernetes_namespace`

View File

@ -9,7 +9,7 @@ module Gitlab
class << self
def entries
return @entries if @entries.present? && !Rails.env.test?
return @entries if @entries.present? && defined?(Rails) && !Rails.env.test?
@entries = Dir.glob("*.yml", base: DICTIONARY_BASE_DIR).each_with_object({}) do |file_name, data|
dictionary = YAML.load_file(File.join(DICTIONARY_BASE_DIR, file_name))

View File

@ -3621,6 +3621,9 @@ msgstr ""
msgid "AdminAiPoweredFeatures|%{feature_name}"
msgstr ""
msgid "AdminAiPoweredFeatures|%{selected_model} is incompatible with the %{title} feature"
msgstr ""
msgid "AdminAiPoweredFeatures|AI vendor"
msgstr ""

View File

@ -68,7 +68,7 @@ module QA
end
end
it 'push and pull a npm package via CI', testcase: params[:testcase] do
it 'push and pull a npm package via CI', :blocking, testcase: params[:testcase] do
npm_upload_yaml = ERB.new(read_fixture('package_managers/npm',
'npm_upload_package_instance.yaml.erb')).result(binding)
package_json = ERB.new(read_fixture('package_managers/npm', 'package.json.erb')).result(binding)

View File

@ -95,7 +95,7 @@ RSpec.describe 'Database schema', feature_category: :database do
ci_sources_projects: %w[partition_id],
ci_stages: %w[partition_id project_id pipeline_id],
ci_trigger_requests: %w[commit_id],
ci_job_artifact_states: %w[partition_id],
ci_job_artifact_states: %w[partition_id project_id],
cluster_providers_aws: %w[security_group_id vpc_id access_key_id],
cluster_providers_gcp: %w[gcp_project_id operation_id],
compliance_management_frameworks: %w[group_id],

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
require 'spec_helper'
# Runs the shared 'desired sharding key backfill job' examples against this
# migration's configuration: batches over ci_job_artifact_states by
# job_artifact_id and backfills project_id from p_ci_job_artifacts.
RSpec.describe Gitlab::BackgroundMigration::BackfillCiJobArtifactStatesProjectId,
feature_category: :geo_replication,
schema: 20240912122437,
migration: :gitlab_ci do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :ci_job_artifact_states }
let(:backfill_column) { :project_id }
let(:batch_column) { :job_artifact_id }
let(:backfill_via_table) { :p_ci_job_artifacts }
let(:backfill_via_column) { :project_id }
let(:backfill_via_foreign_key) { :job_artifact_id }
end
end

View File

@ -142,6 +142,77 @@ RSpec.describe Gitlab::Ci::Build::Context::Build, feature_category: :pipeline_co
end
end
end
context 'when environment includes nested variables' do
let(:seed_attributes) do
{
name: 'some-job',
environment: 'env-$NESTED_VAR',
yaml_variables: [
{ key: 'NESTED_VAR', value: 'nested-$CI_COMMIT_REF_NAME' }
],
options: {
environment: { name: 'env-$NESTED_VAR' }
}
}
end
it 'expands the nested variable' do
is_expected.to include('CI_ENVIRONMENT_NAME' => 'env-nested-master')
end
context 'when the FF ci_variables_optimization_for_yaml_and_node is disabled' do
before do
stub_feature_flags(ci_variables_optimization_for_yaml_and_node: false)
end
it 'expands the nested variable' do
is_expected.to include('CI_ENVIRONMENT_NAME' => 'env-nested-master')
end
end
end
context 'when kubernetes namespace includes nested variables' do
let(:seed_attributes) do
{
name: 'some-job',
environment: 'env-master',
yaml_variables: [
{ key: 'NESTED_VAR', value: 'nested-$CI_PROJECT_PATH' }
],
options: {
environment: { name: 'env-master', kubernetes: { namespace: 'k8s-$NESTED_VAR' } }
}
}
end
let!(:default_cluster) do
create(
:cluster,
:not_managed,
platform_type: :kubernetes,
projects: [project],
environment_scope: '*',
platform_kubernetes: default_cluster_kubernetes
)
end
let(:default_cluster_kubernetes) { create(:cluster_platform_kubernetes, token: 'default-AAA') }
it 'does not expand the nested variable' do
is_expected.to include('KUBE_NAMESPACE' => "k8s-nested-$CI_PROJECT_PATH")
end
context 'when the FF ci_variables_optimization_for_yaml_and_node is disabled' do
before do
stub_feature_flags(ci_variables_optimization_for_yaml_and_node: false)
end
it 'does not expand the nested variable' do
is_expected.to include('KUBE_NAMESPACE' => "k8s-nested-$CI_PROJECT_PATH")
end
end
end
end
describe '#variables_hash' do

View File

@ -106,7 +106,7 @@ RSpec.describe Gitlab::Database::Partitioning::IntRangeStrategy, feature_categor
model.create!(external_id: 15)
end
it 'returns missing partitions' do
it 'returns missing partitions', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/444872' do
expect(missing_partitions.size).to eq(7)
expect(missing_partitions).to include(

View File

@ -343,7 +343,7 @@ RSpec.describe Gitlab::Database::Reflection, feature_category: :database do
.to be_an_instance_of(HashWithIndifferentAccess)
end
it 'returns a default pool size' do
it 'returns a default pool size', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/467632' do
expect(database.config)
.to include(pool: Gitlab::Database.default_pool_size)
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
# Verifies that the migration queues (and, on rollback, removes) the
# BackfillCiJobArtifactStatesProjectId batched background migration with
# the expected table, batch column, intervals and job arguments.
RSpec.describe QueueBackfillCiJobArtifactStatesProjectId, migration: :gitlab_ci, feature_category: :geo_replication do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
# `reversible_migration` runs `before` prior to migrating and `after`
# once migrated, then checks the down migration restores the before state.
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :ci_job_artifact_states,
column_name: :job_artifact_id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE,
gitlab_schema: :gitlab_ci,
job_arguments: [
:project_id,
:p_ci_job_artifacts,
:project_id,
:job_artifact_id
]
)
}
end
end
end