Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-01-11 00:10:19 +00:00
parent 99bd45e7ce
commit feb7a8f326
35 changed files with 1589 additions and 369 deletions

View File

@@ -186,7 +186,7 @@ class OmniauthCallbacksController < Devise::OmniauthCallbacksController
persist_accepted_terms_if_required(user) if new_user
store_after_sign_up_path_for_user if intent_to_register?
sign_in_and_redirect(user, event: :authentication)
sign_in_and_redirect_or_confirm_identity(user, auth_user, new_user)
end
else
fail_login(user)
@@ -316,6 +316,11 @@ class OmniauthCallbacksController < Devise::OmniauthCallbacksController
def store_after_sign_up_path_for_user
store_location_for(:user, users_sign_up_welcome_path)
end
# overridden in EE
def sign_in_and_redirect_or_confirm_identity(user, _, _)
sign_in_and_redirect(user, event: :authentication)
end
end
OmniauthCallbacksController.prepend_mod_with('OmniauthCallbacksController')
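The new `sign_in_and_redirect_or_confirm_identity` hook is a no-op in CE and only becomes interesting once the EE module prepended by `prepend_mod_with` overrides it. A minimal sketch of how such an override might look, assuming a hypothetical EE module and helper names that are not part of this diff:

```ruby
# Hypothetical EE override; method and helper names are illustrative only.
module EE
  module OmniauthCallbacksController
    extend ::Gitlab::Utils::Override

    # Because the EE module is prepended, this runs instead of the CE no-op
    # and can divert brand-new users into an identity confirmation flow.
    override :sign_in_and_redirect_or_confirm_identity
    def sign_in_and_redirect_or_confirm_identity(user, auth_user, new_user)
      return super unless new_user && identity_confirmation_required?(user) # assumed helper

      redirect_to identity_confirmation_path_for(user) # assumed helper
    end
  end
end
```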

View File

@@ -0,0 +1,8 @@
---
name: jobs_api_keyset_pagination
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/107152
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/385940
milestone: '15.8'
type: development
group: group::pipeline execution
default_enabled: false
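Because the flag is `type: development` with `default_enabled: false`, the keyset pagination path stays off until the flag is turned on. A short, hedged sketch of how such a flag is typically checked and toggled (the actual call site is not shown in this diff):

```ruby
# Check the flag for a project actor (illustrative only):
Feature.enabled?(:jobs_api_keyset_pagination, project)

# Enable or disable it from a Rails console during rollout:
Feature.enable(:jobs_api_keyset_pagination)
Feature.disable(:jobs_api_keyset_pagination)
```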

View File

@@ -11,3 +11,6 @@ Grape::Validations.register_validator(:untrusted_regexp, ::API::Validations::Val
Grape::Validations.register_validator(:email_or_email_list, ::API::Validations::Validators::EmailOrEmailList)
Grape::Validations.register_validator(:iteration_id, ::API::Validations::Validators::IntegerOrCustomValue)
Grape::Validations.register_validator(:project_portable, ::API::Validations::Validators::ProjectPortable)
Grape::Validations.register_validator(:source_full_path, ::API::Validations::Validators::BulkImports::DestinationNamespacePath) # rubocop: disable Layout/LineLength
Grape::Validations.register_validator(:destination_path, ::API::Validations::Validators::BulkImports::DestinationSlugPath) # rubocop: disable Layout/LineLength
Grape::Validations.register_validator(:source_full_path, ::API::Validations::Validators::BulkImports::SourceFullPath)
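Each `register_validator` call exposes a new keyword that endpoints can pass inside a `params` block. A rough usage sketch, assuming a hypothetical endpoint and parameter names:

```ruby
# Hypothetical Grape endpoint showing how the registered validator keys are used.
class BulkImportsAPI < Grape::API
  params do
    requires :source_full_path, type: String,
      source_full_path: true   # runs the registered SourceFullPath validator
    requires :destination_slug, type: String,
      destination_path: true   # runs the registered DestinationSlugPath validator
  end
  post 'bulk_imports' do
    # create the import...
  end
end
```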

View File

@@ -1,69 +1,138 @@
{
"type": "object",
"required": ["key_path", "description", "value_type", "status", "product_group", "product_section", "product_stage", "time_frame", "data_source", "distribution", "tier", "data_category", "milestone"],
"required": [
"key_path",
"description",
"value_type",
"status",
"product_group",
"product_section",
"product_stage",
"time_frame",
"data_source",
"distribution",
"tier",
"data_category",
"milestone"
],
"properties": {
"key_path": {
"type": "string"
},
"name": {
"type": ["string", "null"],
"type": [
"string",
"null"
],
"pattern": "^([a-z]+_)*[a-z]+$"
},
"description": {
"type": "string"
},
"product_section": {
"type": ["string"]
"type": [
"string"
]
},
"product_stage": {
"type": ["string"]
"type": [
"string"
]
},
"product_group": {
"type": "string",
"pattern": "^$|^([a-z]+_)*[a-z]+$"
},
"product_category": {
"type": ["string", "null"]
"type": [
"string",
"null"
]
},
"value_type": {
"type": "string",
"enum": ["string", "number", "boolean", "object"]
"enum": [
"string",
"number",
"boolean",
"object"
]
},
"status": {
"type": ["string"],
"enum": ["active", "removed", "broken"]
"type": [
"string"
],
"enum": [
"active",
"removed",
"broken"
]
},
"milestone": {
"type": ["string"],
"type": [
"string"
],
"pattern": "^<?[0-9]+\\.[0-9]+$"
},
"milestone_removed": {
"type": ["string", "null"],
"type": [
"string",
"null"
],
"pattern": "^[0-9]+\\.[0-9]+$"
},
"introduced_by_url": {
"type": ["string", "null"]
"type": [
"string",
"null"
]
},
"removed_by_url": {
"type": ["string", "null"]
"type": [
"string",
"null"
]
},
"repair_issue_url": {
"type": ["string"]
"type": [
"string"
]
},
"options": {
"type": "object"
},
"time_frame": {
"type": "string",
"enum": ["7d", "28d", "all", "none"]
"enum": [
"7d",
"28d",
"all",
"none"
]
},
"data_source": {
"type": "string",
"enum": ["database", "redis", "redis_hll", "prometheus", "system", "license"]
"enum": [
"database",
"redis",
"redis_hll",
"prometheus",
"system",
"license"
]
},
"data_category": {
"type": "string",
"enum": ["Operational", "Optional", "Subscription", "Standard", "operational", "optional", "subscription", "standard"]
"enum": [
"Operational",
"Optional",
"Subscription",
"Standard",
"operational",
"optional",
"subscription",
"standard"
]
},
"instrumentation_class": {
"type": "string",
@@ -73,21 +142,38 @@
"type": "array",
"items": {
"type": "string",
"enum": ["ee", "ce"]
"enum": [
"ee",
"ce"
]
}
},
"performance_indicator_type": {
"type": "array",
"items": {
"type": "string",
"enum": ["gmau", "smau", "paid_gmau", "umau"]
"enum": [
"gmau",
"smau",
"paid_gmau",
"umau",
"customer_health_score"
]
}
},
"tier": {
"type": "array",
"items": {
"type": "string",
"enum": ["free", "starter", "premium", "ultimate", "bronze", "silver", "gold"]
"enum": [
"free",
"starter",
"premium",
"ultimate",
"bronze",
"silver",
"gold"
]
}
},
"skip_validation": {
@@ -101,12 +187,16 @@
{
"if": {
"properties": {
"status": { "const": "broken" }
"status": {
"const": "broken"
}
}
},
"then": {
"required": ["repair_issue_url"]
"required": [
"repair_issue_url"
]
}
}
]
}
}
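The reformatted schema keeps the same constraints while adding `customer_health_score` to `performance_indicator_type` and retaining the `allOf` rule that a `broken` metric must declare a `repair_issue_url`. A quick way to sanity-check a metric definition against it, assuming the `json_schemer` gem and placeholder file paths:

```ruby
require 'json_schemer'
require 'yaml'
require 'pathname'

# Placeholder paths; substitute the real schema and metric definition files.
schema = JSONSchemer.schema(Pathname.new('config/metrics/schema.json'))
metric = YAML.safe_load(File.read('config/metrics/counts_all/example_metric.yml'))

errors = schema.validate(metric).to_a
puts schema.valid?(metric) ? 'valid' : "invalid: #{errors.size} error(s)"
```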

View File

@@ -0,0 +1,137 @@
# frozen_string_literal: true
class PartitionPmPackageMetadataTables < Gitlab::Database::Migration[2.1]
PURL_TYPES = (1..8).freeze
def up
drop_table(:pm_package_version_licenses) # rubocop:disable Migration/DropTable
drop_table(:pm_package_versions) # rubocop:disable Migration/DropTable
drop_table(:pm_packages) # rubocop:disable Migration/DropTable
create_partitions_for_pm_packages
create_partitions_for_pm_package_versions
create_partitions_for_pm_package_version_licenses
end
def down
drop_table(:pm_package_version_licenses, force: :cascade) # rubocop:disable Migration/DropTable
drop_table(:pm_package_versions, force: :cascade) # rubocop:disable Migration/DropTable
drop_table(:pm_packages, force: :cascade) # rubocop:disable Migration/DropTable
create_table :pm_packages do |t|
t.integer :purl_type, limit: 2, null: false
t.text :name, null: false, limit: 255
t.index [:purl_type, :name], name: 'i_pm_packages_purl_type_and_name', unique: true
end
create_table :pm_package_versions do |t|
t.references :pm_package,
index: false,
foreign_key: {
to_table: :pm_packages,
column: :pm_package_id,
name: 'fk_rails_cf94c3e601',
on_delete: :cascade
}
t.text :version, null: false, limit: 255
t.index [:pm_package_id, :version], name: 'i_pm_package_versions_on_package_id_and_version', unique: true
t.index :pm_package_id, name: 'index_pm_package_versions_on_pm_package_id'
end
create_table :pm_package_version_licenses, primary_key: [:pm_package_version_id, :pm_license_id] do |t|
t.references :pm_package_version,
index: false,
null: false,
foreign_key: {
to_table: :pm_package_versions,
column: :pm_package_version_id,
name: 'fk_rails_30ddb7f837',
on_delete: :cascade
}
t.references :pm_license,
index: false,
null: false,
foreign_key: { name: 'fk_rails_7520ea026d', on_delete: :cascade }
t.index :pm_license_id, name: 'index_pm_package_version_licenses_on_pm_license_id'
t.index :pm_package_version_id, name: 'index_pm_package_version_licenses_on_pm_package_version_id'
end
end
private
def create_partitions_for_pm_packages
execute(<<~SQL)
CREATE TABLE pm_packages (
id BIGSERIAL NOT NULL,
purl_type SMALLINT NOT NULL,
name TEXT NOT NULL,
CONSTRAINT check_9df27a82fe CHECK ((char_length(name) <= 255)),
PRIMARY KEY (id, purl_type)
) PARTITION BY LIST (purl_type);
SQL
execute(<<~SQL)
CREATE UNIQUE INDEX i_pm_packages_for_inserts ON pm_packages USING btree(purl_type, name);
SQL
PURL_TYPES.each do |i|
execute(<<~SQL)
CREATE TABLE gitlab_partitions_static.pm_packages_#{i}
PARTITION OF pm_packages
FOR VALUES IN (#{i})
SQL
end
end
def create_partitions_for_pm_package_versions
execute(<<~SQL)
CREATE TABLE pm_package_versions (
id BIGSERIAL NOT NULL,
pm_package_id BIGINT NOT NULL,
purl_type SMALLINT NOT NULL,
version text NOT NULL,
CONSTRAINT check_7ed2cc733f CHECK ((char_length(version) <= 255)),
PRIMARY KEY (id, purl_type),
CONSTRAINT fkey_fb6234c446 FOREIGN KEY (pm_package_id, purl_type) REFERENCES pm_packages(id, purl_type) ON DELETE CASCADE
) PARTITION BY LIST (purl_type);
SQL
execute(<<~SQL)
CREATE UNIQUE INDEX i_pm_package_versions_for_inserts ON pm_package_versions USING btree (pm_package_id, version, purl_type);
SQL
PURL_TYPES.each do |i|
execute(<<~SQL)
CREATE TABLE gitlab_partitions_static.pm_package_versions_#{i}
PARTITION OF pm_package_versions
FOR VALUES IN (#{i})
SQL
end
end
def create_partitions_for_pm_package_version_licenses
execute(<<~SQL)
CREATE TABLE pm_package_version_licenses (
pm_package_version_id bigint NOT NULL,
pm_license_id bigint NOT NULL,
purl_type smallint NOT NULL,
PRIMARY KEY (pm_package_version_id, pm_license_id, purl_type),
CONSTRAINT pm_package_versions_fkey FOREIGN KEY (pm_package_version_id, purl_type) REFERENCES pm_package_versions (id, purl_type) ON DELETE CASCADE,
CONSTRAINT pm_package_licenses_fkey FOREIGN KEY (pm_license_id) REFERENCES pm_licenses (id) ON DELETE CASCADE
) PARTITION BY LIST (purl_type);
SQL
execute(<<~SQL)
CREATE INDEX i_pm_package_version_licenses_for_inserts ON pm_package_version_licenses USING btree (purl_type, pm_package_version_id, pm_license_id);
CREATE INDEX i_pm_package_version_licenses_for_selects_on_licenses ON pm_package_version_licenses USING btree (pm_license_id);
SQL
PURL_TYPES.each do |i|
execute(<<~SQL)
CREATE TABLE gitlab_partitions_static.pm_package_version_licenses_#{i}
PARTITION OF pm_package_version_licenses
FOR VALUES IN (#{i})
SQL
end
end
end
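Because the rebuilt `pm_*` tables are list-partitioned on `purl_type`, and `purl_type` is part of every primary and foreign key, queries that include `purl_type` let PostgreSQL prune down to a single static partition. A hedged illustration from a Rails console (the values are hypothetical):

```ruby
# Including purl_type lets the planner touch only gitlab_partitions_static.pm_packages_1.
plan = ActiveRecord::Base.connection.execute(<<~SQL)
  EXPLAIN SELECT id FROM pm_packages WHERE purl_type = 1 AND name = 'rails';
SQL
plan.each { |row| puts row['QUERY PLAN'] }

# Omitting purl_type forces the planner to consider all eight partitions.
ActiveRecord::Base.connection.execute(<<~SQL)
  EXPLAIN SELECT id FROM pm_packages WHERE name = 'rails';
SQL
```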

View File

@@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddTmpIndexOauthAccessTokensOnIdWhereExpiresInNull < Gitlab::Database::Migration[2.1]
TMP_INDEX = 'tmp_index_oauth_access_tokens_on_id_where_expires_in_null'
disable_ddl_transaction!
def up
# Temporary index to be removed in %15.9 or later https://gitlab.com/gitlab-org/gitlab/-/issues/385343
add_concurrent_index :oauth_access_tokens, :id, where: "expires_in IS NULL", name: TMP_INDEX
end
def down
remove_concurrent_index_by_name :oauth_access_tokens, TMP_INDEX
end
end
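The index is explicitly temporary, so a later milestone is expected to drop it. A sketch of what that follow-up migration could look like, reusing the same helpers (the class name is an assumption):

```ruby
# frozen_string_literal: true

# Hypothetical follow-up migration for %15.9 or later.
class RemoveTmpIndexOauthAccessTokensOnIdWhereExpiresInNull < Gitlab::Database::Migration[2.1]
  TMP_INDEX = 'tmp_index_oauth_access_tokens_on_id_where_expires_in_null'

  disable_ddl_transaction!

  def up
    remove_concurrent_index_by_name :oauth_access_tokens, TMP_INDEX
  end

  def down
    add_concurrent_index :oauth_access_tokens, :id, where: 'expires_in IS NULL', name: TMP_INDEX
  end
end
```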

View File

@@ -0,0 +1,25 @@
# frozen_string_literal: true
class CleanupOAuthAccessTokensWithNullExpiresIn < Gitlab::Database::Migration[2.1]
MIGRATION = 'ReExpireOAuthTokens'
INTERVAL = 2.minutes
MAX_BATCH_SIZE = 50_000
disable_ddl_transaction!
restrict_gitlab_migration gitlab_schema: :gitlab_main
def up
queue_batched_background_migration(
MIGRATION,
:oauth_access_tokens,
:id,
job_interval: INTERVAL,
max_batch_size: MAX_BATCH_SIZE
)
end
def down
delete_batched_background_migration(MIGRATION, :oauth_access_tokens, :id, [])
end
end
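`queue_batched_background_migration` only enqueues the work; the rows are actually updated by a `Gitlab::BackgroundMigration::ReExpireOAuthTokens` job class that is not part of this excerpt. A rough sketch of the shape such a job usually takes, where the scoping and the two-hour expiry are assumptions rather than the real implementation:

```ruby
# frozen_string_literal: true

# Hypothetical sketch of the batched background migration job.
module Gitlab
  module BackgroundMigration
    class ReExpireOAuthTokens < BatchedMigrationJob
      scope_to ->(relation) { relation.where(expires_in: nil) } # assumed scope
      operation_name :update_expires_in

      def perform
        each_sub_batch do |sub_batch|
          sub_batch.update_all(expires_in: 2.hours.to_i) # assumed value
        end
      end
    end
  end
end
```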

View File

@@ -0,0 +1 @@
1932d6cee91dac3fb3fee7bbdac1bb08c7af92d8ef9f9f32c2d6a2958fc4fdcf

View File

@@ -0,0 +1 @@
fc12bfad8fb9c2aba5aea0726f8e8630c9d0f4f791988660ea183e269b2068ef

View File

@@ -0,0 +1 @@
37df82f093bb81ff1bc36ea9ba29f4e70bcb96274e2dcc70438ce0710dd7e9d9

View File

@@ -1940,6 +1940,240 @@ CREATE TABLE gitlab_partitions_static.issue_search_data_63 (
);
ALTER TABLE ONLY issue_search_data ATTACH PARTITION gitlab_partitions_static.issue_search_data_63 FOR VALUES WITH (modulus 64, remainder 63);
CREATE TABLE pm_package_version_licenses (
pm_package_version_id bigint NOT NULL,
pm_license_id bigint NOT NULL,
purl_type smallint NOT NULL
)
PARTITION BY LIST (purl_type);
CREATE TABLE gitlab_partitions_static.pm_package_version_licenses_1 (
pm_package_version_id bigint NOT NULL,
pm_license_id bigint NOT NULL,
purl_type smallint NOT NULL
);
ALTER TABLE ONLY pm_package_version_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_1 FOR VALUES IN ('1');
CREATE TABLE gitlab_partitions_static.pm_package_version_licenses_2 (
pm_package_version_id bigint NOT NULL,
pm_license_id bigint NOT NULL,
purl_type smallint NOT NULL
);
ALTER TABLE ONLY pm_package_version_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_2 FOR VALUES IN ('2');
CREATE TABLE gitlab_partitions_static.pm_package_version_licenses_3 (
pm_package_version_id bigint NOT NULL,
pm_license_id bigint NOT NULL,
purl_type smallint NOT NULL
);
ALTER TABLE ONLY pm_package_version_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_3 FOR VALUES IN ('3');
CREATE TABLE gitlab_partitions_static.pm_package_version_licenses_4 (
pm_package_version_id bigint NOT NULL,
pm_license_id bigint NOT NULL,
purl_type smallint NOT NULL
);
ALTER TABLE ONLY pm_package_version_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_4 FOR VALUES IN ('4');
CREATE TABLE gitlab_partitions_static.pm_package_version_licenses_5 (
pm_package_version_id bigint NOT NULL,
pm_license_id bigint NOT NULL,
purl_type smallint NOT NULL
);
ALTER TABLE ONLY pm_package_version_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_5 FOR VALUES IN ('5');
CREATE TABLE gitlab_partitions_static.pm_package_version_licenses_6 (
pm_package_version_id bigint NOT NULL,
pm_license_id bigint NOT NULL,
purl_type smallint NOT NULL
);
ALTER TABLE ONLY pm_package_version_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_6 FOR VALUES IN ('6');
CREATE TABLE gitlab_partitions_static.pm_package_version_licenses_7 (
pm_package_version_id bigint NOT NULL,
pm_license_id bigint NOT NULL,
purl_type smallint NOT NULL
);
ALTER TABLE ONLY pm_package_version_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_7 FOR VALUES IN ('7');
CREATE TABLE gitlab_partitions_static.pm_package_version_licenses_8 (
pm_package_version_id bigint NOT NULL,
pm_license_id bigint NOT NULL,
purl_type smallint NOT NULL
);
ALTER TABLE ONLY pm_package_version_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_8 FOR VALUES IN ('8');
CREATE TABLE pm_package_versions (
id bigint NOT NULL,
pm_package_id bigint NOT NULL,
purl_type smallint NOT NULL,
version text NOT NULL,
CONSTRAINT check_7ed2cc733f CHECK ((char_length(version) <= 255))
)
PARTITION BY LIST (purl_type);
CREATE SEQUENCE pm_package_versions_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE pm_package_versions_id_seq OWNED BY pm_package_versions.id;
CREATE TABLE gitlab_partitions_static.pm_package_versions_1 (
id bigint DEFAULT nextval('pm_package_versions_id_seq'::regclass) NOT NULL,
pm_package_id bigint NOT NULL,
purl_type smallint NOT NULL,
version text NOT NULL,
CONSTRAINT check_7ed2cc733f CHECK ((char_length(version) <= 255))
);
ALTER TABLE ONLY pm_package_versions ATTACH PARTITION gitlab_partitions_static.pm_package_versions_1 FOR VALUES IN ('1');
CREATE TABLE gitlab_partitions_static.pm_package_versions_2 (
id bigint DEFAULT nextval('pm_package_versions_id_seq'::regclass) NOT NULL,
pm_package_id bigint NOT NULL,
purl_type smallint NOT NULL,
version text NOT NULL,
CONSTRAINT check_7ed2cc733f CHECK ((char_length(version) <= 255))
);
ALTER TABLE ONLY pm_package_versions ATTACH PARTITION gitlab_partitions_static.pm_package_versions_2 FOR VALUES IN ('2');
CREATE TABLE gitlab_partitions_static.pm_package_versions_3 (
id bigint DEFAULT nextval('pm_package_versions_id_seq'::regclass) NOT NULL,
pm_package_id bigint NOT NULL,
purl_type smallint NOT NULL,
version text NOT NULL,
CONSTRAINT check_7ed2cc733f CHECK ((char_length(version) <= 255))
);
ALTER TABLE ONLY pm_package_versions ATTACH PARTITION gitlab_partitions_static.pm_package_versions_3 FOR VALUES IN ('3');
CREATE TABLE gitlab_partitions_static.pm_package_versions_4 (
id bigint DEFAULT nextval('pm_package_versions_id_seq'::regclass) NOT NULL,
pm_package_id bigint NOT NULL,
purl_type smallint NOT NULL,
version text NOT NULL,
CONSTRAINT check_7ed2cc733f CHECK ((char_length(version) <= 255))
);
ALTER TABLE ONLY pm_package_versions ATTACH PARTITION gitlab_partitions_static.pm_package_versions_4 FOR VALUES IN ('4');
CREATE TABLE gitlab_partitions_static.pm_package_versions_5 (
id bigint DEFAULT nextval('pm_package_versions_id_seq'::regclass) NOT NULL,
pm_package_id bigint NOT NULL,
purl_type smallint NOT NULL,
version text NOT NULL,
CONSTRAINT check_7ed2cc733f CHECK ((char_length(version) <= 255))
);
ALTER TABLE ONLY pm_package_versions ATTACH PARTITION gitlab_partitions_static.pm_package_versions_5 FOR VALUES IN ('5');
CREATE TABLE gitlab_partitions_static.pm_package_versions_6 (
id bigint DEFAULT nextval('pm_package_versions_id_seq'::regclass) NOT NULL,
pm_package_id bigint NOT NULL,
purl_type smallint NOT NULL,
version text NOT NULL,
CONSTRAINT check_7ed2cc733f CHECK ((char_length(version) <= 255))
);
ALTER TABLE ONLY pm_package_versions ATTACH PARTITION gitlab_partitions_static.pm_package_versions_6 FOR VALUES IN ('6');
CREATE TABLE gitlab_partitions_static.pm_package_versions_7 (
id bigint DEFAULT nextval('pm_package_versions_id_seq'::regclass) NOT NULL,
pm_package_id bigint NOT NULL,
purl_type smallint NOT NULL,
version text NOT NULL,
CONSTRAINT check_7ed2cc733f CHECK ((char_length(version) <= 255))
);
ALTER TABLE ONLY pm_package_versions ATTACH PARTITION gitlab_partitions_static.pm_package_versions_7 FOR VALUES IN ('7');
CREATE TABLE gitlab_partitions_static.pm_package_versions_8 (
id bigint DEFAULT nextval('pm_package_versions_id_seq'::regclass) NOT NULL,
pm_package_id bigint NOT NULL,
purl_type smallint NOT NULL,
version text NOT NULL,
CONSTRAINT check_7ed2cc733f CHECK ((char_length(version) <= 255))
);
ALTER TABLE ONLY pm_package_versions ATTACH PARTITION gitlab_partitions_static.pm_package_versions_8 FOR VALUES IN ('8');
CREATE TABLE pm_packages (
id bigint NOT NULL,
purl_type smallint NOT NULL,
name text NOT NULL,
CONSTRAINT check_9df27a82fe CHECK ((char_length(name) <= 255))
)
PARTITION BY LIST (purl_type);
CREATE SEQUENCE pm_packages_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE pm_packages_id_seq OWNED BY pm_packages.id;
CREATE TABLE gitlab_partitions_static.pm_packages_1 (
id bigint DEFAULT nextval('pm_packages_id_seq'::regclass) NOT NULL,
purl_type smallint NOT NULL,
name text NOT NULL,
CONSTRAINT check_9df27a82fe CHECK ((char_length(name) <= 255))
);
ALTER TABLE ONLY pm_packages ATTACH PARTITION gitlab_partitions_static.pm_packages_1 FOR VALUES IN ('1');
CREATE TABLE gitlab_partitions_static.pm_packages_2 (
id bigint DEFAULT nextval('pm_packages_id_seq'::regclass) NOT NULL,
purl_type smallint NOT NULL,
name text NOT NULL,
CONSTRAINT check_9df27a82fe CHECK ((char_length(name) <= 255))
);
ALTER TABLE ONLY pm_packages ATTACH PARTITION gitlab_partitions_static.pm_packages_2 FOR VALUES IN ('2');
CREATE TABLE gitlab_partitions_static.pm_packages_3 (
id bigint DEFAULT nextval('pm_packages_id_seq'::regclass) NOT NULL,
purl_type smallint NOT NULL,
name text NOT NULL,
CONSTRAINT check_9df27a82fe CHECK ((char_length(name) <= 255))
);
ALTER TABLE ONLY pm_packages ATTACH PARTITION gitlab_partitions_static.pm_packages_3 FOR VALUES IN ('3');
CREATE TABLE gitlab_partitions_static.pm_packages_4 (
id bigint DEFAULT nextval('pm_packages_id_seq'::regclass) NOT NULL,
purl_type smallint NOT NULL,
name text NOT NULL,
CONSTRAINT check_9df27a82fe CHECK ((char_length(name) <= 255))
);
ALTER TABLE ONLY pm_packages ATTACH PARTITION gitlab_partitions_static.pm_packages_4 FOR VALUES IN ('4');
CREATE TABLE gitlab_partitions_static.pm_packages_5 (
id bigint DEFAULT nextval('pm_packages_id_seq'::regclass) NOT NULL,
purl_type smallint NOT NULL,
name text NOT NULL,
CONSTRAINT check_9df27a82fe CHECK ((char_length(name) <= 255))
);
ALTER TABLE ONLY pm_packages ATTACH PARTITION gitlab_partitions_static.pm_packages_5 FOR VALUES IN ('5');
CREATE TABLE gitlab_partitions_static.pm_packages_6 (
id bigint DEFAULT nextval('pm_packages_id_seq'::regclass) NOT NULL,
purl_type smallint NOT NULL,
name text NOT NULL,
CONSTRAINT check_9df27a82fe CHECK ((char_length(name) <= 255))
);
ALTER TABLE ONLY pm_packages ATTACH PARTITION gitlab_partitions_static.pm_packages_6 FOR VALUES IN ('6');
CREATE TABLE gitlab_partitions_static.pm_packages_7 (
id bigint DEFAULT nextval('pm_packages_id_seq'::regclass) NOT NULL,
purl_type smallint NOT NULL,
name text NOT NULL,
CONSTRAINT check_9df27a82fe CHECK ((char_length(name) <= 255))
);
ALTER TABLE ONLY pm_packages ATTACH PARTITION gitlab_partitions_static.pm_packages_7 FOR VALUES IN ('7');
CREATE TABLE gitlab_partitions_static.pm_packages_8 (
id bigint DEFAULT nextval('pm_packages_id_seq'::regclass) NOT NULL,
purl_type smallint NOT NULL,
name text NOT NULL,
CONSTRAINT check_9df27a82fe CHECK ((char_length(name) <= 255))
);
ALTER TABLE ONLY pm_packages ATTACH PARTITION gitlab_partitions_static.pm_packages_8 FOR VALUES IN ('8');
CREATE TABLE product_analytics_events_experimental (
id bigint NOT NULL,
project_id integer NOT NULL,
@@ -19649,43 +19883,6 @@ CREATE SEQUENCE pm_licenses_id_seq
ALTER SEQUENCE pm_licenses_id_seq OWNED BY pm_licenses.id;
CREATE TABLE pm_package_version_licenses (
pm_package_version_id bigint NOT NULL,
pm_license_id bigint NOT NULL
);
CREATE TABLE pm_package_versions (
id bigint NOT NULL,
pm_package_id bigint,
version text NOT NULL,
CONSTRAINT check_2d8a88cfcc CHECK ((char_length(version) <= 255))
);
CREATE SEQUENCE pm_package_versions_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE pm_package_versions_id_seq OWNED BY pm_package_versions.id;
CREATE TABLE pm_packages (
id bigint NOT NULL,
purl_type smallint NOT NULL,
name text NOT NULL,
CONSTRAINT check_3a3aedb8ba CHECK ((char_length(name) <= 255))
);
CREATE SEQUENCE pm_packages_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE pm_packages_id_seq OWNED BY pm_packages.id;
CREATE TABLE pool_repositories (
id bigint NOT NULL,
shard_id integer NOT NULL,
@@ -25222,6 +25419,87 @@ ALTER TABLE ONLY gitlab_partitions_static.issue_search_data_62
ALTER TABLE ONLY gitlab_partitions_static.issue_search_data_63
ADD CONSTRAINT issue_search_data_63_pkey PRIMARY KEY (project_id, issue_id);
ALTER TABLE ONLY pm_package_version_licenses
ADD CONSTRAINT pm_package_version_licenses_pkey PRIMARY KEY (pm_package_version_id, pm_license_id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_version_licenses_1
ADD CONSTRAINT pm_package_version_licenses_1_pkey PRIMARY KEY (pm_package_version_id, pm_license_id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_version_licenses_2
ADD CONSTRAINT pm_package_version_licenses_2_pkey PRIMARY KEY (pm_package_version_id, pm_license_id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_version_licenses_3
ADD CONSTRAINT pm_package_version_licenses_3_pkey PRIMARY KEY (pm_package_version_id, pm_license_id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_version_licenses_4
ADD CONSTRAINT pm_package_version_licenses_4_pkey PRIMARY KEY (pm_package_version_id, pm_license_id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_version_licenses_5
ADD CONSTRAINT pm_package_version_licenses_5_pkey PRIMARY KEY (pm_package_version_id, pm_license_id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_version_licenses_6
ADD CONSTRAINT pm_package_version_licenses_6_pkey PRIMARY KEY (pm_package_version_id, pm_license_id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_version_licenses_7
ADD CONSTRAINT pm_package_version_licenses_7_pkey PRIMARY KEY (pm_package_version_id, pm_license_id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_version_licenses_8
ADD CONSTRAINT pm_package_version_licenses_8_pkey PRIMARY KEY (pm_package_version_id, pm_license_id, purl_type);
ALTER TABLE ONLY pm_package_versions
ADD CONSTRAINT pm_package_versions_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_versions_1
ADD CONSTRAINT pm_package_versions_1_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_versions_2
ADD CONSTRAINT pm_package_versions_2_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_versions_3
ADD CONSTRAINT pm_package_versions_3_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_versions_4
ADD CONSTRAINT pm_package_versions_4_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_versions_5
ADD CONSTRAINT pm_package_versions_5_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_versions_6
ADD CONSTRAINT pm_package_versions_6_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_versions_7
ADD CONSTRAINT pm_package_versions_7_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_package_versions_8
ADD CONSTRAINT pm_package_versions_8_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY pm_packages
ADD CONSTRAINT pm_packages_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_packages_1
ADD CONSTRAINT pm_packages_1_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_packages_2
ADD CONSTRAINT pm_packages_2_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_packages_3
ADD CONSTRAINT pm_packages_3_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_packages_4
ADD CONSTRAINT pm_packages_4_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_packages_5
ADD CONSTRAINT pm_packages_5_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_packages_6
ADD CONSTRAINT pm_packages_6_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_packages_7
ADD CONSTRAINT pm_packages_7_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY gitlab_partitions_static.pm_packages_8
ADD CONSTRAINT pm_packages_8_pkey PRIMARY KEY (id, purl_type);
ALTER TABLE ONLY product_analytics_events_experimental
ADD CONSTRAINT product_analytics_events_experimental_pkey PRIMARY KEY (id, project_id);
@@ -26692,15 +26970,6 @@ ALTER TABLE ONLY plans
ALTER TABLE ONLY pm_licenses
ADD CONSTRAINT pm_licenses_pkey PRIMARY KEY (id);
ALTER TABLE ONLY pm_package_version_licenses
ADD CONSTRAINT pm_package_version_licenses_pkey PRIMARY KEY (pm_package_version_id, pm_license_id);
ALTER TABLE ONLY pm_package_versions
ADD CONSTRAINT pm_package_versions_pkey PRIMARY KEY (id);
ALTER TABLE ONLY pm_packages
ADD CONSTRAINT pm_packages_pkey PRIMARY KEY (id);
ALTER TABLE ONLY pool_repositories
ADD CONSTRAINT pool_repositories_pkey PRIMARY KEY (id);
@@ -28065,6 +28334,78 @@ CREATE INDEX issue_search_data_63_issue_id_idx ON gitlab_partitions_static.issue
CREATE INDEX issue_search_data_63_search_vector_idx ON gitlab_partitions_static.issue_search_data_63 USING gin (search_vector);
CREATE INDEX i_pm_package_version_licenses_for_selects_on_licenses ON ONLY pm_package_version_licenses USING btree (pm_license_id);
CREATE INDEX pm_package_version_licenses_1_pm_license_id_idx ON gitlab_partitions_static.pm_package_version_licenses_1 USING btree (pm_license_id);
CREATE INDEX i_pm_package_version_licenses_for_inserts ON ONLY pm_package_version_licenses USING btree (purl_type, pm_package_version_id, pm_license_id);
CREATE INDEX pm_package_version_licenses_1_purl_type_pm_package_version__idx ON gitlab_partitions_static.pm_package_version_licenses_1 USING btree (purl_type, pm_package_version_id, pm_license_id);
CREATE INDEX pm_package_version_licenses_2_pm_license_id_idx ON gitlab_partitions_static.pm_package_version_licenses_2 USING btree (pm_license_id);
CREATE INDEX pm_package_version_licenses_2_purl_type_pm_package_version__idx ON gitlab_partitions_static.pm_package_version_licenses_2 USING btree (purl_type, pm_package_version_id, pm_license_id);
CREATE INDEX pm_package_version_licenses_3_pm_license_id_idx ON gitlab_partitions_static.pm_package_version_licenses_3 USING btree (pm_license_id);
CREATE INDEX pm_package_version_licenses_3_purl_type_pm_package_version__idx ON gitlab_partitions_static.pm_package_version_licenses_3 USING btree (purl_type, pm_package_version_id, pm_license_id);
CREATE INDEX pm_package_version_licenses_4_pm_license_id_idx ON gitlab_partitions_static.pm_package_version_licenses_4 USING btree (pm_license_id);
CREATE INDEX pm_package_version_licenses_4_purl_type_pm_package_version__idx ON gitlab_partitions_static.pm_package_version_licenses_4 USING btree (purl_type, pm_package_version_id, pm_license_id);
CREATE INDEX pm_package_version_licenses_5_pm_license_id_idx ON gitlab_partitions_static.pm_package_version_licenses_5 USING btree (pm_license_id);
CREATE INDEX pm_package_version_licenses_5_purl_type_pm_package_version__idx ON gitlab_partitions_static.pm_package_version_licenses_5 USING btree (purl_type, pm_package_version_id, pm_license_id);
CREATE INDEX pm_package_version_licenses_6_pm_license_id_idx ON gitlab_partitions_static.pm_package_version_licenses_6 USING btree (pm_license_id);
CREATE INDEX pm_package_version_licenses_6_purl_type_pm_package_version__idx ON gitlab_partitions_static.pm_package_version_licenses_6 USING btree (purl_type, pm_package_version_id, pm_license_id);
CREATE INDEX pm_package_version_licenses_7_pm_license_id_idx ON gitlab_partitions_static.pm_package_version_licenses_7 USING btree (pm_license_id);
CREATE INDEX pm_package_version_licenses_7_purl_type_pm_package_version__idx ON gitlab_partitions_static.pm_package_version_licenses_7 USING btree (purl_type, pm_package_version_id, pm_license_id);
CREATE INDEX pm_package_version_licenses_8_pm_license_id_idx ON gitlab_partitions_static.pm_package_version_licenses_8 USING btree (pm_license_id);
CREATE INDEX pm_package_version_licenses_8_purl_type_pm_package_version__idx ON gitlab_partitions_static.pm_package_version_licenses_8 USING btree (purl_type, pm_package_version_id, pm_license_id);
CREATE UNIQUE INDEX i_pm_package_versions_for_inserts ON ONLY pm_package_versions USING btree (pm_package_id, version, purl_type);
CREATE UNIQUE INDEX pm_package_versions_1_pm_package_id_version_purl_type_idx ON gitlab_partitions_static.pm_package_versions_1 USING btree (pm_package_id, version, purl_type);
CREATE UNIQUE INDEX pm_package_versions_2_pm_package_id_version_purl_type_idx ON gitlab_partitions_static.pm_package_versions_2 USING btree (pm_package_id, version, purl_type);
CREATE UNIQUE INDEX pm_package_versions_3_pm_package_id_version_purl_type_idx ON gitlab_partitions_static.pm_package_versions_3 USING btree (pm_package_id, version, purl_type);
CREATE UNIQUE INDEX pm_package_versions_4_pm_package_id_version_purl_type_idx ON gitlab_partitions_static.pm_package_versions_4 USING btree (pm_package_id, version, purl_type);
CREATE UNIQUE INDEX pm_package_versions_5_pm_package_id_version_purl_type_idx ON gitlab_partitions_static.pm_package_versions_5 USING btree (pm_package_id, version, purl_type);
CREATE UNIQUE INDEX pm_package_versions_6_pm_package_id_version_purl_type_idx ON gitlab_partitions_static.pm_package_versions_6 USING btree (pm_package_id, version, purl_type);
CREATE UNIQUE INDEX pm_package_versions_7_pm_package_id_version_purl_type_idx ON gitlab_partitions_static.pm_package_versions_7 USING btree (pm_package_id, version, purl_type);
CREATE UNIQUE INDEX pm_package_versions_8_pm_package_id_version_purl_type_idx ON gitlab_partitions_static.pm_package_versions_8 USING btree (pm_package_id, version, purl_type);
CREATE UNIQUE INDEX i_pm_packages_for_inserts ON ONLY pm_packages USING btree (purl_type, name);
CREATE UNIQUE INDEX pm_packages_1_purl_type_name_idx ON gitlab_partitions_static.pm_packages_1 USING btree (purl_type, name);
CREATE UNIQUE INDEX pm_packages_2_purl_type_name_idx ON gitlab_partitions_static.pm_packages_2 USING btree (purl_type, name);
CREATE UNIQUE INDEX pm_packages_3_purl_type_name_idx ON gitlab_partitions_static.pm_packages_3 USING btree (purl_type, name);
CREATE UNIQUE INDEX pm_packages_4_purl_type_name_idx ON gitlab_partitions_static.pm_packages_4 USING btree (purl_type, name);
CREATE UNIQUE INDEX pm_packages_5_purl_type_name_idx ON gitlab_partitions_static.pm_packages_5 USING btree (purl_type, name);
CREATE UNIQUE INDEX pm_packages_6_purl_type_name_idx ON gitlab_partitions_static.pm_packages_6 USING btree (purl_type, name);
CREATE UNIQUE INDEX pm_packages_7_purl_type_name_idx ON gitlab_partitions_static.pm_packages_7 USING btree (purl_type, name);
CREATE UNIQUE INDEX pm_packages_8_purl_type_name_idx ON gitlab_partitions_static.pm_packages_8 USING btree (purl_type, name);
CREATE INDEX index_product_analytics_events_experimental_project_and_time ON ONLY product_analytics_events_experimental USING btree (project_id, collector_tstamp);
CREATE INDEX product_analytics_events_expe_project_id_collector_tstamp_idx10 ON gitlab_partitions_static.product_analytics_events_experimental_10 USING btree (project_id, collector_tstamp);
@@ -28257,10 +28598,6 @@ CREATE INDEX i_dast_scanner_profiles_tags_on_scanner_profiles_id ON dast_scanner
CREATE UNIQUE INDEX i_pm_licenses_on_spdx_identifier ON pm_licenses USING btree (spdx_identifier);
CREATE UNIQUE INDEX i_pm_package_versions_on_package_id_and_version ON pm_package_versions USING btree (pm_package_id, version);
CREATE UNIQUE INDEX i_pm_packages_purl_type_and_name ON pm_packages USING btree (purl_type, name);
CREATE INDEX idx_analytics_devops_adoption_segments_on_namespace_id ON analytics_devops_adoption_segments USING btree (namespace_id);
CREATE INDEX idx_analytics_devops_adoption_snapshots_finalized ON analytics_devops_adoption_snapshots USING btree (namespace_id, end_time) WHERE (recorded_at >= end_time);
@@ -30575,12 +30912,6 @@ CREATE UNIQUE INDEX index_plan_limits_on_plan_id ON plan_limits USING btree (pla
CREATE UNIQUE INDEX index_plans_on_name ON plans USING btree (name);
CREATE INDEX index_pm_package_version_licenses_on_pm_license_id ON pm_package_version_licenses USING btree (pm_license_id);
CREATE INDEX index_pm_package_version_licenses_on_pm_package_version_id ON pm_package_version_licenses USING btree (pm_package_version_id);
CREATE INDEX index_pm_package_versions_on_pm_package_id ON pm_package_versions USING btree (pm_package_id);
CREATE UNIQUE INDEX index_pool_repositories_on_disk_path ON pool_repositories USING btree (disk_path);
CREATE INDEX index_pool_repositories_on_shard_id ON pool_repositories USING btree (shard_id);
@@ -31775,6 +32106,8 @@ CREATE INDEX tmp_index_members_on_state ON members USING btree (state) WHERE (st
CREATE INDEX tmp_index_migrated_container_registries ON container_repositories USING btree (project_id) WHERE ((migration_state = 'import_done'::text) OR (created_at >= '2022-01-23 00:00:00'::timestamp without time zone));
CREATE INDEX tmp_index_oauth_access_tokens_on_id_where_expires_in_null ON oauth_access_tokens USING btree (id) WHERE (expires_in IS NULL);
CREATE INDEX tmp_index_on_vulnerabilities_non_dismissed ON vulnerabilities USING btree (id) WHERE (state <> 2);
CREATE INDEX tmp_index_project_statistics_cont_registry_size ON project_statistics USING btree (project_id) WHERE (container_registry_size = 0);
@@ -32837,6 +33170,118 @@ ALTER INDEX issue_search_data_pkey ATTACH PARTITION gitlab_partitions_static.iss
ALTER INDEX index_issue_search_data_on_search_vector ATTACH PARTITION gitlab_partitions_static.issue_search_data_63_search_vector_idx;
ALTER INDEX pm_package_version_licenses_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_1_pkey;
ALTER INDEX i_pm_package_version_licenses_for_selects_on_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_1_pm_license_id_idx;
ALTER INDEX i_pm_package_version_licenses_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_1_purl_type_pm_package_version__idx;
ALTER INDEX pm_package_version_licenses_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_2_pkey;
ALTER INDEX i_pm_package_version_licenses_for_selects_on_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_2_pm_license_id_idx;
ALTER INDEX i_pm_package_version_licenses_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_2_purl_type_pm_package_version__idx;
ALTER INDEX pm_package_version_licenses_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_3_pkey;
ALTER INDEX i_pm_package_version_licenses_for_selects_on_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_3_pm_license_id_idx;
ALTER INDEX i_pm_package_version_licenses_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_3_purl_type_pm_package_version__idx;
ALTER INDEX pm_package_version_licenses_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_4_pkey;
ALTER INDEX i_pm_package_version_licenses_for_selects_on_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_4_pm_license_id_idx;
ALTER INDEX i_pm_package_version_licenses_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_4_purl_type_pm_package_version__idx;
ALTER INDEX pm_package_version_licenses_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_5_pkey;
ALTER INDEX i_pm_package_version_licenses_for_selects_on_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_5_pm_license_id_idx;
ALTER INDEX i_pm_package_version_licenses_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_5_purl_type_pm_package_version__idx;
ALTER INDEX pm_package_version_licenses_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_6_pkey;
ALTER INDEX i_pm_package_version_licenses_for_selects_on_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_6_pm_license_id_idx;
ALTER INDEX i_pm_package_version_licenses_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_6_purl_type_pm_package_version__idx;
ALTER INDEX pm_package_version_licenses_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_7_pkey;
ALTER INDEX i_pm_package_version_licenses_for_selects_on_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_7_pm_license_id_idx;
ALTER INDEX i_pm_package_version_licenses_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_7_purl_type_pm_package_version__idx;
ALTER INDEX pm_package_version_licenses_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_8_pkey;
ALTER INDEX i_pm_package_version_licenses_for_selects_on_licenses ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_8_pm_license_id_idx;
ALTER INDEX i_pm_package_version_licenses_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_version_licenses_8_purl_type_pm_package_version__idx;
ALTER INDEX pm_package_versions_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_versions_1_pkey;
ALTER INDEX i_pm_package_versions_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_versions_1_pm_package_id_version_purl_type_idx;
ALTER INDEX pm_package_versions_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_versions_2_pkey;
ALTER INDEX i_pm_package_versions_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_versions_2_pm_package_id_version_purl_type_idx;
ALTER INDEX pm_package_versions_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_versions_3_pkey;
ALTER INDEX i_pm_package_versions_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_versions_3_pm_package_id_version_purl_type_idx;
ALTER INDEX pm_package_versions_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_versions_4_pkey;
ALTER INDEX i_pm_package_versions_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_versions_4_pm_package_id_version_purl_type_idx;
ALTER INDEX pm_package_versions_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_versions_5_pkey;
ALTER INDEX i_pm_package_versions_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_versions_5_pm_package_id_version_purl_type_idx;
ALTER INDEX pm_package_versions_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_versions_6_pkey;
ALTER INDEX i_pm_package_versions_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_versions_6_pm_package_id_version_purl_type_idx;
ALTER INDEX pm_package_versions_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_versions_7_pkey;
ALTER INDEX i_pm_package_versions_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_versions_7_pm_package_id_version_purl_type_idx;
ALTER INDEX pm_package_versions_pkey ATTACH PARTITION gitlab_partitions_static.pm_package_versions_8_pkey;
ALTER INDEX i_pm_package_versions_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_package_versions_8_pm_package_id_version_purl_type_idx;
ALTER INDEX pm_packages_pkey ATTACH PARTITION gitlab_partitions_static.pm_packages_1_pkey;
ALTER INDEX i_pm_packages_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_packages_1_purl_type_name_idx;
ALTER INDEX pm_packages_pkey ATTACH PARTITION gitlab_partitions_static.pm_packages_2_pkey;
ALTER INDEX i_pm_packages_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_packages_2_purl_type_name_idx;
ALTER INDEX pm_packages_pkey ATTACH PARTITION gitlab_partitions_static.pm_packages_3_pkey;
ALTER INDEX i_pm_packages_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_packages_3_purl_type_name_idx;
ALTER INDEX pm_packages_pkey ATTACH PARTITION gitlab_partitions_static.pm_packages_4_pkey;
ALTER INDEX i_pm_packages_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_packages_4_purl_type_name_idx;
ALTER INDEX pm_packages_pkey ATTACH PARTITION gitlab_partitions_static.pm_packages_5_pkey;
ALTER INDEX i_pm_packages_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_packages_5_purl_type_name_idx;
ALTER INDEX pm_packages_pkey ATTACH PARTITION gitlab_partitions_static.pm_packages_6_pkey;
ALTER INDEX i_pm_packages_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_packages_6_purl_type_name_idx;
ALTER INDEX pm_packages_pkey ATTACH PARTITION gitlab_partitions_static.pm_packages_7_pkey;
ALTER INDEX i_pm_packages_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_packages_7_purl_type_name_idx;
ALTER INDEX pm_packages_pkey ATTACH PARTITION gitlab_partitions_static.pm_packages_8_pkey;
ALTER INDEX i_pm_packages_for_inserts ATTACH PARTITION gitlab_partitions_static.pm_packages_8_purl_type_name_idx;
ALTER INDEX index_product_analytics_events_experimental_project_and_time ATTACH PARTITION gitlab_partitions_static.product_analytics_events_expe_project_id_collector_tstamp_idx10;
ALTER INDEX index_product_analytics_events_experimental_project_and_time ATTACH PARTITION gitlab_partitions_static.product_analytics_events_expe_project_id_collector_tstamp_idx11;
@@ -34448,9 +34893,6 @@ ALTER TABLE ONLY issuable_severities
ALTER TABLE ONLY saml_providers
ADD CONSTRAINT fk_rails_306d459be7 FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY pm_package_version_licenses
ADD CONSTRAINT fk_rails_30ddb7f837 FOREIGN KEY (pm_package_version_id) REFERENCES pm_package_versions(id) ON DELETE CASCADE;
ALTER TABLE ONLY resource_state_events
ADD CONSTRAINT fk_rails_3112bba7dc FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE;
@@ -34913,9 +35355,6 @@ ALTER TABLE ONLY merge_request_context_commit_diff_files
ALTER TABLE ONLY group_crm_settings
ADD CONSTRAINT fk_rails_74fdf2f13d FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY pm_package_version_licenses
ADD CONSTRAINT fk_rails_7520ea026d FOREIGN KEY (pm_license_id) REFERENCES pm_licenses(id) ON DELETE CASCADE;
ALTER TABLE ONLY clusters_applications_ingress
ADD CONSTRAINT fk_rails_753a7b41c1 FOREIGN KEY (cluster_id) REFERENCES clusters(id) ON DELETE CASCADE;
@@ -35498,9 +35937,6 @@ ALTER TABLE ONLY resource_iteration_events
ALTER TABLE ONLY member_roles
ADD CONSTRAINT fk_rails_cf0ee35814 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY pm_package_versions
ADD CONSTRAINT fk_rails_cf94c3e601 FOREIGN KEY (pm_package_id) REFERENCES pm_packages(id) ON DELETE CASCADE;
ALTER TABLE ONLY upload_states
ADD CONSTRAINT fk_rails_d00f153613 FOREIGN KEY (upload_id) REFERENCES uploads(id) ON DELETE CASCADE;
@@ -35831,12 +36267,21 @@ ALTER TABLE ONLY timelogs
ALTER TABLE ONLY u2f_registrations
ADD CONSTRAINT fk_u2f_registrations_user_id FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ALTER TABLE pm_package_versions
ADD CONSTRAINT fkey_fb6234c446 FOREIGN KEY (pm_package_id, purl_type) REFERENCES pm_packages(id, purl_type) ON DELETE CASCADE;
ALTER TABLE issue_search_data
ADD CONSTRAINT issue_search_data_issue_id_fkey FOREIGN KEY (issue_id) REFERENCES issues(id) ON DELETE CASCADE;
ALTER TABLE issue_search_data
ADD CONSTRAINT issue_search_data_project_id_fkey FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE pm_package_version_licenses
ADD CONSTRAINT pm_package_licenses_fkey FOREIGN KEY (pm_license_id) REFERENCES pm_licenses(id) ON DELETE CASCADE;
ALTER TABLE pm_package_version_licenses
ADD CONSTRAINT pm_package_versions_fkey FOREIGN KEY (pm_package_version_id, purl_type) REFERENCES pm_package_versions(id, purl_type) ON DELETE CASCADE;
ALTER TABLE product_analytics_events_experimental
ADD CONSTRAINT product_analytics_events_experimental_project_id_fkey FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;

View File

@@ -16,26 +16,85 @@ finishes. This feature is enabled by default in all GitLab installations.
To disable artifacts site-wide:
**In Omnibus installations:**
::Tabs
1. Edit `/etc/gitlab/gitlab.rb` and add the following line:
:::TabTitle Linux package (Omnibus)
1. Edit `/etc/gitlab/gitlab.rb`:
```ruby
gitlab_rails['artifacts_enabled'] = false
```
1. Save the file and [reconfigure GitLab](restart_gitlab.md#omnibus-gitlab-reconfigure) for the changes to take effect.
1. Save the file and reconfigure GitLab:
**In installations from source:**
1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following lines:
```yaml
artifacts:
enabled: false
```shell
sudo gitlab-ctl reconfigure
```
1. Save the file and [restart GitLab](restart_gitlab.md#installations-from-source) for the changes to take effect.
:::TabTitle Helm chart (Kubernetes)
1. Export the Helm values:
```shell
helm get values gitlab > gitlab_values.yaml
```
1. Edit `gitlab_values.yaml`:
```yaml
global:
appConfig:
artifacts:
enabled: false
```
1. Save the file and apply the new values:
```shell
helm upgrade -f gitlab_values.yaml gitlab gitlab/gitlab
```
:::TabTitle Docker
1. Edit `docker-compose.yml`:
```yaml
version: "3.6"
services:
gitlab:
environment:
GITLAB_OMNIBUS_CONFIG: |
gitlab_rails['artifacts_enabled'] = false
```
1. Save the file and restart GitLab:
```shell
docker compose up -d
```
:::TabTitle Self-compiled (source)
1. Edit `/home/git/gitlab/config/gitlab.yml`:
```yaml
production: &base
artifacts:
enabled: false
```
1. Save the file and restart GitLab:
```shell
# For systems running systemd
sudo systemctl restart gitlab.target
# For systems running SysV init
sudo service gitlab restart
```
::EndTabs
## Storing job artifacts
@@ -48,45 +107,63 @@ Most artifacts are compressed by GitLab Runner before being sent to the coordina
### Using local storage
To change the location where the artifacts are stored locally, follow the steps
below.
If you're using the Linux package or have a self-compiled installation, you
can change the location where the artifacts are stored locally.
**In Omnibus installations:**
NOTE:
For Docker installations, you can change the path where your data is mounted.
For the Helm chart, use
[object storage](https://docs.gitlab.com/charts/advanced/external-object-storage/).
_The artifacts are stored by default in
`/var/opt/gitlab/gitlab-rails/shared/artifacts`._
::Tabs
1. To change the storage path for example to `/mnt/storage/artifacts`, edit
:::TabTitle Linux package (Omnibus)
The artifacts are stored by default in `/var/opt/gitlab/gitlab-rails/shared/artifacts`.
1. To change the storage path, for example to `/mnt/storage/artifacts`, edit
`/etc/gitlab/gitlab.rb` and add the following line:
```ruby
gitlab_rails['artifacts_path'] = "/mnt/storage/artifacts"
```
1. Save the file and [reconfigure GitLab](restart_gitlab.md#omnibus-gitlab-reconfigure) for the changes to take effect.
1. Save the file and reconfigure GitLab:
**In installations from source:**
```shell
sudo gitlab-ctl reconfigure
```
_The artifacts are stored by default in
`/home/git/gitlab/shared/artifacts`._
:::TabTitle Self-compiled (source)
1. To change the storage path for example to `/mnt/storage/artifacts`, edit
The artifacts are stored by default in `/home/git/gitlab/shared/artifacts`.
1. To change the storage path, for example to `/mnt/storage/artifacts`, edit
`/home/git/gitlab/config/gitlab.yml` and add or amend the following lines:
```yaml
artifacts:
enabled: true
path: /mnt/storage/artifacts
production: &base
artifacts:
enabled: true
path: /mnt/storage/artifacts
```
1. Save the file and [restart GitLab](restart_gitlab.md#installations-from-source) for the changes to take effect.
1. Save the file and restart GitLab:
```shell
# For systems running systemd
sudo systemctl restart gitlab.target
# For systems running SysV init
sudo service gitlab restart
```
::EndTabs
### Using object storage
If you don't want to use the local disk where GitLab is installed to store the
artifacts, you can use an object storage like AWS S3 instead.
This configuration relies on valid AWS credentials to be configured already.
Use an object storage option like AWS S3 to store job artifacts.
If you configure GitLab to store artifacts on object storage, you may also want to
[eliminate local disk usage for job logs](job_logs.md#prevent-local-disk-usage).
@@ -96,149 +173,110 @@ WARNING:
In a multi-server setup you must use one of the options to
[eliminate local disk usage for job logs](job_logs.md#prevent-local-disk-usage), or job logs could be lost.
[Read more about using object storage with GitLab](object_storage.md).
#### Object Storage Settings
In GitLab 13.2 and later, you should use the
[consolidated object storage settings](object_storage.md#consolidated-object-storage-configuration).
This section describes the earlier configuration format.
For source installations the following settings are nested under `artifacts:`
and then `object_store:`. On Omnibus GitLab installs they are prefixed by
`artifacts_object_store_`.
| Setting | Default | Description |
|---------------------|---------|-------------|
| `enabled` | `false` | Enable or disable object storage. |
| `remote_directory` | | The bucket name where Artifacts are stored. Use the name only, do not include the path. |
| `proxy_download` | `false` | Set to `true` to proxy all files served through GitLab. Leaving it disabled reduces egress traffic, because clients download directly from remote storage instead of having all data proxied. |
| `connection` | | Various connection options described below. |
#### Connection settings
See [the available connection settings for different providers](object_storage.md#connection-settings).
**In Omnibus installations:**
_The artifacts are stored by default in
`/var/opt/gitlab/gitlab-rails/shared/artifacts`._
1. Edit `/etc/gitlab/gitlab.rb` and add the following lines, substituting
the values you want:
```ruby
gitlab_rails['artifacts_enabled'] = true
gitlab_rails['artifacts_object_store_enabled'] = true
gitlab_rails['artifacts_object_store_remote_directory'] = "artifacts"
gitlab_rails['artifacts_object_store_connection'] = {
'provider' => 'AWS',
'region' => 'eu-central-1',
'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY'
}
```
NOTE:
If you're using AWS IAM profiles, omit the AWS access key and secret access
key/value pairs. For example:
```ruby
gitlab_rails['artifacts_object_store_connection'] = {
'provider' => 'AWS',
'region' => 'eu-central-1',
'use_iam_profile' => true
}
```
1. Save the file and [reconfigure GitLab](restart_gitlab.md#omnibus-gitlab-reconfigure) for the changes to take effect.
1. [Migrate any existing local artifacts to the object storage](#migrating-to-object-storage).
**In installations from source:**
_The artifacts are stored by default in
`/home/git/gitlab/shared/artifacts`._
1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
lines:
```yaml
artifacts:
enabled: true
object_store:
enabled: true
remote_directory: "artifacts" # The bucket name
connection:
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
```
1. Save the file and [restart GitLab](restart_gitlab.md#installations-from-source) for the changes to take effect.
1. [Migrate any existing local artifacts to the object storage](#migrating-to-object-storage).
### Migrating to object storage
After [configuring the object storage](#using-object-storage), use the following task to
migrate existing job artifacts from the local storage to the remote storage.
The processing is done in a background worker and requires **no downtime**.
You can migrate the job artifacts from local storage to object storage. The
processing is done in a background worker and requires **no downtime**.
**In Omnibus installations:**
1. [Configure the object storage](#using-object-storage).
1. Migrate the artifacts:
```shell
gitlab-rake gitlab:artifacts:migrate
```
::Tabs
**In installations from source:**
:::TabTitle Linux package (Omnibus)
```shell
sudo -u git -H bundle exec rake gitlab:artifacts:migrate RAILS_ENV=production
```
```shell
sudo gitlab-rake gitlab:artifacts:migrate
```
You can optionally track progress and verify that all job artifacts migrated successfully using the
[PostgreSQL console](https://docs.gitlab.com/omnibus/settings/database.html#connecting-to-the-bundled-postgresql-database):
:::TabTitle Docker
- `sudo gitlab-rails dbconsole` for Omnibus GitLab 14.1 and earlier.
- `sudo gitlab-rails dbconsole --database main` for Omnibus GitLab 14.2 and later.
- `sudo -u git -H psql -d gitlabhq_production` for source-installed instances.
```shell
sudo docker exec -t <container name> gitlab-rake gitlab:artifacts:migrate
```
Verify `objectstg` below (where `store=2`) has count of all job artifacts:
:::TabTitle Self-compiled (source)
```shell
gitlabhq_production=# SELECT count(*) AS total, sum(case when file_store = '1' then 1 else 0 end) AS filesystem, sum(case when file_store = '2' then 1 else 0 end) AS objectstg FROM ci_job_artifacts;
```shell
sudo -u git -H bundle exec rake gitlab:artifacts:migrate RAILS_ENV=production
```
total | filesystem | objectstg
------+------------+-----------
19 | 0 | 19
```
::EndTabs
Verify that there are no files on disk in the `artifacts` folder:
1. Optional. Track the progress and verify that all job artifacts migrated
successfully using the PostgreSQL console.
1. Open a PostgreSQL console:
```shell
sudo find /var/opt/gitlab/gitlab-rails/shared/artifacts -type f | grep -v tmp | wc -l
```
::Tabs
:::TabTitle Linux package (Omnibus)
```shell
sudo gitlab-psql
```
:::TabTitle Docker
```shell
sudo docker exec -it <container_name> /bin/bash
gitlab-psql
```
:::TabTitle Self-compiled (source)
```shell
sudo -u git -H psql -d gitlabhq_production
```
::EndTabs
1. Verify that all job artifacts migrated to object storage with the following
SQL query. The number of `objectstg` should be the same as `total`:
```shell
gitlabhq_production=# SELECT count(*) AS total, sum(case when file_store = '1' then 1 else 0 end) AS filesystem, sum(case when file_store = '2' then 1 else 0 end) AS objectstg FROM ci_job_artifacts;
total | filesystem | objectstg
------+------------+-----------
19 | 0 | 19
```
1. Verify that there are no files on disk in the `artifacts` directory:
::Tabs
:::TabTitle Linux package (Omnibus)
```shell
sudo find /var/opt/gitlab/gitlab-rails/shared/artifacts -type f | grep -v tmp | wc -l
```
:::TabTitle Docker
Assuming you mounted `/var/opt/gitlab` to `/srv/gitlab`:
```shell
sudo find /srv/gitlab/gitlab-rails/shared/artifacts -type f | grep -v tmp | wc -l
```
:::TabTitle Self-compiled (source)
```shell
sudo find /home/git/gitlab/shared/artifacts -type f | grep -v tmp | wc -l
```
::EndTabs
In some cases, you need to run the [orphan artifact file cleanup Rake task](../raketasks/cleanup.md#remove-orphan-artifact-files)
to clean up orphaned artifacts.
WARNING:
JUnit test report artifact (`junit.xml.gz`) migration
[was not supported until GitLab 12.8](https://gitlab.com/gitlab-org/gitlab/-/issues/27698#note_317190991)
by the `gitlab:artifacts:migrate` Rake task.
### Migrating from object storage to local storage
**In Omnibus installations:**
To migrate back to local storage:
1. Run `gitlab-rake gitlab:artifacts:migrate_to_local`.
1. Disable object storage for artifacts in `gitlab.rb`:
- Set `gitlab_rails['artifacts_object_store_enabled'] = false`.
- Comment out all other `artifacts_object_store` settings, including the entire
`artifacts_object_store_connection` section, including the closing `}`.
1. [Reconfigure GitLab](restart_gitlab.md#omnibus-gitlab-reconfigure).
To migrate back to local storage, you must
[selectively disable the artifacts storage](object_storage.md#selectively-disabling-object-storage).
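For the Linux package with storage-specific configuration, disabling object storage for artifacts (as in the steps above) amounts to the following `gitlab.rb` change, followed by a reconfigure (a minimal sketch):

```ruby
# /etc/gitlab/gitlab.rb: disable object storage for job artifacts,
# then run `sudo gitlab-ctl reconfigure`.
gitlab_rails['artifacts_object_store_enabled'] = false
```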
## Expiring artifacts
@ -247,36 +285,93 @@ an expiry for the artifacts, they are marked for deletion right after that date
Otherwise, they expire per the [default artifacts expiration setting](../user/admin_area/settings/continuous_integration.md).
Artifacts are cleaned up by the `expire_build_artifacts_worker` cron job which Sidekiq
runs every 7 minutes (`*/7 * * * *`).
runs every 7 minutes (`*/7 * * * *` in [Cron](../topics/cron/index.md) syntax).
To change the default schedule on which the artifacts are expired, follow the
steps below.
To change the default schedule on which the artifacts are expired:
**In Omnibus installations:**
::Tabs
1. Edit `/etc/gitlab/gitlab.rb` and add the following line (or uncomment it if it already exists and is commented out), substituting
your schedule in cron syntax:
:::TabTitle Linux package (Omnibus)
1. Edit `/etc/gitlab/gitlab.rb` and add the following line (or uncomment it if
it already exists and is commented out), substituting your schedule in cron
syntax:
```ruby
gitlab_rails['expire_build_artifacts_worker_cron'] = "*/7 * * * *"
```
1. Save the file and [reconfigure GitLab](restart_gitlab.md#omnibus-gitlab-reconfigure) for the changes to take effect.
1. Save the file and reconfigure GitLab:
**In installations from source:**
1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
lines:
```yaml
expire_build_artifacts_worker:
cron: "*/7 * * * *"
```shell
sudo gitlab-ctl reconfigure
```
1. Save the file and [restart GitLab](restart_gitlab.md#installations-from-source) for the changes to take effect.
:::TabTitle Helm chart (Kubernetes)
If the `expire` directive is not set explicitly in your pipeline, artifacts expire per the
default artifacts expiration setting, which you can find in the [CI/CD Administration settings](../user/admin_area/settings/continuous_integration.md).
1. Export the Helm values:
```shell
helm get values gitlab > gitlab_values.yaml
```
1. Edit `gitlab_values.yaml`:
```yaml
global:
appConfig:
cron_jobs:
expire_build_artifacts_worker:
cron: "*/7 * * * *"
```
1. Save the file and apply the new values:
```shell
helm upgrade -f gitlab_values.yaml gitlab gitlab/gitlab
```
:::TabTitle Docker
1. Edit `docker-compose.yml`:
```yaml
version: "3.6"
services:
gitlab:
environment:
GITLAB_OMNIBUS_CONFIG: |
gitlab_rails['expire_build_artifacts_worker_cron'] = "*/7 * * * *"
```
1. Save the file and restart GitLab:
```shell
docker compose up -d
```
:::TabTitle Self-compiled (source)
1. Edit `/home/git/gitlab/config/gitlab.yml`:
```yaml
production: &base
cron_jobs:
expire_build_artifacts_worker:
cron: "*/7 * * * *"
```
1. Save the file and restart GitLab:
```shell
# For systems running systemd
sudo systemctl restart gitlab.target
# For systems running SysV init
sudo service gitlab restart
```
::EndTabs
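For comparison, an individual job can set its own artifact expiry with `expire_in` in `.gitlab-ci.yml`. A minimal illustration (the job name and paths are placeholders):

```yaml
build-job:
  script:
    - make build
  artifacts:
    paths:
      - dist/
    expire_in: 1 week
```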
## Set the maximum file size of the artifacts
@ -373,13 +468,41 @@ these artifacts are not processed by the new housekeeping jobs.
You can check the database to confirm if your instance has artifacts with the `unknown` status:
1. Start a database console, on Omnibus:
1. Start a database console:
::Tabs
:::TabTitle Linux package (Omnibus)
```shell
sudo gitlab-psql
```
1. Run this query:
:::TabTitle Helm chart (Kubernetes)
```shell
# Find the toolbox pod
kubectl --namespace <namespace> get pods -lapp=toolbox
# Connect to the PostgreSQL console
kubectl exec -it <toolbox-pod-name> -- /srv/gitlab/bin/rails dbconsole --include-password --database main
```
:::TabTitle Docker
```shell
sudo docker exec -it <container_name> /bin/bash
gitlab-psql
```
:::TabTitle Self-compiled (source)
```shell
sudo -u git -H psql -d gitlabhq_production
```
::EndTabs
1. Run the following query:
```sql
select expire_at, file_type, locked, count(*) from ci_job_artifacts
@ -652,7 +775,7 @@ review:
{"error":"MissingRegion: could not find region configuration","level":"error","msg":"error uploading S3 session","time":"2021-03-16T22:10:55-04:00"}
```
In both cases, you might need to add `region` to the job artifact [object storage configuration](#connection-settings).
In both cases, you might need to add `region` to the job artifact [object storage configuration](object_storage.md).
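For the Linux package with storage-specific configuration, adding the region might look like this (a sketch; the provider, region, and credentials are placeholders):

```ruby
# /etc/gitlab/gitlab.rb
gitlab_rails['artifacts_object_store_connection'] = {
  'provider' => 'AWS',
  'region' => 'us-east-1',
  'aws_access_key_id' => '<AWS_ACCESS_KEY_ID>',
  'aws_secret_access_key' => '<AWS_SECRET_ACCESS_KEY>'
}
```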
### Job artifact upload fails with `500 Internal Server Error (Missing file)`

View File

@ -7,8 +7,6 @@ type: reference
# Job logs **(FREE SELF)**
> [Renamed from job traces to job logs](https://gitlab.com/gitlab-org/gitlab/-/issues/29121) in GitLab 12.5.
Job logs are sent by a runner while it's processing a job. You can see
logs in job pages, pipelines, email notifications, and so on.
@ -23,30 +21,28 @@ In the following table you can see the phases a log goes through:
| 2: archiving | archived log | After a job is finished | Sidekiq moves log to artifacts folder | `#{ROOT_PATH}/gitlab-rails/shared/artifacts/#{disk_hash}/#{YYYY_mm_dd}/#{job_id}/#{job_artifact_id}/job.log` |
| 3: uploading | archived log | After a log is archived | Sidekiq moves archived log to [object storage](#uploading-logs-to-object-storage) (if configured) | `#{bucket_name}/#{disk_hash}/#{YYYY_mm_dd}/#{job_id}/#{job_artifact_id}/job.log` |
The `ROOT_PATH` varies per environment. For Omnibus GitLab it
would be `/var/opt/gitlab`, and for installations from source
it would be `/home/git/gitlab`.
The `ROOT_PATH` varies per environment:
- For the Linux package it's `/var/opt/gitlab`.
- For self-compiled installations it's `/home/git/gitlab`.
## Changing the job logs local location
To change the location where the job logs are stored, follow the steps below.
NOTE:
For Docker installations, you can change the path where your data is mounted.
For the Helm chart, use object storage.
**In Omnibus installations:**
To change the location where the job logs are stored:
1. Edit `/etc/gitlab/gitlab.rb` and add or amend the following line:
::Tabs
```ruby
gitlab_ci['builds_directory'] = '/mnt/to/gitlab-ci/builds'
```
:::TabTitle Linux package (Omnibus)
1. Save the file and [reconfigure GitLab](restart_gitlab.md#omnibus-gitlab-reconfigure) for the
changes to take effect.
1. Optional. If you have existing job logs, pause continuous integration data
processing. Jobs in progress are not affected, based on how
[data flow](#data-flow) works.
Alternatively, if you have existing job logs you can follow
these steps to move the logs to a new location without losing any data.
1. Pause continuous integration data processing by updating this setting in `/etc/gitlab/gitlab.rb`.
Jobs in progress are not affected, based on how [data flow](#data-flow) works.
1. Edit `/etc/gitlab/gitlab.rb`:
```ruby
sidekiq['queue_selector'] = true
@ -55,50 +51,70 @@ these steps to move the logs to a new location without losing any data.
]
```
1. Save the file and [reconfigure GitLab](restart_gitlab.md#omnibus-gitlab-reconfigure) for the
changes to take effect.
1. Save the file and reconfigure GitLab:
```shell
sudo gitlab-ctl reconfigure
```
1. Set the new storage location in `/etc/gitlab/gitlab.rb`:
```ruby
gitlab_ci['builds_directory'] = '/mnt/to/gitlab-ci/builds'
gitlab_ci['builds_directory'] = '/mnt/gitlab-ci/builds'
```
1. Save the file and reconfigure GitLab:
```shell
sudo gitlab-ctl reconfigure
```
1. Save the file and [reconfigure GitLab](restart_gitlab.md#omnibus-gitlab-reconfigure) for the
changes to take effect.
1. Use `rsync` to move job logs from the current location to the new location:
```shell
sudo rsync -avzh --remove-source-files --ignore-existing --progress /var/opt/gitlab/gitlab-ci/builds/ /mnt/to/gitlab-ci/builds`
sudo rsync -avzh --remove-source-files --ignore-existing --progress /var/opt/gitlab/gitlab-ci/builds/ /mnt/gitlab-ci/builds
```
   Use `--ignore-existing` so you don't overwrite new job logs with older versions of the same log.
1. Resume continuous integration data processing by editing `/etc/gitlab/gitlab.rb` and removing the `sidekiq` setting you updated earlier.
1. Save the file and [reconfigure GitLab](restart_gitlab.md#omnibus-gitlab-reconfigure) for the
changes to take effect.
1. Save the file and reconfigure GitLab:
```shell
sudo gitlab-ctl reconfigure
```
1. Remove the old job logs storage location:
```shell
sudo rm -rf /var/opt/gitlab/gitlab-ci/builds`
sudo rm -rf /var/opt/gitlab/gitlab-ci/builds
```
**In installations from source:**
:::TabTitle Self-compiled (source)
1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following lines:
1. Edit `/home/git/gitlab/config/gitlab.yml` to set the new storage location:
```yaml
gitlab_ci:
# The location where build logs are stored (default: builds/).
# Relative paths are relative to Rails.root.
builds_path: path/to/builds/
production: &base
gitlab_ci:
builds_path: /mnt/gitlab-ci/builds
```
1. Save the file and [restart GitLab](restart_gitlab.md#installations-from-source) for the changes
to take effect.
1. Save the file and restart GitLab:
```shell
# For systems running systemd
sudo systemctl restart gitlab.target
# For systems running SysV init
sudo service gitlab restart
```
::EndTabs
## Uploading logs to object storage
Archived logs are considered as [job artifacts](job_artifacts.md).
Therefore, when you [set up the object storage integration](job_artifacts.md#object-storage-settings),
Therefore, when you [set up the object storage integration](job_artifacts.md#using-object-storage),
job logs are automatically migrated to it along with the other job artifacts.
See "Phase 3: uploading" in [Data flow](#data-flow) to learn about the process.
@ -118,19 +134,45 @@ There isn't a way to automatically expire old job logs, but it's safe to remove
them if they're taking up too much space. If you remove the logs manually, the
job output in the UI is empty.
For example, to delete all job logs older than 60 days, run the following from a shell in your GitLab instance:
For example, to delete all job logs older than 60 days, run the following
command from a shell in your GitLab instance.
NOTE:
For the Helm chart, use the storage management tools provided with your object
storage.
WARNING:
This command permanently deletes the log files and is irreversible.
The following command permanently deletes the log files and is irreversible.
::Tabs
:::TabTitle Linux package (Omnibus)
```shell
find /var/opt/gitlab/gitlab-rails/shared/artifacts -name "job.log" -mtime +60 -delete
```
NOTE:
After execution, broken file references can be reported when running
[`sudo gitlab-rake gitlab:artifacts:check`](raketasks/check.md#uploaded-files-integrity).
For more information, see [delete references to missing artifacts](raketasks/check.md#delete-references-to-missing-artifacts).
:::TabTitle Docker
Assuming you mounted `/var/opt/gitlab` to `/srv/gitlab`:
```shell
find /srv/gitlab/gitlab-rails/shared/artifacts -name "job.log" -mtime +60 -delete
```
:::TabTitle Self-compiled (source)
```shell
find /home/git/gitlab/shared/artifacts -name "job.log" -mtime +60 -delete
```
::EndTabs
After the logs are deleted, you can find any broken file references by running
the Rake task that checks the
[integrity of the uploaded files](raketasks/check.md#uploaded-files-integrity).
For more information, see how to
[delete references to missing artifacts](raketasks/check.md#delete-references-to-missing-artifacts).
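On Linux package installations, that integrity check is the same Rake task mentioned earlier:

```shell
sudo gitlab-rake gitlab:artifacts:check
```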
## Incremental logging architecture
@ -140,19 +182,48 @@ For more information, see [delete references to missing artifacts](raketasks/che
> - [Recommended for production use with AWS S3](https://gitlab.com/gitlab-org/gitlab/-/issues/273498) in GitLab 13.7.
> - To use in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enable-or-disable-incremental-logging).
By default job logs are sent from the GitLab Runner in chunks and cached temporarily on disk
in `/var/opt/gitlab/gitlab-ci/builds` by Omnibus GitLab. After the job completes,
a background job archives the job log. The log is moved to `/var/opt/gitlab/gitlab-rails/shared/artifacts/`
by default, or to object storage if configured.
By default, job logs are sent from the GitLab Runner in chunks and cached
temporarily on disk. After the job completes, a background job archives the job
log. The log is moved to the artifacts directory by default, or to object
storage if configured.
In a [scaled-out architecture](reference_architectures/index.md) with Rails and Sidekiq running on more than one
server, these two locations on the file system have to be shared using NFS.
In a [scaled-out architecture](reference_architectures/index.md) with Rails and
Sidekiq running on more than one server, these two locations on the file system
have to be shared using NFS, which is not recommended. Instead:
To eliminate both file system requirements:
1. Configure [object storage](job_artifacts.md#object-storage-settings) for storing archived job logs.
1. Configure [object storage](job_artifacts.md#using-object-storage) for storing archived job logs.
1. [Enable the incremental logging feature](#enable-or-disable-incremental-logging), which uses Redis instead of disk space for temporary caching of job logs.
### Enable or disable incremental logging
Before you enable the feature flag:
- Review [the limitations of incremental logging](#limitations).
- [Enable object storage](job_artifacts.md#using-object-storage).
To enable incremental logging:
1. Open a [Rails console](operations/rails_console.md#starting-a-rails-console-session).
1. Enable the feature flag:
```ruby
Feature.enable(:ci_enable_live_trace)
```
Running jobs' logs continue to be written to disk, but new jobs use
incremental logging.
To disable incremental logging:
1. Open a [Rails console](operations/rails_console.md#starting-a-rails-console-session).
1. Disable the feature flag:
```ruby
Feature.disable(:ci_enable_live_trace)
```
Running jobs continue to use incremental logging, but new jobs write to the disk.
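To check the current state of the flag from the same Rails console (a quick verification, not part of the original steps):

```ruby
Feature.enabled?(:ci_enable_live_trace)
```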
### Technical details
The data flow is the same as described in the [data flow section](#data-flow)
@ -178,36 +249,7 @@ Here is the detailed data flow:
### Limitations
- [Redis Cluster is not supported](https://gitlab.com/gitlab-org/gitlab/-/issues/224171).
- You must configure [object storage for CI/CD artifacts, logs, and builds](job_artifacts.md#object-storage-settings)
- You must configure [object storage for CI/CD artifacts, logs, and builds](job_artifacts.md#using-object-storage)
before you enable the feature flag. After the flag is enabled, files cannot be written
to disk, and there is no protection against misconfiguration.
- There is [an epic tracking other potential limitations and improvements](https://gitlab.com/groups/gitlab-org/-/epics/3791).
### Enable or disable incremental logging
Incremental logging is under development, but [ready for production use as of GitLab 13.6](https://gitlab.com/groups/gitlab-org/-/epics/4275). It is
deployed behind a feature flag that is **disabled by default**.
[GitLab administrators with access to the GitLab Rails console](feature_flags.md)
can enable it.
Before you enable the feature flag:
- Review [the limitations of incremental logging](#limitations).
- [Enable object storage](job_artifacts.md#object-storage-settings).
To enable incremental logging:
```ruby
Feature.enable(:ci_enable_live_trace)
```
Running jobs' logs continue to be written to disk, but new jobs use
incremental logging.
To disable incremental logging:
```ruby
Feature.disable(:ci_enable_live_trace)
```
Running jobs continue to use incremental logging, but new jobs write to the disk.

View File

@ -45,7 +45,7 @@ Each metric is defined in a separate YAML file consisting of a number of fields:
| `data_category` | yes | `string`; [categories](#data-category) of the metric, may be set to `operational`, `optional`, `subscription`, `standard`. The default value is `optional`.|
| `instrumentation_class` | yes | `string`; [the class that implements the metric](metrics_instrumentation.md). |
| `distribution` | yes | `array`; may be set to one of `ce, ee` or `ee`. The [distribution](https://about.gitlab.com/handbook/marketing/strategic-marketing/tiers/#definitions) where the tracked feature is available. |
| `performance_indicator_type` | no | `array`; may be set to one of [`gmau`, `smau`, `paid_gmau`, or `umau`](https://about.gitlab.com/handbook/business-technology/data-team/data-catalog/xmau-analysis/). |
| `performance_indicator_type` | no | `array`; may be set to one of [`gmau`, `smau`, `paid_gmau`, `umau` or `customer_health_score`](https://about.gitlab.com/handbook/business-technology/data-team/data-catalog/xmau-analysis/). |
| `tier` | yes | `array`; may contain one or a combination of `free`, `premium` or `ultimate`. The [tier]( https://about.gitlab.com/handbook/marketing/strategic-marketing/tiers/) where the tracked feature is available. This should be verbose and contain all tiers where a metric is available. |
| `milestone` | yes | The milestone when the metric is introduced and when it's available to self-managed instances with the official GitLab release. |
| `milestone_removed` | no | The milestone when the metric is removed. |
@ -126,7 +126,7 @@ A metric's time frame is calculated based on the `time_frame` field and the `dat
For `redis_hll` metrics, the type of aggregation is also taken into consideration. In this context, the term "aggregation" refers to [chosen events data storage interval](implement.md#add-new-events), and is **NOT** related to the Aggregated Metrics feature.
For more information about the aggregation type of each feature, see the [`common.yml` file](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/usage_data_counters/known_events/common.yml). Weeks run from Monday to Sunday.
| data_source | time_frame | aggregation | Description |
| data_source | time_frame | aggregation | Description |
|------------------------|------------|----------------|-------------------------------------------------|
| any | `none` | not applicable | A type of data that's not tracked over time, such as settings and configuration information |
| `database` | `all` | not applicable | The whole time the metric has been active (all-time interval) |

View File

@ -61,12 +61,26 @@ module API
type: String,
desc: 'Source entity type (only `group_entity` is supported)',
values: %w[group_entity]
requires :source_full_path, type: String, desc: 'Source full path of the entity to import'
requires :destination_namespace, type: String, desc: 'Destination namespace for the entity'
optional :destination_slug, type: String, desc: 'Destination slug for the entity'
requires :source_full_path,
type: String,
desc: 'Relative path of the source entity to import',
source_full_path: true,
documentation: { example: "'source/full/path' not 'https://example.com/source/full/path'" }
requires :destination_namespace,
type: String,
desc: 'Destination namespace for the entity',
destination_namespace_path: true,
documentation: { example: "'destination_namespace' or 'destination/namespace'" }
optional :destination_slug,
type: String,
desc: 'Destination slug for the entity',
destination_slug_path: true,
documentation: { example: "'destination_slug' not 'destination/slug'" }
optional :destination_name,
type: String,
desc: 'Deprecated: Use :destination_slug instead. Destination slug for the entity'
desc: 'Deprecated: Use :destination_slug instead. Destination slug for the entity',
destination_slug_path: true,
documentation: { example: "'destination_slug' not 'destination/slug'" }
mutually_exclusive :destination_slug, :destination_name
at_least_one_of :destination_slug, :destination_name
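For orientation, a request against this endpoint might look roughly like the following (a hedged sketch: the instance URLs, token, and paths are placeholders, and the exact payload shape should be checked against the bulk imports API documentation):

```shell
curl --request POST "https://gitlab.example.com/api/v4/bulk_imports" \
  --header "PRIVATE-TOKEN: <your_access_token>" \
  --header "Content-Type: application/json" \
  --data '{
    "configuration": {
      "url": "https://gitlab.source.example.com",
      "access_token": "<source_instance_access_token>"
    },
    "entities": [
      {
        "source_type": "group_entity",
        "source_full_path": "source/full/path",
        "destination_slug": "destination_slug",
        "destination_namespace": "destination/namespace"
      }
    ]
  }'
```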

View File

@ -53,11 +53,15 @@ module API
authorize_read_builds!
builds = user_project.builds.order('id DESC')
builds = user_project.builds.order(id: :desc)
builds = filter_builds(builds, params[:scope])
builds = builds.preload(:user, :job_artifacts_archive, :job_artifacts, :runner, :tags, pipeline: :project)
present paginate(builds, without_count: true), with: Entities::Ci::Job
if Feature.enabled?(:jobs_api_keyset_pagination, user_project)
present paginate_with_strategies(builds, paginator_params: { without_count: true }), with: Entities::Ci::Job
else
present paginate(builds, without_count: true), with: Entities::Ci::Job
end
end
# rubocop: enable CodeReuse/ActiveRecord
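With the flag enabled, keyset pagination on this endpoint is requested with the `pagination=keyset` query parameter (as exercised in the specs further below); for example:

```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" \
  "https://gitlab.example.com/api/v4/projects/<project_id>/jobs?pagination=keyset&per_page=50"
```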

View File

@ -3,13 +3,14 @@
module API
module Helpers
module PaginationStrategies
def paginate_with_strategies(relation, request_scope = nil)
# paginator_params are only currently supported with offset pagination
def paginate_with_strategies(relation, request_scope = nil, paginator_params: {})
paginator = paginator(relation, request_scope)
result = if block_given?
yield(paginator.paginate(relation))
yield(paginator.paginate(relation, **paginator_params))
else
paginator.paginate(relation)
paginator.paginate(relation, **paginator_params)
end
result.tap do |records, _|

View File

@ -0,0 +1,47 @@
# frozen_string_literal: true
module API
module Validations
module Validators
module BulkImports
class DestinationSlugPath < Grape::Validations::Base
def validate_param!(attr_name, params)
unless params[attr_name] =~ Gitlab::Regex.group_path_regex # rubocop: disable Style/GuardClause
raise Grape::Exceptions::Validation.new(
params: [@scope.full_name(attr_name)],
message: "cannot start with a dash or forward slash, or end with a period or forward slash. " \
"It can only contain alphanumeric characters, periods, underscores, and dashes. " \
"E.g. 'destination_namespace' not 'destination/namespace'"
)
end
end
end
class DestinationNamespacePath < Grape::Validations::Base
def validate_param!(attr_name, params)
unless params[attr_name] =~ Gitlab::Regex.bulk_import_namespace_path_regex # rubocop: disable Style/GuardClause
raise Grape::Exceptions::Validation.new(
params: [@scope.full_name(attr_name)],
message: "cannot start with a dash or forward slash, or end with a period or forward slash. " \
"It can only contain alphanumeric characters, periods, underscores, forward slashes " \
"and dashes. E.g. 'destination_namespace' or 'destination/namespace'"
)
end
end
end
class SourceFullPath < Grape::Validations::Base
def validate_param!(attr_name, params)
unless params[attr_name] =~ Gitlab::Regex.bulk_import_namespace_path_regex # rubocop: disable Style/GuardClause
raise Grape::Exceptions::Validation.new(
params: [@scope.full_name(attr_name)],
message: "must be a relative path and not include protocol, sub-domain, or domain information. " \
"E.g. 'source/full/path' not 'https://example.com/source/full/path'" \
)
end
end
end
end
end
end
end

View File

@ -82,7 +82,7 @@ module Atlassian
def public_key_cdn_url_setting
@public_key_cdn_url_setting ||=
if Gitlab::CurrentSettings.jira_connect_proxy_url
if Gitlab::CurrentSettings.jira_connect_proxy_url.present?
Gitlab::Utils.append_path(Gitlab::CurrentSettings.jira_connect_proxy_url, PROXY_PUBLIC_KEY_PATH)
end
end

View File

@ -4,22 +4,15 @@ module Gitlab
module BackgroundMigration
# Add expiry to all OAuth access tokens
class ExpireOAuthTokens < ::Gitlab::BackgroundMigration::BatchedMigrationJob
operation_name :update_oauth_tokens
scope_to ->(relation) { relation.where(expires_in: nil) }
operation_name :update_all
feature_category :database
def perform
each_sub_batch(
batching_scope: ->(relation) { relation.where(expires_in: nil) }
) do |sub_batch|
update_oauth_tokens(sub_batch)
each_sub_batch do |sub_batch|
sub_batch.update_all(expires_in: 2.hours)
end
end
private
def update_oauth_tokens(relation)
relation.update_all(expires_in: 7_200)
end
end
end
end

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class ReExpireOAuthTokens < Gitlab::BackgroundMigration::ExpireOAuthTokens # rubocop:disable Migration/BackgroundMigrationBaseClass
end
# rubocop: enable Style/Documentation
end
end

View File

@ -25,7 +25,7 @@ module Gitlab
# Note: to be deleted after the minimum PG version is set to 12.0
# Update the documentation together when deleting the method
# https://docs.gitlab.com/ee/development/merge_request_performance_guidelines.html#use-ctes-wisely
# https://docs.gitlab.com/ee/development/merge_request_concepts/performance.html#use-ctes-wisely
def self.materialized_if_supported
materialized_supported? ? 'MATERIALIZED' : ''
end

View File

@ -5,7 +5,8 @@ module Gitlab
module CursorBasedKeyset
SUPPORTED_ORDERING = {
Group => { name: :asc },
AuditEvent => { id: :desc }
AuditEvent => { id: :desc },
::Ci::Build => { id: :desc }
}.freeze
# Relation types that are enforced in this list

View File

@ -10,7 +10,7 @@ module Gitlab
@cursor_based_request_context = cursor_based_request_context
end
def paginate(relation)
def paginate(relation, _params = {})
@paginator ||= relation.keyset_paginate(
per_page: cursor_based_request_context.per_page,
cursor: cursor_based_request_context.cursor

View File

@ -10,7 +10,7 @@ module Gitlab
@request = request
end
def paginate(relation)
def paginate(relation, _params = {})
# Validate assumption: The last two columns must match the page order_by
validate_order!(relation)

View File

@ -251,6 +251,26 @@ module Gitlab
extend self
extend Packages
def bulk_import_namespace_path_regex
# This regexp validates the string conforms to rules for a namespace path:
# i.e. does not start with a non-alphanumeric character except for periods or underscores,
# contains only alphanumeric characters, forward slashes, periods, underscores, and dashes,
# does not end with a period or forward slash, and has a relative path structure
# with no http protocol chars or leading or trailing forward slashes
# e.g. 'source/full/path' or 'destination_namespace' not 'https://example.com/source/full/path'
@bulk_import_namespace_path_regex ||= %r/^([.]?)[^\W](\/?[.]?[0-9a-z][-_]*)+$/i
end
def group_path_regex
# This regexp validates the string conforms to rules for a group slug:
# i.e. does not start with a non-alphanumeric character except for periods or underscores,
# contains only alphanumeric characters, periods, underscores, and dashes,
# does not end with a period or forward slash, and does not contain forward slashes
# or http protocol chars
# e.g. 'destination_namespace' or 'gitlab-migration-test', not 'destination/namespace'
@group_path_regex ||= %r/^[.]?[^\W]([.]?[0-9a-z][-_]*)+$/i
end
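# Illustrative behavior, mirroring the specs added below (not part of the change itself):
#
#   Gitlab::Regex.bulk_import_namespace_path_regex.match?('source/full/path') # => true
#   Gitlab::Regex.group_path_regex.match?('source/full/path')                 # => false
#   Gitlab::Regex.group_path_regex.match?('domain_namespace')                 # => true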
def project_name_regex
# The character range \p{Alnum} overlaps with \u{00A9}-\u{1f9ff}
# hence the Ruby warning.

View File

@ -32002,6 +32002,9 @@ msgstr ""
msgid "ProductAnalytics|Resulting Data"
msgstr ""
msgid "ProductAnalytics|Single Statistic"
msgstr ""
msgid "ProductAnalytics|There is no data for this type of chart currently. Please see the Setup tab if you have not configured the product analytics tool already."
msgstr ""

View File

@ -170,7 +170,8 @@ RSpec.describe 'Database schema' do
let(:ignored_columns) { ignored_fk_columns(table) }
it 'do have the foreign keys' do
expect(column_names_with_id - ignored_columns).to match_array(foreign_keys_columns)
foreign_keys_columns_with_id = foreign_keys_columns.select { |column_name| column_name.ends_with?('_id') }
expect(column_names_with_id - ignored_columns).to include(*foreign_keys_columns_with_id)
end
it 'and having foreign key are not in the ignore list' do

View File

@ -43,6 +43,14 @@ RSpec.describe API::Helpers::PaginationStrategies do
expect(result).to eq(return_value)
end
context "with paginator_params" do
it 'correctly passes multiple parameters' do
expect(paginator).to receive(:paginate).with(relation, parameter_one: true, parameter_two: 'two')
subject.paginate_with_strategies(relation, nil, paginator_params: { parameter_one: true, parameter_two: 'two' })
end
end
end
describe '#paginator' do

View File

@ -90,7 +90,7 @@ RSpec.describe Atlassian::JiraConnect::Jwt::Asymmetric, feature_category: :integ
it { is_expected.not_to be_valid }
end
context 'with jira_connect_proxy_url setting' do
context 'with jira_connect_proxy_url setting', :aggregate_failures do
let(:stub_asymmetric_jwt_cdn) { 'https://example.com/-/jira_connect/public_keys' }
let(:jira_connect_proxy_url_setting) { 'https://example.com' }
@ -101,6 +101,19 @@ RSpec.describe Atlassian::JiraConnect::Jwt::Asymmetric, feature_category: :integ
expect(WebMock).to have_requested(:get, "https://example.com/-/jira_connect/public_keys/#{public_key_id}")
end
context 'when the setting is an empty string', :aggregate_failures do
let(:jira_connect_proxy_url_setting) { '' }
let(:stub_asymmetric_jwt_cdn) { 'https://connect-install-keys.atlassian.com' }
it 'requests the default CDN' do
expect(JWT).to receive(:decode).twice.and_call_original
expect(asymmetric_jwt).to be_valid
expect(WebMock).to have_requested(:get, install_keys_url)
end
end
end
end

View File

@ -10,6 +10,10 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
expect(subject.available_for_type?(Group.all)).to be_truthy
end
it 'returns true for Ci::Build' do
expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
end
it 'return false for other types of relations' do
expect(subject.available_for_type?(User.all)).to be_falsey
end
@ -29,6 +33,12 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
it { is_expected.to be false }
end
context 'when relation is Ci::Build' do
let(:relation) { Ci::Build.all }
it { is_expected.to be false }
end
end
describe '.available?' do
@ -45,6 +55,20 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
it 'return false for other types of relations' do
expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey
expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_falsey
end
end
context 'with order-by id desc' do
let(:order_by) { :id }
let(:sort) { :desc }
it 'returns true for Ci::Build' do
expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy
end
it 'returns true for AuditEvent' do
expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
end
end

View File

@ -7,7 +7,7 @@ require_relative '../../support/shared_examples/lib/gitlab/regex_shared_examples
# All specs that can be run with fast_spec_helper only
# See regex_requires_app_spec for tests that require the full spec_helper
RSpec.describe Gitlab::Regex do
RSpec.describe Gitlab::Regex, feature_category: :tooling do
shared_examples_for 'project/group name chars regex' do
it { is_expected.to match('gitlab-ce') }
it { is_expected.to match('GitLab CE') }
@ -72,6 +72,59 @@ RSpec.describe Gitlab::Regex do
it { is_expected.to eq("can contain only letters, digits, emojis, '_', '.', dash, space, parenthesis. It must start with letter, digit, emoji or '_'.") }
end
describe '.bulk_import_namespace_path_regex' do
subject { described_class.bulk_import_namespace_path_regex }
it { is_expected.not_to match('?gitlab') }
it { is_expected.not_to match("Users's something") }
it { is_expected.not_to match('/source') }
it { is_expected.not_to match('http:') }
it { is_expected.not_to match('https:') }
it { is_expected.not_to match('example.com/?stuff=true') }
it { is_expected.not_to match('example.com:5000/?stuff=true') }
it { is_expected.not_to match('http://gitlab.example/gitlab-org/manage/import/gitlab-migration-test') }
it { is_expected.not_to match('_good_for_me!') }
it { is_expected.not_to match('good_for+you') }
it { is_expected.not_to match('source/') }
it { is_expected.not_to match('.source/full./path') }
it { is_expected.to match('source') }
it { is_expected.to match('.source') }
it { is_expected.to match('_source') }
it { is_expected.to match('source/full') }
it { is_expected.to match('source/full/path') }
it { is_expected.to match('.source/.full/.path') }
it { is_expected.to match('domain_namespace') }
it { is_expected.to match('gitlab-migration-test') }
end
describe '.group_path_regex' do
subject { described_class.group_path_regex }
it { is_expected.not_to match('?gitlab') }
it { is_expected.not_to match("Users's something") }
it { is_expected.not_to match('/source') }
it { is_expected.not_to match('http:') }
it { is_expected.not_to match('https:') }
it { is_expected.not_to match('example.com/?stuff=true') }
it { is_expected.not_to match('example.com:5000/?stuff=true') }
it { is_expected.not_to match('http://gitlab.example/gitlab-org/manage/import/gitlab-migration-test') }
it { is_expected.not_to match('_good_for_me!') }
it { is_expected.not_to match('good_for+you') }
it { is_expected.not_to match('source/') }
it { is_expected.not_to match('.source/full./path') }
it { is_expected.not_to match('source/full') }
it { is_expected.not_to match('source/full/path') }
it { is_expected.not_to match('.source/.full/.path') }
it { is_expected.to match('source') }
it { is_expected.to match('.source') }
it { is_expected.to match('_source') }
it { is_expected.to match('domain_namespace') }
it { is_expected.to match('gitlab-migration-test') }
end
describe '.environment_name_regex' do
subject { described_class.environment_name_regex }

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe CleanupOAuthAccessTokensWithNullExpiresIn, feature_category: :authentication_and_authorization do
let(:batched_migration) { described_class::MIGRATION }
it 'schedules background jobs for each batch of oauth_access_tokens' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :oauth_access_tokens,
column_name: :id,
interval: described_class::INTERVAL
)
}
end
end
end

View File

@ -174,6 +174,43 @@ RSpec.describe API::BulkImports, feature_category: :importers do
end
end
context 'when the source_full_path is invalid' do
it 'returns invalid error' do
params[:entities][0][:source_full_path] = 'http://example.com/full_path'
request
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq("entities[0][source_full_path] must be a relative path and not include protocol, sub-domain, " \
"or domain information. E.g. 'source/full/path' not 'https://example.com/source/full/path'")
end
end
context 'when the destination_namespace is invalid' do
it 'returns invalid error' do
params[:entities][0][:destination_namespace] = "?not a destination-namespace"
request
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq("entities[0][destination_namespace] cannot start with a dash or forward slash, " \
"or end with a period or forward slash. It can only contain alphanumeric " \
"characters, periods, underscores, forward slashes and dashes. " \
"E.g. 'destination_namespace' or 'destination/namespace'")
end
end
context 'when the destination_slug is invalid' do
it 'returns invalid error' do
params[:entities][0][:destination_slug] = 'des?tin?atoi-slugg'
request
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to include("entities[0][destination_slug] cannot start with a dash " \
"or forward slash, or end with a period or forward slash. " \
"It can only contain alphanumeric characters, periods, underscores, and dashes. " \
"E.g. 'destination_namespace' not 'destination/namespace'")
end
end
context 'when provided url is blocked' do
let(:params) do
{

View File

@ -487,6 +487,76 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
end
end
describe 'GET /projects/:id/jobs offset pagination' do
before do
running_job
end
it 'returns one record for the first page' do
get api("/projects/#{project.id}/jobs", api_user), params: { per_page: 1 }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(1)
expect(json_response.first['id']).to eq(running_job.id)
end
it 'returns second record when passed in offset and per_page params' do
get api("/projects/#{project.id}/jobs", api_user), params: { page: 2, per_page: 1 }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(1)
expect(json_response.first['id']).to eq(job.id)
end
end
describe 'GET /projects/:id/jobs keyset pagination' do
before do
running_job
end
it 'returns first page with cursor to next page' do
get api("/projects/#{project.id}/jobs", api_user), params: { pagination: 'keyset', per_page: 1 }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(1)
expect(json_response.first['id']).to eq(running_job.id)
expect(response.headers["Link"]).to include("cursor")
next_cursor = response.headers["Link"].match("(?<cursor_data>cursor=.*?)&")["cursor_data"]
get api("/projects/#{project.id}/jobs", api_user), params: { pagination: 'keyset', per_page: 1 }.merge(Rack::Utils.parse_query(next_cursor))
expect(response).to have_gitlab_http_status(:ok)
json_response = Gitlab::Json.parse(response.body)
expect(json_response.size).to eq(1)
expect(json_response.first['id']).to eq(job.id)
expect(response.headers).not_to include("Link")
end
it 'respects scope filters' do
get api("/projects/#{project.id}/jobs", api_user), params: { pagination: 'keyset', scope: ['success'] }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(1)
expect(json_response.first['id']).to eq(job.id)
expect(response.headers).not_to include("Link")
end
context 'with :jobs_api_keyset_pagination disabled' do
before do
stub_feature_flags(jobs_api_keyset_pagination: false)
end
it 'defaults to offset pagination' do
get api("/projects/#{project.id}/jobs", api_user), params: { pagination: 'keyset', per_page: 1 }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.size).to eq(1)
expect(json_response.first['id']).to eq(running_job.id)
expect(response.headers["Link"]).not_to include("cursor")
end
end
end
describe 'GET /projects/:id/jobs rate limited' do
let(:query) { {} }