Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent 286bddcf3c
commit ef144889c1
@@ -44,6 +44,7 @@ ignore: |
 #### Folders ####
 node_modules/
 tmp/
+generator_templates/gitlab_internal_events/
 
 # In CI some YAML files are linted using different rules.
 # See `.gitlab/ci/yaml.gitlab-ci.yml`.
@@ -1 +1 @@
-e66b5c2f3d56234280470d45f769779619553280
+58851fe18b0291ed7b11c8821c7ae548c2a96337
@@ -390,6 +390,7 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
 
   validates :container_registry_delete_tags_service_timeout,
     :container_registry_cleanup_tags_service_max_list_size,
+    :container_registry_data_repair_detail_worker_max_concurrency,
     :container_registry_expiration_policies_worker_capacity,
     numericality: { only_integer: true, greater_than_or_equal_to: 0 }
@@ -14,7 +14,6 @@ module ContainerRegistry
     worker_resource_boundary :unknown
     idempotent!
 
-    MAX_CAPACITY = 2
     LEASE_TIMEOUT = 1.hour.to_i
 
     def perform_work
@@ -60,11 +59,15 @@ module ContainerRegistry
     end
 
     def max_running_jobs
-      MAX_CAPACITY
+      current_application_settings.container_registry_data_repair_detail_worker_max_concurrency.to_i
    end
 
     private
 
+    def current_application_settings
+      ::Gitlab::CurrentSettings.current_application_settings
+    end
+
     def next_project
       Project.pending_data_repair_analysis.first
     end
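A minimal sketch (not part of the diff) of how the change above is meant to be used: the repair-detail worker caps its parallelism with `max_running_jobs`, which now reads the `container_registry_data_repair_detail_worker_max_concurrency` application setting (default 2, validated as an integer >= 0) instead of the hard-coded `MAX_CAPACITY`.

```ruby
# Illustration only: raising the cap from a Rails console, assuming the
# migration further down has added the column.
ApplicationSetting.current.update!(
  container_registry_data_repair_detail_worker_max_concurrency: 4
)

# The worker then picks the new value up on its next run:
Gitlab::CurrentSettings.current_application_settings
  .container_registry_data_repair_detail_worker_max_concurrency.to_i
# => 4
```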
@@ -81,7 +81,7 @@ module WikiCloth
       blahtex_png_path = @options[:blahtex_png_path] || '/tmp'
       blahtex_options = @options[:blahtex_options] || '--texvc-compatible-commands --mathml-version-1-fonts --disallow-plane-1 --spacing strict'
 
-      if File.exists?(blahtex_path) && @options[:math_formatter] != :google
+      if File.exist?(blahtex_path) && @options[:math_formatter] != :google
         begin
           # pass tex markup to blahtex
           response = IO.popen("#{blahtex_path} #{blahtex_options} --png --mathml --png-directory #{blahtex_png_path}","w+") do |pipe|
@@ -9,7 +9,7 @@ class Gitlab::Seeder::SnippetRepository
   end
 
   def import
-    if File.exists?(BUNDLE_PATH)
+    if File.exist?(BUNDLE_PATH)
       @snippet.repository.create_from_bundle(BUNDLE_PATH)
     else
       @snippet.repository.import_repository(SNIPPET_REPO_URL)
@@ -18,7 +18,7 @@ class Gitlab::Seeder::SnippetRepository
   end
 
   def self.cleanup
-    File.delete(BUNDLE_PATH) if File.exists?(BUNDLE_PATH)
+    File.delete(BUNDLE_PATH) if File.exist?(BUNDLE_PATH)
   rescue => e
     warn "\nError cleaning up snippet bundle: #{e}"
   end
@@ -0,0 +1,26 @@
# frozen_string_literal: true

class AddContainerRegistryDataRepairWorkerMaxConcurrencyToApplicationSettings < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  CONSTRAINT_NAME = 'app_settings_registry_repair_worker_max_concurrency_positive'

  def up
    unless column_exists?(:application_settings, :container_registry_data_repair_detail_worker_max_concurrency)
      add_column :application_settings, :container_registry_data_repair_detail_worker_max_concurrency, :integer,
        default: 2, null: false
    end

    add_check_constraint :application_settings,
      'container_registry_data_repair_detail_worker_max_concurrency >= 0',
      CONSTRAINT_NAME
  end

  def down
    return unless column_exists?(:application_settings, :container_registry_data_repair_detail_worker_max_concurrency)

    remove_check_constraint :application_settings, CONSTRAINT_NAME

    remove_column :application_settings, :container_registry_data_repair_detail_worker_max_concurrency
  end
end
@@ -1,83 +1,7 @@
 # frozen_string_literal: true
 
 class SwapMergeRequestUserMentionsNoteIdToBigint < Gitlab::Database::Migration[2.1]
-  include Gitlab::Database::MigrationHelpers::ConvertToBigint
-
-  disable_ddl_transaction!
-
-  TABLE_NAME = 'merge_request_user_mentions'
-
-  def up
-    return unless should_run?
-
-    swap
-  end
-
-  def down
-    return unless should_run?
-
-    swap
-
-    add_concurrent_index TABLE_NAME, :note_id_convert_to_bigint, unique: true,
-      name: 'index_merge_request_user_mentions_note_id_convert_to_bigint',
-      where: 'note_id_convert_to_bigint IS NOT NULL'
-
-    add_concurrent_foreign_key TABLE_NAME, :notes, column: :note_id_convert_to_bigint,
-      name: 'fk_merge_request_user_mentions_note_id_convert_to_bigint',
-      on_delete: :cascade,
-      validate: false
-  end
-
-  def swap
-    # This will replace the existing index_merge_request_user_mentions_on_note_id
-    add_concurrent_index TABLE_NAME, :note_id_convert_to_bigint, unique: true,
-      name: 'index_merge_request_user_mentions_note_id_convert_to_bigint',
-      where: 'note_id_convert_to_bigint IS NOT NULL'
-
-    # This will replace the existing merge_request_user_mentions_on_mr_id_and_note_id_index
-    add_concurrent_index TABLE_NAME, [:merge_request_id, :note_id_convert_to_bigint], unique: true,
-      name: 'mr_user_mentions_on_mr_id_and_note_id_convert_to_bigint_index'
-
-    # This will replace the existing merge_request_user_mentions_on_mr_id_index
-    add_concurrent_index TABLE_NAME, :merge_request_id, unique: true,
-      name: 'merge_request_user_mentions_on_mr_id_index_convert_to_bigint',
-      where: 'note_id_convert_to_bigint IS NULL'
-
-    # This will replace the existing fk_rails_c440b9ea31
-    add_concurrent_foreign_key TABLE_NAME, :notes, column: :note_id_convert_to_bigint,
-      name: 'fk_merge_request_user_mentions_note_id_convert_to_bigint',
-      on_delete: :cascade
-
-    with_lock_retries(raise_on_exhaustion: true) do
-      execute "LOCK TABLE notes, #{TABLE_NAME} IN ACCESS EXCLUSIVE MODE"
-
-      execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN note_id TO note_id_tmp"
-      execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN note_id_convert_to_bigint TO note_id"
-      execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN note_id_tmp TO note_id_convert_to_bigint"
-
-      function_name = Gitlab::Database::UnidirectionalCopyTrigger
-        .on_table(TABLE_NAME, connection: connection)
-        .name(:note_id, :note_id_convert_to_bigint)
-      execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
-
-      execute 'DROP INDEX IF EXISTS index_merge_request_user_mentions_on_note_id'
-      rename_index TABLE_NAME, 'index_merge_request_user_mentions_note_id_convert_to_bigint',
-        'index_merge_request_user_mentions_on_note_id'
-
-      execute 'DROP INDEX IF EXISTS merge_request_user_mentions_on_mr_id_and_note_id_index'
-      rename_index TABLE_NAME, 'mr_user_mentions_on_mr_id_and_note_id_convert_to_bigint_index',
-        'merge_request_user_mentions_on_mr_id_and_note_id_index'
-
-      execute 'DROP INDEX IF EXISTS merge_request_user_mentions_on_mr_id_index'
-      rename_index TABLE_NAME, 'merge_request_user_mentions_on_mr_id_index_convert_to_bigint',
-        'merge_request_user_mentions_on_mr_id_index'
-
-      execute "ALTER TABLE #{TABLE_NAME} DROP CONSTRAINT IF EXISTS fk_rails_c440b9ea31"
-      rename_constraint(TABLE_NAME, 'fk_merge_request_user_mentions_note_id_convert_to_bigint', 'fk_rails_c440b9ea31')
-    end
-  end
-
-  def should_run?
-    com_or_dev_or_test_but_not_jh?
-  end
+  # No-op, moved to db/post_migrate/20230310020356_swap_merge_request_user_mentions_note_id_to_bigint_2.rb
+  def up; end
+  def down; end
 end
@@ -0,0 +1,93 @@
# frozen_string_literal: true

class SwapMergeRequestUserMentionsNoteIdToBigint2 < Gitlab::Database::Migration[2.1]
  include Gitlab::Database::MigrationHelpers::ConvertToBigint

  disable_ddl_transaction!

  TABLE_NAME = 'merge_request_user_mentions'

  def up
    return unless should_run?
    return if columns_already_swapped?

    swap
  end

  def down
    return unless should_run?
    return unless columns_already_swapped?

    swap

    add_concurrent_index TABLE_NAME, :note_id_convert_to_bigint, unique: true,
      name: 'index_merge_request_user_mentions_note_id_convert_to_bigint',
      where: 'note_id_convert_to_bigint IS NOT NULL'

    add_concurrent_foreign_key TABLE_NAME, :notes, column: :note_id_convert_to_bigint,
      name: 'fk_merge_request_user_mentions_note_id_convert_to_bigint',
      on_delete: :cascade,
      validate: false
  end

  def swap
    # This will replace the existing index_merge_request_user_mentions_on_note_id
    add_concurrent_index TABLE_NAME, :note_id_convert_to_bigint, unique: true,
      name: 'index_merge_request_user_mentions_note_id_convert_to_bigint',
      where: 'note_id_convert_to_bigint IS NOT NULL'

    # This will replace the existing merge_request_user_mentions_on_mr_id_and_note_id_index
    add_concurrent_index TABLE_NAME, [:merge_request_id, :note_id_convert_to_bigint], unique: true,
      name: 'mr_user_mentions_on_mr_id_and_note_id_convert_to_bigint_index'

    # This will replace the existing merge_request_user_mentions_on_mr_id_index
    add_concurrent_index TABLE_NAME, :merge_request_id, unique: true,
      name: 'merge_request_user_mentions_on_mr_id_index_convert_to_bigint',
      where: 'note_id_convert_to_bigint IS NULL'

    # This will replace the existing fk_rails_c440b9ea31
    add_concurrent_foreign_key TABLE_NAME, :notes, column: :note_id_convert_to_bigint,
      name: 'fk_merge_request_user_mentions_note_id_convert_to_bigint',
      on_delete: :cascade

    with_lock_retries(raise_on_exhaustion: true) do
      execute "LOCK TABLE notes, #{TABLE_NAME} IN ACCESS EXCLUSIVE MODE"

      execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN note_id TO note_id_tmp"
      execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN note_id_convert_to_bigint TO note_id"
      execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN note_id_tmp TO note_id_convert_to_bigint"

      function_name = Gitlab::Database::UnidirectionalCopyTrigger
        .on_table(TABLE_NAME, connection: connection)
        .name(:note_id, :note_id_convert_to_bigint)
      execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"

      execute 'DROP INDEX IF EXISTS index_merge_request_user_mentions_on_note_id'
      rename_index TABLE_NAME, 'index_merge_request_user_mentions_note_id_convert_to_bigint',
        'index_merge_request_user_mentions_on_note_id'

      execute 'DROP INDEX IF EXISTS merge_request_user_mentions_on_mr_id_and_note_id_index'
      rename_index TABLE_NAME, 'mr_user_mentions_on_mr_id_and_note_id_convert_to_bigint_index',
        'merge_request_user_mentions_on_mr_id_and_note_id_index'

      execute 'DROP INDEX IF EXISTS merge_request_user_mentions_on_mr_id_index'
      rename_index TABLE_NAME, 'merge_request_user_mentions_on_mr_id_index_convert_to_bigint',
        'merge_request_user_mentions_on_mr_id_index'

      execute "ALTER TABLE #{TABLE_NAME} DROP CONSTRAINT IF EXISTS fk_rails_c440b9ea31"
      rename_constraint(TABLE_NAME, 'fk_merge_request_user_mentions_note_id_convert_to_bigint', 'fk_rails_c440b9ea31')
    end
  end

  def should_run?
    com_or_dev_or_test_but_not_jh?
  end

  def columns_already_swapped?
    table_columns = columns(TABLE_NAME)
    note_id = table_columns.find { |c| c.name == 'note_id' }
    note_id_convert_to_bigint = table_columns.find { |c| c.name == 'note_id_convert_to_bigint' }

    note_id_convert_to_bigint.sql_type == 'integer' && note_id.sql_type == 'bigint'
  end
end
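The `columns_already_swapped?` guard makes a re-run of this migration a no-op once the integer and bigint columns have traded places. A rough illustration of what that check observes, assuming a connection to the GitLab database:

```ruby
# Illustration only: after a successful swap, the live column types are
# reversed relative to the pre-swap state.
cols = ActiveRecord::Base.connection.columns('merge_request_user_mentions')
cols.find { |c| c.name == 'note_id' }.sql_type                   # => "bigint"
cols.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type # => "integer"
```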
@@ -9,14 +9,14 @@ class SwapIssueUserMentionsNoteIdToBigintForGitlabDotCom2 < Gitlab::Database::Mi
 
   def up
     return unless should_run?
-    return if columns_alredy_swapped?
+    return if columns_already_swapped?
 
     swap
   end
 
   def down
     return unless should_run?
-    return unless columns_alredy_swapped?
+    return unless columns_already_swapped?
 
     swap
 
@@ -83,7 +83,7 @@ class SwapIssueUserMentionsNoteIdToBigintForGitlabDotCom2 < Gitlab::Database::Mi
     com_or_dev_or_test_but_not_jh?
   end
 
-  def columns_alredy_swapped?
+  def columns_already_swapped?
     table_columns = columns(TABLE_NAME)
     note_id = table_columns.find { |c| c.name == 'note_id' }
     note_id_convert_to_bigint = table_columns.find { |c| c.name == 'note_id_convert_to_bigint' }
@@ -0,0 +1,51 @@
# frozen_string_literal: true

class CleanupNotesBigintPreparation < Gitlab::Database::Migration[2.1]
  include Gitlab::Database::MigrationHelpers::ConvertToBigint

  disable_ddl_transaction!

  def up
    return unless should_run?

    remove_concurrent_index_by_name(
      :merge_request_user_mentions,
      :index_merge_request_user_mentions_note_id_convert_to_bigint,
      if_exists: true
    )

    remove_concurrent_index_by_name(
      :issue_user_mentions,
      :index_issue_user_mentions_on_note_id_convert_to_bigint,
      if_exists: true
    )

    with_lock_retries do
      remove_foreign_key_if_exists(
        :issue_user_mentions,
        :notes,
        name: :fk_issue_user_mentions_note_id_convert_to_bigint,
        reverse_lock_order: true
      )
    end

    with_lock_retries do
      remove_foreign_key_if_exists(
        :merge_request_user_mentions,
        :notes,
        name: :fk_merge_request_user_mentions_note_id_convert_to_bigint,
        reverse_lock_order: true
      )
    end
  end

  def down
    # No-op
  end

  private

  def should_run?
    com_or_dev_or_test_but_not_jh?
  end
end
@@ -0,0 +1,15 @@
# frozen_string_literal: true

class RemoveProjectCiCdSettingOptInJwtColumn < Gitlab::Database::Migration[2.1]
  enable_lock_retries!

  def up
    remove_column(:project_ci_cd_settings, :opt_in_jwt)
  end

  def down
    # rubocop:disable Migration/SchemaAdditionMethodsNoPost
    add_column(:project_ci_cd_settings, :opt_in_jwt, :boolean, default: false, null: false, if_not_exists: true)
    # rubocop:enable Migration/SchemaAdditionMethodsNoPost
  end
end
@@ -0,0 +1,15 @@
# frozen_string_literal: true

class AddIndexWorkItemTypesOnBaseTypeId < Gitlab::Database::Migration[2.1]
  INDEX_NAME = 'index_work_item_types_on_base_type_and_id'

  disable_ddl_transaction!

  def up
    add_concurrent_index :work_item_types, [:base_type, :id], name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :work_item_types, INDEX_NAME
  end
end
@@ -0,0 +1 @@
0b6ca48a68c9695f0035d2dd111707d3abaee7f260549a694250527c1f6f5b9e

@@ -0,0 +1 @@
ee83bf5d8c283d1d24f4f7300abd0bb96e51ecfef480cab3a74875ebecf5cbcd

@@ -0,0 +1 @@
ccdbefaed7b3e32082ac2c4c49af1fcdf6d9c317739e6e67cc41156ed92404cf

@@ -0,0 +1 @@
1379d0f878297bd4815cbb2d58bc0187b9bb3609bdeb87ab20983682f1abfa55

@@ -0,0 +1 @@
004443f655b7fd50d161cd529839925b446a9aba6f62d58d710379b91ed72f1d
@@ -11841,6 +11841,7 @@ CREATE TABLE application_settings (
     vertex_project text,
     wiki_asciidoc_allow_uri_includes boolean DEFAULT false NOT NULL,
     namespace_aggregation_schedule_lease_duration_in_seconds integer DEFAULT 300 NOT NULL,
+    container_registry_data_repair_detail_worker_max_concurrency integer DEFAULT 2 NOT NULL,
     CONSTRAINT app_settings_container_reg_cleanup_tags_max_list_size_positive CHECK ((container_registry_cleanup_tags_service_max_list_size >= 0)),
     CONSTRAINT app_settings_container_registry_pre_import_tags_rate_positive CHECK ((container_registry_pre_import_tags_rate >= (0)::numeric)),
     CONSTRAINT app_settings_dep_proxy_ttl_policies_worker_capacity_positive CHECK ((dependency_proxy_ttl_group_policy_worker_capacity >= 0)),

@@ -11852,6 +11853,7 @@ CREATE TABLE application_settings (
     CONSTRAINT app_settings_p_cleanup_package_file_worker_capacity_positive CHECK ((packages_cleanup_package_file_worker_capacity >= 0)),
     CONSTRAINT app_settings_pkg_registry_cleanup_pol_worker_capacity_gte_zero CHECK ((package_registry_cleanup_policies_worker_capacity >= 0)),
     CONSTRAINT app_settings_registry_exp_policies_worker_capacity_positive CHECK ((container_registry_expiration_policies_worker_capacity >= 0)),
+    CONSTRAINT app_settings_registry_repair_worker_max_concurrency_positive CHECK ((container_registry_data_repair_detail_worker_max_concurrency >= 0)),
     CONSTRAINT app_settings_yaml_max_depth_positive CHECK ((max_yaml_depth > 0)),
     CONSTRAINT app_settings_yaml_max_size_positive CHECK ((max_yaml_size_bytes > 0)),
     CONSTRAINT check_17d9558205 CHECK ((char_length((kroki_url)::text) <= 1024)),

@@ -20723,7 +20725,6 @@ CREATE TABLE project_ci_cd_settings (
     job_token_scope_enabled boolean DEFAULT false NOT NULL,
     runner_token_expiration_interval integer,
     separated_caches boolean DEFAULT true NOT NULL,
-    opt_in_jwt boolean DEFAULT false NOT NULL,
     allow_fork_pipelines_to_run_in_parent_project boolean DEFAULT true NOT NULL,
     inbound_job_token_scope_enabled boolean DEFAULT true NOT NULL
 );

@@ -33040,6 +33041,8 @@ CREATE UNIQUE INDEX index_work_item_parent_links_on_work_item_id ON work_item_pa
 
 CREATE INDEX index_work_item_parent_links_on_work_item_parent_id ON work_item_parent_links USING btree (work_item_parent_id);
 
+CREATE INDEX index_work_item_types_on_base_type_and_id ON work_item_types USING btree (base_type, id);
+
 CREATE UNIQUE INDEX index_work_item_widget_definitions_on_default_witype_and_name ON work_item_widget_definitions USING btree (work_item_type_id, name) WHERE (namespace_id IS NULL);
 
 CREATE UNIQUE INDEX index_work_item_widget_definitions_on_namespace_type_and_name ON work_item_widget_definitions USING btree (namespace_id, work_item_type_id, name);
@@ -47,6 +47,10 @@ If one or more of your sites is using the [2K reference architecture](../../refe
 
 [Configure the GitLab chart with GitLab Geo](https://docs.gitlab.com/charts/advanced/geo/).
 
+## Geo and self-compiled installations
+
+Geo is not supported when you use a [self-compiled GitLab installation](../../../install/installation.md).
+
 ## Post-installation documentation
 
 After installing GitLab on the **secondary** sites and performing the initial configuration, see the [following documentation for post-installation information](../index.md#post-installation-documentation).
@@ -37,7 +37,7 @@ Prorated charges are not possible without a quarterly usage report.
You can view users for your license and determine if you've gone over your subscription.

1. On the top bar, select **Main menu > Admin**.
1. On the left menu, select **Subscription**.
1. On the left menu, select **Users**.

The lists of users are displayed.
@@ -438,6 +438,12 @@ See [non-configurable limits](../../security/rate_limits.md#non-configurable-lim
 for information on rate limits that are not configurable, and therefore also
 used on GitLab.com.
 
+## GitLab.com-specific Gitaly RPC concurrency limits
+
+Per-repository Gitaly RPC concurrency and queuing limits are configured for different types of Git operations such as `git clone`. When these limits are exceeded, a `fatal: remote error: GitLab is currently unable to handle this request due to load` message is returned to the client.
+
+For administrator documentation, see [limit RPC concurrency](../../administration/gitaly/configure_gitaly.md#limit-rpc-concurrency).
+
 ## GitLab.com logging
 
 We use [Fluentd](https://gitlab.com/gitlab-com/runbooks/tree/master/logging/doc#fluentd)
@@ -0,0 +1,18 @@
---
key_path: <%= args.second %>
name: <%= args.second %>
description: <%= args.last %>
product_section: <%= options.fetch(:section) %>
product_stage: <%= options.fetch(:stage) %>
product_group: <%= options.fetch(:group) %>
performance_indicator_type: []
value_type: number
status: active
milestone: "<%= milestone %>"
introduced_by_url: <%= options.fetch(:mr) %>
time_frame: <%= args.third %>
data_source: redis_hll
data_category: optional
instrumentation_class: <%= class_name %>
<%= distribution %>
<%= tier %>
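The `key_path:` and `name:` placeholders above are filled from the generator's `key_path(time_frame)` helper, defined in the generator class that follows. As a rough sketch of the string it produces, using the sample option values from the spec further down:

```ruby
# Illustration only, showing how the key path is composed.
unique_on  = 'user_id'
event      = 'view_analytics_dashboard'
time_frame = '7d'
"count_distinct_#{unique_on}_from_#{event}_#{time_frame}"
# => "count_distinct_user_id_from_view_analytics_dashboard_7d"
```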
@@ -0,0 +1,221 @@
# frozen_string_literal: true

require 'rails/generators'

module Gitlab
  module Analytics
    class InternalEventsGenerator < Rails::Generators::Base
      TIME_FRAME_DIRS = {
        '7d' => 'counts_7d',
        '28d' => 'counts_28d'
      }.freeze

      TIME_FRAMES_DEFAULT = TIME_FRAME_DIRS.keys.tap do |time_frame_defaults|
        time_frame_defaults.class_eval do
          def to_s
            join(", ")
          end
        end
      end.freeze

      ALLOWED_TIERS = %w[free premium ultimate].dup.tap do |tiers_default|
        tiers_default.class_eval do
          def to_s
            join(", ")
          end
        end
      end.freeze

      TOP_LEVEL_DIR = 'config'
      TOP_LEVEL_DIR_EE = 'ee'
      DESCRIPTION_MIN_LENGTH = 50
      KNOWN_EVENTS_PATH = 'lib/gitlab/usage_data_counters/known_events/common.yml'
      KNOWN_EVENTS_PATH_EE = 'ee/lib/ee/gitlab/usage_data_counters/known_events/common.yml'

      source_root File.expand_path('../../../../generator_templates/gitlab_internal_events', __dir__)

      desc 'Generates metric definitions yml files and known events entries'

      class_option :skip_namespace,
        hide: true
      class_option :skip_collision_check,
        hide: true
      class_option :time_frames,
        optional: true,
        default: TIME_FRAMES_DEFAULT,
        type: :array,
        banner: TIME_FRAMES_DEFAULT,
        desc: "Indicates the metrics time frames. Please select one or more from: #{TIME_FRAMES_DEFAULT}"
      class_option :tiers,
        optional: true,
        default: ALLOWED_TIERS,
        type: :array,
        banner: ALLOWED_TIERS,
        desc: "Indicates the metric's GitLab subscription tiers. Please select one or more from: #{ALLOWED_TIERS}"
      class_option :group,
        type: :string,
        optional: false,
        desc: 'Name of group that added this metric'
      class_option :stage,
        type: :string,
        optional: false,
        desc: 'Name of stage that added this metric'
      class_option :section,
        type: :string,
        optional: false,
        desc: 'Name of section that added this metric'
      class_option :mr,
        type: :string,
        optional: false,
        desc: 'Merge Request that adds this metric'
      class_option :event,
        type: :string,
        optional: false,
        desc: 'Name of the event that this metric counts'
      class_option :unique_on,
        type: :string,
        optional: false,
        desc: 'Name of the event property that this metric counts'

      def create_metric_file
        validate!

        time_frames.each do |time_frame|
          template "metric_definition.yml",
            file_path(time_frame),
            key_path(time_frame),
            time_frame,
            ask_description(time_frame)
        end

        # ToDo: Delete during https://gitlab.com/groups/gitlab-org/-/epics/9542 cleanup
        append_file known_events_file_name, known_event_entry
      end

      private

      def known_event_entry
        <<~YML
          - name: #{options[:event]}
            aggregation: weekly
        YML
      end

      def ask_description(time_frame)
        question = <<~DESC
          Please describe in at least #{DESCRIPTION_MIN_LENGTH} characters
          what #{key_path(time_frame)} metric represents,
          consider mentioning: events, and event attributes in the description.
          your answer will be processed to power a full-text search tool and help others find and reuse this metric.
        DESC

        say("")
        description = ask(question)

        while description.length < DESCRIPTION_MIN_LENGTH
          error_mgs = <<~ERROR
            Provided description is to short: #{description.length} of required #{DESCRIPTION_MIN_LENGTH} characters
          ERROR

          say(set_color(error_mgs), :red)

          description = ask("Please provide description that is #{DESCRIPTION_MIN_LENGTH} characters long.\n")
        end
        description
      end

      def distribution
        content = [
          free? ? "- ce" : nil,
          "- ee"
        ].compact.join("\n")

        "distribution:\n#{content}"
      end

      def tier
        "tier:\n- #{options[:tiers].join("\n- ")}"
      end

      def milestone
        Gitlab::VERSION.match('(\d+\.\d+)').captures.first
      end

      def class_name
        'RedisHLLMetric'
      end

      def key_path(time_frame)
        "count_distinct_#{options[:unique_on]}_from_#{options[:event]}_#{time_frame}"
      end

      def file_path(time_frame)
        path = File.join(TOP_LEVEL_DIR, 'metrics', TIME_FRAME_DIRS[time_frame], "#{key_path(time_frame)}.yml")
        path = File.join(TOP_LEVEL_DIR_EE, path) unless free?
        path
      end

      def known_events_file_name
        (free? ? KNOWN_EVENTS_PATH : KNOWN_EVENTS_PATH_EE)
      end

      def validate!
        raise "Required file: #{known_events_file_name} does not exists." unless File.exist?(known_events_file_name)

        validate_tiers!

        %i[unique_on event mr section stage group].each do |option|
          raise "The option: --#{option} is missing" unless options.key? option
        end

        time_frames.each do |time_frame|
          validate_time_frame!(time_frame)
          validate_key_path!(time_frame)
        end
      end

      def validate_time_frame!(time_frame)
        return if TIME_FRAME_DIRS.key?(time_frame)

        raise "Invalid time frame: #{time_frame}, allowed options are: #{TIME_FRAMES_DEFAULT}"
      end

      def validate_tiers!
        wrong_tiers = options[:tiers] - ALLOWED_TIERS
        unless wrong_tiers.empty?
          raise "Tiers option included not allowed values: #{wrong_tiers}. Only allowed values are: #{ALLOWED_TIERS}"
        end

        return unless options[:tiers].empty?

        raise "At least one tier must be present. Please set --tiers option"
      end

      def validate_key_path!(time_frame)
        return unless metric_definition_exists?(time_frame)

        raise "Metric definition with key path '#{key_path(time_frame)}' already exists"
      end

      def free?
        options[:tiers].include? "free"
      end

      def time_frames
        options[:time_frames]
      end

      def directory
        @directory ||= TIME_FRAME_DIRS.find { |d| d.match?(input_dir) }
      end

      def metric_definitions
        @definitions ||= Gitlab::Usage::MetricDefinition.definitions(skip_validation: true)
      end

      def metric_definition_exists?(time_frame)
        metric_definitions[key_path(time_frame)].present?
      end
    end
  end
end
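For context, a hypothetical programmatic invocation of this generator, mirroring how the spec further down drives it; option names and sample values are taken from that spec, and in practice the class would be run through the Rails generator CLI rather than instantiated directly.

```ruby
# Illustration only: drive the generator directly with string-keyed options,
# exactly as the spec below does via `described_class.new([], options).invoke_all`.
options = {
  'time_frames' => %w[7d 28d],
  'tiers'       => %w[free premium ultimate],
  'group'       => 'group::analytics instrumentation',
  'stage'       => 'analytics',
  'section'     => 'analytics',
  'mr'          => 'https://gitlab.com/some-group/some-project/-/merge_requests/123',
  'event'       => 'view_analytics_dashboard',
  'unique_on'   => 'user_id'
}

Gitlab::Analytics::InternalEventsGenerator.new([], options).invoke_all
```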
@@ -1,67 +1,75 @@
 #!/usr/bin/env bash
 set -o pipefail
 
+COLOR_RED="\e[31m"
+COLOR_GREEN="\e[32m"
+COLOR_RESET="\e[39m"
+
 cd "$(dirname "$0")/.." || exit 1
-echo "=> Linting documents at path $(pwd) as $(whoami)..."
-echo
+# shellcheck disable=2059
+printf "${COLOR_GREEN}INFO: Linting documents at path $(pwd) as $(whoami)...${COLOR_RESET}\n"
 ERRORCODE=0
 
 # Use long options (e.g. --header instead of -H) for curl examples in documentation.
-echo '=> Checking for cURL short options...'
-echo
-grep --extended-regexp --recursive --color=auto 'curl (.+ )?-[^- ].*' doc/ >/dev/null 2>&1
-if [ $? -eq 0 ]
+# shellcheck disable=2059
+printf "${COLOR_GREEN}INFO: Checking for cURL short options...${COLOR_RESET}\n"
+if grep --extended-regexp --recursive --color=auto 'curl (.+ )?-[^- ].*' doc/ >/dev/null 2>&1;
 then
-  echo '✖ ERROR: Short options for curl should not be used in documentation!
-  Use long options (e.g., --header instead of -H):' >&2
-  grep --extended-regexp --recursive --color=auto 'curl (.+ )?-[^- ].*' doc/
+  # shellcheck disable=2059
+  printf "${COLOR_RED}ERROR: Short options for curl should not be used in documentation!${COLOR_RESET}"
+  printf " Use long options (for example, --header instead of -H):\n" >&2
+  grep --extended-regexp --recursive --color=auto 'curl (.+ )?-[^- ].*' doc
   ((ERRORCODE++))
 fi
 
 # Documentation pages need front matter for tracking purposes.
-echo '=> Checking documentation for front matter...'
-echo
+# shellcheck disable=2059
+printf "${COLOR_GREEN}INFO: Checking documentation for front matter...${COLOR_RESET}\n"
 if ! scripts/lint-docs-metadata.sh
 then
-  echo '✖ ERROR: These documentation pages need front matter. See https://docs.gitlab.com/ee/development/documentation/index.html#stage-and-group-metadata for how to add it.' >&2
-  echo
+  # shellcheck disable=2059
+  printf "${COLOR_RED}ERROR: These documentation pages need front matter!${COLOR_RESET}"
+  printf " See https://docs.gitlab.com/ee/development/documentation/index.html#stage-and-group-metadata for how to add it.\n" >&2
  ((ERRORCODE++))
 fi
 
 # Test for non-standard spaces (NBSP, NNBSP, ZWSP) in documentation.
-echo '=> Checking for non-standard spaces...'
-echo
-grep --extended-regexp --binary-file=without-match --recursive '[ ]' doc/ >/dev/null 2>&1
-if [ $? -eq 0 ]
+# shellcheck disable=2059
+printf "${COLOR_GREEN}INFO: Checking for non-standard spaces...${COLOR_RESET}\n"
+if grep --extended-regexp --binary-file=without-match --recursive '[ ]' doc/ >/dev/null 2>&1;
 then
-  echo '✖ ERROR: Non-standard spaces (NBSP, NNBSP, ZWSP) should not be used in documentation.
-  https://docs.gitlab.com/ee/development/documentation/styleguide/index.html#spaces-between-words
-  Replace with standard spaces:' >&2
+  # shellcheck disable=2059
+  printf "${COLOR_RED}ERROR: Non-standard spaces (NBSP, NNBSP, ZWSP) should not be used in documentation!${COLOR_RESET}"
+  printf " https://docs.gitlab.com/ee/development/documentation/styleguide/index.html#spaces-between-words\n"
+  printf "Replace with standard spaces:\n" >&2
   # Find the spaces, then add color codes with sed to highlight each NBSP or NNBSP in the output.
   # shellcheck disable=SC1018
   grep --extended-regexp --binary-file=without-match --recursive --color=auto '[ ]' doc \
-  | sed -e ''/ /s//`printf "\033[0;101m \033[0m"`/'' -e ''/ /s//`printf "\033[0;101m \033[0m"`/''
+  | sed -e ''/ /s//"$(printf "\033[0;101m \033[0m")"/'' -e ''/ /s//"$(printf "\033[0;101m \033[0m")"/''
   ((ERRORCODE++))
 fi
 
 # Ensure that the CHANGELOG.md does not contain duplicate versions
 DUPLICATE_CHANGELOG_VERSIONS=$(grep --extended-regexp '^## .+' CHANGELOG.md | sed -E 's| \(.+\)||' | sort -r | uniq -d)
-echo '=> Checking for CHANGELOG.md duplicate entries...'
-echo
+# shellcheck disable=2059
+printf "${COLOR_GREEN}INFO: Checking for CHANGELOG.md duplicate entries...${COLOR_RESET}\n"
 if [ "${DUPLICATE_CHANGELOG_VERSIONS}" != "" ]
 then
-  echo '✖ ERROR: Duplicate versions in CHANGELOG.md:' >&2
+  # shellcheck disable=2059
+  printf "${COLOR_RED}ERROR: Duplicate versions in CHANGELOG.md:${COLOR_RESET}\n" >&2
   echo "${DUPLICATE_CHANGELOG_VERSIONS}" >&2
   ((ERRORCODE++))
 fi
 
 # Make sure no files in doc/ are executable
 EXEC_PERM_COUNT=$(find doc/ -type f -perm 755 | wc -l)
-echo "=> Checking $(pwd)/doc for executable permissions..."
-echo
+# shellcheck disable=2059
+printf "${COLOR_GREEN}INFO: Checking $(pwd)/doc for executable permissions...${COLOR_RESET}\n"
 if [ "${EXEC_PERM_COUNT}" -ne 0 ]
 then
-  echo '✖ ERROR: Executable permissions should not be used in documentation! Use `chmod 644` to the files in question:' >&2
-  find doc/ -type f -perm 755
+  # shellcheck disable=2059
+  printf "${COLOR_RED}ERROR: Executable permissions should not be used in documentation!${COLOR_RESET} Use 'chmod 644' on these files:\n" >&2
+  find doc -type f -perm 755
  ((ERRORCODE++))
 fi

@@ -69,15 +77,14 @@ fi
 # Number of 'README.md's as of 2021-08-17
 NUMBER_READMES=0
 FIND_READMES=$(find doc/ -name "README.md" | wc -l)
-echo '=> Checking for new README.md files...'
-echo
-if [ ${FIND_READMES} -ne $NUMBER_READMES ]
+# shellcheck disable=2059
+printf "${COLOR_GREEN}INFO: Checking for new README.md files...${COLOR_RESET}\n"
+if [ "${FIND_READMES}" -ne $NUMBER_READMES ]
 then
-  echo
-  echo ' ✖ ERROR: The number of README.md file(s) has changed. Use index.md instead of README.md.' >&2
-  echo ' ✖ If removing a README.md file, update NUMBER_READMES in lint-doc.sh.' >&2
-  echo ' https://docs.gitlab.com/ee/development/documentation/site_architecture/folder_structure.html#work-with-directories-and-files' >&2
-  echo
+  # shellcheck disable=2059
+  printf "${COLOR_RED}ERROR: The number of README.md files has changed!${COLOR_RESET} Use index.md instead of README.md.\n" >&2
+  printf "If removing a README.md file, update NUMBER_READMES in lint-doc.sh.\n" >&2
+  printf "https://docs.gitlab.com/ee/development/documentation/site_architecture/folder_structure.html#work-with-directories-and-files\n"
  ((ERRORCODE++))
 fi

@@ -85,42 +92,40 @@ fi
 # Number of directories with dashes as of 2021-09-17
 NUMBER_DASHES=2
 FIND_DASHES=$(find doc -type d -name "*-*" | wc -l)
-echo '=> Checking for directory names containing dashes...'
-echo
-if [ ${FIND_DASHES} -ne $NUMBER_DASHES ]
+# shellcheck disable=2059
+printf "${COLOR_GREEN}INFO: Checking for directory names containing dashes...${COLOR_RESET}\n"
+if [ "${FIND_DASHES}" -ne $NUMBER_DASHES ]
 then
-  echo
-  echo ' ✖ ERROR: The number of directory names containing dashes has changed. Use underscores instead of dashes for the directory names.' >&2
-  echo ' ✖ If removing a directory containing dashes, update NUMBER_DASHES in lint-doc.sh.' >&2
-  echo ' https://docs.gitlab.com/ee/development/documentation/site_architecture/folder_structure.html#work-with-directories-and-files' >&2
-  echo
-  ((ERRORCODE++))
+  # shellcheck disable=2059
+  printf "${COLOR_RED}ERROR: The number of directory names containing dashes has changed!${COLOR_RESET} Use underscores instead of dashes for the directory names.\n" >&2
+  printf "If removing a directory containing dashes, update NUMBER_DASHES in lint-doc.sh.\n" >&2
+  printf "https://docs.gitlab.com/ee/development/documentation/site_architecture/folder_structure.html#work-with-directories-and-files\n"
+  ((ERRORCODE++))
 fi
 
 # Do not use uppercase letters in directory and file names, use all lowercase instead.
 # (find always returns 0, so we use the grep hack https://serverfault.com/a/225827)
 FIND_UPPERCASE_DIRS=$(find doc -type d -name "*[[:upper:]]*")
-echo '=> Checking for directory names containing an uppercase letter...'
-echo
+# shellcheck disable=2059
+printf "${COLOR_GREEN}INFO: Checking for directory names containing an uppercase letter...${COLOR_RESET}\n"
 if echo "${FIND_UPPERCASE_DIRS}" | grep . &>/dev/null
 then
-  echo '✖ ERROR: Found one or more directories with an uppercase letter in their name. Use lowercase instead of uppercase for the directory names.' >&2
-  echo 'https://docs.gitlab.com/ee/development/documentation/site_architecture/folder_structure.html#work-with-directories-and-files' >&2
-  echo
+  # shellcheck disable=2059
+  printf "${COLOR_RED}ERROR: Found one or more directories with an uppercase letter in their name!${COLOR_RESET} Use lowercase instead of uppercase for the directory names.\n" >&2
+  printf "https://docs.gitlab.com/ee/development/documentation/site_architecture/folder_structure.html#work-with-directories-and-files\n" >&2
  echo "${FIND_UPPERCASE_DIRS}"
  echo
  ((ERRORCODE++))
 fi
 
 FIND_UPPERCASE_FILES=$(find doc -type f -name "*[[:upper:]]*.md")
-echo '=> Checking for file names containing an uppercase letter...'
-echo
+# shellcheck disable=2059
+printf "${COLOR_GREEN}INFO: Checking for file names containing an uppercase letter...${COLOR_RESET}\n"
 if echo "${FIND_UPPERCASE_FILES}" | grep . &>/dev/null
 then
-  echo '✖ ERROR: Found one or more file names with an uppercase letter in their name. Use lowercase instead of uppercase for the file names.' >&2
-  echo 'https://docs.gitlab.com/ee/development/documentation/site_architecture/folder_structure.html#work-with-directories-and-files' >&2
-  echo
+  # shellcheck disable=2059
+  printf "${COLOR_RED}ERROR: Found one or more file names with an uppercase letter in their name!${COLOR_RESET} Use lowercase instead of uppercase for the file names.\n" >&2
+  printf "https://docs.gitlab.com/ee/development/documentation/site_architecture/folder_structure.html#work-with-directories-and-files\n" >&2
  echo "${FIND_UPPERCASE_FILES}"
  echo
  ((ERRORCODE++))
 fi

@@ -130,18 +135,21 @@ fi
 if [ -z "${CI_MERGE_REQUEST_TARGET_BRANCH_SHA}" ]
 then
   MD_DOC_PATH=${MD_DOC_PATH:-doc}
-  echo "Merge request pipeline (detached) detected. Testing all files."
+  # shellcheck disable=2059
+  printf "${COLOR_GREEN}INFO: Merge request pipeline (detached) detected. Running Markdownlint and Vale on all files...${COLOR_RESET}\n"
 else
-  MERGE_BASE=$(git merge-base ${CI_MERGE_REQUEST_TARGET_BRANCH_SHA} ${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA})
+  MERGE_BASE=$(git merge-base "${CI_MERGE_REQUEST_TARGET_BRANCH_SHA}" "${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA}")
   if git diff --diff-filter=d --name-only "${MERGE_BASE}..${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA}" | grep -E "\.vale|\.markdownlint|lint-doc\.sh|docs\.gitlab-ci\.yml"
   then
     MD_DOC_PATH=${MD_DOC_PATH:-doc}
-    echo "Vale, Markdownlint, lint-doc.sh, or pipeline configuration changed. Testing all files."
+    # shellcheck disable=2059
+    printf "${COLOR_GREEN}INFO: Vale, Markdownlint, lint-doc.sh, or pipeline configuration changed. Testing all files.${COLOR_RESET}\n"
   else
     MD_DOC_PATH=$(git diff --diff-filter=d --name-only "${MERGE_BASE}..${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA}" -- 'doc/*.md')
     if [ -n "${MD_DOC_PATH}" ]
     then
-      echo -e "Merged results pipeline detected. Testing only the following files:\n${MD_DOC_PATH}"
+      # shellcheck disable=2059
+      printf "${COLOR_GREEN}INFO: Merged results pipeline detected. Testing only the following files:${COLOR_RESET}\n${MD_DOC_PATH}"
     fi
   fi
 fi

@@ -151,58 +159,63 @@ function run_locally_or_in_container() {
   local args=$2
   local registry_url="registry.gitlab.com/gitlab-org/gitlab-docs/lint-markdown:alpine-3.16-vale-2.22.0-markdownlint-0.32.2-markdownlint2-0.6.0"
 
-  if hash ${cmd} 2>/dev/null
+  if hash "${cmd}" 2>/dev/null
   then
+    # shellcheck disable=2059
+    printf "${COLOR_GREEN}INFO: Found locally-installed ${cmd}! Running...${COLOR_RESET}\n"
     $cmd $args
   # When using software like Rancher Desktop, both nerdctl and docker binaries are available
   # but only one is configured. To check which one to use, we need to probe each runtime
-  elif (hash nerdctl 2>/dev/null) && (nerdctl info 2>&1 1>/dev/null)
+  elif (hash nerdctl 2>/dev/null) && (nerdctl info > /dev/null 2>&1)
   then
+    # shellcheck disable=2059
+    printf "${COLOR_GREEN}INFO: Found nerdctl! Using linting image to run ${cmd}...${COLOR_RESET}\n"
     nerdctl run -t -v "${PWD}:/gitlab" -w /gitlab --rm ${registry_url} ${cmd} ${args}
-  elif (hash docker 2>/dev/null) && (docker info 2>&1 1>/dev/null)
+  elif (hash docker 2>/dev/null) && (docker info > /dev/null 2>&1)
   then
+    # shellcheck disable=2059
+    printf "${COLOR_GREEN}INFO: Found docker! Using linting image to run ${cmd}...${COLOR_RESET}\n"
     docker run -t -v "${PWD}:/gitlab" -w /gitlab --rm ${registry_url} ${cmd} ${args}
   else
-    echo
-    echo " ✖ ERROR: '${cmd}' not found. Install '${cmd}' or a container runtime (Docker/Nerdctl) to proceed." >&2
-    echo
+    # shellcheck disable=2059
+    printf "${COLOR_RED}ERROR: '${cmd}' not found!${COLOR_RESET} Install '${cmd}' locally, or install a container runtime (docker or nerdctl) and try again.\n" >&2
     ((ERRORCODE++))
   fi
 
   if [ $? -ne 0 ]
   then
-    echo
-    echo " ✖ ERROR: '${cmd}' failed with errors." >&2
-    echo
+    # shellcheck disable=2059
+    printf "${COLOR_RED}ERROR: '${cmd}' failed with errors!${COLOR_RESET}\n" >&2
     ((ERRORCODE++))
   fi
 }
 
-echo '=> Linting markdown style...'
-echo
+# shellcheck disable=2059
+printf "${COLOR_GREEN}INFO: Linting markdown style...${COLOR_RESET}\n"
 if [ -z "${MD_DOC_PATH}" ]
 then
-  echo "Merged results pipeline detected, but no markdown files found. Skipping."
+  # shellcheck disable=2059
+  printf "${COLOR_GREEN}INFO: Merged results pipeline detected, but no markdown files found. Skipping.${COLOR_RESET}\n"
 else
-  yarn markdownlint --config .markdownlint.yml ${MD_DOC_PATH} --rules doc/.markdownlint/rules
-
-  if [ $? -ne 0 ]
+  if ! yarn markdownlint --config .markdownlint.yml "${MD_DOC_PATH}" --rules doc/.markdownlint/rules;
   then
-    echo
-    echo '✖ ERROR: Markdownlint failed with errors.' >&2
-    echo
+    # shellcheck disable=2059
+    printf "${COLOR_RED}ERROR: Markdownlint failed with errors!${COLOR_RESET}\n" >&2
    ((ERRORCODE++))
   fi
 fi
 
-echo '=> Linting prose...'
+# shellcheck disable=2059
+printf "${COLOR_GREEN}INFO: Looking for Vale to lint prose, either installed locally or available in documentation linting image...${COLOR_RESET}\n"
 run_locally_or_in_container 'vale' "--minAlertLevel error --output=doc/.vale/vale.tmpl ${MD_DOC_PATH}"
 
-if [ $ERRORCODE -ne 0 ]
+if [ "$ERRORCODE" -ne 0 ]
 then
-  echo "✖ ${ERRORCODE} lint test(s) failed. Review the log carefully to see full listing."
+  # shellcheck disable=2059
+  printf "\n${COLOR_RED}ERROR: lint test(s) failed! Review the log carefully to see full listing.${COLOR_RESET}\n"
   exit 1
 else
-  echo "✔ Linting passed"
+  # shellcheck disable=2059
+  printf "\n${COLOR_GREEN}INFO: Linting passed.${COLOR_RESET}\n"
   exit 0
 fi
@@ -38,10 +38,10 @@ function check_file {
   file="$1"
   TOTAL_FILES=$((TOTAL_FILES + 1))
   if [ "$(head -n1 "$file")" != "---" ]; then
-    printf "${COLOR_RED}Documentation metadata missing in %s.${COLOR_RESET}\n" "$file" >&2
+    printf "${COLOR_RED}ERROR: Documentation metadata missing in %s.${COLOR_RESET}\n" "$file" >&2
     FAILING_FILES=$((FAILING_FILES + 1))
   elif [ "$VERBOSE" == "true" ]; then
-    printf "Documentation metadata found in %s.\n" "$file"
+    printf "${COLOR_GREEN}INFO: Documentation metadata found in %s.${COLOR_RESET}\n" "$file"
   fi
 }
 

@@ -53,7 +53,7 @@ function check_all_files {
 
 if [[ "$CHECK_ALL" = "true" ]]; then
   # shellcheck disable=SC2059
-  printf "No files supplied. Checking all markdown files in doc/.\n"
+  printf "${COLOR_GREEN}INFO: No files supplied! Checking all markdown files in doc/...${COLOR_RESET}\n"
   check_all_files
 else
   # Takes a list of Markdown files as a parameter

@@ -66,10 +66,10 @@ fi
 
 if [ "$FAILING_FILES" -gt 0 ]; then
   # shellcheck disable=SC2059
-  printf "\n${COLOR_RED}Documentation metadata is missing in ${FAILING_FILES} of ${TOTAL_FILES} documentation files.${COLOR_RESET} For more information, see https://docs.gitlab.com/ee/development/documentation/#metadata.\n" >&2
+  printf "\n${COLOR_RED}ERROR: Documentation metadata is missing in ${FAILING_FILES} of ${TOTAL_FILES} documentation files.${COLOR_RESET} For more information, see https://docs.gitlab.com/ee/development/documentation/#metadata.\n" >&2
   exit 1
 else
   # shellcheck disable=SC2059
-  printf "${COLOR_GREEN}Documentation metadata found in ${TOTAL_FILES} documentation files.${COLOR_RESET}\n"
+  printf "${COLOR_GREEN}INFO: Documentation metadata found in ${TOTAL_FILES} documentation files.${COLOR_RESET}\n"
   exit 0
 fi
@@ -0,0 +1,230 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feature_category: :service_ping do
  include UsageDataHelpers

  let(:temp_dir) { Dir.mktmpdir }
  let(:tmpfile) { Tempfile.new('test-metadata') }
  let(:ee_temp_dir) { Dir.mktmpdir }
  let(:existing_key_paths) { {} }
  let(:description) { "This metric counts unique users viewing analytics metrics dashboard section" }
  let(:group) { "group::analytics instrumentation" }
  let(:stage) { "analytics" }
  let(:section) { "analytics" }
  let(:mr) { "https://gitlab.com/some-group/some-project/-/merge_requests/123" }
  let(:event) { "view_analytics_dashboard" }
  let(:unique_on) { "user_id" }
  let(:options) do
    {
      time_frames: time_frames,
      free: true,
      mr: mr,
      group: group,
      stage: stage,
      section: section,
      event: event,
      unique_on: unique_on
    }.stringify_keys
  end

  let(:key_path_7d) { "count_distinct_#{unique_on}_from_#{event}_7d" }
  let(:metric_definition_path_7d) { Dir.glob(File.join(temp_dir, "metrics/counts_7d/#{key_path_7d}.yml")).first }
  let(:metric_definition_7d) do
    {
      "key_path" => key_path_7d,
      "name" => key_path_7d,
      "description" => description,
      "product_section" => section,
      "product_stage" => stage,
      "product_group" => group,
      "performance_indicator_type" => [],
      "value_type" => "number",
      "status" => "active",
      "milestone" => "13.9",
      "introduced_by_url" => mr,
      "time_frame" => "7d",
      "data_source" => "redis_hll",
      "data_category" => "optional",
      "instrumentation_class" => "RedisHLLMetric",
      "distribution" => %w[ce ee],
      "tier" => %w[free premium ultimate]
    }
  end

  before do
    stub_const("#{described_class}::TOP_LEVEL_DIR", temp_dir)
    stub_const("#{described_class}::TOP_LEVEL_DIR_EE", ee_temp_dir)
    stub_const("#{described_class}::KNOWN_EVENTS_PATH", tmpfile.path)
    stub_const("#{described_class}::KNOWN_EVENTS_PATH_EE", tmpfile.path)

    allow_next_instance_of(described_class) do |instance|
      allow(instance).to receive(:ask)
        .with(/Please describe in at least 50 characters/)
        .and_return(description)
    end

    allow(Gitlab::Usage::MetricDefinition)
      .to receive(:definitions).and_return(existing_key_paths)
  end

  after do
    FileUtils.rm_rf(temp_dir)
    FileUtils.rm_rf(ee_temp_dir)
    FileUtils.rm_rf(tmpfile.path)
  end

  describe 'Creating metric definition file' do
    before do
      # Stub version so that `milestone` key remains constant between releases to prevent flakiness.
      stub_const('Gitlab::VERSION', '13.9.0')
    end

    context 'for single time frame' do
      let(:time_frames) { %w[7d] }

      it 'creates a metric definition file using the template' do
        described_class.new([], options).invoke_all

        expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
      end

      context 'for ultimate only feature' do
        let(:metric_definition_path_7d) do
          Dir.glob(File.join(ee_temp_dir, temp_dir, "metrics/counts_7d/#{key_path_7d}.yml")).first
        end

        it 'creates a metric definition file using the template' do
          described_class.new([], options.merge(tiers: %w[ultimate])).invoke_all

          expect(YAML.safe_load(File.read(metric_definition_path_7d)))
            .to eq(metric_definition_7d.merge("tier" => ["ultimate"], "distribution" => ["ee"]))
        end
      end

      context 'with invalid time frame' do
        let(:time_frames) { %w[14d] }

        it 'raises error' do
          expect { described_class.new([], options).invoke_all }.to raise_error(RuntimeError)
        end
      end

      context 'with duplicated key path' do
        let(:existing_key_paths) { { key_path_7d => true } }

        it 'raises error' do
          expect { described_class.new([], options).invoke_all }.to raise_error(RuntimeError)
        end
      end

      context 'without at least one tier available' do
        it 'raises error' do
          expect { described_class.new([], options.merge(tiers: [])).invoke_all }
            .to raise_error(RuntimeError)
        end
      end

      context 'with unknown tier' do
        it 'raises error' do
          expect { described_class.new([], options.merge(tiers: %w[superb])).invoke_all }
            .to raise_error(RuntimeError)
        end
      end

      context 'without obligatory parameter' do
        it 'raises error', :aggregate_failures do
          %w[unique_on event mr section stage group].each do |option|
            expect { described_class.new([], options.without(option)).invoke_all }
              .to raise_error(RuntimeError)
          end
        end
      end

      context 'with to short description' do
        it 'asks again for description' do
          allow_next_instance_of(described_class) do |instance|
            allow(instance).to receive(:ask)
              .with(/Please describe in at least 50 characters/)
              .and_return("I am to short")

            expect(instance).to receive(:ask)
              .with(/Please provide description that is 50 characters long/)
              .and_return(description)
          end

          described_class.new([], options).invoke_all
        end
      end
    end

    context 'for multiple time frames' do
      let(:time_frames) { %w[7d 28d] }
      let(:key_path_28d) { "count_distinct_#{unique_on}_from_#{event}_28d" }
      let(:metric_definition_path_28d) { Dir.glob(File.join(temp_dir, "metrics/counts_28d/#{key_path_28d}.yml")).first }
      let(:metric_definition_28d) do
        metric_definition_7d.merge(
          "key_path" => key_path_28d,
          "name" => key_path_28d,
          "time_frame" => "28d"
        )
      end

      it 'creates a metric definition file using the template' do
        described_class.new([], options).invoke_all

        expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
        expect(YAML.safe_load(File.read(metric_definition_path_28d))).to eq(metric_definition_28d)
      end
    end

    context 'with default time frames' do
      let(:time_frames) { nil }
      let(:key_path_28d) { "count_distinct_#{unique_on}_from_#{event}_28d" }
      let(:metric_definition_path_28d) { Dir.glob(File.join(temp_dir, "metrics/counts_28d/#{key_path_28d}.yml")).first }
      let(:metric_definition_28d) do
        metric_definition_7d.merge(
          "key_path" => key_path_28d,
          "name" => key_path_28d,
          "time_frame" => "28d"
        )
      end

      it 'creates a metric definition file using the template' do
        described_class.new([], options.without('time_frames')).invoke_all

        expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
        expect(YAML.safe_load(File.read(metric_definition_path_28d))).to eq(metric_definition_28d)
      end
    end
  end

  describe 'Creating known event entry' do
    let(:time_frames) { %w[7d 28d] }
    let(:expected_known_events) { [{ "name" => event, "aggregation" => "weekly" }] }

    it 'creates a metric definition file using the template' do
      described_class.new([], options).invoke_all

      expect(YAML.safe_load(File.read(tmpfile.path))).to match_array(expected_known_events)
    end

    context 'for ultimate only feature' do
      let(:ee_tmpfile) { Tempfile.new('test-metadata') }

      after do
        FileUtils.rm_rf(ee_tmpfile)
      end

      it 'creates a metric definition file using the template' do
        stub_const("#{described_class}::KNOWN_EVENTS_PATH_EE", ee_tmpfile.path)

        described_class.new([], options.merge(tiers: %w[ultimate])).invoke_all

        expect(YAML.safe_load(File.read(tmpfile.path))).to be nil
        expect(YAML.safe_load(File.read(ee_tmpfile.path))).to match_array(expected_known_events)
      end
    end
  end
end
@@ -67,6 +67,11 @@ RSpec.describe SwapIssueUserMentionsNoteIdToBigintForGitlabDotCom2, feature_cate
       connection = described_class.new.connection
       connection.execute('ALTER TABLE issue_user_mentions ALTER COLUMN note_id TYPE bigint')
       connection.execute('ALTER TABLE issue_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE integer')
+      # Cleanup artefacts from executing `#down` in test setup
+      connection.execute('DROP INDEX IF EXISTS index_issue_user_mentions_on_note_id_convert_to_bigint')
+      connection.execute(
+        'ALTER TABLE issue_user_mentions DROP CONSTRAINT IF EXISTS fk_issue_user_mentions_note_id_convert_to_bigint'
+      )
 
       allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
@@ -3,7 +3,8 @@
 require 'spec_helper'
 require_migration!
 
-RSpec.describe SwapMergeRequestUserMentionsNoteIdToBigint, feature_category: :database do
+# rubocop: disable RSpec/FilePath
+RSpec.describe SwapMergeRequestUserMentionsNoteIdToBigint2, feature_category: :database do
   describe '#up' do
     before do
       # A we call `schema_migrate_down!` before each example, and for this migration

@@ -61,6 +62,30 @@ RSpec.describe SwapMergeRequestUserMentionsNoteIdToBigint, feature_category: :da
       end
     end
   end
+
+  it 'is a no-op if columns are already swapped' do
+    connection = described_class.new.connection
+    connection.execute('ALTER TABLE merge_request_user_mentions ALTER COLUMN note_id TYPE bigint')
+    connection.execute('ALTER TABLE merge_request_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE integer')
+    # Cleanup artefacts from executing `#down` in test setup
+    connection.execute('DROP INDEX IF EXISTS index_merge_request_user_mentions_note_id_convert_to_bigint')
+    connection.execute(
+      'ALTER TABLE merge_request_user_mentions ' \
+      'DROP CONSTRAINT IF EXISTS fk_merge_request_user_mentions_note_id_convert_to_bigint'
+    )
+
+    allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+    allow_any_instance_of(described_class).to receive(:columns_already_swapped?).and_return(true)
+
+    migrate!
+
+    user_mentions = table(:merge_request_user_mentions)
+    user_mentions.reset_column_information
+
+    expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+    expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('integer')
+  end
 # rubocop: enable RSpec/AnyInstanceOf
 end
 end
+# rubocop: enable RSpec/FilePath
@@ -97,6 +97,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
 
   it { is_expected.to validate_numericality_of(:container_registry_delete_tags_service_timeout).only_integer.is_greater_than_or_equal_to(0) }
   it { is_expected.to validate_numericality_of(:container_registry_cleanup_tags_service_max_list_size).only_integer.is_greater_than_or_equal_to(0) }
+  it { is_expected.to validate_numericality_of(:container_registry_data_repair_detail_worker_max_concurrency).only_integer.is_greater_than_or_equal_to(0) }
   it { is_expected.to validate_numericality_of(:container_registry_expiration_policies_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
   it { is_expected.to allow_value(true).for(:container_registry_expiration_policies_caching) }
   it { is_expected.to allow_value(false).for(:container_registry_expiration_policies_caching) }

@@ -108,6 +109,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
   it { is_expected.to validate_numericality_of(:container_registry_pre_import_timeout).only_integer.is_greater_than_or_equal_to(0) }
   it { is_expected.to validate_numericality_of(:container_registry_import_timeout).only_integer.is_greater_than_or_equal_to(0) }
   it { is_expected.to validate_numericality_of(:container_registry_pre_import_tags_rate).is_greater_than_or_equal_to(0) }
+  it { is_expected.not_to allow_value(nil).for(:container_registry_data_repair_detail_worker_max_concurrency) }
   it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_tags_count) }
   it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_retries) }
   it { is_expected.not_to allow_value(nil).for(:container_registry_import_start_max_retries) }
@@ -148,14 +148,23 @@ RSpec.describe ContainerRegistry::RecordDataRepairDetailWorker, :aggregate_failu
   end
 
   describe '#max_running_jobs' do
+    let(:max_concurrency) { 3 }
+
+    before do
+      stub_application_setting(
+        container_registry_data_repair_detail_worker_max_concurrency: max_concurrency
+      )
+    end
+
     subject { worker.max_running_jobs }
 
-    it { is_expected.to eq(described_class::MAX_CAPACITY) }
+    it { is_expected.to eq(max_concurrency) }
   end
 
   describe '#remaining_work_count' do
+    let_it_be(:max_running_jobs) { 5 }
     let_it_be(:pending_projects) do
-      create_list(:project, described_class::MAX_CAPACITY + 2)
+      create_list(:project, max_running_jobs + 2)
     end
 
     subject { worker.remaining_work_count }

@@ -163,9 +172,10 @@ RSpec.describe ContainerRegistry::RecordDataRepairDetailWorker, :aggregate_failu
     context 'when on Gitlab.com', :saas do
       before do
         allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(true)
+        allow(worker).to receive(:max_running_jobs).and_return(max_running_jobs)
       end
 
-      it { is_expected.to eq(described_class::MAX_CAPACITY + 1) }
+      it { is_expected.to eq(worker.max_running_jobs + 1) }
 
       context 'when the Gitlab API is not supported' do
        before do