Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent f6939c144e
commit 88c8343af6
@@ -36,6 +36,23 @@ export default function initReadMore(triggerSelector = '.js-read-more-trigger')
    const readMoreHeight = Number(parentEl.dataset.readMoreHeight);
    const readMoreContent = parentEl.querySelector('.read-more-content');

    // If element exists in readMoreContent expand content automatically
    // and scroll to element
    if (window.location.hash) {
      const targetId = window.location.href.split('#')[1];
      const hashTargetEl = readMoreContent.querySelector(`#user-content-${targetId}`);

      if (hashTargetEl) {
        targetEl.classList.add('is-expanded');
        triggerEl.remove();
        window.addEventListener('load', () => {
          // Trigger scrollTo event
          hashTargetEl.click();
        });
        return;
      }
    }

    if (readMoreContent) {
      parentEl.style.setProperty('--read-more-height', `${readMoreHeight}px`);
    }

@@ -57,5 +57,6 @@ export default {
      :alt="avatarAlt"
      :size="size"
      :fallback-on-error="true"
      itemprop="image"
    />
</template>

@@ -0,0 +1,9 @@
---
name: duo_chat_requires_licensed_seat_sm
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/457283
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/150391
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/457757
milestone: '17.0'
group: group::duo chat
type: beta
default_enabled: false

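For context, a flag defined by this YAML is normally checked at runtime with `Feature.enabled?`. A minimal, hypothetical sketch of such a check (the call site and the actor passed here are assumptions; this commit does not show where the flag is read):

```ruby
# Hypothetical call site for the flag above; the real check in the Duo Chat
# code path, and the actor it uses, are not part of this diff.
if Feature.enabled?(:duo_chat_requires_licensed_seat_sm, current_user)
  # Require the user to hold a licensed Duo seat before serving chat requests.
end
```
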
@@ -63,6 +63,7 @@ end

# Enable partition management for the backfill table during merge_request_diff_commits partitioning.
# This way new partitions will be created as the trigger syncs new rows across to this table.
#
Gitlab::Database::Partitioning.register_tables(
  [
    {
@@ -73,4 +74,16 @@ Gitlab::Database::Partitioning.register_tables(
  ]
)

# Enable partition management for the backfill table during merge_request_diff_files partitioning.
# This way new partitions will be created as the trigger syncs new rows across to this table.
#
Gitlab::Database::Partitioning.register_tables(
  [
    {
      limit_connection_names: %i[main],
      table_name: 'merge_request_diff_files_99208b8fac',
      partitioned_column: :merge_request_diff_id, strategy: :int_range, partition_size: 200_000_000
    }
  ]
)
Gitlab::Database::Partitioning.sync_partitions_ignore_db_error

@@ -136,7 +136,14 @@ Sidekiq.configure_client do |config|
  config.client_middleware(&Gitlab::SidekiqMiddleware.client_configurator)
end

Gitlab::Application.configure do |config|
  config.middleware.use(Gitlab::Middleware::SidekiqShardAwarenessValidation)
end

Sidekiq::Scheduled::Poller.prepend Gitlab::Patch::SidekiqPoller
Sidekiq::Cron::Poller.prepend Gitlab::Patch::SidekiqPoller
Sidekiq::Cron::Poller.prepend Gitlab::Patch::SidekiqCronPoller

Sidekiq::Client.prepend Gitlab::SidekiqSharding::Validator::Client
Sidekiq::RedisClientAdapter::CompatMethods.prepend Gitlab::SidekiqSharding::Validator
Sidekiq::Job::Setter.prepend Gitlab::Patch::SidekiqJobSetter

@@ -0,0 +1,19 @@
---
table_name: merge_request_diff_files_99208b8fac
classes:
- MergeRequestDiffFile
feature_categories:
- code_review_workflow
description: Temporary table for partitioning the main merge_request_diff_files table
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/149181
milestone: '17.0'
gitlab_schema: gitlab_main_cell
desired_sharding_key:
  project_id:
    references: projects
    backfill_via:
      parent:
        foreign_key: merge_request_diff_id
        table: merge_request_diffs
        sharding_key: project_id
        belongs_to: merge_request_diff

@@ -5,10 +5,12 @@ class RemoveAutoFixWorkerJobs < Gitlab::Database::Migration[2.2]
  milestone '16.11'

  def up
    job_to_remove = Sidekiq::Cron::Job.find('security_auto_fix')

    job_to_remove.destroy if job_to_remove
    job_to_remove.disable! if job_to_remove
    # TODO: make shard-aware. See https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/3430
    Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
      job_to_remove = Sidekiq::Cron::Job.find('security_auto_fix')
      job_to_remove.destroy if job_to_remove
      job_to_remove.disable! if job_to_remove
    end

    sidekiq_remove_jobs(job_klasses: ['Security::AutoFixWorker'])
  end

@@ -0,0 +1,21 @@
# frozen_string_literal: true

class CreatePartitionedMergeRequestDiffFilesCopy < Gitlab::Database::Migration[2.2]
  include Gitlab::Database::PartitioningMigrationHelpers

  milestone '17.0'
  disable_ddl_transaction!

  def up
    partition_table_by_int_range(
      'merge_request_diff_files',
      'merge_request_diff_id',
      partition_size: 200_000_000,
      primary_key: %w[merge_request_diff_id relative_order]
    )
  end

  def down
    drop_partitioned_table_for('merge_request_diff_files')
  end
end
@@ -130,8 +130,12 @@ class MigrateSidekiqQueuedAndFutureJobs < Gitlab::Database::Migration[2.2]
    mappings = Gitlab::SidekiqConfig.worker_queue_mappings
    logger = ::Gitlab::BackgroundMigration::Logger.build
    migrator = SidekiqMigrateJobs.new(mappings, logger: logger)
    migrator.migrate_queues
    %w[schedule retry].each { |set| migrator.migrate_set(set) }

    # TODO: make shard-aware. See https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/3430
    Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
      migrator.migrate_queues
      %w[schedule retry].each { |set| migrator.migrate_set(set) }
    end
  end

  def down

@@ -8,8 +8,11 @@ class UnscheduleStuckBulkImportCron < Gitlab::Database::Migration[2.2]
    # This is to clean up the cron schedule for BulkImports::StuckImportWorker
    # which was removed in
    # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/143806
    removed_job = Sidekiq::Cron::Job.find('bulk_imports_stuck_import_worker')
    removed_job.destroy if removed_job
    # TODO: make shard-aware. See https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/3430
    Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
      removed_job = Sidekiq::Cron::Job.find('bulk_imports_stuck_import_worker')
      removed_job.destroy if removed_job
    end

    sidekiq_remove_jobs(job_klasses: %w[BulkImports::StuckImportWorker])
  end

@@ -7,8 +7,11 @@ class UnscheduleOpenAiClearConversationsWorker < Gitlab::Database::Migration[2.2
  def up
    # This is to clean up the cron schedule for OpenAi::ClearConversationsWorker
    # which was removed in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/139626
    removed_job = Sidekiq::Cron::Job.find('open_ai_clear_conversations_worker')
    removed_job.destroy if removed_job
    # TODO: make shard-aware. See https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/3430
    Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
      removed_job = Sidekiq::Cron::Job.find('open_ai_clear_conversations_worker')
      removed_job.destroy if removed_job
    end

    sidekiq_remove_jobs(job_klasses: %w[OpenAi::ClearConversationsWorker])
  end

@@ -8,7 +8,10 @@ class UnscheduleOpenAiClearConvosCron < Gitlab::Database::Migration[2.2]
    # This is to clean up the cron schedule for OpenAi::ClearConversationsWorker
    # which was removed in
    # https://gitlab.com/gitlab-org/gitlab/-/commit/8c24e145c14d64c62a5b4f6fe72726140457d9f1#be4e3233708096a83c31a905040cb84cc105703d_780_780
    Sidekiq::Cron::Job.destroy('open_ai_clear_conversations')
    # TODO: make shard-aware. See https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/3430
    Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
      Sidekiq::Cron::Job.destroy('open_ai_clear_conversations')
    end

    sidekiq_remove_jobs(job_klasses: %w[OpenAi::ClearConversationsWorker])
  end

@@ -0,0 +1 @@
a752467de5e6e50304c97ed73feaafc6e99485421528ffc590d93dc8b638d68a

@ -643,6 +643,67 @@ $$;
|
|||
|
||||
COMMENT ON FUNCTION table_sync_function_0992e728d3() IS 'Partitioning migration: table sync for merge_request_diff_commits table';
|
||||
|
||||
CREATE FUNCTION table_sync_function_3f39f64fc3() RETURNS trigger
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
BEGIN
|
||||
IF (TG_OP = 'DELETE') THEN
|
||||
DELETE FROM merge_request_diff_files_99208b8fac where "merge_request_diff_id" = OLD."merge_request_diff_id" AND "relative_order" = OLD."relative_order";
|
||||
ELSIF (TG_OP = 'UPDATE') THEN
|
||||
UPDATE merge_request_diff_files_99208b8fac
|
||||
SET "new_file" = NEW."new_file",
|
||||
"renamed_file" = NEW."renamed_file",
|
||||
"deleted_file" = NEW."deleted_file",
|
||||
"too_large" = NEW."too_large",
|
||||
"a_mode" = NEW."a_mode",
|
||||
"b_mode" = NEW."b_mode",
|
||||
"new_path" = NEW."new_path",
|
||||
"old_path" = NEW."old_path",
|
||||
"diff" = NEW."diff",
|
||||
"binary" = NEW."binary",
|
||||
"external_diff_offset" = NEW."external_diff_offset",
|
||||
"external_diff_size" = NEW."external_diff_size",
|
||||
"generated" = NEW."generated"
|
||||
WHERE merge_request_diff_files_99208b8fac."merge_request_diff_id" = NEW."merge_request_diff_id" AND merge_request_diff_files_99208b8fac."relative_order" = NEW."relative_order";
|
||||
ELSIF (TG_OP = 'INSERT') THEN
|
||||
INSERT INTO merge_request_diff_files_99208b8fac ("new_file",
|
||||
"renamed_file",
|
||||
"deleted_file",
|
||||
"too_large",
|
||||
"a_mode",
|
||||
"b_mode",
|
||||
"new_path",
|
||||
"old_path",
|
||||
"diff",
|
||||
"binary",
|
||||
"external_diff_offset",
|
||||
"external_diff_size",
|
||||
"generated",
|
||||
"merge_request_diff_id",
|
||||
"relative_order")
|
||||
VALUES (NEW."new_file",
|
||||
NEW."renamed_file",
|
||||
NEW."deleted_file",
|
||||
NEW."too_large",
|
||||
NEW."a_mode",
|
||||
NEW."b_mode",
|
||||
NEW."new_path",
|
||||
NEW."old_path",
|
||||
NEW."diff",
|
||||
NEW."binary",
|
||||
NEW."external_diff_offset",
|
||||
NEW."external_diff_size",
|
||||
NEW."generated",
|
||||
NEW."merge_request_diff_id",
|
||||
NEW."relative_order");
|
||||
END IF;
|
||||
RETURN NULL;
|
||||
|
||||
END
|
||||
$$;
|
||||
|
||||
COMMENT ON FUNCTION table_sync_function_3f39f64fc3() IS 'Partitioning migration: table sync for merge_request_diff_files table';
|
||||
|
||||
CREATE FUNCTION trigger_10ee1357e825() RETURNS trigger
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
|
|
@ -1171,6 +1232,25 @@ CREATE TABLE merge_request_diff_commits_b5377a7a34 (
|
|||
)
|
||||
PARTITION BY RANGE (merge_request_diff_id);
|
||||
|
||||
CREATE TABLE merge_request_diff_files_99208b8fac (
|
||||
new_file boolean NOT NULL,
|
||||
renamed_file boolean NOT NULL,
|
||||
deleted_file boolean NOT NULL,
|
||||
too_large boolean NOT NULL,
|
||||
a_mode character varying NOT NULL,
|
||||
b_mode character varying NOT NULL,
|
||||
new_path text NOT NULL,
|
||||
old_path text NOT NULL,
|
||||
diff text,
|
||||
"binary" boolean,
|
||||
external_diff_offset integer,
|
||||
external_diff_size integer,
|
||||
generated boolean,
|
||||
merge_request_diff_id bigint NOT NULL,
|
||||
relative_order integer NOT NULL
|
||||
)
|
||||
PARTITION BY RANGE (merge_request_diff_id);
|
||||
|
||||
CREATE TABLE p_batched_git_ref_updates_deletions (
|
||||
id bigint NOT NULL,
|
||||
project_id bigint NOT NULL,
|
||||
|
|
@ -21701,6 +21781,9 @@ ALTER TABLE ONLY merge_request_diff_commits
|
|||
ALTER TABLE ONLY merge_request_diff_details
|
||||
ADD CONSTRAINT merge_request_diff_details_pkey PRIMARY KEY (merge_request_diff_id);
|
||||
|
||||
ALTER TABLE ONLY merge_request_diff_files_99208b8fac
|
||||
ADD CONSTRAINT merge_request_diff_files_99208b8fac_pkey PRIMARY KEY (merge_request_diff_id, relative_order);
|
||||
|
||||
ALTER TABLE ONLY merge_request_diff_files
|
||||
ADD CONSTRAINT merge_request_diff_files_pkey PRIMARY KEY (merge_request_diff_id, relative_order);
|
||||
|
||||
|
|
@ -29790,6 +29873,8 @@ CREATE TRIGGER push_rules_loose_fk_trigger AFTER DELETE ON push_rules REFERENCIN
|
|||
|
||||
CREATE TRIGGER table_sync_trigger_57c8465cd7 AFTER INSERT OR DELETE OR UPDATE ON merge_request_diff_commits FOR EACH ROW EXECUTE FUNCTION table_sync_function_0992e728d3();
|
||||
|
||||
CREATE TRIGGER table_sync_trigger_cd362c20e2 AFTER INSERT OR DELETE OR UPDATE ON merge_request_diff_files FOR EACH ROW EXECUTE FUNCTION table_sync_function_3f39f64fc3();
|
||||
|
||||
CREATE TRIGGER tags_loose_fk_trigger AFTER DELETE ON tags REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();
|
||||
|
||||
CREATE TRIGGER trigger_10ee1357e825 BEFORE INSERT OR UPDATE ON p_ci_builds FOR EACH ROW EXECUTE FUNCTION trigger_10ee1357e825();
|
||||
|
|
|
|||
|
|
@@ -142,6 +142,8 @@ it will print useful error messages with links to the docs on how to resolve the
GITLAB_SIMULATE_SAAS=1 RAILS_ENV=development bundle exec rake 'gitlab:duo:setup[<test-group-name>]'
```

[AI Gateway](#local-setup) still needs to be set up when using the automated setup.

**Manual way**

1. Ensure you have followed [the process to obtain an EE license](https://handbook.gitlab.com/handbook/developer-onboarding/#working-on-gitlab-ee-developer-licenses) for your local instance and have applied an Ultimate license.

@@ -169,6 +171,11 @@ GITLAB_SIMULATE_SAAS=1 RAILS_ENV=development bundle exec rake 'gitlab:duo:setup[

- [Here's how to reach us!](https://handbook.gitlab.com/handbook/engineering/development/data-science/ai-powered/ai-framework/#-how-to-reach-us)

## Tips for local development

1. When responses are taking too long to appear in the user interface, consider restarting Sidekiq by running `gdk restart rails-background-jobs`. If that doesn't work, try `gdk kill` and then `gdk start`.
1. Alternatively, bypass Sidekiq entirely and run the service synchronously. This can help with debugging errors, as GraphQL errors are then available in the network inspector instead of the Sidekiq logs. To do that, temporarily alter the `perform_for` method in the `Llm::CompletionWorker` class by changing `perform_async` to `perform_inline` (see the sketch after this list).

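A minimal sketch of that temporary change, assuming a simplified `perform_for` signature (the real method in `Llm::CompletionWorker` may take different arguments and do more work):

```ruby
# Hypothetical, debugging-only simplification of Llm::CompletionWorker.perform_for.
class Llm::CompletionWorker
  def self.perform_for(message, options = {})
    # Run the job synchronously so GraphQL errors surface in the network
    # inspector instead of the Sidekiq logs.
    perform_inline(message, options) # was: perform_async(message, options)
  end
end
```
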
## Feature development (Abstraction Layer)

### Feature flags

@@ -587,8 +594,3 @@ end
## Security

Refer to the [secure coding guidelines for Artificial Intelligence (AI) features](../secure_coding_guidelines.md#artificial-intelligence-ai-features).

## Tips for local development

1. When responses are taking too long to appear in the user interface, consider restarting Sidekiq by running `gdk restart rails-background-jobs`. If that doesn't work, try `gdk kill` and then `gdk start`.
1. Alternatively, bypass Sidekiq entirely and run the service synchronously. This can help with debugging errors as GraphQL errors are now available in the network inspector instead of the Sidekiq logs. To do that temporary alter `perform_for` method in `Llm::CompletionWorker` class by changing `perform_async` to `perform_inline`.

@@ -20,7 +20,7 @@ When advisories are added to either the [GitLab Advisory Database](https://advis
[Trivy Database](https://github.com/aquasecurity/trivy-db), Continuous Vulnerability Scanning
triggers a scan on all projects where either Container Scanning, Dependency Scanning, or both, are
enabled. If a new advisory affects an application or operating system dependency, it creates a
vulnerability in the project.
vulnerability in the project with the scanner value set to `GitLab SBoM Vulnerability Scanner`.

NOTE:
If a new operating system package is added to either the GitLab Advisory Database or Trivy

@ -128,14 +128,14 @@ The following languages and dependency managers are supported:
|
|||
<td>Y</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="2">Java and Kotlin<sup><b><a href="#notes-regarding-supported-languages-and-package-managers-1">1</a></b></sup></td>
|
||||
<td rowspan="2">Java and Kotlin</td>
|
||||
<td rowspan="2">
|
||||
8 LTS,
|
||||
11 LTS,
|
||||
17 LTS,
|
||||
or 21 LTS<sup><b><a href="#notes-regarding-supported-languages-and-package-managers-2">2</a></b></sup>
|
||||
or 21 LTS<sup><b><a href="#notes-regarding-supported-languages-and-package-managers-1">1</a></b></sup>
|
||||
</td>
|
||||
<td><a href="https://gradle.org/">Gradle</a><sup><b><a href="#notes-regarding-supported-languages-and-package-managers-3">3</a></b></sup></td>
|
||||
<td><a href="https://gradle.org/">Gradle</a><sup><b><a href="#notes-regarding-supported-languages-and-package-managers-2">2</a></b></sup></td>
|
||||
<td>
|
||||
<ul>
|
||||
<li><code>build.gradle</code></li>
|
||||
|
|
@ -145,7 +145,7 @@ The following languages and dependency managers are supported:
|
|||
<td>N</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a href="https://maven.apache.org/">Maven</a><sup><b><a href="#notes-regarding-supported-languages-and-package-managers-8">8</a></b></sup></td>
|
||||
<td><a href="https://maven.apache.org/">Maven</a><sup><b><a href="#notes-regarding-supported-languages-and-package-managers-7">7</a></b></sup></td>
|
||||
<td><code>pom.xml</code></td>
|
||||
<td>N</td>
|
||||
</tr>
|
||||
|
|
@ -167,7 +167,7 @@ The following languages and dependency managers are supported:
|
|||
<td>Y</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a href="https://pnpm.io/">pnpm</a><sup><b><a href="#notes-regarding-supported-languages-and-package-managers-4">4</a></b></sup></td>
|
||||
<td><a href="https://pnpm.io/">pnpm</a><sup><b><a href="#notes-regarding-supported-languages-and-package-managers-3">3</a></b></sup></td>
|
||||
<td><code>pnpm-lock.yaml</code></td>
|
||||
<td>Y</td>
|
||||
</tr>
|
||||
|
|
@ -180,7 +180,7 @@ The following languages and dependency managers are supported:
|
|||
</tr>
|
||||
<tr>
|
||||
<td rowspan="4">Python</td>
|
||||
<td rowspan="4">3.9<sup><b><a href="#notes-regarding-supported-languages-and-package-managers-9">9</a></b></sup>, 3.10<sup><b><a href="#notes-regarding-supported-languages-and-package-managers-5">5</a></b></sup></td>
|
||||
<td rowspan="4">3.9<sup><b><a href="#notes-regarding-supported-languages-and-package-managers-8">8</a></b></sup>, 3.10<sup><b><a href="#notes-regarding-supported-languages-and-package-managers-4">4</a></b></sup></td>
|
||||
<td><a href="https://setuptools.readthedocs.io/en/latest/">setuptools</a></td>
|
||||
<td><code>setup.py</code></td>
|
||||
<td>N</td>
|
||||
|
|
@ -207,7 +207,7 @@ The following languages and dependency managers are supported:
|
|||
<td>N</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a href="https://python-poetry.org/">Poetry</a><sup><b><a href="#notes-regarding-supported-languages-and-package-managers-6">6</a></b></sup></td>
|
||||
<td><a href="https://python-poetry.org/">Poetry</a><sup><b><a href="#notes-regarding-supported-languages-and-package-managers-5">5</a></b></sup></td>
|
||||
<td><code>poetry.lock</code></td>
|
||||
<td>N</td>
|
||||
</tr>
|
||||
|
|
@ -226,7 +226,7 @@ The following languages and dependency managers are supported:
|
|||
<tr>
|
||||
<td>Scala</td>
|
||||
<td>All versions</td>
|
||||
<td><a href="https://www.scala-sbt.org/">sbt</a><sup><b><a href="#notes-regarding-supported-languages-and-package-managers-7">7</a></b></sup></td>
|
||||
<td><a href="https://www.scala-sbt.org/">sbt</a><sup><b><a href="#notes-regarding-supported-languages-and-package-managers-6">6</a></b></sup></td>
|
||||
<td><code>build.sbt</code></td>
|
||||
<td>N</td>
|
||||
</tr>
|
||||
|
|
@ -236,31 +236,25 @@ The following languages and dependency managers are supported:
|
|||
<ol>
|
||||
<li>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-1"></a>
|
||||
<p>
|
||||
Android is supported when <a href="#analyzer-specific-settings"><code>DS_EXPERIMENTAL_GRADLE_BUILTIN_PARSER</code></a> is set to <code>true</code>.
|
||||
</p>
|
||||
</li>
|
||||
<li>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-2"></a>
|
||||
<p>
|
||||
Java 21 LTS for <a href="https://www.scala-sbt.org/">sbt</a> is limited to version 1.9.7. Support for more <a href="https://www.scala-sbt.org/">sbt</a> versions can be tracked in <a href="https://gitlab.com/gitlab-org/gitlab/-/issues/430335">issue 430335</a>.
|
||||
It is not supported when <a href="https://docs.gitlab.com/ee/development/fips_compliance.html#enable-fips-mode">FIPS mode</a> is enabled.
|
||||
</p>
|
||||
</li>
|
||||
<li>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-3"></a>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-2"></a>
|
||||
<p>
|
||||
Gradle is not supported when <a href="https://docs.gitlab.com/ee/development/fips_compliance.html#enable-fips-mode">FIPS mode</a> is enabled.
|
||||
</p>
|
||||
</li>
|
||||
<li>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-4"></a>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-3"></a>
|
||||
<p>
|
||||
Support for <code>pnpm</code> lockfiles was <a href="https://gitlab.com/gitlab-org/gitlab/-/issues/336809">introduced in GitLab 15.11</a>. <code>pnpm</code> lockfiles do not store bundled dependencies, so the reported dependencies may differ from <code>npm</code> or <code>yarn</code>.
|
||||
</p>
|
||||
</li>
|
||||
<li>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-5"></a>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-4"></a>
|
||||
<p>
|
||||
For support of <code>Python 3.10</code>, add the following stanza to the GitLab CI/CD configuration file. This specifies that the <code>Python 3.10</code> image is to be used, instead of the default <code>Python 3.9</code>.
|
||||
<div class="language-yaml highlighter-rouge">
|
||||
|
|
@ -271,7 +265,7 @@ The following languages and dependency managers are supported:
|
|||
</p>
|
||||
</li>
|
||||
<li>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-6"></a>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-5"></a>
|
||||
<p>
|
||||
Support for <a href="https://python-poetry.org/">Poetry</a> projects with a <code>poetry.lock</code> file was <a href="https://gitlab.com/gitlab-org/gitlab/-/issues/7006">added in GitLab 15.0</a>.
|
||||
Support for projects without a <code>poetry.lock</code> file is tracked in issue:
|
||||
|
|
@ -279,19 +273,19 @@ The following languages and dependency managers are supported:
|
|||
</p>
|
||||
</li>
|
||||
<li>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-7"></a>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-6"></a>
|
||||
<p>
|
||||
Support for sbt 1.0.x was <a href="https://gitlab.com/gitlab-org/gitlab/-/issues/415835">deprecated</a> in GitLab 16.8.
|
||||
</p>
|
||||
</li>
|
||||
<li>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-8"></a>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-7"></a>
|
||||
<p>
|
||||
Support for Maven below 3.8.8 was <a href="https://gitlab.com/gitlab-org/gitlab/-/issues/438772">deprecated</a> in GitLab 16.9 and will be removed in GitLab 17.0.
|
||||
</p>
|
||||
</li>
|
||||
<li>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-9"></a>
|
||||
<a id="notes-regarding-supported-languages-and-package-managers-8"></a>
|
||||
<p>
|
||||
Support for Python 3.9 was <a href="https://gitlab.com/gitlab-org/gitlab/-/issues/441201">deprecated</a> in GitLab 16.9 and will be removed in GitLab 17.0.
|
||||
</p>
|
||||
|
|
@ -843,8 +837,7 @@ The following variables configure the behavior of specific dependency scanning a
|
|||
| `DS_JAVA_VERSION` | `gemnasium-maven` | `17` | Version of Java. Available versions: `8`, `11`, `17`, `21`. |
|
||||
| `MAVEN_CLI_OPTS` | `gemnasium-maven` | `"-DskipTests --batch-mode"` | List of command line arguments that are passed to `maven` by the analyzer. See an example for [using private repositories](../index.md#using-private-maven-repositories). |
|
||||
| `GRADLE_CLI_OPTS` | `gemnasium-maven` | | List of command line arguments that are passed to `gradle` by the analyzer. |
|
||||
| `GRADLE_PLUGIN_INIT_PATH` | `gemnasium-maven` | `"gemnasium-init.gradle"` | Specifies the path to the Gradle initialization script. When using the `DS_EXPERIMENTAL_GRADLE_BUILTIN_PARSER` feature, the init script must include `allprojects { apply plugin: 'project-report' }` to ensure compatibility. |
|
||||
| `DS_EXPERIMENTAL_GRADLE_BUILTIN_PARSER` | `gemnasium-maven` | `"false"` | Enable the experimental Gradle parser to improve dependency scanning in Gradle projects. **Note:** Using `org.gradle.parallel=true` in your `gradle.properties` file may cause non-deterministic scanning results, so you should set `org.gradle.parallel=false` when using this feature. [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/12361) in GitLab 16.11. |
|
||||
| `GRADLE_PLUGIN_INIT_PATH` | `gemnasium-maven` | `"gemnasium-init.gradle"` | Specifies the path to the Gradle initialization script. The init script must include `allprojects { apply plugin: 'project-report' }` to ensure compatibility. |
|
||||
| `SBT_CLI_OPTS` | `gemnasium-maven` | | List of command-line arguments that the analyzer passes to `sbt`. |
|
||||
| `PIP_INDEX_URL` | `gemnasium-python` | `https://pypi.org/simple` | Base URL of Python Package Index. |
|
||||
| `PIP_EXTRA_INDEX_URL` | `gemnasium-python` | | Array of [extra URLs](https://pip.pypa.io/en/stable/reference/pip_install/#cmdoption-extra-index-url) of package indexes to use in addition to `PIP_INDEX_URL`. Comma-separated. **Warning:** Read [the following security consideration](#python-projects) when using this environment variable. |
|
||||
|
|
|
|||
|
|
@ -420,7 +420,7 @@
|
|||
canonical: |
|
||||
<p>## foo</p>
|
||||
static: |-
|
||||
<p data-sourcepos="1:1-1:28" dir="auto"><span data-escaped-char>#</span># foo</p>
|
||||
<p data-sourcepos="1:1-1:7" dir="auto"><span data-escaped-char data-sourcepos="1:1-1:2">#</span># foo</p>
|
||||
wysiwyg: |-
|
||||
<p dir="auto">## foo</p>
|
||||
04_02_00__leaf_blocks__atx_headings__005:
|
||||
|
|
@ -530,12 +530,12 @@
|
|||
<h2>foo ###</h2>
|
||||
<h1>foo #</h1>
|
||||
static: |-
|
||||
<h3 data-sourcepos="1:1-1:33" dir="auto">
|
||||
<a href="#foo-c" aria-hidden="true" class="anchor" id="user-content-foo-<span data-escaped-char>c</span>"></a>foo <span data-escaped-char>#</span>##</h3>
|
||||
<h2 data-sourcepos="2:1-2:32" dir="auto">
|
||||
<a href="#foo-c-1" aria-hidden="true" class="anchor" id="user-content-foo-<span data-escaped-char>c</span>-1"></a>foo #<span data-escaped-char>#</span>#</h2>
|
||||
<h1 data-sourcepos="3:1-3:29" dir="auto">
|
||||
<a href="#foo-c-2" aria-hidden="true" class="anchor" id="user-content-foo-<span data-escaped-char>c</span>-2"></a>foo <span data-escaped-char>#</span>
|
||||
<h3 data-sourcepos="1:1-1:12" dir="auto">
|
||||
<a href="#foo-" aria-hidden="true" class="anchor" id="user-content-foo-"></a>foo <span data-escaped-char data-sourcepos="1:9-1:10">#</span>##</h3>
|
||||
<h2 data-sourcepos="2:1-2:11" dir="auto">
|
||||
<a href="#foo--1" aria-hidden="true" class="anchor" id="user-content-foo--1"></a>foo #<span data-escaped-char data-sourcepos="2:9-2:10">#</span>#</h2>
|
||||
<h1 data-sourcepos="3:1-3:8" dir="auto">
|
||||
<a href="#foo--2" aria-hidden="true" class="anchor" id="user-content-foo--2"></a>foo <span data-escaped-char data-sourcepos="3:7-3:8">#</span>
|
||||
</h1>
|
||||
wysiwyg: |-
|
||||
<h3 dir="auto">foo ###</h3>
|
||||
|
|
@ -4777,7 +4777,7 @@
|
|||
canonical: |
|
||||
<p>!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~</p>
|
||||
static: |-
|
||||
<p data-sourcepos="1:1-1:295" dir="auto"><span data-escaped-char>!</span>"<span data-escaped-char>#</span><span data-escaped-char>$</span><span data-escaped-char>%</span><span data-escaped-char>&</span>'()*+,-./:;<=>?<span data-escaped-char>@</span>[\]<span data-escaped-char>^</span>_`{|}<span data-escaped-char>~</span></p>
|
||||
<p data-sourcepos="1:1-1:64" dir="auto"><span data-escaped-char data-sourcepos="1:1-1:2">!</span>"<span data-escaped-char data-sourcepos="1:5-1:6">#</span><span data-escaped-char data-sourcepos="1:7-1:8">$</span><span data-escaped-char data-sourcepos="1:9-1:10">%</span><span data-escaped-char data-sourcepos="1:11-1:12">&</span>'()*+,-./:;<=>?<span data-escaped-char data-sourcepos="1:43-1:44">@</span>[\]<span data-escaped-char data-sourcepos="1:51-1:52">^</span>_`{|}<span data-escaped-char data-sourcepos="1:63-1:64">~</span></p>
|
||||
wysiwyg: |-
|
||||
<p dir="auto">!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~</p>
|
||||
06_02_00__inlines__backslash_escapes__002:
|
||||
|
|
@ -4796,15 +4796,15 @@
|
|||
[foo]: /url "not a reference"
|
||||
&ouml; not a character entity</p>
|
||||
static: |-
|
||||
<p data-sourcepos="1:1-9:51" dir="auto">*not emphasized*
|
||||
<p data-sourcepos="1:1-9:30" dir="auto">*not emphasized*
|
||||
<br/> not a tag
|
||||
<a data-sourcepos="1:1-1:18" href="/foo">not a link</a>
|
||||
[not a link](/foo)
|
||||
`not code`
|
||||
1. not a list
|
||||
* not a list
|
||||
<span data-escaped-char>#</span> not a heading
|
||||
<span data-escaped-char data-sourcepos="7:1-7:2">#</span> not a heading
|
||||
[foo]: /url "not a reference"
|
||||
<span data-escaped-char>&</span>ouml; not a character entity</p>
|
||||
<span data-escaped-char data-sourcepos="9:1-9:2">&</span>ouml; not a character entity</p>
|
||||
wysiwyg: |-
|
||||
<p dir="auto">*not emphasized*
|
||||
<br/> not a tag
|
||||
|
|
@ -5921,7 +5921,7 @@
|
|||
canonical: |
|
||||
<p>foo <em>_</em></p>
|
||||
static: |-
|
||||
<p data-sourcepos="1:1-1:29" dir="auto">foo <em data-sourcepos="1:5-1:29">_</em></p>
|
||||
<p data-sourcepos="1:1-1:8" dir="auto">foo <em data-sourcepos="1:5-1:8">_</em></p>
|
||||
wysiwyg: |-
|
||||
<p dir="auto">foo <em>_</em></p>
|
||||
06_05_00__inlines__emphasis_and_strong_emphasis__100:
|
||||
|
|
@ -5942,7 +5942,7 @@
|
|||
canonical: |
|
||||
<p>foo <strong>_</strong></p>
|
||||
static: |-
|
||||
<p data-sourcepos="1:1-1:31" dir="auto">foo <strong data-sourcepos="1:5-1:31">_</strong></p>
|
||||
<p data-sourcepos="1:1-1:10" dir="auto">foo <strong data-sourcepos="1:5-1:10">_</strong></p>
|
||||
wysiwyg: |-
|
||||
<p dir="auto">foo <strong>_</strong></p>
|
||||
06_05_00__inlines__emphasis_and_strong_emphasis__103:
|
||||
|
|
@ -6236,7 +6236,7 @@
|
|||
canonical: |
|
||||
<p>[link](<foo>)</p>
|
||||
static: |-
|
||||
<p data-sourcepos="1:1-1:14" dir="auto"><a data-sourcepos="1:1-1:19" href="%3Cfoo%3E">link</a></p>
|
||||
<p data-sourcepos="1:1-1:14" dir="auto">[link](<foo>)</p>
|
||||
wysiwyg: |-
|
||||
<p dir="auto">[link](<foo>)</p>
|
||||
06_07_00__inlines__links__011:
|
||||
|
|
@ -6631,7 +6631,7 @@
|
|||
canonical: |
|
||||
<p>[bar][foo!]</p>
|
||||
static: |-
|
||||
<p data-sourcepos="1:1-1:33" dir="auto">[bar][foo<span data-escaped-char>!</span>]</p>
|
||||
<p data-sourcepos="1:1-1:12" dir="auto">[bar][foo<span data-escaped-char data-sourcepos="1:10-1:11">!</span>]</p>
|
||||
wysiwyg: |-
|
||||
<p dir="auto">[bar][foo!]</p>
|
||||
<pre>[foo!]: /url</pre>
|
||||
|
|
@ -7035,7 +7035,7 @@
|
|||
canonical: |
|
||||
<p>!<a href="/url" title="title">foo</a></p>
|
||||
static: |-
|
||||
<p data-sourcepos="1:1-1:28" dir="auto"><span data-escaped-char>!</span><a data-sourcepos="1:24-1:28" href="/url" title="title">foo</a></p>
|
||||
<p data-sourcepos="1:1-1:7" dir="auto"><span data-escaped-char data-sourcepos="1:1-1:2">!</span><a data-sourcepos="1:3-1:7" href="/url" title="title">foo</a></p>
|
||||
wysiwyg: |-
|
||||
<p dir="auto">!<a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a></p>
|
||||
<pre>[foo]: /url "title"</pre>
|
||||
|
|
@ -7127,7 +7127,7 @@
|
|||
canonical: |
|
||||
<p><foo+@bar.example.com></p>
|
||||
static: |-
|
||||
<p data-sourcepos="1:1-1:23" dir="auto"><<a href="mailto:foo+@bar.example.com">foo+@bar.example.com</a>></p>
|
||||
<p data-sourcepos="1:1-1:23" dir="auto"><foo+@bar.example.com></p>
|
||||
wysiwyg: |-
|
||||
<p dir="auto"><<a target="_blank" rel="noopener noreferrer nofollow" href="mailto:foo+@bar.example.com">foo+@bar.example.com</a>></p>
|
||||
06_09_00__inlines__autolinks__014:
|
||||
|
|
|
|||
|
|
@ -238,7 +238,144 @@
|
|||
<h1 class="title">GitLab Flavored Markdown Internal Extensions</h1>
|
||||
<div class="version">Version alpha</div>
|
||||
|
||||
|
||||
<ul class="section-nav">
|
||||
<li>
|
||||
<a href="#preliminaries">Preliminaries</a><ul>
|
||||
<li><a href="#characters-and-lines">Characters and lines</a></li>
|
||||
<li><a href="#tabs">Tabs</a></li>
|
||||
<li><a href="#insecure-characters">Insecure characters</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<a href="#blocks-and-inlines">Blocks and inlines</a><ul>
|
||||
<li><a href="#precedence">Precedence</a></li>
|
||||
<li><a href="#container-blocks-and-leaf-blocks">Container blocks and leaf blocks</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<a href="#leaf-blocks">Leaf blocks</a><ul>
|
||||
<li><a href="#thematic-breaks">Thematic breaks</a></li>
|
||||
<li><a href="#atx-headings">ATX headings</a></li>
|
||||
<li><a href="#setext-headings">Setext headings</a></li>
|
||||
<li><a href="#indented-code-blocks">Indented code blocks</a></li>
|
||||
<li><a href="#fenced-code-blocks">Fenced code blocks</a></li>
|
||||
<li><a href="#html-blocks">HTML blocks</a></li>
|
||||
<li><a href="#link-reference-definitions">Link reference definitions</a></li>
|
||||
<li><a href="#paragraphs">Paragraphs</a></li>
|
||||
<li><a href="#blank-lines">Blank lines</a></li>
|
||||
<li><a href="#tables-extension">Tables (extension)</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<a href="#container-blocks">Container blocks</a><ul>
|
||||
<li><a href="#block-quotes">Block quotes</a></li>
|
||||
<li>
|
||||
<a href="#list-items">List items</a><ul><li><a href="#motivation">Motivation</a></li></ul>
|
||||
</li>
|
||||
<li><a href="#task-list-items-extension">Task list items (extension)</a></li>
|
||||
<li><a href="#lists">Lists</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<a href="#inlines">Inlines</a><ul>
|
||||
<li><a href="#backslash-escapes">Backslash escapes</a></li>
|
||||
<li><a href="#entity-and-numeric-character-references">Entity and numeric character references</a></li>
|
||||
<li><a href="#code-spans">Code spans</a></li>
|
||||
<li><a href="#emphasis-and-strong-emphasis">Emphasis and strong emphasis</a></li>
|
||||
<li><a href="#strikethrough-extension">Strikethrough (extension)</a></li>
|
||||
<li><a href="#links">Links</a></li>
|
||||
<li><a href="#images">Images</a></li>
|
||||
<li><a href="#autolinks">Autolinks</a></li>
|
||||
<li><a href="#autolinks-extension">Autolinks (extension)</a></li>
|
||||
<li><a href="#raw-html">Raw HTML</a></li>
|
||||
<li><a href="#disallowed-raw-html-extension">Disallowed Raw HTML (extension)</a></li>
|
||||
<li><a href="#hard-line-breaks">Hard line breaks</a></li>
|
||||
<li><a href="#soft-line-breaks">Soft line breaks</a></li>
|
||||
<li><a href="#textual-content">Textual content</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<a href="#gitlab-official-specification-markdown">GitLab Official Specification Markdown</a><ul>
|
||||
<li><a href="#task-list-items">Task list items</a></li>
|
||||
<li><a href="#front-matter">Front matter</a></li>
|
||||
<li><a href="#table-of-contents">Table of contents</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<a href="#gitlab-internal-extension-markdown">GitLab Internal Extension Markdown</a><ul>
|
||||
<li><a href="#audio">Audio</a></li>
|
||||
<li><a href="#video">Video</a></li>
|
||||
<li><a href="#markdown-preview-api-request-overrides">Markdown Preview API Request Overrides</a></li>
|
||||
<li>
|
||||
<a href="#migrated-golden-master-examples">Migrated golden master examples</a><ul>
|
||||
<li><a href="#attachment_image_for_group">attachment_image_for_group</a></li>
|
||||
<li><a href="#attachment_image_for_project">attachment_image_for_project</a></li>
|
||||
<li><a href="#attachment_image_for_project_wiki">attachment_image_for_project_wiki</a></li>
|
||||
<li><a href="#attachment_link_for_group">attachment_link_for_group</a></li>
|
||||
<li><a href="#attachment_link_for_project">attachment_link_for_project</a></li>
|
||||
<li><a href="#attachment_link_for_project_wiki">attachment_link_for_project_wiki</a></li>
|
||||
<li><a href="#attachment_link_for_group_wiki">attachment_link_for_group_wiki</a></li>
|
||||
<li><a href="#audio-1">audio</a></li>
|
||||
<li><a href="#audio_and_video_in_lists">audio_and_video_in_lists</a></li>
|
||||
<li><a href="#blockquote">blockquote</a></li>
|
||||
<li><a href="#bold">bold</a></li>
|
||||
<li><a href="#bullet_list_style_1">bullet_list_style_1</a></li>
|
||||
<li><a href="#bullet_list_style_2">bullet_list_style_2</a></li>
|
||||
<li><a href="#bullet_list_style_3">bullet_list_style_3</a></li>
|
||||
<li><a href="#code_block_javascript">code_block_javascript</a></li>
|
||||
<li><a href="#code_block_plaintext">code_block_plaintext</a></li>
|
||||
<li><a href="#code_block_unknown">code_block_unknown</a></li>
|
||||
<li><a href="#color_chips">color_chips</a></li>
|
||||
<li><a href="#description_list">description_list</a></li>
|
||||
<li><a href="#details">details</a></li>
|
||||
<li><a href="#diagram_kroki_nomnoml">diagram_kroki_nomnoml</a></li>
|
||||
<li><a href="#diagram_plantuml">diagram_plantuml</a></li>
|
||||
<li><a href="#diagram_plantuml_unicode">diagram_plantuml_unicode</a></li>
|
||||
<li><a href="#div">div</a></li>
|
||||
<li><a href="#emoji">emoji</a></li>
|
||||
<li><a href="#emphasis">emphasis</a></li>
|
||||
<li><a href="#figure">figure</a></li>
|
||||
<li><a href="#footnotes">footnotes</a></li>
|
||||
<li><a href="#frontmatter_json">frontmatter_json</a></li>
|
||||
<li><a href="#frontmatter_toml">frontmatter_toml</a></li>
|
||||
<li><a href="#frontmatter_yaml">frontmatter_yaml</a></li>
|
||||
<li><a href="#hard_break">hard_break</a></li>
|
||||
<li><a href="#headings">headings</a></li>
|
||||
<li><a href="#horizontal_rule">horizontal_rule</a></li>
|
||||
<li><a href="#html_marks">html_marks</a></li>
|
||||
<li><a href="#image">image</a></li>
|
||||
<li><a href="#inline_code">inline_code</a></li>
|
||||
<li><a href="#inline_diff">inline_diff</a></li>
|
||||
<li><a href="#label">label</a></li>
|
||||
<li><a href="#link">link</a></li>
|
||||
<li><a href="#math">math</a></li>
|
||||
<li><a href="#ordered_list">ordered_list</a></li>
|
||||
<li><a href="#ordered_list_with_start_order">ordered_list_with_start_order</a></li>
|
||||
<li><a href="#ordered_task_list">ordered_task_list</a></li>
|
||||
<li><a href="#ordered_task_list_with_order">ordered_task_list_with_order</a></li>
|
||||
<li><a href="#reference_for_project_wiki">reference_for_project_wiki</a></li>
|
||||
<li><a href="#strike">strike</a></li>
|
||||
<li><a href="#table">table</a></li>
|
||||
<li><a href="#table_of_contents">table_of_contents</a></li>
|
||||
<li><a href="#task_list">task_list</a></li>
|
||||
<li><a href="#video-1">video</a></li>
|
||||
<li><a href="#word_break">word_break</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li><a href="#image-attributes">Image Attributes</a></li>
|
||||
<li><a href="#footnotes-1">Footnotes</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<a href="#gfm-undocumented-extensions-and-more-robust-test">GFM undocumented extensions and more robust test</a><ul>
|
||||
<li><a href="#footnotes-2">Footnotes</a></li>
|
||||
<li><a href="#when-a-footnote-is-used-multiple-times-we-insert-multiple-backrefs">When a footnote is used multiple times, we insert multiple backrefs.</a></li>
|
||||
<li><a href="#footnote-reference-labels-are-href-escaped">Footnote reference labels are href escaped</a></li>
|
||||
<li><a href="#interop">Interop</a></li>
|
||||
<li><a href="#task-lists">Task lists</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
<h1 data-sourcepos="3:1-3:15" dir="auto">
|
||||
<a href="#preliminaries" aria-hidden="true" class="anchor" id="user-content-preliminaries"></a>Preliminaries</h1>
|
||||
<h2 data-sourcepos="5:1-5:23" dir="auto">
|
||||
|
|
@ -6721,7 +6858,7 @@ not have their usual Markdown meanings:</p>
|
|||
<span id="LC6" class="line" lang="plaintext">\* not a list</span>
|
||||
<span id="LC7" class="line" lang="plaintext">\# not a heading</span>
|
||||
<span id="LC8" class="line" lang="plaintext">\[foo]: /url "not a reference"</span>
|
||||
<span id="LC9" class="line" lang="plaintext">\ö not a character entity</span></code></pre>
|
||||
<span id="LC9" class="line" lang="plaintext">\&ouml; not a character entity</span></code></pre>
|
||||
<copy-code></copy-code>
|
||||
</div>
|
||||
<div class="gl-relative markdown-code-block js-markdown-code">
|
||||
|
|
|
|||
|
|
@ -29,6 +29,9 @@ module Banzai
|
|||
allowlist[:attributes]['pre'] = %w[data-canonical-lang data-lang-params
|
||||
data-math-style data-mermaid-style data-kroki-style]
|
||||
|
||||
# Allow data-escaped-chars span attribute
|
||||
allowlist[:attributes]['span'].push('data-escaped-chars')
|
||||
|
||||
# Allow html5 details/summary elements
|
||||
allowlist[:elements].push('details')
|
||||
allowlist[:elements].push('summary')
|
||||
|
|
|
|||
|
|
@ -0,0 +1,76 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Banzai
|
||||
module Filter
|
||||
# In order to allow a user to short-circuit our reference shortcuts
|
||||
# (such as # or !), the user should be able to escape them, like \#.
|
||||
# The parser does surround escaped chars with `<span data-escaped-char></span>`
|
||||
# which will short-circuit our references. However it does that for all
|
||||
# escaped chars.
|
||||
# So while a label specified as `~c_bug\_` is valid, our label parsing
|
||||
# does not understand `~c_bug<span data-escaped-char>_</span>`
|
||||
#
|
||||
# This filter strips out any `<span data-escaped-char>` that is not one
|
||||
# of our references.
|
||||
#
|
||||
# TODO: Parsing of references should be fixed to remove need of this filter.
|
||||
# https://gitlab.com/gitlab-org/gitlab/-/issues/457556
|
||||
class EscapedCharFilter < HTML::Pipeline::Filter
|
||||
# Table of characters that need this special handling. It consists of the
|
||||
# GitLab special reference characters.
|
||||
REFERENCE_CHARS = %w[$ % # & @ ! ~ ^].freeze
|
||||
|
||||
XPATH_ESCAPED_CHAR = Gitlab::Utils::Nokogiri.css_to_xpath('span[data-escaped-char]').freeze
|
||||
|
||||
def call
|
||||
return doc unless MarkdownFilter.glfm_markdown?(context)
|
||||
|
||||
remove_unnecessary_escapes
|
||||
|
||||
doc
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def remove_unnecessary_escapes
|
||||
doc.xpath(XPATH_ESCAPED_CHAR).each do |node|
|
||||
escaped_item = REFERENCE_CHARS.find { |item| item == node.content }
|
||||
|
||||
# Escaped reference character, so leave as is. This is so that our normal
|
||||
# reference processing can be short-circuited by escaping the reference,
|
||||
# like \@username
|
||||
next if escaped_item
|
||||
|
||||
merge_adjacent_text_nodes(node)
|
||||
end
|
||||
end
|
||||
|
||||
def text_node?(node)
|
||||
node.is_a?(Nokogiri::XML::Text)
|
||||
end
|
||||
|
||||
# Merge directly adjacent text nodes and replace existing node with
|
||||
# the merged content. For example, the document could be
|
||||
# #(Text "~c_bug"), #(Element:0x57724 { name = "span" }, children = [ #(Text "_")] })]
|
||||
# Our reference processing requires a single string of text to match against. So even if it was
|
||||
# #(Text "~c_bug"), #(Text "_")
|
||||
# it wouldn't match. Merging together will give
|
||||
# #(Text "~c_bug_")
|
||||
def merge_adjacent_text_nodes(node)
|
||||
content = CGI.escapeHTML(node.content)
|
||||
|
||||
if text_node?(node.previous)
|
||||
content.prepend(node.previous.to_html)
|
||||
node.previous.remove
|
||||
end
|
||||
|
||||
if text_node?(node.next)
|
||||
content.concat(node.next.to_html)
|
||||
node.next.remove
|
||||
end
|
||||
|
||||
node.replace(content)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -11,6 +11,7 @@ module Banzai
|
|||
class GlfmMarkdown < Base
|
||||
OPTIONS = {
|
||||
autolink: true,
|
||||
escaped_char_spans: true,
|
||||
footnotes: true,
|
||||
full_info_string: true,
|
||||
github_pre_lang: true,
|
||||
|
|
|
|||
|
|
@ -2,9 +2,14 @@
|
|||
|
||||
module Banzai
|
||||
module Filter
|
||||
# See comments in MarkdownPreEscapeFilter for details on strategy
|
||||
class MarkdownPostEscapeFilter < HTML::Pipeline::Filter
|
||||
LITERAL_KEYWORD = MarkdownPreEscapeFilter::LITERAL_KEYWORD
|
||||
# TODO: This is now a legacy filter, and is only used with the Ruby parser.
|
||||
# The current markdown parser now properly handles escaping characters.
|
||||
# The Ruby parser is now only for benchmarking purposes.
|
||||
# issue: https://gitlab.com/gitlab-org/gitlab/-/issues/454601
|
||||
#
|
||||
# See comments in MarkdownPreEscapeLegacyFilter for details on strategy
|
||||
class MarkdownPostEscapeLegacyFilter < HTML::Pipeline::Filter
|
||||
LITERAL_KEYWORD = MarkdownPreEscapeLegacyFilter::LITERAL_KEYWORD
|
||||
LITERAL_REGEX = %r{#{LITERAL_KEYWORD}-(.*?)-#{LITERAL_KEYWORD}}
|
||||
NOT_LITERAL_REGEX = %r{#{LITERAL_KEYWORD}-((%5C|\\).+?)-#{LITERAL_KEYWORD}}
|
||||
SPAN_REGEX = %r{<span data-escaped-char>(.*?)</span>}
|
||||
|
|
@ -14,6 +19,7 @@ module Banzai
|
|||
XPATH_ESCAPED_CHAR = Gitlab::Utils::Nokogiri.css_to_xpath('span[data-escaped-char]').freeze
|
||||
|
||||
def call
|
||||
return doc if MarkdownFilter.glfm_markdown?(context)
|
||||
return doc unless result[:escaped_literals]
|
||||
|
||||
new_html = unescaped_literals(doc.to_html)
|
||||
|
|
@ -32,11 +38,13 @@ module Banzai
|
|||
# For any literals that actually didn't get escape processed
|
||||
# (for example in code blocks), remove the special sequence.
|
||||
def unescaped_literals(html)
|
||||
html.gsub!(NOT_LITERAL_REGEX) do |match|
|
||||
html.gsub!(NOT_LITERAL_REGEX) do |_match|
|
||||
last_match = ::Regexp.last_match(1)
|
||||
last_match_token = last_match.sub('%5C', '\\')
|
||||
|
||||
escaped_item = Banzai::Filter::MarkdownPreEscapeFilter::ESCAPABLE_CHARS.find { |item| item[:token] == last_match_token }
|
||||
escaped_item = Banzai::Filter::MarkdownPreEscapeLegacyFilter::ESCAPABLE_CHARS.find do |item|
|
||||
item[:token] == last_match_token
|
||||
end
|
||||
escaped_char = escaped_item ? escaped_item[:escaped] : last_match
|
||||
|
||||
escaped_char = escaped_char.sub('\\', '%5C') if last_match.start_with?('%5C')
|
||||
|
|
@ -50,11 +58,13 @@ module Banzai
|
|||
# Replace any left over literal sequences with `span` so that our
|
||||
# reference processing is short-circuited
|
||||
def add_spans(html)
|
||||
html.gsub!(LITERAL_REGEX) do |match|
|
||||
html.gsub!(LITERAL_REGEX) do |_match|
|
||||
last_match = ::Regexp.last_match(1)
|
||||
last_match_token = "\\#{last_match}"
|
||||
|
||||
escaped_item = Banzai::Filter::MarkdownPreEscapeFilter::ESCAPABLE_CHARS.find { |item| item[:token] == last_match_token }
|
||||
escaped_item = Banzai::Filter::MarkdownPreEscapeLegacyFilter::ESCAPABLE_CHARS.find do |item|
|
||||
item[:token] == last_match_token
|
||||
end
|
||||
escaped_char = escaped_item ? escaped_item[:char] : ::Regexp.last_match(1)
|
||||
|
||||
"<span data-escaped-char>#{escaped_char}</span>"
|
||||
|
|
@ -66,18 +76,26 @@ module Banzai
|
|||
# Since literals are converted in links, we need to remove any surrounding `span`.
|
||||
def remove_spans_in_certain_attributes
|
||||
doc.xpath(XPATH_A).each do |node|
|
||||
node.attributes['href'].value = node.attributes['href'].value.gsub(SPAN_REGEX, '\1') if node.attributes['href']
|
||||
node.attributes['title'].value = node.attributes['title'].value.gsub(SPAN_REGEX, '\1') if node.attributes['title']
|
||||
if node.attributes['href']
|
||||
node.attributes['href'].value = node.attributes['href'].value.gsub(SPAN_REGEX, '\1')
|
||||
end
|
||||
|
||||
if node.attributes['title']
|
||||
node.attributes['title'].value = node.attributes['title'].value.gsub(SPAN_REGEX, '\1')
|
||||
end
|
||||
end
|
||||
|
||||
doc.xpath(XPATH_LANG_TAG).each do |node|
|
||||
node.attributes['lang'].value = node.attributes['lang'].value.gsub(SPAN_REGEX, '\1') if node.attributes['lang']
|
||||
if node.attributes['lang']
|
||||
node.attributes['lang'].value = node.attributes['lang'].value.gsub(SPAN_REGEX, '\1')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def remove_unnecessary_escapes
|
||||
doc.xpath(XPATH_ESCAPED_CHAR).each do |node|
|
||||
escaped_item = Banzai::Filter::MarkdownPreEscapeFilter::ESCAPABLE_CHARS.find { |item| item[:char] == node.content }
|
||||
escaped_item =
|
||||
Banzai::Filter::MarkdownPreEscapeLegacyFilter::ESCAPABLE_CHARS.find { |item| item[:char] == node.content }
|
||||
|
||||
next unless escaped_item
|
||||
|
||||
|
|
@ -2,6 +2,11 @@
|
|||
|
||||
module Banzai
|
||||
module Filter
|
||||
# TODO: This is now a legacy filter, and is only used with the Ruby parser.
|
||||
# The current markdown parser now properly handles escaping characters.
|
||||
# The Ruby parser is now only for benchmarking purposes.
|
||||
# issue: https://gitlab.com/gitlab-org/gitlab/-/issues/454601
|
||||
#
|
||||
# In order to allow a user to short-circuit our reference shortcuts
|
||||
# (such as # or !), the user should be able to escape them, like \#.
|
||||
# CommonMark supports this, however it removes all information about
|
||||
|
|
@ -25,9 +30,9 @@ module Banzai
|
|||
#
|
||||
# https://spec.commonmark.org/0.29/#backslash-escapes
|
||||
#
|
||||
# This filter does the initial surrounding, and MarkdownPostEscapeFilter
|
||||
# This filter does the initial surrounding, and MarkdownPostEscapeLegacyFilter
|
||||
# does the conversion into span tags.
|
||||
class MarkdownPreEscapeFilter < HTML::Pipeline::TextFilter
|
||||
class MarkdownPreEscapeLegacyFilter < HTML::Pipeline::TextFilter
|
||||
# Table of characters that need this special handling. It consists of the
|
||||
# GitLab special reference characters and special LaTeX characters.
|
||||
#
|
||||
|
|
@ -41,7 +46,7 @@ module Banzai
|
|||
# original escaped version, `\$`. However if we detect `cmliteral-+a-cmliteral`,
|
||||
# then we know markdown considered it an escaped character, and we should replace it
|
||||
# with the non-escaped version, `$`.
|
||||
# See the MarkdownPostEscapeFilter for how this is done.
|
||||
# See the MarkdownPostEscapeLegacyFilter for how this is done.
|
||||
ESCAPABLE_CHARS = [
|
||||
{ char: '$', escaped: '\$', token: '\+a', reference: true, latex: true },
|
||||
{ char: '%', escaped: '\%', token: '\+b', reference: true, latex: true },
|
||||
|
|
@ -61,6 +66,8 @@ module Banzai
|
|||
LITERAL_KEYWORD = 'cmliteral'
|
||||
|
||||
def call
|
||||
return @text if MarkdownFilter.glfm_markdown?(context)
|
||||
|
||||
@text.gsub(ASCII_PUNCTUATION) do |match|
|
||||
# The majority of markdown does not have literals. If none
|
||||
# are found, we can bypass the post filter
|
||||
|
|
@ -16,6 +16,7 @@ module Banzai
|
|||
# Must always be before the SanitizationFilter to prevent XSS attacks
|
||||
Filter::SpacedLinkFilter,
|
||||
Filter::SanitizationFilter,
|
||||
Filter::EscapedCharFilter,
|
||||
Filter::KrokiFilter,
|
||||
Filter::GollumTagsFilter,
|
||||
Filter::AssetProxyFilter,
|
||||
|
|
|
|||
|
|
@ -10,12 +10,12 @@ module Banzai
|
|||
# markdown processing
|
||||
def self.filters
|
||||
FilterArray[
|
||||
Filter::MarkdownPreEscapeFilter,
|
||||
Filter::MarkdownPreEscapeLegacyFilter,
|
||||
Filter::DollarMathPreFilter,
|
||||
Filter::BlockquoteFenceFilter,
|
||||
Filter::MarkdownFilter,
|
||||
Filter::DollarMathPostFilter,
|
||||
Filter::MarkdownPostEscapeFilter
|
||||
Filter::MarkdownPostEscapeLegacyFilter
|
||||
]
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -106,8 +106,8 @@ module Gitlab
|
|||
end
|
||||
|
||||
def remaining
|
||||
enqueued = Sidekiq::Queue.new(self.queue)
|
||||
Sidekiq::Client.via(sidekiq_redis_pool) do
|
||||
enqueued = Sidekiq::Queue.new(self.queue)
|
||||
scheduled = Sidekiq::ScheduledSet.new
|
||||
|
||||
[enqueued, scheduled].sum do |set|
|
||||
|
|
@ -119,8 +119,8 @@ module Gitlab
|
|||
end
|
||||
|
||||
def exists?(migration_class, additional_queues = [])
|
||||
enqueued = Sidekiq::Queue.new(self.queue)
|
||||
Sidekiq::Client.via(sidekiq_redis_pool) do
|
||||
enqueued = Sidekiq::Queue.new(self.queue)
|
||||
scheduled = Sidekiq::ScheduledSet.new
|
||||
|
||||
enqueued_job?([enqueued, scheduled], migration_class)
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,17 @@
# frozen_string_literal: true

module Gitlab
  module Middleware
    class SidekiqShardAwarenessValidation
      def initialize(app)
        @app = app
      end

      def call(env)
        ::Gitlab::SidekiqSharding::Validator.enabled do
          @app.call(env)
        end
      end
    end
  end
end

@ -11,6 +11,7 @@ module Gitlab
|
|||
lambda do |chain|
|
||||
# Size limiter should be placed at the top
|
||||
chain.add ::Gitlab::SidekiqMiddleware::SizeLimiter::Server
|
||||
chain.add ::Gitlab::SidekiqMiddleware::ShardAwarenessValidator
|
||||
chain.add ::Gitlab::SidekiqMiddleware::Monitor
|
||||
|
||||
# Labkit wraps the job in the `Labkit::Context` resurrected from
|
||||
|
|
|
|||
|
|
@ -12,7 +12,11 @@ module Gitlab
|
|||
rescue Gitlab::SidekiqDaemon::Monitor::CancelledError
|
||||
# push job to DeadSet
|
||||
payload = ::Sidekiq.dump_json(job)
|
||||
::Sidekiq::DeadSet.new.kill(payload, notify_failure: false)
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
# DeadSet is shard-local. It is correct to directly use Sidekiq.redis rather than to
|
||||
# route to another shard's DeadSet.
|
||||
::Sidekiq::DeadSet.new.kill(payload, notify_failure: false)
|
||||
end
|
||||
|
||||
# ignore retries
|
||||
raise ::Sidekiq::JobRetry::Skip
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,15 @@
# frozen_string_literal: true

module Gitlab
  module SidekiqMiddleware
    class ShardAwarenessValidator
      def call(_worker, _job, _queue)
        # Scopes shard-awareness validation to Gitlab-logic since Sidekiq
        # internally uses Sidekiq.redis for job fetching, cron polling, heartbeats, etc
        ::Gitlab::SidekiqSharding::Validator.enabled do
          yield
        end
      end
    end
  end
end

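Read together with the `Gitlab::SidekiqSharding::Validator` module added below, these middlewares mean that Sidekiq Redis access made from GitLab application code must either go through `Sidekiq::Client.via` or be explicitly exempted. A rough usage sketch based only on the APIs visible in this commit (the queue name and the pool variable are placeholders):

```ruby
Gitlab::SidekiqSharding::Validator.enabled do
  # An unrouted Sidekiq API call inside the validated scope is reported as
  # an UnroutedSidekiqApiError by the validator patch.
  Sidekiq::Queue.new('default').size

  # Routing through .via is allowed, because the patched Sidekiq::Client.via
  # sets the via-scope flag for the duration of the block.
  Sidekiq::Client.via(shard_redis_pool) do # `shard_redis_pool` is a placeholder
    Sidekiq::Queue.new('default').size
  end

  # Shard-local or idempotent operations can opt out explicitly.
  Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
    Sidekiq::ScheduledSet.new.size
  end
end
```
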
@ -25,8 +25,9 @@ module Gitlab
|
|||
end
|
||||
|
||||
def route(klass)
|
||||
return yield unless enabled?
|
||||
return yield unless klass.respond_to?(:get_sidekiq_options)
|
||||
unless enabled? && klass.respond_to?(:get_sidekiq_options)
|
||||
return Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls { yield }
|
||||
end
|
||||
|
||||
store_name = klass.get_sidekiq_options['store']
|
||||
redis_name, shard_redis_pool = get_shard_instance(store_name)
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,70 @@
# frozen_string_literal: true

module Gitlab
  module SidekiqSharding
    module Validator
      UnroutedSidekiqApiError = Class.new(StandardError)

      module Client
        extend ActiveSupport::Concern

        class_methods do
          # Sets inside_sidekiq_via_scope state to true to avoid error when validation is called
          def via(pool)
            in_via_state = Thread.current[:inside_sidekiq_via_scope]
            Thread.current[:inside_sidekiq_via_scope] = true

            super(pool)
          ensure
            Thread.current[:inside_sidekiq_via_scope] = in_via_state
          end
        end
      end

      class << self
        # Used to allow Sidekiq API or Sidekiq.redis for spec set-ups and components
        # that does not require sharding such as CronJobs (performed using Sidekiq.redis).
        def allow_unrouted_sidekiq_calls
          currently_allowed = Thread.current[:allow_unrouted_sidekiq_calls]
          Thread.current[:allow_unrouted_sidekiq_calls] = true

          yield
        ensure
          Thread.current[:allow_unrouted_sidekiq_calls] = currently_allowed
        end

        # This allows us to perform validation within the scope of GitLab application logic
        # without needing to modify/patch Sidekiq internals such as job fetching, cron-polling, and housekeeping.
        def enabled
          validate_sidekiq_shard_awareness = Thread.current[:validate_sidekiq_shard_awareness]
          Thread.current[:validate_sidekiq_shard_awareness] = true

          yield
        ensure
          Thread.current[:validate_sidekiq_shard_awareness] = validate_sidekiq_shard_awareness
        end
      end

      # This is used to patch the Sidekiq::RedisClientAdapter to validate all Redis commands are routed
      # rubocop:disable Style/MissingRespondToMissing -- already defined in the module we are patching
      def method_missing(*args, &block)
        validate! if Thread.current[:validate_sidekiq_shard_awareness]

        super(*args, &block)
      end
      ruby2_keywords :method_missing if respond_to?(:ruby2_keywords, true)
      # rubocop:enable Style/MissingRespondToMissing

      private

      def validate!
        return if Thread.current[:allow_unrouted_sidekiq_calls]
        return if Thread.current[:inside_sidekiq_via_scope]

        Gitlab::ErrorTracking.track_and_raise_for_dev_exception(
          UnroutedSidekiqApiError.new("Sidekiq Redis called outside a .via block")
        )
      end
    end
  end
end
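The module above leans on a save/set/yield/restore pattern around thread-locals; a plain-Ruby sketch of that pattern in isolation (the flag names here are illustrative, not the real keys):

module ScopedFlag
  # Sets a thread-local flag for the duration of the block and restores the
  # previous value afterwards, so nested scopes unwind correctly.
  def self.with_flag(key)
    previous = Thread.current[key]
    Thread.current[key] = true
    yield
  ensure
    Thread.current[key] = previous
  end
end

ScopedFlag.with_flag(:validate) do
  ScopedFlag.with_flag(:allow_unrouted) do
    Thread.current[:allow_unrouted] # => true
  end
  Thread.current[:allow_unrouted]   # => nil, restored on exit
  Thread.current[:validate]         # => true
end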
@@ -8,10 +8,14 @@ module Gitlab
      # for monitoring.
      queues = ::Gitlab::SidekiqConfig.routing_queues
      if queues.any?
        Sidekiq.redis do |conn|
          conn.multi do |multi|
            multi.del('queues')
            multi.sadd('queues', queues)
        # Allow unrouted calls as this operation is idempotent and can be safely performed
        # by all Sidekiq processes
        SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
          Sidekiq.redis do |conn|
            conn.multi do |multi|
              multi.del('queues')
              multi.sadd('queues', queues)
            end
          end
        end
      end
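The wrapped block above is safe for any process to run because DEL followed by SADD inside one MULTI always leaves the set equal to the routing queues; a sketch of that idempotency using the plain redis gem (an assumption made only for this illustration):

require 'redis'

redis = Redis.new
queues = %w[default mailers]

# Re-registering the queue set is idempotent: the transaction rebuilds the
# 'queues' set from scratch no matter how many processes execute it.
redis.multi do |multi|
  multi.del('queues')
  multi.sadd('queues', queues)
end

redis.smembers('queues').sort # => ["default", "mailers"]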
@@ -6,26 +6,33 @@ namespace :gitlab do
    File.write(path, banner + YAML.dump(object).gsub(/ *$/m, ''))
  end

  # TODO: make shard-aware. See https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/3430
  namespace :migrate_jobs do
    desc 'GitLab | Sidekiq | Migrate jobs in the scheduled set to new queue names'
    task schedule: :environment do
      ::Gitlab::SidekiqMigrateJobs
        .new(::Gitlab::SidekiqConfig.worker_queue_mappings, logger: Logger.new($stdout))
        .migrate_set('schedule')
      Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
        ::Gitlab::SidekiqMigrateJobs
          .new(::Gitlab::SidekiqConfig.worker_queue_mappings, logger: Logger.new($stdout))
          .migrate_set('schedule')
      end
    end

    desc 'GitLab | Sidekiq | Migrate jobs in the retry set to new queue names'
    task retry: :environment do
      ::Gitlab::SidekiqMigrateJobs
        .new(::Gitlab::SidekiqConfig.worker_queue_mappings, logger: Logger.new($stdout))
        .migrate_set('retry')
      Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
        ::Gitlab::SidekiqMigrateJobs
          .new(::Gitlab::SidekiqConfig.worker_queue_mappings, logger: Logger.new($stdout))
          .migrate_set('retry')
      end
    end

    desc 'GitLab | Sidekiq | Migrate jobs in queues outside of routing rules'
    task queued: :environment do
      ::Gitlab::SidekiqMigrateJobs
        .new(::Gitlab::SidekiqConfig.worker_queue_mappings, logger: Logger.new($stdout))
        .migrate_queues
      Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
        ::Gitlab::SidekiqMigrateJobs
          .new(::Gitlab::SidekiqConfig.worker_queue_mappings, logger: Logger.new($stdout))
          .migrate_queues
      end
    end
  end
@@ -11,7 +11,7 @@ gem 'capybara', '~> 3.40.0'
gem 'capybara-screenshot', '~> 1.0.26'
gem 'rake', '~> 13', '>= 13.2.1'
gem 'rspec', '~> 3.13'
gem 'selenium-webdriver', '= 4.19.0'
gem 'selenium-webdriver', '= 4.20.1'
gem 'airborne', '~> 0.3.7', require: false # airborne is messing with rspec sandboxed mode so not requiring by default
gem 'rest-client', '~> 2.1.0'
gem 'rspec_junit_formatter', '~> 0.6.0'
@@ -304,7 +304,7 @@ GEM
    sawyer (0.9.2)
      addressable (>= 2.3.5)
      faraday (>= 0.17.3, < 3)
    selenium-webdriver (4.19.0)
    selenium-webdriver (4.20.1)
      base64 (~> 0.2)
      rexml (~> 3.2, >= 3.2.5)
      rubyzip (>= 1.2.2, < 3.0)
@@ -379,7 +379,7 @@ DEPENDENCIES
  rspec-parameterized (~> 1.0.0)
  rspec_junit_formatter (~> 0.6.0)
  ruby-debug-ide (~> 0.7.3)
  selenium-webdriver (= 4.19.0)
  selenium-webdriver (= 4.20.1)
  slack-notifier (~> 2.4)
  terminal-table (~> 3.0.2)
  warning (~> 1.3)
@@ -95,6 +95,9 @@ RSpec.describe 'Database schema', feature_category: :database do
    # merge_request_diff_commits_b5377a7a34 is the temporary table for the merge_request_diff_commits partitioning
    # backfill. It will get foreign keys after the partitioning is finished.
    merge_request_diff_commits_b5377a7a34: %w[merge_request_diff_id commit_author_id committer_id],
    # merge_request_diff_files_99208b8fac is the temporary table for the merge_request_diff_files partitioning
    # backfill. It will get foreign keys after the partitioning is finished.
    merge_request_diff_files_99208b8fac: %w[merge_request_diff_id],
    namespaces: %w[owner_id parent_id],
    namespace_descendants: %w[namespace_id],
    notes: %w[author_id commit_id noteable_id updated_by_id resolved_by_id confirmed_by_id discussion_id namespace_id],
@@ -0,0 +1,53 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Banzai::Filter::EscapedCharFilter, feature_category: :team_planning do
  include FilterSpecHelper

  it 'ensure we handle all the GitLab reference characters', :eager_load do
    reference_chars = ObjectSpace.each_object(Class).filter_map do |klass|
      next unless klass.included_modules.include?(Referable)
      next unless klass.respond_to?(:reference_prefix)
      next unless klass.reference_prefix.length == 1

      klass.reference_prefix
    end.compact

    reference_chars.all? do |char|
      Banzai::Filter::EscapedCharFilter::REFERENCE_CHARS.include?(char)
    end
  end

  it 'leaves reference chars untouched' do
    stub_commonmark_sourcepos_disabled

    markdown = Banzai::Filter::EscapedCharFilter::REFERENCE_CHARS.map { |char| "\\#{char}" }.join(' ')
    doc = Banzai::Filter::MarkdownFilter.new(markdown).call
    html = filter(doc).to_s

    Banzai::Filter::EscapedCharFilter::REFERENCE_CHARS.each do |item|
      char = item == '&' ? '&amp;' : item

      expect(html).to include("<span data-escaped-char>#{char}</span>")
    end
  end

  it 'removes spans for non-reference punctuation' do
    # rubocop:disable Style/StringConcatenation -- better format for escaping characters
    markdown = %q(\"\'\*\+\,\-\.\/\:\;\<\=\>\?\[\]\`\|) + %q[\(\)\\\\]
    # rubocop:enable Style/StringConcatenation

    doc = Banzai::Filter::MarkdownFilter.new(markdown).call

    expect(doc.to_s).to include('<span data-escaped-char')
    expect(filter(doc).to_s).not_to include('<span data-escaped-char')
  end

  it 'keeps html escaped text' do
    markdown = '[link](<foo\>)'
    doc = Banzai::Filter::MarkdownFilter.new(markdown).call

    expect(filter(doc).to_s).to eq '<p data-sourcepos="1:1-1:14">[link](&lt;foo&gt;)</p>'
  end
end
@ -180,6 +180,8 @@ RSpec.describe Banzai::Pipeline::FullPipeline, feature_category: :team_planning
|
|||
let_it_be(:issue) { create(:issue, project: project) }
|
||||
|
||||
it 'does not convert an escaped reference' do
|
||||
stub_commonmark_sourcepos_disabled
|
||||
|
||||
markdown = "\\#{issue.to_reference}"
|
||||
output = described_class.to_html(markdown, project: project)
|
||||
|
||||
|
|
|
|||
|
|
@ -5,17 +5,28 @@ require 'spec_helper'
|
|||
RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline, feature_category: :team_planning do
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
||||
describe 'backslash escapes', :aggregate_failures do
|
||||
# TODO: This is legacy code, and is only used with the Ruby parser.
|
||||
# The current markdown parser now handles adding data-escaped-char.
|
||||
# The Ruby parser is now only for benchmarking purposes.
|
||||
# issue: https://gitlab.com/gitlab-org/gitlab/-/issues/454601
|
||||
describe 'legacy backslash handling', :aggregate_failures do
|
||||
let_it_be(:project) { create(:project, :public) }
|
||||
let_it_be(:issue) { create(:issue, project: project) }
|
||||
let_it_be(:context) do
|
||||
{
|
||||
project: project,
|
||||
no_sourcepos: true,
|
||||
markdown_engine: Banzai::Filter::MarkdownFilter::CMARK_ENGINE
|
||||
}
|
||||
end
|
||||
|
||||
it 'converts all escapable punctuation to literals' do
|
||||
markdown = Banzai::Filter::MarkdownPreEscapeFilter::ESCAPABLE_CHARS.pluck(:escaped).join
|
||||
markdown = Banzai::Filter::MarkdownPreEscapeLegacyFilter::ESCAPABLE_CHARS.pluck(:escaped).join
|
||||
|
||||
result = described_class.call(markdown, project: project)
|
||||
result = described_class.call(markdown, context)
|
||||
output = result[:output].to_html
|
||||
|
||||
Banzai::Filter::MarkdownPreEscapeFilter::ESCAPABLE_CHARS.each do |item|
|
||||
Banzai::Filter::MarkdownPreEscapeLegacyFilter::ESCAPABLE_CHARS.each do |item|
|
||||
char = item[:char] == '&' ? '&' : item[:char]
|
||||
|
||||
if item[:reference]
|
||||
|
|
@ -39,17 +50,17 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline, feature_category: :team_
|
|||
end.compact
|
||||
|
||||
reference_chars.all? do |char|
|
||||
Banzai::Filter::MarkdownPreEscapeFilter::TARGET_CHARS.include?(char)
|
||||
Banzai::Filter::MarkdownPreEscapeLegacyFilter::TARGET_CHARS.include?(char)
|
||||
end
|
||||
end
|
||||
|
||||
it 'does not convert non-reference/latex punctuation to spans' do
|
||||
markdown = %q(\"\'\*\+\,\-\.\/\:\;\<\=\>\?\[\]\`\|) + %q[\(\)\\\\]
|
||||
|
||||
result = described_class.call(markdown, project: project)
|
||||
result = described_class.call(markdown, context)
|
||||
output = result[:output].to_html
|
||||
|
||||
expect(output).not_to include('<span>')
|
||||
expect(output).not_to include('<span')
|
||||
expect(result[:escaped_literals]).to be_falsey
|
||||
end
|
||||
|
||||
|
|
@ -57,7 +68,7 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline, feature_category: :team_
|
|||
markdown = %q(\→\A\a\ \3\φ\«)
|
||||
expected = '\→\A\a\ \3\φ\«'
|
||||
|
||||
result = correct_html_included(markdown, expected)
|
||||
result = correct_html_included(markdown, expected, context)
|
||||
expect(result[:escaped_literals]).to be_falsey
|
||||
end
|
||||
|
||||
|
|
@ -66,13 +77,13 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline, feature_category: :team_
|
|||
%q(`` \@\! ``) | %q(<code>\@\!</code>)
|
||||
%q( \@\!) | %(<code>\\@\\!\n</code>)
|
||||
%(~~~\n\\@\\!\n~~~) | %(<code>\\@\\!\n</code>)
|
||||
%q($1+\$2$) | %q(<span data-math-style="inline">1+\\$2</span>)
|
||||
%q($1+\$2$) | %q(<code data-math-style="inline">1+\\$2</code>)
|
||||
%q(<http://example.com?find=\@>) | %q(<a href="http://example.com?find=%5C@">http://example.com?find=\@</a>)
|
||||
%q[<a href="/bar\@)">] | %q[<a href="/bar\@)">]
|
||||
end
|
||||
|
||||
with_them do
|
||||
it { correct_html_included(markdown, expected) }
|
||||
it { correct_html_included(markdown, expected, context) }
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -80,7 +91,7 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline, feature_category: :team_
|
|||
let(:markdown) { %(``` foo\\@bar\nfoo\n```) }
|
||||
|
||||
it 'renders correct html' do
|
||||
correct_html_included(markdown, %(<pre lang="foo@bar"><code>foo\n</code></pre>))
|
||||
correct_html_included(markdown, %(<pre lang="foo@bar"><code>foo\n</code></pre>), context)
|
||||
end
|
||||
|
||||
where(:markdown, :expected) do
|
||||
|
|
@ -89,13 +100,13 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline, feature_category: :team_
|
|||
end
|
||||
|
||||
with_them do
|
||||
it { correct_html_included(markdown, expected) }
|
||||
it { correct_html_included(markdown, expected, context) }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def correct_html_included(markdown, expected)
|
||||
result = described_class.call(markdown, { no_sourcepos: true })
|
||||
def correct_html_included(markdown, expected, context = {})
|
||||
result = described_class.call(markdown, context)
|
||||
|
||||
expect(result[:output].to_html).to include(expected)
|
||||
|
||||
|
|
|
|||
|
|
@ -13,6 +13,11 @@ RSpec.describe Gitlab::BackgroundMigration::JobCoordinator do
|
|||
Sidekiq::RedisConnection.create(params) # rubocop:disable Rails/SaveBang -- RedisConnection only has .create
|
||||
end
|
||||
|
||||
before do
|
||||
allow(Gitlab::Redis::Queues).to receive(:instances)
|
||||
.and_return({ 'main' => Gitlab::Redis::Queues, 'shard' => Gitlab::Redis::Queues })
|
||||
end
|
||||
|
||||
describe '.for_tracking_database' do
|
||||
it 'returns an executor with the correct worker class and database' do
|
||||
coordinator = described_class.for_tracking_database(tracking_database)
|
||||
|
|
@ -239,12 +244,16 @@ RSpec.describe Gitlab::BackgroundMigration::JobCoordinator do
|
|||
Sidekiq::Testing.disable! do
|
||||
worker_class.perform_in(10.minutes, 'Object')
|
||||
|
||||
expect(Sidekiq::ScheduledSet.new).to be_one
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
expect(Sidekiq::ScheduledSet.new).to be_one
|
||||
end
|
||||
expect(coordinator).to receive(:perform).with('Object', any_args)
|
||||
|
||||
coordinator.steal('Object')
|
||||
|
||||
expect(Sidekiq::ScheduledSet.new).to be_none
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
expect(Sidekiq::ScheduledSet.new).to be_none
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -232,10 +232,10 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
|
|||
context 'when job is executed first' do
|
||||
it 'raises an error and retries', :aggregate_failures do
|
||||
expect do
|
||||
process_job(job)
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls { process_job(job) }
|
||||
end.to raise_error(Sidekiq::JobRetry::Skip)
|
||||
|
||||
job_for_retry = Sidekiq::RetrySet.new.first
|
||||
job_for_retry = Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls { Sidekiq::RetrySet.new.first }
|
||||
expect(job_for_retry['error_class']).to eq('Gitlab::Database::LoadBalancing::SidekiqServerMiddleware::JobReplicaNotUpToDate')
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -65,11 +65,13 @@ RSpec.describe Gitlab::Database::Migrations::SidekiqHelpers do
|
|||
|
||||
describe "#sidekiq_remove_jobs", :clean_gitlab_redis_queues do
|
||||
def clear_queues
|
||||
Sidekiq::Queue.new("test").clear
|
||||
Sidekiq::Queue.new("test_two").clear
|
||||
Sidekiq::Queue.new("unrelated").clear
|
||||
Sidekiq::RetrySet.new.clear
|
||||
Sidekiq::ScheduledSet.new.clear
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq::Queue.new("test").clear
|
||||
Sidekiq::Queue.new("test_two").clear
|
||||
Sidekiq::Queue.new("unrelated").clear
|
||||
Sidekiq::RetrySet.new.clear
|
||||
Sidekiq::ScheduledSet.new.clear
|
||||
end
|
||||
end
|
||||
|
||||
around do |example|
|
||||
|
|
@ -114,32 +116,38 @@ RSpec.describe Gitlab::Database::Migrations::SidekiqHelpers do
|
|||
end
|
||||
|
||||
it "removes all related job instances from the job classes' queues" do
|
||||
worker.perform_async
|
||||
worker_two.perform_async
|
||||
same_queue_different_worker.perform_async
|
||||
unrelated_worker.perform_async
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
worker.perform_async
|
||||
worker_two.perform_async
|
||||
same_queue_different_worker.perform_async
|
||||
unrelated_worker.perform_async
|
||||
end
|
||||
|
||||
worker_queue = Sidekiq::Queue.new(worker.queue)
|
||||
worker_two_queue = Sidekiq::Queue.new(worker_two.queue)
|
||||
unrelated_queue = Sidekiq::Queue.new(unrelated_worker.queue)
|
||||
|
||||
expect(worker_queue.size).to eq(2)
|
||||
expect(worker_two_queue.size).to eq(1)
|
||||
expect(unrelated_queue.size).to eq(1)
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
expect(worker_queue.size).to eq(2)
|
||||
expect(worker_two_queue.size).to eq(1)
|
||||
expect(unrelated_queue.size).to eq(1)
|
||||
end
|
||||
|
||||
model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name])
|
||||
|
||||
expect(worker_queue.size).to eq(1)
|
||||
expect(worker_two_queue.size).to eq(0)
|
||||
expect(worker_queue.map(&:klass)).not_to include(worker.name)
|
||||
expect(worker_queue.map(&:klass)).to include(
|
||||
same_queue_different_worker.name
|
||||
)
|
||||
expect(worker_two_queue.map(&:klass)).not_to include(worker_two.name)
|
||||
expect(unrelated_queue.size).to eq(1)
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
expect(worker_queue.size).to eq(1)
|
||||
expect(worker_two_queue.size).to eq(0)
|
||||
expect(worker_queue.map(&:klass)).not_to include(worker.name)
|
||||
expect(worker_queue.map(&:klass)).to include(
|
||||
same_queue_different_worker.name
|
||||
)
|
||||
expect(worker_two_queue.map(&:klass)).not_to include(worker_two.name)
|
||||
expect(unrelated_queue.size).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
context "when job instances are in the scheduled set" do
|
||||
context "when job instances are in the scheduled set", :allow_unrouted_sidekiq_calls do
|
||||
it "removes all related job instances from the scheduled set" do
|
||||
worker.perform_in(1.hour)
|
||||
worker_two.perform_in(1.hour)
|
||||
|
|
@ -163,7 +171,7 @@ RSpec.describe Gitlab::Database::Migrations::SidekiqHelpers do
|
|||
end
|
||||
end
|
||||
|
||||
context "when job instances are in the retry set" do
|
||||
context "when job instances are in the retry set", :allow_unrouted_sidekiq_calls do
|
||||
include_context "when handling retried jobs"
|
||||
|
||||
it "removes all related job instances from the retry set" do
|
||||
|
|
@ -192,7 +200,7 @@ RSpec.describe Gitlab::Database::Migrations::SidekiqHelpers do
|
|||
end
|
||||
|
||||
# Imitate job deletion returning zero and then non zero.
|
||||
context "when job fails to be deleted" do
|
||||
context "when job fails to be deleted", :allow_unrouted_sidekiq_calls do
|
||||
let(:job_double) do
|
||||
instance_double(
|
||||
"Sidekiq::JobRecord",
|
||||
|
|
@ -260,7 +268,7 @@ RSpec.describe Gitlab::Database::Migrations::SidekiqHelpers do
|
|||
jid: 'random_jid' })
|
||||
end
|
||||
|
||||
it "migrates jobs from one sidekiq queue to another" do
|
||||
it "migrates jobs from one sidekiq queue to another", :allow_unrouted_sidekiq_calls do
|
||||
Sidekiq::Testing.disable! do
|
||||
worker.perform_async("Something", [1])
|
||||
worker.perform_async("Something", [2])
|
||||
|
|
@ -284,8 +292,10 @@ RSpec.describe Gitlab::Database::Migrations::SidekiqHelpers do
|
|||
Sidekiq::Testing.disable! do
|
||||
# .perform_async internally calls Sidekiq::Client.via and re-route the job to
|
||||
# Gitlab::Redis::Queues's Redis instance.
|
||||
shard_instance.sidekiq_redis.with { |c| c.lpush('queue:test', job_hash) }
|
||||
Gitlab::Redis::Queues.sidekiq_redis.with { |c| c.lpush('queue:test', job_hash) }
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
shard_instance.sidekiq_redis.with { |c| c.lpush('queue:test', job_hash) }
|
||||
Gitlab::Redis::Queues.sidekiq_redis.with { |c| c.lpush('queue:test', job_hash) }
|
||||
end
|
||||
|
||||
# 1 job in each instance's queue
|
||||
Sidekiq::Client.via(shard_instance.sidekiq_redis) do
|
||||
|
|
@ -295,17 +305,21 @@ RSpec.describe Gitlab::Database::Migrations::SidekiqHelpers do
|
|||
end
|
||||
end
|
||||
|
||||
Sidekiq.redis do |c|
|
||||
expect(c.llen("queue:test")).to eq 1
|
||||
expect(c.llen("queue:destination")).to eq 0
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq.redis do |c|
|
||||
expect(c.llen("queue:test")).to eq 1
|
||||
expect(c.llen("queue:destination")).to eq 0
|
||||
end
|
||||
end
|
||||
|
||||
model.sidekiq_queue_migrate("test", to: "destination")
|
||||
|
||||
# 2 job in the main instance's queue
|
||||
Sidekiq.redis do |c|
|
||||
expect(c.llen("queue:test")).to eq main_from_size
|
||||
expect(c.llen("queue:destination")).to eq main_to_size
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq.redis do |c|
|
||||
expect(c.llen("queue:test")).to eq main_from_size
|
||||
expect(c.llen("queue:destination")).to eq main_to_size
|
||||
end
|
||||
end
|
||||
|
||||
# queues in shard get emptied
|
||||
|
|
@ -326,19 +340,23 @@ RSpec.describe Gitlab::Database::Migrations::SidekiqHelpers do
|
|||
|
||||
it 'stores migrated job in a buffer' do
|
||||
Sidekiq::Testing.disable! do
|
||||
worker.perform_async("Something", [1])
|
||||
worker.perform_async("Something", [2])
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
worker.perform_async("Something", [1])
|
||||
worker.perform_async("Something", [2])
|
||||
|
||||
Sidekiq.redis do |c|
|
||||
expect(c.llen("queue:test")).to eq 2
|
||||
expect(c.llen("queue:destination")).to eq 0
|
||||
Sidekiq.redis do |c|
|
||||
expect(c.llen("queue:test")).to eq 2
|
||||
expect(c.llen("queue:destination")).to eq 0
|
||||
end
|
||||
end
|
||||
|
||||
expect { model.sidekiq_queue_migrate("test", to: "destination") }.to raise_error(StandardError)
|
||||
|
||||
Sidekiq.redis do |c|
|
||||
expect(c.llen("queue:test")).to eq 1
|
||||
expect(c.llen("migration_buffer:queue:test")).to eq 1
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq.redis do |c|
|
||||
expect(c.llen("queue:test")).to eq 1
|
||||
expect(c.llen("migration_buffer:queue:test")).to eq 1
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -435,7 +453,7 @@ RSpec.describe Gitlab::Database::Migrations::SidekiqHelpers do
|
|||
end
|
||||
|
||||
context 'when there is 1 source and 1 destination store' do
|
||||
context 'when it is the same instance' do
|
||||
context 'when it is the same instance', :allow_unrouted_sidekiq_calls do
|
||||
before do
|
||||
allow(Gitlab::SidekiqConfig::WorkerRouter.global)
|
||||
.to receive(:stores_with_queue).and_return(['main'])
|
||||
|
|
@ -469,7 +487,7 @@ RSpec.describe Gitlab::Database::Migrations::SidekiqHelpers do
|
|||
it_behaves_like 'holds jobs in buffer until migration completes'
|
||||
end
|
||||
|
||||
context 'when routing is disabled' do
|
||||
context 'when routing is disabled', :allow_unrouted_sidekiq_calls do
|
||||
before do
|
||||
allow(Gitlab::SidekiqSharding::Router).to receive(:enabled?).and_return(false)
|
||||
allow(Gitlab::SidekiqConfig::WorkerRouter.global)
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
|
|||
[
|
||||
'compliance_framework_security_policies', # has a desired sharding key instead
|
||||
'merge_request_diff_commits_b5377a7a34', # has a desired sharding key instead
|
||||
'merge_request_diff_files_99208b8fac', # has a desired sharding key instead
|
||||
'ml_model_metadata', # has a desired sharding key instead.
|
||||
'p_ci_pipeline_variables', # https://gitlab.com/gitlab-org/gitlab/-/issues/436360
|
||||
'p_ci_stages', # https://gitlab.com/gitlab-org/gitlab/-/issues/448630
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,26 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Middleware::SidekiqShardAwarenessValidation, feature_category: :scalability do
  let(:app) { ->(_env) { Sidekiq.redis(&:ping) } }
  let(:env) { {} }

  around do |example|
    original_state = Thread.current[:validate_sidekiq_shard_awareness]
    Thread.current[:validate_sidekiq_shard_awareness] = nil

    example.run

    Thread.current[:validate_sidekiq_shard_awareness] = original_state
  end

  describe '#call' do
    subject(:app_call) { described_class.new(app).call(env) }

    it 'enables shard-awareness check within the context of a request' do
      expect { Sidekiq.redis(&:ping) }.not_to raise_error
      expect { app_call }.to raise_error(Gitlab::SidekiqSharding::Validator::UnroutedSidekiqApiError)
    end
  end
end
@ -38,13 +38,17 @@ RSpec.describe Gitlab::SidekiqMiddleware::Monitor do
|
|||
end
|
||||
|
||||
it 'puts job in DeadSet' do
|
||||
::Sidekiq::DeadSet.new.clear
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls { ::Sidekiq::DeadSet.new.clear }
|
||||
|
||||
expect do
|
||||
subject
|
||||
rescue Sidekiq::JobRetry::Skip
|
||||
nil
|
||||
end.to change { ::Sidekiq::DeadSet.new.size }.by(1)
|
||||
end.to change {
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
::Sidekiq::DeadSet.new.size
|
||||
end
|
||||
}.by(1)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,45 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::SidekiqMiddleware::ShardAwarenessValidator, feature_category: :scalability do
  let(:worker) do
    Class.new do
      def self.name
        'TestWorker'
      end

      def perform
        Thread.current[:validate_sidekiq_shard_awareness]
      end
      include ApplicationWorker
    end
  end

  around do |example|
    original_state = Thread.current[:validate_sidekiq_shard_awareness]
    Thread.current[:validate_sidekiq_shard_awareness] = nil

    with_sidekiq_server_middleware do |chain|
      chain.add described_class
      Sidekiq::Testing.inline! { example.run }
    end

    Thread.current[:validate_sidekiq_shard_awareness] = original_state
  end

  subject { described_class.new }

  before do
    stub_const('TestWorker', worker)
  end

  describe '#call' do
    it 'validates shard-aware calls within a middleware' do
      expect { Sidekiq.redis(&:ping) }.not_to raise_error

      # .perform_async prevents an error from being raised
      expect(TestWorker.perform_async).to be_truthy
    end
  end
end
@ -57,6 +57,7 @@ RSpec.describe Gitlab::SidekiqMiddleware, feature_category: :shared do
|
|||
let(:middleware_expected_args) { [a_kind_of(worker_class), hash_including({ 'args' => job_args }), queue] }
|
||||
let(:all_sidekiq_middlewares) do
|
||||
[
|
||||
::Gitlab::SidekiqMiddleware::ShardAwarenessValidator,
|
||||
::Gitlab::SidekiqMiddleware::Monitor,
|
||||
::Labkit::Middleware::Sidekiq::Server,
|
||||
::Gitlab::SidekiqMiddleware::RequestStoreMiddleware,
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues,
|
||||
:clean_gitlab_redis_queues_metadata do
|
||||
:clean_gitlab_redis_queues_metadata, :allow_unrouted_sidekiq_calls do
|
||||
def clear_queues
|
||||
Sidekiq::Queue.new('authorized_projects').clear
|
||||
Sidekiq::Queue.new('post_receive').clear
|
||||
|
|
|
|||
|
|
@ -4,18 +4,20 @@ require 'spec_helper'
|
|||
|
||||
RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues, :clean_gitlab_redis_queues_metadata do
|
||||
around do |example|
|
||||
Sidekiq::Queue.new('foobar').clear
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls { Sidekiq::Queue.new('foobar').clear }
|
||||
Sidekiq::Testing.disable!(&example)
|
||||
Sidekiq::Queue.new('foobar').clear
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls { Sidekiq::Queue.new('foobar').clear }
|
||||
end
|
||||
|
||||
def add_job(args, user:, klass: 'AuthorizedProjectsWorker')
|
||||
Sidekiq::Client.push(
|
||||
'class' => klass,
|
||||
'queue' => 'foobar',
|
||||
'args' => args,
|
||||
'meta.user' => user.username
|
||||
)
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq::Client.push(
|
||||
'class' => klass,
|
||||
'queue' => 'foobar',
|
||||
'args' => args,
|
||||
'meta.user' => user.username
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#drop_jobs!' do
|
||||
|
|
|
|||
|
|
@ -2,7 +2,10 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::SidekiqSharding::ScheduledEnq, feature_category: :scalability do
|
||||
# Gitlab::SidekiqSharding::ScheduledEnq does not need routing checks as it extends
|
||||
# Sidekiq::Scheduled::Enq which internally uses Sidekiq.redis. That is expected of the poller.
|
||||
RSpec.describe Gitlab::SidekiqSharding::ScheduledEnq, :allow_unrouted_sidekiq_calls,
|
||||
feature_category: :scalability do
|
||||
it 'extends Sidekiq::Scheduled::Enq' do
|
||||
expect(described_class <= ::Sidekiq::Scheduled::Enq).to eq(true)
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,114 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::SidekiqSharding::Validator, feature_category: :scalability do
|
||||
subject(:ping) { Sidekiq.redis(&:ping) }
|
||||
|
||||
before do
|
||||
Thread.current[:inside_sidekiq_via_scope] = nil
|
||||
Thread.current[:allow_unrouted_sidekiq_calls] = nil
|
||||
end
|
||||
|
||||
describe '.via' do
|
||||
it 'sets Thread.current within via' do
|
||||
expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
|
||||
|
||||
Sidekiq::Client.via(Gitlab::Redis::Queues.sidekiq_redis) do
|
||||
expect(Thread.current[:inside_sidekiq_via_scope]).to be_truthy
|
||||
|
||||
ping
|
||||
end
|
||||
end
|
||||
|
||||
it 'restores Thread.current inside_sidekiq_via_scope value after exiting scope' do
|
||||
Thread.current[:inside_sidekiq_via_scope] = 'test'
|
||||
|
||||
Sidekiq::Client.via(Gitlab::Redis::Queues.sidekiq_redis) do
|
||||
expect(Thread.current[:inside_sidekiq_via_scope]).to be_truthy
|
||||
end
|
||||
|
||||
expect(Thread.current[:inside_sidekiq_via_scope]).to eq('test')
|
||||
end
|
||||
end
|
||||
|
||||
describe '#method_missing' do
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
# we test method_missing through .ping
|
||||
|
||||
where(:env, :expected_error) do
|
||||
'production' | nil
|
||||
'test' | described_class::UnroutedSidekiqApiError
|
||||
'development' | described_class::UnroutedSidekiqApiError
|
||||
end
|
||||
|
||||
with_them do
|
||||
before do
|
||||
stub_rails_env(env)
|
||||
end
|
||||
|
||||
it do
|
||||
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).and_call_original
|
||||
|
||||
if expected_error
|
||||
expect { ping }.to raise_error(expected_error)
|
||||
else
|
||||
expect { ping }.not_to raise_error
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '.allow_unrouted_sidekiq_calls' do
|
||||
it 'permits unrouted calls' do
|
||||
expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
|
||||
|
||||
described_class.allow_unrouted_sidekiq_calls do
|
||||
expect(Thread.current[:inside_sidekiq_via_scope]).to be_falsey
|
||||
expect(Thread.current[:allow_unrouted_sidekiq_calls]).to be_truthy
|
||||
|
||||
ping
|
||||
end
|
||||
end
|
||||
|
||||
it 'restores Thread.current allow_unrouted_sidekiq_calls value after exiting scope' do
|
||||
Thread.current[:allow_unrouted_sidekiq_calls] = 'test'
|
||||
|
||||
described_class.allow_unrouted_sidekiq_calls do
|
||||
expect(Thread.current[:allow_unrouted_sidekiq_calls]).to be_truthy
|
||||
end
|
||||
|
||||
expect(Thread.current[:allow_unrouted_sidekiq_calls]).to eq('test')
|
||||
end
|
||||
end
|
||||
|
||||
describe '.enabled' do
|
||||
around do |example|
|
||||
original_state = Thread.current[:validate_sidekiq_shard_awareness]
|
||||
Thread.current[:validate_sidekiq_shard_awareness] = nil
|
||||
|
||||
example.run
|
||||
|
||||
Thread.current[:validate_sidekiq_shard_awareness] = original_state
|
||||
end
|
||||
|
||||
it 'scopes validation to within the block' do
|
||||
# avoid reusing subject due to memoized behaviours. subject should be called once only.
|
||||
expect { Sidekiq.redis(&:ping) }.not_to raise_error
|
||||
|
||||
described_class.enabled do
|
||||
expect { ping }.to raise_error(described_class::UnroutedSidekiqApiError)
|
||||
end
|
||||
end
|
||||
|
||||
it 'restores Thread.current validate_sidekiq_shard_awareness value after exiting scope' do
|
||||
Thread.current[:validate_sidekiq_shard_awareness] = 'test'
|
||||
|
||||
described_class.enabled do
|
||||
expect(Thread.current[:validate_sidekiq_shard_awareness]).to be_truthy
|
||||
end
|
||||
|
||||
expect(Thread.current[:validate_sidekiq_shard_awareness]).to eq('test')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -2,7 +2,8 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::SidekiqVersioning, :clean_gitlab_redis_queues do
|
||||
# allow_unrouted_sidekiq_calls as this is run on every startup, so shard awareness is not needed
|
||||
RSpec.describe Gitlab::SidekiqVersioning, :clean_gitlab_redis_queues, :allow_unrouted_sidekiq_calls do
|
||||
before do
|
||||
allow(Gitlab::SidekiqConfig).to receive(:routing_queues).and_return(%w[foo bar])
|
||||
end
|
||||
|
|
|
|||
|
|
@ -73,8 +73,8 @@ RSpec.describe MigrateSidekiqQueuedAndFutureJobs, :clean_gitlab_redis_queues, fe
|
|||
let(:job) { '{foo: 1}' }
|
||||
|
||||
before do
|
||||
Sidekiq.redis do |conn|
|
||||
conn.lpush("queue:email_receiver", job)
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq.redis { |conn| conn.lpush("queue:email_receiver", job) }
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -94,14 +94,16 @@ RSpec.describe MigrateSidekiqQueuedAndFutureJobs, :clean_gitlab_redis_queues, fe
|
|||
end
|
||||
|
||||
def queue_length(queue_name)
|
||||
Sidekiq.redis do |conn|
|
||||
conn.llen("queue:#{queue_name}")
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq.redis { |conn| conn.llen("queue:#{queue_name}") }
|
||||
end
|
||||
end
|
||||
|
||||
def list_jobs(queue_name)
|
||||
Sidekiq.redis { |conn| conn.lrange("queue:#{queue_name}", 0, -1) }
|
||||
.map { |item| Sidekiq.load_json item }
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq.redis { |conn| conn.lrange("queue:#{queue_name}", 0, -1) }
|
||||
.map { |item| Sidekiq.load_json item }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -122,8 +124,10 @@ RSpec.describe MigrateSidekiqQueuedAndFutureJobs, :clean_gitlab_redis_queues, fe
|
|||
EmailReceiverWorker.perform_in(1.hour, 'foo')
|
||||
EmailReceiverWorker.perform_in(2.hours, 'bar')
|
||||
EmailReceiverWorker.perform_in(3.hours, 'baz')
|
||||
retry_in(EmailReceiverWorker, 1.hour, 0)
|
||||
retry_in(EmailReceiverWorker, 2.hours, 0)
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
retry_in(EmailReceiverWorker, 1.hour, 0)
|
||||
retry_in(EmailReceiverWorker, 2.hours, 0)
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples 'migrates scheduled and retried jobs' do
|
||||
|
|
@ -161,7 +165,7 @@ RSpec.describe MigrateSidekiqQueuedAndFutureJobs, :clean_gitlab_redis_queues, fe
|
|||
end
|
||||
end
|
||||
|
||||
context 'with worker_queue_mappings mocked' do
|
||||
context 'with worker_queue_mappings mocked', :allow_unrouted_sidekiq_calls do
|
||||
let(:mappings_mocked) { true }
|
||||
|
||||
it_behaves_like 'migrates scheduled and retried jobs'
|
||||
|
|
@ -194,7 +198,7 @@ RSpec.describe MigrateSidekiqQueuedAndFutureJobs, :clean_gitlab_redis_queues, fe
|
|||
end
|
||||
end
|
||||
|
||||
context 'without worker_queue_mappings mocked' do
|
||||
context 'without worker_queue_mappings mocked', :allow_unrouted_sidekiq_calls do
|
||||
let(:mappings_mocked) { false }
|
||||
|
||||
it_behaves_like 'migrates scheduled and retried jobs'
|
||||
|
|
|
|||
|
|
@ -8,18 +8,26 @@ RSpec.describe API::Admin::Sidekiq, :clean_gitlab_redis_queues, feature_category
|
|||
describe 'DELETE /admin/sidekiq/queues/:queue_name' do
|
||||
context 'when the user is an admin' do
|
||||
around do |example|
|
||||
Sidekiq::Queue.new('authorized_projects').clear
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq::Queue.new('authorized_projects').clear
|
||||
end
|
||||
|
||||
Sidekiq::Testing.disable!(&example)
|
||||
Sidekiq::Queue.new('authorized_projects').clear
|
||||
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq::Queue.new('authorized_projects').clear
|
||||
end
|
||||
end
|
||||
|
||||
def add_job(user, args)
|
||||
Sidekiq::Client.push(
|
||||
'class' => 'AuthorizedProjectsWorker',
|
||||
'queue' => 'authorized_projects',
|
||||
'args' => args,
|
||||
'meta.user' => user.username
|
||||
)
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq::Client.push(
|
||||
'class' => 'AuthorizedProjectsWorker',
|
||||
'queue' => 'authorized_projects',
|
||||
'args' => args,
|
||||
'meta.user' => user.username
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'valid request' do
|
||||
|
|
|
|||
|
|
@ -28,19 +28,26 @@ RSpec.describe 'Deleting Sidekiq jobs', :clean_gitlab_redis_queues, feature_cate
|
|||
|
||||
context 'when valid request' do
|
||||
around do |example|
|
||||
Sidekiq::Queue.new(queue).clear
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq::Queue.new(queue).clear
|
||||
end
|
||||
Sidekiq::Testing.disable!(&example)
|
||||
Sidekiq::Queue.new(queue).clear
|
||||
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq::Queue.new(queue).clear
|
||||
end
|
||||
end
|
||||
|
||||
def add_job(user, args)
|
||||
Sidekiq::Client.push(
|
||||
'class' => 'AuthorizedProjectsWorker',
|
||||
'queue' => queue,
|
||||
'args' => args,
|
||||
'meta.user' => user.username
|
||||
)
|
||||
raise 'Not enqueued!' if Sidekiq::Queue.new(queue).size.zero? # rubocop:disable Style/ZeroLengthPredicate -- Sidekiq::Queue doesn't implement #blank? or #empty?
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq::Client.push(
|
||||
'class' => 'AuthorizedProjectsWorker',
|
||||
'queue' => queue,
|
||||
'args' => args,
|
||||
'meta.user' => user.username
|
||||
)
|
||||
raise 'Not enqueued!' if Sidekiq::Queue.new(queue).size.zero? # rubocop:disable Style/ZeroLengthPredicate -- Sidekiq::Queue doesn't implement #blank? or #empty?
|
||||
end
|
||||
end
|
||||
|
||||
it 'returns info about the deleted jobs' do
|
||||
|
|
|
|||
|
|
@ -2,7 +2,8 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Sidekiq::Cron::Job do
|
||||
# Only Sidekiq.redis interacts with cron jobs so unrouted calls are allowed.
|
||||
RSpec.describe Sidekiq::Cron::Job, :allow_unrouted_sidekiq_calls do
|
||||
describe 'cron jobs' do
|
||||
context 'when Fugit depends on ZoTime or EoTime' do
|
||||
before do
|
||||
|
|
|
|||
|
|
@@ -332,6 +332,10 @@ RSpec.configure do |config|
      # Disable license requirement for duo chat, which is subject to change.
      # See https://gitlab.com/gitlab-org/gitlab/-/issues/457090
      stub_feature_flags(duo_chat_requires_licensed_seat: false)

      # Disable license requirement for duo chat (self managed), which is subject to change.
      # See https://gitlab.com/gitlab-org/gitlab/-/issues/457283
      stub_feature_flags(duo_chat_requires_licensed_seat_sm: false)
    else
      unstub_all_feature_flags
    end
@@ -394,6 +398,12 @@ RSpec.configure do |config|
    end
  end

  config.around(:example, :allow_unrouted_sidekiq_calls) do |example|
    ::Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
      example.run
    end
  end

  # previous test runs may have left some resources throttled
  config.before do
    ::Gitlab::ExclusiveLease.reset_all!("el:throttle:*")
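With that hook in place, a spec that legitimately needs raw Sidekiq.redis access would opt in through the metadata tag; the example group below is illustrative, not part of the change:

# Illustrative spec: the :allow_unrouted_sidekiq_calls tag wraps every example
# in Validator.allow_unrouted_sidekiq_calls, so direct Sidekiq.redis use is permitted.
RSpec.describe 'queue inspection helper', :allow_unrouted_sidekiq_calls do
  it 'reads a queue length without routing through a shard' do
    length = Sidekiq.redis { |conn| conn.llen('queue:default') }

    expect(length).to be >= 0
  end
end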
@@ -430,6 +440,12 @@ RSpec.configure do |config|
    end
  end

  config.around do |example|
    Gitlab::SidekiqSharding::Validator.enabled do
      example.run
    end
  end

  config.after do
    Fog.unmock! if Fog.mock?
    Gitlab::ApplicationSettingFetcher.clear_in_memory_application_settings!
@ -3,7 +3,7 @@ require 'gitlab/redis'
|
|||
|
||||
RSpec.configure do |config|
|
||||
config.after(:each, :redis) do
|
||||
Sidekiq.redis(&:flushdb)
|
||||
redis_queues_cleanup!
|
||||
redis_queues_metadata_cleanup!
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ RSpec.shared_examples 'marks background migration job records' do
|
|||
end
|
||||
|
||||
RSpec.shared_examples 'finalized background migration' do |worker_class|
|
||||
it 'processed the scheduled sidekiq queue' do
|
||||
it 'processed the scheduled sidekiq queue', :allow_unrouted_sidekiq_calls do
|
||||
queued = Sidekiq::ScheduledSet
|
||||
.new
|
||||
.select do |scheduled|
|
||||
|
|
@ -39,7 +39,7 @@ RSpec.shared_examples 'finalized background migration' do |worker_class|
|
|||
expect(queued.size).to eq(0)
|
||||
end
|
||||
|
||||
it 'processed the async sidekiq queue' do
|
||||
it 'processed the async sidekiq queue', :allow_unrouted_sidekiq_calls do
|
||||
queued = Sidekiq::Queue.new(worker_class.name)
|
||||
.select { |scheduled| scheduled.klass == job_class_name }
|
||||
expect(queued.size).to eq(0)
|
||||
|
|
|
|||
|
|
@ -70,11 +70,16 @@ RSpec.describe 'gitlab:snippets namespace rake task', :silence_stdout do
|
|||
it 'fails if the snippet background migration is running' do
|
||||
Sidekiq::Testing.disable! do
|
||||
BackgroundMigrationWorker.perform_in(180, 'BackfillSnippetRepositories', [non_migrated.first.id, non_migrated.last.id])
|
||||
expect(Sidekiq::ScheduledSet.new).to be_one
|
||||
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
expect(Sidekiq::ScheduledSet.new).to be_one
|
||||
end
|
||||
|
||||
expect { subject }.to raise_error(RuntimeError, /There are already snippet migrations running/)
|
||||
|
||||
Sidekiq::ScheduledSet.new.clear
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq::ScheduledSet.new.clear
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -97,11 +102,15 @@ RSpec.describe 'gitlab:snippets namespace rake task', :silence_stdout do
|
|||
it 'returns a message saying that the background migration is running' do
|
||||
Sidekiq::Testing.disable! do
|
||||
BackgroundMigrationWorker.perform_in(180, 'BackfillSnippetRepositories', [non_migrated.first.id, non_migrated.last.id])
|
||||
expect(Sidekiq::ScheduledSet.new).to be_one
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
expect(Sidekiq::ScheduledSet.new).to be_one
|
||||
end
|
||||
|
||||
expect { subject }.to output("There are snippet migrations running\n").to_stdout
|
||||
|
||||
Sidekiq::ScheduledSet.new.clear
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
|
||||
Sidekiq::ScheduledSet.new.clear
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ RSpec.describe ApplicationWorker, feature_category: :shared do
|
|||
before do
|
||||
# Set up Sidekiq.default_configuration's Thread.current[:sidekiq_redis_pool].
|
||||
# Creating a RedisConnection during spec's runtime will perform Sidekiq.info which messes with our spec expectations.
|
||||
Sidekiq.redis(&:ping)
|
||||
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls { Sidekiq.redis(&:ping) }
|
||||
|
||||
allow(Feature).to receive(:enabled?).and_call_original
|
||||
allow(Feature).to receive(:enabled?).with(:route_to_main, type: :ops).and_return(true)
|
||||
|
|
|
|||