Add latest changes from gitlab-org/gitlab@master

parent 20082d14c8
commit da4f753e76
@@ -121,6 +121,7 @@ trigger-omnibus as-if-foss:
     # Override gitlab repository so that omnibus doesn't use foss repository for CE build
     GITLAB_ALTERNATIVE_REPO: $CI_PROJECT_URL

+# If a rename is required for this job, please notify the Delivery team (`@gitlab-org/delivery`)
 e2e:package-and-test-ee:
   extends:
     - .e2e-trigger-base
Gemfile (4 changed lines)
@@ -247,8 +247,8 @@ gem 'state_machines-activerecord', '~> 0.8.0' # rubocop:todo Gemfile/MissingFeatureCategory
 gem 'acts-as-taggable-on', '~> 10.0' # rubocop:todo Gemfile/MissingFeatureCategory

 # Background jobs
-gem 'sidekiq', '~> 6.5.10' # rubocop:todo Gemfile/MissingFeatureCategory
-gem 'sidekiq-cron', '~> 1.8.0' # rubocop:todo Gemfile/MissingFeatureCategory
+gem 'sidekiq', '~> 7.1.6' # rubocop:todo Gemfile/MissingFeatureCategory
+gem 'sidekiq-cron', '~> 1.9.0' # rubocop:todo Gemfile/MissingFeatureCategory
 gem 'gitlab-sidekiq-fetcher', path: 'vendor/gems/sidekiq-reliable-fetch', require: 'sidekiq-reliable-fetch' # rubocop:todo Gemfile/MissingFeatureCategory

 # Cron Parser
@@ -514,6 +514,7 @@
 {"name":"redcarpet","version":"3.6.0","platform":"ruby","checksum":"8ad1889c0355ff4c47174af14edd06d62f45a326da1da6e8a121d59bdcd2e9e9"},
 {"name":"redis","version":"4.8.0","platform":"ruby","checksum":"2000cf5014669c9dc821704b6d322a35a9a33852a95208911d9175d63b448a44"},
 {"name":"redis-actionpack","version":"5.3.0","platform":"ruby","checksum":"3fb1ad0a8fd9d26a289c9399bb609dcaef38bf37711e6f677a53ca728fc19140"},
+{"name":"redis-client","version":"0.18.0","platform":"ruby","checksum":"a93bd1f99c024bb7f8e21eff7bdbcb16d85dbcbfe3f6ed051239e38d4c127704"},
 {"name":"redis-rack","version":"2.1.4","platform":"ruby","checksum":"0872eecb303e483c3863d6bd0d47323d230640d41c1a4ac4a2c7596ec0b1774c"},
 {"name":"redis-store","version":"1.9.1","platform":"ruby","checksum":"7b4c7438d46f7b7ce8f67fc0eda3a04fc67d32d28cf606cc98a5df4d2b77071d"},
 {"name":"regexp_parser","version":"2.6.0","platform":"ruby","checksum":"f163ba463a45ca2f2730e0902f2475bb0eefcd536dfc2f900a86d1e5a7d7a556"},
@@ -589,8 +590,8 @@
 {"name":"sexp_processor","version":"4.17.0","platform":"ruby","checksum":"4daa4874ce1838cd801c65e66ed5d4f140024404a3de7482c36d4ef2604dff6f"},
 {"name":"shellany","version":"0.0.1","platform":"ruby","checksum":"0e127a9132698766d7e752e82cdac8250b6adbd09e6c0a7fbbb6f61964fedee7"},
 {"name":"shoulda-matchers","version":"5.1.0","platform":"ruby","checksum":"a01d20589989e9653ab4a28c67d9db2b82bcf0a2496cf01d5e1a95a4aaaf5b07"},
-{"name":"sidekiq","version":"6.5.12","platform":"ruby","checksum":"b4f93b2204c42220d0b526a7b8e0c49b5f9da82c1ce1a05d2baf1e8f744c197f"},
-{"name":"sidekiq-cron","version":"1.8.0","platform":"ruby","checksum":"47da72ca73ce5b71896aaf7e7c4391386ec517dd003f184c50c0b727d82eb0ca"},
+{"name":"sidekiq","version":"7.1.6","platform":"ruby","checksum":"7859da66d5bcef3c22bea2c3091d08c866890168e003f5bf4dea197dc37843a2"},
+{"name":"sidekiq-cron","version":"1.9.1","platform":"ruby","checksum":"79d11c79c686ec2e540c1932ccd12b0c07e7c228d28a0a7c515a6c7fcd3c22df"},
 {"name":"sigdump","version":"0.2.4","platform":"ruby","checksum":"0bf2176e55c1a262788623fe5ea57caddd6ba2abebe5e349d9d5e7c3a3010ed7"},
 {"name":"signet","version":"0.17.0","platform":"ruby","checksum":"1d2831930dc28da32e34bec68cf7ded97ee2867b208f97c500ee293829cb0004"},
 {"name":"simple_po_parser","version":"1.1.6","platform":"ruby","checksum":"122687d44d3de516a0e69e2f383a4180f5015e8c5ed5a7f2258f2b376f64cbf3"},
Gemfile.lock (23 changed lines)
@@ -169,9 +169,9 @@ PATH
 PATH
   remote: vendor/gems/sidekiq-reliable-fetch
   specs:
-    gitlab-sidekiq-fetcher (0.10.0)
+    gitlab-sidekiq-fetcher (0.11.0)
       json (>= 2.5)
-      sidekiq (~> 6.1)
+      sidekiq (~> 7.0)

 GEM
   remote: https://rubygems.org/
@@ -1347,6 +1347,8 @@ GEM
       actionpack (>= 5, < 8)
       redis-rack (>= 2.1.0, < 3)
       redis-store (>= 1.1.0, < 2)
+    redis-client (0.18.0)
+      connection_pool
     redis-rack (2.1.4)
       rack (>= 2.0.8, < 3)
       redis-store (>= 1.2, < 2)
@@ -1529,12 +1531,13 @@ GEM
     shellany (0.0.1)
     shoulda-matchers (5.1.0)
       activesupport (>= 5.2.0)
-    sidekiq (6.5.12)
-      connection_pool (>= 2.2.5, < 3)
-      rack (~> 2.0)
-      redis (>= 4.5.0, < 5)
-    sidekiq-cron (1.8.0)
-      fugit (~> 1)
+    sidekiq (7.1.6)
+      concurrent-ruby (< 2)
+      connection_pool (>= 2.3.0)
+      rack (>= 2.2.4)
+      redis-client (>= 0.14.0)
+    sidekiq-cron (1.9.1)
+      fugit (~> 1.8)
+      sidekiq (>= 4.2.1)
     sigdump (0.2.4)
     signet (0.17.0)
@@ -2053,8 +2056,8 @@ DEPENDENCIES
   sentry-ruby (~> 5.8.0)
   sentry-sidekiq (~> 5.8.0)
   shoulda-matchers (~> 5.1.0)
-  sidekiq (~> 6.5.10)
-  sidekiq-cron (~> 1.8.0)
+  sidekiq (~> 7.1.6)
+  sidekiq-cron (~> 1.9.0)
   sigdump (~> 0.2.4)
   simple_po_parser (~> 1.1.6)
   simplecov (~> 0.21)
@@ -1,13 +1,5 @@
 <script>
-import {
-  GlAvatar,
-  GlBadge,
-  GlButton,
-  GlIcon,
-  GlLink,
-  GlSprintf,
-  GlTooltipDirective,
-} from '@gitlab/ui';
+import { GlAvatar, GlBadge, GlIcon, GlLink, GlSprintf, GlTooltipDirective } from '@gitlab/ui';
 import { s__ } from '~/locale';
 import { getIdFromGraphQLId } from '~/graphql_shared/utils';
 import { formatDate, getTimeago } from '~/lib/utils/datetime_utility';
@@ -21,7 +13,6 @@ export default {
   components: {
     GlAvatar,
     GlBadge,
-    GlButton,
     GlIcon,
     GlLink,
     GlSprintf,
@@ -42,6 +33,12 @@ export default {
     authorProfileUrl() {
       return this.latestVersion.author.webUrl;
     },
+    detailsPageHref() {
+      return this.$router.resolve({
+        name: CI_RESOURCE_DETAILS_PAGE_NAME,
+        params: { id: this.entityId },
+      }).href;
+    },
     entityId() {
       return getIdFromGraphQLId(this.resource.id);
     },
@@ -68,7 +65,16 @@ export default {
     },
   },
   methods: {
-    navigateToDetailsPage() {
+    navigateToDetailsPage(e) {
+      // Open the link in a new tab if any of these modifier keys is held down.
+      if (e?.ctrlKey || e?.metaKey) {
+        return;
+      }
+
+      // Override the <a> tag if no modifier key is held down to use Vue router and not
+      // open a new tab.
+      e.preventDefault();
+
       this.$router.push({
         name: CI_RESOURCE_DETAILS_PAGE_NAME,
         params: { id: this.entityId },
@@ -93,14 +99,14 @@ export default {
     />
     <div class="gl-display-flex gl-flex-direction-column gl-flex-grow-1">
       <div class="gl-display-flex gl-flex-wrap gl-gap-2 gl-mb-2">
-        <gl-button
-          variant="link"
+        <gl-link
           class="gl-text-gray-900! gl-mr-1"
+          :href="detailsPageHref"
           data-testid="ci-resource-link"
           @click="navigateToDetailsPage"
         >
           {{ resourcePath }} <b> {{ resource.name }}</b>
-        </gl-button>
+        </gl-link>
         <div class="gl-display-flex gl-flex-grow-1 gl-md-justify-content-space-between">
           <gl-badge size="sm">{{ tagName }}</gl-badge>
           <span class="gl-display-flex gl-align-items-center gl-ml-5">
@@ -136,7 +136,7 @@ export default {
       return REVIEW_STATE_ICONS[user.mergeRequestInteraction.reviewState];
     },
     showRequestReviewButton(user) {
-      if (this.glFeatures.mrRequestChanges) {
+      if (this.glFeatures.mrRequestChanges && !user.mergeRequestInteraction.approved) {
        return user.mergeRequestInteraction.reviewState !== 'UNREVIEWED';
      }
@@ -36,7 +36,11 @@ module Ci
     chronic_duration_attr_reader :timeout_human_readable, :timeout

-    scope :scoped_build, -> { where("#{quoted_table_name}.build_id = #{Ci::Build.quoted_table_name}.id") }
+    scope :scoped_build, -> do
+      where(arel_table[:build_id].eq(Ci::Build.arel_table[:id]))
+        .where(arel_table[:partition_id].eq(Ci::Build.arel_table[:partition_id]))
+    end

     scope :with_interruptible, -> { where(interruptible: true) }
     scope :with_exposed_artifacts, -> { where(has_exposed_artifacts: true) }
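The rewritten scope trades an interpolated SQL string for Arel, and additionally correlates on `partition_id`, which lets PostgreSQL prune partitions when this scope is used inside a correlated subquery against partitioned CI tables. A runnable sketch of the pattern (assumes the sqlite3 gem; model and table names here are illustrative, not GitLab's real schema):

```ruby
require "active_record"

ActiveRecord::Base.establish_connection(adapter: "sqlite3", database: ":memory:")

ActiveRecord::Schema.define do
  create_table(:builds) { |t| t.integer :partition_id }
  create_table(:metadata) do |t|
    t.integer :build_id
    t.integer :partition_id
  end
end

class Build < ActiveRecord::Base; end

class Metadata < ActiveRecord::Base
  self.table_name = "metadata"

  # Correlated predicate on both the foreign key and the partition key,
  # mirroring the scoped_build scope above.
  scope :scoped_build, -> do
    where(arel_table[:build_id].eq(Build.arel_table[:id]))
      .where(arel_table[:partition_id].eq(Build.arel_table[:partition_id]))
  end
end

puts Metadata.scoped_build.to_sql
# Roughly: SELECT "metadata".* FROM "metadata"
#   WHERE "metadata"."build_id" = "builds"."id"
#   AND "metadata"."partition_id" = "builds"."partition_id"
```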
@@ -459,20 +459,12 @@ module Ci
     end

     scope :with_reports, -> (reports_scope) do
-      where('EXISTS (?)',
-        ::Ci::Build
-          .latest
-          .with_artifacts(reports_scope)
-          .where("#{quoted_table_name}.id = #{Ci::Build.quoted_table_name}.commit_id")
-          .select(1)
-      )
+      where_exists(Ci::Build.latest.scoped_pipeline.with_artifacts(reports_scope))
     end

     scope :with_only_interruptible_builds, -> do
-      where('NOT EXISTS (?)',
-        Ci::Build.where("#{Ci::Build.quoted_table_name}.commit_id = #{quoted_table_name}.id")
-          .with_status(STARTED_STATUSES)
-          .not_interruptible
-      )
+      where_not_exists(
+        Ci::Build.scoped_pipeline.with_status(STARTED_STATUSES).not_interruptible
+      )
     end
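The correlation that used to live in the hand-written `commit_id` string now comes from the `scoped_pipeline` scope (added in the `CommitStatus` hunk below), while `where_exists`/`where_not_exists` wrap the relation in an `EXISTS` subquery. A hedged sketch of how such helpers can be defined; the real definitions live on GitLab's `ApplicationRecord` and may differ in detail:

```ruby
# Hypothetical minimal versions of the helpers used above.
module ExistsHelpers
  # WHERE EXISTS (SELECT 1 FROM ...), with the correlation predicates
  # supplied by the relation itself (here, by scoped_pipeline).
  def where_exists(relation)
    where("EXISTS (?)", relation.select(1))
  end

  # Same idea, negated.
  def where_not_exists(relation)
    where("NOT EXISTS (?)", relation.select(1))
  end
end

# Usage sketch:
#   Ci::Pipeline.extend(ExistsHelpers)
#   Ci::Pipeline.where_exists(Ci::Build.latest.scoped_pipeline.with_artifacts(scope))
```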
@@ -999,15 +991,15 @@ module Ci
     end

     def builds_in_self_and_project_descendants
-      Ci::Build.latest.where(pipeline: self_and_project_descendants)
+      Ci::Build.in_partition(self).latest.where(pipeline: self_and_project_descendants)
     end

     def bridges_in_self_and_project_descendants
-      Ci::Bridge.latest.where(pipeline: self_and_project_descendants)
+      Ci::Bridge.in_partition(self).latest.where(pipeline: self_and_project_descendants)
     end

     def jobs_in_self_and_project_descendants
-      Ci::Processable.latest.where(pipeline: self_and_project_descendants)
+      Ci::Processable.in_partition(self).latest.where(pipeline: self_and_project_descendants)
     end

     def environments_in_self_and_project_descendants(deployment_status: nil)
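A hedged sketch of the `in_partition` scope these methods now call. In GitLab it is provided by the `Ci::Partitionable` concern; the shape below is an assumption and details may differ:

```ruby
require "active_support/concern"

# Hypothetical version of the scope used above.
module InPartition
  extend ActiveSupport::Concern

  included do
    # Pin the relation to the partition of the given record so the query
    # planner can prune the other partitions.
    scope :in_partition, ->(record) { where(partition_id: record.partition_id) }
  end
end
```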
@@ -99,6 +99,11 @@ class CommitStatus < Ci::ApplicationRecord
     preload(project: :namespace)
   end

+  scope :scoped_pipeline, -> do
+    where(arel_table[:commit_id].eq(Ci::Pipeline.arel_table[:id]))
+      .where(arel_table[:partition_id].eq(Ci::Pipeline.arel_table[:partition_id]))
+  end
+
   scope :match_id_and_lock_version, -> (items) do
     # it expects that items are an array of attributes to match
     # each hash needs to have `id` and `lock_version`
@@ -5,7 +5,9 @@ class Vulnerability < ApplicationRecord
   include EachBatch
   include IgnorableColumns

-  ignore_column %i[epic_id milestone_id last_edited_at], remove_with: '16.9', remove_after: '2023-01-13'
+  ignore_column %i[epic_id milestone_id last_edited_at start_date],
+    remove_with: '16.9',
+    remove_after: '2024-01-13'

   alias_attribute :vulnerability_id, :id
@@ -27,6 +27,9 @@ Redis::Cluster::SlotLoader.prepend(Gitlab::Patch::SlotLoader)
 Redis::Cluster::CommandLoader.prepend(Gitlab::Patch::CommandLoader)
 Redis::Cluster.prepend(Gitlab::Patch::RedisCluster)

+# this only instruments `RedisClient` used in `Sidekiq.redis`
+RedisClient.register(Gitlab::Instrumentation::RedisClientMiddleware)
+
 if Gitlab::Redis::Workhorse.params[:cluster].present?
   raise "Do not configure workhorse with a Redis Cluster as pub/sub commands are not cluster-compatible."
 end
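`RedisClient.register` is redis-client's global middleware hook: every client created afterwards runs the module's `call`/`call_pipelined` around each command. A minimal sketch of the contract (module name and output are illustrative, not GitLab's):

```ruby
require "redis-client"

# A middleware is a module that overrides call/call_pipelined and delegates
# to the underlying implementation with super.
module TimingMiddleware
  def call(command, redis_config)
    started = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    super
  ensure
    elapsed_ms = (Process.clock_gettime(Process::CLOCK_MONOTONIC) - started) * 1000
    warn "#{command.first}: #{elapsed_ms.round(2)}ms"
  end
end

# Global registration, as in the initializer above.
RedisClient.register(TimingMiddleware)

client = RedisClient.new # assumes a local Redis on the default port
client.call("PING")      # prints something like "PING: 0.42ms"
```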
@@ -28,21 +28,25 @@ def enable_semi_reliable_fetch_mode?
 end

 # Custom Queues configuration
-queues_config_hash = Gitlab::Redis::Queues.params
+queues_config_hash = Gitlab::Redis::Queues.redis_client_params

 enable_json_logs = Gitlab.config.sidekiq.log_format != 'text'

+# Sidekiq's `strict_args!` raises an exception by default in 7.0
+# https://github.com/sidekiq/sidekiq/blob/31bceff64e10d501323bc06ac0552652a47c082e/docs/7.0-Upgrade.md?plain=1#L59
+Sidekiq.strict_args!(false)
+
 Sidekiq.configure_server do |config|
   config[:strict] = false
   config[:queues] = Gitlab::SidekiqConfig.expand_queues(config[:queues])

   if enable_json_logs
-    config.log_formatter = Gitlab::SidekiqLogging::JSONFormatter.new
+    config.logger.formatter = Gitlab::SidekiqLogging::JSONFormatter.new
     config[:job_logger] = Gitlab::SidekiqLogging::StructuredLogger

     # Remove the default-provided handler. The exception is logged inside
     # Gitlab::SidekiqLogging::StructuredLogger
-    config.error_handlers.delete(Sidekiq::DEFAULT_ERROR_HANDLER)
+    config.error_handlers.delete(Sidekiq::Config::ERROR_HANDLER)
   end

   Sidekiq.logger.info "Listening on queues #{config[:queues].uniq.sort}"
@@ -107,8 +111,8 @@ Sidekiq.configure_client do |config|
   # We only need to do this for other clients. If Sidekiq-server is the
   # client scheduling jobs, we have access to the regular sidekiq logger that
   # writes to STDOUT
-  Sidekiq.logger = Gitlab::SidekiqLogging::ClientLogger.build
-  Sidekiq.logger.formatter = Gitlab::SidekiqLogging::JSONFormatter.new if enable_json_logs
+  config.logger = Gitlab::SidekiqLogging::ClientLogger.build
+  config.logger.formatter = Gitlab::SidekiqLogging::JSONFormatter.new if enable_json_logs

   config.client_middleware(&Gitlab::SidekiqMiddleware.client_configurator)
 end
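Most of the remaining Sidekiq hunks in this commit apply the same API migration: Sidekiq 7 replaces the global `Sidekiq[...]` options hash with an explicit configuration object. A short sketch of the change (values shown are the libraries' documented defaults, listed here as assumptions):

```ruby
require "sidekiq"

# Sidekiq 6.x exposed global settings through a hash-like accessor:
#   Sidekiq[:timeout]       # => 25
#   Sidekiq[:concurrency]   # => 10
#
# Sidekiq 7.x replaces that with a configuration object, which is what the
# hunks below switch to:
Sidekiq.default_configuration[:timeout]     # => 25
Sidekiq.default_configuration[:concurrency] # => 5 (Sidekiq 7 default)
Sidekiq.default_configuration.queues        # => ["default"]
```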
@@ -19,7 +19,7 @@ if ENV['ENABLE_SIDEKIQ_CLUSTER']
     # Allow sidekiq to cleanly terminate and push any running jobs back
     # into the queue. We use the configured timeout and add a small
     # grace period
-    sleep(Sidekiq[:timeout] + 5)
+    sleep(Sidekiq.default_configuration[:timeout] + 5)

     # Signaling the Sidekiq Pgroup as KILL is not forwarded to
     # a possible child process. In Sidekiq Cluster, all child Sidekiq
@@ -9,6 +9,7 @@ type: reference, api

 > - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/10242) in GitLab 12.6.
 > - `last_edited_at` [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/268154) in GitLab 16.7.
+> - `start_date` [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/268154) in GitLab 16.7.

 WARNING:
 This API is in the process of being deprecated and considered unstable.
@@ -96,7 +97,6 @@ Example response:
   "resolved_by_id": null,
   "resolved_on_default_branch": false,
   "severity": "low",
-  "start_date": null,
   "state": "detected",
   "title": "Regular Expression Denial of Service in debug",
   "updated_at": "2020-04-07T14:01:04.655Z",
@@ -183,7 +183,6 @@ Example response:
   "resolved_by_id": null,
   "resolved_on_default_branch": false,
   "severity": "low",
-  "start_date": null,
   "state": "detected",
   "title": "Regular Expression Denial of Service in debug",
   "updated_at": "2020-04-07T14:01:04.655Z",
@@ -8,6 +8,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w

 > - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/10242) in GitLab 12.6.
 > - `last_edited_at` [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/268154) in GitLab 16.7.
+> - `start_date` [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/268154) in GitLab 16.7.

 NOTE:
 The former Vulnerabilities API was renamed to Vulnerability Findings API
@@ -64,7 +65,6 @@ Example response:
   "updated_by_id": null,
   "last_edited_by_id": null,
   "closed_by_id": null,
-  "start_date": null,
   "due_date": null,
   "created_at": "2019-10-13T15:08:40.219Z",
   "updated_at": "2019-10-13T15:09:40.382Z",
@@ -113,7 +113,6 @@ Example response:
   "updated_by_id": null,
   "last_edited_by_id": null,
   "closed_by_id": null,
-  "start_date": null,
   "due_date": null,
   "created_at": "2019-10-13T15:08:40.219Z",
   "updated_at": "2019-10-13T15:09:40.382Z",
@@ -162,7 +161,6 @@ Example response:
   "updated_by_id": null,
   "last_edited_by_id": null,
   "closed_by_id": null,
-  "start_date": null,
   "due_date": null,
   "created_at": "2019-10-13T15:08:40.219Z",
   "updated_at": "2019-10-13T15:09:40.382Z",
@@ -211,7 +209,6 @@ Example response:
   "updated_by_id": null,
   "last_edited_by_id": null,
   "closed_by_id": null,
-  "start_date": null,
   "due_date": null,
   "created_at": "2019-10-13T15:08:40.219Z",
   "updated_at": "2019-10-13T15:09:40.382Z",
@@ -260,7 +257,6 @@ Example response:
   "updated_by_id": null,
   "last_edited_by_id": null,
   "closed_by_id": null,
-  "start_date": null,
   "due_date": null,
   "created_at": "2019-10-13T15:08:40.219Z",
   "updated_at": "2019-10-13T15:09:40.382Z",
@@ -41,7 +41,7 @@ bundle exec rails generate gitlab:analytics:internal_events \

 Where:

-- `time_frames`: Valid options are `7d` and `28d` if you provide a `unique` value and `all` for metrics without `unique`. We are working to make `7d` and `28d` work for metrics with `all` time frame in [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/411264).
+- `time_frames`: Valid options are `7d` and `28d` if you provide a `unique` value, and `7d`, `28d`, and `all` for metrics without `unique`.
 - `unique`: Valid options are `user.id`, `project.id`, and `namespace.id`, as they are logged as part of the standard context. We [are actively working](https://gitlab.com/gitlab-org/gitlab/-/issues/411255) on a way to define uniqueness on arbitrary properties sent with the event, such as `merge_request.id`.

 ## Trigger events
@@ -7,42 +7,65 @@ type: index, reference

 # Answer questions with GitLab Duo Chat **(ULTIMATE SAAS BETA)**

-> Introduced in GitLab 16.6 as an [Beta](../policy/experiment-beta-support.md#beta).
+> Introduced in GitLab 16.6 as a [Beta](../policy/experiment-beta-support.md#beta).

-You can get AI generated support from GitLab Duo Chat about the following topics:
+You can get AI-generated support from GitLab Duo Chat about:

 - How to use GitLab.
-- Questions about an issue.
-- Question about an epic.
-- Questions about a code file.
-- Follow-up questions to answers from the chat.
+- The contents of an issue or epic.
+- The contents of a code or CI/CD configuration file.
+
+You can also use GitLab Duo Chat to create code and CI/CD files.
+
+When you get an answer, you can ask follow-up questions to learn more.
+
+This is a Beta feature. We're continuously extending the capabilities and reliability of the responses.

-Example questions you might ask:
+## Ask about GitLab
+
+You can ask questions about how GitLab works. Things like:

 - `Explain the concept of a 'fork' in a concise manner.`
 - `Provide step-by-step instructions on how to reset a user's password.`
-- `Generate a summary for the issue identified via this link: <link to your issue>`
-- `Generate a concise summary of the description of the current issue.`

-The examples above all use data from either the issue or the GitLab documentation. However, you can also ask to generate code, CI/CD configurations, or to explain code. For example:
+## Ask about your work
+
+You can ask about GitLab issues and epics. For example:
+
+- `Generate a summary for the issue identified via this link: <link to your issue>`
+- `Generate a concise summary of the current issue.`
+
+## Ask about code
+
+You can also ask GitLab Duo Chat to generate code:

 - `Write a Ruby function that prints 'Hello, World!' when called.`
 - `Develop a JavaScript program that simulates a two-player Tic-Tac-Toe game. Provide both game logic and user interface, if applicable.`
-- `Create a .gitlab-ci.yml configuration file for testing and building a Ruby on Rails application in a GitLab CI/CD pipeline.`
+
+And you can ask GitLab Duo Chat to explain code:
+
+- `Provide a clear explanation of the given Ruby code: def sum(a, b) a + b end. Describe what this code does and how it works.`

-In addition to the provided prompts, feel free to ask follow-up questions to delve deeper into the topic or task at hand. This helps you get more detailed and precise responses tailored to your specific needs, whether it's for further clarification, elaboration, or additional assistance.
+## Ask about CI/CD
+
+You can ask GitLab Duo Chat to create a CI/CD configuration:
+
+- `Create a .gitlab-ci.yml configuration file for testing and building a Ruby on Rails application in a GitLab CI/CD pipeline.`

-- A follow-up to the question `Write a Ruby function that prints 'Hello, World!' when called.` could be:
-  - `Could you also explain how I can call and execute this Ruby function in a typical Ruby environment, such as the command line?`
+## Ask follow up questions
+
+You can ask follow-up questions to delve deeper into the topic or task at hand.
+This helps you get more detailed and precise responses tailored to your specific needs,
+whether it's for further clarification, elaboration, or additional assistance.

-This is a Beta feature. We're continuously extending the capabilities and reliability of the chat responses.
+A follow-up to the question `Write a Ruby function that prints 'Hello, World!' when called` could be:
+
+- `Can you also explain how I can call and execute this Ruby function in a typical Ruby environment, such as the command line?`

 ## Enable GitLab Duo Chat

-To use this feature, at least one group you're a member of must:
-
-- Have the [experiment and beta features setting](group/manage.md#enable-experiment-and-beta-features) enabled.
+To use this feature, at least one group you're a member of must
+have the [experiment and beta features setting](group/manage.md#enable-experiment-and-beta-features) enabled.

 ## Use GitLab Duo Chat
@@ -52,37 +75,66 @@ To use this feature, at least one group you're a member of must
 1. Enter your question in the chat input box and press **Enter** or select **Send**. It may take a few seconds for the interactive AI chat to produce an answer.
 1. You can ask a follow-up question.
 1. If you want to ask a new question unrelated to the previous conversation, you may receive better answers if you clear the context by typing `/reset` into the input box and selecting **Send**.
-1. If you want to delete all previous conversations, you may do so by typing `/clean` into the input box and selecting **Send**.
-1. IMPORTANT: Currently you have to refresh the page after using `/clean` command.

 NOTE:
 Only the last 50 messages are retained in the chat history. The chat history expires 3 days after last use.

-## Use GitLab Duo Chat in the Web IDE and VSCode **(ULTIMATE SAAS EXPERIMENT)**
+### Delete all conversations
+
+To delete all previous conversations:
+
+1. In the text box, type `/clean` and select **Send**.
+1. Refresh the page.
+
+## Use GitLab Duo Chat in the Web IDE and VS Code **(ULTIMATE SAAS EXPERIMENT)**

 > Introduced in GitLab 16.6 as an [EXPERIMENT](../policy/experiment-beta-support.md#experiment).

-To use GitLab Duo Chat in the Web IDE:
+### Web IDE
+
+To use GitLab Duo Chat in the Web IDE on GitLab.com:

-1. On the left sidebar, select **Search or go to** and find your project.
-1. Select a file. Then in the upper right, select **Edit > Open in Web IDE**.
-1. On the left sidebar, select **GitLab Duo Chat**. A drawer opens.
-1. In the text box, enter your question and press **Enter** or select **Send**. It may take a few seconds for the interactive AI chat to produce an answer.
+1. Open the Web IDE:
+   1. On the left sidebar, select **Search or go to** and find your project.
+   1. Select a file. Then in the upper right, select **Edit > Open in Web IDE**.
+1. Then open Chat by using one of the following methods:
+   - On the left sidebar, select **GitLab Duo Chat**.
+   - In the file that you have open in the editor, select some code.
+     1. Right-click and select **GitLab Duo Chat**.
+     1. Select **Explain selected code** or **Generate Tests**.
+   - Use the keyboard shortcut: <kbd>ALT</kbd>+<kbd>d</kbd> (on Windows and Linux) or <kbd>Option</kbd>+<kbd>d</kbd> (on Mac)
+1. In the message box, enter your question and press **Enter** or select **Send**.
+
+If you have selected code in the editor, this selection is sent along with your question to the AI. This way you can ask questions about this code selection. For instance, `Could you simplify this?`.

 ### GitLab Workflow extension for VS Code

-To disable GitLab Duo Chat in VS Code, go to the VS Code extension settings and clear the **Enable GitLab Duo Chat assistant** checkbox.
-
 To use GitLab Duo Chat in VS Code:

-1. Install the [GitLab Workflow extension](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow) in VS Code.
-1. In VS Code, open your GitLab project.
-1. On the left side of the toolbar, select **GitLab Duo Chat**. A drawer opens.
-1. In the text box, enter your question and press **Enter** or select **Send**. It may take a few seconds for the interactive AI chat to produce an answer.
+1. Install and set up the Workflow extension for VS Code:
+   1. In VS Code, download and install the [GitLab Workflow extension for VS Code](../editor_extensions/visual_studio_code/index.md#download-the-extension).
+   1. Configure the [GitLab Workflow extension](../editor_extensions/visual_studio_code/index.md#configure-the-extension).
+1. In VS Code, open a file. The file does not need to be a file in a Git repository.
+1. Open Chat by using one of the following methods:
+   - On the left sidebar, select **GitLab Duo Chat**.
+   - In the file that you have open in the editor, select some code.
+     1. Right-click and select **GitLab Duo Chat**.
+     1. Select **Explain selected code** or **Generate Tests**.
+   - Use the keyboard shortcut: <kbd>ALT</kbd>+<kbd>d</kbd> (on Windows and Linux) or <kbd>Option</kbd>+<kbd>d</kbd> (on Mac)
+1. In the message box, enter your question and press **Enter** or select **Send**.

-## Give Feedback
+If you have selected code in the editor, this selection is sent along with your question to the AI. This way you can ask questions about this code selection. For instance, `Could you simplify this?`.
+
+### Disable Chat in Web IDE and VS Code
+
+To disable GitLab Duo Chat in the Web IDE and VS Code:
+
+1. Go to **Settings > Extensions > GitLab Workflow (GitLab VSCode Extension)**.
+1. Clear the **Enable GitLab Duo Chat assistant** checkbox.
+
+## Give feedback

 Your feedback is important to us as we continually enhance your GitLab Duo Chat experience:
@@ -90,4 +142,4 @@ Your feedback is important to us as we continually enhance your GitLab Duo Chat
 - **Privacy Assurance**: Rest assured, we don't collect your prompts. Your privacy is respected, and your interactions remain private.

 To give feedback about a specific response, use the feedback buttons in the response message.
-Or, you can add a comment in the [feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/415591).
+Or, you can add a comment in the [feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/430124).
@@ -12,11 +12,11 @@ introduced_by_url: <%= options.fetch(:mr) %>
 time_frame: <%= args.third %>
 data_source: internal_events
 data_category: optional
-instrumentation_class: <%= class_name(args.third) %>
+instrumentation_class: <%= class_name %>
 distribution:<%= distributions %>
 tier:<%= tiers %>
 options:
   events:
     - <%= event %>
 events:
-  - name: <%= event %><%= unique(args.third) %>
+  - name: <%= event %><%= unique %>
@@ -10,22 +10,10 @@ module Gitlab
         '7d' => 'counts_7d',
         '28d' => 'counts_28d'
       }.freeze
-      TIME_FRAMES_DEFAULT = TIME_FRAME_DIRS.keys.tap do |time_frame_defaults|
-        time_frame_defaults.class_eval do
-          def to_s
-            join(", ")
-          end
-        end
-      end.freeze
-
-      ALLOWED_TIERS = %w[free premium ultimate].dup.tap do |tiers_default|
-        tiers_default.class_eval do
-          def to_s
-            join(", ")
-          end
-        end
-      end.freeze
+      TIME_FRAMES_DEFAULT = TIME_FRAME_DIRS.keys.freeze
+      TIME_FRAMES_ALLOWED_FOR_UNIQUE = (TIME_FRAMES_DEFAULT - ['all']).freeze
+
+      ALLOWED_TIERS = %w[free premium ultimate].freeze

       NEGATIVE_ANSWERS = %w[no n No NO N].freeze
       POSITIVE_ANSWERS = %w[yes y Yes YES Y].freeze
@@ -50,7 +38,6 @@ module Gitlab
         hide: true
       class_option :time_frames,
         optional: true,
-        default: TIME_FRAMES_DEFAULT,
        type: :array,
        banner: TIME_FRAMES_DEFAULT,
        desc: "Indicates the metrics time frames. Please select one or more from: #{TIME_FRAMES_DEFAULT}"
@@ -141,8 +128,8 @@ module Gitlab
         options[:event]
       end

-      def unique(time_frame)
-        return if time_frame == 'all'
+      def unique
+        return unless with_unique?

         "\n unique: #{options.fetch(:unique)}"
       end
@@ -178,16 +165,14 @@ module Gitlab
         Gitlab::VERSION.match('(\d+\.\d+)').captures.first
       end

-      def class_name(time_frame)
-        time_frame == 'all' ? 'TotalCountMetric' : 'RedisHLLMetric'
+      def class_name
+        with_unique? ? 'RedisHLLMetric' : 'TotalCountMetric'
       end

       def key_path(time_frame)
-        if time_frame == 'all'
-          "count_total_#{event}"
-        else
-          "count_distinct_#{options[:unique].sub('.', '_')}_from_#{event}_#{time_frame}"
-        end
+        return "count_distinct_#{options[:unique].sub('.', '_')}_from_#{event}_#{time_frame}" if with_unique?
+
+        "count_total_#{event}_#{time_frame}"
       end

       def metric_file_path(time_frame)
@@ -206,16 +191,18 @@ module Gitlab
         time_frames.each do |time_frame|
           validate_time_frame!(time_frame)

-          raise "The option: --unique is missing" if time_frame != 'all' && !options.key?('unique')
-
           validate_key_path!(time_frame)
         end
       end

       def validate_time_frame!(time_frame)
-        return if TIME_FRAME_DIRS.key?(time_frame)
+        unless TIME_FRAME_DIRS.key?(time_frame)
+          raise "Invalid time frame: #{time_frame}, allowed options are: #{TIME_FRAMES_DEFAULT}"
+        end

-        raise "Invalid time frame: #{time_frame}, allowed options are: #{TIME_FRAMES_DEFAULT}"
+        invalid_time_frame = with_unique? && TIME_FRAMES_ALLOWED_FOR_UNIQUE.exclude?(time_frame)
+
+        raise "Invalid time frame: #{time_frame} for a metric using `unique`" if invalid_time_frame
       end

       def validate_tiers!
@@ -252,12 +239,20 @@ module Gitlab
         end
       end

+      def with_unique?
+        options.key?(:unique)
+      end
+
       def free?
         options[:tiers].include? "free"
       end

       def time_frames
-        options[:time_frames]
+        @time_frames ||= options[:time_frames] || default_time_frames
+      end
+
+      def default_time_frames
+        with_unique? ? TIME_FRAMES_ALLOWED_FOR_UNIQUE : TIME_FRAMES_DEFAULT
       end

       def directory
@@ -40,7 +40,9 @@ module Gitlab
       store.subscribe ::MergeRequests::CreateApprovalNoteWorker, to: ::MergeRequests::ApprovedEvent
       store.subscribe ::MergeRequests::ResolveTodosAfterApprovalWorker, to: ::MergeRequests::ApprovedEvent
       store.subscribe ::MergeRequests::ExecuteApprovalHooksWorker, to: ::MergeRequests::ApprovedEvent
-      store.subscribe ::MergeRequests::SetReviewerReviewedWorker, to: ::MergeRequests::ApprovedEvent
+      store.subscribe ::MergeRequests::SetReviewerReviewedWorker,
+        to: ::MergeRequests::ApprovedEvent,
+        if: -> (event) { ::Feature.disabled?(:mr_request_changes, User.find_by_id(event.data[:current_user_id])) }
       store.subscribe ::Ml::ExperimentTracking::AssociateMlCandidateToPackageWorker,
         to: ::Packages::PackageCreatedEvent,
         if: -> (event) { ::Ml::ExperimentTracking::AssociateMlCandidateToPackageWorker.handles_event?(event) }
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+# This module references https://github.com/redis-rb/redis-client#instrumentation-and-middlewares
+# implementing `call`, and `call_pipelined`.
+module Gitlab
+  module Instrumentation
+    module RedisClientMiddleware
+      include RedisHelper
+
+      def call(command, redis_config)
+        instrumentation = instrumentation_class(redis_config)
+
+        result = instrument_call([command], instrumentation) do
+          super
+        end
+
+        measure_io(command, result, instrumentation) if ::RequestStore.active?
+
+        result
+      end
+
+      def call_pipelined(commands, redis_config)
+        instrumentation = instrumentation_class(redis_config)
+
+        result = instrument_call(commands, instrumentation, true) do
+          super
+        end
+
+        measure_io(commands, result, instrumentation) if ::RequestStore.active?
+
+        result
+      end
+
+      private
+
+      def measure_io(command, result, instrumentation)
+        measure_write_size(command, instrumentation)
+        measure_read_size(result, instrumentation)
+      end
+
+      def instrumentation_class(config)
+        config.custom[:instrumentation_class]
+      end
+    end
+  end
+end
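The middleware reads `config.custom[:instrumentation_class]`, which callers supply through redis-client's `custom:` configuration option. A small sketch of how per-connection data reaches a middleware this way (the key and value below are illustrative):

```ruby
require "redis-client"

config = RedisClient.config(
  host: "localhost",
  port: 6379,
  custom: { instrumentation_class: "SharedState" } # arbitrary middleware data
)

client = config.new_client
# Inside a registered middleware, redis_config.custom[:instrumentation_class]
# now returns "SharedState" for calls made by this client.
```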
@@ -15,7 +15,7 @@ module Gitlab
       end

       yield
-    rescue ::Redis::BaseError => ex
+    rescue ::Redis::BaseError, ::RedisClient::Error => ex
       if ex.message.start_with?('MOVED', 'ASK')
         instrumentation_class.instance_count_cluster_redirection(ex)
       else
@@ -18,8 +18,8 @@ module Gitlab
         return true unless @alive

         # Tell sidekiq to restart itself
-        # Keep extra safe to wait `Sidekiq[:timeout] + 2` seconds before SIGKILL
-        send_signal(:TERM, $$, 'gracefully shut down', Sidekiq[:timeout] + 2)
+        # Keep extra safe to wait `Sidekiq.default_configuration[:timeout] + 2` seconds before SIGKILL
+        send_signal(:TERM, $$, 'gracefully shut down', Sidekiq.default_configuration[:timeout] + 2)
         return true unless @alive

         # Ideally we should never reach this condition
@@ -7,11 +7,11 @@
 require 'sidekiq/version'
 require 'sidekiq/cron/version'

-if Gem::Version.new(Sidekiq::VERSION) != Gem::Version.new('6.5.12')
+if Gem::Version.new(Sidekiq::VERSION) != Gem::Version.new('7.1.6')
   raise 'New version of sidekiq detected, please remove or update this patch'
 end

-if Gem::Version.new(Sidekiq::Cron::VERSION) != Gem::Version.new('1.8.0')
+if Gem::Version.new(Sidekiq::Cron::VERSION) != Gem::Version.new('1.9.1')
   raise 'New version of sidekiq-cron detected, please remove or update this patch'
 end
@@ -19,7 +19,7 @@ module Gitlab
     InvalidPathError = Class.new(StandardError)

     class << self
-      delegate :params, :url, :store, :encrypted_secrets, to: :new
+      delegate :params, :url, :store, :encrypted_secrets, :redis_client_params, to: :new

       def with
         pool.with { |redis| yield redis }
@@ -96,6 +96,27 @@ module Gitlab
       redis_store_options
     end

+    # redis_client_params modifies redis_store_options to be compatible with redis-client
+    # TODO: when redis-rb is updated to v5, there is no need to support 2 types of config format
+    def redis_client_params
+      options = redis_store_options
+      options[:custom] = { instrumentation_class: options[:instrumentation_class] }
+
+      # TODO: add support for cluster when upgrading to redis-rb v5.y.z we do not need cluster support
+      # as Sidekiq workload should not and does not run in a Redis Cluster
+      # support to be added in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/134862
+      if options[:sentinels]
+        # name is required in RedisClient::SentinelConfig
+        # https://github.com/redis-rb/redis-client/blob/1ab081c1d0e47df5d55e011c9390c70b2eef6731/lib/redis_client/sentinel_config.rb#L17
+        options[:name] = options[:host]
+        options.except(:scheme, :instrumentation_class, :host, :port)
+      else
+        # remove disallowed keys as seen in
+        # https://github.com/redis-rb/redis-client/blob/1ab081c1d0e47df5d55e011c9390c70b2eef6731/lib/redis_client/config.rb#L21
+        options.except(:scheme, :instrumentation_class)
+      end
+    end
+
     def url
       raw_config_hash[:url]
     end
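A worked example of the option rewrite `redis_client_params` performs for the non-sentinel branch, with made-up values. redis-rb style options go in; options acceptable to `RedisClient::Config` come out:

```ruby
redis_store_options = {
  scheme: "redis",
  host: "redis.example.com",
  port: 6379,
  instrumentation_class: "SharedState"
}

redis_client_options = redis_store_options
  .merge(custom: { instrumentation_class: redis_store_options[:instrumentation_class] })
  .except(:scheme, :instrumentation_class)

redis_client_options
# => { host: "redis.example.com", port: 6379,
#      custom: { instrumentation_class: "SharedState" } }
# :scheme is dropped because RedisClient::Config rejects unknown keys, and
# :instrumentation_class moves under :custom so middleware can read it.
```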
@@ -94,7 +94,7 @@ module Gitlab
       #
       # These threads execute Sidekiq client middleware when jobs
       # are enqueued and those can access DB / Redis.
-      threads += Sidekiq[:concurrency] + 2
+      threads += Sidekiq.default_configuration[:concurrency] + 2
     end

     if puma?
@@ -1,6 +1,7 @@
 # frozen_string_literal: true

 require 'yaml'
+require 'sidekiq/capsule'

 module Gitlab
   module SidekiqConfig
@@ -161,7 +162,7 @@ module Gitlab
       # the current Sidekiq process
       def current_worker_queue_mappings
         worker_queue_mappings
-          .select { |worker, queue| Sidekiq[:queues].include?(queue) }
+          .select { |worker, queue| Sidekiq.default_configuration.queues.include?(queue) }
           .to_h
       end
@@ -16,11 +16,11 @@ module Gitlab

         ActiveRecord::LogSubscriber.reset_runtime

-        Sidekiq.logger.info log_job_start(job, base_payload)
+        @logger.info log_job_start(job, base_payload)

         yield

-        Sidekiq.logger.info log_job_done(job, started_time, base_payload)
+        @logger.info log_job_done(job, started_time, base_payload)
       rescue Sidekiq::JobRetry::Handled => job_exception
         # Sidekiq::JobRetry::Handled is raised by the internal Sidekiq
         # processor. It is a wrapper around real exception indicating an
@@ -29,11 +29,11 @@ module Gitlab
         #
         # For more information:
         # https://github.com/mperham/sidekiq/blob/v5.2.7/lib/sidekiq/processor.rb#L173
-        Sidekiq.logger.warn log_job_done(job, started_time, base_payload, job_exception.cause || job_exception)
+        @logger.warn log_job_done(job, started_time, base_payload, job_exception.cause || job_exception)

         raise
       rescue StandardError => job_exception
-        Sidekiq.logger.warn log_job_done(job, started_time, base_payload, job_exception)
+        @logger.warn log_job_done(job, started_time, base_payload, job_exception)

         raise
       end
@@ -64,7 +64,7 @@ module Gitlab
       def initialize_process_metrics
         metrics = self.metrics

-        metrics[:sidekiq_concurrency].set({}, Sidekiq[:concurrency].to_i)
+        metrics[:sidekiq_concurrency].set({}, Sidekiq.default_configuration[:concurrency].to_i)

         return unless ::Feature.enabled?(:sidekiq_job_completion_metric_initialize)
@@ -16,17 +16,14 @@ module Gitlab
      # Migrate jobs in SortedSets, i.e. scheduled and retry sets.
      def migrate_set(sidekiq_set)
        source_queues_regex = Regexp.union(mappings.keys)
-        cursor = 0
        scanned = 0
        migrated = 0

        estimated_size = Sidekiq.redis { |c| c.zcard(sidekiq_set) }
        logger&.info("Processing #{sidekiq_set} set. Estimated size: #{estimated_size}.")

-        begin
-          cursor, jobs = Sidekiq.redis { |c| c.zscan(sidekiq_set, cursor) }
-
-          jobs.each do |(job, score)|
+        Sidekiq.redis do |c|
+          c.zscan(sidekiq_set) do |job, score|
            if scanned > 0 && scanned % LOG_FREQUENCY == 0
              logger&.info("In progress. Scanned records: #{scanned}. Migrated records: #{migrated}.")
            end
@@ -45,7 +42,7 @@ module Gitlab

            migrated += migrate_job_in_set(sidekiq_set, job, score, job_hash)
          end
-        end while cursor.to_i != 0
+        end

        logger&.info("Done. Scanned records: #{scanned}. Migrated records: #{migrated}.")
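The manual cursor loop is gone because the redis-rb 5 client drives the SCAN/ZSCAN cursor internally when given a block, which is the style this migration code adopts. A short sketch (assumes a local Redis and the redis gem at version 5):

```ruby
require "redis"

redis = Redis.new # assumes a local Redis on the default port

# redis-rb 4: manage the cursor yourself, or use the *_each helpers,
# e.g. redis.scan_each(match: "queue:*").
#
# redis-rb 5: pass a block and the client loops until the cursor wraps.
redis.scan("MATCH", "queue:*") do |key|
  puts key
end

redis.zscan("schedule") do |member, score|
  puts "#{member}: #{score}"
end
```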
@@ -61,7 +58,7 @@ module Gitlab
        logger&.info("List of queues based on routing rules: #{routing_rules_queues}")
        Sidekiq.redis do |conn|
-          # Redis 6 supports conn.scan_each(match: "queue:*", type: 'list')
-          conn.scan_each(match: "queue:*") do |key|
+          conn.scan("MATCH", "queue:*") do |key|
+            # Redis 5 compatibility
            next unless conn.type(key) == 'list'
@@ -101,13 +98,9 @@ module Gitlab
        Sidekiq.redis do |connection|
          removed = connection.zrem(sidekiq_set, job)

-          if removed
-            connection.zadd(sidekiq_set, score, Gitlab::Json.dump(job_hash))
+          connection.zadd(sidekiq_set, score, Gitlab::Json.dump(job_hash)) if removed > 0

-            1
-          else
-            0
-          end
+          removed
        end
      end
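The `removed > 0` guard reflects a return-type change: with redis-rb 5 backed by redis-client, `zrem` returns the number of removed members rather than the boolified value older versions produced, so the integer can also be returned directly as the migration count. A tiny sketch (assumes a local Redis):

```ruby
require "redis"

redis = Redis.new # assumes a local Redis on the default port
redis.zadd("myset", 1.0, "job")

removed = redis.zrem("myset", "job")
removed     # => 1 (Integer count in redis-rb 5; 4.x boolified single removals)
removed > 0 # the explicit check the migration code now uses
```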
@@ -1,6 +1,6 @@
 import Vue from 'vue';
 import VueRouter from 'vue-router';
-import { GlAvatar, GlBadge, GlButton, GlSprintf } from '@gitlab/ui';
+import { GlAvatar, GlBadge, GlSprintf } from '@gitlab/ui';
 import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
 import { createRouter } from '~/ci/catalog/router/index';
 import CiResourcesListItem from '~/ci/catalog/components/list/ci_resources_list_item.vue';
@@ -10,6 +10,7 @@ import { catalogSinglePageResponse } from '../../mock';

 Vue.use(VueRouter);

+const defaultEvent = { preventDefault: jest.fn, ctrlKey: false, metaKey: false };
 let router;
 let routerPush;
@@ -43,7 +44,7 @@ describe('CiResourcesListItem', () => {
   const findAvatar = () => wrapper.findComponent(GlAvatar);
   const findBadge = () => wrapper.findComponent(GlBadge);
-  const findResourceName = () => wrapper.findComponent(GlButton);
+  const findResourceName = () => wrapper.findByTestId('ci-resource-link');
   const findResourceDescription = () => wrapper.findByText(defaultProps.resource.description);
   const findUserLink = () => wrapper.findByTestId('user-link');
   const findTimeAgoMessage = () => wrapper.findComponent(GlSprintf);
@@ -70,8 +71,11 @@ describe('CiResourcesListItem', () => {
     });
   });

-  it('renders the resource name button', () => {
+  it('renders the resource name and link', () => {
     expect(findResourceName().exists()).toBe(true);
+    expect(findResourceName().attributes().href).toBe(
+      `/${getIdFromGraphQLId(defaultProps.resource.id)}`,
+    );
   });

   it('renders the resource version badge', () => {
@@ -121,18 +125,37 @@ describe('CiResourcesListItem', () => {
   });

   describe('when clicking on an item title', () => {
-    beforeEach(async () => {
+    beforeEach(() => {
       createComponent();
-
-      await findResourceName().vm.$emit('click');
     });

-    it('navigates to the details page', () => {
-      expect(routerPush).toHaveBeenCalledWith({
-        name: CI_RESOURCE_DETAILS_PAGE_NAME,
-        params: {
-          id: getIdFromGraphQLId(resource.id),
-        },
+    describe('without holding down a modifier key', () => {
+      beforeEach(async () => {
+        await findResourceName().vm.$emit('click', defaultEvent);
+      });
+
+      it('navigates to the details page in the same tab', () => {
+        expect(routerPush).toHaveBeenCalledWith({
+          name: CI_RESOURCE_DETAILS_PAGE_NAME,
+          params: {
+            id: getIdFromGraphQLId(resource.id),
+          },
+        });
+      });
+    });
+
+    describe.each`
+      keyName
+      ${'ctrlKey'}
+      ${'metaKey'}
+    `('when $keyName is being held down', ({ keyName }) => {
+      beforeEach(async () => {
+        createComponent();
+        await findResourceName().vm.$emit('click', { ...defaultEvent, [keyName]: true });
+      });
+
+      it('does not call VueRouter push', () => {
+        expect(routerPush).not.toHaveBeenCalled();
+      });
     });
   });
@@ -141,7 +164,7 @@ describe('CiResourcesListItem', () => {
     beforeEach(async () => {
       createComponent();

-      await findAvatar().vm.$emit('click');
+      await findAvatar().vm.$emit('click', defaultEvent);
     });

     it('navigates to the details page', () => {
@@ -160,6 +183,7 @@ describe('CiResourcesListItem', () => {
       createComponent({
         props: {
           resource: {
             ...resource,
             starCount: 0,
           },
         },
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
   let(:unique) { "user.id" }
   let(:time_frames) { %w[7d] }
   let(:include_default_identifiers) { 'yes' }
-  let(:options) do
+  let(:base_options) do
     {
       time_frames: time_frames,
       free: true,
@@ -31,60 +31,7 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
     }.stringify_keys
   end

-  let(:key_path_without_time_frame) { "count_distinct_#{unique.sub('.', '_')}_from_#{event}" }
-  let(:key_path_7d) { "#{key_path_without_time_frame}_7d" }
-  let(:metric_definition_path_7d) { Dir.glob(File.join(temp_dir, "metrics/counts_7d/#{key_path_7d}.yml")).first }
-  let(:metric_definition_7d) do
-    {
-      "key_path" => key_path_7d,
-      "description" => description,
-      "product_section" => section,
-      "product_stage" => stage,
-      "product_group" => group,
-      "performance_indicator_type" => [],
-      "value_type" => "number",
-      "status" => "active",
-      "milestone" => "13.9",
-      "introduced_by_url" => mr,
-      "time_frame" => "7d",
-      "data_source" => "internal_events",
-      "data_category" => "optional",
-      "instrumentation_class" => "RedisHLLMetric",
-      "distribution" => %w[ce ee],
-      "tier" => %w[free premium ultimate],
-      "options" => {
-        "events" => [event]
-      },
-      "events" => [{ "name" => event, "unique" => unique }]
-    }
-  end
-
-  let(:key_path_all) { "count_total_#{event}" }
-  let(:metric_definition_path_all) { Dir.glob(File.join(temp_dir, "metrics/counts_all/#{key_path_all}.yml")).first }
-  let(:metric_definition_all) do
-    {
-      "key_path" => key_path_all,
-      "description" => description,
-      "product_section" => section,
-      "product_stage" => stage,
-      "product_group" => group,
-      "performance_indicator_type" => [],
-      "value_type" => "number",
-      "status" => "active",
-      "milestone" => "13.9",
-      "introduced_by_url" => mr,
-      "time_frame" => "all",
-      "data_source" => "internal_events",
-      "data_category" => "optional",
-      "instrumentation_class" => "TotalCountMetric",
-      "distribution" => %w[ce ee],
-      "tier" => %w[free premium ultimate],
-      "options" => {
-        "events" => [event]
-      },
-      "events" => [{ "name" => event }]
-    }
-  end
+  let(:options) { base_options }

   before do
     stub_const("#{described_class}::TOP_LEVEL_DIR_EE", ee_temp_dir)
@@ -189,35 +136,87 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
   end

   describe 'Creating metric definition file' do
+    let(:metric_dir) { temp_dir }
+    let(:base_key_path_unique) { "count_distinct_#{unique.sub('.', '_')}_from_#{event}" }
+    let(:base_key_path_total) { "count_total_#{event}" }
+    let(:base_metric_definition) do
+      {
+        "description" => description,
+        "product_section" => section,
+        "product_stage" => stage,
+        "product_group" => group,
+        "performance_indicator_type" => [],
+        "value_type" => "number",
+        "status" => "active",
+        "milestone" => "13.9",
+        "introduced_by_url" => mr,
+        "data_source" => "internal_events",
+        "data_category" => "optional",
+        "distribution" => %w[ce ee],
+        "tier" => %w[free premium ultimate],
+        "options" => {
+          "events" => [event]
+        }
+      }
+    end
+
+    let(:metric_definition_extra) { {} }
+
+    shared_examples 'creates unique metric definitions' do |time_frames|
+      it 'creates a metric definition for each of the time frames' do
+        described_class.new([], options).invoke_all
+
+        time_frames.each do |time_frame|
+          key_path = "#{base_key_path_unique}_#{time_frame}"
+          metric_definition_path = Dir.glob(File.join(metric_dir, "metrics/counts_#{time_frame}/#{key_path}.yml")).first
+          metric_definition = base_metric_definition.merge(
+            "key_path" => key_path,
+            "time_frame" => time_frame,
+            "instrumentation_class" => "RedisHLLMetric",
+            "events" => [{ "name" => event, "unique" => unique }]
+          ).merge(metric_definition_extra)
+          expect(YAML.safe_load(File.read(metric_definition_path))).to eq(metric_definition)
+        end
+      end
+    end
+
+    shared_examples 'creates total metric definitions' do |time_frames|
+      it 'creates a metric definition for each of the time frames' do
+        described_class.new([], options).invoke_all
+
+        time_frames.each do |time_frame|
+          key_path = "#{base_key_path_total}_#{time_frame}"
+          metric_definition_path = Dir.glob(File.join(metric_dir, "metrics/counts_#{time_frame}/#{key_path}.yml")).first
+          metric_definition = base_metric_definition.merge(
+            "key_path" => key_path,
+            "time_frame" => time_frame,
+            "instrumentation_class" => "TotalCountMetric",
+            "events" => [{ "name" => event }]
+          ).merge(metric_definition_extra)
+          expect(YAML.safe_load(File.read(metric_definition_path))).to eq(metric_definition)
+        end
+      end
+    end
+
     context 'for single time frame' do
       let(:time_frames) { %w[7d] }

-      it 'creates a metric definition file' do
-        described_class.new([], options).invoke_all
+      it_behaves_like 'creates unique metric definitions', %w[7d]

-        expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
-      end
-
-      context 'with time frame "all"' do
+      context 'with time frame "all" and no "unique"' do
         let(:time_frames) { %w[all] }

-        it 'creates a total count metric definition file' do
-          described_class.new([], options).invoke_all
-          expect(YAML.safe_load(File.read(metric_definition_path_all))).to eq(metric_definition_all)
-        end
+        let(:options) { base_options.except('unique') }
+
+        it_behaves_like 'creates total metric definitions', %w[all]
       end

       context 'for ultimate only feature' do
-        let(:metric_definition_path_7d) do
-          Dir.glob(File.join(ee_temp_dir, temp_dir, "metrics/counts_7d/#{key_path_7d}.yml")).first
-        end
+        let(:metric_dir) { File.join(ee_temp_dir, temp_dir) }
+        let(:options) { base_options.merge(tiers: %w[ultimate]) }
+        let(:metric_definition_extra) { { "tier" => ["ultimate"], "distribution" => ["ee"] } }

-        it 'creates a metric definition file' do
-          described_class.new([], options.merge(tiers: %w[ultimate])).invoke_all
-
-          expect(YAML.safe_load(File.read(metric_definition_path_7d)))
-            .to eq(metric_definition_7d.merge("tier" => ["ultimate"], "distribution" => ["ee"]))
-        end
+        it_behaves_like 'creates unique metric definitions', %w[7d]
       end

       context 'with invalid time frame' do
@@ -228,7 +227,16 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
         end
       end

+      context 'with invalid time frame for unique metrics' do
+        let(:time_frames) { %w[all] }
+
+        it 'raises error' do
+          expect { described_class.new([], options).invoke_all }.to raise_error(RuntimeError)
+        end
+      end
+
       context 'with duplicated key path' do
+        let(:key_path_7d) { "#{base_key_path_unique}_7d" }
         let(:existing_key_paths) { { key_path_7d => true } }

         it 'raises error' do
@@ -252,14 +260,14 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat

       context 'without obligatory parameter' do
         it 'raises error', :aggregate_failures do
-          %w[unique event mr section stage group].each do |option|
+          %w[event mr section stage group].each do |option|
             expect { described_class.new([], options.without(option)).invoke_all }
               .to raise_error(RuntimeError)
           end
         end
       end

-      context 'with to short description' do
+      context 'with too short description' do
         it 'asks again for description' do
           allow_next_instance_of(described_class) do |instance|
             allow(instance).to receive(:ask)
@@ -281,42 +289,28 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
     end

     context 'for multiple time frames' do
-      let(:time_frames) { %w[7d 28d all] }
-      let(:key_path_28d) { "#{key_path_without_time_frame}_28d" }
-      let(:metric_definition_path_28d) { Dir.glob(File.join(temp_dir, "metrics/counts_28d/#{key_path_28d}.yml")).first }
-      let(:metric_definition_28d) do
-        metric_definition_7d.merge(
-          "key_path" => key_path_28d,
-          "time_frame" => "28d"
-        )
-      end
+      let(:time_frames) { %w[7d 28d] }

-      it 'creates metric definition files' do
-        described_class.new([], options).invoke_all
-
-        expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
-        expect(YAML.safe_load(File.read(metric_definition_path_28d))).to eq(metric_definition_28d)
-        expect(YAML.safe_load(File.read(metric_definition_path_all))).to eq(metric_definition_all)
-      end
+      it_behaves_like 'creates unique metric definitions', %w[7d 28d]
     end

     context 'with default time frames' do
-      let(:time_frames) { nil }
-      let(:key_path_28d) { "#{key_path_without_time_frame}_28d" }
-      let(:metric_definition_path_28d) { Dir.glob(File.join(temp_dir, "metrics/counts_28d/#{key_path_28d}.yml")).first }
-      let(:metric_definition_28d) do
-        metric_definition_7d.merge(
-          "key_path" => key_path_28d,
-          "time_frame" => "28d"
-        )
-      end
+      let(:options) { base_options.without('time_frames', 'unique') }

-      it 'creates metric definition files' do
-        described_class.new([], options.without('time_frames')).invoke_all
+      it_behaves_like 'creates total metric definitions', %w[7d 28d all]

-        expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
-        expect(YAML.safe_load(File.read(metric_definition_path_28d))).to eq(metric_definition_28d)
-        expect(YAML.safe_load(File.read(metric_definition_path_all))).to eq(metric_definition_all)
+      context 'with unique' do
+        let(:options) { base_options.without('time_frames') }
+
+        it_behaves_like 'creates unique metric definitions', %w[7d 28d]
+
+        it "doesn't create a total 'all' metric" do
+          described_class.new([], options).invoke_all
+
+          key_path = "#{base_key_path_total}_all"
+
+          expect(Dir.glob(File.join(metric_dir, "metrics/counts_all/#{key_path}.yml")).first).to be_nil
+        end
+      end
     end
   end
@@ -0,0 +1,224 @@
# frozen_string_literal: true

require 'spec_helper'
require 'rspec-parameterized'
require 'support/helpers/rails_helpers'

RSpec.describe Gitlab::Instrumentation::RedisClientMiddleware, :request_store, feature_category: :scalability do
  using RSpec::Parameterized::TableSyntax
  include RedisHelpers

  let_it_be(:redis_store_class) { define_helper_redis_store_class }
  let_it_be(:redis_client) { RedisClient.new(redis_store_class.redis_client_params) }

  before do
    redis_client.call("flushdb")
  end

  describe 'read and write' do
    where(:setup, :command, :expect_write, :expect_read) do
      # The response is 'OK', the request size is the combined size of array
      # elements. Exercise counting of a status reply.
      [] | [:set, 'foo', 'bar'] | (3 + 3 + 3) | 2

      # The response is 1001, so 4 bytes. Exercise counting an integer reply.
      [[:set, 'foobar', 1000]] | [:incr, 'foobar'] | (4 + 6) | 4

      # Exercise counting empty multi bulk reply. Returns an empty hash `{}`
      [] | [:hgetall, 'foobar'] | (7 + 6) | 2

      # Hgetall response length is combined length of keys and values in the
      # hash. Exercises counting of a multi bulk reply
      # Returns `{"field"=>"hello world"}`, 5 for field, 11 for hello world, 8 for {, }, 4 "s, =, >
      [[:hset, 'myhash', 'field', 'hello world']] | [:hgetall, 'myhash'] | (7 + 6) | (5 + 11 + 8)

      # Exercise counting of a bulk reply
      [[:set, 'foo', 'bar' * 100]] | [:get, 'foo'] | (3 + 3) | (3 * 100)

      # Nested array response: [['foo', 0.0], ['bar', 1.0]]. Returns scores as float.
      [[:zadd, 'myset', 0, 'foo'],
       [:zadd, 'myset', 1, 'bar']] | [:zrange, 'myset', 0, -1, 'withscores'] | (6 + 5 + 1 + 2 + 10) | (3 + 3 + 3 + 3)
    end

    with_them do
      it 'counts bytes read and written' do
        setup.each { |cmd| redis_client.call(*cmd) }
        RequestStore.clear!
        redis_client.call(*command)

        expect(Gitlab::Instrumentation::Redis.read_bytes).to eq(expect_read)
        expect(Gitlab::Instrumentation::Redis.write_bytes).to eq(expect_write)
      end
    end
  end

  describe 'counting' do
    let(:instrumentation_class) { redis_store_class.instrumentation_class }

    it 'counts successful requests' do
      expect(instrumentation_class).to receive(:instance_count_request).with(1).and_call_original

      redis_client.call(:get, 'foobar')
    end

    it 'counts successful pipelined requests' do
      expect(instrumentation_class).to receive(:instance_count_request).with(2).and_call_original
      expect(instrumentation_class).to receive(:instance_count_pipelined_request).with(2).and_call_original

      redis_client.pipelined do |pipeline|
        pipeline.call(:get, '{foobar}buz')
        pipeline.call(:get, '{foobar}baz')
      end
    end

    context 'when encountering exceptions' do
      before do
        allow(redis_client.instance_variable_get(:@raw_connection)).to receive(:call).and_raise(
          RedisClient::ConnectionError, 'Connection was closed or lost')
      end

      it 'counts exception' do
        expect(instrumentation_class).to receive(:instance_count_exception)
          .with(instance_of(RedisClient::ConnectionError)).and_call_original
        expect(instrumentation_class).to receive(:log_exception)
          .with(instance_of(RedisClient::ConnectionError)).and_call_original
        expect(instrumentation_class).to receive(:instance_count_request).and_call_original

        expect do
          redis_client.call(:auth, 'foo', 'bar')
        end.to raise_error(RedisClient::Error)
      end
    end

    context 'in production environment' do
      before do
        stub_rails_env('production') # to avoid raising CrossSlotError
      end

      it 'counts disallowed cross-slot requests' do
        expect(instrumentation_class).to receive(:increment_cross_slot_request_count).and_call_original
        expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original

        redis_client.call(:mget, 'foo', 'bar')
      end

      it 'does not count allowed cross-slot requests' do
        expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
        expect(instrumentation_class).to receive(:increment_allowed_cross_slot_request_count).and_call_original

        Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
          redis_client.call(:mget, 'foo', 'bar')
        end
      end

      it 'does not count allowed non-cross-slot requests' do
        expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
        expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original

        Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
          redis_client.call(:mget, 'bar')
        end
      end

      it 'skips count for non-cross-slot requests' do
        expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
        expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original

        redis_client.call(:mget, '{foo}bar', '{foo}baz')
      end
    end

    context 'without active RequestStore' do
      before do
        ::RequestStore.end!
      end

      it 'still runs cross-slot validation' do
        expect do
          redis_client.call('mget', 'foo', 'bar')
        end.to raise_error(instance_of(Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError))
      end
    end
  end

  describe 'latency' do
    let(:instrumentation_class) { redis_store_class.instrumentation_class }

    describe 'commands in the apdex' do
      where(:command) do
        [
          [[:get, 'foobar']],
          [%w[GET foobar]]
        ]
      end

      with_them do
        it 'measures requests we want in the apdex' do
          expect(instrumentation_class).to receive(:instance_observe_duration).with(a_value > 0)
            .and_call_original

          redis_client.call(*command)
        end
      end

      context 'with pipelined commands' do
        it 'measures requests that do not have blocking commands' do
          expect(instrumentation_class).to receive(:instance_observe_duration).twice.with(a_value > 0)
            .and_call_original

          redis_client.pipelined do |pipeline|
            pipeline.call(:get, '{foobar}buz')
            pipeline.call(:get, '{foobar}baz')
          end
        end

        it 'raises error when keys are not from the same slot' do
          expect do
            redis_client.pipelined do |pipeline|
              pipeline.call(:get, 'foo')
              pipeline.call(:get, 'bar')
            end
          end.to raise_error(instance_of(Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError))
        end
      end
    end

    describe 'commands not in the apdex' do
      where(:setup, :command) do
        [['rpush', 'foobar', 1]] | ['brpop', 'foobar', 0]
        [['rpush', 'foobar', 1]] | ['blpop', 'foobar', 0]
        [['rpush', '{abc}foobar', 1]] | ['brpoplpush', '{abc}foobar', '{abc}bazqux', 0]
        [['rpush', '{abc}foobar', 1]] | ['brpoplpush', '{abc}foobar', '{abc}bazqux', 0]
        [['zadd', 'foobar', 1, 'a']] | ['bzpopmin', 'foobar', 0]
        [['zadd', 'foobar', 1, 'a']] | ['bzpopmax', 'foobar', 0]
        [['xadd', 'mystream', 1, 'myfield', 'mydata']] | ['xread', 'block', 1, 'streams', 'mystream', '0-0']
        [['xadd', 'foobar', 1, 'myfield', 'mydata'],
         ['xgroup', 'create', 'foobar', 'mygroup',
          0]] | ['xreadgroup', 'group', 'mygroup', 'myconsumer', 'block', 1, 'streams', 'foobar', '0-0']
        [] | ['command']
      end

      with_them do
        it 'skips requests we do not want in the apdex' do
          setup.each { |cmd| redis_client.call(*cmd) }

          expect(instrumentation_class).not_to receive(:instance_observe_duration)

          redis_client.call(*command)
        end
      end

      context 'with pipelined commands' do
        it 'skips requests that have blocking commands' do
          expect(instrumentation_class).not_to receive(:instance_observe_duration)

          redis_client.pipelined do |pipeline|
            pipeline.call(:get, '{foobar}buz')
            pipeline.call(:rpush, '{foobar}baz', 1)
            pipeline.call(:brpop, '{foobar}baz', 0)
          end
        end
      end
    end
  end
end
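For context on what this new spec exercises: `redis-client` exposes a module-based middleware API (`RedisClient.register`), and an instrumentation middleware can wrap `call`/`call_pipelined` to count the request and response sizes the byte-counting table above expects. The sketch below is illustrative only — the module name and accumulator hash are hypothetical, not GitLab's implementation, which keeps these counters in the per-request store:

    require 'redis_client'

    # Hypothetical byte-counting middleware, assuming redis-client's documented
    # middleware hooks (#call / #call_pipelined dispatched via super).
    module ByteCountingMiddleware
      COUNTS = { read: 0, written: 0 } # illustrative global accumulator

      def call(command, redis_config)
        # Request size: combined byte size of the command's array elements.
        COUNTS[:written] += command.sum { |arg| arg.to_s.bytesize }
        result = super
        # Crude reply-size estimate; the real middleware sizes each RESP reply type.
        COUNTS[:read] += result.to_s.bytesize
        result
      end

      def call_pipelined(commands, redis_config)
        COUNTS[:written] += commands.sum { |cmd| cmd.sum { |arg| arg.to_s.bytesize } }
        results = super
        COUNTS[:read] += results.to_s.bytesize
        results
      end
    end

    RedisClient.register(ByteCountingMiddleware)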
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ

  before do
    allow(Gitlab::Metrics::System).to receive(:monotonic_time)
      .and_return(0, 1, shutdown_timeout_seconds, 0, 1, Sidekiq[:timeout] + 2)
      .and_return(0, 1, shutdown_timeout_seconds, 0, 1, Sidekiq.default_configuration[:timeout] + 2)
    allow(Process).to receive(:kill)
    allow(::Sidekiq).to receive(:logger).and_return(logger)
    allow(logger).to receive(:warn)

@@ -81,7 +81,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ
      let(:signal_params) do
        [
          [:TSTP, pid, 'stop fetching new jobs', shutdown_timeout_seconds],
          [:TERM, pid, 'gracefully shut down', Sidekiq[:timeout] + 2]
          [:TERM, pid, 'gracefully shut down', Sidekiq.default_configuration[:timeout] + 2]
        ]
      end

@@ -95,7 +95,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ
      let(:signal_params) do
        [
          [:TSTP, pid, 'stop fetching new jobs', shutdown_timeout_seconds],
          [:TERM, pid, 'gracefully shut down', Sidekiq[:timeout] + 2],
          [:TERM, pid, 'gracefully shut down', Sidekiq.default_configuration[:timeout] + 2],
          [:KILL, kill_pid, 'hard shut down', nil]
        ]
      end
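The `Sidekiq[:timeout]` to `Sidekiq.default_configuration[:timeout]` rewrites in these hunks (and the concurrency ones below) all track a single Sidekiq 7 API change: the hash-style accessor on the `Sidekiq` module was replaced by an explicit default-configuration object. Roughly:

    # Sidekiq 6.5.x — module-level hash access:
    Sidekiq[:timeout]
    Sidekiq[:concurrency]

    # Sidekiq 7 — settings live on the default configuration object:
    Sidekiq.default_configuration[:timeout]
    Sidekiq.default_configuration[:concurrency]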
@@ -127,7 +127,7 @@ RSpec.describe Gitlab::Runtime, feature_category: :cloud_connector do
    before do
      stub_const('::Sidekiq', sidekiq_type)
      allow(sidekiq_type).to receive(:server?).and_return(true)
      allow(sidekiq_type).to receive(:[]).with(:concurrency).and_return(2)
      allow(sidekiq_type).to receive(:default_configuration).and_return({ concurrency: 2 })
    end

    it_behaves_like "valid runtime", :sidekiq, 5

@@ -178,7 +178,8 @@ RSpec.describe Gitlab::SidekiqConfig do
      allow(::Gitlab::SidekiqConfig::WorkerRouter)
        .to receive(:global).and_return(::Gitlab::SidekiqConfig::WorkerRouter.new(test_routes))

      allow(Sidekiq).to receive(:[]).with(:queues).and_return(%w[default background_migration])
      allow(Sidekiq).to receive_message_chain(:default_configuration, :queues)
        .and_return(%w[default background_migration])

      mappings = described_class.current_worker_queue_mappings

@@ -492,7 +492,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
        'completed_at' => current_utc_time.to_i }
    end

    subject { described_class.new }
    subject { described_class.new(Sidekiq.logger) }

    it 'update payload correctly' do
      travel_to(current_utc_time) do

@@ -10,7 +10,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics, feature_category: :shar

  describe '.initialize_process_metrics' do
    it 'sets concurrency metrics' do
      expect(concurrency_metric).to receive(:set).with({}, Sidekiq[:concurrency].to_i)
      expect(concurrency_metric).to receive(:set).with({}, Sidekiq.default_configuration[:concurrency].to_i)

      described_class.initialize_process_metrics
    end

@@ -122,7 +122,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics, feature_category: :shar
    end

    it 'sets the concurrency metric' do
      expect(concurrency_metric).to receive(:set).with({}, Sidekiq[:concurrency].to_i)
      expect(concurrency_metric).to receive(:set).with({}, Sidekiq.default_configuration[:concurrency].to_i)

      described_class.initialize_process_metrics
    end

@@ -22,7 +22,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues,
    let(:migrator) { described_class.new(mappings) }

    let(:set_after) do
      Sidekiq.redis { |c| c.zrange(set_name, 0, -1, with_scores: true) }
      Sidekiq.redis { |c| c.call("ZRANGE", set_name, 0, -1, "WITHSCORES") }
        .map { |item, score| [Gitlab::Json.load(item), score] }
    end

@@ -226,8 +226,9 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues,
    let(:logger) { nil }

    def list_queues
      queues = Sidekiq.redis do |conn|
        conn.scan_each(match: "queue:*").to_a
      queues = []
      Sidekiq.redis do |conn|
        conn.scan("MATCH", "queue:*") { |key| queues << key }
      end
      queues.uniq.map { |queue| queue.split(':', 2).last }
    end
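The two hunks above follow from Sidekiq 7 yielding raw `redis-client` connections from `Sidekiq.redis`: redis-rb conveniences such as keyword options and `scan_each` are no longer available, so commands are issued in their literal Redis form. A sketch of the equivalences, assuming a redis-client connection:

    Sidekiq.redis do |conn|
      # redis-rb: conn.zrange('myset', 0, -1, with_scores: true)
      conn.call('ZRANGE', 'myset', 0, -1, 'WITHSCORES')

      # redis-rb: conn.scan_each(match: 'queue:*').to_a
      keys = []
      conn.scan('MATCH', 'queue:*') { |key| keys << key }
    end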
@@ -149,7 +149,7 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues,

  context 'when both multi-store feature flags are off' do
    def with_redis(&block)
      Sidekiq.redis(&block)
      Gitlab::Redis::Queues.with(&block)
    end

    before do

@@ -4055,8 +4055,8 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
  describe '#builds_in_self_and_project_descendants' do
    subject(:builds) { pipeline.builds_in_self_and_project_descendants }

    let(:pipeline) { create(:ci_pipeline) }
    let!(:build) { create(:ci_build, pipeline: pipeline) }
    let_it_be_with_refind(:pipeline) { create(:ci_pipeline) }
    let_it_be(:build) { create(:ci_build, pipeline: pipeline) }

    context 'when pipeline is standalone' do
      it 'returns the list of builds' do

@@ -4083,6 +4083,10 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
        expect(builds).to contain_exactly(build, child_build, child_of_child_build)
      end
    end

    it 'includes partition_id filter' do
      expect(builds.where_values_hash).to match(a_hash_including('partition_id' => pipeline.partition_id))
    end
  end

  describe '#build_with_artifacts_in_self_and_project_descendants' do

@@ -4108,7 +4112,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
  describe '#jobs_in_self_and_project_descendants' do
    subject(:jobs) { pipeline.jobs_in_self_and_project_descendants }

    let(:pipeline) { create(:ci_pipeline) }
    let_it_be_with_refind(:pipeline) { create(:ci_pipeline) }

    shared_examples_for 'fetches jobs in self and project descendant pipelines' do |factory_type|
      let!(:job) { create(factory_type, pipeline: pipeline) }

@@ -4141,6 +4145,10 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
        expect(jobs).to contain_exactly(job, child_job, child_of_child_job, child_source_bridge, child_of_child_source_bridge)
      end
    end

    it 'includes partition_id filter' do
      expect(jobs.where_values_hash).to match(a_hash_including('partition_id' => pipeline.partition_id))
    end
  end

  context 'when job is build' do
@@ -6,6 +6,7 @@ module DnsHelpers
    stub_invalid_dns!
    permit_local_dns!
    permit_postgresql!
    permit_redis!
  end

  def permit_dns!

@@ -53,6 +54,18 @@ module DnsHelpers
    ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).map(&:host).compact.uniq
  end

  def permit_redis!
    # https://github.com/redis-rb/redis-client/blob/v0.11.2/lib/redis_client/ruby_connection.rb#L51 uses Socket.tcp that
    # calls Addrinfo.getaddrinfo internally.
    hosts = Gitlab::Redis::ALL_CLASSES.map do |redis_instance|
      redis_instance.redis_client_params[:host]
    end.uniq.compact

    hosts.each do |host|
      allow(Addrinfo).to receive(:getaddrinfo).with(host, anything, nil, :STREAM, anything, anything, any_args).and_call_original
    end
  end

  def stub_resolver(stubbed_lookups = {})
    resolver = instance_double('Resolv::DNS')
    allow(resolver).to receive(:timeouts=)

@@ -93,8 +93,6 @@ RSpec.shared_context 'structured_logger' do
  end

  before do
    allow(Sidekiq).to receive(:logger).and_return(logger)

    allow(subject).to receive(:current_time).and_return(timestamp.to_f)

    allow(Process).to receive(:clock_gettime).with(Process::CLOCK_REALTIME, :float_second)

@@ -103,7 +101,7 @@ RSpec.shared_context 'structured_logger' do
      .and_return(clock_thread_cputime_start, clock_thread_cputime_end)
  end

  subject { described_class.new }
  subject { described_class.new(logger) }

  def call_subject(job, queue)
    # This structured logger strongly depends on execution of `InstrumentationLogger`

@@ -55,6 +55,7 @@ RSpec.shared_context 'server metrics with mocked prometheus' do
    allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_mem_total_bytes, anything, {}, :all).and_return(sidekiq_mem_total_bytes)

    allow(concurrency_metric).to receive(:set)
    allow(completion_seconds_metric).to receive(:get)
  end
end
@@ -86,6 +86,67 @@ RSpec.shared_examples "redis_shared_examples" do
    end
  end

  describe '.redis_client_params' do
    # .redis_client_params wraps over `.redis_store_options` by modifying its outputs
    # to be compatible with `RedisClient`. We test for compatibility in this block while
    # the contents of redis_store_options are tested in the `.params` block.

    subject { described_class.new(rails_env).redis_client_params }

    let(:rails_env) { 'development' }
    let(:config_file_name) { config_old_format_socket }

    shared_examples 'instrumentation_class in custom key' do
      it 'moves instrumentation class into custom' do
        expect(subject[:custom][:instrumentation_class]).to eq(described_class.instrumentation_class)
        expect(subject[:instrumentation_class]).to be_nil
      end
    end

    context 'when url is host based' do
      context 'with old format' do
        let(:config_file_name) { config_old_format_host }

        it 'does not raise ArgumentError for invalid keywords' do
          expect { RedisClient.config(**subject) }.not_to raise_error
        end

        it_behaves_like 'instrumentation_class in custom key'
      end

      context 'with new format' do
        let(:config_file_name) { config_new_format_host }

        where(:rails_env, :host) do
          [
            %w[development development-host],
            %w[test test-host],
            %w[production production-host]
          ]
        end

        with_them do
          it 'does not raise ArgumentError for invalid keywords in SentinelConfig' do
            expect(subject[:name]).to eq(host)
            expect { RedisClient.sentinel(**subject) }.not_to raise_error
          end

          it_behaves_like 'instrumentation_class in custom key'
        end
      end
    end

    context 'when url contains unix socket reference' do
      let(:config_file_name) { config_old_format_socket }

      it 'does not raise ArgumentError for invalid keywords' do
        expect { RedisClient.config(**subject) }.not_to raise_error
      end

      it_behaves_like 'instrumentation_class in custom key'
    end
  end

  describe '.params' do
    subject { described_class.new(rails_env).params }
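What these examples guard against, in short: `RedisClient.config` and `RedisClient.sentinel` raise `ArgumentError` on unknown keywords, so `.redis_client_params` has to emit only supported keys and tuck GitLab-specific data (the instrumentation class) under `custom:`. A sketch of the call shapes being validated, with illustrative values:

    require 'redis_client'

    # Host-based configuration; values are illustrative.
    config = RedisClient.config(
      host: 'localhost',
      port: 6379,
      custom: { instrumentation_class: 'SharedState' } # arbitrary user data lives under custom:
    )
    client = config.new_client

    # Sentinel-based configuration; values are illustrative.
    sentinel_config = RedisClient.sentinel(
      name: 'development-host',
      sentinels: [{ host: 'sentinel1.example.com', port: 26379 }],
      role: :master
    )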
@@ -1,13 +1,19 @@
# frozen_string_literal: true

RSpec.configure do |config|
  def gitlab_sidekiq_inline(&block)
  def gitlab_sidekiq_inline
    # We need to cleanup the queues before running jobs in specs because the
    # middleware might have written to redis
    redis_queues_cleanup!
    redis_queues_metadata_cleanup!
    Sidekiq::Testing.inline!(&block)

    # Scoped inline! is thread-safe which breaks capybara specs
    # see https://github.com/sidekiq/sidekiq/issues/6069
    Sidekiq::Testing.inline!

    yield
  ensure
    Sidekiq::Testing.fake! # fake is the default so we reset it to that
    redis_queues_cleanup!
    redis_queues_metadata_cleanup!
  end
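Usage stays the same for callers; only the mechanics changed. A sketch with a hypothetical worker:

    # SomeWorker is hypothetical. Jobs enqueued inside the block execute
    # synchronously; the ensure branch restores fake mode and cleans the queues.
    it 'runs the job inline' do
      gitlab_sidekiq_inline do
        SomeWorker.perform_async(42)
      end
    end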
@@ -6,15 +6,6 @@ require 'sidekiq/testing'
module SidekiqMiddleware
  def with_sidekiq_server_middleware(&block)
    Sidekiq::Testing.server_middleware.clear

    if Gem::Version.new(Sidekiq::VERSION) != Gem::Version.new('6.5.12')
      raise 'New version of sidekiq detected, please remove this line'
    end

    # This line is a workaround for a Sidekiq bug that is already fixed in v7.0.0
    # https://github.com/mperham/sidekiq/commit/1b83a152786ed382f07fff12d2608534f1e3c922
    Sidekiq::Testing.server_middleware.instance_variable_set(:@config, Sidekiq)

    Sidekiq::Testing.server_middleware(&block)
  ensure
    Sidekiq::Testing.server_middleware.clear
@@ -251,6 +251,31 @@ RSpec.describe Tooling::Danger::StableBranch, feature_category: :delivery do
      end
    end

    context 'with multiple package-and-test pipelines' do
      let(:pipeline_bridges_response) do
        [
          {
            'name' => 'e2e:package-and-test-ee',
            'status' => 'success',
            'downstream_pipeline' => {
              'id' => '123',
              'status' => package_and_qa_state
            }
          },
          {
            'name' => 'follow-up-e2e:package-and-test-ee',
            'status' => 'failed',
            'downstream_pipeline' => {
              'id' => '456',
              'status' => 'failed'
            }
          }
        ]
      end

      it_behaves_like 'without a failure'
    end

    context 'when the version API request fails' do
      let(:response_success) { false }

@@ -12,6 +12,10 @@ RSpec.describe MergeRequests::SetReviewerReviewedWorker, feature_category: :sour

  it_behaves_like 'subscribes to event' do
    let(:event) { approved_event }

    before do
      stub_feature_flags(mr_request_changes: false)
    end
  end

  it 'calls MergeRequests::UpdateReviewerStateService' do

@@ -108,7 +108,7 @@ module Tooling
        gitlab
          .api
          .pipeline_bridges(helper.mr_target_project_id, mr_head_pipeline_id)
          &.find { |bridge| bridge['name'].include?('package-and-test-ee') }
          &.find { |bridge| bridge['name'] == 'e2e:package-and-test-ee' }
      end

      def stable_target_branch
@@ -11,4 +11,5 @@ group :test do
  gem "pry"
  gem 'simplecov', require: false
  gem 'stub_env', '~> 1.0'
  gem 'redis', '~> 4.8'
end

@@ -1,46 +1,51 @@
PATH
  remote: .
  specs:
    gitlab-sidekiq-fetcher (0.10.0)
    gitlab-sidekiq-fetcher (0.11.0)
      json (>= 2.5)
      sidekiq (~> 6.1)
      sidekiq (~> 7.0)

GEM
  remote: https://rubygems.org/
  specs:
    coderay (1.1.2)
    connection_pool (2.4.0)
    diff-lcs (1.3)
    docile (1.3.1)
    json (2.5.1)
    method_source (0.9.0)
    pry (0.11.3)
      coderay (~> 1.1.0)
      method_source (~> 0.9.0)
    rack (2.2.6.4)
    coderay (1.1.3)
    concurrent-ruby (1.2.2)
    connection_pool (2.4.1)
    diff-lcs (1.5.0)
    docile (1.4.0)
    json (2.6.3)
    method_source (1.0.0)
    pry (0.14.2)
      coderay (~> 1.1)
      method_source (~> 1.0)
    rack (3.0.8)
    redis (4.8.1)
    rspec (3.8.0)
      rspec-core (~> 3.8.0)
      rspec-expectations (~> 3.8.0)
      rspec-mocks (~> 3.8.0)
    rspec-core (3.8.0)
      rspec-support (~> 3.8.0)
    rspec-expectations (3.8.1)
    redis-client (0.18.0)
      connection_pool
    rspec (3.12.0)
      rspec-core (~> 3.12.0)
      rspec-expectations (~> 3.12.0)
      rspec-mocks (~> 3.12.0)
    rspec-core (3.12.2)
      rspec-support (~> 3.12.0)
    rspec-expectations (3.12.3)
      diff-lcs (>= 1.2.0, < 2.0)
      rspec-support (~> 3.8.0)
    rspec-mocks (3.8.0)
      rspec-support (~> 3.12.0)
    rspec-mocks (3.12.6)
      diff-lcs (>= 1.2.0, < 2.0)
      rspec-support (~> 3.8.0)
    rspec-support (3.8.0)
    sidekiq (6.5.8)
      connection_pool (>= 2.2.5, < 3)
      rack (~> 2.0)
      redis (>= 4.5.0, < 5)
    simplecov (0.16.1)
      rspec-support (~> 3.12.0)
    rspec-support (3.12.1)
    sidekiq (7.2.0)
      concurrent-ruby (< 2)
      connection_pool (>= 2.3.0)
      rack (>= 2.2.4)
      redis-client (>= 0.14.0)
    simplecov (0.22.0)
      docile (~> 1.1)
      json (>= 1.8, < 3)
      simplecov-html (~> 0.10.0)
    simplecov-html (0.10.2)
      simplecov-html (~> 0.11)
      simplecov_json_formatter (~> 0.1)
    simplecov-html (0.12.3)
    simplecov_json_formatter (0.1.4)
    stub_env (1.0.4)
      rspec (>= 2.0, < 4.0)

@@ -50,6 +55,7 @@ PLATFORMS
DEPENDENCIES
  gitlab-sidekiq-fetcher!
  pry
  redis (~> 4.8)
  rspec (~> 3)
  simplecov
  stub_env (~> 1.0)
@@ -6,7 +6,7 @@ fetches from Redis.

It's based on https://github.com/TEA-ebook/sidekiq-reliable-fetch.

**IMPORTANT NOTE:** Since version `0.7.0` this gem works only with `sidekiq >= 6.1` (which introduced Fetch API breaking changes). Please use version `~> 0.5` if you use an older version of `sidekiq`.
**IMPORTANT NOTE:** Since version `0.11.0` this gem works only with `sidekiq >= 7` (which introduced Fetch API breaking changes). Please use version `~> 0.10` if you use an older version of `sidekiq`.

**UPGRADE NOTE:** If upgrading from 0.7.0, strongly consider a fully deployed step on 0.7.1 before 0.8.0; that fixes a bug in the queue name validation that will hit if sidekiq nodes running 0.7.0 see working queues named by 0.8.0. See https://gitlab.com/gitlab-org/sidekiq-reliable-fetch/-/merge_requests/22
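In Gemfile terms, the compatibility note boils down to pinning the fetcher to the line matching your Sidekiq major version (a sketch):

    # sidekiq >= 7:
    gem 'gitlab-sidekiq-fetcher', '~> 0.11', require: 'sidekiq-reliable-fetch'
    # sidekiq >= 6.1 (pre-7):
    # gem 'gitlab-sidekiq-fetcher', '~> 0.10', require: 'sidekiq-reliable-fetch'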
@@ -1,6 +1,6 @@
Gem::Specification.new do |s|
  s.name = 'gitlab-sidekiq-fetcher'
  s.version = '0.10.0'
  s.version = '0.11.0'
  s.authors = ['TEA', 'GitLab']
  s.email = 'valery@gitlab.com'
  s.license = 'LGPL-3.0'

@@ -10,6 +10,6 @@ Gem::Specification.new do |s|
  s.require_paths = ['lib']
  s.files = Dir.glob('lib/**/*.*')
  s.test_files = Dir.glob('{spec,tests}/**/*.*')
  s.add_dependency 'sidekiq', '~> 6.1'
  s.add_dependency 'sidekiq', '~> 7.0'
  s.add_runtime_dependency 'json', '>= 2.5'
end
@@ -53,7 +53,7 @@ module Sidekiq
        Sidekiq::ReliableFetch
      end

      config[:fetch] = fetch_strategy.new(config)
      config[:fetch_class] = fetch_strategy

      Sidekiq.logger.info('GitLab reliable fetch activated!')

@@ -115,18 +115,18 @@ module Sidekiq

    attr_reader :cleanup_interval, :last_try_to_take_lease_at, :lease_interval,
                :queues, :use_semi_reliable_fetch,
                :strictly_ordered_queues
                :strictly_ordered_queues, :config

    def initialize(options)
      raise ArgumentError, 'missing queue list' unless options[:queues]
    def initialize(capsule)
      raise ArgumentError, 'missing queue list' unless capsule.config.queues

      @config = options
      @config = capsule.config
      @interrupted_set = Sidekiq::InterruptedSet.new
      @cleanup_interval = options.fetch(:cleanup_interval, DEFAULT_CLEANUP_INTERVAL)
      @lease_interval = options.fetch(:lease_interval, DEFAULT_LEASE_INTERVAL)
      @cleanup_interval = config.fetch(:cleanup_interval, DEFAULT_CLEANUP_INTERVAL)
      @lease_interval = config.fetch(:lease_interval, DEFAULT_LEASE_INTERVAL)
      @last_try_to_take_lease_at = 0
      @strictly_ordered_queues = !!options[:strict]
      @queues = options[:queues].map { |q| "queue:#{q}" }
      @strictly_ordered_queues = !!config[:strict]
      @queues = config.queues.map { |q| "queue:#{q}" }
    end

    def retrieve_work

@@ -140,7 +140,7 @@ module Sidekiq
      "#{self.class} does not implement #{__method__}"
    end

    def bulk_requeue(inprogress, _options)
    def bulk_requeue(inprogress)
      return if inprogress.empty?

      Sidekiq.redis do |conn|

@@ -202,7 +202,7 @@ module Sidekiq
      Sidekiq.logger.info('Cleaning working queues')

      Sidekiq.redis do |conn|
        conn.scan_each(match: "#{WORKING_QUEUE_PREFIX}:queue:*", count: SCAN_COUNT) do |key|
        conn.scan(match: "#{WORKING_QUEUE_PREFIX}:queue:*", count: SCAN_COUNT) do |key|
          original_queue, identity = extract_queue_and_identity(key)

          next if original_queue.nil? || identity.nil?

@@ -234,7 +234,7 @@ module Sidekiq
    rescue NameError
    end

    max_retries_after_interruption ||= @config[:max_retries_after_interruption]
    max_retries_after_interruption ||= config[:max_retries_after_interruption]
    max_retries_after_interruption ||= DEFAULT_MAX_RETRIES_AFTER_INTERRUPTION
    max_retries_after_interruption
  end

@@ -263,7 +263,7 @@ module Sidekiq
    @last_try_to_take_lease_at = Time.now.to_f

    Sidekiq.redis do |conn|
      conn.set(LEASE_KEY, 1, nx: true, ex: cleanup_interval)
      conn.set(LEASE_KEY, 1, 'nx', 'ex', cleanup_interval)
    end
  end
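The shape of this change: Sidekiq 7 no longer takes a pre-built fetcher via `config[:fetch]`; it takes a class via `config[:fetch_class]` and instantiates it per `Sidekiq::Capsule`, passing the capsule to `initialize`. Queue and option access then goes through `capsule.config`, and `bulk_requeue` lost its second argument. A skeleton of a Sidekiq 7-style fetch strategy (a sketch, not the gem's full implementation):

    # Sketch: minimal Sidekiq 7 fetch strategy.
    class MyFetch
      def initialize(capsule)
        @config = capsule.config
        @queues = @config.queues.map { |q| "queue:#{q}" }
      end

      def retrieve_work
        # pop one job and return a unit of work, or nil
      end

      # Note the arity change: Sidekiq 7 dropped the second `options` argument.
      def bulk_requeue(inprogress)
        # push unfinished jobs back onto their queues
      end
    end

    Sidekiq.configure_server do |config|
      config[:fetch_class] = MyFetch
    end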
@@ -45,7 +45,7 @@ module Sidekiq
    end

    def self.options
      Sidekiq.respond_to?(:[]) ? Sidekiq : Sidekiq.options
      Sidekiq.default_configuration
    end
  end
end
@@ -7,7 +7,7 @@ module Sidekiq
    # for semi-reliable fetch.
    DEFAULT_SEMI_RELIABLE_FETCH_TIMEOUT = 2 # seconds

    def initialize(options)
    def initialize(capsule)
      super

      @queues = @queues.uniq

@@ -16,7 +16,7 @@ module Sidekiq
    private

    def retrieve_unit_of_work
      work = Sidekiq.redis { |conn| conn.brpop(*queues_cmd, timeout: semi_reliable_fetch_timeout) }
      work = brpop_with_sidekiq
      return unless work

      queue, job = work

@@ -29,6 +29,17 @@ module Sidekiq
      unit_of_work
    end

    def brpop_with_sidekiq
      Sidekiq.redis do |conn|
        conn.blocking_call(
          conn.read_timeout + semi_reliable_fetch_timeout,
          "brpop",
          *queues_cmd,
          semi_reliable_fetch_timeout
        )
      end
    end

    def queues_cmd
      if strictly_ordered_queues
        @queues
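The reason for `brpop_with_sidekiq`: redis-client applies its socket `read_timeout` to every command, so a server-side blocking `BRPOP` has to go through `blocking_call`, whose first argument is the overall client-side timeout. Padding it with `conn.read_timeout` keeps the client from giving up before Redis replies. An equivalent one-off call (a sketch):

    # BRPOP that blocks server-side for up to 2 seconds; the client-side timeout
    # is read_timeout + 2 so it cannot fire before the server replies.
    Sidekiq.redis do |conn|
      queue, job = conn.blocking_call(conn.read_timeout + 2, 'brpop', 'queue:default', 2)
    end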
@@ -3,14 +3,20 @@ require 'fetch_shared_examples'
require 'sidekiq/base_reliable_fetch'
require 'sidekiq/reliable_fetch'
require 'sidekiq/semi_reliable_fetch'
require 'sidekiq/capsule'

describe Sidekiq::BaseReliableFetch do
  let(:job) { Sidekiq.dump_json(class: 'Bob', args: [1, 2, 'foo']) }
  let(:queues) { ['foo'] }
  let(:options) { { queues: queues } }
  let(:config) { Sidekiq::Config.new(options) }
  let(:capsule) { Sidekiq::Capsule.new("default", config) }
  let(:fetcher) { Sidekiq::ReliableFetch.new(capsule) }

  before { Sidekiq.redis(&:flushdb) }

  describe 'UnitOfWork' do
    let(:fetcher) { Sidekiq::ReliableFetch.new(queues: ['foo']) }
    before { config.queues = queues }

    describe '#requeue' do
      it 'requeues job' do

@@ -40,14 +46,16 @@ describe Sidekiq::BaseReliableFetch do
  end

  describe '#bulk_requeue' do
    let(:options) { { queues: %w[foo bar] } }
    let(:queues) { %w[foo bar] }
    let!(:queue1) { Sidekiq::Queue.new('foo') }
    let!(:queue2) { Sidekiq::Queue.new('bar') }

    before { config.queues = queues }

    it 'requeues the bulk' do
      uow = described_class::UnitOfWork
      jobs = [ uow.new('queue:foo', job), uow.new('queue:foo', job), uow.new('queue:bar', job) ]
      described_class.new(options).bulk_requeue(jobs, nil)
      described_class.new(capsule).bulk_requeue(jobs)

      expect(queue1.size).to eq 2
      expect(queue2.size).to eq 1

@@ -57,24 +65,26 @@ describe Sidekiq::BaseReliableFetch do
      uow = described_class::UnitOfWork
      interrupted_job = Sidekiq.dump_json(class: 'Bob', args: [1, 2, 'foo'], interrupted_count: 3)
      jobs = [ uow.new('queue:foo', interrupted_job), uow.new('queue:foo', job), uow.new('queue:bar', job) ]
      described_class.new(options).bulk_requeue(jobs, nil)
      described_class.new(capsule).bulk_requeue(jobs)

      expect(queue1.size).to eq 1
      expect(queue2.size).to eq 1
      expect(Sidekiq::InterruptedSet.new.size).to eq 1
    end

    it 'does not put jobs into interrupted queue if it is disabled' do
      options[:max_retries_after_interruption] = -1
    context 'when max_retries_after_interruption is disabled' do
      let(:options) { { queues: queues, max_retries_after_interruption: -1 } }

      uow = described_class::UnitOfWork
      interrupted_job = Sidekiq.dump_json(class: 'Bob', args: [1, 2, 'foo'], interrupted_count: 3)
      jobs = [ uow.new('queue:foo', interrupted_job), uow.new('queue:foo', job), uow.new('queue:bar', job) ]
      described_class.new(options).bulk_requeue(jobs, nil)
      it 'does not put jobs into interrupted queue' do
        uow = described_class::UnitOfWork
        interrupted_job = Sidekiq.dump_json(class: 'Bob', args: [1, 2, 'foo'], interrupted_count: 3)
        jobs = [ uow.new('queue:foo', interrupted_job), uow.new('queue:foo', job), uow.new('queue:bar', job) ]
        described_class.new(capsule).bulk_requeue(jobs)

      expect(queue1.size).to eq 2
      expect(queue2.size).to eq 1
      expect(Sidekiq::InterruptedSet.new.size).to eq 0
        expect(queue1.size).to eq 2
        expect(queue2.size).to eq 1
        expect(Sidekiq::InterruptedSet.new.size).to eq 0
      end
    end

    it 'does not put jobs into interrupted queue if it is disabled on the worker' do

@@ -83,7 +93,7 @@ describe Sidekiq::BaseReliableFetch do
      uow = described_class::UnitOfWork
      interrupted_job = Sidekiq.dump_json(class: 'Bob', args: [1, 2, 'foo'], interrupted_count: 3)
      jobs = [ uow.new('queue:foo', interrupted_job), uow.new('queue:foo', job), uow.new('queue:bar', job) ]
      described_class.new(options).bulk_requeue(jobs, nil)
      described_class.new(capsule).bulk_requeue(jobs)

      expect(queue1.size).to eq 2
      expect(queue2.size).to eq 1
@@ -1,54 +1,70 @@
shared_examples 'a Sidekiq fetcher' do
  let(:queues) { ['assigned'] }
  let(:options) { { queues: queues } }
  let(:config) { Sidekiq::Config.new(options) }
  let(:capsule) { Sidekiq::Capsule.new("default", config) }

  before { Sidekiq.redis(&:flushdb) }
  before do
    config.queues = queues
    Sidekiq.redis(&:flushdb)
  end

  describe '#retrieve_work' do
    let(:job) { Sidekiq.dump_json(class: 'Bob', args: [1, 2, 'foo']) }
    let(:fetcher) { described_class.new(queues: queues) }
    let(:fetcher) { described_class.new(capsule) }

    it 'does not clean up orphaned jobs more than once per cleanup interval' do
      Sidekiq.redis = Sidekiq::RedisConnection.create(url: REDIS_URL, size: 10)
      Sidekiq::Client.via(Sidekiq::RedisConnection.create(url: REDIS_URL, size: 10)) do
        expect(fetcher).to receive(:clean_working_queues!).once

      expect(fetcher).to receive(:clean_working_queues!).once

      threads = 10.times.map do
        Thread.new do
          fetcher.retrieve_work
        threads = 10.times.map do
          Thread.new do
            fetcher.retrieve_work
          end
        end
      end

      threads.map(&:join)
        threads.map(&:join)
      end
    end

    it 'retrieves by order when strictly order is enabled' do
      fetcher = described_class.new(strict: true, queues: ['first', 'second'])
    context 'when strictly order is enabled' do
      let(:queues) { ['first', 'second'] }
      let(:options) { { strict: true, queues: queues } }

      Sidekiq.redis do |conn|
        conn.rpush('queue:first', ['msg3', 'msg2', 'msg1'])
        conn.rpush('queue:second', 'msg4')
      it 'retrieves by order' do
        fetcher = described_class.new(capsule)

        Sidekiq.redis do |conn|
          conn.rpush('queue:first', ['msg3', 'msg2', 'msg1'])
          conn.rpush('queue:second', 'msg4')
        end

        jobs = (1..4).map { fetcher.retrieve_work.job }

        expect(jobs).to eq ['msg1', 'msg2', 'msg3', 'msg4']
      end

      jobs = (1..4).map { fetcher.retrieve_work.job }

      expect(jobs).to eq ['msg1', 'msg2', 'msg3', 'msg4']
    end

    it 'does not starve any queue when queues are not strictly ordered' do
      fetcher = described_class.new(queues: ['first', 'second'])
    context 'when queues are not strictly ordered' do
      let(:queues) { ['first', 'second'] }

      Sidekiq.redis do |conn|
        conn.rpush('queue:first', (1..200).map { |i| "msg#{i}" })
        conn.rpush('queue:second', 'this_job_should_not_stuck')
      it 'does not starve any queue' do
        fetcher = described_class.new(capsule)

        Sidekiq.redis do |conn|
          conn.rpush('queue:first', (1..200).map { |i| "msg#{i}" })
          conn.rpush('queue:second', 'this_job_should_not_stuck')
        end

        jobs = (1..100).map { fetcher.retrieve_work.job }

        expect(jobs).to include 'this_job_should_not_stuck'
      end

      jobs = (1..100).map { fetcher.retrieve_work.job }

      expect(jobs).to include 'this_job_should_not_stuck'
    end

    shared_examples "basic queue handling" do |queue|
      let (:fetcher) { described_class.new(queues: [queue]) }
      let(:queues) { [queue] }
      let(:fetcher) { described_class.new(capsule) }

      it 'retrieves the job and puts it to working queue' do
        Sidekiq.redis { |conn| conn.rpush("queue:#{queue}", job) }

@@ -150,7 +166,8 @@ shared_examples 'a Sidekiq fetcher' do

    context 'with short cleanup interval' do
      let(:short_interval) { 1 }
      let(:fetcher) { described_class.new(queues: queues, lease_interval: short_interval, cleanup_interval: short_interval) }
      let(:options) { { queues: queues, lease_interval: short_interval, cleanup_interval: short_interval } }
      let(:fetcher) { described_class.new(capsule) }

      it 'requeues when there is no heartbeat' do
        Sidekiq.redis { |conn| conn.rpush('queue:assigned', job) }

@@ -2,6 +2,7 @@ require 'spec_helper'
require 'fetch_shared_examples'
require 'sidekiq/base_reliable_fetch'
require 'sidekiq/reliable_fetch'
require 'sidekiq/capsule'

describe Sidekiq::ReliableFetch do
  include_examples 'a Sidekiq fetcher'
@@ -2,6 +2,9 @@ require 'spec_helper'
require 'fetch_shared_examples'
require 'sidekiq/base_reliable_fetch'
require 'sidekiq/semi_reliable_fetch'
require 'sidekiq/capsule'
require 'sidekiq/config'
require 'redis'

describe Sidekiq::SemiReliableFetch do
  include_examples 'a Sidekiq fetcher'

@@ -9,7 +12,11 @@ describe Sidekiq::SemiReliableFetch do
  describe '#retrieve_work' do
    let(:queues) { ['stuff_to_do'] }
    let(:options) { { queues: queues } }
    let(:fetcher) { described_class.new(options) }
    let(:config) { Sidekiq::Config.new(options) }
    let(:capsule) { Sidekiq::Capsule.new("default", config) }
    let(:fetcher) { described_class.new(capsule) }

    before { config.queues = queues }

    context 'timeout config' do
      before do

@@ -20,8 +27,9 @@ describe Sidekiq::SemiReliableFetch do
        let(:timeout) { nil }

        it 'brpops with the default timeout' do
          Sidekiq.redis do |connection|
            expect(connection).to receive(:brpop).with("queue:stuff_to_do", { timeout: 2 }).once.and_call_original
          Sidekiq.redis do |conn|
            expect(conn).to receive(:blocking_call)
              .with(conn.read_timeout + 2, 'brpop', 'queue:stuff_to_do', 2).once.and_call_original

            fetcher.retrieve_work
          end

@@ -32,8 +40,9 @@ describe Sidekiq::SemiReliableFetch do
        let(:timeout) { '5' }

        it 'brpops with the default timeout' do
          Sidekiq.redis do |connection|
            expect(connection).to receive(:brpop).with("queue:stuff_to_do", { timeout: 5 }).once.and_call_original
          Sidekiq.redis do |conn|
            expect(conn).to receive(:blocking_call)
              .with(conn.read_timeout + 5, 'brpop', 'queue:stuff_to_do', 5).once.and_call_original

            fetcher.retrieve_work
          end
@@ -9,7 +9,7 @@ SimpleCov.start
REDIS_URL = ENV['REDIS_URL'] || 'redis://localhost:6379/10'

Sidekiq.configure_client do |config|
  config.redis = { url: REDIS_URL }
  config.redis = { url: REDIS_URL, read_timeout: 5 }
end

Sidekiq.logger.level = Logger::ERROR

@@ -14,6 +14,7 @@ Sidekiq.configure_server do |config|
  # These will be ignored for :basic
  config[:cleanup_interval] = TEST_CLEANUP_INTERVAL
  config[:lease_interval] = TEST_LEASE_INTERVAL
  config[:queues] = ['default']

  Sidekiq::ReliableFetch.setup_reliable_fetch!(config)
end

@@ -23,6 +23,7 @@ Sidekiq.configure_server do |config|
  # These will be ignored for :basic
  config[:cleanup_interval] = TEST_CLEANUP_INTERVAL
  config[:lease_interval] = TEST_LEASE_INTERVAL
  config[:queues] = ['default']

  Sidekiq::ReliableFetch.setup_reliable_fetch!(config)
end