Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent b420826c09
commit 7263a0140a
@@ -288,6 +288,7 @@ export default {
          avatar_url: gon.current_user_avatar_url,
        });
      }

      const tokens = [
        {
          type: TOKEN_TYPE_AUTHOR,
@@ -297,7 +298,6 @@ export default {
          dataType: 'user',
          unique: true,
          defaultAuthors: [],
          operators: OPERATOR_IS_ONLY,
          fetchAuthors: this.fetchUsers,
          preloadedAuthors,
        },
@@ -333,7 +333,6 @@ export default {
          title: TOKEN_TITLE_TYPE,
          icon: 'issues',
          token: GlFilteredSearchToken,
          operators: OPERATOR_IS_ONLY,
          options: [
            { icon: 'issue-type-issue', title: 'issue', value: 'issue' },
            { icon: 'issue-type-incident', title: 'incident', value: 'incident' },
@@ -349,7 +348,6 @@ export default {
          icon: 'thumb-up',
          token: EmojiToken,
          unique: true,
          operators: OPERATOR_IS_ONLY,
          fetchEmojis: this.fetchEmojis,
        });

@@ -271,6 +271,7 @@ export const filters = {
      [OPERATOR_IS]: {
        [NORMAL_FILTER]: 'label_name[]',
        [SPECIAL_FILTER]: 'label_name[]',
        [ALTERNATIVE_FILTER]: 'label_name',
      },
      [OPERATOR_IS_NOT]: {
        [NORMAL_FILTER]: 'not[label_name][]',
@@ -280,12 +281,13 @@ export const filters = {
  [TOKEN_TYPE_TYPE]: {
    [API_PARAM]: {
      [NORMAL_FILTER]: 'types',
      [SPECIAL_FILTER]: 'types',
    },
    [URL_PARAM]: {
      [OPERATOR_IS]: {
        [NORMAL_FILTER]: 'type[]',
        [SPECIAL_FILTER]: 'type[]',
      },
      [OPERATOR_IS_NOT]: {
        [NORMAL_FILTER]: 'not[type][]',
      },
    },
  },
@@ -299,6 +301,9 @@ export const filters = {
        [NORMAL_FILTER]: 'my_reaction_emoji',
        [SPECIAL_FILTER]: 'my_reaction_emoji',
      },
      [OPERATOR_IS_NOT]: {
        [NORMAL_FILTER]: 'not[my_reaction_emoji]',
      },
    },
  },
  [TOKEN_TYPE_CONFIDENTIAL]: {
@@ -11,9 +11,11 @@ query getIssues(
  $assigneeId: String
  $assigneeUsernames: [String!]
  $authorUsername: String
  $confidential: Boolean
  $labelName: [String]
  $milestoneTitle: [String]
  $milestoneWildcardId: MilestoneWildcardId
  $myReactionEmoji: String
  $types: [IssueType!]
  $not: NegatedIssueFilterInput
  $beforeCursor: String
@@ -30,9 +32,11 @@ query getIssues(
      assigneeId: $assigneeId
      assigneeUsernames: $assigneeUsernames
      authorUsername: $authorUsername
      confidential: $confidential
      labelName: $labelName
      milestoneTitle: $milestoneTitle
      milestoneWildcardId: $milestoneWildcardId
      myReactionEmoji: $myReactionEmoji
      types: $types
      not: $not
      before: $beforeCursor
@@ -57,9 +61,11 @@ query getIssues(
      assigneeId: $assigneeId
      assigneeUsernames: $assigneeUsernames
      authorUsername: $authorUsername
      confidential: $confidential
      labelName: $labelName
      milestoneTitle: $milestoneTitle
      milestoneWildcardId: $milestoneWildcardId
      myReactionEmoji: $myReactionEmoji
      types: $types
      not: $not
      before: $beforeCursor
@@ -5,9 +5,11 @@ query getIssuesCount(
  $assigneeId: String
  $assigneeUsernames: [String!]
  $authorUsername: String
  $confidential: Boolean
  $labelName: [String]
  $milestoneTitle: [String]
  $milestoneWildcardId: MilestoneWildcardId
  $myReactionEmoji: String
  $types: [IssueType!]
  $not: NegatedIssueFilterInput
) {
@@ -19,9 +21,11 @@ query getIssuesCount(
      assigneeId: $assigneeId
      assigneeUsernames: $assigneeUsernames
      authorUsername: $authorUsername
      confidential: $confidential
      labelName: $labelName
      milestoneTitle: $milestoneTitle
      milestoneWildcardId: $milestoneWildcardId
      myReactionEmoji: $myReactionEmoji
      types: $types
      not: $not
    ) {
@@ -34,9 +38,11 @@ query getIssuesCount(
      assigneeId: $assigneeId
      assigneeUsernames: $assigneeUsernames
      authorUsername: $authorUsername
      confidential: $confidential
      labelName: $labelName
      milestoneTitle: $milestoneTitle
      milestoneWildcardId: $milestoneWildcardId
      myReactionEmoji: $myReactionEmoji
      types: $types
      not: $not
    ) {
@@ -49,9 +55,11 @@ query getIssuesCount(
      assigneeId: $assigneeId
      assigneeUsernames: $assigneeUsernames
      authorUsername: $authorUsername
      confidential: $confidential
      labelName: $labelName
      milestoneTitle: $milestoneTitle
      milestoneWildcardId: $milestoneWildcardId
      myReactionEmoji: $myReactionEmoji
      types: $types
      not: $not
    ) {
@@ -65,9 +73,11 @@ query getIssuesCount(
      assigneeId: $assigneeId
      assigneeUsernames: $assigneeUsernames
      authorUsername: $authorUsername
      confidential: $confidential
      labelName: $labelName
      milestoneTitle: $milestoneTitle
      milestoneWildcardId: $milestoneWildcardId
      myReactionEmoji: $myReactionEmoji
      types: $types
      not: $not
    ) {
@@ -79,9 +89,11 @@ query getIssuesCount(
      assigneeId: $assigneeId
      assigneeUsernames: $assigneeUsernames
      authorUsername: $authorUsername
      confidential: $confidential
      labelName: $labelName
      milestoneTitle: $milestoneTitle
      milestoneWildcardId: $milestoneWildcardId
      myReactionEmoji: $myReactionEmoji
      types: $types
      not: $not
    ) {
@@ -93,9 +105,11 @@ query getIssuesCount(
      assigneeId: $assigneeId
      assigneeUsernames: $assigneeUsernames
      authorUsername: $authorUsername
      confidential: $confidential
      labelName: $labelName
      milestoneTitle: $milestoneTitle
      milestoneWildcardId: $milestoneWildcardId
      myReactionEmoji: $myReactionEmoji
      types: $types
      not: $not
    ) {
@@ -22,6 +22,7 @@ import {
  SPECIAL_FILTER,
  SPECIAL_FILTER_VALUES,
  TOKEN_TYPE_ASSIGNEE,
  TOKEN_TYPE_CONFIDENTIAL,
  TOKEN_TYPE_ITERATION,
  TOKEN_TYPE_MILESTONE,
  TOKEN_TYPE_TYPE,
@@ -200,10 +201,15 @@ const isWildcardValue = (tokenType, value) =>
const requiresUpperCaseValue = (tokenType, value) =>
  tokenType === TOKEN_TYPE_TYPE || isWildcardValue(tokenType, value);

const formatData = (token) =>
  requiresUpperCaseValue(token.type, token.value.data)
    ? token.value.data.toUpperCase()
    : token.value.data;
const formatData = (token) => {
  if (requiresUpperCaseValue(token.type, token.value.data)) {
    return token.value.data.toUpperCase();
  }
  if (token.type === TOKEN_TYPE_CONFIDENTIAL) {
    return token.value.data === 'yes';
  }
  return token.value.data;
};

export const convertToApiParams = (filterTokens) => {
  const params = {};
@@ -0,0 +1,8 @@
---
name: atomic_sidekiq_scheduler
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/72380
rollout_issue_url:
milestone: '14.5'
type: development
group: group::project management
default_enabled: false
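The flag above ships disabled (`default_enabled: false`) and has no rollout issue yet, so the atomic scheduler path stays off until the flag is flipped. A minimal, hedged sketch of toggling it from a GitLab Rails console using GitLab's standard `Feature` helpers (the console workflow itself is an assumption, not part of this commit):

```ruby
# Hypothetical console session: turn the new scheduler path on and verify
# the same check used in Gitlab::SidekiqEnq below.
Feature.enable(:atomic_sidekiq_scheduler)
Feature.enabled?(:atomic_sidekiq_scheduler, default_enabled: :yaml) # => true

# Roll back to the legacy ZRANGEBYSCORE + ZREM path.
Feature.disable(:atomic_sidekiq_scheduler)
```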
@@ -1,8 +1,7 @@
---
stage: none
group: unassigned
stage: Monitor
group: Monitor
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
type: concepts, howto
---

# Health Check **(FREE SELF)**
@@ -84,9 +84,12 @@ module Gitlab
        if puma? && Puma.respond_to?(:cli_config)
          threads += Puma.cli_config.options[:max_threads]
        elsif sidekiq?
          # An extra thread for the poller in Sidekiq Cron:
          # 2 extra threads for the pollers in Sidekiq and Sidekiq Cron:
          # https://github.com/ondrejbartas/sidekiq-cron#under-the-hood
          threads += Sidekiq.options[:concurrency] + 1
          #
          # These threads execute Sidekiq client middleware when jobs
          # are enqueued and those can access DB / Redis.
          threads += Sidekiq.options[:concurrency] + 2
        end

        if action_cable?
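The hunk above raises the Sidekiq thread estimate from `concurrency + 1` to `concurrency + 2` to account for both the Sidekiq poller and the Sidekiq Cron poller, which run client middleware and can touch the database and Redis. A minimal sketch of the resulting arithmetic (not the actual `Gitlab::Runtime` code; the base of one main thread is an assumption implied by the updated spec expectation of 5 threads for a concurrency of 2):

```ruby
# Illustrative only: reproduces the thread-count arithmetic changed above.
def sidekiq_max_threads(concurrency)
  threads = 1                  # main thread (assumed starting point)
  threads += concurrency + 2   # worker threads + Sidekiq poller + Sidekiq Cron poller
  threads
end

sidekiq_max_threads(2) # => 5, matching the revised runtime spec further below
```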
@@ -1,16 +1,37 @@
# frozen_string_literal: true

# This is a copy of https://github.com/mperham/sidekiq/blob/32c55e31659a1e6bd42f98334cca5eef2863de8d/lib/sidekiq/scheduled.rb#L11-L34
#
# It effectively reverts
# https://github.com/mperham/sidekiq/commit/9b75467b33759888753191413eddbc15c37a219e
# because we observe that the extra ZREMs caused by this change can lead to high
# CPU usage on Redis at peak times:
# https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1179
#
module Gitlab
  class SidekiqEnq
    LUA_ZPOPBYSCORE = <<~EOS
      local key, now = KEYS[1], ARGV[1]
      local jobs = redis.call("zrangebyscore", key, "-inf", now, "limit", 0, 1)
      if jobs[1] then
        redis.call("zrem", key, jobs[1])
        return jobs[1]
      end
    EOS

    LUA_ZPOPBYSCORE_SHA = Digest::SHA1.hexdigest(LUA_ZPOPBYSCORE)

    def enqueue_jobs(now = Time.now.to_f.to_s, sorted_sets = Sidekiq::Scheduled::SETS)
      if Feature.enabled?(:atomic_sidekiq_scheduler, default_enabled: :yaml)
        atomic_find_jobs_and_enqueue(now, sorted_sets)
      else
        find_jobs_and_enqueue(now, sorted_sets)
      end
    end

    private

    # This is a copy of https://github.com/mperham/sidekiq/blob/32c55e31659a1e6bd42f98334cca5eef2863de8d/lib/sidekiq/scheduled.rb#L11-L34
    #
    # It effectively reverts
    # https://github.com/mperham/sidekiq/commit/9b75467b33759888753191413eddbc15c37a219e
    # because we observe that the extra ZREMs caused by this change can lead to high
    # CPU usage on Redis at peak times:
    # https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1179
    #
    def find_jobs_and_enqueue(now, sorted_sets)
      # A job's "score" in Redis is the time at which it should be processed.
      # Just check Redis for the set of jobs with a timestamp before now.
      Sidekiq.redis do |conn|
@@ -24,8 +45,7 @@ module Gitlab
          # We need to go through the list one at a time to reduce the risk of something
          # going wrong between the time jobs are popped from the scheduled queue and when
          # they are pushed onto a work queue and losing the jobs.
          while (job = conn.zrangebyscore(sorted_set, "-inf", now, limit: [0, 1]).first)

          while job = conn.zrangebyscore(sorted_set, "-inf", now, limit: [0, 1]).first
            # Pop item off the queue and add it to the work queue. If the job can't be popped from
            # the queue, it's because another process already popped it so we can move on to the
            # next one.
@@ -47,5 +67,38 @@ module Gitlab
        end
      end
    end

    def atomic_find_jobs_and_enqueue(now, sorted_sets)
      Sidekiq.redis do |conn|
        sorted_sets.each do |sorted_set|
          start_time = ::Gitlab::Metrics::System.monotonic_time
          jobs = 0

          Sidekiq.logger.info(message: 'Atomically enqueuing scheduled jobs', status: 'start', sorted_set: sorted_set)

          while job = redis_eval_lua(conn, LUA_ZPOPBYSCORE, LUA_ZPOPBYSCORE_SHA, keys: [sorted_set], argv: [now])
            jobs += 1
            Sidekiq::Client.push(Sidekiq.load_json(job))
          end

          end_time = ::Gitlab::Metrics::System.monotonic_time
          Sidekiq.logger.info(message: 'Atomically enqueuing scheduled jobs',
                              status: 'done',
                              sorted_set: sorted_set,
                              jobs_count: jobs,
                              duration_s: end_time - start_time)
        end
      end
    end

    def redis_eval_lua(conn, script, sha, keys: nil, argv: nil)
      conn.evalsha(sha, keys: keys, argv: argv)
    rescue ::Redis::CommandError => e
      if e.message.start_with?('NOSCRIPT')
        conn.eval(script, keys: keys, argv: argv)
      else
        raise
      end
    end
  end
end
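Because `ZRANGEBYSCORE` and `ZREM` run as separate commands in the legacy path, two schedulers can read the same job before either removes it; the Lua script added above performs both steps server-side in one atomic call. A standalone sketch of that pattern with plain `redis-rb` (the helper name `pop_due_job` is illustrative, not GitLab code; the script body is the one defined in `Gitlab::SidekiqEnq`):

```ruby
require 'redis'
require 'digest'

# Same Lua body as LUA_ZPOPBYSCORE above: pop at most one due member atomically.
ZPOPBYSCORE = <<~LUA
  local key, now = KEYS[1], ARGV[1]
  local jobs = redis.call("zrangebyscore", key, "-inf", now, "limit", 0, 1)
  if jobs[1] then
    redis.call("zrem", key, jobs[1])
    return jobs[1]
  end
LUA
ZPOPBYSCORE_SHA = Digest::SHA1.hexdigest(ZPOPBYSCORE)

def pop_due_job(redis, sorted_set, now = Time.now.to_f.to_s)
  # Prefer EVALSHA (script already cached on the server); fall back to EVAL
  # on NOSCRIPT, mirroring redis_eval_lua above.
  redis.evalsha(ZPOPBYSCORE_SHA, keys: [sorted_set], argv: [now])
rescue Redis::CommandError => e
  raise unless e.message.start_with?('NOSCRIPT')

  redis.eval(ZPOPBYSCORE, keys: [sorted_set], argv: [now])
end

redis = Redis.new
redis.zadd('schedule', Time.now.to_f, '{"jid": 1}')
pop_due_job(redis, 'schedule') # => '{"jid": 1}'
pop_due_job(redis, 'schedule') # => nil (already claimed)
```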
@@ -101,8 +101,13 @@ export const locationSearch = [
  'label_name[]=tv',
  'not[label_name][]=live action',
  'not[label_name][]=drama',
  'type[]=issue',
  'type[]=feature',
  'not[type][]=bug',
  'not[type][]=incident',
  'my_reaction_emoji=thumbsup',
  'confidential=no',
  'not[my_reaction_emoji]=thumbsdown',
  'confidential=yes',
  'iteration_id=4',
  'not[iteration_id]=20',
  'epic_id=12',
@@ -114,10 +119,9 @@ export const locationSearch = [
export const locationSearchWithSpecialValues = [
  'assignee_id=123',
  'assignee_username=bart',
  'type[]=issue',
  'type[]=incident',
  'my_reaction_emoji=None',
  'iteration_id=Current',
  'label_name[]=None',
  'milestone_title=Upcoming',
  'epic_id=None',
  'weight=None',
@@ -136,8 +140,13 @@ export const filteredTokens = [
  { type: 'labels', value: { data: 'tv', operator: OPERATOR_IS } },
  { type: 'labels', value: { data: 'live action', operator: OPERATOR_IS_NOT } },
  { type: 'labels', value: { data: 'drama', operator: OPERATOR_IS_NOT } },
  { type: 'type', value: { data: 'issue', operator: OPERATOR_IS } },
  { type: 'type', value: { data: 'feature', operator: OPERATOR_IS } },
  { type: 'type', value: { data: 'bug', operator: OPERATOR_IS_NOT } },
  { type: 'type', value: { data: 'incident', operator: OPERATOR_IS_NOT } },
  { type: 'my_reaction_emoji', value: { data: 'thumbsup', operator: OPERATOR_IS } },
  { type: 'confidential', value: { data: 'no', operator: OPERATOR_IS } },
  { type: 'my_reaction_emoji', value: { data: 'thumbsdown', operator: OPERATOR_IS_NOT } },
  { type: 'confidential', value: { data: 'yes', operator: OPERATOR_IS } },
  { type: 'iteration', value: { data: '4', operator: OPERATOR_IS } },
  { type: 'iteration', value: { data: '20', operator: OPERATOR_IS_NOT } },
  { type: 'epic_id', value: { data: '12', operator: OPERATOR_IS } },
@@ -151,10 +160,9 @@ export const filteredTokens = [
export const filteredTokensWithSpecialValues = [
  { type: 'assignee_username', value: { data: '123', operator: OPERATOR_IS } },
  { type: 'assignee_username', value: { data: 'bart', operator: OPERATOR_IS } },
  { type: 'type', value: { data: 'issue', operator: OPERATOR_IS } },
  { type: 'type', value: { data: 'incident', operator: OPERATOR_IS } },
  { type: 'my_reaction_emoji', value: { data: 'None', operator: OPERATOR_IS } },
  { type: 'iteration', value: { data: 'Current', operator: OPERATOR_IS } },
  { type: 'labels', value: { data: 'None', operator: OPERATOR_IS } },
  { type: 'milestone', value: { data: 'Upcoming', operator: OPERATOR_IS } },
  { type: 'epic_id', value: { data: 'None', operator: OPERATOR_IS } },
  { type: 'weight', value: { data: 'None', operator: OPERATOR_IS } },
@@ -165,8 +173,9 @@ export const apiParams = {
  assigneeUsernames: ['bart', 'lisa'],
  milestoneTitle: 'season 4',
  labelName: ['cartoon', 'tv'],
  types: ['ISSUE', 'FEATURE'],
  myReactionEmoji: 'thumbsup',
  confidential: 'no',
  confidential: true,
  iterationId: '4',
  epicId: '12',
  weight: '1',
@@ -175,6 +184,8 @@ export const apiParams = {
    assigneeUsernames: ['patty', 'selma'],
    milestoneTitle: 'season 20',
    labelName: ['live action', 'drama'],
    types: ['BUG', 'INCIDENT'],
    myReactionEmoji: 'thumbsdown',
    iterationId: '20',
    epicId: '34',
    weight: '3',
@@ -184,7 +195,7 @@ export const apiParams = {
export const apiParamsWithSpecialValues = {
  assigneeId: '123',
  assigneeUsernames: 'bart',
  types: ['ISSUE', 'INCIDENT'],
  labelName: 'None',
  myReactionEmoji: 'None',
  iterationWildcardId: 'CURRENT',
  milestoneWildcardId: 'UPCOMING',
@@ -201,8 +212,11 @@ export const urlParams = {
  'not[milestone_title]': 'season 20',
  'label_name[]': ['cartoon', 'tv'],
  'not[label_name][]': ['live action', 'drama'],
  'type[]': ['issue', 'feature'],
  'not[type][]': ['bug', 'incident'],
  my_reaction_emoji: 'thumbsup',
  confidential: 'no',
  'not[my_reaction_emoji]': 'thumbsdown',
  confidential: 'yes',
  iteration_id: '4',
  'not[iteration_id]': '20',
  epic_id: '12',
@@ -214,7 +228,7 @@ export const urlParams = {
export const urlParamsWithSpecialValues = {
  assignee_id: '123',
  'assignee_username[]': 'bart',
  'type[]': ['issue', 'incident'],
  'label_name[]': 'None',
  my_reaction_emoji: 'None',
  iteration_id: 'Current',
  milestone_title: 'Upcoming',
@@ -108,7 +108,7 @@ RSpec.describe Gitlab::Runtime do
      allow(sidekiq_type).to receive(:options).and_return(concurrency: 2)
    end

    it_behaves_like "valid runtime", :sidekiq, 4
    it_behaves_like "valid runtime", :sidekiq, 5
  end

  context "console" do
@@ -0,0 +1,93 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::SidekiqEnq, :clean_gitlab_redis_queues do
  let(:retry_set) { Sidekiq::Scheduled::SETS.first }
  let(:schedule_set) { Sidekiq::Scheduled::SETS.last }

  around do |example|
    freeze_time { example.run }
  end

  shared_examples 'finds jobs that are due and enqueues them' do
    before do
      Sidekiq.redis do |redis|
        redis.zadd(retry_set, (Time.current - 1.day).to_f.to_s, '{"jid": 1}')
        redis.zadd(retry_set, Time.current.to_f.to_s, '{"jid": 2}')
        redis.zadd(retry_set, (Time.current + 1.day).to_f.to_s, '{"jid": 3}')

        redis.zadd(schedule_set, (Time.current - 1.day).to_f.to_s, '{"jid": 4}')
        redis.zadd(schedule_set, Time.current.to_f.to_s, '{"jid": 5}')
        redis.zadd(schedule_set, (Time.current + 1.day).to_f.to_s, '{"jid": 6}')
      end
    end

    it 'enqueues jobs that are due' do
      expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 1 })
      expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 2 })
      expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 4 })
      expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 5 })

      Gitlab::SidekiqEnq.new.enqueue_jobs

      Sidekiq.redis do |redis|
        expect(redis.zscan_each(retry_set).map(&:first)).to contain_exactly('{"jid": 3}')
        expect(redis.zscan_each(schedule_set).map(&:first)).to contain_exactly('{"jid": 6}')
      end
    end
  end

  context 'when atomic_sidekiq_scheduler is disabled' do
    before do
      stub_feature_flags(atomic_sidekiq_scheduler: false)
    end

    it_behaves_like 'finds jobs that are due and enqueues them'

    context 'when ZRANGEBYSCORE returns a job that is already removed by another process' do
      before do
        Sidekiq.redis do |redis|
          redis.zadd(schedule_set, Time.current.to_f.to_s, '{"jid": 1}')

          allow(redis).to receive(:zrangebyscore).and_wrap_original do |m, *args, **kwargs|
            m.call(*args, **kwargs).tap do |jobs|
              redis.zrem(schedule_set, jobs.first) if args[0] == schedule_set && jobs.first
            end
          end
        end
      end

      it 'calls ZREM but does not enqueue the job' do
        Sidekiq.redis do |redis|
          expect(redis).to receive(:zrem).with(schedule_set, '{"jid": 1}').twice.and_call_original
        end
        expect(Sidekiq::Client).not_to receive(:push)

        Gitlab::SidekiqEnq.new.enqueue_jobs
      end
    end
  end

  context 'when atomic_sidekiq_scheduler is enabled' do
    before do
      stub_feature_flags(atomic_sidekiq_scheduler: true)
    end

    context 'when Lua script is not yet loaded' do
      before do
        Gitlab::Redis::Queues.with { |redis| redis.script(:flush) }
      end

      it_behaves_like 'finds jobs that are due and enqueues them'
    end

    context 'when Lua script is already loaded' do
      before do
        Gitlab::SidekiqEnq.new.enqueue_jobs
      end

      it_behaves_like 'finds jobs that are due and enqueues them'
    end
  end
end