Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-02-16 12:09:54 +00:00
parent f971281bcd
commit ee708b240a
37 changed files with 1086 additions and 287 deletions

View File

@@ -41,3 +41,6 @@ include:
- local: .gitlab/ci/templates/gem.gitlab-ci.yml
inputs:
gem_name: "gitlab-database-lock_retries"
- local: .gitlab/ci/templates/gem.gitlab-ci.yml
inputs:
gem_name: "gitlab-housekeeper"

View File

@@ -25,6 +25,7 @@ setup-test-env:
- section_start "setup-test-env" "Setting up testing environment"; scripts/setup-test-env; section_end "setup-test-env";
- select_gitlab_workhorse_essentials
- section_start "gitaly-test-build" "Compiling Gitaly binaries"; scripts/gitaly-test-build; section_end "gitaly-test-build"; # Do not use 'bundle exec' here
- strip_executable_binaries "${TMP_TEST_FOLDER}"
artifacts:
expire_in: 7d
paths:

View File

@@ -1,4 +1,5 @@
import $ from 'jquery';
import { sanitize } from '~/lib/dompurify';
import { __ } from '~/locale';
/**
@@ -64,7 +65,9 @@ export default class GlFieldError {
this.inputDomElement = this.inputElement.get(0);
this.form = formErrors;
this.errorMessage = this.inputElement.attr('title') || __('This field is required.');
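// Note: the title attribute is user-controlled, so it is sanitized below before being interpolated into HTML.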
this.fieldErrorElement = $(`<p class='${errorMessageClass} hidden'>${this.errorMessage}</p>`);
this.fieldErrorElement = $(
`<p class='${errorMessageClass} hidden'>${sanitize(this.errorMessage)}</p>`,
);
this.state = {
valid: false,

View File

@@ -11,6 +11,7 @@ import {
WIDGET_TYPE_PARTICIPANTS,
WIDGET_TYPE_PROGRESS,
WIDGET_TYPE_START_AND_DUE_DATE,
WIDGET_TYPE_TIME_TRACKING,
WIDGET_TYPE_WEIGHT,
WIDGET_TYPE_COLOR,
WORK_ITEM_TYPE_VALUE_KEY_RESULT,
@@ -26,6 +27,7 @@ import WorkItemMilestoneInline from './work_item_milestone_inline.vue';
import WorkItemMilestoneWithEdit from './work_item_milestone_with_edit.vue';
import WorkItemParentInline from './work_item_parent_inline.vue';
import WorkItemParent from './work_item_parent_with_edit.vue';
import WorkItemTimeTracking from './work_item_time_tracking.vue';
export default {
components: {
@@ -39,6 +41,7 @@ export default {
WorkItemDueDateWithEdit,
WorkItemParent,
WorkItemParentInline,
WorkItemTimeTracking,
WorkItemWeightInline: () =>
import('ee_component/work_items/components/work_item_weight_inline.vue'),
WorkItemWeight: () =>
@@ -116,6 +119,9 @@ export default {
workItemParent() {
return this.isWidgetPresent(WIDGET_TYPE_HIERARCHY)?.parent;
},
workItemTimeTracking() {
return this.isWidgetPresent(WIDGET_TYPE_TIME_TRACKING);
},
workItemColor() {
return this.isWidgetPresent(WIDGET_TYPE_COLOR);
},
@@ -309,6 +315,12 @@ export default {
:can-update="canUpdate"
@error="$emit('error', $event)"
/>
<work-item-time-tracking
v-if="workItemTimeTracking && glFeatures.workItemsMvc2"
class="gl-mb-5"
:time-estimate="workItemTimeTracking.timeEstimate"
:total-time-spent="workItemTimeTracking.totalTimeSpent"
/>
<participants
v-if="workItemParticipants && glFeatures.workItemsMvc"
class="gl-mb-5 gl-pt-5 gl-border-t gl-border-gray-50"

View File

@@ -0,0 +1,80 @@
<script>
import { GlProgressBar, GlTooltipDirective } from '@gitlab/ui';
import { outputChronicDuration } from '~/chronic_duration';
import { isPositiveInteger } from '~/lib/utils/number_utils';
import { s__, sprintf } from '~/locale';
const options = { format: 'short' };
export default {
components: {
GlProgressBar,
},
directives: {
GlTooltip: GlTooltipDirective,
},
props: {
timeEstimate: {
type: Number,
required: false,
default: 0,
},
totalTimeSpent: {
type: Number,
required: false,
default: 0,
},
},
computed: {
humanTimeEstimate() {
return outputChronicDuration(this.timeEstimate, options);
},
humanTotalTimeSpent() {
return outputChronicDuration(this.totalTimeSpent, options) ?? '0h';
},
progressBarTooltipText() {
const timeDifference = this.totalTimeSpent - this.timeEstimate;
const time = outputChronicDuration(Math.abs(timeDifference), options);
return isPositiveInteger(timeDifference)
? sprintf(s__('TimeTracking|%{time} over'), { time })
: sprintf(s__('TimeTracking|%{time} remaining'), { time });
},
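// timeRemainingPercent exceeds 100 when more time was spent than estimated,
// which switches the progress bar to the danger variant.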
progressBarVariant() {
return this.timeRemainingPercent > 100 ? 'danger' : 'primary';
},
timeRemainingPercent() {
return Math.floor((this.totalTimeSpent / this.timeEstimate) * 100);
},
},
};
</script>
<template>
<div>
<h3 class="gl-heading-5 gl-mb-2!">
{{ __('Time tracking') }}
</h3>
<div
class="gl-display-flex gl-align-items-center gl-gap-2 gl-font-sm"
data-testid="time-tracking-body"
>
<template v-if="totalTimeSpent || timeEstimate">
<span class="gl-text-secondary">{{ s__('TimeTracking|Spent') }}</span>
{{ humanTotalTimeSpent }}
<template v-if="timeEstimate">
<gl-progress-bar
v-gl-tooltip="progressBarTooltipText"
class="gl-flex-grow-1 gl-mx-2"
:value="timeRemainingPercent"
:variant="progressBarVariant"
/>
<span class="gl-text-secondary">{{ s__('TimeTracking|Estimate') }}</span>
{{ humanTimeEstimate }}
</template>
</template>
<span v-else class="gl-text-secondary">
{{ s__('TimeTracking|Use /spend or /estimate to manage time.') }}
</span>
</div>
</div>
</template>

View File

@@ -16,6 +16,7 @@ export const WIDGET_TYPE_NOTIFICATIONS = 'NOTIFICATIONS';
export const WIDGET_TYPE_CURRENT_USER_TODOS = 'CURRENT_USER_TODOS';
export const WIDGET_TYPE_LABELS = 'LABELS';
export const WIDGET_TYPE_START_AND_DUE_DATE = 'START_AND_DUE_DATE';
export const WIDGET_TYPE_TIME_TRACKING = 'TIME_TRACKING';
export const WIDGET_TYPE_WEIGHT = 'WEIGHT';
export const WIDGET_TYPE_PARTICIPANTS = 'PARTICIPANTS';
export const WIDGET_TYPE_PROGRESS = 'PROGRESS';

View File

@@ -41,6 +41,10 @@ fragment WorkItemWidgets on WorkItemWidget {
dueDate
startDate
}
... on WorkItemWidgetTimeTracking {
timeEstimate
totalTimeSpent
}
... on WorkItemWidgetHierarchy {
hasChildren
parent {

View File

@@ -224,7 +224,7 @@ class UsersController < ApplicationController
end
def contributed_projects
ContributedProjectsFinder.new(user).execute(current_user)
ContributedProjectsFinder.new(user).execute(current_user, order_by: 'latest_activity_desc')
end
def starred_projects

View File

@@ -15,13 +15,13 @@ class ContributedProjectsFinder < UnionFinder
# projects, regardless of their visibility to the current_user
#
# Returns an ActiveRecord::Relation.
def execute(current_user = nil, ignore_visibility: false)
def execute(current_user = nil, ignore_visibility: false, order_by: 'id_desc')
# Do not show contributed projects if the user profile is private.
return Project.none unless can_read_profile?(current_user)
segments = all_projects(current_user, ignore_visibility)
find_union(segments, Project).with_namespace.order_id_desc
find_union(segments, Project).with_namespace.sort_by_attribute(order_by)
end
private
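For context, the call sites now line up as follows (a minimal sketch; `sort_by_attribute` is GitLab's generic ordering helper, assumed here to resolve `latest_activity_desc` to ordering by latest activity):

```ruby
# Existing callers keep the previous behavior, since order_by defaults to 'id_desc':
ContributedProjectsFinder.new(user).execute(current_user)

# UsersController#contributed_projects opts into activity ordering:
ContributedProjectsFinder.new(user).execute(current_user, order_by: 'latest_activity_desc')
```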

View File

@@ -0,0 +1,43 @@
# ----- DELETE EVERYTHING ABOVE THIS LINE -----
- title: "Behavior change for protected variables and multi-project pipelines"
# The milestones for the deprecation announcement, and the removal.
removal_milestone: "17.0"
announcement_milestone: "16.10"
# Change breaking_change to false if needed.
breaking_change: true
# The stage and GitLab username of the person reporting the change,
# and a link to the deprecation issue
reporter: jocelynjane
stage: verify
issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/432328
body: | # (required) Don't change this line.
In some cases, users with sufficient permissions in a project could forward protected variables to an insecure project, so this change is a security enhancement that minimizes the risk of protected variable values being exposed.
While [forwarding CI/CD variables](https://docs.gitlab.com/ee/ci/pipelines/downstream_pipelines.html#pass-cicd-variables-to-a-downstream-pipeline) through downstream pipelines is useful for some workflows, [protected variables](https://docs.gitlab.com/ee/ci/variables/#protect-a-cicd-variable) require additional care. They are intended for use only with specific protected branches or tags.
In GitLab 17.0, variable forwarding will be updated to ensure protected variables are only passed in specific situations:
- Project-level protected variables can only be forwarded to downstream pipelines in the same project (child pipelines).
- Group-level protected variables can only be forwarded to downstream pipelines of projects that belong to the same group as the source project.
If your pipeline relies on forwarding protected variables, update your configuration to either conform to the two options above, or avoid forwarding protected variables.
# ==============================
# OPTIONAL END-OF-SUPPORT FIELDS
# ==============================
#
# If an End of Support period applies:
# 1) Share this announcement in the `#spt_managers` Support channel in Slack
# 2) Mention `@gitlab-com/support` in this merge request.
#
# When support for this feature ends, in XX.YY milestone format.
end_of_support_milestone:
# Array of tiers the feature is currently available to,
# like [Free, Silver, Gold, Core, Premium, Ultimate]
tiers:
# Links to documentation and thumbnail image
documentation_url:
image_url:
# Use the youtube thumbnail URL with the structure of https://img.youtube.com/vi/UNIQUEID/hqdefault.jpg
video_url:

View File

@@ -0,0 +1,140 @@
# frozen_string_literal: true
class MigrateSidekiqQueuedAndFutureJobs < Gitlab::Database::Migration[2.2]
milestone '16.10'
class SidekiqMigrateJobs
LOG_FREQUENCY_QUEUES = 10
LOG_FREQUENCY = 1000
attr_reader :logger, :mappings
# mappings is a hash of WorkerClassName => target_queue_name
def initialize(mappings, logger: nil)
@mappings = mappings
@logger = logger
end
# Migrates jobs from queues that are outside the mappings
# rubocop: disable Cop/SidekiqRedisCall -- for migration
def migrate_queues
routing_rules_queues = mappings.values.uniq
logger&.info("List of queues based on routing rules: #{routing_rules_queues}")
Sidekiq.redis do |conn|
conn.scan("MATCH", "queue:*") do |key|
next unless conn.type(key) == 'list'
queue_from = key.split(':', 2).last
next if routing_rules_queues.include?(queue_from)
migrate_queue(conn, queue_from)
end
end
logger&.info("Done migrating queued jobs.")
end
# Migrate jobs in SortedSets, i.e. scheduled and retry sets.
def migrate_set(sidekiq_set)
scanned = 0
migrated = 0
estimated_size = Sidekiq.redis { |c| c.zcard(sidekiq_set) }
logger&.info("Processing #{sidekiq_set} set. Estimated size: #{estimated_size}.")
Sidekiq.redis do |c|
c.zscan(sidekiq_set) do |job, score|
if scanned > 0 && scanned % LOG_FREQUENCY == 0
logger&.info("In progress. Scanned records: #{scanned}. Migrated records: #{migrated}.")
end
scanned += 1
job_hash = Gitlab::Json.load(job)
destination_queue = mappings[job_hash['class']]
unless mappings.has_key?(job_hash['class'])
logger&.info("Skipping job from #{job_hash['class']}. No destination queue found.")
next
end
next if job_hash['queue'] == destination_queue
job_hash['queue'] = destination_queue
migrated += migrate_job_in_set(c, sidekiq_set, job, score, job_hash)
end
end
logger&.info("Done. Scanned records: #{scanned}. Migrated records: #{migrated}.")
{
scanned: scanned,
migrated: migrated
}
end
# rubocop: enable Cop/SidekiqRedisCall
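# Remove the old payload first; zadd runs only if zrem actually removed it,
# so a job deleted concurrently by another process is not re-added.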
def migrate_job_in_set(conn, sidekiq_set, job, score, job_hash)
removed = conn.zrem(sidekiq_set, job)
conn.zadd(sidekiq_set, score, Gitlab::Json.dump(job_hash)) if removed > 0
removed
end
private
def migrate_queue(conn, queue_from)
logger&.info("Migrating #{queue_from} queue")
migrated = 0
while queue_length(conn, queue_from) > 0
begin
if migrated >= 0 && migrated % LOG_FREQUENCY_QUEUES == 0
logger&.info("Migrating from #{queue_from}. Total: #{queue_length(conn,
queue_from)}. Migrated: #{migrated}.")
end
job = conn.rpop("queue:#{queue_from}")
job_hash = update_job_hash(job)
next unless job_hash
conn.lpush("queue:#{job_hash['queue']}", Sidekiq.dump_json(job_hash))
migrated += 1
rescue JSON::ParserError
logger&.error("Unmarshal JSON payload from SidekiqMigrateJobs failed. Job: #{job}")
next
end
end
logger&.info("Finished migrating #{queue_from} queue")
end
def update_job_hash(job)
job_hash = Sidekiq.load_json(job)
return unless mappings.has_key?(job_hash['class'])
destination_queue = mappings[job_hash['class']]
job_hash['queue'] = destination_queue
job_hash
end
def queue_length(conn, queue_name)
conn.llen("queue:#{queue_name}")
end
end
def up
return if Gitlab.com?
mappings = Gitlab::SidekiqConfig.worker_queue_mappings
logger = ::Gitlab::BackgroundMigration::Logger.build
migrator = SidekiqMigrateJobs.new(mappings, logger: logger)
migrator.migrate_queues
%w[schedule retry].each { |set| migrator.migrate_set(set) }
end
def down
# no-op
end
end
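To make the mapping shape concrete, a toy walk-through of what `SidekiqMigrateJobs` does to a queued payload (values are illustrative; real mappings come from `Gitlab::SidekiqConfig.worker_queue_mappings`):

```ruby
# With routing rules like [['*', 'default']], every worker maps to 'default':
mappings = { 'EmailReceiverWorker' => 'default' }

# A job sitting in a legacy per-worker queue...
job_hash = Gitlab::Json.load('{"class":"EmailReceiverWorker","queue":"email_receiver","args":["foo"]}')

# ...is rewritten to its routed queue before being pushed back:
job_hash['queue'] = mappings[job_hash['class']] # => 'default'
```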

View File

@@ -0,0 +1,17 @@
# frozen_string_literal: true
class RemoveIndexOnPipelineMetadata < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '16.10'
INDEX_NAME = 'index_pipeline_metadata_on_pipeline_id_name_text_pattern'
def up
remove_concurrent_index_by_name :ci_pipeline_metadata, INDEX_NAME
end
def down
add_concurrent_index :ci_pipeline_metadata, 'pipeline_id, name text_pattern_ops', name: INDEX_NAME
end
end

View File

@@ -0,0 +1,27 @@
# frozen_string_literal: true
class RemovePackagesProtectionRulesPackageNamePatternIlikeQueryColumn < Gitlab::Database::Migration[2.2]
milestone '16.10'
disable_ddl_transaction!
def up
if column_exists?(
:packages_protection_rules, :package_name_pattern_ilike_query)
with_lock_retries do
remove_column :packages_protection_rules, :package_name_pattern_ilike_query
end
end
end
def down
with_lock_retries do
unless column_exists?(
:packages_protection_rules, :package_name_pattern_ilike_query)
add_column :packages_protection_rules, :package_name_pattern_ilike_query, :text
end
end
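# Re-adds the 255-character limit as a check constraint; add_text_limit runs
# outside with_lock_retries because it validates the constraint non-transactionally.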
add_text_limit :packages_protection_rules, :package_name_pattern_ilike_query, 255
end
end

View File

@@ -0,0 +1 @@
b9c539b3b6752562118241be435c16cd0371442bc039bc6b3b3cc3297dc67e77

View File

@@ -0,0 +1 @@
daa3f7f6ef7068929f9050df5e49fe7c2744dd12b491cd350b73ccafb8385b97

View File

@@ -0,0 +1 @@
f593130e4ff94eab9e0881ff92b21c3d5c133a8ad513d8af49e2fe7e722b95d8

View File

@@ -12716,9 +12716,7 @@ CREATE TABLE packages_protection_rules (
push_protected_up_to_access_level smallint NOT NULL,
package_type smallint NOT NULL,
package_name_pattern text NOT NULL,
package_name_pattern_ilike_query text,
CONSTRAINT check_d2d75d206d CHECK ((char_length(package_name_pattern) <= 255)),
CONSTRAINT check_ff47b09794 CHECK ((char_length(package_name_pattern_ilike_query) <= 255))
CONSTRAINT check_d2d75d206d CHECK ((char_length(package_name_pattern) <= 255))
);
CREATE SEQUENCE packages_protection_rules_id_seq
@@ -26101,8 +26099,6 @@ CREATE INDEX index_personal_access_tokens_on_user_id ON personal_access_tokens U
CREATE INDEX index_pipeline_metadata_on_name_text_pattern_pipeline_id ON ci_pipeline_metadata USING btree (name text_pattern_ops, pipeline_id);
CREATE INDEX index_pipeline_metadata_on_pipeline_id_name_text_pattern ON ci_pipeline_metadata USING btree (pipeline_id, name text_pattern_ops);
CREATE UNIQUE INDEX p_ci_pipeline_variables_pipeline_id_key_partition_id_idx ON ONLY p_ci_pipeline_variables USING btree (pipeline_id, key, partition_id);
CREATE UNIQUE INDEX index_pipeline_variables_on_pipeline_id_key_partition_id_unique ON ci_pipeline_variables USING btree (pipeline_id, key, partition_id);

View File

@@ -66,6 +66,11 @@ Approving a user:
- Changes the user's state to active.
- Consumes a subscription [seat](../subscriptions/self_managed/index.md#billable-users).
Rejecting a user:
- Prevents the user from signing in or accessing instance information.
- Deletes the user.
## Block and unblock users
GitLab administrators can block and unblock users.

View File

@@ -91,8 +91,10 @@ Depending on how you installed the app, you might want to check the following:
- If you [installed the app from the official Atlassian Marketplace listing](jira_cloud_app.md#connect-the-gitlab-for-jira-cloud-app),
switch between GitLab versions in the GitLab for Jira Cloud app:
<!-- markdownlint-disable MD044 -->
1. In Jira, on the top bar, select **Apps > Manage your apps**.
1. Expand **GitLab for Jira (GitLab.com)**.
1. Expand **GitLab for Jira (gitlab.com)**.
1. Select **Get started**.
1. Select **Change GitLab version**.
1. Select **GitLab.com (SaaS)**, then select **Save**.
@@ -101,6 +103,8 @@ Depending on how you installed the app, you might want to check the following:
1. Select all checkboxes, then select **Next**.
1. Enter your **GitLab instance URL**, then select **Save**.
<!-- markdownlint-enable MD044 -->
If this method does not work and you're a Premium or Ultimate customer, [submit a support ticket](https://support.gitlab.com/hc/en-us/requests/new).
Provide your GitLab instance URL and Jira URL. GitLab Support can try to run the following scripts to resolve the issue:

View File

@@ -177,3 +177,11 @@ and `compile-production-assets` jobs to:
This task is responsible for deciding if assets need to be compiled or not.
It [compares the `HEAD` `SHA256` hexdigest from `$GITLAB_ASSETS_HASH` with the `master` hexdigest from `cached-assets-hash.txt`](https://gitlab.com/gitlab-org/gitlab/-/blob/c023191ef412e868ae957f3341208a41ca678403/lib/tasks/gitlab/assets.rake#L86).
1. If the hashes are the same, we don't compile anything. If they're different, we compile the assets.
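For illustration, the decision reduces to a hexdigest comparison (a minimal sketch, assuming `$GITLAB_ASSETS_HASH` and `cached-assets-hash.txt` hold the two SHA256 hexdigests described above):

```ruby
head_hash   = ENV['GITLAB_ASSETS_HASH']
master_hash = File.read('cached-assets-hash.txt').strip

# Recompile only when the asset hash diverges from master's.
compile_assets = head_hash != master_hash
```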
## Stripped binaries
By default, `setup-test-env` creates an artifact that contains stripped
binaries to [save storage and speed up artifact downloads](https://gitlab.com/gitlab-org/gitlab/-/issues/442029#note_1775193538) of subsequent CI jobs.
To make debugging a crash from stripped binaries easier, comment out the line with
`strip_executable_binaries` in the `setup-test-env` job and start a new pipeline.

View File

@@ -80,14 +80,18 @@ Prerequisites:
You can sync data from GitLab to Jira by linking the GitLab for Jira Cloud app to one or more GitLab groups.
To configure the GitLab for Jira Cloud app:
<!-- markdownlint-disable MD044 -->
1. In Jira, on the top bar, select **Apps > Manage your apps**.
1. Expand **GitLab for Jira (GitLab.com)**.
1. Expand **GitLab for Jira (gitlab.com)**.
1. Select **Get started**.
1. Optional. To set the GitLab instance to use with Jira, select **Change GitLab version**.
1. Select **Sign in to GitLab**.
1. For a list of groups you can link to, select **Link groups**.
1. To link to a group, select **Link**.
<!-- markdownlint-enable MD044 -->
After you link to a GitLab group, data is synced to Jira for all projects in that group.
The initial data sync happens in batches of 20 projects per minute.
For groups with many projects, the data sync for some projects is delayed.

View File

@@ -379,6 +379,29 @@ can change `## Step - 1` to `## Step 1` to ensure in-page links continue to work
<div class="deprecation breaking-change" data-milestone="17.0">
### Behavior change for protected variables and multi-project pipelines
<div class="deprecation-notes">
- Announced in GitLab <span class="milestone">16.10</span>
- Removal in GitLab <span class="milestone">17.0</span> ([breaking change](https://docs.gitlab.com/ee/update/terminology.html#breaking-change))
- To discuss this change or learn more, see the [deprecation issue](https://gitlab.com/gitlab-org/gitlab/-/issues/432328).
</div>
In some cases, users with sufficient permissions in a project could forward protected variables to an insecure project, so this change is a security enhancement that minimizes the risk of protected variable values being exposed.
While [forwarding CI/CD variables](https://docs.gitlab.com/ee/ci/pipelines/downstream_pipelines.html#pass-cicd-variables-to-a-downstream-pipeline) through downstream pipelines is useful for some workflows, [protected variables](https://docs.gitlab.com/ee/ci/variables/#protect-a-cicd-variable) require additional care. They are intended for use only with specific protected branches or tags.
In GitLab 17.0, variable forwarding will be updated to ensure protected variables are only passed in specific situations:
- Project-level protected variables can only be forwarded to downstream pipelines in the same project (child pipelines).
- Group-level protected variables can only be forwarded to downstream pipelines of projects that belong to the same group as the source project.
If your pipeline relies on forwarding protected variables, update your configuration to either conform to the two options above, or avoid forwarding protected variables.
</div>
<div class="deprecation breaking-change" data-milestone="17.0">
### Block usage of ref and sha together in `GET /projects/:id/ci/lint`
<div class="deprecation-notes">

View File

@@ -86,7 +86,7 @@ You can refine user search with [Elasticsearch syntax](#syntax).
| [<code>helper -extension:yml -extension:js</code>](https://gitlab.com/search?group_id=9970&project_id=278964&repository_ref=&scope=blobs&search=helper+-extension%3Ayml+-extension%3Ajs&snippets=) | Returns `helper` in all files except files with a `.yml` or `.js` extension. |
| [<code>helper path:lib/git</code>](https://gitlab.com/search?group_id=9970&project_id=278964&scope=blobs&search=helper+path%3Alib%2Fgit) | Returns `helper` in all files with a `lib/git*` path (for example, `spec/lib/gitlab`). |
<!-- markdownlint-enable -->
<!-- markdownlint-enable MD044 -->
## Known issues

View File

@@ -20,9 +20,9 @@ Gem::Specification.new do |spec|
spec.executables = ['gitlab-housekeeper']
spec.add_runtime_dependency 'activesupport'
spec.add_runtime_dependency 'awesome_print'
spec.add_runtime_dependency 'httparty'
spec.add_runtime_dependency 'rubocop'
spec.add_runtime_dependency 'awesome_print'
spec.add_development_dependency 'gitlab-styles'
spec.add_development_dependency 'rspec-rails'

View File

@@ -70,7 +70,7 @@ module Gitlab
end
end
def remove_first_exclusions(rule, file, remove_count)
def remove_first_exclusions(_rule, file, remove_count)
content = File.read(file)
skipped = 0

View File

@@ -17,6 +17,7 @@ RSpec.describe ::Gitlab::Housekeeper::Git do
File.write(file_in_master, 'File already in master!')
::Gitlab::Housekeeper::Shell.execute('git', 'init')
::Gitlab::Housekeeper::Shell.execute('git', 'config', '--local', 'user.email', 'test@example.com')
::Gitlab::Housekeeper::Shell.execute('git', 'checkout', '-b', 'master')
::Gitlab::Housekeeper::Shell.execute('git', 'add', file_in_master)
::Gitlab::Housekeeper::Shell.execute('git', 'commit', '-m', 'Initial commit!')

View File

@@ -2,26 +2,10 @@
require 'spec_helper'
# rubocop:disable RSpec/MultipleMemoizedHelpers
RSpec.describe ::Gitlab::Housekeeper::Keeps::RubocopFixer do
let(:todo_dir) { Dir.mktmpdir }
let(:rule1_file) { Pathname(todo_dir).join('rule1.yml').to_s }
let(:rule2_file) { Pathname(todo_dir).join('rule2.yml').to_s }
let(:not_autocorrectable_file) { Pathname(todo_dir).join('not_autocorrectable.yml').to_s }
let(:todo_dir_pattern) { Pathname(todo_dir).join('**/*.yml').to_s }
before do
dir = Pathname.new(todo_dir)
FileUtils.cp('spec/fixtures/rubocop_todo1.yml', rule1_file)
FileUtils.cp('spec/fixtures/rubocop_todo2.yml', rule2_file)
FileUtils.cp('spec/fixtures/rubocop_todo_not_autocorrectable.yml', not_autocorrectable_file)
end
after do
FileUtils.remove_entry(todo_dir)
end
let(:rubocop_fixer) { described_class.new(todo_dir_pattern: todo_dir_pattern, limit_fixes: 5) }
let(:rule1_violating_files) do
[
'rule1_violation1.rb',
@@ -45,6 +29,22 @@ RSpec.describe ::Gitlab::Housekeeper::Keeps::RubocopFixer do
]
end
let(:rule1_file) { Pathname(todo_dir).join('rule1.yml').to_s }
let(:rule2_file) { Pathname(todo_dir).join('rule2.yml').to_s }
let(:not_autocorrectable_file) { Pathname(todo_dir).join('not_autocorrectable.yml').to_s }
let(:todo_dir_pattern) { Pathname(todo_dir).join('**/*.yml').to_s }
before do
Pathname.new(todo_dir)
FileUtils.cp('spec/fixtures/rubocop_todo1.yml', rule1_file)
FileUtils.cp('spec/fixtures/rubocop_todo2.yml', rule2_file)
FileUtils.cp('spec/fixtures/rubocop_todo_not_autocorrectable.yml', not_autocorrectable_file)
end
after do
FileUtils.remove_entry(todo_dir)
end
describe '#each_change' do
it 'iterates over todo_dir_pattern files' do
yielded_times = 0
@@ -116,3 +116,4 @@ RSpec.describe ::Gitlab::Housekeeper::Keeps::RubocopFixer do
end
end
end
# rubocop:enable RSpec/MultipleMemoizedHelpers

View File

@@ -51483,6 +51483,12 @@ msgstr ""
msgid "TimeTracking|%{spentStart}Spent: %{spentEnd}"
msgstr ""
msgid "TimeTracking|%{time} over"
msgstr ""
msgid "TimeTracking|%{time} remaining"
msgstr ""
msgid "TimeTracking|An error occurred while removing the timelog."
msgstr ""
@@ -51528,6 +51534,9 @@ msgstr ""
msgid "TimeTracking|Time remaining: %{timeRemainingHumanReadable}"
msgstr ""
msgid "TimeTracking|Use /spend or /estimate to manage time."
msgstr ""
msgid "Timeago|%s days ago"
msgstr ""

View File

@@ -91,6 +91,12 @@ function select_gitlab_workhorse_essentials() {
mv ${tmp_path} ${TMP_TEST_FOLDER}
}
function strip_executable_binaries() {
local path="$1"
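# Select non-empty executable files, keep only those grep treats as binary (-IL), and strip their symbols.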
find "$path" -executable -type f ! -size 0 -print0 | xargs -0 grep -IL . | xargs strip || true
}
# Assets functions
function gitlab_assets_archive_doesnt_exist() {
archive_doesnt_exist "${GITLAB_ASSETS_PACKAGE_URL}"

View File

@@ -110,14 +110,13 @@ module Gitlab
# This means all jobs go to 'default' queue and mailer jobs go to 'mailers' queue.
# See config/initializers/1_settings.rb and Settings.build_sidekiq_routing_rules.
#
# Now, in case queue_selector is used, we ensure all Sidekiq processes are still processing jobs
# from default and mailers queues.
# https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1491
# We can override queue_groups to listen to just the default queues; any additional
# queues incur CPU overhead in Redis.
if routing_rules.empty?
queue_groups.each do |queues|
queues.concat(DEFAULT_QUEUES)
queues.uniq!
end
queue_groups.map! { DEFAULT_QUEUES }
# Set min_concurrency equal to max_concurrency so that the concurrency eventually
# settles at 20 (the default) instead of being derived from the number of queues, which is only 2+1 in this case.
@min_concurrency = @min_concurrency == 0 ? @max_concurrency : @min_concurrency
end
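# For illustration, with empty routing rules and hypothetical queue groups:
#   queue_groups = [%w[foo], %w[bar baz]]
#   queue_groups.map! { DEFAULT_QUEUES }
#   # => [["default", "mailers"], ["default", "mailers"]]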
if @list_queues

View File

@@ -12,7 +12,7 @@ RSpec.describe 'bin/sidekiq-cluster', :aggregate_failures do
context 'when selecting some queues and excluding others' do
where(:args, :included, :excluded) do
%w[--negate cronjob] | '-qdefault,1' | '-qcronjob,1'
%w[--queue-selector resource_boundary=cpu] | %w[-qupdate_merge_requests,1 -qdefault,1 -qmailers,1] |
%w[--queue-selector resource_boundary=cpu] | %w[-qdefault,1 -qmailers,1] |
'-qauthorized_keys_worker,1'
end
@@ -43,7 +43,7 @@ RSpec.describe 'bin/sidekiq-cluster', :aggregate_failures do
expect(status).to be(0)
expect(output).to include('bundle exec sidekiq')
expect(Shellwords.split(output)).to include('-qdefault,1')
expect(Shellwords.split(output)).to include('-qcronjob:ci_archive_traces_cron,1')
expect(Shellwords.split(output)).to include('-qmailers,1')
end
end
end

View File

@@ -58,226 +58,18 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
end
context 'with arguments' do
it 'starts the Sidekiq workers' do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo'] + described_class::DEFAULT_QUEUES], default_options)
.and_return([])
cli.run(%w[foo])
end
it 'allows the special * selector' do
worker_queues = %w[foo bar baz]
expect(Gitlab::SidekiqConfig::CliMethods)
.to receive(:worker_queues).and_return(worker_queues)
expect(Gitlab::SidekiqCluster)
.to receive(:start).with([worker_queues], default_options).and_return([])
cli.run(%w[*])
end
it 'raises an error when the arguments contain newlines' do
invalid_arguments = [
["foo\n"],
["foo\r"],
%W[foo b\nar]
]
invalid_arguments.each do |arguments|
expect { cli.run(arguments) }.to raise_error(described_class::CommandError)
context 'with routing rules specified' do
before do
stub_config(sidekiq: { routing_rules: [['resource_boundary=cpu', 'foo']] })
end
end
context 'with --negate flag' do
it 'starts Sidekiq workers for all queues in all_queues.yml except the ones in argv' do
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['baz'])
it 'starts the Sidekiq workers' do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['baz'] + described_class::DEFAULT_QUEUES], default_options)
.with([['foo']], default_options)
.and_return([])
cli.run(%w[foo -n])
end
end
context 'with --max-concurrency flag' do
it 'starts Sidekiq workers for specified queues with a max concurrency' do
expected_queues = [%w[foo bar baz], %w[solo]].each { |queues| queues.concat(described_class::DEFAULT_QUEUES) }
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w[foo bar baz])
expect(Gitlab::SidekiqCluster).to receive(:start)
.with(expected_queues, default_options.merge(max_concurrency: 2))
.and_return([])
cli.run(%w[foo,bar,baz solo -m 2])
end
end
context 'with --min-concurrency flag' do
it 'starts Sidekiq workers for specified queues with a min concurrency' do
expected_queues = [%w[foo bar baz], %w[solo]].each { |queues| queues.concat(described_class::DEFAULT_QUEUES) }
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w[foo bar baz])
expect(Gitlab::SidekiqCluster).to receive(:start)
.with(expected_queues, default_options.merge(min_concurrency: 2))
.and_return([])
cli.run(%w[foo,bar,baz solo --min-concurrency 2])
end
end
context 'with --concurrency flag' do
it 'starts Sidekiq workers for specified queues with the fixed concurrency' do
expected_queues = [%w[foo bar baz], %w[solo]].each { |queues| queues.concat(described_class::DEFAULT_QUEUES) }
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w[foo bar baz])
expect(Gitlab::SidekiqCluster).to receive(:start)
.with(expected_queues, default_options.merge(concurrency: 2))
.and_return([])
cli.run(%w[foo,bar,baz solo -c 2])
end
end
context 'with --timeout flag' do
it 'when given', 'starts Sidekiq workers with given timeout' do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo'] + described_class::DEFAULT_QUEUES], default_options.merge(timeout: 10))
.and_return([])
cli.run(%w[foo --timeout 10])
end
it 'when not given', 'starts Sidekiq workers with default timeout' do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo'] + described_class::DEFAULT_QUEUES], default_options.merge(timeout:
Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS))
.and_return([])
cli.run(%w[foo])
end
end
context 'with --list-queues flag' do
it 'errors when given --list-queues and --dryrun' do
expect { cli.run(%w[foo --list-queues --dryrun]) }.to raise_error(described_class::CommandError)
end
it 'prints out a list of queues in alphabetical order' do
expected_queues = [
'default',
'epics:epics_update_epics_dates',
'epics_new_epic_issue',
'mailers',
'new_epic',
'todos_destroyer:todos_destroyer_confidential_epic'
]
allow(Gitlab::SidekiqConfig::CliMethods).to receive(:query_queues).and_return(expected_queues.shuffle)
expect(cli).to receive(:puts).with([expected_queues])
cli.run(%w[--queue-selector feature_category=epics --list-queues])
end
end
context 'queue namespace expansion' do
it 'starts Sidekiq workers for all queues in all_queues.yml with a namespace in argv' do
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['cronjob:foo', 'cronjob:bar'])
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['cronjob', 'cronjob:foo', 'cronjob:bar'] +
described_class::DEFAULT_QUEUES], default_options)
.and_return([])
cli.run(%w[cronjob])
end
end
context "with --queue-selector" do
where do
{
'memory-bound queues' => {
query: 'resource_boundary=memory',
included_queues: %w[project_export],
excluded_queues: %w[merge]
},
'memory- or CPU-bound queues' => {
query: 'resource_boundary=memory,cpu',
included_queues: %w[auto_merge:auto_merge_process project_export],
excluded_queues: %w[merge]
},
'high urgency CI queues' => {
query: 'feature_category=continuous_integration&urgency=high',
included_queues: %w[pipeline_default:ci_drop_pipeline],
excluded_queues: %w[merge]
},
'CPU-bound high urgency CI queues' => {
query: 'feature_category=continuous_integration&urgency=high&resource_boundary=cpu',
included_queues: %w[pipeline_default:ci_create_downstream_pipeline],
excluded_queues: %w[pipeline_default:ci_drop_pipeline merge]
},
'CPU-bound high urgency non-CI queues' => {
query: 'feature_category!=continuous_integration&urgency=high&resource_boundary=cpu',
included_queues: %w[new_issue],
excluded_queues: %w[pipeline_default:ci_create_downstream_pipeline]
},
'CI and SCM queues' => {
query: 'feature_category=continuous_integration|feature_category=source_code_management',
included_queues: %w[pipeline_default:ci_drop_pipeline merge],
excluded_queues: %w[]
}
}
end
with_them do
it 'expands queues by attributes' do
expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts|
expect(opts).to eq(default_options)
expect(queues.first).to include(*included_queues)
expect(queues.first).not_to include(*excluded_queues)
expect(queues.first).to include(*described_class::DEFAULT_QUEUES)
[]
end
cli.run(%W[--queue-selector #{query}])
end
it 'works when negated' do
expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts|
expect(opts).to eq(default_options)
expect(queues.first).not_to include(*included_queues)
expect(queues.first).to include(*excluded_queues)
expect(queues.first).to include(*described_class::DEFAULT_QUEUES)
[]
end
cli.run(%W[--negate --queue-selector #{query}])
end
end
it 'expands multiple queue groups correctly' do
expected_workers =
if Gitlab.ee?
[
%w[incident_management_close_incident status_page_publish] + described_class::DEFAULT_QUEUES,
%w[bulk_imports_pipeline bulk_imports_pipeline_batch bulk_imports_relation_batch_export bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import project_template_export] +
described_class::DEFAULT_QUEUES
]
else
[
%w[incident_management_close_incident] + described_class::DEFAULT_QUEUES,
%w[bulk_imports_pipeline bulk_imports_pipeline_batch bulk_imports_relation_batch_export bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import] +
described_class::DEFAULT_QUEUES
]
end
expect(Gitlab::SidekiqCluster)
.to receive(:start)
.with(expected_workers, default_options)
.and_return([])
cli.run(%w[--queue-selector feature_category=incident_management&has_external_dependencies=true resource_boundary=memory&feature_category=importers])
end
it 'allows the special * selector' do
worker_queues = %w[foo bar baz]
@@ -288,39 +80,234 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
expect(Gitlab::SidekiqCluster)
.to receive(:start).with([worker_queues], default_options).and_return([])
cli.run(%w[--queue-selector *])
cli.run(%w[*])
end
it 'errors when the selector matches no queues' do
expect(Gitlab::SidekiqCluster).not_to receive(:start)
it 'raises an error when the arguments contain newlines' do
invalid_arguments = [
["foo\n"],
["foo\r"],
%W[foo b\nar]
]
expect { cli.run(%w[--queue-selector has_external_dependencies=true&has_external_dependencies=false]) }
.to raise_error(described_class::CommandError)
invalid_arguments.each do |arguments|
expect { cli.run(arguments) }.to raise_error(described_class::CommandError)
end
end
it 'errors on an invalid query multiple queue groups correctly' do
expect(Gitlab::SidekiqCluster).not_to receive(:start)
context 'with --negate flag' do
it 'starts Sidekiq workers for all queues in all_queues.yml except the ones in argv' do
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['baz'])
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['baz']], default_options)
.and_return([])
expect { cli.run(%w[--queue-selector unknown_field=chatops]) }
.to raise_error(Gitlab::SidekiqConfig::WorkerMatcher::QueryError)
cli.run(%w[foo -n])
end
end
context 'with --max-concurrency flag' do
it 'starts Sidekiq workers for specified queues with a max concurrency' do
expected_queues = [%w[foo bar baz], %w[solo]]
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w[foo bar baz])
expect(Gitlab::SidekiqCluster).to receive(:start)
.with(expected_queues, default_options.merge(max_concurrency: 2))
.and_return([])
cli.run(%w[foo,bar,baz solo -m 2])
end
end
context 'with --min-concurrency flag' do
it 'starts Sidekiq workers for specified queues with a min concurrency' do
expected_queues = [%w[foo bar baz], %w[solo]]
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w[foo bar baz])
expect(Gitlab::SidekiqCluster).to receive(:start)
.with(expected_queues, default_options.merge(min_concurrency: 2))
.and_return([])
cli.run(%w[foo,bar,baz solo --min-concurrency 2])
end
end
context 'with --concurrency flag' do
it 'starts Sidekiq workers for specified queues with the fixed concurrency' do
expected_queues = [%w[foo bar baz], %w[solo]]
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w[foo bar baz])
expect(Gitlab::SidekiqCluster).to receive(:start)
.with(expected_queues, default_options.merge(concurrency: 2))
.and_return([])
cli.run(%w[foo,bar,baz solo -c 2])
end
end
context 'with --timeout flag' do
it 'when given', 'starts Sidekiq workers with given timeout' do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo']], default_options.merge(timeout: 10))
.and_return([])
cli.run(%w[foo --timeout 10])
end
it 'when not given', 'starts Sidekiq workers with default timeout' do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo']], default_options.merge(timeout:
Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS))
.and_return([])
cli.run(%w[foo])
end
end
context 'with --list-queues flag' do
it 'errors when given --list-queues and --dryrun' do
expect { cli.run(%w[foo --list-queues --dryrun]) }.to raise_error(described_class::CommandError)
end
it 'prints out a list of queues in alphabetical order' do
expected_queues = [
'default',
'epics:epics_update_epics_dates',
'epics_new_epic_issue',
'mailers',
'new_epic',
'todos_destroyer:todos_destroyer_confidential_epic'
]
allow(Gitlab::SidekiqConfig::CliMethods).to receive(:query_queues).and_return(expected_queues.shuffle)
expect(cli).to receive(:puts).with([expected_queues])
cli.run(%w[--queue-selector feature_category=epics --list-queues])
end
end
context 'queue namespace expansion' do
it 'starts Sidekiq workers for all queues in all_queues.yml with a namespace in argv' do
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['cronjob:foo', 'cronjob:bar'])
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['cronjob', 'cronjob:foo', 'cronjob:bar']], default_options)
.and_return([])
cli.run(%w[cronjob])
end
end
context "with --queue-selector" do
where do
{
'memory-bound queues' => {
query: 'resource_boundary=memory',
included_queues: %w[project_export],
excluded_queues: %w[merge]
},
'memory- or CPU-bound queues' => {
query: 'resource_boundary=memory,cpu',
included_queues: %w[auto_merge:auto_merge_process project_export],
excluded_queues: %w[merge]
},
'high urgency CI queues' => {
query: 'feature_category=continuous_integration&urgency=high',
included_queues: %w[pipeline_default:ci_drop_pipeline],
excluded_queues: %w[merge]
},
'CPU-bound high urgency CI queues' => {
query: 'feature_category=continuous_integration&urgency=high&resource_boundary=cpu',
included_queues: %w[pipeline_default:ci_create_downstream_pipeline],
excluded_queues: %w[pipeline_default:ci_drop_pipeline merge]
},
'CPU-bound high urgency non-CI queues' => {
query: 'feature_category!=continuous_integration&urgency=high&resource_boundary=cpu',
included_queues: %w[new_issue],
excluded_queues: %w[pipeline_default:ci_create_downstream_pipeline]
},
'CI and SCM queues' => {
query: 'feature_category=continuous_integration|feature_category=source_code_management',
included_queues: %w[pipeline_default:ci_drop_pipeline merge],
excluded_queues: %w[]
}
}
end
with_them do
it 'expands queues by attributes' do
expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts|
expect(opts).to eq(default_options)
expect(queues.first).to include(*included_queues)
expect(queues.first).not_to include(*excluded_queues)
[]
end
cli.run(%W[--queue-selector #{query}])
end
it 'works when negated' do
expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts|
expect(opts).to eq(default_options)
expect(queues.first).not_to include(*included_queues)
expect(queues.first).to include(*excluded_queues)
[]
end
cli.run(%W[--negate --queue-selector #{query}])
end
end
it 'expands multiple queue groups correctly' do
expected_workers =
if Gitlab.ee?
[
%w[incident_management_close_incident status_page_publish],
%w[bulk_imports_pipeline bulk_imports_pipeline_batch bulk_imports_relation_batch_export bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import project_template_export]
]
else
[
%w[incident_management_close_incident],
%w[bulk_imports_pipeline bulk_imports_pipeline_batch bulk_imports_relation_batch_export bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import]
]
end
expect(Gitlab::SidekiqCluster)
.to receive(:start)
.with(expected_workers, default_options)
.and_return([])
cli.run(%w[--queue-selector feature_category=incident_management&has_external_dependencies=true resource_boundary=memory&feature_category=importers])
end
it 'allows the special * selector' do
worker_queues = %w[foo bar baz]
expect(Gitlab::SidekiqConfig::CliMethods)
.to receive(:worker_queues).and_return(worker_queues)
expect(Gitlab::SidekiqCluster)
.to receive(:start).with([worker_queues], default_options).and_return([])
cli.run(%w[--queue-selector *])
end
it 'errors when the selector matches no queues' do
expect(Gitlab::SidekiqCluster).not_to receive(:start)
expect { cli.run(%w[--queue-selector has_external_dependencies=true&has_external_dependencies=false]) }
.to raise_error(described_class::CommandError)
end
it 'errors on an invalid query multiple queue groups correctly' do
expect(Gitlab::SidekiqCluster).not_to receive(:start)
expect { cli.run(%w[--queue-selector unknown_field=chatops]) }
.to raise_error(Gitlab::SidekiqConfig::WorkerMatcher::QueryError)
end
end
end
context "with routing rules specified" do
before do
stub_config(sidekiq: { routing_rules: [['resource_boundary=cpu', 'foo']] })
end
it "starts Sidekiq workers only for given queues without any additional DEFAULT_QUEUES" do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo']], default_options)
.and_return([])
cli.run(%w[foo])
end
end
context "with sidekiq settings not specified" do
context "without sidekiq setting specified" do
before do
stub_config(sidekiq: nil)
end
@@ -331,14 +318,53 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
expect { cli.run(%w[foo]) }.not_to raise_error
end
it "starts Sidekiq workers with given queues, and additional default and mailers queues (DEFAULT_QUEUES)" do
it "starts Sidekiq workers with DEFAULT_QUEUES and min_concurrency = max_concurrency" do
default_options[:min_concurrency] = default_options[:max_concurrency]
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo'] + described_class::DEFAULT_QUEUES], default_options)
.with([described_class::DEFAULT_QUEUES], default_options)
.and_return([])
cli.run(%w[foo])
end
end
context "without routing rules" do
before do
stub_config(sidekiq: { routing_rules: [] })
end
it "starts Sidekiq workers with DEFAULT_QUEUES and min_concurrency = max_concurrency" do
default_options[:min_concurrency] = default_options[:max_concurrency]
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([described_class::DEFAULT_QUEUES], default_options)
.and_return([])
cli.run(%w[foo])
end
context "with 4 wildcard * as argument" do
it "starts 4 Sidekiq workers all with DEFAULT_QUEUES and min_concurrency = max_concurrency" do
default_options[:min_concurrency] = default_options[:max_concurrency]
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([described_class::DEFAULT_QUEUES] * 4, default_options)
.and_return([])
cli.run(%w[* * * *])
end
end
context "with min-concurrency flag" do
it "starts Sidekiq workers with DEFAULT_QUEUES and min_concurrency as specified" do
options = default_options.dup
options[:min_concurrency] = 10
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([described_class::DEFAULT_QUEUES] * 4, options)
.and_return([])
cli.run(%w[* * * * --min-concurrency 10])
end
end
end
end
context 'metrics server' do

View File

@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe ContributedProjectsFinder do
RSpec.describe ContributedProjectsFinder, feature_category: :groups_and_projects do
let(:source_user) { create(:user) }
let(:current_user) { create(:user) }
@@ -12,14 +12,22 @@ RSpec.describe ContributedProjectsFinder do
let!(:private_project) { create(:project, :private) }
let!(:internal_project) { create(:project, :internal) }
let(:default_ordering) { [internal_project, private_project, public_project] }
before do
private_project.add_maintainer(source_user)
private_project.add_developer(current_user)
public_project.add_maintainer(source_user)
create(:push_event, project: public_project, author: source_user)
create(:push_event, project: private_project, author: source_user)
create(:push_event, project: internal_project, author: source_user)
travel_to(4.hours.from_now) { create(:push_event, project: private_project, author: source_user) }
travel_to(3.hours.from_now) { create(:push_event, project: internal_project, author: source_user) }
travel_to(2.hours.from_now) { create(:push_event, project: public_project, author: source_user) }
end
context 'when order_by is specified' do
subject { finder.execute(current_user, order_by: 'latest_activity_desc') }
it { is_expected.to eq([private_project, internal_project, public_project]) }
end
describe 'activity without a current user' do
@@ -30,14 +38,14 @@ RSpec.describe ContributedProjectsFinder do
it 'does return all projects when visibility gets ignored' do
projects = finder.execute(ignore_visibility: true)
expect(projects).to match_array([private_project, internal_project, public_project])
expect(projects).to eq(default_ordering)
end
end
describe 'activity with a current user' do
subject { finder.execute(current_user) }
it { is_expected.to match_array([private_project, internal_project, public_project]) }
it { is_expected.to eq(default_ordering) }
end
context 'user with private profile' do

View File

@@ -9,6 +9,7 @@ import WorkItemMilestoneInline from '~/work_items/components/work_item_milestone
import WorkItemMilestoneWithEdit from '~/work_items/components/work_item_milestone_with_edit.vue';
import WorkItemParentInline from '~/work_items/components/work_item_parent_inline.vue';
import WorkItemParent from '~/work_items/components/work_item_parent_with_edit.vue';
import WorkItemTimeTracking from '~/work_items/components/work_item_time_tracking.vue';
import waitForPromises from 'helpers/wait_for_promises';
import WorkItemAttributesWrapper from '~/work_items/components/work_item_attributes_wrapper.vue';
import {
@@ -32,7 +33,8 @@ describe('WorkItemAttributesWrapper component', () => {
const findWorkItemMilestoneInline = () => wrapper.findComponent(WorkItemMilestoneInline);
const findWorkItemParentInline = () => wrapper.findComponent(WorkItemParentInline);
const findWorkItemParent = () => wrapper.findComponent(WorkItemParent);
const findWorkItemParticipents = () => wrapper.findComponent(Participants);
const findWorkItemTimeTracking = () => wrapper.findComponent(WorkItemTimeTracking);
const findWorkItemParticipants = () => wrapper.findComponent(Participants);
const createComponent = ({
workItem = workItemQueryResponse.data.workItem,
@@ -209,6 +211,19 @@ describe('WorkItemAttributesWrapper component', () => {
});
});
describe('time tracking widget', () => {
it.each`
description | timeTrackingWidgetPresent | exists
${'renders when widget is returned from API'} | ${true} | ${true}
${'does not render when widget is not returned from API'} | ${false} | ${false}
`('$description', ({ timeTrackingWidgetPresent, exists }) => {
const response = workItemResponseFactory({ timeTrackingWidgetPresent });
createComponent({ workItem: response.data.workItem });
expect(findWorkItemTimeTracking().exists()).toBe(exists);
});
});
describe('participants widget', () => {
it.each`
description | participantsWidgetPresent | exists
@@ -218,7 +233,7 @@ describe('WorkItemAttributesWrapper component', () => {
const response = workItemResponseFactory({ participantsWidgetPresent });
createComponent({ workItem: response.data.workItem });
expect(findWorkItemParticipents().exists()).toBe(exists);
expect(findWorkItemParticipants().exists()).toBe(exists);
});
});
});

View File

@@ -0,0 +1,106 @@
import { GlProgressBar } from '@gitlab/ui';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import WorkItemTimeTracking from '~/work_items/components/work_item_time_tracking.vue';
describe('WorkItemTimeTracking component', () => {
let wrapper;
const findProgressBar = () => wrapper.findComponent(GlProgressBar);
const findTimeTrackingBody = () => wrapper.findByTestId('time-tracking-body');
const getTooltip = () => getBinding(findProgressBar().element, 'gl-tooltip');
const createComponent = ({ timeEstimate = 0, totalTimeSpent = 0 } = {}) => {
wrapper = shallowMountExtended(WorkItemTimeTracking, {
directives: {
GlTooltip: createMockDirective('gl-tooltip'),
},
propsData: {
timeEstimate,
totalTimeSpent,
},
});
};
it('renders heading text', () => {
createComponent();
expect(wrapper.find('h3').text()).toBe('Time tracking');
});
describe('with no time spent and no time estimate', () => {
it('shows help text', () => {
createComponent({ timeEstimate: 0, totalTimeSpent: 0 });
expect(findTimeTrackingBody().text()).toMatchInterpolatedText(
'Use /spend or /estimate to manage time.',
);
expect(findProgressBar().exists()).toBe(false);
});
});
describe('with time spent and no time estimate', () => {
it('shows only time spent', () => {
createComponent({ timeEstimate: 0, totalTimeSpent: 10800 });
expect(findTimeTrackingBody().text()).toMatchInterpolatedText('Spent 3h');
expect(findProgressBar().exists()).toBe(false);
});
});
describe('with no time spent and time estimate', () => {
beforeEach(() => {
createComponent({ timeEstimate: 10800, totalTimeSpent: 0 });
});
it('shows 0h time spent and time estimate', () => {
expect(findTimeTrackingBody().text()).toMatchInterpolatedText('Spent 0h Estimate 3h');
});
it('shows progress bar with tooltip', () => {
expect(findProgressBar().attributes()).toMatchObject({
value: '0',
variant: 'primary',
});
expect(getTooltip().value).toContain('3h remaining');
});
});
describe('with time spent and time estimate', () => {
describe('when time spent is less than the time estimate', () => {
beforeEach(() => {
createComponent({ timeEstimate: 18000, totalTimeSpent: 10800 });
});
it('shows time spent and time estimate', () => {
expect(findTimeTrackingBody().text()).toMatchInterpolatedText('Spent 3h Estimate 5h');
});
it('shows progress bar with tooltip', () => {
expect(findProgressBar().attributes()).toMatchObject({
value: '60',
variant: 'primary',
});
expect(getTooltip().value).toContain('2h remaining');
});
});
describe('when time spent is greater than the time estimate', () => {
beforeEach(() => {
createComponent({ timeEstimate: 10800, totalTimeSpent: 18000 });
});
it('shows time spent and time estimate', () => {
expect(findTimeTrackingBody().text()).toMatchInterpolatedText('Spent 5h Estimate 3h');
});
it('shows progress bar with tooltip', () => {
expect(findProgressBar().attributes()).toMatchObject({
value: '166',
variant: 'danger',
});
expect(getTooltip().value).toContain('2h over');
});
});
});
});

View File

@@ -628,6 +628,7 @@ export const workItemResponseFactory = ({
assigneesWidgetPresent = true,
datesWidgetPresent = true,
weightWidgetPresent = true,
timeTrackingWidgetPresent = true,
participantsWidgetPresent = true,
progressWidgetPresent = true,
milestoneWidgetPresent = true,
@@ -757,6 +758,14 @@ export const workItemResponseFactory = ({
},
}
: { type: 'MOCK TYPE' },
timeTrackingWidgetPresent
? {
__typename: 'WorkItemWidgetTimeTracking',
type: 'TIME_TRACKING',
timeEstimate: '5h',
totalTimeSpent: '3h',
}
: { type: 'MOCK TYPE' },
participantsWidgetPresent
? {
__typename: 'WorkItemWidgetParticipants',

View File

@@ -0,0 +1,245 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe MigrateSidekiqQueuedAndFutureJobs, :clean_gitlab_redis_queues, feature_category: :scalability do
let(:email_receiver_queue) { 'email_receiver' }
let(:mappings_mocked) { true }
let(:mappings) { { "EmailReceiverWorker" => "default" } }
around do |example|
EmailReceiverWorker.sidekiq_options queue: email_receiver_queue
Sidekiq::Testing.disable!(&example)
EmailReceiverWorker.set_queue
end
describe '#up', :aggregate_failures, :silence_stdout do
context 'when migrating queued jobs' do
let(:email_receiver_jobs_count_pre) { 2 }
let(:default_jobs_count_pre) { 0 }
let(:email_receiver_jobs_count_post) { 0 }
let(:default_jobs_count_post) { 2 }
before do
EmailReceiverWorker.perform_async('foo')
EmailReceiverWorker.perform_async('bar')
end
shared_examples 'migrates queued jobs' do
it 'migrates the jobs to the correct destination queue' do
allow(Gitlab::SidekiqConfig).to receive(:worker_queue_mappings).and_return(mappings) if mappings_mocked
expect(queue_length('email_receiver')).to eq(email_receiver_jobs_count_pre)
expect(queue_length('default')).to eq(default_jobs_count_pre)
migrate!
expect(queue_length('email_receiver')).to eq(email_receiver_jobs_count_post)
expect(queue_length('default')).to eq(default_jobs_count_post)
jobs = list_jobs('default')
expect(jobs[0]).to include("class" => "EmailReceiverWorker", "queue" => "default", "args" => ["bar"])
expect(jobs[1]).to include("class" => "EmailReceiverWorker", "queue" => "default", "args" => ["foo"])
end
end
context 'with worker_queue_mappings mocked' do
let(:mappings_mocked) { true }
it_behaves_like 'migrates queued jobs'
context 'when jobs are already in the correct queue' do
let(:email_receiver_queue) { 'default' }
let(:email_receiver_jobs_count_pre) { 0 }
let(:default_jobs_count_pre) { 2 }
let(:email_receiver_jobs_count_post) { 0 }
let(:default_jobs_count_post) { 2 }
it_behaves_like 'migrates queued jobs'
end
end
context 'without worker_queue_mappings mocked' do
# Assuming Settings.sidekiq.routing_rules is [['*', 'default']]
# If routing_rules or Gitlab::SidekiqConfig.worker_queue_mappings changed,
# this spec might be failing. We'll have to adjust the migration or this spec.
let(:mappings_mocked) { false }
it_behaves_like 'migrates queued jobs'
end
context 'with illegal JSON payload' do
let(:job) { '{foo: 1}' }
before do
Sidekiq.redis do |conn|
conn.lpush("queue:email_receiver", job)
end
end
it 'logs an error' do
allow(::Gitlab::BackgroundMigration::Logger).to receive(:build).and_return(Logger.new($stdout))
migrate!
expect($stdout.string).to include("Unmarshal JSON payload from SidekiqMigrateJobs failed. Job: #{job}")
end
end
context 'when run in GitLab.com' do
it 'skips the migration' do
allow(Gitlab).to receive(:com?).and_return(true)
expect(described_class::SidekiqMigrateJobs).not_to receive(:new)
migrate!
end
end
def queue_length(queue_name)
Sidekiq.redis do |conn|
conn.llen("queue:#{queue_name}")
end
end
def list_jobs(queue_name)
Sidekiq.redis { |conn| conn.lrange("queue:#{queue_name}", 0, -1) }
.map { |item| Sidekiq.load_json item }
end
end
context 'when migrating future jobs' do
include_context 'when handling retried jobs'
let(:schedule_jobs_count_in_email_receiver_pre) { 3 }
let(:retry_jobs_count_in_email_receiver_pre) { 2 }
let(:schedule_jobs_count_in_default_pre) { 0 }
let(:retry_jobs_count_in_default_pre) { 0 }
let(:schedule_jobs_count_in_email_receiver_post) { 0 }
let(:retry_jobs_count_in_email_receiver_post) { 0 }
let(:schedule_jobs_count_in_default_post) { 3 }
let(:retry_jobs_count_in_default_post) { 2 }
before do
allow(Gitlab::SidekiqConfig).to receive(:worker_queue_mappings).and_return(mappings) if mappings_mocked
EmailReceiverWorker.perform_in(1.hour, 'foo')
EmailReceiverWorker.perform_in(2.hours, 'bar')
EmailReceiverWorker.perform_in(3.hours, 'baz')
retry_in(EmailReceiverWorker, 1.hour, 0)
retry_in(EmailReceiverWorker, 2.hours, 0)
end
shared_examples 'migrates scheduled and retried jobs' do
it 'migrates to correct destination queue' do
queues = %w[email_receiver default]
job_types = %w[schedule retry]
worker = EmailReceiverWorker.to_s
queues.each do |queue|
job_types.each do |job_type|
jobs_pre = scan_jobs(job_type, queue, worker)
expect(jobs_pre.length).to eq(send("#{job_type}_jobs_count_in_#{queue}_pre"))
end
end
migrate!
queues.each do |queue|
job_types.each do |job_type|
jobs_post = scan_jobs(job_type, queue, worker)
expect(jobs_post.length).to eq(send("#{job_type}_jobs_count_in_#{queue}_post"))
end
end
end
it 'logs output at the start, finish, and in between set' do
stub_const("#{described_class}::SidekiqMigrateJobs::LOG_FREQUENCY", 1)
allow(::Gitlab::BackgroundMigration::Logger).to receive(:build).and_return(Logger.new($stdout))
migrate!
expect($stdout.string).to include('Processing schedule set')
expect($stdout.string).to include('Processing retry set')
expect($stdout.string).to include('In progress')
expect($stdout.string).to include('Done')
end
end
context 'with worker_queue_mappings mocked' do
let(:mappings_mocked) { true }
it_behaves_like 'migrates scheduled and retried jobs'
context 'when jobs are already in the correct queue' do
let(:email_receiver_queue) { 'default' }
let(:schedule_jobs_count_in_email_receiver_pre) { 0 }
let(:retry_jobs_count_in_email_receiver_pre) { 0 }
let(:schedule_jobs_count_in_default_pre) { 3 }
let(:retry_jobs_count_in_default_pre) { 2 }
let(:schedule_jobs_count_in_email_receiver_post) { 0 }
let(:retry_jobs_count_in_email_receiver_post) { 0 }
let(:schedule_jobs_count_in_default_post) { 3 }
let(:retry_jobs_count_in_default_post) { 2 }
it_behaves_like 'migrates scheduled and retried jobs'
end
context "when job doesn't match mappings" do
let(:mappings) { { "AuthorizedProjectsWorker" => "default" } }
it 'logs skipping the job' do
allow(::Gitlab::BackgroundMigration::Logger).to receive(:build).and_return(Logger.new($stdout))
migrate!
expect($stdout.string).to include('Skipping job from EmailReceiverWorker. No destination queue found.')
end
end
end
context 'without worker_queue_mappings mocked' do
let(:mappings_mocked) { false }
it_behaves_like 'migrates scheduled and retried jobs'
end
context 'when there are matching jobs that got removed during migration' do
it 'does not try to migrate jobs' do
allow(::Gitlab::BackgroundMigration::Logger).to receive(:build).and_return(Logger.new($stdout))
freeze_time do
allow_next_instance_of(described_class::SidekiqMigrateJobs) do |migrator|
allow(migrator).to receive(:migrate_job_in_set).and_wrap_original do |meth, *args|
Sidekiq.redis { |c| c.zrem('schedule', args.third) }
Sidekiq.redis { |c| c.zrem('retry', args.third) }
meth.call(*args)
end
end
migrate!
# schedule jobs
expect($stdout.string).to include("Done. Scanned records: 3. Migrated records: 0.")
# retry jobs
expect($stdout.string).to include("Done. Scanned records: 2. Migrated records: 0.")
end
end
end
context 'when run in GitLab.com' do
it 'skips the migration' do
allow(Gitlab).to receive(:com?).and_return(true)
expect(described_class::SidekiqMigrateJobs).not_to receive(:new)
migrate!
end
end
def set_length(set)
Sidekiq.redis { |c| c.zcard(set) }
end
def scan_jobs(set_name, queue_name, class_name)
Sidekiq.redis { |c| c.zrange(set_name, 0, -1) }
.map { |item| Gitlab::Json.load(item) }
.select { |job| job['queue'] == queue_name && job['class'] == class_name }
end
end
end
end