Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-04-26 18:19:16 +00:00
parent 34283a71d9
commit fa69a57b46
80 changed files with 1067 additions and 1892 deletions

View File

@ -141,9 +141,8 @@ retrieve-frontend-fixtures:
- source scripts/gitlab_component_helpers.sh
- |
if check_fixtures_reuse; then
echoinfo "INFO: Reusing frontend fixtures from 'retrieve-frontend-fixtures'."
exit 0
else
echo "No frontend fixtures directory, generating frontend fixtures."
fi
- run_timed_command "gem install knapsack --no-document"
- section_start "gitaly-test-spawn" "Spawning Gitaly"; scripts/gitaly-test-spawn; section_end "gitaly-test-spawn"; # Do not use 'bundle exec' here

View File

@ -98,7 +98,7 @@
if: '$CI_MERGE_REQUEST_LABELS =~ /group::global search/'
.if-merge-request-labels-pipeline-expedite: &if-merge-request-labels-pipeline-expedite
if: '($CI_MERGE_REQUEST_LABELS =~ /master:(foss-)?broken/ || $CI_MERGE_REQUEST_TITLE =~ /^[Rr]evert/) && $CI_MERGE_REQUEST_LABELS =~ /pipeline:expedite/'
if: '($CI_MERGE_REQUEST_LABELS =~ /master:(foss-)?broken/ || $CI_MERGE_REQUEST_LABELS =~ /quarantine/) && $CI_MERGE_REQUEST_LABELS =~ /pipeline:expedite/'
.if-merge-request-labels-frontend-and-feature-flag: &if-merge-request-labels-frontend-and-feature-flag
if: '$CI_MERGE_REQUEST_LABELS =~ /frontend/ && $CI_MERGE_REQUEST_LABELS =~ /feature flag/'

View File

@ -1736,7 +1736,6 @@ Layout/ArgumentAlignment:
- 'lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder.rb'
- 'lib/gitlab/setup_helper.rb'
- 'lib/gitlab/sidekiq_config/worker.rb'
- 'lib/gitlab/sidekiq_daemon/memory_killer.rb'
- 'lib/gitlab/spamcheck/client.rb'
- 'lib/gitlab/usage/metrics/instrumentations/database_metric.rb'
- 'lib/gitlab/usage_data.rb'

View File

@ -2932,7 +2932,6 @@ Layout/LineLength:
- 'lib/gitlab/setup_helper.rb'
- 'lib/gitlab/sidekiq_config.rb'
- 'lib/gitlab/sidekiq_config/worker_router.rb'
- 'lib/gitlab/sidekiq_daemon/memory_killer.rb'
- 'lib/gitlab/sidekiq_daemon/monitor.rb'
- 'lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/deduplicates_when_scheduling.rb'
- 'lib/gitlab/sidekiq_middleware/server_metrics.rb'
@ -4197,7 +4196,6 @@ Layout/LineLength:
- 'spec/lib/gitlab/search_results_spec.rb'
- 'spec/lib/gitlab/serializer/pagination_spec.rb'
- 'spec/lib/gitlab/sidekiq_config/worker_router_spec.rb'
- 'spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb'
- 'spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb'
- 'spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb'
- 'spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb'

View File

@ -139,7 +139,6 @@ Lint/EmptyBlock:
- 'spec/lib/gitlab/quick_actions/extractor_spec.rb'
- 'spec/lib/gitlab/search_context/builder_spec.rb'
- 'spec/lib/gitlab/session_spec.rb'
- 'spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb'
- 'spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb'
- 'spec/lib/gitlab/sidekiq_middleware/extra_done_log_metadata_spec.rb'
- 'spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb'

View File

@ -2020,7 +2020,6 @@ RSpec/ContextWording:
- 'spec/lib/gitlab/search_context/controller_concern_spec.rb'
- 'spec/lib/gitlab/search_results_spec.rb'
- 'spec/lib/gitlab/sidekiq_config/worker_router_spec.rb'
- 'spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb'
- 'spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb'
- 'spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb'
- 'spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb'

View File

@ -4395,7 +4395,6 @@ RSpec/MissingFeatureCategory:
- 'spec/lib/gitlab/sidekiq_config/worker_router_spec.rb'
- 'spec/lib/gitlab/sidekiq_config/worker_spec.rb'
- 'spec/lib/gitlab/sidekiq_config_spec.rb'
- 'spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb'
- 'spec/lib/gitlab/sidekiq_daemon/monitor_spec.rb'
- 'spec/lib/gitlab/sidekiq_death_handler_spec.rb'
- 'spec/lib/gitlab/sidekiq_logging/deduplication_logger_spec.rb'

View File

@ -162,7 +162,6 @@ RSpec/ReturnFromStub:
- 'spec/lib/gitlab/redis/shared_state_spec.rb'
- 'spec/lib/gitlab/redis/sidekiq_status_spec.rb'
- 'spec/lib/gitlab/relative_positioning/range_spec.rb'
- 'spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb'
- 'spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb'
- 'spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb'
- 'spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb'

View File

@ -599,7 +599,7 @@ const PROJECT_SHORTCUTS_GROUP = {
GO_TO_PROJECT_ISSUE_BOARDS,
GO_TO_PROJECT_MERGE_REQUESTS,
GO_TO_PROJECT_JOBS,
GO_TO_PROJECT_METRICS,
...(gon.features?.removeMonitorMetrics ? [] : [GO_TO_PROJECT_METRICS]),
GO_TO_PROJECT_ENVIRONMENTS,
GO_TO_PROJECT_KUBERNETES,
GO_TO_PROJECT_SNIPPETS,
@ -694,7 +694,7 @@ export const keybindingGroups = [
MR_COMMITS_SHORTCUTS_GROUP,
ISSUES_SHORTCUTS_GROUP,
WEB_IDE_SHORTCUTS_GROUP,
METRICS_SHORTCUTS_GROUP,
...(gon.features?.removeMonitorMetrics ? [] : [METRICS_SHORTCUTS_GROUP]),
MISC_SHORTCUTS_GROUP,
];

View File

@ -3,7 +3,15 @@ import { GlDropdown, GlDropdownForm, GlDropdownItem, GlDropdownDivider, GlIcon }
import { s__ } from '~/locale';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import RunnerInstructionsModal from '~/vue_shared/components/runner_instructions/runner_instructions_modal.vue';
import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '../../constants';
import {
INSTANCE_TYPE,
GROUP_TYPE,
PROJECT_TYPE,
I18N_REGISTER_INSTANCE_TYPE,
I18N_REGISTER_GROUP_TYPE,
I18N_REGISTER_PROJECT_TYPE,
I18N_REGISTER_RUNNER,
} from '../../constants';
import RegistrationToken from './registration_token.vue';
import RegistrationTokenResetDropdownItem from './registration_token_reset_dropdown_item.vue';
@ -51,20 +59,23 @@ export default {
this.glFeatures?.createRunnerWorkflowForNamespace
);
},
actionText() {
switch (this.type) {
case INSTANCE_TYPE:
return I18N_REGISTER_INSTANCE_TYPE;
case GROUP_TYPE:
return I18N_REGISTER_GROUP_TYPE;
case PROJECT_TYPE:
return I18N_REGISTER_PROJECT_TYPE;
default:
return I18N_REGISTER_RUNNER;
}
},
dropdownText() {
if (this.isDeprecated) {
return '';
}
switch (this.type) {
case INSTANCE_TYPE:
return s__('Runners|Register an instance runner');
case GROUP_TYPE:
return s__('Runners|Register a group runner');
case PROJECT_TYPE:
return s__('Runners|Register a project runner');
default:
return s__('Runners|Register a runner');
}
return this.actionText;
},
dropdownToggleClass() {
if (this.isDeprecated) {
@ -109,6 +120,7 @@ export default {
v-bind="$attrs"
>
<template v-if="isDeprecated" #button-content>
<span class="gl-sr-only">{{ actionText }}</span>
<gl-icon name="ellipsis_v" />
</template>
<gl-dropdown-form class="gl-p-4!">

View File

@ -71,6 +71,12 @@ export const I18N_STALE_NEVER_CONTACTED_TOOLTIP = s__(
'Runners|Runner is stale; it has never contacted this instance',
);
// Registration dropdown
export const I18N_REGISTER_INSTANCE_TYPE = s__('Runners|Register an instance runner');
export const I18N_REGISTER_GROUP_TYPE = s__('Runners|Register a group runner');
export const I18N_REGISTER_PROJECT_TYPE = s__('Runners|Register a project runner');
export const I18N_REGISTER_RUNNER = s__('Runners|Register a runner');
// Actions
export const I18N_EDIT = __('Edit');

View File

@ -0,0 +1,39 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import RegistrationDropdown from '~/ci/runner/components/registration/registration_dropdown.vue';
import { PROJECT_TYPE } from '~/ci/runner/constants';
Vue.use(VueApollo);
export const initProjectRunnersRegistrationDropdown = (
selector = '#js-project-runner-registration-dropdown',
) => {
const el = document.querySelector(selector);
if (!el) {
return null;
}
const { registrationToken, projectId } = el.dataset;
const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(),
});
return new Vue({
el,
apolloProvider,
provide: {
projectId,
},
render(h) {
return h(RegistrationDropdown, {
props: {
registrationToken,
type: PROJECT_TYPE,
},
});
},
});
};

View File

@ -28,3 +28,4 @@ export const TYPENAME_VULNERABILITIES_SCANNER = 'Vulnerabilities::Scanner';
export const TYPENAME_VULNERABILITY = 'Vulnerability';
export const TYPENAME_WORK_ITEM = 'WorkItem';
export const TYPE_USERS_SAVED_REPLY = 'Users::SavedReply';
export const TYPE_WORKSPACE = 'RemoteDevelopment::Workspace';

View File

@ -12,6 +12,7 @@ import { initTokenAccess } from '~/token_access';
import { initCiSecureFiles } from '~/ci_secure_files';
import initDeployTokens from '~/deploy_tokens';
import { initProjectRunners } from '~/ci/runner/project_runners';
import { initProjectRunnersRegistrationDropdown } from '~/ci/runner/project_runners/register';
// Initialize expandable settings panels
initSettingsPanels();
@ -42,6 +43,7 @@ initSettingsPipelinesTriggers();
initArtifactsSettings();
initProjectRunners();
initProjectRunnersRegistrationDropdown();
initSharedRunnersToggle();
initRefSwitcherBadges();
initInstallRunner();

View File

@ -2,7 +2,7 @@
import { GlAvatarLink, GlSprintf } from '@gitlab/ui';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import { getWorkItemQuery } from '../utils';
import workItemByIidQuery from '../graphql/work_item_by_iid.query.graphql';
export default {
components: {
@ -11,15 +11,6 @@ export default {
TimeAgoTooltip,
},
props: {
fetchByIid: {
type: Boolean,
required: true,
},
workItemId: {
type: String,
required: false,
default: null,
},
workItemIid: {
type: String,
required: false,
@ -44,31 +35,21 @@ export default {
authorId() {
return getIdFromGraphQLId(this.author.id);
},
queryVariables() {
return this.fetchByIid
? {
fullPath: this.fullPath,
iid: this.workItemIid,
}
: {
id: this.workItemId,
};
},
},
apollo: {
workItem: {
query() {
return getWorkItemQuery(this.fetchByIid);
},
query: workItemByIidQuery,
variables() {
return this.queryVariables;
return {
fullPath: this.fullPath,
iid: this.workItemIid,
};
},
skip() {
return !this.workItemId && !this.workItemIid;
return !this.workItemIid;
},
update(data) {
const workItem = this.fetchByIid ? data.workspace.workItems.nodes[0] : data.workItem;
return workItem ?? {};
return data.workspace.workItems.nodes[0] ?? {};
},
},
},

View File

@ -618,12 +618,7 @@ export default {
:can-update="canUpdate"
@error="updateError = $event"
/>
<work-item-created-updated
:work-item-id="workItem.id"
:work-item-iid="workItemIid"
:full-path="fullPath"
:fetch-by-iid="fetchByIid"
/>
<work-item-created-updated :work-item-iid="workItemIid" :full-path="fullPath" />
<work-item-state
:work-item="workItem"
:work-item-parent-id="workItemParentId"
@ -646,7 +641,6 @@ export default {
:work-item-id="workItem.id"
:can-update="canUpdate"
:full-path="fullPath"
:fetch-by-iid="fetchByIid"
:query-variables="queryVariables"
@error="updateError = $event"
/>
@ -664,8 +658,6 @@ export default {
:work-item-id="workItem.id"
:work-item-milestone="workItemMilestone.milestone"
:work-item-type="workItemType"
:fetch-by-iid="fetchByIid"
:query-variables="queryVariables"
:can-update="canUpdate"
:full-path="fullPath"
@error="updateError = $event"
@ -677,7 +669,6 @@ export default {
:weight="workItemWeight.weight"
:work-item-id="workItem.id"
:work-item-type="workItemType"
:fetch-by-iid="fetchByIid"
:query-variables="queryVariables"
@error="updateError = $event"
/>

View File

@ -8,8 +8,8 @@ import LabelItem from '~/sidebar/components/labels/labels_select_widget/label_it
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import { isScopedLabel } from '~/lib/utils/common_utils';
import workItemLabelsSubscription from 'ee_else_ce/work_items/graphql/work_item_labels.subscription.graphql';
import { getWorkItemQuery } from '../utils';
import updateWorkItemMutation from '../graphql/update_work_item.mutation.graphql';
import workItemByIidQuery from '../graphql/work_item_by_iid.query.graphql';
import {
i18n,
@ -56,11 +56,6 @@ export default {
type: String,
required: true,
},
fetchByIid: {
type: Boolean,
required: false,
default: false,
},
queryVariables: {
type: Object,
required: true,
@ -79,17 +74,15 @@ export default {
},
apollo: {
workItem: {
query() {
return getWorkItemQuery(this.fetchByIid);
},
query: workItemByIidQuery,
variables() {
return this.queryVariables;
},
update(data) {
return this.fetchByIid ? data.workspace.workItems.nodes[0] : data.workItem;
return data.workspace.workItems.nodes[0];
},
skip() {
return !this.queryVariables.id && !this.queryVariables.iid;
return !this.queryVariables.iid;
},
error() {
this.$emit('error', i18n.fetchError);

View File

@ -70,15 +70,6 @@ export default {
type: String,
required: true,
},
fetchByIid: {
type: Boolean,
required: false,
default: false,
},
queryVariables: {
type: Object,
required: true,
},
},
data() {
return {

View File

@ -1,5 +1,4 @@
.color-item {
@include gl-align-items-center;
@include gl-display-flex;
}

View File

@ -75,9 +75,3 @@
}
}
}
.gl-md-flex-nowrap.gl-md-flex-nowrap {
@include gl-media-breakpoint-up(md) {
@include gl-flex-nowrap;
}
}

View File

@ -91,6 +91,21 @@ class Import::GithubController < Import::BaseController
render json: Import::GithubRealtimeRepoSerializer.new.represent(already_added_projects)
end
def failures
project = Project.imported_from(provider_name).find(params[:project_id])
unless project.import_finished?
return render status: :bad_request, json: {
message: _('The import is not complete.')
}
end
failures = project.import_failures.with_external_identifiers
serializer = Import::GithubFailureSerializer.new.with_pagination(request, response)
render json: serializer.represent(failures)
end
def cancel
project = Project.imported_from(provider_name).find(params[:project_id])
result = Import::Github::CancelProjectImportService.new(project, current_user).execute

View File

@ -91,7 +91,7 @@ class Projects::CommitsController < Projects::ApplicationController
@repository.commits(ref, **options)
end
@commits.load_tags if Feature.enabled?(:show_tags_on_commits_view, @project)
@commits.load_tags
@commits.each(&:lazy_author) # preload authors
@commits = @commits.with_markdown_cache.with_latest_pipeline(ref)

View File

@ -15,6 +15,7 @@ module Projects
before_action do
push_frontend_feature_flag(:ci_variables_pages, current_user)
push_frontend_feature_flag(:ci_limit_environment_scope, @project)
push_frontend_feature_flag(:create_runner_workflow_for_namespace, @project.namespace)
end
helper_method :highlight_badge

View File

@ -151,7 +151,7 @@ module Types
end
def ephemeral_register_url
return unless ephemeral_register_url_access_allowed?(runner)
return unless context[:current_user]&.can?(:read_ephemeral_token, runner) && runner.registration_available?
case runner.runner_type
when 'instance_type'
@ -203,23 +203,6 @@ module Types
def can_admin_runners?
context[:current_user]&.can_admin_all_resources?
end
def ephemeral_register_url_access_allowed?(runner)
return unless runner.registration_available?
case runner.runner_type
when 'instance_type'
can_admin_runners?
when 'group_type'
group = runner.groups[0]
group && context[:current_user]&.can?(:register_group_runners, group)
when 'project_type'
project = runner.projects[0]
project && context[:current_user]&.can?(:register_project_runners, project)
end
end
end
end
end

View File

@ -276,7 +276,7 @@ module ApplicationHelper
if startup_css_enabled?
stylesheet_link_tag(path, media: "print", crossorigin: ActionController::Base.asset_host ? 'anonymous' : nil)
else
stylesheet_link_tag(path, crossorigin: ActionController::Base.asset_host ? 'anonymous' : nil)
stylesheet_link_tag(path, media: "all", crossorigin: ActionController::Base.asset_host ? 'anonymous' : nil)
end
end

View File

@ -160,23 +160,18 @@ module CommitsHelper
# This includes a keyed hash for values that can be nil, to prevent invalid cache entries
# being served if the order should change in future.
def commit_partial_cache_key(commit, ref:, merge_request:, request:)
keyed_hash = {
merge_request: merge_request&.cache_key,
pipeline_status: commit.detailed_status_for(ref)&.cache_key,
xhr: request.xhr?,
controller: controller.controller_path,
path: @path # referred to in #link_to_browse_code
}
if Feature.enabled?(:show_tags_on_commits_view, commit.project)
keyed_hash[:referenced_by] = tag_checksum(commit.referenced_by)
end
[
commit,
commit.author,
ref,
keyed_hash
{
merge_request: merge_request&.cache_key,
pipeline_status: commit.detailed_status_for(ref)&.cache_key,
xhr: request.xhr?,
controller: controller.controller_path,
path: @path, # referred to in #link_to_browse_code
referenced_by: tag_checksum(commit.referenced_by)
}
]
end

View File

@ -8,6 +8,8 @@ class ImportFailure < ApplicationRecord
validates :group, presence: true, unless: :project
validates :external_identifiers, json_schema: { filename: "import_failure_external_identifiers" }
scope :with_external_identifiers, -> { where.not(external_identifiers: {}) }
# Returns any `import_failures` for relations that were unrecoverable errors or failed after
# several retries. An import can be successful even if some relations failed to import correctly.
# A `retry_count` of 0 indicates that retries were either never attempted or already exhausted.
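# A plausible shape for the scope that comment describes; illustrative only,
# the real scope sits outside this hunk and may differ:
scope :hard_failures_by_correlation_id, ->(correlation_id) {
  where(correlation_id_value: correlation_id, retry_count: 0)
    .order(created_at: :desc)
}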

View File

@ -0,0 +1,178 @@
# frozen_string_literal: true
module Import
class GithubFailureEntity < Grape::Entity
expose :type do |failure|
failure.external_identifiers['object_type']
end
expose :title do |failure|
title(failure)
end
expose :provider_url do |failure|
build_url(failure)
end
expose :details do
expose :exception_class
expose :exception_message
expose :correlation_id_value
expose :source
expose :created_at
expose :github_identifiers do
with_options(expose_nil: false) do
expose(:object_type) { |failure| failure.external_identifiers['object_type'] }
expose(:id) { |failure| failure.external_identifiers['id'] }
expose(:db_id) { |failure| failure.external_identifiers['db_id'] }
expose(:iid) { |failure| failure.external_identifiers['iid'] }
expose(:title) { |failure| failure.external_identifiers['title'] }
expose(:login) { |failure| failure.external_identifiers['login'] }
expose(:event) { |failure| failure.external_identifiers['event'] }
expose(:merge_request_id) { |failure| failure.external_identifiers['merge_request_id'] }
expose(:merge_request_iid) { |failure| failure.external_identifiers['merge_request_iid'] }
expose(:requested_reviewers) { |failure| failure.external_identifiers['requested_reviewers'] }
expose(:note_id) { |failure| failure.external_identifiers['note_id'] }
expose(:noteable_type) { |failure| failure.external_identifiers['noteable_type'] }
expose(:noteable_iid) { |failure| failure.external_identifiers['noteable_iid'] }
expose(:issuable_type) { |failure| failure.external_identifiers['issuable_type'] }
expose(:issuable_iid) { |failure| failure.external_identifiers['issuable_iid'] }
expose(:review_id) { |failure| failure.external_identifiers['review_id'] }
expose(:tag) { |failure| failure.external_identifiers['tag'] }
expose(:oid) { |failure| failure.external_identifiers['oid'] }
expose(:size) { |failure| failure.external_identifiers['size'] }
end
end
end
private
# rubocop:disable Metrics/CyclomaticComplexity
def title(failure)
gh_identifiers = failure.external_identifiers
case gh_identifiers['object_type']
when 'pull_request', 'issue', 'label', 'milestone'
gh_identifiers['title']
when 'pull_request_merged_by'
format(s_("GithubImporter|Pull request %{pull_request_iid} merger"), pull_request_iid: gh_identifiers['iid'])
when 'pull_request_review_request'
format(
s_("GithubImporter|Pull request %{pull_request_iid} review request"),
pull_request_iid: gh_identifiers['merge_request_iid']
)
when 'pull_request_review'
format(s_("GithubImporter|Pull request review %{review_id}"), review_id: gh_identifiers['review_id'])
when 'collaborator'
gh_identifiers['login']
when 'protected_branch'
gh_identifiers['id']
when 'issue_event'
gh_identifiers['event']
when 'release'
gh_identifiers['tag']
when 'note'
format(
s_("GithubImporter|%{noteable_type} comment %{note_id}"),
noteable_type: gh_identifiers['noteable_type'],
note_id: gh_identifiers['note_id']
)
when 'diff_note'
format(s_("GithubImporter|Pull request review comment %{note_id}"), note_id: gh_identifiers['note_id'])
when 'issue_attachment'
format(s_("GithubImporter|Issue %{issue_iid} attachment"), issue_iid: gh_identifiers['noteable_iid'])
when 'merge_request_attachment'
format(
s_("GithubImporter|Merge request %{merge_request_iid} attachment"),
merge_request_iid: gh_identifiers['noteable_iid']
)
when 'release_attachment'
format(s_("GithubImporter|Release %{tag} attachment"), tag: gh_identifiers['tag'])
when 'note_attachment'
s_('GithubImporter|Note attachment')
when 'lfs_object'
gh_identifiers['oid'].to_s
else
''
end
end
def build_url(failure)
project = failure.project
gh_identifiers = failure.external_identifiers
github_repo = project.import_source
host = host(project.import_url)
return '' unless host
case gh_identifiers['object_type']
when 'pull_request', 'pull_request_merged_by'
# https://github.com/OWNER/REPO/pull/1
"#{host}/#{github_repo}/pull/#{gh_identifiers['iid']}"
when 'pull_request_review_request'
# https://github.com/OWNER/REPO/pull/1
"#{host}/#{github_repo}/pull/#{gh_identifiers['merge_request_iid']}"
when 'pull_request_review'
# https://github.com/OWNER/REPO/pull/1#pullrequestreview-1219894643
"#{host}/#{github_repo}/pull/#{gh_identifiers['merge_request_iid']}" \
"#pullrequestreview-#{gh_identifiers['review_id']}"
when 'issue'
# https://github.com/OWNER/REPO/issues/1
"#{host}/#{github_repo}/issues/#{gh_identifiers['iid']}"
when 'collaborator'
# https://github.com/USER_NAME
"#{host}/#{gh_identifiers['login']}"
when 'protected_branch'
branch = escape(gh_identifiers['id'])
# https://github.com/OWNER/REPO/tree/BRANCH_NAME
"#{host}/#{github_repo}/tree/#{branch}"
when 'issue_event'
# https://github.com/OWNER/REPO/issues/1#event-8356623615
"#{host}/#{github_repo}/issues/#{gh_identifiers['issuable_iid']}#event-#{gh_identifiers['id']}"
when 'label'
label = escape(gh_identifiers['title'])
# https://github.com/OWNER/REPO/labels/bug
"#{host}/#{github_repo}/labels/#{label}"
when 'milestone'
# https://github.com/OWNER/REPO/milestone/1
"#{host}/#{github_repo}/milestone/#{gh_identifiers['iid']}"
when 'release', 'release_attachment'
tag = escape(gh_identifiers['tag'])
# https://github.com/OWNER/REPO/releases/tag/v1.0
"#{host}/#{github_repo}/releases/tag/#{tag}"
when 'note'
# https://github.com/OWNER/REPO/issues/2#issuecomment-1480755368
"#{host}/#{github_repo}/issues/#{gh_identifiers['noteable_iid']}#issuecomment-#{gh_identifiers['note_id']}"
when 'diff_note'
# https://github.com/OWNER/REPO/pull/1#discussion_r1050098241
"#{host}/#{github_repo}/pull/#{gh_identifiers['noteable_iid']}#discussion_r#{gh_identifiers['note_id']}"
when 'issue_attachment'
# https://github.com/OWNER/REPO/issues/1
"#{host}/#{github_repo}/issues/#{gh_identifiers['noteable_iid']}"
when 'merge_request_attachment'
# https://github.com/OWNER/REPO/pull/1
"#{host}/#{github_repo}/pull/#{gh_identifiers['noteable_iid']}"
when 'lfs_object', 'note_attachment'
# we can't build a URL for LFS objects and note attachments
''
else
''
end
end
# rubocop:enable Metrics/CyclomaticComplexity
def host(uri)
parsed_uri = URI.parse(uri)
"#{parsed_uri.scheme}://#{parsed_uri.hostname}"
rescue URI::InvalidURIError
nil
end
def escape(str)
CGI.escape(str)
end
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
module Import
class GithubFailureSerializer < BaseSerializer
include WithPagination
entity Import::GithubFailureEntity
end
end

View File

@ -7,6 +7,7 @@
- if can?(current_user, :create_runner, @project)
= render Pajamas::ButtonComponent.new(href: new_project_runner_path(@project), variant: :confirm) do
= s_('Runners|New project runner')
#js-project-runner-registration-dropdown{ data: { registration_token: @project.runners_token, project_id: @project.id } }
- else
= _('Please contact an admin to create runners.')
= link_to _('Learn more.'), help_page_path('user/admin_area/settings/continuous_integration', anchor: 'restrict-runner-registration-by-all-users-in-an-instance'), target: '_blank', rel: 'noopener noreferrer'

View File

@ -12,10 +12,6 @@ module Gitlab
include GithubImport::Queue
include StageMethods
# technical debt: https://gitlab.com/gitlab-org/gitlab/issues/33991
sidekiq_options memory_killer_memory_growth_kb: ENV.fetch('MEMORY_KILLER_FINISH_IMPORT_WORKER_MEMORY_GROWTH_KB', 50).to_i
sidekiq_options memory_killer_max_memory_growth_kb: ENV.fetch('MEMORY_KILLER_FINISH_IMPORT_WORKER_MAX_MEMORY_GROWTH_KB', 200_000).to_i
# project - An instance of Project.
def import(_, project)
@project = project

View File

@ -12,10 +12,6 @@ module Gitlab
include GithubImport::Queue
include StageMethods
# technical debt: https://gitlab.com/gitlab-org/gitlab/issues/33991
sidekiq_options memory_killer_memory_growth_kb: ENV.fetch('MEMORY_KILLER_IMPORT_REPOSITORY_WORKER_MEMORY_GROWTH_KB', 50).to_i
sidekiq_options memory_killer_max_memory_growth_kb: ENV.fetch('MEMORY_KILLER_IMPORT_REPOSITORY_WORKER_MAX_MEMORY_GROWTH_KB', 300_000).to_i
# client - An instance of Gitlab::GithubImport::Client.
# project - An instance of Project.
def import(client, project)

View File

@ -14,10 +14,6 @@ class RepositoryImportWorker # rubocop:disable Scalability/IdempotentWorker
sidekiq_options status_expiration: Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION
worker_resource_boundary :memory
# technical debt: https://gitlab.com/gitlab-org/gitlab/issues/33991
sidekiq_options memory_killer_memory_growth_kb: ENV.fetch('MEMORY_KILLER_REPOSITORY_IMPORT_WORKER_MEMORY_GROWTH_KB', 50).to_i
sidekiq_options memory_killer_max_memory_growth_kb: ENV.fetch('MEMORY_KILLER_REPOSITORY_IMPORT_WORKER_MAX_MEMORY_GROWTH_KB', 300_000).to_i
def perform(project_id)
@project = Project.find_by_id(project_id)
return if project.nil? || !start_import?

View File

@ -1,8 +0,0 @@
---
name: show_tags_on_commits_view
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/111493
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/392003
milestone: '15.10'
type: development
group: group::source code
default_enabled: true

View File

@ -1,7 +0,0 @@
---
name: sidekiq_memory_killer_read_only_mode
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/98519
milestone: '15.5'
type: ops
group: group::application performance
default_enabled: false

View File

@ -32,8 +32,6 @@ queues_config_hash = Gitlab::Redis::Queues.params
queues_config_hash[:namespace] = Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE
enable_json_logs = Gitlab.config.sidekiq.log_format == 'json'
enable_sidekiq_memory_killer = ENV['SIDEKIQ_MEMORY_KILLER_MAX_RSS'].to_i.nonzero? &&
!Gitlab::Utils.to_boolean(ENV['GITLAB_MEMORY_WATCHDOG_ENABLED'], default: true)
Sidekiq.configure_server do |config|
config[:strict] = false
@ -70,8 +68,6 @@ Sidekiq.configure_server do |config|
# To cancel job, it requires `SIDEKIQ_MONITOR_WORKER=1` to enable notification channel
Gitlab::SidekiqDaemon::Monitor.instance.start
Gitlab::SidekiqDaemon::MemoryKiller.instance.start if enable_sidekiq_memory_killer
first_sidekiq_worker = !ENV['SIDEKIQ_WORKER_ID'] || ENV['SIDEKIQ_WORKER_ID'] == '0'
health_checks = Settings.monitoring.sidekiq_health_checks

View File

@ -22,6 +22,7 @@ namespace :import do
get :details
get :callback
get :realtime_changes
get :failures
post :cancel
post :cancel_all
get :counts

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddExternalIdentifiersIndexToImportFailures < Gitlab::Database::Migration[2.1]
disable_ddl_transaction!
INDEX_NAME = 'index_import_failures_on_external_identifiers'
def up
add_concurrent_index :import_failures, :external_identifiers, name: INDEX_NAME,
where: "external_identifiers != '{}'"
end
def down
remove_concurrent_index_by_name :import_failures, INDEX_NAME
end
end

View File

@ -0,0 +1 @@
36c538abaeb4239d5cc0424ebe2ac2f01c427d4acdfaf849f181d066f658899e

View File

@ -30845,6 +30845,8 @@ CREATE INDEX index_import_export_uploads_on_updated_at ON import_export_uploads
CREATE INDEX index_import_failures_on_correlation_id_value ON import_failures USING btree (correlation_id_value);
CREATE INDEX index_import_failures_on_external_identifiers ON import_failures USING btree (external_identifiers) WHERE (external_identifiers <> '{}'::jsonb);
CREATE INDEX index_import_failures_on_group_id_not_null ON import_failures USING btree (group_id) WHERE (group_id IS NOT NULL);
CREATE INDEX index_import_failures_on_project_id_and_correlation_id_value ON import_failures USING btree (project_id, correlation_id_value) WHERE (retry_count = 0);

View File

@ -56,9 +56,7 @@ Sidekiq memory limits are controlled using environment variables.
If jobs do not finish during that time, all currently running jobs are interrupted with a `SIGTERM` signal
sent to the Sidekiq process.
- `GITLAB_MEMORY_WATCHDOG_ENABLED`: enabled by default. Set the `GITLAB_MEMORY_WATCHDOG_ENABLED` to false, to use legacy
Daemon Sidekiq Memory Killer implementation used prior GitLab 15.9. Support for setting `GITLAB_MEMORY_WATCHDOG_ENABLED`
will be removed in GitLab 16.0.
- `GITLAB_MEMORY_WATCHDOG_ENABLED`: enabled by default. Set `GITLAB_MEMORY_WATCHDOG_ENABLED` to `false` to prevent the Watchdog from running.
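As a reference point, the check removed from `config/initializers/sidekiq.rb` in this commit evaluated the variable as follows (a minimal Ruby sketch reconstructed from that hunk):

```ruby
# Reconstructed from the initializer hunk above: the Watchdog counts as
# enabled unless the variable is explicitly set to a falsey value.
watchdog_enabled = Gitlab::Utils.to_boolean(
  ENV['GITLAB_MEMORY_WATCHDOG_ENABLED'], default: true
)
```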
### Monitor worker restarts

View File

@ -53,7 +53,6 @@ Example response:
## Get a single protected tag or wildcard protected tag
Gets a single protected tag or wildcard protected tag.
The pagination parameters `page` and `per_page` can be used to restrict the list of protected tags.
```plaintext
GET /projects/:id/protected_tags/:name
```
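For example, a minimal Ruby sketch of calling this endpoint; the host, project ID, tag name, and token are placeholders:

```ruby
require 'net/http'
require 'json'

# Placeholder values: project 5, protected tag "release-1-0".
uri = URI('https://gitlab.example.com/api/v4/projects/5/protected_tags/release-1-0')
request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN') # personal access token

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
puts JSON.pretty_generate(JSON.parse(response.body))
```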

View File

@ -31,7 +31,7 @@ To complete this tutorial:
## Create the Google Cloud Workload Identity Pool
[Create a new Google Cloud Workload Identity Pool](https://cloud.google.com/iam/docs/workload-identity-federation-with-other-clouds#oidc) with the following options:
[Create a new Google Cloud Workload Identity Pool](https://cloud.google.com/iam/docs/workload-identity-federation-with-other-clouds#create_the_workload_identity_pool_and_provider) with the following options:
- **Name**: Human-friendly name for the Workload Identity Pool, such as `GitLab`.
- **Pool ID**: Unique ID in the Google Cloud project for the Workload Identity Pool,
@ -81,7 +81,7 @@ However, you have no permissions on Google Cloud (_authorization_).
To grant your GitLab CI/CD job permissions on Google Cloud, you must:
1. [Create a Google Cloud Service Account](https://www.google.com/search?q=google+cloud+create+service+account).
1. [Create a Google Cloud Service Account](https://cloud.google.com/iam/docs/service-accounts-create).
You can use whatever name and ID you prefer.
1. [Grant IAM permissions](https://cloud.google.com/iam/docs/granting-changing-revoking-access) to your
service account on Google Cloud resources. These permissions vary significantly based on

View File

@ -90,6 +90,8 @@ In addition to `test_file_finder`, we have added several advanced mappings to de
- When a view gets changed, we try to find feature specs that would test that area of the code.
- [`ViewToJsMappings`](https://gitlab.com/gitlab-org/gitlab/-/blob/8d7dfb7c043adf931128088b9ffab3b4a39af6f5/tooling/lib/tooling/mappings/view_to_js_mappings.rb) ([#386719](https://gitlab.com/gitlab-org/gitlab/-/issues/386719))
- If a JS file is changed, we should try to identify the system specs that are covering this JS component.
- [`FindFilesUsingFeatureFlags`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/tooling/lib/tooling/find_files_using_feature_flags.rb) ([#407366](https://gitlab.com/gitlab-org/gitlab/-/issues/407366))
  - If a feature flag was changed, we find the Ruby files that reference that feature flag and add them to the list of changed files in the `detect-tests` CI job. The remainder of the job then detects which frontend/backend tests should run based on those changed files (see the sketch below).
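A rough sketch of that idea, assuming a simple substring match (the real logic lives in `tooling/lib/tooling/find_files_using_feature_flags.rb` and is more involved):

```ruby
# Simplified illustration: find Ruby files referencing a changed feature flag.
# The flag path, glob paths, and matching rule here are assumptions.
changed_flag = 'config/feature_flags/development/my_flag.yml'
flag_name = File.basename(changed_flag, '.yml')

referencing_files = Dir.glob('{app,ee,lib}/**/*.rb').select do |path|
  File.read(path).include?(flag_name)
end
```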
#### Exceptional cases

View File

@ -408,6 +408,7 @@ You can configure GitLab to use multiple SAML IdPs if:
- The `strategy_class` is explicitly set because it cannot be inferred from provider
name.
NOTE:
[SAML group memberships](#configure-users-based-on-saml-group-membership) and [Group Sync](../user/group/saml_sso/group_sync.md) do not support multiple IdPs. For more information, see [issue 386605](https://gitlab.com/gitlab-org/gitlab/-/issues/386605). This also includes `required_groups`, as mentioned in [issue 391926](https://gitlab.com/gitlab-org/gitlab/-/issues/391926).
To set up multiple SAML IdPs:
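For illustration, a trimmed `gitlab.rb` sketch with two providers. Names, labels, and URLs are placeholders, and a working setup needs more `args` (certificate fingerprint, issuer, and so on):

```ruby
gitlab_rails['omniauth_providers'] = [
  {
    name: 'saml', # first IdP
    label: 'IdP One',
    args: {
      strategy_class: 'OmniAuth::Strategies::SAML', # set explicitly for each provider
      idp_sso_target_url: 'https://idp-one.example.com/sso'
    }
  },
  {
    name: 'saml_2', # second IdP needs a unique provider name
    label: 'IdP Two',
    args: {
      strategy_class: 'OmniAuth::Strategies::SAML',
      idp_sso_target_url: 'https://idp-two.example.com/sso'
    }
  }
]
```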

View File

@ -76,8 +76,8 @@ Depending on your role, to manage your transfer usage you can [reduce Container
Projects on GitLab SaaS have a 10 GB storage limit on their Git repository and LFS storage.
After namespace-level storage limits are applied, the project limit is removed. A namespace has either a namespace-level storage limit or a project-level storage limit, but not both.
When a project's repository and LFS reaches the quota, the project is locked.
You cannot push changes to a locked project. To monitor the size of each
When a project's repository and LFS reach the quota, the project is set to a read-only state.
You cannot push changes to a read-only project. To monitor the size of each
repository in a namespace, including a breakdown for each project,
[view storage usage](#view-storage-usage). To allow a project's repository and LFS to exceed the free quota
you must purchase additional storage. For more details, see [Excess storage usage](#excess-storage-usage).
@ -85,45 +85,45 @@ you must purchase additional storage. For more details, see [Excess storage usag
### Excess storage usage
Excess storage usage is the amount that a project's repository and LFS exceeds the [project storage limit](#project-storage-limit). If no
purchased storage is available the project is locked. You cannot push changes to a locked project.
To unlock a project you must [purchase more storage](../subscriptions/gitlab_com/index.md#purchase-more-storage-and-transfer)
for the namespace. When the purchase is completed, locked projects are automatically unlocked. The
purchased storage is available, the project is set to a read-only state. You cannot push changes to a read-only project.
To remove the read-only state, you must [purchase more storage](../subscriptions/gitlab_com/index.md#purchase-more-storage-and-transfer)
for the namespace. When the purchase is completed, read-only projects are automatically restored to their standard state. The
amount of purchased storage available must always be greater than zero.
The **Storage** tab of the **Usage Quotas** page warns you of the following:
- Purchased storage available is running low.
- Projects that are at risk of being locked if purchased storage available is zero.
- Projects that are locked because purchased storage available is zero. Locked projects are
- Projects that are at risk of becoming read-only if purchased storage available is zero.
- Projects that are read-only because purchased storage available is zero. Read-only projects are
marked with an information icon (**{information-o}**) beside their name.
#### Excess storage example
The following example describes an excess storage scenario for a namespace:
| Repository | Storage used | Excess storage | Quota | Status |
|------------|--------------|----------------|--------|-------------------|
| Red | 10 GB | 0 GB | 10 GB | Locked **{lock}** |
| Blue | 8 GB | 0 GB | 10 GB | Not locked |
| Green | 10 GB | 0 GB | 10 GB | Locked **{lock}** |
| Yellow | 2 GB | 0 GB | 10 GB | Not locked |
| **Totals** | **30 GB** | **0 GB** | - | - |
| Repository | Storage used | Excess storage | Quota | Status |
|------------|--------------|----------------|--------|----------------------|
| Red | 10 GB | 0 GB | 10 GB | Read-only **{lock}** |
| Blue | 8 GB | 0 GB | 10 GB | Not read-only |
| Green | 10 GB | 0 GB | 10 GB | Read-only **{lock}** |
| Yellow | 2 GB | 0 GB | 10 GB | Not read-only |
| **Totals** | **30 GB** | **0 GB** | - | - |
The Red and Green projects are locked because their repositories and LFS have reached the quota. In this
The Red and Green projects are read-only because their repositories and LFS have reached the quota. In this
example, no additional storage has yet been purchased.
To unlock the Red and Green projects, 50 GB additional storage is purchased.
To remove the read-only state from the Red and Green projects, 50 GB additional storage is purchased.
Assuming the Green and Red projects' repositories and LFS grow past the 10 GB quota, the purchased storage
available decreases. All projects remain unlocked because 40 GB purchased storage is available:
available decreases. All projects remain writable because 40 GB purchased storage is available:
50 GB (purchased storage) - 10 GB (total excess storage used).
| Repository | Storage used | Excess storage | Quota | Status |
|------------|--------------|----------------|---------|-------------------|
| Red | 15 GB | 5 GB | 10 GB | Not locked |
| Blue | 14 GB | 4 GB | 10 GB | Not locked |
| Green | 11 GB | 1 GB | 10 GB | Not locked |
| Yellow | 5 GB | 0 GB | 10 GB | Not locked |
| Red | 15 GB | 5 GB | 10 GB | Not read-only |
| Blue | 14 GB | 4 GB | 10 GB | Not read-only |
| Green | 11 GB | 1 GB | 10 GB | Not read-only |
| Yellow | 5 GB | 0 GB | 10 GB | Not read-only |
| **Totals** | **45 GB** | **10 GB** | - | - |
## Namespace storage limit

View File

@ -8,7 +8,6 @@ module Gitlab
def initialize(review_request, project, client)
@review_request = review_request
@user_finder = UserFinder.new(project, client)
@issue_finder = IssuableFinder.new(project, client)
end
def execute

View File

@ -120,7 +120,7 @@ module Gitlab
def github_identifiers
{
note_id: note_id,
noteable_id: noteable_id,
noteable_iid: noteable_id,
noteable_type: noteable_type
}
end

View File

@ -22,7 +22,7 @@ module Gitlab
def github_identifiers
{
id: id,
iid: issuable_id,
issuable_iid: issuable_id,
event: event
}
end

View File

@ -76,7 +76,7 @@ module Gitlab
def github_identifiers
{
note_id: note_id,
noteable_id: noteable_id,
noteable_iid: noteable_id,
noteable_type: noteable_type
}
end

View File

@ -1,293 +0,0 @@
# frozen_string_literal: true
module Gitlab
module SidekiqDaemon
class MemoryKiller < Daemon
include ::Gitlab::Utils::StrongMemoize
# Today's 64-bit CPUs support at most 256 TB of memory, which is big enough.
MAX_MEMORY_KB = 256 * 1024 * 1024 * 1024
# RSS below `soft_limit_rss` is considered safe
SOFT_LIMIT_RSS_KB = ENV.fetch('SIDEKIQ_MEMORY_KILLER_MAX_RSS', 2000000).to_i
# RSS above `hard_limit_rss` will be stopped
HARD_LIMIT_RSS_KB = ENV.fetch('SIDEKIQ_MEMORY_KILLER_HARD_LIMIT_RSS', MAX_MEMORY_KB).to_i
# RSS in range (soft_limit_rss, hard_limit_rss) is allowed for GRACE_BALLOON_SECONDS
GRACE_BALLOON_SECONDS = ENV.fetch('SIDEKIQ_MEMORY_KILLER_GRACE_TIME', 15 * 60).to_i
# Check RSS every CHECK_INTERVAL_SECONDS, minimum 2 seconds
CHECK_INTERVAL_SECONDS = [ENV.fetch('SIDEKIQ_MEMORY_KILLER_CHECK_INTERVAL', 3).to_i, 2].max
# Give Sidekiq up to 30 seconds to allow existing jobs to finish after exceeding the limit
SHUTDOWN_TIMEOUT_SECONDS = ENV.fetch('SIDEKIQ_MEMORY_KILLER_SHUTDOWN_WAIT', 30).to_i
# Developers/admins should always set `memory_killer_max_memory_growth_kb` explicitly.
# When it is not set, default to 300 MB to stay on the safe side.
DEFAULT_MAX_MEMORY_GROWTH_KB = 300_000
# Phases of memory killer
PHASE = {
running: 1,
above_soft_limit: 2,
stop_fetching_new_jobs: 3,
shutting_down: 4,
killing_sidekiq: 5
}.freeze
def initialize
super
@enabled = true
@metrics = init_metrics
@sidekiq_daemon_monitor = Gitlab::SidekiqDaemon::Monitor.instance
end
private
def init_metrics
{
sidekiq_current_rss: ::Gitlab::Metrics.gauge(:sidekiq_current_rss, 'Current RSS of Sidekiq Worker'),
sidekiq_memory_killer_soft_limit_rss: ::Gitlab::Metrics.gauge(:sidekiq_memory_killer_soft_limit_rss, 'Current soft_limit_rss of Sidekiq Worker'),
sidekiq_memory_killer_hard_limit_rss: ::Gitlab::Metrics.gauge(:sidekiq_memory_killer_hard_limit_rss, 'Current hard_limit_rss of Sidekiq Worker'),
sidekiq_memory_killer_phase: ::Gitlab::Metrics.gauge(:sidekiq_memory_killer_phase, 'Current phase of Sidekiq Worker'),
sidekiq_memory_killer_running_jobs: ::Gitlab::Metrics.counter(:sidekiq_memory_killer_running_jobs_total, 'Current running jobs when limit was reached')
}
end
def refresh_state(phase)
@phase = PHASE.fetch(phase)
@current_rss = get_rss_kb
@soft_limit_rss = get_soft_limit_rss_kb
@hard_limit_rss = get_hard_limit_rss_kb
@memory_total = get_memory_total_kb
# track the current state as prometheus gauges
@metrics[:sidekiq_memory_killer_phase].set({}, @phase)
@metrics[:sidekiq_current_rss].set({}, @current_rss)
@metrics[:sidekiq_memory_killer_soft_limit_rss].set({}, @soft_limit_rss)
@metrics[:sidekiq_memory_killer_hard_limit_rss].set({}, @hard_limit_rss)
end
def run_thread
Sidekiq.logger.info(
class: self.class.to_s,
action: 'start',
pid: pid,
message: 'Starting Gitlab::SidekiqDaemon::MemoryKiller Daemon'
)
while enabled?
begin
sleep(CHECK_INTERVAL_SECONDS)
restart_sidekiq unless rss_within_range?
rescue StandardError => e
log_exception(e, __method__)
rescue Exception => e # rubocop:disable Lint/RescueException
log_exception(e, __method__)
raise e
end
end
ensure
Sidekiq.logger.warn(
class: self.class.to_s,
action: 'stop',
pid: pid,
message: 'Stopping Gitlab::SidekiqDaemon::MemoryKiller Daemon'
)
end
def log_exception(exception, method)
Sidekiq.logger.warn(
class: self.class.to_s,
pid: pid,
message: "Exception from #{method}: #{exception.message}"
)
end
def stop_working
@enabled = false
end
def enabled?
@enabled
end
def restart_sidekiq
return if Feature.enabled?(:sidekiq_memory_killer_read_only_mode, type: :ops)
# Tell Sidekiq to stop fetching new jobs
# We first send the signal and then wait the given time
# We also monitor the number of running jobs and allow an early restart
refresh_state(:stop_fetching_new_jobs)
signal_and_wait(SHUTDOWN_TIMEOUT_SECONDS, 'SIGTSTP', 'stop fetching new jobs')
return unless enabled?
# Tell Sidekiq to restart itself
# To be extra safe, wait `Sidekiq[:timeout] + 2` seconds before SIGKILL
refresh_state(:shutting_down)
signal_and_wait(Sidekiq[:timeout] + 2, 'SIGTERM', 'gracefully shut down')
return unless enabled?
# Ideally we should never reach this condition
# Wait for Sidekiq to shutdown gracefully, and kill it if it didn't
# Kill the whole pgroup, so we can be sure no children are left behind
refresh_state(:killing_sidekiq)
signal_pgroup('SIGKILL', 'die')
end
def rss_within_range?
refresh_state(:running)
deadline = Gitlab::Metrics::System.monotonic_time + GRACE_BALLOON_SECONDS.seconds
loop do
return true unless enabled?
# RSS above the hard limit should trigger a forcible shutdown right away
break if @current_rss > @hard_limit_rss
# RSS dropped below the soft limit
return true if @current_rss < @soft_limit_rss
# RSS did not drop below the soft limit within the deadline; restart
break if Gitlab::Metrics::System.monotonic_time > deadline
sleep(CHECK_INTERVAL_SECONDS)
refresh_state(:above_soft_limit)
log_rss_out_of_range(false)
end
# There are two ways to break from the loop:
# - above the hard limit, or
# - above the soft limit after the deadline
# When above the hard limit, we immediately move to `stop_fetching_new_jobs`,
# so ignore `above hard limit` and always set `above_soft_limit` here
refresh_state(:above_soft_limit)
log_rss_out_of_range
false
end
def log_rss_out_of_range(deadline_exceeded = true)
reason = out_of_range_description(@current_rss,
@hard_limit_rss,
@soft_limit_rss,
deadline_exceeded)
running_jobs = fetch_running_jobs
Sidekiq.logger.warn(
class: self.class.to_s,
pid: pid,
message: 'Sidekiq worker RSS out of range',
current_rss: @current_rss,
soft_limit_rss: @soft_limit_rss,
hard_limit_rss: @hard_limit_rss,
memory_total_kb: @memory_total,
reason: reason,
running_jobs: running_jobs)
increment_worker_counters(running_jobs, deadline_exceeded)
end
def increment_worker_counters(running_jobs, deadline_exceeded)
running_jobs.each do |job|
@metrics[:sidekiq_memory_killer_running_jobs].increment({ worker_class: job[:worker_class], deadline_exceeded: deadline_exceeded })
end
end
def fetch_running_jobs
@sidekiq_daemon_monitor.jobs.map do |jid, job|
{
jid: jid,
worker_class: job[:worker_class].name
}
end
end
def out_of_range_description(rss, hard_limit, soft_limit, deadline_exceeded)
if rss > hard_limit
"current_rss(#{rss}) > hard_limit_rss(#{hard_limit})"
elsif deadline_exceeded
"current_rss(#{rss}) > soft_limit_rss(#{soft_limit}) longer than GRACE_BALLOON_SECONDS(#{GRACE_BALLOON_SECONDS})"
else
"current_rss(#{rss}) > soft_limit_rss(#{soft_limit})"
end
end
def get_memory_total_kb
Gitlab::Metrics::System.memory_total / 1.kilobytes
end
def get_rss_kb
Gitlab::Metrics::System.memory_usage_rss[:total] / 1.kilobytes
end
def get_soft_limit_rss_kb
SOFT_LIMIT_RSS_KB + rss_increase_by_jobs
end
def get_hard_limit_rss_kb
HARD_LIMIT_RSS_KB
end
def signal_and_wait(time, signal, explanation)
Sidekiq.logger.warn(
class: self.class.to_s,
pid: pid,
signal: signal,
explanation: explanation,
wait_time: time,
message: "Sending signal and waiting"
)
Process.kill(signal, pid)
deadline = Gitlab::Metrics::System.monotonic_time + time
# Sleep until thread killed or timeout reached
sleep(CHECK_INTERVAL_SECONDS) while enabled? && Gitlab::Metrics::System.monotonic_time < deadline
end
def signal_pgroup(signal, explanation)
if Process.getpgrp == pid
pid_or_pgrp_str = 'PGRP'
pid_to_signal = 0
else
pid_or_pgrp_str = 'PID'
pid_to_signal = pid
end
Sidekiq.logger.warn(
class: self.class.to_s,
signal: signal,
pid: pid,
message: "sending Sidekiq worker #{pid_or_pgrp_str}-#{pid} #{signal} (#{explanation})"
)
Process.kill(signal, pid_to_signal)
end
def rss_increase_by_jobs
@sidekiq_daemon_monitor.jobs.sum do |_, job|
rss_increase_by_job(job)
end
end
def rss_increase_by_job(job)
memory_growth_kb = get_job_options(job, 'memory_killer_memory_growth_kb', 0).to_i
max_memory_growth_kb = get_job_options(job, 'memory_killer_max_memory_growth_kb', DEFAULT_MAX_MEMORY_GROWTH_KB).to_i
return 0 if memory_growth_kb == 0
time_elapsed = [Gitlab::Metrics::System.monotonic_time - job[:started_at], 0].max
[memory_growth_kb * time_elapsed, max_memory_growth_kb].min
end
def get_job_options(job, key, default)
job[:worker_class].sidekiq_options.fetch(key, default)
rescue StandardError
default
end
def pid
Process.pid
end
end
end
end

View File

@ -19881,6 +19881,9 @@ msgstr ""
msgid "Gitea Import"
msgstr ""
msgid "GithubImporter|%{noteable_type} comment %{note_id}"
msgstr ""
msgid "GithubImporter|Collaborators"
msgstr ""
@ -19896,12 +19899,21 @@ msgstr ""
msgid "GithubImporter|GitHub gists with more than 10 files must be manually migrated."
msgstr ""
msgid "GithubImporter|Issue %{issue_iid} attachment"
msgstr ""
msgid "GithubImporter|Issue links"
msgstr ""
msgid "GithubImporter|Merge request %{merge_request_iid} attachment"
msgstr ""
msgid "GithubImporter|Merge request links"
msgstr ""
msgid "GithubImporter|Note attachment"
msgstr ""
msgid "GithubImporter|Note links"
msgstr ""
@ -19917,9 +19929,24 @@ msgstr ""
msgid "GithubImporter|Please follow %{import_snippets_url} for more details."
msgstr ""
msgid "GithubImporter|Pull request %{pull_request_iid} merger"
msgstr ""
msgid "GithubImporter|Pull request %{pull_request_iid} review request"
msgstr ""
msgid "GithubImporter|Pull request review %{review_id}"
msgstr ""
msgid "GithubImporter|Pull request review comment %{note_id}"
msgstr ""
msgid "GithubImporter|Pull requests"
msgstr ""
msgid "GithubImporter|Release %{tag} attachment"
msgstr ""
msgid "GithubImporter|Release links"
msgstr ""
@ -44420,6 +44447,9 @@ msgstr ""
msgid "The import cannot be canceled because it is %{project_status}"
msgstr ""
msgid "The import is not complete."
msgstr ""
msgid "The import will time out after %{timeout}. For repositories that take longer, use a clone/push combination."
msgstr ""
@ -50422,6 +50452,9 @@ msgstr ""
msgid "Workspaces|Failed to create workspace"
msgstr ""
msgid "Workspaces|Failed to update workspace"
msgstr ""
msgid "Workspaces|GitLab Workspaces is a powerful collaborative platform that provides a comprehensive set of tools for software development teams to manage their entire development lifecycle."
msgstr ""
@ -50449,6 +50482,12 @@ msgstr ""
msgid "Workspaces|Stopping"
msgstr ""
msgid "Workspaces|Terminate"
msgstr ""
msgid "Workspaces|Terminating"
msgstr ""
msgid "Workspaces|To create a workspace for this project, an administrator must configure an agent for the project's group."
msgstr ""

View File

@ -106,12 +106,12 @@ function upload_gitlab_assets_package() {
# Fixtures functions
function check_fixtures_download() {
if [[ "${REUSE_FRONTEND_FIXTURES_ENABLED:-}" != "true" ]]; then
echoinfo "INFO: Reusing frontend fixtures is disabled due to REUSE_FRONTEND_FIXTURES_ENABLED=${REUSE_FRONTEND_FIXTURES_ENABLED}."
return 1
fi
# Note: Currently, reusing frontend fixtures is only supported in EE.
# Other projects will be supported through this issue in the future: https://gitlab.com/gitlab-org/gitlab/-/issues/393615.
if [[ "${CI_PROJECT_NAME}" != "gitlab" ]] || [[ "${CI_JOB_NAME}" =~ "foss" ]]; then
echoinfo "INFO: Reusing frontend fixtures is only supported in EE."
return 1
fi
@ -128,13 +128,13 @@ function check_fixtures_download() {
function check_fixtures_reuse() {
if [[ "${REUSE_FRONTEND_FIXTURES_ENABLED:-}" != "true" ]]; then
echoinfo "INFO: Reusing frontend fixtures is disabled due to REUSE_FRONTEND_FIXTURES_ENABLED=${REUSE_FRONTEND_FIXTURES_ENABLED}."
rm -rf "tmp/tests/frontend";
return 1
fi
# Note: Currently, reusing frontend fixtures is only supported in EE.
# Other projects will be supported through this issue in the future: https://gitlab.com/gitlab-org/gitlab/-/issues/393615.
if [[ "${CI_PROJECT_NAME}" != "gitlab" ]] || [[ "${CI_JOB_NAME}" =~ "foss" ]]; then
echoinfo "INFO: Reusing frontend fixtures is only supported in EE."
rm -rf "tmp/tests/frontend";
return 1
fi
@ -143,11 +143,12 @@ function check_fixtures_reuse() {
# Remove tmp/tests/frontend/ except on the first parallelized job so that dependent
# jobs don't download the exact same artifact multiple times.
if [[ -n "${CI_NODE_INDEX:-}" ]] && [[ "${CI_NODE_INDEX}" -ne 1 ]]; then
echoinfo "INFO: Removing 'tmp/tests/frontend' as we're on node ${CI_NODE_INDEX}.";
echoinfo "INFO: Removing 'tmp/tests/frontend' as we're on node ${CI_NODE_INDEX}. Dependent jobs will use the artifacts from the first parallelized job.";
rm -rf "tmp/tests/frontend";
fi
return 0
else
echoinfo "INFO: 'tmp/tests/frontend' does not exist.";
return 1
fi
}

View File

@ -385,6 +385,57 @@ RSpec.describe Import::GithubController, feature_category: :importers do
end
end
describe "GET failures" do
let_it_be_with_reload(:project) { create(:project, import_type: 'github', import_status: :started, import_source: 'example/repo', import_url: 'https://fake.url') }
let!(:import_failure) do
create(:import_failure,
project: project,
source: 'Gitlab::GithubImport::Importer::PullRequestImporter',
external_identifiers: { iid: 2, object_type: 'pull_request', title: 'My Pull Request' }
)
end
context 'when import is not finished' do
it 'returns bad_request' do
get :failures, params: { project_id: project.id }
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('The import is not complete.')
end
end
context 'when import is finished' do
before do
project.import_state.finish
end
it 'includes failure details in response' do
get :failures, params: { project_id: project.id }
expect(json_response[0]['type']).to eq('pull_request')
expect(json_response[0]['title']).to eq('My Pull Request')
expect(json_response[0]['provider_url']).to eq("https://fake.url/example/repo/pull/2")
expect(json_response[0]['details']['source']).to eq(import_failure.source)
end
it 'paginates records' do
issue_title = 'My Issue'
create(
:import_failure,
project: project,
source: 'Gitlab::GithubImport::Importer::IssueAndLabelLinksImporter',
external_identifiers: { iid: 3, object_type: 'issue', title: issue_title }
)
get :failures, params: { project_id: project.id, page: 2, per_page: 1 }
expect(json_response.size).to eq(1)
expect(json_response.first['title']).to eq(issue_title)
end
end
end
describe "POST cancel" do
let_it_be(:project) do
create(

View File

@ -108,22 +108,6 @@ RSpec.describe Projects::CommitsController, feature_category: :source_code_manag
get :show, params: { namespace_id: project.namespace, project_id: project, id: 'master/README.md' }
end
context 'when the show_tags_on_commits_view flag is disabled' do
let(:id) { "master/README.md" }
before do
stub_feature_flags(show_tags_on_commits_view: false)
end
it 'does not load tags' do
expect_next_instance_of(CommitCollection) do |collection|
expect(collection).not_to receive(:load_tags)
end
get :show, params: { namespace_id: project.namespace, project_id: project, id: id }
end
end
context "when the ref name ends in .atom" do
context "when the ref does not exist with the suffix" do
before do

View File

@ -21,5 +21,9 @@ FactoryBot.define do
trait :soft_failure do
retry_count { 1 }
end
trait :github_import_failure do
external_identifiers { { iid: 2, object_type: 'pull_request', title: 'Implement cool feature' } }
end
end
end

View File

@ -32,15 +32,30 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
end
describe "runners registration" do
before do
stub_feature_flags(create_runner_workflow_for_admin: false)
context 'when create_runner_workflow_for_namespace is enabled' do
before do
stub_feature_flags(create_runner_workflow_for_admin: true)
visit admin_runners_path
visit admin_runners_path
end
it_behaves_like "shows and resets runner registration token" do
let(:dropdown_text) { s_('Runners|Register an instance runner') }
let(:registration_token) { Gitlab::CurrentSettings.runners_registration_token }
end
end
it_behaves_like "shows and resets runner registration token" do
let(:dropdown_text) { s_('Runners|Register an instance runner') }
let(:registration_token) { Gitlab::CurrentSettings.runners_registration_token }
context 'when create_runner_workflow_for_namespace is disabled' do
before do
stub_feature_flags(create_runner_workflow_for_admin: false)
visit admin_runners_path
end
it_behaves_like "shows and resets runner registration token" do
let(:dropdown_text) { s_('Runners|Register an instance runner') }
let(:registration_token) { Gitlab::CurrentSettings.runners_registration_token }
end
end
end

View File

@ -21,19 +21,28 @@ RSpec.describe 'Runners', feature_category: :runner_fleet do
project.add_maintainer(user)
end
context 'when create_runner_workflow_for_namespace is enabled' do
context 'when create_runner_workflow_for_namespace is enabled', :js do
before do
stub_feature_flags(create_runner_workflow_for_namespace: [project.namespace])
visit project_runners_path(project)
end
it 'user can see a link with instructions on how to install GitLab Runner' do
visit project_runners_path(project)
expect(page).to have_link(s_('Runners|New project runner'), href: new_project_runner_path(project))
end
describe 'runner registration', :js do
it_behaves_like "shows and resets runner registration token" do
let(:dropdown_text) { s_('Runners|Register a project runner') }
let(:registration_token) { project.runners_token }
end
end
context 'when user views new runner page' do
context 'when create_runner_workflow_for_namespace is enabled', :js do
before do
stub_feature_flags(create_runner_workflow_for_namespace: [project.namespace])
visit new_project_runner_path(project)
end

View File

@ -12,7 +12,14 @@ import RegistrationDropdown from '~/ci/runner/components/registration/registrati
import RegistrationToken from '~/ci/runner/components/registration/registration_token.vue';
import RegistrationTokenResetDropdownItem from '~/ci/runner/components/registration/registration_token_reset_dropdown_item.vue';
import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '~/ci/runner/constants';
import {
INSTANCE_TYPE,
GROUP_TYPE,
PROJECT_TYPE,
I18N_REGISTER_INSTANCE_TYPE,
I18N_REGISTER_GROUP_TYPE,
I18N_REGISTER_PROJECT_TYPE,
} from '~/ci/runner/constants';
import getRunnerPlatformsQuery from '~/vue_shared/components/runner_instructions/graphql/get_runner_platforms.query.graphql';
import getRunnerSetupInstructionsQuery from '~/vue_shared/components/runner_instructions/graphql/get_runner_setup.query.graphql';
@ -81,13 +88,13 @@ describe('RegistrationDropdown', () => {
it.each`
type | text
${INSTANCE_TYPE} | ${s__('Runners|Register an instance runner')}
${GROUP_TYPE} | ${s__('Runners|Register a group runner')}
${PROJECT_TYPE} | ${s__('Runners|Register a project runner')}
`('Dropdown text for type $type is "$text"', () => {
createComponent({ props: { type: INSTANCE_TYPE } }, mountExtended);
${INSTANCE_TYPE} | ${I18N_REGISTER_INSTANCE_TYPE}
${GROUP_TYPE} | ${I18N_REGISTER_GROUP_TYPE}
${PROJECT_TYPE} | ${I18N_REGISTER_PROJECT_TYPE}
`('Dropdown text for type $type is "$text"', ({ type, text }) => {
createComponent({ props: { type } }, mountExtended);
expect(wrapper.text()).toContain('Register an instance runner');
expect(wrapper.text()).toContain(text);
});
it('Passes attributes to dropdown', () => {
@ -214,7 +221,7 @@ describe('RegistrationDropdown', () => {
{ createRunnerWorkflowForAdmin: true },
{ createRunnerWorkflowForNamespace: true },
])('When showing a "deprecated" warning', (glFeatures) => {
it('Passes deprecated variant props and attributes to dropdown', () => {
it('passes deprecated variant props and attributes to dropdown', () => {
createComponent({
provide: { glFeatures },
});
@ -230,6 +237,17 @@ describe('RegistrationDropdown', () => {
});
});
it.each`
type | text
${INSTANCE_TYPE} | ${I18N_REGISTER_INSTANCE_TYPE}
${GROUP_TYPE} | ${I18N_REGISTER_GROUP_TYPE}
${PROJECT_TYPE} | ${I18N_REGISTER_PROJECT_TYPE}
`('dropdown text for type $type is "$text"', ({ type, text }) => {
createComponent({ props: { type } }, mountExtended);
expect(wrapper.text()).toContain(text);
});
it('shows warning text', () => {
createComponent(
{
@ -243,7 +261,7 @@ describe('RegistrationDropdown', () => {
expect(text.exists()).toBe(true);
});
it('button shows only ellipsis icon', () => {
it('button shows ellipsis icon', () => {
createComponent(
{
provide: { glFeatures },
@ -251,7 +269,6 @@ describe('RegistrationDropdown', () => {
mountExtended,
);
expect(findDropdownBtn().text()).toBe('');
expect(findDropdownBtn().findComponent(GlIcon).props('name')).toBe('ellipsis_v');
expect(findDropdownBtn().findAllComponents(GlIcon)).toHaveLength(1);
});

View File

@ -5,14 +5,12 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import WorkItemCreatedUpdated from '~/work_items/components/work_item_created_updated.vue';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import { workItemResponseFactory, mockAssignees } from '../mock_data';
import { workItemByIidResponseFactory, mockAssignees } from '../mock_data';
describe('WorkItemCreatedUpdated component', () => {
let wrapper;
let successHandler;
let successByIidHandler;
Vue.use(VueApollo);
@ -21,39 +19,17 @@ describe('WorkItemCreatedUpdated component', () => {
const findCreatedAtText = () => findCreatedAt().text().replace(/\s+/g, ' ');
const createComponent = async ({
workItemId = 'gid://gitlab/WorkItem/1',
workItemIid = '1',
fetchByIid = false,
author = null,
updatedAt,
} = {}) => {
const workItemQueryResponse = workItemResponseFactory({
const createComponent = async ({ workItemIid = '1', author = null, updatedAt } = {}) => {
const workItemQueryResponse = workItemByIidResponseFactory({
author,
updatedAt,
});
const byIidResponse = {
data: {
workspace: {
id: 'gid://gitlab/Project/1',
workItems: {
nodes: [workItemQueryResponse.data.workItem],
},
},
},
};
successHandler = jest.fn().mockResolvedValue(workItemQueryResponse);
successByIidHandler = jest.fn().mockResolvedValue(byIidResponse);
const handlers = [
[workItemQuery, successHandler],
[workItemByIidQuery, successByIidHandler],
];
wrapper = shallowMount(WorkItemCreatedUpdated, {
apolloProvider: createMockApollo(handlers),
propsData: { workItemId, workItemIid, fetchByIid, fullPath: '/some/project' },
apolloProvider: createMockApollo([[workItemByIidQuery, successHandler]]),
propsData: { workItemIid, fullPath: '/some/project' },
stubs: {
GlAvatarLink,
GlSprintf,
@ -63,42 +39,34 @@ describe('WorkItemCreatedUpdated component', () => {
await waitForPromises();
};
describe.each([true, false])('fetchByIid is %s', (fetchByIid) => {
describe('work item id and iid undefined', () => {
beforeEach(async () => {
await createComponent({ workItemId: null, workItemIid: null, fetchByIid });
});
it('skips the work item query when workItemIid is not defined', async () => {
await createComponent({ workItemIid: null });
it('skips the work item query', () => {
expect(successHandler).not.toHaveBeenCalled();
expect(successByIidHandler).not.toHaveBeenCalled();
});
});
expect(successHandler).not.toHaveBeenCalled();
});
it('shows author name and link', async () => {
const author = mockAssignees[0];
it('shows author name and link', async () => {
const author = mockAssignees[0];
await createComponent({ author });
await createComponent({ fetchByIid, author });
expect(findCreatedAtText()).toBe(`Created by ${author.name}`);
});
expect(findCreatedAtText()).toEqual(`Created by ${author.name}`);
});
it('shows created time when author is null', async () => {
await createComponent({ author: null });
it('shows created time when author is null', async () => {
await createComponent({ fetchByIid, author: null });
expect(findCreatedAtText()).toBe('Created');
});
expect(findCreatedAtText()).toEqual('Created');
});
it('shows updated time', async () => {
await createComponent();
it('shows updated time', async () => {
await createComponent({ fetchByIid });
expect(findUpdatedAt().exists()).toBe(true);
});
expect(findUpdatedAt().exists()).toBe(true);
});
it('does not show updated time for new work items', async () => {
await createComponent({ updatedAt: null });
it('does not show updated time for new work items', async () => {
await createComponent({ fetchByIid, updatedAt: null });
expect(findUpdatedAt().exists()).toBe(false);
});
expect(findUpdatedAt().exists()).toBe(false);
});
});

View File

@ -6,7 +6,6 @@ import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import labelSearchQuery from '~/sidebar/components/labels/labels_select_widget/graphql/project_labels.query.graphql';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import workItemLabelsSubscription from 'ee_else_ce/work_items/graphql/work_item_labels.subscription.graphql';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
@ -15,11 +14,9 @@ import { i18n, I18N_WORK_ITEM_ERROR_FETCHING_LABELS } from '~/work_items/constan
import {
projectLabelsResponse,
mockLabels,
workItemQueryResponse,
workItemResponseFactory,
workItemByIidResponseFactory,
updateWorkItemMutationResponse,
workItemLabelsSubscriptionResponse,
projectWorkItemResponse,
} from '../mock_data';
Vue.use(VueApollo);
@ -34,8 +31,9 @@ describe('WorkItemLabels component', () => {
const findEmptyState = () => wrapper.findByTestId('empty-state');
const findLabelsTitle = () => wrapper.findByTestId('labels-title');
const workItemQuerySuccess = jest.fn().mockResolvedValue(workItemQueryResponse);
const workItemByIidResponseHandler = jest.fn().mockResolvedValue(projectWorkItemResponse);
const workItemQuerySuccess = jest
.fn()
.mockResolvedValue(workItemByIidResponseFactory({ labels: null }));
const successSearchQueryHandler = jest.fn().mockResolvedValue(projectLabelsResponse);
const successUpdateWorkItemMutationHandler = jest
.fn()
@ -48,27 +46,22 @@ describe('WorkItemLabels component', () => {
workItemQueryHandler = workItemQuerySuccess,
searchQueryHandler = successSearchQueryHandler,
updateWorkItemMutationHandler = successUpdateWorkItemMutationHandler,
fetchByIid = false,
queryVariables = { id: workItemId },
queryVariables = { iid: '1' },
} = {}) => {
const apolloProvider = createMockApollo([
[workItemQuery, workItemQueryHandler],
[labelSearchQuery, searchQueryHandler],
[updateWorkItemMutation, updateWorkItemMutationHandler],
[workItemLabelsSubscription, subscriptionHandler],
[workItemByIidQuery, workItemByIidResponseHandler],
]);
wrapper = mountExtended(WorkItemLabels, {
apolloProvider: createMockApollo([
[workItemByIidQuery, workItemQueryHandler],
[labelSearchQuery, searchQueryHandler],
[updateWorkItemMutation, updateWorkItemMutationHandler],
[workItemLabelsSubscription, subscriptionHandler],
]),
propsData: {
workItemId,
canUpdate,
fullPath: 'test-project-path',
queryVariables,
fetchByIid,
},
attachTo: document.body,
apolloProvider,
});
};
@ -186,7 +179,7 @@ describe('WorkItemLabels component', () => {
});
it('adds new labels to the end', async () => {
const response = workItemResponseFactory({ labels: [mockLabels[1]] });
const response = workItemByIidResponseFactory({ labels: [mockLabels[1]] });
const workItemQueryHandler = jest.fn().mockResolvedValue(response);
createComponent({
workItemQueryHandler,
@ -263,24 +256,15 @@ describe('WorkItemLabels component', () => {
});
});
it('calls the global ID work item query when `fetchByIid` prop is false', async () => {
createComponent({ fetchByIid: false });
it('calls the work item query', async () => {
createComponent();
await waitForPromises();
expect(workItemQuerySuccess).toHaveBeenCalled();
expect(workItemByIidResponseHandler).not.toHaveBeenCalled();
});
it('calls the IID work item query when when `fetchByIid` prop is true', async () => {
createComponent({ fetchByIid: true });
await waitForPromises();
expect(workItemQuerySuccess).not.toHaveBeenCalled();
expect(workItemByIidResponseHandler).toHaveBeenCalled();
});
it('skips calling the handlers when missing the needed queryVariables', async () => {
createComponent({ queryVariables: {}, fetchByIid: false });
it('skips calling the work item query when missing queryVariables', async () => {
createComponent({ queryVariables: {} });
await waitForPromises();
expect(workItemQuerySuccess).not.toHaveBeenCalled();

View File

@ -9,27 +9,20 @@ import {
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import WorkItemMilestone from '~/work_items/components/work_item_milestone.vue';
import { resolvers, config } from '~/graphql_shared/issuable_client';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mockTracking } from 'helpers/tracking_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import projectMilestonesQuery from '~/sidebar/queries/project_milestones.query.graphql';
import {
projectMilestonesResponse,
projectMilestonesResponseWithNoMilestones,
mockMilestoneWidgetResponse,
workItemResponseFactory,
updateWorkItemMutationErrorResponse,
workItemMilestoneSubscriptionResponse,
projectWorkItemResponse,
updateWorkItemMutationResponse,
} from 'jest/work_items/mock_data';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import workItemMilestoneSubscription from '~/work_items/graphql/work_item_milestone.subscription.graphql';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
} from '../mock_data';
describe('WorkItemMilestone component', () => {
Vue.use(VueApollo);
@ -52,72 +45,34 @@ describe('WorkItemMilestone component', () => {
const findDropdownTextAtIndex = (index) => findDropdownTexts().at(index);
const findInputGroup = () => wrapper.findComponent(GlFormGroup);
const workItemQueryResponse = workItemResponseFactory({ canUpdate: true, canDelete: true });
const workItemQueryHandler = jest.fn().mockResolvedValue(workItemQueryResponse);
const workItemByIidResponseHandler = jest.fn().mockResolvedValue(projectWorkItemResponse);
const networkResolvedValue = new Error();
const successSearchQueryHandler = jest.fn().mockResolvedValue(projectMilestonesResponse);
const successSearchWithNoMatchingMilestones = jest
.fn()
.mockResolvedValue(projectMilestonesResponseWithNoMilestones);
const milestoneSubscriptionHandler = jest
.fn()
.mockResolvedValue(workItemMilestoneSubscriptionResponse);
const successUpdateWorkItemMutationHandler = jest
.fn()
.mockResolvedValue(updateWorkItemMutationResponse);
const showDropdown = () => {
findDropdown().vm.$emit('shown');
};
const hideDropdown = () => {
findDropdown().vm.$emit('hide');
};
const showDropdown = () => findDropdown().vm.$emit('shown');
const hideDropdown = () => findDropdown().vm.$emit('hide');
const createComponent = ({
canUpdate = true,
milestone = mockMilestoneWidgetResponse,
searchQueryHandler = successSearchQueryHandler,
fetchByIid = false,
mutationHandler = successUpdateWorkItemMutationHandler,
} = {}) => {
const apolloProvider = createMockApollo(
[
[workItemQuery, workItemQueryHandler],
[workItemMilestoneSubscription, milestoneSubscriptionHandler],
wrapper = shallowMountExtended(WorkItemMilestone, {
apolloProvider: createMockApollo([
[projectMilestonesQuery, searchQueryHandler],
[updateWorkItemMutation, mutationHandler],
[workItemByIidQuery, workItemByIidResponseHandler],
],
resolvers,
{
typePolicies: config.cacheConfig.typePolicies,
},
);
apolloProvider.clients.defaultClient.writeQuery({
query: workItemQuery,
variables: {
id: workItemId,
},
data: workItemQueryResponse.data,
});
wrapper = shallowMountExtended(WorkItemMilestone, {
apolloProvider,
]),
propsData: {
canUpdate,
workItemMilestone: milestone,
workItemId,
workItemType,
fullPath,
queryVariables: {
id: workItemId,
},
fetchByIid,
},
stubs: {
GlDropdown,
@ -244,7 +199,7 @@ describe('WorkItemMilestone component', () => {
it.each`
errorType | expectedErrorMessage | mockValue | resolveFunction
${'graphql error'} | ${'Something went wrong while updating the task. Please try again.'} | ${updateWorkItemMutationErrorResponse} | ${'mockResolvedValue'}
${'network error'} | ${'Something went wrong while updating the task. Please try again.'} | ${networkResolvedValue} | ${'mockRejectedValue'}
${'network error'} | ${'Something went wrong while updating the task. Please try again.'} | ${new Error()} | ${'mockRejectedValue'}
`(
'emits an error when there is a $errorType',
async ({ mockValue, expectedErrorMessage, resolveFunction }) => {

View File

@ -702,16 +702,16 @@ RSpec.describe ApplicationHelper do
expect(helper.stylesheet_link_tag_defer('test')).to eq( '<link rel="stylesheet" media="print" href="/stylesheets/test.css" />')
end
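# stylesheet_link_tag_defer presumably emits media="print" so the browser downloads the CSS without blocking rendering, then swaps to the real media type once loaded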
it 'uses regular stylesheet when feature flag disabled' do
it 'uses regular stylesheet when feature flag enabled' do
stub_feature_flags(remove_startup_css: true)
expect(helper.stylesheet_link_tag_defer('test')).to eq( '<link rel="stylesheet" media="screen" href="/stylesheets/test.css" />')
expect(helper.stylesheet_link_tag_defer('test')).to eq( '<link rel="stylesheet" media="all" href="/stylesheets/test.css" />')
end
it 'uses regular stylesheet when no_startup_css param present' do
allow(helper.controller).to receive(:params).and_return({ no_startup_css: '' })
expect(helper.stylesheet_link_tag_defer('test')).to eq( '<link rel="stylesheet" media="screen" href="/stylesheets/test.css" />')
expect(helper.stylesheet_link_tag_defer('test')).to eq( '<link rel="stylesheet" media="all" href="/stylesheets/test.css" />')
end
end

View File

@ -341,27 +341,6 @@ RSpec.describe CommitsHelper do
])
end
context 'when the show_tags_on_commits_view flag is disabled' do
before do
stub_feature_flags(show_tags_on_commits_view: false)
end
specify do
expect(subject).to eq([
commit,
commit.author,
ref,
{
merge_request: merge_request.cache_key,
pipeline_status: pipeline.cache_key,
xhr: true,
controller: "commits",
path: current_path
}
])
end
end
describe "final cache key output" do
subject { ActiveSupport::Cache.expand_cache_key(cache_key) }

View File

@ -1,530 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
def create_background_migration_job(ids, status)
proper_status = case status
when :pending
Gitlab::Database::BackgroundMigrationJob.statuses['pending']
when :succeeded
Gitlab::Database::BackgroundMigrationJob.statuses['succeeded']
else
raise ArgumentError
end
background_migration_jobs.create!(
class_name: 'RecalculateVulnerabilitiesOccurrencesUuid',
arguments: Array(ids),
status: proper_status,
created_at: Time.now.utc
)
end
RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid, :suppress_gitlab_schemas_validate_connection, schema: 20211202041233 do
let(:background_migration_jobs) { table(:background_migration_jobs) }
let(:pending_jobs) { background_migration_jobs.where(status: Gitlab::Database::BackgroundMigrationJob.statuses['pending']) }
let(:succeeded_jobs) { background_migration_jobs.where(status: Gitlab::Database::BackgroundMigrationJob.statuses['succeeded']) }
let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let(:users) { table(:users) }
let(:user) { create_user! }
let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
let(:scanners) { table(:vulnerability_scanners) }
let(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
let(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
let(:vulnerabilities) { table(:vulnerabilities) }
let(:vulnerability_findings) { table(:vulnerability_occurrences) }
let(:vulnerability_finding_pipelines) { table(:vulnerability_occurrence_pipelines) }
let(:vulnerability_finding_signatures) { table(:vulnerability_finding_signatures) }
let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
let(:identifier_1) { 'identifier-1' }
let!(:vulnerability_identifier) do
vulnerability_identifiers.create!(
project_id: project.id,
external_type: identifier_1,
external_id: identifier_1,
fingerprint: Gitlab::Database::ShaAttribute.serialize('ff9ef548a6e30a0462795d916f3f00d1e2b082ca'),
name: 'Identifier 1')
end
let(:identifier_2) { 'identifier-2' }
let!(:vulnerability_identfier2) do
vulnerability_identifiers.create!(
project_id: project.id,
external_type: identifier_2,
external_id: identifier_2,
fingerprint: Gitlab::Database::ShaAttribute.serialize('4299e8ddd819f9bde9cfacf45716724c17b5ddf7'),
name: 'Identifier 2')
end
let(:identifier_3) { 'identifier-3' }
let!(:vulnerability_identifier3) do
vulnerability_identifiers.create!(
project_id: project.id,
external_type: identifier_3,
external_id: identifier_3,
fingerprint: Gitlab::Database::ShaAttribute.serialize('8e91632f9c6671e951834a723ee221c44cc0d844'),
name: 'Identifier 3')
end
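# Known UUIDs: the migration should rewrite the random UUIDv4 to the deterministic, name-based UUIDv5 and leave existing UUIDv5 values unchanged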
let(:known_uuid_v4) { "b3cc2518-5446-4dea-871c-89d5e999c1ac" }
let(:known_uuid_v5) { "05377088-dc26-5161-920e-52a7159fdaa1" }
let(:desired_uuid_v5) { "f3e9a23f-9181-54bf-a5ab-c5bc7a9b881a" }
subject { described_class.new.perform(start_id, end_id) }
context "when finding has a UUIDv4" do
before do
@uuid_v4 = create_finding!(
vulnerability_id: nil,
project_id: project.id,
scanner_id: scanner2.id,
primary_identifier_id: vulnerability_identfier2.id,
report_type: 0, # "sast"
location_fingerprint: Gitlab::Database::ShaAttribute.serialize("fa18f432f1d56675f4098d318739c3cd5b14eb3e"),
uuid: known_uuid_v4
)
end
let(:start_id) { @uuid_v4.id }
let(:end_id) { @uuid_v4.id }
it "replaces it with UUIDv5" do
expect(vulnerability_findings.pluck(:uuid)).to match_array([known_uuid_v4])
subject
expect(vulnerability_findings.pluck(:uuid)).to match_array([desired_uuid_v5])
end
it 'logs recalculation' do
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
expect(instance).to receive(:info).twice
end
subject
end
end
context "when finding has a UUIDv5" do
before do
@uuid_v5 = create_finding!(
vulnerability_id: nil,
project_id: project.id,
scanner_id: scanner.id,
primary_identifier_id: vulnerability_identifier.id,
report_type: 0, # "sast"
location_fingerprint: Gitlab::Database::ShaAttribute.serialize("838574be0210968bf6b9f569df9c2576242cbf0a"),
uuid: known_uuid_v5
)
end
let(:start_id) { @uuid_v5.id }
let(:end_id) { @uuid_v5.id }
it "stays the same" do
expect(vulnerability_findings.pluck(:uuid)).to match_array([known_uuid_v5])
subject
expect(vulnerability_findings.pluck(:uuid)).to match_array([known_uuid_v5])
end
end
context 'if a duplicate UUID would be generated' do # rubocop: disable RSpec/MultipleMemoizedHelpers
let(:v1) do
create_vulnerability!(
project_id: project.id,
author_id: user.id
)
end
let!(:finding_with_incorrect_uuid) do
create_finding!(
vulnerability_id: v1.id,
project_id: project.id,
scanner_id: scanner.id,
primary_identifier_id: vulnerability_identifier.id,
report_type: 0, # "sast"
location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
uuid: 'bd95c085-71aa-51d7-9bb6-08ae669c262e'
)
end
let(:v2) do
create_vulnerability!(
project_id: project.id,
author_id: user.id
)
end
let!(:finding_with_correct_uuid) do
create_finding!(
vulnerability_id: v2.id,
project_id: project.id,
primary_identifier_id: vulnerability_identifier.id,
scanner_id: scanner2.id,
report_type: 0, # "sast"
location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
uuid: '91984483-5efe-5215-b471-d524ac5792b1'
)
end
let(:v3) do
create_vulnerability!(
project_id: project.id,
author_id: user.id
)
end
let!(:finding_with_incorrect_uuid2) do
create_finding!(
vulnerability_id: v3.id,
project_id: project.id,
scanner_id: scanner.id,
primary_identifier_id: vulnerability_identfier2.id,
report_type: 0, # "sast"
location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
uuid: '00000000-1111-2222-3333-444444444444'
)
end
let(:v4) do
create_vulnerability!(
project_id: project.id,
author_id: user.id
)
end
let!(:finding_with_correct_uuid2) do
create_finding!(
vulnerability_id: v4.id,
project_id: project.id,
scanner_id: scanner2.id,
primary_identifier_id: vulnerability_identfier2.id,
report_type: 0, # "sast"
location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
uuid: '1edd751e-ef9a-5391-94db-a832c8635bfc'
)
end
let!(:finding_with_incorrect_uuid3) do
create_finding!(
vulnerability_id: nil,
project_id: project.id,
scanner_id: scanner.id,
primary_identifier_id: vulnerability_identifier3.id,
report_type: 0, # "sast"
location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
uuid: '22222222-3333-4444-5555-666666666666'
)
end
let!(:duplicate_not_in_the_same_batch) do
create_finding!(
id: 99999,
vulnerability_id: nil,
project_id: project.id,
scanner_id: scanner2.id,
primary_identifier_id: vulnerability_identifier3.id,
report_type: 0, # "sast"
location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
uuid: '4564f9d5-3c6b-5cc3-af8c-7c25285362a7'
)
end
let(:start_id) { finding_with_incorrect_uuid.id }
let(:end_id) { finding_with_incorrect_uuid3.id }
before do
4.times do
create_finding_pipeline!(project_id: project.id, finding_id: finding_with_incorrect_uuid.id)
create_finding_pipeline!(project_id: project.id, finding_id: finding_with_correct_uuid.id)
create_finding_pipeline!(project_id: project.id, finding_id: finding_with_incorrect_uuid2.id)
create_finding_pipeline!(project_id: project.id, finding_id: finding_with_correct_uuid2.id)
end
end
it 'drops duplicates and related records', :aggregate_failures do
expect(vulnerability_findings.pluck(:id)).to match_array(
[
finding_with_correct_uuid.id,
finding_with_incorrect_uuid.id,
finding_with_correct_uuid2.id,
finding_with_incorrect_uuid2.id,
finding_with_incorrect_uuid3.id,
duplicate_not_in_the_same_batch.id
])
expect { subject }.to change(vulnerability_finding_pipelines, :count).from(16).to(8)
.and change(vulnerability_findings, :count).from(6).to(3)
.and change(vulnerabilities, :count).from(4).to(2)
expect(vulnerability_findings.pluck(:id)).to match_array([finding_with_incorrect_uuid.id, finding_with_incorrect_uuid2.id, finding_with_incorrect_uuid3.id])
end
context 'if there are conflicting UUID values within the batch' do # rubocop: disable RSpec/MultipleMemoizedHelpers
let(:end_id) { finding_with_broken_data_integrity.id }
let(:vulnerability_5) { create_vulnerability!(project_id: project.id, author_id: user.id) }
let(:different_project) { table(:projects).create!(namespace_id: namespace.id) }
let!(:identifier_with_broken_data_integrity) do
vulnerability_identifiers.create!(
project_id: different_project.id,
external_type: identifier_2,
external_id: identifier_2,
fingerprint: Gitlab::Database::ShaAttribute.serialize('4299e8ddd819f9bde9cfacf45716724c17b5ddf7'),
name: 'Identifier 2')
end
let(:finding_with_broken_data_integrity) do
create_finding!(
vulnerability_id: vulnerability_5,
project_id: project.id,
scanner_id: scanner.id,
primary_identifier_id: identifier_with_broken_data_integrity.id,
report_type: 0, # "sast"
location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
uuid: SecureRandom.uuid
)
end
it 'deletes the conflicting record' do
expect { subject }.to change { vulnerability_findings.find_by_id(finding_with_broken_data_integrity.id) }.to(nil)
end
end
context 'if a conflicting UUID is found during the migration' do # rubocop:disable RSpec/MultipleMemoizedHelpers
let(:finding_class) { Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid::VulnerabilitiesFinding }
let(:uuid) { '4564f9d5-3c6b-5cc3-af8c-7c25285362a7' }
before do
exception = ActiveRecord::RecordNotUnique.new("(uuid)=(#{uuid})")
call_count = 0
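# Raise on the first BulkUpdate call only, so the migration exercises its retry path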
allow(::Gitlab::Database::BulkUpdate).to receive(:execute) do
call_count += 1
call_count.eql?(1) ? raise(exception) : {}
end
allow(finding_class).to receive(:find_by).with(uuid: uuid).and_return(duplicate_not_in_the_same_batch)
end
it 'retries the recalculation' do
subject
expect(Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid::VulnerabilitiesFinding)
.to have_received(:find_by).with(uuid: uuid).once
end
it 'logs the conflict' do
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
expect(instance).to receive(:info).exactly(6).times
end
subject
end
it 'marks the job as done' do
create_background_migration_job([start_id, end_id], :pending)
subject
expect(pending_jobs.count).to eq(0)
expect(succeeded_jobs.count).to eq(1)
end
end
it 'logs an exception if a different uniqueness problem was found' do
exception = ActiveRecord::RecordNotUnique.new("Totally not a UUID uniqueness problem")
allow(::Gitlab::Database::BulkUpdate).to receive(:execute).and_raise(exception)
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
subject
expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_exception).with(exception).once
end
it 'logs a duplicate found message' do
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
expect(instance).to receive(:info).exactly(3).times
end
subject
end
end
context 'when finding has a signature' do
before do
@f1 = create_finding!(
vulnerability_id: nil,
project_id: project.id,
scanner_id: scanner.id,
primary_identifier_id: vulnerability_identifier.id,
report_type: 0, # "sast"
location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
uuid: 'd15d774d-e4b1-5a1b-929b-19f2a53e35ec'
)
vulnerability_finding_signatures.create!(
finding_id: @f1.id,
algorithm_type: 2, # location
signature_sha: Gitlab::Database::ShaAttribute.serialize('57d4e05205f6462a73f039a5b2751aa1ab344e6e') # sha1('youshouldusethis')
)
vulnerability_finding_signatures.create!(
finding_id: @f1.id,
algorithm_type: 1, # hash
signature_sha: Gitlab::Database::ShaAttribute.serialize('c554d8d8df1a7a14319eafdaae24af421bf5b587') # sha1('andnotthis')
)
@f2 = create_finding!(
vulnerability_id: nil,
project_id: project.id,
scanner_id: scanner.id,
primary_identifier_id: vulnerability_identfier2.id,
report_type: 0, # "sast"
location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
uuid: '4be029b5-75e5-5ac0-81a2-50ab41726135'
)
vulnerability_finding_signatures.create!(
finding_id: @f2.id,
algorithm_type: 2, # location
signature_sha: Gitlab::Database::ShaAttribute.serialize('57d4e05205f6462a73f039a5b2751aa1ab344e6e') # sha1('youshouldusethis')
)
vulnerability_finding_signatures.create!(
finding_id: @f2.id,
algorithm_type: 1, # hash
signature_sha: Gitlab::Database::ShaAttribute.serialize('c554d8d8df1a7a14319eafdaae24af421bf5b587') # sha1('andnotthis')
)
end
let(:start_id) { @f1.id }
let(:end_id) { @f2.id }
let(:uuids_before) { [@f1.uuid, @f2.uuid] }
let(:uuids_after) { %w[d3b60ddd-d312-5606-b4d3-ad058eebeacb 349d9bec-c677-5530-a8ac-5e58889c3b1a] }
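# Expected UUIDv5 values once recalculation uses the location signature (algorithm_type: 2) instead of location_fingerprint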
it 'is recalculated using signature' do
expect(vulnerability_findings.pluck(:uuid)).to match_array(uuids_before)
subject
expect(vulnerability_findings.pluck(:uuid)).to match_array(uuids_after)
end
end
context 'if all records are removed before the job ran' do
let(:start_id) { 1 }
let(:end_id) { 9 }
before do
create_background_migration_job([start_id, end_id], :pending)
end
it 'does not error out' do
expect { subject }.not_to raise_error
end
it 'marks the job as done' do
subject
expect(pending_jobs.count).to eq(0)
expect(succeeded_jobs.count).to eq(1)
end
end
context 'when recalculation fails' do
before do
@uuid_v4 = create_finding!(
vulnerability_id: nil,
project_id: project.id,
scanner_id: scanner2.id,
primary_identifier_id: vulnerability_identfier2.id,
report_type: 0, # "sast"
location_fingerprint: Gitlab::Database::ShaAttribute.serialize("fa18f432f1d56675f4098d318739c3cd5b14eb3e"),
uuid: known_uuid_v4
)
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
allow(::Gitlab::Database::BulkUpdate).to receive(:execute).and_raise(expected_error)
end
let(:start_id) { @uuid_v4.id }
let(:end_id) { @uuid_v4.id }
let(:expected_error) { RuntimeError.new }
it 'captures the errors and does not crash entirely' do
expect { subject }.not_to raise_error
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_exception).with(expected_error).once
end
it_behaves_like 'marks background migration job records' do
let(:arguments) { [1, 4] }
subject { described_class.new }
end
end
it_behaves_like 'marks background migration job records' do
let(:arguments) { [1, 4] }
subject { described_class.new }
end
private
def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
vulnerabilities.create!(
project_id: project_id,
author_id: author_id,
title: title,
severity: severity,
confidence: confidence,
report_type: report_type
)
end
# rubocop:disable Metrics/ParameterLists
def create_finding!(
vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, id: nil,
name: "test", severity: 7, confidence: 7, report_type: 0,
project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid)
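# .compact below drops nil-valued attributes (for example the default id: nil) before insert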
vulnerability_findings.create!({
id: id,
vulnerability_id: vulnerability_id,
project_id: project_id,
name: name,
severity: severity,
confidence: confidence,
report_type: report_type,
project_fingerprint: project_fingerprint,
scanner_id: scanner_id,
primary_identifier_id: primary_identifier_id,
location_fingerprint: location_fingerprint,
metadata_version: metadata_version,
raw_metadata: raw_metadata,
uuid: uuid
}.compact
)
end
# rubocop:enable Metrics/ParameterLists
def create_user!(name: "Example User", email: "user@example.com", user_type: nil, created_at: Time.zone.now, confirmed_at: Time.zone.now)
users.create!(
name: name,
email: email,
username: name,
projects_limit: 0,
user_type: user_type,
confirmed_at: confirmed_at
)
end
def create_finding_pipeline!(project_id:, finding_id:)
pipeline = table(:ci_pipelines).create!(project_id: project_id)
vulnerability_finding_pipelines.create!(pipeline_id: pipeline.id, occurrence_id: finding_id)
end
end

View File

@ -131,7 +131,7 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote, :clean_gitlab_red
describe '#github_identifiers' do
it 'returns a hash with needed identifiers' do
expect(note.github_identifiers).to eq(
noteable_id: 42,
noteable_iid: 42,
noteable_type: 'MergeRequest',
note_id: 1
)

View File

@ -158,7 +158,7 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
it 'returns a hash with needed identifiers' do
expect(issue_event.github_identifiers).to eq(
id: 6501124486,
iid: 2,
issuable_iid: 2,
event: 'closed'
)
end

View File

@ -43,6 +43,16 @@ RSpec.describe Gitlab::GithubImport::Representation::Note do
it 'includes the note ID' do
expect(note.note_id).to eq(1)
end
describe '#github_identifiers' do
it 'returns a hash with needed identifiers' do
expect(note.github_identifiers).to eq(
noteable_iid: 42,
noteable_type: 'Issue',
note_id: 1
)
end
end
end
end
@ -103,18 +113,4 @@ RSpec.describe Gitlab::GithubImport::Representation::Note do
expect(note.author).to be_nil
end
end
describe '#github_identifiers' do
it 'returns a hash with needed identifiers' do
github_identifiers = {
noteable_id: 42,
noteable_type: 'Issue',
note_id: 1
}
other_attributes = { something_else: '_something_else_' }
note = described_class.new(github_identifiers.merge(other_attributes))
expect(note.github_identifiers).to eq(github_identifiers)
end
end
end

View File

@ -1,562 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
let(:memory_killer) { described_class.new }
let(:sidekiq_daemon_monitor) { instance_double(Gitlab::SidekiqDaemon::Monitor) }
let(:running_jobs) { {} }
let(:pid) { 12345 }
let(:worker) do
Class.new do
def self.name
'DummyWorker'
end
end
end
before do
stub_const('DummyWorker', worker)
allow(Sidekiq.logger).to receive(:info)
allow(Sidekiq.logger).to receive(:warn)
allow(Gitlab::SidekiqDaemon::Monitor).to receive(:instance).and_return(sidekiq_daemon_monitor)
allow(sidekiq_daemon_monitor).to receive(:jobs).and_return(running_jobs)
allow(memory_killer).to receive(:pid).and_return(pid)
# make sleep no-op
allow(memory_killer).to receive(:sleep) {}
end
describe '#run_thread' do
subject { memory_killer.send(:run_thread) }
before do
# let enabled? return 3 times: true, true, false
allow(memory_killer).to receive(:enabled?).and_return(true, true, false)
end
context 'when structured logging is used' do
it 'logs start message once' do
expect(Sidekiq.logger).to receive(:info).once
.with(
class: described_class.to_s,
action: 'start',
pid: pid,
message: 'Starting Gitlab::SidekiqDaemon::MemoryKiller Daemon')
subject
end
it 'logs StandardError message twice' do
expect(Sidekiq.logger).to receive(:warn).twice
.with(
class: described_class.to_s,
pid: pid,
message: "Exception from run_thread: My Exception")
expect(memory_killer).to receive(:rss_within_range?)
.twice
.and_raise(StandardError, 'My Exception')
expect { subject }.not_to raise_exception
end
it 'logs the exception message once, raises the exception, and logs the stop message' do
expect(Sidekiq.logger).to receive(:warn).once
.with(
class: described_class.to_s,
pid: pid,
message: "Exception from run_thread: My Exception")
expect(memory_killer).to receive(:rss_within_range?)
.once
.and_raise(Exception, 'My Exception')
expect(memory_killer).to receive(:sleep).with(Gitlab::SidekiqDaemon::MemoryKiller::CHECK_INTERVAL_SECONDS)
expect(Sidekiq.logger).to receive(:warn).once
.with(
class: described_class.to_s,
action: 'stop',
pid: pid,
message: 'Stopping Gitlab::SidekiqDaemon::MemoryKiller Daemon')
expect { subject }.to raise_exception(Exception, 'My Exception')
end
it 'logs stop message once' do
expect(Sidekiq.logger).to receive(:warn).once
.with(
class: described_class.to_s,
action: 'stop',
pid: pid,
message: 'Stopping Gitlab::SidekiqDaemon::MemoryKiller Daemon')
subject
end
end
it 'does not invoke restart_sidekiq when rss is within range' do
expect(memory_killer).to receive(:rss_within_range?)
.twice
.and_return(true)
expect(memory_killer).not_to receive(:restart_sidekiq)
subject
end
it 'invokes restart_sidekiq when rss is not within range' do
expect(memory_killer).to receive(:rss_within_range?)
.at_least(:once)
.and_return(false)
expect(memory_killer).to receive(:restart_sidekiq)
.at_least(:once)
subject
end
end
describe '#stop_working' do
subject { memory_killer.send(:stop_working) }
it 'changes enabled? to false' do
expect { subject }.to change { memory_killer.send(:enabled?) }
.from(true).to(false)
end
end
describe '#rss_within_range?' do
let(:shutdown_timeout_seconds) { 7 }
let(:check_interval_seconds) { 2 }
let(:grace_balloon_seconds) { 5 }
subject { memory_killer.send(:rss_within_range?) }
before do
stub_const("#{described_class}::SHUTDOWN_TIMEOUT_SECONDS", shutdown_timeout_seconds)
stub_const("#{described_class}::CHECK_INTERVAL_SECONDS", check_interval_seconds)
stub_const("#{described_class}::GRACE_BALLOON_SECONDS", grace_balloon_seconds)
allow(Process).to receive(:getpgrp).and_return(pid)
allow(Sidekiq).to receive(:[]).with(:timeout).and_return(9)
end
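# A breach of the soft limit is tolerated for up to GRACE_BALLOON_SECONDS; a breach of the hard limit is out of range immediately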
it 'returns true when everything is within limits', :aggregate_failures do
expect(memory_killer).to receive(:get_rss_kb).and_return(100)
expect(memory_killer).to receive(:get_soft_limit_rss_kb).and_return(200)
expect(memory_killer).to receive(:get_hard_limit_rss_kb).and_return(300)
expect(memory_killer).to receive(:get_memory_total_kb).and_return(3072)
expect(memory_killer).to receive(:refresh_state)
.with(:running)
.and_call_original
expect(Gitlab::Metrics::System).to receive(:monotonic_time).and_call_original
expect(memory_killer).not_to receive(:log_rss_out_of_range)
expect(subject).to be true
end
it 'returns false when rss exceeds hard_limit_rss', :aggregate_failures do
expect(memory_killer).to receive(:get_rss_kb).at_least(:once).and_return(400)
expect(memory_killer).to receive(:get_soft_limit_rss_kb).at_least(:once).and_return(200)
expect(memory_killer).to receive(:get_hard_limit_rss_kb).at_least(:once).and_return(300)
expect(memory_killer).to receive(:get_memory_total_kb).at_least(:once).and_return(3072)
expect(memory_killer).to receive(:refresh_state)
.with(:running)
.and_call_original
expect(memory_killer).to receive(:refresh_state)
.with(:above_soft_limit)
.and_call_original
expect(Gitlab::Metrics::System).to receive(:monotonic_time).and_call_original
expect(memory_killer).to receive(:out_of_range_description).with(400, 300, 200, true)
expect(subject).to be false
end
it 'returns false when rss exceeds hard_limit_rss after a while', :aggregate_failures do
expect(memory_killer).to receive(:get_rss_kb).and_return(250, 400, 400)
expect(memory_killer).to receive(:get_soft_limit_rss_kb).at_least(:once).and_return(200)
expect(memory_killer).to receive(:get_hard_limit_rss_kb).at_least(:once).and_return(300)
expect(memory_killer).to receive(:get_memory_total_kb).at_least(:once).and_return(3072)
expect(memory_killer).to receive(:refresh_state)
.with(:running)
.and_call_original
expect(memory_killer).to receive(:refresh_state)
.at_least(:once)
.with(:above_soft_limit)
.and_call_original
expect(Gitlab::Metrics::System).to receive(:monotonic_time).twice.and_call_original
expect(memory_killer).to receive(:sleep).with(check_interval_seconds)
expect(memory_killer).to receive(:out_of_range_description).with(400, 300, 200, false)
expect(memory_killer).to receive(:out_of_range_description).with(400, 300, 200, true)
expect(subject).to be false
end
it 'returns true when rss drops back below soft_limit_rss within GRACE_BALLOON_SECONDS', :aggregate_failures do
expect(memory_killer).to receive(:get_rss_kb).and_return(250, 100)
expect(memory_killer).to receive(:get_soft_limit_rss_kb).and_return(200, 200)
expect(memory_killer).to receive(:get_hard_limit_rss_kb).and_return(300, 300)
expect(memory_killer).to receive(:get_memory_total_kb).and_return(3072, 3072)
expect(memory_killer).to receive(:refresh_state)
.with(:running)
.and_call_original
expect(memory_killer).to receive(:refresh_state)
.with(:above_soft_limit)
.and_call_original
expect(Gitlab::Metrics::System).to receive(:monotonic_time).twice.and_call_original
expect(memory_killer).to receive(:sleep).with(check_interval_seconds)
expect(memory_killer).to receive(:out_of_range_description).with(100, 300, 200, false)
expect(subject).to be true
end
context 'when exceeds GRACE_BALLOON_SECONDS' do
let(:grace_balloon_seconds) { 0 }
it 'returns false when rss exceeds soft_limit_rss', :aggregate_failures do
allow(memory_killer).to receive(:get_rss_kb).and_return(250)
allow(memory_killer).to receive(:get_soft_limit_rss_kb).and_return(200)
allow(memory_killer).to receive(:get_hard_limit_rss_kb).and_return(300)
allow(memory_killer).to receive(:get_memory_total_kb).and_return(3072)
expect(memory_killer).to receive(:refresh_state)
.with(:running)
.and_call_original
expect(memory_killer).to receive(:refresh_state)
.with(:above_soft_limit)
.and_call_original
expect(memory_killer).to receive(:out_of_range_description).with(250, 300, 200, true)
expect(subject).to be false
end
end
end
describe '#restart_sidekiq' do
let(:shutdown_timeout_seconds) { 7 }
subject { memory_killer.send(:restart_sidekiq) }
context 'when sidekiq_memory_killer_read_only_mode is enabled' do
before do
stub_feature_flags(sidekiq_memory_killer_read_only_mode: true)
end
it 'does not send a signal' do
expect(memory_killer).not_to receive(:refresh_state)
expect(memory_killer).not_to receive(:signal_and_wait)
subject
end
end
context 'when sidekiq_memory_killer_read_only_mode is disabled' do
before do
stub_const("#{described_class}::SHUTDOWN_TIMEOUT_SECONDS", shutdown_timeout_seconds)
stub_feature_flags(sidekiq_memory_killer_read_only_mode: false)
allow(Sidekiq).to receive(:[]).with(:timeout).and_return(9)
allow(memory_killer).to receive(:get_rss_kb).and_return(100)
allow(memory_killer).to receive(:get_soft_limit_rss_kb).and_return(200)
allow(memory_killer).to receive(:get_hard_limit_rss_kb).and_return(300)
allow(memory_killer).to receive(:get_memory_total_kb).and_return(3072)
end
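# Expected escalation: SIGTSTP to stop fetching jobs, SIGTERM after 11s (presumably Sidekiq[:timeout] of 9 plus a small buffer) to shut down gracefully, then SIGKILL to the process group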
it 'sends signals in order' do
expect(memory_killer).to receive(:refresh_state)
.with(:stop_fetching_new_jobs)
.ordered
.and_call_original
expect(memory_killer).to receive(:signal_and_wait)
.with(shutdown_timeout_seconds, 'SIGTSTP', 'stop fetching new jobs')
.ordered
expect(memory_killer).to receive(:refresh_state)
.with(:shutting_down)
.ordered
.and_call_original
expect(memory_killer).to receive(:signal_and_wait)
.with(11, 'SIGTERM', 'gracefully shut down')
.ordered
expect(memory_killer).to receive(:refresh_state)
.with(:killing_sidekiq)
.ordered
.and_call_original
expect(memory_killer).to receive(:signal_pgroup)
.with('SIGKILL', 'die')
.ordered
subject
end
end
end
describe '#signal_and_wait' do
let(:time) { 0.1 }
let(:signal) { 'my-signal' }
let(:explanation) { 'my-explanation' }
let(:check_interval_seconds) { 0.1 }
subject { memory_killer.send(:signal_and_wait, time, signal, explanation) }
before do
stub_const("#{described_class}::CHECK_INTERVAL_SECONDS", check_interval_seconds)
end
it 'sends the signal and waits until the deadline' do
expect(Process).to receive(:kill)
.with(signal, pid)
.ordered
expect(Gitlab::Metrics::System).to receive(:monotonic_time)
.and_call_original
.at_least(3)
expect(memory_killer).to receive(:enabled?).and_return(true).at_least(:twice)
expect(memory_killer).to receive(:sleep).at_least(:once).and_call_original
subject
end
end
describe '#signal_pgroup' do
let(:signal) { 'my-signal' }
let(:explanation) { 'my-explanation' }
subject { memory_killer.send(:signal_pgroup, signal, explanation) }
it 'sends the signal to this process when it is not the group leader' do
expect(Process).to receive(:getpgrp).and_return(pid + 1)
expect(Sidekiq.logger).to receive(:warn).once
.with(
class: described_class.to_s,
signal: signal,
pid: pid,
message: "sending Sidekiq worker PID-#{pid} #{signal} (#{explanation})")
expect(Process).to receive(:kill).with(signal, pid).ordered
subject
end
it 'sends the signal to the whole process group when it is the group leader' do
expect(Process).to receive(:getpgrp).and_return(pid)
expect(Sidekiq.logger).to receive(:warn).once
.with(
class: described_class.to_s,
signal: signal,
pid: pid,
message: "sending Sidekiq worker PGRP-#{pid} #{signal} (#{explanation})")
expect(Process).to receive(:kill).with(signal, 0).ordered
subject
end
end
describe '#log_rss_out_of_range' do
let(:current_rss) { 100 }
let(:soft_limit_rss) { 200 }
let(:hard_limit_rss) { 300 }
let(:memory_total) { 3072 }
let(:jid) { 1 }
let(:reason) { 'rss out of range reason description' }
let(:queue) { 'default' }
let(:metrics) { memory_killer.instance_variable_get(:@metrics) }
let(:running_jobs) { { jid => { worker_class: DummyWorker } } }
before do
allow(memory_killer).to receive(:get_rss_kb).and_return(*current_rss)
allow(memory_killer).to receive(:get_soft_limit_rss_kb).and_return(soft_limit_rss)
allow(memory_killer).to receive(:get_hard_limit_rss_kb).and_return(hard_limit_rss)
allow(memory_killer).to receive(:get_memory_total_kb).and_return(memory_total)
memory_killer.send(:refresh_state, :running)
end
subject { memory_killer.send(:log_rss_out_of_range) }
it 'invokes Sidekiq.logger.warn' do
expect(memory_killer).to receive(:out_of_range_description).with(current_rss, hard_limit_rss, soft_limit_rss, true).and_return(reason)
expect(Sidekiq.logger).to receive(:warn)
.with(
class: described_class.to_s,
pid: pid,
message: 'Sidekiq worker RSS out of range',
current_rss: current_rss,
hard_limit_rss: hard_limit_rss,
soft_limit_rss: soft_limit_rss,
reason: reason,
running_jobs: [jid: jid, worker_class: 'DummyWorker'],
memory_total_kb: memory_total)
expect(metrics[:sidekiq_memory_killer_running_jobs]).to receive(:increment)
.with({ worker_class: "DummyWorker", deadline_exceeded: true })
subject
end
end
describe '#out_of_range_description' do
let(:hard_limit) { 300 }
let(:soft_limit) { 200 }
let(:grace_balloon_seconds) { 12 }
let(:deadline_exceeded) { true }
subject { memory_killer.send(:out_of_range_description, rss, hard_limit, soft_limit, deadline_exceeded) }
context 'when rss > hard_limit' do
let(:rss) { 400 }
it 'tells the reason' do
expect(subject).to eq("current_rss(#{rss}) > hard_limit_rss(#{hard_limit})")
end
end
context 'when rss <= hard_limit' do
let(:rss) { 300 }
context 'deadline exceeded' do
let(:deadline_exceeded) { true }
it 'tells the reason' do
stub_const("#{described_class}::GRACE_BALLOON_SECONDS", grace_balloon_seconds)
expect(subject).to eq("current_rss(#{rss}) > soft_limit_rss(#{soft_limit}) longer than GRACE_BALLOON_SECONDS(#{grace_balloon_seconds})")
end
end
context 'deadline not exceeded' do
let(:deadline_exceeded) { false }
it 'tells the reason' do
expect(subject).to eq("current_rss(#{rss}) > soft_limit_rss(#{soft_limit})")
end
end
end
end
describe '#rss_increase_by_jobs' do
let(:running_jobs) { { 'job1' => { worker_class: "Job1" }, 'job2' => { worker_class: "Job2" } } }
subject { memory_killer.send(:rss_increase_by_jobs) }
before do
allow(memory_killer).to receive(:rss_increase_by_job).and_return(11, 22)
end
it 'adds up individual rss_increase_by_job' do
expect(subject).to eq(33)
end
context 'when there is no running job' do
let(:running_jobs) { {} }
it 'returns 0 if there are no jobs' do
expect(subject).to eq(0)
end
end
end
describe '#rss_increase_by_job' do
let(:worker_class) { Chaos::SleepWorker }
let(:job) { { worker_class: worker_class, started_at: 321 } }
let(:max_memory_kb) { 100000 }
subject { memory_killer.send(:rss_increase_by_job, job) }
before do
stub_const("#{described_class}::DEFAULT_MAX_MEMORY_GROWTH_KB", max_memory_kb)
end
it 'returns 0 if memory_growth_kb returns 0' do
expect(memory_killer).to receive(:get_job_options).with(job, 'memory_killer_memory_growth_kb', 0).and_return(0)
expect(memory_killer).to receive(:get_job_options).with(job, 'memory_killer_max_memory_growth_kb', max_memory_kb).and_return(0)
expect(Time).not_to receive(:now)
expect(subject).to eq(0)
end
it 'returns the time-factored growth value when it does not exceed the max growth limit for a whitelisted job' do
expect(memory_killer).to receive(:get_job_options).with(job, 'memory_killer_memory_growth_kb', 0).and_return(10)
expect(memory_killer).to receive(:get_job_options).with(job, 'memory_killer_max_memory_growth_kb', max_memory_kb).and_return(100)
expect(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(323)
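# started_at is 321, so 2 seconds elapsed: 10 KB/s growth * 2 = 20 KB, under the 100 KB cap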
expect(subject).to eq(20)
end
it 'returns the max growth limit when the time-factored growth value exceeds the max growth limit for a whitelisted job' do
expect(memory_killer).to receive(:get_job_options).with(job, 'memory_killer_memory_growth_kb', 0).and_return(10)
expect(memory_killer).to receive(:get_job_options).with(job, 'memory_killer_max_memory_growth_kb', max_memory_kb).and_return(100)
expect(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(332)
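# 11 seconds elapsed: 10 KB/s * 11 = 110 KB, so the result is capped at the 100 KB max growth limit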
expect(subject).to eq(100)
end
end
describe '#get_job_options' do
let(:worker_class) { Chaos::SleepWorker }
let(:job) { { worker_class: worker_class, started_at: 321 } }
let(:key) { 'my-key' }
let(:default) { 'my-default' }
subject { memory_killer.send(:get_job_options, job, key, default) }
it 'returns the default if the key is not defined' do
expect(worker_class).to receive(:sidekiq_options).and_return({ "retry" => 5 })
expect(subject).to eq(default)
end
it 'returns the default if a StandardError is raised when retrieving sidekiq_options' do
expect(worker_class).to receive(:sidekiq_options).and_raise(StandardError)
expect(subject).to eq(default)
end
it 'returns the right value if sidekiq_options has the key' do
expect(worker_class).to receive(:sidekiq_options).and_return({ key => 10 })
expect(subject).to eq(10)
end
end
describe '#refresh_state' do
let(:metrics) { memory_killer.instance_variable_get(:@metrics) }
subject { memory_killer.send(:refresh_state, :shutting_down) }
it 'calls gitlab metrics gauge set methods' do
expect(memory_killer).to receive(:get_rss_kb) { 1010 }
expect(memory_killer).to receive(:get_soft_limit_rss_kb) { 1020 }
expect(memory_killer).to receive(:get_hard_limit_rss_kb) { 1040 }
expect(memory_killer).to receive(:get_memory_total_kb) { 3072 }
expect(metrics[:sidekiq_memory_killer_phase]).to receive(:set)
.with({}, described_class::PHASE[:shutting_down])
expect(metrics[:sidekiq_current_rss]).to receive(:set)
.with({}, 1010)
expect(metrics[:sidekiq_memory_killer_soft_limit_rss]).to receive(:set)
.with({}, 1020)
expect(metrics[:sidekiq_memory_killer_hard_limit_rss]).to receive(:set)
.with({}, 1040)
subject
end
end
end

View File

@ -483,9 +483,11 @@ RSpec.describe Ci::JobArtifact, feature_category: :build_artifacts do
context "when #{file_type} type with other formats" do
described_class.file_formats.except(file_format).values.each do |other_format|
let(:artifact) { build(:ci_job_artifact, file_type: file_type, file_format: other_format) }
context "with #{other_format}" do
let(:artifact) { build(:ci_job_artifact, file_type: file_type, file_format: other_format) }
it { is_expected.not_to be_valid }
it { is_expected.not_to be_valid }
end
end
end
end

View File

@ -393,24 +393,26 @@ RSpec.describe Ci::HasStatus, feature_category: :continuous_integration do
subject { object.blocked? }
%w[ci_pipeline ci_stage ci_build generic_commit_status].each do |type|
let(:object) { build(type, status: status) }
context "when #{type}" do
let(:object) { build(type, status: status) }
context 'when status is scheduled' do
let(:status) { :scheduled }
context 'when status is scheduled' do
let(:status) { :scheduled }
it { is_expected.to be_truthy }
end
it { is_expected.to be_truthy }
end
context 'when status is manual' do
let(:status) { :manual }
context 'when status is manual' do
let(:status) { :manual }
it { is_expected.to be_truthy }
end
it { is_expected.to be_truthy }
end
context 'when status is created' do
let(:status) { :created }
context 'when status is created' do
let(:status) { :created }
it { is_expected.to be_falsy }
it { is_expected.to be_falsy }
end
end
end
end

View File

@ -8,8 +8,13 @@ RSpec.describe ImportFailure do
let_it_be(:correlation_id) { 'ABC' }
let_it_be(:hard_failure) { create(:import_failure, :hard_failure, project: project, correlation_id_value: correlation_id) }
let_it_be(:soft_failure) { create(:import_failure, :soft_failure, project: project, correlation_id_value: correlation_id) }
let_it_be(:github_import_failure) { create(:import_failure, :github_import_failure, project: project) }
let_it_be(:unrelated_failure) { create(:import_failure, project: project) }
it 'returns failures with external_identifiers' do
expect(ImportFailure.with_external_identifiers).to match_array([github_import_failure])
end
it 'returns failures for the given correlation ID' do
expect(ImportFailure.failures_by_correlation_id(correlation_id)).to match_array([hard_failure, soft_failure])
end

View File

@ -7,7 +7,7 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
let_it_be(:user) { create(:user, :admin) }
let_it_be(:another_admin) { create(:user, :admin) }
let_it_be(:group) { create(:group) }
let_it_be_with_reload(:group) { create(:group) }
let_it_be(:active_instance_runner) do
create(:ci_runner, :instance, :with_runner_manager,
@ -379,6 +379,7 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
end
describe 'ephemeralRegisterUrl' do
let(:runner_args) { { registration_type: :authenticated_user, creator: creator } }
let(:query) do
%(
query {
@ -403,54 +404,46 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
end
end
context 'with an instance runner' do
context 'with registration available' do
let_it_be(:runner) { create(:ci_runner, registration_type: :authenticated_user) }
context 'with an instance runner', :freeze_time do
let(:creator) { user }
let(:runner) { create(:ci_runner, **runner_args) }
context 'with valid ephemeral registration' do
it_behaves_like 'has register url' do
let(:expected_url) { "http://localhost/admin/runners/#{runner.id}/register" }
end
end
context 'with no registration available' do
let_it_be(:runner) { create(:ci_runner) }
context 'when runner ephemeral registration has expired' do
let(:runner) do
create(:ci_runner, created_at: (Ci::Runner::REGISTRATION_AVAILABILITY_TIME + 1.second).ago, **runner_args)
end
it_behaves_like 'has no register url'
end
context 'when runner has already been registered' do
let(:runner) { create(:ci_runner, :with_runner_manager, **runner_args) }
it_behaves_like 'has no register url'
end
end
context 'with a group runner' do
context 'with registration available' do
let_it_be(:runner) { create(:ci_runner, :group, groups: [group], registration_type: :authenticated_user) }
let(:creator) { user }
let(:runner) { create(:ci_runner, :group, groups: [group], **runner_args) }
context 'with valid ephemeral registration' do
it_behaves_like 'has register url' do
let(:expected_url) { "http://localhost/groups/#{group.path}/-/runners/#{runner.id}/register" }
end
end
context 'with no group' do
let(:destroyed_group) { create(:group) }
let(:runner) { create(:ci_runner, :group, groups: [destroyed_group], registration_type: :authenticated_user) }
context 'when request not from creator' do
let(:creator) { another_admin }
before do
destroyed_group.destroy!
end
it_behaves_like 'has no register url'
end
context 'with no registration available' do
let_it_be(:runner) { create(:ci_runner, :group, groups: [group]) }
it_behaves_like 'has no register url'
end
context 'with no access' do
let_it_be(:user) { create(:user) }
let_it_be(:runner) { create(:ci_runner, :group, groups: [group], registration_type: :authenticated_user) }
before do
group.add_maintainer(user)
group.add_owner(another_admin)
end
it_behaves_like 'has no register url'
@ -458,37 +451,20 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
end
context 'with a project runner' do
context 'with registration available' do
let_it_be(:runner) { create(:ci_runner, :project, projects: [project1], registration_type: :authenticated_user) }
let(:creator) { user }
let(:runner) { create(:ci_runner, :project, projects: [project1], **runner_args) }
context 'with valid ephemeral registration' do
it_behaves_like 'has register url' do
let(:expected_url) { "http://localhost/#{project1.full_path}/-/runners/#{runner.id}/register" }
end
end
context 'with no project' do
let(:destroyed_project) { create(:project) }
let(:runner) { create(:ci_runner, :project, projects: [destroyed_project], registration_type: :authenticated_user) }
context 'when request not from creator' do
let(:creator) { another_admin }
before do
destroyed_project.destroy!
end
it_behaves_like 'has no register url'
end
context 'with no registration available' do
let_it_be(:runner) { create(:ci_runner, :project, projects: [project1]) }
it_behaves_like 'has no register url'
end
context 'with no access' do
let_it_be(:user) { create(:user) }
let_it_be(:runner) { create(:ci_runner, :project, projects: [project1], registration_type: :authenticated_user) }
before do
group.add_maintainer(user)
project1.add_owner(another_admin)
end
it_behaves_like 'has no register url'
@ -1016,11 +992,11 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
describe 'sorting and pagination' do
let(:query) do
<<~GQL
query($id: CiRunnerID!, $projectSearchTerm: String, $n: Int, $cursor: String) {
runner(id: $id) {
#{fields}
query($id: CiRunnerID!, $projectSearchTerm: String, $n: Int, $cursor: String) {
runner(id: $id) {
#{fields}
}
}
}
GQL
end
@@ -1039,18 +1015,18 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
let(:fields) do
<<~QUERY
projects(search: $projectSearchTerm, first: $n, after: $cursor) {
count
nodes {
id
projects(search: $projectSearchTerm, first: $n, after: $cursor) {
count
nodes {
id
}
pageInfo {
hasPreviousPage
startCursor
endCursor
hasNextPage
}
}
pageInfo {
hasPreviousPage
startCursor
endCursor
hasNextPage
}
}
QUERY
end
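
As a side note on the re-indented pagination query above: in GitLab GraphQL request specs, fields like these are typically driven through the `post_graphql` helper with query variables. A minimal sketch, assuming the `runner` and `user` records defined earlier in this spec:

```ruby
# Sketch only: fetch the first page of the runner's projects, then follow
# the endCursor. post_graphql/graphql_data are GitLab request-spec helpers.
it 'paginates projects with a cursor' do
  post_graphql(query, current_user: user,
    variables: { id: runner.to_global_id.to_s, n: 2 })

  page_info = graphql_data.dig('runner', 'projects', 'pageInfo')

  post_graphql(query, current_user: user,
    variables: { id: runner.to_global_id.to_s, n: 2, cursor: page_info['endCursor'] })

  expect(graphql_data.dig('runner', 'projects', 'nodes')).to be_present
end
```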


@@ -107,11 +107,7 @@ RSpec.describe 'RunnerCreate', feature_category: :runner_fleet do
end
end
shared_context 'when runner is created successfully' do
before do
stub_feature_flags(create_runner_workflow_for_namespace: [group])
end
shared_examples 'when runner is created successfully' do
it do
expected_args = { user: current_user, params: anything }
expect_next_instance_of(::Ci::Runners::CreateRunnerService, expected_args) do |service|
@@ -168,6 +164,10 @@ RSpec.describe 'RunnerCreate', feature_category: :runner_fleet do
}
end
before do
stub_feature_flags(create_runner_workflow_for_namespace: [group])
end
it_behaves_like 'when user does not have permissions'
context 'when user has permissions' do
@@ -218,10 +218,7 @@ RSpec.describe 'RunnerCreate', feature_category: :runner_fleet do
it 'returns an error' do
post_graphql_mutation(mutation, current_user: current_user)
expect_graphql_errors_to_include(
'The resource that you are attempting to access does not exist ' \
"or you don't have permission to perform this action"
)
expect(flattened_errors).not_to be_empty
end
end
end
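
One behaviour worth noting in the relocated `before` block: passing an array to `stub_feature_flags` enables the flag only for those actors, which is what lets the permission contexts above exercise both states. A minimal sketch of that semantics, reusing names from this spec (`other_group` is hypothetical):

```ruby
# The flag reads as enabled for `group` and disabled for any other actor.
stub_feature_flags(create_runner_workflow_for_namespace: [group])

Feature.enabled?(:create_runner_workflow_for_namespace, group)       # => true
Feature.enabled?(:create_runner_workflow_for_namespace, other_group) # => false
```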


@@ -0,0 +1,319 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Import::GithubFailureEntity, feature_category: :importers do
let(:project) { instance_double(Project, id: 123456, import_url: 'https://github.com/example/repo.git', import_source: 'example/repo') }
let(:source) { 'Gitlab::GithubImport::Importer::PullRequestImporter' }
let(:github_identifiers) { { 'iid' => 2, 'object_type' => 'pull_request', 'title' => 'Implement cool feature' } }
let(:import_failure) do
instance_double(
ImportFailure,
project: project,
exception_class: 'Some class',
exception_message: 'Something went wrong',
source: source,
correlation_id_value: '2ea9c4b8587b6df49f35a3fb703688aa',
external_identifiers: github_identifiers,
created_at: Time.current
)
end
let(:failure_details) do
{
exception_class: import_failure.exception_class,
exception_message: import_failure.exception_message,
correlation_id_value: import_failure.correlation_id_value,
source: import_failure.source,
github_identifiers: github_identifiers,
created_at: import_failure.created_at
}
end
subject(:entity) { described_class.new(import_failure).as_json.with_indifferent_access }
shared_examples 'import failure entity' do
it 'exposes required fields for import entity' do
expect(entity).to eq(
{
type: import_failure.external_identifiers['object_type'],
title: title,
provider_url: provider_url,
details: failure_details
}.with_indifferent_access
)
end
end
it 'exposes correct attributes' do
expect(entity.keys).to match_array(%w[type title provider_url details])
end
context 'with `pull_request` failure' do
it_behaves_like 'import failure entity' do
let(:title) { 'Implement cool feature' }
let(:provider_url) { 'https://github.com/example/repo/pull/2' }
end
end
context 'with `pull_request_merged_by` failure' do
before do
import_failure.external_identifiers.merge!({ 'object_type' => 'pull_request_merged_by' })
end
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::PullRequestMergedByImporter' }
let(:title) { 'Pull request 2 merger' }
let(:provider_url) { 'https://github.com/example/repo/pull/2' }
end
end
context 'with `pull_request_review_request` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::PullRequests::ReviewRequestImporter' }
let(:title) { 'Pull request 2 review request' }
let(:provider_url) { 'https://github.com/example/repo/pull/2' }
let(:github_identifiers) do
{
'merge_request_iid' => 2,
'requested_reviewers' => %w[alice bob],
'object_type' => 'pull_request_review_request'
}
end
end
end
context 'with `pull_request_review` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::PullRequestReviewImporter' }
let(:title) { 'Pull request review 123456' }
let(:provider_url) { 'https://github.com/example/repo/pull/2#pullrequestreview-123456' }
let(:github_identifiers) do
{
'merge_request_iid' => 2,
'review_id' => 123456,
'object_type' => 'pull_request_review'
}
end
end
end
context 'with `issue` failure' do
before do
import_failure.external_identifiers.merge!({ 'object_type' => 'issue' })
end
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::IssueAndLabelLinksImporter' }
let(:title) { 'Implement cool feature' }
let(:provider_url) { 'https://github.com/example/repo/issues/2' }
end
end
context 'with `collaborator` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::CollaboratorImporter' }
let(:title) { 'alice' }
let(:provider_url) { 'https://github.com/alice' }
let(:github_identifiers) do
{
'id' => 123456,
'login' => 'alice',
'object_type' => 'collaborator'
}
end
end
end
context 'with `protected_branch` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::ProtectedBranchImporter' }
let(:title) { 'main' }
let(:provider_url) { 'https://github.com/example/repo/tree/main' }
let(:github_identifiers) do
{
'id' => 'main',
'object_type' => 'protected_branch'
}
end
end
end
context 'with `issue_event` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::IssueEventImporter' }
let(:title) { 'closed' }
let(:provider_url) { 'https://github.com/example/repo/issues/2#event-123456' }
let(:github_identifiers) do
{
'id' => 123456,
'issuable_iid' => 2,
'event' => 'closed',
'object_type' => 'issue_event'
}
end
end
end
context 'with `label` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::LabelsImporter' }
let(:title) { 'bug' }
let(:provider_url) { 'https://github.com/example/repo/labels/bug' }
let(:github_identifiers) { { 'title' => 'bug', 'object_type' => 'label' } }
end
end
context 'with `milestone` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::MilestonesImporter' }
let(:title) { '1 release' }
let(:provider_url) { 'https://github.com/example/repo/milestone/1' }
let(:github_identifiers) { { 'iid' => 1, 'title' => '1 release', 'object_type' => 'milestone' } }
end
end
context 'with `release` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::ReleasesImporter' }
let(:title) { 'v1.0' }
let(:provider_url) { 'https://github.com/example/repo/releases/tag/v1.0' }
let(:github_identifiers) do
{
'tag' => 'v1.0',
'object_type' => 'release'
}
end
end
end
context 'with `note` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::NoteImporter' }
let(:title) { 'MergeRequest comment 123456' }
let(:provider_url) { 'https://github.com/example/repo/issues/2#issuecomment-123456' }
let(:github_identifiers) do
{
'note_id' => 123456,
'noteable_iid' => 2,
'noteable_type' => 'MergeRequest',
'object_type' => 'note'
}
end
end
end
context 'with `diff_note` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::DiffNoteImporter' }
let(:title) { 'Pull request review comment 123456' }
let(:provider_url) { 'https://github.com/example/repo/pull/2#discussion_r123456' }
let(:github_identifiers) do
{
'note_id' => 123456,
'noteable_iid' => 2,
'noteable_type' => 'MergeRequest',
'object_type' => 'diff_note'
}
end
end
end
context 'with `issue_attachment` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::NoteAttachmentsImporter' }
let(:title) { 'Issue 2 attachment' }
let(:provider_url) { 'https://github.com/example/repo/issues/2' }
let(:github_identifiers) do
{
'db_id' => 123456,
'noteable_iid' => 2,
'object_type' => 'issue_attachment'
}
end
end
end
context 'with `merge_request_attachment` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::NoteAttachmentsImporter' }
let(:title) { 'Merge request 2 attachment' }
let(:provider_url) { 'https://github.com/example/repo/pull/2' }
let(:github_identifiers) do
{
'db_id' => 123456,
'noteable_iid' => 2,
'object_type' => 'merge_request_attachment'
}
end
end
end
context 'with `release_attachment` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::NoteAttachmentsImporter' }
let(:title) { 'Release v1.0 attachment' }
let(:provider_url) { 'https://github.com/example/repo/releases/tag/v1.0' }
let(:github_identifiers) do
{
'db_id' => 123456,
'tag' => 'v1.0',
'object_type' => 'release_attachment'
}
end
end
end
context 'with `note_attachment` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::NoteAttachmentsImporter' }
let(:title) { 'Note attachment' }
let(:provider_url) { '' }
let(:github_identifiers) do
{
'db_id' => 123456,
'noteable_type' => 'Issue',
'object_type' => 'note_attachment'
}
end
end
end
context 'with `lfs_object` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::LfsObjectImporter' }
let(:title) { '42' }
let(:provider_url) { '' }
let(:github_identifiers) do
{
'oid' => 42,
'size' => 123456,
'object_type' => 'lfs_object'
}
end
end
end
context 'with unknown failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::NewObjectTypeImporter' }
let(:title) { '' }
let(:provider_url) { '' }
let(:github_identifiers) do
{
'id' => 123456,
'object_type' => 'new_object_type'
}
end
end
end
context 'with an invalid import_url' do
let(:project) { instance_double(Project, id: 123456, import_url: 'Invalid url', import_source: 'example/repo') }
it_behaves_like 'import failure entity' do
let(:title) { 'Implement cool feature' }
let(:provider_url) { '' }
end
end
end
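
For orientation, the shared example above pins down a four-key payload. Below is a minimal sketch of an entity shape that would satisfy it, assuming Grape::Entity; this is an illustration, not the real `Import::GithubFailureEntity`, whose `title` and `provider_url` logic branches per `object_type` as the contexts above show:

```ruby
# Illustrative sketch only; the real entity derives title/provider_url from
# external_identifiers plus the project's import_source for each object_type.
class GithubFailureEntitySketch < Grape::Entity
  expose(:type) { |failure| failure.external_identifiers['object_type'] }
  expose(:title) { |failure| failure.external_identifiers['title'].to_s }
  expose(:provider_url) { |failure| '' } # e.g. "https://github.com/#{import_source}/pull/#{iid}"
  expose(:details) do |failure, _options|
    {
      exception_class: failure.exception_class,
      exception_message: failure.exception_message,
      correlation_id_value: failure.correlation_id_value,
      source: failure.source,
      github_identifiers: failure.external_identifiers,
      created_at: failure.created_at
    }
  end
end
```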


@@ -0,0 +1,71 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Import::GithubFailureSerializer, feature_category: :importers do
subject(:serializer) { described_class.new }
it 'represents GithubFailureEntity entities' do
expect(described_class.entity_class).to eq(Import::GithubFailureEntity)
end
describe '#represent' do
let(:timestamp) { Time.new(2023, 1, 1).utc }
let(:github_identifiers) { { 'iid' => 2, 'object_type' => 'pull_request', 'title' => 'Implement cool feature' } }
let(:project) do
instance_double(
Project,
id: 123456,
import_status: 'finished',
import_url: 'https://github.com/example/repo.git',
import_source: 'example/repo'
)
end
let(:import_failure) do
instance_double(
ImportFailure,
project: project,
exception_class: 'Some class',
exception_message: 'Something went wrong',
source: 'Gitlab::GithubImport::Importer::PullRequestImporter',
correlation_id_value: '2ea9c4b8587b6df49f35a3fb703688aa',
external_identifiers: github_identifiers,
created_at: timestamp
)
end
let(:expected_data) do
{
type: 'pull_request',
title: 'Implement cool feature',
provider_url: 'https://github.com/example/repo/pull/2',
details: {
exception_class: import_failure.exception_class,
exception_message: import_failure.exception_message,
correlation_id_value: import_failure.correlation_id_value,
source: import_failure.source,
github_identifiers: github_identifiers,
created_at: timestamp.iso8601(3)
}
}.deep_stringify_keys
end
context 'when a single object is being serialized' do
let(:resource) { import_failure }
it 'serializes import failure' do
expect(serializer.represent(resource).as_json).to eq expected_data
end
end
context 'when multiple objects are being serialized' do
let(:count) { 3 }
let(:resource) { Array.new(count, import_failure) }
it 'serializes array of import failures' do
expect(serializer.represent(resource).as_json).to all(eq(expected_data))
end
end
end
end
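
Usage-wise, the serializer appears to be a thin wrapper over the entity (note the `entity_class` assertion above). A hedged sketch of how a caller might render a batch of failures; `project.import_failures` is the existing Project association, the surrounding controller context is assumed:

```ruby
# Sketch: serialize every recorded failure for a project's GitHub import.
failures = project.import_failures
Import::GithubFailureSerializer.new.represent(failures).as_json
# => [{ "type" => "pull_request", "title" => "...", "provider_url" => "...", "details" => { ... } }, ...]
```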


@@ -7006,7 +7006,6 @@
- './spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb'
- './spec/lib/gitlab/sidekiq_config/worker_router_spec.rb'
- './spec/lib/gitlab/sidekiq_config/worker_spec.rb'
- './spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb'
- './spec/lib/gitlab/sidekiq_daemon/monitor_spec.rb'
- './spec/lib/gitlab/sidekiq_death_handler_spec.rb'
- './spec/lib/gitlab/sidekiq_logging/deduplication_logger_spec.rb'


@@ -59,7 +59,7 @@ RSpec.describe 'layouts/_head' do
render
expect(rendered).to match('<link rel="stylesheet" media="screen" href="/stylesheets/highlight/themes/solarised-light.css" />')
expect(rendered).to match('<link rel="stylesheet" media="all" href="/stylesheets/highlight/themes/solarised-light.css" />')
end
context 'when an asset_host is set and snowplow url is set', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/346542' do
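The assertion change above tracks the tag's `media` attribute moving from `screen` to `all`. In Rails view terms, the expected markup corresponds to something like this sketch (helper arguments assumed, not taken from the actual template):

```ruby
# Sketch only: a stylesheet tag rendered with media="all".
stylesheet_link_tag 'highlight/themes/solarised-light', media: 'all'
# => <link rel="stylesheet" media="all" href="/stylesheets/highlight/themes/solarised-light.css" />
```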


@@ -59,7 +59,6 @@ require (
github.com/DataDog/datadog-go v4.4.0+incompatible // indirect
github.com/DataDog/sketches-go v1.0.0 // indirect
github.com/Microsoft/go-winio v0.6.0 // indirect
github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d // indirect
github.com/beevik/ntp v0.3.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/census-instrumentation/opencensus-proto v0.4.1 // indirect
@@ -68,7 +67,7 @@ require (
github.com/client9/reopen v1.0.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dlclark/regexp2 v1.4.0 // indirect
github.com/go-ole/go-ole v1.2.4 // indirect
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/google/go-cmp v0.5.9 // indirect
@@ -84,6 +83,7 @@ require (
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20210210170715-a8dfcb80d3a7 // indirect
github.com/lightstep/lightstep-tracer-go v0.25.0 // indirect
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect
github.com/oklog/ulid/v2 v2.0.2 // indirect
@@ -92,19 +92,21 @@ require (
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
github.com/prometheus/client_model v0.3.0 // indirect
github.com/prometheus/common v0.42.0 // indirect
github.com/prometheus/procfs v0.9.0 // indirect
github.com/prometheus/prometheus v0.42.0 // indirect
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46 // indirect
github.com/shabbyrobe/gocovmerge v0.0.0-20190829150210-3e036491d500 // indirect
github.com/shirou/gopsutil/v3 v3.21.2 // indirect
github.com/shirou/gopsutil/v3 v3.21.12 // indirect
github.com/smartystreets/assertions v1.2.0 // indirect
github.com/tinylib/msgp v1.1.2 // indirect
github.com/tklauser/go-sysconf v0.3.4 // indirect
github.com/tklauser/numcpus v0.2.1 // indirect
github.com/tklauser/go-sysconf v0.3.9 // indirect
github.com/tklauser/numcpus v0.3.0 // indirect
github.com/uber/jaeger-client-go v2.30.0+incompatible // indirect
github.com/uber/jaeger-lib v2.4.1+incompatible // indirect
github.com/yusufpapurcu/wmi v1.2.2 // indirect
go.opencensus.io v0.24.0 // indirect
go.uber.org/atomic v1.10.0 // indirect
golang.org/x/crypto v0.6.0 // indirect


@@ -530,7 +530,6 @@ github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdko
github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ=
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d h1:G0m3OIz70MZUWq3EgK3CesDbo8upS2Vm9/P3FtgI+Jk=
github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g=
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
@@ -949,8 +948,9 @@ github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbV
github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/stdr v1.2.0/go.mod h1:YkVgnZu1ZjjL7xTxrfm/LLZBfkhTqSR1ydtm6jTKKwI=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-ole/go-ole v1.2.4 h1:nNBDSCOigTSiarFpYE9J/KtEA1IOW4CNeqT9TQDqCxI=
github.com/go-ole/go-ole v1.2.4/go.mod h1:XCwSNxSkXRo4vlyPy93sltvi/qJq0jqQhjqQNIwKuxM=
github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
github.com/go-openapi/analysis v0.21.2/go.mod h1:HZwRk4RRisyG8vx2Oe6aqeSQcoxRp47Xkp3+K6q+LdY=
github.com/go-openapi/analysis v0.21.4/go.mod h1:4zQ35W4neeZTqh3ol0rv/O8JBbka9QyAgQRPp9y3pfo=
github.com/go-openapi/errors v0.19.8/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M=
@@ -1446,6 +1446,8 @@ github.com/lightstep/lightstep-tracer-go v0.25.0/go.mod h1:G1ZAEaqTHFPWpWunnbUn1
github.com/linode/linodego v1.4.0/go.mod h1:PVsRxSlOiJyvG4/scTszpmZDTdgS+to3X6eS8pRrWI8=
github.com/linode/linodego v1.12.0/go.mod h1:NJlzvlNtdMRRkXb0oN6UWzUkj6t+IBsyveHgZ5Ppjyk=
github.com/linuxkit/virtsock v0.0.0-20201010232012-f8cee7dfc7a3/go.mod h1:3r6x7q95whyfWQpmGZTu3gk3v2YkMi05HEzl7Tf7YEo=
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
github.com/magiconair/properties v1.7.4-0.20170902060319-8d7837e64d3c/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
@@ -1670,6 +1672,8 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s=
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw=
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA=
github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U=
github.com/prometheus/alertmanager v0.24.0/go.mod h1:r6fy/D7FRuZh5YbnX6J3MBY0eI4Pb5yPYS7/bPSXXqI=
@@ -1773,8 +1777,9 @@ github.com/seccomp/libseccomp-golang v0.9.2-0.20210429002308-3879420cc921/go.mod
github.com/shabbyrobe/gocovmerge v0.0.0-20180507124511-f6ea450bfb63/go.mod h1:n+VKSARF5y/tS9XFSP7vWDfS+GUC5vs/YT7M5XDTUEM=
github.com/shabbyrobe/gocovmerge v0.0.0-20190829150210-3e036491d500 h1:WnNuhiq+FOY3jNj6JXFT+eLN3CQ/oPIsDPRanvwsmbI=
github.com/shabbyrobe/gocovmerge v0.0.0-20190829150210-3e036491d500/go.mod h1:+njLrG5wSeoG4Ds61rFgEzKvenR2UHbjMoDHsczxly0=
github.com/shirou/gopsutil/v3 v3.21.2 h1:fIOk3hyqV1oGKogfGNjUZa0lUbtlkx3+ZT0IoJth2uM=
github.com/shirou/gopsutil/v3 v3.21.2/go.mod h1:ghfMypLDrFSWN2c9cDYFLHyynQ+QUht0cv/18ZqVczw=
github.com/shirou/gopsutil/v3 v3.21.12 h1:VoGxEW2hpmz0Vt3wUvHIl9fquzYLNpVpgNNB7pGJimA=
github.com/shirou/gopsutil/v3 v3.21.12/go.mod h1:BToYZVTlSVlfazpDDYFnsVZLaoRG+g8ufT6fPQLdJzA=
github.com/shoenig/test v0.6.0/go.mod h1:xYtyGBC5Q3kzCNyJg/SjgNpfAa2kvmgA0i5+lQso8x0=
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
@@ -1867,10 +1872,12 @@ github.com/tedsuo/ifrit v0.0.0-20180802180643-bea94bb476cc/go.mod h1:eyZnKCc955u
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/tinylib/msgp v1.1.2 h1:gWmO7n0Ys2RBEb7GPYB9Ujq8Mk5p2U08lRnmMcGy6BQ=
github.com/tinylib/msgp v1.1.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
github.com/tklauser/go-sysconf v0.3.4 h1:HT8SVixZd3IzLdfs/xlpq0jeSfTX57g1v6wB1EuzV7M=
github.com/tklauser/go-sysconf v0.3.4/go.mod h1:Cl2c8ZRWfHD5IrfHo9VN+FX9kCFjIOyVklgXycLB6ek=
github.com/tklauser/numcpus v0.2.1 h1:ct88eFm+Q7m2ZfXJdan1xYoXKlmwsfP+k88q05KvlZc=
github.com/tklauser/go-sysconf v0.3.9 h1:JeUVdAOWhhxVcU6Eqr/ATFHgXk/mmiItdKeJPev3vTo=
github.com/tklauser/go-sysconf v0.3.9/go.mod h1:11DU/5sG7UexIrp/O6g35hrWzu0JxlwQ3LSFUzyeuhs=
github.com/tklauser/numcpus v0.2.1/go.mod h1:9aU+wOc6WjUIZEwWMP62PL/41d65P+iks1gBkr4QyP8=
github.com/tklauser/numcpus v0.3.0 h1:ILuRUQBtssgnxw0XXIjKUC56fgnOrFoQQ/4+DeU2biQ=
github.com/tklauser/numcpus v0.3.0/go.mod h1:yFGUr7TUHQRAhyqBcEg0Ge34zDBAsIvJJcyE6boqnA8=
github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/tmc/grpc-websocket-proxy v0.0.0-20201229170055-e5319fda7802/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
@@ -1917,6 +1924,8 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yusufpapurcu/wmi v1.2.2 h1:KBNDSne4vP5mbSWnJbO+51IMOXJB67QiYCSBrubbPRg=
github.com/yusufpapurcu/wmi v1.2.2/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs=
github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA=
github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg=
@@ -2360,6 +2369,7 @@ golang.org/x/sys v0.0.0-20201117170446-d9b008d0a637/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201202213521-69691e467435/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -2389,6 +2399,7 @@ golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210816074244-15123e1e1f71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -2398,6 +2409,7 @@ golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211013075003-97ac67df715c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211116061358-0a5406a5449c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=