Add latest changes from gitlab-org/gitlab@master

Author: GitLab Bot
Date:   2024-02-08 21:10:40 +00:00
Parent: 0b84d7017e
Commit: 8c5f330a63

62 changed files with 734 additions and 293 deletions

View File

@@ -225,7 +225,7 @@ variables:
   DOCS_REVIEW_APPS_DOMAIN: "docs.gitlab-review.app"
   DOCS_GITLAB_REPO_SUFFIX: "ee"
-  REVIEW_APPS_IMAGE: "${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images/ruby-3.0:gcloud-383-kubectl-1.23-helm-3.5"
+  REVIEW_APPS_IMAGE: "${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images/debian-bookworm-ruby-3.0:gcloud-383-kubectl-1.26-helm-3.9"
   REVIEW_APPS_DOMAIN: "gitlab-review.app"
   REVIEW_APPS_GCP_PROJECT: "gitlab-review-apps"
   REVIEW_APPS_GCP_REGION: "us-central1"

View File

@@ -47,7 +47,6 @@ Style/ArgumentsForwarding:
     - 'app/workers/concerns/limited_capacity/worker.rb'
     - 'app/workers/concerns/reactive_cacheable_worker.rb'
     - 'app/workers/concerns/reenqueuer.rb'
-    - 'app/workers/gitlab/github_import/refresh_import_jid_worker.rb'
     - 'app/workers/pages_worker.rb'
     - 'config/initializers/6_labkit_middleware.rb'
     - 'config/initializers/active_record_table_definition.rb'

View File

@@ -234,7 +234,6 @@ Style/GuardClause:
    - 'app/workers/container_registry/migration/guard_worker.rb'
    - 'app/workers/deployments/hooks_worker.rb'
    - 'app/workers/deployments/link_merge_request_worker.rb'
-    - 'app/workers/gitlab/github_import/refresh_import_jid_worker.rb'
    - 'app/workers/google_cloud/create_cloudsql_instance_worker.rb'
    - 'app/workers/packages/cleanup/execute_policy_worker.rb'
    - 'app/workers/packages/maven/metadata/sync_worker.rb'

View File

@@ -2,7 +2,6 @@ import { escape } from 'lodash';
 import { spriteIcon } from '~/lib/utils/common_utils';
 import { differenceInMilliseconds } from '~/lib/utils/datetime_utility';
 import { s__, sprintf } from '~/locale';
-import { unrestrictedPages } from './constants';

 // Renders math using KaTeX in an element.
 //
@@ -85,15 +84,12 @@ class SafeMathRenderer {
       }
       const el = chosenEl || this.queue.shift();
-      const forceRender =
-        Boolean(chosenEl) ||
-        unrestrictedPages.includes(this.pageName) ||
-        !gon.math_rendering_limits_enabled;
+      const forceRender = Boolean(chosenEl) || !gon.math_rendering_limits_enabled;
       const text = el.textContent;

       el.removeAttribute('style');
       if (!forceRender && (this.totalMS >= MAX_RENDER_TIME_MS || text.length > MAX_MATH_CHARS)) {
-        // Show unrendered math code
+        // Show un-rendered math code
         const codeElement = document.createElement('pre');
         codeElement.className = 'code';

View File

@@ -1,5 +1,11 @@
 <script>
-import { GlDropdown, GlDropdownItem, GlIcon, GlLoadingIcon, GlTooltipDirective } from '@gitlab/ui';
+import {
+  GlDisclosureDropdown,
+  GlDisclosureDropdownItem,
+  GlIcon,
+  GlLoadingIcon,
+  GlTooltipDirective,
+} from '@gitlab/ui';
 import { createAlert } from '~/alert';
 import axios from '~/lib/utils/axios_utils';
 import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
@@ -16,8 +22,8 @@ export default {
   },
   components: {
     GlCountdown,
-    GlDropdown,
-    GlDropdownItem,
+    GlDisclosureDropdown,
+    GlDisclosureDropdownItem,
     GlIcon,
     GlLoadingIcon,
   },
@@ -52,6 +58,7 @@ export default {
       isLoading: false,
      actions: [],
      hasDropdownBeenShown: false,
+      isDropdownVisible: false,
     };
   },
   computed: {
@@ -101,58 +108,76 @@ export default {
       });
     },
     fetchActions() {
+      this.isDropdownVisible = true;
       this.hasDropdownBeenShown = true;
       this.$apollo.queries.actions.refetch();
       this.trackClick();
     },
+    hideAction() {
+      this.isDropdownVisible = false;
+    },
     trackClick() {
       this.track('click_manual_actions', { label: TRACKING_CATEGORIES.table });
     },
+    jobItem(job) {
+      return {
+        text: job.name,
+        extraAttrs: {
+          disabled: !job.canPlayJob,
+        },
+      };
+    },
   },
 };
 </script>
 <template>
-  <gl-dropdown
-    v-gl-tooltip
-    :title="__('Run manual or delayed jobs')"
+  <gl-disclosure-dropdown
+    v-gl-tooltip.left="isDropdownVisible ? '' : __('Run manual or delayed jobs')"
     :loading="isLoading"
     data-testid="pipelines-manual-actions-dropdown"
     right
     lazy
     icon="play"
     @shown="fetchActions"
+    @hidden="hideAction"
   >
-    <gl-dropdown-item v-if="isActionsLoading">
-      <div class="gl-display-flex">
-        <gl-loading-icon class="mr-2" />
-        <span>{{ __('Loading...') }}</span>
-      </div>
-    </gl-dropdown-item>
+    <gl-disclosure-dropdown-item v-if="isActionsLoading">
+      <template #list-item>
+        <div class="gl-display-flex">
+          <gl-loading-icon class="mr-2" />
+          <span>{{ __('Loading...') }}</span>
+        </div>
+      </template>
+    </gl-disclosure-dropdown-item>
-    <gl-dropdown-item
+    <gl-disclosure-dropdown-item
       v-for="action in actions"
       v-else
       :key="action.id"
-      :disabled="!action.canPlayJob"
-      @click="onClickAction(action)"
+      :item="jobItem(action)"
+      @action="onClickAction(action)"
     >
-      <div class="gl-display-flex gl-justify-content-space-between gl-flex-wrap">
-        {{ action.name }}
-        <span v-if="action.scheduledAt">
-          <gl-icon name="clock" />
-          <gl-countdown :end-date-string="action.scheduledAt" />
-        </span>
-      </div>
-    </gl-dropdown-item>
+      <template #list-item>
+        <div class="gl-display-flex gl-justify-content-space-between gl-flex-wrap">
+          {{ action.name }}
+          <span v-if="action.scheduledAt">
+            <gl-icon name="clock" />
+            <gl-countdown :end-date-string="action.scheduledAt" />
+          </span>
+        </div>
+      </template>
+    </gl-disclosure-dropdown-item>
     <template #footer>
-      <gl-dropdown-item v-if="isDropdownLimitReached">
-        <span class="gl-font-sm gl-text-gray-300!" data-testid="limit-reached-msg">
-          {{ __('Showing first 50 actions.') }}
-        </span>
-      </gl-dropdown-item>
+      <gl-disclosure-dropdown-item v-if="isDropdownLimitReached">
+        <template #list-item>
+          <span class="gl-font-sm gl-text-gray-300!" data-testid="limit-reached-msg">
+            {{ __('Showing first 50 actions.') }}
+          </span>
+        </template>
+      </gl-disclosure-dropdown-item>
     </template>
-  </gl-dropdown>
+  </gl-disclosure-dropdown>
 </template>

View File

@@ -132,6 +132,7 @@ export function initAdminLabels() {
     // display the empty state if there are no more labels
     if (labelsCount < 1 && !pagination && emptyState) {
       emptyState.classList.remove('gl-display-none');
+      labelsContainer.classList.add('gl-display-none');
     }
   }

View File

@@ -13,6 +13,10 @@ class Groups::ApplicationController < ApplicationController
   before_action :set_sorting
   requires_cross_project_access

+  before_action do
+    push_namespace_setting(:math_rendering_limits_enabled, @group)
+  end
+
   private

   def group

View File

@@ -10,6 +10,10 @@ class Projects::ApplicationController < ApplicationController
   before_action :repository
   layout 'project'

+  before_action do
+    push_namespace_setting(:math_rendering_limits_enabled, @project&.parent)
+  end
+
   helper_method :repository, :can_collaborate_with_project?, :user_access

   rescue_from Gitlab::Template::Finders::RepoTemplateFinder::FileNotFoundError do |exception|

View File

@@ -5,7 +5,7 @@
 #
 # Namespace is a nested hierarchy of one parent to many children. A search
 # using only the parent-child relationships is a slow operation. This process
-# was previously optimized using Postgresql recursive common table expressions
+# was previously optimized using PostgreSQL recursive common table expressions
 # (CTE) with acceptable performance. However, it lead to slower than possible
 # performance, and resulted in complicated queries that were difficult to make
 # performant.
@@ -31,7 +31,7 @@
 # Note that this search method works so long as the IDs are unique and the
 # traversal path is ordered from root to leaf nodes.
 #
-# We implement this in the database using Postgresql arrays, indexed by a
+# We implement this in the database using PostgreSQL arrays, indexed by a
 # generalized inverted index (gin).
 module Namespaces
   module Traversal
@@ -55,8 +55,8 @@ module Namespaces
     end

     class_methods do
-      # This method looks into a list of namespaces trying to optimise a returned traversal_ids
-      # into a list of shortest prefixes, due to fact that the shortest prefixes include all childrens.
+      # This method looks into a list of namespaces trying to optimize a returned traversal_ids
+      # into a list of shortest prefixes, due to fact that the shortest prefixes include all children.
       # Example:
       # INPUT: [[4909902], [4909902,51065789], [4909902,51065793], [7135830], [15599674, 1], [15599674, 1, 3], [15599674, 2]]
       # RESULT: [[4909902], [7135830], [15599674, 1], [15599674, 2]]
@@ -148,7 +148,7 @@ module Namespaces
       hierarchy_order == :desc ? traversal_ids[0..-2] : traversal_ids[0..-2].reverse
     end

-    # Returns all ancestors upto but excluding the top.
+    # Returns all ancestors up to but excluding the top.
     # When no top is given, all ancestors are returned.
     # When top is not found, returns all ancestors.
     #

View File

@@ -2,6 +2,10 @@
 module Projects
   class BranchRulePolicy < ::ProtectedBranchPolicy
+    rule { can?(:read_protected_branch) }.enable :read_branch_rule
+    rule { can?(:create_protected_branch) }.enable :create_branch_rule
+    rule { can?(:update_protected_branch) }.enable :update_branch_rule
+    rule { can?(:destroy_protected_branch) }.enable :destroy_branch_rule
   end
 end
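
The new abilities map one-to-one onto the existing protected-branch permissions. As a hedged illustration (not part of this commit), they could be checked through DeclarativePolicy's standard entry point; the `Projects::BranchRule` wrapper and its constructor are assumptions based on how these policies are typically resolved:

```ruby
# Hypothetical sketch: checking one of the new abilities.
# Assumes Projects::BranchRule wraps a protected branch for policy resolution.
branch_rule = Projects::BranchRule.new(project, protected_branch)

Ability.allowed?(current_user, :read_branch_rule, branch_rule)
# => true whenever the user can read the underlying protected branch,
#    because the rule above enables :read_branch_rule from :read_protected_branch.
```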

View File

@@ -1,35 +1,32 @@
 - page_title _("Labels")
 - add_page_specific_style 'page_bundles/labels'

-= render Pajamas::CardComponent.new(card_options: { class: 'gl-new-card labels other-labels js-toggle-container js-admin-labels-container' }, header_options: { class: 'gl-new-card-header' }, body_options: { class: 'gl-new-card-body gl-px-0' }) do |c|
-  - c.with_header do
-    .gl-new-card-title-wrapper.gl-flex-direction-column
-      %h5.gl-new-card-title
-        = _('Labels')
-      .gl-new-card-count
-        = sprite_icon('label', css_class: 'gl-mr-2')
-        %span.js-admin-labels-count= @labels.count
-    .gl-new-card-description
-      = s_('AdminLabels|Labels created here will be automatically added to new projects.')
-    .gl-new-card-actions
-      = render Pajamas::ButtonComponent.new(variant: :default,
-        size: :small,
-        href: new_admin_label_path) do
-        = _('New label')
-  - c.with_body do
-    - if @labels.present?
-      %ul.manage-labels-list.js-other-labels.gl-px-3.gl-rounded-base
-        = render @labels
+- if @labels.present?
+  = render Pajamas::CardComponent.new(card_options: { class: 'gl-new-card labels other-labels js-toggle-container js-admin-labels-container' }, header_options: { class: 'gl-new-card-header' }, body_options: { class: 'gl-new-card-body gl-px-0' }) do |c|
+    - c.with_header do
+      .gl-new-card-title-wrapper.gl-flex-direction-column
+        %h5.gl-new-card-title
+          = _('Labels')
+        .gl-new-card-count
+          = sprite_icon('label', css_class: 'gl-mr-2')
+          %span.js-admin-labels-count= @labels.count
+      .gl-new-card-description
+        = s_('AdminLabels|Labels created here will be automatically added to new projects.')
+      .gl-new-card-actions
+        = render Pajamas::ButtonComponent.new(variant: :default,
+          size: :small,
+          href: new_admin_label_path) do
+          = _('New label')
+    - c.with_body do
+      %ul.manage-labels-list.js-other-labels.gl-px-3.gl-rounded-base
+        = render @labels

 .js-admin-labels-empty-state{ class: ('gl-display-none' if @labels.present?) }
-  %section.row.empty-state.gl-text-center
-    .col-12
-      .svg-content.svg-150
-        = image_tag 'illustrations/empty-state/empty-labels-md.svg'
-    .col-12
-      .gl-mx-auto.gl-my-0.gl-p-5
-        %h1.gl-font-size-h-display.gl-line-height-36.h4
-          = s_('AdminLabels|Define your default set of project labels')
-        %p.gl-text-secondary
-          = s_('AdminLabels|They can be used to categorize issues and merge requests.')
+  = render Pajamas::EmptyStateComponent.new(svg_path: 'illustrations/empty-state/empty-labels-md.svg',
+    title: s_('AdminLabels|Define your default set of project labels'),
+    primary_button_text: _('New label'),
+    primary_button_link: new_admin_label_path) do |c|
+    - c.with_description do
+      = s_('AdminLabels|They can be used to categorize issues and merge requests.')

 .gl-mt-5= paginate @labels, theme: 'gitlab'

View File

@@ -3198,6 +3198,15 @@
   :weight: 2
   :idempotent: true
   :tags: []
+- :name: import_refresh_import_jid
+  :worker_name: Gitlab::Import::RefreshImportJidWorker
+  :feature_category: :importers
+  :has_external_dependencies: false
+  :urgency: :low
+  :resource_boundary: :unknown
+  :weight: 1
+  :idempotent: true
+  :tags: []
 - :name: incident_management_close_incident
   :worker_name: IncidentManagement::CloseIncidentWorker
   :feature_category: :incident_management

View File

@@ -16,6 +16,8 @@ module Gitlab
       sidekiq_options dead: false, retry: 6

+      sidekiq_options status_expiration: Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION
+
       sidekiq_retries_exhausted do |msg, e|
         Gitlab::Import::ImportFailureService.track(
           project_id: msg['args'][0],
@@ -33,6 +35,8 @@ module Gitlab
         return unless project

+        Import::RefreshImportJidWorker.perform_in_the_future(project_id, jid)
+
         import(project)

         info(project_id, message: 'stage finished')

View File

@@ -16,6 +16,8 @@ module Gitlab
       sidekiq_options dead: false, retry: 6

+      sidekiq_options status_expiration: Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION
+
       sidekiq_retries_exhausted do |msg, e|
         Gitlab::Import::ImportFailureService.track(
           project_id: msg['args'][0],
@@ -31,6 +33,8 @@ module Gitlab
         return unless (project = find_project(project_id))

+        Import::RefreshImportJidWorker.perform_in_the_future(project_id, jid)
+
         import(project)

         info(project_id, message: 'stage finished')

View File

@@ -62,7 +62,7 @@ module Gitlab
           return
         end

-        RefreshImportJidWorker.perform_in_the_future(project.id, jid)
+        Import::RefreshImportJidWorker.perform_in_the_future(project.id, jid)

         client = GithubImport.new_client_for(project)

View File

@@ -12,8 +12,6 @@ module Gitlab
         def import(project)
           waiter = importer_class.new(project).execute

-          project.import_state.refresh_jid_expiration
-
           AdvanceStageWorker.perform_async(
             project.id,
             { waiter.key => waiter.jobs_remaining },

View File

@@ -12,8 +12,6 @@ module Gitlab
         def import(project)
           waiter = importer_class.new(project).execute

-          project.import_state.refresh_jid_expiration
-
           AdvanceStageWorker.perform_async(
             project.id,
             { waiter.key => waiter.jobs_remaining },

View File

@@ -12,8 +12,6 @@ module Gitlab
         def import(project)
           waiter = importer_class.new(project).execute

-          project.import_state.refresh_jid_expiration
-
           AdvanceStageWorker.perform_async(
             project.id,
             { waiter.key => waiter.jobs_remaining },

View File

@@ -12,8 +12,6 @@ module Gitlab
         def import(project)
           waiter = importer_class.new(project).execute

-          project.import_state.refresh_jid_expiration
-
           AdvanceStageWorker.perform_async(
             project.id,
             { waiter.key => waiter.jobs_remaining },

View File

@@ -12,8 +12,6 @@ module Gitlab
         def import(project)
           waiter = importer_class.new(project).execute

-          project.import_state.refresh_jid_expiration
-
           AdvanceStageWorker.perform_async(
             project.id,
             { waiter.key => waiter.jobs_remaining },

View File

@@ -12,8 +12,6 @@ module Gitlab
         def import(project)
           waiter = importer_class.new(project).execute

-          project.import_state.refresh_jid_expiration
-
           AdvanceStageWorker.perform_async(
             project.id,
             { waiter.key => waiter.jobs_remaining },

View File

@@ -12,8 +12,6 @@ module Gitlab
         def import(project)
           waiter = importer_class.new(project).execute

-          project.import_state.refresh_jid_expiration
-
           AdvanceStageWorker.perform_async(
             project.id,
             { waiter.key => waiter.jobs_remaining },

View File

@@ -12,8 +12,6 @@ module Gitlab
         def import(project)
           waiter = importer_class.new(project).execute

-          project.import_state.refresh_jid_expiration
-
           AdvanceStageWorker.perform_async(
             project.id,
             { waiter.key => waiter.jobs_remaining },

View File

@@ -9,42 +9,11 @@ module Gitlab
       include GithubImport::Queue

-      sidekiq_options retry: 5
-
-      # The interval to schedule new instances of this job at.
-      INTERVAL = 5.minutes.to_i
-
       def self.perform_in_the_future(*args)
-        perform_in(INTERVAL, *args)
+        # Delegate to new version of this job so stale sidekiq nodes can still
+        # run instead of no-op
+        Gitlab::Import::RefreshImportJidWorker.perform_in_the_future(*args)
       end
# project_id - The ID of the project that is being imported.
# check_job_id - The ID of the job for which to check the status.
def perform(project_id, check_job_id)
import_state = find_import_state(project_id)
return unless import_state
if SidekiqStatus.running?(check_job_id)
# As long as the worker is running we want to keep refreshing
# the worker's JID as well as the import's JID.
Gitlab::SidekiqStatus.expire(check_job_id, Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
Gitlab::SidekiqStatus.set(import_state.jid, Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
self.class.perform_in_the_future(project_id, check_job_id)
end
# If the job is no longer running there's nothing else we need to do. If
# the clone job completed successfully it will have scheduled the next
# stage, if it died there's nothing we can do anyway.
end
# rubocop: disable CodeReuse/ActiveRecord
def find_import_state(project_id)
ProjectImportState.select(:jid)
.with_status(:started)
.find_by(project_id: project_id)
end
# rubocop: enable CodeReuse/ActiveRecord
    end
  end
end

View File

@ -0,0 +1,41 @@
# frozen_string_literal: true
module Gitlab
module Import
class RefreshImportJidWorker
include ApplicationWorker
data_consistency :delayed
idempotent!
feature_category :importers
sidekiq_options dead: false
sidekiq_options retry: 5
# The interval to schedule new instances of this job at.
INTERVAL = 5.minutes.to_i
def self.perform_in_the_future(*args)
perform_in(INTERVAL, *args)
end
# project_id - The ID of the project that is being imported.
# check_job_id - The ID of the job for which to check the status.
# params - to avoid multiple releases if parameters change
def perform(project_id, check_job_id, _params = {})
return unless SidekiqStatus.running?(check_job_id)
import_state_jid = ProjectImportState.jid_by(project_id: project_id, status: :started)&.jid
return unless import_state_jid
# As long as the worker is running we want to keep refreshing
# the worker's JID as well as the import's JID.
Gitlab::SidekiqStatus.expire(check_job_id, Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
Gitlab::SidekiqStatus.set(import_state_jid, Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
self.class.perform_in_the_future(project_id, check_job_id)
end
end
end
end
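
Read alongside the stage-worker hunks above, the pieces added in this commit cooperate roughly as in the following sketch. Both calls appear verbatim in other hunks of this diff; the wrapping method name is hypothetical and only groups them for illustration:

```ruby
# Hypothetical orientation sketch (not part of this commit's files):
def start_github_import(project)
  # Enqueue the first stage with Sidekiq status tracking, so that
  # SidekiqStatus.running?(check_job_id) can later see it is still alive.
  jid = Gitlab::GithubImport::Stage::ImportRepositoryWorker
          .with_status
          .perform_async(project.id)

  # Schedule the refresher; while the tracked job keeps running, it re-enqueues
  # itself every five minutes and extends both the checked job's and the
  # import state's JID expiration.
  Gitlab::Import::RefreshImportJidWorker.perform_in_the_future(project.id, jid)
end
```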

View File

@@ -373,6 +373,8 @@
   - 1
 - - import_issues_csv
   - 2
+- - import_refresh_import_jid
+  - 1
 - - incident_management
   - 2
 - - incident_management_apply_incident_sla_exceeded_label

View File

@ -7,4 +7,19 @@ feature_categories:
description: The SHA referencing changes to a single design or multiple design files description: The SHA referencing changes to a single design or multiple design files
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/10552 introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/10552
milestone: '11.10' milestone: '11.10'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
allow_cross_joins:
- gitlab_main_clusterwide
allow_cross_transactions:
- gitlab_main_clusterwide
allow_cross_foreign_keys:
- gitlab_main_clusterwide
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: design_id
table: design_management_designs
sharding_key: project_id
belongs_to: design

View File

@ -8,4 +8,19 @@ feature_categories:
description: Notes created during the review of an MR that are not yet published description: Notes created during the review of an MR that are not yet published
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/4213 introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/4213
milestone: '11.4' milestone: '11.4'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
allow_cross_joins:
- gitlab_main_clusterwide
allow_cross_transactions:
- gitlab_main_clusterwide
allow_cross_foreign_keys:
- gitlab_main_clusterwide
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: merge_request_id
table: merge_requests
sharding_key: target_project_id
belongs_to: merge_request

View File

@ -7,4 +7,19 @@ feature_categories:
description: Store allocated assignees for merge requests description: Store allocated assignees for merge requests
introduced_by_url: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/26496 introduced_by_url: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/26496
milestone: '11.10' milestone: '11.10'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
allow_cross_joins:
- gitlab_main_clusterwide
allow_cross_transactions:
- gitlab_main_clusterwide
allow_cross_foreign_keys:
- gitlab_main_clusterwide
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: merge_request_id
table: merge_requests
sharding_key: target_project_id
belongs_to: merge_request

View File

@ -7,4 +7,19 @@ feature_categories:
description: Store refs cleanup schedules for merge requests description: Store refs cleanup schedules for merge requests
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/46758 introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/46758
milestone: '13.6' milestone: '13.6'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
allow_cross_joins:
- gitlab_main_clusterwide
allow_cross_transactions:
- gitlab_main_clusterwide
allow_cross_foreign_keys:
- gitlab_main_clusterwide
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: merge_request_id
table: merge_requests
sharding_key: target_project_id
belongs_to: merge_request

View File

@ -7,4 +7,19 @@ feature_categories:
description: Includes machine learning model predictions description: Includes machine learning model predictions
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97622 introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97622
milestone: '15.4' milestone: '15.4'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
allow_cross_joins:
- gitlab_main_clusterwide
allow_cross_transactions:
- gitlab_main_clusterwide
allow_cross_foreign_keys:
- gitlab_main_clusterwide
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: merge_request_id
table: merge_requests
sharding_key: target_project_id
belongs_to: merge_request

View File

@ -8,4 +8,19 @@ description: This is the table that stores information about the review summarie
produced from different LLM's. produced from different LLM's.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/124402 introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/124402
milestone: '16.2' milestone: '16.2'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
allow_cross_joins:
- gitlab_main_clusterwide
allow_cross_transactions:
- gitlab_main_clusterwide
allow_cross_foreign_keys:
- gitlab_main_clusterwide
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: review_id
table: reviews
sharding_key: project_id
belongs_to: review

View File

@ -7,4 +7,19 @@ feature_categories:
description: Store allocated reviewers for merge requests description: Store allocated reviewers for merge requests
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/40358 introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/40358
milestone: '13.4' milestone: '13.4'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
allow_cross_joins:
- gitlab_main_clusterwide
allow_cross_transactions:
- gitlab_main_clusterwide
allow_cross_foreign_keys:
- gitlab_main_clusterwide
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: merge_request_id
table: merge_requests
sharding_key: target_project_id
belongs_to: merge_request

View File

@ -7,4 +7,19 @@ feature_categories:
description: Store the events of merge request closing any issues description: Store the events of merge request closing any issues
introduced_by_url: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/5986 introduced_by_url: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/5986
milestone: '8.12' milestone: '8.12'
-gitlab_schema: gitlab_main
+gitlab_schema: gitlab_main_cell
allow_cross_joins:
- gitlab_main_clusterwide
allow_cross_transactions:
- gitlab_main_clusterwide
allow_cross_foreign_keys:
- gitlab_main_clusterwide
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: merge_request_id
table: merge_requests
sharding_key: target_project_id
belongs_to: merge_request

View File

@@ -186,6 +186,10 @@ On the **primary** site:
    takes to finish syncing.
 1. Select **Add broadcast message**.

+### Runner failover
+
+If you have any runners connected to your current secondary, see [how to handle them](../secondary_proxy/runners.md#handling-a-planned-failover-with-secondary-runners) during the failover.
+
 ## Prevent updates to the **primary** site

 To ensure that all data is replicated to a secondary site, updates (write requests) need to

View File

@@ -43,3 +43,44 @@ Using separate secondary URLs, the runners should be:
 1. Registered with the secondary external URL.
 1. Configured with [`clone_url`](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#how-clone_url-works) set to the `external_url` of the secondary instance.
## Handling a Planned Failover with secondary runners
When executing [a planned failover](../disaster_recovery/planned_failover.md), secondary runners try to keep talking to their local instance. This leads to decreased runner capacity, and may need to be accounted for.
### With Location Aware public URL
When using the [Location Aware public URL](location_aware_external_url.md), all runners automatically connect to the closest Geo site.
When failing over to a new primary:
- While the old primary is still in the DNS record, any runners previously connected to your old primary still attempt to pick up jobs from the old primary. If it is unreachable, the runners [detect this](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#how-unhealthy_requests_limit-and-unhealthy_interval-works), and stop requesting for an extended period of time after the instance returns.
- If you have [multiple secondary nodes](../disaster_recovery/index.md#promoting-secondary-geo-replica-in-multi-secondary-configurations), after the initial failover the remaining secondaries are in an unhealthy state until they are [replicated](../disaster_recovery/index.md#step-2-initiate-the-replication-process) with the new primary. The runners attached to them are then unable to check in, and their health check also kicks in.
- If you remove any of the unhealthy nodes from the Geo DNS entry, the runners pick the next closest instance. Depending on your architecture, this may not be what you want, as you could overwhelm your site in its reduced state.
To alleviate any of these issues, you can [pause](#pausing-runners) or shutdown some of the runners until the site is back up to 100%.
If you are not concerned about these issues, there is nothing to do here.
### With separate URLs
- If you are returning the old primary to service, you can pause the old primary runners until it is back online. This prevents the health check from kicking in.
- If the old primary is not returning, or you want to avoid temporarily reduced runner capacity, the primary runners should be reconfigured to connect to the new primary.
- If multiple secondaries are being used, the runners should be [paused](#pausing-runners), shutdown, or reconfigured to connect to the new primary while they are being replicated to the new primary.
### Pausing runners
You must have administrator access to use any of the following methods:
- Through the Admin Area:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. Select **Settings > Runners**.
1. Identify the runners you would like to pause.
1. Select the `pause` button next to each runner you would like to pause.
1. After the failover is complete, unpause the runners you paused in the previous step.
- Use the [Runners API](../../../api/runners.md):
1. Fetch or create a [Personal Access Token](../../../user/profile/personal_access_tokens.md) with administrator access.
1. Get the list of runners. You can filter the list [using the API](../../../api/runners.md#list-all-runners).
1. Identify the runners you would like to pause, and make note of their `id`.
1. [Follow the API documentation](../../../api/runners.md#pause-a-runner) to pause each runner.
1. After the failover is complete, unpause the list of runners using the API by setting `paused=false`.
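
For the API route in the list above, a minimal Ruby sketch of the pause step follows. The endpoint (`PUT /runners/:id`) and the `paused` attribute are taken from the linked Runners API documentation, not from this commit; the host, runner ID, and token are placeholders:

```ruby
# Hypothetical sketch: pause runner 42 through the Runners REST API.
require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/runners/42')

request = Net::HTTP::Put.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_ADMIN_TOKEN') # admin access token
request.set_form_data('paused' => 'true')                  # set back to 'false' after failover

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code # 200 when the runner was updated
```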

View File

@@ -1001,6 +1001,7 @@ Set the limit to `0` to disable it.
 > - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/132939) in GitLab 16.5.
 > - [Removed](https://gitlab.com/gitlab-org/gitlab/-/issues/368009) the 50-node limit from Wiki and repository files.
+> - [Added](https://gitlab.com/gitlab-org/gitlab/-/issues/368009) a group-level setting to allow disabling math rendering limits, and re-enabled by default the math limits for wiki and repository files in GitLab 16.9.

 GitLab imposes default limits when rendering math in Markdown fields. These limits provide better security and performance.
@@ -1008,9 +1009,6 @@ The limits for issues, merge requests, epics, wikis, and repository files:
 - Maximum number of macro expansions: `1000`.
 - Maximum user-specified size in [em](https://en.wikipedia.org/wiki/Em_(typography)): `20`.

-The limits for issues, merge requests, and epics:
-
 - Maximum number of nodes rendered: `50`.
 - Maximum number of characters in a math block: `1000`.
 - Maximum rendering time: `2000 ms`.
@@ -1023,6 +1021,12 @@ Use the [GitLab Rails console](operations/rails_console.md#starting-a-rails-cons
 ApplicationSetting.update(math_rendering_limits_enabled: false)
 ```

+These limits can also be disabled per-group using the GraphQL or REST API.
+
+If the limits are disabled, math is rendered with mostly no limits in issues, merge requests, epics, wikis, and repository files.
+This means a malicious actor _could_ add math that would cause a DoS when viewing in the browser. You must ensure
+that only people you trust can add content.
+
 ## Wiki limits

 - [Wiki page content size limit](wikis/index.md#wiki-page-content-size-limit).
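
For the per-group toggle mentioned above, a hedged Rails console sketch follows. The attribute name comes from the namespace-settings code elsewhere in this commit; the group path is a placeholder, and the GraphQL/REST fields themselves are not shown here:

```ruby
# Hypothetical Rails console sketch: disable math rendering limits for one
# top-level group instead of instance-wide. Assumes the setting lives on the
# group's namespace settings, as in this commit's model and spec code.
group = Group.find_by_full_path('my-group')
group.namespace_settings.update!(math_rendering_limits_enabled: false)
```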

View File

@@ -35,6 +35,7 @@ Read more about update policies and warnings in the PostgreSQL
 | First GitLab version | PostgreSQL versions | Default version for fresh installs | Default version for upgrades | Notes |
 | -------------- | ------------------- | ---------------------------------- | ---------------------------- | ----- |
+| 16.6.7, 16.7.5, 16.8.2 | 13.13, 14.10 | 14.10 | 14.10 | |
 | 16.7.0 | 13.12, 14.9 | 14.9 | 14.9 | |
 | 16.4.3, 16.5.3, 16.6.1 | 13.12, 14.9 | 13.12 | 13.12 | For upgrades, you can manually upgrade to 14.9 following the [upgrade documentation](../../update/versions/gitlab_16_changes.md#linux-package-installations-2). |
 | 16.2.0 | 13.11, 14.8 | 13.11 | 13.11 | For upgrades, you can manually upgrade to 14.8 following the [upgrade documentation](../../update/versions/gitlab_16_changes.md#linux-package-installations-2). |

View File

@@ -28,11 +28,11 @@ The flow for using GitLab with GCP Secret Manager is:
 To use GitLab with GCP Secret Manager, you must:

 - Have secrets stored in [GCP Secret Manager](https://cloud.google.com/security/products/secret-manager).
-- Configure [GCP Workload Identity Federation](#configure-gcp-iam-workload-identify-federation-wif) to include GitLab as an identity provider.
+- Configure [GCP Workload Identity Federation](#configure-gcp-iam-workload-identity-federation-wif) to include GitLab as an identity provider.
 - Configure [GCP IAM](#grant-access-to-gcp-iam-principal) permissions to grant access to GCP Secret Manager.
 - Configure [GitLab CI/CD with GCP Secret Manager](#configure-gitlab-cicd-to-use-gcp-secret-manager-secrets).

-## Configure GCP IAM Workload Identify Federation (WIF)
+## Configure GCP IAM Workload Identity Federation (WIF)

 GCP IAM WIF must be configured to recognize ID tokens issued by GitLab and assign an appropriate principal to them.
 The principal is used to authorize access to the Secret Manager resources:

View File

@@ -43,13 +43,21 @@ There are also instructions when you want to
 :::TabTitle Helm chart (Kubernetes)

-GitLab can be deployed into a Kubernetes cluster using Helm.
-Instructions on how to upgrade a cloud-native deployment are in
-[a separate document](https://docs.gitlab.com/charts/installation/upgrade.html).
+GitLab can be deployed into a Kubernetes cluster using Helm. For production deployments,
+the setup follows the [Cloud Native Hybrid](../administration/reference_architectures/index.md#cloud-native-hybrid)
+guidance where stateless components of cloud-native GitLab run in Kubernetes with
+the GitLab Helm chart, and stateful components are deployed in compute VMs with the
+Linux package.

 Use the [version mapping](https://docs.gitlab.com/charts/installation/version_mappings.html)
 from the chart version to GitLab version to determine the [upgrade path](#upgrade-paths).

+Follow [Multi-node upgrades with downtime](with_downtime.md) to perform the upgrade in a Cloud Native Hybrid setup.
+
+A full cloud-native deployment is [not supported](../administration/reference_architectures/index.md#stateful-components-in-kubernetes)
+for production. However, instructions on how to upgrade such an environment are in
+[a separate document](https://docs.gitlab.com/charts/installation/upgrade.html).
+
 :::TabTitle Docker

 GitLab provides official Docker images for both Community and Enterprise

View File

@@ -39,6 +39,13 @@ At a high level, the process is:

 ## Stop writes to the database
Before upgrade, you need to stop writes to the database. The process is different
depending on your [reference architecture](../administration/reference_architectures/index.md).
::Tabs
:::TabTitle Linux package
 Shut down Puma and Sidekiq on all servers running these processes:

 ```shell
@@ -46,6 +53,24 @@ sudo gitlab-ctl stop sidekiq
 sudo gitlab-ctl stop puma
 ```
:::TabTitle Cloud Native Hybrid
For [Cloud Native Hybrid](../administration/reference_architectures/index.md#cloud-native-hybrid) environments:
1. Note the current number of replicas for database clients for subsequent restart:
```shell
kubectl get deploy -n <namespace> -l release=<helm release name> -l 'app in (prometheus,webservice,sidekiq)' -o jsonpath='{range .items[*]}{.metadata.name}{"\t"}{.spec.replicas}{"\n"}{end}'
```
1. Stop the clients of the database:
```shell
kubectl scale deploy -n <namespace> -l release=<helm release name> -l 'app in (prometheus,webservice,sidekiq)' --replicas=0
```
::EndTabs
 ## Upgrade the Consul nodes

 [Consult the Consul documentation for the complete instructions](../administration/consul.md#upgrade-the-consul-nodes).

@@ -177,7 +202,11 @@ DETAILS:
 Follow [the zero downtime instructions](zero_downtime.md#redis-ha-using-sentinel)
 for upgrading your Redis HA cluster.

-## Upgrade the Rails nodes (Puma / Sidekiq)
+## Upgrade the Rails components
::Tabs
:::TabTitle Linux package
 All the Puma and Sidekiq processes were previously shut down. On each node:

@@ -256,6 +285,22 @@ They can be upgraded in parallel:
 sudo gitlab-ctl restart
 ```
:::TabTitle Cloud Native Hybrid
Now that all stateful components are upgraded, you need to follow
[GitLab chart upgrade steps](https://docs.gitlab.com/charts/installation/upgrade.html)
to upgrade the stateless components (Webservice, Sidekiq, other supporting services).
After you perform the GitLab chart upgrade, resume the database clients:
```shell
kubectl scale deploy -lapp=sidekiq,release=<helm release name> -n <namespace> --replicas=<value>
kubectl scale deploy -lapp=webservice,release=<helm release name> -n <namespace> --replicas=<value>
kubectl scale deploy -lapp=prometheus,release=<helm release name> -n <namespace> --replicas=<value>
```
::EndTabs
 ## Upgrade the Monitor node

 [Upgrade the GitLab package](package/index.md#upgrade-to-a-specific-version-using-the-official-repositories).

View File

@@ -63,7 +63,7 @@ To use Code Suggestions, use one of these editor extensions:
 | VSCode | [VS Code GitLab Workflow extension](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow)|
 | [GitLab WebIDE (VS Code in the Cloud)](../../../project/web_ide/index.md) | No configuration required. |
 | Microsoft Visual Studio | [Visual Studio GitLab extension](https://marketplace.visualstudio.com/items?itemName=GitLab.GitLabExtensionForVisualStudio) |
-| JetBrains IDEs | [GitLab plugin](https://plugins.jetbrains.com/plugin/22325-gitlab) |
+| JetBrains IDEs | [GitLab Duo Plugin for JetBrains](https://plugins.jetbrains.com/plugin/22325-gitlab) |
 | Neovim | [`gitlab.vim` plugin](https://gitlab.com/gitlab-org/editor-extensions/gitlab.vim) |

 A [GitLab Language Server](https://gitlab.com/gitlab-org/editor-extensions/gitlab-lsp) is used in VS Code, Visual Studio, and Neovim. The Language Server supports faster iteration across more platforms. You can also configure it to support Code Suggestions in IDEs where GitLab doesn't provide official support.

View File

@@ -11,6 +11,7 @@ module Banzai
     class MathFilter < HTML::Pipeline::Filter
       # Handle the $`...`$ and ```math syntax in this filter.
       # Also add necessary classes any existing math blocks.
+      include ::Gitlab::Utils::StrongMemoize

       CSS_MATH = 'pre[data-canonical-lang="math"] > code'
       XPATH_MATH = Gitlab::Utils::Nokogiri.css_to_xpath(CSS_MATH).freeze
@@ -88,24 +89,19 @@ module Banzai
         end
       end

-      def settings
-        Gitlab::CurrentSettings.current_application_settings
-      end
-
       def render_nodes_limit_reached?(count)
-        return false if wiki?
-        return false if blob?
-        return false unless settings.math_rendering_limits_enabled?
-
-        count >= RENDER_NODES_LIMIT
+        count >= RENDER_NODES_LIMIT && math_rendering_limits_enabled?
       end

-      def wiki?
-        context[:wiki].present?
-      end
-
-      def blob?
-        context[:text_source] == :blob
-      end
+      def math_rendering_limits_enabled?
+        return true unless group && group.namespace_settings
+
+        group.namespace_settings.math_rendering_limits_enabled?
+      end
+      strong_memoize_attr :math_rendering_limits_enabled?
+
+      def group
+        context[:project]&.parent || context[:group]
+      end
     end
   end
 end

View File

@@ -35,7 +35,7 @@ module Gitlab
       def execute
         Gitlab::Import::SetAsyncJid.set_jid(project.import_state)

-        # We need to track this job's status for use by Gitlab::GithubImport::RefreshImportJidWorker.
+        # We need to track this job's status for use by Gitlab::Import::RefreshImportJidWorker.
         Stage::ImportRepositoryWorker
           .with_status
           .perform_async(project.id)

View File

@@ -111,6 +111,12 @@ module Gitlab
     push_to_gon_attributes(:features, name, !!enabled)
   end

+  def push_namespace_setting(key, object)
+    return unless object&.namespace_settings.respond_to?(key)
+
+    gon.push({ key => object.namespace_settings.public_send(key) }) # rubocop:disable GitlabSecurity/PublicSend
+  end
+
   def push_to_gon_attributes(key, name, enabled)
     var_name = name.to_s.camelize(:lower)
     # Here the `true` argument signals gon that the value should be merged

View File

@@ -25029,6 +25029,9 @@ msgstr ""
 msgid "IdentityVerification|Phone number must contain only digits."
 msgstr ""

+msgid "IdentityVerification|Phone number verification is unavailable at this time. Please verify with a credit card instead."
+msgstr ""
+
 msgid "IdentityVerification|Please enter a valid code"
 msgstr ""

View File

@@ -65,7 +65,11 @@ module QA
         end

         def click_commit_tab
-          click_element('.codicon-source-control-view-icon')
+          if has_element?('.codicon-source-control-view-icon + .badge')
+            click_element('.codicon-source-control-view-icon + .badge')
+          else
+            click_element('.codicon-source-control-view-icon')
+          end
         end

         def has_commit_message_box?

View File

@@ -47,13 +47,8 @@ RSpec.describe 'admin issues labels', feature_category: :team_planning do
       wait_for_requests

-      expect(page).to have_content("Define your default set of project labels")
-      expect(page).not_to have_content('bug')
-      expect(page).not_to have_content('feature_label')
-
-      page.within '.js-admin-labels-count' do
-        expect(page).to have_content('0')
-      end
+      expect(page).to have_css '.js-admin-labels-container', visible: :hidden
+      expect(page).to have_css '.js-admin-labels-empty-state', visible: :visible
     end
   end

View File

@@ -3,7 +3,7 @@
 require 'spec_helper'

 RSpec.describe 'Math rendering', :js, feature_category: :team_planning do
-  let!(:project) { create(:project, :public) }
+  let_it_be(:project) { create(:project, :public) }

   it 'renders inline and display math correctly' do
     description = <<~MATH
@@ -98,7 +98,7 @@ RSpec.describe 'Math rendering', :js, feature_category: :team_planning do
     end
   end

-  it 'renders without any limits on wiki page', :js do
+  it 'renders with limits on wiki page', :js do
     wiki_page = build(:wiki_page, { container: project, content: lazy_load_description })
     wiki_page.create message: 'math test commit' # rubocop:disable Rails/SaveBang
     wiki_page = project.wiki.find_page(wiki_page.slug)
@@ -108,20 +108,26 @@ RSpec.describe 'Math rendering', :js, feature_category: :team_planning do
       wait_for_requests

       page.within '.js-wiki-page-content' do
-        expect(page).not_to have_selector('.js-lazy-render-math')
+        # the find is needed to ensure the lazy container is loaded, otherwise
+        # it can be a flaky test, similar to
+        # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25408
+        find('.js-lazy-render-math-container')
+
+        expect(page).to have_selector('.js-lazy-render-math-container', text: /math block exceeds 1000 characters/)
       end
     end
   end

   context 'when limits are disabled' do
-    before do
-      stub_application_setting(math_rendering_limits_enabled: false)
-    end
+    let_it_be(:namespace_settings) { create(:namespace_settings, math_rendering_limits_enabled: false) }
+    let_it_be(:group) { create(:group, namespace_settings: namespace_settings) }
+    let_it_be(:project) { create(:project, :public, group: group) }

     it 'does not render lazy load button' do
       create_and_visit_issue_with_description(lazy_load_description)

       page.within '.description > .md' do
+        expect(page).not_to have_selector('button', text: 'Display anyway')
         expect(page)
           .not_to have_selector('.js-lazy-render-math-container', text: /math block exceeds 1000 characters/)
       end
@@ -131,6 +137,7 @@ RSpec.describe 'Math rendering', :js, feature_category: :team_planning do
       create_and_visit_issue_with_description(excessive_expansion_description)

       page.within '.description > .md' do
+        expect(page).not_to have_selector('button', text: 'Display anyway')
         expect(page).not_to have_selector('.katex-error', text: /Too many expansions/)
       end
     end

View File

@@ -295,7 +295,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
     context 'when manual action was played' do
       before do
-        find('[data-testid="pipelines-manual-actions-dropdown"]').click
+        find('[data-testid="pipelines-manual-actions-dropdown"] button').click

         wait_for_requests
@@ -326,7 +326,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
       end

       it "has link to the delayed job's action" do
-        find('[data-testid="pipelines-manual-actions-dropdown"]').click
+        find('[data-testid="pipelines-manual-actions-dropdown"] button').click

         wait_for_requests
@@ -345,7 +345,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
       end

       it "shows 00:00:00 as the remaining time" do
-        find('[data-testid="pipelines-manual-actions-dropdown"]').click
+        find('[data-testid="pipelines-manual-actions-dropdown"] button').click

         wait_for_requests
@@ -354,7 +354,8 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
       end

       context 'when user played a delayed job immediately' do
-        let(:manual_action_selector) { '[data-testid="pipelines-manual-actions-dropdown"]' }
+        let(:manual_action_selector) { '[data-testid="pipelines-manual-actions-dropdown"] button' }
+        let(:manual_action_dropdown) { '[data-testid="pipelines-manual-actions-dropdown"]' }

         before do
           find(manual_action_selector).click
@@ -363,8 +364,8 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
         end

         # Wait for UI to transition to ensure a request has been made
-        within(manual_action_selector) { find('.gl-spinner') }
-        within(manual_action_selector) { find('[data-testid="play-icon"]') }
+        within(manual_action_dropdown) { find('.gl-spinner') }
+        within(manual_action_dropdown) { find('[data-testid="play-icon"]') }

         wait_for_requests
       end

View File

@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownItem, GlLoadingIcon } from '@gitlab/ui';
+import { GlDisclosureDropdown, GlDisclosureDropdownItem, GlLoadingIcon } from '@gitlab/ui';
 import MockAdapter from 'axios-mock-adapter';
 import Vue, { nextTick } from 'vue';
 import VueApollo from 'vue-apollo';
@@ -48,14 +48,14 @@ describe('Pipeline manual actions', () => {
       iid: 100,
     },
     stubs: {
-      GlDropdown,
+      GlDisclosureDropdown,
     },
     apolloProvider: createMockApollo([[getPipelineActionsQuery, queryHandler]]),
   });
 };

-const findDropdown = () => wrapper.findComponent(GlDropdown);
-const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
+const findAllDropdownItems = () => wrapper.findAllComponents(GlDisclosureDropdownItem);
 const findAllCountdowns = () => wrapper.findAllComponents(GlCountdown);
 const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
 const findLimitMessage = () => wrapper.findByTestId('limit-reached-msg');
@@ -101,14 +101,16 @@ describe('Pipeline manual actions', () => {
   });

   it("displays a disabled action when it's not playable", () => {
-    expect(findAllDropdownItems().at(0).attributes('disabled')).toBeDefined();
+    expect(findAllDropdownItems().at(0).props('item')).toMatchObject({
+      extraAttrs: { disabled: true },
+    });
   });

   describe('on action click', () => {
     it('makes a request and toggles the loading state', async () => {
       mock.onPost(mockPath).reply(HTTP_STATUS_OK);

-      findAllDropdownItems().at(1).vm.$emit('click');
+      findAllDropdownItems().at(1).vm.$emit('action');

       await nextTick();
@@ -122,7 +124,7 @@ describe('Pipeline manual actions', () => {
     it('makes a failed request and toggles the loading state', async () => {
       mock.onPost(mockPath).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);

-      findAllDropdownItems().at(1).vm.$emit('click');
+      findAllDropdownItems().at(1).vm.$emit('action');

       await nextTick();
@@ -163,7 +165,7 @@ describe('Pipeline manual actions', () => {
       confirmAction.mockResolvedValueOnce(true);

-      findAllDropdownItems().at(2).vm.$emit('click');
+      findAllDropdownItems().at(2).vm.$emit('action');

       expect(confirmAction).toHaveBeenCalled();
@@ -177,7 +179,7 @@ describe('Pipeline manual actions', () => {
       confirmAction.mockResolvedValueOnce(false);

-      findAllDropdownItems().at(2).vm.$emit('click');
+      findAllDropdownItems().at(2).vm.$emit('action');

       expect(confirmAction).toHaveBeenCalled();

@@ -208,7 +208,9 @@ RSpec.describe Banzai::Filter::MathFilter, feature_category: :team_planning do
  end

  context 'when limiting how many elements can be marked as math' do
-    subject { pipeline_filter('$`2+2`$ + $3+3$ + $$4+4$$') }
+    let_it_be(:context) { {} }
+
+    subject { pipeline_filter('$`2+2`$ + $3+3$ + $$4+4$$', context) }

    before do
      stub_const('Banzai::Filter::MathFilter::RENDER_NODES_LIMIT', 2)
@@ -218,22 +220,61 @@ RSpec.describe Banzai::Filter::MathFilter, feature_category: :team_planning do
      expect(subject.search('.js-render-math').count).to eq(2)
    end

-    it 'does not limit when math_rendering_limits_enabled is false' do
-      stub_application_setting(math_rendering_limits_enabled: false)
-
-      expect(subject.search('.js-render-math').count).to eq(3)
-    end
+    context 'when project with user namespace (no group)' do
+      let_it_be(:project) { create(:project, :public) }
+      let_it_be(:context) { { project: project } }
+
+      it 'limits' do
+        expect(subject.search('.js-render-math').count).to eq(2)
+      end
+    end

-    it 'does not limit for the wiki' do
-      doc = pipeline_filter('$`2+2`$ + $3+3$ + $$4+4$$', { wiki: true })
-
-      expect(doc.search('.js-render-math').count).to eq(3)
-    end
+    context 'when project with group, no namespace settings' do
+      let_it_be(:group) { create(:group) }
+      let_it_be(:project) { create(:project, :public, group: group) }
+      let_it_be(:context) { { project: project } }
+
+      it 'limits' do
+        expect(subject.search('.js-render-math').count).to eq(2)
+      end
+    end

-    it 'does not limit for blobs' do
-      doc = pipeline_filter('$`2+2`$ + $3+3$ + $$4+4$$', { text_source: :blob })
-
-      expect(doc.search('.js-render-math').count).to eq(3)
-    end
+    context 'when project with group, default namespace settings' do
+      let_it_be(:namespace_settings) { create(:namespace_settings) }
+      let_it_be(:group) { create(:group, namespace_settings: namespace_settings) }
+      let_it_be(:project) { create(:project, :public, group: group) }
+      let_it_be(:context) { { project: project } }
+
+      it 'limits' do
+        expect(subject.search('.js-render-math').count).to eq(2)
+      end
+    end
+
+    context 'when limits math_rendering_limits_enabled is false' do
+      let_it_be(:namespace_settings) { create(:namespace_settings, math_rendering_limits_enabled: false) }
+      let_it_be(:group) { create(:group, namespace_settings: namespace_settings) }
+      let_it_be(:project) { create(:project, :public, group: group) }
+      let_it_be(:context) { { project: project } }
+
+      it 'does not limit' do
+        expect(subject.search('.js-render-math').count).to eq(3)
+      end
+    end
+
+    context 'when for wikis' do
+      let_it_be(:context) { { wiki: true } }
+
+      it 'does limit' do
+        expect(subject.search('.js-render-math').count).to eq(2)
+      end
+    end
+
+    context 'when for blobs' do
+      let_it_be(:context) { { text_source: :blob } }
+
+      it 'does limit for blobs' do
+        expect(subject.search('.js-render-math').count).to eq(2)
+      end
+    end
  end
end
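For readers following the new examples: a minimal sketch of the limiting decision they appear to exercise. The helper name and structure below are assumptions for illustration, not the filter's committed implementation.

# Sketch only: wiki, blob, and user-namespace contexts fall back to the
# default (limited); a group can opt out through its namespace settings.
def math_rendering_limits_enabled?(context)
  group = context[:project]&.group
  settings = group&.namespace_settings

  return true unless settings # no group or no settings row: keep limiting

  settings.math_rendering_limits_enabled
end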


@@ -2,7 +2,7 @@
 require 'spec_helper'

-RSpec.describe Gitlab::GonHelper do
+RSpec.describe Gitlab::GonHelper, feature_category: :shared do
  let(:helper) do
    Class.new do
      include Gitlab::GonHelper
@@ -173,6 +173,39 @@ RSpec.describe Gitlab::GonHelper do
    end
  end

+  describe '#push_namespace_setting' do
+    it 'pushes a namespace setting to the frontend' do
+      namespace_settings = create(:namespace_settings, math_rendering_limits_enabled: false)
+      group = create(:group, namespace_settings: namespace_settings)
+      gon = class_double('Gon')
+
+      allow(helper)
+        .to receive(:gon)
+        .and_return(gon)
+
+      expect(gon)
+        .to receive(:push)
+        .with({ math_rendering_limits_enabled: false })
+
+      helper.push_namespace_setting(:math_rendering_limits_enabled, group)
+    end
+
+    it 'does not push if missing namespace setting entry' do
+      group = create(:group)
+      gon = class_double('Gon')
+
+      allow(helper)
+        .to receive(:gon)
+        .and_return(gon)
+
+      expect(gon)
+        .not_to receive(:push)
+        .with({ math_rendering_limits_enabled: false })
+
+      helper.push_namespace_setting(:math_rendering_limits_enabled, group)
+    end
+  end
+
  describe '#default_avatar_url' do
    it 'returns an absolute URL' do
      url = helper.default_avatar_url
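A possible shape for the helper exercised by the new examples, inferred only from the expectations above; the committed helper may resolve the settings differently.

# Sketch: push a single namespace-level setting to gon, skipping groups that
# have no namespace_settings row (so nothing is pushed in that case).
def push_namespace_setting(key, object)
  settings = object&.namespace_settings
  return unless settings.respond_to?(key)

  gon.push({ key => settings.public_send(key) })
end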


@@ -0,0 +1,38 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Projects::BranchRulePolicy, feature_category: :source_code_management do
  let_it_be(:name) { 'feature' }
  let_it_be(:protected_branch) { create(:protected_branch, name: name) }
  let_it_be(:project) { protected_branch.project }
  let_it_be(:user) { create(:user) }

  let(:branch_rule) { Projects::BranchRule.new(project, protected_branch) }

  subject { described_class.new(user, branch_rule) }

  context 'as a maintainer' do
    before_all do
      project.add_maintainer(user)
    end

    it_behaves_like 'allows branch rule crud'
  end

  context 'as a developer' do
    before_all do
      project.add_developer(user)
    end

    it_behaves_like 'disallows branch rule crud'
  end

  context 'as a guest' do
    before_all do
      project.add_guest(user)
    end

    it_behaves_like 'disallows branch rule crud'
  end
end
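The policy under test is not shown in this diff; a sketch that would satisfy the shared examples could look like the following. The DeclarativePolicy condition chosen here is an assumption, not the committed rule.

# Sketch: grant the four branch-rule abilities to users who can admin the
# project (maintainers), leaving developers and guests without them.
module Projects
  class BranchRulePolicy < ::BasePolicy
    delegate { @subject.project }

    rule { can?(:admin_project) }.policy do
      enable :read_branch_rule
      enable :create_branch_rule
      enable :update_branch_rule
      enable :destroy_branch_rule
    end
  end
end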


@@ -9483,7 +9483,6 @@
   - './spec/workers/gitlab/github_import/import_issue_worker_spec.rb'
   - './spec/workers/gitlab/github_import/import_note_worker_spec.rb'
   - './spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb'
-  - './spec/workers/gitlab/github_import/refresh_import_jid_worker_spec.rb'
   - './spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb'
   - './spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb'
   - './spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb'


@@ -16,7 +16,7 @@ RSpec.shared_examples Gitlab::BitbucketImport::StageMethods do
    end
  end

-  describe '.perform' do
+  describe '#perform' do
    let(:worker) { described_class.new }

    it 'executes the import' do
@@ -25,5 +25,16 @@
      worker.perform(project.id)
    end
+
+    it 'queues RefreshImportJidWorker' do
+      allow(worker).to receive(:import)
+      allow(worker).to receive(:jid).and_return('mock_jid')
+
+      expect(Gitlab::Import::RefreshImportJidWorker)
+        .to receive(:perform_in_the_future)
+        .with(project.id, 'mock_jid')
+
+      worker.perform(project.id)
+    end
  end
end


@@ -15,4 +15,26 @@ RSpec.shared_examples Gitlab::BitbucketServerImport::StageMethods do
      described_class.sidekiq_retries_exhausted_block.call(job, StandardError.new)
    end
  end
+
+  describe '#perform' do
+    let(:worker) { described_class.new }
+
+    it 'executes the import' do
+      expect(worker).to receive(:import).with(project).once
+      expect(Gitlab::BitbucketServerImport::Logger).to receive(:info).twice
+
+      worker.perform(project.id)
+    end
+
+    it 'queues RefreshImportJidWorker' do
+      allow(worker).to receive(:import)
+      allow(worker).to receive(:jid).and_return('mock_jid')
+
+      expect(Gitlab::Import::RefreshImportJidWorker)
+        .to receive(:perform_in_the_future)
+        .with(project.id, 'mock_jid')
+
+      worker.perform(project.id)
+    end
+  end
end
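Both Bitbucket stage shared examples above assert the same behaviour; roughly, a StageMethods#perform along these lines would satisfy them. This is a simplified sketch: the logging helper and control flow are assumptions, not the module's committed code.

# Sketch: look up the project, keep its import JID fresh while the stage runs,
# and log around the actual import work.
def perform(project_id)
  project = Project.find_by_id(project_id)
  return unless project

  Gitlab::Import::RefreshImportJidWorker.perform_in_the_future(project.id, jid)

  info(project.id, message: 'starting stage')  # `info` stands in for the import logger helper (assumption)
  import(project)
  info(project.id, message: 'stage finished')
end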


@@ -0,0 +1,21 @@
# frozen_string_literal: true

RSpec.shared_examples 'allows branch rule crud' do
  it { is_expected.to be_allowed(:read_branch_rule) }
  it { is_expected.to be_allowed(:create_branch_rule) }
  it { is_expected.to be_allowed(:update_branch_rule) }
  it { is_expected.to be_allowed(:destroy_branch_rule) }
end

RSpec.shared_examples 'disallows branch rule crud' do
  it { is_expected.not_to be_allowed(:read_branch_rule) }
  it { is_expected.not_to be_allowed(:create_branch_rule) }
  it { is_expected.not_to be_allowed(:update_branch_rule) }
  it { is_expected.not_to be_allowed(:destroy_branch_rule) }
end

RSpec.shared_examples 'disallows branch rule changes' do
  it { is_expected.not_to be_allowed(:create_branch_rule) }
  it { is_expected.not_to be_allowed(:update_branch_rule) }
  it { is_expected.not_to be_allowed(:destroy_branch_rule) }
end


@@ -100,7 +100,7 @@ RSpec.shared_examples Gitlab::GithubImport::StageMethods do
      allow(worker).to receive(:import)
      allow(worker).to receive(:jid).and_return('mock_jid')

-      expect(Gitlab::GithubImport::RefreshImportJidWorker)
+      expect(Gitlab::Import::RefreshImportJidWorker)
        .to receive(:perform_in_the_future)
        .with(project.id, 'mock_jid')


@@ -279,7 +279,6 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
      'Gitlab::GithubImport::PullRequests::ImportReviewWorker' => 5,
      'Gitlab::GithubImport::PullRequests::ImportMergedByWorker' => 5,
      'Gitlab::GithubImport::ImportPullRequestWorker' => 5,
-      'Gitlab::GithubImport::RefreshImportJidWorker' => 5,
      'Gitlab::GithubImport::ReplayEventsWorker' => 5,
      'Gitlab::GithubImport::Stage::FinishImportWorker' => 6,
      'Gitlab::GithubImport::Stage::ImportBaseDataWorker' => 6,
@@ -298,6 +297,7 @@
      'Gitlab::GithubGistsImport::ImportGistWorker' => 5,
      'Gitlab::GithubGistsImport::StartImportWorker' => 5,
      'Gitlab::GithubGistsImport::FinishImportWorker' => 5,
+      'Gitlab::Import::RefreshImportJidWorker' => 5,
      'Gitlab::JiraImport::AdvanceStageWorker' => 6,
      'Gitlab::JiraImport::ImportIssueWorker' => 5,
      'Gitlab::JiraImport::Stage::FinishImportWorker' => 6,


@@ -6,102 +6,12 @@ RSpec.describe Gitlab::GithubImport::RefreshImportJidWorker, feature_category: :
  let(:worker) { described_class.new }

  describe '.perform_in_the_future' do
-    it 'schedules a job in the future' do
-      expect(described_class)
-        .to receive(:perform_in)
-        .with(5.minutes.to_i, 10, '123')
+    it 'calls Gitlab::Import::RefreshImportJidWorker#perform_in_the_future' do
+      expect(Gitlab::Import::RefreshImportJidWorker)
+        .to receive(:perform_in_the_future)
+        .with(10, '123')

      described_class.perform_in_the_future(10, '123')
    end
  end
-
-  describe '#perform' do
-    let(:project) { create(:project) }
-    let(:import_state) { create(:import_state, project: project, jid: '123abc') }
-
-    context 'when the project does not exist' do
-      it 'does nothing' do
-        expect(Gitlab::SidekiqStatus)
-          .not_to receive(:running?)
-
-        worker.perform(-1, '123')
-      end
-    end
-
-    context 'when the job is running' do
-      it 'refreshes the import JID and reschedules itself' do
-        allow(worker)
-          .to receive(:find_import_state)
-          .with(project.id)
-          .and_return(import_state)
-
-        expect(Gitlab::SidekiqStatus)
-          .to receive(:running?)
-          .with('123')
-          .and_return(true)
-
-        expect(Gitlab::SidekiqStatus)
-          .to receive(:expire)
-          .with('123', Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
-
-        expect(Gitlab::SidekiqStatus)
-          .to receive(:set)
-          .with(import_state.jid, Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
-
-        expect(worker.class)
-          .to receive(:perform_in_the_future)
-          .with(project.id, '123')
-
-        worker.perform(project.id, '123')
-      end
-    end
-
-    context 'when the job is no longer running' do
-      it 'returns' do
-        allow(worker)
-          .to receive(:find_import_state)
-          .with(project.id)
-          .and_return(project)
-
-        expect(Gitlab::SidekiqStatus)
-          .to receive(:running?)
-          .with('123')
-          .and_return(false)
-
-        expect(Gitlab::SidekiqStatus)
-          .not_to receive(:expire)
-
-        expect(Gitlab::SidekiqStatus)
-          .not_to receive(:set)
-
-        worker.perform(project.id, '123')
-      end
-    end
-  end
-
-  describe '#find_import_state' do
-    it 'returns a ProjectImportState' do
-      project = create(:project, :import_started)
-
-      expect(worker.find_import_state(project.id)).to be_an_instance_of(ProjectImportState)
-    end
-
-    # it 'only selects the import JID field' do
-    #   project = create(:project, :import_started)
-    #   project.import_state.update_attributes(jid: '123abc')
-    #
-    #   expect(worker.find_project(project.id).attributes)
-    #     .to eq({ 'id' => nil, 'import_jid' => '123abc' })
-    # end
-
-    it 'returns nil for a import state for which the import process failed' do
-      project = create(:project, :import_failed)
-
-      expect(worker.find_import_state(project.id)).to be_nil
-    end
-
-    it 'returns nil for a non-existing find_import_state' do
-      expect(worker.find_import_state(-1)).to be_nil
-    end
-  end
end
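With its own scheduling logic removed, the remaining example suggests the GitHub-specific worker is now a thin shim over the shared worker. A sketch of that shim, not the committed class:

# Sketch: keep the old entry point but hand scheduling off to the shared worker.
module Gitlab
  module GithubImport
    class RefreshImportJidWorker
      def self.perform_in_the_future(project_id, check_job_id)
        Gitlab::Import::RefreshImportJidWorker.perform_in_the_future(project_id, check_job_id)
      end
    end
  end
end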


@@ -0,0 +1,87 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Import::RefreshImportJidWorker, feature_category: :importers do
  let(:worker) { described_class.new }

  describe '.perform_in_the_future' do
    it 'schedules a job in the future' do
      expect(described_class)
        .to receive(:perform_in)
        .with(5.minutes.to_i, 10, '123')

      described_class.perform_in_the_future(10, '123')
    end
  end

  describe '#perform' do
    let_it_be(:project) { create(:project) }

    let(:import_state) { create(:import_state, project: project, jid: '123abc', status: :started) }

    context 'when the project does not exist' do
      let(:job_args) { [-1, '123'] }

      it_behaves_like 'an idempotent worker'

      it 'does nothing' do
        expect(Gitlab::SidekiqStatus)
          .not_to receive(:expire)

        worker.perform(*job_args)
      end
    end

    context 'when the job is running' do
      let(:job_args) { [project.id, '123'] }

      before do
        allow(Gitlab::SidekiqStatus)
          .to receive(:running?)
          .with('123')
          .and_return(true)
      end

      it_behaves_like 'an idempotent worker'

      it 'refreshes the import JID and reschedules itself' do
        expect(Gitlab::SidekiqStatus)
          .to receive(:expire)
          .with('123', Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)

        expect(Gitlab::SidekiqStatus)
          .to receive(:set)
          .with(import_state.jid, Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)

        expect(worker.class)
          .to receive(:perform_in_the_future)
          .with(project.id, '123')

        worker.perform(*job_args)
      end
    end

    context 'when the job is no longer running' do
      let(:job_args) { [project.id, '123'] }

      before do
        allow(Gitlab::SidekiqStatus)
          .to receive(:running?)
          .with('123')
          .and_return(false)
      end

      it_behaves_like 'an idempotent worker'

      it 'returns' do
        expect(Gitlab::SidekiqStatus)
          .not_to receive(:expire)

        expect(Gitlab::SidekiqStatus)
          .not_to receive(:set)

        worker.perform(*job_args)
      end
    end
  end
end
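Putting the new spec's expectations together, the shared worker's behaviour is roughly the following. This is a sketch under the spec's assumptions: the import-state lookup and the Sidekiq worker boilerplate are assumed, not taken from the committed file.

# Sketch of Gitlab::Import::RefreshImportJidWorker, inferred from the spec above.
def self.perform_in_the_future(project_id, check_job_id)
  perform_in(5.minutes.to_i, project_id, check_job_id)
end

def perform(project_id, check_job_id)
  # Assumed lookup: the started import state for the project, if any.
  import_state = ProjectImportState.find_by(project_id: project_id, status: :started)
  return unless import_state
  return unless Gitlab::SidekiqStatus.running?(check_job_id)

  # Extend the TTL of the watched job and of the import's own JID, then
  # schedule another check.
  Gitlab::SidekiqStatus.expire(check_job_id, Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
  Gitlab::SidekiqStatus.set(import_state.jid, Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)

  self.class.perform_in_the_future(project_id, check_job_id)
end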