Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-08-02 18:10:41 +00:00
parent b3432e3b6d
commit 93fdeb5a61
93 changed files with 1727 additions and 501 deletions

View File

@ -143,7 +143,7 @@ variables:
BUNDLE_INSTALL_FLAGS: "--jobs=$(nproc) --retry=3"
BUNDLE_FROZEN: "true"
# we override the max_old_space_size to prevent OOM errors
NODE_OPTIONS: --max_old_space_size=4096
NODE_OPTIONS: --max_old_space_size=5120
GIT_DEPTH: "20"
# 'GIT_STRATEGY: clone' optimizes the pack-objects cache hit ratio
GIT_STRATEGY: "clone"

View File

@ -1333,6 +1333,7 @@ lib/gitlab/checks/**
/app/policies/ci/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/presenters/ci/runner_*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/serializers/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/services/groups/update_shared_runners_service.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/services/ci/runners/ @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/workers/ci/runners/ @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/db/docs/ci_runner*.yml @gitlab-org/ci-cd/runner-fleet-team/backend-approvers

View File

@ -1013,6 +1013,7 @@ Search/NamespacedClass:
- 'lib/gitlab/instrumentation/**/*.rb'
- 'lib/gitlab/usage/metrics/instrumentations/**/*.rb'
- 'ee/lib/gitlab/usage/metrics/instrumentations/**/*.rb'
- 'lib/gitlab/sidekiq_middleware/pause_control/strategies/zoekt.rb'
SidekiqLoadBalancing/WorkerDataConsistency:
Enabled: true

View File

@ -1 +1 @@
26f9b86f5d59e643b719b5ff15bb8e57d8ba599f
4dd176cbbace0b22872fd82e94ea56713f8304ea

View File

@ -404,7 +404,7 @@ group :development, :test do
gem 'parser', '~> 3.2', '>= 3.2.2.3'
gem 'pry-byebug'
gem 'pry-rails', '~> 0.3.9'
gem 'pry-shell', '~> 0.6.1'
gem 'pry-shell', '~> 0.6.3'
gem 'awesome_print', require: false
@ -447,7 +447,7 @@ group :development, :test do
end
group :development, :test, :danger do
gem 'gitlab-dangerfiles', '~> 3.12.0', require: false
gem 'gitlab-dangerfiles', '~> 3.13.0', require: false
end
group :development, :test, :coverage do

View File

@ -206,7 +206,7 @@
{"name":"gitaly","version":"16.2.0.pre.rc4","platform":"ruby","checksum":"08756662fb1537b7d481bbd377c20648f3e0c50fff7d1fd25c6e6034cea2517c"},
{"name":"gitlab","version":"4.19.0","platform":"ruby","checksum":"3f645e3e195dbc24f0834fbf83e8ccfb2056d8e9712b01a640aad418a6949679"},
{"name":"gitlab-chronic","version":"0.10.5","platform":"ruby","checksum":"f80f18dc699b708870a80685243331290bc10cfeedb6b99c92219722f729c875"},
{"name":"gitlab-dangerfiles","version":"3.12.0","platform":"ruby","checksum":"0b260c84530664b5ae9d8cf21658c4658d4c319a0c9cbc4d56ecb6591b097d7d"},
{"name":"gitlab-dangerfiles","version":"3.13.0","platform":"ruby","checksum":"2081eac7fe1f538427f8ebec1e8cd7c143a30d50e1470348cdec4f2d273ea1ad"},
{"name":"gitlab-experiment","version":"0.7.1","platform":"ruby","checksum":"166dddb3aa83428bcaa93c35684ed01dc4d61f321fd2ae40b020806dc54a7824"},
{"name":"gitlab-fog-azure-rm","version":"1.8.0","platform":"ruby","checksum":"e4f24b174b273b88849d12fbcfecb79ae1c09f56cbd614998714c7f0a81e6c28"},
{"name":"gitlab-labkit","version":"0.33.0","platform":"ruby","checksum":"d1fba8d30fde314a3f5dee1921ac31860bed4fecd8aa98ac6671f2627479e05b"},
@ -461,7 +461,7 @@
{"name":"pry","version":"0.14.2","platform":"ruby","checksum":"c4fe54efedaca1d351280b45b8849af363184696fcac1c72e0415f9bdac4334d"},
{"name":"pry-byebug","version":"3.10.1","platform":"ruby","checksum":"c8f975c32255bfdb29e151f5532130be64ff3d0042dc858d0907e849125581f8"},
{"name":"pry-rails","version":"0.3.9","platform":"ruby","checksum":"468662575abb6b67f4a9831219f99290d5eae7bf186e64dd810d0a3e4a8cc4b1"},
{"name":"pry-shell","version":"0.6.1","platform":"ruby","checksum":"a99a6b3dffe4df274ea1751866816906861a23851f13346e10a8e8f61b53360c"},
{"name":"pry-shell","version":"0.6.3","platform":"ruby","checksum":"17b9cdf0e318ab50dc12698da3e1b8f532518cbceb0353c42a9ce2dd066676c2"},
{"name":"public_suffix","version":"5.0.0","platform":"ruby","checksum":"26ee4fbce33ada25eb117ac71f2c24bf4d8b3414ab6b34f05b4708a3e90f1c6b"},
{"name":"puma","version":"6.3.0","platform":"java","checksum":"5e2ff95953608d1ba0350b80a3961a43e9bbb78ec60ebd5e4db1940c2921d5d8"},
{"name":"puma","version":"6.3.0","platform":"ruby","checksum":"b0e35b4fe7ae440237a9ff1647c6bb252a1c0951ff356020670d2e62c1aeeeec"},

View File

@ -641,7 +641,7 @@ GEM
terminal-table (>= 1.5.1)
gitlab-chronic (0.10.5)
numerizer (~> 0.2)
gitlab-dangerfiles (3.12.0)
gitlab-dangerfiles (3.13.0)
danger (>= 8.4.5)
danger-gitlab (>= 8.0.0)
rake
@ -1217,7 +1217,7 @@ GEM
pry (>= 0.13, < 0.15)
pry-rails (0.3.9)
pry (>= 0.10.4)
pry-shell (0.6.1)
pry-shell (0.6.3)
pry (>= 0.13.0)
tty-markdown
tty-prompt
@ -1810,7 +1810,7 @@ DEPENDENCIES
gettext_i18n_rails_js (~> 1.3)
gitaly (~> 16.2.0.pre.rc4)
gitlab-chronic (~> 0.10.5)
gitlab-dangerfiles (~> 3.12.0)
gitlab-dangerfiles (~> 3.13.0)
gitlab-experiment (~> 0.7.1)
gitlab-fog-azure-rm (~> 1.8.0)
gitlab-labkit (~> 0.33.0)
@ -1941,7 +1941,7 @@ DEPENDENCIES
prometheus-client-mmap (~> 0.27)
pry-byebug
pry-rails (~> 0.3.9)
pry-shell (~> 0.6.1)
pry-shell (~> 0.6.3)
puma (~> 6.3)
rack (~> 2.2.7)
rack-attack (~> 6.6.1)

View File

@ -163,7 +163,7 @@ export default {
"
>
<template #link="{ content }">
<gl-link data-testid="runner-install-link" @click="toggleDrawer">{{ content }}</gl-link>
<gl-link @click="toggleDrawer">{{ content }}</gl-link>
</template>
</gl-sprintf>
</p>

View File

@ -94,10 +94,7 @@ export default {
<div>
<runner-upgrade-status-alert class="gl-my-4" :runner="runner" />
<div class="gl-pt-4">
<dl
class="gl-mb-0 gl-display-grid runner-details-grid-template"
data-testid="runner-details-list"
>
<dl class="gl-mb-0 gl-display-grid runner-details-grid-template">
<runner-detail :label="s__('Runners|Description')" :value="runner.description" />
<runner-detail
:label="s__('Runners|Last contact')"

View File

@ -61,6 +61,13 @@ export default {
},
},
methods: {
onCancel() {
if (this.id) {
this.$router.push({ path: '/' });
} else {
this.$emit('cancel');
}
},
onSubmit() {
this.showValidation = true;
@ -178,6 +185,6 @@ export default {
>
{{ __('Save') }}
</gl-button>
<gl-button v-if="id" :to="{ path: '/' }">{{ __('Cancel') }}</gl-button>
<gl-button @click="onCancel">{{ __('Cancel') }}</gl-button>
</gl-form>
</template>

View File

@ -1,21 +1,14 @@
<!-- eslint-disable vue/multi-word-component-names -->
<script>
import { GlKeysetPagination, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
import { GlKeysetPagination } from '@gitlab/ui';
import ListItem from './list_item.vue';
export default {
components: {
GlLoadingIcon,
GlKeysetPagination,
GlSprintf,
ListItem,
},
props: {
loading: {
type: Boolean,
required: false,
default: false,
},
savedReplies: {
type: Array,
required: true,
@ -24,10 +17,6 @@ export default {
type: Object,
required: true,
},
count: {
type: Number,
required: true,
},
},
methods: {
prevPage() {
@ -45,28 +34,16 @@ export default {
</script>
<template>
<div class="settings-section">
<gl-loading-icon v-if="loading" size="lg" />
<template v-else>
<div class="settings-sticky-header">
<div class="settings-sticky-header-inner">
<h4 class="gl-my-0" data-testid="title">
<gl-sprintf :message="__('My comment templates (%{count})')">
<template #count>{{ count }}</template>
</gl-sprintf>
</h4>
</div>
</div>
<ul class="gl-list-style-none gl-p-0 gl-m-0">
<list-item v-for="template in savedReplies" :key="template.id" :template="template" />
</ul>
<gl-keyset-pagination
v-if="pageInfo.hasPreviousPage || pageInfo.hasNextPage"
v-bind="pageInfo"
class="gl-mt-4"
@prev="prevPage"
@next="nextPage"
/>
</template>
<div class="gl-new-card-content gl-p-0">
<ul class="content-list">
<list-item v-for="template in savedReplies" :key="template.id" :template="template" />
</ul>
<gl-keyset-pagination
v-if="pageInfo.hasPreviousPage || pageInfo.hasNextPage"
v-bind="pageInfo"
class="gl-mt-4"
@prev="prevPage"
@next="nextPage"
/>
</div>
</template>

View File

@ -74,8 +74,8 @@ export default {
</script>
<template>
<li class="gl-pt-4 gl-pb-5 gl-border-b">
<div class="gl-display-flex gl-align-items-center">
<li class="gl-px-5! gl-py-4!">
<div class="gl-display-flex">
<h6 class="gl-mr-3 gl-my-0" data-testid="comment-template-name">{{ template.name }}</h6>
<div class="gl-ml-auto">
<gl-disclosure-dropdown
@ -94,7 +94,9 @@ export default {
</gl-tooltip>
</div>
</div>
<div class="gl-mt-3 gl-font-monospace gl-white-space-pre-wrap">{{ template.content }}</div>
<div class="gl-font-monospace gl-white-space-pre-line gl-font-sm gl-mt-n5">
{{ template.content }}
</div>
<gl-modal
ref="delete-modal"
:title="__('Delete comment template')"

View File

@ -1,5 +1,6 @@
<!-- eslint-disable vue/multi-word-component-names -->
<script>
import { GlCard, GlLoadingIcon, GlIcon, GlButton } from '@gitlab/ui';
import { fetchPolicies } from '~/lib/graphql';
import CreateForm from '../components/form.vue';
import savedRepliesQuery from '../queries/saved_replies.query.graphql';
@ -28,6 +29,10 @@ export default {
},
},
components: {
GlCard,
GlButton,
GlLoadingIcon,
GlIcon,
CreateForm,
List,
},
@ -37,34 +42,58 @@ export default {
count: 0,
pageInfo: {},
pagination: {},
showForm: false,
};
},
methods: {
refetchSavedReplies() {
this.pagination = {};
this.$apollo.queries.savedReplies.refetch();
this.toggleShowForm();
},
changePage(pageInfo) {
this.pagination = pageInfo;
},
toggleShowForm() {
this.showForm = !this.showForm;
},
},
};
</script>
<template>
<div>
<div class="settings-section">
<h5 class="gl-mt-0 gl-font-lg">
{{ __('Add new comment template') }}
</h5>
<create-form @saved="refetchSavedReplies" />
<gl-card
class="gl-new-card gl-overflow-hidden"
header-class="gl-new-card-header"
body-class="gl-new-card-body gl-px-0"
>
<template #header>
<div class="gl-new-card-title-wrapper" data-testid="title">
<h3 class="gl-new-card-title">
{{ __('My comment templates') }}
</h3>
<div class="gl-new-card-count">
<gl-icon name="comment-lines" class="gl-mr-2" />
{{ count }}
</div>
</div>
<gl-button v-if="!showForm" size="small" class="gl-ml-3" @click="toggleShowForm">
{{ __('Add new') }}
</gl-button>
</template>
<div v-if="showForm" class="gl-new-card-add-form gl-m-3 gl-mb-4">
<h4 class="gl-mt-0">{{ __('Add new comment template') }}</h4>
<create-form @saved="refetchSavedReplies" @cancel="toggleShowForm" />
</div>
<gl-loading-icon v-if="$apollo.queries.savedReplies.loading" size="sm" class="gl-my-5" />
<list
:loading="$apollo.queries.savedReplies.loading"
v-else-if="savedReplies"
:saved-replies="savedReplies"
:page-info="pageInfo"
:count="count"
@input="changePage"
/>
</div>
<div v-else class="gl-new-card-empty gl-px-5 gl-py-4">
{{ __('You have no saved replies yet.') }}
</div>
</gl-card>
</template>

View File

@ -200,17 +200,21 @@ export default {
const isSearchInput = target.matches(SEARCH_INPUT_SELECTOR);
if (code === HOME_KEY) {
if (isSearchInput) return;
this.focusItem(0, elements);
} else if (code === END_KEY) {
if (isSearchInput) return;
this.focusItem(elements.length - 1, elements);
} else if (code === ARROW_UP_KEY) {
if (isSearchInput) return;
if (elements.indexOf(target) === 0) {
this.focusSearchInput();
return;
} else {
this.focusNextItem(event, elements, -1);
}
this.focusNextItem(event, elements, -1);
} else if (code === ARROW_DOWN_KEY) {
this.focusNextItem(event, elements, 1);
} else if (code === ESC_KEY) {

View File

@ -21,6 +21,6 @@ export const INPUT_FIELD_PADDING = 84;
export const FETCH_TYPES = ['generic', 'search'];
export const SEARCH_MODAL_ID = 'super-sidebar-search-modal';
export const SEARCH_INPUT_SELECTOR = '.gl-search-box-by-type-input-borderless';
export const SEARCH_INPUT_SELECTOR = 'input[role="searchbox"]';
export const SEARCH_RESULTS_ITEM_SELECTOR = '.gl-new-dropdown-item';

View File

@ -16,8 +16,10 @@ module Organizations
strong_memoize_attr :organization
def authorize_action!(action)
access_denied! if Feature.disabled?(:ui_for_organizations, current_user)
access_denied! unless can?(current_user, action, organization)
return if Feature.enabled?(:ui_for_organizations, current_user) &&
can?(current_user, action, organization)
access_denied!
end
end
end

View File

@ -65,15 +65,7 @@ class Projects::PagesController < Projects::ApplicationController
end
def project_params_attributes
attributes = %i[pages_https_only]
return attributes unless Feature.enabled?(:pages_unique_domain, @project)
attributes + [
project_setting_attributes: [
:pages_unique_domain_enabled
]
]
[:pages_https_only, { project_setting_attributes: [:pages_unique_domain_enabled] }]
end
end

View File

@ -19,7 +19,8 @@ module WorkItems
requirement: 'Requirement',
task: 'Task',
objective: 'Objective',
key_result: 'Key Result'
key_result: 'Key Result',
epic: 'Epic'
}.freeze
# Base types need to exist on the DB on app startup
@ -32,7 +33,8 @@ module WorkItems
requirement: { name: TYPE_NAMES[:requirement], icon_name: 'issue-type-requirements', enum_value: 3 }, ## EE-only
task: { name: TYPE_NAMES[:task], icon_name: 'issue-type-task', enum_value: 4 },
objective: { name: TYPE_NAMES[:objective], icon_name: 'issue-type-objective', enum_value: 5 }, ## EE-only
key_result: { name: TYPE_NAMES[:key_result], icon_name: 'issue-type-keyresult', enum_value: 6 } ## EE-only
key_result: { name: TYPE_NAMES[:key_result], icon_name: 'issue-type-keyresult', enum_value: 6 }, ## EE-only
epic: { name: TYPE_NAMES[:epic], icon_name: 'issue-type-epic', enum_value: 7 } ## EE-only
}.freeze
# A list of types user can change between - both original and new
@ -79,7 +81,7 @@ module WorkItems
end
def self.allowed_types_for_issues
base_types.keys.excluding('task', 'objective', 'key_result')
base_types.keys.excluding('task', 'objective', 'key_result', 'epic')
end
def default?

View File

@ -51,12 +51,6 @@ module Projects
private
def add_pages_unique_domain
if Feature.disabled?(:pages_unique_domain, project)
params[:project_setting_attributes]&.delete(:pages_unique_domain_enabled)
return
end
return unless params.dig(:project_setting_attributes, :pages_unique_domain_enabled)
# If the project used a unique domain once, it'll always use the same

View File

@ -1,8 +1,6 @@
- can_edit_max_page_size = can?(current_user, :update_max_pages_size)
- can_enforce_https_only = Gitlab.config.pages.external_http || Gitlab.config.pages.external_https
- can_edit_unique_domain = Feature.enabled?(:pages_unique_domain, @project)
- return unless can_edit_max_page_size || can_enforce_https_only || can_edit_unique_domain
= gitlab_ui_form_for @project, url: project_pages_path(@project), html: { class: 'inline', title: pages_https_only_title } do |f|
- if can_edit_max_page_size
= render_if_exists 'shared/pages/max_pages_size_input', form: f
@ -18,14 +16,13 @@
%p.gl-pl-6
= s_("GitLabPages|When enabled, all attempts to visit your website through HTTP are automatically redirected to HTTPS using a response with status code 301. Requires a valid certificate for all domains. %{docs_link_start}Learn more.%{link_end}").html_safe % { docs_link_start: docs_link_start, link_end: link_end }
- if can_edit_unique_domain
.form-group
= f.fields_for :project_setting do |settings|
= settings.gitlab_ui_checkbox_component :pages_unique_domain_enabled,
s_('GitLabPages|Use unique domain'),
label_options: { class: 'label-bold' }
%p.gl-pl-6
= s_("GitLabPages|When enabled, a unique domain is generated to access pages.").html_safe
.form-group
= f.fields_for :project_setting do |settings|
= settings.gitlab_ui_checkbox_component :pages_unique_domain_enabled,
s_('GitLabPages|Use unique domain'),
label_options: { class: 'label-bold' }
%p.gl-pl-6
= s_("GitLabPages|When enabled, a unique domain is generated to access pages.").html_safe
.gl-mt-3
= f.submit s_('GitLabPages|Save changes'), pajamas_button: true

View File

@ -678,6 +678,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:pause_control_resume
:worker_name: PauseControl::ResumeWorker
:feature_category: :global_search
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:personal_access_tokens_expired_notification
:worker_name: PersonalAccessTokens::ExpiredNotificationWorker
:feature_category: :system_access

View File

@ -151,6 +151,10 @@ module WorkerAttributes
set_class_attribute(:weight, value)
end
def pause_control(value)
::Gitlab::SidekiqMiddleware::PauseControl::WorkersMap.set_strategy_for(strategy: value, worker: self)
end
def get_weight
get_class_attribute(:weight) ||
NAMESPACE_WEIGHTS[queue_namespace] ||

View File

@ -0,0 +1,50 @@
# frozen_string_literal: true
module PauseControl
class ResumeWorker
include ApplicationWorker
# There is no onward scheduling and this cron handles work from across the
# application, so there's no useful context to add.
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
RESCHEDULE_DELAY = 1.second
feature_category :global_search
data_consistency :sticky
idempotent!
urgency :low
def perform
reschedule_job = false
pause_strategies_workers.each do |strategy, workers|
strategy_klass = Gitlab::SidekiqMiddleware::PauseControl.for(strategy)
next if strategy_klass.should_pause?
workers.each do |worker|
next unless jobs_in_the_queue?(worker)
queue_size = resume_processing!(worker)
reschedule_job = true if queue_size.to_i > 0
end
end
self.class.perform_in(RESCHEDULE_DELAY) if reschedule_job
end
private
def jobs_in_the_queue?(worker)
Gitlab::SidekiqMiddleware::PauseControl::PauseControlService.has_jobs_in_waiting_queue?(worker.to_s)
end
def resume_processing!(worker)
Gitlab::SidekiqMiddleware::PauseControl::PauseControlService.resume_processing!(worker.to_s)
end
def pause_strategies_workers
Gitlab::SidekiqMiddleware::PauseControl::WorkersMap.workers || []
end
end
end

View File

@ -1,8 +1,8 @@
---
name: pages_unique_domain
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/109011
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/388151
milestone: '15.9'
type: development
group: group::editor
default_enabled: true
name: zoekt_pause_indexing
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/126027
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/417597
milestone: '16.3'
type: ops
group: group::global search
default_enabled: false

View File

@ -789,6 +789,9 @@ Gitlab.ee do
Settings.cron_jobs['search_index_curation_worker'] ||= {}
Settings.cron_jobs['search_index_curation_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['search_index_curation_worker']['job_class'] ||= 'Search::IndexCurationWorker'
Settings.cron_jobs['pause_control_resume_worker'] ||= {}
Settings.cron_jobs['pause_control_resume_worker']['cron'] ||= '*/5 * * * *'
Settings.cron_jobs['pause_control_resume_worker']['job_class'] ||= 'PauseControl::ResumeWorker'
Settings.cron_jobs['sync_seat_link_worker'] ||= {}
Settings.cron_jobs['sync_seat_link_worker']['cron'] ||= "#{rand(60)} #{rand(3..4)} * * * UTC"
Settings.cron_jobs['sync_seat_link_worker']['job_class'] = 'SyncSeatLinkWorker'

View File

@ -35,6 +35,7 @@ if Gitlab::Metrics.enabled? && Gitlab::Runtime.application?
elsif Gitlab::Runtime.sidekiq?
Gitlab::Metrics::GlobalSearchIndexingSlis.initialize_slis! if Gitlab.ee?
Gitlab::Metrics::LooseForeignKeysSlis.initialize_slis!
Gitlab::Metrics::Llm.initialize_slis! if Gitlab.ee?
end
GC::Profiler.enable

View File

@ -100,10 +100,6 @@ categories.subtract([:database, :ux, :analytics_instrumentation]) if stable_bran
if changes.any?
random_roulette_spins = roulette.spin(nil, categories)
if categories.include?(:ux) # rubocop:disable Style/IfUnlessModifier
roulette.assign_pedroms_for_ux_wider_community_contribution(random_roulette_spins)
end
rows = random_roulette_spins.map do |spin|
markdown_row_for_spin(spin.category, spin)
end

View File

@ -0,0 +1,82 @@
# frozen_string_literal: true
class AddEpicWorkItemType < Gitlab::Database::Migration[2.1]
disable_ddl_transaction!
restrict_gitlab_migration gitlab_schema: :gitlab_main
ISSUE_ENUM_VALUE = 0
EPIC_ENUM_VALUE = 7
EPIC_NAME = 'Epic'
EPIC_WIDGETS = {
'Assignees' => 0,
'Description' => 1,
'Hierarchy' => 2,
'Labels' => 3,
'Notes' => 5,
'Start and due date' => 6,
'Health status' => 7,
'Status' => 11,
'Notifications' => 14,
'Award emoji' => 16
}.freeze
class MigrationWorkItemType < MigrationRecord
self.table_name = 'work_item_types'
end
class MigrationWidgetDefinition < MigrationRecord
self.table_name = 'work_item_widget_definitions'
end
class MigrationHierarchyRestriction < MigrationRecord
self.table_name = 'work_item_hierarchy_restrictions'
end
def up
# New instances will not run this migration and add this type via fixtures
# checking if record exists mostly because migration specs will run all migrations
# and that will conflict with the preloaded base work item types
existing_epic_work_item_type = MigrationWorkItemType.find_by(base_type: EPIC_ENUM_VALUE, namespace_id: nil)
return say('Epic work item type record exists, skipping creation') if existing_epic_work_item_type
new_epic_work_item_type = MigrationWorkItemType.create(
name: EPIC_NAME,
namespace_id: nil,
base_type: EPIC_ENUM_VALUE,
icon_name: 'issue-type-epic'
)
widgets = EPIC_WIDGETS.map do |widget_name, widget_enum_value|
{
work_item_type_id: new_epic_work_item_type.id,
name: widget_name,
widget_type: widget_enum_value
}
end
MigrationWidgetDefinition.upsert_all(
widgets,
unique_by: :index_work_item_widget_definitions_on_default_witype_and_name
)
issue_type = MigrationWorkItemType.find_by(base_type: ISSUE_ENUM_VALUE, namespace_id: nil)
return say('Issue work item type not found, skipping hierarchy restrictions creation') unless issue_type
restrictions = [
{ parent_type_id: new_epic_work_item_type.id, child_type_id: new_epic_work_item_type.id, maximum_depth: 9 },
{ parent_type_id: new_epic_work_item_type.id, child_type_id: issue_type.id, maximum_depth: 1 }
]
MigrationHierarchyRestriction.upsert_all(
restrictions,
unique_by: :index_work_item_hierarchy_restrictions_on_parent_and_child
)
end
def down
# There's the remote possibility that issues could already be
# using this issue type, with a tight foreign constraint.
# Therefore we will not attempt to remove any data.
end
end

View File

@ -0,0 +1 @@
5f796b08ce9888671e53dbdb7690d09518c714f442c94bd154cf568dee5afe99

View File

@ -196,6 +196,7 @@ This list of limitations only reflects the latest version of GitLab. If you are
- The **primary** site has to be online for OAuth login to happen. Existing sessions and Git are not affected. Support for the **secondary** site to use an OAuth provider independent from the primary is [being planned](https://gitlab.com/gitlab-org/gitlab/-/issues/208465).
- The installation takes multiple manual steps that together can take about an hour depending on circumstances. Consider using [the GitLab Environment Toolkit](https://gitlab.com/gitlab-org/gitlab-environment-toolkit) to deploy and operate production GitLab instances based on our [Reference Architectures](../reference_architectures/index.md), including automation of common daily tasks. We are planning to [improve Geo's installation even further](https://gitlab.com/groups/gitlab-org/-/epics/1465).
- Real-time updates of issues/merge requests (for example, via long polling) don't work on the **secondary** site.
- Using Geo secondary sites to accelerate runners is not officially supported. Support for this functionality is planned and can be tracked in [epic 9779](https://gitlab.com/groups/gitlab-org/-/epics/9779). If a replication lag occurs between the primary and secondary site, and the pipeline ref is not available on the secondary site when the job is executed, the job will fail.
- GitLab Runners cannot register with a **secondary** site. Support for this is [planned for the future](https://gitlab.com/gitlab-org/gitlab/-/issues/3294).
- [Selective synchronization](replication/configuration.md#selective-synchronization) only limits what repositories and files are replicated. The entire PostgreSQL data is still replicated. Selective synchronization is not built to accommodate compliance / export control use cases.
- [Pages access control](../../user/project/pages/pages_access_control.md) doesn't work on secondaries. See [GitLab issue #9336](https://gitlab.com/gitlab-org/gitlab/-/issues/9336) for details.

View File

@ -122,12 +122,7 @@ for details.
To use TLS certificates with Let's Encrypt, you can manually point the domain to one of the Geo sites, generate
the certificate, then copy it to all other sites.
- When secondary proxying is used together with separate URLs, registering [GitLab runners](https://docs.gitlab.com/runner/) to clone from
secondary sites is not supported. The runner registration succeeds, but the clone URL defaults to the primary site. The runner
[clone URL](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#the-runners-section) is configured per GitLab deployment
and cannot be configured per Geo site. Therefore, all runners clone from the primary site (or configured clone URL) irrespective of
which Geo site they register on. For information about GitLab CI using a specific Geo secondary to clone from, see issue
[3294](https://gitlab.com/gitlab-org/gitlab/-/issues/3294#note_1009488466).
- Using Geo secondary sites to accelerate runners is not officially supported. Support for this functionality is planned and can be tracked in [epic 9779](https://gitlab.com/groups/gitlab-org/-/epics/9779). If a replication lag occurs between the primary and secondary site, and the pipeline ref is not available on the secondary site when the job is executed, the job will fail.
- When secondary proxying is used together with separate URLs,
[signing in the secondary site using SAML](../replication/single_sign_on.md#saml-with-separate-url-with-proxying-enabled)

View File

@ -34,7 +34,8 @@ Example response:
## Generate code completions (Experiment)
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/415581) in GitLab 16.2 [with a flag](../administration/feature_flags.md) named `code_suggestions_completion_api`. Disabled by default. This feature is an Experiment.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/415581) in GitLab 16.2 [with a flag](../administration/feature_flags.md) named `code_suggestions_completion_api`. Disabled by default. This feature is an Experiment.
> - Requirement to generate a JWT before calling this endpoint was [removed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/127863) in GitLab 16.3.
FLAG:
On self-managed GitLab, by default this feature is not available.
@ -49,10 +50,8 @@ POST /code_suggestions/completions
Requests to this endpoint are proxied directly to the [model gateway](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist#completions). The documentation for the endpoint is currently the SSoT for named parameters.
Authentication to this endpoint requires both a GitLab access token and a Code Suggestions JWT. The access token is used to authenticate the user and the JWT is used to authenticate the request to the model gateway.
```shell
curl --header "Authorization: Bearer <YOUR_ACCESS_TOKEN>" --header "X-Gitlab-Oidc-Token: <TOKEN_GENERATED_FROM_TOKENS_ENDPOINT>" --data "<JSON_BODY>" https://gitlab.example.com/api/v4/code_suggestions/completions
curl --header "Authorization: Bearer <YOUR_ACCESS_TOKEN>" --data "<JSON_BODY>" https://gitlab.example.com/api/v4/code_suggestions/completions
```
Example body:

View File

@ -1271,8 +1271,7 @@ POST /projects/:id/merge_requests
| `milestone_id` | integer | **{dotted-circle}** No | The global ID of a milestone. |
| `remove_source_branch` | boolean | **{dotted-circle}** No | Flag indicating if a merge request should remove the source branch when merging. |
| `reviewer_ids` | integer array | **{dotted-circle}** No | The ID of the users added as a reviewer to the merge request. If set to `0` or left empty, no reviewers are added. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/49341) in GitLab 13.8. |
| `squash` | boolean | no | Indicates if the merge request is set to be squashed when merged. [Project settings](../user/project/merge_requests/squash_and_merge.md#configure-squash-options-for-a-project) may override this value. Use `squash_on_merge` instead to take project squash options into account. |
| `squash_on_merge` | boolean | no | Indicates if the merge request will be squashed when merged. |
| `squash` | boolean | **{dotted-circle}** No | Indicates if the merge request is set to be squashed when merged. [Project settings](../user/project/merge_requests/squash_and_merge.md#configure-squash-options-for-a-project) may override this value. |
| `target_project_id` | integer | **{dotted-circle}** No | Numeric ID of the target project. |
```json
@ -1423,8 +1422,7 @@ PUT /projects/:id/merge_requests/:merge_request_iid
| `remove_labels` | string | **{dotted-circle}** No | Comma-separated label names to remove from a merge request. |
| `remove_source_branch` | boolean | **{dotted-circle}** No | Flag indicating if a merge request should remove the source branch when merging. |
| `reviewer_ids` | integer array | **{dotted-circle}** No | The ID of the users set as a reviewer to the merge request. Set the value to `0` or provide an empty value to unset all reviewers. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/49341) in GitLab 13.8. |
| `squash` | boolean | no | Indicates if the merge request is set to be squashed when merged. [Project settings](../user/project/merge_requests/squash_and_merge.md#configure-squash-options-for-a-project) may override this value. Use `squash_on_merge` instead to take project squash options into account. |
| `squash_on_merge` | boolean | no | Indicates if the merge request will be squashed when merged. |
| `squash` | boolean | **{dotted-circle}** No | Indicates if the merge request is set to be squashed when merged. [Project settings](../user/project/merge_requests/squash_and_merge.md#configure-squash-options-for-a-project) may override this value. |
| `state_event` | string | **{dotted-circle}** No | New state (close/reopen). |
| `target_branch` | string | **{dotted-circle}** No | The target branch. |
| `title` | string | **{dotted-circle}** No | Title of MR. |

View File

@ -92,8 +92,8 @@ Parameters:
| `id` | integer or string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) owned by the authenticated user |
| `title` | string | yes | The title of a milestone |
| `description` | string | no | The description of the milestone |
| `due_date` | string | no | The due date of the milestone (`YYYYMMDD`) |
| `start_date` | string | no | The start date of the milestone (`YYYYMMDD`) |
| `due_date` | string | no | The due date of the milestone (`YYYY-MM-DD`) |
| `start_date` | string | no | The start date of the milestone (`YYYY-MM-DD`) |
## Edit milestone
@ -111,8 +111,8 @@ Parameters:
| `milestone_id` | integer | yes | The ID of the project's milestone |
| `title` | string | no | The title of a milestone |
| `description` | string | no | The description of the milestone |
| `due_date` | string | no | The due date of the milestone (`YYYYMMDD`) |
| `start_date` | string | no | The start date of the milestone (`YYYYMMDD`) |
| `due_date` | string | no | The due date of the milestone (`YYYY-MM-DD`) |
| `start_date` | string | no | The start date of the milestone (`YYYY-MM-DD`) |
| `state_event` | string | no | The state event of the milestone (close or activate) |
## Delete project milestone

View File

@ -1516,6 +1516,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your-token>" \
| `emails_enabled` | boolean | **{dotted-circle}** No | Enable email notifications. |
| `external_authorization_classification_label` **(PREMIUM)** | string | **{dotted-circle}** No | The classification label for the project. |
| `forking_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private`, or `enabled`. |
| `group_runners_enabled` | boolean | **{dotted-circle}** No | Enable group runners for this project. |
| `group_with_project_templates_id` **(PREMIUM)** | integer | **{dotted-circle}** No | For group-level custom templates, specifies ID of group from which all the custom project templates are sourced. Leave empty for instance-level templates. Requires `use_custom_template` to be true. |
| `import_url` | string | **{dotted-circle}** No | URL to import repository from. When the URL value isn't empty, you must not set `initialize_with_readme` to `true`. Doing so might result in the [following error](https://gitlab.com/gitlab-org/gitlab/-/issues/360266): `not a git repository`. |
| `initialize_with_readme` | boolean | **{dotted-circle}** No | Whether to create a Git repository with just a `README.md` file. Default is `false`. When this boolean is true, you must not pass `import_url` or other attributes of this endpoint which specify alternative contents for the repository. Doing so might result in the [following error](https://gitlab.com/gitlab-org/gitlab/-/issues/360266): `not a git repository`. |
@ -1550,7 +1551,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your-token>" \
| `resolve_outdated_diff_discussions` | boolean | **{dotted-circle}** No | Automatically resolve merge request diffs discussions on lines changed with a push. |
| `security_and_compliance_access_level` | string | **{dotted-circle}** No | (GitLab 14.9 and later) Security and compliance access level. One of `disabled`, `private`, or `enabled`. |
| `shared_runners_enabled` | boolean | **{dotted-circle}** No | Enable shared runners for this project. |
| `group_runners_enabled` | boolean | **{dotted-circle}** No | Enable group runners for this project. |
| `show_default_award_emojis` | boolean | **{dotted-circle}** No | Show default award emojis. |
| `snippets_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private`, or `enabled`. |
| `snippets_enabled` | boolean | **{dotted-circle}** No | _(Deprecated)_ Enable snippets for this project. Use `snippets_access_level` instead. |
| `squash_option` | string | **{dotted-circle}** No | One of `never`, `always`, `default_on`, or `default_off`. |
@ -1603,6 +1604,7 @@ POST /projects/user/:user_id
| `enforce_auth_checks_on_uploads` | boolean | **{dotted-circle}** No | Enforce [auth checks](../security/user_file_uploads.md#enable-authorization-checks-for-all-media-files) on uploads. |
| `external_authorization_classification_label` **(PREMIUM)** | string | **{dotted-circle}** No | The classification label for the project. |
| `forking_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private`, or `enabled`. |
| `group_runners_enabled` | boolean | **{dotted-circle}** No | Enable group runners for this project. |
| `group_with_project_templates_id` **(PREMIUM)** | integer | **{dotted-circle}** No | For group-level custom templates, specifies ID of group from which all the custom project templates are sourced. Leave empty for instance-level templates. Requires `use_custom_template` to be true. |
| `import_url` | string | **{dotted-circle}** No | URL to import repository from. |
| `initialize_with_readme` | boolean | **{dotted-circle}** No | `false` by default. |
@ -1637,7 +1639,7 @@ POST /projects/user/:user_id
| `resolve_outdated_diff_discussions` | boolean | **{dotted-circle}** No | Automatically resolve merge request diffs discussions on lines changed with a push. |
| `security_and_compliance_access_level` | string | **{dotted-circle}** No | (GitLab 14.9 and later) Security and compliance access level. One of `disabled`, `private`, or `enabled`. |
| `shared_runners_enabled` | boolean | **{dotted-circle}** No | Enable shared runners for this project. |
| `group_runners_enabled` | boolean | **{dotted-circle}** No | Enable group runners for this project. |
| `show_default_award_emojis` | boolean | **{dotted-circle}** No | Show default award emojis. |
| `snippets_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private`, or `enabled`. |
| `snippets_enabled` | boolean | **{dotted-circle}** No | _(Deprecated)_ Enable snippets for this project. Use `snippets_access_level` instead. |
| `issue_branch_template` | string | **{dotted-circle}** No | Template used to suggest names for [branches created from issues](../user/project/merge_requests/creating_merge_requests.md#from-an-issue). _([Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/21243) in GitLab 15.6.)_ |
@ -1709,6 +1711,7 @@ Supported attributes:
| `enforce_auth_checks_on_uploads` | boolean | **{dotted-circle}** No | Enforce [auth checks](../security/user_file_uploads.md#enable-authorization-checks-for-all-media-files) on uploads. |
| `external_authorization_classification_label` **(PREMIUM)** | string | **{dotted-circle}** No | The classification label for the project. |
| `forking_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private`, or `enabled`. |
| `group_runners_enabled` | boolean | **{dotted-circle}** No | Enable group runners for this project. |
| `import_url` | string | **{dotted-circle}** No | URL the repository was imported from. |
| `issues_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private`, or `enabled`. |
| `issues_enabled` | boolean | **{dotted-circle}** No | _(Deprecated)_ Enable issues for this project. Use `issues_access_level` instead. |
@ -1752,7 +1755,7 @@ Supported attributes:
| `security_and_compliance_access_level` | string | **{dotted-circle}** No | (GitLab 14.9 and later) Security and compliance access level. One of `disabled`, `private`, or `enabled`. |
| `service_desk_enabled` | boolean | **{dotted-circle}** No | Enable or disable Service Desk feature. |
| `shared_runners_enabled` | boolean | **{dotted-circle}** No | Enable shared runners for this project. |
| `group_runners_enabled` | boolean | **{dotted-circle}** No | Enable group runners for this project. |
| `show_default_award_emojis` | boolean | **{dotted-circle}** No | Show default award emojis. |
| `snippets_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private`, or `enabled`. |
| `snippets_enabled` | boolean | **{dotted-circle}** No | _(Deprecated)_ Enable snippets for this project. Use `snippets_access_level` instead. |
| `issue_branch_template` | string | **{dotted-circle}** No | Template used to suggest names for [branches created from issues](../user/project/merge_requests/creating_merge_requests.md#from-an-issue). _([Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/21243) in GitLab 15.6.)_ |

View File

@ -214,8 +214,8 @@ GET /projects/:id/repository/files/:file_path/raw
|-------------|----------------|----------|------------|
| `id` | integer or string | yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) owned by the authenticated user. |
| `file_path` | string | yes | URL-encoded full path to new file, such as `lib%2Fclass%2Erb`. |
| `ref` | string | yes | The name of branch, tag or commit. Default is the `HEAD` of the project. |
| `lfs` | boolean | no | Determines if the response should be Git LFS file contents, rather than the pointer. If the file is not tracked by Git LFS, ignored. Defaults to `false`. |
| `ref` | string | no | The name of branch, tag or commit. Default is the `HEAD` of the project. |
| `lfs` | boolean | no | Determines if the response should be Git LFS file contents, rather than the pointer. If the file is not tracked by Git LFS, ignored. Defaults to `false`. |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/13083/repository/files/app%2Fmodels%2Fkey%2Erb/raw?ref=master"

View File

@ -880,10 +880,10 @@ Example response:
```json
{
"id": 1,
"user_id": 1
"view_diffs_file_by_file": true,
"show_whitespace_in_diffs": false,
"pass_user_identities_to_ci_jwt": false
"user_id": 1,
"view_diffs_file_by_file": true,
"show_whitespace_in_diffs": false,
"pass_user_identities_to_ci_jwt": false
}
```
@ -902,10 +902,10 @@ PUT /user/preferences
```json
{
"id": 1,
"user_id": 1
"view_diffs_file_by_file": true,
"show_whitespace_in_diffs": false,
"pass_user_identities_to_ci_jwt": false
"user_id": 1,
"view_diffs_file_by_file": true,
"show_whitespace_in_diffs": false,
"pass_user_identities_to_ci_jwt": false
}
```

View File

@ -109,6 +109,7 @@ A job with the `created` state isn't seen by the runner yet. To make it possible
1. The job required a manual start and it has been triggered.
1. All jobs from the previous stage have completed successfully. In this case we transition all jobs from the next stage to `pending`.
1. The job specifies DAG dependencies using `needs:` and all the dependent jobs are completed.
1. The job has not been [dropped](#dropping-stuck-builds) as not runnable by [`Ci::PipelineCreation::DropNotRunnableBuildsService`](https://gitlab.com/gitlab-org/gitlab/-/blob/v16.0.4-ee/ee/app/services/ci/pipeline_creation/drop_not_runnable_builds_service.rb).
When the runner is connected, it requests the next `pending` job to run by polling the server continuously.
@ -119,11 +120,6 @@ After the server receives the request it selects a `pending` job based on the [`
Once all jobs are completed for the current stage, the server "unlocks" all the jobs from the next stage by changing their state to `pending`. These can now be picked by the scheduling algorithm when the runner requests new jobs, and continues like this until all stages are completed.
If a job is not picked up by a runner in 24 hours it is automatically removed from
the processing queue after that time. If a pending job is stuck, when there is no
runner available that can process it, it is removed from the queue after 1 hour.
In both cases the job's status is changed to `failed` with an appropriate failure reason.
### Communication between runner and GitLab server
After the runner is [registered](https://docs.gitlab.com/runner/register/) using the registration token, the server knows what type of jobs it can execute. This depends on:
@ -163,6 +159,47 @@ At this point we loop through remaining `pending` jobs and we try to assign the
As we increase the number of runners in the pool we also increase the chances of conflicts which would arise if assigning the same job to different runners. To prevent that we gracefully rescue conflict errors and assign the next job in the list.
### Dropping stuck builds
There are two ways of marking builds as "stuck" and dropping them; a simplified sketch of the time-based mechanism follows the list.
1. When a build is created, [`Ci::PipelineCreation::DropNotRunnableBuildsService`](https://gitlab.com/gitlab-org/gitlab/-/blob/v16.0.4-ee/ee/app/services/ci/pipeline_creation/drop_not_runnable_builds_service.rb) checks for upfront known conditions that would make jobs not executable:
- If there are not enough [CI/CD Minutes](#compute-quota) to run the build, the build is immediately dropped with `ci_quota_exceeded`.
- [In the future](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/121761), if the project is not on a plan required by the runners available for the build (via `allowed_plans`), the build will be immediately dropped with `no_matching_runner`.
1. If there is no available runner to pick up a build, it is dropped by [`Ci::StuckBuilds::DropPendingService`](https://gitlab.com/gitlab-org/gitlab/-/blob/v16.0.4-ee/app/services/ci/stuck_builds/drop_pending_service.rb):
- If a job is not picked up by a runner within 24 hours, it is automatically removed from the processing queue.
- If a pending job is **stuck** because there is no runner available that can process it, it is removed from the queue after 1 hour.
- In both cases the job's status is changed to `failed` with an appropriate failure reason.
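Purely for illustration, here is a minimal sketch of the second, time-based mechanism. It is not GitLab's `DropPendingService`; the `PendingBuild` struct, the flat list of builds, and the `runner_available` flag are assumptions made only for this example.
```ruby
# Illustrative only: builds pending longer than their timeout are selected for dropping,
# mirroring the 24-hour (any pending build) and 1-hour (stuck build) limits above.
PendingBuild = Struct.new(:id, :queued_at, :runner_available, keyword_init: true)

GENERAL_TIMEOUT = 24 * 60 * 60 # seconds: any pending build is removed after 24 hours
STUCK_TIMEOUT   = 60 * 60      # seconds: "stuck" builds (no matching runner) after 1 hour

def builds_to_drop(builds, now: Time.now)
  builds.select do |build|
    timeout = build.runner_available ? GENERAL_TIMEOUT : STUCK_TIMEOUT
    (now - build.queued_at) > timeout
  end
end

builds = [
  PendingBuild.new(id: 1, queued_at: Time.now - 2 * 60 * 60, runner_available: false), # stuck for 2 hours
  PendingBuild.new(id: 2, queued_at: Time.now - 2 * 60 * 60, runner_available: true)   # still within 24 hours
]

builds_to_drop(builds).map(&:id) # => [1] — would be failed with a "stuck" failure reason
```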
#### The reason behind this difference
The CI Minutes quota check is handled early, when the job is created, because the outcome is constant most of the time:
once a project exceeds its limit, every subsequent job that matches the condition stays ineligible until the next month starts.
The project owner can buy additional minutes, but that is a manual action the project needs to take.
The same mechanism will be used for `allowed_plans` [soon](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/121761).
If the project is not on the required plan and a job targets such a runner,
the job will keep failing until the project owner changes the configuration or upgrades the namespace to the required plan.
These two checks are also very SaaS specific and, at SaaS scale, quite compute expensive.
Performing them before the job even transitions to `pending`, and failing early, makes a lot of sense here.
Why don't we handle the other cases early and drop pending jobs immediately?
In some cases a job is pending only because the runner is slow to pick up jobs.
That is not something GitLab can know.
Depending on the runner's configuration and capacity, and the size of the queue in GitLab, a job may be picked up immediately or may need to wait.
There may also be other reasons:
- You are performing runner maintenance and the runner is not available for a while.
- You are updating the configuration and, by mistake, have misconfigured the tags or the protected flag (or, in the case of our SaaS instance runners, assigned a wrong cost factor or `allowed_plans` configuration).
All of these problems may be temporary; they are mostly not expected to happen, and when they do happen they are expected to be detected and fixed quickly.
We definitely don't want to drop jobs immediately when one of these conditions occurs.
Dropping a job only because a runner is at capacity, or because of a temporary unavailability or configuration mistake, would be very harmful to users.
## The definition of "Job" in GitLab CI/CD
"Job" in GitLab CI context refers a task to drive Continuous Integration, Delivery and Deployment.

View File

@ -209,7 +209,7 @@ prudent to skip this step until you have verified that it runs smoothly in produ
rollout. In this case, go to the next step first, and then, after the verification period has passed, promote
the new Ruby to be the new default.
### Update CNG and Omnibus, merge the GitLab MR
### Update CNG, Omnibus, Self-compiled and merge the GitLab MR
The last step is to use the new Ruby in production. This
requires updating Omnibus and production Docker images to use the new version.
@ -220,6 +220,7 @@ To use the new Ruby in production, update the following projects:
- [Cloud-native GitLab Docker Images (CNG)](https://gitlab.com/gitlab-org/build/CNG) ([example](https://gitlab.com/gitlab-org/build/CNG/-/merge_requests/739))
- [Omnibus GitLab](https://gitlab.com/gitlab-org/omnibus-gitlab) ([example](https://gitlab.com/gitlab-org/omnibus-gitlab/-/merge_requests/5545))
- [Self-compiled installations](../install/installation.md): update the [Ruby system version check](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/system_check/app/ruby_version_check.rb)
If you submit a change management request, coordinate the rollout with infrastructure
engineers. When dealing with larger upgrades, involve [Release Managers](https://about.gitlab.com/community/release-managers/)

View File

@ -326,3 +326,42 @@ end
For [idempotent jobs](idempotent_jobs.md) that declare either `:sticky` or `:delayed` data consistency, we are
[preserving the latest WAL location](idempotent_jobs.md#preserve-the-latest-wal-location-for-idempotent-jobs) while deduplicating,
ensuring that we read from the replica that is fully caught up.
## Job pause control
With the `pause_control` property, you can conditionally pause job processing. If the strategy is active, the job
is stored in a separate `ZSET` and re-enqueued when the strategy becomes inactive. `PauseControl::ResumeWorker` is a cron
worker that checks if any paused jobs must be restarted.
To use `pause_control`, you can:
- Use one of the strategies defined in `lib/gitlab/sidekiq_middleware/pause_control/strategies/`.
- Define a custom strategy in `lib/gitlab/sidekiq_middleware/pause_control/strategies/` and add the strategy to `lib/gitlab/sidekiq_middleware/pause_control/strategies.rb`.
For example:
```ruby
module Gitlab
module SidekiqMiddleware
module PauseControl
module Strategies
class CustomStrategy < Base
def should_pause?
ApplicationSetting.current.elasticsearch_pause_indexing?
end
end
end
end
end
end
```
```ruby
class PausedWorker
include ApplicationWorker
pause_control :custom_strategy
# ...
end
```

View File

@ -136,18 +136,45 @@ We cannot guarantee that the large language model produces results that are corr
> Introduced in GitLab 16.0 as an [Experiment](../policy/experiment-beta-support.md#experiment).
This feature is an [Experiment](../policy/experiment-beta-support.md) on GitLab.com that is powered by OpenAI's GPT-3. It requires the [group-level third-party AI features setting](group/manage.md#enable-third-party-ai-features) to be enabled.
This feature is an [Experiment](../policy/experiment-beta-support.md) on GitLab.com. It requires the [group-level third-party AI features setting](group/manage.md#enable-third-party-ai-features) to be enabled.
Getting help has never been easier. If you have a question about how the GitLab product works, you can get AI generated support from GitLab Duo Chat.
GitLab Duo Chat is powered by Anthropic's Claude-2.0 and Claude-instant-1.1 large language models and OpenAI's text-embedding-ada-002 embeddings. The LLMs are employed to analyze user questions to collect appropriate context data from the user's project, and to generate responses. In some cases, embeddings are used to embed user questions and find relevant content in GitLab documentation to share with the LLMs to generate an answer.
You can get AI generated support from GitLab Duo Chat about the following topics:
- How to use GitLab.
- Questions about an issue.
- Summarizing an issue.
Example questions you might ask:
- `What is a fork?`
- `How to reset my password`
- `Summarize the issue <link to your issue>`
- `Summarize the description of the current issue`
The examples above all use data from either the issue or the GitLab documentation. However, you can also ask to generate code, CI/CD configurations, or to explain code. For example:
- `Write a hello world function in Ruby`
- `Write a tic tac toe game in JavaScript`
- `Write a .gitlab-ci.yml file to test and build a rails application`
- `Explain the following code: def sum(a, b) a + b end`
You can also ask follow-up questions.
This is an experimental feature and we're continuously extending the capabilities and reliability of the chat.
1. In the lower-left corner, select the Help icon.
1. Select **Ask in GitLab Duo Chat**. A drawer opens on the right side of your screen.
1. Enter your question in the chat input box and press **Enter** or select **Send**. It may take a few seconds for the interactive AI chat to search the product documentation and produce an answer.
1. Enter your question in the chat input box and press **Enter** or select **Send**. It may take a few seconds for the interactive AI chat to produce an answer.
1. You can ask a follow-up question.
1. If you want to ask a new question unrelated to the previous conversation, you may receive better answers if you clear the context by typing `/reset` into the input box and pressing **Send**.
To give feedback, select the **Give Feedback** link.
To give feedback about a specific response, use the feedback buttons in the response message.
Or, you can add a comment in the [feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/415591).
NOTE:
Only the last 50 messages in the chat history are retained. The chat history expires 3 days after last use.
Only the last 50 messages are retained in the chat history. The chat history expires 3 days after last use.
### Summarize merge request changes **(ULTIMATE SAAS)**

View File

@ -106,7 +106,69 @@ Users granted:
SAML group membership is evaluated each time a user signs in.
### Global SAML group memberships lock **(PREMIUM SELF)**
### Use the API
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/290367) in GitLab 15.3.
You can use the GitLab API to [list, add, and delete](../../../api/groups.md#saml-group-links) SAML group links.
## Microsoft Azure Active Directory integration
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/10507) in GitLab 16.3.
NOTE:
Microsoft has [announced](https://azure.microsoft.com/en-us/updates/azure-ad-is-becoming-microsoft-entra-id/) that Azure Active Directory (AD) is being renamed to Entra ID.
Azure AD sends up to 150 groups in the groups claim. When users are members of more than 150 groups, Azure AD sends a
group overage claim attribute in the SAML response, and group memberships must then be obtained using the Microsoft Graph API.
To integrate Microsoft Azure AD, you:
- Configure Azure AD to enable GitLab to communicate with the Microsoft Graph API.
- Configure GitLab.
### GitLab settings to Azure AD fields
| GitLab setting | Azure field |
| -------------- | ------------------------------------------ |
| Tenant ID | Directory (tenant) ID |
| Client ID | Application (client) ID |
| Client Secret | Value (on **Certificates & secrets** page) |
### Configure Azure AD
<!-- vale gitlab.SentenceSpacing = NO -->
1. In the [Azure Portal](https://portal.azure.com), go to **Azure Active Directory > App registrations > All applications**, and select your GitLab SAML application.
1. Under **Essentials**, the **Application (client) ID** and **Directory (tenant) ID** values are displayed. Copy these values, because you need them for the GitLab configuration.
1. In the left navigation, select **Certificates & secrets**.
1. On the **Client secrets** tab, select **New client secret**.
1. In the **Description** text box, add a description.
1. In the **Expires** dropdown list, set the expiration date for the credentials. If the secret expires, the GitLab integration will no longer work until the credentials are updated.
1. To generate the credentials, select **Add**.
1. Copy the **Value** of the credential. This value is displayed only once, and you need it for the GitLab configuration.
1. In the left navigation, select **API permissions**.
1. Select **Microsoft Graph > Application permissions**.
1. Select the checkboxes **GroupMember.Read.All** and **User.Read.All**.
1. Select **Add permissions** to save.
1. Select **Grant admin consent for <application name>**, then on the confirmation dialog select **Yes**. The **Status** column for both permissions should change to a green check with **Granted for <application name>**.
<!-- vale gitlab.SentenceSpacing = YES -->
### Configure GitLab
1. On the left sidebar, at the top, select **Search GitLab** (**{search}**) to find your top-level group.
1. Select **Settings > SAML SSO**.
1. In the Microsoft Azure integration section, select the **Enable Microsoft Azure integration for this group** checkbox.
1. Enter the **Tenant ID**, **Client ID**, and **Client secret** obtained earlier when configuring Azure Active Directory in the Azure Portal.
1. Optional. If using Azure AD for US Government or Azure AD China, enter the appropriate **Login API endpoint** and **Graph API endpoint**. The default values work for most organizations.
1. Select **Save changes**.
With this configuration, if a user signs in with SAML and Azure sends a group overage claim in the response,
GitLab initiates a Group Sync job to call the Microsoft Graph API and retrieve the user's group membership.
Then the GitLab Group membership is updated according to SAML Group Links.
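For illustration only, the sketch below shows the kind of Microsoft Graph lookup this integration relies on. The token and `transitiveMemberOf` endpoints are Microsoft's documented ones; the environment variables, helper names, and user object ID are placeholders, and this is not GitLab's code.
```ruby
require 'net/http'
require 'json'
require 'uri'

# Placeholders: supply your own tenant and application credentials.
TENANT_ID     = ENV.fetch('AZURE_TENANT_ID')
CLIENT_ID     = ENV.fetch('AZURE_CLIENT_ID')
CLIENT_SECRET = ENV.fetch('AZURE_CLIENT_SECRET')

# Client-credentials token for the Graph API. Requires the GroupMember.Read.All
# and User.Read.All application permissions granted in the steps above.
def graph_token
  uri = URI("https://login.microsoftonline.com/#{TENANT_ID}/oauth2/v2.0/token")
  response = Net::HTTP.post_form(uri,
    'client_id'     => CLIENT_ID,
    'client_secret' => CLIENT_SECRET,
    'scope'         => 'https://graph.microsoft.com/.default',
    'grant_type'    => 'client_credentials')
  JSON.parse(response.body).fetch('access_token')
end

# All groups the user belongs to, including nested groups. A real client would
# also follow `@odata.nextLink` pagination for large result sets.
def group_ids_for(user_object_id, token)
  uri = URI("https://graph.microsoft.com/v1.0/users/#{user_object_id}/transitiveMemberOf?$select=id")
  request = Net::HTTP::Get.new(uri, 'Authorization' => "Bearer #{token}")
  response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
  JSON.parse(response.body).fetch('value', []).map { |object| object['id'] }
end

group_ids = group_ids_for('00000000-0000-0000-0000-000000000000', graph_token)
# These group IDs can then be matched against the group's SAML group links.
```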
## Global SAML group memberships lock **(PREMIUM SELF)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/386390) in GitLab 15.10.
@ -131,7 +193,7 @@ To enable global group memberships lock:
1. Expand the **Visibility and access controls** section.
1. Ensure the **Lock memberships to SAML synchronization** checkbox is selected.
### Automatic member removal
## Automatic member removal
After a group sync, users who are not members of a mapped SAML group are removed from the group.
On GitLab.com, users in the top-level group are assigned the
@ -215,23 +277,17 @@ graph TB
GitLabGroupD --> |Member|GitLabUserD
```
#### User that belongs to many SAML groups automatically removed from GitLab group
### User that belongs to many SAML groups automatically removed from GitLab group
When using Azure AD as the SAML identity provider, users that belong to many SAML groups can be automatically removed from your GitLab group. Users are removed from GitLab
groups if the group claim is missing from the user's SAML assertion.
When using Azure AD with SAML, if any user in your organization is a member of more than 150 groups and you use SAML Group Sync,
that user may lose their group memberships.
For more information, see
[Microsoft Group overages](https://learn.microsoft.com/en-us/security/zero-trust/develop/configure-tokens-group-claims-app-roles#group-overages).
Because of a [known issue with Azure AD](https://support.esri.com/en/technical-article/000022190), if a user belongs to more than 150 SAML groups, the group claim is not sent
in the user's SAML assertion.
With an Azure AD premium subscription, you can allow up to 500 group IDs to be sent in a SAML token using the
[Azure AD documentation configuration steps](https://support.esri.com/en/technical-article/000022190).
GitLab has a [Microsoft Azure Active Directory integration](#microsoft-azure-active-directory-integration) that enables SAML Group Sync for organizations
with users in more than 150 groups. This integration uses the Microsoft Graph API to obtain all user memberships and is
not limited to 150 groups.
Otherwise, you can work around this issue by changing the [group claims](https://learn.microsoft.com/en-us/azure/active-directory/hybrid/connect/how-to-connect-fed-group-claims#configure-the-azure-ad-application-registration-for-group-attributes) to use the `Groups assigned to the application` option instead.
![Manage Group Claims](img/Azure-manage-group-claims.png)
### Use the API
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/290367) in GitLab 15.3.
You can use the GitLab API to [list, add, and delete](../../../api/groups.md#saml-group-links) SAML group links.
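For example, assuming the `/groups/:id/saml_group_links` endpoints described in the linked API documentation and a token with the `api` scope, a minimal Ruby sketch that lists and adds SAML group links could look like this (the URL, token, group ID, group name, and access level are placeholders):
```ruby
require "net/http"
require "uri"

GITLAB_URL    = "https://gitlab.example.com" # placeholder
PRIVATE_TOKEN = "<your-access-token>"        # placeholder
GROUP_ID      = 42                           # placeholder

def gitlab_request(request)
  request["PRIVATE-TOKEN"] = PRIVATE_TOKEN
  uri = request.uri
  Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == "https") { |http| http.request(request) }
end

# List the existing SAML group links for the group.
list = Net::HTTP::Get.new(URI("#{GITLAB_URL}/api/v4/groups/#{GROUP_ID}/saml_group_links"))
puts gitlab_request(list).body

# Map the identity provider group "saml-developers" to Developer access (access level 30).
create = Net::HTTP::Post.new(URI("#{GITLAB_URL}/api/v4/groups/#{GROUP_ID}/saml_group_links"))
create.set_form_data("saml_group_name" => "saml-developers", "access_level" => 30)
puts gitlab_request(create).body
```
Replace the placeholders with values for your group; the linked API documentation describes the full set of endpoints and parameters.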

View File

@ -436,3 +436,9 @@ current_user = User.first
recipients = NotificationRecipients::BuildService.build_recipients(merge_request, current_user, action: "push_to"); recipients.count
recipients.each { |notify| puts notify.user.username }
```
### Notifications about a failed pipeline that doesn't exist
If you receive notifications (through email or Slack) about a failed pipeline that no longer
exists, check whether you have any duplicate GitLab instances that could have triggered the
message.

Binary file not shown. Before: 8.2 KiB.

Binary file not shown. After: 9.1 KiB.

View File

@ -6,25 +6,19 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Import your Jira project issues to GitLab **(FREE)**
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/2766) in GitLab 12.10.
Using GitLab Jira importer, you can import your Jira issues to GitLab.com or to
your self-managed GitLab instance.
Jira issues import is an MVC, project-level feature, meaning that issues from multiple
Jira projects can be imported into a GitLab project. MVC version imports issue title and description
as well as some other issue metadata as a section in the issue description.
and some other issue metadata as a section in the issue description.
## Known limitations
The information imported into GitLab fields from Jira depends on the version of GitLab:
GitLab imports the following information directly:
- From GitLab 12.10 to GitLab 13.1, only the issue's title and description are imported
directly.
- From GitLab 13.2:
- The issue's labels are also imported directly.
- You're also able to map Jira users to GitLab project members when preparing for the
import.
- The issue's title, description, and labels.
- You can also map Jira users to GitLab project members when preparing for the import.
Other Jira issue metadata that is not formally mapped to GitLab issue fields is
imported into the GitLab issue's description as plain text.
@ -44,8 +38,6 @@ iterations of the GitLab Jira importer.
## Import Jira issues to GitLab
> New import form [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/216145) in GitLab 13.2.
NOTE:
Importing Jira issues is done as an asynchronous background job, which
may result in delays based on import queue load, system load, or other factors.
@ -55,7 +47,7 @@ To import Jira issues to a GitLab project:
1. On the **{issues}** **Issues** page, select **Actions** (**{ellipsis_v}**) **> Import from Jira**.
![Import issues from Jira button](img/jira/import_issues_from_jira_button_v12_10.png)
![Import issues from Jira button](img/jira/import_issues_from_jira_button_v16_3.png)
The **Import from Jira** option is only visible if you have the [correct permissions](#prerequisites).

View File

@ -93,6 +93,7 @@ you can create your project first and access it under `http(s)://namespace.examp
> - [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/9347) in GitLab 15.9 [with a flag](../../../administration/feature_flags.md) named `pages_unique_domain`. Disabled by default.
> - [Enabled by default](https://gitlab.com/gitlab-org/gitlab/-/issues/388151) in GitLab 15.11.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/122229) in GitLab 16.3.
By default, every project in a group shares the same domain, for example, `group.gitlab.io`. This means that cookies are also shared for all projects in a group.

View File

@ -105,6 +105,18 @@ module Gitlab
:notifications,
:current_user_todos,
:award_emoji
],
epic: [
:assignees,
:description,
:hierarchy,
:labels,
:notes,
:start_and_due_date,
:health_status,
:status,
:notifications,
:award_emoji
]
}.freeze

View File

@ -10,12 +10,15 @@ module Gitlab
issue = find_or_create_type(::WorkItems::Type::TYPE_NAMES[:issue])
task = find_or_create_type(::WorkItems::Type::TYPE_NAMES[:task])
incident = find_or_create_type(::WorkItems::Type::TYPE_NAMES[:incident])
epic = find_or_create_type(::WorkItems::Type::TYPE_NAMES[:epic])
restrictions = [
{ parent_type_id: objective.id, child_type_id: objective.id, maximum_depth: 9 },
{ parent_type_id: objective.id, child_type_id: key_result.id, maximum_depth: 1 },
{ parent_type_id: issue.id, child_type_id: task.id, maximum_depth: 1 },
{ parent_type_id: incident.id, child_type_id: task.id, maximum_depth: 1 }
{ parent_type_id: incident.id, child_type_id: task.id, maximum_depth: 1 },
{ parent_type_id: epic.id, child_type_id: epic.id, maximum_depth: 9 },
{ parent_type_id: epic.id, child_type_id: issue.id, maximum_depth: 1 }
]
::WorkItems::HierarchyRestriction.upsert_all(

View File

@ -82,8 +82,7 @@ module Gitlab
end
def unique_domain_enabled?
Feature.enabled?(:pages_unique_domain, project) &&
project.project_setting.pages_unique_domain_enabled?
project.project_setting.pages_unique_domain_enabled?
end
def config

View File

@ -28,7 +28,6 @@ module Gitlab
def by_unique_domain(name)
project = Project.by_pages_enabled_unique_domain(name)
return unless Feature.enabled?(:pages_unique_domain, project)
return unless project&.pages_deployed?
::Pages::VirtualDomain.new(projects: [project])

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
module Gitlab
module SidekiqLogging
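# Emits structured log entries when the pause control middleware pauses a job or resumes it from the waiting queue.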
class PauseControlLogger
include Singleton
include LogsJobs
def paused_log(job, strategy:)
payload = parse_job(job)
payload['job_status'] = 'paused'
payload['message'] = "#{base_message(payload)}: paused: #{strategy}"
payload['pause_control.strategy'] = strategy
Sidekiq.logger.info payload
end
def resumed_log(worker_name, args)
job = {
'class' => worker_name,
'args' => args
}
payload = parse_job(job)
payload['job_status'] = 'resumed'
payload['message'] = "#{base_message(payload)}: resumed"
Sidekiq.logger.info payload
end
end
end
end

View File

@ -36,6 +36,7 @@ module Gitlab
chain.add ::Gitlab::SidekiqVersioning::Middleware
chain.add ::Gitlab::SidekiqStatus::ServerMiddleware
chain.add ::Gitlab::SidekiqMiddleware::WorkerContext::Server
chain.add ::Gitlab::SidekiqMiddleware::PauseControl::Server
# DuplicateJobs::Server should be placed at the bottom, but before the SidekiqServerMiddleware,
# so we can compare the latest WAL location against replica
chain.add ::Gitlab::SidekiqMiddleware::DuplicateJobs::Server
@ -54,6 +55,7 @@ module Gitlab
# Sidekiq Client Middleware should be placed before DuplicateJobs::Client middleware,
# so we can store WAL location before we deduplicate the job.
chain.add ::Gitlab::Database::LoadBalancing::SidekiqClientMiddleware
chain.add ::Gitlab::SidekiqMiddleware::PauseControl::Client
chain.add ::Gitlab::SidekiqMiddleware::DuplicateJobs::Client
chain.add ::Gitlab::SidekiqStatus::ClientMiddleware
chain.add ::Gitlab::SidekiqMiddleware::AdminMode::Client

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
module Gitlab
module SidekiqMiddleware
module PauseControl
DEFAULT_STRATEGY = :none
UnknownStrategyError = Class.new(StandardError)
STRATEGIES = {
zoekt: ::Gitlab::SidekiqMiddleware::PauseControl::Strategies::Zoekt,
none: ::Gitlab::SidekiqMiddleware::PauseControl::Strategies::None
}.freeze
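# Returns the strategy class registered under `name`, falling back to the no-op strategy for unknown names.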
def self.for(name)
STRATEGIES.fetch(name, STRATEGIES[DEFAULT_STRATEGY])
end
end
end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
module Gitlab
module SidekiqMiddleware
module PauseControl
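# Sidekiq client middleware: either lets the job be enqueued normally or diverts it to the pause control waiting queue, depending on the worker's strategy.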
class Client
def call(worker_class, job, _queue, _redis_pool, &block)
::Gitlab::SidekiqMiddleware::PauseControl::StrategyHandler.new(worker_class, job).schedule(&block)
end
end
end
end
end

View File

@ -0,0 +1,118 @@
# frozen_string_literal: true
module Gitlab
module SidekiqMiddleware
module PauseControl
class PauseControlService
# Manages the waiting queue for paused workers.
# When a worker is paused, its jobs are stored in a separate sorted set in Redis.
LIMIT = 1000
PROJECT_CONTEXT_KEY = "#{Gitlab::ApplicationContext::LOG_KEY}.project".freeze
def initialize(worker_name)
@worker_name = worker_name
worker_name = @worker_name.underscore
@redis_set_key = "sidekiq:pause_control:paused_jobs:zset:{#{worker_name}}"
@redis_score_key = "sidekiq:pause_control:paused_jobs:score:{#{worker_name}}"
end
class << self
def add_to_waiting_queue!(worker_name, args, context)
new(worker_name).add_to_waiting_queue!(args, context)
end
def has_jobs_in_waiting_queue?(worker_name)
new(worker_name).has_jobs_in_waiting_queue?
end
def resume_processing!(worker_name)
new(worker_name).resume_processing!
end
def queue_size(worker_name)
new(worker_name).queue_size
end
end
def add_to_waiting_queue!(args, context)
with_redis do |redis|
redis.zadd(redis_set_key, generate_unique_score(redis), serialize(args, context))
end
end
def queue_size
with_redis { |redis| redis.zcard(redis_set_key) }
end
def has_jobs_in_waiting_queue?
with_redis { |redis| redis.exists?(redis_set_key) } # rubocop:disable CodeReuse/ActiveRecord
end
def resume_processing!(iterations: 1)
with_redis do |redis|
iterations.times do
jobs_with_scores = next_batch_from_waiting_queue(redis)
break if jobs_with_scores.empty?
parsed_jobs = jobs_with_scores.map { |j, _| deserialize(j) }
parsed_jobs.each { |j| send_to_processing_queue(j) }
remove_jobs_from_waiting_queue(redis, jobs_with_scores)
end
size = queue_size
redis.del(redis_score_key, redis_set_key) if size == 0
size
end
end
private
attr_reader :worker_name, :redis_set_key, :redis_score_key
def with_redis(&blk)
Gitlab::Redis::SharedState.with(&blk) # rubocop:disable CodeReuse/ActiveRecord
end
def serialize(args, context)
{
args: args,
# Only include part of the context that would not prevent deduplication
context: context.slice(PROJECT_CONTEXT_KEY)
}.to_json
end
def deserialize(json)
Gitlab::Json.parse(json)
end
def send_to_processing_queue(job)
Gitlab::ApplicationContext.with_raw_context(job['context']) do
args = job['args']
Gitlab::SidekiqLogging::PauseControlLogger.instance.resumed_log(worker_name, args)
worker_name.safe_constantize&.perform_async(*args)
end
end
def generate_unique_score(redis)
redis.incr(redis_score_key)
end
def next_batch_from_waiting_queue(redis)
redis.zrangebyscore(redis_set_key, '-inf', '+inf', limit: [0, LIMIT], with_scores: true)
end
def remove_jobs_from_waiting_queue(redis, jobs_with_scores)
first_score = jobs_with_scores.first.last
last_score = jobs_with_scores.last.last
redis.zremrangebyscore(redis_set_key, first_score, last_score)
end
end
end
end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
module Gitlab
module SidekiqMiddleware
module PauseControl
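# Sidekiq server middleware: pauses the job instead of executing it when the worker's strategy reports that processing should be paused.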
class Server
def call(worker_class, job, _queue, &block)
::Gitlab::SidekiqMiddleware::PauseControl::StrategyHandler.new(worker_class, job).perform(&block)
end
end
end
end
end

View File

@ -0,0 +1,64 @@
# frozen_string_literal: true
module Gitlab
module SidekiqMiddleware
module PauseControl
module Strategies
class Base
extend ::Gitlab::Utils::Override
def self.should_pause?
new.should_pause?
end
def schedule(job)
if should_pause?
pause_job!(job)
return
end
yield
end
def perform(job)
if should_pause?
pause_job!(job)
return
end
yield
end
def should_pause?
# All subclasses must implement this method.
# Return false when jobs should not be paused and true when they should be.
# The PauseControl::ResumeWorker cron job executes this method to check whether jobs should remain paused.
raise NotImplementedError
end
private
def pause_job!(job)
Gitlab::SidekiqLogging::PauseControlLogger.instance.paused_log(job, strategy: strategy_name)
Gitlab::SidekiqMiddleware::PauseControl::PauseControlService.add_to_waiting_queue!(
job['class'],
job['args'],
current_context
)
end
def strategy_name
Gitlab::SidekiqMiddleware::PauseControl::STRATEGIES.key(self.class)
end
def current_context
Gitlab::ApplicationContext.current
end
end
end
end
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
module Gitlab
module SidekiqMiddleware
module PauseControl
module Strategies
# This strategy will never pause a job
class None < Base
override :should_pause?
def should_pause?
false
end
end
end
end
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
module Gitlab
module SidekiqMiddleware
module PauseControl
module Strategies
class Zoekt < Base
override :should_pause?
def should_pause?
::Feature.enabled?(:zoekt_pause_indexing, type: :ops)
end
end
end
end
end
end

View File

@ -0,0 +1,35 @@
# frozen_string_literal: true
module Gitlab
module SidekiqMiddleware
module PauseControl
class StrategyHandler
def initialize(worker_class, job)
@worker_class = worker_class
@job = job
end
# This will continue the middleware chain if the job should be scheduled
# It will return false if the job needs to be cancelled
def schedule(&block)
PauseControl.for(strategy).new.schedule(job, &block)
end
# This will continue the server middleware chain if the job should be
# executed.
# It will return false if the job should not be executed.
def perform(&block)
PauseControl.for(strategy).new.perform(job, &block)
end
private
attr_reader :job, :worker_class
def strategy
Gitlab::SidekiqMiddleware::PauseControl::WorkersMap.strategy_for(worker: worker_class)
end
end
end
end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
module Gitlab
module SidekiqMiddleware
module PauseControl
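# Tracks which workers opted into each pause control strategy (for example, with `pause_control :zoekt` in the worker class).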
class WorkersMap
class << self
attr_reader :workers
def set_strategy_for(strategy:, worker:)
raise ArgumentError, "Unknown strategy: #{strategy}" unless PauseControl::STRATEGIES.key?(strategy)
@workers ||= Hash.new { |h, k| h[k] = [] }
@workers[strategy].push(worker)
end
def strategy_for(worker:)
return unless @workers
@workers.find { |_, v| v.include?(worker) }&.first
end
end
end
end
end
end

View File

@ -7,7 +7,7 @@ module SystemCheck
set_check_pass -> { "yes (#{self.current_version})" }
def self.required_version
@required_version ||= Gitlab::VersionInfo.new(2, 7, 2)
@required_version ||= Gitlab::VersionInfo.new(3, 0, 6)
end
def self.current_version

View File

@ -2887,6 +2887,9 @@ msgstr ""
msgid "Add list"
msgstr ""
msgid "Add new"
msgstr ""
msgid "Add new application"
msgstr ""
@ -12083,6 +12086,9 @@ msgstr ""
msgid "ComplianceReport|No projects found that match filters"
msgstr ""
msgid "ComplianceReport|No standards adherences found"
msgstr ""
msgid "ComplianceReport|No violations found"
msgstr ""
@ -12113,6 +12119,27 @@ msgstr ""
msgid "ComplianceReport|Update result"
msgstr ""
msgid "ComplianceStandardsAdherence|Have a valid rule that prevents author approved merge requests"
msgstr ""
msgid "ComplianceStandardsAdherence|Have a valid rule that prevents merge requests approved by committers"
msgstr ""
msgid "ComplianceStandardsAdherence|Have a valid rule that requires any merge request to have more than two approvals"
msgstr ""
msgid "ComplianceStandardsAdherence|Prevent authors as approvers"
msgstr ""
msgid "ComplianceStandardsAdherence|Prevent committers as approvers"
msgstr ""
msgid "ComplianceStandardsAdherence|Two approvals"
msgstr ""
msgid "ComplianceStandardsAdherence|View details"
msgstr ""
msgid "Component"
msgstr ""
@ -19142,6 +19169,9 @@ msgstr ""
msgid "Facebook"
msgstr ""
msgid "Fail"
msgstr ""
msgid "Failed"
msgstr ""
@ -30173,7 +30203,7 @@ msgstr ""
msgid "My awesome group"
msgstr ""
msgid "My comment templates (%{count})"
msgid "My comment templates"
msgstr ""
msgid "My company or team"
@ -45299,6 +45329,9 @@ msgstr ""
msgid "Succeeded"
msgstr ""
msgid "Success"
msgstr ""
msgid "Successfully activated"
msgstr ""
@ -53664,6 +53697,9 @@ msgstr ""
msgid "You have no permissions"
msgstr ""
msgid "You have no saved replies yet."
msgstr ""
msgid "You have not added any approvers. Start by adding users or groups."
msgstr ""

View File

@ -5,7 +5,7 @@
"block-dependencies": "node scripts/frontend/block_dependencies.js",
"check:startup_css": "scripts/frontend/startup_css/startup_css_changed.sh",
"clean": "rm -rf public/assets tmp/cache/*-loader",
"dev-server": "NODE_OPTIONS=\"--max-old-space-size=4096\" node scripts/frontend/webpack_dev_server.js",
"dev-server": "NODE_OPTIONS=\"--max-old-space-size=5120\" node scripts/frontend/webpack_dev_server.js",
"file-coverage": "scripts/frontend/file_test_coverage.js",
"lint-docs": "scripts/lint-doc.sh",
"internal:eslint": "eslint --cache --max-warnings 0 --report-unused-disable-directives --ext .js,.vue,.graphql",
@ -43,9 +43,9 @@
"storybook:build": "yarn --cwd ./storybook build --quiet",
"storybook:start": "./scripts/frontend/start_storybook.sh",
"swagger:validate": "swagger-cli validate",
"webpack": "NODE_OPTIONS=\"--max-old-space-size=4096\" webpack --config config/webpack.config.js",
"webpack-vendor": "NODE_OPTIONS=\"--max-old-space-size=4096\" webpack --config config/webpack.vendor.config.js",
"webpack-prod": "NODE_OPTIONS=\"--max-old-space-size=4096\" NODE_ENV=production webpack --config config/webpack.config.js"
"webpack": "NODE_OPTIONS=\"--max-old-space-size=5120\" webpack --config config/webpack.config.js",
"webpack-vendor": "NODE_OPTIONS=\"--max-old-space-size=5120\" webpack --config config/webpack.vendor.config.js",
"webpack-prod": "NODE_OPTIONS=\"--max-old-space-size=5120\" NODE_ENV=production webpack --config config/webpack.config.js"
},
"dependencies": {
"@apollo/client": "^3.5.10",

View File

@ -1102,6 +1102,14 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
end
end
context 'when trying to create an epic' do
it 'defaults to issue type' do
issue = post_new_issue(issue_type: 'epic')
expect(issue.work_item_type.base_type).to eq('issue')
end
end
context 'when create service return an unrecoverable error with http_status' do
let(:http_status) { 403 }

View File

@ -182,44 +182,29 @@ RSpec.describe Projects::PagesController, feature_category: :pages do
create(:project_setting, project: project, pages_unique_domain_enabled: false)
end
context 'with pages_unique_domain feature flag disabled' do
it 'does not update pages unique domain' do
stub_feature_flags(pages_unique_domain: false)
it 'updates pages_https_only and pages_unique_domain and redirects back to pages settings' do
expect { patch :update, params: request_params }
.to change { project.project_setting.reload.pages_unique_domain_enabled }
.from(false).to(true)
expect(project.project_setting.pages_unique_domain).not_to be_nil
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(project_pages_path(project))
end
context 'when it fails to update' do
it 'adds an error message' do
expect_next_instance_of(Projects::UpdateService) do |service|
expect(service)
.to receive(:execute)
.and_return(status: :error, message: 'some error happened')
end
expect { patch :update, params: request_params }
.not_to change { project.project_setting.reload.pages_unique_domain_enabled }
end
end
context 'with pages_unique_domain feature flag enabled' do
before do
stub_feature_flags(pages_unique_domain: true)
end
it 'updates pages_https_only and pages_unique_domain and redirects back to pages settings' do
expect { patch :update, params: request_params }
.to change { project.project_setting.reload.pages_unique_domain_enabled }
.from(false).to(true)
expect(project.project_setting.pages_unique_domain).not_to be_nil
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(project_pages_path(project))
end
context 'when it fails to update' do
it 'adds an error message' do
expect_next_instance_of(Projects::UpdateService) do |service|
expect(service)
.to receive(:execute)
.and_return(status: :error, message: 'some error happened')
end
expect { patch :update, params: request_params }
.not_to change { project.project_setting.reload.pages_unique_domain_enabled }
expect(response).to redirect_to(project_pages_path(project))
expect(flash[:alert]).to eq('some error happened')
end
expect(flash[:alert]).to eq('some error happened')
end
end
end

View File

@ -93,6 +93,10 @@ FactoryBot.define do
association :work_item_type, :default, :test_case
end
trait :epic do
association :work_item_type, :default, :epic
end
factory :incident do
association :work_item_type, :default, :incident

View File

@ -58,6 +58,10 @@ FactoryBot.define do
association :work_item_type, :default, :key_result
end
trait :epic do
association :work_item_type, :default, :epic
end
before(:create, :build) do |work_item, evaluator|
if evaluator.namespace.present?
work_item.project = nil

View File

@ -14,7 +14,7 @@ RSpec.describe 'Profile > Comment templates > List users comment templates', :js
it 'shows the user a list of their comment templates' do
visit profile_comment_templates_path
expect(page).to have_content('My comment templates (1)')
expect(page).to have_content('My comment templates')
expect(page).to have_content(saved_reply.name)
expect(page).to have_content(saved_reply.content)
end

View File

@ -15,6 +15,7 @@ RSpec.describe 'Profile > Comment templates > User creates comment template', :j
end
it 'shows the user a list of their saved replies' do
click_button 'Add new'
find('[data-testid="comment-template-name-input"]').set('test')
find('[data-testid="comment-template-content-input"]').set('Test content')
@ -22,7 +23,7 @@ RSpec.describe 'Profile > Comment templates > User creates comment template', :j
wait_for_requests
expect(page).to have_content('My comment templates (1)')
expect(page).to have_content('My comment templates')
expect(page).to have_content('test')
expect(page).to have_content('Test content')
end

View File

@ -2,10 +2,10 @@
exports[`Comment templates list item component renders list item 1`] = `
<li
class="gl-pt-4 gl-pb-5 gl-border-b"
class="gl-px-5! gl-py-4!"
>
<div
class="gl-display-flex gl-align-items-center"
class="gl-display-flex"
>
<h6
class="gl-mr-3 gl-my-0"
@ -130,9 +130,11 @@ exports[`Comment templates list item component renders list item 1`] = `
</div>
<div
class="gl-mt-3 gl-font-monospace gl-white-space-pre-wrap"
class="gl-font-monospace gl-white-space-pre-line gl-font-sm gl-mt-n5"
>
/assign_reviewer
</div>
<!---->

View File

@ -25,12 +25,6 @@ describe('Comment templates list component', () => {
expect(wrapper.findAllComponents(ListItem).length).toBe(0);
});
it('render comment templates count', () => {
wrapper = createComponent(savedRepliesResponse);
expect(wrapper.find('[data-testid="title"]').text()).toEqual('My comment templates (2)');
});
it('renders list of comment templates', () => {
const savedReplies = savedRepliesResponse.data.currentUser.savedReplies.nodes;
wrapper = createComponent(savedRepliesResponse);

View File

@ -42,4 +42,13 @@ describe('Comment templates index page component', () => {
expect.objectContaining(savedReplies[1]),
);
});
it('render comment templates count', async () => {
const mockApollo = createMockApolloProvider(savedRepliesResponse);
wrapper = createComponent({ mockApollo });
await waitForPromises();
expect(wrapper.find('[data-testid="title"]').text()).toContain('2');
});
});

View File

@ -64,12 +64,13 @@ describe('GlobalSearchModal', () => {
scopedSearchOptions: () => MOCK_SCOPED_SEARCH_OPTIONS,
};
const createComponent = (
const createComponent = ({
initialState = deafaultMockState,
mockGetters = defaultMockGetters,
stubs,
glFeatures = { commandPalette: false },
) => {
...mountOptions
} = {}) => {
const store = new Vuex.Store({
state: {
...deafaultMockState,
@ -88,6 +89,7 @@ describe('GlobalSearchModal', () => {
store,
stubs,
provide: { glFeatures },
...mountOptions,
});
};
@ -148,7 +150,7 @@ describe('GlobalSearchModal', () => {
describe(`when search is ${search}`, () => {
beforeEach(() => {
window.gon.current_username = MOCK_USERNAME;
createComponent({ search }, {});
createComponent({ initialState: { search }, mockGetters: {} });
findGlobalSearchInput().vm.$emit('click');
});
@ -180,15 +182,15 @@ describe('GlobalSearchModal', () => {
describe(`search is "${search}" and loading is ${loading}`, () => {
beforeEach(() => {
window.gon.current_username = username;
createComponent(
{
createComponent({
initialState: {
search,
loading,
},
{
mockGetters: {
searchOptions: () => searchOptions,
},
);
});
});
it(`sets description to ${expectedDesc}`, () => {
@ -208,7 +210,7 @@ describe('GlobalSearchModal', () => {
`('token', ({ search, hasToken }) => {
beforeEach(() => {
window.gon.current_username = MOCK_USERNAME;
createComponent({ search });
createComponent({ initialState: { search } });
findGlobalSearchInput().vm.$emit('click');
});
@ -220,12 +222,12 @@ describe('GlobalSearchModal', () => {
describe.each(MOCK_SCOPED_SEARCH_OPTIONS)('token content', (searchOption) => {
beforeEach(() => {
window.gon.current_username = MOCK_USERNAME;
createComponent(
{ search: MOCK_SEARCH },
{
createComponent({
initialState: { search: MOCK_SEARCH },
mockGetters: {
searchOptions: () => [searchOption],
},
);
});
findGlobalSearchInput().vm.$emit('click');
});
@ -247,12 +249,12 @@ describe('GlobalSearchModal', () => {
`('token', ({ searchOptions, iconName }) => {
beforeEach(() => {
window.gon.current_username = MOCK_USERNAME;
createComponent(
{ search: MOCK_SEARCH },
{
createComponent({
initialState: { search: MOCK_SEARCH },
mockGetters: {
searchOptions: () => searchOptions,
},
);
});
findGlobalSearchInput().vm.$emit('click');
});
@ -287,8 +289,11 @@ describe('GlobalSearchModal', () => {
'when FF `command_palette` is enabled and search handle is %s',
(handle) => {
beforeEach(() => {
createComponent({ search: handle }, undefined, undefined, {
commandPalette: true,
createComponent({
initialState: { search: handle },
glFeatures: {
commandPalette: true,
},
});
});
@ -358,12 +363,18 @@ describe('GlobalSearchModal', () => {
describe('Submitting a search', () => {
const submitSearch = () =>
findGlobalSearchInput().vm.$emit('keydown', new KeyboardEvent({ key: ENTER_KEY }));
findGlobalSearchInput().vm.$emit(
'keydown',
new KeyboardEvent('keydown', { key: ENTER_KEY }),
);
describe('in command mode', () => {
beforeEach(() => {
createComponent({ search: '>' }, undefined, undefined, {
commandPalette: true,
createComponent({
initialState: { search: '>' },
glFeatures: {
commandPalette: true,
},
});
submitSearch();
});
@ -375,7 +386,7 @@ describe('GlobalSearchModal', () => {
describe('in search mode', () => {
it('will NOT submit a search with less than min characters', () => {
createComponent({ search: 'x' });
createComponent({ initialState: { search: 'x' } });
submitSearch();
expect(visitUrl).not.toHaveBeenCalledWith(MOCK_SEARCH_QUERY);
});
@ -391,7 +402,7 @@ describe('GlobalSearchModal', () => {
describe('Modal events', () => {
beforeEach(() => {
createComponent({ search: 'searchQuery' });
createComponent({ initialState: { search: 'searchQuery' } });
});
it('should emit `shown` event when modal is shown', () => {
@ -406,4 +417,101 @@ describe('GlobalSearchModal', () => {
});
});
});
describe('Navigating results', () => {
const findSearchInput = () => wrapper.findByRole('searchbox');
const triggerKeydownEvent = (target, code) => {
const event = new KeyboardEvent('keydown', { bubbles: true, cancelable: true, code });
target.dispatchEvent(event);
return event;
};
beforeEach(() => {
createComponent({
stubs: {
GlSearchBoxByType: {
inheritAttrs: false,
template: '<div><input v-bind="$attrs" v-on="$listeners"></div>',
},
GlobalSearchDefaultItems: {
template: `
<ul>
<li
v-for="n in 5"
class="gl-new-dropdown-item"
tabindex="0"
:data-testid="'test-result-' + n"
>Result {{ n }}</li>
</ul>`,
},
},
attachTo: document.body,
});
});
describe('when the search input has focus', () => {
beforeEach(() => {
findSearchInput().element.focus();
});
it('Home key keeps focus in input', () => {
const event = triggerKeydownEvent(findSearchInput().element, 'Home');
expect(document.activeElement).toBe(findSearchInput().element);
expect(event.defaultPrevented).toBe(false);
});
it('End key keeps focus on input', () => {
const event = triggerKeydownEvent(findSearchInput().element, 'End');
findSearchInput().trigger('keydown', { code: 'End' });
expect(document.activeElement).toBe(findSearchInput().element);
expect(event.defaultPrevented).toBe(false);
});
it('ArrowUp keeps focus on input', () => {
const event = triggerKeydownEvent(findSearchInput().element, 'ArrowUp');
expect(document.activeElement).toBe(findSearchInput().element);
expect(event.defaultPrevented).toBe(false);
});
it('ArrowDown focuses the first item', () => {
const event = triggerKeydownEvent(findSearchInput().element, 'ArrowDown');
expect(document.activeElement).toBe(wrapper.findByTestId('test-result-1').element);
expect(event.defaultPrevented).toBe(true);
});
});
describe('when search result item has focus', () => {
beforeEach(() => {
wrapper.findByTestId('test-result-2').element.focus();
});
it('Home key focuses first item', () => {
const event = triggerKeydownEvent(document.activeElement, 'Home');
expect(document.activeElement).toBe(wrapper.findByTestId('test-result-1').element);
expect(event.defaultPrevented).toBe(true);
});
it('End key focuses last item', () => {
const event = triggerKeydownEvent(document.activeElement, 'End');
expect(document.activeElement).toBe(wrapper.findByTestId('test-result-5').element);
expect(event.defaultPrevented).toBe(true);
});
it('ArrowUp focuses previous item if any, else input', () => {
let event = triggerKeydownEvent(document.activeElement, 'ArrowUp');
expect(document.activeElement).toBe(wrapper.findByTestId('test-result-1').element);
expect(event.defaultPrevented).toBe(true);
event = triggerKeydownEvent(document.activeElement, 'ArrowUp');
expect(document.activeElement).toBe(findSearchInput().element);
expect(event.defaultPrevented).toBe(true);
});
it('ArrowDown focuses next item', () => {
const event = triggerKeydownEvent(document.activeElement, 'ArrowDown');
expect(document.activeElement).toBe(wrapper.findByTestId('test-result-3').element);
expect(event.defaultPrevented).toBe(true);
});
});
});
});

View File

@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Types::IssueTypeEnum, feature_category: :team_planning do
specify { expect(described_class.graphql_name).to eq('IssueType') }
it 'exposes all the existing issue type values except key_result' do
it 'exposes all the existing issue type values except epic' do
expect(described_class.values.keys).to match_array(
%w[ISSUE INCIDENT TEST_CASE REQUIREMENT TASK OBJECTIVE KEY_RESULT]
)

View File

@ -285,7 +285,7 @@ RSpec.describe IntegrationsHelper, feature_category: :integrations do
end
it "only consider these enumeration values are valid" do
expected_valid_types = %w[issue incident test_case requirement task objective key_result]
expected_valid_types = %w[issue incident test_case requirement task objective key_result epic]
expect(WorkItems::Type.base_types.keys).to contain_exactly(*expected_valid_types)
end
end

View File

@ -83,60 +83,32 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
context 'when not using pages_unique_domain' do
subject(:pages_url) { builder.pages_url(with_unique_domain: false) }
context 'when pages_unique_domain feature flag is disabled' do
before do
stub_feature_flags(pages_unique_domain: false)
end
context 'when pages_unique_domain_enabled is false' do
let(:unique_domain_enabled) { false }
it { is_expected.to eq('http://group.example.com/project') }
end
context 'when pages_unique_domain feature flag is enabled' do
before do
stub_feature_flags(pages_unique_domain: true)
end
context 'when pages_unique_domain_enabled is true' do
let(:unique_domain_enabled) { true }
context 'when pages_unique_domain_enabled is false' do
let(:unique_domain_enabled) { false }
it { is_expected.to eq('http://group.example.com/project') }
end
context 'when pages_unique_domain_enabled is true' do
let(:unique_domain_enabled) { true }
it { is_expected.to eq('http://group.example.com/project') }
end
it { is_expected.to eq('http://group.example.com/project') }
end
end
context 'when using pages_unique_domain' do
subject(:pages_url) { builder.pages_url(with_unique_domain: true) }
context 'when pages_unique_domain feature flag is disabled' do
before do
stub_feature_flags(pages_unique_domain: false)
end
context 'when pages_unique_domain_enabled is false' do
let(:unique_domain_enabled) { false }
it { is_expected.to eq('http://group.example.com/project') }
end
context 'when pages_unique_domain feature flag is enabled' do
before do
stub_feature_flags(pages_unique_domain: true)
end
context 'when pages_unique_domain_enabled is true' do
let(:unique_domain_enabled) { true }
context 'when pages_unique_domain_enabled is false' do
let(:unique_domain_enabled) { false }
it { is_expected.to eq('http://group.example.com/project') }
end
context 'when pages_unique_domain_enabled is true' do
let(:unique_domain_enabled) { true }
it { is_expected.to eq('http://unique-domain.example.com') }
end
it { is_expected.to eq('http://unique-domain.example.com') }
end
end
end
@ -144,30 +116,16 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
describe '#unique_host' do
subject(:unique_host) { builder.unique_host }
context 'when pages_unique_domain feature flag is disabled' do
before do
stub_feature_flags(pages_unique_domain: false)
end
context 'when pages_unique_domain_enabled is false' do
let(:unique_domain_enabled) { false }
it { is_expected.to be_nil }
end
context 'when pages_unique_domain feature flag is enabled' do
before do
stub_feature_flags(pages_unique_domain: true)
end
context 'when pages_unique_domain_enabled is true' do
let(:unique_domain_enabled) { true }
context 'when pages_unique_domain_enabled is false' do
let(:unique_domain_enabled) { false }
it { is_expected.to be_nil }
end
context 'when pages_unique_domain_enabled is true' do
let(:unique_domain_enabled) { true }
it { is_expected.to eq('unique-domain.example.com') }
end
it { is_expected.to eq('unique-domain.example.com') }
end
end

View File

@ -0,0 +1,53 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::PauseControl::Client, :clean_gitlab_redis_queues, feature_category: :global_search do
let(:worker_class) do
Class.new do
def self.name
'TestPauseWorker'
end
include ApplicationWorker
pause_control :zoekt
def perform(*); end
end
end
before do
stub_const('TestPauseWorker', worker_class)
end
describe '#call' do
context 'when strategy is enabled' do
before do
stub_feature_flags(zoekt_pause_indexing: true)
end
it 'does not schedule the job' do
expect(Gitlab::SidekiqMiddleware::PauseControl::PauseControlService).to receive(:add_to_waiting_queue!).once
TestPauseWorker.perform_async('args1')
expect(TestPauseWorker.jobs.count).to eq(0)
end
end
context 'when strategy is disabled' do
before do
stub_feature_flags(zoekt_pause_indexing: false)
end
it 'schedules the job' do
expect(Gitlab::SidekiqMiddleware::PauseControl::PauseControlService).not_to receive(:add_to_waiting_queue!)
TestPauseWorker.perform_async('args1')
expect(TestPauseWorker.jobs.count).to eq(1)
end
end
end
end

View File

@ -0,0 +1,178 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::PauseControl::PauseControlService, :clean_gitlab_redis_shared_state, feature_category: :global_search do
let(:worker_class) do
Class.new do
def self.name
'DummyWorker'
end
include ApplicationWorker
end
end
let(:worker_class_name) { worker_class.name }
let(:worker_context) do
{ 'correlation_id' => 'context_correlation_id',
'meta.project' => 'gitlab-org/gitlab' }
end
let(:stored_context) do
{ "#{Gitlab::ApplicationContext::LOG_KEY}.project" => 'gitlab-org/gitlab' }
end
let(:worker_args) { [1, 2] }
subject { described_class.new(worker_class_name) }
before do
stub_const(worker_class_name, worker_class)
end
describe '.add_to_waiting_queue!' do
it 'calls an instance method' do
expect_next_instance_of(described_class) do |instance|
expect(instance).to receive(:add_to_waiting_queue!).with(worker_args, worker_context)
end
described_class.add_to_waiting_queue!(worker_class_name, worker_args, worker_context)
end
end
describe '.has_jobs_in_waiting_queue?' do
it 'calls an instance method' do
expect_next_instance_of(described_class) do |instance|
expect(instance).to receive(:has_jobs_in_waiting_queue?)
end
described_class.has_jobs_in_waiting_queue?(worker_class_name)
end
end
describe '.resume_processing!' do
it 'calls an instance method' do
expect_next_instance_of(described_class) do |instance|
expect(instance).to receive(:resume_processing!)
end
described_class.resume_processing!(worker_class_name)
end
end
describe '.queue_size' do
it 'reports the queue size' do
expect(described_class.queue_size(worker_class_name)).to eq(0)
subject.add_to_waiting_queue!(worker_args, worker_context)
expect(described_class.queue_size(worker_class_name)).to eq(1)
expect { subject.resume_processing! }.to change { described_class.queue_size(worker_class_name) }.by(-1)
end
end
describe '#add_to_waiting_queue!' do
it 'adds a job to the set' do
expect { subject.add_to_waiting_queue!(worker_args, worker_context) }
.to change { subject.queue_size }
.from(0).to(1)
end
it 'adds only one unique job to the set' do
expect do
2.times { subject.add_to_waiting_queue!(worker_args, worker_context) }
end.to change { subject.queue_size }.from(0).to(1)
end
it 'only stores `project` context information' do
subject.add_to_waiting_queue!(worker_args, worker_context)
subject.send(:with_redis) do |r|
set_key = subject.send(:redis_set_key)
stored_job = subject.send(:deserialize, r.zrange(set_key, 0, -1).first)
expect(stored_job['context']).to eq(stored_context)
end
end
end
describe '#has_jobs_in_waiting_queue?' do
it 'checks set existence' do
expect { subject.add_to_waiting_queue!(worker_args, worker_context) }
.to change { subject.has_jobs_in_waiting_queue? }
.from(false).to(true)
end
end
describe '#resume_processing!' do
let(:jobs) { [[1], [2], [3]] }
it 'puts jobs back into the queue and respects order' do
# We stub this const to test at least a couple of loop iterations
stub_const("#{described_class}::LIMIT", 2)
jobs.each do |j|
subject.add_to_waiting_queue!(j, worker_context)
end
expect(worker_class).to receive(:perform_async).with(1).ordered
expect(worker_class).to receive(:perform_async).with(2).ordered
expect(worker_class).not_to receive(:perform_async).with(3).ordered
expect(Gitlab::SidekiqLogging::PauseControlLogger.instance).to receive(:resumed_log).with(worker_class_name, [1])
expect(Gitlab::SidekiqLogging::PauseControlLogger.instance).to receive(:resumed_log).with(worker_class_name, [2])
subject.resume_processing!
end
it 'drops a set after execution' do
jobs.each do |j|
subject.add_to_waiting_queue!(j, worker_context)
end
expect(Gitlab::ApplicationContext).to receive(:with_raw_context)
.with(stored_context)
.exactly(jobs.count).times.and_call_original
expect(worker_class).to receive(:perform_async).exactly(jobs.count).times
expect { subject.resume_processing! }.to change { subject.has_jobs_in_waiting_queue? }.from(true).to(false)
end
end
context 'with concurrent changes to different queues' do
let(:second_worker_class) do
Class.new do
def self.name
'SecondDummyIndexingWorker'
end
include ApplicationWorker
end
end
let(:other_subject) { described_class.new(second_worker_class.name) }
before do
stub_const(second_worker_class.name, second_worker_class)
end
it 'allows to use queues independently of each other' do
expect { subject.add_to_waiting_queue!(worker_args, worker_context) }
.to change { subject.queue_size }
.from(0).to(1)
expect { other_subject.add_to_waiting_queue!(worker_args, worker_context) }
.to change { other_subject.queue_size }
.from(0).to(1)
expect { subject.resume_processing! }.to change { subject.has_jobs_in_waiting_queue? }
.from(true).to(false)
expect { other_subject.resume_processing! }.to change { other_subject.has_jobs_in_waiting_queue? }
.from(true).to(false)
end
end
end

View File

@ -0,0 +1,76 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::PauseControl::Server, :clean_gitlab_redis_queues, feature_category: :global_search do
let(:worker_class) do
Class.new do
def self.name
'TestPauseWorker'
end
include ApplicationWorker
pause_control :zoekt
def perform(*)
self.class.work
end
def self.work; end
end
end
before do
stub_const('TestPauseWorker', worker_class)
end
around do |example|
with_sidekiq_server_middleware do |chain|
chain.add described_class
Sidekiq::Testing.inline! { example.run }
end
end
describe '#call' do
context 'when strategy is enabled' do
before do
stub_feature_flags(zoekt_pause_indexing: true)
end
it 'puts the job to another queue without execution' do
bare_job = { 'class' => 'TestPauseWorker', 'args' => ['hello'] }
job_definition = Gitlab::SidekiqMiddleware::PauseControl::StrategyHandler.new(TestPauseWorker, bare_job.dup)
expect(Gitlab::SidekiqMiddleware::PauseControl::StrategyHandler)
.to receive(:new).with(TestPauseWorker, a_hash_including(bare_job))
.and_return(job_definition).once
expect(TestPauseWorker).not_to receive(:work)
expect(Gitlab::SidekiqMiddleware::PauseControl::PauseControlService).to receive(:add_to_waiting_queue!).once
TestPauseWorker.perform_async('hello')
end
end
context 'when strategy is disabled' do
before do
stub_feature_flags(zoekt_pause_indexing: false)
end
it 'executes the job' do
bare_job = { 'class' => 'TestPauseWorker', 'args' => ['hello'] }
job_definition = Gitlab::SidekiqMiddleware::PauseControl::StrategyHandler.new(TestPauseWorker, bare_job.dup)
expect(Gitlab::SidekiqMiddleware::PauseControl::StrategyHandler)
.to receive(:new).with(TestPauseWorker, hash_including(bare_job))
.and_return(job_definition).twice
expect(TestPauseWorker).to receive(:work)
expect(Gitlab::SidekiqMiddleware::PauseControl::PauseControlService).not_to receive(:add_to_waiting_queue!)
TestPauseWorker.perform_async('hello')
end
end
end
end

View File

@ -0,0 +1,68 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::PauseControl::StrategyHandler, :clean_gitlab_redis_queues, feature_category: :global_search do
subject(:pause_control) do
described_class.new(TestPauseWorker, job)
end
let(:worker_class) do
Class.new do
def self.name
'TestPauseWorker'
end
include ApplicationWorker
pause_control :zoekt
def perform(*); end
end
end
let(:job) { { 'class' => 'TestPauseWorker', 'args' => [1], 'jid' => '123' } }
before do
stub_const('TestPauseWorker', worker_class)
end
describe '#schedule' do
shared_examples 'scheduling with pause control class' do |strategy_class|
it 'calls schedule on the strategy' do
expect do |block|
klass = "Gitlab::SidekiqMiddleware::PauseControl::Strategies::#{strategy_class}".constantize
expect_next_instance_of(klass) do |strategy|
expect(strategy).to receive(:schedule).with(job, &block)
end
pause_control.schedule(&block)
end.to yield_control
end
end
it_behaves_like 'scheduling with pause control class', 'Zoekt'
end
describe '#perform' do
it 'calls perform on the strategy' do
expect do |block|
expect_next_instance_of(Gitlab::SidekiqMiddleware::PauseControl::Strategies::Zoekt) do |strategy|
expect(strategy).to receive(:perform).with(job, &block)
end
pause_control.perform(&block)
end.to yield_control
end
it 'pauses job' do
expect_next_instance_of(Gitlab::SidekiqMiddleware::PauseControl::Strategies::Zoekt) do |strategy|
expect(strategy).to receive(:should_pause?).and_return(true)
end
expect { pause_control.perform }.to change {
Gitlab::SidekiqMiddleware::PauseControl::PauseControlService.queue_size('TestPauseWorker')
}.by(1)
end
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
require 'fast_spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::PauseControl, feature_category: :global_search do
describe '.for' do
it 'returns the right class for `zoekt`' do
expect(described_class.for(:zoekt)).to eq(::Gitlab::SidekiqMiddleware::PauseControl::Strategies::Zoekt)
end
it 'returns the right class for `none`' do
expect(described_class.for(:none)).to eq(::Gitlab::SidekiqMiddleware::PauseControl::Strategies::None)
end
it 'returns nil when passing an unknown key' do
expect(described_class.for(:unknown)).to eq(::Gitlab::SidekiqMiddleware::PauseControl::Strategies::None)
end
end
end

View File

@ -0,0 +1,91 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe AddEpicWorkItemType, :migration, feature_category: :team_planning do
include MigrationHelpers::WorkItemTypesHelper
let(:work_item_types) { table(:work_item_types) }
let(:work_item_widget_definitions) { table(:work_item_widget_definitions) }
let(:work_item_hierarchy_restrictions) { table(:work_item_hierarchy_restrictions) }
let(:base_types) do
{
issue: 0,
incident: 1,
test_case: 2,
requirement: 3,
task: 4,
objective: 5,
key_result: 6,
epic: 7
}
end
after(:all) do
# Make sure base types are recreated after running the migration
# because migration specs are not run in a transaction
reset_work_item_types
end
before do
reset_db_state_prior_to_migration
end
it 'adds the epic type, widget definitions and hierarchy restrictions', :aggregate_failures do
expect do
migrate!
end.to change { work_item_types.count }.by(1)
.and(change { work_item_widget_definitions.count }.by(10))
.and(change { work_item_hierarchy_restrictions.count }.by(2))
epic_type = work_item_types.last
issue_type = work_item_types.find_by!(namespace_id: nil, base_type: base_types[:issue])
expect(work_item_types.pluck(:base_type)).to include(base_types[:epic])
expect(
work_item_widget_definitions.where(work_item_type_id: epic_type.id).pluck(:widget_type)
).to match_array(described_class::EPIC_WIDGETS.values)
expect(
work_item_hierarchy_restrictions.where(parent_type_id: epic_type.id).pluck(:child_type_id, :maximum_depth)
).to contain_exactly([epic_type.id, 9], [issue_type.id, 1])
end
it 'skips creating the new type and its definitions' do
work_item_types.find_or_create_by!(
name: 'Epic', namespace_id: nil, base_type: base_types[:epic], icon_name: 'issue-type-epic'
)
expect do
migrate!
end.to not_change(work_item_types, :count)
.and(not_change(work_item_widget_definitions, :count))
.and(not_change(work_item_hierarchy_restrictions, :count))
end
def reset_db_state_prior_to_migration
# Database needs to be in a similar state as when this migration was created
work_item_types.delete_all
work_item_types.find_or_create_by!(
name: 'Issue', namespace_id: nil, base_type: base_types[:issue], icon_name: 'issue-type-issue'
)
work_item_types.find_or_create_by!(
name: 'Incident', namespace_id: nil, base_type: base_types[:incident], icon_name: 'issue-type-incident'
)
work_item_types.find_or_create_by!(
name: 'Test Case', namespace_id: nil, base_type: base_types[:test_case], icon_name: 'issue-type-test-case'
)
work_item_types.find_or_create_by!(
name: 'Requirement', namespace_id: nil, base_type: base_types[:requirement], icon_name: 'issue-type-requirements'
)
work_item_types.find_or_create_by!(
name: 'Task', namespace_id: nil, base_type: base_types[:task], icon_name: 'issue-type-task'
)
work_item_types.find_or_create_by!(
name: 'Objective', namespace_id: nil, base_type: base_types[:objective], icon_name: 'issue-type-objective'
)
work_item_types.find_or_create_by!(
name: 'Key Result', namespace_id: nil, base_type: base_types[:key_result], icon_name: 'issue-type-keyresult'
)
end
end

View File

@ -1058,7 +1058,7 @@ RSpec.describe Issue, feature_category: :team_planning do
end
describe '#to_branch_name' do
let_it_be(:issue) { create(:issue, project: reusable_project, iid: 123, title: 'Testing Issue') }
let_it_be(:issue, reload: true) { create(:issue, project: reusable_project, iid: 123, title: 'Testing Issue') }
it 'returns a branch name with the issue title if not confidential' do
expect(issue.to_branch_name).to eq('123-testing-issue')

View File

@ -49,10 +49,10 @@ RSpec.describe WorkItems::Type do
it 'deletes type but not unrelated issues' do
type = create(:work_item_type)
expect(described_class.count).to eq(8)
expect(described_class.count).to eq(9)
expect { type.destroy! }.not_to change(Issue, :count)
expect(described_class.count).to eq(7)
expect(described_class.count).to eq(8)
end
end

View File

@ -151,20 +151,6 @@ RSpec.describe API::Internal::Pages, feature_category: :pages do
project.mark_pages_as_deployed
end
context 'when the feature flag is disabled' do
before do
stub_feature_flags(pages_unique_domain: false)
end
context 'when there are no pages deployed for the related project' do
it 'responds with 204 No Content' do
get api('/internal/pages'), headers: auth_header, params: { host: 'unique-domain.example.com' }
expect(response).to have_gitlab_http_status(:no_content)
end
end
end
context 'when the unique domain is disabled' do
before do
project.project_setting.update!(pages_unique_domain_enabled: false)

View File

@ -5,7 +5,19 @@ require 'spec_helper'
RSpec.describe Organizations::OrganizationsController, feature_category: :cell do
let_it_be(:organization) { create(:organization) }
RSpec.shared_examples 'basic organization controller action' do
shared_examples 'action disabled by `ui_for_organizations` feature flag' do
before do
stub_feature_flags(ui_for_organizations: false)
end
it 'renders 404' do
gitlab_request
expect(response).to have_gitlab_http_status(:not_found)
end
end
shared_examples 'basic organization controller action' do
before do
sign_in(user)
end
@ -18,6 +30,8 @@ RSpec.describe Organizations::OrganizationsController, feature_category: :cell d
expect(response).to have_gitlab_http_status(:not_found)
end
it_behaves_like 'action disabled by `ui_for_organizations` feature flag'
end
context 'when the user has authorization', :enable_admin_mode do
@ -29,19 +43,7 @@ RSpec.describe Organizations::OrganizationsController, feature_category: :cell d
expect(response).to have_gitlab_http_status(:ok)
end
context 'when the feature flag `ui_for_organizations` is disabled' do
let_it_be(:other_user) { create :user }
before do
stub_feature_flags(ui_for_organizations: other_user)
end
it 'renders 404' do
gitlab_request
expect(response).to have_gitlab_http_status(:not_found)
end
end
it_behaves_like 'action disabled by `ui_for_organizations` feature flag'
end
end

View File

@ -794,104 +794,69 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
let(:group) { create(:group, path: 'group') }
let(:project) { create(:project, path: 'project', group: group) }
context 'with pages_unique_domain feature flag disabled' do
before do
stub_feature_flags(pages_unique_domain: false)
end
it 'updates project pages unique domain' do
expect do
update_project(project, user, project_setting_attributes: {
pages_unique_domain_enabled: true
})
end.to change { project.project_setting.pages_unique_domain_enabled }
it 'does not change pages unique domain' do
expect(project)
.to receive(:update)
.with({ project_setting_attributes: { has_confluence: true } })
.and_call_original
expect do
update_project(project, user, project_setting_attributes: {
has_confluence: true,
pages_unique_domain_enabled: true
})
end.not_to change { project.project_setting.pages_unique_domain_enabled }
end
it 'does not remove other attributes' do
expect(project)
.to receive(:update)
.with({ name: 'True' })
.and_call_original
update_project(project, user, name: 'True')
end
expect(project.project_setting.pages_unique_domain_enabled).to eq true
expect(project.project_setting.pages_unique_domain).to match %r{project-group-\w+}
end
context 'with pages_unique_domain feature flag enabled' do
before do
stub_feature_flags(pages_unique_domain: true)
end
it 'does not change unique domain when it already exists' do
project.project_setting.update!(
pages_unique_domain_enabled: false,
pages_unique_domain: 'unique-domain'
)
it 'updates project pages unique domain' do
expect do
update_project(project, user, project_setting_attributes: {
pages_unique_domain_enabled: true
})
end.to change { project.project_setting.pages_unique_domain_enabled }
expect do
update_project(project, user, project_setting_attributes: {
pages_unique_domain_enabled: true
})
end.to change { project.project_setting.pages_unique_domain_enabled }
expect(project.project_setting.pages_unique_domain_enabled).to eq true
expect(project.project_setting.pages_unique_domain).to match %r{project-group-\w+}
end
expect(project.project_setting.pages_unique_domain_enabled).to eq true
expect(project.project_setting.pages_unique_domain).to eq 'unique-domain'
end
it 'does not change unique domain when it already exists' do
project.project_setting.update!(
pages_unique_domain_enabled: false,
pages_unique_domain: 'unique-domain'
)
it 'does not change unique domain when disabling unique domain' do
project.project_setting.update!(
pages_unique_domain_enabled: true,
pages_unique_domain: 'unique-domain'
)
expect do
update_project(project, user, project_setting_attributes: {
pages_unique_domain_enabled: true
})
end.to change { project.project_setting.pages_unique_domain_enabled }
expect do
update_project(project, user, project_setting_attributes: {
pages_unique_domain_enabled: false
})
end.not_to change { project.project_setting.pages_unique_domain }
expect(project.project_setting.pages_unique_domain_enabled).to eq true
expect(project.project_setting.pages_unique_domain).to eq 'unique-domain'
end
expect(project.project_setting.pages_unique_domain_enabled).to eq false
expect(project.project_setting.pages_unique_domain).to eq 'unique-domain'
end
it 'does not change unique domain when disabling unique domain' do
project.project_setting.update!(
context 'when there is another project with the unique domain' do
it 'fails pages unique domain already exists' do
create(
:project_setting,
pages_unique_domain_enabled: true,
pages_unique_domain: 'unique-domain'
)
expect do
update_project(project, user, project_setting_attributes: {
pages_unique_domain_enabled: false
})
end.not_to change { project.project_setting.pages_unique_domain }
allow(Gitlab::Pages::RandomDomain)
.to receive(:generate)
.and_return('unique-domain')
expect(project.project_setting.pages_unique_domain_enabled).to eq false
expect(project.project_setting.pages_unique_domain).to eq 'unique-domain'
end
result = update_project(project, user, project_setting_attributes: {
pages_unique_domain_enabled: true
})
context 'when there is another project with the unique domain' do
it 'fails pages unique domain already exists' do
create(
:project_setting,
pages_unique_domain_enabled: true,
pages_unique_domain: 'unique-domain'
)
allow(Gitlab::Pages::RandomDomain)
.to receive(:generate)
.and_return('unique-domain')
result = update_project(project, user, project_setting_attributes: {
pages_unique_domain_enabled: true
})
expect(result).to eq(
status: :error,
message: 'Project setting pages unique domain has already been taken'
)
end
expect(result).to eq(
status: :error,
message: 'Project setting pages unique domain has already been taken'
)
end
end
end

View File

@ -3,7 +3,7 @@
RSpec.shared_examples 'work item hierarchy restrictions importer' do
shared_examples_for 'adds restrictions' do
it "adds all restrictions if they don't exist" do
expect { subject }.to change { WorkItems::HierarchyRestriction.count }.from(0).to(4)
expect { subject }.to change { WorkItems::HierarchyRestriction.count }.from(0).to(6)
end
end
@ -53,7 +53,7 @@ RSpec.shared_examples 'work item hierarchy restrictions importer' do
expect { subject }.to make_queries_matching(/INSERT/, 1).and(
change { WorkItems::HierarchyRestriction.count }.by(1)
)
expect(WorkItems::HierarchyRestriction.count).to eq(4)
expect(WorkItems::HierarchyRestriction.count).to eq(6)
end
end
end

View File

@ -24,16 +24,6 @@ RSpec.describe Tooling::Danger::BulkDatabaseActions, feature_category: :tooling
file_diff.map { |line| line.delete_prefix('+') }
end
let(:file_diff) do
[
"+ def execute",
"+ pat_family.active.#{method_call}",
"+",
"+ ServiceResponse.success",
"+ end"
]
end
before do
allow(bulk_database_actions).to receive(:project_helper).and_return(fake_project_helper)
allow(bulk_database_actions.project_helper).to receive(:file_lines).and_return(file_lines)
@ -44,66 +34,91 @@ RSpec.describe Tooling::Danger::BulkDatabaseActions, feature_category: :tooling
subject(:bulk_database_actions) { fake_danger.new(helper: fake_helper) }

shared_examples 'no Danger comment' do
it 'does not comment on the bulk update action usage' do
expect(bulk_database_actions).not_to receive(:markdown)

bulk_database_actions.add_comment_for_bulk_database_action_method_usage
end
end

describe '#add_comment_for_bulk_database_action_method_usage' do
context 'for single line method call' do
let(:file_diff) do
[
"+ def execute",
"+ pat_family.active.#{method_call}",
"+",
"+ ServiceResponse.success",
"+ end"
]
end

context 'when file is a non-spec Ruby file' do
let(:filename) { 'app/services/personal_access_tokens/revoke_token_family_service.rb' }

using RSpec::Parameterized::TableSyntax

where(:method_call, :expect_comment?) do
'update_all(revoked: true)' | true
'destroy_all' | true
'delete_all' | true
'update(revoked: true)' | true
'delete' | true
'update_two_factor' | false
'delete_keys(key)' | false
'destroy_hook(hook)' | false
'destroy_all_merged' | false
'update_all_mirrors' | false
end

with_them do
it "correctly handles potential bulk database action" do
if expect_comment?
expect(bulk_database_actions).to receive(:markdown).with(comment_text, file: filename, line: 2)
else
expect(bulk_database_actions).not_to receive(:markdown)
end

bulk_database_actions.add_comment_for_bulk_database_action_method_usage
end
end
end

context 'for spec directories' do
let(:method_call) { 'update_all(revoked: true)' }

context 'for FOSS spec file' do
let(:filename) { 'spec/services/personal_access_tokens/revoke_token_family_service_spec.rb' }

it_behaves_like 'no Danger comment'
end

context 'for EE spec file' do
let(:filename) { 'ee/spec/services/personal_access_tokens/revoke_token_family_service_spec.rb' }

it_behaves_like 'no Danger comment'
end

context 'for JiHu spec file' do
let(:filename) { 'jh/spec/services/personal_access_tokens/revoke_token_family_service_spec.rb' }

it_behaves_like 'no Danger comment'
end
end
end

context 'for strings' do
let(:filename) { 'app/services/personal_access_tokens/revoke_token_family_service.rb' }
let(:file_diff) do
[
'+ expect { subject }.to output(',
'+ "ERROR: Could not update tag"',
'+ ).to_stderr'
]
end

it_behaves_like 'no Danger comment'
end
end
end

View File

@ -17,15 +17,5 @@ RSpec.describe 'projects/pages/_pages_settings', feature_category: :pages do
expect(rendered).to have_content('Use unique domain')
end
context 'when pages_unique_domain feature flag is disabled' do
it 'does not show the unique domain toggle' do
stub_feature_flags(pages_unique_domain: false)
# We have to use `view.render` because `render` causes issues
# https://github.com/rails/rails/issues/41320
expect(view.render('projects/pages/pages_settings')).to be_nil
end
end
end
end

View File

@ -7,7 +7,7 @@ module Tooling
module BulkDatabaseActions
include ::Tooling::Danger::Suggestor
BULK_UPDATE_METHODS_REGEX = /\W(((update|delete|destroy)_all)|delete|update)(\(|\s+|$)/
BULK_UPDATE_METHODS_REGEX = /\.((update|delete|destroy)(_all)?)\b/
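# Editor's note (illustrative sketch, not part of this diff): the new pattern
# requires a receiver dot, so plain strings and methods that merely start with
# update/delete/destroy no longer match, while genuine bulk calls still do.
# A quick check in a Ruby console:
#
#   regex = /\.((update|delete|destroy)(_all)?)\b/
#   regex.match?('pat_family.active.update_all(revoked: true)') # => true
#   regex.match?('records.destroy_all')                         # => true
#   regex.match?('user.update_two_factor')                      # => false ('_' is a word character, so \b fails)
#   regex.match?('"ERROR: Could not update tag"')               # => false (no receiver dot before "update")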
DOCUMENTATION_LINK = 'https://docs.gitlab.com/ee/development/database_review.html#preparation-when-using-update-delete-update_all-and-destroy_all'
COMMENT_TEXT =