Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-10-09 12:19:26 +00:00
parent 22d3d5d4f2
commit 2d0fdb468b
179 changed files with 1763 additions and 1735 deletions

View File

@ -334,6 +334,10 @@
    litellm_params:
      model: ollama/codestral
      mock_response: "Mock response from codestral"
+ - model_name: mistral
+   litellm_params:
+     model: ollama/mistral
+     mock_response: "Mock response from mistral"

  .litellm-proxy-services:
    services:

View File

@ -89,7 +89,7 @@ workflow:
    needs:
      - build-cng
      - download-knapsack-report
-   parallel: 5
+   parallel: 10

  # ==========================================
  # Pre stage

View File

@ -23,7 +23,7 @@ export default {
  <div>
    <gl-card
      class="gl-rounded-lg"
-     header-class="gl-rounded-lg gl-px-0 gl-py-0 gl-bg-white gl-border-b-0"
+     header-class="gl-rounded-lg gl-px-0 gl-py-0 gl-bg-default gl-border-b-0"
      body-class="gl-pt-2 gl-pb-0 gl-px-2"
    >
      <template #header>

View File

@ -29,7 +29,6 @@
  @import 'framework/markdown_area';
  @import 'framework/media_object';
  @import 'framework/modal';
- @import 'framework/pagination';
  @import 'framework/panels';
  @import 'framework/popup';
  @import 'framework/secondary_navigation_elements';

View File

@ -1,14 +0,0 @@
.gl-pagination {
  a {
    color: inherit;
    text-decoration: none;
  }
}

.page-item {
  &.active {
    .page-link {
      z-index: 3;
    }
  }
}

View File

@ -307,14 +307,6 @@
    &,
    .gl-new-dropdown-custom-toggle {
      width: 100%;
-
-     // stylelint-disable-next-line gitlab/no-gl-class
-     .gl-badge.badge-muted {
-       // stylelint-disable-next-line gitlab/no-gl-class
-       .gl-dark & {
-         @apply gl-bg-gray-100;
-       }
-     }
    }

    // Reset padding, as inner element will

View File

@ -10,7 +10,9 @@ module Import
feature_category :importers feature_category :importers
def accept def accept
result = ::Import::SourceUsers::AcceptReassignmentService.new(source_user, current_user: current_user).execute result = ::Import::SourceUsers::AcceptReassignmentService.new(
source_user, current_user: current_user, reassignment_token: params[:reassignment_token]
).execute
if result.success? if result.success?
flash[:raw] = banner('accept_invite') flash[:raw] = banner('accept_invite')
@ -21,7 +23,9 @@ module Import
end end
def decline def decline
result = ::Import::SourceUsers::RejectReassignmentService.new(source_user, current_user: current_user).execute result = ::Import::SourceUsers::RejectReassignmentService.new(
source_user, current_user: current_user, reassignment_token: params[:reassignment_token]
).execute
if result.success? if result.success?
flash[:raw] = banner('reject_invite') flash[:raw] = banner('reject_invite')
@ -36,7 +40,7 @@ module Import
private private
def check_source_user_valid! def check_source_user_valid!
return if source_user.awaiting_approval? && current_user_matches_invite? return if source_user&.awaiting_approval? && current_user_matches_invite?
flash[:raw] = banner('invalid_invite') flash[:raw] = banner('invalid_invite')
redirect_to(root_path) redirect_to(root_path)
@ -47,12 +51,12 @@ module Import
end end
def source_user def source_user
Import::SourceUser.find(params[:id]) Import::SourceUser.find_by_reassignment_token(params[:reassignment_token])
end end
strong_memoize_attr :source_user strong_memoize_attr :source_user
def check_feature_flag! def check_feature_flag!
not_found unless Feature.enabled?(:importer_user_mapping, source_user.reassigned_by_user) not_found unless source_user.nil? || Feature.enabled?(:importer_user_mapping, source_user.reassigned_by_user)
end end
def banner(partial) def banner(partial)

View File

@ -100,10 +100,15 @@ class RegistrationsController < Devise::RegistrationsController
def after_successful_create_hook(user) def after_successful_create_hook(user)
accept_pending_invitations accept_pending_invitations
persist_accepted_terms_if_required(user) persist_accepted_terms_if_required(user)
execute_system_hooks(user)
notify_new_instance_access_request(user) notify_new_instance_access_request(user)
track_successful_user_creation(user) track_successful_user_creation(user)
end end
def execute_system_hooks(user)
SystemHooksService.new.execute_hooks_for(user, :create)
end
def notify_new_instance_access_request(user) def notify_new_instance_access_request(user)
return unless pending_approval? return unless pending_approval?

View File

@ -7,7 +7,7 @@ module Emails
      @project = Project.find project_id
      @target_url = project_url(@project)
      @old_path_with_namespace = old_path_with_namespace
-     mail_with_locale(
+     email_with_layout(
        to: @user.notification_email_for(@project.group),
        subject: subject("Project was moved")
      )
@ -15,7 +15,7 @@ module Emails
    def project_was_exported_email(current_user, project)
      @project = project

-     mail_with_locale(
+     email_with_layout(
        to: current_user.notification_email_for(project.group),
        subject: subject("Project was exported")
      )
@ -24,7 +24,7 @@ module Emails
    def project_was_not_exported_email(current_user, project, errors)
      @project = project
      @errors = errors
-     mail_with_locale(
+     email_with_layout(
        to: current_user.notification_email_for(@project.group),
        subject: subject("Project export error")
      )

View File

@ -330,6 +330,10 @@ class NotifyPreview < ActionMailer::Preview
Notify.project_was_exported_email(user, project).message Notify.project_was_exported_email(user, project).message
end end
def repository_cleanup_success_email
Notify.repository_cleanup_success_email(project, user).message
end
def request_review_merge_request_email def request_review_merge_request_email
Notify.request_review_merge_request_email(user.id, merge_request.id, user.id).message Notify.request_review_merge_request_email(user.id, merge_request.id, user.id).message
end end

View File

@ -69,7 +69,7 @@ class ApplicationRecord < ActiveRecord::Base
# to allow callers gracefully handling the errors to still complete within # to allow callers gracefully handling the errors to still complete within
# the 5s target duration of a low urgency request. # the 5s target duration of a low urgency request.
def self.with_fast_read_statement_timeout(timeout_ms = 4500) def self.with_fast_read_statement_timeout(timeout_ms = 4500)
::Gitlab::Database::LoadBalancing::SessionMap.current(load_balancer).fallback_to_replicas_for_ambiguous_queries do ::Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do
transaction(requires_new: true) do # rubocop:disable Performance/ActiveRecordSubtransactions transaction(requires_new: true) do # rubocop:disable Performance/ActiveRecordSubtransactions
connection.exec_query("SET LOCAL statement_timeout = #{timeout_ms}") connection.exec_query("SET LOCAL statement_timeout = #{timeout_ms}")

View File

@ -469,7 +469,7 @@ module Ci
# not want to upgrade database connection proxy to use the primary # not want to upgrade database connection proxy to use the primary
# database after heartbeat write happens. # database after heartbeat write happens.
# #
::Gitlab::Database::LoadBalancing::SessionMap.current(connection.load_balancer).without_sticky_writes do ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes do
values = { contacted_at: Time.current, creation_state: :finished } values = { contacted_at: Time.current, creation_state: :finished }
merge_cache_attributes(values) merge_cache_attributes(values)

View File

@ -137,7 +137,7 @@ module Ci
# not want to upgrade database connection proxy to use the primary # not want to upgrade database connection proxy to use the primary
# database after heartbeat write happens. # database after heartbeat write happens.
# #
::Gitlab::Database::LoadBalancing::SessionMap.current(connection.load_balancer).without_sticky_writes do ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes do
values = values&.slice(:version, :revision, :platform, :architecture, :ip_address, :config, :executor) || {} values = values&.slice(:version, :revision, :platform, :architecture, :ip_address, :config, :executor) || {}
values.merge!(contacted_at: Time.current, creation_state: :finished) if update_contacted_at values.merge!(contacted_at: Time.current, creation_state: :finished) if update_contacted_at

View File

@ -5,7 +5,7 @@ module Packages
extend ActiveSupport::Concern extend ActiveSupport::Concern
def touch_last_downloaded_at def touch_last_downloaded_at
::Gitlab::Database::LoadBalancing::SessionMap.current(load_balancer).without_sticky_writes do ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes do
update_column(:last_downloaded_at, Time.zone.now) update_column(:last_downloaded_at, Time.zone.now)
end end
end end
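The wrapper reverted to above is the standard non-sticky write pattern; a minimal sketch, assuming `package` is a record that includes this concern:

    # The UPDATE is not recorded as a write for load-balancing purposes, so
    # later reads in the same request can still be served by replicas.
    ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes do
      package.update_column(:last_downloaded_at, Time.zone.now)
    end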

View File

@ -58,7 +58,7 @@ module UseSqlFunctionForPrimaryKeyLookups
return unless verification_arel.ast == arel.ast return unless verification_arel.ast == arel.ast
if table_name == "namespaces" && Feature.enabled?(:log_sql_function_namespace_lookups, Feature.current_request) if table_name == "namespaces" && Feature.enabled?(:log_sql_function_namespace_lookups, Feature.current_request)
using_primary = Gitlab::Database::LoadBalancing::SessionMap.current(load_balancer).use_primary? using_primary = Gitlab::Database::LoadBalancing::Session.current.use_primary?
Gitlab::AppLogger.info( Gitlab::AppLogger.info(
message: "Namespaces lookup using function", message: "Namespaces lookup using function",
backtrace: caller, backtrace: caller,

View File

@ -48,8 +48,7 @@ module Users
LIMIT #{limit} LIMIT #{limit}
SQL SQL
::Gitlab::Database::LoadBalancing::SessionMap ::Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do
.current(connection.load_balancer).fallback_to_replicas_for_ambiguous_queries do
connection.execute(sql).to_a connection.execute(sql).to_a
end end
end end

View File

@ -22,6 +22,8 @@ module Import
validates :namespace_id, :import_type, :source_hostname, :source_user_identifier, :status, presence: true validates :namespace_id, :import_type, :source_hostname, :source_user_identifier, :status, presence: true
validates :source_user_identifier, uniqueness: { scope: [:namespace_id, :source_hostname, :import_type] } validates :source_user_identifier, uniqueness: { scope: [:namespace_id, :source_hostname, :import_type] }
validates :placeholder_user_id, presence: true, unless: :completed? validates :placeholder_user_id, presence: true, unless: :completed?
validates :reassignment_token, absence: true, unless: :awaiting_approval?
validates :reassignment_token, length: { is: 32 }, if: :awaiting_approval?
validates :reassign_to_user_id, presence: true, if: -> { validates :reassign_to_user_id, presence: true, if: -> {
awaiting_approval? || reassignment_in_progress? || completed? awaiting_approval? || reassignment_in_progress? || completed?
} }
@ -61,6 +63,14 @@ module Import
state status_name, value: value state status_name, value: value
end end
before_transition awaiting_approval: any do |source_user|
source_user.reassignment_token = nil
end
before_transition any => :awaiting_approval do |source_user|
source_user.reassignment_token = SecureRandom.hex
end
event :reassign do event :reassign do
transition REASSIGNABLE_STATUSES => :awaiting_approval transition REASSIGNABLE_STATUSES => :awaiting_approval
end end
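The `length: { is: 32 }` validation above matches the token generator used in the transition; a quick check in plain Ruby:

    require 'securerandom'

    token = SecureRandom.hex # 16 random bytes, hex-encoded
    token.length             # => 32, the exact length the validation expects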

View File

@ -191,7 +191,7 @@ class InternalId < ApplicationRecord
# `init` computes the maximum based on actual records. We use the # `init` computes the maximum based on actual records. We use the
# primary to make sure we have up to date results # primary to make sure we have up to date results
Gitlab::Database::LoadBalancing::SessionMap.current(subject.load_balancer).use_primary do Gitlab::Database::LoadBalancing::Session.current.use_primary do
instance = subject.is_a?(::Class) ? nil : subject instance = subject.is_a?(::Class) ? nil : subject
init.call(instance, scope) || 0 init.call(instance, scope) || 0

View File

@ -60,6 +60,18 @@ class Issue < ApplicationRecord
# prevent caching this column by rails, as we want to easily remove it after the backfilling # prevent caching this column by rails, as we want to easily remove it after the backfilling
ignore_column :tmp_epic_id, remove_with: '16.11', remove_after: '2024-03-31' ignore_column :tmp_epic_id, remove_with: '16.11', remove_after: '2024-03-31'
# Interim columns to convert integer IDs to bigint
ignore_column :author_id_convert_to_bigint, remove_with: '17.7', remove_after: '2024-11-17'
ignore_column :closed_by_id_convert_to_bigint, remove_with: '17.7', remove_after: '2024-11-17'
ignore_column :duplicated_to_id_convert_to_bigint, remove_with: '17.7', remove_after: '2024-11-17'
ignore_column :id_convert_to_bigint, remove_with: '17.7', remove_after: '2024-11-17'
ignore_column :last_edited_by_id_convert_to_bigint, remove_with: '17.7', remove_after: '2024-11-17'
ignore_column :milestone_id_convert_to_bigint, remove_with: '17.7', remove_after: '2024-11-17'
ignore_column :moved_to_id_convert_to_bigint, remove_with: '17.7', remove_after: '2024-11-17'
ignore_column :project_id_convert_to_bigint, remove_with: '17.7', remove_after: '2024-11-17'
ignore_column :promoted_to_epic_id_convert_to_bigint, remove_with: '17.7', remove_after: '2024-11-17'
ignore_column :updated_by_id_convert_to_bigint, remove_with: '17.7', remove_after: '2024-11-17'
belongs_to :project belongs_to :project
belongs_to :namespace, inverse_of: :issues belongs_to :namespace, inverse_of: :issues

View File

@ -1491,8 +1491,7 @@ class Project < ApplicationRecord
job_type = type.to_s.capitalize job_type = type.to_s.capitalize
if job_id if job_id
use_primary = ::Gitlab::Database::LoadBalancing::SessionMap.current(load_balancer).use_primary? Gitlab::AppLogger.info("#{job_type} job scheduled for #{full_path} with job ID #{job_id} (primary: #{::Gitlab::Database::LoadBalancing::Session.current.use_primary?}).")
Gitlab::AppLogger.info("#{job_type} job scheduled for #{full_path} with job ID #{job_id} (primary: #{use_primary}).")
else else
Gitlab::AppLogger.error("#{job_type} job failed to create for #{full_path}.") Gitlab::AppLogger.error("#{job_type} job failed to create for #{full_path}.")
end end

View File

@ -45,7 +45,7 @@ class SentNotification < ApplicationRecord
# Non-sticky write is used as `.record` is only used in ActionMailer # Non-sticky write is used as `.record` is only used in ActionMailer
# where there are no queries to SentNotification. # where there are no queries to SentNotification.
::Gitlab::Database::LoadBalancing::SessionMap.current(load_balancer).without_sticky_writes do ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes do
create(attrs) create(attrs)
end end
end end

View File

@ -52,7 +52,7 @@ module Ci
# Prevent parallel jobs # Prevent parallel jobs
in_lock("#{self.class.name.underscore}/worker/#{@worker_index}", ttl: MAX_TTL, retries: 0) do in_lock("#{self.class.name.underscore}/worker/#{@worker_index}", ttl: MAX_TTL, retries: 0) do
::Gitlab::Database::LoadBalancing::SessionMap.without_sticky_writes do ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes do
report = insert_new_finished_pipelines report = insert_new_finished_pipelines
ServiceResponse.success(payload: report.merge(service_payload)) ServiceResponse.success(payload: report.merge(service_payload))

View File

@ -144,7 +144,7 @@ module Ci
# We want to reset a load balancing session to discard the side # We want to reset a load balancing session to discard the side
# effects of writes that could have happened prior to this moment. # effects of writes that could have happened prior to this moment.
# #
::Gitlab::Database::LoadBalancing::SessionMap.clear_session ::Gitlab::Database::LoadBalancing::Session.clear_session
@metrics.observe_queue_time(:retrieve, @runner.runner_type) do @metrics.observe_queue_time(:retrieve, @runner.runner_type) do
queue_query_proc.call queue_query_proc.call

View File

@ -3,15 +3,25 @@
module Import module Import
module SourceUsers module SourceUsers
class AcceptReassignmentService < BaseService class AcceptReassignmentService < BaseService
def initialize(import_source_user, current_user:) def initialize(import_source_user, current_user:, reassignment_token:)
@import_source_user = import_source_user @import_source_user = import_source_user
@current_user = current_user @current_user = current_user
@reassignment_token = reassignment_token
end end
def execute def execute
return error_invalid_permissions unless current_user_matches_reassign_to_user invalid_permissions = false
accept_successful = false
if import_source_user.accept import_source_user.with_lock do
next invalid_permissions = true unless current_user_matches_reassign_to_user? && reassignment_token_is_valid?
accept_successful = import_source_user.accept
end
return error_invalid_permissions if invalid_permissions
if accept_successful
Import::ReassignPlaceholderUserRecordsWorker.perform_async(import_source_user.id) Import::ReassignPlaceholderUserRecordsWorker.perform_async(import_source_user.id)
ServiceResponse.success(payload: import_source_user) ServiceResponse.success(payload: import_source_user)
else else
@ -21,11 +31,17 @@ module Import
private private
def current_user_matches_reassign_to_user attr_reader :reassignment_token
def current_user_matches_reassign_to_user?
return false if current_user.nil? return false if current_user.nil?
current_user.id == import_source_user.reassign_to_user_id current_user.id == import_source_user.reassign_to_user_id
end end
def reassignment_token_is_valid?
reassignment_token == import_source_user.reassignment_token
end
end end
end end
end end
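The `with_lock` block added here is the core concurrency change; a hedged sketch of the pattern, with the record and event names taken from this commit and the surrounding flow simplified:

    # with_lock opens a transaction and reloads the row with SELECT ... FOR UPDATE,
    # so a concurrent accept/reject/cancel on the same Import::SourceUser row blocks
    # until this transaction finishes and then sees the already-transitioned state.
    source_user.with_lock do
      next unless source_user.awaiting_approval?

      source_user.accept
    end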

View File

@ -10,9 +10,21 @@ module Import
def execute def execute
return error_invalid_permissions unless current_user.can?(:admin_import_source_user, import_source_user) return error_invalid_permissions unless current_user.can?(:admin_import_source_user, import_source_user)
return error_invalid_status unless import_source_user.cancelable_status?
if cancel_reassignment invalid_status = false
cancel_successful = false
import_source_user.with_lock do
if import_source_user.cancelable_status?
cancel_successful = cancel_reassignment
else
invalid_status = true
end
end
return error_invalid_status if invalid_status
if cancel_successful
ServiceResponse.success(payload: import_source_user) ServiceResponse.success(payload: import_source_user)
else else
ServiceResponse.error(payload: import_source_user, message: import_source_user.errors.full_messages) ServiceResponse.error(payload: import_source_user, message: import_source_user.errors.full_messages)

View File

@ -11,10 +11,22 @@ module Import
def execute def execute
return error_invalid_permissions unless current_user.can?(:admin_import_source_user, import_source_user) return error_invalid_permissions unless current_user.can?(:admin_import_source_user, import_source_user)
return error_invalid_status unless import_source_user.reassignable_status?
return error_invalid_assignee unless valid_assignee?(assignee_user) return error_invalid_assignee unless valid_assignee?(assignee_user)
if reassign_user invalid_status = false
reassign_successful = false
import_source_user.with_lock do
if import_source_user.reassignable_status?
reassign_successful = reassign_user
else
invalid_status = true
end
end
return error_invalid_status if invalid_status
if reassign_successful
send_user_reassign_email send_user_reassign_email
ServiceResponse.success(payload: import_source_user) ServiceResponse.success(payload: import_source_user)

View File

@ -3,16 +3,27 @@
module Import module Import
module SourceUsers module SourceUsers
class RejectReassignmentService < BaseService class RejectReassignmentService < BaseService
def initialize(import_source_user, current_user:) def initialize(import_source_user, current_user:, reassignment_token:)
@import_source_user = import_source_user @import_source_user = import_source_user
@current_user = current_user @current_user = current_user
@reassignment_token = reassignment_token
end end
def execute def execute
return error_invalid_permissions unless current_user_matches_reassign_to_user
return error_invalid_status unless import_source_user.awaiting_approval? return error_invalid_status unless import_source_user.awaiting_approval?
if reject invalid_permissions = false
reject_successful = false
import_source_user.with_lock do
next invalid_permissions = true unless current_user_matches_reassign_to_user? && reassignment_token_is_valid?
reject_successful = import_source_user.reject
end
return error_invalid_permissions if invalid_permissions
if reject_successful
send_user_reassign_rejected_email send_user_reassign_rejected_email
ServiceResponse.success(payload: import_source_user) ServiceResponse.success(payload: import_source_user)
@ -27,14 +38,16 @@ module Import
private private
def current_user_matches_reassign_to_user attr_reader :reassignment_token
def current_user_matches_reassign_to_user?
return false if current_user.nil? return false if current_user.nil?
current_user.id == import_source_user.reassign_to_user_id current_user.id == import_source_user.reassign_to_user_id
end end
def reject def reassignment_token_is_valid?
import_source_user.reject reassignment_token == import_source_user.reassignment_token
end end
end end
end end

View File

@ -274,7 +274,7 @@ class IssuableBaseService < ::BaseContainerService
# rubocop:disable Metrics/AbcSize -- Method is only slightly over the limit due to decomposition method # rubocop:disable Metrics/AbcSize -- Method is only slightly over the limit due to decomposition method
def update(issuable) def update(issuable)
::Gitlab::Database::LoadBalancing::SessionMap.current(issuable.load_balancer).use_primary! ::Gitlab::Database::LoadBalancing::Session.current.use_primary!
old_associations = associations_before_update(issuable) old_associations = associations_before_update(issuable)
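For readers comparing the two session APIs used in this commit, a short sketch of the block form versus the bang form used above (the queries are placeholders):

    session = ::Gitlab::Database::LoadBalancing::Session.current

    # Block form: only queries inside the block are routed to the primary.
    session.use_primary do
      issuable.reload
    end

    # Bang form: every later query in this request sticks to the primary,
    # which is what an update flow wants once it is about to write.
    session.use_primary!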

View File

@ -18,9 +18,8 @@ module PersonalAccessTokens
# would be updated when using #touch). # would be updated when using #touch).
return unless update? return unless update?
lb = @personal_access_token.load_balancer
try_obtain_lease do try_obtain_lease do
::Gitlab::Database::LoadBalancing::SessionMap.current(lb).without_sticky_writes do ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes do
@personal_access_token.update_column(:last_used_at, Time.zone.now) @personal_access_token.update_column(:last_used_at, Time.zone.now)
end end
end end

View File

@ -20,8 +20,7 @@ module Users
return unless user return unless user
return if user.last_activity_on == Date.today return if user.last_activity_on == Date.today
::Gitlab::Database::LoadBalancing::SessionMap.current(user.load_balancer) ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes { record_activity }
.without_sticky_writes { record_activity }
end end
private private

View File

@ -48,7 +48,7 @@
  - c.with_footer do
    .gl-flex.gl-gap-3
-     = render Pajamas::ButtonComponent.new(variant: :danger, method: :post, href: accept_import_source_user_path(@source_user)) do
+     = render Pajamas::ButtonComponent.new(variant: :danger, method: :post, href: accept_import_source_user_path(@source_user.reassignment_token)) do
        = s_('UserMapping|Approve reassignment')
-     = render Pajamas::ButtonComponent.new(method: :post, href: decline_import_source_user_path(@source_user)) do
+     = render Pajamas::ButtonComponent.new(method: :post, href: decline_import_source_user_path(@source_user.reassignment_token)) do
        = s_('UserMapping|Reject')

View File

@ -0,0 +1,3 @@
%li.disabled{ 'aria-disabled': 'true', 'aria-hidden': 'true' }
%span.gl-pagination-item{ data: { testid: testid } }
= yield

View File

@ -4,5 +4,5 @@
-# total_pages: total number of pages -# total_pages: total number of pages
-# per_page: number of items to fetch per page -# per_page: number of items to fetch per page
-# remote: data-remote -# remote: data-remote
%li.page-item.disabled.gl-hidden.md:gl-block %li.disabled.gl-hidden.md:gl-block
= link_to raw(t('views.pagination.truncate')), '#', class: 'page-link' = link_to raw(t('views.pagination.truncate')), '#', class: 'gl-pagination-item'

View File

@ -6,25 +6,25 @@
- if paginator.has_previous_page? - if paginator.has_previous_page?
- unless without_first_and_last_pages - unless without_first_and_last_pages
%li.page-item %li
- first_page_path = url_for(page_params.merge(cursor: paginator.cursor_for_first_page)) - first_page_path = url_for(page_params.merge(cursor: paginator.cursor_for_first_page))
= link_to first_page_path, rel: 'first', class: 'page-link' do = link_to first_page_path, rel: 'first', class: 'gl-pagination-item' do
= sprite_icon('chevron-double-lg-left', size: 8) = sprite_icon('chevron-double-lg-left', size: 8)
= s_('Pagination|First') = s_('Pagination|First')
%li.page-item.prev %li
= link_to previous_path, rel: 'prev', class: 'page-link' do = link_to previous_path, rel: 'prev', class: 'gl-pagination-item' do
= sprite_icon('chevron-lg-left', size: 8) = sprite_icon('chevron-left')
= s_('Pagination|Prev') = s_('Pagination|Prev')
- if paginator.has_next_page? - if paginator.has_next_page?
%li.page-item.next %li
= link_to next_path, rel: 'next', class: 'page-link' do = link_to next_path, rel: 'next', class: 'gl-pagination-item' do
= s_('Pagination|Next') = s_('Pagination|Next')
= sprite_icon('chevron-lg-right', size: 8) = sprite_icon('chevron-right')
- unless without_first_and_last_pages - unless without_first_and_last_pages
%li.page-item %li
- last_page_path = url_for(page_params.merge(cursor: paginator.cursor_for_last_page)) - last_page_path = url_for(page_params.merge(cursor: paginator.cursor_for_last_page))
= link_to last_page_path, rel: 'last', class: 'page-link' do = link_to last_page_path, rel: 'last', class: 'gl-pagination-item' do
= s_('Pagination|Last') = s_('Pagination|Last')
= sprite_icon('chevron-double-lg-right', size: 8) = sprite_icon('chevron-double-lg-right', size: 8)

View File

@ -6,9 +6,9 @@
-# per_page: number of items to fetch per page -# per_page: number of items to fetch per page
-# remote: data-remote -# remote: data-remote
- page_url = current_page.last? ? '#' : url - is_last_page = current_page.last?
- view = is_last_page ? 'kaminari/gitlab/disabled_page_cursor_nav' : 'kaminari/gitlab/page_cursor_nav'
%li.page-item.js-next-button{ class: ('disabled' if current_page.last?) } = render view, testid: 'kaminari-pagination-next', rel: 'next', url: url, remote: remote do
= link_to page_url, rel: 'next', remote: remote, class: 'page-link' do = s_('Pagination|Next')
= s_('Pagination|Next') = sprite_icon('chevron-right')
= sprite_icon('chevron-lg-right', size: 8)

View File

@ -6,9 +6,9 @@
-# total_pages: total number of pages -# total_pages: total number of pages
-# per_page: number of items to fetch per page -# per_page: number of items to fetch per page
-# remote: data-remote -# remote: data-remote
%li.page-item.js-pagination-page{ class: [active_when(page.current?), %li.js-pagination-page{ class: [active_when(page.current?),
('sibling' if page.next? || page.prev?), ('sibling' if page.next? || page.prev?),
('js-first-button' if page.first?), ('js-first-button' if page.first?),
('js-last-button' if page.last?), ('js-last-button' if page.last?),
('!gl-hidden md:!gl-block' if !page.current?)] } ('!gl-hidden md:!gl-block' if !page.current?)] }
= link_to page, url, { remote: remote, rel: page.next? ? 'next' : page.prev? ? 'prev' : nil, class: ['page-link', active_when(page.current?)] } = link_to page, url, { remote: remote, rel: page.next? ? 'next' : page.prev? ? 'prev' : nil, class: ['gl-pagination-item', active_when(page.current?)], 'data-testid': 'kaminari-pagination-item' }

View File

@ -0,0 +1,3 @@
%li
= link_to url, rel: rel, remote: local_assigns[:remote], class: 'gl-pagination-item', data: { testid: testid } do
= yield

View File

@ -7,7 +7,7 @@
-# paginator: the paginator that renders the pagination tags inside -# paginator: the paginator that renders the pagination tags inside
= paginator.render do = paginator.render do
.gl-pagination.gl-mt-3 .gl-pagination.gl-mt-3
%ul.pagination.justify-content-center %ul.gl-justify-center
= prev_page_tag = prev_page_tag
- each_page do |page| - each_page do |page|
- if page.left_outer? || page.right_outer? || page.inside_window? || page.first? || page.last? - if page.left_outer? || page.right_outer? || page.inside_window? || page.first? || page.last?

View File

@ -6,9 +6,9 @@
-# per_page: number of items to fetch per page -# per_page: number of items to fetch per page
-# remote: data-remote -# remote: data-remote
- page_url = current_page.first? ? '#' : url - is_first_page = current_page.first?
- view = is_first_page ? 'kaminari/gitlab/disabled_page_cursor_nav' : 'kaminari/gitlab/page_cursor_nav'
%li.page-item.js-previous-button{ class: ('disabled' if current_page.first?) } = render view, testid: 'kaminari-pagination-prev', rel: 'prev', url: url, remote: remote do
= link_to page_url, rel: 'prev', remote: remote, class: 'page-link' do = sprite_icon('chevron-left')
= sprite_icon('chevron-lg-left', size: 8) = s_('Pagination|Prev')
= s_('Pagination|Prev')

View File

@ -1,12 +1,12 @@
.gl-pagination.gl-mt-3 - prev_view = previous_path ? 'kaminari/gitlab/page_cursor_nav' : 'kaminari/gitlab/disabled_page_cursor_nav'
%ul.pagination.justify-content-center - next_view = next_path ? 'kaminari/gitlab/page_cursor_nav' : 'kaminari/gitlab/disabled_page_cursor_nav'
- if previous_path
%li.page-item.prev - if previous_path || next_path
= link_to previous_path, rel: 'prev', class: 'page-link', data: paginate_event_tracking_data_attributes(event_tracking: event_tracking, event_label: 'prev') do .gl-pagination.gl-mt-3
= sprite_icon('chevron-lg-left', size: 8) %ul.gl-justify-center
= s_('Pagination|Prev') = render prev_view, testid: 'kaminari-pagination-prev', rel: 'prev', url: previous_path, data: paginate_event_tracking_data_attributes(event_tracking: event_tracking, event_label: 'prev') do
- if next_path = sprite_icon('chevron-left')
%li.page-item.next = s_('Pagination|Prev')
= link_to next_path, rel: 'next', class: 'page-link', data: paginate_event_tracking_data_attributes(event_tracking: event_tracking, event_label: 'next') do = render next_view, testid: 'kaminari-pagination-next', rel: 'next', url: next_path, data: paginate_event_tracking_data_attributes(event_tracking: event_tracking, event_label: 'next') do
= s_('Pagination|Next') = s_('Pagination|Next')
= sprite_icon('chevron-lg-right', size: 8) = sprite_icon('chevron-right')

View File

@ -27,7 +27,7 @@
destination_group: destination_group) destination_group: destination_group)
%p{ style: text_style } %p{ style: text_style }
= link_to import_source_user_url(@source_user), target: '_blank', rel: 'noopener noreferrer' do = link_to import_source_user_url(@source_user.reassignment_token), target: '_blank', rel: 'noopener noreferrer' do
%button{ type: 'button', style: button_style } %button{ type: 'button', style: button_style }
= s_('UserMapping|Review reassignment details') = s_('UserMapping|Review reassignment details')

View File

@ -16,7 +16,7 @@
source_hostname: source_hostname, source_hostname: source_hostname,
destination_group: destination_group } %> destination_group: destination_group } %>
<%= s_('UserMapping|Review reassignment details') %>: <%= import_source_user_url(@source_user) %> <%= s_('UserMapping|Review reassignment details') %>: <%= import_source_user_url(@source_user.reassignment_token) %>
<%= s_('UserMapping|Import details:') %> <%= s_('UserMapping|Import details:') %>
<%= safe_format(s_('UserMapping|Imported from: %{source_hostname}'), source_hostname: source_hostname) %> <%= safe_format(s_('UserMapping|Imported from: %{source_hostname}'), source_hostname: source_hostname) %>

View File

@ -3729,6 +3729,15 @@
:weight: 1 :weight: 1
:idempotent: true :idempotent: true
:tags: [] :tags: []
- :name: pages_delete_pages_deployment
:worker_name: Pages::DeletePagesDeploymentWorker
:feature_category: :pages
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: pages_domain_ssl_renewal - :name: pages_domain_ssl_renewal
:worker_name: PagesDomainSslRenewalWorker :worker_name: PagesDomainSslRenewalWorker
:feature_category: :pages :feature_category: :pages

View File

@ -30,9 +30,7 @@ module AuthorizedProjectUpdate
# does not allow us to deduplicate these jobs. # does not allow us to deduplicate these jobs.
# https://gitlab.com/gitlab-org/gitlab/-/issues/325291 # https://gitlab.com/gitlab-org/gitlab/-/issues/325291
def use_replica_if_available(&block) def use_replica_if_available(&block)
::Gitlab::Database::LoadBalancing::SessionMap ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries(&block)
.with_sessions([::ApplicationRecord, ::Ci::ApplicationRecord])
.use_replicas_for_read_queries(&block)
end end
def project_authorizations_needs_refresh?(user) def project_authorizations_needs_refresh?(user)
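A compact sketch of the replica-read pattern these workers revert to; the helper call inside the block is the one shown in the surrounding context, and `user` stands in for the record the worker is processing:

    # Reads inside the block may be served by replicas even if the session has
    # already written to the primary during this job.
    ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries do
      project_authorizations_needs_refresh?(user)
    end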

View File

@ -185,9 +185,7 @@ module ContainerExpirationPolicies
end end
def use_replica_if_available(&blk) def use_replica_if_available(&blk)
::Gitlab::Database::LoadBalancing::SessionMap ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries(&blk)
.with_sessions([::ApplicationRecord, ::Ci::ApplicationRecord])
.use_replicas_for_read_queries(&blk)
end end
end end
end end

View File

@ -51,9 +51,7 @@ class ContainerExpirationPolicyWorker # rubocop:disable Scalability/IdempotentWo
      # not perfomed with a delay
      # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63635#note_603771207
      def use_replica_if_available(&blk)
-       ::Gitlab::Database::LoadBalancing::SessionMap
-         .current(ContainerRepository.load_balancer)
-         .use_replicas_for_read_queries(&blk)
+       ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries(&blk)
      end

      def process_stale_ongoing_cleanups

View File

@ -43,9 +43,7 @@ module DependencyProxy
end end
def use_replica_if_available(&block) def use_replica_if_available(&block)
::Gitlab::Database::LoadBalancing::SessionMap ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries(&block)
.with_sessions([::ApplicationRecord, ::Ci::ApplicationRecord])
.use_replicas_for_read_queries(&block)
end end
end end
end end

View File

@ -53,9 +53,7 @@ module Packages
end end
def use_replica_if_available(&block) def use_replica_if_available(&block)
::Gitlab::Database::LoadBalancing::SessionMap ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries(&block)
.with_sessions([::ApplicationRecord, ::Ci::ApplicationRecord])
.use_replicas_for_read_queries(&block)
end end
end end
end end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
module Pages
class DeletePagesDeploymentWorker
include Gitlab::EventStore::Subscriber
data_consistency :always
feature_category :pages
idempotent!
def handle_event(event)
project = Project.find_by_id(event.data['project_id'])
return unless project
::Pages::DeleteService.new(project).execute
end
end
end
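Because this worker includes `Gitlab::EventStore::Subscriber`, it runs when a matching event is published; a hedged sketch of the publishing side, where the event class name and schema are assumptions and only the `project_id` payload key comes from the worker above:

    class Pages::ExampleDeletionEvent < Gitlab::EventStore::Event
      def schema
        {
          'type' => 'object',
          'required' => %w[project_id],
          'properties' => { 'project_id' => { 'type' => 'integer' } }
        }
      end
    end

    # The subscription itself is normally registered in the event store
    # initializer (not shown in this commit).
    Gitlab::EventStore.publish(Pages::ExampleDeletionEvent.new(data: { project_id: project.id }))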

View File

@ -1,9 +0,0 @@
---
name: use_load_balancing_session_map
feature_issue_url: https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/3834
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/166986
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/496505
milestone: '17.5'
group: group::scalability
type: gitlab_com_derisk
default_enabled: false

View File

@ -29,6 +29,8 @@ events:
unique: user.id unique: user.id
- name: i_package_terraform_module_user - name: i_package_terraform_module_user
unique: user.id unique: user.id
- name: i_package_ml_model_user
unique: user.id
distribution: distribution:
- ee - ee
- ce - ce

View File

@ -0,0 +1,23 @@
---
key_path: counts.count_total_package_ml_model_pulled_monthly
description: Monthly count of ml_model packages pulled
product_group: package_registry
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/151919
time_frame: 28d
data_source: internal_events
data_category: optional
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
events:
- name: pull_package_from_registry
filter:
label: ml_model

View File

@ -0,0 +1,23 @@
---
key_path: counts.count_total_push_package_ml_model_monthly
description: Monthly count of ml_model packages pushed
product_group: package_registry
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167298
time_frame: 28d
data_source: internal_events
data_category: optional
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
events:
- name: push_package_to_registry
filter:
label: ml_model

View File

@ -29,6 +29,8 @@ events:
unique: user.id unique: user.id
- name: i_package_terraform_module_user - name: i_package_terraform_module_user
unique: user.id unique: user.id
- name: i_package_ml_model_user
unique: user.id
distribution: distribution:
- ee - ee
- ce - ce

View File

@ -0,0 +1,23 @@
---
key_path: counts.count_total_package_ml_model_pulled_weekly
description: Weekly count of ml_model packages pulled
product_group: package_registry
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/151919
time_frame: 7d
data_source: internal_events
data_category: optional
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
events:
- name: pull_package_from_registry
filter:
label: ml_model

View File

@ -0,0 +1,23 @@
---
key_path: counts.count_total_push_package_ml_model_weekly
description: Weekly count of ml_model packages pushed
product_group: package_registry
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167298
time_frame: 7d
data_source: internal_events
data_category: optional
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
events:
- name: push_package_to_registry
filter:
label: ml_model

View File

@ -89,7 +89,7 @@ namespace :import do
post :upload post :upload
end end
resources :source_users, only: [] do resources :source_users, param: :reassignment_token, only: [] do
member do member do
get :show get :show
post :accept post :accept
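With `param: :reassignment_token`, member routes and path helpers are keyed by the token instead of the numeric ID; a hedged sketch of the resulting helper, with the URL shape inferred from the `import` namespace and the helper name as used in the view changes above:

    accept_import_source_user_path(source_user.reassignment_token)
    # => "/import/source_users/<reassignment_token>/accept" (shape assumed)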

View File

@ -595,6 +595,8 @@
- 1 - 1
- - pages_deactivate_mr_deployments - - pages_deactivate_mr_deployments
- 1 - 1
- - pages_delete_pages_deployment
- 1
- - pages_domain_ssl_renewal - - pages_domain_ssl_renewal
- 1 - 1
- - pages_domain_verification - - pages_domain_verification
@ -823,6 +825,8 @@
- 1 - 1
- - security_sync_policy_violation_comment - - security_sync_policy_violation_comment
- 1 - 1
- - security_sync_project_policy
- 1
- - security_sync_scan_policies - - security_sync_scan_policies
- 1 - 1
- - security_unassign_redundant_policy_configurations - - security_unassign_redundant_policy_configurations

View File

@ -0,0 +1,8 @@
---
migration_job_name: BackfillIssuesCorrectWorkItemTypeId
description: Backfills column issues.correct_work_item_type_id
feature_category: team_planning
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167972
milestone: '17.5'
queued_migration_version: 20241002185804
finalized_by: # version of the migration that finalized this BBM

View File

@ -1,11 +0,0 @@
---
migration_job_name: BackfillSbomOccurrencesTraversalIdsAndArchived
description: 'Backfills sbom_occurrences.traversal_ids and sbom_occurrences.archived
columns with values from sbom_occurrences.project.namespace.traversal_ids and sbom_occurrences.project.archived.
'
feature_category: dependency_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/144802
milestone: '16.10'
queued_migration_version: 20240214203242
finalized_by: '20240626231944'

View File

@ -0,0 +1,8 @@
---
migration_job_name: FixPickUpAtCiDeletedObject
description: Fix ci_deleted_objects#pick_up_at column so records get purged
feature_category: job_artifacts
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165778
milestone: '17.5'
queued_migration_version: 20241004064933
finalized_by: # version of the migration that finalized this BBM

File diff suppressed because it is too large

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddReassignmentTokenToImportSourceUsers < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.5'
def up
add_column :import_source_users, :reassignment_token, :text
add_concurrent_index :import_source_users, :reassignment_token, unique: true
add_text_limit :import_source_users, :reassignment_token, 32
end
def down
remove_column :import_source_users, :reassignment_token, :text
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class InitializeConversionOfIssuesIntegerIdsToBigint < Gitlab::Database::Migration[2.2]
milestone '17.5'
disable_ddl_transaction!
TABLE = :issues
COLUMNS = %i[author_id closed_by_id duplicated_to_id id last_edited_by_id milestone_id moved_to_id
project_id promoted_to_epic_id updated_by_id]
def up
initialize_conversion_of_integer_to_bigint(TABLE, COLUMNS)
end
def down
revert_initialize_conversion_of_integer_to_bigint(TABLE, COLUMNS)
end
end

View File

@ -1,27 +0,0 @@
# frozen_string_literal: true
class QueueBackfillSbomOccurrencesTraversalIdsAndArchived < Gitlab::Database::Migration[2.2]
milestone '16.10'
MIGRATION = "BackfillSbomOccurrencesTraversalIdsAndArchived"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 10_000
SUB_BATCH_SIZE = 100
restrict_gitlab_migration gitlab_schema: :gitlab_sec
def up
queue_batched_background_migration(
MIGRATION,
:sbom_occurrences,
:id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(MIGRATION, :sbom_occurrences, :id, [])
end
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
class QueueBackfillIssuesCorrectWorkItemTypeId < Gitlab::Database::Migration[2.2]
milestone '17.5'
# Select the applicable gitlab schema for your batched background migration
restrict_gitlab_migration gitlab_schema: :gitlab_main
MIGRATION = "BackfillIssuesCorrectWorkItemTypeId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 10_000
MAX_BATCH_SIZE = 30_000
SUB_BATCH_SIZE = 50
def up
queue_batched_background_migration(
MIGRATION,
:issues,
:id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
max_batch_size: MAX_BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(MIGRATION, :issues, :id, [])
end
end
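Once deployed, the progress of this batched background migration can be inspected from a Rails console; a hedged sketch using the batched-migration framework's model (names are from that framework, not this commit):

    Gitlab::Database::BackgroundMigration::BatchedMigration
      .find_by(job_class_name: 'BackfillIssuesCorrectWorkItemTypeId')
      &.status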

View File

@ -0,0 +1,44 @@
# frozen_string_literal: true
class QueueFixPickUpAtCiDeletedObject < Gitlab::Database::Migration[2.2]
milestone '17.5'
restrict_gitlab_migration gitlab_schema: :gitlab_ci
MIGRATION = "FixPickUpAtCiDeletedObject"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 25_000
SUB_BATCH_SIZE = 150
GITLAB_OPTIMIZED_BATCH_SIZE = 75_000
GITLAB_OPTIMIZED_SUB_BATCH_SIZE = 250
def up
queue_batched_background_migration(
MIGRATION,
:ci_deleted_objects,
:id,
job_interval: DELAY_INTERVAL,
**batch_sizes
)
end
def down
delete_batched_background_migration(MIGRATION, :ci_deleted_objects, :id, [])
end
private
def batch_sizes
if Gitlab.com_except_jh?
{
batch_size: GITLAB_OPTIMIZED_BATCH_SIZE,
sub_batch_size: GITLAB_OPTIMIZED_SUB_BATCH_SIZE
}
else
{
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
}
end
end
end

View File

@ -1 +0,0 @@
c8fb4783e43e1276a74b0890c35b51ed4dd2fc15ea9855be1def9fca221d7d5f

View File

@ -0,0 +1 @@
1a74228cbfeb2795e3ea6c544cfedee3b3f809e86bd379cd89bfec748be17ded

View File

@ -0,0 +1 @@
83921a692be7211db743282ff5227815d743ca87c764101fce59a6d4a62a083c

View File

@ -0,0 +1 @@
edea1b1c817159aa2440b01d4f070df06e3cb310c988a87324bb4b2da044f8d1

View File

@ -0,0 +1 @@
4ae367e0a34f81246d2ff0d7d021447d2ffd01c03f463d07502df186aa708cc4

View File

@ -1065,6 +1065,24 @@ RETURN NEW;
END END
$$; $$;
CREATE FUNCTION trigger_22262f5f16d8() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
NEW."author_id_convert_to_bigint" := NEW."author_id";
NEW."closed_by_id_convert_to_bigint" := NEW."closed_by_id";
NEW."duplicated_to_id_convert_to_bigint" := NEW."duplicated_to_id";
NEW."id_convert_to_bigint" := NEW."id";
NEW."last_edited_by_id_convert_to_bigint" := NEW."last_edited_by_id";
NEW."milestone_id_convert_to_bigint" := NEW."milestone_id";
NEW."moved_to_id_convert_to_bigint" := NEW."moved_to_id";
NEW."project_id_convert_to_bigint" := NEW."project_id";
NEW."promoted_to_epic_id_convert_to_bigint" := NEW."promoted_to_epic_id";
NEW."updated_by_id_convert_to_bigint" := NEW."updated_by_id";
RETURN NEW;
END;
$$;
CREATE FUNCTION trigger_248cafd363ff() RETURNS trigger CREATE FUNCTION trigger_248cafd363ff() RETURNS trigger
LANGUAGE plpgsql LANGUAGE plpgsql
AS $$ AS $$
@ -12278,11 +12296,13 @@ CREATE TABLE import_source_users (
import_type text NOT NULL, import_type text NOT NULL,
reassigned_by_user_id bigint, reassigned_by_user_id bigint,
reassignment_error text, reassignment_error text,
reassignment_token text,
CONSTRAINT check_05708218cd CHECK ((char_length(reassignment_error) <= 255)), CONSTRAINT check_05708218cd CHECK ((char_length(reassignment_error) <= 255)),
CONSTRAINT check_0d7295a307 CHECK ((char_length(import_type) <= 255)), CONSTRAINT check_0d7295a307 CHECK ((char_length(import_type) <= 255)),
CONSTRAINT check_199c28ec54 CHECK ((char_length(source_username) <= 255)), CONSTRAINT check_199c28ec54 CHECK ((char_length(source_username) <= 255)),
CONSTRAINT check_562655155f CHECK ((char_length(source_name) <= 255)), CONSTRAINT check_562655155f CHECK ((char_length(source_name) <= 255)),
CONSTRAINT check_cc9d4093b5 CHECK ((char_length(source_user_identifier) <= 255)), CONSTRAINT check_cc9d4093b5 CHECK ((char_length(source_user_identifier) <= 255)),
CONSTRAINT check_cd2edb9334 CHECK ((char_length(reassignment_token) <= 32)),
CONSTRAINT check_e2039840c5 CHECK ((char_length(source_hostname) <= 255)) CONSTRAINT check_e2039840c5 CHECK ((char_length(source_hostname) <= 255))
); );
@ -12965,6 +12985,16 @@ CREATE TABLE issues (
tmp_epic_id bigint, tmp_epic_id bigint,
imported_from smallint DEFAULT 0 NOT NULL, imported_from smallint DEFAULT 0 NOT NULL,
correct_work_item_type_id bigint DEFAULT 0 NOT NULL, correct_work_item_type_id bigint DEFAULT 0 NOT NULL,
author_id_convert_to_bigint bigint,
closed_by_id_convert_to_bigint bigint,
duplicated_to_id_convert_to_bigint bigint,
id_convert_to_bigint bigint DEFAULT 0 NOT NULL,
last_edited_by_id_convert_to_bigint bigint,
milestone_id_convert_to_bigint bigint,
moved_to_id_convert_to_bigint bigint,
project_id_convert_to_bigint bigint,
promoted_to_epic_id_convert_to_bigint bigint,
updated_by_id_convert_to_bigint bigint,
CONSTRAINT check_2addf801cd CHECK ((work_item_type_id IS NOT NULL)), CONSTRAINT check_2addf801cd CHECK ((work_item_type_id IS NOT NULL)),
CONSTRAINT check_c33362cd43 CHECK ((namespace_id IS NOT NULL)), CONSTRAINT check_c33362cd43 CHECK ((namespace_id IS NOT NULL)),
CONSTRAINT check_fba63f706d CHECK ((lock_version IS NOT NULL)) CONSTRAINT check_fba63f706d CHECK ((lock_version IS NOT NULL))
@ -29284,6 +29314,8 @@ CREATE INDEX index_import_source_users_on_placeholder_user_id ON import_source_u
CREATE INDEX index_import_source_users_on_reassigned_by_user_id ON import_source_users USING btree (reassigned_by_user_id); CREATE INDEX index_import_source_users_on_reassigned_by_user_id ON import_source_users USING btree (reassigned_by_user_id);
CREATE UNIQUE INDEX index_import_source_users_on_reassignment_token ON import_source_users USING btree (reassignment_token);
CREATE INDEX index_imported_projects_on_import_type_creator_id_created_at ON projects USING btree (import_type, creator_id, created_at) WHERE (import_type IS NOT NULL); CREATE INDEX index_imported_projects_on_import_type_creator_id_created_at ON projects USING btree (import_type, creator_id, created_at) WHERE (import_type IS NOT NULL);
CREATE INDEX index_imported_projects_on_import_type_id ON projects USING btree (import_type, id) WHERE (import_type IS NOT NULL); CREATE INDEX index_imported_projects_on_import_type_id ON projects USING btree (import_type, id) WHERE (import_type IS NOT NULL);
@ -33678,6 +33710,8 @@ CREATE TRIGGER trigger_207005e8e995 BEFORE INSERT OR UPDATE ON operations_strate
CREATE TRIGGER trigger_219952df8fc4 BEFORE INSERT OR UPDATE ON merge_request_blocks FOR EACH ROW EXECUTE FUNCTION trigger_219952df8fc4(); CREATE TRIGGER trigger_219952df8fc4 BEFORE INSERT OR UPDATE ON merge_request_blocks FOR EACH ROW EXECUTE FUNCTION trigger_219952df8fc4();
CREATE TRIGGER trigger_22262f5f16d8 BEFORE INSERT OR UPDATE ON issues FOR EACH ROW EXECUTE FUNCTION trigger_22262f5f16d8();
CREATE TRIGGER trigger_248cafd363ff BEFORE INSERT OR UPDATE ON packages_npm_metadata FOR EACH ROW EXECUTE FUNCTION trigger_248cafd363ff(); CREATE TRIGGER trigger_248cafd363ff BEFORE INSERT OR UPDATE ON packages_npm_metadata FOR EACH ROW EXECUTE FUNCTION trigger_248cafd363ff();
CREATE TRIGGER trigger_2514245c7fc5 BEFORE INSERT OR UPDATE ON dast_site_profile_secret_variables FOR EACH ROW EXECUTE FUNCTION trigger_2514245c7fc5(); CREATE TRIGGER trigger_2514245c7fc5 BEFORE INSERT OR UPDATE ON dast_site_profile_secret_variables FOR EACH ROW EXECUTE FUNCTION trigger_2514245c7fc5();

View File

@ -296,7 +296,7 @@ To create a thread:
> - Resolvable threads for issues [enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/31114) in GitLab 16.4. > - Resolvable threads for issues [enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/31114) in GitLab 16.4.
> - Resolvable threads for issues [generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/31114) in GitLab 16.7. Feature flag `resolvable_issue_threads` removed. > - Resolvable threads for issues [generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/31114) in GitLab 16.7. Feature flag `resolvable_issue_threads` removed.
> - Resolvable threads for tasks, objectives, and key results [generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/458818) in GitLab 17.3. > - Resolvable threads for tasks, objectives, and key results [generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/458818) in GitLab 17.3.
> - Resolvable threads for epics [introduced](https://gitlab.com/groups/gitlab-org/-/epics/458818) in GitLab 17.5. Your administrator must have [enabled the new look for epics](../group/epics/epic_work_items.md). > - Resolvable threads for epics [introduced](https://gitlab.com/groups/gitlab-org/-/issues/458818) in GitLab 17.5. Your administrator must have [enabled the new look for epics](../group/epics/epic_work_items.md).
You can resolve a thread when you want to finish a conversation. You can resolve a thread when you want to finish a conversation.

View File

@ -71,7 +71,8 @@ Prerequisites:
- A custom domain name `example.com` or subdomain `subdomain.example.com`. - A custom domain name `example.com` or subdomain `subdomain.example.com`.
- Access to your domain's server control panel to set up a DNS `TXT` record to verify your domain's ownership. - Access to your domain's server control panel to set up a DNS `TXT` record to verify your domain's ownership.
- A project in the group. This project will be linked to the verified domains, and should not be deleted. - A project in the group. This project will be linked to the verified domains, and should not be deleted.
- Ensure that [GitLab Pages](../project/pages/index.md) is enabled for the project. If GitLab Pages is disabled, adding the domain might result in an error.
- You must have the Owner role for the top-level group. - You must have the Owner role for the top-level group.
Domain verification applies at the top-level group and to all subgroups and projects Domain verification applies at the top-level group and to all subgroups and projects

View File

@ -101,10 +101,7 @@ module API
end end
def authenticate_job_via_dependent_job! def authenticate_job_via_dependent_job!
# Use primary for both main and ci database as authenticating in the scope of runners will load ::Gitlab::Database::LoadBalancing::Session.current.use_primary { authenticate! }
# Ci::Build model and other standard authn related models like License, Project and User.
::Gitlab::Database::LoadBalancing::SessionMap
.with_sessions([::ApplicationRecord, ::Ci::ApplicationRecord]).use_primary { authenticate! }
forbidden! unless current_job forbidden! unless current_job
forbidden! unless can?(current_user, :read_build, current_job) forbidden! unless can?(current_user, :read_build, current_job)

View File

@ -266,8 +266,7 @@ module API
# so we need to skip the second FIPS check here. # so we need to skip the second FIPS check here.
file_name, format = extract_format(params[:file_name], skip_fips_check: true) file_name, format = extract_format(params[:file_name], skip_fips_check: true)
lb = ::ApplicationRecord.load_balancer ::Gitlab::Database::LoadBalancing::Session.current.use_primary do
::Gitlab::Database::LoadBalancing::SessionMap.current(lb).use_primary do
result = ::Packages::Maven::FindOrCreatePackageService result = ::Packages::Maven::FindOrCreatePackageService
.new(user_project, current_user, params.merge(build: current_authenticated_job)).execute .new(user_project, current_user, params.merge(build: current_authenticated_job)).execute

View File

@ -136,6 +136,8 @@ module API
bad_request!(s_('MlModelRegistry|Artifact file creation failed')) unless package_file bad_request!(s_('MlModelRegistry|Artifact file creation failed')) unless package_file
track_package_event('push_package', :ml_model, project: project, namespace: project.namespace)
created! created!
rescue ObjectStorage::RemoteStoreError => e rescue ObjectStorage::RemoteStoreError => e
Gitlab::ErrorTracking.track_exception(e, extra: { file_name: params[:file_name], project_id: project.id }) Gitlab::ErrorTracking.track_exception(e, extra: { file_name: params[:file_name], project_id: project.id })
@ -157,6 +159,8 @@ module API
package_file = ::Packages::PackageFileFinder.new(package, file_name).execute! package_file = ::Packages::PackageFileFinder.new(package, file_name).execute!
track_package_event('pull_package', :ml_model, project: project, namespace: project.namespace)
present_package_file!(package_file) present_package_file!(package_file)
end end
end end

View File

@ -471,8 +471,7 @@ module Gitlab
end end
def find_build_by_token(token) def find_build_by_token(token)
::Gitlab::Database::LoadBalancing::SessionMap ::Gitlab::Database::LoadBalancing::Session.current.use_primary do
.with_sessions([::ApplicationRecord, ::Ci::ApplicationRecord]).use_primary do
::Ci::AuthJobFinder.new(token: token).execute ::Ci::AuthJobFinder.new(token: token).execute
end end
end end

View File

@ -0,0 +1,41 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    class BackfillIssuesCorrectWorkItemTypeId < BatchedMigrationJob
      operation_name :update_issues_correct_work_item_type_id
      feature_category :team_planning

      def perform
        each_sub_batch do |sub_batch|
          first, last = sub_batch.pick(Arel.sql('min(id), max(id)'))

          connection.execute(
            <<~SQL
              UPDATE
                "issues"
              SET
                "correct_work_item_type_id" = "work_item_types"."correct_id",
                "author_id_convert_to_bigint" = "issues"."author_id",
                "closed_by_id_convert_to_bigint" = "issues"."closed_by_id",
                "duplicated_to_id_convert_to_bigint" = "issues"."duplicated_to_id",
                "id_convert_to_bigint" = "issues"."id",
                "last_edited_by_id_convert_to_bigint" = "issues"."last_edited_by_id",
                "milestone_id_convert_to_bigint" = "issues"."milestone_id",
                "moved_to_id_convert_to_bigint" = "issues"."moved_to_id",
                "project_id_convert_to_bigint" = "issues"."project_id",
                "promoted_to_epic_id_convert_to_bigint" = "issues"."promoted_to_epic_id",
                "updated_by_id_convert_to_bigint" = "issues"."updated_by_id"
              FROM
                "work_item_types"
              WHERE
                "issues"."work_item_type_id" = "work_item_types"."id"
                AND "issues"."id" BETWEEN #{first}
                AND #{last}
            SQL
          )
        end
      end
    end
  end
end
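
Batched background migration jobs like the one above are normally enqueued from a separate post-deployment migration, which is not part of this diff. A hypothetical sketch of what that scheduling migration could look like — class name, milestone, interval and batch sizes are illustrative assumptions, not taken from this commit:

# Hypothetical post-deployment migration (illustrative only).
class QueueBackfillIssuesCorrectWorkItemTypeId < Gitlab::Database::Migration[2.2]
  milestone '17.5'

  MIGRATION = 'BackfillIssuesCorrectWorkItemTypeId'
  DELAY_INTERVAL = 2.minutes

  restrict_gitlab_migration gitlab_schema: :gitlab_main

  def up
    queue_batched_background_migration(
      MIGRATION,
      :issues,
      :id,
      job_interval: DELAY_INTERVAL,
      batch_size: 10_000,     # example values
      sub_batch_size: 100
    )
  end

  def down
    delete_batched_background_migration(MIGRATION, :issues, :id, [])
  end
end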

View File

@ -1,39 +0,0 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    class BackfillSbomOccurrencesTraversalIdsAndArchived < BatchedMigrationJob
      feature_category :dependency_management
      operation_name :backfill_sbom_occurrences_traversal_ids_and_archived

      def perform
        each_sub_batch do |relation|
          batch_start_id, batch_end_id = relation.pick(Arel.sql("MIN(#{batch_column}), MAX(#{batch_column})"))

          ::Gitlab::Database.allow_cross_joins_across_databases(
            url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/477830'
          ) do
            connection.exec_update(update_sql(batch_start_id, batch_end_id))
          end
        end
      end

      private

      def update_sql(batch_start_id, batch_end_id)
        <<~SQL
          UPDATE
            sbom_occurrences
          SET
            traversal_ids = namespaces.traversal_ids,
            archived = projects.archived
          FROM
            projects JOIN namespaces ON namespaces.id = projects.namespace_id
          WHERE
            sbom_occurrences.project_id = projects.id AND
            sbom_occurrences.id >= #{batch_start_id} AND
            sbom_occurrences.id <= #{batch_end_id}
        SQL
      end
    end
  end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    class FixPickUpAtCiDeletedObject < BatchedMigrationJob
      operation_name :fix_pick_up_at_ci_deleted_objects
      feature_category :job_artifacts

      def perform
        each_sub_batch do |sub_batch|
          sub_batch
            .where('pick_up_at > ?', 15.minutes.from_now)
            .update_all("pick_up_at = least(pick_up_at, now() + '1 hour'::interval)")
        end
      end
    end
  end
end
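
Note that the update_all above caps pick_up_at rather than resetting it unconditionally: only rows scheduled more than 15 minutes out are touched, and each is clamped to at most one hour from now. Roughly, for one sub-batch in a Rails console — assuming the underlying model is Ci::DeletedObject and using made-up id bounds (the batched migration framework supplies the real relation):

# Illustrative only: what one sub-batch effectively executes.
Ci::DeletedObject
  .where(id: 1..10_000)                          # example sub-batch bounds
  .where('pick_up_at > ?', 15.minutes.from_now)  # only rows scheduled suspiciously far out
  .update_all("pick_up_at = least(pick_up_at, now() + '1 hour'::interval)")
# Approximate SQL:
#   UPDATE ci_deleted_objects
#   SET pick_up_at = least(pick_up_at, now() + interval '1 hour')
#   WHERE id BETWEEN 1 AND 10000 AND pick_up_at > <now + 15 minutes>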

View File

@ -28,7 +28,7 @@ module Gitlab
       # the write location to ensure the replica can make this query.
       # Adding use_primary_on_empty_location: true for extra precaution in case there happens to be
       # no LSN saved for the project then we will use the primary.
-      track_session_metrics(::ApplicationRecord.load_balancer) do
+      track_session_metrics do
         ::ApplicationRecord.sticking.find_caught_up_replica(:project, @project.id, use_primary_on_empty_location: true)
       end
@ -42,12 +42,12 @@ module Gitlab
       private
-      def track_session_metrics(lb)
-        before = ::Gitlab::Database::LoadBalancing::SessionMap.current(lb).use_primary?
+      def track_session_metrics
+        before = ::Gitlab::Database::LoadBalancing::Session.current.use_primary?
         yield
-        after = ::Gitlab::Database::LoadBalancing::SessionMap.current(lb).use_primary?
+        after = ::Gitlab::Database::LoadBalancing::Session.current.use_primary?
         increment_attempt_count

View File

@ -214,10 +214,10 @@ module Gitlab
       # Calls to #uncached only disable caching for the current connection. Since the load balancer
       # can potentially upgrade from read to read-write mode (using a different connection), we specify
       # up-front that we'll explicitly use the primary for the duration of the operation.
-      base_models = database_base_models_using_load_balancing.values
-      base_models.reduce(block) do |blk, model|
-        -> { Gitlab::Database::LoadBalancing::SessionMap.current(model.load_balancer).use_primary { model.uncached(&blk) } }
-      end.call
+      Gitlab::Database::LoadBalancing::Session.current.use_primary do
+        base_models = database_base_models_using_load_balancing.values
+        base_models.reduce(block) { |blk, model| -> { model.uncached(&blk) } }.call
+      end
     end
     def self.allow_cross_joins_across_databases(url:)
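
The replacement code composes one uncached context per base model by reducing over lambdas and then invoking the final lambda once. A standalone illustration of that composition pattern (plain Ruby, not GitLab code):

# Each reduce step wraps the previous callable in another model.uncached { ... },
# so calling the final lambda runs the work inside every context at once.
class FakeModel
  def initialize(name)
    @name = name
  end

  # Stand-in for ActiveRecord's Model.uncached, which yields with the
  # query cache disabled for that model's connection.
  def uncached
    puts "entering uncached for #{@name}"
    yield
  ensure
    puts "leaving uncached for #{@name}"
  end
end

models = [FakeModel.new('main'), FakeModel.new('ci')]
block  = -> { puts 'running the actual work' }

models.reduce(block) { |blk, model| -> { model.uncached(&blk) } }.call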

View File

@ -6,13 +6,13 @@ module Gitlab
     DEFAULT_INDEXES_PER_INVOCATION = 2
     def self.create_pending_indexes!(how_many: DEFAULT_INDEXES_PER_INVOCATION)
-      PostgresAsyncIndex.to_create.order(:id).limit(how_many).each do |async_index|
+      PostgresAsyncIndex.to_create.ordered.limit(how_many).each do |async_index|
         IndexCreator.new(async_index).perform
       end
     end
     def self.drop_pending_indexes!(how_many: DEFAULT_INDEXES_PER_INVOCATION)
-      PostgresAsyncIndex.to_drop.order(:id).limit(how_many).each do |async_index|
+      PostgresAsyncIndex.to_drop.ordered.limit(how_many).each do |async_index|
         IndexDestructor.new(async_index).perform
       end
     end

View File

@ -13,9 +13,8 @@ module Gitlab
       # require read consistency after recent writes.
       #
       def self.with_read_consistency(&block)
-        ::Gitlab::Database::LoadBalancing::SessionMap
-          .with_sessions(Gitlab::Database::LoadBalancing.base_models)
-          .use_primary(&block)
+        ::Gitlab::Database::LoadBalancing::Session
+          .current.use_primary(&block)
       end
     end
   end

View File

@ -59,7 +59,7 @@ module Gitlab
       # Returns number of WAL segments pending archival
       def pending_wal_count
-        Gitlab::Database::LoadBalancing::SessionMap.current(connection.load_balancer).use_primary do
+        Gitlab::Database::LoadBalancing::Session.current.use_primary do
           connection.execute(PENDING_WAL_COUNT_SQL).to_a.first&.fetch('pending_wal_count')
         end
       end

View File

@ -34,10 +34,6 @@ module Gitlab
       each_load_balancer.all?(&:primary_only?)
     end
-    def self.primary?(name)
-      each_load_balancer.find { |c| c.name == name }&.primary_only?
-    end
     def self.release_hosts
       each_load_balancer(&:release_host)
     end

View File

@ -13,7 +13,7 @@ module Gitlab
         inner.call
       ensure
         ::Gitlab::Database::LoadBalancing.release_hosts
-        ::Gitlab::Database::LoadBalancing::SessionMap.clear_session
+        ::Gitlab::Database::LoadBalancing::Session.clear_session
       end
     end
   end

View File

@ -131,7 +131,7 @@ module Gitlab
       private
       def current_session
-        ::Gitlab::Database::LoadBalancing::SessionMap.current(@load_balancer)
+        ::Gitlab::Database::LoadBalancing::Session.current
       end
       def track_read_only_transaction!

View File

@ -45,17 +45,18 @@ module Gitlab
       # Determine if we need to stick after handling a request.
       def stick_if_necessary(env)
+        return unless ::Gitlab::Database::LoadBalancing::Session.current.performed_write?
         namespaces_and_ids = sticking_namespaces(env)
         namespaces_and_ids.each do |sticking, namespace, id|
-          lb = sticking.load_balancer
-          sticking.stick(namespace, id) if ::Gitlab::Database::LoadBalancing::SessionMap.current(lb).performed_write?
+          sticking.stick(namespace, id)
         end
       end
       def clear
         ::Gitlab::Database::LoadBalancing.release_hosts
-        ::Gitlab::Database::LoadBalancing::SessionMap.clear_session
+        ::Gitlab::Database::LoadBalancing::Session.clear_session
       end
       # Determines the sticking namespace and identifier based on the Rack

View File

@ -56,7 +56,6 @@ module Gitlab
       ensure
         @ignore_writes = false
       end
-      alias_method :without_sticky_writes, :ignore_writes
       # Indicates that the read SQL statements from anywhere inside this
       # blocks should use a replica, regardless of the current primary

View File

@ -1,128 +0,0 @@
# frozen_string_literal: true

module Gitlab
  module Database
    module LoadBalancing
      class SessionMap
        CACHE_KEY = :gitlab_load_balancer_session_map

        InvalidLoadBalancerNameError = Class.new(StandardError)

        # lb - Gitlab::Database::LoadBalancing::LoadBalancer instance
        def self.current(lb)
          return cached_instance.lookup(lb) if use_session_map?

          Session.current
        end

        # models - Array<ActiveRecord::Base>
        def self.with_sessions(models)
          dbs = models.map { |m| m.load_balancer.name }.uniq
          dbs.each { |db| cached_instance.validate_db_name(db) }

          ScopedSessions.new(dbs, cached_instance.session_map)
        end

        def self.clear_session
          return RequestStore.delete(CACHE_KEY) if use_session_map?

          Session.clear_session
        end

        def self.without_sticky_writes(&)
          return with_sessions(Gitlab::Database::LoadBalancing.base_models).ignore_writes(&) if use_session_map?

          Session.without_sticky_writes(&)
        end

        def self.use_session_map?
          ::Feature.enabled?(:use_load_balancing_session_map, :current_request, type: :gitlab_com_derisk)
        rescue ActiveRecord::StatementInvalid,
          Gitlab::Database::QueryAnalyzers::Base::QueryAnalyzerError
          # If the feature_gates table is missing, we should default to a false.
          # In a migration scope, we also rescue and default to false.
          false
        end
        private_class_method :use_session_map?

        def self.cached_instance
          RequestStore[CACHE_KEY] ||= new
        end
        private_class_method :cached_instance

        attr_reader :session_map

        def initialize
          @session_map = Gitlab::Database.all_database_names.to_h do |k|
            [k.to_sym, Gitlab::Database::LoadBalancing::Session.new]
          end
          @session_map[:primary] = Gitlab::Database::LoadBalancing::Session.new
        end

        def lookup(lb)
          name = lb.name
          validate_db_name(name)

          session_map[name]
        end

        def validate_db_name(db)
          # Allow :primary only for rake task db migrations as ActiveRecord::Tasks::PostgresqlDatabaseTasks calls
          # .establish_connection using a hash which resets the name from :main/:ci to :primary.
          # See
          # https://github.com/rails/rails/blob/v7.0.8.4/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb#L97
          #
          # In the case of derailed test in memory-on-boot job, the runtime is unknown.
          return if db == :primary && (Gitlab::Runtime.rake? || Gitlab::Runtime.safe_identify.nil?)

          # Disallow :primary usage outside of rake or unknown runtimes as the db config should be
          # main/ci/embedding/ci/geo.
          return if db != :primary && session_map[db]

          raise InvalidLoadBalancerNameError, "Invalid load balancer name #{db} in #{Gitlab::Runtime.safe_identify}."
        end
      end

      class ScopedSessions
        attr_reader :scoped_sessions

        def initialize(scope, session_map)
          @scope = scope
          @scoped_sessions = session_map.slice(*@scope).values
        end

        def use_primary!
          scoped_sessions.each(&:use_primary!)
        end

        def ignore_writes(&)
          nest_sessions(scoped_sessions, :without_sticky_writes, &)
        end

        def use_primary(&)
          nest_sessions(scoped_sessions, :use_primary, &)
        end

        def use_replicas_for_read_queries(&)
          nest_sessions(scoped_sessions, :use_replicas_for_read_queries, &)
        end

        def fallback_to_replicas_for_ambiguous_queries(&)
          nest_sessions(scoped_sessions, :fallback_to_replicas_for_ambiguous_queries, &)
        end

        private

        def nest_sessions(sessions, method, &block)
          if sessions.empty?
            yield if block
          else
            session = sessions.shift
            session.public_send(method) do # rubocop: disable GitlabSecurity/PublicSend -- methods are verified
              nest_sessions(sessions, method, &block)
            end
          end
        end
      end
    end
  end
end
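
The removed ScopedSessions#nest_sessions composed one session context inside the next by recursion, so the caller's block ran once, inside all of them. A standalone illustration of that recursion (plain Ruby, not GitLab code):

# The names here are invented for the illustration; only the recursion shape
# mirrors the removed helper.
class LoggingContext
  def initialize(name)
    @name = name
  end

  def use_primary
    puts "enter #{@name}"
    yield
  ensure
    puts "leave #{@name}"
  end
end

def nest_contexts(contexts, method, &block)
  if contexts.empty?
    yield if block
  else
    context = contexts.shift
    context.public_send(method) { nest_contexts(contexts, method, &block) }
  end
end

nest_contexts([LoggingContext.new(:main), LoggingContext.new(:ci)], :use_primary) do
  puts 'work runs inside both contexts'
end
# Prints: enter main / enter ci / work runs inside both contexts / leave ci / leave main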

View File

@ -34,7 +34,19 @@ module Gitlab
       def set_data_consistency_locations!(job)
         job['wal_locations'] = wal_locations_by_db_name
-        job['wal_location_sources'] = wal_location_sources_by_db_name
+        job['wal_location_source'] = wal_location_source
+      end
+
+      def wal_location_source
+        if ::Gitlab::Database::LoadBalancing.primary_only? || uses_primary?
+          ::Gitlab::Database::LoadBalancing::ROLE_PRIMARY
+        else
+          ::Gitlab::Database::LoadBalancing::ROLE_REPLICA
+        end
+      end
+
+      def uses_primary?
+        ::Gitlab::Database::LoadBalancing::Session.current.use_primary?
       end
     end
   end
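
After this middleware runs, the enqueued Sidekiq job hash carries the per-database WAL locations plus a single source role rather than one source per database. The shape, with made-up worker name and LSN values for illustration only:

# Illustrative payload only; real locations come from the load balancer hosts,
# and the key style of the inner hash is not shown in this diff.
job = { 'class' => 'SomeWorker', 'args' => [1] }
job['wal_locations'] = { 'main' => '0/D525E3A8', 'ci' => '0/AB12CD34' }
job['wal_location_source'] = :primary # or :replica, per wal_location_source above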

View File

@ -22,9 +22,7 @@ module Gitlab
           job['load_balancing_strategy'] = strategy.to_s
           if use_primary?(strategy)
-            ::Gitlab::Database::LoadBalancing::SessionMap
-              .with_sessions(Gitlab::Database::LoadBalancing.base_models)
-              .use_primary!
+            ::Gitlab::Database::LoadBalancing::Session.current.use_primary!
           elsif strategy == :retry
             raise JobReplicaNotUpToDate, "Sidekiq job #{resolved_class} JID-#{job['jid']} couldn't use the replica. "\
               "Replica was not up to date."
@ -41,7 +39,7 @@ module Gitlab
         def clear
           ::Gitlab::Database::LoadBalancing.release_hosts
-          ::Gitlab::Database::LoadBalancing::SessionMap.clear_session
+          ::Gitlab::Database::LoadBalancing::Session.clear_session
         end
         def use_primary?(strategy)

View File

@ -10,8 +10,6 @@ module Gitlab
       # the primary.
       EXPIRATION = 30
-      attr_reader :load_balancer
       def initialize(load_balancer)
         @load_balancer = load_balancer
       end
@ -37,7 +35,7 @@ module Gitlab
           !use_primary_on_empty_location
         end
-        use_primary! if !result && use_primary_on_failure
+        ::Gitlab::Database::LoadBalancing::Session.current.use_primary! if !result && use_primary_on_failure
         result
       end
@ -48,7 +46,7 @@ module Gitlab
         with_primary_write_location do |location|
           set_write_location_for(namespace, id, location)
         end
-        use_primary!
+        ::Gitlab::Database::LoadBalancing::Session.current.use_primary!
       end
       def bulk_stick(namespace, ids)
@ -58,7 +56,7 @@ module Gitlab
           end
         end
-        use_primary!
+        ::Gitlab::Database::LoadBalancing::Session.current.use_primary!
       end
       private
@ -102,10 +100,6 @@ module Gitlab
       def with_redis(&block)
         Gitlab::Redis::DbLoadBalancing.with(&block)
       end
-      def use_primary!
-        ::Gitlab::Database::LoadBalancing::SessionMap.current(@load_balancer).use_primary!
-      end
     end
   end
 end
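
For readers unfamiliar with this class: sticking records the primary's write position under a (namespace, id) key and keeps routing that scope to the primary until a replica has replayed at least that position. A heavily simplified, framework-free sketch of that flow, with plain integers standing in for PostgreSQL LSNs and an in-memory hash standing in for the Redis store (the real class expires entries after EXPIRATION seconds):

# Sketch only; not GitLab's implementation.
class MiniSticking
  def initialize
    @write_locations = {}
  end

  def stick(namespace, id, primary_position)
    @write_locations[[namespace, id]] = primary_position
  end

  # Returns true when a replica has caught up and can serve the reads.
  def find_caught_up_replica(namespace, id, replica_position)
    needed = @write_locations[[namespace, id]]
    return true if needed.nil? # nothing recorded: replicas are fine

    caught_up = replica_position >= needed
    @write_locations.delete([namespace, id]) if caught_up
    caught_up
  end
end

sticking = MiniSticking.new
sticking.stick(:project, 7, 105)                   # primary write position
sticking.find_caught_up_replica(:project, 7, 100)  # => false, stay on primary
sticking.find_caught_up_replica(:project, 7, 110)  # => true, replica is usable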

View File

@ -18,31 +18,12 @@ module Gitlab
         # When only using the primary there's no need for any WAL queries.
         return if load_balancer.primary_only?
-        if SessionMap.current(load_balancer).use_primary?
+        if Session.current.use_primary?
           load_balancer.primary_write_location
         else
           load_balancer.host&.database_replica_location || load_balancer.primary_write_location
         end
       end
-
-      def wal_location_sources_by_db_name
-        {}.tap do |locations|
-          ::Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
-            if (location = wal_location_source(lb))
-              locations[lb.name] = location
-            end
-          end
-        end
-      end
-
-      def wal_location_source(lb)
-        if ::Gitlab::Database::LoadBalancing.primary?(lb.name) ||
-            ::Gitlab::Database::LoadBalancing::SessionMap.current(lb).use_primary?
-          ::Gitlab::Database::LoadBalancing::ROLE_PRIMARY
-        else
-          ::Gitlab::Database::LoadBalancing::ROLE_REPLICA
-        end
-      end
     end
   end
 end

View File

@ -196,7 +196,7 @@ module Gitlab
       end
       def primary_transaction(statement_timeout: nil)
-        Gitlab::Database::LoadBalancing::SessionMap.current(connection.load_balancer).use_primary do
+        Gitlab::Database::LoadBalancing::Session.current.use_primary do
           connection.transaction(requires_new: false) do
             if statement_timeout.present?
               connection.execute(

View File

@ -9,9 +9,7 @@ module Gitlab
     scope :wraparound_prevention, -> { where(wraparound_prevention: true) }
     def self.for_tables(tables)
-      Gitlab::Database::LoadBalancing::SessionMap
-        .current(connection.load_balancer)
-        .use_primary do
+      Gitlab::Database::LoadBalancing::Session.current.use_primary do
         # calling `.to_a` here to execute the query in the primary's scope
         # and to avoid having the scope chained and re-executed
         #

View File

@ -56,6 +56,7 @@ module Gitlab
           actor = ::Group.actor_from_id(event.data[:namespace_id])
           Feature.enabled?(:track_member_activity, actor)
         end
+        store.subscribe ::Pages::DeletePagesDeploymentWorker, to: ::Projects::ProjectArchivedEvent
       end
       private_class_method :configure!
     end
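
With this subscription, Pages::DeletePagesDeploymentWorker runs whenever a Projects::ProjectArchivedEvent is published. The publishing side is not part of this diff, so the payload keys below are assumptions for illustration only:

# Illustrative only: how such an event is typically published elsewhere in the
# codebase (the actual data schema of ProjectArchivedEvent is not shown here).
Gitlab::EventStore.publish(
  ::Projects::ProjectArchivedEvent.new(data: {
    project_id: project.id,
    namespace_id: project.namespace_id,
    root_namespace_id: project.root_ancestor.id
  })
)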

Some files were not shown because too many files have changed in this diff.