Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2025-05-21 21:12:42 +00:00
parent d6a50412ea
commit 31de60f0ac
64 changed files with 132 additions and 671 deletions

View File

@ -17,6 +17,8 @@ class Projects::IssuesController < Projects::ApplicationController
prepend_before_action :store_uri, only: [:new, :show, :designs]
before_action :disable_query_limiting, only: [:create_merge_request, :move, :bulk_update]
before_action :disable_show_query_limit!, only: :show
before_action :check_issues_available!
before_action :issue, unless: ->(c) { ISSUES_EXCEPT_ACTIONS.include?(c.action_name.to_sym) }
before_action :require_incident_for_incident_routes, only: :show
@ -406,6 +408,10 @@ class Projects::IssuesController < Projects::ApplicationController
private
def disable_show_query_limit!
Gitlab::QueryLimiting.disable!('https://gitlab.com/gitlab-org/gitlab/-/issues/544875', new_threshold: 120)
end
def show_work_item?
# Service Desk issues and incidents should not use the work item view
!issue.from_service_desk? &&

View File

@ -1195,11 +1195,6 @@ module Ci
end
strong_memoize_attr :source
# Can be removed in Rails 7.1. Related to: Gitlab.next_rails?
def to_partial_path
'jobs/job'
end
def token
return encoded_jwt if user&.has_composite_identity? || use_jwt_for_ci_cd_job_token?

View File

@ -172,11 +172,9 @@ module BulkInsertSafe
returns
end
composite_primary_key = ::Gitlab.next_rails? && composite_primary_key?
# Handle insertions for tables with a composite primary key
primary_keys = connection.schema_cache.primary_keys(table_name)
unique_by = primary_keys if unique_by.blank? && (composite_primary_key || primary_key != primary_keys)
unique_by = primary_keys if unique_by.blank? && (composite_primary_key? || primary_key != primary_keys)
transaction do
items.each_slice(batch_size).flat_map do |item_batch|

View File

@ -30,9 +30,7 @@ module PartitionedTable
#
# https://github.com/rails/rails/blob/v7.2.0.rc1/activerecord/lib/active_record/model_schema.rb#L444
def _returning_columns_for_insert
auto_populated_columns = []
auto_populated_columns = super if Gitlab.next_rails?
(auto_populated_columns + Array(primary_key)).uniq
(super + Array(primary_key)).uniq
end
end
end

View File

@ -54,14 +54,7 @@ module Ci
deleted = []
objects.each do |object|
if object.delete_file_from_storage
deleted << object
::Gitlab::Metrics::CiDeletedObjectProcessingSlis.record_error(error: false)
::Gitlab::Metrics::CiDeletedObjectProcessingSlis.record_apdex(
success: object.created_at > ACCEPTABLE_DELAY.ago)
else
::Gitlab::Metrics::CiDeletedObjectProcessingSlis.record_error(error: true)
end
deleted << object if delete_object(object)
end
Ci::DeletedObject.id_in(deleted.map(&:id)).delete_all
@ -72,5 +65,16 @@ module Ci
def transaction_open?
Ci::DeletedObject.connection.transaction_open?
end
def delete_object(object)
if object.delete_file_from_storage
::Gitlab::Metrics::CiDeletedObjectProcessingSlis.record_error(error: false)
::Gitlab::Metrics::CiDeletedObjectProcessingSlis.record_apdex(success: object.created_at > ACCEPTABLE_DELAY.ago)
true
else
::Gitlab::Metrics::CiDeletedObjectProcessingSlis.record_error(error: true)
false
end
end
end
end

View File

@ -86,16 +86,9 @@ module Gitlab
require_dependency Rails.root.join('lib/gitlab/runtime')
require_dependency Rails.root.join('lib/gitlab/patch/database_config')
require_dependency Rails.root.join('lib/gitlab/patch/redis_cache_store')
require_dependency Rails.root.join('lib/gitlab/patch/old_redis_cache_store')
require_dependency Rails.root.join('lib/gitlab/pdf')
require_dependency Rails.root.join('lib/gitlab/exceptions_app')
unless ::Gitlab.next_rails?
config.active_support.disable_to_s_conversion = false # New default is true
config.active_support.use_rfc4122_namespaced_uuids = true
ActiveSupport.to_time_preserves_timezone = false
end
config.exceptions_app = Gitlab::ExceptionsApp.new(Gitlab.jh? ? Rails.root.join('jh/public') : Rails.public_path)
# This preload is required to:
@ -126,12 +119,7 @@ module Gitlab
config.generators.templates.push("#{config.root}/generator_templates")
foss_eager_load_paths =
if Gitlab.next_rails?
config.all_eager_load_paths.dup.freeze
else
config.eager_load_paths.dup.freeze
end
foss_eager_load_paths = config.all_eager_load_paths.dup.freeze
load_paths = ->(dir:) do
ext_paths = foss_eager_load_paths.each_with_object([]) do |path, memo|
@ -541,11 +529,7 @@ module Gitlab
end
# Use caching across all environments
if ::Gitlab.next_rails?
ActiveSupport::Cache::RedisCacheStore.prepend(Gitlab::Patch::RedisCacheStore)
else
ActiveSupport::Cache::RedisCacheStore.prepend(Gitlab::Patch::OldRedisCacheStore)
end
ActiveSupport::Cache::RedisCacheStore.prepend(Gitlab::Patch::RedisCacheStore)
config.cache_store = :redis_cache_store, Gitlab::Redis::Cache.active_support_config

View File

@ -71,11 +71,7 @@ Rails.application.configure do
# Don't make a mess when bootstrapping a development environment
config.action_mailer.perform_deliveries = (ENV['BOOTSTRAP'] != '1')
if ::Gitlab.next_rails?
config.action_mailer.preview_paths = [GitlabEdition.path_glob('app/mailers/previews')]
else
config.action_mailer.preview_path = GitlabEdition.path_glob('app/mailers/previews')
end
config.action_mailer.preview_paths = [GitlabEdition.path_glob('app/mailers/previews')]
config.eager_load = false

View File

@ -53,11 +53,7 @@ Rails.application.configure do
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
if ::Gitlab.next_rails?
config.action_mailer.preview_paths = [GitlabEdition.path_glob('app/mailers/previews')]
else
config.action_mailer.preview_path = GitlabEdition.path_glob('app/mailers/previews')
end
config.action_mailer.preview_paths = [GitlabEdition.path_glob('app/mailers/previews')]
config.eager_load = Gitlab::Utils.to_boolean(ENV['GITLAB_TEST_EAGER_LOAD'], default: ENV['CI'].present?)

View File

@ -7,12 +7,7 @@
# Can be removed with Rails 7.0.
Warning.ignore(/PG::Coder.new\(hash\) is deprecated/)
deprecators =
if ::Gitlab.next_rails?
Rails.application.deprecators
else
ActiveSupport::Deprecation
end
deprecators = Rails.application.deprecators
silenced = Rails.env.production? && !Gitlab::Utils.to_boolean(ENV['GITLAB_LOG_DEPRECATIONS'])
deprecators.silenced = silenced
@ -42,16 +37,12 @@ else
view_component_3_warnings = []
deprecators.disallowed_warnings = rails7_deprecation_warnings + view_component_3_warnings
if ::Gitlab.next_rails?
deprecators.behavior = ->(message, callstack, deprecator) do
if ignored_warnings.none? { |warning| warning.match?(message) }
ActiveSupport::Deprecation::DEFAULT_BEHAVIORS.slice(:stderr, :notify).each_value do |behavior|
behavior.call(message, callstack, deprecator)
end
deprecators.behavior = ->(message, callstack, deprecator) do
if ignored_warnings.none? { |warning| warning.match?(message) }
ActiveSupport::Deprecation::DEFAULT_BEHAVIORS.slice(:stderr, :notify).each_value do |behavior|
behavior.call(message, callstack, deprecator)
end
end
else
deprecators.behavior = [:stderr, :notify]
end
end
@ -71,7 +62,7 @@ unless silenced
# Log deprecation warnings emitted from Rails (see ActiveSupport::Deprecation).
ActiveSupport::Notifications.subscribe('deprecation.rails') do |_name, _start, _finish, _id, payload|
if !::Gitlab.next_rails? || ignored_warnings.none? { |warning| warning.match?(payload[:message]) }
if ignored_warnings.none? { |warning| warning.match?(payload[:message]) }
Gitlab::DeprecationJsonLogger.info(message: payload[:message].strip, source: 'rails')
end
end

View File

@ -49,11 +49,7 @@ module ActionDispatch
# - https://github.com/rails/rails/blob/v7.0.8.4/actionpack/lib/action_dispatch/journey/router.rb#L130
#
# After the upgrade, this method can be more like the v7.1.3.4 version
if Gitlab.next_rails?
yield [match_data, path_parameters, r]
else
[match_data, path_parameters, r]
end
yield [match_data, path_parameters, r]
end.compact!
routes

View File

@ -1,18 +1,16 @@
# frozen_string_literal: true
if ::Gitlab.next_rails?
# Override the method to use Set instead of Array:
#
# https://github.com/rails/rails/blob/v7.1.5.1/activemodel/lib/active_model/attribute_methods.rb#L398
#
# Otherwise, the background migrations are very slow.
# Explanation: https://gitlab.com/gitlab-org/gitlab/-/issues/495067#note_2260634049
module ActiveModel
module AttributeMethods
module ClassMethods
def aliases_by_attribute_name
@aliases_by_attribute_name ||= Hash.new { |h, k| h[k] = Set.new }
end
# Override the method to use Set instead of Array:
#
# https://github.com/rails/rails/blob/v7.1.5.1/activemodel/lib/active_model/attribute_methods.rb#L398
#
# Otherwise, the background migrations are very slow.
# Explanation: https://gitlab.com/gitlab-org/gitlab/-/issues/495067#note_2260634049
module ActiveModel
module AttributeMethods
module ClassMethods
def aliases_by_attribute_name
@aliases_by_attribute_name ||= Hash.new { |h, k| h[k] = Set.new }
end
end
end

View File

@ -1,5 +0,0 @@
# frozen_string_literal: true
unless Gitlab.next_rails?
ActiveRecord::ConnectionAdapters::ConnectionPool.prepend(Gitlab::Patch::ActiveRecordConnectionPool)
end

View File

@ -1,11 +0,0 @@
# frozen_string_literal: true
module ActiveRecord
class Relation
def null_relation?
return super if ::Gitlab.next_rails?
is_a?(ActiveRecord::NullRelation)
end
end
end

View File

@ -9,16 +9,6 @@ if Gitlab::Runtime.console?
super
IRB.conf[:SAVE_HISTORY] = false
init_autocomplete
end
def init_autocomplete
return if ::Gitlab.next_rails?
return unless Rails.env.production?
# IRB_USE_AUTOCOMPLETE was added in https://github.com/ruby/irb/pull/469
IRB.conf[:USE_AUTOCOMPLETE] = ENV.fetch("IRB_USE_AUTOCOMPLETE", "false") == "true"
end
end

View File

@ -54,6 +54,24 @@ To set up DNS records for a custom hostname with GitLab Dedicated:
1. Save your changes and wait for the DNS changes to propagate.
### DNS requirements for Let's Encrypt certificates
When using custom hostnames with GitLab Dedicated, your domain must be publicly resolvable
through DNS, even if you plan to access your instance through private networks only.
This public DNS requirement exists because:
- Let's Encrypt uses the HTTP-01 challenge, which requires public internet access to verify
domain ownership.
- The validation process must reach your custom hostname from the public internet through
the CNAME record that points to your GitLab Dedicated tenant.
- Certificate renewal happens automatically every 90 days and uses the same public
validation process as the initial issuance.
For instances configured with private networking (such as AWS PrivateLink), maintaining public
DNS resolution ensures certificate renewal works properly, even when all other access is
restricted to private networks.
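
For example, you can confirm up front that the hostname resolves publicly by querying its CNAME record. The following is a minimal sketch using Ruby's standard `resolv` library; the hostname is a placeholder, and the lookup uses whichever resolvers the machine you run it from is configured with:

```ruby
# Sketch: check that a placeholder custom hostname has a publicly visible CNAME,
# which Let's Encrypt relies on for HTTP-01 validation and automatic renewal.
require 'resolv'

hostname = 'gitlab.example.com' # placeholder custom hostname

Resolv::DNS.open do |dns|
  cnames = dns.getresources(hostname, Resolv::DNS::Resource::IN::CNAME)

  if cnames.empty?
    puts "No CNAME record found for #{hostname}; certificate issuance and renewal may fail."
  else
    cnames.each { |record| puts "#{hostname} CNAME #{record.name}" }
  end
end
```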
### Add your custom hostname
To add a custom hostname to your existing GitLab Dedicated instance, submit a [support ticket](https://support.gitlab.com/hc/en-us/requests/new?ticket_form_id=4414917877650).

View File

@ -649,6 +649,7 @@ four standard [pagination arguments](#pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="queryduoworkflowworkflowsprojectpath"></a>`projectPath` | [`ID`](#id) | Full path of the project containing the workflows. |
| <a id="queryduoworkflowworkflowstype"></a>`type` | [`String`](#string) | Type of workflow to filter by (e.g., software_development). |
### `Query.echo`

View File

@ -136,7 +136,7 @@ The above methods make use of the [BulkAssignService](https://gitlab.com/gitlab-
### Setting up Duo on your **staging** GitLab.com account
Please refer to [Instructions for setting up Duo add-ons on your **staging** GitLab.com account](ai_development_license.md).
For more information, see [setting up Duo on your GitLab.com staging account](ai_development_license.md#setting-up-duo-on-your-gitlabcom-staging-account).
### Video demonstrations of installing and using Code Suggestions in IDEs

View File

@ -51,5 +51,5 @@ log_conditional_info(user, message:"User prompt processed", event_name: 'ai_even
log_error(user, message: "System application error", event_name: 'ai_event', ai_component: 'abstraction_layer', error_message: sanitized_error_message)
```
**Important**: Please familiarize yourself with our [Data Retention Policy](../../user/gitlab_duo/data_usage.md#data-retention) and remember
**Important**: Familiarize yourself with our [Data Retention Policy](../../user/gitlab_duo/data_usage.md#data-retention) and remember
to make sure we are not logging user input and LLM-generated output.

View File

@ -425,7 +425,7 @@ Priority: [Priority level]
## Implementation Details
Please follow the issues below with the associated rollout plans:
Follow the issues below with the associated rollout plans:
| Feature | DRI | ETA | Issue Link |
|---------|-----|-----|------------|

View File

@ -126,7 +126,7 @@ Additionally, these package repositories are disabled in FIPS mode:
### Development guidelines
Please refer to the information above and the GitLab [Cryptography Standard](https://handbook.gitlab.com/handbook/security/cryptographic-standard/). Reach out
Refer to the information above and to the [GitLab Cryptography Standard](https://handbook.gitlab.com/handbook/security/cryptographic-standard/). Reach out
to `#sec-assurance` with questions or open an MR if something needs to be clarified.
Here are some guidelines for developing GitLab FIPS-approved software:

View File

@ -60,4 +60,4 @@ The [CommonMark forum](https://talk.commonmark.org) is a good place to research
- [How to render GitLab-flavored Markdown on the frontend?](../fe_guide/frontend_faq.md#10-how-to-render-gitlab-flavored-markdown)
- [Diagrams.net integration](../fe_guide/diagrams_net_integration.md)
Please contact the [Plan:Knowledge team](https://handbook.gitlab.com/handbook/engineering/development/dev/plan/knowledge/) if you have any questions.
Contact the [Plan:Knowledge team](https://handbook.gitlab.com/handbook/engineering/development/dev/plan/knowledge/) if you have any questions.

View File

@ -80,7 +80,7 @@ We use our [`gitlab-glfm-markdown`](https://gitlab.com/gitlab-org/ruby/gems/gitl
`comrak` provides 100% compatibility with GFM and CommonMark while allowing additional extensions to be added to it. For example, we were able to implement our multi-line blockquote and wikilink syntax directly in `comrak`. The goal is to move more of the Ruby filters into either `comrak` (if it makes sense) or into `gitlab-glfm-markdown`.
Please see [glfm_markdown.rb](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/banzai/filter/markdown_engines/glfm_markdown.rb#L12-L34) for the various options that get passed into `comrak`.
For more information about the various options that get passed into `comrak`, see [glfm_markdown.rb](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/banzai/filter/markdown_engines/glfm_markdown.rb#L12-L34).
## Debugging

View File

@ -126,7 +126,7 @@ track_internal_event(
)
```
Please add custom properties only in addition to the built-in properties. Additional properties can only have string or numeric values.
Add custom properties only in addition to the built-in properties. Additional properties can only have string or numeric values.
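
For instance, a hedged sketch of an event call that adds a custom property alongside the built-in ones might look like this; the event name and keys are placeholders, and `label` and `value` are assumed to be among the built-in keys:

```ruby
# Sketch: custom properties are passed in addition to the built-in keys
# and must hold string or numeric values.
track_internal_event(
  'placeholder_event_name',
  user: user,
  project: project,
  additional_properties: {
    label: 'ruby',            # assumed built-in property (string)
    value: 120,               # assumed built-in property (numeric)
    suggestion_source: 'ide'  # custom property (string)
  }
)
```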
{{< alert type="warning" >}}

View File

@ -34,7 +34,7 @@ To authenticate using the JWT, clients:
### Admin personal access token (PAT)
This authentication method is deprecated as it is not supported in the Cells architecture. It will be
[removed in a future milestone](https://gitlab.com/gitlab-org/gitlab/-/issues/473625). Please use JWT authentication instead.
[removed in a future milestone](https://gitlab.com/gitlab-org/gitlab/-/issues/473625). Use JWT authentication instead.
To authenticate as an administrator, generate a personal access token for an administrator with the
`api` and `admin_mode` scopes. This token can then be supplied in the `PRIVATE-TOKEN` header.
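
A minimal sketch of supplying such a token with Ruby's `net/http` follows; the endpoint URL is a placeholder:

```ruby
# Sketch: authenticate a request as an administrator with a PAT that has
# the api and admin_mode scopes, sent in the PRIVATE-TOKEN header.
require 'net/http'
require 'uri'

uri = URI.parse('https://gitlab.example.com/api/v4/placeholder_endpoint') # placeholder URL

request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_ADMIN_PAT') # admin personal access token

response = Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
  http.request(request)
end

puts response.code
```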

View File

@ -31,7 +31,7 @@ fostering trust and consistency in the GitLab ecosystem.
- Strongly consider using an external secret store like OpenBao or Vault
- At a minimum, store tokens [securely](../ci/pipelines/pipeline_security.md#cicd-variables) in environment variables
in GitLab CI/CD pipelines, ensuring that masking and protection is enabled.
- Do not store tokens on your local machine in unsecured locations. Please store tokens in 1Password and
- Do not store tokens on your local machine in unsecured locations. Instead, store tokens in 1Password and
refrain from storing these secrets in unencrypted files like shell profiles, `.npmrc`, and `.env`.
1. Add `gitlab-bot` as author of the package. This ensures the organization retains ownership if a team member's email becomes invalid during offboarding.

View File

@ -725,7 +725,7 @@ Exceptions to this general guideline should be motivated and documented.
We're running Ruby 3.2 on GitLab.com, as well as for the default branch.
To prepare for the next Ruby version, we run merge requests in Ruby 3.3.
Please see the roadmap at
See the roadmap at
[Ruby 3.3 epic](https://gitlab.com/groups/gitlab-org/-/epics/12350)
for more details.

View File

@ -49,7 +49,7 @@ To configure deployment of the PyPI package:
### Runway deployment for .com
Services for GitLab.com, GitLab Dedicated and self-hosted customers using CloudConnect are deployed using [Runway](https://docs.runway.gitlab.com/welcome/onboarding/).
Please refer to the project documentation on how to add or manage Runway services.
Refer to the project documentation on how to add or manage Runway services.
### Deploying in self-hosted environments

View File

@ -485,12 +485,12 @@ This is necessary to provide compatibility with Red Hat OpenShift instances,
which don't allow containers to run as an admin (root) user.
There are certain limitations to keep in mind when running a container as an unprivileged user,
such as the fact that any files that need to be written on the Docker filesystem will require the appropriate permissions for the `GitLab` user.
Please see the following merge request for more details:
See the following merge request for more details:
[Use GitLab user instead of root in Docker image](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/merge_requests/130).
#### Minimal vulnerability data
Please see [our security-report-schemas](https://gitlab.com/gitlab-org/security-products/security-report-schemas/-/blob/master/src/security-report-format.json) for a full list of required fields.
See [our security-report-schemas](https://gitlab.com/gitlab-org/security-products/security-report-schemas/-/blob/master/src/security-report-format.json) for a full list of required fields.
The [security-report-schema](https://gitlab.com/gitlab-org/security-products/security-report-schemas) repository contains JSON schemas that list the required fields for each report type:

View File

@ -109,7 +109,7 @@ Looking at an autogenerated file, for example,
```jsonnet
// This file is autogenerated using scripts/update_stage_groups_dashboards.rb
// Please feel free to customize this file.
// Feel free to customize this file.
local stageGroupDashboards = import './stage-group-dashboards.libsonnet';
stageGroupDashboards.dashboard('product_planning')

View File

@ -152,7 +152,7 @@ This stage is responsible for [allure test report](_index.md#allure-report) gene
## Test Licenses
Please see the [Test Licenses runbook](https://gitlab-org.gitlab.io/quality/runbooks/test_licenses/) for more information on the licenses used by these pipelines.
For more information on the licenses these pipelines use, see [test licenses](https://gitlab-org.gitlab.io/quality/runbooks/test_licenses/).
## Adding new jobs to E2E test pipelines

View File

@ -202,4 +202,4 @@ You can also consider implementing abuse rate limiting as detailed in [Git abuse
GitLab SIRT maintains an active repository of detections in the [GitLab SIRT public project](https://gitlab.com/gitlab-security-oss/guard/-/tree/main/detections).
The detections in this repository are based on the audit events and in the general Sigma rule format. You can use sigma rule converter to get the rules in your desired format. Please refer to the repository for more information about Sigma format and tools related to it. Make sure you have GitLab audit logs ingested to your SIEM. You should follow the audit event streaming guide [for your self-managed instance](../administration/audit_event_streaming/_index.md) or [GitLab.com top-level group](../user/compliance/audit_event_streaming.md) to stream audit events to your desired destination.
The detections in this repository are based on the audit events and written in the general Sigma rule format. You can use a Sigma rule converter to get the rules in your desired format. Visit the repository for more information about the Sigma format and tools related to it. Make sure you have GitLab audit logs ingested into your SIEM. You should follow the audit event streaming guide [for your self-managed instance](../administration/audit_event_streaming/_index.md) or [GitLab.com top-level group](../user/compliance/audit_event_streaming.md) to stream audit events to your desired destination.

View File

@ -314,7 +314,7 @@ module Gitlab
def self.empty_config?(db_config)
return true unless db_config
::Gitlab.next_rails? && db_config.is_a?(ActiveRecord::ConnectionAdapters::NullPool::NullConfig)
db_config.is_a?(ActiveRecord::ConnectionAdapters::NullPool::NullConfig)
end
# At the moment, the connection can only be retrieved by
@ -390,15 +390,7 @@ module Gitlab
::Gitlab::Database::Metrics.subtransactions_increment(self.name) if transaction_type == :sub_transaction
if ::Gitlab.next_rails?
super(**options, &block)
else
payload = { connection: connection, transaction_type: transaction_type }
ActiveSupport::Notifications.instrument('transaction.active_record', payload) do
super(**options, &block)
end
end
super(**options, &block)
end
private

View File

@ -111,9 +111,7 @@ module Gitlab
end
def pool_disconnect!
return pool.disconnect! if ::Gitlab.next_rails?
pool.disconnect_without_verify!
pool.disconnect!
end
def offline!

View File

@ -29,23 +29,13 @@ module Gitlab
private
def record_event(_name, started, finished, _unique_id, payload)
if ::Gitlab.next_rails?
stack_count = payload[:connection].open_transactions
stack_count = payload[:connection].open_transactions
@writer.push_value({
start_time: started.iso8601(6),
end_time: finished.iso8601(6),
transaction_type: stack_count == 0 ? :real_transaction : :sub_transaction
})
else
return if payload[:transaction_type] == :fake_transaction
@writer.push_value({
start_time: started.iso8601(6),
end_time: finished.iso8601(6),
transaction_type: payload[:transaction_type]
})
end
@writer.push_value({
start_time: started.iso8601(6),
end_time: finished.iso8601(6),
transaction_type: stack_count == 0 ? :real_transaction : :sub_transaction
})
end
end
end

View File

@ -17,27 +17,8 @@ module Gitlab
end
end
module OldMigratorPgBackendPid
extend ::Gitlab::Utils::Override
override :with_advisory_lock_connection
def with_advisory_lock_connection
super do |conn|
Gitlab::Database::Migrations::PgBackendPid.say(conn)
yield(conn)
ensure
Gitlab::Database::Migrations::PgBackendPid.say(conn)
end
end
end
def self.patch!
if ::Gitlab.next_rails?
ActiveRecord::Migrator.prepend(MigratorPgBackendPid)
else
ActiveRecord::Migrator.prepend(OldMigratorPgBackendPid)
end
ActiveRecord::Migrator.prepend(MigratorPgBackendPid)
end
def self.say(conn)

View File

@ -17,13 +17,7 @@ module Gitlab
end
def versions_to_create
versions_from_database =
if ::Gitlab.next_rails?
@connection.schema_migration.versions
else
@connection.schema_migration.all_versions
end
versions_from_database = @connection.schema_migration.versions
versions_from_migration_files = @connection.migration_context.migrations.map { |m| m.version.to_s }
versions_from_database & versions_from_migration_files

View File

@ -24,15 +24,9 @@ module Gitlab
def valid_authenticity_token?(session, masked_authenticity_token)
# rubocop:disable GitlabSecurity/PublicSend
if ::Gitlab.next_rails?
controller = ActionController::Base.new
controller.set_request!(ActionDispatch::Request.new('rack.session' => session))
controller.send(:valid_authenticity_token?, nil, masked_authenticity_token)
else
ActionController::Base.new.send(
:valid_authenticity_token?, session, masked_authenticity_token
)
end
controller = ActionController::Base.new
controller.set_request!(ActionDispatch::Request.new('rack.session' => session))
controller.send(:valid_authenticity_token?, nil, masked_authenticity_token)
# rubocop:enable GitlabSecurity/PublicSend
end

View File

@ -1,167 +0,0 @@
# frozen_string_literal: true
# In Rails 7.0, whenever `ConnectionPool#disconnect!` is called, each
# connection in the `@available` queue is acquired by the thread and
# verified with a SQL `;` query. If the verification fails, then Rails
# will attempt a reconnect for all those connections in the pool. This
# reconnection can cause unnecessary database connection saturation and
# result in a flood of SET statements on a PostgreSQL host.
#
# Rails 7.1 has fixed this in https://github.com/rails/rails/pull/44576, but
# until we upgrade this patch disables this verification step.
module Gitlab
module Patch
# rubocop:disable Cop/AvoidReturnFromBlocks -- This patches an upstream class
# rubocop:disable Cop/LineBreakAfterGuardClauses -- This patches an upstream class
# rubocop:disable Cop/LineBreakAroundConditionalBlock -- This patches an upstream class
# rubocop:disable Gitlab/ModuleWithInstanceVariables -- This patches an upstream class
# rubocop:disable Layout/EmptyLineAfterGuardClause -- This patches an upstream class
# rubocop:disable Lint/RescueException -- This patches an upstream class
# rubocop:disable Style/IfUnlessModifier -- This patches an upstream class
module ActiveRecordConnectionPool
# Many of these methods were copied directly from
# https://github.com/rails/rails/blob/v7.0.8.4/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb.
# Public methods have the `_without_verify` suffix appended, and
# private methods have the `_no_verify` suffix appended.
# Disconnects all connections in the pool, and clears the pool.
#
# Raises:
# - ActiveRecord::ExclusiveConnectionTimeoutError if unable to gain ownership of all
# connections in the pool within a timeout interval (default duration is
# <tt>spec.db_config.checkout_timeout * 2</tt> seconds).
def disconnect_without_verify(raise_on_acquisition_timeout = true)
with_exclusively_acquired_all_connections_no_verify(raise_on_acquisition_timeout) do
synchronize do
@connections.each do |conn|
if conn.in_use?
conn.steal!
checkin conn
end
conn.disconnect!
end
@connections = []
@available.clear
end
end
end
def disconnect_without_verify!
disconnect_without_verify(false)
end
private
# Take control of all existing connections so a "group" action such as
# reload/disconnect can be performed safely. It is no longer enough to
# wrap it in +synchronize+ because some pool's actions are allowed
# to be performed outside of the main +synchronize+ block.
def with_exclusively_acquired_all_connections_no_verify(raise_on_acquisition_timeout = true)
with_new_connections_blocked do
attempt_to_checkout_all_existing_connections_no_verify(raise_on_acquisition_timeout)
yield
end
end
def attempt_to_checkout_all_existing_connections_no_verify(raise_on_acquisition_timeout = true)
collected_conns = synchronize do
# account for our own connections
@connections.select { |conn| conn.owner == Thread.current }
end
newly_checked_out = []
timeout_time = Process.clock_gettime(Process::CLOCK_MONOTONIC) + (@checkout_timeout * 2)
@available.with_a_bias_for(Thread.current) do
loop do
synchronize do
return if collected_conns.size == @connections.size && @now_connecting == 0
remaining_timeout = timeout_time - Process.clock_gettime(Process::CLOCK_MONOTONIC)
remaining_timeout = 0 if remaining_timeout < 0
conn = checkout_for_exclusive_access_no_verify(remaining_timeout)
collected_conns << conn
newly_checked_out << conn
end
end
end
rescue ActiveRecord::ExclusiveConnectionTimeoutError
# <tt>raise_on_acquisition_timeout == false</tt> means we are directed to ignore any
# timeouts and are expected to just give up: we've obtained as many connections
# as possible, note that in a case like that we don't return any of the
# +newly_checked_out+ connections.
if raise_on_acquisition_timeout
release_newly_checked_out = true
raise
end
rescue Exception # if something else went wrong
# this can't be a "naked" rescue, because we have should return conns
# even for non-StandardErrors
release_newly_checked_out = true
raise
ensure
if release_newly_checked_out && newly_checked_out
# releasing only those conns that were checked out in this method, conns
# checked outside this method (before it was called) are not for us to release
newly_checked_out.each { |conn| checkin(conn) }
end
end
#--
# Must be called in a synchronize block.
def checkout_for_exclusive_access_no_verify(checkout_timeout)
checkout_no_verify(checkout_timeout)
rescue ActiveRecord::ConnectionTimeoutError
# this block can't be easily moved into attempt_to_checkout_all_existing_connections's
# rescue block, because doing so would put it outside of synchronize section, without
# being in a critical section thread_report might become inaccurate
msg = "could not obtain ownership of all database connections in #{checkout_timeout} seconds"
thread_report = []
@connections.each do |conn|
unless conn.owner == Thread.current
thread_report << "#{conn} is owned by #{conn.owner}"
end
end
msg << " (#{thread_report.join(', ')})" if thread_report.any?
raise ActiveRecord::ExclusiveConnectionTimeoutError, msg
end
# Check-out a database connection from the pool, indicating that you want
# to use it. You should call #checkin when you no longer need this.
#
# This is done by either returning and leasing existing connection, or by
# creating a new connection and leasing it.
#
# If all connections are leased and the pool is at capacity (meaning the
# number of currently leased connections is greater than or equal to the
# size limit set), an ActiveRecord::ConnectionTimeoutError exception will be raised.
#
# Returns: an AbstractAdapter object.
#
# Raises:
# - ActiveRecord::ConnectionTimeoutError no connection can be obtained from the pool.
def checkout_no_verify(checkout_timeout = @checkout_timeout)
checkout_with_no_verify(acquire_connection(checkout_timeout))
end
def checkout_with_no_verify(c) # rubocop:disable Naming/MethodParameterName -- This is an upstream method
c._run_checkout_callbacks {} # rubocop:disable Lint/EmptyBlock -- Added to be safe to preserve previous behavior
c
rescue # rubocop:disable Style/RescueStandardError -- This is in the upstream code
remove c
c.disconnect!
raise
end
end
# rubocop:enable Cop/AvoidReturnFromBlocks
# rubocop:enable Cop/LineBreakAfterGuardClauses
# rubocop:enable Cop/LineBreakAroundConditionalBlock
# rubocop:enable Gitlab/ModuleWithInstanceVariables
# rubocop:enable Layout/EmptyLineAfterGuardClause
# rubocop:enable Lint/RescueException -- This patches an upstream class
# rubocop:enable Style/IfUnlessModifier
end
end

View File

@ -1,44 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Patch
module OldRedisCacheStore
# We will try keep patched code explicit and matching the original signature in
# https://github.com/rails/rails/blob/v6.1.7.2/activesupport/lib/active_support/cache/redis_cache_store.rb#L361
def read_multi_mget(*names)
return super unless enable_rails_cache_pipeline_patch?
return super unless use_patched_mget?
::Gitlab::Redis::ClusterUtil.batch_entries(names) do |batched_names|
super(*batched_names)
end.reduce(&:merge)
end
# `delete_multi_entries` in Rails runs a multi-key `del` command
# patch will run pipelined single-key `del` for Redis Cluster compatibility
def delete_multi_entries(entries, **options)
return super unless enable_rails_cache_pipeline_patch?
::Gitlab::Redis::ClusterUtil.batch_entries(entries) do |batched_names|
super(batched_names)
end.sum
end
private
def enable_rails_cache_pipeline_patch?
redis.with { |c| ::Gitlab::Redis::ClusterUtil.cluster?(c) }
end
# MultiStore reads ONLY from the default store (no fallback), hence we can use `mget`
# if the default store is not a Redis::Cluster. We should do that as pipelining gets on a single redis is slow
def use_patched_mget?
redis.with do |conn|
next true unless conn.is_a?(Gitlab::Redis::MultiStore)
::Gitlab::Redis::ClusterUtil.cluster?(conn.default_store)
end
end
end
end
end

View File

@ -292,11 +292,7 @@ new_sha: Gitlab::Git::SHA1_BLANK_SHA }
Logger.new($stdout).tap do |stdout_logger|
stdout_logger.level = debug? ? Logger::DEBUG : Logger::INFO
if ::Gitlab.next_rails?
ActiveSupport::BroadcastLogger.new(stdout_logger, Rails.logger, Rails.logger)
else
stdout_logger.extend(ActiveSupport::Logger.broadcast(Rails.logger))
end
ActiveSupport::BroadcastLogger.new(stdout_logger, Rails.logger, Rails.logger)
end
else
Rails.logger

View File

@ -129,19 +129,8 @@ namespace :gitlab do
def insert_db_identifier(db_config)
ActiveRecord::Base.establish_connection(db_config) # rubocop: disable Database/EstablishConnection
if ::Gitlab.next_rails?
internal_metadata = ActiveRecord::Base.connection.internal_metadata # rubocop: disable Database/MultipleDatabases
internal_metadata[DB_CONFIG_NAME_KEY] = db_config.name if internal_metadata.table_exists?
elsif ActiveRecord::InternalMetadata.table_exists?
ts = Time.zone.now
ActiveRecord::InternalMetadata.upsert(
{ key: DB_CONFIG_NAME_KEY,
value: db_config.name,
created_at: ts,
updated_at: ts }
)
end
internal_metadata = ActiveRecord::Base.connection.internal_metadata # rubocop: disable Database/MultipleDatabases
internal_metadata[DB_CONFIG_NAME_KEY] = db_config.name if internal_metadata.table_exists?
rescue ActiveRecord::ConnectionNotEstablished, PG::ConnectionBad => err
warn "WARNING: Could not establish database connection for #{db_config.name}: #{err.message}"
rescue ActiveRecord::NoDatabaseError
@ -154,12 +143,7 @@ namespace :gitlab do
def get_db_identifier(db_config)
ActiveRecord::Base.establish_connection(db_config) # rubocop: disable Database/EstablishConnection
internal_metadata =
if ::Gitlab.next_rails?
ActiveRecord::Base.connection.internal_metadata # rubocop: disable Database/MultipleDatabases
else
ActiveRecord::InternalMetadata
end
internal_metadata = ActiveRecord::Base.connection.internal_metadata # rubocop: disable Database/MultipleDatabases
# rubocop:disable Database/MultipleDatabases
if internal_metadata.table_exists?

View File

@ -23376,9 +23376,6 @@ msgstr ""
msgid "DuoEnterpriseTrial|Start your free GitLab Duo Enterprise trial on %{group_name}"
msgstr ""
msgid "DuoEnterpriseTrial|Start your free GitLab Duo Pro trial"
msgstr ""
msgid "DuoEnterpriseTrial|Stay on top of regulatory requirements with self-hosted model deployment"
msgstr ""

View File

@ -17,11 +17,7 @@ RSpec.describe StorageHelper, feature_category: :consumables_cost_management do
end
it "uses commas as thousands separator" do
if ::Gitlab.next_rails?
expect(helper.storage_counter(100_000_000_000_000_000_000_000_000)).to eq("84,703.3 ZB")
else
expect(helper.storage_counter(100_000_000_000_000_000_000_000)).to eq("86,736.2 EiB")
end
expect(helper.storage_counter(100_000_000_000_000_000_000_000_000)).to eq("84,703.3 ZB")
end
end

View File

@ -2,25 +2,23 @@
require 'spec_helper'
if ::Gitlab.next_rails?
RSpec.describe 'ActiveModel::AttributeMethods Patch', feature_category: :database do
before do
load Rails.root.join('config/initializers/active_model_attribute_methods.rb')
RSpec.describe 'ActiveModel::AttributeMethods Patch', feature_category: :database do
before do
load Rails.root.join('config/initializers/active_model_attribute_methods.rb')
end
describe '.aliases_by_attribute_name' do
let(:klass) do
Class.new do
include ActiveModel::AttributeMethods
alias_attribute :id_value, :id
alias_attribute :id_value, :id
end
end
describe '.aliases_by_attribute_name' do
let(:klass) do
Class.new do
include ActiveModel::AttributeMethods
alias_attribute :id_value, :id
alias_attribute :id_value, :id
end
end
it 'stores the alias attribute only once' do
expect(klass.aliases_by_attribute_name['id'].to_a).to eq(['id_value'])
end
it 'stores the alias attribute only once' do
expect(klass.aliases_by_attribute_name['id'].to_a).to eq(['id_value'])
end
end
end

View File

@ -2,12 +2,7 @@
require 'spec_helper'
require 'rails/generators/testing/assertions'
if ::Gitlab.next_rails?
require 'rails/generators/testing/behavior'
else
require 'rails/generators/testing/behaviour'
end
require 'rails/generators/testing/behavior'
RSpec.describe BatchedBackgroundMigration::BatchedBackgroundMigrationGenerator, feature_category: :database do
include Rails::Generators::Testing::Behaviour

View File

@ -3,20 +3,10 @@
require 'spec_helper'
require 'generators/gitlab/click_house/migration_generator'
require 'fileutils'
if ::Gitlab.next_rails?
require 'rails/generators/testing/behavior'
else
require 'rails/generators/testing/behaviour'
end
require 'rails/generators/testing/behavior'
RSpec.describe Gitlab::ClickHouse::MigrationGenerator, feature_category: :database do
if ::Gitlab.next_rails?
include Rails::Generators::Testing::Behavior
else
include Rails::Generators::Testing::Behaviour
end
include Rails::Generators::Testing::Behavior
include FileUtils
let(:migration_name) { "CreateProjects" }

View File

@ -74,15 +74,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host, feature_category: :databas
let(:disconnect_method) { :disconnect! }
if ::Gitlab.next_rails?
it_behaves_like 'disconnects the pool'
else
context 'with Rails 7.0' do
let(:disconnect_method) { :disconnect_without_verify! }
it_behaves_like 'disconnects the pool'
end
end
it_behaves_like 'disconnects the pool'
end
describe '#release_connection' do
@ -95,11 +87,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host, feature_category: :databas
describe '#offline!' do
it 'marks the host as offline' do
if ::Gitlab.next_rails?
expect(host.pool).to receive(:disconnect!)
else
expect(host.pool).to receive(:disconnect_without_verify!)
end
expect(host.pool).to receive(:disconnect!)
expect(Gitlab::Database::LoadBalancing::Logger).to receive(:warn)
.with(hash_including(event: :host_offline))

View File

@ -44,41 +44,9 @@ RSpec.describe Gitlab::Database::Migrations::PgBackendPid, feature_category: :da
end
end
describe Gitlab::Database::Migrations::PgBackendPid::OldMigratorPgBackendPid do
let(:klass) do
Class.new do
def with_advisory_lock_connection
yield :conn
end
end
end
it 're-yields with same arguments and wraps it with calls to .say' do
patched_instance = klass.prepend(described_class).new
expect(Gitlab::Database::Migrations::PgBackendPid).to receive(:say).twice
expect { |b| patched_instance.with_advisory_lock_connection(&b) }.to yield_with_args(:conn)
end
it 're-yields with same arguments and wraps it with calls to .say even when error is raised' do
patched_instance = klass.prepend(described_class).new
expect(Gitlab::Database::Migrations::PgBackendPid).to receive(:say).twice
expect do
patched_instance.with_advisory_lock_connection do
raise ActiveRecord::ConcurrentMigrationError, 'test'
end
end.to raise_error ActiveRecord::ConcurrentMigrationError
end
end
describe '.patch!' do
it 'patches ActiveRecord::Migrator' do
if ::Gitlab.next_rails?
expect(ActiveRecord::Migrator).to receive(:prepend).with(described_class::MigratorPgBackendPid)
else
expect(ActiveRecord::Migrator).to receive(:prepend).with(described_class::OldMigratorPgBackendPid)
end
expect(ActiveRecord::Migrator).to receive(:prepend).with(described_class::MigratorPgBackendPid)
described_class.patch!
end

View File

@ -10,12 +10,13 @@ RSpec.describe 'cross-database foreign keys', feature_category: :database do
# The issue corresponding to the loose foreign key conversion
# should be added as a comment along with the name of the column.
let!(:allowed_cross_database_foreign_keys) do
keys = [
[
'zoekt_indices.zoekt_enabled_namespace_id',
'zoekt_repositories.project_id',
'zoekt_replicas.zoekt_enabled_namespace_id',
'zoekt_replicas.namespace_id',
'system_access_microsoft_applications.namespace_id',
'ci_job_artifact_states.partition_id.job_artifact_id',
'p_ci_build_tags.tag_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/470872
'ci_secure_file_states.ci_secure_file_id', # https://gitlab.com/groups/gitlab-org/-/epics/17347
'dependency_proxy_blob_states.dependency_proxy_blob_id', # https://gitlab.com/groups/gitlab-org/-/epics/17347
@ -46,14 +47,6 @@ RSpec.describe 'cross-database foreign keys', feature_category: :database do
'appearance_uploads.namespace_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/534207
'appearance_uploads.organization_id' # https://gitlab.com/gitlab-org/gitlab/-/issues/534207
]
keys << if ::Gitlab.next_rails?
'ci_job_artifact_states.partition_id.job_artifact_id'
else
'ci_job_artifact_states.partition_id'
end
keys
end
def foreign_keys_for(table_name)

View File

@ -619,13 +619,8 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
context 'when the table is not empty' do
before do
if ::Gitlab.next_rails?
source_model.create!(id: [1, 2])
source_model.create!(id: [2, 3])
else
source_model.create!(id: 1, runner_type: 2)
source_model.create!(id: 2, runner_type: 3)
end
source_model.create!(id: [1, 2])
source_model.create!(id: [2, 3])
end
let(:opts) { {} }

View File

@ -48,23 +48,7 @@ RSpec.describe Gitlab::Database::PostgresqlAdapter::TypeMapCache do
end
end
# Based on https://github.com/rails/rails/blob/v6.1.3.2/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb#L36-L41
def initialize_connection(config = db_config)
return adapter_class.new(config).connect! if ::Gitlab.next_rails?
conn_params = config.symbolize_keys.compact
conn_params[:user] = conn_params.delete(:username) if conn_params[:username]
conn_params[:dbname] = conn_params.delete(:database) if conn_params[:database]
valid_conn_param_keys = PG::Connection.conndefaults_hash.keys + [:requiressl]
conn_params.slice!(*valid_conn_param_keys)
adapter_class.new(
adapter_class.new_client(conn_params),
ActiveRecord::Base.logger,
conn_params,
config
)
adapter_class.new(config).connect!
end
end

View File

@ -65,9 +65,6 @@ RSpec.describe Gitlab::Database::SchemaMigrations::Context do
describe '#versions_to_create' do
before do
allow(connection).to receive_message_chain(:schema_migration, :all_versions).and_return(migrated_versions)
# Can be removed after Gitlab.next_rails?
allow(connection).to receive_message_chain(:schema_migration, :versions).and_return(migrated_versions)
migrations_struct = Struct.new(:version)

View File

@ -43,7 +43,7 @@ RSpec.describe Gitlab::Database::WithLockRetries, feature_category: :database do
conn = ActiveRecord::Base.connection_pool.checkout
# Set a new lock every time to allow multiple connections per thread
conn.lock_thread = ActiveSupport::Concurrency::ThreadLoadInterlockAwareMonitor.new if ::Gitlab.next_rails?
conn.lock_thread = ActiveSupport::Concurrency::ThreadLoadInterlockAwareMonitor.new
conn.transaction do
conn.execute("LOCK TABLE #{Project.table_name} in exclusive mode")

View File

@ -594,32 +594,6 @@ RSpec.describe Gitlab::Database, feature_category: :database do
event = events.first
expect(event).not_to be_nil
expect(event.duration).to be > 0.0
unless ::Gitlab.next_rails?
expect(event.payload).to a_hash_including(
connection: be_a(Gitlab::Database::LoadBalancing::ConnectionProxy)
)
end
end
end
unless ::Gitlab.next_rails?
context 'within an empty transaction block' do
it 'publishes a transaction event' do
events = subscribe_events do
ApplicationRecord.transaction {}
Ci::ApplicationRecord.transaction {}
end
expect(events.length).to be(2)
event = events.first
expect(event).not_to be_nil
expect(event.duration).to be > 0.0
expect(event.payload).to a_hash_including(
connection: be_a(Gitlab::Database::LoadBalancing::ConnectionProxy)
)
end
end
end

View File

@ -1,54 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Patch::ActiveRecordConnectionPool, feature_category: :shared do
let(:db_config) { ApplicationRecord.connection_pool.db_config }
let(:pool_config) do
ActiveRecord::ConnectionAdapters::PoolConfig.new(ActiveRecord::Base, db_config, :writing, :default)
end
let(:done_connection) do
conn = nil
Thread.new do
conn = pool.checkout
pool.checkin(conn)
end.join
conn
end
subject(:pool) { ActiveRecord::ConnectionAdapters::ConnectionPool.new(pool_config) }
describe '#disconnect_without_verify!' do
unless Gitlab.next_rails?
it 'does not call verify!' do
expect(done_connection).not_to receive(:verify!)
pool.disconnect_without_verify!
expect(pool.connections.count).to eq(0)
end
end
end
describe '#disconnect!' do
if Gitlab.next_rails?
it 'does not call verify on the connection' do
expect(done_connection).not_to receive(:verify!)
pool.disconnect!
expect(pool.connections.count).to eq(0)
end
else
it 'calls verify on the connection' do
expect(done_connection).to receive(:verify!).and_call_original
pool.disconnect!
expect(pool.connections.count).to eq(0)
end
end
end
end

View File

@ -27,11 +27,7 @@ RSpec.describe Database::MarkMigrationService, feature_category: :database do
subject(:execute) { service.execute }
def versions
if ::Gitlab.next_rails?
connection.schema_migration.versions.count { |v| v == version.to_s }
else
ActiveRecord::SchemaMigration.where(version: version).count
end
connection.schema_migration.versions.count { |v| v == version.to_s }
end
it 'marks the migration as successful' do

View File

@ -51,13 +51,8 @@ RSpec.describe LooseForeignKeys::PartitionCleanerService, feature_category: :dat
FOR VALUES IN (101);
SQL
if ::Gitlab.next_rails?
table("_test_target_table").create!(id: [1, 100], parent_id: deleted_id)
table("_test_target_table").create!(id: [2, 101], parent_id: deleted_id)
else
table("_test_target_table").create!(id: 1, parent_id: deleted_id, partition_id: 100)
table("_test_target_table").create!(id: 2, parent_id: deleted_id, partition_id: 101)
end
table("_test_target_table").create!(id: [1, 100], parent_id: deleted_id)
table("_test_target_table").create!(id: [2, 101], parent_id: deleted_id)
end
describe 'query generation' do

View File

@ -206,11 +206,7 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
context 'for batched nullify' do
# rubocop:disable Layout/LineLength
def nullify_in_batches_regexp(table, column, user, batch_size: 100)
if ::Gitlab.next_rails?
%r{^UPDATE "#{table}" SET "#{column}" = NULL WHERE \("#{table}"."id"\) IN \(SELECT "#{table}"."id" FROM "#{table}" WHERE "#{table}"."#{column}" = #{user.id} LIMIT #{batch_size}\)}
else
%r{^UPDATE "#{table}" SET "#{column}" = NULL WHERE "#{table}"."id" IN \(SELECT "#{table}"."id" FROM "#{table}" WHERE "#{table}"."#{column}" = #{user.id} LIMIT #{batch_size}\)}
end
%r{^UPDATE "#{table}" SET "#{column}" = NULL WHERE \("#{table}"."id"\) IN \(SELECT "#{table}"."id" FROM "#{table}" WHERE "#{table}"."#{column}" = #{user.id} LIMIT #{batch_size}\)}
end
# rubocop:enable Layout/LineLength

View File

@ -78,12 +78,7 @@ quality_level = Quality::TestLevel.new
RSpec.configure do |config|
config.use_transactional_fixtures = true
config.use_instantiated_fixtures = false
if ::Gitlab.next_rails?
config.fixture_paths = [Rails.root]
else
config.fixture_path = Rails.root
end
config.fixture_paths = [Rails.root]
config.verbose_retry = true
config.display_try_failure_messages = true

View File

@ -25,12 +25,7 @@ module Database
end
def self.btree_index_struct(index)
columns =
if ::Gitlab.next_rails?
Array.wrap(index.columns) + Array.wrap(index.include)
else
Array.wrap(index.columns)
end
columns = Array.wrap(index.columns) + Array.wrap(index.include)
BTREE_INDEX_STRUCT.new(
index.name,

View File

@ -96,7 +96,7 @@ module Database
.connection_pool_names
.map(&:constantize)
connection_classes.delete(ActiveRecord::PendingMigrationConnection) if ::Gitlab.next_rails?
connection_classes.delete(ActiveRecord::PendingMigrationConnection)
connection_class_to_config = connection_classes.index_with(&:connection_db_config)

View File

@ -2,10 +2,6 @@
module RenderedHtml
def rendered_html
if ::Gitlab.next_rails?
Capybara::Node::Simple.new(rendered.html)
else
Capybara::Node::Simple.new(rendered)
end
Capybara.string(rendered)
end
end

View File

@ -5,9 +5,7 @@ require 'spec_helper'
RSpec.describe Database::DuplicateIndexes, feature_category: :database do
index_class = ActiveRecord::ConnectionAdapters::IndexDefinition
let(:default_index_options) do
options = { using: :btree, orders: {}, unique: false, opclasses: {}, where: nil }
options[:include] = [] if ::Gitlab.next_rails?
options
{ using: :btree, orders: {}, unique: false, opclasses: {}, where: nil, include: [] }
end
let(:table_name) { 'foobar' }

View File

@ -213,10 +213,6 @@ RSpec.describe 'gitlab:db:validate_config', :silence_stdout, :suppress_gitlab_sc
before do
allow(exception).to receive(:cause).and_return(PG::ReadOnlySqlTransaction.new("cannot execute UPSERT in a read-only transaction"))
unless Gitlab.next_rails?
allow(ActiveRecord::InternalMetadata).to receive(:upsert).at_least(:once).and_raise(exception)
end
end
it_behaves_like 'validates successfully'