Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-09-30 00:14:21 +00:00
parent a5ba80c8d1
commit 20e2d9705e
39 changed files with 237 additions and 41 deletions

View File

@ -52,7 +52,12 @@ module PersonalAccessTokens
deliver_user_notifications(user, token_names)
expiring_user_tokens.update_all(expire_notification_delivered: true)
# we are in the process of deprecating expire_notification_delivered column
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/166683
expiring_user_tokens.update_all(
expire_notification_delivered: true,
seven_days_notification_sent_at: Time.current
)
end
end
end
@ -102,7 +107,13 @@ module PersonalAccessTokens
tokens_with_delivered_notifications =
tokens
.where.not(user_id: project_bot_ids_without_resource | project_bot_ids_with_failed_delivery)
tokens_with_delivered_notifications.update_all(expire_notification_delivered: true)
# we are in the process of deprecating expire_notification_delivered column
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/166683
tokens_with_delivered_notifications.update_all(
expire_notification_delivered: true,
seven_days_notification_sent_at: Time.current
)
notifications_delivered += tokens_with_delivered_notifications.count
end

View File

@ -209,7 +209,7 @@ module WikiCloth
when "nowiki"
return self.element_content
when "a"
if self.element_attributes['href'] =~ /:\/\//
if /:\/\//.match?(self.element_attributes['href'])
return @options[:link_handler].external_link(self.element_attributes['href'], self.element_content)
elsif self.element_attributes['href'].nil? || self.element_attributes['href'] =~ /^\s*([\?\/])/
# if a element has no href attribute, or href starts with / or ?

View File

@ -0,0 +1,10 @@
---
# Batched background migration (BBM) dictionary entry tracking the rollout of
# BackfillPersonalAccessTokenSevenDaysNotificationSent.
migration_job_name: BackfillPersonalAccessTokenSevenDaysNotificationSent
description: Backfill seven_days_notification_sent_at column using data from expires_at column in personal_access_tokens table.
feature_category: system_access
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165592
milestone: '17.5'
queued_migration_version: 20240909222743
# Approximate date by which this BBM is expected to have completed on GitLab.com.
finalize_after: '2024-10-24'
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true

# Adds a temporary partial index on personal_access_tokens used by the
# BackfillPersonalAccessTokenSevenDaysNotificationSent batched background
# migration to efficiently find rows that still need backfilling.
class CreateTempIndexForBackfillingPatNotifications < Gitlab::Database::Migration[2.2]
  milestone '17.5'

  # add_concurrent_index cannot run inside a DDL transaction.
  disable_ddl_transaction!

  INDEX_NAME = 'tmp_index_pats_on_notification_columns_and_expires_at'

  # Matches only rows the backfill must process: already notified via the
  # legacy flag, not yet stamped in the new column, and with a known expiry.
  INDEX_CONDITION = 'expire_notification_delivered IS TRUE ' \
    'AND seven_days_notification_sent_at IS NULL ' \
    'AND expires_at IS NOT NULL'

  def up
    # to be removed once BackfillPersonalAccessTokenSevenDaysNotificationSent is finalized
    # https://gitlab.com/gitlab-org/gitlab/-/issues/485856
    add_concurrent_index :personal_access_tokens, :id, where: INDEX_CONDITION, name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :personal_access_tokens, INDEX_NAME
  end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true

# Enqueues the BackfillPersonalAccessTokenSevenDaysNotificationSent batched
# background migration over the personal_access_tokens table, batching by :id.
class QueueBackfillPersonalAccessTokenSevenDaysNotificationSent < Gitlab::Database::Migration[2.2]
  milestone '17.5'

  # The batched migration only touches tables in the gitlab_main schema.
  restrict_gitlab_migration gitlab_schema: :gitlab_main

  MIGRATION = "BackfillPersonalAccessTokenSevenDaysNotificationSent"
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 1000
  SUB_BATCH_SIZE = 100

  def up
    queue_batched_background_migration(
      MIGRATION,
      :personal_access_tokens,
      :id,
      job_interval: DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    # Removes any queued or partially-run batches of the migration.
    delete_batched_background_migration(MIGRATION, :personal_access_tokens, :id, [])
  end
end

View File

@ -0,0 +1 @@
8eba9f08402dee5cf285ca26e521c3ec1f4e8b1bd94ec57bfdf21c91597f6584

View File

@ -0,0 +1 @@
33feb1432b6e33edf61831495df67bc064e30653c2501bfc94bc2881dbe917e7

View File

@ -31446,6 +31446,8 @@ CREATE INDEX tmp_index_on_vulnerabilities_non_dismissed ON vulnerabilities USING
CREATE INDEX tmp_index_packages_dependencies_on_id_without_project_id ON packages_dependencies USING btree (id) WHERE (project_id IS NULL);
CREATE INDEX tmp_index_pats_on_notification_columns_and_expires_at ON personal_access_tokens USING btree (id) WHERE ((expire_notification_delivered IS TRUE) AND (seven_days_notification_sent_at IS NULL) AND (expires_at IS NOT NULL));
CREATE INDEX tmp_index_project_statistics_cont_registry_size ON project_statistics USING btree (project_id) WHERE (container_registry_size = 0);
CREATE INDEX tmp_index_vulnerability_overlong_title_html ON vulnerabilities USING btree (id) WHERE (length(title_html) > 800);

View File

@ -115,6 +115,14 @@ The CRON expression is evaluated in [UTC](https://www.timeanddate.com/worldclock
You can view the complete schema within the [scan execution policy documentation](../../application_security/policies/scan_execution_policies.md#scan-execution-policies-schema).
## OCS vulnerability resolution for multi cluster configuration
To ensure accurate vulnerability tracking with OCS, you should create a separate GitLab project with OCS enabled for each cluster. If you have multiple clusters, be sure to use one project for each cluster.
OCS resolves vulnerabilities that are no longer found in your cluster after each scan by comparing the current scan vulnerabilities with those previously detected. Any vulnerabilities from earlier scans that are no longer present in the current scan are resolved for the GitLab project.
If multiple clusters are configured in the same project, an OCS scan in one cluster (for example, Project A) would resolve previously detected vulnerabilities from another cluster (for example, Project B), leading to incorrect vulnerability reporting.
## Configure scanner resource requirements
By default, the scanner pod's resource requirements are:

View File

@ -181,7 +181,7 @@ module API
def find_pipeline(id)
return unless id
if id.to_s =~ INTEGER_ID_REGEX
if INTEGER_ID_REGEX.match?(id.to_s)
::Ci::Pipeline.find_by(id: id)
end
end
@ -210,7 +210,7 @@ module API
def find_group(id, organization: nil)
collection = organization.present? ? Group.in_organization(organization) : Group.all
if id.to_s =~ INTEGER_ID_REGEX
if INTEGER_ID_REGEX.match?(id.to_s)
collection.find_by(id: id)
else
collection.find_by_full_path(id)
@ -250,7 +250,7 @@ module API
# find_namespace returns the namespace regardless of user access level on the namespace
# rubocop: disable CodeReuse/ActiveRecord
def find_namespace(id)
if id.to_s =~ INTEGER_ID_REGEX
if INTEGER_ID_REGEX.match?(id.to_s)
Namespace.without_project_namespaces.find_by(id: id)
else
find_namespace_by_path(id)

View File

@ -27,7 +27,7 @@ module API
options[:route_options][:params].map do |key, val|
param_type = val[:type]
# Search for parameters with Array types (e.g. "[String]", "[Integer]", etc.)
if param_type =~ %r{\[\w*\]}
if %r{\[\w*\]}.match?(param_type)
key
end
end.compact.to_set

View File

@ -6,7 +6,7 @@ module API
module BulkImports
class DestinationSlugPath < Grape::Validations::Validators::Base
def validate_param!(attr_name, params)
return if params[attr_name] =~ Gitlab::Regex.oci_repository_path_regex
return if Gitlab::Regex.oci_repository_path_regex.match?(params[attr_name])
raise Grape::Exceptions::Validation.new(
params: [@scope.full_name(attr_name)],

View File

@ -40,7 +40,7 @@ module Banzai
#
# Returns the recognized color String or nil if none was found.
def self.parse(text)
text if COLOR_FORMAT =~ text
text if COLOR_FORMAT.match?(text)
end
end
end

View File

@ -77,10 +77,10 @@ module Banzai
return unless node.name == 'a' || node.name == 'div' || SECTION_HEADINGS.any?(node.name)
return unless node.has_attribute?('id')
return if node['id'] =~ PREFIXED_ID_PATTERN
return if PREFIXED_ID_PATTERN.match?(node['id'])
if (pattern = FOOTNOTE_LINK_ID_PATTERNS[node.name.to_sym])
return if node['id'] =~ pattern
return if node['id']&.match?(pattern)
end
node.remove_attribute('id')

View File

@ -157,7 +157,7 @@ module Banzai
next
end
if link =~ link_pattern_anchor
if link_pattern_anchor.match?(link)
replace_link_node_with_href(node, index, link) do
object_link_filter(link, link_pattern_anchor, link_content: inner_html, link_reference: true)
end

View File

@ -53,7 +53,7 @@ module Banzai
end
elsif element_node?(node)
yield_valid_link(node) do |link, inner_html|
if link =~ ref_pattern_start
if ref_pattern_start.match?(link)
replace_link_node_with_href(node, index, link) do
object_link_filter(link, ref_pattern_start, link_content: inner_html)
end

View File

@ -5,7 +5,7 @@ module BulkImports
private
def normalize_path(path)
return path.downcase if path =~ Gitlab::Regex.oci_repository_path_regex
return path.downcase if Gitlab::Regex.oci_repository_path_regex.match?(path)
path = path.parameterize.downcase

View File

@ -40,7 +40,7 @@ module Feature
raise Feature::InvalidFeatureFlagError, "Feature flag is missing name"
end
unless VALID_FEATURE_NAME =~ name
unless VALID_FEATURE_NAME.match?(name)
raise Feature::InvalidFeatureFlagError, "Feature flag '#{name}' is invalid"
end

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # Backfills personal_access_tokens.seven_days_notification_sent_at for
    # tokens whose legacy expire_notification_delivered flag is set, deriving
    # the timestamp as seven days before the token's expires_at.
    class BackfillPersonalAccessTokenSevenDaysNotificationSent < BatchedMigrationJob
      operation_name :backfill_personal_access_token_seven_days_notification_sent

      # Scope each batch to rows still needing a backfill: notified via the
      # legacy column, new column empty, and expiry known.
      # rubocop:disable CodeReuse/ActiveRecord -- guidelines says query methods are okay to use here
      scope_to ->(relation) do
        relation.where(expire_notification_delivered: true, seven_days_notification_sent_at: nil)
                .where.not(expires_at: nil)
      end
      # rubocop:enable CodeReuse/ActiveRecord

      feature_category :system_access

      def perform
        each_sub_batch do |sub_batch|
          # Computed in SQL, row by row on the DB side: assume the notification
          # was sent seven days before the recorded expiry.
          sub_batch.update_all("seven_days_notification_sent_at = (expires_at - interval '7 days')")
        end
      end
    end
  end
end

View File

@ -155,7 +155,7 @@ namespace :gitlab do
# - Dir.entries returns also the entries '.' and '..'
def remove_unneeded_files(directory, regex)
Dir.foreach(directory) do |file|
FileUtils.rm_rf(File.join(directory, file)) unless file =~ regex
FileUtils.rm_rf(File.join(directory, file)) unless regex.match?(file)
end
end

View File

@ -92,7 +92,7 @@ class UploadedFile
name = name.tr("\\", "/") # work-around for IE
name = ::File.basename(name)
name = name.gsub(CarrierWave::SanitizedFile.sanitize_regexp, "_")
name = "_#{name}" if name =~ /\A\.+\z/
name = "_#{name}" if /\A\.+\z/.match?(name)
name = "unnamed" if name.empty?
name.mb_chars.to_s
end

View File

@ -41,7 +41,7 @@ module QA
chat_page.messages.last.text =~ /connect your GitLab account|404 not found!/i
end
break(true) if chat_page.messages.last.text =~ /404 not found!/i
break(true) if /404 not found!/i.match?(chat_page.messages.last.text)
chat_page.click_connect_account_link

View File

@ -14,7 +14,7 @@ module QA
def initialize(git_uri)
@git_uri = git_uri
@uri =
if git_uri =~ %r{\A(?:ssh|http|https)://}
if %r{\A(?:ssh|http|https)://}.match?(git_uri)
URI.parse(git_uri)
else
*rest, path = git_uri.split(':')

View File

@ -117,7 +117,7 @@ module QA
body = extract_graphql_body(graphql_response)
unless graphql_response.code == HTTP_STATUS_OK && (body[:errors].nil? || body[:errors].empty?)
action = post_body =~ /mutation {\s+destroy/ ? 'Deletion' : 'Fabrication'
action = /mutation {\s+destroy/.match?(post_body) ? 'Deletion' : 'Fabrication'
raise(ResourceFabricationFailedError, <<~MSG.strip)
#{action} of #{self.class.name} using the API failed (#{graphql_response.code}) with `#{graphql_response}`.
#{QA::Support::Loglinking.failure_metadata(graphql_response.headers[:x_request_id])}

View File

@ -113,7 +113,7 @@ module QA
def verify_search_engine_ok(search_term)
response = get_response('commits', search_term)
if response.code.to_s =~ /5[0-9][0-9]/
if /5[0-9][0-9]/.match?(response.code.to_s)
raise ElasticSearchServerError, "elasticsearch attempt returned code #{response.code}. Check that search was conducted on the appropriate url and port."
end
end

View File

@ -53,7 +53,7 @@ module QA
def fetch_kubeconfig
retry_until do
config = `k3d get-kubeconfig --name #{cluster_name}`.chomp
config if config =~ /kubeconfig.yaml/
config if /kubeconfig.yaml/.match?(config)
end
end

View File

@ -4,17 +4,13 @@ require 'net/http'
module QA
RSpec.describe 'Create' do
describe 'Merge Requests', product_group: :code_review,
quarantine: {
issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/483173',
type: :investigating
} do
describe 'Merge Requests', product_group: :code_review do
let(:address) { Runtime::Address.new(:gitlab, '') }
context 'with a malformed URL' do
let(:path) { %(/-/merge_requests?sort=created_date&state=<th:t=\"%24{dfb}%23foreach) }
it 'returns 400', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/426509' do
it 'returns 400 or 302', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/426509' do
# Ruby's URI module automatically encodes query parameters:
# https://github.com/ruby/uri/blob/f4999b61daa40f2c99fdc7159e2c85c036b22c67/lib/uri/generic.rb#L849
#
@ -28,7 +24,9 @@ module QA
request = Net::HTTP::Get.new(path)
response = http.request(request)
expect(response.code.to_i).to eq(400)
# URL normalization in live environments can result in a 302
# https://gitlab.com/gitlab-org/gitlab/-/issues/483173#note_2126564107
expect(response.code.to_i).to eq(400).or eq(302)
end
end
end

View File

@ -50,7 +50,8 @@ module QA
# @param [Hash] change mr file change
# @return [Hash] a hash containing the YAML data for the feature flag definition
def ff_yaml_for_file(change)
return unless change[:path] =~ %r{/feature_flags/.*\.yml}
return unless %r{/feature_flags/.*\.yml}.match?(change[:path])
if change[:deleted_file]
return { name: change[:path].split("/").last.gsub(/\.(yml|yaml)/, ""), deleted: true }
end

View File

@ -48,7 +48,7 @@ class FailedTests
def failed_cases_for_suite_collection
suite_map.each_with_object(Hash.new { |h, k| h[k] = Set.new }) do |(suite_name, suite_collection_regex), hash|
failed_suites.each do |suite|
hash[suite_name].merge(failed_cases(suite)) if suite['name'] =~ suite_collection_regex
hash[suite_name].merge(failed_cases(suite)) if suite_collection_regex.match?(suite['name'])
end
end
end

View File

@ -59,7 +59,7 @@ class BlueprintFrontMatter
end
def validate_creation_date
return if @metadata['creation-date'] =~ /\d{4}-[01]\d-[0123]\d/
return if /\d{4}-[01]\d-[0123]\d/.match?(@metadata['creation-date'])
add_error("Invalid creation-date: the date format must be 'yyyy-mm-dd'")
end

View File

@ -120,14 +120,14 @@ class QueryLimitingReport
item_hash = item.to_hash
filename = item_hash.fetch('filename')
next if filename !~ /\.rb\Z/
next unless /\.rb\Z/.match?(filename)
file_contents = Gitlab.file_contents(GITLAB_PROJECT_ID, filename)
file_lines = file_contents.split("\n")
file_lines.each_index do |index|
line = file_lines[index]
next unless line =~ /#{CODE_LINES_SEARCH_STRING}/o
next unless /#{CODE_LINES_SEARCH_STRING}/o.match?(line)
issue_iid = line.slice(%r{issues/(\d+)\D}, 1)
line_number = index + 1

View File

@ -25,7 +25,7 @@ end
file = File.read(test_metadata_file)
unless file =~ %r{.*\"examples\":\[\{\"id\"\:.*}
unless %r{.*\"examples\":\[\{\"id\"\:.*}.match?(file)
puts "\nRspec output did not match regex. Check test-metadata.json file.\n"
exit 1
end
@ -36,12 +36,12 @@ data_hash = JSON.parse(file)
tests = data_hash['examples']
tests.each do |test|
next if test['id'] =~ %r{.\/qa\/specs\/features\/sanity\/*}
next if %r{.\/qa\/specs\/features\/sanity\/*}.match?(test['id'])
if test['testcase']
testcases.push([(test['testcase']).to_s, "#{test['id']} - #{test['full_description']}"])
unless TESTCASE_FORMAT =~ test['testcase']
unless TESTCASE_FORMAT.match?(test['testcase'])
testcase_format_errors.push(
<<~FORMAT_ERRORS
==> #{test['testcase']} in file: #{test['id']} with title:

View File

@ -232,7 +232,7 @@ module Trigger
raw_version = super
# if the version matches semver format, treat it as a tag and prepend `v`
if raw_version =~ Regexp.compile(/^\d+\.\d+\.\d+(-rc\d+)?(-ee)?$/)
if Regexp.compile(/^\d+\.\d+\.\d+(-rc\d+)?(-ee)?$/).match?(raw_version)
"v#{raw_version}"
else
raw_version

View File

@ -72,7 +72,7 @@ module Gitlab
available_queues = queues_from_routing_rules.empty? ? DEFAULT_QUEUES : [*queues_from_routing_rules, 'mailers'].freeze
queue_groups = argv.map do |queues|
if queues =~ /[\r\n]/
if /[\r\n]/.match?(queues)
raise CommandError,
'The queue arguments cannot contain newlines'
end

View File

@ -0,0 +1,62 @@
# frozen_string_literal: true

require 'spec_helper'

# Verifies the batched background migration that derives
# seven_days_notification_sent_at from expires_at for already-notified tokens.
RSpec.describe Gitlab::BackgroundMigration::BackfillPersonalAccessTokenSevenDaysNotificationSent, feature_category: :system_access do
  let(:pats_table) { table(:personal_access_tokens) }
  let(:users) { table(:users) }
  let(:organizations) { table(:organizations) }

  let!(:organization) { organizations.create!(name: 'default org', path: 'dflt') }
  let!(:user) { users.create!(email: 'capybara@example.com', encrypted_password: 'abc123', projects_limit: 2) }

  # Notified via the legacy flag but missing the new timestamp -- should be backfilled.
  let!(:pat_to_update) do
    pats_table.create!(name: 'notified token', expire_notification_delivered: true, expires_at: Date.current,
      user_id: user.id, organization_id: organization.id)
  end

  # Already has the new timestamp -- must be left untouched by the migration.
  let!(:seven_days_notified) do
    pats_table.create!(name: 'seven days token', expire_notification_delivered: true, expires_at: Date.current,
      seven_days_notification_sent_at: Time.current - 1.day, user_id: user.id, organization_id: organization.id)
  end

  # Legacy flag not set -- outside the migration's scope.
  let!(:not_notified_pat) do
    pats_table.create!(name: 'not notified token', expires_at: Date.current + 1, user_id: user.id,
      organization_id: organization.id)
  end

  # No expiry date -- outside the migration's scope.
  let!(:no_expiry_pat) do
    pats_table.create!(name: 'no expiry token', expire_notification_delivered: true, user_id: user.id,
      organization_id: organization.id)
  end

  describe '#perform' do
    subject(:perform_migration) do
      # Run the whole table as a single sub-batch.
      described_class.new(
        start_id: pats_table.first.id,
        end_id: pats_table.last.id,
        batch_table: :personal_access_tokens,
        batch_column: :id,
        sub_batch_size: pats_table.count,
        pause_ms: 0,
        connection: ActiveRecord::Base.connection
      ).perform
    end

    it 'backfills seven_days_notification_sent_at field', :freeze_time do
      expect(pat_to_update.reload.seven_days_notification_sent_at).to be_nil
      expect(seven_days_notified.reload.seven_days_notification_sent_at).to eq(Time.current - 1.day)
      expect(not_notified_pat.reload.seven_days_notification_sent_at).to be_nil
      expect(no_expiry_pat.reload.seven_days_notification_sent_at).to be_nil

      perform_migration

      # db updates do not use the same timezone as Rails; default to UTC
      db_updated_time = Time.utc(Time.current.year, Time.current.month, Time.current.day) - 7.days
      expect(pat_to_update.reload.seven_days_notification_sent_at).to eq(db_updated_time)
      expect(seven_days_notified.reload.seven_days_notification_sent_at).to eq(Time.current - 1.day)
      expect(not_notified_pat.reload.seven_days_notification_sent_at).to be_nil
      expect(no_expiry_pat.reload.seven_days_notification_sent_at).to be_nil
    end
  end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

# Verifies that the queueing migration schedules -- and its rollback removes --
# the BackfillPersonalAccessTokenSevenDaysNotificationSent batched migration.
RSpec.describe QueueBackfillPersonalAccessTokenSevenDaysNotificationSent, feature_category: :system_access do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      migration.before -> {
        # Nothing scheduled before up (and again after down).
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      migration.after -> {
        # Scheduled with the constants declared on the migration class.
        expect(batched_migration).to have_scheduled_batched_migration(
          table_name: :personal_access_tokens,
          column_name: :id,
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE
        )
      }
    end
  end
end

View File

@ -75,7 +75,7 @@ RSpec.shared_examples 'groups routing' do
end
end
RSpec.describe "Groups", "routing" do
RSpec.describe "Groups", "routing", feature_category: :groups_and_projects do
context 'complex group path with dot' do
include_examples 'groups routing' do
let(:group_path) { 'complex.group-namegit' }

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe 'project routing' do
RSpec.describe 'project routing', feature_category: :groups_and_projects do
let(:base_params) { { namespace_id: 'gitlab', project_id: 'gitlabhq' } }
before do

View File

@ -41,6 +41,10 @@ RSpec.describe PersonalAccessTokens::ExpiringWorker, type: :worker, feature_cate
expect { worker.perform }.to change { expiring_token.reload.expire_notification_delivered }.from(false).to(true)
end
it 'marks the notification as delivered with new column', :freeze_time do
expect { worker.perform }.to change { expiring_token.reload.seven_days_notification_sent_at }.from(nil).to(Time.current)
end
it 'avoids N+1 queries', :use_sql_query_cache do
control = ActiveRecord::QueryRecorder.new(skip_cached: false) { worker.perform }