Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
ec05f6eb14
commit
4ec95e2795
|
|
@ -3176,7 +3176,6 @@ Gitlab/BoundedContexts:
|
|||
- 'ee/app/services/ee/post_receive_service.rb'
|
||||
- 'ee/app/services/ee/preview_markdown_service.rb'
|
||||
- 'ee/app/services/ee/protected_branches/api_service.rb'
|
||||
- 'ee/app/services/ee/protected_branches/base_service.rb'
|
||||
- 'ee/app/services/ee/protected_branches/create_service.rb'
|
||||
- 'ee/app/services/ee/protected_branches/destroy_service.rb'
|
||||
- 'ee/app/services/ee/protected_branches/legacy_api_update_service.rb'
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
7186455e61c1a6556cd6f0664a9c90c5da2e6bae
|
||||
da3a19a82c6c31f1dd1c29ce5ec48906d1594207
|
||||
|
|
|
|||
|
|
@ -42,7 +42,8 @@ export default {
|
|||
return this.metricNames.map((metric) => {
|
||||
return {
|
||||
name: metric,
|
||||
data: this.candidates
|
||||
data: [...this.candidates]
|
||||
.sort((a, b) => new Date(a.created_at) - new Date(b.created_at))
|
||||
.filter((candidate) => candidate[metric] !== undefined && candidate[metric] !== null)
|
||||
.map((candidate, index) => ({
|
||||
value: [index + 1, parseFloat(candidate[metric])],
|
||||
|
|
|
|||
|
|
@ -49,6 +49,7 @@ export default ({ el, router }) => {
|
|||
projectPath,
|
||||
cssClasses,
|
||||
...options,
|
||||
gitRef: ref,
|
||||
},
|
||||
});
|
||||
},
|
||||
|
|
|
|||
|
|
@ -275,6 +275,7 @@ export default {
|
|||
:gitpod-url="gitpodUrl"
|
||||
:user-preferences-gitpod-path="userPreferencesGitpodPath"
|
||||
:user-profile-enable-gitpod-path="userProfileEnableGitpodPath"
|
||||
:git-ref="currentRef"
|
||||
disable-fork-modal
|
||||
v-on="$listeners"
|
||||
/>
|
||||
|
|
|
|||
|
|
@ -74,7 +74,9 @@ export default {
|
|||
},
|
||||
items() {
|
||||
return this.descriptionTemplates
|
||||
.filter(({ name }) => (this.searchTerm ? name.includes(this.searchTerm) : true))
|
||||
.filter(({ name }) =>
|
||||
this.searchTerm ? name.toLowerCase().includes(this.searchTerm.toLowerCase()) : true,
|
||||
)
|
||||
.reduce((groups, current) => {
|
||||
const idx = groups.findIndex((group) => group.text === current.category);
|
||||
if (idx > -1) {
|
||||
|
|
|
|||
|
|
@ -297,8 +297,11 @@ class GraphqlController < ApplicationController
|
|||
|
||||
# Merging to :metadata will ensure these are logged as top level keys
|
||||
payload[:metadata] ||= {}
|
||||
|
||||
payload[:metadata][:graphql] = logs
|
||||
|
||||
payload[:metadata][:referer] = request.headers['Referer'] if logs.any? { |log| log[:operation_name] == 'GLQL' }
|
||||
|
||||
payload[:exception_object] = @exception_object if @exception_object
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -674,10 +674,19 @@ module ProjectsHelper
|
|||
end
|
||||
|
||||
title = visibility_icon_description(project)
|
||||
container_class = ['has-tooltip', css_class].compact.join(' ')
|
||||
container_class = [
|
||||
'has-tooltip gl-border-0 gl-bg-transparent gl-p-0 gl-leading-0 gl-text-inherit',
|
||||
css_class
|
||||
].compact.join(' ')
|
||||
data = { container: 'body', placement: 'top' }
|
||||
|
||||
content_tag(:span, class: container_class, data: data, title: title) do
|
||||
content_tag(
|
||||
:button,
|
||||
class: container_class,
|
||||
data: data,
|
||||
title: title,
|
||||
type: 'button',
|
||||
aria: { label: title }) do
|
||||
visibility_level_icon(project.visibility_level, options: { class: icon_css_class })
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -331,7 +331,7 @@ module Ci
|
|||
end
|
||||
end
|
||||
|
||||
# TODO: Remove once https://gitlab.com/gitlab-org/gitlab/-/issues/504277 is closed.
|
||||
# TODO: Remove once https://gitlab.com/gitlab-org/gitlab/-/issues/516929 is closed.
|
||||
def self.sharded_table_proxy_model
|
||||
@sharded_table_proxy_class ||= Class.new(self) do
|
||||
self.table_name = :ci_runners_e59bb2812d
|
||||
|
|
@ -445,7 +445,7 @@ module Ci
|
|||
tag_list.any?
|
||||
end
|
||||
|
||||
# TODO: Remove once https://gitlab.com/gitlab-org/gitlab/-/issues/504277 is closed.
|
||||
# TODO: Remove once https://gitlab.com/gitlab-org/gitlab/-/issues/516929 is closed.
|
||||
def ensure_partitioned_runner_record_exists
|
||||
self.class.sharded_table_proxy_model.insert_all(
|
||||
[attributes.except('tag_list')], unique_by: [:id, :runner_type],
|
||||
|
|
@ -541,7 +541,7 @@ module Ci
|
|||
RunnerManager.safe_find_or_create_by!(runner_id: id, system_xid: system_xid.to_s) do |m|
|
||||
# Avoid inserting partitioned runner managers that refer to a missing ci_runners partitioned record, since
|
||||
# the backfill is not yet finalized.
|
||||
ensure_partitioned_runner_record_exists
|
||||
ensure_partitioned_runner_record_exists if Feature.disabled?(:reject_orphaned_runners, Feature.current_request)
|
||||
|
||||
m.runner_type = runner_type
|
||||
m.sharding_key_id = sharding_key_id
|
||||
|
|
|
|||
|
|
@ -28,6 +28,7 @@ module VirtualRegistries
|
|||
enum :status, default: 0, processing: 1, pending_destruction: 2, error: 3
|
||||
|
||||
ignore_column :downloaded_at, remove_with: '17.9', remove_after: '2025-01-23'
|
||||
ignore_column :file_final_path, remove_with: '17.11', remove_after: '2025-03-23'
|
||||
|
||||
sha_attribute :file_sha1
|
||||
sha_attribute :file_md5
|
||||
|
|
@ -39,18 +40,19 @@ module VirtualRegistries
|
|||
:file_sha1,
|
||||
presence: true
|
||||
validates :upstream_etag, :content_type, length: { maximum: 255 }
|
||||
validates :relative_path, :object_storage_key, :file_final_path, length: { maximum: 1024 }
|
||||
validates :relative_path, :object_storage_key, length: { maximum: 1024 }
|
||||
validates :file_md5, length: { is: 32 }, allow_nil: true
|
||||
validates :file_sha1, length: { is: 40 }
|
||||
validates :relative_path,
|
||||
uniqueness: { scope: [:upstream_id, :status] },
|
||||
if: :default?
|
||||
validates :object_storage_key, uniqueness: { scope: :relative_path }
|
||||
validates :file, presence: true
|
||||
|
||||
mount_file_store_uploader ::VirtualRegistries::Cache::EntryUploader
|
||||
|
||||
before_validation :set_object_storage_key,
|
||||
if: -> { object_storage_key.blank? && relative_path && upstream && upstream.registry }
|
||||
if: -> { object_storage_key.blank? && upstream && upstream.registry }
|
||||
attr_readonly :object_storage_key
|
||||
|
||||
scope :search_by_relative_path, ->(query) do
|
||||
|
|
@ -106,18 +108,7 @@ module VirtualRegistries
|
|||
private
|
||||
|
||||
def set_object_storage_key
|
||||
self.object_storage_key = Gitlab::HashedPath.new(
|
||||
'virtual_registries',
|
||||
'packages',
|
||||
'maven',
|
||||
upstream.registry.id,
|
||||
'upstream',
|
||||
upstream.id,
|
||||
'cache',
|
||||
'entry',
|
||||
OpenSSL::Digest::SHA256.hexdigest(relative_path),
|
||||
root_hash: upstream.registry.id
|
||||
).to_s
|
||||
self.object_storage_key = upstream.object_storage_key_for(registry_id: upstream.registry.id)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -59,6 +59,24 @@ module VirtualRegistries
|
|||
cache_entries.default
|
||||
end
|
||||
|
||||
def object_storage_key_for(registry_id:)
|
||||
hash = Digest::SHA2.hexdigest(SecureRandom.uuid)
|
||||
Gitlab::HashedPath.new(
|
||||
'virtual_registries',
|
||||
'packages',
|
||||
'maven',
|
||||
registry_id.to_s,
|
||||
'upstream',
|
||||
id.to_s,
|
||||
'cache',
|
||||
'entry',
|
||||
hash[0..1],
|
||||
hash[2..3],
|
||||
hash[4..],
|
||||
root_hash: registry_id
|
||||
).to_s
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def reset_credentials
|
||||
|
|
|
|||
|
|
@ -0,0 +1,34 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Authz
|
||||
module Applications
|
||||
class ResetSecretService
|
||||
attr_reader :application, :current_user
|
||||
|
||||
def initialize(application:, current_user:)
|
||||
@application = application
|
||||
@current_user = current_user
|
||||
end
|
||||
|
||||
def execute
|
||||
return error(message: "#{current_user.name} cannot reset secret") unless can_reset_secret?(current_user)
|
||||
|
||||
application.renew_secret
|
||||
|
||||
return ServiceResponse.success if application.save
|
||||
|
||||
error(message: "Couldn't save application")
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def error(message:)
|
||||
ServiceResponse.error(message: message)
|
||||
end
|
||||
|
||||
def can_reset_secret?(current_user)
|
||||
current_user.can_admin_all_resources?
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -27,7 +27,7 @@ module Ci
|
|||
has_length: false,
|
||||
maximum_size: max_size(artifact_type),
|
||||
use_final_store_path: true,
|
||||
final_store_path_root_id: project.id
|
||||
final_store_path_config: { root_hash: project.id }
|
||||
)
|
||||
|
||||
if lsif?(artifact_type)
|
||||
|
|
|
|||
|
|
@ -22,9 +22,29 @@ module ProtectedBranches
|
|||
protected_branch.save.tap do
|
||||
# Refresh all_protected_branches association as it is not automatically updated
|
||||
project_or_group.all_protected_branches.reset if project_or_group.is_a?(Project)
|
||||
|
||||
publish_created_event
|
||||
end
|
||||
end
|
||||
|
||||
def publish_created_event
|
||||
return unless protected_branch.id
|
||||
|
||||
parent_type = if project_or_group.is_a?(Project)
|
||||
::Repositories::ProtectedBranchCreatedEvent::PARENT_TYPES[:project]
|
||||
else
|
||||
::Repositories::ProtectedBranchCreatedEvent::PARENT_TYPES[:group]
|
||||
end
|
||||
|
||||
::Gitlab::EventStore.publish(
|
||||
::Repositories::ProtectedBranchCreatedEvent.new(data: {
|
||||
protected_branch_id: protected_branch.id,
|
||||
parent_id: project_or_group.id,
|
||||
parent_type: parent_type
|
||||
})
|
||||
)
|
||||
end
|
||||
|
||||
def protected_branch
|
||||
@protected_branch ||= project_or_group.protected_branches.new(params)
|
||||
end
|
||||
|
|
|
|||
|
|
@ -7,9 +7,24 @@ module ProtectedBranches
|
|||
|
||||
protected_branch.destroy.tap do
|
||||
refresh_cache
|
||||
after_execute
|
||||
publish_deleted_event
|
||||
end
|
||||
end
|
||||
|
||||
def publish_deleted_event
|
||||
parent_type = if project_or_group.is_a?(Project)
|
||||
::Repositories::ProtectedBranchDestroyedEvent::PARENT_TYPES[:project]
|
||||
else
|
||||
::Repositories::ProtectedBranchDestroyedEvent::PARENT_TYPES[:group]
|
||||
end
|
||||
|
||||
::Gitlab::EventStore.publish(
|
||||
::Repositories::ProtectedBranchDestroyedEvent.new(data: {
|
||||
parent_id: project_or_group.id,
|
||||
parent_type: parent_type
|
||||
})
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -177,7 +177,7 @@ module ObjectStorage
|
|||
[CarrierWave.generate_cache_id, SecureRandom.hex].join('-')
|
||||
end
|
||||
|
||||
def generate_final_store_path(root_id:)
|
||||
def generate_final_store_path(root_hash:)
|
||||
hash = Digest::SHA2.hexdigest(SecureRandom.uuid)
|
||||
|
||||
# We prefix '@final' to prevent clashes and make the files easily recognizable
|
||||
|
|
@ -186,21 +186,26 @@ module ObjectStorage
|
|||
|
||||
# We generate a hashed path of the root ID (e.g. Project ID) to distribute directories instead of
|
||||
# filling up one root directory with a bunch of files.
|
||||
Gitlab::HashedPath.new(sub_path, root_hash: root_id).to_s
|
||||
Gitlab::HashedPath.new(sub_path, root_hash: root_hash).to_s
|
||||
end
|
||||
|
||||
# final_store_path_config is only used if use_final_store_path is set to true
|
||||
# Two keys are available:
|
||||
# - :root_hash. The root hash used in Gitlab::HashedPath for the path generation.
|
||||
# - :override_path. If set, the path generation is skipped and this value is used instead.
|
||||
# Make sure that this value is unique for each upload.
|
||||
def workhorse_authorize(
|
||||
has_length:,
|
||||
maximum_size: nil,
|
||||
use_final_store_path: false,
|
||||
final_store_path_root_id: nil)
|
||||
final_store_path_config: {})
|
||||
{}.tap do |hash|
|
||||
if self.direct_upload_to_object_store?
|
||||
hash[:RemoteObject] = workhorse_remote_upload_options(
|
||||
has_length: has_length,
|
||||
maximum_size: maximum_size,
|
||||
use_final_store_path: use_final_store_path,
|
||||
final_store_path_root_id: final_store_path_root_id
|
||||
final_store_path_config: final_store_path_config
|
||||
)
|
||||
else
|
||||
hash[:TempPath] = workhorse_local_upload_path
|
||||
|
|
@ -231,13 +236,18 @@ module ObjectStorage
|
|||
has_length:,
|
||||
maximum_size: nil,
|
||||
use_final_store_path: false,
|
||||
final_store_path_root_id: nil)
|
||||
final_store_path_config: {})
|
||||
return unless direct_upload_to_object_store?
|
||||
|
||||
if use_final_store_path
|
||||
raise MissingFinalStorePathRootId unless final_store_path_root_id.present?
|
||||
id = if final_store_path_config[:override_path].present?
|
||||
final_store_path_config[:override_path]
|
||||
else
|
||||
raise MissingFinalStorePathRootId unless final_store_path_config[:root_hash].present?
|
||||
|
||||
generate_final_store_path(root_hash: final_store_path_config[:root_hash])
|
||||
end
|
||||
|
||||
id = generate_final_store_path(root_id: final_store_path_root_id)
|
||||
upload_path = with_bucket_prefix(id)
|
||||
prepare_pending_direct_upload(id)
|
||||
else
|
||||
|
|
|
|||
|
|
@ -28,6 +28,11 @@ module VirtualRegistries
|
|||
true
|
||||
end
|
||||
|
||||
override :direct_upload_final_path_attribute_name
|
||||
def direct_upload_final_path_attribute_name
|
||||
:object_storage_key
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def set_content_type(file)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,9 @@
|
|||
---
|
||||
name: reject_orphaned_runners
|
||||
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/516862
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180163
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/516929
|
||||
milestone: '17.9'
|
||||
group: group::runner
|
||||
type: gitlab_com_derisk
|
||||
default_enabled: false
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddObjectStorageKeyUniqueToVRegsPackagesMavenCacheEntries < Gitlab::Database::Migration[2.2]
|
||||
include Gitlab::Database::PartitioningMigrationHelpers
|
||||
|
||||
milestone '17.9'
|
||||
|
||||
disable_ddl_transaction!
|
||||
|
||||
TABLE_NAME = :virtual_registries_packages_maven_cache_entries
|
||||
COLUMNS = [:relative_path, :object_storage_key]
|
||||
INDEX_NAME = :idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key
|
||||
|
||||
def up
|
||||
truncate_tables!(TABLE_NAME.to_s)
|
||||
add_concurrent_partitioned_index TABLE_NAME, COLUMNS, unique: true, name: INDEX_NAME
|
||||
end
|
||||
|
||||
def down
|
||||
remove_concurrent_partitioned_index_by_name TABLE_NAME, INDEX_NAME
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1 @@
|
|||
d43f2fa892bb1a4b268197cd6c83b6f1437c80677fae1caadc61b52caf45f2e5
|
||||
|
|
@ -28872,6 +28872,10 @@ CREATE INDEX index_issue_stage_events_project_duration ON ONLY analytics_cycle_a
|
|||
|
||||
CREATE INDEX index_000925dbd7 ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_11 USING btree (stage_event_hash_id, project_id, end_event_timestamp, issue_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
||||
CREATE UNIQUE INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ON ONLY virtual_registries_packages_maven_cache_entries USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE UNIQUE INDEX index_0051c4d20c ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_15 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_merge_request_stage_events_project_duration ON ONLY analytics_cycle_analytics_merge_request_stage_events USING btree (stage_event_hash_id, project_id, end_event_timestamp, merge_request_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
||||
CREATE INDEX index_006f943df6 ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_16 USING btree (stage_event_hash_id, project_id, end_event_timestamp, merge_request_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
|
@ -28880,6 +28884,8 @@ CREATE INDEX index_issue_stage_events_for_consistency_check ON ONLY analytics_cy
|
|||
|
||||
CREATE INDEX index_009e6c1133 ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_26 USING btree (stage_event_hash_id, group_id, end_event_timestamp, issue_id);
|
||||
|
||||
CREATE UNIQUE INDEX index_0264b93cfb ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_11 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_02749b504c ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_11 USING btree (stage_event_hash_id, project_id, end_event_timestamp, merge_request_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
||||
CREATE INDEX index_merge_request_stage_events_group_duration ON ONLY analytics_cycle_analytics_merge_request_stage_events USING btree (stage_event_hash_id, group_id, end_event_timestamp, merge_request_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
|
@ -28962,6 +28968,8 @@ CREATE INDEX index_1f6c3faabe ON gitlab_partitions_static.analytics_cycle_analyt
|
|||
|
||||
CREATE INDEX index_1f8af04ed1 ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_19 USING btree (stage_event_hash_id, group_id, start_event_timestamp, issue_id) WHERE ((end_event_timestamp IS NULL) AND (state_id = 1));
|
||||
|
||||
CREATE UNIQUE INDEX index_1fa613e160 ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_08 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_201c5ddbe9 ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_00 USING btree (stage_event_hash_id, group_id, start_event_timestamp, merge_request_id) WHERE ((end_event_timestamp IS NULL) AND (state_id = 1));
|
||||
|
||||
CREATE INDEX index_20353089e0 ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_20 USING btree (stage_event_hash_id, group_id, end_event_timestamp, issue_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
|
@ -28996,6 +29004,10 @@ CREATE INDEX index_23783dc748 ON gitlab_partitions_static.analytics_cycle_analyt
|
|||
|
||||
CREATE INDEX index_241e9a574c ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_08 USING btree (stage_event_hash_id, project_id, start_event_timestamp, issue_id) WHERE ((end_event_timestamp IS NULL) AND (state_id = 1));
|
||||
|
||||
CREATE UNIQUE INDEX index_2439930f8c ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_01 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE UNIQUE INDEX index_2442d1fbd9 ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_05 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_24ac321751 ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_02 USING btree (stage_event_hash_id, project_id, start_event_timestamp, merge_request_id) WHERE ((end_event_timestamp IS NULL) AND (state_id = 1));
|
||||
|
||||
CREATE INDEX index_25e2aaee9b ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_12 USING btree (stage_event_hash_id, group_id, end_event_timestamp, issue_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
|
@ -29004,6 +29016,8 @@ CREATE INDEX index_2653e7eeb8 ON gitlab_partitions_static.analytics_cycle_analyt
|
|||
|
||||
CREATE INDEX index_2745f5a388 ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_31 USING btree (stage_event_hash_id, group_id, end_event_timestamp, merge_request_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
||||
CREATE UNIQUE INDEX index_27739b516b ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_09 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_27759556bc ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_01 USING btree (stage_event_hash_id, project_id, end_event_timestamp, issue_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
||||
CREATE INDEX index_27d7ad78d8 ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_31 USING btree (stage_event_hash_id, group_id, start_event_timestamp, issue_id) WHERE ((end_event_timestamp IS NULL) AND (state_id = 1));
|
||||
|
|
@ -29080,6 +29094,8 @@ CREATE INDEX index_435802dd01 ON gitlab_partitions_static.analytics_cycle_analyt
|
|||
|
||||
CREATE INDEX index_436fa9ad5f ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_13 USING btree (stage_event_hash_id, project_id, end_event_timestamp, issue_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
||||
CREATE UNIQUE INDEX index_43aff761b5 ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_00 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_453a659cb6 ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_16 USING btree (stage_event_hash_id, project_id, start_event_timestamp, merge_request_id) WHERE ((end_event_timestamp IS NULL) AND (state_id = 1));
|
||||
|
||||
CREATE INDEX index_46b989b294 ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_02 USING btree (stage_event_hash_id, group_id, start_event_timestamp, issue_id) WHERE ((end_event_timestamp IS NULL) AND (state_id = 1));
|
||||
|
|
@ -29116,6 +29132,8 @@ CREATE INDEX index_4e6ce1c371 ON gitlab_partitions_static.analytics_cycle_analyt
|
|||
|
||||
CREATE INDEX index_4ea50d3a5b ON gitlab_partitions_static.issue_search_data_24 USING btree (namespace_id);
|
||||
|
||||
CREATE UNIQUE INDEX index_4efb1529af ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_10 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_4f2eb7a06b ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_08 USING btree (stage_event_hash_id, group_id, start_event_timestamp, merge_request_id) WHERE ((end_event_timestamp IS NULL) AND (state_id = 1));
|
||||
|
||||
CREATE INDEX index_4f6fc34e57 ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_31 USING btree (stage_event_hash_id, project_id, end_event_timestamp, issue_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
|
@ -29216,6 +29234,8 @@ CREATE INDEX index_6bf2b9282c ON gitlab_partitions_static.issue_search_data_22 U
|
|||
|
||||
CREATE INDEX index_6cfb391b86 ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_21 USING btree (stage_event_hash_id, project_id, end_event_timestamp, merge_request_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
||||
CREATE UNIQUE INDEX index_6daa12da84 ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_03 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_6e560c1a4d ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_23 USING btree (stage_event_hash_id, group_id, end_event_timestamp, merge_request_id);
|
||||
|
||||
CREATE INDEX index_6e64aa1646 ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_18 USING btree (stage_event_hash_id, group_id, end_event_timestamp, issue_id);
|
||||
|
|
@ -29324,6 +29344,8 @@ CREATE INDEX index_8b1b6b03b4 ON gitlab_partitions_static.analytics_cycle_analyt
|
|||
|
||||
CREATE INDEX index_8b9f9a19a4 ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_18 USING btree (stage_event_hash_id, group_id, end_event_timestamp, merge_request_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
||||
CREATE UNIQUE INDEX index_8c8835ac5e ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_12 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_8fb48e72ce ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_26 USING btree (stage_event_hash_id, group_id, end_event_timestamp, issue_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
||||
CREATE INDEX index_907e12b7ba ON gitlab_partitions_static.issue_search_data_54 USING btree (namespace_id);
|
||||
|
|
@ -29372,6 +29394,8 @@ CREATE INDEX index_a46b7b7f26 ON gitlab_partitions_static.analytics_cycle_analyt
|
|||
|
||||
CREATE INDEX index_a4f5106804 ON gitlab_partitions_static.issue_search_data_11 USING btree (namespace_id);
|
||||
|
||||
CREATE UNIQUE INDEX index_a5d8ab0218 ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_06 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_a6999c65c9 ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_09 USING btree (stage_event_hash_id, group_id, start_event_timestamp, issue_id) WHERE ((end_event_timestamp IS NULL) AND (state_id = 1));
|
||||
|
||||
CREATE INDEX index_a6c68d16b2 ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_14 USING btree (stage_event_hash_id, group_id, end_event_timestamp, merge_request_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
|
@ -29438,6 +29462,8 @@ CREATE INDEX index_b7f21460bb ON gitlab_partitions_static.analytics_cycle_analyt
|
|||
|
||||
CREATE INDEX index_b83fe1306b ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_03 USING btree (stage_event_hash_id, group_id, end_event_timestamp, merge_request_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
||||
CREATE UNIQUE INDEX index_bb41d5837a ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_02 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_bb6defaa27 ON gitlab_partitions_static.issue_search_data_34 USING btree (namespace_id);
|
||||
|
||||
CREATE INDEX index_bc189e47ab ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_11 USING btree (stage_event_hash_id, group_id, end_event_timestamp, issue_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
|
@ -29490,6 +29516,8 @@ CREATE INDEX index_c6ea8a0e26 ON gitlab_partitions_static.analytics_cycle_analyt
|
|||
|
||||
CREATE INDEX index_c7ac8595d3 ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_00 USING btree (stage_event_hash_id, group_id, end_event_timestamp, issue_id);
|
||||
|
||||
CREATE UNIQUE INDEX index_c7fa6f402d ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_04 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_c8bbf2b334 ON gitlab_partitions_static.issue_search_data_26 USING btree (namespace_id);
|
||||
|
||||
CREATE INDEX index_c8c4219c0a ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_26 USING btree (stage_event_hash_id, group_id, start_event_timestamp, issue_id) WHERE ((end_event_timestamp IS NULL) AND (state_id = 1));
|
||||
|
|
@ -29498,6 +29526,8 @@ CREATE INDEX index_c971e6c5ce ON gitlab_partitions_static.analytics_cycle_analyt
|
|||
|
||||
CREATE INDEX index_c9b14a3d9f ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_08 USING btree (stage_event_hash_id, project_id, end_event_timestamp, issue_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
||||
CREATE UNIQUE INDEX index_cb0e4510aa ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_07 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_cb222425ed ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_29 USING btree (stage_event_hash_id, group_id, end_event_timestamp, issue_id);
|
||||
|
||||
CREATE INDEX index_cbb61ea269 ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_12 USING btree (stage_event_hash_id, project_id, start_event_timestamp, issue_id) WHERE ((end_event_timestamp IS NULL) AND (state_id = 1));
|
||||
|
|
@ -29554,6 +29584,8 @@ CREATE INDEX index_db6477916f ON gitlab_partitions_static.issue_search_data_28 U
|
|||
|
||||
CREATE INDEX index_dc571ba649 ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_01 USING btree (stage_event_hash_id, group_id, end_event_timestamp, issue_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
||||
CREATE UNIQUE INDEX index_dc7ca9eb1d ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_13 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_de0334da63 ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_12 USING btree (stage_event_hash_id, group_id, end_event_timestamp, issue_id);
|
||||
|
||||
CREATE INDEX index_df62a8c50e ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_07 USING btree (stage_event_hash_id, project_id, start_event_timestamp, issue_id) WHERE ((end_event_timestamp IS NULL) AND (state_id = 1));
|
||||
|
|
@ -29624,6 +29656,8 @@ CREATE INDEX index_f415dc2abd ON gitlab_partitions_static.issue_search_data_18 U
|
|||
|
||||
CREATE INDEX index_f47327ec1f ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_27 USING btree (stage_event_hash_id, project_id, end_event_timestamp, merge_request_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
||||
CREATE UNIQUE INDEX index_f586c952e6 ON gitlab_partitions_static.virtual_registries_packages_maven_cache_entries_14 USING btree (relative_path, object_storage_key);
|
||||
|
||||
CREATE INDEX index_f5f0e8eefd ON gitlab_partitions_static.issue_search_data_37 USING btree (namespace_id);
|
||||
|
||||
CREATE INDEX index_f6b0d458a3 ON gitlab_partitions_static.analytics_cycle_analytics_issue_stage_events_15 USING btree (stage_event_hash_id, project_id, end_event_timestamp, issue_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
|
||||
|
|
@ -35702,10 +35736,14 @@ ALTER INDEX analytics_cycle_analytics_merge_request_stage_events_pkey ATTACH PAR
|
|||
|
||||
ALTER INDEX index_issue_stage_events_project_duration ATTACH PARTITION gitlab_partitions_static.index_000925dbd7;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_0051c4d20c;
|
||||
|
||||
ALTER INDEX index_merge_request_stage_events_project_duration ATTACH PARTITION gitlab_partitions_static.index_006f943df6;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_for_consistency_check ATTACH PARTITION gitlab_partitions_static.index_009e6c1133;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_0264b93cfb;
|
||||
|
||||
ALTER INDEX index_merge_request_stage_events_project_duration ATTACH PARTITION gitlab_partitions_static.index_02749b504c;
|
||||
|
||||
ALTER INDEX index_merge_request_stage_events_group_duration ATTACH PARTITION gitlab_partitions_static.index_0287f5ba09;
|
||||
|
|
@ -35774,6 +35812,8 @@ ALTER INDEX index_issue_stage_events_project_in_progress_duration ATTACH PARTITI
|
|||
|
||||
ALTER INDEX index_issue_stage_events_group_in_progress_duration ATTACH PARTITION gitlab_partitions_static.index_1f8af04ed1;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_1fa613e160;
|
||||
|
||||
ALTER INDEX index_merge_request_stage_events_group_in_progress_duration ATTACH PARTITION gitlab_partitions_static.index_201c5ddbe9;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_group_duration ATTACH PARTITION gitlab_partitions_static.index_20353089e0;
|
||||
|
|
@ -35806,6 +35846,10 @@ ALTER INDEX index_merge_request_stage_events_group_in_progress_duration ATTACH P
|
|||
|
||||
ALTER INDEX index_issue_stage_events_project_in_progress_duration ATTACH PARTITION gitlab_partitions_static.index_241e9a574c;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_2439930f8c;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_2442d1fbd9;
|
||||
|
||||
ALTER INDEX index_merge_request_stage_events_project_in_progress_duration ATTACH PARTITION gitlab_partitions_static.index_24ac321751;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_group_duration ATTACH PARTITION gitlab_partitions_static.index_25e2aaee9b;
|
||||
|
|
@ -35814,6 +35858,8 @@ ALTER INDEX index_merge_request_stage_events_project_duration ATTACH PARTITION g
|
|||
|
||||
ALTER INDEX index_merge_request_stage_events_group_duration ATTACH PARTITION gitlab_partitions_static.index_2745f5a388;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_27739b516b;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_project_duration ATTACH PARTITION gitlab_partitions_static.index_27759556bc;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_group_in_progress_duration ATTACH PARTITION gitlab_partitions_static.index_27d7ad78d8;
|
||||
|
|
@ -35890,6 +35936,8 @@ ALTER INDEX index_issue_stage_events_group_in_progress_duration ATTACH PARTITION
|
|||
|
||||
ALTER INDEX index_issue_stage_events_project_duration ATTACH PARTITION gitlab_partitions_static.index_436fa9ad5f;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_43aff761b5;
|
||||
|
||||
ALTER INDEX index_merge_request_stage_events_project_in_progress_duration ATTACH PARTITION gitlab_partitions_static.index_453a659cb6;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_group_in_progress_duration ATTACH PARTITION gitlab_partitions_static.index_46b989b294;
|
||||
|
|
@ -35926,6 +35974,8 @@ ALTER INDEX index_merge_request_stage_events_project_in_progress_duration ATTACH
|
|||
|
||||
ALTER INDEX index_issue_search_data_on_namespace_id ATTACH PARTITION gitlab_partitions_static.index_4ea50d3a5b;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_4efb1529af;
|
||||
|
||||
ALTER INDEX index_merge_request_stage_events_group_in_progress_duration ATTACH PARTITION gitlab_partitions_static.index_4f2eb7a06b;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_project_duration ATTACH PARTITION gitlab_partitions_static.index_4f6fc34e57;
|
||||
|
|
@ -36026,6 +36076,8 @@ ALTER INDEX index_issue_search_data_on_namespace_id ATTACH PARTITION gitlab_part
|
|||
|
||||
ALTER INDEX index_merge_request_stage_events_project_duration ATTACH PARTITION gitlab_partitions_static.index_6cfb391b86;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_6daa12da84;
|
||||
|
||||
ALTER INDEX index_mr_stage_events_for_consistency_check ATTACH PARTITION gitlab_partitions_static.index_6e560c1a4d;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_for_consistency_check ATTACH PARTITION gitlab_partitions_static.index_6e64aa1646;
|
||||
|
|
@ -36134,6 +36186,8 @@ ALTER INDEX index_mr_stage_events_for_consistency_check ATTACH PARTITION gitlab_
|
|||
|
||||
ALTER INDEX index_merge_request_stage_events_group_duration ATTACH PARTITION gitlab_partitions_static.index_8b9f9a19a4;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_8c8835ac5e;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_group_duration ATTACH PARTITION gitlab_partitions_static.index_8fb48e72ce;
|
||||
|
||||
ALTER INDEX index_issue_search_data_on_namespace_id ATTACH PARTITION gitlab_partitions_static.index_907e12b7ba;
|
||||
|
|
@ -36182,6 +36236,8 @@ ALTER INDEX index_mr_stage_events_for_consistency_check ATTACH PARTITION gitlab_
|
|||
|
||||
ALTER INDEX index_issue_search_data_on_namespace_id ATTACH PARTITION gitlab_partitions_static.index_a4f5106804;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_a5d8ab0218;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_group_in_progress_duration ATTACH PARTITION gitlab_partitions_static.index_a6999c65c9;
|
||||
|
||||
ALTER INDEX index_merge_request_stage_events_group_duration ATTACH PARTITION gitlab_partitions_static.index_a6c68d16b2;
|
||||
|
|
@ -36248,6 +36304,8 @@ ALTER INDEX index_issue_stage_events_group_in_progress_duration ATTACH PARTITION
|
|||
|
||||
ALTER INDEX index_merge_request_stage_events_group_duration ATTACH PARTITION gitlab_partitions_static.index_b83fe1306b;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_bb41d5837a;
|
||||
|
||||
ALTER INDEX index_issue_search_data_on_namespace_id ATTACH PARTITION gitlab_partitions_static.index_bb6defaa27;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_group_duration ATTACH PARTITION gitlab_partitions_static.index_bc189e47ab;
|
||||
|
|
@ -36300,6 +36358,8 @@ ALTER INDEX index_issue_stage_events_group_in_progress_duration ATTACH PARTITION
|
|||
|
||||
ALTER INDEX index_issue_stage_events_for_consistency_check ATTACH PARTITION gitlab_partitions_static.index_c7ac8595d3;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_c7fa6f402d;
|
||||
|
||||
ALTER INDEX index_issue_search_data_on_namespace_id ATTACH PARTITION gitlab_partitions_static.index_c8bbf2b334;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_group_in_progress_duration ATTACH PARTITION gitlab_partitions_static.index_c8c4219c0a;
|
||||
|
|
@ -36308,6 +36368,8 @@ ALTER INDEX index_issue_stage_events_group_duration ATTACH PARTITION gitlab_part
|
|||
|
||||
ALTER INDEX index_issue_stage_events_project_duration ATTACH PARTITION gitlab_partitions_static.index_c9b14a3d9f;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_cb0e4510aa;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_for_consistency_check ATTACH PARTITION gitlab_partitions_static.index_cb222425ed;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_project_in_progress_duration ATTACH PARTITION gitlab_partitions_static.index_cbb61ea269;
|
||||
|
|
@ -36364,6 +36426,8 @@ ALTER INDEX index_issue_search_data_on_namespace_id ATTACH PARTITION gitlab_part
|
|||
|
||||
ALTER INDEX index_issue_stage_events_group_duration ATTACH PARTITION gitlab_partitions_static.index_dc571ba649;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_dc7ca9eb1d;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_for_consistency_check ATTACH PARTITION gitlab_partitions_static.index_de0334da63;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_project_in_progress_duration ATTACH PARTITION gitlab_partitions_static.index_df62a8c50e;
|
||||
|
|
@ -36434,6 +36498,8 @@ ALTER INDEX index_issue_search_data_on_namespace_id ATTACH PARTITION gitlab_part
|
|||
|
||||
ALTER INDEX index_merge_request_stage_events_project_duration ATTACH PARTITION gitlab_partitions_static.index_f47327ec1f;
|
||||
|
||||
ALTER INDEX idx_vregs_pkgs_mvn_cache_entries_on_uniq_object_storage_key ATTACH PARTITION gitlab_partitions_static.index_f586c952e6;
|
||||
|
||||
ALTER INDEX index_issue_search_data_on_namespace_id ATTACH PARTITION gitlab_partitions_static.index_f5f0e8eefd;
|
||||
|
||||
ALTER INDEX index_issue_stage_events_project_duration ATTACH PARTITION gitlab_partitions_static.index_f6b0d458a3;
|
||||
|
|
|
|||
|
|
@ -108,6 +108,7 @@ Example response:
|
|||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/170421) in GitLab 17.7 [with a flag](../../administration/feature_flags.md) named `api_admin_token_revoke`. Disabled by default.
|
||||
> - [Cluster agent tokens added](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/178211) in GitLab 17.9.
|
||||
> - [Runner authentication tokens added](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/179066) in GitLab 17.9.
|
||||
> - [OAuth application secrets added](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/179035) in GitLab 17.9.
|
||||
|
||||
FLAG:
|
||||
The availability of this feature is controlled by a feature flag.
|
||||
|
|
@ -116,15 +117,16 @@ This feature is available for testing, but not ready for production use.
|
|||
|
||||
Revokes or resets a given token based on the token type. This endpoint supports the following token types:
|
||||
|
||||
| Token type | Supported action |
|
||||
|---------------------------------------------------------------------------------------------|--------------------|
|
||||
| [Personal access tokens](../../user/profile/personal_access_tokens.md) | Revoke |
|
||||
| Token type | Supported action |
|
||||
|----------------------------------------------------------------------------------------------|--------------------|
|
||||
| [Personal access tokens](../../user/profile/personal_access_tokens.md) | Revoke |
|
||||
| [Project access tokens](../../security/tokens/_index.md#project-access-tokens) | Revoke |
|
||||
| [Group access tokens](../../security/tokens/_index.md#group-access-tokens) | Revoke |
|
||||
| [Deploy tokens](../../user/project/deploy_tokens/index.md) | Revoke |
|
||||
| [Deploy tokens](../../user/project/deploy_tokens/index.md) | Revoke |
|
||||
| [Cluster agent tokens](../../security/tokens/_index.md#gitlab-cluster-agent-tokens) | Revoke |
|
||||
| [Feed tokens](../../security/tokens/_index.md#feed-token) | Reset |
|
||||
| [Runner authentication tokens](../../security/tokens/_index.md#runner-authentication-tokens) | Reset |
|
||||
| [OAuth application secrets](../../integration/oauth_provider.md) | Reset |
|
||||
|
||||
```plaintext
|
||||
DELETE /api/v4/admin/token
|
||||
|
|
|
|||
|
|
@ -12,13 +12,21 @@ module API
|
|||
JOB_TOKEN_PARAM = :token
|
||||
LEGACY_SYSTEM_XID = '<legacy>'
|
||||
|
||||
# TODO: Remove once https://gitlab.com/gitlab-org/gitlab/-/issues/504277 is closed.
|
||||
# TODO: Remove once https://gitlab.com/gitlab-org/gitlab/-/issues/516929 is closed.
|
||||
UnknownRunnerOwnerError = Class.new(StandardError)
|
||||
|
||||
def authenticate_runner!(ensure_runner_manager: true, update_contacted_at: true)
|
||||
track_runner_authentication
|
||||
forbidden! unless current_runner
|
||||
|
||||
# TODO: Remove in https://gitlab.com/gitlab-org/gitlab/-/issues/504963 (when ci_runners is swapped)
|
||||
# This is because the new table will have check constraints for these scenarios, and therefore
|
||||
# any orphaned runners will be missing
|
||||
if Feature.enabled?(:reject_orphaned_runners, Feature.current_request) &&
|
||||
current_runner.sharding_key_id.nil? && !current_runner.instance_type?
|
||||
unprocessable_entity!('Runner is orphaned')
|
||||
end
|
||||
|
||||
current_runner.heartbeat if update_contacted_at
|
||||
return unless ensure_runner_manager
|
||||
|
||||
|
|
|
|||
|
|
@ -77,7 +77,9 @@ module API
|
|||
has_length: true,
|
||||
maximum_size: MAX_FILE_SIZE,
|
||||
use_final_store_path: true,
|
||||
final_store_path_root_id: registry.id
|
||||
final_store_path_config: {
|
||||
override_path: upstream.object_storage_key_for(registry_id: registry.id)
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -60,7 +60,7 @@ module API
|
|||
|
||||
params = { has_length: has_length, use_final_store_path: use_final_store_path }
|
||||
params[:maximum_size] = maximum_size unless has_length
|
||||
params[:final_store_path_root_id] = subject.id if use_final_store_path
|
||||
params[:final_store_path_config] = { root_hash: subject.id } if use_final_store_path
|
||||
::Packages::PackageFileUploader.workhorse_authorize(**params)
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -22,10 +22,13 @@ module Authn
|
|||
::API::Entities::Application
|
||||
end
|
||||
|
||||
def revoke!(_current_user)
|
||||
def revoke!(current_user)
|
||||
raise ::Authn::AgnosticTokenIdentifier::NotFoundError, 'Not Found' if revocable.blank?
|
||||
|
||||
raise ::Authn::AgnosticTokenIdentifier::UnsupportedTokenError, 'Revocation not supported for this token type'
|
||||
Authz::Applications::ResetSecretService.new(
|
||||
application: revocable,
|
||||
current_user: current_user
|
||||
).execute
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -797,11 +797,37 @@ RSpec.describe GraphqlController, feature_category: :integrations do
|
|||
end
|
||||
end
|
||||
|
||||
it 'appends metadata for logging' do
|
||||
post :execute, params: { _json: graphql_queries }
|
||||
context 'when source is not glql' do
|
||||
it 'appends metadata for logging' do
|
||||
post :execute, params: { _json: graphql_queries }
|
||||
|
||||
expect(controller).to have_received(:append_info_to_payload)
|
||||
expect(log_payload.dig(:metadata, :graphql)).to match_array(expected_logs)
|
||||
expect(controller).to have_received(:append_info_to_payload)
|
||||
expect(log_payload.dig(:metadata, :graphql)).to match_array(expected_logs)
|
||||
expect(log_payload.dig(:metadata, :referer)).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when source is glql' do
|
||||
let(:query_1) { { query: graphql_query_for('project', { 'fullPath' => 'foo' }, %w[id name], 'GLQL') } }
|
||||
let(:query_2) { { query: graphql_query_for('project', { 'fullPath' => 'bar' }, %w[id], 'GLQL') } }
|
||||
|
||||
let(:expected_glql_logs) do
|
||||
expected_logs.map do |q|
|
||||
q.merge(operation_name: "GLQL")
|
||||
end
|
||||
end
|
||||
|
||||
before do
|
||||
request.headers['Referer'] = 'path'
|
||||
end
|
||||
|
||||
it 'appends glql-related metadata for logging' do
|
||||
post :execute, params: { _json: graphql_queries }
|
||||
|
||||
expect(controller).to have_received(:append_info_to_payload)
|
||||
expect(log_payload.dig(:metadata, :graphql)).to match_array(expected_glql_logs)
|
||||
expect(log_payload.dig(:metadata, :referer)).to eq('path')
|
||||
end
|
||||
end
|
||||
|
||||
it 'appends the exception in case of errors' do
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ FactoryBot.define do
|
|||
size { 1.kilobyte }
|
||||
upstream_etag { OpenSSL::Digest.hexdigest('SHA256', 'test') }
|
||||
content_type { 'text/plain' }
|
||||
file_final_path { '5f/9c/5f9c/@final/c7/4c/240c' }
|
||||
file_md5 { 'd8e8fca2dc0f896fd7cb4cb0031ba249' }
|
||||
file_sha1 { '4e1243bd22c66e76c2ba9eddc1f91394e57f9f83' }
|
||||
status { :default }
|
||||
|
|
|
|||
|
|
@ -41,6 +41,16 @@ describe('PerformanceGraph', () => {
|
|||
expect(findLineChart().props('data')[1].data.length).toBe(5);
|
||||
expect(findLineChart().props('data')[2].data.length).toBe(1);
|
||||
});
|
||||
|
||||
it('sorts the data by created_at in ascending order', () => {
|
||||
createWrapper();
|
||||
|
||||
const data = findLineChart()
|
||||
.props('data')[0]
|
||||
.data.map(({ value }) => value[1]);
|
||||
|
||||
expect(data).toEqual([0.3, 0.4, 0.6, 0.5]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('empty state', () => {
|
||||
|
|
|
|||
|
|
@ -62,7 +62,7 @@ export const MOCK_CANDIDATES = [
|
|||
name: 'a job',
|
||||
},
|
||||
name: 'aCandidate',
|
||||
created_at: '2023-01-05T14:07:02.975Z',
|
||||
created_at: '2023-01-05T14:07:01.975Z',
|
||||
user: { username: 'root', path: '/root' },
|
||||
},
|
||||
{
|
||||
|
|
@ -74,26 +74,26 @@ export const MOCK_CANDIDATES = [
|
|||
user: null,
|
||||
},
|
||||
{
|
||||
auc: 0.3,
|
||||
auc: 0.4,
|
||||
l1_ratio: 0.5,
|
||||
details: 'link/to/candidate/3',
|
||||
created_at: '2023-01-05T14:07:02.975Z',
|
||||
created_at: '2023-01-05T14:07:03.975Z',
|
||||
name: null,
|
||||
user: null,
|
||||
},
|
||||
{
|
||||
auc: 0.3,
|
||||
auc: 0.6,
|
||||
l1_ratio: 0.5,
|
||||
details: 'link/to/candidate/4',
|
||||
created_at: '2023-01-05T14:07:02.975Z',
|
||||
created_at: '2023-01-05T14:07:04.975Z',
|
||||
name: null,
|
||||
user: null,
|
||||
},
|
||||
{
|
||||
auc: 0.3,
|
||||
auc: 0.5,
|
||||
l1_ratio: 0.5,
|
||||
details: 'link/to/candidate/5',
|
||||
created_at: '2023-01-05T14:07:02.975Z',
|
||||
created_at: '2023-01-05T14:07:05.975Z',
|
||||
name: null,
|
||||
user: null,
|
||||
},
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ const mockTemplatesList = [
|
|||
projectId: 3,
|
||||
},
|
||||
{
|
||||
name: 'template 4',
|
||||
name: 'Bug',
|
||||
__typename: 'WorkItemDescriptionTemplate',
|
||||
category: 'GROUP C',
|
||||
projectId: 4,
|
||||
|
|
@ -162,9 +162,9 @@ describe('WorkItemDescriptionTemplateListbox', () => {
|
|||
expect(text).toContain('GROUP C');
|
||||
});
|
||||
|
||||
it('allows searching to narrow down results', async () => {
|
||||
// only matches 'template 4'
|
||||
findListbox().vm.$emit('search', '4');
|
||||
it('allows case insensitive searching to narrow down results', async () => {
|
||||
// only matches 'Bug'
|
||||
findListbox().vm.$emit('search', 'bug');
|
||||
await nextTick();
|
||||
expect(findListbox().props('items')).toHaveLength(1);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1821,12 +1821,12 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
|
|||
end
|
||||
|
||||
it 'returns visibility level content_tag' do
|
||||
expected_result = "<span class=\"has-tooltip\" data-container=\"body\" data-placement=\"top\" title=\"#{description}\">#{icon}</span>"
|
||||
expected_result = "<button class=\"has-tooltip gl-border-0 gl-bg-transparent gl-p-0 gl-leading-0 gl-text-inherit\" data-container=\"body\" data-placement=\"top\" title=\"#{description}\" type=\"button\" aria-label=\"#{description}\">#{icon}</button>"
|
||||
expect(helper.visibility_level_content(project)).to eq(expected_result)
|
||||
end
|
||||
|
||||
it 'returns visibility level content_tag with extra CSS classes' do
|
||||
expected_result = "<span class=\"has-tooltip extra-class\" data-container=\"body\" data-placement=\"top\" title=\"#{description}\">#{icon}</span>"
|
||||
expected_result = "<button class=\"has-tooltip gl-border-0 gl-bg-transparent gl-p-0 gl-leading-0 gl-text-inherit extra-class\" data-container=\"body\" data-placement=\"top\" title=\"#{description}\" type=\"button\" aria-label=\"#{description}\">#{icon}</button>"
|
||||
|
||||
expect(helper).to receive(:visibility_level_icon)
|
||||
.with(anything, options: { class: 'extra-icon-class' })
|
||||
|
|
|
|||
|
|
@ -188,6 +188,20 @@ RSpec.describe API::Ci::Helpers::Runner, feature_category: :runner do
|
|||
|
||||
expect { current_runner_manager }.to raise_error described_class::UnknownRunnerOwnerError
|
||||
end
|
||||
|
||||
# TODO: Remove once https://gitlab.com/gitlab-org/gitlab/-/issues/516929 is closed.
|
||||
context 'with reject_orphaned_runners FF disabled' do
|
||||
before do
|
||||
stub_feature_flags(reject_orphaned_runners: false)
|
||||
end
|
||||
|
||||
it 'fails to create a new runner manager', :aggregate_failures do
|
||||
allow(helper).to receive(:params).and_return(token: runner.token, system_id: 'new_system_id')
|
||||
expect(helper.current_runner).to eq(runner)
|
||||
|
||||
expect { current_runner_manager }.to raise_error described_class::UnknownRunnerOwnerError
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -161,7 +161,7 @@ RSpec.describe API::Helpers::PackagesHelpers, feature_category: :package_registr
|
|||
let(:params) { super().merge(use_final_store_path: true) }
|
||||
|
||||
it_behaves_like 'workhorse authorize' do
|
||||
let(:workhorse_authorize_params) { { has_length: true, use_final_store_path: true, final_store_path_root_id: project.id } }
|
||||
let(:workhorse_authorize_params) { { has_length: true, use_final_store_path: true, final_store_path_config: { root_hash: project.id } } }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ require 'spec_helper'
|
|||
|
||||
RSpec.describe Authn::Tokens::OauthApplicationSecret, feature_category: :system_access do
|
||||
let_it_be(:user) { create(:user) }
|
||||
let_it_be(:admin) { create(:admin) }
|
||||
|
||||
let(:oauth_application_secret) { create(:oauth_application) }
|
||||
|
||||
|
|
@ -15,12 +16,31 @@ RSpec.describe Authn::Tokens::OauthApplicationSecret, feature_category: :system_
|
|||
|
||||
it_behaves_like 'finding the valid revocable'
|
||||
|
||||
describe '#revoke!' do
|
||||
it 'does not support revocation yet' do
|
||||
expect do
|
||||
token.revoke!(user)
|
||||
end.to raise_error(::Authn::AgnosticTokenIdentifier::UnsupportedTokenError,
|
||||
'Revocation not supported for this token type')
|
||||
describe '#revoke!', :enable_admin_mode do
|
||||
subject(:revoke) { described_class.new(plaintext, :api_admin_token).revoke!(current_user) }
|
||||
|
||||
context 'as admin' do
|
||||
let(:current_user) { admin }
|
||||
|
||||
it 'successfully revokes the token' do
|
||||
expect { revoke }.to change { oauth_application_secret.reload.secret }
|
||||
end
|
||||
|
||||
it 'does support revocation' do
|
||||
expect { revoke }.not_to raise_error
|
||||
end
|
||||
end
|
||||
|
||||
context 'as a user' do
|
||||
let(:current_user) { user }
|
||||
|
||||
it 'does not reset the token' do
|
||||
expect { revoke }.not_to change { oauth_application_secret.reload.secret }
|
||||
end
|
||||
|
||||
it 'returns an error' do
|
||||
expect(revoke.error?).to be_truthy
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ RSpec.describe VirtualRegistries::Packages::Maven::Cache::Entry, type: :model, f
|
|||
it { is_expected.to validate_length_of(attr).is_at_most(255) }
|
||||
end
|
||||
|
||||
%i[relative_path object_storage_key file_final_path].each do |attr|
|
||||
%i[relative_path object_storage_key].each do |attr|
|
||||
it { is_expected.to validate_length_of(attr).is_at_most(1024) }
|
||||
end
|
||||
|
||||
|
|
@ -35,6 +35,7 @@ RSpec.describe VirtualRegistries::Packages::Maven::Cache::Entry, type: :model, f
|
|||
end
|
||||
|
||||
it { is_expected.to validate_uniqueness_of(:relative_path).scoped_to(:upstream_id, :status) }
|
||||
it { is_expected.to validate_uniqueness_of(:object_storage_key).scoped_to(:relative_path) }
|
||||
|
||||
context 'with a similar cached response in a different status' do
|
||||
let!(:cache_entry_in_error) do
|
||||
|
|
@ -103,6 +104,7 @@ RSpec.describe VirtualRegistries::Packages::Maven::Cache::Entry, type: :model, f
|
|||
it 'can not be null' do
|
||||
cache_entry.object_storage_key = nil
|
||||
cache_entry.relative_path = nil
|
||||
cache_entry.upstream = nil
|
||||
|
||||
expect(cache_entry).to be_invalid
|
||||
expect(cache_entry.errors.full_messages).to include("Object storage key can't be blank")
|
||||
|
|
|
|||
|
|
@ -247,4 +247,23 @@ RSpec.describe VirtualRegistries::Packages::Maven::Upstream, type: :model, featu
|
|||
|
||||
it { is_expected.to contain_exactly(default_cache_entry) }
|
||||
end
|
||||
|
||||
describe '#object_storage_key_for' do
|
||||
let_it_be(:upstream) { build_stubbed(:virtual_registries_packages_maven_upstream) }
|
||||
|
||||
let(:registry_id) { '555' }
|
||||
|
||||
subject { upstream.object_storage_key_for(registry_id: registry_id) }
|
||||
|
||||
it 'contains the expected terms' do
|
||||
is_expected.to include("virtual_registries/packages/maven/#{registry_id}/upstream/#{upstream.id}/cache/entry")
|
||||
end
|
||||
|
||||
it 'does not return the same value when called twice' do
|
||||
first_value = upstream.object_storage_key_for(registry_id: registry_id)
|
||||
second_value = upstream.object_storage_key_for(registry_id: registry_id)
|
||||
|
||||
expect(first_value).not_to eq(second_value)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -186,6 +186,16 @@ RSpec.describe API::Admin::Token, :aggregate_failures, feature_category: :system
|
|||
end
|
||||
end
|
||||
|
||||
context 'when the token is an oauth application token' do
|
||||
let(:plaintext) { oauth_application.plaintext_secret }
|
||||
|
||||
it 'resets the token' do
|
||||
expect { delete_token }.to change { oauth_application.reload.secret }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:no_content)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the revocation feature is disabled' do
|
||||
before do
|
||||
stub_feature_flags(api_admin_token_revoke: false)
|
||||
|
|
|
|||
|
|
@@ -152,14 +152,24 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
expect(runner_manager.contacted_at).to eq Time.current
end

# TODO: Remove once https://gitlab.com/gitlab-org/gitlab/-/issues/504277 is closed.
context 'when runner is not yet synced to partitioned table', :aggregate_failures do
# TODO: Remove in https://gitlab.com/gitlab-org/gitlab/-/issues/504963 (when ci_runners is swapped)
# This is because the new table will have check constraints for these scenarios, and therefore
# any orphaned runners will be missing
context 'when runner is missing sharding_key_id', :aggregate_failures do
let(:connection) { Ci::ApplicationRecord.connection }
let(:non_partitioned_runner) { runner }
let(:params) { { token: 'foo' } }
let(:non_partitioned_runner) do
connection.execute(<<~SQL)
INSERT INTO ci_runners(created_at, runner_type, token, sharding_key_id)
VALUES(NOW(), #{runner_type}, '#{params[:token]}', NULL);
SQL

Ci::Runner.where(runner_type: runner_type).last
end

before do
# Allow creating legacy runners that are not present in the partitioned table (created when FK was not
# present)
# Allow creating orphaned runners that are not present in the partitioned table and
# are not associated with any group or project (created when FK was not present)
connection.transaction do
connection.execute(<<~SQL)
ALTER TABLE ci_runners DISABLE TRIGGER ALL;

@@ -173,25 +183,9 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
end
end

it 'creates respective ci_runner_machines record and syncs runner to partitioned table' do
expect { request }
.to change { runner.runner_managers.reload.count }.from(0).to(1)
.and change { partitioned_runner_exists?(non_partitioned_runner) }.from(false).to(true)

expect(response).to have_gitlab_http_status(:created)
expect(non_partitioned_runner.contacted_at).to be_nil
end

context 'when project runner is missing sharding_key_id' do
let(:params) { { token: 'foo' } }
let(:runner) { Ci::Runner.project_type.last }
let(:non_partitioned_runner) do
connection.execute(<<~SQL)
INSERT INTO ci_runners(created_at, runner_type, token, sharding_key_id) VALUES(NOW(), 3, 'foo', NULL);
SQL

runner
end
context 'when group runner is missing sharding_key_id' do
let(:runner_type) { 2 }
let(:runner) { non_partitioned_runner }

it 'returns unprocessable entity status code', :aggregate_failures do
expect { request }.not_to change { Ci::RunnerManager.count }.from(0)

@@ -200,6 +194,30 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
end
end

context 'when project runner is missing sharding_key_id' do
let(:runner_type) { 3 }
let(:runner) { non_partitioned_runner }

it 'returns unprocessable entity status code', :aggregate_failures do
expect { request }.not_to change { Ci::RunnerManager.count }.from(0)
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(response.body).to eq({ message: 'Runner is orphaned' }.to_json)
end

# TODO: Remove once https://gitlab.com/gitlab-org/gitlab/-/issues/516929 is closed.
context 'with reject_orphaned_runners FF disabled' do
before do
stub_feature_flags(reject_orphaned_runners: false)
end

it 'returns unprocessable entity status code', :aggregate_failures do
expect { request }.not_to change { Ci::RunnerManager.count }.from(0)
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(response.body).to eq({ message: 'Runner is orphaned' }.to_json)
end
end
end

private

def partitioned_runner_exists?(runner)

@@ -72,14 +72,19 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
expect { verify }.not_to change { runner.reload.contacted_at }.from(nil)
end

# TODO: Remove once https://gitlab.com/gitlab-org/gitlab/-/issues/504277 is closed.
context 'when runner is not yet synced to partitioned table' do
# TODO: Remove in https://gitlab.com/gitlab-org/gitlab/-/issues/504963 (when ci_runners is swapped)
# This is because the new table will have check constraints for these scenarios, and therefore
# any orphaned runners will be missing
context 'when runner is missing sharding_key_id', :aggregate_failures do
let(:connection) { Ci::ApplicationRecord.connection }
let(:params) { { token: non_partitioned_runner.token } }
let(:registration_token) { 'glrt-abcdefg123457' }
let(:params) { { token: 'foo' } }
let(:non_partitioned_runner) do
create(:ci_runner, registration_type: registration_type,
token: registration_token, token_expires_at: 3.days.from_now)
connection.execute(<<~SQL)
INSERT INTO ci_runners(created_at, runner_type, token, sharding_key_id)
VALUES(NOW(), #{runner_type}, '#{params[:token]}', NULL);
SQL

Ci::Runner.where(runner_type: runner_type).last
end

before do

@@ -98,11 +103,41 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
end
end

it 'does not update contacted_at but syncs runner to partitioned table', :aggregate_failures do
expect { verify }.to change { partitioned_runner_exists?(non_partitioned_runner) }.from(false).to(true)
context 'when group runner is missing sharding_key_id' do
let(:runner_type) { 2 }
let(:runner) { non_partitioned_runner }

expect(response).to have_gitlab_http_status(:ok)
expect(non_partitioned_runner.contacted_at).to be_nil
it 'returns unprocessable entity status code', :aggregate_failures do
expect { verify }.not_to change { partitioned_runner_exists?(runner) }.from(false)
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(response.body).to eq({ message: 'Runner is orphaned' }.to_json)
end
end

context 'when project runner is missing sharding_key_id' do
let(:runner_type) { 3 }
let(:runner) { non_partitioned_runner }

it 'returns unprocessable entity status code', :aggregate_failures do
expect { verify }.not_to change { partitioned_runner_exists?(runner) }.from(false)
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(response.body).to eq({ message: 'Runner is orphaned' }.to_json)
end

# TODO: Remove once https://gitlab.com/gitlab-org/gitlab/-/issues/516929 is closed.
context 'with reject_orphaned_runners FF disabled' do
before do
stub_feature_flags(reject_orphaned_runners: false)
end

it 'does not update contacted_at and returns error', :aggregate_failures do
expect { verify }.not_to change { partitioned_runner_exists?(non_partitioned_runner) }.from(false)

expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(response.body).to eq({ message: 'Runner is orphaned' }.to_json)
expect(non_partitioned_runner.contacted_at).to be_nil
end
end
end

private

@@ -159,29 +194,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
it 'creates a runner_manager' do
expect { verify }.to change { Ci::RunnerManager.count }.by(1)
end

# TODO: Remove once https://gitlab.com/gitlab-org/gitlab/-/issues/504277 is closed.
context 'when project runner is missing sharding_key_id' do
let(:runner) { Ci::Runner.project_type.last }
let(:params) { { token: 'foo' } }
let(:connection) { Ci::Runner.connection }

before do
connection.execute(<<~SQL)
ALTER TABLE ci_runners DISABLE TRIGGER ALL;

INSERT INTO ci_runners(created_at, runner_type, token, sharding_key_id) VALUES(NOW(), 3, 'foo', NULL);

ALTER TABLE ci_runners ENABLE TRIGGER ALL;
SQL
end

it 'returns unprocessable entity status code', :aggregate_failures do
expect { verify }.not_to change { Ci::RunnerManager.count }.from(0)
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(response.body).to eq({ message: 'Runner is orphaned' }.to_json)
end
end
end
end

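Across the runner registration and verification hunks above, the behaviour under test is the same: a group or project runner whose ci_runners row has a NULL sharding_key_id is treated as orphaned, and the endpoint answers with HTTP 422 and { message: 'Runner is orphaned' }. A hedged sketch of such a check, with a helper name of my own choosing — nothing below is code from this commit:

# Illustrative predicate only -- name and placement are assumptions.
def orphaned_runner?(runner)
  return false if runner.nil? || runner.instance_type? # instance runners have no owning group/project

  runner.sharding_key_id.nil? # owner was never backfilled, so the row is orphaned
end

# Per the specs above, when this is true the endpoints respond with
# status :unprocessable_entity and body { message: 'Runner is orphaned' }.
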
@@ -195,7 +195,7 @@ RSpec.describe API::GenericPackages, feature_category: :package_registry do

it 'sends use_final_store_path with true' do
expect(::Packages::PackageFileUploader).to receive(:workhorse_authorize).with(
hash_including(use_final_store_path: true, final_store_path_root_id: project.id)
hash_including(use_final_store_path: true, final_store_path_config: { root_hash: project.id })
).and_call_original

authorize_upload_file(workhorse_headers.merge(personal_access_token_header))

@@ -89,7 +89,7 @@ RSpec.describe API::Terraform::Modules::V1::ProjectPackages, feature_category: :

it 'sends use_final_store_path with true' do
expect(::Packages::PackageFileUploader).to receive(:workhorse_authorize).with(
hash_including(use_final_store_path: true, final_store_path_root_id: project.id)
hash_including(use_final_store_path: true, final_store_path_config: { root_hash: project.id })
).and_call_original

api_request

@@ -39,7 +39,7 @@ RSpec.describe API::VirtualRegistries::Packages::Maven::Endpoints, :aggregate_fa
expect(::VirtualRegistries::Cache::EntryUploader).to receive(:workhorse_authorize).with(
a_hash_including(
use_final_store_path: true,
final_store_path_root_id: registry.id
final_store_path_config: { override_path: be_instance_of(String) }
)
).and_call_original

@@ -0,0 +1,49 @@
# frozen_string_literal: true

require "spec_helper"

RSpec.describe ::Authz::Applications::ResetSecretService, :aggregate_failures, feature_category: :system_access do
let(:application) { create(:oauth_application) }

describe '#execute' do
subject(:service) { described_class.new(application: application, current_user: current_user) }

context 'as a user' do
let_it_be(:current_user) { create(:user) }

it 'does not change the secret' do
expect { service.execute }.not_to change { application.reload.secret }
end

it 'returns an error response' do
response = service.execute
expect(response.error?).to be_truthy
expect(response.message).to include('cannot reset secret')
end
end

context 'as an admin', :enable_admin_mode do
let_it_be(:current_user) { create(:admin) }

it 'returns a successful ServiceResponse' do
response = service.execute
expect(response).to be_kind_of(ServiceResponse)
expect(response.success?).to be_truthy
end

it 'changes the secret' do
expect { service.execute }.to change { application.reload.secret }
end

context 'when saving fails' do
before do
allow(application).to receive(:save).and_return(false)
end

it 'does not change the secret' do
expect { service.execute }.not_to change { application.reload.secret }
end
end
end
end
end

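The new spec file above describes the service's contract: only admins may reset an OAuth application secret, the result is a ServiceResponse, the error message mentions that the user cannot reset the secret, and a failed save leaves the stored secret untouched. A sketch shaped to that contract, assuming Doorkeeper's renew_secret plus GitLab's ServiceResponse and can_admin_all_resources? helpers; the class body is illustrative, not the implementation shipped in this commit:

# Illustrative sketch only -- shaped to satisfy the spec above.
module Authz
  module Applications
    class ResetSecretService
      def initialize(application:, current_user:)
        @application = application
        @current_user = current_user
      end

      def execute
        unless @current_user&.can_admin_all_resources?
          return ServiceResponse.error(message: 'User cannot reset secret')
        end

        @application.renew_secret # regenerates the OAuth secret in memory only

        if @application.save
          ServiceResponse.success
        else
          ServiceResponse.error(message: @application.errors.full_messages.to_sentence)
        end
      end
    end
  end
end
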
@@ -97,7 +97,7 @@ RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state

allow(JobArtifactUploader)
.to receive(:generate_final_store_path)
.with(root_id: project.id)
.with(root_hash: project.id)
.and_return(final_store_path)
end

@@ -22,6 +22,15 @@ RSpec.describe ProtectedBranches::CreateService, feature_category: :compliance_m
expect(entity.protected_branches.last.merge_access_levels.map(&:access_level)).to match_array([Gitlab::Access::MAINTAINER])
end

it 'publishes ProtectedBranchCreatedEvent event' do
expect { service.execute }.to publish_event(Repositories::ProtectedBranchCreatedEvent)
.with(
protected_branch_id: an_instance_of(Integer),
parent_id: entity.id,
parent_type: entity.is_a?(Project) ? 'project' : 'group'
)
end

it 'refreshes the cache' do
expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
expect(cache_service).to receive(:refresh)

@@ -13,6 +13,11 @@ RSpec.describe ProtectedBranches::DestroyService, feature_category: :compliance_
expect(protected_branch).to be_destroyed
end

it 'publishes ProtectedBranchDestroyedEvent event' do
expect { service.execute(protected_branch) }.to publish_event(Repositories::ProtectedBranchDestroyedEvent)
.with(parent_id: entity.id, parent_type: entity.is_a?(Project) ? 'project' : 'group')
end

it 'refreshes the cache' do
expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
expect(cache_service).to receive(:refresh)

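Both ProtectedBranches hunks above assert on publish_event with a parent_id/parent_type pair derived from whether the entity is a project or a group. A hedged sketch of what such a publish call can look like with Gitlab::EventStore — the event class names and payload keys come from the specs, but the wrapping method is an assumption, not code from this commit:

# Illustrative only -- the surrounding method is hypothetical.
def publish_created_event(protected_branch, entity)
  Gitlab::EventStore.publish(
    Repositories::ProtectedBranchCreatedEvent.new(data: {
      protected_branch_id: protected_branch.id,
      parent_id: entity.id,
      parent_type: entity.is_a?(Project) ? 'project' : 'group'
    })
  )
end
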
@@ -3,7 +3,7 @@
module OrphanFinalArtifactsCleanupHelpers
def create_fog_file(final: true)
path = if final
JobArtifactUploader.generate_final_store_path(root_id: 123)
JobArtifactUploader.generate_final_store_path(root_hash: 123)
else
JobArtifactUploader.generate_remote_id
end

@@ -552,14 +552,15 @@ RSpec.describe ObjectStorage, :clean_gitlab_redis_shared_state, feature_category
let(:has_length) { true }
let(:maximum_size) { nil }
let(:use_final_store_path) { false }
let(:final_store_path_root_id) { nil }
let(:final_store_path_root_hash) { nil }
let(:final_store_path_config) { { root_hash: final_store_path_root_hash } }

subject do
uploader_class.workhorse_authorize(
has_length: has_length,
maximum_size: maximum_size,
use_final_store_path: use_final_store_path,
final_store_path_root_id: final_store_path_root_id
final_store_path_config: final_store_path_config
)
end

@@ -644,24 +645,24 @@ RSpec.describe ObjectStorage, :clean_gitlab_redis_shared_state, feature_category
shared_examples 'handling object storage final upload path' do |multipart|
context 'when use_final_store_path is true' do
let(:use_final_store_path) { true }
let(:final_store_path_root_id) { 12345 }
let(:final_store_path_root_hash) { 12345 }
let(:final_store_path) { File.join('@final', 'myprefix', 'abc', '123', 'somefilename') }
let(:escaped_path) { escape_path(final_store_path) }

context 'and final_store_path_root_id was not given' do
let(:final_store_path_root_id) { nil }
context 'and final_store_path_root_hash was not given' do
let(:final_store_path_root_hash) { nil }

it 'raises an error' do
expect { subject }.to raise_error(ObjectStorage::MissingFinalStorePathRootId)
end
end

context 'and final_store_path_root_id was given' do
context 'and final_store_path_root_hash was given' do
before do
stub_object_storage_multipart_init_with_final_store_path("#{storage_url}#{final_store_path}") if multipart

allow(uploader_class).to receive(:generate_final_store_path)
.with(root_id: final_store_path_root_id)
.with(root_hash: final_store_path_root_hash)
.and_return(final_store_path)
end

@@ -720,6 +721,35 @@ RSpec.describe ObjectStorage, :clean_gitlab_redis_shared_state, feature_category
end
end
end

context 'and override_path was given' do
let(:override_path) { 'test_override_path' }
let(:final_store_path_config) { { override_path: override_path } }

before do
stub_object_storage_multipart_init_with_final_store_path("#{storage_url}#{override_path}") if multipart
end

it 'uses the override instead of generating a path' do
expect(uploader_class).not_to receive(:generate_final_store_path)

expect(subject[:RemoteObject][:ID]).to eq(override_path)
expect(subject[:RemoteObject][:GetURL]).to include(override_path)
expect(subject[:RemoteObject][:StoreURL]).to include(override_path)

if multipart
expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(include(override_path))
expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to include(override_path)
expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to include(override_path)
end

expect(subject[:RemoteObject][:SkipDelete]).to eq(true)

expect(
ObjectStorage::PendingDirectUpload.exists?(uploader_class.storage_location_identifier, override_path)
).to eq(true)
end
end
end

def escape_path(path)

@@ -1334,10 +1364,10 @@ RSpec.describe ObjectStorage, :clean_gitlab_redis_shared_state, feature_category
end

describe '.generate_final_store_path' do
let(:root_id) { 12345 }
let(:expected_root_hashed_path) { Gitlab::HashedPath.new(root_hash: root_id) }
let(:root_hash) { 12345 }
let(:expected_root_hashed_path) { Gitlab::HashedPath.new(root_hash: root_hash) }

subject(:final_path) { uploader_class.generate_final_store_path(root_id: root_id) }
subject(:final_path) { uploader_class.generate_final_store_path(root_hash: root_hash) }

before do
allow(Digest::SHA2).to receive(:hexdigest).and_return('somehash1234')

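Taken together, the ObjectStorage hunks show the shape of the refactor: workhorse_authorize no longer receives final_store_path_root_id but a final_store_path_config hash, which either carries a root_hash (so the uploader derives the final path via generate_final_store_path(root_hash:)) or an override_path that is used verbatim as the remote object ID. Hedged call-site sketches using names from the hunks above — treat the receivers and argument values as examples, not confirmed call sites:

# root_hash: the uploader derives the final path itself
Packages::PackageFileUploader.workhorse_authorize(
  has_length: true,
  maximum_size: nil,
  use_final_store_path: true,
  final_store_path_config: { root_hash: project.id }
)

# override_path: the caller pins the final object key (virtual registry cache entries)
VirtualRegistries::Cache::EntryUploader.workhorse_authorize(
  has_length: true,
  maximum_size: nil,
  use_final_store_path: true,
  final_store_path_config: { override_path: upstream.object_storage_key_for(registry_id: registry.id) }
)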