Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-10-16 12:19:09 +00:00
parent 4565087ff3
commit b837b7fc8d
105 changed files with 999 additions and 160 deletions

View File

@ -985,6 +985,7 @@ lib/gitlab/checks/**
/doc/solutions/integrations/index.md @jfullam @brianwald @Darwinjs
/doc/solutions/integrations/servicenow.md @ashrafkhamis
/doc/subscriptions/ @fneill
/doc/subscriptions/gitlab_com/ @lyspin
/doc/subscriptions/gitlab_dedicated/ @lyspin
/doc/topics/ @msedlakjakubowski
/doc/topics/autodevops/ @phillipwells
@ -1046,6 +1047,7 @@ lib/gitlab/checks/**
/doc/user/get_started/get_started_projects.md @lciutacu
/doc/user/gitlab_duo/ @sselhorn @jglassman1 @fneill
/doc/user/gitlab_duo_chat/ @sselhorn @jglassman1 @fneill
/doc/user/glql/ @msedlakjakubowski
/doc/user/group/access_and_permissions.md @lciutacu
/doc/user/group/clusters/ @phillipwells
/doc/user/group/compliance_frameworks.md @eread
@ -1167,9 +1169,9 @@ lib/gitlab/checks/**
/doc/user/ssh.md @jglassman1
/doc/user/ssh_troubleshooting.md @jglassman1
/doc/user/storage_management_automation.md @fneill
/doc/user/storage_usage_quotas.md @fneill
/doc/user/tasks.md @msedlakjakubowski
/doc/user/todos.md @sselhorn
/doc/user/storage_usage_quotas.md @fneill
/doc/user/workspace/ @ashrafkhamis
# End rake-managed-docs-block

View File

@ -199,3 +199,10 @@ include:
- <<: *not-canonical-project
when: never
- *process-test-results
.rules:report:upload-knapsack-report:
rules:
- <<: *not-canonical-project
when: never
- if: '$KNAPSACK_GENERATE_REPORT == "true"'
when: always

View File

@ -164,6 +164,6 @@ notify-slack:
upload-knapsack-report:
extends:
- .upload-knapsack-report
- .rules:report:process-results
- .rules:report:upload-knapsack-report
variables:
QA_KNAPSACK_REPORT_FILE_PATTERN: $CI_PROJECT_DIR/qa/tmp/knapsack/*/*.json

View File

@ -225,7 +225,7 @@ e2e-test-report:
upload-knapsack-report:
extends:
- .upload-knapsack-report
- .rules:report:process-results
- .rules:report:upload-knapsack-report
variables:
QA_KNAPSACK_REPORT_FILE_PATTERN: $CI_PROJECT_DIR/qa/tmp/knapsack/*/*.json

View File

@ -208,6 +208,7 @@ e2e-test-report:
upload-knapsack-report:
extends:
- .upload-knapsack-report
- .rules:report:upload-knapsack-report
export-test-metrics:
extends:

View File

@ -578,6 +578,7 @@ e2e-test-report:
upload-knapsack-report:
extends:
- .upload-knapsack-report
- .rules:report:upload-knapsack-report
export-test-metrics:
extends:

View File

@ -276,7 +276,6 @@ RSpec/BeforeAllRoleAssignment:
- 'ee/spec/lib/ee/gitlab/ci/pipeline/chain/validate/abilities_spec.rb'
- 'ee/spec/lib/ee/gitlab/git_access_project_spec.rb'
- 'ee/spec/lib/ee/gitlab/import_export/project/tree_saver_spec.rb'
- 'ee/spec/lib/elastic/latest/git_class_proxy_spec.rb'
- 'ee/spec/lib/elastic/latest/user_instance_proxy_spec.rb'
- 'ee/spec/lib/gitlab/code_owners/loader_spec.rb'
- 'ee/spec/lib/gitlab/code_owners/validator_spec.rb'

View File

@ -309,7 +309,6 @@ RSpec/NamedSubject:
- 'ee/spec/lib/elastic/latest/application_instance_proxy_spec.rb'
- 'ee/spec/lib/elastic/latest/epic_class_proxy_spec.rb'
- 'ee/spec/lib/elastic/latest/epic_instance_proxy_spec.rb'
- 'ee/spec/lib/elastic/latest/git_class_proxy_spec.rb'
- 'ee/spec/lib/elastic/latest/git_instance_proxy_spec.rb'
- 'ee/spec/lib/elastic/latest/note_class_proxy_spec.rb'
- 'ee/spec/lib/elastic/latest/routing_spec.rb'

View File

@ -1 +1 @@
2e62fe7f6de8b3959614c0db2d27cbc0e140da99
957c1d4fd2151430c9e889b91f68cf5ea6b536f3

View File

@ -160,7 +160,7 @@ export default Node.create({
return [
createPlugin('@', 'reference', 'user', { limit: 10, filterOnBackend: true }),
createPlugin('#', 'reference', 'issue'),
createPlugin('#', 'reference', 'issue', { filterOnBackend: true }),
createPlugin('$', 'reference', 'snippet'),
createPlugin('~', 'referenceLabel', 'label', { limit: 20 }),
createPlugin('&', 'reference', 'epic'),

View File

@ -38,8 +38,6 @@ export const CONTACT_STATE_ACTIVE = 'active';
export const CONTACTS_ADD_COMMAND = '/add_contacts';
export const CONTACTS_REMOVE_COMMAND = '/remove_contacts';
const useIssueBackendFiltering = window.gon.features?.issueAutocompleteBackendFiltering;
const busyBadge = memoize(
() =>
renderVueComponentForLegacyJS(
@ -500,7 +498,7 @@ class GfmAutoComplete {
alias: ISSUES_ALIAS,
searchKey: 'search',
maxLen: 100,
delay: useIssueBackendFiltering ? DEFAULT_DEBOUNCE_AND_THROTTLE_MS : null,
delay: DEFAULT_DEBOUNCE_AND_THROTTLE_MS,
displayTpl(value) {
let tmpl = GfmAutoComplete.Loading.template;
if (value.title != null) {
@ -1092,11 +1090,7 @@ GfmAutoComplete.atTypeMap = {
'[[': 'wikis',
};
GfmAutoComplete.typesWithBackendFiltering = ['vulnerabilities', 'members'];
if (useIssueBackendFiltering) {
GfmAutoComplete.typesWithBackendFiltering.push('issues');
}
GfmAutoComplete.typesWithBackendFiltering = ['vulnerabilities', 'members', 'issues'];
GfmAutoComplete.isTypeWithBackendFiltering = (type) =>
GfmAutoComplete.typesWithBackendFiltering.includes(GfmAutoComplete.atTypeMap[type]);

View File

@ -183,7 +183,11 @@ export default {
return true;
});
if (this.previewNote && !this.previewNoteLoadedInList) {
// don't show preview in modal, as we might accidentally load a note from the parent work item
const urlParams = new URLSearchParams(window.location.search);
const modalOpen = urlParams.has('show');
if (this.previewNote && !this.previewNoteLoadedInList && !modalOpen) {
const preview = {
notes: {
nodes: [this.previewNote],

View File

@ -23,11 +23,13 @@ query getWorkItems(
$firstPageSize: Int
$lastPageSize: Int
$isGroup: Boolean = true
$excludeProjects: Boolean
) {
group(fullPath: $fullPath) @include(if: $isGroup) {
id
name
workItems(
excludeProjects: $excludeProjects
includeDescendants: true
search: $search
sort: $sort

View File

@ -239,6 +239,7 @@ export default {
search: this.searchQuery,
...this.apiFilterParams,
...this.pageParams,
excludeProjects: this.workItemType === WORK_ITEM_TYPE_ENUM_EPIC,
includeDescendants: !this.apiFilterParams.fullPath,
types: this.apiFilterParams.types || this.workItemType || this.defaultWorkItemTypes,
isGroup: this.isGroup,

View File

@ -10,7 +10,7 @@ module Banzai
before_action :verify_upload_model_class!
before_action :authorize_access!
feature_category :team_planning
feature_category :markdown
MODEL_CLASSES = {
'project' => Project,

View File

@ -6,7 +6,8 @@ class Projects::AutocompleteSourcesController < Projects::ApplicationController
before_action :authorize_read_milestone!, only: :milestones
before_action :authorize_read_crm_contact!, only: :contacts
feature_category :team_planning, [:issues, :labels, :milestones, :commands, :contacts, :wikis]
feature_category :team_planning, [:issues, :labels, :milestones, :commands, :contacts]
feature_category :wiki, [:wikis]
feature_category :code_review_workflow, [:merge_requests]
feature_category :groups_and_projects, [:members]
feature_category :source_code_management, [:snippets]

View File

@ -51,7 +51,6 @@ class Projects::IssuesController < Projects::ApplicationController
push_frontend_feature_flag(:notifications_todos_buttons, current_user)
push_frontend_feature_flag(:comment_tooltips, current_user)
push_force_frontend_feature_flag(:glql_integration, project&.glql_integration_feature_flag_enabled?)
push_frontend_feature_flag(:issue_autocomplete_backend_filtering, project)
end
before_action only: [:index, :show] do

View File

@ -47,7 +47,6 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
push_frontend_feature_flag(:reviewer_assign_drawer, current_user)
push_frontend_feature_flag(:vulnerability_code_flow, project)
push_frontend_feature_flag(:pipeline_vulnerability_code_flow, project)
push_frontend_feature_flag(:issue_autocomplete_backend_filtering, project)
push_frontend_feature_flag(:realtime_issuable_todo, current_user)
end

View File

@ -19,7 +19,6 @@ module Projects
blob_path: project_blob_path(project, pipeline.sha),
has_test_report: pipeline.has_test_reports?,
empty_state_image_path: image_path('illustrations/empty-todos-md.svg'),
empty_dag_svg_path: image_path('illustrations/empty-state/empty-dag-md.svg'),
artifacts_expired_image_path: image_path('illustrations/empty-state/empty-pipeline-md.svg'),
tests_count: pipeline.test_report_summary.total[:count]
}

View File

@ -0,0 +1,41 @@
# frozen_string_literal: true
module Members
module Enumerable
extend ActiveSupport::Concern
include EachBatch
included do
def each_member_user(filters = {}, &block)
each_member_user_batch(filters) { |users| users.each(&block) }
end
def map_member_user(filters = {}, &block)
values = []
each_member_user_batch(filters) { |users| values.concat(users.map(&block)) }
values
end
def pluck_member_user(*columns, filters: {})
values = []
# rubocop:disable Database/AvoidUsingPluckWithoutLimit -- plucking on batch
each_member_user_batch(filters) { |users| values.concat(users.pluck(*columns)) }
# rubocop:enable Database/AvoidUsingPluckWithoutLimit
values
end
private
def each_member_user_batch(filters = {})
members.non_request.non_invite.where(filters).each_batch do |relation|
yield User.id_in(relation.pluck_user_ids)
end
end
end
end
end
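A minimal usage sketch of the new concern, based on the spec added later in this commit; `group` and the filter values are illustrative, and any model that includes `Members::Enumerable` and has a `members` association behaves the same way:

```ruby
# Sketch only: `group` stands for any model that includes Members::Enumerable.
group = Group.find(group_id) # group_id is a placeholder

# Iterate over member users in batches, optionally filtered by member attributes.
group.each_member_user(access_level: Gitlab::Access::OWNER) do |user|
  puts user.username
end

# Map a block over member users, or pluck columns from them in batches.
names = group.map_member_user(&:name)
pairs = group.pluck_member_user(:id, :email)
```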

View File

@ -21,6 +21,7 @@ class Group < Namespace
include RunnerTokenExpirationInterval
include Importable
include IdInOrdered
include Members::Enumerable
extend ::Gitlab::Utils::Override
@ -40,6 +41,7 @@ class Group < Namespace
has_many :all_owner_members, -> { non_request.all_owners }, as: :source, class_name: 'GroupMember'
has_many :group_members, -> { non_request.non_minimal_access }, dependent: :destroy, as: :source # rubocop:disable Cop/ActiveRecordDependent
has_many :non_invite_group_members, -> { non_request.non_minimal_access.non_invite }, class_name: 'GroupMember', as: :source
has_many :non_invite_owner_members, -> { non_request.non_invite.all_owners }, class_name: 'GroupMember', as: :source
has_many :request_group_members, -> do
request.non_minimal_access
end, inverse_of: :group, class_name: 'GroupMember', as: :source
@ -506,7 +508,7 @@ class Group < Namespace
def owned_by?(user)
return false unless user
all_owner_members.non_invite.exists?(user: user)
non_invite_owner_members.exists?(user: user)
end
def add_members(users, access_level, current_user: nil, expires_at: nil)

View File

@ -3,4 +3,5 @@
class OauthAccessGrant < Doorkeeper::AccessGrant
belongs_to :resource_owner, class_name: 'User'
belongs_to :application, class_name: 'Doorkeeper::Application'
belongs_to :organization, class_name: 'Organizations::Organization'
end

View File

@ -3,6 +3,7 @@
class OauthAccessToken < Doorkeeper::AccessToken
belongs_to :resource_owner, class_name: 'User'
belongs_to :application, class_name: 'Doorkeeper::Application'
belongs_to :organization, class_name: 'Organizations::Organization'
validates :expires_in, presence: true

View File

@ -8,7 +8,7 @@ module Admin
expose :created_at
expose :updated_at
expose :count
expose :labels, using: LabelEntity, if: ->(*) { Feature.enabled?(:abuse_report_labels) }
expose :labels, using: AntiAbuse::Reports::LabelEntity, if: ->(*) { Feature.enabled?(:abuse_report_labels) }
expose :reported_user do |report|
UserEntity.represent(report.user, only: [:name])

View File

@ -0,0 +1,8 @@
# frozen_string_literal: true
module AntiAbuse
module Reports
class LabelEntity < Issuables::BaseLabelEntity
end
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
module Issuables
class BaseLabelEntity < Grape::Entity
expose :id
expose :title
expose :color do |label|
label.color.to_s
end
expose :description
expose :text_color
expose :created_at
expose :updated_at
end
end

View File

@ -1,19 +1,9 @@
# frozen_string_literal: true
class LabelEntity < Grape::Entity
expose :id
expose :title
expose :color do |label|
label.color.to_s
end
expose :description
class LabelEntity < Issuables::BaseLabelEntity
expose :group_id
expose :project_id
expose :template
expose :text_color
expose :created_at
expose :updated_at
expose :priority, if: ->(*) { options.key?(:project) } do |label|
label.priority(options[:project])

View File

@ -243,7 +243,7 @@ module Groups
def ensure_ownership
return if @new_parent_group
return unless @group.all_owner_members.non_invite.empty?
return unless @group.non_invite_owner_members.empty?
add_owner_on_transferred_group
end

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true
module Search
module Worker
extend ActiveSupport::Concern
included do
feature_category :global_search
concurrency_limit -> { ::Search.default_concurrency_limit }
end
end
end
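For illustration, a hypothetical worker adopting the concern; the class name below is made up, and the two hunks that follow show the real adopters in this commit:

```ruby
# Hypothetical example: including Search::Worker replaces the per-worker
# feature_category and concurrency-limit declarations.
class ExampleSearchWorker
  include ApplicationWorker
  include Search::Worker

  idempotent!

  def perform; end
end
```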

View File

@ -3,13 +3,13 @@
module ConcurrencyLimit
class ResumeWorker
include ApplicationWorker
include Search::Worker
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext -- There is no onward scheduling and this cron handles work from across the
# application, so there's no useful context to add.
DEFAULT_LIMIT = 1_000
RESCHEDULE_DELAY = 1.second
feature_category :global_search
data_consistency :sticky
idempotent!
urgency :low

View File

@ -3,13 +3,13 @@
module PauseControl
class ResumeWorker
include ApplicationWorker
include Search::Worker
# There is no onward scheduling and this cron handles work from across the
# application, so there's no useful context to add.
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
RESCHEDULE_DELAY = 1.second
feature_category :global_search
data_consistency :sticky
idempotent!
urgency :low

View File

@ -1,9 +1,9 @@
---
name: issue_autocomplete_backend_filtering
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/289238
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/166026
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/488831
milestone: '17.5'
group: group::project management
type: gitlab_com_derisk
default_enabled: false
name: search_sidekiq_default_concurrency_limit
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/498212
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/169034
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/498992
milestone: '17.6'
group: group::global search
type: ops
default_enabled: true

View File

@ -129,4 +129,6 @@ Doorkeeper.configure do
# Use a custom class for generating the application secret.
# https://doorkeeper.gitbook.io/guides/configuration/other-configurations#custom-application-secret-generator
application_secret_generator 'Gitlab::DoorkeeperSecretStoring::Token::UniqueApplicationToken'
custom_access_token_attributes [:organization_id]
end

View File

@ -85,4 +85,8 @@ Doorkeeper::OpenidConnect.configure do
end
end
end
Doorkeeper::OpenidConnect::Request.class_eval do
belongs_to :organization, class_name: 'Organizations::Organization'
end
end

View File

@ -0,0 +1,8 @@
---
migration_job_name: BackfillContainerRepositoryStatesProjectId
description: Backfills sharding key `container_repository_states.project_id` from `container_repositories`.
feature_category: geo_replication
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/169240
milestone: '17.6'
queued_migration_version: 20241015082361
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,8 @@
---
migration_job_name: DeleteOrphanedStageRecords
description: Delete corrupted rows from p_ci_stages
feature_category: continuous_integration
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/168750
milestone: '17.6'
queued_migration_version: 20241009135743
finalized_by: # version of the migration that finalized this BBM

View File

@ -19,3 +19,4 @@ desired_sharding_key:
table: container_repositories
sharding_key: project_id
belongs_to: container_repository
desired_sharding_key_migration_job_name: BackfillContainerRepositoryStatesProjectId

View File

@ -8,5 +8,6 @@ feature_categories:
description: TODO
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/commit/e41dadcb33fda44ee274daa673bd933e13aa90eb
milestone: '7.7'
gitlab_schema: gitlab_main
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/463785
gitlab_schema: gitlab_main_cell
sharding_key:
organization_id: organizations

View File

@ -8,5 +8,6 @@ feature_categories:
description: TODO
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/commit/e41dadcb33fda44ee274daa673bd933e13aa90eb
milestone: '7.7'
gitlab_schema: gitlab_main
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/463785
gitlab_schema: gitlab_main_cell
sharding_key:
organization_id: organizations

View File

@ -7,5 +7,6 @@ feature_categories:
description: TODO
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/commit/c4982890489d254da2fe998aab30bf257767ed5e
milestone: '9.0'
gitlab_schema: gitlab_main
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/463785
gitlab_schema: gitlab_main_cell
sharding_key:
organization_id: organizations

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
class AddOrganizationIdToOauthTables < Gitlab::Database::Migration[2.2]
DEFAULT_ORGANIZATION_ID = 1
disable_ddl_transaction!
milestone '17.6'
TABLES = [:oauth_access_grants, :oauth_access_tokens, :oauth_openid_requests]
def up
TABLES.each do |table|
with_lock_retries do
add_column table, :organization_id, :bigint,
default: DEFAULT_ORGANIZATION_ID,
null: false,
if_not_exists: true
end
end
end
def down
TABLES.each do |table|
remove_column table, :organization_id, :bigint
end
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddProjectIdToContainerRepositoryStates < Gitlab::Database::Migration[2.2]
milestone '17.6'
def change
add_column :container_repository_states, :project_id, :bigint
end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
class AddOrganizationIdIndexToOauthTables < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.6'
TABLES = [:oauth_access_grants, :oauth_access_tokens, :oauth_openid_requests]
def up
TABLES.each do |table|
add_concurrent_index table, :organization_id, name: "idx_#{table}_on_organization_id"
add_concurrent_foreign_key table, :organizations, column: :organization_id, on_delete: :cascade
end
end
def down
TABLES.each do |table|
remove_concurrent_index table, :organization_id, name: "idx_#{table}_on_organization_id"
remove_foreign_key table, :organizations, column: :organization_id, on_delete: :cascade
end
end
end

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true
class QueueDeleteOrphanedStageRecords < Gitlab::Database::Migration[2.2]
milestone '17.6'
restrict_gitlab_migration gitlab_schema: :gitlab_ci
MIGRATION = "DeleteOrphanedStageRecords"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:p_ci_stages,
:pipeline_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
batch_class_name: 'LooseIndexScanBatchingStrategy',
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(MIGRATION, :p_ci_stages, :pipeline_id, [])
end
end

View File

@ -0,0 +1,36 @@
# frozen_string_literal: true
class PrepareAsyncForeignKeyValidationForPipelinesUpstreamPipelineId < Gitlab::Database::Migration[2.2]
include Gitlab::Database::PartitioningMigrationHelpers
disable_ddl_transaction!
milestone '17.6'
SOURCE_TABLE_NAME = :p_ci_builds
TARGET_TABLE_NAME = :p_ci_pipelines
COLUMN = :upstream_pipeline_id
PARTITION_COLUMN = :upstream_pipeline_partition_id
TARGET_COLUMN = :id
TARGET_PARTITION_COLUMN = :partition_id
FK_NAME = :fk_rails_4540ead625_p
def up
add_concurrent_partitioned_foreign_key(
SOURCE_TABLE_NAME,
TARGET_TABLE_NAME,
column: [PARTITION_COLUMN, COLUMN],
target_column: [TARGET_PARTITION_COLUMN, TARGET_COLUMN],
validate: false,
reverse_lock_order: true,
on_update: :cascade,
on_delete: :cascade,
name: FK_NAME
)
prepare_partitioned_async_foreign_key_validation SOURCE_TABLE_NAME, name: FK_NAME
end
def down
unprepare_partitioned_async_foreign_key_validation SOURCE_TABLE_NAME, name: FK_NAME
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class IndexContainerRepositoryStatesOnProjectId < Gitlab::Database::Migration[2.2]
milestone '17.6'
disable_ddl_transaction!
INDEX_NAME = 'index_container_repository_states_on_project_id'
def up
add_concurrent_index :container_repository_states, :project_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :container_repository_states, INDEX_NAME
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddContainerRepositoryStatesProjectIdFk < Gitlab::Database::Migration[2.2]
milestone '17.6'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :container_repository_states, :projects, column: :project_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :container_repository_states, column: :project_id
end
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class AddContainerRepositoryStatesProjectIdTrigger < Gitlab::Database::Migration[2.2]
milestone '17.6'
def up
install_sharding_key_assignment_trigger(
table: :container_repository_states,
sharding_key: :project_id,
parent_table: :container_repositories,
parent_sharding_key: :project_id,
foreign_key: :container_repository_id
)
end
def down
remove_sharding_key_assignment_trigger(
table: :container_repository_states,
sharding_key: :project_id,
parent_table: :container_repositories,
parent_sharding_key: :project_id,
foreign_key: :container_repository_id
)
end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
class QueueBackfillContainerRepositoryStatesProjectId < Gitlab::Database::Migration[2.2]
milestone '17.6'
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
MIGRATION = "BackfillContainerRepositoryStatesProjectId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:container_repository_states,
:container_repository_id,
:project_id,
:container_repositories,
:project_id,
:container_repository_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(
MIGRATION,
:container_repository_states,
:container_repository_id,
[
:project_id,
:container_repositories,
:project_id,
:container_repository_id
]
)
end
end

View File

@ -0,0 +1 @@
7ab31680403e3dd390c4880e65790cb8450dfc06e3916373c4cd29e2710a64c0

View File

@ -0,0 +1 @@
d85a3fd72c2724aa5fe5ce33fa03e0caf8ded9d8508a6b35a318529391c543cf

View File

@ -0,0 +1 @@
84bd92cf4afd4e72c708ac6b81978928ae560e6eb4a36d96678d0eaf9e51bce4

View File

@ -0,0 +1 @@
fe6d1615022d6ffc774ac4f9ba84f681b6856449a13f299141b91ff49af38a3d

View File

@ -0,0 +1 @@
b9f5d2ca9d39c79c8f959cf10ef337e4cb8c9f814db7b75a1df31905ade33092

View File

@ -0,0 +1 @@
d30752181eecdebf3b831f661041f0334bf77e93955fe4e38518e40c92179368

View File

@ -0,0 +1 @@
aedd6ae050b120a58d190d304cd5291c58e2580174a42775a6d0f89d81a16f88

View File

@ -0,0 +1 @@
a4ae12a361d0ec188a8ee0fba7befd753016014bf7e0e31008bdfb7592a75e18

View File

@ -0,0 +1 @@
3748a53af91a3292c72687ac815b8a1291dd54f73337b6d2d584e4946a095e8a

View File

@ -2635,6 +2635,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_fd4a1be98713() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."project_id" IS NULL THEN
SELECT "project_id"
INTO NEW."project_id"
FROM "container_repositories"
WHERE "container_repositories"."id" = NEW."container_repository_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_ff16c1fd43ea() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -9893,6 +9909,7 @@ CREATE TABLE container_repository_states (
verification_retry_count smallint DEFAULT 0 NOT NULL,
verification_checksum bytea,
verification_failure text,
project_id bigint,
CONSTRAINT check_c96417dbc5 CHECK ((char_length(verification_failure) <= 255))
);
@ -14716,6 +14733,7 @@ CREATE TABLE oauth_access_grants (
scopes character varying,
code_challenge text,
code_challenge_method text,
organization_id bigint DEFAULT 1 NOT NULL,
CONSTRAINT oauth_access_grants_code_challenge CHECK ((char_length(code_challenge) <= 128)),
CONSTRAINT oauth_access_grants_code_challenge_method CHECK ((char_length(code_challenge_method) <= 5))
);
@ -14739,6 +14757,7 @@ CREATE TABLE oauth_access_tokens (
revoked_at timestamp without time zone,
created_at timestamp without time zone NOT NULL,
scopes character varying,
organization_id bigint DEFAULT 1 NOT NULL,
CONSTRAINT check_70f294ef54 CHECK ((expires_in IS NOT NULL))
);
@ -14803,7 +14822,8 @@ ALTER SEQUENCE oauth_device_grants_id_seq OWNED BY oauth_device_grants.id;
CREATE TABLE oauth_openid_requests (
id bigint NOT NULL,
access_grant_id bigint NOT NULL,
nonce character varying NOT NULL
nonce character varying NOT NULL,
organization_id bigint DEFAULT 1 NOT NULL
);
CREATE SEQUENCE oauth_openid_requests_id_seq
@ -27526,6 +27546,12 @@ CREATE UNIQUE INDEX idx_o11y_metric_issue_conn_on_issue_id_metric_type_name ON o
CREATE UNIQUE INDEX idx_o11y_trace_issue_conn_on_issue_id_trace_identifier ON observability_traces_issues_connections USING btree (issue_id, trace_identifier);
CREATE INDEX idx_oauth_access_grants_on_organization_id ON oauth_access_grants USING btree (organization_id);
CREATE INDEX idx_oauth_access_tokens_on_organization_id ON oauth_access_tokens USING btree (organization_id);
CREATE INDEX idx_oauth_openid_requests_on_organization_id ON oauth_openid_requests USING btree (organization_id);
CREATE UNIQUE INDEX idx_on_approval_group_rules_any_approver_type ON approval_group_rules USING btree (group_id, rule_type) WHERE (rule_type = 4);
CREATE UNIQUE INDEX idx_on_approval_group_rules_group_id_type_name ON approval_group_rules USING btree (group_id, rule_type, name);
@ -28726,6 +28752,8 @@ CREATE INDEX index_container_repository_states_failed_verification ON container_
CREATE INDEX index_container_repository_states_needs_verification ON container_repository_states USING btree (verification_state) WHERE ((verification_state = 0) OR (verification_state = 3));
CREATE INDEX index_container_repository_states_on_project_id ON container_repository_states USING btree (project_id);
CREATE INDEX index_container_repository_states_on_verification_state ON container_repository_states USING btree (verification_state);
CREATE INDEX index_container_repository_states_pending_verification ON container_repository_states USING btree (verified_at NULLS FIRST) WHERE (verification_state = 0);
@ -34014,6 +34042,8 @@ CREATE TRIGGER trigger_fbd42ed69453 BEFORE INSERT OR UPDATE ON external_status_c
CREATE TRIGGER trigger_fbd8825b3057 BEFORE INSERT OR UPDATE ON boards_epic_board_labels FOR EACH ROW EXECUTE FUNCTION trigger_fbd8825b3057();
CREATE TRIGGER trigger_fd4a1be98713 BEFORE INSERT OR UPDATE ON container_repository_states FOR EACH ROW EXECUTE FUNCTION trigger_fd4a1be98713();
CREATE TRIGGER trigger_ff16c1fd43ea BEFORE INSERT OR UPDATE ON geo_event_log FOR EACH ROW EXECUTE FUNCTION trigger_ff16c1fd43ea();
CREATE TRIGGER trigger_fff8735b6b9a BEFORE INSERT OR UPDATE ON vulnerability_finding_signatures FOR EACH ROW EXECUTE FUNCTION trigger_fff8735b6b9a();
@ -34596,6 +34626,9 @@ ALTER TABLE ONLY approval_merge_request_rules
ALTER TABLE ONLY deploy_keys_projects
ADD CONSTRAINT fk_58a901ca7e FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY oauth_access_grants
ADD CONSTRAINT fk_59cdb2323c FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
ALTER TABLE ONLY packages_tags
ADD CONSTRAINT fk_5a230894f6 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -34677,6 +34710,9 @@ ALTER TABLE ONLY ci_pipeline_chat_data
ALTER TABLE ONLY cluster_agent_tokens
ADD CONSTRAINT fk_64f741f626 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY container_repository_states
ADD CONSTRAINT fk_6591698505 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY import_placeholder_memberships
ADD CONSTRAINT fk_66286fb5e6 FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
@ -34716,6 +34752,9 @@ ALTER TABLE ONLY protected_environment_approval_rules
ALTER TABLE ONLY deploy_tokens
ADD CONSTRAINT fk_7082f8a288 FOREIGN KEY (creator_id) REFERENCES users(id) ON DELETE SET NULL;
ALTER TABLE ONLY oauth_openid_requests
ADD CONSTRAINT fk_7092424b77 FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
ALTER TABLE ONLY protected_branch_push_access_levels
ADD CONSTRAINT fk_7111b68cdb FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
@ -34944,6 +34983,9 @@ ALTER TABLE ONLY work_item_type_custom_fields
ALTER TABLE ONLY workspaces_agent_configs
ADD CONSTRAINT fk_94660551c8 FOREIGN KEY (cluster_agent_id) REFERENCES cluster_agents(id) ON DELETE CASCADE;
ALTER TABLE ONLY oauth_access_tokens
ADD CONSTRAINT fk_94884daa35 FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
ALTER TABLE ONLY dast_site_profiles_builds
ADD CONSTRAINT fk_94e80df60e FOREIGN KEY (dast_site_profile_id) REFERENCES dast_site_profiles(id) ON DELETE CASCADE;

View File

@ -131,6 +131,20 @@ WARNING:
Any command that changes data directly could be damaging if not run correctly, or under the right conditions.
We highly recommend running them in a test environment with a backup of the instance ready to be restored, just in case.
### Cancel all running pipelines and their jobs
```ruby
admin = User.find(user_id) # replace user_id with the ID of the administrator cancelling the pipelines
# Iterate over each cancelable pipeline
Ci::Pipeline.cancelable.find_each do |pipeline|
  Ci::CancelPipelineService.new(
    pipeline: pipeline,
    current_user: admin,
    cascade_to_children: false # the children are included in the outer loop
  ).execute
end
```
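To have each call cascade the cancellation itself instead of relying on the outer loop, a variant is sketched below. It assumes `Ci::Pipeline#child?` is available to skip child pipelines, which their parents cancel:

```ruby
Ci::Pipeline.cancelable.find_each do |pipeline|
  next if pipeline.child? # parents cancel their children when cascading

  Ci::CancelPipelineService.new(
    pipeline: pipeline,
    current_user: admin,
    cascade_to_children: true
  ).execute
end
```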
### Cancel stuck pending pipelines
```ruby

View File

@ -52,6 +52,26 @@ thereafter. If the first word in a sentence, do not capitalize `factor` or `auth
- Two-factor authentication (2FA) helps secure your account. Set up 2FA when you first sign in.
## ability, able
Avoid using **ability** because it focuses on the user's
capabilities rather than the product's features or functions,
which makes the writing vague and ambiguous.
Do not use **ability** or **able** to refer to permissions or rights that a user needs to perform a task.
Use:
- You do not have the rights to change this setting.
- You must have permission to change this setting.
Instead of:
- You're not able to change this setting.
- You must have the ability to change this setting.
See also [**enable**](#enable).
## above
Try to avoid using **above** when referring to an example or table in a documentation page. If required, use **previous** instead. For example:
@ -1681,8 +1701,8 @@ Use:
If you write a phrase like, "On the **Issues** page," ensure steps for how to get to the page are nearby. Otherwise, people might not know what the **Issues** page is.
The page name should be visible in the UI at the top of the page.
If it is not, you should be able to get the name from the breadcrumb.
The page name should be visible in the UI at the top of the page,
or included in the breadcrumb.
The docs should match the case in the UI, and the page name should be bold. For example:

View File

@ -93,6 +93,40 @@ You can assign or remove seats in bulk for multiple users.
Administrators of self-managed instances can use a [Rake task](../raketasks/user_management.md#bulk-assign-users-to-gitlab-duo-pro) to assign or remove seats in bulk.
## View assigned GitLab Duo users
Prerequisites:
- You must purchase a GitLab Duo add-on, or have an active GitLab Duo trial.
- For self-managed and GitLab Dedicated:
- The GitLab Duo Pro add-on is available in GitLab 16.8 and later.
- The GitLab Duo Enterprise add-on is only available in GitLab 17.3 and later.
After you purchase GitLab Duo, you can assign seats to users to grant access to the add-on.
### For GitLab.com
1. On the left sidebar, select **Search or go to** and find your group.
1. Select **Settings > GitLab Duo**.
1. From the filter bar, select **Assigned seat** and **Yes**.
1. The user list is filtered to show only users who are assigned a GitLab Duo seat.
### For self-managed
Prerequisites:
- You must be an administrator.
- GitLab 17.5 or later
1. On the left sidebar, at the bottom, select **Admin**.
1. Select **GitLab Duo**.
- If the **GitLab Duo** menu item is not available, synchronize your subscription
after purchase:
1. On the left sidebar, select **Subscription**.
1. In **Subscription details**, to the right of **Last sync**, select
synchronize subscription (**{retry}**).
1. To filter by users assigned to a GitLab Duo seat, in the **Filter users** bar, select **Assigned seat**, then select **Yes**.
## Purchase additional GitLab Duo seats
You can purchase additional GitLab Duo Pro or GitLab Duo Enterprise seats for your group namespace or self-managed instance. After you complete the purchase, the seats are added to the total number of GitLab Duo seats in your subscription.

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillContainerRepositoryStatesProjectId < BackfillDesiredShardingKeyJob
operation_name :backfill_container_repository_states_project_id
feature_category :geo_replication
end
end
end

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class DeleteOrphanedStageRecords < BatchedMigrationJob
operation_name :delete_orphaned_stage_records
feature_category :continuous_integration
class CiPipeline < ::Ci::ApplicationRecord
self.table_name = :p_ci_pipelines
self.primary_key = :id
end
def perform
distinct_each_batch do |batch|
pipeline_ids = batch.pluck(batch_column)
pipelines_query = CiPipeline
.where('p_ci_stages.pipeline_id = p_ci_pipelines.id')
.where('p_ci_stages.partition_id = p_ci_pipelines.partition_id')
.select(1)
base_relation
.where(batch_column => pipeline_ids)
.where('NOT EXISTS (?)', pipelines_query)
.delete_all
end
end
private
def base_relation
define_batchable_model(batch_table, connection: connection, primary_key: :id)
.where(batch_column => start_id..end_id)
end
end
end
end

View File

@ -28,7 +28,6 @@ namespace :tw do
# CodeOwnerRule.new('Anti-Abuse', ''),
CodeOwnerRule.new('Authentication', '@jglassman1'),
# CodeOwnerRule.new('Authorization', ''),
# CodeOwnerRule.new('Billing and Subscription Management', ''),
CodeOwnerRule.new('Cloud Connector', '@jglassman1'),
CodeOwnerRule.new('Code Creation', '@jglassman1'),
CodeOwnerRule.new('Code Review', '@aqualls'),
@ -44,6 +43,7 @@ namespace :tw do
CodeOwnerRule.new('Distribution (Charts)', '@axil'),
CodeOwnerRule.new('Distribution (Omnibus)', '@eread'),
CodeOwnerRule.new('Duo Chat', '@sselhorn @jglassman1 @fneill'),
CodeOwnerRule.new('Duo Workflow', '@sselhorn @jglassman1 @fneill'),
CodeOwnerRule.new('Dynamic Analysis', '@rdickenson @phillipwells'),
CodeOwnerRule.new('Editor Extensions', '@aqualls'),
CodeOwnerRule.new('Environments', '@phillipwells'),
@ -104,6 +104,7 @@ namespace :tw do
'@gitlab-org/secure/composition-analysis-be @gitlab-org/secure/static-analysis'),
CodeOwnerRule.new('Distribution', '@gitlab-org/distribution'),
CodeOwnerRule.new('Documentation Guidelines', '@fneill'),
CodeOwnerRule.new('Duo Workflow', '@gitlab-org/ai-powered'),
CodeOwnerRule.new('Engineering Productivity', '@gl-quality/eng-prod'),
CodeOwnerRule.new('Personal Productivity', '@gitlab-org/foundations/engineering'),
CodeOwnerRule.new('Gitaly', '@proglottis @toon'),

View File

@ -37,5 +37,9 @@ FactoryBot.define do
}
end
end
trait :with_labels do
labels { [association(:label)] }
end
end
end

View File

@ -1,27 +1,12 @@
# frozen_string_literal: true
FactoryBot.define do
factory :group_member do
factory :group_member, parent: :member, class: 'GroupMember' do
access_level { GroupMember::OWNER }
source { association(:group) }
member_namespace_id { source.id }
user
trait(:guest) { access_level { GroupMember::GUEST } }
trait(:reporter) { access_level { GroupMember::REPORTER } }
trait(:developer) { access_level { GroupMember::DEVELOPER } }
trait(:maintainer) { access_level { GroupMember::MAINTAINER } }
trait(:owner) { access_level { GroupMember::OWNER } }
trait(:access_request) { requested_at { Time.now } }
trait(:invited) do
user { nil }
invite_token { 'xxx' }
sequence :invite_email do |n|
"email#{n}@email.com"
end
end
trait(:created_by) do
created_by { association(:user) }
end
@ -30,26 +15,10 @@ FactoryBot.define do
ldap { true }
end
trait :blocked do
after(:build) { |group_member, _| group_member.user.block! }
end
trait :minimal_access do
to_create { |instance| instance.save!(validate: false) }
access_level { GroupMember::MINIMAL_ACCESS }
end
trait :awaiting do
after(:create) do |member|
member.update!(state: ::Member::STATE_AWAITING)
end
end
trait :active do
after(:create) do |member|
member.update!(state: ::Member::STATE_ACTIVE)
end
end
end
end

spec/factories/member.rb (new file, 41 lines)
View File

@ -0,0 +1,41 @@
# frozen_string_literal: true
FactoryBot.define do
factory :member do
access_level { Gitlab::Access::GUEST }
source { association(:source) }
member_namespace_id { source.id }
user
trait(:guest) { access_level { Gitlab::Access::GUEST } }
trait(:reporter) { access_level { Gitlab::Access::REPORTER } }
trait(:developer) { access_level { Gitlab::Access::DEVELOPER } }
trait(:maintainer) { access_level { Gitlab::Access::MAINTAINER } }
trait(:owner) { access_level { Gitlab::Access::OWNER } }
trait(:access_request) { requested_at { Time.now } }
trait(:invited) do
user { nil }
invite_token { 'xxx' }
sequence :invite_email do |n|
"email#{n}@email.com"
end
end
trait :blocked do
after(:build) { |member, _| member.user.block! }
end
trait :awaiting do
after(:create) do |member|
member.update!(state: ::Member::STATE_AWAITING)
end
end
trait :active do
after(:create) do |member|
member.update!(state: ::Member::STATE_ACTIVE)
end
end
end
end

View File

@ -4,6 +4,7 @@ FactoryBot.define do
factory :oauth_access_grant do
resource_owner_id { create(:user).id }
application
organization
token { Doorkeeper::OAuth::Helpers::UniqueToken.generate }
expires_in { 2.hours }

View File

@ -4,6 +4,7 @@ FactoryBot.define do
factory :oauth_access_token do
resource_owner
application
organization
token { Doorkeeper::OAuth::Helpers::UniqueToken.generate }
refresh_token { Doorkeeper::OAuth::Helpers::UniqueToken.generate }
scopes { application.scopes }

View File

@ -2,6 +2,7 @@
FactoryBot.define do
factory :oauth_openid_request, class: 'Doorkeeper::OpenidConnect::Request' do
organization
access_grant factory: :oauth_access_grant
sequence(:nonce) { |n| n.to_s }
end

View File

@ -1,41 +1,8 @@
# frozen_string_literal: true
FactoryBot.define do
factory :project_member do
user
factory :project_member, parent: :member, class: 'ProjectMember' do
source { association(:project) }
member_namespace_id { source.id }
maintainer
trait(:guest) { access_level { ProjectMember::GUEST } }
trait(:reporter) { access_level { ProjectMember::REPORTER } }
trait(:developer) { access_level { ProjectMember::DEVELOPER } }
trait(:maintainer) { access_level { ProjectMember::MAINTAINER } }
trait(:owner) { access_level { ProjectMember::OWNER } }
trait(:access_request) { requested_at { Time.now } }
trait(:invited) do
user_id { nil }
invite_token { 'xxx' }
sequence :invite_email do |n|
"email#{n}@email.com"
end
end
trait :blocked do
after(:build) { |project_member, _| project_member.user.block! }
end
trait :awaiting do
after(:create) do |member|
member.update!(state: ::Member::STATE_AWAITING)
end
end
trait :active do
after(:create) do |member|
member.update!(state: ::Member::STATE_ACTIVE)
end
end
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe 'GFM autocomplete', :js, feature_category: :team_planning do
RSpec.describe 'GFM autocomplete', :js, feature_category: :text_editors do
include Features::AutocompleteHelpers
let_it_be(:user) { create(:user, name: '💃speciąl someone💃', username: 'someone.special') }
@ -384,16 +384,6 @@ RSpec.describe 'GFM autocomplete', :js, feature_category: :team_planning do
end
it_behaves_like 'searching issue autocomplete'
context 'when issue_autocomplete_backend_filtering is disabled' do
before do
stub_feature_flags(issue_autocomplete_backend_filtering: false)
visit project_issue_path(project, issue)
end
it_behaves_like 'searching issue autocomplete'
end
end
context 'merge requests' do

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe 'Issue markdown toolbar', :js, feature_category: :team_planning do
RSpec.describe 'Issue markdown toolbar', :js, feature_category: :text_editors do
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:user) { create(:user) }

View File

@ -17,7 +17,7 @@ RSpec.describe 'User views merged merge request from deleted fork', feature_cate
before do
sign_in user
fork_owner = source_project.namespace.all_owner_members.non_invite.first.user
fork_owner = source_project.namespace.non_invite_owner_members.first.user
# Place the source_project in the weird in between state
source_project.update_attribute(:pending_delete, true)
Projects::DestroyService.new(source_project, fork_owner, {}).__send__(:trash_project_repositories!)

View File

@ -151,6 +151,7 @@ RSpec.describe 'OAuth Login', :allow_forgery_protection, feature_category: :syst
before do
sign_in(user)
create(:organization, :default)
create(:oauth_access_token, application: application, resource_owner_id: user.id, scopes: 'api')
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe 'User uploads file to note', feature_category: :team_planning do
RSpec.describe 'User uploads file to note', feature_category: :text_editors do
include DropzoneHelper
let(:user) { create(:user) }

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Autocomplete::GroupUsersFinder, feature_category: :team_planning do
RSpec.describe Autocomplete::GroupUsersFinder, feature_category: :text_editors do
let_it_be(:parent_group) { create(:group) }
let_it_be(:group) { create(:group, parent: parent_group) }
let_it_be(:subgroup) { create(:group, parent: group) }

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Banzai::UploadsFinder, feature_category: :team_planning do
RSpec.describe Banzai::UploadsFinder, feature_category: :markdown do
let_it_be(:project) { create(:project) }
let_it_be(:project_upload_1) { create(:upload, :issuable_upload, model: project, filename: 'file1.jpg') }
let_it_be(:project_upload_2) { create(:upload, :issuable_upload, model: project, filename: 'file2.jpg') }

View File

@ -265,27 +265,27 @@ describe('GfmAutoComplete', () => {
describe('data is not in cache', () => {
beforeEach(() => {
const context = {
isLoadingData: { '#': false },
dataSources: { issues: 'issues_autocomplete_url' },
isLoadingData: { '/': false },
dataSources: { commands: 'commands_autocomplete_url' },
cachedData: {},
};
fetchData.call(context, {}, '#', 'query');
fetchData.call(context, {}, '/', 'query');
});
it('should call AjaxCache', () => {
expect(AjaxCache.retrieve).toHaveBeenCalledWith('issues_autocomplete_url', true);
expect(AjaxCache.retrieve).toHaveBeenCalledWith('commands_autocomplete_url', true);
});
});
describe('data is in cache', () => {
beforeEach(() => {
const context = {
isLoadingData: { '#': false },
dataSources: { issues: 'issues_autocomplete_url' },
cachedData: { '#': [{}] },
isLoadingData: { '/': false },
dataSources: { issues: 'commands_autocomplete_url' },
cachedData: { '/': [{}] },
loadData: () => {},
};
fetchData.call(context, {}, '#', 'query');
fetchData.call(context, {}, '/', 'query');
});
it('should not call AjaxCache', () => {

View File

@ -167,6 +167,12 @@ describeSkipVue3(skipReason, () => {
types: ['ISSUE', 'INCIDENT', 'TASK'],
}),
);
expect(defaultQueryHandler).toHaveBeenCalledWith(
expect.objectContaining({
excludeProjects: false,
}),
);
});
it('calls `getParameterByName` to get the `show` param', () => {
@ -212,6 +218,21 @@ describeSkipVue3(skipReason, () => {
});
});
describe('when workItemType EPIC is provided', () => {
it('sends excludeProjects variable in GraphQL query', async () => {
const type = 'EPIC';
mountComponent({ provide: { workItemType: type } });
await waitForPromises();
expect(defaultQueryHandler).toHaveBeenCalledWith(
expect.objectContaining({
excludeProjects: true,
}),
);
});
});
describe('when there is an error fetching work items', () => {
const message = 'Something went wrong when fetching work items. Please try again.';

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe MarkupHelper, feature_category: :team_planning do
RSpec.describe MarkupHelper, feature_category: :markdown do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) do
user = create(:user, username: 'gfm')

View File

@ -31,7 +31,6 @@ RSpec.describe Projects::PipelineHelper do
suite_endpoint: project_pipeline_test_path(project, pipeline, suite_name: 'suite', format: :json),
blob_path: project_blob_path(project, pipeline.sha),
has_test_report: pipeline.complete_and_has_reports?(Ci::JobArtifact.of_report_type(:test)),
empty_dag_svg_path: match_asset_path('illustrations/empty-state/empty-dag-md.svg'),
empty_state_image_path: match_asset_path('illustrations/empty-todos-md.svg'),
artifacts_expired_image_path: match_asset_path('illustrations/empty-state/empty-pipeline-md.svg'),
tests_count: pipeline.test_report_summary.total[:count],

View File

@ -2,7 +2,7 @@
require 'fast_spec_helper'
RSpec.describe Banzai::FilterArray, feature_category: :team_planning do
RSpec.describe Banzai::FilterArray, feature_category: :markdown do
describe '#insert_after' do
it 'inserts an element after a provided element' do
filters = described_class.new(%w[a b c])

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillContainerRepositoryStatesProjectId,
feature_category: :geo_replication,
schema: 20241015082357 do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :container_repository_states }
let(:batch_column) { :container_repository_id }
let(:backfill_column) { :project_id }
let(:backfill_via_table) { :container_repositories }
let(:backfill_via_column) { :project_id }
let(:backfill_via_foreign_key) { :container_repository_id }
end
end

View File

@ -0,0 +1,61 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedStageRecords,
feature_category: :continuous_integration, migration: :gitlab_ci do
let(:pipelines_table) { table(:p_ci_pipelines, database: :ci, primary_key: :id) }
let(:stages_table) { table(:p_ci_stages, database: :ci, primary_key: :id) }
let(:default_attributes) { { project_id: 600, partition_id: 100 } }
let!(:regular_pipeline) { pipelines_table.create!(default_attributes) }
let!(:deleted_pipeline) { pipelines_table.create!(default_attributes) }
let!(:other_pipeline) { pipelines_table.create!(default_attributes) }
let!(:regular_build) do
stages_table.create!(pipeline_id: regular_pipeline.id, **default_attributes)
end
let!(:orphaned_build) do
stages_table.create!(pipeline_id: deleted_pipeline.id, **default_attributes)
end
let(:connection) { Ci::ApplicationRecord.connection }
around do |example|
connection.transaction do
connection.execute(<<~SQL)
ALTER TABLE ci_pipelines DISABLE TRIGGER ALL;
SQL
example.run
connection.execute(<<~SQL)
ALTER TABLE ci_pipelines ENABLE TRIGGER ALL;
SQL
end
end
describe '#perform' do
subject(:migration) do
described_class.new(
start_id: stages_table.minimum(:pipeline_id),
end_id: stages_table.maximum(:pipeline_id),
batch_table: :p_ci_stages,
batch_column: :pipeline_id,
sub_batch_size: 100,
pause_ms: 0,
connection: connection
)
end
it 'deletes from p_ci_stages where pipeline_id has no related record at p_ci_pipelines.id', :aggregate_failures do
expect { deleted_pipeline.delete }.to not_change { stages_table.count }
expect { migration.perform }.to change { stages_table.count }.from(2).to(1)
expect(regular_build.reload).to be_persisted
expect { orphaned_build.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::DataBuilder::Emoji, feature_category: :team_planning do
RSpec.describe Gitlab::DataBuilder::Emoji, feature_category: :markdown do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:issue, project: project) }

View File

@ -201,7 +201,10 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
"sbom_components" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/469436',
"sbom_component_versions" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/483194',
"subscription_user_add_on_assignments" => "https://gitlab.com/gitlab-org/gitlab/-/issues/480697",
"topics" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/463254'
"topics" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/463254',
"oauth_access_tokens" => "https://gitlab.com/gitlab-org/gitlab/-/issues/496717",
"oauth_access_grants" => "https://gitlab.com/gitlab-org/gitlab/-/issues/496717",
"oauth_openid_requests" => "https://gitlab.com/gitlab-org/gitlab/-/issues/496717"
}
organization_id_columns = ApplicationRecord.connection.select_rows(sql)

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::HookData::EmojiBuilder, feature_category: :team_planning do
RSpec.describe Gitlab::HookData::EmojiBuilder, feature_category: :markdown do
let_it_be(:award_emoji) { create(:award_emoji) }
let(:builder) { described_class.new(award_emoji) }

View File

@ -883,7 +883,7 @@ project:
- dora_performance_scores
- xray_reports
- member_approvals
- zoekt_repository
- zoekt_repositories
- security_policy_management_project_linked_configurations
- security_policy_project_linked_projects
- security_policy_project_linked_namespaces

View File

@ -23,7 +23,7 @@ RSpec.describe Mattermost::Session, type: :request do
it { is_expected.to respond_to(:authorization) }
it { is_expected.to respond_to(:strategy) }
describe '#with session' do
describe '#with session', :with_default_organization do
let(:location) { 'http://location.tld' }
let(:cookie_header) { 'MMOAUTH=taskik8az7rq8k6rkpuas7htia; Path=/;' }
let!(:stub) do

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueDeleteOrphanedStageRecords, migration: :gitlab_ci, feature_category: :continuous_integration do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :p_ci_stages,
column_name: :pipeline_id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE,
gitlab_schema: :gitlab_ci
)
}
end
end
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillContainerRepositoryStatesProjectId, feature_category: :geo_replication do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :container_repository_states,
column_name: :container_repository_id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE,
gitlab_schema: :gitlab_main_cell,
job_arguments: [
:project_id,
:container_repositories,
:project_id,
:container_repository_id
]
)
}
end
end
end

View File

@ -0,0 +1,124 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Members::Enumerable, feature_category: :cell do
using RSpec::Parameterized::TableSyntax
before_all do
ActiveRecord::Schema.define do
create_table :_test_member_sources, force: true
end
end
before do
stub_const('TestMemberSource', klass)
# rubocop:disable RSpec/AnyInstanceOf -- stub all instances, hooks and notifications are irrelevant to the tests.
allow_any_instance_of(SystemHooksService).to receive(:execute_hooks_for)
allow_any_instance_of(Member).to receive_messages(notifiable?: false, send_request: nil)
# rubocop:enable RSpec/AnyInstanceOf
end
let_it_be(:klass) do
Class.new(Namespace) do
include Members::Enumerable
self.table_name = '_test_member_sources'
has_many :members, dependent: :destroy, as: :source, class_name: '::Member'
end
end
let_it_be(:source) { create(:namespace).becomes(klass) } # rubocop: disable Cop/AvoidBecomes -- easier to reuse existing factory object for a dummy model
let!(:owner) { create(:member, :owner, source: source).user }
let!(:guest) { create(:member, :guest, source: source).user }
let!(:requested) { create(:member, :access_request, source: source).user }
let!(:invited) { create(:member, :invited, source: source, user: create(:user)).user }
shared_context 'with parametrized filters table' do
where(:filters, :selected) do
nil | [ref(:owner), ref(:guest)]
{ access_level: Gitlab::Access::OWNER } | [ref(:owner)]
{ access_level: Gitlab::Access::GUEST } | [ref(:guest)]
end
end
shared_examples 'extract value from selected members only' do |column|
it 'extracted value from selected members' do
expected_values = selected.map { |user| user.public_send(column) }
expect(values).to contain_exactly(*expected_values)
end
it 'skips members with access request' do
expect(values).not_to include(requested.public_send(column))
end
it 'skips invited members' do
expect(values).not_to include(invited.public_send(column))
end
end
describe '#each_member_user' do
include_context 'with parametrized filters table'
with_them do
subject(:users) do
users = []
source.each_member_user(filters) { |user| users << user }
users
end
it 'iterates over selected members' do
expect(users).to contain_exactly(*selected)
end
it 'skips members with access request' do
expect(users).not_to include(requested)
end
it 'skips invited members' do
expect(users).not_to include(invited)
end
end
end
describe '#map_member_user' do
include_context 'with parametrized filters table'
with_them do
subject(:values) { source.map_member_user(filters, &:name) }
it_behaves_like 'extract value from selected members only', :name
end
end
describe '#pluck_member_user' do
include_context 'with parametrized filters table'
with_them do
context 'when single column passed' do
subject(:values) { source.pluck_member_user(:name, filters: filters) }
it_behaves_like 'extract value from selected members only', :name
end
context 'when multiple column passed' do
subject(:values) { source.pluck_member_user(:name, :email, filters: filters) }
it 'extract multiple values from selected members' do
expected_values = selected.map { |user| [user.name, user.email] }
expect(values).to contain_exactly(*expected_values)
end
it 'skips members with access request' do
expect(values).not_to include([requested.name, requested.email])
end
it 'skips invited members' do
expect(values).not_to include([invited.name, invited.email])
end
end
end
end
end

View File

@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe 'OAuth tokens', feature_category: :system_access do
include HttpBasicAuthHelpers
let_it_be(:organization) { create(:organization, :default) }
context 'Resource Owner Password Credentials' do
def request_oauth_token(user, headers = {}, password = user.password)
post '/oauth/token',

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Banzai::UploadsController, feature_category: :team_planning do
RSpec.describe Banzai::UploadsController, feature_category: :markdown do
describe '#show' do
let_it_be(:user) { create(:user) }

View File

@ -11,7 +11,7 @@ RSpec.describe Oauth::TokensController, feature_category: :system_access do
post '/oauth/token', params: { grant_type: 'password', username: user.username, password: with_password }
end
context 'when user does not have two factor enabled' do
context 'when user does not have two factor enabled', :with_default_organization do
let_it_be(:user) { create(:user, password: password) }
it 'authenticates successfully' do

View File

@ -5,7 +5,7 @@ require "spec_helper"
RSpec.describe Admin::AbuseReportEntity, feature_category: :insider_threat do
include Gitlab::Routing
let(:abuse_report) { build_stubbed(:abuse_report) }
let_it_be(:abuse_report) { build_stubbed(:abuse_report, :with_labels) }
let(:entity) do
described_class.new(abuse_report)

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
require "spec_helper"
RSpec.describe AntiAbuse::Reports::LabelEntity, feature_category: :insider_threat do
let_it_be(:abuse_report_label) { build_stubbed(:label) }
let(:entity) do
described_class.new(abuse_report_label)
end
describe '#as_json' do
subject(:entity_hash) { entity.as_json }
it 'exposes correct attributes' do
expect(entity_hash.keys).to match_array([
:id,
:title,
:color,
:description,
:text_color,
:created_at,
:updated_at
])
end
end
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
require "spec_helper"
RSpec.describe LabelEntity, feature_category: :team_planning do
let_it_be(:label) { build_stubbed(:label) }
let(:entity) do
described_class.new(label)
end
describe '#as_json' do
subject(:entity_hash) { entity.as_json }
it 'exposes correct attributes' do
expect(entity_hash.keys).to match_array([
:id,
:title,
:color,
:description,
:text_color,
:created_at,
:updated_at,
:group_id,
:project_id,
:template
])
end
end
end

Some files were not shown because too many files have changed in this diff.