Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-04-11 03:07:15 +00:00
parent fe1e276391
commit aa068e95cb
84 changed files with 911 additions and 141 deletions

View File

@ -2,7 +2,6 @@
# Cop supports --autocorrect.
Layout/LineContinuationSpacing:
Exclude:
- 'app/helpers/application_settings_helper.rb'
- 'app/helpers/projects_helper.rb'
- 'app/helpers/tags_helper.rb'
- 'app/helpers/tree_helper.rb'

View File

@ -25,6 +25,7 @@ RSpec/MultipleMemoizedHelpers:
- 'spec/requests/api/issues/issues_spec.rb'
- 'spec/requests/api/issues/put_projects_issues_spec.rb'
- 'spec/requests/api/maven_packages_spec.rb'
- 'spec/requests/api/todos_spec.rb'
- 'spec/requests/api/users_spec.rb'
- 'spec/services/boards/issues/list_service_spec.rb'
- 'spec/services/labels/promote_service_spec.rb'

View File

@ -33,10 +33,6 @@ module Mutations
description: 'Number of work items that were successfully updated.'
def ready?(**args)
if Feature.disabled?(:bulk_update_work_items_mutation, parent_for!(args[:parent_id]))
raise_resource_not_available_error!('`bulk_update_work_items_mutation` feature flag is disabled.')
end
if args[:ids].size > MAX_WORK_ITEMS
raise Gitlab::Graphql::Errors::ArgumentError,
format(

View File

@ -193,7 +193,7 @@ module ApplicationSettingsHelper
end
def external_authorization_description
s_("ExternalAuthorization|Access to projects is validated on an external service "\
s_("ExternalAuthorization|Access to projects is validated on an external service " \
"using their classification label.")
end
@ -202,39 +202,39 @@ module ApplicationSettingsHelper
end
def external_authorization_timeout_help_text
s_("ExternalAuthorization|Period GitLab waits for a response from the external "\
s_("ExternalAuthorization|Period GitLab waits for a response from the external " \
"service. If there is no response, access is denied. Default: 0.5 seconds.")
end
def external_authorization_url_help_text
s_("ExternalAuthorization|URL to which the projects make authorization requests. If the URL is blank, cross-project "\
"features are available and can still specify classification "\
s_("ExternalAuthorization|URL to which the projects make authorization requests. If the URL is blank, cross-project " \
"features are available and can still specify classification " \
"labels for projects.")
end
def external_authorization_client_certificate_help_text
s_("ExternalAuthorization|Certificate used to authenticate with the external authorization service. "\
s_("ExternalAuthorization|Certificate used to authenticate with the external authorization service. " \
"If blank, the server certificate is validated when accessing over HTTPS.")
end
def external_authorization_client_key_help_text
s_("ExternalAuthorization|Private key of client authentication certificate. "\
s_("ExternalAuthorization|Private key of client authentication certificate. " \
"Encrypted when stored.")
end
def external_authorization_client_pass_help_text
s_("ExternalAuthorization|Passphrase required to decrypt the private key. "\
s_("ExternalAuthorization|Passphrase required to decrypt the private key. " \
"Encrypted when stored.")
end
def external_authorization_client_url_help_text
s_("ExternalAuthorization|Classification label to use when requesting authorization if no specific "\
s_("ExternalAuthorization|Classification label to use when requesting authorization if no specific " \
"label is defined on the project.")
end
def sidekiq_job_limiter_mode_help_text
_("How the job limiter handles jobs exceeding the thresholds specified below. "\
"The 'track' mode only logs the jobs. The 'compress' mode compresses the jobs and "\
_("How the job limiter handles jobs exceeding the thresholds specified below. " \
"The 'track' mode only logs the jobs. The 'compress' mode compresses the jobs and " \
"raises an exception if the compressed size exceeds the limit.")
end

View File

@ -86,14 +86,22 @@ module GroupsHelper
}
end
# Overridden in EE
def remove_group_message(group, permanently_remove)
return permanently_delete_group_message(group) if permanently_remove
return permanently_delete_group_message(group) unless group.adjourned_deletion?
return permanently_delete_group_message(group) if group.marked_for_deletion?
date = permanent_deletion_date_formatted(Date.current)
_("The contents of this group, its subgroups and projects will be permanently deleted after %{deletion_adjourned_period} days on %{date}. After this point, your data cannot be recovered.") %
{ date: date, deletion_adjourned_period: group.deletion_adjourned_period }
end
# Builds the HTML-safe confirmation message shown when a group is about to be
# permanently deleted (no adjourned-deletion grace period applies).
# Returns an html_safe String composed of the group-specific warning plus
# shared warning content.
def permanently_delete_group_message(group)
content = ''.html_safe
content << content_tag(:span, format(_("You are about to delete the group %{group_name}."), group_name: group.name))
# additional_removed_items / remove_group_warning are defined elsewhere in this
# helper (or an EE override) — NOTE(review): not visible in this hunk; confirm.
additional_content = additional_removed_items(group)
content << additional_content if additional_content.present?
content << remove_group_warning
end

View File

@ -2,6 +2,8 @@
module Emails
module Groups
include NamespacesHelper
def group_was_exported_email(current_user, group)
group_email(current_user, group, _('Group was exported'))
end
@ -15,6 +17,18 @@ module Emails
@errors = errors
mail_with_locale(to: current_user.notification_email_for(@group), subject: subject(subj))
end
# Mails a group owner that their group has been scheduled for (adjourned)
# deletion.
#
# recipient_id - ID of the User to notify (raises ActiveRecord::RecordNotFound
#                if missing, as does group_id).
# group_id     - ID of the Group marked for deletion.
#
# Instance variables feed the HAML/text templates: the number of days until
# deletion and the formatted date on which deletion becomes permanent.
def group_scheduled_for_deletion(recipient_id, group_id)
@group = ::Group.find(group_id)
@user = ::User.find(recipient_id)
@deletion_due_in_days = ::Gitlab::CurrentSettings.deletion_adjourned_period.days
@deletion_date = permanent_deletion_date_formatted(@group.marked_for_deletion_on, format: '%B %-d, %Y')
email_with_layout(
to: @user.email,
subject: subject('Group scheduled for deletion')
)
end
end
end

View File

@ -443,6 +443,17 @@ class NotifyPreview < ActionMailer::Preview
end
end
# Mailer preview for Notify#group_scheduled_for_deletion.
# Creates a throwaway deletion schedule inside the cleanup block so the
# preview renders against realistic data without persisting it.
def group_scheduled_for_deletion
cleanup do
group.create_deletion_schedule!(
marked_for_deletion_on: Time.current,
deleting_user: user
)
::Notify.group_scheduled_for_deletion(user.id, group.id).message
end
end
private
def project

View File

@ -18,8 +18,12 @@ module Groups # rubocop:disable Gitlab/BoundedContexts -- existing top-level mod
private
# overridden in EE
def send_group_deletion_notification; end
# Notifies group owners that the group was scheduled for deletion.
# Guarded by the :group_deletion_notification_email feature flag (actor:
# group) and only fires for groups using adjourned (delayed) deletion.
def send_group_deletion_notification
return unless ::Feature.enabled?(:group_deletion_notification_email, group) &&
group.adjourned_deletion?
::NotificationService.new.group_scheduled_for_deletion(group)
end
def create_deletion_schedule
deletion_schedule = group.build_deletion_schedule(deletion_schedule_params)

View File

@ -801,6 +801,19 @@ class NotificationService
end
end
# Enqueues a "group scheduled for deletion" email to every active owner of
# the group (invited/requested members excluded). No-op when the group has
# emails disabled.
def group_scheduled_for_deletion(group)
return if group.emails_disabled?
recipients = group.members.active_without_invites_and_requests.owners.map(&:user)
recipients.each do |recipient|
# deliver_later enqueues via ActiveJob instead of sending inline.
mailer.group_scheduled_for_deletion(
recipient.id,
group.id
).deliver_later
end
end
protected
def new_resource_email(target, current_user, method)

View File

@ -29,9 +29,7 @@ module Projects
private
def send_project_deletion_notification
return unless ::Feature.enabled?(:project_deletion_notification_email, project) &&
project.adjourned_deletion? &&
project.marked_for_deletion?
return unless project.adjourned_deletion? && project.marked_for_deletion?
::NotificationService.new.project_scheduled_for_deletion(project)
end

View File

@ -0,0 +1,7 @@
-# HTML body for the "group scheduled for deletion" email.
-# Expects @user, @group, @deletion_due_in_days and @deletion_date to be set
-# by Emails::Groups#group_scheduled_for_deletion.
%p
= _('Hi %{username}!') % { username: sanitize_name(@user.name) }
%p
= _('Your group %{group_name} has been marked for deletion and will be removed in %{days}.').html_safe % { group_name: link_to(@group.full_name, group_url(@group)), days: pluralize((@deletion_due_in_days / 1.day).to_i, _('day')) }
%p
- link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: url_for(controller: 'groups', action: 'edit', id: @group.full_path, anchor: 'js-advanced-settings', only_path: false) }
= _('If this was a mistake, you can %{link_start}retain the group%{link_end} before %{deletion_date}.').html_safe % { link_start: link_start, link_end: '</a>'.html_safe, deletion_date: @deletion_date }

View File

@ -0,0 +1,4 @@
<%# Plain-text body for the "group scheduled for deletion" email.
    Expects @user, @group, @deletion_due_in_days and @deletion_date to be set
    by Emails::Groups#group_scheduled_for_deletion. %>
<%= _('Hi %{username}!') % { username: sanitize_name(@user.name) } %>
<%= _('Your group %{group_name} has been marked for deletion and will be removed in %{days}.') % { group_name: @group.full_name, days: pluralize((@deletion_due_in_days / 1.day).to_i, _('day')) } %>
<%= _('View your group: %{group_url}') % { group_url: group_url(@group) } %>
<%= _('If this was a mistake, you can retain the group before %{deletion_date}: %{retention_url}') % { retention_url: url_for(controller: 'groups', action: 'edit', id: @group.full_path, anchor: 'js-advanced-settings', only_path: false), deletion_date: @deletion_date } %>

View File

@ -1,9 +0,0 @@
---
name: bulk_update_work_items_mutation
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/434296
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/161507
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/476586
milestone: '17.4'
group: group::project management
type: beta
default_enabled: true

View File

@ -1,10 +1,9 @@
---
name: project_deletion_notification_email
name: group_deletion_notification_email
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/522883
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/184026
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/525979
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/185270
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/526037
milestone: '17.11'
group: group::authorization
type: gitlab_com_derisk
default_enabled: false

View File

@ -0,0 +1,14 @@
---
table_name: work_item_custom_lifecycle_statuses
classes:
- WorkItems::Statuses::Custom::LifecycleStatus
feature_categories:
- team_planning
description: Stores the association between custom lifecycles and statuses with position
information for ordering
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186050
milestone: '17.11'
gitlab_schema: gitlab_main_cell
sharding_key:
namespace_id: namespaces
table_size: small

View File

@ -0,0 +1,13 @@
---
table_name: work_item_custom_lifecycles
classes:
- WorkItems::Statuses::Custom::Lifecycle
feature_categories:
- team_planning
description: Stores namespace-level custom lifecycle configurations with default statuses for open, closed, and duplicate states
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186050
milestone: '17.11'
gitlab_schema: gitlab_main_cell
sharding_key:
namespace_id: namespaces
table_size: small

View File

@ -0,0 +1,13 @@
---
table_name: work_item_custom_statuses
classes:
- WorkItems::Statuses::Custom::Status
feature_categories:
- team_planning
description: Stores namespace-level custom status definitions for work items
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186050
milestone: '17.11'
gitlab_schema: gitlab_main_cell
sharding_key:
namespace_id: namespaces
table_size: small

View File

@ -0,0 +1,13 @@
---
table_name: work_item_type_custom_lifecycles
classes:
- WorkItems::TypeCustomLifecycle
feature_categories:
- team_planning
description: Associates work item types with custom lifecycles within a namespace
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186050
milestone: '17.11'
gitlab_schema: gitlab_main_cell
sharding_key:
namespace_id: namespaces
table_size: small

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
# Creates the work_item_custom_statuses table: namespace-scoped custom status
# definitions for work items. Status names are unique per namespace.
class CreateWorkItemCustomStatuses < Gitlab::Database::Migration[2.2]
milestone '17.11'
def change
# Factory: /ee/spec/factories/work_items/statuses/custom/statuses.rb
create_table :work_item_custom_statuses do |t| # rubocop:disable Migration/EnsureFactoryForTable -- reason above
t.bigint :namespace_id, null: false
t.timestamps_with_timezone null: false
# category defaults to 1; limit: 1 maps to smallint in PostgreSQL.
t.integer :category, null: false, default: 1, limit: 1
t.text :name, null: false, limit: 255
t.text :description, limit: 255
# 7 chars fits a '#rrggbb' hex color value.
t.text :color, null: false, limit: 7
t.index [:namespace_id, :name], unique: true
end
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Adds a cascading foreign key from work_item_custom_statuses.namespace_id
# to namespaces. Concurrent FK creation requires running outside a DDL
# transaction, hence disable_ddl_transaction!.
class AddNamespaceFkToWorkItemCustomStatuses < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.11'
def up
add_concurrent_foreign_key :work_item_custom_statuses, :namespaces,
column: :namespace_id, on_delete: :cascade
end
def down
remove_foreign_key_if_exists :work_item_custom_statuses, column: :namespace_id
end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
# Creates the work_item_custom_lifecycles table: namespace-scoped lifecycle
# configurations referencing default open/closed/duplicate statuses.
# Lifecycle names are unique per namespace. FKs for the three status columns
# are added in separate follow-up migrations.
class CreateWorkItemCustomLifecycles < Gitlab::Database::Migration[2.2]
milestone '17.11'
def change
# Factory: /ee/spec/factories/work_items/statuses/custom/lifecycles.rb
create_table :work_item_custom_lifecycles do |t| # rubocop:disable Migration/EnsureFactoryForTable -- reason above
t.bigint :namespace_id, null: false
t.bigint :default_open_status_id, null: false,
index: { name: 'idx_wi_custom_lifecycles_on_open_status_id' }
t.bigint :default_closed_status_id, null: false,
index: { name: 'idx_wi_custom_lifecycles_on_closed_status_id' }
t.bigint :default_duplicate_status_id, null: false,
index: { name: 'idx_wi_custom_lifecycles_on_duplicate_status_id' }
t.timestamps_with_timezone null: false
t.text :name, null: false, limit: 255
t.index [:namespace_id, :name], unique: true
end
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Adds a cascading foreign key from work_item_custom_lifecycles.namespace_id
# to namespaces (concurrently, so outside a DDL transaction).
class AddNamespaceFkToWorkItemCustomLifecycles < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.11'
def up
add_concurrent_foreign_key :work_item_custom_lifecycles, :namespaces,
column: :namespace_id, on_delete: :cascade
end
def down
remove_foreign_key_if_exists :work_item_custom_lifecycles, column: :namespace_id
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Adds a cascading foreign key from
# work_item_custom_lifecycles.default_open_status_id to
# work_item_custom_statuses (concurrently, so outside a DDL transaction).
class AddOpenStatusFkToWorkItemCustomLifecycles < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.11'
def up
add_concurrent_foreign_key :work_item_custom_lifecycles, :work_item_custom_statuses,
column: :default_open_status_id, on_delete: :cascade
end
def down
remove_foreign_key_if_exists :work_item_custom_lifecycles, column: :default_open_status_id
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Adds a cascading foreign key from
# work_item_custom_lifecycles.default_closed_status_id to
# work_item_custom_statuses (concurrently, so outside a DDL transaction).
class AddClosedStatusFkToWorkItemCustomLifecycles < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.11'
def up
add_concurrent_foreign_key :work_item_custom_lifecycles, :work_item_custom_statuses,
column: :default_closed_status_id, on_delete: :cascade
end
def down
remove_foreign_key_if_exists :work_item_custom_lifecycles, column: :default_closed_status_id
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Adds a cascading foreign key from
# work_item_custom_lifecycles.default_duplicate_status_id to
# work_item_custom_statuses (concurrently, so outside a DDL transaction).
class AddDuplicateStatusFkToWorkItemCustomLifecycles < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.11'
def up
add_concurrent_foreign_key :work_item_custom_lifecycles, :work_item_custom_statuses,
column: :default_duplicate_status_id, on_delete: :cascade
end
def down
remove_foreign_key_if_exists :work_item_custom_lifecycles, column: :default_duplicate_status_id
end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
# Creates the work_item_custom_lifecycle_statuses join table associating
# lifecycles with their statuses, with a position column for ordering.
# Each (lifecycle, status) pair is unique; FKs are added in follow-ups.
class CreateWorkItemCustomLifecycleStatuses < Gitlab::Database::Migration[2.2]
milestone '17.11'
def change
# Factory: /ee/spec/factories/work_items/statuses/custom/lifecycle_statuses.rb
create_table :work_item_custom_lifecycle_statuses do |t| # rubocop:disable Migration/EnsureFactoryForTable -- reason above
t.bigint :namespace_id, null: false, index: { name: 'idx_wi_custom_lifecycle_statuses_on_namespace_id' }
t.bigint :lifecycle_id, null: false
t.bigint :status_id, null: false, index: { name: 'idx_wi_custom_lifecycle_statuses_on_status_id' }
t.timestamps_with_timezone null: false
t.integer :position, null: false, default: 0
t.index [:lifecycle_id, :status_id], unique: true, name: 'idx_lifecycle_statuses_on_lifecycle_and_status'
end
end
end

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
# Adds a check constraint ensuring
# work_item_custom_lifecycle_statuses.position is never negative.
class AddConstraintsToWorkItemCustomLifecycleStatuses < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.11'
def up
add_check_constraint :work_item_custom_lifecycle_statuses, 'position >= 0', constraint_name
end
def down
remove_check_constraint :work_item_custom_lifecycle_statuses, constraint_name
end
private
# Derives a deterministic constraint name so up and down reference the same
# constraint.
def constraint_name
check_constraint_name(:work_item_custom_lifecycle_statuses, :position, 'positive')
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Adds a cascading foreign key from
# work_item_custom_lifecycle_statuses.namespace_id to namespaces
# (concurrently, so outside a DDL transaction).
class AddNamespaceFkToWorkItemCustomLifecycleStatuses < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.11'
def up
add_concurrent_foreign_key :work_item_custom_lifecycle_statuses, :namespaces,
column: :namespace_id, on_delete: :cascade
end
def down
remove_foreign_key_if_exists :work_item_custom_lifecycle_statuses, column: :namespace_id
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Adds a cascading foreign key from
# work_item_custom_lifecycle_statuses.lifecycle_id to
# work_item_custom_lifecycles (concurrently, so outside a DDL transaction).
class AddLifecycleFkToWorkItemCustomLifecycleStatuses < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.11'
def up
add_concurrent_foreign_key :work_item_custom_lifecycle_statuses, :work_item_custom_lifecycles,
column: :lifecycle_id, on_delete: :cascade
end
def down
remove_foreign_key_if_exists :work_item_custom_lifecycle_statuses, column: :lifecycle_id
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Adds a cascading foreign key from
# work_item_custom_lifecycle_statuses.status_id to work_item_custom_statuses
# (concurrently, so outside a DDL transaction).
class AddStatusFkToWorkItemCustomLifecycleStatuses < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.11'
def up
add_concurrent_foreign_key :work_item_custom_lifecycle_statuses, :work_item_custom_statuses,
column: :status_id, on_delete: :cascade
end
def down
remove_foreign_key_if_exists :work_item_custom_lifecycle_statuses, column: :status_id
end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
# Creates the work_item_type_custom_lifecycles join table associating work
# item types with custom lifecycles within a namespace. The
# (namespace, type, lifecycle) triple is unique; FKs are added in follow-ups.
class CreateWorkItemTypeCustomLifecycles < Gitlab::Database::Migration[2.2]
milestone '17.11'
def change
# Factory: /ee/spec/factories/work_items/type_custom_lifecycles.rb
create_table :work_item_type_custom_lifecycles do |t| # rubocop:disable Migration/EnsureFactoryForTable -- reason above
t.bigint :namespace_id, null: false
t.bigint :work_item_type_id, null: false, index: { name: 'idx_wi_type_custom_lifecycles_on_work_item_type_id' }
t.bigint :lifecycle_id, null: false, index: { name: 'idx_wi_type_custom_lifecycles_on_lifecycle_id' }
t.timestamps_with_timezone null: false
t.index [:namespace_id, :work_item_type_id, :lifecycle_id],
name: 'idx_wi_type_custom_lifecycles_on_namespace_type_lifecycle', unique: true
end
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Adds a cascading foreign key from
# work_item_type_custom_lifecycles.namespace_id to namespaces
# (concurrently, so outside a DDL transaction).
class AddNamespaceFkToWorkItemTypeCustomLifecycles < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.11'
def up
add_concurrent_foreign_key :work_item_type_custom_lifecycles, :namespaces,
column: :namespace_id, on_delete: :cascade
end
def down
remove_foreign_key_if_exists :work_item_type_custom_lifecycles, column: :namespace_id
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Adds a cascading foreign key from
# work_item_type_custom_lifecycles.work_item_type_id to work_item_types
# (concurrently, so outside a DDL transaction).
class AddWorkItemTypeFkToWorkItemTypeCustomLifecycles < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.11'
def up
add_concurrent_foreign_key :work_item_type_custom_lifecycles, :work_item_types,
column: :work_item_type_id, on_delete: :cascade
end
def down
remove_foreign_key_if_exists :work_item_type_custom_lifecycles, column: :work_item_type_id
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Adds a cascading foreign key from
# work_item_type_custom_lifecycles.lifecycle_id to
# work_item_custom_lifecycles (concurrently, so outside a DDL transaction).
class AddLifecycleFkToWorkItemTypeCustomLifecycles < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.11'
def up
add_concurrent_foreign_key :work_item_type_custom_lifecycles, :work_item_custom_lifecycles,
column: :lifecycle_id, on_delete: :cascade
end
def down
remove_foreign_key_if_exists :work_item_type_custom_lifecycles, column: :lifecycle_id
end
end

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
# Adds a check constraint ensuring work_item_custom_statuses.category is
# strictly positive (it defaults to 1 in the create-table migration).
class AddConstraintsToWorkItemCustomStatuses < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.11'
def up
add_check_constraint :work_item_custom_statuses, 'category > 0', constraint_name
end
def down
remove_check_constraint :work_item_custom_statuses, constraint_name
end
private
# Derives a deterministic constraint name so up and down reference the same
# constraint.
def constraint_name
check_constraint_name(:work_item_custom_statuses, :category, 'positive')
end
end

View File

@ -0,0 +1 @@
714cab6ca7bc87f6039c0c1d967b1e2388bd8a1f31de93989f778bfb7b78f2d3

View File

@ -0,0 +1 @@
585ce1ee7e12eed9f42dea313b51a06066a5246dca33fd01406e6cc00fc7c11f

View File

@ -0,0 +1 @@
fa33adec5be175d106873a06238e19550477a1148c7df1e0afd92ff4e9e38754

View File

@ -0,0 +1 @@
f0812398c78e3638bcbbe0ab36ef7c00be3a75d934cf67d649fbfa49f9bac82d

View File

@ -0,0 +1 @@
f596ff1f7890e5c72fa188b695a75f599178897c0d8b03e4cf922771099ef570

View File

@ -0,0 +1 @@
5e8d636a4769ae19d0e6944cc44158375591caec73d98d00976800330711f95b

View File

@ -0,0 +1 @@
b694bfa9009e4157239b4aa8f00bec178e0e34623390dca776d8749252370f7f

View File

@ -0,0 +1 @@
e813a2c65a5bfe770ca7265dbfafc1c0f07bc675dd3d1019bcdd3c73dca308b6

View File

@ -0,0 +1 @@
fea14c21c6c6095e1cc75ec5d91ca68ee251526ec57fd6dc6158661f42419c7b

View File

@ -0,0 +1 @@
c5371d0dc3018f961b6896ee397ad0480cbbda052e982f2ae9435807bfd4e85b

View File

@ -0,0 +1 @@
d82db1ad54f57442fbf34fab3b9c47889b306c663f7334899b89a1993a3c38b8

View File

@ -0,0 +1 @@
a6db729f2309953268392c13dc4d5bf1324590f6608646567251ec832e8bd051

View File

@ -0,0 +1 @@
1d701d25a12f8bcf46a22d3d9ea7863e14e50855d73afc2e9ce897ff4d53bb8c

View File

@ -0,0 +1 @@
e719a4ce58be91853f8f640e13d7bc4291e743f6fd281655de49249a36606f1d

View File

@ -0,0 +1 @@
f6c271a17b3f5a898b1b3c3616e2c3057312c05b0e94eed57c383d4059189394

View File

@ -0,0 +1 @@
8fe7d68d8b13040e3d89792ef26146b3f14f785cdc14abf76d6b1ef22bad5ebe

View File

@ -0,0 +1 @@
6c2672fb3be2944c3840986ce70797f2b262596b8709f858305174620387e5f1

View File

@ -25606,6 +25606,71 @@ CREATE SEQUENCE work_item_current_statuses_id_seq
ALTER SEQUENCE work_item_current_statuses_id_seq OWNED BY work_item_current_statuses.id;
CREATE TABLE work_item_custom_lifecycle_statuses (
id bigint NOT NULL,
namespace_id bigint NOT NULL,
lifecycle_id bigint NOT NULL,
status_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
"position" integer DEFAULT 0 NOT NULL,
CONSTRAINT check_91172799d3 CHECK (("position" >= 0))
);
CREATE SEQUENCE work_item_custom_lifecycle_statuses_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE work_item_custom_lifecycle_statuses_id_seq OWNED BY work_item_custom_lifecycle_statuses.id;
CREATE TABLE work_item_custom_lifecycles (
id bigint NOT NULL,
namespace_id bigint NOT NULL,
default_open_status_id bigint NOT NULL,
default_closed_status_id bigint NOT NULL,
default_duplicate_status_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
name text NOT NULL,
CONSTRAINT check_1feff2de99 CHECK ((char_length(name) <= 255))
);
CREATE SEQUENCE work_item_custom_lifecycles_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE work_item_custom_lifecycles_id_seq OWNED BY work_item_custom_lifecycles.id;
CREATE TABLE work_item_custom_statuses (
id bigint NOT NULL,
namespace_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
category smallint DEFAULT 1 NOT NULL,
name text NOT NULL,
description text,
color text NOT NULL,
CONSTRAINT check_4789467800 CHECK ((char_length(color) <= 7)),
CONSTRAINT check_720a7c4d24 CHECK ((char_length(name) <= 255)),
CONSTRAINT check_8ea8b3c991 CHECK ((char_length(description) <= 255)),
CONSTRAINT check_ff2bac1606 CHECK ((category > 0))
);
CREATE SEQUENCE work_item_custom_statuses_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE work_item_custom_statuses_id_seq OWNED BY work_item_custom_statuses.id;
CREATE TABLE work_item_dates_sources (
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
@ -25766,6 +25831,24 @@ CREATE SEQUENCE work_item_type_custom_fields_id_seq
ALTER SEQUENCE work_item_type_custom_fields_id_seq OWNED BY work_item_type_custom_fields.id;
CREATE TABLE work_item_type_custom_lifecycles (
id bigint NOT NULL,
namespace_id bigint NOT NULL,
work_item_type_id bigint NOT NULL,
lifecycle_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL
);
CREATE SEQUENCE work_item_type_custom_lifecycles_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE work_item_type_custom_lifecycles_id_seq OWNED BY work_item_type_custom_lifecycles.id;
CREATE TABLE work_item_type_user_preferences (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@ -28017,6 +28100,12 @@ ALTER TABLE ONLY wiki_repository_states ALTER COLUMN id SET DEFAULT nextval('wik
ALTER TABLE ONLY work_item_current_statuses ALTER COLUMN id SET DEFAULT nextval('work_item_current_statuses_id_seq'::regclass);
ALTER TABLE ONLY work_item_custom_lifecycle_statuses ALTER COLUMN id SET DEFAULT nextval('work_item_custom_lifecycle_statuses_id_seq'::regclass);
ALTER TABLE ONLY work_item_custom_lifecycles ALTER COLUMN id SET DEFAULT nextval('work_item_custom_lifecycles_id_seq'::regclass);
ALTER TABLE ONLY work_item_custom_statuses ALTER COLUMN id SET DEFAULT nextval('work_item_custom_statuses_id_seq'::regclass);
ALTER TABLE ONLY work_item_hierarchy_restrictions ALTER COLUMN id SET DEFAULT nextval('work_item_hierarchy_restrictions_id_seq'::regclass);
ALTER TABLE ONLY work_item_number_field_values ALTER COLUMN id SET DEFAULT nextval('work_item_number_field_values_id_seq'::regclass);
@ -28031,6 +28120,8 @@ ALTER TABLE ONLY work_item_text_field_values ALTER COLUMN id SET DEFAULT nextval
ALTER TABLE ONLY work_item_type_custom_fields ALTER COLUMN id SET DEFAULT nextval('work_item_type_custom_fields_id_seq'::regclass);
ALTER TABLE ONLY work_item_type_custom_lifecycles ALTER COLUMN id SET DEFAULT nextval('work_item_type_custom_lifecycles_id_seq'::regclass);
ALTER TABLE ONLY work_item_type_user_preferences ALTER COLUMN id SET DEFAULT nextval('work_item_type_user_preferences_id_seq'::regclass);
ALTER TABLE ONLY work_item_widget_definitions ALTER COLUMN id SET DEFAULT nextval('work_item_widget_definitions_id_seq'::regclass);
@ -31215,6 +31306,15 @@ ALTER TABLE ONLY work_item_colors
ALTER TABLE ONLY work_item_current_statuses
ADD CONSTRAINT work_item_current_statuses_pkey PRIMARY KEY (id);
ALTER TABLE ONLY work_item_custom_lifecycle_statuses
ADD CONSTRAINT work_item_custom_lifecycle_statuses_pkey PRIMARY KEY (id);
ALTER TABLE ONLY work_item_custom_lifecycles
ADD CONSTRAINT work_item_custom_lifecycles_pkey PRIMARY KEY (id);
ALTER TABLE ONLY work_item_custom_statuses
ADD CONSTRAINT work_item_custom_statuses_pkey PRIMARY KEY (id);
ALTER TABLE ONLY work_item_dates_sources
ADD CONSTRAINT work_item_dates_sources_pkey PRIMARY KEY (issue_id);
@ -31242,6 +31342,9 @@ ALTER TABLE ONLY work_item_text_field_values
ALTER TABLE ONLY work_item_type_custom_fields
ADD CONSTRAINT work_item_type_custom_fields_pkey PRIMARY KEY (id);
ALTER TABLE ONLY work_item_type_custom_lifecycles
ADD CONSTRAINT work_item_type_custom_lifecycles_pkey PRIMARY KEY (id);
ALTER TABLE ONLY work_item_type_user_preferences
ADD CONSTRAINT work_item_type_user_preferences_pkey PRIMARY KEY (id);
@ -33154,6 +33257,8 @@ CREATE UNIQUE INDEX idx_jira_connect_subscriptions_on_installation_id_namespace_
CREATE INDEX idx_keys_expires_at_and_before_expiry_notification_undelivered ON keys USING btree (date(timezone('UTC'::text, expires_at)), before_expiry_notification_delivered_at) WHERE (before_expiry_notification_delivered_at IS NULL);
CREATE UNIQUE INDEX idx_lifecycle_statuses_on_lifecycle_and_status ON work_item_custom_lifecycle_statuses USING btree (lifecycle_id, status_id);
CREATE INDEX idx_member_roles_on_base_access_level ON member_roles USING btree (base_access_level);
CREATE INDEX idx_members_created_at_user_id_invite_token ON members USING btree (created_at) WHERE ((invite_token IS NOT NULL) AND (user_id IS NULL));
@ -33426,6 +33531,16 @@ CREATE UNIQUE INDEX idx_wi_current_statuses_on_wi_id_custom_status_id_unique ON
CREATE UNIQUE INDEX idx_wi_current_statuses_on_wi_id_system_def_status_id_unique ON work_item_current_statuses USING btree (work_item_id, system_defined_status_id);
CREATE INDEX idx_wi_custom_lifecycle_statuses_on_namespace_id ON work_item_custom_lifecycle_statuses USING btree (namespace_id);
CREATE INDEX idx_wi_custom_lifecycle_statuses_on_status_id ON work_item_custom_lifecycle_statuses USING btree (status_id);
CREATE INDEX idx_wi_custom_lifecycles_on_closed_status_id ON work_item_custom_lifecycles USING btree (default_closed_status_id);
CREATE INDEX idx_wi_custom_lifecycles_on_duplicate_status_id ON work_item_custom_lifecycles USING btree (default_duplicate_status_id);
CREATE INDEX idx_wi_custom_lifecycles_on_open_status_id ON work_item_custom_lifecycles USING btree (default_open_status_id);
CREATE UNIQUE INDEX idx_wi_number_values_on_work_item_id_custom_field_id ON work_item_number_field_values USING btree (work_item_id, custom_field_id);
CREATE INDEX idx_wi_select_field_values_on_custom_field_select_option_id ON work_item_select_field_values USING btree (custom_field_select_option_id);
@ -33436,6 +33551,12 @@ CREATE UNIQUE INDEX idx_wi_text_values_on_work_item_id_custom_field_id ON work_i
CREATE UNIQUE INDEX idx_wi_type_custom_fields_on_ns_id_wi_type_id_custom_field_id ON work_item_type_custom_fields USING btree (namespace_id, work_item_type_id, custom_field_id);
CREATE INDEX idx_wi_type_custom_lifecycles_on_lifecycle_id ON work_item_type_custom_lifecycles USING btree (lifecycle_id);
CREATE UNIQUE INDEX idx_wi_type_custom_lifecycles_on_namespace_type_lifecycle ON work_item_type_custom_lifecycles USING btree (namespace_id, work_item_type_id, lifecycle_id);
CREATE INDEX idx_wi_type_custom_lifecycles_on_work_item_type_id ON work_item_type_custom_lifecycles USING btree (work_item_type_id);
CREATE INDEX idx_zoekt_last_indexed_at_gt_used_storage_bytes_updated_at ON zoekt_indices USING btree (used_storage_bytes_updated_at) WHERE (last_indexed_at >= used_storage_bytes_updated_at);
CREATE INDEX idx_zoekt_repositories_on_zoekt_index_id_and_size_bytes ON zoekt_repositories USING btree (zoekt_index_id, size_bytes);
@ -38006,6 +38127,10 @@ CREATE INDEX index_work_item_current_statuses_on_namespace_id ON work_item_curre
CREATE UNIQUE INDEX index_work_item_current_statuses_on_work_item_id ON work_item_current_statuses USING btree (work_item_id);
CREATE UNIQUE INDEX index_work_item_custom_lifecycles_on_namespace_id_and_name ON work_item_custom_lifecycles USING btree (namespace_id, name);
CREATE UNIQUE INDEX index_work_item_custom_statuses_on_namespace_id_and_name ON work_item_custom_statuses USING btree (namespace_id, name);
CREATE INDEX index_work_item_hierarchy_restrictions_on_child_type_id ON work_item_hierarchy_restrictions USING btree (child_type_id);
CREATE UNIQUE INDEX index_work_item_hierarchy_restrictions_on_parent_and_child ON work_item_hierarchy_restrictions USING btree (parent_type_id, child_type_id);
@ -41688,6 +41813,9 @@ ALTER TABLE ONLY deployments
ALTER TABLE ONLY projects_branch_rules_merge_request_approval_settings
ADD CONSTRAINT fk_00acf20382 FOREIGN KEY (protected_branch_id) REFERENCES protected_branches(id) ON DELETE CASCADE;
ALTER TABLE ONLY work_item_custom_lifecycles
ADD CONSTRAINT fk_00c659d395 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY epics
ADD CONSTRAINT fk_013c9f36ca FOREIGN KEY (due_date_sourcing_epic_id) REFERENCES epics(id) ON DELETE SET NULL;
@ -41712,9 +41840,15 @@ ALTER TABLE ONLY audit_events_instance_google_cloud_logging_configurations
ALTER TABLE ONLY service_desk_settings
ADD CONSTRAINT fk_03afb71f06 FOREIGN KEY (file_template_project_id) REFERENCES projects(id) ON DELETE SET NULL;
ALTER TABLE ONLY work_item_type_custom_lifecycles
ADD CONSTRAINT fk_03c6229585 FOREIGN KEY (lifecycle_id) REFERENCES work_item_custom_lifecycles(id) ON DELETE CASCADE;
ALTER TABLE ONLY design_management_designs_versions
ADD CONSTRAINT fk_03c671965c FOREIGN KEY (design_id) REFERENCES design_management_designs(id) ON DELETE CASCADE;
ALTER TABLE ONLY work_item_type_custom_lifecycles
ADD CONSTRAINT fk_0425cd8e8b FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY external_status_checks_protected_branches
ADD CONSTRAINT fk_0480f2308c FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -41766,6 +41900,9 @@ ALTER TABLE ONLY observability_traces_issues_connections
ALTER TABLE ONLY targeted_message_dismissals
ADD CONSTRAINT fk_08c30af7ff FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY work_item_custom_lifecycle_statuses
ADD CONSTRAINT fk_08e006a1a3 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY merge_request_assignment_events
ADD CONSTRAINT fk_08f7602bfd FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE;
@ -41781,6 +41918,9 @@ ALTER TABLE ONLY dast_sites
ALTER TABLE ONLY project_saved_replies
ADD CONSTRAINT fk_0ace76afbb FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE NOT VALID;
ALTER TABLE ONLY work_item_custom_lifecycles
ADD CONSTRAINT fk_0b028ab81c FOREIGN KEY (default_open_status_id) REFERENCES work_item_custom_statuses(id) ON DELETE CASCADE;
ALTER TABLE ONLY subscription_seat_assignments
ADD CONSTRAINT fk_0b6bc63773 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
@ -41835,6 +41975,9 @@ ALTER TABLE ONLY packages_package_file_build_infos
ALTER TABLE ONLY audit_events_streaming_event_type_filters
ADD CONSTRAINT fk_107946dffb FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY work_item_type_custom_lifecycles
ADD CONSTRAINT fk_111d417cb7 FOREIGN KEY (work_item_type_id) REFERENCES work_item_types(id) ON DELETE CASCADE;
ALTER TABLE ONLY group_deletion_schedules
ADD CONSTRAINT fk_11e3ebfcdd FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
@ -42183,6 +42326,9 @@ ALTER TABLE ONLY sprints
ALTER TABLE ONLY operations_feature_flags_issues
ADD CONSTRAINT fk_3685a990ae FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY work_item_custom_statuses
ADD CONSTRAINT fk_3694bacabe FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY push_event_payloads
ADD CONSTRAINT fk_36c74129da FOREIGN KEY (event_id) REFERENCES events(id) ON DELETE CASCADE;
@ -42579,6 +42725,9 @@ ALTER TABLE ONLY audit_events_streaming_instance_namespace_filters
ALTER TABLE ONLY terraform_state_versions
ADD CONSTRAINT fk_6e81384d7f FOREIGN KEY (created_by_user_id) REFERENCES users(id) ON DELETE SET NULL;
ALTER TABLE ONLY work_item_custom_lifecycles
ADD CONSTRAINT fk_6e8df43239 FOREIGN KEY (default_duplicate_status_id) REFERENCES work_item_custom_statuses(id) ON DELETE CASCADE;
ALTER TABLE ONLY protected_environment_approval_rules
ADD CONSTRAINT fk_6ee8249821 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
@ -42717,6 +42866,9 @@ ALTER TABLE ONLY issue_customer_relations_contacts
ALTER TABLE ONLY ssh_signatures
ADD CONSTRAINT fk_7d2f93996c FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY work_item_custom_lifecycles
ADD CONSTRAINT fk_7d5eb33a21 FOREIGN KEY (default_closed_status_id) REFERENCES work_item_custom_statuses(id) ON DELETE CASCADE;
ALTER TABLE ONLY sent_notifications
ADD CONSTRAINT fk_7d7663e36a FOREIGN KEY (issue_email_participant_id) REFERENCES issue_email_participants(id) ON DELETE SET NULL NOT VALID;
@ -42840,6 +42992,9 @@ ALTER TABLE ONLY protected_branch_merge_access_levels
ALTER TABLE ONLY work_item_dates_sources
ADD CONSTRAINT fk_8a4948b668 FOREIGN KEY (start_date_sourcing_work_item_id) REFERENCES issues(id) ON DELETE SET NULL;
ALTER TABLE ONLY work_item_custom_lifecycle_statuses
ADD CONSTRAINT fk_8a6dadaf44 FOREIGN KEY (status_id) REFERENCES work_item_custom_statuses(id) ON DELETE CASCADE;
ALTER TABLE ONLY targeted_message_namespaces
ADD CONSTRAINT fk_8ba73cd32a FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
@ -43050,6 +43205,9 @@ ALTER TABLE ONLY issuable_metric_images
ALTER TABLE ONLY operations_strategies
ADD CONSTRAINT fk_a542e10c31 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY work_item_custom_lifecycle_statuses
ADD CONSTRAINT fk_a546eef539 FOREIGN KEY (lifecycle_id) REFERENCES work_item_custom_lifecycles(id) ON DELETE CASCADE;
ALTER TABLE ONLY lfs_objects_projects
ADD CONSTRAINT fk_a56e02279c FOREIGN KEY (lfs_object_id) REFERENCES lfs_objects(id) ON DELETE RESTRICT NOT VALID;

View File

@ -5,7 +5,7 @@
#
# For a list of all options, see https://vale.sh/docs/topics/styles/
extends: existence
message: "Use exactly one space between sentences and clauses. Check '%s' for spacing problems."
message: "Use exactly one space with punctuation. Check '%s' for spacing problems."
link: https://docs.gitlab.com/development/documentation/styleguide/#punctuation
vocab: false
level: error
@ -13,3 +13,4 @@ nonword: true
tokens:
- '[a-z][.?!,][A-Z]'
- '[\w.?!,\(\)\-":] {2,}[\w.?!,\(\)\-":]'
- '[a-z] +[.?!,:] +'

View File

@ -199,7 +199,7 @@ To update a streaming destination's event filters:
### Override default content type header
By default, streaming destinations use a `content-type` header of `application/x-www-form-urlencoded`. However, you
might want to set the `content-type` header to something else. For example ,`application/json`.
might want to set the `content-type` header to something else. For example, `application/json`.
To override the `content-type` header default value for an instance streaming destination, use either:

View File

@ -343,7 +343,7 @@ To unban a user:
1. On the left sidebar, at the bottom, select **Admin**.
1. Select **Overview > Users**.
1. In the search box , filter by **State=Banned** and press <kbd>Enter</kbd>.
1. In the search box, filter by **State=Banned** and press <kbd>Enter</kbd>.
1. Next to the member you want to ban, select the vertical ellipsis ({{< icon name="ellipsis_v" >}}).
1. From the dropdown list, select **Unban member**.

View File

@ -1162,7 +1162,7 @@ Prerequisites:
The configuration process is interactive. Add at least two "remotes": one for the object storage provider your data is currently on (`old`), and one for the provider you are moving to (`new`).
1. Verify that you can read the old data. The following example refers to the `uploads` bucket , but your bucket may have a different name:
1. Verify that you can read the old data. The following example refers to the `uploads` bucket, but your bucket may have a different name:
```shell
rclone ls old:uploads | head

View File

@ -628,7 +628,7 @@ The messages returned are informational and can be ignored.
### PostgreSQL socket errors when executing the `gitlab:env:info` Rake task
After running `sudo gitlab-rake gitlab:env:info` on Gitaly or other non-Rails nodes , you might see the following error:
After running `sudo gitlab-rake gitlab:env:info` on Gitaly or other non-Rails nodes, you might see the following error:
```plaintext
PG::ConnectionBad: could not connect to server: No such file or directory

View File

@ -33,7 +33,7 @@ Parameters:
| `project_id` | integer | no | The ID of a project |
| `group_id` | integer | no | The ID of a group |
| `state` | string | no | The state of the to-do item. Can be either `pending` or `done` |
| `type` | string | no | The type of to-do item. Can be either `Issue`, `MergeRequest`, `Commit`, `Epic`, `DesignManagement::Design`, `AlertManagement::Alert`, `Project`, `Namespace` or `Vulnerability` |
| `type` | string | no | The type of to-do item. Can be `Issue`, `MergeRequest`, `Commit`, `Epic`, `DesignManagement::Design`, `AlertManagement::Alert`, `Project`, `Namespace`, `Vulnerability`, or `WikiPage::Meta` |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/todos"

View File

@ -27,7 +27,7 @@ sprite_icon(icon_name, size: nil, css_class: '')
- **`icon_name`**: Use the `icon_name` for the SVG sprite in the list of
([GitLab SVGs](https://gitlab-org.gitlab.io/gitlab-svgs)).
- **`size` (optional)**: Use one of the following sizes : 16, 24, 32, 48, 72 (this
- **`size` (optional)**: Use one of the following sizes: 16, 24, 32, 48, 72 (this
is translated into a `s16` class)
- **`css_class` (optional)**: If you want to add additional CSS classes.

View File

@ -102,7 +102,7 @@ If you see errors like `Missing field 'descriptionHtml' while writing result` ,
The error "Missing field 'description'" indicates that your GraphQL query expects a field named "description" in the response, but the data you're receiving from your backend (or how it's being processed by Apollo Client) is missing that field. This is causing Apollo Client's cache to fail when it tries to update the store with the incomplete data.
To debug this , follow the below steps
To debug this, follow the steps below:
1. Open the error stack developer console
@ -139,7 +139,7 @@ If data related to a query has been invalidated or updated, the cache might not
When using mutations, the cache might not automatically update unless you configure `refetchQueries` or use a manual cache update after the mutation.
For example : in the first query you have a couple of fields that were not requested in the subsequent query
For example: in the first query you have a couple of fields that were not requested in the subsequent query
```graphql
query workItemTreeQuery($id: WorkItemID!, $pageSize: Int = 100, $endCursor: String) {

View File

@ -17,8 +17,8 @@ Standard context, also referred to as [Cloud context](https://gitlab.com/gitlab-
| Field | Type | Description | Example |
|-------------------|---------------|---------------------------------------------------------------------------------------------------|---------------------|
| `project_id` | integer, null | ID of the associated project. This is available when tracking is done inside any project path. (example : [GitLab project](https://gitlab.com/gitlab-org/gitlab)) | `12345` |
| `namespace_id` | integer, null | ID of the associated namespace. This is available when tracking is done inside any group path. (example : [GitLab-org](https://gitlab.com/gitlab-org)) | `67890` |
| `project_id` | integer, null | ID of the associated project. This is available when tracking is done inside any project path. (example: [GitLab project](https://gitlab.com/gitlab-org/gitlab)) | `12345` |
| `namespace_id` | integer, null | ID of the associated namespace. This is available when tracking is done inside any group path. (example: [GitLab-org](https://gitlab.com/gitlab-org)) | `67890` |
| `user_id` | integer, null | ID of the associated user. This gets pseudonymized in the Snowplow enricher. Refer to the [metrics dictionary](https://metrics.gitlab.com/identifiers/). | `longhash` |
| `global_user_id` | string, null | An anonymized `user_id` hash unique across instances. | `longhash` |
| `is_gitlab_team_member` | boolean, null | Indicates if the action was triggered by a GitLab team member. | `true`, `false` |

View File

@ -163,11 +163,23 @@ To use Docker with `replace` in the `go.mod` file:
Users may use tools other than Docker to orchestrate their containers and run their analyzers,
such as [containerd](https://containerd.io/), [Podman](https://podman.io/), or [skopeo](https://github.com/containers/skopeo).
In order to avoid inadvertently adding proprietary Docker features which might break customer tools, we [run a periodic test](https://gitlab.com/gitlab-org/security-products/tests/analyzer-containerization-support/-/blob/main/.gitlab-ci.yml?ref_type=heads) for all analyzers, to ensure that these tools still function as expected, and a Slack alert is raised if a failure occurs.
To ensure compatibility with these tools, we [periodically test](https://gitlab.com/gitlab-org/security-products/tests/analyzer-containerization-support/-/blob/main/.gitlab-ci.yml?ref_type=heads)
all analyzers using a scheduled pipeline. A Slack alert is raised if a test fails.
In addition to the periodic test, analyzers using the [`ci-templates` `docker-test.yml` template](https://gitlab.com/gitlab-org/security-products/ci-templates/-/blob/master/includes-dev/docker-test.yml) include a [`check docker manifest`](https://gitlab.com/gitlab-org/security-products/ci-templates/-/blob/c0f217560b134f4ebe6024b26a41f77cea885c2c/includes-dev/docker-test.yml#L157-165) test in their pipelines, to prevent proprietary Docker features from being merged in the first place.
To avoid compatibility issues when building analyzer Docker images, use the [OCI media types](https://docs.docker.com/build/exporters/#oci-media-types) instead of the default proprietary Docker media types.
When creating a new analyzer, or changing the location of existing analyzer images, ensure that the analyzer is accounted for in the periodic test and consider using the shared [`ci-templates`](https://gitlab.com/gitlab-org/security-products/ci-templates/).
In addition to the periodic test, we ensure compatibility for users of the [`ci-templates` repo](https://gitlab.com/gitlab-org/security-products/ci-templates):
1. Analyzers using the [`ci-templates` `docker-test.yml` template](https://gitlab.com/gitlab-org/security-products/ci-templates/-/blob/master/includes-dev/docker-test.yml)
include [`tests`](https://gitlab.com/gitlab-org/security-products/ci-templates/-/blob/08319f7586fd9cc66f58ca894525ab54a2b7d831/includes-dev/docker-test.yml#L155-179) to ensure our Docker images function correctly with supported Docker tools.
These tests are executed in Merge Request pipelines and scheduled pipelines, and prevent images from being released if they break the supported Docker tools.
1. The [`ci-templates` `docker.yml` template](https://gitlab.com/gitlab-org/security-products/ci-templates/-/blob/master/includes-dev/docker.yml)
specifies [`oci-mediatypes=true`](https://docs.docker.com/build/exporters/#oci-media-types) for the `docker buildx` command when building analyzer images.
This builds images using [OCI](https://opencontainers.org/) media types rather than Docker proprietary media types.
When creating a new analyzer, or changing the location of existing analyzer images,
add it to the periodic test, or consider using the shared [`ci-templates`](https://gitlab.com/gitlab-org/security-products/ci-templates/) which includes an automated test.
## Analyzer scripts

View File

@ -121,7 +121,7 @@ We also have a [reusable base dropdown widget wrapper](https://gitlab.com/gitlab
1. Depending on the input field (for example, a dropdown, a text input, or any other custom design), we should make sure that we use an [existing wrapper](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/assets/javascripts/work_items/components/shared/work_item_sidebar_dropdown_widget.vue) or a completely new component
1. Ideally any new widget should be behind an FF to make sure we have room for testing unless there is a priority for the widget.
1. Create the new widget in the [folder](https://gitlab.com/gitlab-org/gitlab/-/tree/master/app/assets/javascripts/work_items/components)
1. If it is an editable widget in the sidebar , you should include it in [work_item_attributes_wrapper](https://gitlab.com/gitlab-org/gitlab/-/tree/master/app/assets/javascripts/work_items/components/work_item_attributes_wrapper.vue)
1. If it is an editable widget in the sidebar, you should include it in [work_item_attributes_wrapper](https://gitlab.com/gitlab-org/gitlab/-/tree/master/app/assets/javascripts/work_items/components/work_item_attributes_wrapper.vue)
### Steps
@ -174,9 +174,9 @@ To resolve this, update the mocked `Gitlab::QueryLimiting::Transaction.threshold
Since create view is almost identical to detail view, and we wanted to store in the draft data of each widget, each new work item for a specific type has a new cache entry apollo.
For example , when we initialise the create view , we have a function `setNewWorkItemCache` [in work items cache utils](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/assets/javascripts/work_items/graphql/cache_utils) which is called in both [create view work item modal](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/assets/javascripts/work_items/components/create_work_item_modal.vue) and also [create work item component](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/assets/javascripts/work_items/components/create_work_item.vue)
For example, when we initialize the create view, we have a function `setNewWorkItemCache` [in work items cache utils](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/assets/javascripts/work_items/graphql/cache_utils) which is called in both the [create view work item modal](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/assets/javascripts/work_items/components/create_work_item_modal.vue) and the [create work item component](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/assets/javascripts/work_items/components/create_work_item.vue)
You can include the create work item view in any vue file depending on usage. If you pass the `workItemType` of the create view , it will only include the applicable work item widgets which are fetched from [work item types query](../api/graphql/reference/_index.md#workitemtype) and only showing the ones in [widget definitions](../api/graphql/reference/_index.md#workitemwidgetdefinition)
You can include the create work item view in any vue file depending on usage. If you pass the `workItemType` of the create view, it will only include the applicable work item widgets which are fetched from [work item types query](../api/graphql/reference/_index.md#workitemtype) and only showing the ones in [widget definitions](../api/graphql/reference/_index.md#workitemwidgetdefinition)
We have a [local mutation](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/assets/javascripts/work_items/graphql/update_new_work_item.mutation.graphql) to update the work item draft data in create view
@ -202,7 +202,7 @@ if (this.workItemId === newWorkItemId(this.workItemType)) {
### Support new work item widget in local mutation
1. Add the input type in [work item local mutation typedefs](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/assets/javascripts/work_items/graphql/typedefs.graphql#L55). It can be anything , a custom object or a primitive value.
1. Add the input type in [work item local mutation typedefs](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/assets/javascripts/work_items/graphql/typedefs.graphql#L55). It can be anything, a custom object or a primitive value.
Example if you want add `parent` which has the name and ID of the parent of the work item

View File

@ -31,7 +31,7 @@ the [doorkeeper-openid_connect repository](https://github.com/doorkeeper-gem/doo
## Enable OIDC for OAuth applications
To enable OIDC for an OAuth application, you need to select the `openid` scope in the application
settings. For more information , see [Configure GitLab as an OAuth 2.0 authentication identity provider](oauth_provider.md).
settings. For more information, see [Configure GitLab as an OAuth 2.0 authentication identity provider](oauth_provider.md).
## Settings discovery

View File

@ -232,7 +232,7 @@ as WebAuthn, must be used (IA-2).
- GitLab [provides instructions](../user/ssh.md) on how to configure SSH keys to authenticate and communicate with Git. [Commits can be signed](../user/project/repository/signed_commits/ssh.md), providing additional verification for anyone with a public key.
- Keys should be configured to meet applicable strength and complexity requirements, such as using FIPS 140-2 and FIPS 140-3 validated ciphers . Administrators can [restrict minimum key technologies and key lengths](ssh_keys_restrictions.md). Additionally, administrators can [block or ban compromised keys](ssh_keys_restrictions.md#block-banned-or-compromised-keys).
- Keys should be configured to meet applicable strength and complexity requirements, such as using FIPS 140-2 and FIPS 140-3 validated ciphers. Administrators can [restrict minimum key technologies and key lengths](ssh_keys_restrictions.md). Additionally, administrators can [block or ban compromised keys](ssh_keys_restrictions.md#block-banned-or-compromised-keys).
**Personal access tokens**

View File

@ -202,4 +202,4 @@ You can also consider implementing abuse rate limiting as detailed in [Git abuse
GitLab SIRT maintains an active repository of detections in the [GitLab SIRT public project](https://gitlab.com/gitlab-security-oss/guard/-/tree/main/detections).
The detections in this repository are based on the audit events and in the general Sigma rule format. You can use sigma rule converter to get the rules in your desired format. Please refer to the repository for more information about Sigma format and tools related to it . Make sure you have GitLab audit logs ingested to your SIEM. You should follow the audit event streaming guide [for your self-managed instance](../administration/audit_event_streaming/_index.md) or [GitLab.com top-level group](../user/compliance/audit_event_streaming.md) to stream audit events to your desired destination.
The detections in this repository are based on the audit events and in the general Sigma rule format. You can use sigma rule converter to get the rules in your desired format. Please refer to the repository for more information about Sigma format and tools related to it. Make sure you have GitLab audit logs ingested to your SIEM. You should follow the audit event streaming guide [for your self-managed instance](../administration/audit_event_streaming/_index.md) or [GitLab.com top-level group](../user/compliance/audit_event_streaming.md) to stream audit events to your desired destination.

View File

@ -624,7 +624,7 @@ In the previous sample, you could use the script `user-pre-scan-set-up.sh` to al
## Exclude Paths
When testing an API it can be useful to exclude certain paths. For example, you might exclude testing of an authentication service or an older version of the API. To exclude paths, use the `FUZZAPI_EXCLUDE_PATHS` CI/CD variable . This variable is specified in your `.gitlab-ci.yml` file. To exclude multiple paths, separate entries using the `;` character. In the provided paths you can use a single character wildcard `?` and `*` for a multiple character wildcard.
When testing an API it can be useful to exclude certain paths. For example, you might exclude testing of an authentication service or an older version of the API. To exclude paths, use the `FUZZAPI_EXCLUDE_PATHS` CI/CD variable. This variable is specified in your `.gitlab-ci.yml` file. To exclude multiple paths, separate entries using the `;` character. In the provided paths you can use a single character wildcard `?` and `*` for a multiple character wildcard.
To verify the paths are excluded, review the `Tested Operations` and `Excluded Operations` portion of the job output. You should not see any excluded paths listed under `Tested Operations`.

View File

@ -654,7 +654,7 @@ Consider using `APISEC_REQUEST_HEADERS_BASE64` when storing secret header values
## Exclude Paths
When testing an API it can be useful to exclude certain paths. For example, you might exclude testing of an authentication service or an older version of the API. To exclude paths, use the `APISEC_EXCLUDE_PATHS` CI/CD variable . This variable is specified in your `.gitlab-ci.yml` file. To exclude multiple paths, separate entries using the `;` character. In the provided paths you can use a single character wildcard `?` and `*` for a multiple character wildcard.
When testing an API it can be useful to exclude certain paths. For example, you might exclude testing of an authentication service or an older version of the API. To exclude paths, use the `APISEC_EXCLUDE_PATHS` CI/CD variable. This variable is specified in your `.gitlab-ci.yml` file. To exclude multiple paths, separate entries using the `;` character. In the provided paths you can use a single character wildcard `?` and `*` for a multiple character wildcard.
To verify the paths are excluded, review the `Tested Operations` and `Excluded Operations` portion of the job output. You should not see any excluded paths listed under `Tested Operations`.

View File

@ -396,7 +396,7 @@ scan.
mode with `SECRET_DETECTION_LOG_OPTIONS` set to the commit range between last run and current
SHA. You can override this behavior by specifying CI/CD variables in the scan
execution policy. For more information, see
[Full history pipeline secret detection](../secret_detection/pipeline/_index.md#historic-scan).
[Full history pipeline secret detection](../secret_detection/pipeline/_index.md#run-a-historic-scan).
- For `triggered` scan execution policies, secret detection works just like regular scan
[configured manually in the `.gitlab-ci.yml`](../secret_detection/pipeline/_index.md#edit-the-gitlab-ciyml-file-manually).
- Container scanning: A scan that is configured for the `pipeline` rule type ignores the agent

View File

@ -72,17 +72,28 @@ By default, when you run a pipeline:
To override the default behavior, use the [available CI/CD variables](configure.md#available-cicd-variables).
### Historic scan
### Run a historic scan
By default, pipeline secret detection scans only the current state of the Git repository. Any secrets
contained in the repository's history are not detected. Run a historic scan to check for secrets from
all commits and branches in the Git repository.
You should do a historic scan only once, after enabling pipeline secret detection. Historic scans
You should run a historic scan only once, after enabling pipeline secret detection. Historic scans
can take a long time, especially for larger repositories with lengthy Git histories. After
completing an initial historic scan, use only standard pipeline secret detection as part of your
pipeline.
To run a historic scan:
1. On the left sidebar, select **Search or go to** and find your project.
1. Select **Build > Pipelines**.
1. Select **New pipeline**.
1. Add a CI/CD variable:
1. From the dropdown list, select **Variable**.
1. In the **Input variable key** box, enter `SECRET_DETECTION_HISTORIC_SCAN`.
1. In the **Input variable value** box, enter `true`.
1. Select **New pipeline**.
### Advanced vulnerability tracking
{{< details >}}

View File

@ -221,7 +221,7 @@ To update a streaming destination's namespace filters:
### Override default content type header
By default, streaming destinations use a `content-type` header of `application/x-www-form-urlencoded`. However, you
might want to set the `content-type` header to something else. For example ,`application/json`.
might want to set the `content-type` header to something else. For example, `application/json`.
To override the `content-type` header default value for a top-level group streaming destination, use either:

View File

@ -462,6 +462,7 @@ than 1000. The cached value is rounded to thousands or millions and updated ever
{{< history >}}
- Filtering by group was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/385191) in GitLab 15.9.
- Filtering by custom fields was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/525462) in GitLab 17.11.
{{< /history >}}
@ -474,6 +475,7 @@ You can filter the list of epics by:
- Confidentiality
- Reaction emoji
- Groups
- [Custom fields](../../../../../gitlab/doc/user/work_items/custom_fields.md) enabled for epics
![epics filter](img/epics_filter_v14_7.png)

View File

@ -97,7 +97,7 @@ You can add or remove data from export files. For example, you can:
To edit a project export file:
1. Extract the exported `.tar.gz` file.
1. Edit the appropriate file . For example, `tree/project/project_members.ndjson`.
1. Edit the appropriate file. For example, `tree/project/project_members.ndjson`.
1. Compress the files back to a `.tar.gz` file.
You can also make sure that all members were exported by checking the `project_members.ndjson` file.

View File

@ -34,9 +34,7 @@ Configure custom fields for a group to track data points like business value, ri
These fields appear in all work items across the group, its subgroups, and projects.
Custom fields help teams standardize how they record and report information across the entire workflow.
This standardization creates consistency across projects.
<!-- Use the sentence below when custom fields show on filters: -->
<!-- This standardization creates consistency across projects and supports more powerful filtering and reporting capabilities. -->
This standardization creates consistency across projects and supports more powerful filtering and reporting capabilities.
Choose from various field types to accommodate different data requirements and planning scenarios:
- Single-select

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
module API
  module Entities
    class WikiPage
      # REST API representation of a wiki page metadata record
      # (presumably the WikiPage::Meta model — it is referenced as a
      # to-do item `type` in the todos API docs; confirm against callers).
      # Exposes only identifying attributes, not the page content.
      class Meta < Grape::Entity
        # Record ID of the metadata row.
        expose :id, documentation: { type: 'integer', example: 2 }
        # Canonical slug of the page, exposed as `slug` in the JSON payload.
        expose :canonical_slug, as: :slug, documentation: { type: 'string', example: 'home' }
        # Human-readable page title.
        expose :title, documentation: { type: 'string', example: 'Page title' }
      end
    end
  end
end

View File

@ -43,6 +43,7 @@ module FastSpecHelper
# @return [void]
def self.spec_requires_and_configuration
require 'gitlab/rspec/next_instance_of'
require 'hashdiff'
require_relative 'support/patches/rspec_mocks_doubles_fast_spec_helper_patch'
require_relative 'support/matchers/result_matchers'
require_relative 'support/railway_oriented_programming'

View File

@ -735,22 +735,22 @@ RSpec.describe GroupsHelper, feature_category: :groups_and_projects do
describe('#group_confirm_modal_data') do
using RSpec::Parameterized::TableSyntax
let_it_be(:group) { create(:group, path: "foo") }
fake_form_id = "fake_form_id"
where(:prevent_delete_response, :is_button_disabled, :form_value_id, :permanently_remove, :button_text, :has_security_policy_project) do
true | "true" | nil | false | "Delete" | true
true | "true" | fake_form_id | true | nil | false
false | "true" | nil | false | "Delete group" | true
false | "false" | fake_form_id | true | nil | false
where(:prevent_delete_response, :adjourned_deletion, :is_button_disabled, :form_value_id, :permanently_remove, :button_text, :has_security_policy_project) do
true | false | "true" | nil | false | "Delete" | true
true | true | "true" | fake_form_id | true | nil | false
false | false | "true" | nil | false | "Delete group" | true
false | true | "false" | fake_form_id | true | nil | false
end
with_them do
it "returns expected parameters" do
allow(group).to receive(:linked_to_subscription?).and_return(prevent_delete_response)
allow(group).to receive(:adjourned_deletion?).and_return(adjourned_deletion)
expected = helper.group_confirm_modal_data(group: group, remove_form_id: form_value_id, button_text: button_text, has_security_policy_project: has_security_policy_project)
expected = helper.group_confirm_modal_data(group: group, remove_form_id: form_value_id, button_text: button_text, has_security_policy_project: has_security_policy_project, permanently_remove: permanently_remove)
expect(expected).to eq({
button_text: button_text.nil? ? "Delete group" : button_text,
confirm_danger_message: remove_group_message(group, permanently_remove),
@ -764,6 +764,81 @@ RSpec.describe GroupsHelper, feature_category: :groups_and_projects do
end
end
describe '#remove_group_message' do
let_it_be(:group) { create(:group) }
let(:delayed_deletion_message) { "The contents of this group, its subgroups and projects will be permanently deleted after" }
let(:permanent_deletion_message) { ["You are about to delete the group #{group.name}", "After you delete a group, you <strong>cannot</strong> restore it or its components."] }
subject { helper.remove_group_message(group, false) }
shared_examples 'permanent deletion message' do
it 'returns the message related to permanent deletion' do
expect(subject).to include(*permanent_deletion_message)
end
end
shared_examples 'delayed deletion message' do
it 'returns the message related to delayed deletion' do
expect(subject).to include(delayed_deletion_message)
end
end
context 'delayed deletion feature is available' do
before do
allow(group).to receive(:adjourned_deletion?).and_return(true)
end
it_behaves_like 'delayed deletion message'
context 'group is already marked for deletion' do
before do
create(:group_deletion_schedule, group: group, marked_for_deletion_on: Date.current)
allow(group).to receive(:marked_for_deletion?).and_return(true)
end
it_behaves_like 'permanent deletion message'
end
context 'when group delay deletion is enabled' do
before do
stub_application_setting(delayed_group_deletion: true)
end
it_behaves_like 'delayed deletion message'
end
context 'when group delay deletion is disabled' do
before do
stub_application_setting(delayed_group_deletion: false)
end
it_behaves_like 'delayed deletion message'
end
context "group has not been marked for deletion" do
let(:group) { build(:group) }
context "'permanently_remove' argument is set to 'true'" do
it "displays permanent deletion message" do
allow(group).to receive(:marked_for_deletion?).and_return(false)
allow(group).to receive(:adjourned_deletion?).and_return(true)
expect(subject).to include(delayed_deletion_message)
expect(helper.remove_group_message(group, true)).to include(*permanent_deletion_message)
end
end
end
end
context 'delayed deletion feature is not available' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
it_behaves_like 'permanent deletion message'
end
end
describe '#group_merge_requests' do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }

View File

@ -6,8 +6,10 @@ require 'email_spec'
RSpec.describe Emails::Groups do
include EmailSpec::Matchers
# rubocop:disable RSpec/FactoryBot/AvoidCreate -- Need associations
let(:group) { create(:group) }
let(:user) { create(:user) }
# rubocop:enable RSpec/FactoryBot/AvoidCreate
before do
group.add_owner(user)
@ -38,4 +40,32 @@ RSpec.describe Emails::Groups do
expect(subject).to have_body_text "Group #{group.name} couldn't be exported."
end
end
describe '#group_scheduled_for_deletion' do
# rubocop:disable RSpec/FactoryBot/AvoidCreate -- Need associations
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group_with_deletion_schedule, owners: user) }
let_it_be(:sub_group) { create(:group_with_deletion_schedule, parent: group) }
# rubocop:enable RSpec/FactoryBot/AvoidCreate
let_it_be(:deletion_adjourned_period) { 7 }
let_it_be(:deletion_date) { (Time.current + deletion_adjourned_period.days).strftime('%B %-d, %Y') }
let_it_be(:group_retain_url) { "http://localhost/groups/#{sub_group.full_path}/-/edit#js-advanced-settings" }
before do
stub_application_setting(deletion_adjourned_period: deletion_adjourned_period)
end
subject { Notify.group_scheduled_for_deletion(user.id, sub_group.id) }
it 'has the expected content', :aggregate_failures, :freeze_time do
is_expected.to have_subject("#{sub_group.name} | Group scheduled for deletion")
is_expected.to have_body_text(
"has been marked for deletion and will be removed in #{deletion_adjourned_period} days."
)
is_expected.to have_body_text(deletion_date)
is_expected.to have_body_text("href=\"#{group_retain_url}\"")
end
end
end

View File

@ -43,20 +43,6 @@ RSpec.describe 'Bulk update work items', feature_category: :team_planning do
end
end
context 'when the `bulk_update_work_items_mutation` feature flag is disabled' do
before do
stub_feature_flags(bulk_update_work_items_mutation: false)
end
it 'returns a resource not available error' do
post_graphql_mutation(mutation, current_user: current_user)
expect_graphql_errors_to_include(
'`bulk_update_work_items_mutation` feature flag is disabled.'
)
end
end
context 'when user can not update all work_items' do
let_it_be(:forbidden_work_item) { create(:work_item, project: private_project) }
let(:updatable_work_item_ids) { updatable_work_items.map { |i| i.to_gid.to_s } + [forbidden_work_item.to_gid.to_s] }

View File

@ -20,6 +20,8 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
let_it_be(:group_request_todo) { create(:todo, author: author_1, user: john_doe, project: nil, group: group_2, target: group_2, action: Todo::MEMBER_ACCESS_REQUESTED) }
let_it_be(:alert_todo) { create(:todo, project: project_1, author: john_doe, user: john_doe, target: alert) }
let_it_be(:merge_request_todo) { create(:todo, project: project_1, author: author_2, user: john_doe, target: merge_request) }
let_it_be(:wiki_page_meta) { create(:wiki_page_meta, :for_wiki_page, container: project_1) }
let_it_be(:wiki_page_todo) { create(:todo, project: project_1, author: author_2, user: john_doe, target: wiki_page_meta, action: Todo::MENTIONED) }
let_it_be(:pending_1) { create(:todo, :mentioned, project: project_1, author: author_1, user: john_doe, target: issue) }
let_it_be(:pending_2) { create(:todo, project: project_2, author: author_2, user: john_doe, target: create(:issue, project: project_2)) }
let_it_be(:pending_3) { create(:on_commit_todo, project: project_1, author: author_2, user: john_doe) }
@ -69,7 +71,7 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(8)
expect(json_response.length).to eq(9)
expect(json_response[0]).to include(
'id' => pending_5.id,
@ -108,9 +110,19 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
)
)
# Only issues get a merge request count at the moment
expect(json_response[4].dig('target', 'merge_requests_count')).to be_nil
expect(json_response[4]).to include(
'target_type' => 'WikiPage::Meta',
'action_name' => 'mentioned',
'target' => hash_including(
'id' => wiki_page_meta.id,
'title' => wiki_page_meta.title,
'slug' => wiki_page_meta.canonical_slug
)
)
# Only issues get a merge request count at the moment
expect(json_response[5].dig('target', 'merge_requests_count')).to be_nil
expect(json_response[5]).to include(
'target_type' => 'MergeRequest',
'target' => hash_including(
'upvotes' => 1,
@ -118,7 +130,7 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
)
)
expect(json_response[5]).to include(
expect(json_response[6]).to include(
'target_type' => 'AlertManagement::Alert',
'target' => hash_including(
'iid' => alert.iid,
@ -126,7 +138,7 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
)
)
expect(json_response[6]).to include(
expect(json_response[7]).to include(
'target_type' => 'Namespace',
'action_name' => 'member_access_requested',
'target' => hash_including(
@ -137,7 +149,7 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
'target_url' => Gitlab::Routing.url_helpers.group_group_members_url(group_2, tab: 'access_requests')
)
expect(json_response[7]).to include(
expect(json_response[8]).to include(
'target_type' => 'Project',
'action_name' => 'member_access_requested',
'target' => hash_including(
@ -158,7 +170,7 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
get api('/todos', john_doe)
expect(json_response.count).to eq(8)
expect(json_response.count).to eq(9)
expect(json_response.map { |t| t['id'] }).not_to include(no_access_todo.id, pending_4.id)
end
end
@ -170,7 +182,7 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(3)
expect(json_response.length).to eq(4)
end
end
@ -216,7 +228,7 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(5)
expect(json_response.length).to eq(6)
end
end
@ -227,7 +239,7 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(2)
expect(json_response.length).to eq(3)
end
end
end

View File

@ -6,13 +6,12 @@ RSpec.describe Groups::MarkForDeletionService, feature_category: :groups_and_pro
let_it_be(:user) { create(:user) }
let(:licensed) { false }
let(:service) { described_class.new(group, user, {}) }
let_it_be_with_reload(:group) { create(:group, owners: user) }
subject(:result) { service.execute(licensed: licensed) }
context 'when marking the group for deletion' do
context 'with user that can admin the group' do
let_it_be_with_reload(:group) { create(:group, owners: user) }
context 'for a group that has not been marked for deletion' do
it 'marks the group for deletion', :freeze_time do
result
@ -25,12 +24,6 @@ RSpec.describe Groups::MarkForDeletionService, feature_category: :groups_and_pro
expect(result).to eq({ status: :success })
end
it 'calls send_group_deletion_notification when successful' do
expect(service).to receive(:send_group_deletion_notification)
result
end
it 'logs the event' do
allow(Gitlab::AppLogger).to receive(:info).and_call_original
expect(Gitlab::AppLogger).to receive(:info).with(
@ -40,6 +33,62 @@ RSpec.describe Groups::MarkForDeletionService, feature_category: :groups_and_pro
result
end
context 'when notification feature flag is enabled and adjourned deletion is enabled' do
before do
stub_feature_flags(group_deletion_notification_email: true)
allow(group).to receive(:adjourned_deletion?).and_return(true)
end
it 'sends a notification email' do
expect_next_instance_of(NotificationService) do |service|
expect(service).to receive(:group_scheduled_for_deletion).with(group)
end
result
end
end
context 'when notification feature flag is disabled' do
before do
stub_feature_flags(group_deletion_notification_email: false)
allow(group).to receive(:adjourned_deletion?).and_return(true)
end
it 'does not send a notification email' do
expect(NotificationService).not_to receive(:new)
result
end
end
context 'when notification feature flag is enabled for specific group' do
before do
stub_feature_flags(group_deletion_notification_email: group)
allow(group).to receive(:adjourned_deletion?).and_return(true)
end
it 'sends a notification email' do
expect_next_instance_of(NotificationService) do |service|
expect(service).to receive(:group_scheduled_for_deletion).with(group)
end
result
end
end
context 'when adjourned deletion is disabled' do
before do
stub_feature_flags(group_deletion_notification_email: true)
allow(group).to receive(:adjourned_deletion?).and_return(false)
end
it 'does not send a notification email' do
expect(NotificationService).not_to receive(:new)
result
end
end
context 'when marking for deletion fails' do
before do
expect_next_instance_of(GroupDeletionSchedule) do |group_deletion_schedule|
@ -54,8 +103,8 @@ RSpec.describe Groups::MarkForDeletionService, feature_category: :groups_and_pro
expect(result).to eq({ status: :error, message: 'error message' })
end
it 'does not call send_group_deletion_notification' do
expect(service).not_to receive(:send_group_deletion_notification)
it 'does not send notification' do
expect(NotificationService).not_to receive(:new)
result
end

View File

@ -4621,6 +4621,64 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
end
describe 'group scheduled for deletion' do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
context 'when group emails are disabled' do
before do
allow(group).to receive(:emails_disabled?).and_return(true)
end
it 'does not send any emails' do
expect(Notify).not_to receive(:group_scheduled_for_deletion)
subject.group_scheduled_for_deletion(group)
end
end
context 'when group emails are enabled' do
before do
allow(group).to receive(:emails_disabled?).and_return(false)
end
context 'when user is owner' do
it 'sends email' do
group.add_owner(user)
expect(Notify).to receive(:group_scheduled_for_deletion).with(user.id, group.id).and_call_original
subject.group_scheduled_for_deletion(group)
end
context 'when owner is blocked' do
it 'does not send email' do
group.add_owner(user)
user.block!
expect(Notify).not_to receive(:group_scheduled_for_deletion)
subject.group_scheduled_for_deletion(group)
end
end
end
context 'when group has multiple owners' do
let_it_be(:another_user) { create(:user) }
it 'sends email to all owners' do
group.add_owner(user)
group.add_owner(another_user)
expect(Notify).to receive(:group_scheduled_for_deletion).with(user.id, group.id).and_call_original
expect(Notify).to receive(:group_scheduled_for_deletion).with(another_user.id, group.id).and_call_original
subject.group_scheduled_for_deletion(group)
end
end
end
end
def build_team(project)
@u_watcher = create_global_setting_for(create(:user), :watch)
@u_participating = create_global_setting_for(create(:user), :participating)

View File

@ -75,8 +75,6 @@ RSpec.describe Projects::MarkForDeletionService, feature_category: :groups_and_p
end
it 'does not send notification email' do
stub_feature_flags(project_deletion_notification_email: true)
expect(NotificationService).not_to receive(:new)
result
@ -112,35 +110,6 @@ RSpec.describe Projects::MarkForDeletionService, feature_category: :groups_and_p
describe '#send_project_deletion_notification' do
context 'when all conditions are met' do
before do
stub_feature_flags(project_deletion_notification_email: true)
allow(project).to receive_messages(adjourned_deletion?: true, marked_for_deletion?: true)
end
it 'sends a notification email' do
expect_next_instance_of(NotificationService) do |service|
expect(service).to receive(:project_scheduled_for_deletion).with(project)
end
execute_send_project_deletion_notification
end
end
context 'when feature flag is disabled' do
before do
stub_feature_flags(project_deletion_notification_email: false)
allow(project).to receive_messages(adjourned_deletion?: true, marked_for_deletion?: true)
end
it 'does not send a notification email' do
expect(NotificationService).not_to receive(:new)
execute_send_project_deletion_notification
end
end
context 'when feature flag is enabled for specific project' do
before do
stub_feature_flags(project_deletion_notification_email: project)
allow(project).to receive_messages(adjourned_deletion?: true, marked_for_deletion?: true)
end
@ -155,7 +124,6 @@ RSpec.describe Projects::MarkForDeletionService, feature_category: :groups_and_p
context 'when adjourned deletion is disabled' do
before do
stub_feature_flags(project_deletion_notification_email: true)
allow(project).to receive_messages(adjourned_deletion?: false, marked_for_deletion?: true)
end
@ -168,7 +136,6 @@ RSpec.describe Projects::MarkForDeletionService, feature_category: :groups_and_p
context 'when project is not marked for deletion' do
before do
stub_feature_flags(project_deletion_notification_email: true)
allow(project).to receive_messages(adjourned_deletion?: true, marked_for_deletion?: false)
end