Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2025-05-02 09:18:48 +00:00
parent da10347975
commit ca9cb5e30d
69 changed files with 1255 additions and 1240 deletions

View File

@ -1329,3 +1329,7 @@ Scalability/RandomCronSchedule:
Enabled: true
Include:
- 'config/initializers/1_settings.rb'
Migration/PreventFeatureFlagsUsage:
Enabled: true
EnforcedSince: 20250505000000

View File

@ -0,0 +1,6 @@
---
Migration/PreventFeatureFlagsUsage:
Details: grace period
Exclude:
- 'db/migrate/20250228183319_migrate_vscode_extension_marketplace_feature_flag_to_data.rb'
- 'db/post_migrate/20250404151331_backfill_ci_job_live_trace_application_setting.rb'

View File

@ -987,7 +987,6 @@ RSpec/FeatureCategory:
- 'ee/spec/validators/ldap_filter_validator_spec.rb'
- 'ee/spec/validators/password/complexity_validator_spec.rb'
- 'ee/spec/validators/user_existence_validator_spec.rb'
- 'ee/spec/views/admin/application_settings/_deletion_protection_settings.html.haml_spec.rb'
- 'ee/spec/views/admin/application_settings/_git_abuse_rate_limit.html.haml_spec.rb'
- 'ee/spec/views/admin/application_settings/general.html.haml_spec.rb'
- 'ee/spec/views/admin/dev_ops_report/show.html.haml_spec.rb'

View File

@ -1 +1 @@
e8c0552b8152cf9363ccd1d9e9002225904d5202
d02aa3b4de57ed8a1a53156be2909b7199659e6c

View File

@ -108,6 +108,7 @@ export default {
return {
board: { ...boardDefaults, ...this.currentBoard },
isLoading: false,
isDisabled: false,
};
},
apollo: {
@ -147,7 +148,7 @@ export default {
return !this.canAdminBoard;
},
submitDisabled() {
return this.isLoading || this.board.name.length === 0;
return this.isLoading || this.board.name.length === 0 || this.isDisabled;
},
primaryProps() {
return {
@ -259,6 +260,7 @@ export default {
setError({ error, message: this.$options.i18n.deleteErrorMessage });
} finally {
this.isLoading = false;
this.isDisabled = true;
}
} else {
try {
@ -273,6 +275,7 @@ export default {
setError({ error, message: this.$options.i18n.saveErrorMessage });
} finally {
this.isLoading = false;
this.isDisabled = true;
}
}
},

View File

@ -29,6 +29,7 @@ import { HISTORY_BUTTON_CLICK } from '~/tracking/constants';
import { initFindFileShortcut } from '~/projects/behaviors';
import initHeaderApp from '~/repository/init_header_app';
import createRouter from '~/repository/router';
import initFileTreeBrowser from '~/repository/file_tree_browser';
Vue.use(Vuex);
Vue.use(VueApollo);
@ -86,6 +87,7 @@ if (viewBlobEl) {
...dataset
} = viewBlobEl.dataset;
const router = createRouter(projectPath, originalBranch);
initFileTreeBrowser(router);
initHeaderApp({ router, isBlobView: true });

View File

@ -0,0 +1,84 @@
<script>
import {
GlTooltipDirective,
GlBadge,
GlButtonGroup,
GlButton,
GlSearchBoxByType,
} from '@gitlab/ui';
import { getModifierKey } from '~/constants';
import { __, s__, sprintf } from '~/locale';
export default {
i18n: {
listViewToggleTitle: __('List view'),
treeViewToggleTitle: __('Tree view'),
},
directives: {
GlTooltip: GlTooltipDirective,
},
components: {
GlBadge,
GlButtonGroup,
GlButton,
GlSearchBoxByType,
},
props: {
totalFilesCount: {
type: Number,
required: true,
},
},
data() {
return {
search: '',
renderTreeList: false,
};
},
searchPlaceholder: sprintf(s__('Repository|Search (e.g. *.vue) (%{modifierKey}P)'), {
modifierKey: getModifierKey(),
}),
};
</script>
<template>
<div class="tree-list-holder">
<div class="gl-mb-3 gl-flex gl-items-center">
<h5 class="gl-my-0 gl-inline-block">{{ __('Files') }}</h5>
<gl-badge class="gl-ml-2">{{ totalFilesCount }}</gl-badge>
<gl-button-group class="gl-ml-auto">
<gl-button
v-gl-tooltip.hover
icon="list-bulleted"
:selected="!renderTreeList"
:title="$options.i18n.listViewToggleTitle"
:aria-label="$options.i18n.listViewToggleTitle"
@click="renderTreeList = false"
/>
<gl-button
v-gl-tooltip.hover
icon="file-tree"
:selected="renderTreeList"
:title="$options.i18n.treeViewToggleTitle"
:aria-label="$options.i18n.treeViewToggleTitle"
@click="renderTreeList = true"
/>
</gl-button-group>
</div>
<label for="repository-tree-search" class="sr-only">{{ $options.searchPlaceholder }}</label>
<gl-search-box-by-type
id="repository-tree-search"
v-model="search"
:placeholder="$options.searchPlaceholder"
:clear-button-title="__('Clear search')"
name="repository-tree-search"
class="gl-mb-3"
/>
<div>
<!-- TODO: implement recycle-scroller + list files (file-row components) -->
<p class="text-center gl-my-6">
{{ __('No files found') }}
</p>
</div>
</div>
</template>

View File

@ -0,0 +1,29 @@
<script>
import FileBrowserHeight from '~/diffs/components/file_browser_height.vue';
import TreeList from './components/tree_list.vue';
export const TREE_WIDTH = 320;
export default {
name: 'FileTreeBrowser',
components: {
TreeList,
FileBrowserHeight,
},
data() {
return {
treeWidth: TREE_WIDTH,
isLoading: false,
totalFilesCount: 0,
};
},
};
</script>
<template>
<div>
<file-browser-height :style="{ width: `${treeWidth}px` }" class="repository-tree-list">
<tree-list v-if="!isLoading" class="gl-mr-5 gl-mt-5" :total-files-count="totalFilesCount" />
</file-browser-height>
</div>
</template>

View File

@ -0,0 +1,26 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import { pinia } from '~/pinia/instance';
import FileBrowser from './file_tree_browser.vue';
Vue.use(VueApollo);
const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(),
});
export default async function initBrowserComponent(router) {
const el = document.getElementById('js-file-browser');
if (!el) return false;
return new Vue({
el,
pinia,
router,
apolloProvider,
render(h) {
return h(FileBrowser);
},
});
}

View File

@ -10,6 +10,7 @@ import RefSelector from '~/ref/components/ref_selector.vue';
import HighlightWorker from '~/vue_shared/components/source_viewer/workers/highlight_worker?worker';
import CodeDropdown from '~/vue_shared/components/code_dropdown/code_dropdown.vue';
import CompactCodeDropdown from 'ee_else_ce/repository/components/code_dropdown/compact_code_dropdown.vue';
import initFileTreeBrowser from '~/repository/file_tree_browser';
import App from './components/app.vue';
import Breadcrumbs from './components/header_area/breadcrumbs.vue';
import ForkInfo from './components/fork_info.vue';
@ -47,6 +48,7 @@ export default function setupVueRepositoryList() {
targetBranch,
} = dataset;
const router = createRouter(projectPath, escapedRef);
initFileTreeBrowser(router);
apolloProvider.clients.defaultClient.cache.writeQuery({
query: commitsQuery,

View File

@ -1,4 +1,4 @@
import { GlButton, GlTableLite } from '@gitlab/ui';
import { GlButton, GlTableLite, GlKeysetPagination } from '@gitlab/ui';
import CrudComponent from './crud_component.vue';
export default {
@ -44,6 +44,50 @@ const Template = (args, { argTypes }) => ({
`,
});
const TableTemplate = (args, { argTypes }) => ({
components: { CrudComponent, GlButton, GlTableLite },
props: Object.keys(argTypes),
template: `
<crud-component v-bind="$props" ref="crudComponent">
<gl-table-lite
:items="tableItems"
:fields="tableFields" />
<template #form>
<p>Add form</p>
<div class="gl-flex gl-gap-3">
<gl-button variant="confirm">Add item</gl-button>
<gl-button @click="$refs.crudComponent.hideForm">Cancel</gl-button>
</div>
</template>
</crud-component>
`,
});
const ContentListTemplate = (args, { argTypes }) => ({
components: { CrudComponent, GlButton, GlKeysetPagination },
props: Object.keys(argTypes),
template: `
<crud-component v-bind="$props" ref="crudComponent">
<ul class="content-list">
<li v-for="item in items">{{ item.label }}</li>
</ul>
<template #form>
<p>Add form</p>
<div class="gl-flex gl-gap-3">
<gl-button variant="confirm">Add item</gl-button>
<gl-button @click="$refs.crudComponent.hideForm">Cancel</gl-button>
</div>
</template>
<template v-if="pagination" #pagination>
<gl-keyset-pagination v-bind="paginationProps" />
</template>
</crud-component>
`,
});
const defaultArgs = {
descriptionEnabled: false,
customActions: false,
@ -83,7 +127,7 @@ WithFooter.args = {
isEmpty: false,
};
export const WithPagnation = Template.bind({});
export const WithPagnation = ContentListTemplate.bind({});
WithPagnation.args = {
...defaultArgs,
title: 'CRUD Component title',
@ -92,7 +136,22 @@ WithPagnation.args = {
count: 99,
toggleText: 'Add action',
pagination: true,
paginationProps: { hasPreviousPage: false, hasNextPage: true },
isEmpty: false,
items: [
{
label: 'First item',
},
{
label: 'Second item',
},
{
label: 'Third item',
},
{
label: 'Fourth item',
},
],
};
export const WithCustomActions = Template.bind({});
@ -138,46 +197,6 @@ isCollapsible.args = {
isEmpty: false,
};
const TableTemplate = (args, { argTypes }) => ({
components: { CrudComponent, GlButton, GlTableLite },
props: Object.keys(argTypes),
template: `
<crud-component v-bind="$props" ref="crudComponent">
<gl-table-lite
:items="tableItems"
:fields="tableFields" />
<template #form>
<p>Add form</p>
<div class="gl-flex gl-gap-3">
<gl-button variant="confirm">Add item</gl-button>
<gl-button @click="$refs.crudComponent.hideForm">Cancel</gl-button>
</div>
</template>
</crud-component>
`,
});
const ContentListTemplate = (args, { argTypes }) => ({
components: { CrudComponent, GlButton },
props: Object.keys(argTypes),
template: `
<crud-component v-bind="$props" ref="crudComponent">
<ul class="content-list">
<li v-for="item in items">{{ item.label }}</li>
</ul>
<template #form>
<p>Add form</p>
<div class="gl-flex gl-gap-3">
<gl-button variant="confirm">Add item</gl-button>
<gl-button @click="$refs.crudComponent.hideForm">Cancel</gl-button>
</div>
</template>
</crud-component>
`,
});
export const TableExample = TableTemplate.bind({});
TableExample.args = {
title: 'Hooks',

View File

@ -53,6 +53,7 @@ class Projects::BlobController < Projects::ApplicationController
push_licensed_feature(:file_locks) if @project.licensed_feature_available?(:file_locks)
push_frontend_feature_flag(:directory_code_dropdown_updates, current_user)
push_frontend_feature_flag(:ci_pipeline_status_realtime, @project)
push_frontend_feature_flag(:repository_file_tree_browser, @project)
end
def new

View File

@ -25,6 +25,7 @@ class Projects::TreeController < Projects::ApplicationController
push_licensed_feature(:file_locks) if @project.licensed_feature_available?(:file_locks)
push_frontend_feature_flag(:directory_code_dropdown_updates, current_user)
push_frontend_feature_flag(:ci_pipeline_status_realtime, @project)
push_frontend_feature_flag(:repository_file_tree_browser, @project)
end
feature_category :source_code_management

View File

@ -5,25 +5,18 @@ module Namespaces
extend ActiveSupport::Concern
def adjourned_deletion?
return false unless Feature.enabled?(:downtier_delayed_deletion, :instance, type: :gitlab_com_derisk)
adjourned_deletion_configured?
end
def adjourned_deletion_configured?
return false unless Feature.enabled?(:downtier_delayed_deletion, :instance, type: :gitlab_com_derisk)
deletion_adjourned_period > 0
end
def marked_for_deletion?
return false unless Feature.enabled?(:downtier_delayed_deletion, :instance, type: :gitlab_com_derisk)
marked_for_deletion_on.present?
end
def self_or_ancestor_marked_for_deletion
return unless Feature.enabled?(:downtier_delayed_deletion, :instance, type: :gitlab_com_derisk)
return self if marked_for_deletion?
ancestors(hierarchy_order: :asc).joins(:deletion_schedule).first

View File

@ -21,7 +21,7 @@ class Todo < ApplicationRecord
MERGE_TRAIN_REMOVED = 8 # This is an EE-only feature
REVIEW_REQUESTED = 9
MEMBER_ACCESS_REQUESTED = 10
REVIEW_SUBMITTED = 11 # This is an EE-only feature
REVIEW_SUBMITTED = 11
OKR_CHECKIN_REQUESTED = 12 # This is an EE-only feature
ADDED_APPROVER = 13 # This is an EE-only feature,
SSH_KEY_EXPIRED = 14

View File

@ -245,9 +245,7 @@ module Ci
def present_build!(build)
# We need to use the presenter here because Gitaly calls in the presenter
# may fail, and we need to ensure the response has been generated.
presented_build = @logger.instrument(:present_build_presenter) do
::Ci::BuildRunnerPresenter.new(build) # rubocop:disable CodeReuse/Presenter -- old code
end
presented_build = ::Ci::BuildRunnerPresenter.new(build) # rubocop:disable CodeReuse/Presenter -- old code
@logger.instrument(:present_build_logs) do
log_artifacts_context(build)

View File

@ -2,10 +2,9 @@
module Groups # rubocop:disable Gitlab/BoundedContexts -- existing top-level module
class MarkForDeletionService < BaseService
def execute(licensed: false)
def execute
return error(_('You are not authorized to perform this action')) unless can?(current_user, :remove_group, group)
return error(_('Group has been already marked for deletion')) if group.marked_for_deletion_on.present?
return error(_('Cannot mark group for deletion: feature not supported')) unless licensed || feature_downtiered?
result = create_deletion_schedule
if result[:status] == :success
@ -43,10 +42,6 @@ module Groups # rubocop:disable Gitlab/BoundedContexts -- existing top-level mod
def log_event
log_info("User #{current_user.id} marked group #{group.full_path} for deletion")
end
def feature_downtiered?
Feature.enabled?(:downtier_delayed_deletion, :instance, type: :gitlab_com_derisk)
end
end
end

View File

@ -2,11 +2,9 @@
module Projects
class MarkForDeletionService < BaseService
def execute(licensed: false)
def execute
return success if project.marked_for_deletion_at?
return error('Cannot mark project for deletion: feature not supported') unless licensed || feature_downtiered?
result = ::Projects::UpdateService.new(
project,
current_user,
@ -47,10 +45,6 @@ module Projects
deleting_user: current_user
}
end
def feature_downtiered?
Feature.enabled?(:downtier_delayed_deletion, :instance, type: :gitlab_com_derisk)
end
end
end

View File

@ -195,6 +195,9 @@ class TodoService
#
def new_review(merge_request, current_user)
resolve_todos_for_target(merge_request, current_user)
# Create a new todo for assignees and author
create_review_submitted_todo(merge_request, current_user)
end
# When user marks a target as todo
@ -308,6 +311,14 @@ class TodoService
create_todos(approvers, attributes, namespace, project)
end
def create_review_submitted_todo(target, review_author)
users = (target.assignees | [target.author]).reject { |u| u.id == review_author.id }
project = target.project
attributes = attributes_for_todo(project, target, review_author, Todo::REVIEW_SUBMITTED)
create_todos(users, attributes, project.namespace, project)
end
private
def create_todos(users, attributes, namespace, project)
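For context, the new REVIEW_SUBMITTED flow added above can be exercised roughly as follows. This is a spec-style sketch, not part of the change itself; the factories and the `assignee`/`reviewer` variables are assumed for illustration only.

```ruby
# Sketch: submitting a review resolves the reviewer's own todos on the merge request
# and creates a REVIEW_SUBMITTED todo for the author and assignees (but not the reviewer).
merge_request = create(:merge_request, assignees: [assignee])
reviewer = create(:user)

TodoService.new.new_review(merge_request, reviewer)

Todo.where(action: Todo::REVIEW_SUBMITTED).map(&:user)
# => contains merge_request.author and assignee, never reviewer
```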

View File

@ -1,3 +1 @@
- return unless Feature.enabled?(:downtier_delayed_deletion, :instance, type: :gitlab_com_derisk)
#js-admin-deletion-protection-settings{ data: deletion_protection_data }

View File

@ -5,13 +5,21 @@
- content_for :prefetch_asset_tags do
- webpack_preload_asset_tag('monaco', prefetch: true)
- add_page_startup_graphql_call('repository/blob_info', { projectPath: @project.full_path, ref: current_ref, refType: @ref_type.to_s.upcase.presence, filePath: @blob.path, shouldFetchRawText: @blob.rendered_as_text? && !@blob.rich_viewer })
- repository_file_tree_browser_enabled = Feature.enabled?(:repository_file_tree_browser, @project)
- @force_fluid_layout = repository_file_tree_browser_enabled
.js-signature-container{ data: { 'signatures-path': signatures_path } }
= render 'projects/last_push'
#tree-holder.tree-holder.gl-pt-4
= render 'blob', blob: @blob
- if repository_file_tree_browser_enabled
.gl-flex
#js-file-browser
#tree-holder.tree-holder.gl-pt-4.gl-w-full
= render 'blob', blob: @blob
- else
#tree-holder.tree-holder.gl-pt-4
= render 'blob', blob: @blob
= render 'shared/web_ide_path'

View File

@ -5,13 +5,23 @@
- add_page_startup_graphql_call('repository/permissions', { projectPath: @project.full_path })
- add_page_startup_graphql_call('repository/files', { nextPageCursor: "", pageSize: 100, projectPath: @project.full_path, ref: current_ref, path: current_route_path || "/", refType: ref_type_enum_value})
- breadcrumb_title _("Repository")
- repository_file_tree_browser_enabled = Feature.enabled?(:repository_file_tree_browser, @project)
- @force_fluid_layout = repository_file_tree_browser_enabled
- page_title @path.presence || _("Files"), @ref
= content_for :meta_tags do
= auto_discovery_link_tag(:atom, project_commits_url(@project, @ref, rss_url_options), title: "#{@project.name}:#{@ref} commits")
= render 'projects/last_push'
= render 'projects/files', commit: @last_commit, project: @project, ref: @ref, content_url: project_tree_path(@project, @id)
- if repository_file_tree_browser_enabled
.gl-flex
#js-file-browser
.gl-w-full
= render 'projects/files', commit: @last_commit, project: @project, ref: @ref, content_url: project_tree_path(@project, @id)
- else
= render 'projects/files', commit: @last_commit, project: @project, ref: @ref, content_url: project_tree_path(@project, @id)
= render 'shared/web_ide_path'
-# https://gitlab.com/gitlab-org/gitlab/-/issues/408388#note_1578533983

View File

@ -1,9 +1,10 @@
---
name: downtier_delayed_deletion
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/526403
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/185850
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/536634
milestone: '17.11'
group: group::authorization
type: gitlab_com_derisk
name: repository_file_tree_browser
description:
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/19530
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/189445
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/537970
milestone: '18.0'
group: group::source code
type: wip
default_enabled: false

View File

@ -10,3 +10,5 @@ milestone: '9.3'
gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/463856
table_size: small
removed_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/189420
removed_in_milestone: '18.0'

View File

@ -1,32 +0,0 @@
# frozen_string_literal: true
class FinalizeBackfillPartitionWebHookLogDaily < Gitlab::Database::Migration[2.2]
milestone '18.0'
disable_ddl_transaction!
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
def up
return if should_not_run?
# rubocop:disable Migration/BatchMigrationsPostOnly -- Must be run before we switch to new table
# Does not run on .com
ensure_batched_background_migration_is_finished(
job_class_name: 'BackfillPartitionedWebHookLogsDaily',
table_name: :web_hook_logs,
column_name: :id,
job_arguments: [],
finalize: true
)
# rubocop:enable Migration/BatchMigrationsPostOnly
end
def down
# no-op
end
private
def should_not_run?
Gitlab.com_except_jh?
end
end

View File

@ -1,52 +0,0 @@
# frozen_string_literal: true
class QueueBackfillPartitionWebHookLogDaily < Gitlab::Database::Migration[2.2]
milestone '17.9'
restrict_gitlab_migration gitlab_schema: :gitlab_main
MIGRATION = 'BackfillPartitionedWebHookLogsDaily'
STRATEGY = 'PrimaryKeyBatchingStrategy'
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
TABLE_NAME = 'web_hook_logs'
def up
return if should_not_run?
(max_id, max_created_at) = define_batchable_model(TABLE_NAME)
.order(id: :desc, created_at: :desc)
.pick(:id, :created_at)
max_id ||= 0
max_created_at ||= Time.current.to_s
Gitlab::Database::BackgroundMigration::BatchedMigration.create!(
gitlab_schema: :gitlab_main,
job_class_name: MIGRATION,
job_arguments: [],
table_name: TABLE_NAME.to_sym,
column_name: :id,
min_cursor: [0, 1.month.ago.to_s],
max_cursor: [max_id, max_created_at],
interval: DELAY_INTERVAL,
pause_ms: 100,
batch_class_name: STRATEGY,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE,
status_event: :execute
)
end
def down
return if should_not_run?
delete_batched_background_migration(MIGRATION, TABLE_NAME.to_sym, :id, [])
end
private
def should_not_run?
Gitlab.com_except_jh?
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class RenameWebHookLogsSequence < Gitlab::Database::Migration[2.3]
milestone '18.0'
def up
connection.execute(<<~SQL)
ALTER SEQUENCE web_hook_logs_id_seq RENAME TO web_hook_logs_daily_id_seq;
ALTER SEQUENCE web_hook_logs_daily_id_seq OWNED BY web_hook_logs_daily.id;
SQL
end
def down
connection.execute(<<~SQL)
ALTER SEQUENCE web_hook_logs_daily_id_seq RENAME TO web_hook_logs_id_seq;
ALTER SEQUENCE web_hook_logs_id_seq OWNED BY web_hook_logs.id;
SQL
end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
class DropWebHookLogsTableSyncTrigger < Gitlab::Database::Migration[2.3]
include Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers
milestone '18.0'
OLD_PARTITIONED_TABLE_NAME = 'web_hook_logs'
NEW_PARTITIONED_TABLE_NAME = 'web_hook_logs_daily'
def up
drop_trigger_to_sync_tables(OLD_PARTITIONED_TABLE_NAME)
end
def down
create_trigger_to_sync_tables(OLD_PARTITIONED_TABLE_NAME, NEW_PARTITIONED_TABLE_NAME, %w[id created_at])
end
end

View File

@ -0,0 +1,58 @@
# frozen_string_literal: true
class DropTableWebHookLogs < Gitlab::Database::Migration[2.3]
include Gitlab::Database::PartitioningMigrationHelpers
disable_ddl_transaction!
milestone '18.0'
NEW_TABLE_NAME = :web_hook_logs_daily
OLD_TABLE_NAME = :web_hook_logs
INDEX_NAME_1 = :index_web_hook_logs_on_web_hook_id_and_created_at
INDEX_NAME_2 = :index_web_hook_logs_part_on_created_at_and_web_hook_id
def up
drop_table(:web_hook_logs)
end
def down
transaction do
execute(<<~SQL)
CREATE TABLE #{OLD_TABLE_NAME} (
LIKE #{NEW_TABLE_NAME} INCLUDING ALL EXCLUDING INDEXES,
PRIMARY KEY (id, created_at)
) PARTITION BY RANGE (created_at);
CREATE TABLE IF NOT EXISTS #{partition_name(nil)}
PARTITION OF #{OLD_TABLE_NAME}
FOR VALUES FROM (MINVALUE) TO (\'#{current_date.prev_month.beginning_of_month}\');
CREATE TABLE IF NOT EXISTS #{partition_name(current_date.prev_month)}
PARTITION OF #{OLD_TABLE_NAME}
FOR VALUES FROM (\'#{current_date.prev_month.beginning_of_month}\') TO (\'#{current_date.prev_month.end_of_month}\');
CREATE TABLE IF NOT EXISTS #{partition_name(current_date)}
PARTITION OF #{OLD_TABLE_NAME}
FOR VALUES FROM (\'#{current_date.beginning_of_month}\') TO (\'#{current_date.end_of_month}\');
CREATE TABLE IF NOT EXISTS #{partition_name(current_date.next_month)}
PARTITION OF #{OLD_TABLE_NAME}
FOR VALUES FROM (\'#{current_date.next_month.beginning_of_month}\') TO (\'#{current_date.next_month.end_of_month}\')
SQL
end
add_concurrent_partitioned_index(OLD_TABLE_NAME, [:web_hook_id, :created_at], name: INDEX_NAME_1)
add_concurrent_partitioned_index(OLD_TABLE_NAME, [:created_at, :web_hook_id], name: INDEX_NAME_2)
end
private
def current_date
Date.current
end
def partition_name(date)
suffix = date&.strftime('%Y%m') || '000000'
"gitlab_partitions_dynamic.#{OLD_TABLE_NAME}_#{suffix}"
end
end

View File

@ -1 +0,0 @@
269b23b333b01f896bfc2962cf024b980cbd5a17a110a5d2ebe9df77e472d6f4

View File

@ -1 +0,0 @@
2fbbcd4afa8fff0132a03c6ad4758fdb926551d681e87ddbce70cb1e3c8f4259

View File

@ -0,0 +1 @@
065dc977c3f972aebc346f68946fb68fe11510bab1b737d4eaf41bed35ca8cc6

View File

@ -0,0 +1 @@
d9d3da6fa39a790723135fe8c4d4164314cfed2e0031a9f970ab431407168903

View File

@ -0,0 +1 @@
7063a00d2436122355bc4f4f5111cf8f96a7057cf89ab0fc14974d6ea69d4337

View File

@ -880,64 +880,6 @@ RETURN NEW;
END
$$;
CREATE FUNCTION table_sync_function_29bc99d6db() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF (TG_OP = 'DELETE') THEN
DELETE FROM web_hook_logs_daily where "id" = OLD."id" AND "created_at" = OLD."created_at";
ELSIF (TG_OP = 'UPDATE') THEN
UPDATE web_hook_logs_daily
SET "web_hook_id" = NEW."web_hook_id",
"trigger" = NEW."trigger",
"url" = NEW."url",
"request_headers" = NEW."request_headers",
"request_data" = NEW."request_data",
"response_headers" = NEW."response_headers",
"response_body" = NEW."response_body",
"response_status" = NEW."response_status",
"execution_duration" = NEW."execution_duration",
"internal_error_message" = NEW."internal_error_message",
"updated_at" = NEW."updated_at",
"url_hash" = NEW."url_hash"
WHERE web_hook_logs_daily."id" = NEW."id" AND web_hook_logs_daily."created_at" = NEW."created_at";
ELSIF (TG_OP = 'INSERT') THEN
INSERT INTO web_hook_logs_daily ("id",
"web_hook_id",
"trigger",
"url",
"request_headers",
"request_data",
"response_headers",
"response_body",
"response_status",
"execution_duration",
"internal_error_message",
"updated_at",
"created_at",
"url_hash")
VALUES (NEW."id",
NEW."web_hook_id",
NEW."trigger",
NEW."url",
NEW."request_headers",
NEW."request_data",
NEW."response_headers",
NEW."response_body",
NEW."response_status",
NEW."execution_duration",
NEW."internal_error_message",
NEW."updated_at",
NEW."created_at",
NEW."url_hash");
END IF;
RETURN NULL;
END
$$;
COMMENT ON FUNCTION table_sync_function_29bc99d6db() IS 'Partitioning migration: table sync for web_hook_logs table';
CREATE FUNCTION table_sync_function_40ecbfb353() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -4968,35 +4910,8 @@ CREATE TABLE vulnerability_archives (
)
PARTITION BY RANGE (date);
CREATE TABLE web_hook_logs (
id bigint NOT NULL,
web_hook_id bigint NOT NULL,
trigger character varying,
url character varying,
request_headers text,
request_data text,
response_headers text,
response_body text,
response_status character varying,
execution_duration double precision,
internal_error_message character varying,
updated_at timestamp without time zone NOT NULL,
created_at timestamp without time zone NOT NULL,
url_hash text
)
PARTITION BY RANGE (created_at);
CREATE SEQUENCE web_hook_logs_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE web_hook_logs_id_seq OWNED BY web_hook_logs.id;
CREATE TABLE web_hook_logs_daily (
id bigint DEFAULT nextval('web_hook_logs_id_seq'::regclass) NOT NULL,
id bigint NOT NULL,
web_hook_id bigint NOT NULL,
trigger character varying,
url character varying,
@ -25337,6 +25252,15 @@ CREATE SEQUENCE vulnerability_user_mentions_id_seq
ALTER SEQUENCE vulnerability_user_mentions_id_seq OWNED BY vulnerability_user_mentions.id;
CREATE SEQUENCE web_hook_logs_daily_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE web_hook_logs_daily_id_seq OWNED BY web_hook_logs_daily.id;
CREATE TABLE web_hooks (
id bigint NOT NULL,
project_id bigint,
@ -28010,7 +27934,7 @@ ALTER TABLE ONLY vulnerability_statistics ALTER COLUMN id SET DEFAULT nextval('v
ALTER TABLE ONLY vulnerability_user_mentions ALTER COLUMN id SET DEFAULT nextval('vulnerability_user_mentions_id_seq'::regclass);
ALTER TABLE ONLY web_hook_logs ALTER COLUMN id SET DEFAULT nextval('web_hook_logs_id_seq'::regclass);
ALTER TABLE ONLY web_hook_logs_daily ALTER COLUMN id SET DEFAULT nextval('web_hook_logs_daily_id_seq'::regclass);
ALTER TABLE ONLY web_hooks ALTER COLUMN id SET DEFAULT nextval('web_hooks_id_seq'::regclass);
@ -29087,9 +29011,6 @@ ALTER TABLE packages_packages
ALTER TABLE sprints
ADD CONSTRAINT check_df3816aed7 CHECK ((due_date IS NOT NULL)) NOT VALID;
ALTER TABLE web_hook_logs
ADD CONSTRAINT check_df72cb58f5 CHECK ((char_length(url_hash) <= 44)) NOT VALID;
ALTER TABLE ONLY ci_build_needs
ADD CONSTRAINT ci_build_needs_pkey PRIMARY KEY (id);
@ -31199,9 +31120,6 @@ ALTER TABLE ONLY vulnerability_user_mentions
ALTER TABLE ONLY web_hook_logs_daily
ADD CONSTRAINT web_hook_logs_daily_pkey PRIMARY KEY (id, created_at);
ALTER TABLE ONLY web_hook_logs
ADD CONSTRAINT web_hook_logs_pkey PRIMARY KEY (id, created_at);
ALTER TABLE ONLY web_hooks
ADD CONSTRAINT web_hooks_pkey PRIMARY KEY (id);
@ -37959,10 +37877,6 @@ CREATE INDEX index_web_hook_logs_daily_on_web_hook_id_and_created_at ON ONLY web
CREATE INDEX index_web_hook_logs_daily_part_on_created_at_and_web_hook_id ON ONLY web_hook_logs_daily USING btree (created_at, web_hook_id);
CREATE INDEX index_web_hook_logs_on_web_hook_id_and_created_at ON ONLY web_hook_logs USING btree (web_hook_id, created_at);
CREATE INDEX index_web_hook_logs_part_on_created_at_and_web_hook_id ON ONLY web_hook_logs USING btree (created_at, web_hook_id);
CREATE INDEX index_web_hooks_on_group_id ON web_hooks USING btree (group_id) WHERE ((type)::text = 'GroupHook'::text);
CREATE INDEX index_web_hooks_on_integration_id ON web_hooks USING btree (integration_id);
@ -41213,8 +41127,6 @@ CREATE TRIGGER push_rules_loose_fk_trigger AFTER DELETE ON push_rules REFERENCIN
CREATE TRIGGER table_sync_trigger_4ea4473e79 AFTER INSERT OR DELETE OR UPDATE ON uploads FOR EACH ROW EXECUTE FUNCTION table_sync_function_40ecbfb353();
CREATE TRIGGER table_sync_trigger_b99eb6998c AFTER INSERT OR DELETE OR UPDATE ON web_hook_logs FOR EACH ROW EXECUTE FUNCTION table_sync_function_29bc99d6db();
CREATE TRIGGER tags_loose_fk_trigger AFTER DELETE ON tags REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();
CREATE TRIGGER terraform_state_versions_loose_fk_trigger AFTER DELETE ON terraform_state_versions REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();

View File

@ -963,6 +963,9 @@ This command supports the following options:
- `--staging`: Uses the `staging` environment.
- `--staging_ref`: Uses the `staging_ref` environment.
- `--production` : Uses the `production` environment (default).
- Filter by job class:
  - `--job-class-name JOB_CLASS_NAME`: Only list jobs for the given job class.
    This is the `migration_job_name` in the YAML definition of the background migration.
Output example:

View File

@ -33,15 +33,6 @@ title: Code Suggestions
Use GitLab Duo Code Suggestions to write code more efficiently by using generative AI to suggest code while you're developing.
Before you start using Code Suggestions, decide which of the following methods
you want to use to manage Code Suggestions requests:
- On GitLab.com or GitLab Self-Managed, the default GitLab AI vendor models and
cloud-based AI gateway that is hosted by GitLab.
- On GitLab Self-Managed, in GitLab 17.9 and later, [GitLab Duo Self-Hosted with a supported self-hosted model](../../../../administration/gitlab_duo_self_hosted/_index.md).
Self-hosted models maximize security and privacy by making sure nothing is
sent to an external model.
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
[View a click-through demo](https://gitlab.navattic.com/code-suggestions).
<!-- Video published on 2023-12-09 --> <!-- Demo published on 2024-02-01 -->
@ -317,6 +308,17 @@ However, Code Suggestions might generate suggestions that are:
When using Code Suggestions, code review best practices still apply.
## Available language models
Different language models can serve as the source for Code Suggestions.
- On GitLab.com: GitLab hosts the models and connects to them through the cloud-based AI gateway.
- On GitLab Self-Managed, two options exist:
  - GitLab can [host the models and connect to them through the cloud-based AI gateway](set_up.md).
  - Your organization can [use GitLab Duo Self-Hosted](../../../../administration/gitlab_duo_self_hosted/_index.md),
    which means you host the AI gateway and language models. You can use GitLab AI vendor models
    or the other supported language models.
## How the prompt is built
To learn about the code that builds the prompt, see these files:

View File

@ -1,71 +0,0 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillPartitionedWebHookLogsDaily < BatchedMigrationJob
cursor :id, :created_at
operation_name :update_all
feature_category :integrations
PARTITION_RANGE_CONDITION_REGEX = /'(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})'/
# rubocop:disable Metrics/BlockLength -- raw SQL is more readable for this migration
# rubocop:disable Metrics/MethodLength -- raw SQL is more readable for this migration
def perform
each_sub_batch do |relation|
connection.execute(<<~SQL)
INSERT INTO web_hook_logs_daily (
web_hook_id,
trigger,
url,
request_headers,
request_data,
response_headers,
response_body,
response_status,
execution_duration,
internal_error_message,
url_hash,
created_at,
updated_at
)
SELECT
source.web_hook_id,
source.trigger,
source.url,
source.request_headers,
source.request_data,
source.response_headers,
source.response_body,
source.response_status,
source.execution_duration,
source.internal_error_message,
source.url_hash,
source.created_at,
source.updated_at
FROM web_hook_logs AS source
WHERE id IN (#{relation.select(:id).to_sql})
AND created_at >= #{connection.quote(partition_lower_range)} -- Insert data in an existing partition
ON CONFLICT (id, created_at) DO NOTHING
SQL
end
end
# rubocop:enable Metrics/BlockLength
# rubocop:enable Metrics/MethodLength
private
def first_partition
Gitlab::Database::PostgresPartitionedTable
.find_by_name_in_current_schema('web_hook_logs_daily')
.postgres_partitions
.order(identifier: :asc)
.first
end
def partition_lower_range
first_partition.condition.match(PARTITION_RANGE_CONDITION_REGEX)[1]
end
end
end
end

View File

@ -270,7 +270,7 @@ dependency-scanning:
ADDITIONAL_SUPPORTED_FILES: "pom.xml,build.gradle,build.gradle.kts,build.sbt,requirements.pip,Pipfile,requires.txt,setup.py"
stage: !reference [.ds-analyzer, stage]
image:
name: "$SECURE_ANALYZERS_PREFIX/security-products/dependency-scanning:v0"
name: "$SECURE_ANALYZERS_PREFIX/dependency-scanning:v0"
script:
- /analyzer run
allow_failure: true

View File

@ -4,20 +4,36 @@ module Gitlab
module Metrics
module Samplers
class ConcurrencyLimitSampler < BaseSampler
DEFAULT_SAMPLING_INTERVAL_SECONDS = 60
include ExclusiveLeaseGuard
DEFAULT_SAMPLING_INTERVAL_SECONDS = 30
def sample
worker_maps.workers.each do |w|
queue_size = concurrent_limit_service.queue_size(w.name)
report_queue_size(w.name, queue_size) if queue_size > 0
try_obtain_lease do
worker_maps.workers.each do |w|
queue_size = concurrent_limit_service.queue_size(w.name)
report_queue_size(w.name, queue_size) if queue_size > 0
concurrent_worker_count = concurrent_limit_service.concurrent_worker_count(w.name)
report_concurrent_workers(w.name, concurrent_worker_count) if concurrent_worker_count > 0
concurrent_worker_count = concurrent_limit_service.concurrent_worker_count(w.name)
report_concurrent_workers(w.name, concurrent_worker_count) if concurrent_worker_count > 0
end
end
end
private
# Used by ExclusiveLeaseGuard
def lease_timeout
# Lease timeout and sampling interval should be the same
# so that only 1 process runs the sampler on every sampling interval
DEFAULT_SAMPLING_INTERVAL_SECONDS
end
# Overrides ExclusiveLeaseGuard to not release lease after the sample to ensure we do not oversample
def lease_release?
false
end
def worker_maps
Gitlab::SidekiqMiddleware::ConcurrencyLimit::WorkersMap
end

View File

@ -12082,9 +12082,6 @@ msgstr ""
msgid "Cannot make the epic confidential if it contains non-confidential issues"
msgstr ""
msgid "Cannot mark group for deletion: feature not supported"
msgstr ""
msgid "Cannot mark the %{work_item_type} as duplicate of itself."
msgstr ""
@ -40184,6 +40181,9 @@ msgstr ""
msgid "No file selected"
msgstr ""
msgid "No files found"
msgstr ""
msgid "No files found."
msgstr ""
@ -50811,6 +50811,9 @@ msgstr ""
msgid "Repository|New tag"
msgstr ""
msgid "Repository|Search (e.g. *.vue) (%{modifierKey}P)"
msgstr ""
msgid "Repository|This directory"
msgstr ""
@ -55066,6 +55069,12 @@ msgstr ""
msgid "SecurityOrchestration|Send bot message"
msgstr ""
msgid "SecurityOrchestration|Settings from outside of the policy can override variables when the policy runs, except the variables defined in the denylist."
msgstr ""
msgid "SecurityOrchestration|Settings from outside of the policy cannot override variables when the policy runs, except for the variables defined in the allowlist."
msgstr ""
msgid "SecurityOrchestration|Several merge request approval policy criteria have been deprecated. Policies using these criteria will not work after GitLab 18.0 (May 10, 2025). You must edit these policies to replace or remove the deprecated criteria."
msgstr ""
@ -55246,6 +55255,9 @@ msgstr ""
msgid "SecurityOrchestration|Variable option"
msgstr ""
msgid "SecurityOrchestration|Variables override configuration"
msgstr ""
msgid "SecurityOrchestration|Variables override configuration has invalid structure."
msgstr ""

View File

@ -1,4 +1,4 @@
ARG GDK_SHA=e71df04fc9595c5cf6ab2ecc0fee703164f7b132
ARG GDK_SHA=59037d83cf1134ab20ec3965f9c97442d566a35e
# Use tag prefix when running on 'stable' branch to make sure 'protected' image is used which is not deleted by registry cleanup
ARG GDK_BASE_TAG_PREFIX

View File

@ -0,0 +1,38 @@
# frozen_string_literal: true
require_relative '../../migration_helpers'
module RuboCop
module Cop
module Migration
# This cop prevents the use of Feature.enabled? and Feature.disabled? in migrations.
# Using feature flags in migrations is forbidden to avoid breaking the migration in the future.
# Instead, use the feature_flag_enabled?(feature_name) migration helper method.
# https://docs.gitlab.com/development/migration_style_guide/#using-application-code-in-migrations-discouraged
class PreventFeatureFlagsUsage < RuboCop::Cop::Base
include MigrationHelpers
MSG = "Do not use Feature.enabled? or Feature.disabled? in migrations. " \
"Use the feature_flag_enabled?(feature_name) migration helper method."
# @!method feature_enabled?(node)
def_node_matcher :feature_enabled?, <<~PATTERN
(send (const nil? :Feature) :enabled? ...)
PATTERN
# @!method feature_disabled?(node)
def_node_matcher :feature_disabled?, <<~PATTERN
(send (const nil? :Feature) :disabled? ...)
PATTERN
def on_def(node)
return unless in_migration?(node)
node.each_descendant(:send) do |send_node|
add_offense(send_node) if feature_enabled?(send_node) || feature_disabled?(send_node)
end
end
end
end
end
end
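To illustrate what the cop above flags and what it expects instead, here is a hypothetical migration: the class name and flag name are made up, and the preferred call is the `feature_flag_enabled?` helper named in the cop's message.

```ruby
# frozen_string_literal: true

class ExampleBackfillWithFlag < Gitlab::Database::Migration[2.3]
  def up
    # Offense: Migration/PreventFeatureFlagsUsage
    # return unless Feature.enabled?(:my_flag)

    # Preferred: the migration helper, evaluated at migration time
    return unless feature_flag_enabled?(:my_flag)

    # ... backfill logic ...
  end

  def down
    # no-op
  end
end
```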

View File

@ -7,7 +7,6 @@ RSpec.describe Admin::ProjectsController, feature_category: :groups_and_projects
before do
sign_in(create(:admin))
stub_feature_flags(downtier_delayed_deletion: false)
end
describe 'GET /projects' do
@ -48,6 +47,8 @@ RSpec.describe Admin::ProjectsController, feature_category: :groups_and_projects
end
it 'does not have N+1 queries', :use_clean_rails_memory_store_caching, :request_store do
pending('https://gitlab.com/gitlab-org/gitlab/-/issues/538822')
get :index
control = ActiveRecord::QueryRecorder.new { get :index }

View File

@ -106,6 +106,61 @@ RSpec.describe GroupsController, :with_current_organization, factory_default: :k
)
end
end
context 'adjourned deletion' do
render_views
let_it_be(:subgroup) { create(:group, :private, parent: group) }
let(:ancestor_notice_regex) do
/The parent group of this group is pending deletion, so this group will also be deleted on .*./
end
subject(:get_show) { get :show, params: { id: subgroup.to_param } }
context 'when the parent group has not been scheduled for deletion' do
it 'does not show the notice' do
subject
expect(response.body).not_to match(ancestor_notice_regex)
end
end
context 'when the parent group has been scheduled for deletion' do
before do
create(:group_deletion_schedule,
group: subgroup.parent,
marked_for_deletion_on: Date.current,
deleting_user: user
)
end
it 'shows the notice that the parent group has been scheduled for deletion' do
subject
expect(response.body).to match(ancestor_notice_regex)
end
context 'when the group itself has also been scheduled for deletion' do
before do
create(:group_deletion_schedule,
group: subgroup,
marked_for_deletion_on: Date.current,
deleting_user: user
)
end
it 'does not show the notice that the parent group has been scheduled for deletion' do
subject
expect(response.body).not_to match(ancestor_notice_regex)
# However, shows the notice that the group itself has been marked for deletion.
expect(response.body).to match(
/This group and its subgroups and projects are pending deletion, and will be deleted on .*./
)
end
end
end
end
end
describe 'GET #details' do
@ -508,56 +563,108 @@ RSpec.describe GroupsController, :with_current_organization, factory_default: :k
sign_in(user)
end
context 'delayed deletion feature is available' do
context 'success' do
it 'marks the group for delayed deletion' do
expect { subject }.to change { group.reload.marked_for_deletion? }.from(false).to(true)
end
context 'success' do
it 'marks the group for delayed deletion' do
expect { subject }.to change { group.reload.marked_for_deletion? }.from(false).to(true)
end
it 'does not immediately delete the group' do
Sidekiq::Testing.fake! do
expect { subject }.not_to change { GroupDestroyWorker.jobs.size }
end
it 'does not immediately delete the group' do
Sidekiq::Testing.fake! do
expect { subject }.not_to change { GroupDestroyWorker.jobs.size }
end
end
context 'for a html request' do
it 'redirects to group path' do
subject
expect(response).to redirect_to(group_path(group))
end
end
context 'for a json request', :freeze_time do
let(:format) { :json }
it 'returns json with message' do
subject
# FIXME: Replace `group.marked_for_deletion_on` with `group` after https://gitlab.com/gitlab-org/gitlab/-/work_items/527085
expect(json_response['message'])
.to eq(
"'#{group.name}' has been scheduled for deletion and will be deleted on " \
"#{permanent_deletion_date_formatted(group.marked_for_deletion_on)}.")
end
end
end
context 'failure' do
before do
allow(::Groups::MarkForDeletionService).to receive_message_chain(:new, :execute).and_return({ status: :error, message: 'error' })
end
it 'does not mark the group for deletion' do
expect { subject }.not_to change { group.reload.marked_for_deletion? }.from(false)
end
context 'for a html request' do
it 'redirects to group edit page' do
subject
expect(response).to redirect_to(edit_group_path(group))
expect(flash[:alert]).to include 'error'
end
end
context 'for a json request' do
let(:format) { :json }
it 'returns json with message' do
subject
expect(json_response['message']).to eq("error")
end
end
end
context 'when group is already marked for deletion' do
before do
create(:group_deletion_schedule, group: group, marked_for_deletion_on: Date.current)
end
context 'when permanently_remove param is set' do
let(:params) { { permanently_remove: true } }
context 'for a html request' do
it 'redirects to group path' do
it 'deletes the group immediately and redirects to root path' do
expect(GroupDestroyWorker).to receive(:perform_async)
subject
expect(response).to redirect_to(group_path(group))
expect(response).to redirect_to(root_path)
expect(flash[:toast]).to include "Group '#{group.name}' is being deleted."
end
end
context 'for a json request', :freeze_time do
context 'for a json request' do
let(:format) { :json }
it 'returns json with message' do
it 'deletes the group immediately and returns json with message' do
expect(GroupDestroyWorker).to receive(:perform_async)
subject
# FIXME: Replace `group.marked_for_deletion_on` with `group` after https://gitlab.com/gitlab-org/gitlab/-/work_items/527085
expect(json_response['message'])
.to eq(
"'#{group.name}' has been scheduled for deletion and will be deleted on " \
"#{permanent_deletion_date_formatted(group.marked_for_deletion_on)}.")
expect(json_response['message']).to eq("Group '#{group.name}' is being deleted.")
end
end
end
context 'failure' do
before do
allow(::Groups::MarkForDeletionService).to receive_message_chain(:new, :execute).and_return({ status: :error, message: 'error' })
end
it 'does not mark the group for deletion' do
expect { subject }.not_to change { group.reload.marked_for_deletion? }.from(false)
end
context 'when permanently_remove param is not set' do
context 'for a html request' do
it 'redirects to group edit page' do
it 'redirects to edit path with error' do
subject
expect(response).to redirect_to(edit_group_path(group))
expect(flash[:alert]).to include 'error'
expect(flash[:alert]).to include "Group has been already marked for deletion"
end
end
@ -567,93 +674,10 @@ RSpec.describe GroupsController, :with_current_organization, factory_default: :k
it 'returns json with message' do
subject
expect(json_response['message']).to eq("error")
expect(json_response['message']).to eq("Group has been already marked for deletion")
end
end
end
context 'when group is already marked for deletion' do
before do
create(:group_deletion_schedule, group: group, marked_for_deletion_on: Date.current)
end
context 'when permanently_remove param is set' do
let(:params) { { permanently_remove: true } }
context 'for a html request' do
it 'deletes the group immediately and redirects to root path' do
expect(GroupDestroyWorker).to receive(:perform_async)
subject
expect(response).to redirect_to(root_path)
expect(flash[:toast]).to include "Group '#{group.name}' is being deleted."
end
end
context 'for a json request' do
let(:format) { :json }
it 'deletes the group immediately and returns json with message' do
expect(GroupDestroyWorker).to receive(:perform_async)
subject
expect(json_response['message']).to eq("Group '#{group.name}' is being deleted.")
end
end
end
context 'when permanently_remove param is not set' do
context 'for a html request' do
it 'redirects to edit path with error' do
subject
expect(response).to redirect_to(edit_group_path(group))
expect(flash[:alert]).to include "Group has been already marked for deletion"
end
end
context 'for a json request' do
let(:format) { :json }
it 'returns json with message' do
subject
expect(json_response['message']).to eq("Group has been already marked for deletion")
end
end
end
end
end
context 'delayed deletion feature is not available', :sidekiq_inline do
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
context 'for a html request' do
it 'immediately schedules a group destroy and redirects to root page with alert about immediate deletion' do
Sidekiq::Testing.fake! do
expect { subject }.to change { GroupDestroyWorker.jobs.size }.by(1)
end
expect(response).to redirect_to(root_path)
expect(flash[:toast]).to include "Group '#{group.name}' is being deleted."
end
end
context 'for a json request' do
let(:format) { :json }
it 'immediately schedules a group destroy and returns json with message' do
Sidekiq::Testing.fake! do
expect { subject }.to change { GroupDestroyWorker.jobs.size }.by(1)
end
expect(json_response['message']).to eq("Group '#{group.name}' is being deleted.")
end
end
end
end
@ -685,47 +709,33 @@ RSpec.describe GroupsController, :with_current_organization, factory_default: :k
sign_in(user)
end
context 'when the delayed deletion feature is available' do
context 'when the restore succeeds' do
it 'restores the group' do
expect { subject }.to change { group.reload.marked_for_deletion? }.from(true).to(false)
end
it 'renders success notice upon restoring' do
subject
expect(response).to redirect_to(edit_group_path(group))
expect(flash[:notice]).to include "Group '#{group.name}' has been successfully restored."
end
context 'when the restore succeeds' do
it 'restores the group' do
expect { subject }.to change { group.reload.marked_for_deletion? }.from(true).to(false)
end
context 'when the restore fails' do
before do
allow(::Groups::RestoreService).to receive_message_chain(:new, :execute).and_return({ status: :error, message: 'error' })
end
it 'renders success notice upon restoring' do
subject
it 'does not restore the group' do
expect { subject }.not_to change { group.reload.marked_for_deletion? }.from(true)
end
it 'redirects to group edit page' do
subject
expect(response).to redirect_to(edit_group_path(group))
expect(flash[:alert]).to include 'error'
end
expect(response).to redirect_to(edit_group_path(group))
expect(flash[:notice]).to include "Group '#{group.name}' has been successfully restored."
end
end
context 'when delayed deletion feature is not available' do
context 'when the restore fails' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
allow(::Groups::RestoreService).to receive_message_chain(:new, :execute).and_return({ status: :error, message: 'error' })
end
it 'returns 404' do
it 'does not restore the group' do
expect { subject }.not_to change { group.reload.marked_for_deletion? }.from(true)
end
it 'redirects to group edit page' do
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(response).to redirect_to(edit_group_path(group))
expect(flash[:alert]).to include 'error'
end
end
end

View File

@ -1103,47 +1103,91 @@ RSpec.describe ProjectsController, feature_category: :groups_and_projects do
describe "#destroy", :enable_admin_mode do
let_it_be(:admin) { create(:admin) }
context 'when the delayed deletion feature is not available' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
let_it_be(:group) { create(:group, owners: user) }
let_it_be_with_reload(:project) { create(:project, group: group) }
it "redirects to the dashboard", :sidekiq_might_not_need_inline do
controller.instance_variable_set(:@project, project)
sign_in(admin)
before do
sign_in(user)
end
orig_id = project.id
shared_examples 'deletes project right away' do
specify :aggregate_failures do
delete :destroy, params: { namespace_id: project.namespace, id: project }
expect { Project.find(orig_id) }.to raise_error(ActiveRecord::RecordNotFound)
expect(project.marked_for_deletion?).to be_falsey
expect(response).to have_gitlab_http_status(:found)
expect(flash[:toast]).to eq(format(_("Project &#39;%{project_name}&#39; is being deleted."), project_name: project.full_name))
expect(response).to redirect_to(dashboard_projects_path)
end
end
context "when the project is forked" do
let(:project) { create(:project, :repository) }
let(:forked_project) { fork_project(project, nil, repository: true) }
let(:merge_request) do
create(:merge_request,
source_project: forked_project,
target_project: project)
shared_examples 'marks project for deletion' do
specify :aggregate_failures do
delete :destroy, params: { namespace_id: project.namespace, id: project }
expect(project.reload.marked_for_deletion?).to be_truthy
expect(project.reload.hidden?).to be_falsey
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(project_path(project))
expect(flash[:toast]).to be_nil
end
end
it_behaves_like 'marks project for deletion'
it 'does not mark project for deletion because of error' do
message = 'Error'
expect(::Projects::MarkForDeletionService).to receive_message_chain(:new, :execute).and_return({ status: :error, message: message })
delete :destroy, params: { namespace_id: project.namespace, id: project }
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:edit)
expect(flash[:alert]).to include(message)
end
context 'when instance setting is set to 0 days' do
it 'deletes project right away' do
stub_application_setting(deletion_adjourned_period: 0)
delete :destroy, params: { namespace_id: project.namespace, id: project }
expect(project.marked_for_deletion?).to be_falsey
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(dashboard_projects_path)
end
end
context 'when project is already marked for deletion' do
let_it_be(:project) { create(:project, group: group, marked_for_deletion_at: Date.current) }
context 'when permanently_delete param is set' do
it 'deletes project right away' do
expect(ProjectDestroyWorker).to receive(:perform_async)
delete :destroy, params: { namespace_id: project.namespace, id: project, permanently_delete: true }
expect(project.reload.pending_delete).to eq(true)
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(dashboard_projects_path)
end
end
it "closes all related merge requests", :sidekiq_might_not_need_inline do
project.merge_requests << merge_request
sign_in(admin)
context 'when permanently_delete param is not set' do
it 'does nothing' do
expect(ProjectDestroyWorker).not_to receive(:perform_async)
delete :destroy, params: { namespace_id: forked_project.namespace, id: forked_project }
delete :destroy, params: { namespace_id: project.namespace, id: project }
expect(merge_request.reload.state).to eq('closed')
expect(project.reload.pending_delete).to eq(false)
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(project_path(project))
end
end
end
context 'when the delayed deletion feature is available' do
let_it_be(:group) { create(:group, owners: user) }
let_it_be_with_reload(:project) { create(:project, group: group) }
context 'for projects in user namespace' do
let_it_be_with_reload(:project) { create(:project, namespace: user.namespace) }
before do
sign_in(user)

View File

@ -7,7 +7,6 @@ RSpec.describe 'Group', :with_current_organization, feature_category: :groups_an
before do
sign_in(user)
stub_feature_flags(downtier_delayed_deletion: false)
end
matcher :have_namespace_error_message do
@ -333,10 +332,11 @@ RSpec.describe 'Group', :with_current_organization, feature_category: :groups_an
expect(page).to have_selector '#confirm_name_input:focus'
end
it 'removes group', :sidekiq_might_not_need_inline do
expect { remove_with_confirm('Delete group', group.path) }.to change { Group.count }.by(-1)
expect(group.members.all.count).to be_zero
expect(page).to have_content "is being deleted"
it 'marks the group for deletion' do
expect { remove_with_confirm('Delete group', group.path) }.to change {
group.reload.marked_for_deletion?
}.from(false).to(true)
expect(page).to have_content "pending deletion"
end
end

View File

@ -7,10 +7,6 @@ RSpec.describe 'Copy as GFM', :js, feature_category: :markdown do
include RepoHelpers
include ActionView::Helpers::JavaScriptHelper
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
describe 'Copying rendered GFM' do
before do
@feat = MarkdownFeature.new

View File

@ -274,62 +274,33 @@ RSpec.describe 'Project', feature_category: :source_code_management do
describe 'removal', :js do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group, :public, owners: user) }
let_it_be_with_reload(:project) { create(:project, group: group) }
let_it_be_with_reload(:project_to_delete) { create(:project, group: group) }
before do
stub_application_setting(deletion_adjourned_period: 7)
sign_in user
visit edit_project_path(project_to_delete)
end
context 'when the downtier_delayed_deletion feature flag is enabled' do
let(:project_to_delete) { project }
it 'deletes project delayed and is restorable', :freeze_time do
deletion_adjourned_period = ::Gitlab::CurrentSettings.deletion_adjourned_period
deletion_date = (Time.now.utc + deletion_adjourned_period.days).strftime('%F')
before do
sign_in user
visit edit_project_path(project)
end
expect(page).to have_content("This action will place this project, including all its resources, in a pending deletion state for #{deletion_adjourned_period} days, and delete it permanently on #{deletion_date}.")
it 'deletes project delayed and is restorable', :freeze_time do
deletion_adjourned_period = ::Gitlab::CurrentSettings.deletion_adjourned_period
deletion_date = (Time.now.utc + deletion_adjourned_period.days).strftime('%F')
click_button "Delete project"
expect(page).to have_content("This action will place this project, including all its resources, in a pending deletion state for #{deletion_adjourned_period} days, and delete it permanently on #{deletion_date}.")
expect(page).to have_content("This project can be restored until #{deletion_date}.")
click_button "Delete project"
fill_in 'confirm_name_input', with: project_to_delete.path_with_namespace
click_button 'Yes, delete project'
wait_for_requests
expect(page).to have_content("This project can be restored until #{deletion_date}.")
expect(page).to have_content("This project is pending deletion, and will be deleted on #{deletion_date}. Repository and other project resources are read-only.")
fill_in 'confirm_name_input', with: project_to_delete.path_with_namespace
click_button 'Yes, delete project'
wait_for_requests
visit inactive_dashboard_projects_path
expect(page).to have_content("This project is pending deletion, and will be deleted on #{deletion_date}. Repository and other project resources are read-only.")
visit inactive_dashboard_projects_path
expect(page).to have_content(project_to_delete.name_with_namespace)
end
end
context 'when the downtier_delayed_deletion feature flag is disabled' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
sign_in(user)
visit edit_project_path(project)
end
it 'focuses on the confirmation field' do
click_button 'Delete project'
expect(page).to have_selector '#confirm_name_input:focus'
end
it 'deletes a project', :sidekiq_inline do
expect { remove_with_confirm('Delete project', project.path_with_namespace, 'Yes, delete project') }.to change { Project.count }.by(-1)
expect(page).to have_content "Project '#{project.full_name}' is being deleted."
expect(Project.all.count).to be_zero
expect(project.issues).to be_empty
expect(project.merge_requests).to be_empty
end
expect(page).to have_content(project_to_delete.name_with_namespace)
end
end

View File

@ -0,0 +1,56 @@
import { nextTick } from 'vue';
import { GlBadge, GlButton, GlSearchBoxByType } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import TreeList from '~/repository/file_tree_browser/components/tree_list.vue';
describe('TreeList', () => {
let wrapper;
const findBadge = () => wrapper.findComponent(GlBadge);
const findListViewButton = () => wrapper.findAllComponents(GlButton).at(0);
const findTreeViewButton = () => wrapper.findAllComponents(GlButton).at(1);
const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
const findNoFilesMessage = () => wrapper.findByText('No files found');
const createComponent = (props = {}) => {
wrapper = shallowMountExtended(TreeList, {
propsData: { totalFilesCount: 5, ...props },
});
};
beforeEach(() => createComponent());
it('renders the header and file count badge', () => {
expect(wrapper.find('h5').text()).toBe('Files');
expect(findBadge().text()).toBe('5');
});
it('renders list and tree view buttons', () => {
expect(findListViewButton().props('selected')).toBe(true);
expect(findTreeViewButton().props('selected')).toBe(false);
});
it('selects the tree view button when clicked', async () => {
findTreeViewButton().vm.$emit('click');
await nextTick();
expect(findTreeViewButton().props('selected')).toBe(true);
expect(findListViewButton().props('selected')).toBe(false);
});
it('selects the list view button when clicked', async () => {
findListViewButton().vm.$emit('click');
await nextTick();
expect(findListViewButton().props('selected')).toBe(true);
expect(findTreeViewButton().props('selected')).toBe(false);
});
it('renders search box', () => {
expect(findSearchBox().exists()).toBe(true);
});
it('renders empty state message when no files are available', () => {
expect(findNoFilesMessage().exists()).toBe(true);
});
});


@ -0,0 +1,25 @@
import { shallowMount } from '@vue/test-utils';
import FileTreeBrowser, { TREE_WIDTH } from '~/repository/file_tree_browser/file_tree_browser.vue';
import FileBrowserHeight from '~/diffs/components/file_browser_height.vue';
import TreeList from '~/repository/file_tree_browser/components/tree_list.vue';
describe('FileTreeBrowser', () => {
let wrapper;
const findFileBrowserHeight = () => wrapper.findComponent(FileBrowserHeight);
const findTreeList = () => wrapper.findComponent(TreeList);
const createComponent = (props = {}) => {
wrapper = shallowMount(FileTreeBrowser, { propsData: props });
};
beforeEach(() => createComponent());
it('renders the file browser height component', () => {
expect(findFileBrowserHeight().attributes('style')).toBe(`width: ${TREE_WIDTH}px;`);
});
it('renders the tree list component when not loading', () => {
expect(findTreeList().props('totalFilesCount')).toBe(0);
});
});


@ -783,60 +783,46 @@ RSpec.describe GroupsHelper, feature_category: :groups_and_projects do
end
end
context 'delayed deletion feature is available' do
it_behaves_like 'delayed deletion message'
context 'group is already marked for deletion' do
before do
allow(group).to receive(:adjourned_deletion?).and_return(true)
end
it_behaves_like 'delayed deletion message'
context 'group is already marked for deletion' do
before do
create(:group_deletion_schedule, group: group, marked_for_deletion_on: Date.current)
allow(group).to receive(:marked_for_deletion?).and_return(true)
end
it_behaves_like 'permanent deletion message'
end
context 'when group delay deletion is enabled' do
before do
stub_application_setting(delayed_group_deletion: true)
end
it_behaves_like 'delayed deletion message'
end
context 'when group delay deletion is disabled' do
before do
stub_application_setting(delayed_group_deletion: false)
end
it_behaves_like 'delayed deletion message'
end
context "group has not been marked for deletion" do
let(:group) { build(:group) }
context "'permanently_remove' argument is set to 'true'" do
it "displays permanent deletion message" do
allow(group).to receive(:marked_for_deletion?).and_return(false)
allow(group).to receive(:adjourned_deletion?).and_return(true)
expect(subject).to include(delayed_deletion_message)
expect(helper.remove_group_message(group, true)).to include(*permanent_deletion_message)
end
end
end
end
context 'delayed deletion feature is not available' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
create(:group_deletion_schedule, group: group, marked_for_deletion_on: Date.current)
allow(group).to receive(:marked_for_deletion?).and_return(true)
end
it_behaves_like 'permanent deletion message'
end
context 'when group delay deletion is enabled' do
before do
stub_application_setting(delayed_group_deletion: true)
end
it_behaves_like 'delayed deletion message'
end
context 'when group delay deletion is disabled' do
before do
stub_application_setting(delayed_group_deletion: false)
end
it_behaves_like 'delayed deletion message'
end
context "group has not been marked for deletion" do
let(:group) { build(:group) }
context "'permanently_remove' argument is set to 'true'" do
it "displays permanent deletion message" do
allow(group).to receive(:marked_for_deletion?).and_return(false)
allow(group).to receive(:adjourned_deletion?).and_return(true)
expect(subject).to include(delayed_deletion_message)
expect(helper.remove_group_message(group, true)).to include(*permanent_deletion_message)
end
end
end
end
describe '#group_merge_requests' do


@ -1,87 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionedWebHookLogsDaily,
:migration_with_transaction,
feature_category: :integrations do
let(:connection) { ApplicationRecord.connection }
let(:web_hook_logs) { table(:web_hook_logs, primary_key: :id) }
let(:web_hook_logs_daily) { table(:web_hook_logs_daily, primary_key: :id) }
let(:start_cursor) { [0, nil] }
let(:end_cursor) { [web_hook_logs.last.id, Time.current.to_s] }
let(:migration) do
described_class.new(
start_cursor: start_cursor,
end_cursor: end_cursor,
batch_table: :web_hook_logs,
batch_column: :id,
sub_batch_size: 1,
pause_ms: 0,
connection: connection
)
end
before do
connection.transaction do
from = 1.month.ago.beginning_of_month
to = 1.month.ago.end_of_month
suffix = from.strftime('%Y%m')
partition_name = "gitlab_partitions_dynamic.web_hook_logs_#{suffix}"
current_month_start = Time.current.beginning_of_month
current_month_end = current_month_start.end_of_month
current_month_suffix = current_month_start.strftime('%Y%m')
current_month_partition_name = "gitlab_partitions_dynamic.web_hook_logs_#{current_month_suffix}"
connection.execute <<~SQL
ALTER TABLE web_hook_logs DISABLE TRIGGER ALL; -- Don't sync records to partitioned table
CREATE TABLE IF NOT EXISTS #{partition_name}
PARTITION OF public.web_hook_logs
FOR VALUES FROM (#{connection.quote(from)}) TO (#{connection.quote(to)});
CREATE TABLE IF NOT EXISTS #{current_month_partition_name}
PARTITION OF public.web_hook_logs
FOR VALUES FROM (#{connection.quote(current_month_start)}) TO (#{connection.quote(current_month_end)});
SQL
create_web_hook_logs(created_at: from)
create_web_hook_logs(created_at: 1.day.ago)
connection.execute <<~SQL
ALTER TABLE web_hook_logs ENABLE TRIGGER ALL;
SQL
end
end
describe '#perform' do
it 'backfills web_hook_logs_daily from web_hook_logs only for existing partition' do
migration.perform
expect(web_hook_logs_daily.count).to eq(1)
end
end
private
def create_web_hook_logs(**params)
web_hook_logs_params = {
web_hook_id: 1,
trigger: 'push',
url: 'https://example.com/webhook',
request_headers: { "Content-Type": "application/json" },
request_data: { key: "value" },
response_headers: { Server: "nginx" },
response_body: { status: "success" },
response_status: '200',
execution_duration: 0.5,
url_hash: 'abc123',
updated_at: params[:created_at]
}
web_hook_logs_params.merge!(params)
web_hook_logs.create!(web_hook_logs_params)
end
end


@ -2,10 +2,14 @@
require 'spec_helper'
RSpec.describe Gitlab::Metrics::Samplers::ConcurrencyLimitSampler, feature_category: :scalability do
RSpec.describe Gitlab::Metrics::Samplers::ConcurrencyLimitSampler, :clean_gitlab_redis_shared_state,
feature_category: :scalability do
include ExclusiveLeaseHelpers
let(:workers_with_limits) { [Import::ReassignPlaceholderUserRecordsWorker] * 5 }
let(:lease_key) { 'gitlab/metrics/samplers/concurrency_limit_sampler' }
let(:sampler) { described_class.new }
subject(:sample) { described_class.new.sample }
subject(:sample) { sampler.sample }
it_behaves_like 'metrics sampler', 'CONCURRENCY_LIMIT_SAMPLER'
@ -58,5 +62,44 @@ RSpec.describe Gitlab::Metrics::Samplers::ConcurrencyLimitSampler, feature_categ
sample
end
context 'when lease can be obtained' do
before do
stub_exclusive_lease(lease_key, timeout: described_class::DEFAULT_SAMPLING_INTERVAL_SECONDS)
end
it 'calls concurrent_limit_service methods' do
expect(Gitlab::SidekiqMiddleware::ConcurrencyLimit::ConcurrencyLimitService)
.to receive(:queue_size)
.exactly(workers_with_limits.size)
.and_call_original
expect(Gitlab::SidekiqMiddleware::ConcurrencyLimit::ConcurrencyLimitService)
.to receive(:concurrent_worker_count)
.exactly(workers_with_limits.size)
.and_call_original
sample
end
it 'does not release the lease' do
sample
expect(sampler.exclusive_lease.exists?).to be_truthy
end
end
context 'when exclusive lease cannot be obtained' do
before do
stub_exclusive_lease_taken(lease_key, timeout: described_class::DEFAULT_SAMPLING_INTERVAL_SECONDS)
end
it 'does not call concurrent_limit_service' do
expect(Gitlab::SidekiqMiddleware::ConcurrencyLimit::ConcurrencyLimitService).not_to receive(:queue_size)
expect(Gitlab::SidekiqMiddleware::ConcurrencyLimit::ConcurrencyLimitService)
.not_to receive(:concurrent_worker_count)
sample
end
end
end
end
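A minimal Ruby sketch of the lease guard the new sampler examples above exercise, assuming Gitlab::ExclusiveLease semantics; workers_with_limits and the report_* helpers are illustrative names, not the shipped sampler code.

def sample
  # Only the process that wins the cross-process lease reports this cycle.
  return unless exclusive_lease.try_obtain

  workers_with_limits.each do |worker|
    report_queue_size(worker)         # assumed wrapper around ConcurrencyLimitService.queue_size
    report_concurrent_workers(worker) # assumed wrapper around ConcurrencyLimitService.concurrent_worker_count
  end
  # The lease is deliberately not cancelled: the spec expects exclusive_lease.exists? to stay true.
end

def exclusive_lease
  @exclusive_lease ||= Gitlab::ExclusiveLease.new(
    'gitlab/metrics/samplers/concurrency_limit_sampler',
    timeout: self.class::DEFAULT_SAMPLING_INTERVAL_SECONDS
  )
end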


@ -1,50 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillPartitionWebHookLogDaily, feature_category: :integrations do
let!(:batched_migration) { described_class::MIGRATION }
context 'when executed on .com' do
before do
allow(Gitlab).to receive(:com_except_jh?).and_return(true)
end
it 'does not schedule a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
end
end
end
context 'when executed on self managed' do
before do
allow(Gitlab).to receive(:com_except_jh?).and_return(false)
end
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :web_hook_logs,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE
)
}
end
end
end
end


@ -13,17 +13,6 @@ RSpec.describe Namespaces::AdjournedDeletable, feature_category: :groups_and_pro
expect(project).to receive(:adjourned_deletion_configured?).and_return(false)
expect(project.adjourned_deletion?).to be false
end
context 'when downtier_delayed_deletion feature flag is disabled' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
it 'returns false', :aggregate_failures do
expect(project).not_to receive(:adjourned_deletion_configured?)
expect(project.adjourned_deletion?).to be false
end
end
end
describe '#adjourned_deletion_configured?' do
@ -45,14 +34,6 @@ RSpec.describe Namespaces::AdjournedDeletable, feature_category: :groups_and_pro
end
it { is_expected.to be true }
context 'when downtier_delayed_deletion feature flag is disabled' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
it { is_expected.to be false }
end
end
end
end
@ -67,16 +48,6 @@ RSpec.describe Namespaces::AdjournedDeletable, feature_category: :groups_and_pro
it 'returns true' do
expect(project.marked_for_deletion?).to be true
end
context 'when downtier_delayed_deletion feature flag is disabled' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
it 'returns false' do
expect(project.marked_for_deletion?).to be false
end
end
end
context 'when marked_for_deletion_at is nil' do
@ -102,16 +73,6 @@ RSpec.describe Namespaces::AdjournedDeletable, feature_category: :groups_and_pro
it 'returns self' do
expect(project.self_or_ancestor_marked_for_deletion).to eq(project)
end
context 'when downtier_delayed_deletion feature flag is disabled' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
it 'returns nil' do
expect(project.self_or_ancestor_marked_for_deletion).to be_nil
end
end
end
context 'when the project is not marked for deletion' do
@ -122,16 +83,6 @@ RSpec.describe Namespaces::AdjournedDeletable, feature_category: :groups_and_pro
it 'returns the first ancestor marked for deletion' do
expect(project.self_or_ancestor_marked_for_deletion).to eq(group)
end
context 'when downtier_delayed_deletion feature flag is disabled' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
it 'returns nil' do
expect(project.self_or_ancestor_marked_for_deletion).to be_nil
end
end
end
context 'when no ancestor is marked for deletion' do


@ -1009,28 +1009,10 @@ RSpec.describe API::Groups, :with_current_organization, feature_category: :group
end
context 'marked_for_deletion_on attribute' do
context 'when the downtier_delayed_deletion feature flag is enabled' do
before do
stub_feature_flags(downtier_delayed_deletion: true)
end
it 'is exposed' do
get api("/groups/#{group1.id}", user1)
it 'is exposed' do
get api("/groups/#{group1.id}", user1)
expect(json_response).to have_key 'marked_for_deletion_on'
end
end
context 'when the downtier_delayed_deletion feature flag is not enabled' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
it 'is not exposed' do
get api("/groups/#{group1.id}", user1)
expect(json_response).not_to have_key 'marked_for_deletion_on'
end
expect(json_response).to have_key 'marked_for_deletion_on'
end
end
@ -3312,115 +3294,99 @@ RSpec.describe API::Groups, :with_current_organization, feature_category: :group
end
end
context 'feature is available' do
context 'when delayed group deletion is enabled' do
before do
stub_application_setting(delayed_group_deletion: true)
it_behaves_like 'marks group for delayed deletion'
context 'when deletion adjourned period is 0' do
before do
stub_application_setting(deletion_adjourned_period: 0)
end
it_behaves_like 'immediately enqueues the job to delete the group'
end
context 'when delayed group deletion is disabled' do
before do
stub_application_setting(delayed_group_deletion: false)
end
it_behaves_like 'marks group for delayed deletion'
end
context 'when permanently_remove param is sent' do
before do
stub_application_setting(delayed_group_deletion: true)
end
context 'if permanently_remove is true' do
let(:params) { { permanently_remove: true } }
context 'if group is a subgroup' do
let(:subgroup) { create(:group, parent: group) }
subject { delete api("/groups/#{subgroup.id}", user), params: params }
context 'when group is already marked for deletion' do
before do
create(:group_deletion_schedule, group: subgroup, marked_for_deletion_on: Date.current)
end
context 'when full_path param is not passed' do
it_behaves_like 'does not immediately enqueues the job to delete the group',
'`full_path` is incorrect. You must enter the complete path for the subgroup.'
end
context 'when full_path param is not equal to full_path' do
let(:params) { { permanently_remove: true, full_path: subgroup.path } }
it_behaves_like 'does not immediately enqueues the job to delete the group',
'`full_path` is incorrect. You must enter the complete path for the subgroup.'
end
context 'when the full_path param is passed and it matches the full path of subgroup' do
let(:params) { { permanently_remove: true, full_path: subgroup.full_path } }
it_behaves_like 'immediately enqueues the job to delete the group'
end
end
context 'when group is not marked for deletion' do
it_behaves_like 'does not immediately enqueues the job to delete the group', 'Group must be marked for deletion first.'
end
end
it_behaves_like 'marks group for delayed deletion'
context 'if group is not a subgroup' do
subject { delete api("/groups/#{group.id}", user), params: params }
context 'when deletion adjourned period is 0' do
before do
stub_application_setting(deletion_adjourned_period: 0)
end
it_behaves_like 'immediately enqueues the job to delete the group'
end
context 'when permanently_remove param is sent' do
before do
stub_application_setting(delayed_group_deletion: true)
end
context 'if permanently_remove is true' do
let(:params) { { permanently_remove: true } }
context 'if group is a subgroup' do
let(:subgroup) { create(:group, parent: group) }
subject { delete api("/groups/#{subgroup.id}", user), params: params }
context 'when group is already marked for deletion' do
before do
create(:group_deletion_schedule, group: subgroup, marked_for_deletion_on: Date.current)
end
context 'when full_path param is not passed' do
it_behaves_like 'does not immediately enqueues the job to delete the group',
'`full_path` is incorrect. You must enter the complete path for the subgroup.'
end
context 'when full_path param is not equal to full_path' do
let(:params) { { permanently_remove: true, full_path: subgroup.path } }
it_behaves_like 'does not immediately enqueues the job to delete the group',
'`full_path` is incorrect. You must enter the complete path for the subgroup.'
end
context 'when the full_path param is passed and it matches the full path of subgroup' do
let(:params) { { permanently_remove: true, full_path: subgroup.full_path } }
it_behaves_like 'immediately enqueues the job to delete the group'
end
end
context 'when group is not marked for deletion' do
it_behaves_like 'does not immediately enqueues the job to delete the group', 'Group must be marked for deletion first.'
end
end
context 'if group is not a subgroup' do
subject { delete api("/groups/#{group.id}", user), params: params }
it_behaves_like 'does not immediately enqueues the job to delete the group', '`permanently_remove` option is only available for subgroups.'
end
end
context 'if permanently_remove is not true' do
context 'when it is false' do
let(:params) { { permanently_remove: false } }
it_behaves_like 'marks group for delayed deletion'
end
context 'when it is non boolean' do
let(:params) { { permanently_remove: 'something_random' } }
it_behaves_like 'marks group for delayed deletion'
end
end
it_behaves_like 'does not immediately enqueues the job to delete the group', '`permanently_remove` option is only available for subgroups.'
end
end
context 'when the mark for deletion service fails' do
before do
allow(::Groups::MarkForDeletionService).to receive_message_chain(:new, :execute)
.and_return({ status: :error, message: 'error' })
context 'if permanently_remove is not true' do
context 'when it is false' do
let(:params) { { permanently_remove: false } }
it_behaves_like 'marks group for delayed deletion'
end
it 'returns an error' do
subject
context 'when it is non boolean' do
let(:params) { { permanently_remove: 'something_random' } }
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('error')
it_behaves_like 'marks group for delayed deletion'
end
end
end
context 'feature is not available' do
context 'when the mark for deletion service fails' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
allow(::Groups::MarkForDeletionService).to receive_message_chain(:new, :execute)
.and_return({ status: :error, message: 'error' })
end
it_behaves_like 'immediately enqueues the job to delete the group'
it 'returns an error' do
subject
context 'when permanently_remove param is sent' do
before do
params.merge!(permanently_remove: true)
end
it_behaves_like 'immediately enqueues the job to delete the group'
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('error')
end
end
end
@ -3434,51 +3400,37 @@ RSpec.describe API::Groups, :with_current_organization, feature_category: :group
subject { post api("/groups/#{group.id}/restore", user) }
context 'when the downtier_delayed_deletion feature flag is enabled' do
context 'when authenticated as owner' do
context 'restoring is successful' do
it 'restores the group to original state' do
subject
context 'when authenticated as owner' do
context 'restoring is successful' do
it 'restores the group to original state' do
subject
expect(response).to have_gitlab_http_status(:created)
expect(json_response['marked_for_deletion_on']).to be_falsey
end
end
context 'when restoring fails' do
before do
allow(::Groups::RestoreService).to receive_message_chain(:new, :execute).and_return({ status: :error, message: 'error' })
end
it 'returns error' do
subject
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('error')
end
expect(response).to have_gitlab_http_status(:created)
expect(json_response['marked_for_deletion_on']).to be_falsey
end
end
context 'when authenticated as a user without access to the group' do
subject { post api("/groups/#{group.id}/restore", unauthorized_user) }
context 'when restoring fails' do
before do
allow(::Groups::RestoreService).to receive_message_chain(:new, :execute).and_return({ status: :error, message: 'error' })
end
it 'returns 403' do
it 'returns error' do
subject
expect(response).to have_gitlab_http_status(:forbidden)
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('error')
end
end
end
context 'when the downtier_delayed_deletion feature flag is disabled' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
context 'when authenticated as a user without access to the group' do
subject { post api("/groups/#{group.id}/restore", unauthorized_user) }
it 'returns 404' do
it 'returns 403' do
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end


@ -3277,8 +3277,8 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
end
context 'when project belongs to a user namespace' do
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace) }
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, namespace: user.namespace) }
it 'returns user web_url and avatar_url' do
get api(path, user)
@ -4098,39 +4098,25 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
let_it_be(:group) { create(:group, owners: user) }
let_it_be_with_reload(:project) { create(:project, group: group) }
context 'when the feature is available' do
it 'restores project' do
project.update!(archived: true, marked_for_deletion_at: 1.day.ago, deleting_user: user)
it 'restores project' do
project.update!(archived: true, marked_for_deletion_at: 1.day.ago, deleting_user: user)
post api("/projects/#{project.id}/restore", user)
post api("/projects/#{project.id}/restore", user)
expect(response).to have_gitlab_http_status(:created)
expect(json_response['archived']).to be_falsey
expect(json_response['marked_for_deletion_at']).to be_falsey
expect(json_response['marked_for_deletion_on']).to be_falsey
end
it 'returns error if project is already being deleted' do
message = 'Error'
expect(::Projects::RestoreService).to receive_message_chain(:new, :execute).and_return({ status: :error, message: message })
post api("/projects/#{project.id}/restore", user)
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response["message"]).to eq(message)
end
expect(response).to have_gitlab_http_status(:created)
expect(json_response['archived']).to be_falsey
expect(json_response['marked_for_deletion_at']).to be_falsey
expect(json_response['marked_for_deletion_on']).to be_falsey
end
context 'when the feature is not available' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
it 'returns error if project is already being deleted' do
message = 'Error'
expect(::Projects::RestoreService).to receive_message_chain(:new, :execute).and_return({ status: :error, message: message })
it 'returns error' do
post api("/projects/#{project.id}/restore", user)
post api("/projects/#{project.id}/restore", user)
expect(response).to have_gitlab_http_status(:not_found)
end
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response["message"]).to eq(message)
end
end
@ -5594,8 +5580,12 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
end
end
shared_examples 'marks project for deletion' do
it :aggregate_failures do
context 'for delayed deletion' do
let_it_be(:group) { create(:group) }
let_it_be_with_reload(:project) { create(:project, group: group, owners: user) }
let(:params) { {} }
it 'marks the project for deletion' do
expect(::Projects::MarkForDeletionService).to receive(:new).with(project, user, {}).and_call_original
delete api(path, user), params: params
@ -5603,71 +5593,59 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(response).to have_gitlab_http_status(:accepted)
expect(project.reload.marked_for_deletion?).to be_truthy
end
end
context 'for delayed deletion' do
let_it_be(:group) { create(:group) }
let_it_be_with_reload(:project) { create(:project, group: group, owners: user) }
let(:params) { {} }
it 'returns error if project cannot be marked for deletion' do
message = 'Error'
expect(::Projects::MarkForDeletionService).to receive_message_chain(:new, :execute).and_return({ status: :error, message: message })
before do
stub_licensed_features(adjourned_deletion_for_projects_and_groups: false)
delete api("/projects/#{project.id}", user)
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response["message"]).to eq(message)
end
context 'when the downtier_delayed_deletion feature flag is enabled' do
it_behaves_like 'marks project for deletion'
context 'when permanently_remove param is true' do
before do
params.merge!(permanently_remove: true)
end
context 'when permanently_remove param is true' do
context 'when project is already marked for deletion' do
before do
params.merge!(permanently_remove: true)
project.update!(archived: true, marked_for_deletion_at: 1.day.ago, deleting_user: user)
end
context 'when project is already marked for deletion' do
context 'with correct project full path' do
before do
project.update!(archived: true, marked_for_deletion_at: 1.day.ago, deleting_user: user)
params.merge!(full_path: project.full_path)
end
context 'with correct project full path' do
before do
params.merge!(full_path: project.full_path)
end
it_behaves_like 'deletes project immediately'
end
context 'with incorrect project full path' do
let(:error_message) { '`full_path` is incorrect. You must enter the complete path for the project.' }
before do
params.merge!(full_path: "#{project.full_path}-wrong-path")
end
it_behaves_like 'immediately delete project error'
end
it_behaves_like 'deletes project immediately'
end
context 'when project is not marked for deletion' do
let(:error_message) { 'Project must be marked for deletion first.' }
context 'with incorrect project full path' do
let(:error_message) { '`full_path` is incorrect. You must enter the complete path for the project.' }
before do
params.merge!(full_path: "#{project.full_path}-wrong-path")
end
it_behaves_like 'immediately delete project error'
end
end
context 'when project is not marked for deletion' do
let(:error_message) { 'Project must be marked for deletion first.' }
it_behaves_like 'immediately delete project error'
end
end
context 'when the downtier_delayed_deletion feature flag is disabled' do
context 'when deletion adjourned period is 0' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
stub_application_setting(deletion_adjourned_period: 0)
end
it_behaves_like 'deletes project immediately'
context 'when permanently_remove param is true' do
before do
params.merge!(permanently_remove: true)
end
it_behaves_like 'deletes project immediately'
end
end
end
end


@ -5,10 +5,6 @@ require 'spec_helper'
RSpec.describe Organizations::GroupsController, feature_category: :cell do
let_it_be(:organization) { create(:organization) }
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
describe 'GET #new' do
subject(:gitlab_request) { get new_groups_organization_path(organization) }
@ -200,25 +196,21 @@ RSpec.describe Organizations::GroupsController, feature_category: :cell do
end
describe 'DELETE #destroy' do
let_it_be(:group) { create(:group, organization: organization) }
let_it_be_with_reload(:group) { create(:group, organization: organization) }
shared_examples 'deletes the group' do
shared_examples 'marks the group for deletion' do
specify do
expect_next_instance_of(Groups::DestroyService) do |instance|
expect(instance).to receive(:async_execute)
end
gitlab_request
expect(group).to be_marked_for_deletion
end
end
shared_examples 'unable to delete the group' do
shared_examples 'does not mark the group for deletion' do
specify do
expect_any_instance_of(Groups::DestroyService) do |instance|
expect(instance).not_to receive(:async_execute)
end
gitlab_request
expect(group).not_to be_marked_for_deletion
end
end
@ -236,7 +228,7 @@ RSpec.describe Organizations::GroupsController, feature_category: :cell do
end
it_behaves_like 'organization - redirects to sign in page'
it_behaves_like 'unable to delete the group'
it_behaves_like 'does not mark the group for deletion'
end
end
@ -252,7 +244,7 @@ RSpec.describe Organizations::GroupsController, feature_category: :cell do
it_behaves_like 'organization - successful response'
it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
it_behaves_like 'deletes the group'
it_behaves_like 'marks the group for deletion'
end
context 'as a group owner' do
@ -262,94 +254,69 @@ RSpec.describe Organizations::GroupsController, feature_category: :cell do
it_behaves_like 'organization - successful response'
it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
it_behaves_like 'deletes the group'
context 'when destroy service raises DestroyError' do
let(:error_message) { "Error deleting group" }
context 'when mark for deletion succeeds' do
it 'marks the group for delayed deletion' do
expect { gitlab_request }.to change { group.reload.marked_for_deletion? }.from(false).to(true)
end
before do
allow_next_instance_of(Groups::DestroyService) do |instance|
allow(instance).to receive(:async_execute)
.and_raise(Groups::DestroyService::DestroyError, error_message)
it 'does not immediately delete the group' do
Sidekiq::Testing.fake! do
expect { gitlab_request }.not_to change { GroupDestroyWorker.jobs.size }
end
end
it 'returns the error message' do
it 'schedules the group for deletion' do
gitlab_request
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['message']).to eq(error_message)
message = format("'%{group_name}' has been scheduled for removal on", group_name: group.name)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['message']).to include(message)
end
end
context 'when delayed deletion feature is available' do
context 'when mark for deletion fails' do
let(:error) { 'error' }
before do
stub_feature_flags(downtier_delayed_deletion: true)
allow(::Groups::MarkForDeletionService).to receive_message_chain(:new, :execute)
.and_return({ status: :error, message: error })
end
context 'when mark for deletion succeeds' do
it 'marks the group for delayed deletion' do
expect { gitlab_request }.to change { group.reload.marked_for_deletion? }.from(false).to(true)
end
it 'does not mark the group for deletion' do
expect { gitlab_request }.not_to change { group.reload.marked_for_deletion? }.from(false)
end
it 'does not immediately delete the group' do
Sidekiq::Testing.fake! do
expect { gitlab_request }.not_to change { GroupDestroyWorker.jobs.size }
end
end
it 'renders the error' do
gitlab_request
it 'schedules the group for deletion' do
gitlab_request
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['message']).to include(error)
end
end
context 'when group is already marked for deletion' do
before do
create(:group_deletion_schedule, group: group, marked_for_deletion_on: Date.current)
end
context 'when permanently_remove param is set' do
it 'deletes the group immediately' do
expect(GroupDestroyWorker).to receive(:perform_async)
delete groups_organization_path(organization, id: group.to_param, permanently_remove: true)
message = format("'%{group_name}' has been scheduled for removal on", group_name: group.name)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['message']).to include(message)
expect(json_response['message']).to include "Group '#{group.name}' is being deleted."
end
end
context 'when mark for deletion fails' do
let(:error) { 'error' }
before do
allow(::Groups::MarkForDeletionService).to receive_message_chain(:new, :execute)
.and_return({ status: :error, message: error })
end
it 'does not mark the group for deletion' do
expect { gitlab_request }.not_to change { group.reload.marked_for_deletion? }.from(false)
end
it 'renders the error' do
context 'when permanently_remove param is not set' do
it 'does nothing' do
gitlab_request
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['message']).to include(error)
end
end
context 'when group is already marked for deletion' do
before do
create(:group_deletion_schedule, group: group, marked_for_deletion_on: Date.current)
end
context 'when permanently_remove param is set' do
it 'deletes the group immediately' do
expect(GroupDestroyWorker).to receive(:perform_async)
delete groups_organization_path(organization, id: group.to_param, permanently_remove: true)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['message']).to include "Group '#{group.name}' is being deleted."
end
end
context 'when permanently_remove param is not set' do
it 'does nothing' do
gitlab_request
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['message']).to include "Group has been already marked for deletion"
end
expect(json_response['message']).to include "Group has been already marked for deletion"
end
end
end
@ -358,7 +325,7 @@ RSpec.describe Organizations::GroupsController, feature_category: :cell do
context 'as a user that is not an owner' do
it_behaves_like 'organization - not found response'
it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
it_behaves_like 'unable to delete the group'
it_behaves_like 'does not mark the group for deletion'
end
context 'as an organization owner' do
@ -369,7 +336,7 @@ RSpec.describe Organizations::GroupsController, feature_category: :cell do
it_behaves_like 'organization - successful response'
it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
it_behaves_like 'deletes the group'
it_behaves_like 'marks the group for deletion'
end
end
end
@ -392,7 +359,7 @@ RSpec.describe Organizations::GroupsController, feature_category: :cell do
it_behaves_like 'organization - not found response'
it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
it_behaves_like 'unable to delete the group'
it_behaves_like 'does not mark the group for deletion'
end
context 'when group does not exist' do
@ -412,7 +379,7 @@ RSpec.describe Organizations::GroupsController, feature_category: :cell do
end
it_behaves_like 'organization - not found response'
it_behaves_like 'unable to delete the group'
it_behaves_like 'does not mark the group for deletion'
end
end
end
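A condensed, hypothetical sketch of the branching these controller examples cover (not the shipped destroy action); the worker arguments are illustrative.

def destroy_group_sketch(group, current_user, permanently_remove: false)
  if group.marked_for_deletion?
    if permanently_remove
      # A second delete with permanently_remove set removes the group immediately.
      GroupDestroyWorker.perform_async(group.id, current_user.id) # argument shape assumed
      { status: :ok, message: "Group '#{group.name}' is being deleted." }
    else
      { status: :unprocessable_entity, message: 'Group has been already marked for deletion' }
    end
  else
    # A first delete only schedules the group for delayed deletion.
    Groups::MarkForDeletionService.new(group, current_user, {}).execute
  end
end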


@ -0,0 +1,128 @@
# frozen_string_literal: true
require 'rubocop_spec_helper'
require_relative '../../../../rubocop/cop/migration/prevent_feature_flags_usage'
RSpec.describe RuboCop::Cop::Migration::PreventFeatureFlagsUsage, feature_category: :database do
include RuboCop::MigrationHelpers
let(:offense) do
"Do not use Feature.enabled? or Feature.disabled? in migrations. " \
"Use the feature_flag_enabled?(feature_name) migration helper method."
end
context 'when in migration' do
before do
allow(cop).to receive(:in_migration?).and_return(true)
end
context 'when using Feature.enabled?' do
it 'registers an offense' do
expect_offense(<<~RUBY)
def change
if Feature.enabled?(:some_feature)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{offense}
do_something
end
end
RUBY
end
it 'registers an offense with a variable' do
expect_offense(<<~RUBY)
def change
feature_name = :some_feature
if Feature.enabled?(feature_name)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{offense}
do_something
end
end
RUBY
end
it 'registers an offense with a string argument' do
expect_offense(<<~RUBY)
def change
if Feature.enabled?('some_feature')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{offense}
do_something
end
end
RUBY
end
end
context 'when using Feature.disabled?' do
it 'registers an offense' do
expect_offense(<<~RUBY)
def change
if Feature.disabled?(:some_feature)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{offense}
do_something
end
end
RUBY
end
it 'registers an offense with a variable' do
expect_offense(<<~RUBY)
def change
feature_name = :some_feature
if Feature.disabled?(feature_name)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{offense}
do_something
end
end
RUBY
end
end
context 'when using feature_flag_enabled? helper' do
it 'does not register an offense' do
expect_no_offenses(<<~RUBY)
def change
if feature_flag_enabled?(:some_feature)
do_something
end
end
RUBY
end
end
context 'when using other methods on Feature' do
it 'does not register an offense' do
expect_no_offenses(<<~RUBY)
def change
Feature.something_else(:some_feature)
end
RUBY
end
end
end
context 'when outside of migration' do
before do
allow(cop).to receive(:in_migration?).and_return(false)
end
it 'does not register an offense for Feature.enabled?' do
expect_no_offenses(<<~RUBY)
def some_method
if Feature.enabled?(:some_feature)
do_something
end
end
RUBY
end
it 'does not register an offense for Feature.disabled?' do
expect_no_offenses(<<~RUBY)
def some_method
if Feature.disabled?(:some_feature)
do_something
end
end
RUBY
end
end
end
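A minimal sketch of a cop matching the behaviour exercised above, not the shipped RuboCop::Cop::Migration::PreventFeatureFlagsUsage: it flags Feature.enabled? and Feature.disabled? in migration code and points authors at the feature_flag_enabled? migration helper named in the offense message. The in_migration? guard is assumed to come from GitLab's RuboCop::MigrationHelpers mixin.

# frozen_string_literal: true

require 'rubocop'

module RuboCop
  module Cop
    module Migration
      class PreventFeatureFlagsUsageSketch < RuboCop::Cop::Base
        include MigrationHelpers # GitLab's RuboCop::MigrationHelpers mixin (assumed available)

        MSG = 'Do not use Feature.enabled? or Feature.disabled? in migrations. ' \
          'Use the feature_flag_enabled?(feature_name) migration helper method.'

        # Matches Feature.enabled?(...) and Feature.disabled?(...) with any arguments.
        def_node_matcher :feature_flag_check?, <<~PATTERN
          (send (const {nil? cbase} :Feature) {:enabled? :disabled?} ...)
        PATTERN

        def on_send(node)
          return unless in_migration?(node)
          return unless feature_flag_check?(node)

          add_offense(node)
        end
      end
    end
  end
end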


@ -98,7 +98,6 @@ module Ci
process_build_duration_s: { count: 1, max: anything, sum: anything },
process_build_runner_matched_duration_s: { count: 1, max: anything, sum: anything },
process_build_present_build_duration_s: { count: 1, max: anything, sum: anything },
present_build_presenter_duration_s: { count: 1, max: anything, sum: anything },
present_build_logs_duration_s: { count: 1, max: anything, sum: anything },
present_build_response_json_duration_s: { count: 1, max: anything, sum: anything },
process_build_assign_runner_duration_s: { count: 1, max: anything, sum: anything },


@ -4,11 +4,10 @@ require 'spec_helper'
RSpec.describe Groups::MarkForDeletionService, feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let(:licensed) { false }
let(:service) { described_class.new(group, user, {}) }
let_it_be_with_reload(:group) { create(:group, owners: user) }
subject(:result) { service.execute(licensed: licensed) }
subject(:result) { service.execute }
context 'when marking the group for deletion' do
context 'with user that can admin the group' do
@ -79,24 +78,6 @@ RSpec.describe Groups::MarkForDeletionService, feature_category: :groups_and_pro
result
end
end
context 'when the downtier_delayed_deletion feature flag is disabled' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
it 'returns error' do
expect(result).to eq({ status: :error, message: 'Cannot mark group for deletion: feature not supported' })
end
context 'when the feature is licensed', unless: Gitlab.ee? do
let(:licensed) { true }
it 'is successful' do
expect(result[:status]).to eq(:success)
end
end
end
end
context 'for a group that has been marked for deletion' do


@ -10,11 +10,10 @@ RSpec.describe Projects::MarkForDeletionService, feature_category: :groups_and_p
let(:original_project_path) { project.path }
let(:original_project_name) { project.name }
let(:licensed) { false }
let(:service) { described_class.new(project, user) }
let(:notification_service) { instance_double(NotificationService) }
subject(:result) { service.execute(licensed: licensed) }
subject(:result) { service.execute }
before do
allow(NotificationService).to receive(:new).and_return(notification_service)
@ -76,51 +75,24 @@ RSpec.describe Projects::MarkForDeletionService, feature_category: :groups_and_p
result
end
end
context 'when project is already marked for deletion' do
let(:marked_for_deletion_at) { 2.days.ago }
before do
project.update!(marked_for_deletion_at: marked_for_deletion_at)
end
it 'does not change original date', :freeze_time, :aggregate_failures do
expect(result[:status]).to eq(:success)
expect(project.marked_for_deletion_at).to eq(marked_for_deletion_at.to_date)
end
it 'does not send notification email' do
expect(NotificationService).not_to receive(:new)
result
end
end
end
context 'with downtier_delayed_deletion feature flag disabled' do
context 'when project is already marked for deletion' do
let(:marked_for_deletion_at) { 2.days.ago }
before do
stub_feature_flags(downtier_delayed_deletion: false)
project.update!(marked_for_deletion_at: marked_for_deletion_at)
end
it 'returns an error response and does not send notification' do
expect(notification_service).not_to receive(:project_scheduled_for_deletion)
expect(result).to eq(status: :error, message: 'Cannot mark project for deletion: feature not supported')
it 'does not change original date', :freeze_time, :aggregate_failures do
expect(result[:status]).to eq(:success)
expect(project.marked_for_deletion_at).to eq(marked_for_deletion_at.to_date)
end
context 'when the feature is licensed', unless: Gitlab.ee? do
let(:licensed) { true }
it 'does not send notification email' do
expect(NotificationService).not_to receive(:new)
it 'is successful' do
expect(result[:status]).to eq(:success)
end
it 'sends project deletion notification' do
allow(project).to receive(:adjourned_deletion?).and_return(true)
expect(notification_service).to receive(:project_scheduled_for_deletion).with(project)
result
end
result
end
end
end


@ -1232,6 +1232,31 @@ RSpec.describe TodoService, feature_category: :notifications do
expect(second_todo.reload).to be_done
expect(third_todo.reload).to be_done
end
it 'creates a pending todo for reviewed merge request author and assignees' do
service.new_review(assigned_mr, member)
should_create_todo(user: john_doe, author: member, target: assigned_mr, action: Todo::REVIEW_SUBMITTED)
should_create_todo(user: assigned_mr.author, author: member, target: assigned_mr, action: Todo::REVIEW_SUBMITTED)
end
context 'when merge request author is the review author' do
it 'does not create a pending todo for reviewed merge request author' do
service.new_review(assigned_mr, assigned_mr.author)
should_create_todo(user: john_doe, author: assigned_mr.author, target: assigned_mr, action: Todo::REVIEW_SUBMITTED)
should_not_create_todo(user: assigned_mr.author, author: assigned_mr.author, target: assigned_mr, action: Todo::REVIEW_SUBMITTED)
end
end
context 'when merge request assignee is the review author' do
it 'does not create a pending todo for reviewed merge request author' do
service.new_review(assigned_mr, john_doe)
should_not_create_todo(user: john_doe, author: john_doe, target: assigned_mr, action: Todo::REVIEW_SUBMITTED)
should_create_todo(user: assigned_mr.author, author: john_doe, target: assigned_mr, action: Todo::REVIEW_SUBMITTED)
end
end
end
end
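A rough sketch, not TodoService itself, of the recipient rule the new examples assert: a submitted review creates REVIEW_SUBMITTED todos for the merge request author and assignees, except for whoever authored the review.

def review_submitted_todo_recipients(merge_request, review_author)
  # uniq covers the case where the author is also an assignee.
  ([merge_request.author] + merge_request.assignees).uniq - [review_author]
end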


@ -2266,7 +2266,6 @@
- './ee/spec/validators/ldap_filter_validator_spec.rb'
- './ee/spec/validators/password/complexity_validator_spec.rb'
- './ee/spec/validators/user_existence_validator_spec.rb'
- './ee/spec/views/admin/application_settings/_deletion_protection_settings.html.haml_spec.rb'
- './ee/spec/views/admin/application_settings/_elasticsearch_form.html.haml_spec.rb'
- './ee/spec/views/admin/application_settings/general.html.haml_spec.rb'
- './ee/spec/views/admin/application_settings/_git_abuse_rate_limit.html.haml_spec.rb'


@ -104,26 +104,19 @@ RSpec.shared_examples 'has expected jobs' do |jobs|
end
end
RSpec.shared_examples 'has expected image tag' do |tag, jobs|
jobs.each do |job|
it "uses image tag #{tag} for job #{job}" do
build = pipeline.builds.find_by(name: job)
image_tag = expand_job_image(build).rpartition(':').last
expect(image_tag).to eql(tag)
end
RSpec.shared_examples 'has expected image' do |job, image|
it "uses image #{image} for job #{job}" do
build = pipeline.builds.find_by(name: job)
expect(expand_job_image(build)).to eql(image)
end
end
RSpec.shared_examples 'uses SECURE_ANALYZERS_PREFIX' do |jobs|
context 'when SECURE_ANALYZERS_PREFIX is set', fips_mode: false do
include_context 'with CI variables', { 'SECURE_ANALYZERS_PREFIX' => 'my.custom-registry' }
jobs.each do |job|
it "uses SECURE_ANALYZERS_PREFIX for the image of job #{job}" do
build = pipeline.builds.find_by(name: job)
image_without_tag = expand_job_image(build).rpartition(':').first
expect(image_without_tag).to start_with('my.custom-registry')
end
jobs.each do |job|
it "uses SECURE_ANALYZERS_PREFIX for the image of job #{job}" do
build = pipeline.builds.find_by(name: job)
image_without_tag = expand_job_image(build).rpartition(':').first
expect(image_without_tag).to start_with('my.custom-registry')
end
end
end
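A hypothetical include of the reworked shared example; the job name and image reference below are placeholders, and pipeline is assumed to come from the surrounding spec context as in the examples above.

it_behaves_like 'has expected image', 'example-analyzer-job',
  'my.custom-registry/analyzers/example:1.2.3'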


@ -14,15 +14,9 @@ RSpec.describe 'admin/application_settings/_deletion_protection_settings', featu
assign(:application_setting, application_setting)
end
context 'when feature flag is enabled' do
before do
stub_feature_flags(downtier_delayed_deletion: true)
end
it 'renders the deletion protection settings app root' do
render
it 'renders the deletion protection settings app root' do
render
expect(rendered).to have_selector('#js-admin-deletion-protection-settings')
end
expect(rendered).to have_selector('#js-admin-deletion-protection-settings')
end
end


@ -6,7 +6,6 @@ RSpec.describe 'groups/settings/_remove.html.haml', feature_category: :groups_an
let_it_be(:group) { build_stubbed(:group) }
before do
stub_feature_flags(downtier_delayed_deletion: false)
allow(view).to receive(:current_user).and_return(double.as_null_object)
end


@ -133,7 +133,11 @@ RSpec.describe 'projects/edit' do
describe 'restoring a project', feature_category: :groups_and_projects do
let_it_be(:organization) { build_stubbed(:organization) }
shared_examples_for 'renders restore project settings' do
context 'when project is pending deletion' do
let_it_be(:project) do
build_stubbed(:project, marked_for_deletion_at: Date.current, organization: organization)
end
it 'renders restore project card and action' do
render
@ -142,7 +146,7 @@ RSpec.describe 'projects/edit' do
end
end
shared_examples_for 'does not render restore project settings' do
context 'when project is not pending deletion' do
it 'does not render restore project card and action' do
render
@ -150,37 +154,5 @@ RSpec.describe 'projects/edit' do
expect(rendered).not_to have_link('Restore project')
end
end
context 'when downtier_delayed_deletion feature flag is enabled' do
context 'when project is pending deletion' do
let_it_be(:project) do
build_stubbed(:project, marked_for_deletion_at: Date.current, organization: organization)
end
it_behaves_like 'renders restore project settings'
end
context 'when project is not pending deletion' do
it_behaves_like 'does not render restore project settings'
end
end
context 'when downtier_delayed_deletion feature flag is disabled' do
before do
stub_feature_flags(downtier_delayed_deletion: false)
end
context 'when project is pending deletion' do
let_it_be(:project) do
build_stubbed(:project, marked_for_deletion_at: Date.current, organization: organization)
end
it_behaves_like 'does not render restore project settings'
end
context 'when project is not pending deletion' do
it_behaves_like 'does not render restore project settings'
end
end
end
end