diff --git a/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_add_note.vue b/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_add_note.vue
index a1e2c3f3c7a..610b34a466f 100644
--- a/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_add_note.vue
+++ b/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_add_note.vue
@@ -64,6 +64,9 @@ export default {
? 'gl-relative gl-display-flex gl-align-items-flex-start gl-flex-nowrap'
: '';
},
+ commentButtonText() {
+ return this.isNewDiscussion ? __('Comment') : __('Reply');
+ },
},
watch: {
showCommentForm: {
@@ -131,7 +134,7 @@ export default {
:abuse-report-id="abuseReportId"
:is-submitting="isSubmitting"
:autosave-key="autosaveKey"
- :is-new-discussion="isNewDiscussion"
+ :comment-button-text="commentButtonText"
@submitForm="addNote"
@cancelEditing="cancelEditing"
/>
diff --git a/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_comment_form.vue b/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_comment_form.vue
index 646ffc690d0..e7ee916fe5d 100644
--- a/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_comment_form.vue
+++ b/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_comment_form.vue
@@ -35,16 +35,16 @@ export default {
type: String,
required: true,
},
- isNewDiscussion: {
- type: Boolean,
- required: false,
- default: false,
- },
initialValue: {
type: String,
required: false,
default: '',
},
+ commentButtonText: {
+ type: String,
+ required: false,
+ default: '',
+ },
},
data() {
return {
@@ -63,9 +63,6 @@ export default {
markdownDocsPath() {
return helpPagePath('user/markdown');
},
- commentButtonText() {
- return this.isNewDiscussion ? __('Comment') : __('Reply');
- },
},
methods: {
setCommentText(newText) {
diff --git a/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_edit_note.vue b/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_edit_note.vue
new file mode 100644
index 00000000000..e2c348f8079
--- /dev/null
+++ b/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_edit_note.vue
@@ -0,0 +1,98 @@
+
+
+
+
+
diff --git a/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_note.vue b/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_note.vue
index 213a7c85fe2..9fea483c036 100644
--- a/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_note.vue
+++ b/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_note.vue
@@ -4,6 +4,8 @@ import SafeHtml from '~/vue_shared/directives/safe_html';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue';
import NoteHeader from '~/notes/components/note_header.vue';
+import EditedAt from '~/issues/show/components/edited.vue';
+import AbuseReportEditNote from './abuse_report_edit_note.vue';
import NoteBody from './abuse_report_note_body.vue';
import AbuseReportNoteActions from './abuse_report_note_actions.vue';
@@ -16,9 +18,11 @@ export default {
GlAvatarLink,
GlAvatar,
TimelineEntryItem,
+ AbuseReportEditNote,
NoteHeader,
NoteBody,
AbuseReportNoteActions,
+ EditedAt,
},
props: {
abuseReportId: {
@@ -35,6 +39,11 @@ export default {
default: false,
},
},
+ data() {
+ return {
+ isEditing: false,
+ };
+ },
computed: {
noteAnchorId() {
return `note_${getIdFromGraphQLId(this.note.id)}`;
@@ -45,10 +54,16 @@ export default {
authorId() {
return getIdFromGraphQLId(this.author.id);
},
+ editedAtClasses() {
+ return this.showReplyButton ? 'gl-text-secondary gl-pl-3' : 'gl-text-secondary gl-pl-8';
+ },
},
methods: {
- startReplying() {
- this.$emit('startReplying');
+ startEditing() {
+ this.isEditing = true;
+ },
+ cancelEditing() {
+ this.isEditing = false;
},
},
};
@@ -71,8 +86,14 @@ export default {
/>
-
-
+
+
+
@@ -93,6 +116,14 @@ export default {
+
+
diff --git a/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_note_actions.vue b/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_note_actions.vue
index 91bfaf60f38..e35e231fc1b 100644
--- a/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_note_actions.vue
+++ b/app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_note_actions.vue
@@ -1,17 +1,30 @@
@@ -23,5 +36,14 @@ export default {
ref="replyButton"
@startReplying="$emit('startReplying')"
/>
+
diff --git a/app/helpers/application_settings_helper.rb b/app/helpers/application_settings_helper.rb
index a2a850ae05f..655fdf8b8ec 100644
--- a/app/helpers/application_settings_helper.rb
+++ b/app/helpers/application_settings_helper.rb
@@ -497,6 +497,7 @@ module ApplicationSettingsHelper
:pipeline_limit_per_project_user_sha,
:invitation_flow_enforcement,
:can_create_group,
+ :bulk_import_concurrent_pipeline_batch_limit,
:bulk_import_enabled,
:bulk_import_max_download_file_size,
:allow_runner_registration_token,
diff --git a/app/models/application_setting.rb b/app/models/application_setting.rb
index 51cdd993c97..99075f04fdd 100644
--- a/app/models/application_setting.rb
+++ b/app/models/application_setting.rb
@@ -814,6 +814,10 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
allow_nil: false,
inclusion: { in: [true, false], message: N_('must be a boolean value') }
+ validates :bulk_import_concurrent_pipeline_batch_limit,
+ presence: true,
+ numericality: { only_integer: true, greater_than: 0 }
+
validates :allow_runner_registration_token,
allow_nil: false,
inclusion: { in: [true, false], message: N_('must be a boolean value') }
diff --git a/app/models/bulk_imports/batch_tracker.rb b/app/models/bulk_imports/batch_tracker.rb
index 8561dfacb6e..09f220b96b0 100644
--- a/app/models/bulk_imports/batch_tracker.rb
+++ b/app/models/bulk_imports/batch_tracker.rb
@@ -8,7 +8,17 @@ module BulkImports
validates :batch_number, presence: true, uniqueness: { scope: :tracker_id }
+ IN_PROGRESS_STATES = %i[created started].freeze
+
scope :by_last_updated, -> { order(updated_at: :desc) }
+ scope :in_progress, -> { with_status(IN_PROGRESS_STATES) }
+
+ # rubocop: disable Database/AvoidUsingPluckWithoutLimit -- We should use this method only when scoped to a tracker.
+ # Batches are self-limiting per tracker based on the amount of data being imported.
+ def self.pluck_batch_numbers
+ pluck(:batch_number)
+ end
+ # rubocop: enable Database/AvoidUsingPluckWithoutLimit
state_machine :status, initial: :created do
state :created, value: 0
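A minimal usage sketch of the new scope and pluck helper, assuming a hypothetical `tracker` variable holding a BulkImports::Tracker; both are intended to be called through a tracker's `batches` association, as the worker changes later in this diff do:

    tracker.batches.in_progress.count    # batches still in the :created or :started state
    tracker.batches.pluck_batch_numbers  # e.g. [1, 2, 3]; bounded because it is scoped to one tracker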
diff --git a/app/models/bulk_imports/export_status.rb b/app/models/bulk_imports/export_status.rb
index 3d820e65d5b..9e3815e7569 100644
--- a/app/models/bulk_imports/export_status.rb
+++ b/app/models/bulk_imports/export_status.rb
@@ -4,6 +4,8 @@ module BulkImports
class ExportStatus
include Gitlab::Utils::StrongMemoize
+ CACHE_KEY = 'bulk_imports/export_status/%{entity_id}/%{relation}'
+
def initialize(pipeline_tracker, relation)
@pipeline_tracker = pipeline_tracker
@relation = relation
@@ -50,11 +52,12 @@ module BulkImports
def status
strong_memoize(:status) do
- status = fetch_status
+ # As an optimization, once an export status has finished or failed it will
+ # be cached, so we do not fetch from the remote source again.
+ status = status_from_cache
+ next status if status
- next status if status.is_a?(Hash) || status.nil?
-
- status.find { |item| item['relation'] == relation }
+ status_from_remote
rescue BulkImports::NetworkError => e
raise BulkImports::RetryPipelineError.new(e.message, 2.seconds) if e.retriable?(pipeline_tracker)
@@ -64,8 +67,38 @@ module BulkImports
end
end
- def fetch_status
- client.get(status_endpoint, relation: relation).parsed_response
+ def status_from_cache
+ status = Gitlab::Cache::Import::Caching.read(cache_key)
+
+ Gitlab::Json.parse(status) if status
+ end
+
+ def status_from_remote
+ raw_status = client.get(status_endpoint, relation: relation).parsed_response
+
+ parse_status_from_remote(raw_status).tap do |status|
+ cache_status(status) if cache_status?(status)
+ end
+ end
+
+ def parse_status_from_remote(status)
+ # Non-batched status
+ return status if status.is_a?(Hash) || status.nil?
+
+ # Batched status
+ status.find { |item| item['relation'] == relation }
+ end
+
+ def cache_status?(status)
+ status.present? && status['status'].in?([Export::FINISHED, Export::FAILED])
+ end
+
+ def cache_status(status)
+ Gitlab::Cache::Import::Caching.write(cache_key, status.to_json)
+ end
+
+ def cache_key
+ Kernel.format(CACHE_KEY, entity_id: entity.id, relation: relation)
end
def status_endpoint
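For illustration, a small sketch of the cache round trip behind `status_from_cache` and `cache_status`, assuming an entity with id 1 and the `labels` relation:

    key = Kernel.format(BulkImports::ExportStatus::CACHE_KEY, entity_id: 1, relation: 'labels')
    # => "bulk_imports/export_status/1/labels"
    Gitlab::Cache::Import::Caching.write(key, { 'status' => BulkImports::Export::FINISHED }.to_json)
    Gitlab::Json.parse(Gitlab::Cache::Import::Caching.read(key))
    # => the cached payload parsed back from JSON, e.g. { "status" => "finished" }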
diff --git a/app/presenters/project_presenter.rb b/app/presenters/project_presenter.rb
index 70488a91808..5226b2c1611 100644
--- a/app/presenters/project_presenter.rb
+++ b/app/presenters/project_presenter.rb
@@ -395,7 +395,7 @@ class ProjectPresenter < Gitlab::View::Presenter::Delegated
else
AnchorData.new(
false,
- statistic_icon + _('Enable Auto DevOps'),
+ content_tag(:span, statistic_icon('plus', 'gl-mr-3') + _('Enable Auto DevOps')),
project_settings_ci_cd_path(project, anchor: 'autodevops-settings')
)
end
@@ -408,7 +408,7 @@ class ProjectPresenter < Gitlab::View::Presenter::Delegated
if can_instantiate_cluster?
if clusters.empty?
if Feature.enabled?(:project_overview_reorg)
- AnchorData.new(false, content_tag(:span, statistic_icon('plus', 'gl-mr-3') + _('Add Kubernetes cluster'), class: 'btn-link'), project_clusters_path(project))
+ AnchorData.new(false, content_tag(:span, statistic_icon('plus', 'gl-mr-3') + _('Add Kubernetes cluster')), project_clusters_path(project))
else
AnchorData.new(false, content_tag(:span, statistic_icon + _('Add Kubernetes cluster')), project_clusters_path(project))
end
@@ -424,7 +424,7 @@ class ProjectPresenter < Gitlab::View::Presenter::Delegated
return unless can_view_pipeline_editor?(project)
if cicd_missing?
- AnchorData.new(false, statistic_icon + _('Set up CI/CD'), project_ci_pipeline_editor_path(project))
+ AnchorData.new(false, content_tag(:span, statistic_icon('plus', 'gl-mr-3') + _('Set up CI/CD')), project_ci_pipeline_editor_path(project))
elsif repository.gitlab_ci_yml.present?
AnchorData.new(false, statistic_icon('rocket') + _('CI/CD configuration'), project_ci_pipeline_editor_path(project), 'btn-default')
end
@@ -484,7 +484,7 @@ class ProjectPresenter < Gitlab::View::Presenter::Delegated
return unless can?(current_user, :admin_project, project)
if Feature.enabled?(:project_overview_reorg)
- AnchorData.new(false, content_tag(:span, statistic_icon('plus', 'gl-blue-500! gl-mr-3') + _('Configure Integrations'), class: 'btn-link'), project_settings_integrations_path(project), nil, nil, nil)
+ AnchorData.new(false, content_tag(:span, statistic_icon('plus', 'gl-blue-500! gl-mr-3') + _('Configure Integrations')), project_settings_integrations_path(project), nil, nil, nil)
else
AnchorData.new(false, content_tag(:span, statistic_icon('settings') + _('Configure Integrations')), project_settings_integrations_path(project), nil, nil, nil)
end
diff --git a/app/views/shared/doorkeeper/applications/_index.html.haml b/app/views/shared/doorkeeper/applications/_index.html.haml
index f28cc64b969..c1aeff9acee 100644
--- a/app/views/shared/doorkeeper/applications/_index.html.haml
+++ b/app/views/shared/doorkeeper/applications/_index.html.haml
@@ -61,8 +61,9 @@
= _("You don't have any applications.")
- else
- .bs-callout.bs-callout-disabled
- = _('Adding new applications is disabled in your GitLab instance. Please contact your GitLab administrator to get the permission.')
+ = render Pajamas::AlertComponent.new(variant: :warning, dismissible: false, alert_options: { class: 'gl-mb-5' }) do |c|
+ - c.with_body do
+ = s_('Adding new applications is disabled in your GitLab instance. Please contact your GitLab administrator to get the permission.')
- if oauth_authorized_applications_enabled
= render Pajamas::CardComponent.new(card_options: { class: 'gl-new-card oauth-authorized-applications' }, header_options: { class: 'gl-new-card-header' }, body_options: { class: 'gl-new-card-body gl-px-0' }) do |c|
diff --git a/app/workers/bulk_imports/finish_batched_pipeline_worker.rb b/app/workers/bulk_imports/finish_batched_pipeline_worker.rb
index 60676f4bd15..2670dc5438d 100644
--- a/app/workers/bulk_imports/finish_batched_pipeline_worker.rb
+++ b/app/workers/bulk_imports/finish_batched_pipeline_worker.rb
@@ -42,7 +42,7 @@ module BulkImports
end
def import_in_progress?
- sorted_batches.any? { |b| b.started? || b.created? }
+ sorted_batches.in_progress.any?
end
def most_recent_batch_stale?
diff --git a/app/workers/bulk_imports/pipeline_worker.rb b/app/workers/bulk_imports/pipeline_worker.rb
index 184b321ca7a..0bb9464c6de 100644
--- a/app/workers/bulk_imports/pipeline_worker.rb
+++ b/app/workers/bulk_imports/pipeline_worker.rb
@@ -4,9 +4,12 @@ module BulkImports
class PipelineWorker
include ApplicationWorker
include ExclusiveLeaseGuard
+ include Gitlab::Utils::StrongMemoize
FILE_EXTRACTION_PIPELINE_PERFORM_DELAY = 10.seconds
+ LimitedBatches = Struct.new(:numbers, :final?, keyword_init: true).freeze
+
DEFER_ON_HEALTH_DELAY = 5.minutes
data_consistency :always
@@ -52,7 +55,6 @@ module BulkImports
try_obtain_lease do
if pipeline_tracker.enqueued? || pipeline_tracker.started?
logger.info(log_attributes(message: 'Pipeline starting'))
-
run
end
end
@@ -84,7 +86,8 @@ module BulkImports
return pipeline_tracker.finish! if export_status.batches_count < 1
- enqueue_batches
+ enqueue_limited_batches
+ re_enqueue unless all_batches_enqueued?
else
log_extra_metadata_on_done(:batched, false)
@@ -194,6 +197,54 @@ module BulkImports
Time.zone.now - (pipeline_tracker.created_at || entity.created_at)
end
+ def enqueue_limited_batches
+ next_batch.numbers.each do |batch_number|
+ batch = pipeline_tracker.batches.create!(batch_number: batch_number)
+
+ with_context(bulk_import_entity_id: entity.id) do
+ ::BulkImports::PipelineBatchWorker.perform_async(batch.id)
+ end
+ end
+
+ log_extra_metadata_on_done(:tracker_batch_numbers_enqueued, next_batch.numbers)
+ log_extra_metadata_on_done(:tracker_final_batch_was_enqueued, next_batch.final?)
+ end
+
+ def all_batches_enqueued?
+ next_batch.final?
+ end
+
+ def next_batch
+ all_batch_numbers = (1..export_status.batches_count).to_a
+
+ created_batch_numbers = pipeline_tracker.batches.pluck_batch_numbers
+
+ remaining_batch_numbers = all_batch_numbers - created_batch_numbers
+
+ if Feature.disabled?(:bulk_import_limit_concurrent_batches, context.portable)
+ return LimitedBatches.new(numbers: remaining_batch_numbers, final?: true)
+ end
+
+ limit = next_batch_count
+
+ LimitedBatches.new(
+ numbers: remaining_batch_numbers.first(limit),
+ final?: remaining_batch_numbers.count <= limit
+ )
+ end
+ strong_memoize_attr :next_batch
+
+ # Calculate the number of batches, up to `batch_limit`, to process in the
+ # next round.
+ def next_batch_count
+ limit = batch_limit - pipeline_tracker.batches.in_progress.limit(batch_limit).count
+ [limit, 0].max
+ end
+
+ def batch_limit
+ ::Gitlab::CurrentSettings.bulk_import_concurrent_pipeline_batch_limit
+ end
+
def lease_timeout
30
end
@@ -201,15 +252,5 @@ module BulkImports
def lease_key
"gitlab:bulk_imports:pipeline_worker:#{pipeline_tracker.id}"
end
-
- def enqueue_batches
- 1.upto(export_status.batches_count) do |batch_number|
- batch = pipeline_tracker.batches.find_or_create_by!(batch_number: batch_number) # rubocop:disable CodeReuse/ActiveRecord
-
- with_context(bulk_import_entity_id: entity.id) do
- ::BulkImports::PipelineBatchWorker.perform_async(batch.id)
- end
- end
- end
end
end
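A minimal sketch, with hypothetical numbers, of the selection performed by `next_batch` and `next_batch_count` above when `bulk_import_concurrent_pipeline_batch_limit` is 2:

    all_batch_numbers       = (1..5).to_a                               # export_status.batches_count == 5
    created_batch_numbers   = [1, 2]                                    # batches already created for this tracker
    remaining_batch_numbers = all_batch_numbers - created_batch_numbers # => [3, 4, 5]

    in_progress_count = 1                                               # batches still :created or :started
    limit             = [2 - in_progress_count, 0].max                  # => 1
    remaining_batch_numbers.first(limit)                                # => [3], enqueued this round
    remaining_batch_numbers.count <= limit                              # => false, so the worker re-enqueues itself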
diff --git a/config/feature_flags/development/bulk_import_limit_concurrent_batches.yml b/config/feature_flags/development/bulk_import_limit_concurrent_batches.yml
new file mode 100644
index 00000000000..4bbd0bd5773
--- /dev/null
+++ b/config/feature_flags/development/bulk_import_limit_concurrent_batches.yml
@@ -0,0 +1,8 @@
+---
+name: bulk_import_limit_concurrent_batches
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/136018
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/431561
+milestone: '16.7'
+type: development
+group: group::import and integrate
+default_enabled: false
diff --git a/db/migrate/20231108132916_index_batch_tracker_status.rb b/db/migrate/20231108132916_index_batch_tracker_status.rb
new file mode 100644
index 00000000000..099cbae6fc1
--- /dev/null
+++ b/db/migrate/20231108132916_index_batch_tracker_status.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class IndexBatchTrackerStatus < Gitlab::Database::Migration[2.2]
+ disable_ddl_transaction!
+
+ milestone '16.7'
+
+ INDEX_NAME = 'index_batch_trackers_on_tracker_id_status'
+
+ def up
+ add_concurrent_index :bulk_import_batch_trackers, [:tracker_id, :status], name: INDEX_NAME
+ end
+
+ def down
+ remove_concurrent_index_by_name :bulk_import_batch_trackers, INDEX_NAME
+ end
+end
diff --git a/db/migrate/20231108143957_add_concurrent_direct_transfer_batch_limit_to_application_settings.rb b/db/migrate/20231108143957_add_concurrent_direct_transfer_batch_limit_to_application_settings.rb
new file mode 100644
index 00000000000..064385e02fc
--- /dev/null
+++ b/db/migrate/20231108143957_add_concurrent_direct_transfer_batch_limit_to_application_settings.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddConcurrentDirectTransferBatchLimitToApplicationSettings < Gitlab::Database::Migration[2.2]
+ milestone '16.7'
+
+ def change
+ add_column :application_settings, :bulk_import_concurrent_pipeline_batch_limit, :smallint, default: 25, null: false
+ end
+end
diff --git a/db/schema_migrations/20231108132916 b/db/schema_migrations/20231108132916
new file mode 100644
index 00000000000..a7c7d98f18c
--- /dev/null
+++ b/db/schema_migrations/20231108132916
@@ -0,0 +1 @@
+4f67f8ebf48cb7ea22e5451c3b548a5f7dc59b0e2b29d51ac73a04860214a25f
\ No newline at end of file
diff --git a/db/schema_migrations/20231108143957 b/db/schema_migrations/20231108143957
new file mode 100644
index 00000000000..ec3f916ea2e
--- /dev/null
+++ b/db/schema_migrations/20231108143957
@@ -0,0 +1 @@
+fc18cfa407a2270af8be9de77b5078544e27afb38e4ad87f3b2c06e24f58add0
\ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index 9fb23cc0cc4..c008eae969b 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -12276,6 +12276,7 @@ CREATE TABLE application_settings (
update_namespace_name_rate_limit smallint DEFAULT 120 NOT NULL,
pre_receive_secret_detection_enabled boolean DEFAULT false NOT NULL,
can_create_organization boolean DEFAULT true NOT NULL,
+ bulk_import_concurrent_pipeline_batch_limit smallint DEFAULT 25 NOT NULL,
web_ide_oauth_application_id integer,
instance_level_ai_beta_features_enabled boolean DEFAULT false NOT NULL,
security_txt_content text,
@@ -31892,6 +31893,8 @@ CREATE INDEX index_badges_on_group_id ON badges USING btree (group_id);
CREATE INDEX index_badges_on_project_id ON badges USING btree (project_id);
+CREATE INDEX index_batch_trackers_on_tracker_id_status ON bulk_import_batch_trackers USING btree (tracker_id, status);
+
CREATE INDEX index_batched_background_migrations_on_status ON batched_background_migrations USING btree (status);
CREATE UNIQUE INDEX index_batched_background_migrations_on_unique_configuration ON batched_background_migrations USING btree (job_class_name, table_name, column_name, job_arguments);
diff --git a/doc/api/settings.md b/doc/api/settings.md
index 0af3444febd..cf34cf3d65e 100644
--- a/doc/api/settings.md
+++ b/doc/api/settings.md
@@ -126,7 +126,8 @@ Example response:
"package_registry_allow_anyone_to_pull_option": true,
"bulk_import_max_download_file_size": 5120,
"project_jobs_api_rate_limit": 600,
- "security_txt_content": null
+ "security_txt_content": null,
+ "bulk_import_concurrent_pipeline_batch_limit": 25
}
```
@@ -272,7 +273,8 @@ Example response:
"package_registry_allow_anyone_to_pull_option": true,
"bulk_import_max_download_file_size": 5120,
"project_jobs_api_rate_limit": 600,
- "security_txt_content": null
+ "security_txt_content": null,
+ "bulk_import_concurrent_pipeline_batch_limit": 25
}
```
@@ -621,6 +623,7 @@ listed in the descriptions of the relevant settings.
| `valid_runner_registrars` | array of strings | no | List of types which are allowed to register a GitLab Runner. Can be `[]`, `['group']`, `['project']` or `['group', 'project']`. |
| `whats_new_variant` | string | no | What's new variant, possible values: `all_tiers`, `current_tier`, and `disabled`. |
| `wiki_page_max_content_bytes` | integer | no | Maximum wiki page content size in **bytes**. Default: 52428800 Bytes (50 MB). The minimum value is 1024 bytes. |
+| `bulk_import_concurrent_pipeline_batch_limit` | integer | no | Maximum simultaneous Direct Transfer pipeline batches to process. |
### Configure inactive project deletion
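A hedged Rails console sketch (assumed administrator session) of inspecting and adjusting the limit documented in the table row above:

    Gitlab::CurrentSettings.bulk_import_concurrent_pipeline_batch_limit               # => 25 (default)
    ApplicationSetting.current.update!(bulk_import_concurrent_pipeline_batch_limit: 2)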
diff --git a/lib/api/settings.rb b/lib/api/settings.rb
index 162e24af099..9de9f19ccd7 100644
--- a/lib/api/settings.rb
+++ b/lib/api/settings.rb
@@ -212,6 +212,7 @@ module API
optional :pipeline_limit_per_project_user_sha, type: Integer, desc: "Maximum number of pipeline creation requests allowed per minute per user and commit. Set to 0 for unlimited requests per minute."
optional :jira_connect_application_key, type: String, desc: "Application ID of the OAuth application that should be used to authenticate with the GitLab for Jira Cloud app"
optional :jira_connect_proxy_url, type: String, desc: "URL of the GitLab instance that should be used as a proxy for the GitLab for Jira Cloud app"
+ optional :bulk_import_concurrent_pipeline_batch_limit, type: Integer, desc: 'Maximum simultaneous Direct Transfer pipeline batches to process'
optional :bulk_import_enabled, type: Boolean, desc: 'Enable migrating GitLab groups and projects by direct transfer'
optional :bulk_import_max_download_file, type: Integer, desc: 'Maximum download file size in MB when importing from source GitLab instances by direct transfer'
optional :allow_runner_registration_token, type: Boolean, desc: 'Allow registering runners using a registration token'
diff --git a/spec/frontend/admin/abuse_report/components/notes/abuse_report_add_note_spec.js b/spec/frontend/admin/abuse_report/components/notes/abuse_report_add_note_spec.js
index 5a3ec3836c5..959b52beaef 100644
--- a/spec/frontend/admin/abuse_report/components/notes/abuse_report_add_note_spec.js
+++ b/spec/frontend/admin/abuse_report/components/notes/abuse_report_add_note_spec.js
@@ -60,7 +60,7 @@ describe('Abuse Report Add Note', () => {
abuseReportId: mockAbuseReportId,
isSubmitting: false,
autosaveKey: `${mockAbuseReportId}-comment`,
- isNewDiscussion: true,
+ commentButtonText: 'Comment',
initialValue: '',
});
});
@@ -211,6 +211,7 @@ describe('Abuse Report Add Note', () => {
await findReplyTextarea().trigger('click');
expect(findAbuseReportCommentForm().exists()).toBe(true);
+ expect(findAbuseReportCommentForm().props('commentButtonText')).toBe('Reply');
});
it('should show comment form if `showCommentForm` is true', () => {
diff --git a/spec/frontend/admin/abuse_report/components/notes/abuse_report_comment_form_spec.js b/spec/frontend/admin/abuse_report/components/notes/abuse_report_comment_form_spec.js
index e521806f8f6..2265ef7d441 100644
--- a/spec/frontend/admin/abuse_report/components/notes/abuse_report_comment_form_spec.js
+++ b/spec/frontend/admin/abuse_report/components/notes/abuse_report_comment_form_spec.js
@@ -36,7 +36,7 @@ describe('Abuse Report Comment Form', () => {
isSubmitting = false,
initialValue = mockInitialValue,
autosaveKey = mockAutosaveKey,
- isNewDiscussion = true,
+ commentButtonText = 'Comment',
} = {}) => {
wrapper = shallowMount(AbuseReportCommentForm, {
propsData: {
@@ -44,7 +44,7 @@ describe('Abuse Report Comment Form', () => {
isSubmitting,
initialValue,
autosaveKey,
- isNewDiscussion,
+ commentButtonText,
},
provide: {
uploadNoteAttachmentPath: 'test-upload-path',
@@ -119,7 +119,7 @@ describe('Abuse Report Comment Form', () => {
});
it('should show `Reply` button if its not a new discussion', () => {
- createComponent({ isNewDiscussion: false });
+ createComponent({ commentButtonText: 'Reply' });
expect(findCommentButton().text()).toBe('Reply');
});
diff --git a/spec/frontend/admin/abuse_report/components/notes/abuse_report_edit_note_spec.js b/spec/frontend/admin/abuse_report/components/notes/abuse_report_edit_note_spec.js
new file mode 100644
index 00000000000..88f243b2501
--- /dev/null
+++ b/spec/frontend/admin/abuse_report/components/notes/abuse_report_edit_note_spec.js
@@ -0,0 +1,129 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { createAlert } from '~/alert';
+import { clearDraft } from '~/lib/utils/autosave';
+import waitForPromises from 'helpers/wait_for_promises';
+import updateNoteMutation from '~/admin/abuse_report/graphql/notes/update_abuse_report_note.mutation.graphql';
+import AbuseReportEditNote from '~/admin/abuse_report/components/notes/abuse_report_edit_note.vue';
+import AbuseReportCommentForm from '~/admin/abuse_report/components/notes/abuse_report_comment_form.vue';
+
+import {
+ mockAbuseReport,
+ mockDiscussionWithNoReplies,
+ editAbuseReportNoteResponse,
+} from '../../mock_data';
+
+jest.mock('~/alert');
+jest.mock('~/lib/utils/autosave');
+Vue.use(VueApollo);
+
+describe('Abuse Report Edit Note', () => {
+ let wrapper;
+
+ const mockAbuseReportId = mockAbuseReport.report.globalId;
+ const mockNote = mockDiscussionWithNoReplies[0];
+
+ const mutationSuccessHandler = jest.fn().mockResolvedValue(editAbuseReportNoteResponse);
+
+ const findAbuseReportCommentForm = () => wrapper.findComponent(AbuseReportCommentForm);
+
+ const createComponent = ({
+ mutationHandler = mutationSuccessHandler,
+ abuseReportId = mockAbuseReportId,
+ discussionId = '',
+ note = mockNote,
+ } = {}) => {
+ wrapper = shallowMountExtended(AbuseReportEditNote, {
+ apolloProvider: createMockApollo([[updateNoteMutation, mutationHandler]]),
+ propsData: {
+ abuseReportId,
+ discussionId,
+ note,
+ },
+ });
+ };
+
+ describe('Default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should show the comment form', () => {
+ expect(findAbuseReportCommentForm().exists()).toBe(true);
+ expect(findAbuseReportCommentForm().props()).toMatchObject({
+ abuseReportId: mockAbuseReportId,
+ isSubmitting: false,
+ autosaveKey: `${mockNote.id}-comment`,
+ commentButtonText: 'Save comment',
+ initialValue: mockNote.body,
+ });
+ });
+ });
+
+ describe('Editing a comment', () => {
+ const noteText = 'Updated comment';
+
+ beforeEach(() => {
+ createComponent();
+
+ findAbuseReportCommentForm().vm.$emit('submitForm', {
+ commentText: noteText,
+ });
+ });
+
+ it('should call the mutation with provided noteText', async () => {
+ expect(findAbuseReportCommentForm().props('isSubmitting')).toBe(true);
+
+ expect(mutationSuccessHandler).toHaveBeenCalledWith({
+ input: {
+ id: mockNote.id,
+ body: noteText,
+ },
+ });
+
+ await waitForPromises();
+
+ expect(findAbuseReportCommentForm().props('isSubmitting')).toBe(false);
+ });
+
+ it('should clear draft from local storage', async () => {
+ await waitForPromises();
+
+ expect(clearDraft).toHaveBeenCalledWith(`${mockNote.id}-comment`);
+ });
+
+ it('should emit `cancelEditing` event', async () => {
+ await waitForPromises();
+
+ expect(wrapper.emitted('cancelEditing')).toHaveLength(1);
+ });
+
+ it.each`
+ description | errorResponse
+ ${'with an error response'} | ${new Error('The note could not be found')}
+ ${'without an error response'} | ${null}
+ `('should show an error when mutation fails $description', async ({ errorResponse }) => {
+ createComponent({
+ mutationHandler: jest.fn().mockRejectedValue(errorResponse),
+ });
+
+ findAbuseReportCommentForm().vm.$emit('submitForm', {
+ commentText: noteText,
+ });
+
+ await waitForPromises();
+
+ const errorMessage = errorResponse
+ ? 'Your comment could not be updated because the note could not be found.'
+ : 'Something went wrong while editing your comment. Please try again.';
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: errorMessage,
+ captureError: true,
+ parent: wrapper.vm.$el,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_actions_spec.js b/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_actions_spec.js
index b24da749004..1ddfb6145fc 100644
--- a/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_actions_spec.js
+++ b/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_actions_spec.js
@@ -1,17 +1,23 @@
+import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { createMockDirective } from 'helpers/vue_mock_directive';
import ReplyButton from '~/notes/components/note_actions/reply_button.vue';
import AbuseReportNoteActions from '~/admin/abuse_report/components/notes/abuse_report_note_actions.vue';
describe('Abuse Report Note Actions', () => {
let wrapper;
- const mockShowReplyButton = true;
const findReplyButton = () => wrapper.findComponent(ReplyButton);
+ const findEditButton = () => wrapper.findComponent(GlButton);
- const createComponent = ({ showReplyButton = mockShowReplyButton } = {}) => {
+ const createComponent = ({ showReplyButton = true, showEditButton = true } = {}) => {
wrapper = shallowMount(AbuseReportNoteActions, {
propsData: {
showReplyButton,
+ showEditButton,
+ },
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
},
});
};
@@ -25,11 +31,26 @@ describe('Abuse Report Note Actions', () => {
expect(findReplyButton().exists()).toBe(true);
});
- it('should emit `startReplying`', () => {
+ it('should emit `startReplying` when reply button is clicked', () => {
findReplyButton().vm.$emit('startReplying');
expect(wrapper.emitted('startReplying')).toHaveLength(1);
});
+
+ it('should show edit button', () => {
+ expect(findEditButton().exists()).toBe(true);
+ expect(findEditButton().attributes()).toMatchObject({
+ icon: 'pencil',
+ title: 'Edit comment',
+ 'aria-label': 'Edit comment',
+ });
+ });
+
+ it('should emit `startEditing` when edit button is clicked', () => {
+ findEditButton().vm.$emit('click');
+
+ expect(wrapper.emitted('startEditing')).toHaveLength(1);
+ });
});
describe('When `showReplyButton` is false', () => {
@@ -43,4 +64,16 @@ describe('Abuse Report Note Actions', () => {
expect(findReplyButton().exists()).toBe(false);
});
});
+
+ describe('When `showEditButton` is false', () => {
+ beforeEach(() => {
+ createComponent({
+ showEditButton: false,
+ });
+ });
+
+ it('should not show edit button', () => {
+ expect(findEditButton().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_spec.js b/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_spec.js
index 33be59898e9..f0174cd7b4a 100644
--- a/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_spec.js
+++ b/spec/frontend/admin/abuse_report/components/notes/abuse_report_note_spec.js
@@ -2,7 +2,9 @@ import { shallowMount } from '@vue/test-utils';
import { GlAvatarLink, GlAvatar } from '@gitlab/ui';
import AbuseReportNote from '~/admin/abuse_report/components/notes/abuse_report_note.vue';
import NoteHeader from '~/notes/components/note_header.vue';
+import EditedAt from '~/issues/show/components/edited.vue';
import AbuseReportNoteBody from '~/admin/abuse_report/components/notes/abuse_report_note_body.vue';
+import AbuseReportEditNote from '~/admin/abuse_report/components/notes/abuse_report_edit_note.vue';
import AbuseReportNoteActions from '~/admin/abuse_report/components/notes/abuse_report_note_actions.vue';
import { mockAbuseReport, mockDiscussionWithNoReplies } from '../../mock_data';
@@ -18,6 +20,10 @@ describe('Abuse Report Note', () => {
const findNoteHeader = () => wrapper.findComponent(NoteHeader);
const findNoteBody = () => wrapper.findComponent(AbuseReportNoteBody);
+
+ const findEditNote = () => wrapper.findComponent(AbuseReportEditNote);
+ const findEditedAt = () => wrapper.findComponent(EditedAt);
+
const findNoteActions = () => wrapper.findComponent(AbuseReportNoteActions);
const createComponent = ({
@@ -86,11 +92,84 @@ describe('Abuse Report Note', () => {
});
});
+ describe('Editing', () => {
+ it('should not be in edit mode by default', () => {
+ expect(findEditNote().exists()).toBe(false);
+ });
+
+ it('should trigger edit mode when `startEditing` event is emitted', async () => {
+ await findNoteActions().vm.$emit('startEditing');
+
+ expect(findEditNote().exists()).toBe(true);
+ expect(findEditNote().props()).toMatchObject({
+ abuseReportId: mockAbuseReportId,
+ note: mockNote,
+ });
+
+ expect(findNoteHeader().exists()).toBe(false);
+ expect(findNoteBody().exists()).toBe(false);
+ });
+
+ it('should hide edit mode when `cancelEditing` event is emitted', async () => {
+ await findNoteActions().vm.$emit('startEditing');
+ await findEditNote().vm.$emit('cancelEditing');
+
+ expect(findEditNote().exists()).toBe(false);
+
+ expect(findNoteHeader().exists()).toBe(true);
+ expect(findNoteBody().exists()).toBe(true);
+ });
+ });
+
+ describe('Edited At', () => {
+ it('should not show edited-at if lastEditedBy is null', () => {
+ expect(findEditedAt().exists()).toBe(false);
+ });
+
+ it('should show edited-at if lastEditedBy is not null', () => {
+ createComponent({
+ note: {
+ ...mockNote,
+ lastEditedBy: { name: 'user', webPath: '/user' },
+ lastEditedAt: '2023-10-20T02:46:50Z',
+ },
+ });
+
+ expect(findEditedAt().exists()).toBe(true);
+
+ expect(findEditedAt().props()).toMatchObject({
+ updatedAt: '2023-10-20T02:46:50Z',
+ updatedByName: 'user',
+ updatedByPath: '/user',
+ });
+
+ expect(findEditedAt().classes()).toEqual(
+ expect.arrayContaining(['gl-text-secondary', 'gl-pl-3']),
+ );
+ });
+
+ it('should add the correct classList when showReplyButton is false', () => {
+ createComponent({
+ note: {
+ ...mockNote,
+ lastEditedBy: { name: 'user', webPath: '/user' },
+ lastEditedAt: '2023-10-20T02:46:50Z',
+ },
+ showReplyButton: false,
+ });
+
+ expect(findEditedAt().classes()).toEqual(
+ expect.arrayContaining(['gl-text-secondary', 'gl-pl-8']),
+ );
+ });
+ });
+
describe('Actions', () => {
it('should show note actions', () => {
expect(findNoteActions().exists()).toBe(true);
expect(findNoteActions().props()).toMatchObject({
showReplyButton: mockShowReplyButton,
+ showEditButton: true,
});
});
diff --git a/spec/frontend/admin/abuse_report/mock_data.js b/spec/frontend/admin/abuse_report/mock_data.js
index 06ea7841547..a958c4d30b1 100644
--- a/spec/frontend/admin/abuse_report/mock_data.js
+++ b/spec/frontend/admin/abuse_report/mock_data.js
@@ -139,7 +139,7 @@ export const mockDiscussionWithNoReplies = [
body: 'Comment 1',
bodyHtml: '\u003cp data-sourcepos="1:1-1:9" dir="auto"\u003eComment 1\u003c/p\u003e',
createdAt: '2023-10-19T06:11:13Z',
- lastEditedAt: '2023-10-20T02:46:50Z',
+ lastEditedAt: null,
url: 'http://127.0.0.1:3000/admin/abuse_reports/1#note_1',
resolved: false,
author: {
@@ -355,7 +355,7 @@ export const createAbuseReportNoteResponse = {
body: 'Another comment',
bodyHtml: 'Another comment',
createdAt: '2023-11-02T02:45:46Z',
- lastEditedAt: '2023-11-02T02:45:46Z',
+ lastEditedAt: null,
url: 'http://127.0.0.1:3000/admin/abuse_reports/20#note_6',
resolved: false,
author: {
@@ -389,3 +389,34 @@ export const createAbuseReportNoteResponse = {
},
},
};
+
+export const editAbuseReportNoteResponse = {
+ data: {
+ updateNote: {
+ errors: [],
+ note: {
+ id: 'gid://gitlab/Note/1',
+ body: 'Updated comment',
+ bodyHtml: 'Updated comment',
+ createdAt: '2023-10-20T07:47:42Z',
+ lastEditedAt: '2023-10-20T07:47:42Z',
+ url: 'http://127.0.0.1:3000/admin/abuse_reports/1#note_1',
+ resolved: false,
+ author: {
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'http://127.0.0.1:3000/root',
+ __typename: 'UserCore',
+ },
+ lastEditedBy: 'root',
+ userPermissions: {
+ adminNote: true,
+ __typename: 'NotePermissions',
+ },
+ },
+ },
+ },
+};
diff --git a/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js b/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
index c1c3c1fea91..f3709e67037 100644
--- a/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
+++ b/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
@@ -1,11 +1,11 @@
import { GlLink, GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
-import { shallowMount, mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import createMockApollo from 'helpers/mock_apollo_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
@@ -97,59 +97,56 @@ describe('SidebarDropdownWidget', () => {
...requestHandlers,
]);
- wrapper = extendedWrapper(
- mount(SidebarDropdownWidget, {
- provide: { canUpdate: true },
- apolloProvider: mockApollo,
- propsData: {
- workspacePath: mockIssue.projectPath,
- attrWorkspacePath: mockIssue.projectPath,
- iid: mockIssue.iid,
- issuableType: TYPE_ISSUE,
- issuableAttribute: IssuableAttributeType.Milestone,
- },
- attachTo: document.body,
- }),
- );
+ wrapper = mountExtended(SidebarDropdownWidget, {
+ provide: { canUpdate: true },
+ apolloProvider: mockApollo,
+ propsData: {
+ workspacePath: mockIssue.projectPath,
+ attrWorkspacePath: mockIssue.projectPath,
+ iid: mockIssue.iid,
+ issuableType: TYPE_ISSUE,
+ issuableAttribute: IssuableAttributeType.Milestone,
+ },
+ attachTo: document.body,
+ });
await waitForApollo();
};
const createComponent = ({ data = {}, mutationPromise = mutationSuccess, queries = {} } = {}) => {
- wrapper = extendedWrapper(
- shallowMount(SidebarDropdownWidget, {
- provide: { canUpdate: true },
- data() {
- return data;
- },
- propsData: {
- workspacePath: '',
- attrWorkspacePath: '',
- iid: '',
- issuableType: TYPE_ISSUE,
- issuableAttribute: IssuableAttributeType.Milestone,
- },
- mocks: {
- $apollo: {
- mutate: mutationPromise(),
- queries: {
- issuable: { loading: false },
- attributesList: { loading: false },
- ...queries,
- },
+ wrapper = shallowMountExtended(SidebarDropdownWidget, {
+ provide: { canUpdate: true },
+ data() {
+ return data;
+ },
+ propsData: {
+ workspacePath: '',
+ attrWorkspacePath: '',
+ iid: '',
+ issuableType: TYPE_ISSUE,
+ issuableAttribute: IssuableAttributeType.Milestone,
+ },
+ mocks: {
+ $apollo: {
+ mutate: mutationPromise(),
+ queries: {
+ issuable: { loading: false },
+ attributesList: { loading: false },
+ ...queries,
},
},
- directives: {
- GlTooltip: createMockDirective('gl-tooltip'),
- },
- stubs: {
- SidebarEditableItem,
- GlSearchBoxByType,
- },
- }),
- );
-
- wrapper.vm.$refs.dropdown.show = jest.fn();
+ },
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ stubs: {
+ SidebarEditableItem,
+ GlSearchBoxByType,
+ SidebarDropdown: stubComponent(SidebarDropdown, {
+ methods: { show: jest.fn() },
+ }),
+ },
+ });
};
describe('when not editing', () => {
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 05e3bffbeaf..d16a78be533 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { expect(setting.default_branch_protection_defaults).to eq({}) }
it { expect(setting.max_decompressed_archive_size).to eq(25600) }
it { expect(setting.decompress_archive_file_timeout).to eq(210) }
+ it { expect(setting.bulk_import_concurrent_pipeline_batch_limit).to eq(25) }
end
describe 'validations' do
@@ -1351,6 +1352,13 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
.with_message("must be a value between 0 and 1")
end
end
+
+ describe 'bulk_import_concurrent_pipeline_batch_limit' do
+ it do
+ is_expected.to validate_numericality_of(:bulk_import_concurrent_pipeline_batch_limit)
+ .is_greater_than(0)
+ end
+ end
end
context 'restrict creating duplicates' do
diff --git a/spec/models/bulk_imports/batch_tracker_spec.rb b/spec/models/bulk_imports/batch_tracker_spec.rb
index 336943228c7..1c7cbc0cb8c 100644
--- a/spec/models/bulk_imports/batch_tracker_spec.rb
+++ b/spec/models/bulk_imports/batch_tracker_spec.rb
@@ -13,4 +13,19 @@ RSpec.describe BulkImports::BatchTracker, type: :model, feature_category: :impor
it { is_expected.to validate_presence_of(:batch_number) }
it { is_expected.to validate_uniqueness_of(:batch_number).scoped_to(:tracker_id) }
end
+
+ describe 'scopes' do
+ describe '.in_progress' do
+ it 'returns only batches that are in progress' do
+ created = create(:bulk_import_batch_tracker, :created)
+ started = create(:bulk_import_batch_tracker, :started)
+ create(:bulk_import_batch_tracker, :finished)
+ create(:bulk_import_batch_tracker, :timeout)
+ create(:bulk_import_batch_tracker, :failed)
+ create(:bulk_import_batch_tracker, :skipped)
+
+ expect(described_class.in_progress).to contain_exactly(created, started)
+ end
+ end
+ end
end
diff --git a/spec/models/bulk_imports/export_status_spec.rb b/spec/models/bulk_imports/export_status_spec.rb
index e7a037b54be..aa3bce78534 100644
--- a/spec/models/bulk_imports/export_status_spec.rb
+++ b/spec/models/bulk_imports/export_status_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::ExportStatus, feature_category: :importers do
+RSpec.describe BulkImports::ExportStatus, :clean_gitlab_redis_cache, feature_category: :importers do
let_it_be(:relation) { 'labels' }
let_it_be(:import) { create(:bulk_import) }
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: import) }
@@ -282,4 +282,111 @@ RSpec.describe BulkImports::ExportStatus, feature_category: :importers do
end
end
end
+
+ describe 'caching' do
+ let(:cached_status) do
+ subject.send(:status)
+ subject.send(:status_from_cache)
+ end
+
+ shared_examples 'does not result in a cached status' do
+ specify do
+ expect(cached_status).to be_nil
+ end
+ end
+
+ shared_examples 'results in a cached status' do
+ specify do
+ expect(cached_status).to include('status' => status)
+ end
+
+ context 'when something goes wrong during export status fetch' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:get).and_raise(
+ BulkImports::NetworkError.new("Unsuccessful response", response: nil)
+ )
+ end
+ end
+
+ include_examples 'does not result in a cached status'
+ end
+ end
+
+ context 'when export status is started' do
+ let(:status) { BulkImports::Export::STARTED }
+
+ it_behaves_like 'does not result in a cached status'
+ end
+
+ context 'when export status is failed' do
+ let(:status) { BulkImports::Export::FAILED }
+
+ it_behaves_like 'results in a cached status'
+ end
+
+ context 'when export status is finished' do
+ let(:status) { BulkImports::Export::FINISHED }
+
+ it_behaves_like 'results in a cached status'
+ end
+
+ context 'when export status is not present' do
+ let(:status) { nil }
+
+ it_behaves_like 'does not result in a cached status'
+ end
+
+ context 'when the cache is empty' do
+ let(:status) { BulkImports::Export::FAILED }
+
+ it 'fetches the status from the remote' do
+ expect(subject).to receive(:status_from_remote).and_call_original
+ expect(subject.send(:status)).to include('status' => status)
+ end
+ end
+
+ context 'when the cache is not empty' do
+ let(:status) { BulkImports::Export::FAILED }
+
+ before do
+ Gitlab::Cache::Import::Caching.write(
+ described_class.new(tracker, 'labels').send(:cache_key),
+ { 'status' => 'mock status' }.to_json
+ )
+ end
+
+ it 'does not fetch the status from the remote' do
+ expect(subject).not_to receive(:status_from_remote)
+ expect(subject.send(:status)).to eq({ 'status' => 'mock status' })
+ end
+
+ context 'with a different entity' do
+ before do
+ tracker.entity = create(:bulk_import_entity, bulk_import: import, source_full_path: 'foo')
+ end
+
+ it 'fetches the status from the remote' do
+ expect(subject).to receive(:status_from_remote).and_call_original
+ expect(subject.send(:status)).to include('status' => status)
+ end
+ end
+
+ context 'with a different relation' do
+ let_it_be(:relation) { 'merge_requests' }
+
+ let(:response_double) do
+ instance_double(HTTParty::Response, parsed_response: [
+ { 'relation' => 'labels', 'status' => status },
+ { 'relation' => 'merge_requests', 'status' => status }
+ ])
+ end
+
+ it 'fetches the status from the remote' do
+ expect(subject).to receive(:status_from_remote).and_call_original
+ expect(subject.send(:status)).to include('status' => status)
+ end
+ end
+ end
+ end
end
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index c304ae514a6..4e24689c17a 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -93,6 +93,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['default_branch_protection_defaults']).to be_kind_of(Hash)
expect(json_response['max_login_attempts']).to be_nil
expect(json_response['failed_login_attempts_unlock_period_in_minutes']).to be_nil
+ expect(json_response['bulk_import_concurrent_pipeline_batch_limit']).to eq(25)
end
end
@@ -202,6 +203,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
jira_connect_proxy_url: 'http://example.com',
bulk_import_enabled: false,
bulk_import_max_download_file_size: 1,
+ bulk_import_concurrent_pipeline_batch_limit: 2,
allow_runner_registration_token: true,
user_defaults_to_private_profile: true,
default_syntax_highlighting_theme: 2,
@@ -296,6 +298,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['max_import_remote_file_size']).to be(2)
expect(json_response['bulk_import_max_download_file_size']).to be(1)
expect(json_response['security_txt_content']).to be(nil)
+ expect(json_response['bulk_import_concurrent_pipeline_batch_limit']).to be(2)
end
end
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index 19688596a18..368c7537641 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
end
end
- include_examples 'an idempotent worker' do
+ it_behaves_like 'an idempotent worker' do
let(:job_args) { [pipeline_tracker.id, pipeline_tracker.stage, entity.id] }
it 'runs the pipeline and sets tracker to finished' do
@@ -478,8 +478,8 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
end
end
- context 'when export is batched' do
- let(:batches_count) { 2 }
+ context 'when export is batched', :aggregate_failures do
+ let(:batches_count) { 3 }
before do
allow_next_instance_of(BulkImports::ExportStatus) do |status|
@@ -489,10 +489,14 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
allow(status).to receive(:empty?).and_return(false)
allow(status).to receive(:failed?).and_return(false)
end
+ allow(worker).to receive(:log_extra_metadata_on_done).and_call_original
end
it 'enqueues pipeline batches' do
- expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).twice
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).exactly(3).times
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:batched, true)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 2, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
@@ -500,7 +504,24 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
expect(pipeline_tracker.status_name).to eq(:started)
expect(pipeline_tracker.batched).to eq(true)
- expect(pipeline_tracker.batches.count).to eq(batches_count)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
+ end
+
+ it 'enqueues only missing pipeline batches' do
+ create(:bulk_import_batch_tracker, tracker: pipeline_tracker, batch_number: 2)
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).twice
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
end
context 'when batches count is less than 1' do
@@ -514,6 +535,127 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
expect(pipeline_tracker.reload.status_name).to eq(:finished)
end
end
+
+ context 'when pipeline batch enqueuing should be limited' do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ allow(::Gitlab::CurrentSettings).to receive(:bulk_import_concurrent_pipeline_batch_limit).and_return(2)
+ end
+
+ it 'only enqueues limited batches and reenqueues itself' do
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).twice
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 2])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, false)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2)
+ expect(described_class.jobs).to contain_exactly(
+ hash_including(
+ 'args' => [pipeline_tracker.id, pipeline_tracker.stage, entity.id],
+ 'scheduled_at' => be_within(1).of(10.seconds.from_now.to_i)
+ )
+ )
+ end
+
+ context 'when there is a batch in progress' do
+ where(:status) { BulkImports::BatchTracker::IN_PROGRESS_STATES }
+
+ with_them do
+ before do
+ create(:bulk_import_batch_tracker, status, batch_number: 1, tracker: pipeline_tracker)
+ end
+
+ it 'counts the in progress batch against the limit' do
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).once
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [2])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, false)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2)
+ expect(described_class.jobs).to contain_exactly(
+ hash_including(
+ 'args' => [pipeline_tracker.id, pipeline_tracker.stage, entity.id],
+ 'scheduled_at' => be_within(1).of(10.seconds.from_now.to_i)
+ )
+ )
+ end
+ end
+ end
+
+ context 'when there is a batch that has finished' do
+ where(:status) do
+ all_statuses = BulkImports::BatchTracker.state_machines[:status].states.map(&:name)
+ all_statuses - BulkImports::BatchTracker::IN_PROGRESS_STATES
+ end
+
+ with_them do
+ before do
+ create(:bulk_import_batch_tracker, status, batch_number: 1, tracker: pipeline_tracker)
+ end
+
+ it 'does not count the finished batch against the limit' do
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).twice
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [2, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
+ end
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(bulk_import_limit_concurrent_batches: false)
+ end
+
+ it 'does not limit batches' do
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).exactly(3).times
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 2, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
+ end
+
+ it 'still enqueues only missing pipeline batches' do
+ create(:bulk_import_batch_tracker, tracker: pipeline_tracker, batch_number: 2)
+ expect(BulkImports::PipelineBatchWorker).to receive(:perform_async).twice
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_batch_numbers_enqueued, [1, 3])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:tracker_final_batch_was_enqueued, true)
+
+ worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:started)
+ expect(pipeline_tracker.batched).to eq(true)
+ expect(pipeline_tracker.batches.pluck_batch_numbers).to contain_exactly(1, 2, 3)
+ expect(described_class.jobs).to be_empty
+ end
+ end
+ end
end
end
end