diff --git a/.gitlab/issue_templates/Geo Replicate a new Git repository type.md b/.gitlab/issue_templates/Geo Replicate a new Git repository type.md
index 30a5b53ade6..61fbf1aadda 100644
--- a/.gitlab/issue_templates/Geo Replicate a new Git repository type.md
+++ b/.gitlab/issue_templates/Geo Replicate a new Git repository type.md
@@ -485,10 +485,19 @@ That's all of the required database changes.
end
trait :verification_succeeded do
+ synced
verification_checksum { 'e079a831cab27bcda7d81cd9b48296d0c3dd92ef' }
verification_state { Geo::CoolWidgetRegistry.verification_state_value(:verification_succeeded) }
verified_at { 5.days.ago }
end
+
+ trait :verification_failed do
+ synced
+ verification_failure { 'Could not calculate the checksum' }
+ verification_state { Geo::CoolWidgetRegistry.verification_state_value(:verification_failed) }
+ verification_retry_count { 1 }
+ verification_retry_at { 2.hours.from_now }
+ end
end
end
```
@@ -519,15 +528,15 @@ That's all of the required database changes.
FactoryBot.modify do
factory :cool_widget do
trait :verification_succeeded do
- repository
- verification_checksum { 'abc' }
- verification_state { CoolWidget.verification_state_value(:verification_succeeded) }
+ repository
+ verification_checksum { 'abc' }
+ verification_state { CoolWidget.verification_state_value(:verification_succeeded) }
end
trait :verification_failed do
- repository
- verification_failure { 'Could not calculate the checksum' }
- verification_state { CoolWidget.verification_state_value(:verification_failed) }
+ repository
+ verification_failure { 'Could not calculate the checksum' }
+ verification_state { CoolWidget.verification_state_value(:verification_failed) }
end
end
end
diff --git a/.gitlab/issue_templates/Geo Replicate a new blob type.md b/.gitlab/issue_templates/Geo Replicate a new blob type.md
index 228f19ea861..cc5b764f7a2 100644
--- a/.gitlab/issue_templates/Geo Replicate a new blob type.md
+++ b/.gitlab/issue_templates/Geo Replicate a new blob type.md
@@ -442,10 +442,19 @@ That's all of the required database changes.
end
trait :verification_succeeded do
+ synced
verification_checksum { 'e079a831cab27bcda7d81cd9b48296d0c3dd92ef' }
verification_state { Geo::CoolWidgetRegistry.verification_state_value(:verification_succeeded) }
verified_at { 5.days.ago }
end
+
+ trait :verification_failed do
+ synced
+ verification_failure { 'Could not calculate the checksum' }
+ verification_state { Geo::CoolWidgetRegistry.verification_state_value(:verification_failed) }
+ verification_retry_count { 1 }
+ verification_retry_at { 2.hours.from_now }
+ end
end
end
```
@@ -468,7 +477,7 @@ That's all of the required database changes.
end
```
-- [ ] Add the following to `spec/factories/cool_widgets.rb`:
+- [ ] Add the following to `ee/spec/factories/cool_widgets.rb`:
```ruby
# frozen_string_literal: true
@@ -476,15 +485,24 @@ That's all of the required database changes.
FactoryBot.modify do
factory :cool_widget do
trait :verification_succeeded do
- with_file
- verification_checksum { 'abc' }
- verification_state { CoolWidget.verification_state_value(:verification_succeeded) }
+ with_file
+ verification_checksum { 'abc' }
+ verification_state { CoolWidget.verification_state_value(:verification_succeeded) }
end
trait :verification_failed do
- with_file
- verification_failure { 'Could not calculate the checksum' }
- verification_state { CoolWidget.verification_state_value(:verification_failed) }
+ with_file
+ verification_failure { 'Could not calculate the checksum' }
+ verification_state { CoolWidget.verification_state_value(:verification_failed) }
+
+ #
+ # Geo::VerifiableReplicator#after_verifiable_update tries to verify
+ # the replicable async and marks it as verification started when the
+ # model record is created/updated.
+ #
+ after(:create) do |instance, _|
+ instance.verification_failed!
+ end
end
end
end
diff --git a/.rubocop_todo/gitlab/strong_memoize_attr.yml b/.rubocop_todo/gitlab/strong_memoize_attr.yml
index 23fb60dd384..c40f51cf04e 100644
--- a/.rubocop_todo/gitlab/strong_memoize_attr.yml
+++ b/.rubocop_todo/gitlab/strong_memoize_attr.yml
@@ -217,7 +217,6 @@ Gitlab/StrongMemoizeAttr:
- 'app/services/quick_actions/interpret_service.rb'
- 'app/services/releases/base_service.rb'
- 'app/services/resource_access_tokens/revoke_service.rb'
- - 'app/services/resource_events/base_synthetic_notes_builder_service.rb'
- 'app/services/search/global_service.rb'
- 'app/services/search/project_service.rb'
- 'app/services/search_service.rb'
diff --git a/.rubocop_todo/rspec/feature_category.yml b/.rubocop_todo/rspec/feature_category.yml
index 6753f97efae..0c14824eb34 100644
--- a/.rubocop_todo/rspec/feature_category.yml
+++ b/.rubocop_todo/rspec/feature_category.yml
@@ -2573,7 +2573,6 @@ RSpec/FeatureCategory:
- 'spec/lib/api/entities/application_setting_spec.rb'
- 'spec/lib/api/entities/branch_spec.rb'
- 'spec/lib/api/entities/bulk_import_spec.rb'
- - 'spec/lib/api/entities/bulk_imports/entity_failure_spec.rb'
- 'spec/lib/api/entities/bulk_imports/entity_spec.rb'
- 'spec/lib/api/entities/bulk_imports/export_status_spec.rb'
- 'spec/lib/api/entities/changelog_spec.rb'
@@ -2695,9 +2694,7 @@ RSpec/FeatureCategory:
- 'spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb'
- - 'spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb'
- - 'spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb'
diff --git a/Gemfile b/Gemfile
index b6bae47c047..3fee0267294 100644
--- a/Gemfile
+++ b/Gemfile
@@ -196,7 +196,7 @@ gem 'seed-fu', '~> 2.3.7' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'elasticsearch-model', '~> 7.2' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'elasticsearch-rails', '~> 7.2', require: 'elasticsearch/rails/instrumentation' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'elasticsearch-api', '7.13.3' # rubocop:todo Gemfile/MissingFeatureCategory
-gem 'aws-sdk-core', '~> 3.185.1' # rubocop:todo Gemfile/MissingFeatureCategory
+gem 'aws-sdk-core', '~> 3.185.2' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'aws-sdk-cloudformation', '~> 1' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'aws-sdk-s3', '~> 1.136.0' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'faraday_middleware-aws-sigv4', '~>0.3.0' # rubocop:todo Gemfile/MissingFeatureCategory
diff --git a/Gemfile.checksum b/Gemfile.checksum
index 40e024c1382..fa57b6abfbc 100644
--- a/Gemfile.checksum
+++ b/Gemfile.checksum
@@ -36,7 +36,7 @@
{"name":"aws-eventstream","version":"1.2.0","platform":"ruby","checksum":"ffa53482c92880b001ff2fb06919b9bb82fd847cbb0fa244985d2ebb6dd0d1df"},
{"name":"aws-partitions","version":"1.761.0","platform":"ruby","checksum":"291e444e1edfc92c5521a6dbdd1236ccc3f122b3520163b2be6ec5b6ef350ef2"},
{"name":"aws-sdk-cloudformation","version":"1.41.0","platform":"ruby","checksum":"31e47539719734413671edf9b1a31f8673fbf9688549f50c41affabbcb1c6b26"},
-{"name":"aws-sdk-core","version":"3.185.1","platform":"ruby","checksum":"572ada4eaf8393a9999d9a50adc2dcb78cc742c26a5727248c27f02cdaf97973"},
+{"name":"aws-sdk-core","version":"3.185.2","platform":"ruby","checksum":"75878c00df67750de85537cc851b1281770f2270392de73b9dedcecba314b0ce"},
{"name":"aws-sdk-kms","version":"1.64.0","platform":"ruby","checksum":"40de596c95047bfc6e1aacea24f3df6241aa716b6f7ce08ac4c5f7e3120395ad"},
{"name":"aws-sdk-s3","version":"1.136.0","platform":"ruby","checksum":"3547302a85d51de6cc75b48fb37d328f65f6526e7fc73a27a5b1b871f99a8d63"},
{"name":"aws-sigv4","version":"1.6.0","platform":"ruby","checksum":"ca9e6a15cd424f1f32b524b9760995331459bc22e67d3daad4fcf0c0084b087d"},
diff --git a/Gemfile.lock b/Gemfile.lock
index 0ec05189101..29cdca969fc 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -270,7 +270,7 @@ GEM
aws-sdk-cloudformation (1.41.0)
aws-sdk-core (~> 3, >= 3.99.0)
aws-sigv4 (~> 1.1)
- aws-sdk-core (3.185.1)
+ aws-sdk-core (3.185.2)
aws-eventstream (~> 1, >= 1.0.2)
aws-partitions (~> 1, >= 1.651.0)
aws-sigv4 (~> 1.5)
@@ -1749,7 +1749,7 @@ DEPENDENCIES
autoprefixer-rails (= 10.2.5.1)
awesome_print
aws-sdk-cloudformation (~> 1)
- aws-sdk-core (~> 3.185.1)
+ aws-sdk-core (~> 3.185.2)
aws-sdk-s3 (~> 1.136.0)
axe-core-rspec
babosa (~> 2.0)
diff --git a/app/assets/javascripts/ci/ci_variable_list/components/ci_variable_drawer.vue b/app/assets/javascripts/ci/ci_variable_list/components/ci_variable_drawer.vue
index a32c5f476fb..c628981717e 100644
--- a/app/assets/javascripts/ci/ci_variable_list/components/ci_variable_drawer.vue
+++ b/app/assets/javascripts/ci/ci_variable_list/components/ci_variable_drawer.vue
@@ -371,7 +371,6 @@ export default {
:label-text="$options.i18n.key"
class="gl-border-none gl-pb-0! gl-mb-n5"
data-testid="ci-variable-key"
- data-qa-selector="ci_variable_key_field"
/>
{{ $options.i18n.deleteVariable }}
{{ modalActionText }}
diff --git a/app/assets/javascripts/ci/ci_variable_list/components/ci_variable_table.vue b/app/assets/javascripts/ci/ci_variable_list/components/ci_variable_table.vue
index 30cadadfe45..86287d586ec 100644
--- a/app/assets/javascripts/ci/ci_variable_list/components/ci_variable_table.vue
+++ b/app/assets/javascripts/ci/ci_variable_list/components/ci_variable_table.vue
@@ -243,7 +243,6 @@ export default {
{{ $options.i18n.addButton }}
" "https://gitlab
"updated_at": "2021-06-18T09:46:27.003Z"
}
```
+
+## Get list of failed import records for group or project migration entity
+
+```plaintext
+GET /bulk_imports/:id/entities/:entity_id/failures
+```
+
+```shell
+curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/bulk_imports/1/entities/2/failures"
+```
+
+```json
+{
+ "relation": "issues",
+ "exception_message": "Error!",
+ "exception_class": "StandardError",
+ "correlation_id_value": "06289e4b064329a69de7bb2d7a1b5a97",
+ "source_url": "https://gitlab.example/project/full/path/-/issues/1",
+ "source_title": "Issue title"
+}
+```
diff --git a/doc/user/application_security/continuous_vulnerability_scanning/index.md b/doc/user/application_security/continuous_vulnerability_scanning/index.md
index 4094a0add28..e31fc5f7eb0 100644
--- a/doc/user/application_security/continuous_vulnerability_scanning/index.md
+++ b/doc/user/application_security/continuous_vulnerability_scanning/index.md
@@ -29,10 +29,9 @@ To enable Continuous Vulnerability Scanning:
- Enable the Continuous Vulnerability Scanning setting in the project's [security configuration](../configuration/index.md).
- Enable [Dependency Scanning](../dependency_scanning/index.md#configuration) and ensure that its prerequisites are met.
+- On GitLab self-managed only, you can [choose package registry metadata to synchronize](../../../administration/settings/security_and_compliance.md#choose-package-registry-metadata-to-sync) in the Admin Area for the GitLab instance. For this data synchronization to work, you must allow outbound network traffic from your GitLab instance to the domain `storage.googleapis.com`. If you have limited or no network connectivity, see [running in an offline environment](#running-in-an-offline-environment) for more guidance.
-On GitLab self-managed only, you can [choose package registry metadata to sync](../../../administration/settings/security_and_compliance.md#choose-package-registry-metadata-to-sync) in the Admin Area for the GitLab instance.
-
-### Requirements for offline environments
+### Running in an offline environment
For self-managed GitLab instances in an environment with limited, restricted, or intermittent access to external resources through the internet,
some adjustments are required to successfully scan CycloneDX reports for vulnerabilities.
diff --git a/doc/user/application_security/terminology/index.md b/doc/user/application_security/terminology/index.md
index 0f0a61a2b02..f09672685de 100644
--- a/doc/user/application_security/terminology/index.md
+++ b/doc/user/application_security/terminology/index.md
@@ -259,7 +259,7 @@ A finding's primary identifier is a value that is unique to each finding. The ex
of the finding's [first identifier](https://gitlab.com/gitlab-org/security-products/security-report-schemas/-/blob/v2.4.0-rc1/dist/sast-report-format.json#L228)
combine to create the value.
-Examples of primary identifiers include `PluginID` for OWASP Zed Attack Proxy (ZAP), or `CVE` for
+Examples of primary identifiers include `PluginID` for Zed Attack Proxy (ZAP), or `CVE` for
Trivy. The identifier must be stable. Subsequent scans must return the same value for the
same finding, even if the location has slightly changed.
diff --git a/doc/user/compliance/license_scanning_of_cyclonedx_files/index.md b/doc/user/compliance/license_scanning_of_cyclonedx_files/index.md
index 81f7cc61782..5d7a689e610 100644
--- a/doc/user/compliance/license_scanning_of_cyclonedx_files/index.md
+++ b/doc/user/compliance/license_scanning_of_cyclonedx_files/index.md
@@ -22,16 +22,11 @@ Licenses not in the SPDX list are reported as "Unknown". License information can
## Configuration
-Prerequisites:
+To enable license scanning of CycloneDX files:
-- On GitLab self-managed only, enable [Synchronization with the GitLab License Database](../../../administration/settings/security_and_compliance.md#choose-package-registry-metadata-to-sync) in the Admin Area for the GitLab instance. On GitLab SaaS this step has already been completed.
- Enable [Dependency Scanning](../../application_security/dependency_scanning/index.md#enabling-the-analyzer)
and ensure that its prerequisites are met.
-
-From the `.gitlab-ci.yml` file, remove the deprecated line `Jobs/License-Scanning.gitlab-ci.yml`, if
-it's present.
-
-On GitLab self-managed only, you can [choose package registry metadata to sync](../../../administration/settings/security_and_compliance.md#choose-package-registry-metadata-to-sync) in the Admin Area for the GitLab instance.
+- On GitLab self-managed only, you can [choose package registry metadata to synchronize](../../../administration/settings/security_and_compliance.md#choose-package-registry-metadata-to-sync) in the Admin Area for the GitLab instance. For this data synchronization to work, you must allow outbound network traffic from your GitLab instance to the domain `storage.googleapis.com`. If you have limited or no network connectivity, see [running in an offline environment](#running-in-an-offline-environment) for more guidance.
## Supported languages and package managers
diff --git a/doc/user/discussions/img/add_internal_note_v15_0.png b/doc/user/discussions/img/add_internal_note_v15_0.png
deleted file mode 100644
index cf052edd5e7..00000000000
Binary files a/doc/user/discussions/img/add_internal_note_v15_0.png and /dev/null differ
diff --git a/doc/user/discussions/img/add_internal_note_v16_6.png b/doc/user/discussions/img/add_internal_note_v16_6.png
new file mode 100644
index 00000000000..0d6b4c05160
Binary files /dev/null and b/doc/user/discussions/img/add_internal_note_v16_6.png differ
diff --git a/doc/user/discussions/img/create_thread_v16_6.png b/doc/user/discussions/img/create_thread_v16_6.png
new file mode 100644
index 00000000000..3e0abb3d589
Binary files /dev/null and b/doc/user/discussions/img/create_thread_v16_6.png differ
diff --git a/doc/user/discussions/img/discussion_comment.png b/doc/user/discussions/img/discussion_comment.png
deleted file mode 100644
index 3fec5962363..00000000000
Binary files a/doc/user/discussions/img/discussion_comment.png and /dev/null differ
diff --git a/doc/user/discussions/img/quickly_assign_commenter_v13_1.png b/doc/user/discussions/img/quickly_assign_commenter_v13_1.png
deleted file mode 100644
index aa8f65ef6c4..00000000000
Binary files a/doc/user/discussions/img/quickly_assign_commenter_v13_1.png and /dev/null differ
diff --git a/doc/user/discussions/img/quickly_assign_commenter_v16_6.png b/doc/user/discussions/img/quickly_assign_commenter_v16_6.png
new file mode 100644
index 00000000000..7d6e54fdfa2
Binary files /dev/null and b/doc/user/discussions/img/quickly_assign_commenter_v16_6.png differ
diff --git a/doc/user/discussions/index.md b/doc/user/discussions/index.md
index ae74b534e02..50f2eca8d05 100644
--- a/doc/user/discussions/index.md
+++ b/doc/user/discussions/index.md
@@ -192,7 +192,7 @@ To add an internal note:
1. Below the comment, select the **Make this an internal note** checkbox.
1. Select **Add internal note**.
-
+
You can also mark an [issue as confidential](../project/issues/confidential_issues.md).
@@ -233,7 +233,7 @@ You can assign an issue to a user who made a comment.
1. In the comment, select the **More Actions** (**{ellipsis_v}**) menu.
1. Select **Assign to commenting user**:
- 
+ 
1. To unassign the commenter, select the button again.
## Create a thread by replying to a standard comment
@@ -272,9 +272,9 @@ To create a thread:
1. From the list, select **Start thread**.
1. Select **Start thread** again.
-A threaded comment is created.
+
-
+A threaded comment is created.
## Resolve a thread
diff --git a/doc/user/project/deploy_tokens/index.md b/doc/user/project/deploy_tokens/index.md
index 8b7e185508b..351762228fb 100644
--- a/doc/user/project/deploy_tokens/index.md
+++ b/doc/user/project/deploy_tokens/index.md
@@ -88,7 +88,8 @@ Create a deploy token to automate deployment tasks that can run independently of
Prerequisites:
-- You must have at least the Maintainer role for the project or group.
+- To create a group deploy token, you must have the Owner role for the group.
+- To create a project deploy token, you must have at least the Maintainer role for the project.
1. On the left sidebar, select **Search or go to** and find your project or group.
1. Select **Settings > Repository**.
@@ -106,7 +107,8 @@ Revoke a token when it's no longer required.
Prerequisites:
-- You must have at least the Maintainer role for the project or group.
+- To revoke a group deploy token, you must have the Owner role for the group.
+- To revoke a project deploy token, you must have at least the Maintainer role for the project.
To revoke a deploy token:
diff --git a/lib/api/bulk_imports.rb b/lib/api/bulk_imports.rb
index 9bcc16cf211..9dc0e5bae9b 100644
--- a/lib/api/bulk_imports.rb
+++ b/lib/api/bulk_imports.rb
@@ -214,6 +214,23 @@ module API
get ':import_id/entities/:entity_id' do
present bulk_import_entity, with: Entities::BulkImports::Entity
end
+
+ desc 'Get GitLab Migration entity failures' do
+ detail 'This feature was introduced in GitLab 16.6'
+ success code: 200, model: Entities::BulkImports::EntityFailure
+ failure [
+ { code: 401, message: 'Unauthorized' },
+ { code: 404, message: 'Not found' },
+ { code: 503, message: 'Service unavailable' }
+ ]
+ end
+ params do
+ requires :import_id, type: Integer, desc: "The ID of user's GitLab Migration"
+ requires :entity_id, type: Integer, desc: "The ID of GitLab Migration entity"
+ end
+ get ':import_id/entities/:entity_id/failures' do
+ present paginate(bulk_import_entity.failures), with: Entities::BulkImports::EntityFailure
+ end
end
end
end
diff --git a/lib/api/entities/bulk_imports/entity_failure.rb b/lib/api/entities/bulk_imports/entity_failure.rb
index 3e69e7fa2aa..08708a7c961 100644
--- a/lib/api/entities/bulk_imports/entity_failure.rb
+++ b/lib/api/entities/bulk_imports/entity_failure.rb
@@ -4,18 +4,14 @@ module API
module Entities
module BulkImports
class EntityFailure < Grape::Entity
- expose :relation, documentation: { type: 'string', example: 'group' }
- expose :pipeline_step, as: :step, documentation: { type: 'string', example: 'extractor' }
+ expose :relation, documentation: { type: 'string', example: 'label' }
expose :exception_message, documentation: { type: 'string', example: 'error message' } do |failure|
::Projects::ImportErrorFilter.filter_message(failure.exception_message.truncate(72))
end
expose :exception_class, documentation: { type: 'string', example: 'Exception' }
expose :correlation_id_value, documentation: { type: 'string', example: 'dfcf583058ed4508e4c7c617bd7f0edd' }
- expose :created_at, documentation: { type: 'dateTime', example: '2012-05-28T04:42:42-07:00' }
- expose :pipeline_class, documentation: {
- type: 'string', example: 'BulkImports::Groups::Pipelines::GroupPipeline'
- }
- expose :pipeline_step, documentation: { type: 'string', example: 'extractor' }
+ expose :source_url, documentation: { type: 'string', example: 'https://source.gitlab.com/group/-/epics/1' }
+ expose :source_title, documentation: { type: 'string', example: 'title' }
end
end
end
diff --git a/lib/bulk_imports/pipeline/runner.rb b/lib/bulk_imports/pipeline/runner.rb
index cd237f13269..328c5a5941f 100644
--- a/lib/bulk_imports/pipeline/runner.rb
+++ b/lib/bulk_imports/pipeline/runner.rb
@@ -25,7 +25,7 @@ module BulkImports
end
end
- run_pipeline_step(:loader, loader.class.name) do
+ run_pipeline_step(:loader, loader.class.name, entry) do
loader.load(context, entry)
end
@@ -49,7 +49,7 @@ module BulkImports
private # rubocop:disable Lint/UselessAccessModifier
- def run_pipeline_step(step, class_name = nil)
+ def run_pipeline_step(step, class_name = nil, entry = nil)
raise MarkedAsFailedError if context.entity.failed?
info(pipeline_step: step, step_class: class_name)
@@ -65,11 +65,11 @@ module BulkImports
rescue BulkImports::NetworkError => e
raise BulkImports::RetryPipelineError.new(e.message, e.retry_delay) if e.retriable?(context.tracker)
- log_and_fail(e, step)
+ log_and_fail(e, step, entry)
rescue BulkImports::RetryPipelineError
raise
rescue StandardError => e
- log_and_fail(e, step)
+ log_and_fail(e, step, entry)
end
def extracted_data_from
@@ -95,8 +95,8 @@ module BulkImports
run if extracted_data.has_next_page?
end
- def log_and_fail(exception, step)
- log_import_failure(exception, step)
+ def log_and_fail(exception, step, entry = nil)
+ log_import_failure(exception, step, entry)
if abort_on_failure?
tracker.fail_op!
@@ -114,7 +114,7 @@ module BulkImports
tracker.skip!
end
- def log_import_failure(exception, step)
+ def log_import_failure(exception, step, entry)
failure_attributes = {
bulk_import_entity_id: context.entity.id,
pipeline_class: pipeline,
@@ -124,6 +124,11 @@ module BulkImports
correlation_id_value: Labkit::Correlation::CorrelationId.current_or_new_id
}
+ if entry
+ failure_attributes[:source_url] = BulkImports::SourceUrlBuilder.new(context, entry).url
+ failure_attributes[:source_title] = entry.try(:title) || entry.try(:name)
+ end
+
log_exception(
exception,
log_params(
diff --git a/lib/bulk_imports/source_url_builder.rb b/lib/bulk_imports/source_url_builder.rb
new file mode 100644
index 00000000000..875b2eae9f7
--- /dev/null
+++ b/lib/bulk_imports/source_url_builder.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+module BulkImports
+ class SourceUrlBuilder
+ ALLOWED_RELATIONS = %w[
+ issues
+ merge_requests
+ epics
+ milestones
+ ].freeze
+
+ attr_reader :context, :entity, :entry
+
+ # @param [BulkImports::Pipeline::Context] context
+ # @param [ApplicationRecord] entry
+ def initialize(context, entry)
+ @context = context
+ @entity = context.entity
+ @entry = entry
+ end
+
+ # Builds a source URL for the given entry if iid is present
+ def url
+ return unless entry.is_a?(ApplicationRecord)
+ return unless iid
+ return unless ALLOWED_RELATIONS.include?(relation)
+
+ File.join(source_instance_url, group_prefix, source_full_path, '-', relation, iid.to_s)
+ end
+
+ private
+
+ def iid
+ @iid ||= entry.try(:iid)
+ end
+
+ def relation
+ @relation ||= context.tracker.pipeline_class.relation
+ end
+
+ def source_instance_url
+ @source_instance_url ||= context.bulk_import.configuration.url
+ end
+
+ def source_full_path
+ @source_full_path ||= entity.source_full_path
+ end
+
+ # Group milestone (or epic) url is /groups/:group_path/-/milestones/:iid
+ # Project milestone url is /:project_path/-/milestones/:iid
+ def group_prefix
+ return '' if entity.project?
+
+ entity.pluralized_name
+ end
+ end
+end
diff --git a/lib/gitlab/jira/http_client.rb b/lib/gitlab/jira/http_client.rb
index 7abfe8e38e8..2b8b01e2023 100644
--- a/lib/gitlab/jira/http_client.rb
+++ b/lib/gitlab/jira/http_client.rb
@@ -34,6 +34,17 @@ module Gitlab
request_params[:headers][:Cookie] = get_cookies if options[:use_cookies]
request_params[:base_uri] = uri.to_s
request_params.merge!(auth_params)
+ # Setting defaults here so we can also set `timeout` which prevents setting defaults in the HTTP gem's code
+ request_params[:open_timeout] = options[:open_timeout] || default_timeout_for(:open_timeout)
+ request_params[:read_timeout] = options[:read_timeout] || default_timeout_for(:read_timeout)
+ request_params[:write_timeout] = options[:write_timeout] || default_timeout_for(:write_timeout)
+ # Global timeout. Needs to be at least as high as the maximum defined in other timeouts
+ request_params[:timeout] = [
+ Gitlab::HTTP::DEFAULT_READ_TOTAL_TIMEOUT,
+ request_params[:open_timeout],
+ request_params[:read_timeout],
+ request_params[:write_timeout]
+ ].max
result = Gitlab::HTTP.public_send(http_method, path, **request_params) # rubocop:disable GitlabSecurity/PublicSend
@authenticated = result.response.is_a?(Net::HTTPOK)
@@ -52,6 +63,10 @@ module Gitlab
private
+ def default_timeout_for(param)
+ Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS[param]
+ end
+
def auth_params
return {} unless @options[:username] && @options[:password]
diff --git a/lib/gitlab/jira_import/base_importer.rb b/lib/gitlab/jira_import/base_importer.rb
index 2b83f0492cb..04ef1a0ef68 100644
--- a/lib/gitlab/jira_import/base_importer.rb
+++ b/lib/gitlab/jira_import/base_importer.rb
@@ -5,7 +5,7 @@ module Gitlab
class BaseImporter
attr_reader :project, :client, :formatter, :jira_project_key, :running_import
- def initialize(project)
+ def initialize(project, client = nil)
Gitlab::JiraImport.validate_project_settings!(project)
@running_import = project.latest_jira_import
@@ -14,7 +14,7 @@ module Gitlab
raise Projects::ImportService::Error, _('Unable to find Jira project to import data from.') unless @jira_project_key
@project = project
- @client = project.jira_integration.client
+ @client = client || project.jira_integration.client
@formatter = Gitlab::ImportFormatter.new
end
diff --git a/lib/gitlab/jira_import/issues_importer.rb b/lib/gitlab/jira_import/issues_importer.rb
index 458f7c3f470..54ececc4938 100644
--- a/lib/gitlab/jira_import/issues_importer.rb
+++ b/lib/gitlab/jira_import/issues_importer.rb
@@ -10,7 +10,7 @@ module Gitlab
attr_reader :imported_items_cache_key, :start_at, :job_waiter
- def initialize(project)
+ def initialize(project, client = nil)
super
# get cached start_at value, or zero if not cached yet
@start_at = Gitlab::JiraImport.get_issues_next_start_at(project.id)
diff --git a/qa/qa/page/project/settings/ci_variables.rb b/qa/qa/page/project/settings/ci_variables.rb
index 8fdda1db97b..4d5bf06f95b 100644
--- a/qa/qa/page/project/settings/ci_variables.rb
+++ b/qa/qa/page/project/settings/ci_variables.rb
@@ -8,33 +8,33 @@ module QA
include QA::Page::Settings::Common
view 'app/assets/javascripts/ci/ci_variable_list/components/ci_variable_drawer.vue' do
- element :ci_variable_key_field
- element :ci_variable_value_field
- element :ci_variable_save_button
+ element 'ci-variable-key'
+ element 'ci-variable-value'
+ element 'ci-variable-confirm-button'
end
def fill_variable(key, value, masked = false)
- within_element(:ci_variable_key_field) { find('input').set key }
- fill_element :ci_variable_value_field, value
+ within_element('ci-variable-key') { find('input').set key }
+ fill_element 'ci-variable-value', value
click_ci_variable_save_button
wait_until(reload: false) do
- within_element('ci-variable-table') { has_element?(:edit_ci_variable_button) }
+ within_element('ci-variable-table') { has_element?('edit-ci-variable-button') }
end
end
def click_add_variable
- click_element :add_ci_variable_button
+ click_element 'add-ci-variable-button'
end
def click_edit_ci_variable
within_element('ci-variable-table') do
- click_element :edit_ci_variable_button
+ click_element 'edit-ci-variable-button'
end
end
def click_ci_variable_save_button
- click_element :ci_variable_save_button
+ click_element 'ci-variable-confirm-button'
end
end
end
diff --git a/qa/qa/specs/features/api/1_manage/import/import_large_github_repo_spec.rb b/qa/qa/specs/features/api/1_manage/import/import_large_github_repo_spec.rb
index 9c02bb39589..0019eb47eeb 100644
--- a/qa/qa/specs/features/api/1_manage/import/import_large_github_repo_spec.rb
+++ b/qa/qa/specs/features/api/1_manage/import/import_large_github_repo_spec.rb
@@ -123,7 +123,12 @@ module QA
access_token: ENV['QA_LARGE_IMPORT_GH_TOKEN'] || Runtime::Env.github_access_token,
per_page: 100,
middleware: Faraday::RackBuilder.new do |builder|
- builder.use(Faraday::Retry::Middleware, exceptions: [Octokit::InternalServerError, Octokit::ServerError])
+ builder.use(Faraday::Retry::Middleware,
+ max: 3,
+ interval: 1,
+ retry_block: ->(exception:, **) { logger.warn("Request to GitHub failed: '#{exception}', retrying") },
+ exceptions: [Octokit::InternalServerError, Octokit::ServerError]
+ )
builder.use(Faraday::Response::RaiseError) # faraday retry swallows errors, so it needs to be re-raised
end
)
@@ -161,52 +166,33 @@ module QA
end
let(:gh_issues) do
- issues = gh_all_issues.reject(&:pull_request).each_with_object({}) do |issue, hash|
+ gh_all_issues.reject(&:pull_request).each_with_object({}) do |issue, hash|
id = issue.number
+ logger.debug("- Fetching comments and events for issue #{id} -")
hash[id] = {
url: issue.html_url,
title: issue.title,
body: issue.body || '',
- comments: gh_issue_comments[id]
+ comments: fetch_issuable_comments(id, "issue"),
+ events: fetch_issuable_events(id)
}
end
-
- fetch_github_events(issues, "issue")
end
let(:gh_prs) do
- prs = gh_all_issues.select(&:pull_request).each_with_object({}) do |pr, hash|
+ gh_all_issues.select(&:pull_request).each_with_object({}) do |pr, hash|
id = pr.number
+ logger.debug("- Fetching comments and events for pr #{id} -")
hash[id] = {
url: pr.html_url,
title: pr.title,
body: pr.body || '',
- comments: [*gh_pr_comments[id], *gh_issue_comments[id]].compact
+ comments: fetch_issuable_comments(id, "pr"),
+ events: fetch_issuable_events(id)
}
end
-
- fetch_github_events(prs, "pr")
end
- # rubocop:disable Layout/LineLength
- let(:gh_issue_comments) do
- logger.info("- Fetching issue comments -")
- with_paginated_request { github_client.issues_comments(github_repo) }.each_with_object(Hash.new { |h, k| h[k] = [] }) do |c, hash|
- hash[id_from_url(c.html_url)] << c.body&.gsub(gh_link_pattern, dummy_url)
- end
- end
-
- let(:gh_pr_comments) do
- logger.info("- Fetching pr comments -")
- with_paginated_request { github_client.pull_requests_comments(github_repo) }.each_with_object(Hash.new { |h, k| h[k] = [] }) do |c, hash|
- hash[id_from_url(c.html_url)] << c.body
- # some suggestions can contain extra whitespaces which gitlab will remove
- &.gsub(/suggestion\s+\r/, "suggestion\r")
- &.gsub(gh_link_pattern, dummy_url)
- end
- end
- # rubocop:enable Layout/LineLength
-
let(:imported_project) do
Resource::ProjectImportedFromGithub.fabricate_via_api! do |project|
project.add_name_uuid = false
@@ -282,7 +268,7 @@ module QA
issue_events: gl_issues.sum { |_k, v| v[:events].length }
}
},
- not_imported: {
+ diff: {
mrs: @mr_diff,
issues: @issue_diff
}
@@ -415,24 +401,35 @@ module QA
#
private
- # Fetch github events and add to issue object
+ # Fetch issuable object comments
#
- # @param [Hash] issuables
+ # @param [Integer] id
# @param [String] type
- # @return [Hash]
- def fetch_github_events(issuables, type)
- logger.info("- Fetching #{type} events -")
- issuables.to_h do |id, issuable|
- logger.debug("Fetching events for #{type} !#{id}")
- events = with_paginated_request { github_client.issue_events(github_repo, id) }
- .map { |event| event[:event] }
- .reject { |event| unsupported_events.include?(event) }
+ # @return [Array]
+ def fetch_issuable_comments(id, type)
+ pr = type == "pr"
+ comments = []
+ # every pr is also an issue, so when fetching pr comments, issue endpoint has to be used as well
+ comments.push(*with_paginated_request { github_client.issue_comments(github_repo, id) })
+ comments.push(*with_paginated_request { github_client.pull_request_comments(github_repo, id) }) if pr
+ comments.map! { |comment| comment.body&.gsub(gh_link_pattern, dummy_url) }
+ return comments unless pr
- [id, issuable.merge({ events: events })]
- end
+ # some suggestions can contain extra whitespaces which gitlab will remove
+ comments.map { |comment| comment.gsub(/suggestion\s+\r/, "suggestion\r") }
end
- # Verify imported mrs or issues and return missing items
+ # Fetch issuable object events
+ #
+ # @param [Integer] id
+ # @return [Array]
+ def fetch_issuable_events(id)
+ with_paginated_request { github_client.issue_events(github_repo, id) }
+ .map { |event| event[:event] }
+ .reject { |event| unsupported_events.include?(event) }
+ end
+
+ # Verify imported mrs or issues and return content diff
#
# @param [String] type verification object, 'mrs' or 'issues'
# @return [Hash]
@@ -443,18 +440,20 @@ module QA
actual = type == 'mr' ? mrs : gl_issues
missing_objects = (expected.keys - actual.keys).map { |it| expected[it].slice(:title, :url) }
+ extra_objects = (actual.keys - expected.keys).map { |it| actual[it].slice(:title, :url) }
count_msg = <<~MSG
Expected to contain all of GitHub's #{type}s. Gitlab: #{actual.length}, Github: #{expected.length}.
Missing: #{missing_objects.map { |it| it[:url] }}
MSG
expect(expected.length <= actual.length).to be_truthy, count_msg
- missing_content = verify_comments_and_events(type, actual, expected)
+ content_diff = verify_comments_and_events(type, actual, expected)
{
- "#{type}s": missing_objects.empty? ? nil : missing_objects,
- "#{type}_content": missing_content.empty? ? nil : missing_content
- }.compact
+ "extra_#{type}s": extra_objects,
+ "missing_#{type}s": missing_objects,
+ "#{type}_content_diff": content_diff
+ }.compact_blank
end
# Verify imported comments and events
@@ -464,7 +463,7 @@ module QA
# @param [Hash] expected
# @return [Hash]
def verify_comments_and_events(type, actual, expected)
- actual.each_with_object([]) do |(key, actual_item), missing_content|
+ actual.each_with_object([]) do |(key, actual_item), content_diff|
expected_item = expected[key]
title = actual_item[:title]
msg = "expected #{type} with iid '#{key}' to have"
@@ -498,19 +497,23 @@ module QA
MSG
expect(actual_events).to include(*expected_events), event_count_msg
- # Save missing comments and events
+ # Save comment and event diff
#
- comment_diff = expected_comments - actual_comments
- event_diff = expected_events - actual_events
- next if comment_diff.empty? && event_diff.empty?
+ missing_comments = expected_comments - actual_comments
+ extra_comments = actual_comments - expected_comments
+ missing_events = expected_events - actual_events
+ extra_events = actual_events - expected_events
+ next if [missing_comments, missing_events, extra_comments, extra_events].all?(&:empty?)
- missing_content << {
+ content_diff << {
title: title,
github_url: expected_item[:url],
gitlab_url: actual_item[:url],
- missing_comments: comment_diff.empty? ? nil : comment_diff,
- missing_events: event_diff.empty? ? nil : event_diff
- }.compact
+ missing_comments: missing_comments,
+ extra_comments: extra_comments,
+ missing_events: missing_events,
+ extra_events: extra_events
+ }.compact_blank
end
end
@@ -671,16 +674,6 @@ module QA
File.open("tmp/github-import-data.json", "w") { |file| file.write(JSON.pretty_generate(json)) }
end
- # Extract id number from web url of issue or pull request
- #
- # Some endpoints don't return object id as separate parameter so web url can be used as a workaround
- #
- # @param [String] url
- # @return [Integer]
- def id_from_url(url)
- url.match(%r{(?<type>issues|pull)/(?<id>\d+)})&.named_captures&.fetch("id", nil).to_i
- end
-
# Custom pagination for github requests
#
# Default autopagination doesn't work correctly with rate limit
diff --git a/qa/qa/specs/features/browser_ui/10_govern/login/login_via_oauth_and_oidc_with_gitlab_as_idp_spec.rb b/qa/qa/specs/features/browser_ui/10_govern/login/login_via_oauth_and_oidc_with_gitlab_as_idp_spec.rb
index 5907f7654a0..a7781d265c7 100644
--- a/qa/qa/specs/features/browser_ui/10_govern/login/login_via_oauth_and_oidc_with_gitlab_as_idp_spec.rb
+++ b/qa/qa/specs/features/browser_ui/10_govern/login/login_via_oauth_and_oidc_with_gitlab_as_idp_spec.rb
@@ -71,7 +71,10 @@ module QA
end
end
- describe 'OIDC' do
+ describe 'OIDC', quarantine: {
+ issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/429723',
+ type: :flaky
+ } do
let(:consumer_name) { 'gitlab-oidc-consumer' }
let(:redirect_uri) { "#{consumer_host}/users/auth/openid_connect/callback" }
let(:scopes) { %w[openid profile email] }
diff --git a/spec/factories/ci/pipeline_artifacts.rb b/spec/factories/ci/pipeline_artifacts.rb
index bdd390126dd..77b1ac5a9cc 100644
--- a/spec/factories/ci/pipeline_artifacts.rb
+++ b/spec/factories/ci/pipeline_artifacts.rb
@@ -22,14 +22,6 @@ FactoryBot.define do
locked { :unlocked }
end
- trait :checksummed do
- verification_checksum { 'abc' }
- end
-
- trait :checksum_failure do
- verification_failure { 'Could not calculate the checksum' }
- end
-
trait :expired do
expire_at { Date.yesterday }
end
diff --git a/spec/factories/snippet_repositories.rb b/spec/factories/snippet_repositories.rb
index c3a6bc3ae31..1f9e68514bb 100644
--- a/spec/factories/snippet_repositories.rb
+++ b/spec/factories/snippet_repositories.rb
@@ -8,13 +8,5 @@ FactoryBot.define do
snippet_repository.shard_name = snippet_repository.snippet.repository_storage
snippet_repository.disk_path = snippet_repository.snippet.disk_path
end
-
- trait(:checksummed) do
- verification_checksum { 'abc' }
- end
-
- trait(:checksum_failure) do
- verification_failure { 'Could not calculate the checksum' }
- end
end
end
diff --git a/spec/factories/terraform/state_version.rb b/spec/factories/terraform/state_version.rb
index c6bd08815cf..5386dfa98f2 100644
--- a/spec/factories/terraform/state_version.rb
+++ b/spec/factories/terraform/state_version.rb
@@ -8,13 +8,5 @@ FactoryBot.define do
sequence(:version)
file { fixture_file_upload('spec/fixtures/terraform/terraform.tfstate', 'application/json') }
-
- trait(:checksummed) do
- verification_checksum { 'abc' }
- end
-
- trait(:checksum_failure) do
- verification_failure { 'Could not calculate the checksum' }
- end
end
end
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
index 207ea7aa060..802433c7036 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
@@ -67,9 +67,9 @@ describe('CI Variable Drawer', () => {
});
};
- const findConfirmBtn = () => wrapper.findByTestId('ci-variable-confirm-btn');
+ const findConfirmBtn = () => wrapper.findByTestId('ci-variable-confirm-button');
const findConfirmDeleteModal = () => wrapper.findComponent(GlModal);
- const findDeleteBtn = () => wrapper.findByTestId('ci-variable-delete-btn');
+ const findDeleteBtn = () => wrapper.findByTestId('ci-variable-delete-button');
const findDisabledEnvironmentScopeDropdown = () => wrapper.findComponent(GlFormInput);
const findDrawer = () => wrapper.findComponent(GlDrawer);
const findEnvironmentScopeDropdown = () => wrapper.findComponent(CiEnvironmentsDropdown);
diff --git a/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb b/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
index 0132102b117..217e6c11630 100644
--- a/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
+++ b/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Entities::BulkImports::EntityFailure do
+RSpec.describe API::Entities::BulkImports::EntityFailure, feature_category: :importers do
let_it_be(:failure) { create(:bulk_import_failure) }
subject { described_class.new(failure).as_json }
@@ -10,11 +10,11 @@ RSpec.describe API::Entities::BulkImports::EntityFailure do
it 'has the correct attributes' do
expect(subject).to include(
:relation,
- :step,
- :exception_class,
:exception_message,
+ :exception_class,
:correlation_id_value,
- :created_at
+ :source_url,
+ :source_title
)
end
diff --git a/spec/lib/bulk_imports/pipeline/runner_spec.rb b/spec/lib/bulk_imports/pipeline/runner_spec.rb
index aa31f38cd94..01adde79740 100644
--- a/spec/lib/bulk_imports/pipeline/runner_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/runner_spec.rb
@@ -43,7 +43,9 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
stub_const('BulkImports::MyPipeline', pipeline)
end
- let_it_be_with_reload(:entity) { create(:bulk_import_entity) }
+ let_it_be(:bulk_import) { create(:bulk_import) }
+ let_it_be(:configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
+ let_it_be_with_reload(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
let(:tracker) { create(:bulk_import_tracker, entity: entity) }
let(:context) { BulkImports::Pipeline::Context.new(tracker, extra: :data) }
@@ -119,6 +121,56 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
expect(entity.failed?).to eq(false)
end
end
+
+ context 'when failure happens during loader' do
+ before do
+ allow(tracker).to receive(:pipeline_class).and_return(BulkImports::MyPipeline)
+ allow(BulkImports::MyPipeline).to receive(:relation).and_return(relation)
+
+ allow_next_instance_of(BulkImports::Extractor) do |extractor|
+ allow(extractor).to receive(:extract).with(context).and_return(extracted_data)
+ end
+
+ allow_next_instance_of(BulkImports::Transformer) do |transformer|
+ allow(transformer).to receive(:transform).with(context, extracted_data.data.first).and_return(entry)
+ end
+
+ allow_next_instance_of(BulkImports::Loader) do |loader|
+ allow(loader).to receive(:load).with(context, entry).and_raise(StandardError, 'Error!')
+ end
+ end
+
+ context 'when entry has title' do
+ let(:relation) { 'issues' }
+ let(:entry) { Issue.new(iid: 1, title: 'hello world') }
+
+ it 'creates failure record with source url and title' do
+ subject.run
+
+ failure = entity.failures.first
+ expected_source_url = File.join(configuration.url, 'groups', entity.source_full_path, '-', 'issues', '1')
+
+ expect(failure).to be_present
+ expect(failure.source_url).to eq(expected_source_url)
+ expect(failure.source_title).to eq('hello world')
+ end
+ end
+
+ context 'when entry has name' do
+ let(:relation) { 'boards' }
+ let(:entry) { Board.new(name: 'hello world') }
+
+ it 'creates failure record with name' do
+ subject.run
+
+ failure = entity.failures.first
+
+ expect(failure).to be_present
+ expect(failure.source_url).to be_nil
+ expect(failure.source_title).to eq('hello world')
+ end
+ end
+ end
end
describe 'pipeline runner' do
@@ -363,7 +415,11 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
def extracted_data(has_next_page: false)
BulkImports::Pipeline::ExtractedData.new(
- data: { foo: :bar },
+ data: {
+ 'foo' => 'bar',
+ 'title' => 'hello world',
+ 'iid' => 1
+ },
page_info: {
'has_next_page' => has_next_page,
'next_page' => has_next_page ? 'cursor' : nil
diff --git a/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb
index 9dac8e45ef9..334c2004b59 100644
--- a/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
-RSpec.describe BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline do
+RSpec.describe BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline, feature_category: :importers do
let_it_be(:project) { create(:project) }
let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity, pipeline_name: described_class) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let_it_be(:policy) do
diff --git a/spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb
index b7197814f9c..f00da47d9f5 100644
--- a/spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-RSpec.describe BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline do
+RSpec.describe BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline, feature_category: :importers do
let_it_be(:project) { create(:project) }
let_it_be(:bulk_import) { create(:bulk_import) }
let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import) }
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity, pipeline_name: described_class) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:attributes) { {} }
diff --git a/spec/lib/bulk_imports/source_url_builder_spec.rb b/spec/lib/bulk_imports/source_url_builder_spec.rb
new file mode 100644
index 00000000000..2c0e042314b
--- /dev/null
+++ b/spec/lib/bulk_imports/source_url_builder_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::SourceUrlBuilder, feature_category: :importers do
+ let_it_be(:bulk_import) { create(:bulk_import) }
+ let_it_be(:configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
+
+ let(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let(:entry) { Issue.new(iid: 1, title: 'hello world') }
+
+ describe '#url' do
+ subject { described_class.new(context, entry) }
+
+ before do
+ allow(subject).to receive(:relation).and_return('issues')
+ end
+
+ context 'when relation is allowed' do
+ context 'when entity is a group' do
+ it 'returns the url specific to groups' do
+ expected_url = File.join(
+ configuration.url,
+ 'groups',
+ entity.source_full_path,
+ '-',
+ 'issues',
+ '1'
+ )
+
+ expect(subject.url).to eq(expected_url)
+ end
+ end
+
+ context 'when entity is a project' do
+ let(:entity) { create(:bulk_import_entity, :project_entity, bulk_import: bulk_import) }
+
+ it 'returns the url' do
+ expected_url = File.join(
+ configuration.url,
+ entity.source_full_path,
+ '-',
+ 'issues',
+ '1'
+ )
+
+ expect(subject.url).to eq(expected_url)
+ end
+ end
+ end
+
+ context 'when entry is not an ApplicationRecord' do
+ let(:entry) { 'not an ApplicationRecord' }
+
+ it 'returns nil' do
+ expect(subject.url).to be_nil
+ end
+ end
+
+ context 'when relation is not allowed' do
+ it 'returns nil' do
+ allow(subject).to receive(:relation).and_return('not_allowed')
+
+ expect(subject.url).to be_nil
+ end
+ end
+
+ context 'when entry has no iid' do
+ let(:entry) { Issue.new }
+
+ it 'returns nil' do
+ expect(subject.url).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index d3ae92ea52a..af021c51035 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -603,6 +603,17 @@ RSpec.describe Integrations::Jira, feature_category: :integrations do
jira_integration.client.get('/foo')
end
+ context 'when a custom read_timeout option is passed as an argument' do
+ it 'uses the default GitLab::HTTP timeouts plus a custom read_timeout' do
+ expected_timeouts = Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS.merge(read_timeout: 2.minutes, timeout: 2.minutes)
+
+ expect(Gitlab::HTTP_V2::Client).to receive(:httparty_perform_request)
+ .with(Net::HTTP::Get, '/foo', hash_including(expected_timeouts)).and_call_original
+
+ jira_integration.client(read_timeout: 2.minutes).get('/foo')
+ end
+ end
+
context 'with basic auth' do
before do
jira_integration.jira_auth_type = 0
diff --git a/spec/requests/api/bulk_imports_spec.rb b/spec/requests/api/bulk_imports_spec.rb
index d3d4a723616..bbc01b30361 100644
--- a/spec/requests/api/bulk_imports_spec.rb
+++ b/spec/requests/api/bulk_imports_spec.rb
@@ -394,7 +394,7 @@ RSpec.describe API::BulkImports, feature_category: :importers do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.pluck('id')).to contain_exactly(entity_3.id)
- expect(json_response.first['failures'].first['exception_class']).to eq(failure_3.exception_class)
+ expect(json_response.first['failures'].first['exception_message']).to eq(failure_3.exception_message)
end
it_behaves_like 'disabled feature'
@@ -420,4 +420,17 @@ RSpec.describe API::BulkImports, feature_category: :importers do
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
+
+ describe 'GET /bulk_imports/:id/entities/:entity_id/failures' do
+ let(:request) { get api("/bulk_imports/#{import_2.id}/entities/#{entity_3.id}/failures", user) }
+
+ it 'returns specified entity failures' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.first['exception_message']).to eq(failure_3.exception_message)
+ end
+
+ it_behaves_like 'disabled feature'
+ end
end
diff --git a/spec/services/issuable/discussions_list_service_spec.rb b/spec/services/issuable/discussions_list_service_spec.rb
index 446cc286e28..9c791ce9cd3 100644
--- a/spec/services/issuable/discussions_list_service_spec.rb
+++ b/spec/services/issuable/discussions_list_service_spec.rb
@@ -30,6 +30,12 @@ RSpec.describe Issuable::DiscussionsListService, feature_category: :team_plannin
expect(discussions_service.execute).to be_empty
end
end
+
+ context 'when issue exists at the group level' do
+ let_it_be(:issuable) { create(:issue, :group_level, namespace: group) }
+
+ it_behaves_like 'listing issuable discussions', :guest, 1, 7
+ end
end
describe 'fetching notes for merge requests' do
diff --git a/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb b/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb
index 594f9618770..f9b03fc1b44 100644
--- a/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb
@@ -25,7 +25,11 @@ RSpec.describe Gitlab::JiraImport::Stage::ImportIssuesWorker, feature_category:
end
context 'when import started', :clean_gitlab_redis_cache do
- let_it_be(:jira_integration) { create(:jira_integration, project: project) }
+ let(:job_waiter) { Gitlab::JobWaiter.new(2, 'some-job-key') }
+
+ before_all do
+ create(:jira_integration, project: project)
+ end
before do
jira_import.start!
@@ -34,6 +38,40 @@ RSpec.describe Gitlab::JiraImport::Stage::ImportIssuesWorker, feature_category:
end
end
+ it 'uses a custom http client for the issues importer' do
+ jira_integration = project.jira_integration
+ client = instance_double(JIRA::Client)
+ issue_importer = instance_double(Gitlab::JiraImport::IssuesImporter)
+
+ allow(Project).to receive(:find_by_id).with(project.id).and_return(project)
+ allow(issue_importer).to receive(:execute).and_return(job_waiter)
+
+ expect(jira_integration).to receive(:client).with(read_timeout: 2.minutes).and_return(client)
+ expect(Gitlab::JiraImport::IssuesImporter).to receive(:new).with(
+ project,
+ client
+ ).and_return(issue_importer)
+
+ described_class.new.perform(project.id)
+ end
+
+ context 'when increase_jira_import_issues_timeout feature flag is disabled' do
+ before do
+ stub_feature_flags(increase_jira_import_issues_timeout: false)
+ end
+
+ it 'does not provide a custom client to IssuesImporter' do
+ issue_importer = instance_double(Gitlab::JiraImport::IssuesImporter)
+ expect(Gitlab::JiraImport::IssuesImporter).to receive(:new).with(
+ instance_of(Project),
+ nil
+ ).and_return(issue_importer)
+ allow(issue_importer).to receive(:execute).and_return(job_waiter)
+
+ described_class.new.perform(project.id)
+ end
+ end
+
context 'when start_at is nil' do
it_behaves_like 'advance to next stage', :attachments
end