Add latest changes from gitlab-org/gitlab@master

parent 24fb09b2eb
commit 533fed8bd8
@@ -485,10 +485,19 @@ That's all of the required database changes.
       end
 
       trait :verification_succeeded do
+        synced
         verification_checksum { 'e079a831cab27bcda7d81cd9b48296d0c3dd92ef' }
         verification_state { Geo::CoolWidgetRegistry.verification_state_value(:verification_succeeded) }
         verified_at { 5.days.ago }
       end
+
+      trait :verification_failed do
+        synced
+        verification_failure { 'Could not calculate the checksum' }
+        verification_state { Geo::CoolWidgetRegistry.verification_state_value(:verification_failed) }
+        verification_retry_count { 1 }
+        verification_retry_at { 2.hours.from_now }
+      end
     end
   end
   ```
@@ -519,15 +528,15 @@ That's all of the required database changes.
   FactoryBot.modify do
     factory :cool_widget do
       trait :verification_succeeded do
         repository
         verification_checksum { 'abc' }
         verification_state { CoolWidget.verification_state_value(:verification_succeeded) }
       end
 
       trait :verification_failed do
         repository
         verification_failure { 'Could not calculate the checksum' }
         verification_state { CoolWidget.verification_state_value(:verification_failed) }
       end
     end
   end
@@ -442,10 +442,19 @@ That's all of the required database changes.
       end
 
       trait :verification_succeeded do
+        synced
         verification_checksum { 'e079a831cab27bcda7d81cd9b48296d0c3dd92ef' }
         verification_state { Geo::CoolWidgetRegistry.verification_state_value(:verification_succeeded) }
         verified_at { 5.days.ago }
       end
+
+      trait :verification_failed do
+        synced
+        verification_failure { 'Could not calculate the checksum' }
+        verification_state { Geo::CoolWidgetRegistry.verification_state_value(:verification_failed) }
+        verification_retry_count { 1 }
+        verification_retry_at { 2.hours.from_now }
+      end
     end
   end
   ```
@@ -468,7 +477,7 @@ That's all of the required database changes.
   end
   ```
 
-- [ ] Add the following to `spec/factories/cool_widgets.rb`:
+- [ ] Add the following to `ee/spec/factories/cool_widgets.rb`:
 
   ```ruby
   # frozen_string_literal: true
@@ -476,15 +485,24 @@ That's all of the required database changes.
   FactoryBot.modify do
     factory :cool_widget do
       trait :verification_succeeded do
         with_file
         verification_checksum { 'abc' }
         verification_state { CoolWidget.verification_state_value(:verification_succeeded) }
       end
 
       trait :verification_failed do
         with_file
         verification_failure { 'Could not calculate the checksum' }
         verification_state { CoolWidget.verification_state_value(:verification_failed) }
+
+        #
+        # Geo::VerifiableReplicator#after_verifiable_update tries to verify
+        # the replicable async and marks it as verification started when the
+        # model record is created/updated.
+        #
+        after(:create) do |instance, _|
+          instance.verification_failed!
+        end
       end
     end
   end
@@ -217,7 +217,6 @@ Gitlab/StrongMemoizeAttr:
     - 'app/services/quick_actions/interpret_service.rb'
     - 'app/services/releases/base_service.rb'
    - 'app/services/resource_access_tokens/revoke_service.rb'
-    - 'app/services/resource_events/base_synthetic_notes_builder_service.rb'
     - 'app/services/search/global_service.rb'
     - 'app/services/search/project_service.rb'
     - 'app/services/search_service.rb'
@@ -2573,7 +2573,6 @@ RSpec/FeatureCategory:
     - 'spec/lib/api/entities/application_setting_spec.rb'
     - 'spec/lib/api/entities/branch_spec.rb'
     - 'spec/lib/api/entities/bulk_import_spec.rb'
-    - 'spec/lib/api/entities/bulk_imports/entity_failure_spec.rb'
     - 'spec/lib/api/entities/bulk_imports/entity_spec.rb'
     - 'spec/lib/api/entities/bulk_imports/export_status_spec.rb'
     - 'spec/lib/api/entities/changelog_spec.rb'
@@ -2695,9 +2694,7 @@ RSpec/FeatureCategory:
     - 'spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb'
     - 'spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb'
     - 'spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb'
-    - 'spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb'
     - 'spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb'
-    - 'spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb'
     - 'spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb'
     - 'spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb'
     - 'spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb'
Gemfile
@@ -196,7 +196,7 @@ gem 'seed-fu', '~> 2.3.7' # rubocop:todo Gemfile/MissingFeatureCategory
 gem 'elasticsearch-model', '~> 7.2' # rubocop:todo Gemfile/MissingFeatureCategory
 gem 'elasticsearch-rails', '~> 7.2', require: 'elasticsearch/rails/instrumentation' # rubocop:todo Gemfile/MissingFeatureCategory
 gem 'elasticsearch-api', '7.13.3' # rubocop:todo Gemfile/MissingFeatureCategory
-gem 'aws-sdk-core', '~> 3.185.1' # rubocop:todo Gemfile/MissingFeatureCategory
+gem 'aws-sdk-core', '~> 3.185.2' # rubocop:todo Gemfile/MissingFeatureCategory
 gem 'aws-sdk-cloudformation', '~> 1' # rubocop:todo Gemfile/MissingFeatureCategory
 gem 'aws-sdk-s3', '~> 1.136.0' # rubocop:todo Gemfile/MissingFeatureCategory
 gem 'faraday_middleware-aws-sigv4', '~>0.3.0' # rubocop:todo Gemfile/MissingFeatureCategory
@@ -36,7 +36,7 @@
 {"name":"aws-eventstream","version":"1.2.0","platform":"ruby","checksum":"ffa53482c92880b001ff2fb06919b9bb82fd847cbb0fa244985d2ebb6dd0d1df"},
 {"name":"aws-partitions","version":"1.761.0","platform":"ruby","checksum":"291e444e1edfc92c5521a6dbdd1236ccc3f122b3520163b2be6ec5b6ef350ef2"},
 {"name":"aws-sdk-cloudformation","version":"1.41.0","platform":"ruby","checksum":"31e47539719734413671edf9b1a31f8673fbf9688549f50c41affabbcb1c6b26"},
-{"name":"aws-sdk-core","version":"3.185.1","platform":"ruby","checksum":"572ada4eaf8393a9999d9a50adc2dcb78cc742c26a5727248c27f02cdaf97973"},
+{"name":"aws-sdk-core","version":"3.185.2","platform":"ruby","checksum":"75878c00df67750de85537cc851b1281770f2270392de73b9dedcecba314b0ce"},
 {"name":"aws-sdk-kms","version":"1.64.0","platform":"ruby","checksum":"40de596c95047bfc6e1aacea24f3df6241aa716b6f7ce08ac4c5f7e3120395ad"},
 {"name":"aws-sdk-s3","version":"1.136.0","platform":"ruby","checksum":"3547302a85d51de6cc75b48fb37d328f65f6526e7fc73a27a5b1b871f99a8d63"},
 {"name":"aws-sigv4","version":"1.6.0","platform":"ruby","checksum":"ca9e6a15cd424f1f32b524b9760995331459bc22e67d3daad4fcf0c0084b087d"},
@@ -270,7 +270,7 @@ GEM
     aws-sdk-cloudformation (1.41.0)
       aws-sdk-core (~> 3, >= 3.99.0)
       aws-sigv4 (~> 1.1)
-    aws-sdk-core (3.185.1)
+    aws-sdk-core (3.185.2)
       aws-eventstream (~> 1, >= 1.0.2)
       aws-partitions (~> 1, >= 1.651.0)
       aws-sigv4 (~> 1.5)
@@ -1749,7 +1749,7 @@ DEPENDENCIES
   autoprefixer-rails (= 10.2.5.1)
   awesome_print
   aws-sdk-cloudformation (~> 1)
-  aws-sdk-core (~> 3.185.1)
+  aws-sdk-core (~> 3.185.2)
   aws-sdk-s3 (~> 1.136.0)
   axe-core-rspec
   babosa (~> 2.0)
@@ -371,7 +371,6 @@ export default {
         :label-text="$options.i18n.key"
         class="gl-border-none gl-pb-0! gl-mb-n5"
         data-testid="ci-variable-key"
-        data-qa-selector="ci_variable_key_field"
       />
       <gl-form-group
         :label="$options.i18n.value"
@@ -388,7 +387,6 @@ export default {
         rows="3"
         max-rows="10"
         data-testid="ci-variable-value"
-        data-qa-selector="ci_variable_value_field"
         spellcheck="false"
       />
       <p
@@ -419,15 +417,14 @@ export default {
         variant="danger"
         category="secondary"
         class="gl-mr-3"
-        data-testid="ci-variable-delete-btn"
+        data-testid="ci-variable-delete-button"
         >{{ $options.i18n.deleteVariable }}</gl-button
       >
       <gl-button
         category="primary"
         variant="confirm"
         :disabled="!canSubmit"
-        data-testid="ci-variable-confirm-btn"
-        data-qa-selector="ci_variable_save_button"
+        data-testid="ci-variable-confirm-button"
         @click="submit"
         >{{ modalActionText }}
       </gl-button>
@@ -243,7 +243,6 @@ export default {
       <gl-button
         size="small"
         :disabled="exceedsVariableLimit"
-        data-qa-selector="add_ci_variable_button"
         data-testid="add-ci-variable-button"
         @click="setSelectedVariable()"
         >{{ $options.i18n.addButton }}</gl-button
@@ -376,7 +375,7 @@ export default {
           size="small"
           class="gl-mr-3"
           :aria-label="$options.i18n.editButton"
-          data-qa-selector="edit_ci_variable_button"
+          data-testid="edit-ci-variable-button"
           @click="setSelectedVariable(item.index)"
         />
         <gl-button
@@ -184,8 +184,8 @@ module Integrations
       options
     end
 
-    def client
-      @client ||= JIRA::Client.new(options).tap do |client|
+    def client(additional_options = {})
+      JIRA::Client.new(options.merge(additional_options)).tap do |client|
         # Replaces JIRA default http client with our implementation
         client.request_client = Gitlab::Jira::HttpClient.new(client.options)
       end
@@ -112,7 +112,7 @@ class ResourceLabelEvent < ResourceEvent
   end
 
   def resource_parent
-    issuable.project || issuable.group
+    issuable.try(:resource_parent) || issuable.project || issuable.group
   end
 
   def discussion_id_key
@@ -44,10 +44,9 @@ module ResourceEvents
     end
 
     def resource_parent
-      strong_memoize(:resource_parent) do
-        resource.project || resource.group
-      end
+      resource.try(:resource_parent) || resource.project || resource.group
     end
+    strong_memoize_attr :resource_parent
 
     def table_name
       raise NotImplementedError
@@ -37,4 +37,4 @@ module ResourceEvents
   end
 end
 
-ResourceEvents::MergeIntoNotesService.prepend_mod_with('ResourceEvents::MergeIntoNotesService')
+ResourceEvents::MergeIntoNotesService.prepend_mod
@@ -9,7 +9,14 @@ module Gitlab
         private
 
         def import(project)
-          jobs_waiter = Gitlab::JiraImport::IssuesImporter.new(project).execute
+          jira_client = if Feature.enabled?(:increase_jira_import_issues_timeout)
+                          project.jira_integration.client(read_timeout: 2.minutes)
+                        end
+
+          jobs_waiter = Gitlab::JiraImport::IssuesImporter.new(
+            project,
+            jira_client
+          ).execute
 
           project.latest_jira_import.refresh_jid_expiration
@@ -0,0 +1,8 @@
+---
+name: increase_jira_import_issues_timeout
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135050
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/429293
+milestone: '16.6'
+type: development
+group: group::project management
+default_enabled: false
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+class AddFieldsToBulkImportFailures < Gitlab::Database::Migration[2.2]
+  milestone '16.6'
+
+  # rubocop:disable Migration/AddLimitToTextColumns
+  def change
+    add_column :bulk_import_failures, :source_url, :text
+    add_column :bulk_import_failures, :source_title, :text
+  end
+  # rubocop:enable Migration/AddLimitToTextColumns
+end
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class AddTextLimitToBulkImportFailures < Gitlab::Database::Migration[2.2]
+  milestone '16.6'
+  disable_ddl_transaction!
+
+  def up
+    add_text_limit :bulk_import_failures, :source_url, 255
+    add_text_limit :bulk_import_failures, :source_title, 255
+  end
+
+  def down
+    remove_text_limit :bulk_import_failures, :source_url
+    remove_text_limit :bulk_import_failures, :source_title
+  end
+end
@@ -0,0 +1 @@
+6103bd075183ce4196dee2b140cb960f075cc7d3f4fc4f370bb6217c3ff1e758
@@ -0,0 +1 @@
+9627d5af229e51bee8a5a8c47beedf5bd0b3b2ce89f4cc209fe96089e662c749
@@ -13118,10 +13118,14 @@ CREATE TABLE bulk_import_failures (
     exception_message text NOT NULL,
     correlation_id_value text,
     pipeline_step text,
+    source_url text,
+    source_title text,
     CONSTRAINT check_053d65c7a4 CHECK ((char_length(pipeline_class) <= 255)),
     CONSTRAINT check_6eca8f972e CHECK ((char_length(exception_message) <= 255)),
     CONSTRAINT check_721a422375 CHECK ((char_length(pipeline_step) <= 255)),
+    CONSTRAINT check_74414228d4 CHECK ((char_length(source_title) <= 255)),
     CONSTRAINT check_c7dba8398e CHECK ((char_length(exception_class) <= 255)),
+    CONSTRAINT check_e035a720ad CHECK ((char_length(source_url) <= 255)),
     CONSTRAINT check_e787285882 CHECK ((char_length(correlation_id_value) <= 255))
 );
@@ -257,3 +257,24 @@ curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab
   "updated_at": "2021-06-18T09:46:27.003Z"
 }
 ```
+
+## Get list of failed import records for group or project migration entity
+
+```plaintext
+GET /bulk_imports/:id/entities/:entity_id/failures
+```
+
+```shell
+curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/bulk_imports/1/entities/2/failures"
+```
+
+```json
+{
+  "relation": "issues",
+  "exception_message": "Error!",
+  "exception_class": "StandardError",
+  "correlation_id_value": "06289e4b064329a69de7bb2d7a1b5a97",
+  "source_url": "https://gitlab.example/project/full/path/-/issues/1",
+  "source_title": "Issue title"
+}
+```
@@ -29,10 +29,9 @@ To enable Continuous Vulnerability Scanning:
 
 - Enable the Continuous Vulnerability Scanning setting in the project's [security configuration](../configuration/index.md).
 - Enable [Dependency Scanning](../dependency_scanning/index.md#configuration) and ensure that its prerequisites are met.
+- On GitLab self-managed only, you can [choose package registry metadata to synchronize](../../../administration/settings/security_and_compliance.md#choose-package-registry-metadata-to-sync) in the Admin Area for the GitLab instance. For this data synchronization to work, you must allow outbound network traffic from your GitLab instance to the domain `storage.googleapis.com`. If you have limited or no network connectivity then please refer to the documentation section [running in an offline environment](#running-in-an-offline-environment) for further guidance.
 
-On GitLab self-managed only, you can [choose package registry metadata to sync](../../../administration/settings/security_and_compliance.md#choose-package-registry-metadata-to-sync) in the Admin Area for the GitLab instance.
+### Running in an offline environment
 
-### Requirements for offline environments
-
 For self-managed GitLab instances in an environment with limited, restricted, or intermittent access to external resources through the internet,
 some adjustments are required to successfully scan CycloneDX reports for vulnerabilities.
@@ -259,7 +259,7 @@ A finding's primary identifier is a value that is unique to each finding. The ex
 of the finding's [first identifier](https://gitlab.com/gitlab-org/security-products/security-report-schemas/-/blob/v2.4.0-rc1/dist/sast-report-format.json#L228)
 combine to create the value.
 
-Examples of primary identifiers include `PluginID` for OWASP Zed Attack Proxy (ZAP), or `CVE` for
+Examples of primary identifiers include `PluginID` for Zed Attack Proxy (ZAP), or `CVE` for
 Trivy. The identifier must be stable. Subsequent scans must return the same value for the
 same finding, even if the location has slightly changed.
 
@@ -22,16 +22,11 @@ Licenses not in the SPDX list are reported as "Unknown". License information can
 
 ## Configuration
 
-Prerequisites:
+To enable License scanning of CycloneDX files:
 
-- On GitLab self-managed only, enable [Synchronization with the GitLab License Database](../../../administration/settings/security_and_compliance.md#choose-package-registry-metadata-to-sync) in the Admin Area for the GitLab instance. On GitLab SaaS this step has already been completed.
 - Enable [Dependency Scanning](../../application_security/dependency_scanning/index.md#enabling-the-analyzer)
   and ensure that its prerequisites are met.
+- On GitLab self-managed only, you can [choose package registry metadata to synchronize](../../../administration/settings/security_and_compliance.md#choose-package-registry-metadata-to-sync) in the Admin Area for the GitLab instance. For this data synchronization to work, you must allow outbound network traffic from your GitLab instance to the domain `storage.googleapis.com`. If you have limited or no network connectivity then please refer to the documentation section [running in an offline environment](#running-in-an-offline-environment) for further guidance.
 
-From the `.gitlab-ci.yml` file, remove the deprecated line `Jobs/License-Scanning.gitlab-ci.yml`, if
-it's present.
-
-On GitLab self-managed only, you can [choose package registry metadata to sync](../../../administration/settings/security_and_compliance.md#choose-package-registry-metadata-to-sync) in the Admin Area for the GitLab instance.
-
 ## Supported languages and package managers
 
Binary image files changed (contents not shown): three images removed (18 KiB, 18 KiB, and 43 KiB) and three images added (8.3 KiB, 14 KiB, and 11 KiB).
@@ -192,7 +192,7 @@ To add an internal note:
 1. Below the comment, select the **Make this an internal note** checkbox.
 1. Select **Add internal note**.
 
 
 
 You can also mark an [issue as confidential](../project/issues/confidential_issues.md).
 
@@ -233,7 +233,7 @@ You can assign an issue to a user who made a comment.
 
 1. In the comment, select the **More Actions** (**{ellipsis_v}**) menu.
 1. Select **Assign to commenting user**:
    
 1. To unassign the commenter, select the button again.
 
 ## Create a thread by replying to a standard comment
@@ -272,9 +272,9 @@ To create a thread:
 1. From the list, select **Start thread**.
 1. Select **Start thread** again.
 
-A threaded comment is created.
+
 
-
+A threaded comment is created.
 
 ## Resolve a thread
 
@@ -88,7 +88,8 @@ Create a deploy token to automate deployment tasks that can run independently of
 
 Prerequisites:
 
-- You must have at least the Maintainer role for the project or group.
+- To create a group deploy token, you must have the Owner role for the group.
+- To create a project deploy token, you must have at least the Maintainer role for the project.
 
 1. On the left sidebar, select **Search or go to** and find your project or group.
 1. Select **Settings > Repository**.
@@ -106,7 +107,8 @@ Revoke a token when it's no longer required.
 
 Prerequisites:
 
-- You must have at least the Maintainer role for the project or group.
+- To revoke a group deploy token, you must have the Owner role for the group.
+- To revoke a project deploy token, you must have at least the Maintainer role for the project.
 
 To revoke a deploy token:
 
@@ -214,6 +214,23 @@ module API
         get ':import_id/entities/:entity_id' do
           present bulk_import_entity, with: Entities::BulkImports::Entity
         end
+
+        desc 'Get GitLab Migration entity failures' do
+          detail 'This feature was introduced in GitLab 16.6'
+          success code: 200, model: Entities::BulkImports::EntityFailure
+          failure [
+            { code: 401, message: 'Unauthorized' },
+            { code: 404, message: 'Not found' },
+            { code: 503, message: 'Service unavailable' }
+          ]
+        end
+        params do
+          requires :import_id, type: Integer, desc: "The ID of user's GitLab Migration"
+          requires :entity_id, type: Integer, desc: "The ID of GitLab Migration entity"
+        end
+        get ':import_id/entities/:entity_id/failures' do
+          present paginate(bulk_import_entity.failures), with: Entities::BulkImports::EntityFailure
+        end
       end
     end
   end
@@ -4,18 +4,14 @@ module API
   module Entities
     module BulkImports
       class EntityFailure < Grape::Entity
-        expose :relation, documentation: { type: 'string', example: 'group' }
-        expose :pipeline_step, as: :step, documentation: { type: 'string', example: 'extractor' }
+        expose :relation, documentation: { type: 'string', example: 'label' }
         expose :exception_message, documentation: { type: 'string', example: 'error message' } do |failure|
           ::Projects::ImportErrorFilter.filter_message(failure.exception_message.truncate(72))
         end
         expose :exception_class, documentation: { type: 'string', example: 'Exception' }
         expose :correlation_id_value, documentation: { type: 'string', example: 'dfcf583058ed4508e4c7c617bd7f0edd' }
-        expose :created_at, documentation: { type: 'dateTime', example: '2012-05-28T04:42:42-07:00' }
-        expose :pipeline_class, documentation: {
-          type: 'string', example: 'BulkImports::Groups::Pipelines::GroupPipeline'
-        }
-        expose :pipeline_step, documentation: { type: 'string', example: 'extractor' }
+        expose :source_url, documentation: { type: 'string', example: 'https://source.gitlab.com/group/-/epics/1' }
+        expose :source_title, documentation: { type: 'string', example: 'title' }
       end
     end
   end
@@ -25,7 +25,7 @@ module BulkImports
         end
       end
 
-      run_pipeline_step(:loader, loader.class.name) do
+      run_pipeline_step(:loader, loader.class.name, entry) do
         loader.load(context, entry)
       end
 
@@ -49,7 +49,7 @@ module BulkImports
 
    private # rubocop:disable Lint/UselessAccessModifier
 
-    def run_pipeline_step(step, class_name = nil)
+    def run_pipeline_step(step, class_name = nil, entry = nil)
       raise MarkedAsFailedError if context.entity.failed?
 
       info(pipeline_step: step, step_class: class_name)
@@ -65,11 +65,11 @@ module BulkImports
     rescue BulkImports::NetworkError => e
       raise BulkImports::RetryPipelineError.new(e.message, e.retry_delay) if e.retriable?(context.tracker)
 
-      log_and_fail(e, step)
+      log_and_fail(e, step, entry)
     rescue BulkImports::RetryPipelineError
       raise
     rescue StandardError => e
-      log_and_fail(e, step)
+      log_and_fail(e, step, entry)
     end
 
     def extracted_data_from
@@ -95,8 +95,8 @@ module BulkImports
       run if extracted_data.has_next_page?
     end
 
-    def log_and_fail(exception, step)
-      log_import_failure(exception, step)
+    def log_and_fail(exception, step, entry = nil)
+      log_import_failure(exception, step, entry)
 
       if abort_on_failure?
         tracker.fail_op!
@@ -114,7 +114,7 @@ module BulkImports
       tracker.skip!
     end
 
-    def log_import_failure(exception, step)
+    def log_import_failure(exception, step, entry)
       failure_attributes = {
         bulk_import_entity_id: context.entity.id,
         pipeline_class: pipeline,
@@ -124,6 +124,11 @@ module BulkImports
         correlation_id_value: Labkit::Correlation::CorrelationId.current_or_new_id
       }
 
+      if entry
+        failure_attributes[:source_url] = BulkImports::SourceUrlBuilder.new(context, entry).url
+        failure_attributes[:source_title] = entry.try(:title) || entry.try(:name)
+      end
+
       log_exception(
         exception,
         log_params(
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+module BulkImports
+  class SourceUrlBuilder
+    ALLOWED_RELATIONS = %w[
+      issues
+      merge_requests
+      epics
+      milestones
+    ].freeze
+
+    attr_reader :context, :entity, :entry
+
+    # @param [BulkImports::Pipeline::Context] context
+    # @param [ApplicationRecord] entry
+    def initialize(context, entry)
+      @context = context
+      @entity = context.entity
+      @entry = entry
+    end
+
+    # Builds a source URL for the given entry if iid is present
+    def url
+      return unless entry.is_a?(ApplicationRecord)
+      return unless iid
+      return unless ALLOWED_RELATIONS.include?(relation)
+
+      File.join(source_instance_url, group_prefix, source_full_path, '-', relation, iid.to_s)
+    end
+
+    private
+
+    def iid
+      @iid ||= entry.try(:iid)
+    end
+
+    def relation
+      @relation ||= context.tracker.pipeline_class.relation
+    end
+
+    def source_instance_url
+      @source_instance_url ||= context.bulk_import.configuration.url
+    end
+
+    def source_full_path
+      @source_full_path ||= entity.source_full_path
+    end
+
+    # Group milestone (or epic) url is /groups/:group_path/-/milestones/:iid
+    # Project milestone url is /:project_path/-/milestones/:iid
+    def group_prefix
+      return '' if entity.project?
+
+      entity.pluralized_name
+    end
+  end
+end
@@ -34,6 +34,17 @@ module Gitlab
         request_params[:headers][:Cookie] = get_cookies if options[:use_cookies]
         request_params[:base_uri] = uri.to_s
         request_params.merge!(auth_params)
+        # Setting defaults here so we can also set `timeout` which prevents setting defaults in the HTTP gem's code
+        request_params[:open_timeout] = options[:open_timeout] || default_timeout_for(:open_timeout)
+        request_params[:read_timeout] = options[:read_timeout] || default_timeout_for(:read_timeout)
+        request_params[:write_timeout] = options[:write_timeout] || default_timeout_for(:write_timeout)
+        # Global timeout. Needs to be at least as high as the maximum defined in other timeouts
+        request_params[:timeout] = [
+          Gitlab::HTTP::DEFAULT_READ_TOTAL_TIMEOUT,
+          request_params[:open_timeout],
+          request_params[:read_timeout],
+          request_params[:write_timeout]
+        ].max
 
         result = Gitlab::HTTP.public_send(http_method, path, **request_params) # rubocop:disable GitlabSecurity/PublicSend
         @authenticated = result.response.is_a?(Net::HTTPOK)
@@ -52,6 +63,10 @@ module Gitlab
 
       private
 
+      def default_timeout_for(param)
+        Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS[param]
+      end
+
       def auth_params
         return {} unless @options[:username] && @options[:password]
 
@@ -5,7 +5,7 @@ module Gitlab
     class BaseImporter
       attr_reader :project, :client, :formatter, :jira_project_key, :running_import
 
-      def initialize(project)
+      def initialize(project, client = nil)
         Gitlab::JiraImport.validate_project_settings!(project)
 
         @running_import = project.latest_jira_import
@@ -14,7 +14,7 @@ module Gitlab
         raise Projects::ImportService::Error, _('Unable to find Jira project to import data from.') unless @jira_project_key
 
         @project = project
-        @client = project.jira_integration.client
+        @client = client || project.jira_integration.client
         @formatter = Gitlab::ImportFormatter.new
       end
 
@@ -10,7 +10,7 @@ module Gitlab
 
       attr_reader :imported_items_cache_key, :start_at, :job_waiter
 
-      def initialize(project)
+      def initialize(project, client = nil)
         super
         # get cached start_at value, or zero if not cached yet
         @start_at = Gitlab::JiraImport.get_issues_next_start_at(project.id)
@@ -8,33 +8,33 @@ module QA
         include QA::Page::Settings::Common
 
         view 'app/assets/javascripts/ci/ci_variable_list/components/ci_variable_drawer.vue' do
-          element :ci_variable_key_field
-          element :ci_variable_value_field
-          element :ci_variable_save_button
+          element 'ci-variable-key'
+          element 'ci-variable-value'
+          element 'ci-variable-confirm-button'
         end
 
         def fill_variable(key, value, masked = false)
-          within_element(:ci_variable_key_field) { find('input').set key }
-          fill_element :ci_variable_value_field, value
+          within_element('ci-variable-key') { find('input').set key }
+          fill_element 'ci-variable-value', value
           click_ci_variable_save_button
 
           wait_until(reload: false) do
-            within_element('ci-variable-table') { has_element?(:edit_ci_variable_button) }
+            within_element('ci-variable-table') { has_element?('edit-ci-variable-button') }
           end
         end
 
         def click_add_variable
-          click_element :add_ci_variable_button
+          click_element 'add-ci-variable-button'
         end
 
         def click_edit_ci_variable
           within_element('ci-variable-table') do
-            click_element :edit_ci_variable_button
+            click_element 'edit-ci-variable-button'
           end
         end
 
         def click_ci_variable_save_button
-          click_element :ci_variable_save_button
+          click_element 'ci-variable-confirm-button'
         end
       end
     end
@@ -123,7 +123,12 @@ module QA
           access_token: ENV['QA_LARGE_IMPORT_GH_TOKEN'] || Runtime::Env.github_access_token,
           per_page: 100,
           middleware: Faraday::RackBuilder.new do |builder|
-            builder.use(Faraday::Retry::Middleware, exceptions: [Octokit::InternalServerError, Octokit::ServerError])
+            builder.use(Faraday::Retry::Middleware,
+              max: 3,
+              interval: 1,
+              retry_block: ->(exception:, **) { logger.warn("Request to GitHub failed: '#{exception}', retrying") },
+              exceptions: [Octokit::InternalServerError, Octokit::ServerError]
+            )
             builder.use(Faraday::Response::RaiseError) # faraday retry swallows errors, so it needs to be re-raised
           end
         )
@@ -161,52 +166,33 @@ module QA
       end
 
       let(:gh_issues) do
-        issues = gh_all_issues.reject(&:pull_request).each_with_object({}) do |issue, hash|
+        gh_all_issues.reject(&:pull_request).each_with_object({}) do |issue, hash|
           id = issue.number
+          logger.debug("- Fetching comments and events for issue #{id} -")
           hash[id] = {
             url: issue.html_url,
             title: issue.title,
             body: issue.body || '',
-            comments: gh_issue_comments[id]
+            comments: fetch_issuable_comments(id, "issue"),
+            events: fetch_issuable_events(id)
           }
         end
-
-        fetch_github_events(issues, "issue")
       end
 
       let(:gh_prs) do
-        prs = gh_all_issues.select(&:pull_request).each_with_object({}) do |pr, hash|
+        gh_all_issues.select(&:pull_request).each_with_object({}) do |pr, hash|
           id = pr.number
+          logger.debug("- Fetching comments and events for pr #{id} -")
           hash[id] = {
             url: pr.html_url,
             title: pr.title,
             body: pr.body || '',
-            comments: [*gh_pr_comments[id], *gh_issue_comments[id]].compact
+            comments: fetch_issuable_comments(id, "pr"),
+            events: fetch_issuable_events(id)
           }
         end
-
-        fetch_github_events(prs, "pr")
       end
 
-      # rubocop:disable Layout/LineLength
-      let(:gh_issue_comments) do
-        logger.info("- Fetching issue comments -")
-        with_paginated_request { github_client.issues_comments(github_repo) }.each_with_object(Hash.new { |h, k| h[k] = [] }) do |c, hash|
-          hash[id_from_url(c.html_url)] << c.body&.gsub(gh_link_pattern, dummy_url)
-        end
-      end
-
-      let(:gh_pr_comments) do
-        logger.info("- Fetching pr comments -")
-        with_paginated_request { github_client.pull_requests_comments(github_repo) }.each_with_object(Hash.new { |h, k| h[k] = [] }) do |c, hash|
-          hash[id_from_url(c.html_url)] << c.body
-            # some suggestions can contain extra whitespaces which gitlab will remove
-            &.gsub(/suggestion\s+\r/, "suggestion\r")
-            &.gsub(gh_link_pattern, dummy_url)
-        end
-      end
-      # rubocop:enable Layout/LineLength
-
       let(:imported_project) do
         Resource::ProjectImportedFromGithub.fabricate_via_api! do |project|
           project.add_name_uuid = false
|
||||||
issue_events: gl_issues.sum { |_k, v| v[:events].length }
|
issue_events: gl_issues.sum { |_k, v| v[:events].length }
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
not_imported: {
|
diff: {
|
||||||
mrs: @mr_diff,
|
mrs: @mr_diff,
|
||||||
issues: @issue_diff
|
issues: @issue_diff
|
||||||
}
|
}
|
||||||
|
|
@ -415,24 +401,35 @@ module QA
|
||||||
#
|
#
|
||||||
private
|
private
|
||||||
|
|
||||||
# Fetch github events and add to issue object
|
# Fetch issuable object comments
|
||||||
#
|
#
|
||||||
# @param [Hash] issuables
|
# @param [Integer] id
|
||||||
# @param [String] type
|
# @param [String] type
|
||||||
# @return [Hash]
|
# @return [Array]
|
||||||
def fetch_github_events(issuables, type)
|
def fetch_issuable_comments(id, type)
|
||||||
logger.info("- Fetching #{type} events -")
|
pr = type == "pr"
|
||||||
issuables.to_h do |id, issuable|
|
comments = []
|
||||||
logger.debug("Fetching events for #{type} !#{id}")
|
# every pr is also an issue, so when fetching pr comments, issue endpoint has to be used as well
|
||||||
events = with_paginated_request { github_client.issue_events(github_repo, id) }
|
comments.push(*with_paginated_request { github_client.issue_comments(github_repo, id) })
|
||||||
.map { |event| event[:event] }
|
comments.push(*with_paginated_request { github_client.pull_request_comments(github_repo, id) }) if pr
|
||||||
.reject { |event| unsupported_events.include?(event) }
|
comments.map! { |comment| comment.body&.gsub(gh_link_pattern, dummy_url) }
|
||||||
|
return comments unless pr
|
||||||
|
|
||||||
[id, issuable.merge({ events: events })]
|
# some suggestions can contain extra whitespaces which gitlab will remove
|
||||||
end
|
comments.map { |comment| comment.gsub(/suggestion\s+\r/, "suggestion\r") }
|
||||||
end
|
end
|
||||||
|
|
||||||
# Verify imported mrs or issues and return missing items
|
# Fetch issuable object events
|
||||||
|
#
|
||||||
|
# @param [Integer] id
|
||||||
|
# @return [Array]
|
||||||
|
def fetch_issuable_events(id)
|
||||||
|
with_paginated_request { github_client.issue_events(github_repo, id) }
|
||||||
|
.map { |event| event[:event] }
|
||||||
|
.reject { |event| unsupported_events.include?(event) }
|
||||||
|
end
|
||||||
|
|
||||||
|
# Verify imported mrs or issues and return content diff
|
||||||
#
|
#
|
||||||
# @param [String] type verification object, 'mrs' or 'issues'
|
# @param [String] type verification object, 'mrs' or 'issues'
|
||||||
# @return [Hash]
|
# @return [Hash]
|
||||||
|
|
@ -443,18 +440,20 @@ module QA
|
||||||
actual = type == 'mr' ? mrs : gl_issues
|
actual = type == 'mr' ? mrs : gl_issues
|
||||||
|
|
||||||
missing_objects = (expected.keys - actual.keys).map { |it| expected[it].slice(:title, :url) }
|
missing_objects = (expected.keys - actual.keys).map { |it| expected[it].slice(:title, :url) }
|
||||||
|
extra_objects = (actual.keys - expected.keys).map { |it| actual[it].slice(:title, :url) }
|
||||||
count_msg = <<~MSG
|
count_msg = <<~MSG
|
||||||
Expected to contain all of GitHub's #{type}s. Gitlab: #{actual.length}, Github: #{expected.length}.
|
Expected to contain all of GitHub's #{type}s. Gitlab: #{actual.length}, Github: #{expected.length}.
|
||||||
Missing: #{missing_objects.map { |it| it[:url] }}
|
Missing: #{missing_objects.map { |it| it[:url] }}
|
||||||
MSG
|
MSG
|
||||||
expect(expected.length <= actual.length).to be_truthy, count_msg
|
expect(expected.length <= actual.length).to be_truthy, count_msg
|
||||||
|
|
||||||
missing_content = verify_comments_and_events(type, actual, expected)
|
content_diff = verify_comments_and_events(type, actual, expected)
|
||||||
|
|
||||||
{
|
{
|
||||||
"#{type}s": missing_objects.empty? ? nil : missing_objects,
|
"extra_#{type}s": extra_objects,
|
||||||
"#{type}_content": missing_content.empty? ? nil : missing_content
|
"missing_#{type}s": missing_objects,
|
||||||
}.compact
|
"#{type}_content_diff": content_diff
|
||||||
|
}.compact_blank
|
||||||
end
|
end
|
||||||
|
|
||||||
# Verify imported comments and events
|
# Verify imported comments and events
|
||||||
|
|
@ -464,7 +463,7 @@ module QA
|
||||||
# @param [Hash] expected
|
# @param [Hash] expected
|
||||||
# @return [Hash]
|
# @return [Hash]
|
||||||
def verify_comments_and_events(type, actual, expected)
|
def verify_comments_and_events(type, actual, expected)
|
||||||
actual.each_with_object([]) do |(key, actual_item), missing_content|
|
actual.each_with_object([]) do |(key, actual_item), content_diff|
|
||||||
expected_item = expected[key]
|
expected_item = expected[key]
|
||||||
title = actual_item[:title]
|
title = actual_item[:title]
|
||||||
msg = "expected #{type} with iid '#{key}' to have"
|
msg = "expected #{type} with iid '#{key}' to have"
|
||||||
|
|
@ -498,19 +497,23 @@ module QA
|
||||||
MSG
|
MSG
|
||||||
expect(actual_events).to include(*expected_events), event_count_msg
|
expect(actual_events).to include(*expected_events), event_count_msg
|
||||||
|
|
||||||
# Save missing comments and events
|
# Save comment and event diff
|
||||||
#
|
#
|
||||||
comment_diff = expected_comments - actual_comments
|
missing_comments = expected_comments - actual_comments
|
||||||
event_diff = expected_events - actual_events
|
extra_comments = actual_comments - expected_comments
|
||||||
next if comment_diff.empty? && event_diff.empty?
|
missing_events = expected_events - actual_events
|
||||||
|
extra_events = actual_events - expected_events
|
||||||
|
next if [missing_comments, missing_events, extra_comments, extra_events].all?(&:empty?)
|
||||||
|
|
||||||
missing_content << {
|
content_diff << {
|
||||||
title: title,
|
title: title,
|
||||||
github_url: expected_item[:url],
|
github_url: expected_item[:url],
|
||||||
gitlab_url: actual_item[:url],
|
gitlab_url: actual_item[:url],
|
||||||
missing_comments: comment_diff.empty? ? nil : comment_diff,
|
missing_comments: missing_comments,
|
||||||
missing_events: event_diff.empty? ? nil : event_diff
|
extra_comments: extra_comments,
|
||||||
}.compact
|
missing_events: missing_events,
|
||||||
|
extra_events: extra_events
|
||||||
|
}.compact_blank
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
@ -671,16 +674,6 @@ module QA
|
||||||
File.open("tmp/github-import-data.json", "w") { |file| file.write(JSON.pretty_generate(json)) }
|
File.open("tmp/github-import-data.json", "w") { |file| file.write(JSON.pretty_generate(json)) }
|
||||||
end
|
end
|
||||||
|
|
||||||
# Extract id number from web url of issue or pull request
|
|
||||||
#
|
|
||||||
# Some endpoints don't return object id as separate parameter so web url can be used as a workaround
|
|
||||||
#
|
|
||||||
# @param [String] url
|
|
||||||
# @return [Integer]
|
|
||||||
def id_from_url(url)
|
|
||||||
url.match(%r{(?<type>issues|pull)/(?<id>\d+)})&.named_captures&.fetch("id", nil).to_i
|
|
||||||
end
|
|
||||||
|
|
||||||
# Custom pagination for github requests
|
# Custom pagination for github requests
|
||||||
#
|
#
|
||||||
# Default autopagination doesn't work correctly with rate limit
|
# Default autopagination doesn't work correctly with rate limit
|
||||||
|
|
|
||||||
|
|
@@ -71,7 +71,10 @@ module QA
       end
     end
 
-    describe 'OIDC' do
+    describe 'OIDC', quarantine: {
+      issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/429723',
+      type: :flaky
+    } do
       let(:consumer_name) { 'gitlab-oidc-consumer' }
       let(:redirect_uri) { "#{consumer_host}/users/auth/openid_connect/callback" }
       let(:scopes) { %w[openid profile email] }
@@ -22,14 +22,6 @@ FactoryBot.define do
       locked { :unlocked }
     end
 
-    trait :checksummed do
-      verification_checksum { 'abc' }
-    end
-
-    trait :checksum_failure do
-      verification_failure { 'Could not calculate the checksum' }
-    end
-
     trait :expired do
       expire_at { Date.yesterday }
     end
@@ -8,13 +8,5 @@ FactoryBot.define do
       snippet_repository.shard_name = snippet_repository.snippet.repository_storage
       snippet_repository.disk_path = snippet_repository.snippet.disk_path
     end
-
-    trait(:checksummed) do
-      verification_checksum { 'abc' }
-    end
-
-    trait(:checksum_failure) do
-      verification_failure { 'Could not calculate the checksum' }
-    end
   end
 end
@@ -8,13 +8,5 @@ FactoryBot.define do
 
     sequence(:version)
     file { fixture_file_upload('spec/fixtures/terraform/terraform.tfstate', 'application/json') }
-
-    trait(:checksummed) do
-      verification_checksum { 'abc' }
-    end
-
-    trait(:checksum_failure) do
-      verification_failure { 'Could not calculate the checksum' }
-    end
   end
 end
@@ -67,9 +67,9 @@ describe('CI Variable Drawer', () => {
    });
  };

-  const findConfirmBtn = () => wrapper.findByTestId('ci-variable-confirm-btn');
+  const findConfirmBtn = () => wrapper.findByTestId('ci-variable-confirm-button');
   const findConfirmDeleteModal = () => wrapper.findComponent(GlModal);
-  const findDeleteBtn = () => wrapper.findByTestId('ci-variable-delete-btn');
+  const findDeleteBtn = () => wrapper.findByTestId('ci-variable-delete-button');
   const findDisabledEnvironmentScopeDropdown = () => wrapper.findComponent(GlFormInput);
   const findDrawer = () => wrapper.findComponent(GlDrawer);
   const findEnvironmentScopeDropdown = () => wrapper.findComponent(CiEnvironmentsDropdown);
@@ -2,7 +2,7 @@

 require 'spec_helper'

-RSpec.describe API::Entities::BulkImports::EntityFailure do
+RSpec.describe API::Entities::BulkImports::EntityFailure, feature_category: :importers do
   let_it_be(:failure) { create(:bulk_import_failure) }

   subject { described_class.new(failure).as_json }
@@ -10,11 +10,11 @@ RSpec.describe API::Entities::BulkImports::EntityFailure do
   it 'has the correct attributes' do
     expect(subject).to include(
       :relation,
-      :step,
-      :exception_class,
       :exception_message,
+      :exception_class,
       :correlation_id_value,
-      :created_at
+      :source_url,
+      :source_title
     )
   end
@@ -43,7 +43,9 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
     stub_const('BulkImports::MyPipeline', pipeline)
   end

-  let_it_be_with_reload(:entity) { create(:bulk_import_entity) }
+  let_it_be(:bulk_import) { create(:bulk_import) }
+  let_it_be(:configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
+  let_it_be_with_reload(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }

   let(:tracker) { create(:bulk_import_tracker, entity: entity) }
   let(:context) { BulkImports::Pipeline::Context.new(tracker, extra: :data) }
@@ -119,6 +121,56 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
         expect(entity.failed?).to eq(false)
       end
     end

+    context 'when failure happens during loader' do
+      before do
+        allow(tracker).to receive(:pipeline_class).and_return(BulkImports::MyPipeline)
+        allow(BulkImports::MyPipeline).to receive(:relation).and_return(relation)
+
+        allow_next_instance_of(BulkImports::Extractor) do |extractor|
+          allow(extractor).to receive(:extract).with(context).and_return(extracted_data)
+        end
+
+        allow_next_instance_of(BulkImports::Transformer) do |transformer|
+          allow(transformer).to receive(:transform).with(context, extracted_data.data.first).and_return(entry)
+        end
+
+        allow_next_instance_of(BulkImports::Loader) do |loader|
+          allow(loader).to receive(:load).with(context, entry).and_raise(StandardError, 'Error!')
+        end
+      end
+
+      context 'when entry has title' do
+        let(:relation) { 'issues' }
+        let(:entry) { Issue.new(iid: 1, title: 'hello world') }
+
+        it 'creates failure record with source url and title' do
+          subject.run
+
+          failure = entity.failures.first
+          expected_source_url = File.join(configuration.url, 'groups', entity.source_full_path, '-', 'issues', '1')
+
+          expect(failure).to be_present
+          expect(failure.source_url).to eq(expected_source_url)
+          expect(failure.source_title).to eq('hello world')
+        end
+      end
+
+      context 'when entry has name' do
+        let(:relation) { 'boards' }
+        let(:entry) { Board.new(name: 'hello world') }
+
+        it 'creates failure record with name' do
+          subject.run
+
+          failure = entity.failures.first
+
+          expect(failure).to be_present
+          expect(failure.source_url).to be_nil
+          expect(failure.source_title).to eq('hello world')
+        end
+      end
+    end
   end

   describe 'pipeline runner' do
@@ -363,7 +415,11 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do

   def extracted_data(has_next_page: false)
     BulkImports::Pipeline::ExtractedData.new(
-      data: { foo: :bar },
+      data: {
+        'foo' => 'bar',
+        'title' => 'hello world',
+        'iid' => 1
+      },
       page_info: {
         'has_next_page' => has_next_page,
         'next_page' => has_next_page ? 'cursor' : nil
@@ -2,10 +2,10 @@

 require 'spec_helper'

-RSpec.describe BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline do
+RSpec.describe BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline, feature_category: :importers do
   let_it_be(:project) { create(:project) }
   let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
-  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity, pipeline_name: described_class) }
   let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }

   let_it_be(:policy) do
@@ -2,11 +2,11 @@

 require 'spec_helper'

-RSpec.describe BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline do
+RSpec.describe BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline, feature_category: :importers do
   let_it_be(:project) { create(:project) }
   let_it_be(:bulk_import) { create(:bulk_import) }
   let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import) }
-  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity, pipeline_name: described_class) }
   let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }

   let(:attributes) { {} }
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::SourceUrlBuilder, feature_category: :importers do
+  let_it_be(:bulk_import) { create(:bulk_import) }
+  let_it_be(:configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
+
+  let(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
+  let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+  let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+  let(:entry) { Issue.new(iid: 1, title: 'hello world') }
+
+  describe '#url' do
+    subject { described_class.new(context, entry) }
+
+    before do
+      allow(subject).to receive(:relation).and_return('issues')
+    end
+
+    context 'when relation is allowed' do
+      context 'when entity is a group' do
+        it 'returns the url specific to groups' do
+          expected_url = File.join(
+            configuration.url,
+            'groups',
+            entity.source_full_path,
+            '-',
+            'issues',
+            '1'
+          )
+
+          expect(subject.url).to eq(expected_url)
+        end
+      end
+
+      context 'when entity is a project' do
+        let(:entity) { create(:bulk_import_entity, :project_entity, bulk_import: bulk_import) }
+
+        it 'returns the url' do
+          expected_url = File.join(
+            configuration.url,
+            entity.source_full_path,
+            '-',
+            'issues',
+            '1'
+          )
+
+          expect(subject.url).to eq(expected_url)
+        end
+      end
+    end
+
+    context 'when entry is not an ApplicationRecord' do
+      let(:entry) { 'not an ApplicationRecord' }
+
+      it 'returns nil' do
+        expect(subject.url).to be_nil
+      end
+    end
+
+    context 'when relation is not allowed' do
+      it 'returns nil' do
+        allow(subject).to receive(:relation).and_return('not_allowed')
+
+        expect(subject.url).to be_nil
+      end
+    end
+
+    context 'when entry has no iid' do
+      let(:entry) { Issue.new }
+
+      it 'returns nil' do
+        expect(subject.url).to be_nil
+      end
+    end
+  end
+end
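The new spec above pins down the URL shape the builder is expected to produce: group entities get a `groups/` path segment, project entities do not, and `nil` is returned when the relation is not allowed or the record has no `iid`. Below is a minimal, framework-free sketch of that behaviour; the class name, constructor arguments, and allow-list are assumptions for illustration only, not the actual `BulkImports::SourceUrlBuilder` implementation.

```ruby
# Hypothetical sketch only; the real builder lives in the GitLab codebase and may differ.
class SourceUrlSketch
  ALLOWED_RELATIONS = %w[issues merge_requests epics milestones].freeze # assumed allow-list

  def initialize(config_url:, group_entity:, source_full_path:)
    @config_url = config_url            # base URL of the source instance
    @group_entity = group_entity        # true for group entities, false for projects
    @source_full_path = source_full_path
  end

  # Returns the source URL for an imported record, or nil when it cannot be built.
  def url(relation:, iid:)
    return unless ALLOWED_RELATIONS.include?(relation)
    return unless iid

    segments = [@source_full_path, '-', relation, iid.to_s]
    segments.unshift('groups') if @group_entity # group URLs carry an extra segment

    File.join(@config_url, *segments)
  end
end

builder = SourceUrlSketch.new(
  config_url: 'https://source.example.com', # hypothetical source instance
  group_entity: true,
  source_full_path: 'my-group'
)
puts builder.url(relation: 'issues', iid: 1)
# => https://source.example.com/groups/my-group/-/issues/1
```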
@@ -603,6 +603,17 @@ RSpec.describe Integrations::Jira, feature_category: :integrations do
         jira_integration.client.get('/foo')
       end

+      context 'when a custom read_timeout option is passed as an argument' do
+        it 'uses the default GitLab::HTTP timeouts plus a custom read_timeout' do
+          expected_timeouts = Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS.merge(read_timeout: 2.minutes, timeout: 2.minutes)
+
+          expect(Gitlab::HTTP_V2::Client).to receive(:httparty_perform_request)
+            .with(Net::HTTP::Get, '/foo', hash_including(expected_timeouts)).and_call_original
+
+          jira_integration.client(read_timeout: 2.minutes).get('/foo')
+        end
+      end
+
       context 'with basic auth' do
         before do
           jira_integration.jira_auth_type = 0
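The new example above asserts that a per-call `read_timeout` is layered on top of the default HTTP timeout options. A small sketch of that merging pattern follows; the constant values and method name are placeholders, not GitLab's actual `Gitlab::HTTP` API.

```ruby
# Illustrative only; names and default values are assumptions.
DEFAULT_TIMEOUT_OPTIONS = { open_timeout: 10, read_timeout: 20, write_timeout: 30 }.freeze

def request_options(read_timeout: nil)
  overrides = {}
  if read_timeout
    overrides[:read_timeout] = read_timeout # raise the per-request read timeout
    overrides[:timeout] = read_timeout      # keep the overall timeout in step with it
  end
  DEFAULT_TIMEOUT_OPTIONS.merge(overrides)
end

p request_options                     # defaults unchanged
p request_options(read_timeout: 120)  # read_timeout and timeout overridden to 120
```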
@@ -394,7 +394,7 @@ RSpec.describe API::BulkImports, feature_category: :importers do

       expect(response).to have_gitlab_http_status(:ok)
       expect(json_response.pluck('id')).to contain_exactly(entity_3.id)
-      expect(json_response.first['failures'].first['exception_class']).to eq(failure_3.exception_class)
+      expect(json_response.first['failures'].first['exception_message']).to eq(failure_3.exception_message)
     end

     it_behaves_like 'disabled feature'
@@ -420,4 +420,17 @@ RSpec.describe API::BulkImports, feature_category: :importers do
       expect(response).to have_gitlab_http_status(:unauthorized)
     end
   end
+
+  describe 'GET /bulk_imports/:id/entities/:entity_id/failures' do
+    let(:request) { get api("/bulk_imports/#{import_2.id}/entities/#{entity_3.id}/failures", user) }
+
+    it 'returns specified entity failures' do
+      request
+
+      expect(response).to have_gitlab_http_status(:ok)
+      expect(json_response.first['exception_message']).to eq(failure_3.exception_message)
+    end
+
+    it_behaves_like 'disabled feature'
+  end
 end
@@ -30,6 +30,12 @@ RSpec.describe Issuable::DiscussionsListService, feature_category: :team_plannin
         expect(discussions_service.execute).to be_empty
       end
     end
+
+    context 'when issue exists at the group level' do
+      let_it_be(:issuable) { create(:issue, :group_level, namespace: group) }
+
+      it_behaves_like 'listing issuable discussions', :guest, 1, 7
+    end
   end

   describe 'fetching notes for merge requests' do
@@ -25,7 +25,11 @@ RSpec.describe Gitlab::JiraImport::Stage::ImportIssuesWorker, feature_category:
   end

   context 'when import started', :clean_gitlab_redis_cache do
-    let_it_be(:jira_integration) { create(:jira_integration, project: project) }
+    let(:job_waiter) { Gitlab::JobWaiter.new(2, 'some-job-key') }
+
+    before_all do
+      create(:jira_integration, project: project)
+    end

     before do
       jira_import.start!
@@ -34,6 +38,40 @@ RSpec.describe Gitlab::JiraImport::Stage::ImportIssuesWorker, feature_category:
       end
     end

+    it 'uses a custom http client for the issues importer' do
+      jira_integration = project.jira_integration
+      client = instance_double(JIRA::Client)
+      issue_importer = instance_double(Gitlab::JiraImport::IssuesImporter)
+
+      allow(Project).to receive(:find_by_id).with(project.id).and_return(project)
+      allow(issue_importer).to receive(:execute).and_return(job_waiter)
+
+      expect(jira_integration).to receive(:client).with(read_timeout: 2.minutes).and_return(client)
+      expect(Gitlab::JiraImport::IssuesImporter).to receive(:new).with(
+        project,
+        client
+      ).and_return(issue_importer)
+
+      described_class.new.perform(project.id)
+    end
+
+    context 'when increase_jira_import_issues_timeout feature flag is disabled' do
+      before do
+        stub_feature_flags(increase_jira_import_issues_timeout: false)
+      end
+
+      it 'does not provide a custom client to IssuesImporter' do
+        issue_importer = instance_double(Gitlab::JiraImport::IssuesImporter)
+        expect(Gitlab::JiraImport::IssuesImporter).to receive(:new).with(
+          instance_of(Project),
+          nil
+        ).and_return(issue_importer)
+        allow(issue_importer).to receive(:execute).and_return(job_waiter)
+
+        described_class.new.perform(project.id)
+      end
+    end
+
     context 'when start_at is nil' do
       it_behaves_like 'advance to next stage', :attachments
     end