Add latest changes from gitlab-org/gitlab@master

parent fd8183c340
commit 7d8fc3b6b6

@@ -73,7 +73,6 @@ yourself as a reviewer if it's not ready for merge yet.
- [ ] Title:
  - Length limit: 7 words (not including articles or prepositions).
  - Capitalization: ensure the title is [sentence cased](https://design.gitlab.com/content/punctuation#case).
  - Rewrite to exclude the words `deprecation`, `deprecate`, `removal`, and `remove` if necessary.
- [ ] Consistency:
  - Ensure that all resources (docs, deprecation, etc.) refer to the feature with the same term / feature name.
- [ ] Content:

@@ -78,7 +78,6 @@ yourself as a reviewer if it's not yet ready for merge.
- [ ] Title:
  - Length limit: 7 words (not including articles or prepositions).
  - Capitalization: ensure the title is [sentence cased](https://design.gitlab.com/content/punctuation#case).
  - Rewrite to exclude the words `removal` and `remove` if necessary.
- [ ] Consistency:
  - Ensure that all resources (docs, removal, etc.) refer to the feature with the same term / feature name.
- [ ] Content:

@@ -27,7 +27,6 @@ module Resolvers
      def preloads
        {
          previous_stage_jobs_and_needs: [:needs, :pipeline],
          previous_stage_jobs_or_needs: [:needs, :pipeline],
          artifacts: [:job_artifacts],
          pipeline: [:user]
        }

@@ -12,6 +12,11 @@ module Resolvers
    # see app/graphql/types/issue_connection.rb
    type 'Types::IssueConnection', null: true

    before_connection_authorization do |nodes, current_user|
      projects = nodes.map(&:project)
      ::Preloaders::UserMaxAccessLevelInProjectsPreloader.new(projects, current_user).execute
    end

    def resolve_with_lookahead(**args)
      return unless Feature.enabled?(:root_level_issues_query)

@@ -26,7 +26,7 @@ module Resolvers
      def preloads
        {
          previous_stage_jobs_and_needs: [:needs, :pipeline],
          previous_stage_jobs_or_needs: [:needs, :pipeline],
          artifacts: [:job_artifacts],
          pipeline: [:user]
        }

@@ -78,7 +78,7 @@ module Types
          resolver: Resolvers::Ci::PipelineStagesResolver

    field :user,
      type: Types::UserType,
      type: 'Types::UserType',
      null: true,
      description: 'Pipeline user.'

@@ -122,7 +122,7 @@ module Types
      counts = ::Ci::Runner.project_type
        .select(:id, 'COUNT(ci_runner_projects.id) as count')
        .left_outer_joins(:runner_projects)
        .where(id: ids)
        .id_in(ids)
        .group(:id)
        .index_by(&:id)

@@ -18,6 +18,8 @@ module Ci
    belongs_to :project
    belongs_to :trigger_request

    # To be removed upon :ci_bridge_remove_sourced_pipelines feature flag removal
    has_many :sourced_pipelines, class_name: "::Ci::Sources::Pipeline",
      foreign_key: :source_job_id,
      inverse_of: :source_bridge
@@ -87,8 +89,20 @@ module Ci
      end
    end

    def sourced_pipelines
      if Feature.enabled?(:ci_bridge_remove_sourced_pipelines, project)
        raise 'Ci::Bridge does not have sourced_pipelines association'
      end

      super
    end

    def has_downstream_pipeline?
      sourced_pipelines.exists?
      if Feature.enabled?(:ci_bridge_remove_sourced_pipelines, project)
        sourced_pipeline.present?
      else
        sourced_pipelines.exists?
      end
    end

    def downstream_pipeline_params

@@ -381,7 +381,7 @@ class Project < ApplicationRecord
  has_one :auto_devops, class_name: 'ProjectAutoDevops', inverse_of: :project, autosave: true
  has_many :custom_attributes, class_name: 'ProjectCustomAttribute'

  has_many :project_badges, class_name: 'ProjectBadge'
  has_many :project_badges, class_name: 'ProjectBadge', inverse_of: :project
  has_one :ci_cd_settings, class_name: 'ProjectCiCdSetting', inverse_of: :project, autosave: true, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent

  has_many :remote_mirrors, inverse_of: :project

@@ -36,6 +36,8 @@ module BulkImports
    end

    def execute
      validate_scopes!

      bulk_import = create_bulk_import

      Gitlab::Tracking.event(self.class.name, 'create', label: 'bulk_import_group')
@@ -43,7 +45,7 @@ module BulkImports
      BulkImportWorker.perform_async(bulk_import.id)

      ServiceResponse.success(payload: bulk_import)
    rescue ActiveRecord::RecordInvalid => e
    rescue ActiveRecord::RecordInvalid, BulkImports::Error => e
      ServiceResponse.error(
        message: e.message,
        http_status: :unprocessable_entity
@@ -52,6 +54,10 @@ module BulkImports

    private

    def validate_scopes!
      client.validate_import_scopes!
    end

    def create_bulk_import
      BulkImport.transaction do
        bulk_import = BulkImport.create!(
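
With this change the service reports scope-validation failures through the same `ServiceResponse` it already used for `RecordInvalid`, so callers branch on the response instead of rescuing. A minimal sketch of the calling pattern (hypothetical caller; the constructor arguments are abbreviated, not taken from this commit):

```ruby
# Illustrates the ServiceResponse contract only.
response = ::BulkImports::CreateService.new(current_user, params, credentials).execute

if response.success?
  bulk_import = response.payload # the newly created BulkImport record
else
  # messages from RecordInvalid or BulkImports::Error both land here
  render_api_error!(response.message, 422)
end
```
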
@@ -13,13 +13,15 @@ module BulkImports
    def perform(entity_id)
      entity = BulkImports::Entity.find(entity_id)

      validate_scopes!(entity)

      entity.update!(source_xid: entity_source_xid(entity)) if entity.source_xid.nil?

      request_export(entity)

      BulkImports::EntityWorker.perform_async(entity_id)
    rescue BulkImports::NetworkError => e
      if e.retriable?(entity)
    rescue BulkImports::NetworkError, BulkImports::Error => e
      if e.class != BulkImports::Error && e.retriable?(entity)
        retry_request(e, entity)
      else
        log_exception(e,
@@ -42,12 +44,16 @@ module BulkImports

    private

    def validate_scopes!(entity)
      http_client(entity).validate_import_scopes!
    end

    def request_export(entity)
      http_client(entity).post(entity.export_relations_url_path)
    end

    def http_client(entity)
      @client ||= Clients::HTTP.new(
      @client ||= BulkImports::Clients::HTTP.new(
        url: entity.bulk_import.configuration.url,
        token: entity.bulk_import.configuration.access_token
      )

@@ -61,6 +61,8 @@ module BulkImports
    def run
      return skip_tracker if entity.failed?

      validate_scopes!

      raise(Pipeline::ExpiredError, 'Pipeline timeout') if job_timeout?
      raise(Pipeline::FailedError, "Export from source instance failed: #{export_status.error}") if export_failed?
      raise(Pipeline::ExpiredError, 'Empty export status on source instance') if empty_export_timeout?
@@ -72,7 +74,7 @@ module BulkImports
      pipeline_tracker.finish!
    rescue BulkImports::RetryPipelineError => e
      retry_tracker(e)
    rescue StandardError => e
    rescue StandardError, BulkImports::Error => e
      fail_tracker(e)
    end

@@ -80,6 +82,17 @@ module BulkImports
      entity.bulk_import.source_version_info.to_s
    end

    def validate_scopes!
      client.validate_import_scopes!
    end

    def client
      @client ||= BulkImports::Clients::HTTP.new(
        url: entity.bulk_import.configuration.url,
        token: entity.bulk_import.configuration.access_token
      )
    end

    def fail_tracker(exception)
      pipeline_tracker.update!(status_event: 'fail_op', jid: jid)

@@ -0,0 +1,8 @@
---
name: ci_bridge_remove_sourced_pipelines
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/105708
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/384359
milestone: '15.7'
type: development
group: group::pipeline authoring
default_enabled: false
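
Because `default_enabled: false`, the guarded code path stays off until the flag is turned on. As a sketch, the flag can be toggled per project from a Rails console using the standard GitLab feature-flag API (the project path below is an example):

```ruby
# Actor-based rollout: enable for one project, then roll back if needed.
project = Project.find_by_full_path('gitlab-org/gitlab')
Feature.enable(:ci_bridge_remove_sourced_pipelines, project)
Feature.disable(:ci_bridge_remove_sourced_pipelines, project)
```
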
@@ -0,0 +1,8 @@
---
name: scan_execution_tags
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104954
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/385169
milestone: '15.7'
type: development
group: group::security policies
default_enabled: false

@@ -39,5 +39,4 @@ tier:
performance_indicator_type:
- gmau
- paid_gmau
- smau
milestone: "<13.9"

@@ -0,0 +1,12 @@
- title: 'The Phabricator task importer is deprecated'
  announcement_milestone: '15.7'
  announcement_date: '2022-12-22'
  removal_milestone: '16.0'
  removal_date: '2023-05-22'
  breaking_change: true
  body: |
    The [Phabricator task importer](https://docs.gitlab.com/ee/user/project/import/phabricator.html) is being deprecated. Phabricator itself as a project is no longer actively maintained since June 1, 2021. We haven't observed imports using this tool. There has been no activity on the open related issues on GitLab.
  stage: manage
  tiers:
  issue_url: https://gitlab.com/gitlab-com/Product/-/issues/4894
  documentation_url: https://docs.gitlab.com/ee/user/project/import/phabricator.html

@@ -0,0 +1,33 @@
# frozen_string_literal: true

class ScheduleDisableLegacyOpenSourceLicenseForProjectsLessThanFiveMb < Gitlab::Database::Migration[2.0]
  MIGRATION = 'DisableLegacyOpenSourceLicenseForProjectsLessThanFiveMb'
  INTERVAL = 2.minutes
  BATCH_SIZE = 4_000
  MAX_BATCH_SIZE = 50_000
  SUB_BATCH_SIZE = 250

  disable_ddl_transaction!

  restrict_gitlab_migration gitlab_schema: :gitlab_main

  def up
    return unless Gitlab.com?

    queue_batched_background_migration(
      MIGRATION,
      :project_settings,
      :project_id,
      job_interval: INTERVAL,
      batch_size: BATCH_SIZE,
      max_batch_size: MAX_BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    return unless Gitlab.com?

    delete_batched_background_migration(MIGRATION, :project_settings, :project_id, [])
  end
end

@@ -0,0 +1 @@
0cb708a3cb823676e473bf961aa6920d156598c3c5455b87c89cb1833dfb509c

@@ -116,7 +116,7 @@ if you are using that type:
Troubleshooting guides are available for some CI/CD features and related topics:

- [Container Registry](../user/packages/container_registry/index.md#troubleshooting-the-gitlab-container-registry)
- [Container Registry](../user/packages/container_registry/troubleshoot_container_registry.md)
- [GitLab Runner](https://docs.gitlab.com/runner/faq/)
- [Merge Trains](pipelines/merge_trains.md#troubleshooting)
- [Docker Build](docker/using_docker_build.md#troubleshooting)

@@ -229,4 +229,4 @@ For information on how to contribute documentation, see GitLab
## Getting an Enterprise Edition License

If you need a license for contributing to an EE-feature, see
[relevant information](https://about.gitlab.com/handbook/marketing/community-relations/code-contributor-program/#contributing-to-the-gitlab-enterprise-edition-ee).
[relevant information](https://about.gitlab.com/handbook/marketing/community-relations/code-contributor-program/operations/#contributing-to-the-gitlab-enterprise-edition-ee).

@@ -7,26 +7,27 @@ type: reference
# SAML SSO for self-managed GitLab instances **(FREE SELF)**

This page describes instance-wide SAML 2.0 for self-managed GitLab instances. For
SAML 2.0 on GitLab.com, see [SAML SSO for GitLab.com groups](../user/group/saml_sso/index.md).
This page describes how to set up instance-wide SAML single sign on (SSO) for
self-managed GitLab instances.

You should also reference the [OmniAuth documentation](omniauth.md) for general
settings that apply to all OmniAuth providers.
You can configure GitLab to act as a SAML service provider (SP). This allows
GitLab to consume assertions from a SAML identity provider (IdP), such as
Okta, to authenticate users.

If required, you can find a [glossary of common terms](#glossary-of-common-terms).
To set up SAML on GitLab.com, see [SAML SSO for GitLab.com groups](../user/group/saml_sso/index.md).

GitLab can be configured to act as a SAML 2.0 Service Provider (SP). This allows
GitLab to consume assertions from a SAML 2.0 identity provider (IdP), such as
Okta to authenticate users.
For more information on:

- OmniAuth provider settings, see the [OmniAuth documentation](omniauth.md).
- Commonly-used terms, see the [glossary of common terms](#glossary-of-common-terms).

## Configure SAML support in GitLab

1. Make sure GitLab is configured with HTTPS.
   See [Using HTTPS](../install/installation.md#using-https) for instructions.
1. Make sure GitLab is [configured with HTTPS](../install/installation.md#using-https).

1. On your GitLab server, open the configuration file.

   For Omnibus package:
   For Omnibus installations:

   ```shell
   sudo editor /etc/gitlab/gitlab.rb
@@ -40,11 +41,12 @@ Okta to authenticate users.
   sudo -u git -H editor config/gitlab.yml
   ```

1. See [Configure initial settings](omniauth.md#configure-initial-settings) for initial settings.
1. To allow your users to use SAML to sign up without having to manually create
   an account first, add the following values to your configuration:
1. Edit the initial [configuration settings](omniauth.md#configure-initial-settings).

   For Omnibus package:
1. To allow your users to use SAML to sign up without having to manually create
   an account first, add the following values to your configuration.

   For Omnibus installations:

   ```ruby
   gitlab_rails['omniauth_allow_single_sign_on'] = ['saml']
@@ -61,9 +63,9 @@ Okta to authenticate users.
   ```

1. Optional. You can automatically link SAML users with existing GitLab users if their
   email addresses match by adding the following setting:
   email addresses match by adding the following setting.

   For Omnibus package:
   For Omnibus installations:

   ```ruby
   gitlab_rails['omniauth_auto_link_saml_user'] = true
@@ -76,19 +78,20 @@ Okta to authenticate users.
   ```

   Alternatively, a user can manually link their SAML identity to an existing GitLab
   account by following the steps in
   [Enable OmniAuth for an existing user](omniauth.md#enable-omniauth-for-an-existing-user).
   account by [enabling OmniAuth for an existing user](omniauth.md#enable-omniauth-for-an-existing-user).

1. Ensure that the SAML [`NameID`](../user/group/saml_sso/index.md#nameid) and email
   address are fixed for each user. These attributes define the SAML user. If users
   can change these attributes, they can impersonate others.
1. Configure the following attributes so your SAML users cannot change them:

   Refer to the documentation for your SAML identity provider for information on
   how to fix these attributes.
   - [`NameID`](../user/group/saml_sso/index.md#nameid)
   - `Email` when used with `omniauth_auto_link_saml_user`

1. Add the provider configuration:
   If users can change these attributes, they can sign in as other authorized users.
   See your SAML IdP documentation for information on how to make these attributes
   unchangeable.

   For Omnibus package:
1. Add the provider configuration.

   For Omnibus installations:

   ```ruby
   gitlab_rails['omniauth_providers'] = [
@@ -124,23 +127,26 @@ Okta to authenticate users.
   }
   ```

1. Change the value for `assertion_consumer_service_url` to match the HTTPS endpoint
   of GitLab (append `users/auth/saml/callback` to the HTTPS URL of your GitLab
   installation to generate the correct value).
1. Match the value for `assertion_consumer_service_url` to the HTTPS endpoint
   of GitLab. To generate the correct value, append `users/auth/saml/callback` to the
   HTTPS URL of your GitLab installation.
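
   For example, assuming GitLab is served at `https://gitlab.example.com` (a placeholder host), the resulting value would be:

   ```plaintext
   https://gitlab.example.com/users/auth/saml/callback
   ```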

1. Change the values of `idp_cert_fingerprint`, `idp_sso_target_url`,
   `name_identifier_format` to match your IdP. If a fingerprint is used it must
   be a SHA1 fingerprint; check
   [the OmniAuth SAML documentation](https://github.com/omniauth/omniauth-saml)
   for more details on these options.
   See the [notes on configuring a SAML 2.0 app on your IdP](#configure-saml-on-your-idp) for more information.
1. Change the following values to match your IdP:
   - `idp_cert_fingerprint`.
   - `idp_sso_target_url`.
   - `name_identifier_format`.

   If you use a `idp_cert_fingerprint`, it must be a SHA1 fingerprint. For more
   information on these values, see the
   [OmniAuth SAML documentation](https://github.com/omniauth/omniauth-saml).
   For more information on other configuration settings, see
   [configuring SAML on your IdP](#configure-saml-on-your-idp).

1. Change the value of `issuer` to a unique name, which identifies the application
   to the IdP.

1. For the changes to take effect:
   - If you installed via Omnibus, [reconfigure GitLab](../administration/restart_gitlab.md#omnibus-gitlab-reconfigure).
   - If you installed from source, [restart GitLab](../administration/restart_gitlab.md#installations-from-source).
1. For the changes to take effect, if you installed:
   - Using Omnibus, [reconfigure GitLab](../administration/restart_gitlab.md#omnibus-gitlab-reconfigure).
   - From source, [restart GitLab](../administration/restart_gitlab.md#installations-from-source).

### Register GitLab in your SAML IdP

@@ -145,6 +145,20 @@ From GitLab 16.0 and later, the runner registration methods implemented by the n
<div class="deprecation removal-160 breaking-change">

### The Phabricator task importer is deprecated

Planned removal: GitLab <span class="removal-milestone">16.0</span> (2023-05-22)

WARNING:
This is a [breaking change](https://docs.gitlab.com/ee/development/deprecation_guidelines/).
Review the details carefully before upgrading.

The [Phabricator task importer](https://docs.gitlab.com/ee/user/project/import/phabricator.html) is being deprecated. Phabricator itself as a project is no longer actively maintained since June 1, 2021. We haven't observed imports using this tool. There has been no activity on the open related issues on GitLab.

</div>

<div class="deprecation removal-160 breaking-change">

### ZenTao integration

End of Support: GitLab <span class="removal-milestone">16.0</span> (2023-05-22)<br />

@@ -6,7 +6,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Free user limit **(FREE SAAS)**

From October 19, 2022, a five-user limit will apply to top-level [namespaces](namespace/index.md) with private visibility on GitLab SaaS. These limits will roll out gradually, and impacted users will be notified in GitLab.com at least 60 days before the limit is applied.
A five-user limit applies to top-level [namespaces](namespace/index.md) with private visibility on GitLab SaaS. This limit is being rolled out gradually, and impacted users will be notified in GitLab.com at least 60 days before the limit is applied.

When the five-user limit is applied, top-level private namespaces exceeding the user limit are placed in a read-only state. These namespaces cannot write new data to repositories, Git Large File Storage (LFS), packages, or registries.

@@ -38,15 +38,14 @@ If a project is public, so is the Container Registry.
### View the tags of a specific image

You can view a list of tags associated with a given container image:
You can use the Container Registry **Tag Details** page to view a list of tags associated with a given container image:

1. Go to your project or group.
1. Go to **Packages and registries > Container Registry**.
1. Select the container image you are interested in.

This brings up the Container Registry **Tag Details** page. You can view details about each tag,
such as when it was published, how much storage it consumes, and the manifest and configuration
digests.
You can view details about each tag, such as when it was published, how much storage it consumes,
and the manifest and configuration digests.

You can search, sort (by tag name), filter, and [delete](#delete-images-using-the-gitlab-ui)
tags on this page. You can share a filtered view by copying the URL from your browser.
@@ -198,7 +197,7 @@ You can configure your `.gitlab-ci.yml` file to build and push images to the Con
  If you use the Git SHA in your image tag, each job is unique and you
  should never have a stale image. However, it's still possible to have a
  stale image if you re-build a given commit after a dependency has changed.
  stale image if you rebuild a given commit after a dependency has changed.
- Don't build directly to the `latest` tag because multiple jobs may be
  happening simultaneously.
@@ -550,181 +549,3 @@ this setting. However, disabling the Container Registry disables all Container R
| Private project with Container Registry visibility <br/> set to **Everyone With Access** (UI) or `enabled` (API) | View Container Registry <br/> and pull images | No | No | Yes |
| Private project with Container Registry visibility <br/> set to **Only Project Members** (UI) or `private` (API) | View Container Registry <br/> and pull images | No | No | Yes |
| Any project with Container Registry `disabled` | All operations on Container Registry | No | No | No |

## Troubleshooting the GitLab Container Registry

### Migrating OCI container images to GitLab Container Registry

Migrating built container images to the GitLab registry is not a current feature. However, an [epic](https://gitlab.com/groups/gitlab-org/-/epics/5210) is open to track the work on this feature.

Some third-party tools can help migrate container images, for example, [skopeo](https://github.com/containers/skopeo), which can [copy container images](https://github.com/containers/skopeo#copying-images) between various storage mechanisms. You can use skopeo to copy from container registries, container storage backends, local directories, and local OCI-layout directories to the GitLab Container Registry.

### Docker connection error

A Docker connection error can occur when there are special characters in either the group,
project or branch name. Special characters can include:

- Leading underscore
- Trailing hyphen/dash

To get around this, you can [change the group path](../../group/manage.md#change-a-groups-path),
[change the project path](../../project/settings/index.md#rename-a-repository) or change the branch
name.

You may also get a `404 Not Found` or `Unknown Manifest` message if you are using
a Docker Engine version earlier than 17.12. Later versions of Docker Engine use
[the v2 API](https://docs.docker.com/registry/spec/manifest-v2-2/).

The images in your GitLab Container Registry must also use the Docker v2 API.
For information on how to update your images, see the [Docker help](https://docs.docker.com/registry/spec/deprecated-schema-v1).

### `Blob unknown to registry` error when pushing a manifest list

When [pushing a Docker manifest list](https://docs.docker.com/engine/reference/commandline/manifest/#create-and-push-a-manifest-list)
to the GitLab Container Registry, you may receive the error
`manifest blob unknown: blob unknown to registry`. This error is likely caused by having multiple images
with different architectures, spread out over several repositories instead of the same repository.

For example, you may have two images, each representing an architecture:

- The `amd64` platform
- The `arm64v8` platform

To build a multi-arch image with these images, you must push them to the same repository as the
multi-arch image.

To address the `Blob unknown to registry` error, include the architecture in the tag name of
individual images. For example, use `mygroup/myapp:1.0.0-amd64` and `mygroup/myapp:1.0.0-arm64v8`.
You can then tag the manifest list with `mygroup/myapp:1.0.0`.

### Troubleshoot as a GitLab server administrator

Troubleshooting the GitLab Container Registry, most of the times, requires
you to sign in to GitLab server with administrator access.

[Read how to troubleshoot the Container Registry](../../../administration/packages/container_registry.md#troubleshooting).

### Unable to change path or transfer a project

If you try to change a project's path or transfer a project to a new namespace,
you may receive one of the following errors:

- "Project cannot be transferred, because tags are present in its container registry."
- "Namespace cannot be moved because at least one project has tags in container registry."

This issue occurs when the project has images in the Container Registry.
You must delete or move these images before you can change the path or transfer
the project.

The following procedure uses these sample project names:

- For the current project: `gitlab.example.com/org/build/sample_project/cr:v2.9.1`
- For the new project: `gitlab.example.com/new_org/build/new_sample_project/cr:v2.9.1`

Use your own URLs to complete the following steps:

1. Download the Docker images on your computer:

   ```shell
   docker login gitlab.example.com
   docker pull gitlab.example.com/org/build/sample_project/cr:v2.9.1
   ```

   NOTE:
   For container registry authentication, use either a
   [personal access token](../../profile/personal_access_tokens.md) or a
   [deploy token](../../project/deploy_tokens/index.md).

1. Rename the images to match the new project name:

   ```shell
   docker tag gitlab.example.com/org/build/sample_project/cr:v2.9.1 gitlab.example.com/new_org/build/new_sample_project/cr:v2.9.1
   ```

1. Delete the images in the old project by using the [UI](#delete-images) or [API](../../../api/packages.md#delete-a-project-package).
   There may be a delay while the images are queued and deleted.
1. Change the path or transfer the project by going to **Settings > General**
   and expanding **Advanced**.
1. Restore the images:

   ```shell
   docker push gitlab.example.com/new_org/build/new_sample_project/cr:v2.9.1
   ```

Follow [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/18383) for details.

### Tags on S3 backend remain after successful deletion requests

With S3 as your storage backend, tags may remain even though:

- In the UI, you see that the tags are scheduled for deletion.
- In the API, you get an HTTP `200` response.
- The registry log shows a successful `Delete` request.

An example `DELETE` request in the registry log:

```shell
{"content_type":"","correlation_id":"01FQGNSKVMHQEAVE21KYTJN2P4","duration_ms":62,"host":"localhost:5000","level":"info","method":"DELETE","msg":"access","proto":"HTTP/1.1","referrer":"","remote_addr":"127.0.0.1:47498","remote_ip":"127.0.0.1","status":202,"system":"http","time":"2021-12-22T08:58:15Z","ttfb_ms":62,"uri":"/v2/<path to repo>/tags/reference/<tag_name>","user_agent":"GitLab/<version>","written_bytes":0}
```

There may be some errors not properly cached. Follow these steps to investigate further:

1. In your configuration file, set the registry's log level to `debug`, and the S3 driver's log
   level to `logdebugwithhttpbody`. For Omnibus, make these edits in the `gitlab.rb` file:

   ```shell
   # Change the registry['log_level'] to debug
   registry['log_level'] = 'debug'

   # Set log level for registry log from storage side
   registry['storage'] = {
     's3' => {
       'bucket' => 'your-s3-bucket',
       'region' => 'your-s3-region'
     },

     'loglevel' = "logdebugwithhttpbody"
   }
   ```

   Then save and reconfigure GitLab:

   ```shell
   sudo gitlab-ctl reconfigure
   ```

1. Attempt to delete one or more tags using the GitLab UI or API.

1. Inspect the registry logs and look for a response from S3. Although the response could be
   `200 OK`, the body might have the error `AccessDenied`. This indicates a permission problem from
   the S3 side.

1. Ensure your S3 configuration has the `deleteObject` permission scope. Here's an
   [example role for an S3 bucket](../../../administration/object_storage.md#iam-permissions).
   Once adjusted, trigger another tag deletion. You should be able to successfully delete tags.

Follow [this issue](https://gitlab.com/gitlab-org/container-registry/-/issues/551) for details.

### `unauthorized: authentication required` when pushing large images

When pushing large images, you might get an error like the following:

```shell
docker push gitlab.example.com/myproject/docs:latest
The push refers to a repository [gitlab.example.com/myproject/docs]
630816f32edb: Preparing
530d5553aec8: Preparing
...
4b0bab9ff599: Waiting
d1c800db26c7: Waiting
42755cf4ee95: Waiting
unauthorized: authentication required
```

On self-managed GitLab instances, by default, tokens for the Container Registry expire every five minutes.
When pushing larger images, or images that take longer than five minutes to push,
you might encounter this error. On GitLab.com, the expiration time is 15 minutes.

If you are using self-managed GitLab, you can ask an administrator to
[increase the token duration](../../../administration/packages/container_registry.md#increase-token-duration)
if necessary.

@@ -0,0 +1,129 @@
---
stage: Package
group: Container Registry
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---

# Troubleshooting the GitLab Container Registry

You must sign in to GitLab with administrator rights to troubleshoot most issues with the GitLab Container Registry.

You can find [additional troubleshooting information](../../../administration/packages/container_registry.md#troubleshooting) in the GitLab Container Registry administration documentation.

## Migrating OCI container images to GitLab Container Registry

Migrating container images to the GitLab registry is not supported, but [epic](https://gitlab.com/groups/gitlab-org/-/epics/5210) proposes to change this behavior.

You can use third-party tools to migrate container images. For example, [skopeo](https://github.com/containers/skopeo), can [copy container images](https://github.com/containers/skopeo#copying-images) between various storage mechanisms. You can use skopeo to copy from container registries, container storage backends, local directories, and local OCI-layout directories to the GitLab Container Registry.
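
As a minimal sketch, assuming a source registry at `registry.example.com` and a destination project `mygroup/myapp` (hosts, paths, and tags are placeholders):

```shell
# Authenticate against the GitLab registry, then copy one tagged image into it.
skopeo login registry.gitlab.example.com
skopeo copy \
  docker://registry.example.com/myapp:1.0.0 \
  docker://registry.gitlab.example.com/mygroup/myapp:1.0.0
```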

## Docker connection error

A Docker connection error can occur when there are special characters in either the group,
project, or branch name. Special characters include:

- A leading underscore.
- A trailing hyphen or dash.

To resolve this error, you can change the [group path](../../group/manage.md#change-a-groups-path),
the [project path](../../project/settings/index.md#rename-a-repository) or the branch name.

You may get a `404 Not Found` or `Unknown Manifest` error message if you use
Docker Engine 17.11 or earlier. Current versions of Docker Engine use
the [v2 API](https://docs.docker.com/registry/spec/manifest-v2-2/).

The images in your GitLab Container Registry must use the Docker v2 API.
For information on how to update version 1 images to version 2, see the [Docker documentation](https://docs.docker.com/registry/spec/deprecated-schema-v1).

## `Blob unknown to registry` error when pushing a manifest list

When [pushing a Docker manifest list](https://docs.docker.com/engine/reference/commandline/manifest/#create-and-push-a-manifest-list)
to the GitLab Container Registry, you may receive the error
`manifest blob unknown: blob unknown to registry`. This error is likely caused by having multiple images
with different architectures spread out over several repositories instead of the same repository.

For example, you may have two images, each representing an architecture:

- The `amd64` platform.
- The `arm64v8` platform.

To build a multi-arch image with these images, you must push them to the same repository as the
multi-arch image.

To address the `Blob unknown to registry` error, include the architecture in the tag name of
individual images. For example, use `mygroup/myapp:1.0.0-amd64` and `mygroup/myapp:1.0.0-arm64v8`.
You can then tag the manifest list with `mygroup/myapp:1.0.0`.
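
As a sketch, with the two per-architecture tags already pushed, the manifest list itself is created and pushed with the standard `docker manifest` commands:

```shell
# Assemble the multi-arch manifest list from the per-architecture tags, then push it.
docker manifest create mygroup/myapp:1.0.0 \
  mygroup/myapp:1.0.0-amd64 \
  mygroup/myapp:1.0.0-arm64v8
docker manifest push mygroup/myapp:1.0.0
```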

## Unable to change project path or transfer a project

If you try to change a project path or transfer a project to a new namespace,
you may receive one of the following errors:

- Project cannot be transferred because tags are present in its container registry.
- Namespace cannot be moved because at least one project has tags in the container registry.

This error occurs when the project has images in the Container Registry.
You must delete or move these images before you change the path or transfer
the project.

The following procedure uses these sample project names:

- For the current project: `gitlab.example.com/org/build/sample_project/cr:v2.9.1`.
- For the new project: `gitlab.example.com/new_org/build/new_sample_project/cr:v2.9.1`.

1. Download the Docker images on your computer:

   ```shell
   docker login gitlab.example.com
   docker pull gitlab.example.com/org/build/sample_project/cr:v2.9.1
   ```

   NOTE:
   Use either a [personal access token](../../profile/personal_access_tokens.md) or a
   [deploy token](../../project/deploy_tokens/index.md) to authenticate your user account.

1. Rename the images to match the new project name:

   ```shell
   docker tag gitlab.example.com/org/build/sample_project/cr:v2.9.1 gitlab.example.com/new_org/build/new_sample_project/cr:v2.9.1
   ```

1. Delete the images in the old project by using the [UI](index.md#delete-images) or [API](../../../api/packages.md#delete-a-project-package).
   There may be a delay while the images are queued and deleted.
1. Change the path or transfer the project:

   1. On the top bar, select **Main menu > Projects** and find your project.
   1. On the left sidebar, select **Settings > General**.
   1. Expand the **Advanced** section.
   1. In the **Change path** text box, edit the path.
   1. Select **Change path**.

1. Restore the images:

   ```shell
   docker push gitlab.example.com/new_org/build/new_sample_project/cr:v2.9.1
   ```

See this [issue](https://gitlab.com/gitlab-org/gitlab/-/issues/18383) for details.

## `unauthorized: authentication required` when pushing large images

When pushing large images, you may see an authentication error like the following:

```shell
docker push gitlab.example.com/myproject/docs:latest
The push refers to a repository [gitlab.example.com/myproject/docs]
630816f32edb: Preparing
530d5553aec8: Preparing
...
4b0bab9ff599: Waiting
d1c800db26c7: Waiting
42755cf4ee95: Waiting
unauthorized: authentication required
```

This error happens when your authentication token expires before the image push is complete. By default, tokens for
the Container Registry on self-managed GitLab instances expire every five minutes. On GitLab.com, the token expiration
time is set to 15 minutes.

If you are using self-managed GitLab, you can ask an administrator to
[increase the token duration](../../../administration/packages/container_registry.md#increase-token-duration).

@@ -9,11 +9,47 @@ info: To determine the technical writer assigned to the Stage/Group associated w
You can share projects with other [groups](../../group/index.md). This makes it
possible to add a group of users to a project with a single action.

## Groups as collections of users
For example, if `Project A` belongs to `Group 1`, the members of `Group 1` have access to the project.
If `Project A` already belongs to another `Group 2`, the owner of `Group 2` can share `Project A`
with `Group 1`, so that both members of `Group 1` and `Group 2` have access to the project.

Groups are used primarily to [create collections of projects](../../group/index.md), but you can also
take advantage of the fact that groups define collections of _users_, namely the group
members.
When a project is shared with a group:

- All group members, including members of subgroups or projects that belong to the group,
  are assigned the same role in the project.
  This role is displayed in the Max role column of the Project members list.
- The group is listed in the **Groups** tab.
- The project is listed on the group dashboard.

Be aware of the restrictions that apply when sharing projects with:

- [Groups with a more restrictive visibility level](#share-projects-with-groups-with-a-more-restrictive-visibility-level).
- [Group lock](#share-project-with-group-lock).

## Share projects with groups with a more restrictive visibility level

You can share projects only down the group's organization structure.
This means you can share a project with a group that has a more restrictive
[visibility level](../../public_access.md#project-and-group-visibility) than the project,
but not with a group that has a less restrictive visibility level.

For example, you can share:

- A public project with a private group.
- A public project with an internal group.
- An internal project with a private group.

This restriction applies to subgroups as well. For example, `group/subgroup01/project`:

- Can not be shared with `group`.
- Can be shared with `group/subgroup02` or `group/subgroup01/subgroup03`.

When you share a project with a group that has a more restrictive visibility level than the project:

- The group name is visible to all users that can view the project members page.
- Owners of the project have access to members of the group when they mention them in issues or merge requests.
- Project members who are direct or indirect members of the group can see
  group members listed in addition to members of the project.

## Share a project with a group of users
@@ -24,65 +60,22 @@ members.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/352526) in GitLab 14.9.
    [Feature flag `invite_members_group_modal`](https://gitlab.com/gitlab-org/gitlab/-/issues/352526) removed.

You can share a project only with:
You can share a project only with groups:

- Groups for which you have an explicitly defined [membership](index.md).
- Groups that contain a nested subgroup or project for which you have an explicitly defined role.
- Where you have an explicitly defined [membership](index.md).
- That contain a nested subgroup or project you have an explicitly defined role for.
- You are an administrator of.

Administrators can share projects with any group in the instance.
To share a project with a group:

The primary mechanism to give a group of users, say 'Engineering', access to a project,
say 'Project Acme', in GitLab is to make the 'Engineering' group the owner of 'Project
Acme'. But what if 'Project Acme' already belongs to another group, say 'Open Source'?
This is where the group sharing feature can be of use.

To share 'Project Acme' with the 'Engineering' group:

1. For 'Project Acme' use the left navigation menu to go to **Project information > Members**.
1. On the top bar, select **Main menu > Projects** and find your project.
1. In the left navigation menu, select **Project information > Members**.
1. Select **Invite a group**.
1. Add the 'Engineering' group with the maximum role of your choice.
1. **Select a group** you want to add to the project.
1. **Select a role** you want to assign to the group.
1. Optional. Select an **Access expiration date**.
1. Select **Invite**.

After sharing 'Project Acme' with 'Engineering':

- The group is listed in the **Groups** tab.
- The project is listed on the group dashboard.
- All members, including members of subgroups or projects that belong to the group, gain access
  to the project with a role based on the outcome of [maximum role](#maximum-role).

When you share a project, be aware of the following restrictions and outcomes:

- [Maximum role](#maximum-role)
- [Sharing projects with groups with a more restrictive visibility level](#sharing-projects-with-groups-with-a-more-restrictive-visibility-level)
- [Sharing project with group lock](#share-project-with-group-lock)

## Maximum role

When you invite a group to a project, all members of the group are assigned the same role in the project. This role is displayed in the Max role column of the Project members list.

### Share a project with a subgroup

You can't share a project with a group that's an ancestor of a [subgroup](../../group/subgroups/index.md) the project is
in. That means you can only share down the group's organization structure. For example, `group/subgroup01/project`:

- Can not be shared with `group`.
- Can be shared with `group/subgroup02` or `group/subgroup01/subgroup03`.

## Sharing projects with groups with a more restrictive visibility level

There are several outcomes you must be aware of when you share a project with a group that has a more restrictive [visibility level](../../public_access.md#project-and-group-visibility) than the project. For example, when you:

- Share a public project with a private group.
- Share a public project with an internal group.
- Share an internal project with a private group.

The following outcomes occur:

- The group name is visible to all users that can view the project members page.
- Owners of the project have access to members of the group when they mention them in issues or merge requests.
- Project members who are direct or indirect members of the group can see group members listed in addition to members of the project.

## Share project with group lock

It is possible to prevent projects in a group from

@@ -20,6 +20,8 @@ module API
        render_api_error!(e.message, 422)
      end

      STATE_NAME_URI_REQUIREMENTS = { name: API::NO_SLASH_URL_PART_REGEX }.freeze

      before do
        authenticate!
        authorize! :read_terraform_state, user_project
@@ -45,7 +47,7 @@ module API
      end

      resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
        namespace ':id/terraform/state/:name' do
        namespace ':id/terraform/state/:name', requirements: STATE_NAME_URI_REQUIREMENTS do
          params do
            requires :name, type: String, desc: 'The name of a Terraform state'
            optional :ID, type: String, limit: 255, desc: 'Terraform state lock ID'
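
The new `requirements:` constraint means a state name may no longer contain a slash. A hedged illustration of the endpoint shape (host, project ID, and token are placeholders):

```shell
# Fetch the current state file. A name like "production" matches the route,
# while "envs/production" no longer does under NO_SLASH_URL_PART_REGEX.
curl --header "Private-Token: <your_access_token>" \
  "https://gitlab.example.com/api/v4/projects/<project_id>/terraform/state/production"
```
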
@@ -66,6 +66,14 @@ module BulkImports
        instance_version >= BulkImport.min_gl_version_for_project_migration
      end

      def validate_import_scopes!
        response = self.get("/personal_access_tokens/self")

        return if response['scopes'].include?('api')

        raise BulkImports::Error.scope_validation_failure
      end

      private

      def validate_instance_version!
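
`validate_import_scopes!` leans on the source instance's token-introspection endpoint. A sketch of what the client effectively checks (host and token are placeholders):

```shell
# The "scopes" array in the response must contain "api" for the migration to proceed.
curl --header "PRIVATE-TOKEN: <source_instance_token>" \
  "https://source.gitlab.example.com/api/v4/personal_access_tokens/self"
```
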
@@ -5,5 +5,9 @@ module BulkImports
    def self.unsupported_gitlab_version
      self.new("Unsupported GitLab Version. Minimum Supported Gitlab Version #{BulkImport::MIN_MAJOR_VERSION}.")
    end

    def self.scope_validation_failure
      self.new("Migration aborted as the provided personal access token is no longer valid.")
    end
  end
end

@@ -0,0 +1,26 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # Set `project_settings.legacy_open_source_license_available` to false for projects less than 5 MB
    class DisableLegacyOpenSourceLicenseForProjectsLessThanFiveMb < ::Gitlab::BackgroundMigration::BatchedMigrationJob
      scope_to ->(relation) do
        relation
          .where(legacy_open_source_license_available: true)
      end

      operation_name :disable_legacy_open_source_license_for_projects_less_than_five_mb

      def perform
        each_sub_batch do |sub_batch|
          updates = { legacy_open_source_license_available: false, updated_at: Time.current }

          sub_batch
            .joins('INNER JOIN project_statistics ON project_statistics.project_id = project_settings.project_id')
            .where('project_statistics.repository_size < ?', 5.megabyte)
            .update_all(updates)
        end
      end
    end
  end
end
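
Each sub-batch issues a single `UPDATE` joined against `project_statistics`; roughly the following SQL, with the batch bounds supplied by the batched-migration framework (the bounds below are illustrative):

```sql
-- Approximate SQL for one sub-batch of the migration above.
UPDATE project_settings
SET legacy_open_source_license_available = FALSE,
    updated_at = NOW()
FROM project_statistics
WHERE project_statistics.project_id = project_settings.project_id
  AND project_settings.legacy_open_source_license_available = TRUE
  AND project_statistics.repository_size < 5242880 -- 5 MB
  AND project_settings.project_id BETWEEN 1 AND 250;
```
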
@@ -0,0 +1,26 @@
# frozen_string_literal: true

module SystemCheck
  module App
    class GitlabCableConfigExistsCheck < SystemCheck::BaseCheck
      set_name 'Cable config exists?'

      def check?
        cable_config_file = Rails.root.join('config/cable.yml')

        File.exist?(cable_config_file)
      end

      def show_error
        try_fixing_it(
          'Copy config/cable.yml.example to config/cable.yml',
          'Update config/cable.yml to match your setup'
        )
        for_more_information(
          see_installation_guide_section('GitLab')
        )
        fix_and_rerun
      end
    end
  end
end

@@ -0,0 +1,26 @@
# frozen_string_literal: true

module SystemCheck
  module App
    class GitlabResqueConfigExistsCheck < SystemCheck::BaseCheck
      set_name 'Resque config exists?'

      def check?
        resque_config_file = Rails.root.join('config/resque.yml')

        File.exist?(resque_config_file)
      end

      def show_error
        try_fixing_it(
          'Copy config/resque.yml.example to config/resque.yml',
          'Update config/resque.yml to match your setup'
        )
        for_more_information(
          see_installation_guide_section('GitLab')
        )
        fix_and_rerun
      end
    end
  end
end

@@ -17,6 +17,8 @@ module SystemCheck
      SystemCheck::App::OrphanedGroupMembersCheck,
      SystemCheck::App::GitlabConfigExistsCheck,
      SystemCheck::App::GitlabConfigUpToDateCheck,
      SystemCheck::App::GitlabCableConfigExistsCheck,
      SystemCheck::App::GitlabResqueConfigExistsCheck,
      SystemCheck::App::LogWritableCheck,
      SystemCheck::App::TmpWritableCheck,
      SystemCheck::App::UploadsDirectoryExistsCheck,
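
These checks run as part of the standard system-check Rake task, so the two new config checks surface there:

```shell
# Runs all registered SystemCheck::App checks,
# including the new cable.yml and resque.yml existence checks.
sudo gitlab-rake gitlab:check
```
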
@@ -36121,6 +36121,12 @@ msgstr ""
msgid "ScanExecutionPolicy|%{thenLabelStart}Then%{thenLabelEnd} Require a %{scan} scan to run with site profile %{siteProfile} and scanner profile %{scannerProfile}"
msgstr ""

msgid "ScanExecutionPolicy|%{thenLabelStart}Then%{thenLabelEnd} Require a %{scan} scan to run with site profile %{siteProfile} and scanner profile %{scannerProfile} with tags %{tags}"
msgstr ""

msgid "ScanExecutionPolicy|%{thenLabelStart}Then%{thenLabelEnd} Require a %{scan} scan to run with tags %{tags}"
msgstr ""

msgid "ScanExecutionPolicy|A pipeline is run"
msgstr ""
@@ -36148,9 +36154,15 @@ msgstr ""
msgid "ScanExecutionPolicy|Select site profile"
msgstr ""

msgid "ScanExecutionPolicy|Select tags (if any)"
msgstr ""

msgid "ScanExecutionPolicy|Site profile"
msgstr ""

msgid "ScanExecutionPolicy|Tags"
msgstr ""

msgid "ScanExecutionPolicy|agent"
msgstr ""

@@ -0,0 +1,61 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForProjectsLessThanFiveMb,
  :migration,
  schema: 20221018095434,
  feature_category: :projects do
  let(:namespaces_table) { table(:namespaces) }
  let(:projects_table) { table(:projects) }
  let(:project_settings_table) { table(:project_settings) }
  let(:project_statistics_table) { table(:project_statistics) }

  subject(:perform_migration) do
    described_class.new(start_id: project_settings_table.minimum(:project_id),
                        end_id: project_settings_table.maximum(:project_id),
                        batch_table: :project_settings,
                        batch_column: :project_id,
                        sub_batch_size: 2,
                        pause_ms: 0,
                        connection: ActiveRecord::Base.connection)
                   .perform
  end

  it 'sets `legacy_open_source_license_available` to false only for projects less than 5 MB', :aggregate_failures do
    project_setting_2_mb = create_legacy_license_project_setting(repo_size: 2)
    project_setting_4_mb = create_legacy_license_project_setting(repo_size: 4)
    project_setting_5_mb = create_legacy_license_project_setting(repo_size: 5)
    project_setting_6_mb = create_legacy_license_project_setting(repo_size: 6)

    record = ActiveRecord::QueryRecorder.new do
      expect { perform_migration }
        .to change { migrated_attribute(project_setting_2_mb) }.from(true).to(false)
        .and change { migrated_attribute(project_setting_4_mb) }.from(true).to(false)
        .and not_change { migrated_attribute(project_setting_5_mb) }.from(true)
        .and not_change { migrated_attribute(project_setting_6_mb) }.from(true)
    end

    expect(record.count).to eq(15)
  end

  private

  # @param repo_size: Repo size in MB
  def create_legacy_license_project_setting(repo_size:)
    path = "path-for-repo-size-#{repo_size}"
    namespace = namespaces_table.create!(name: "namespace-#{path}", path: "namespace-#{path}")
    project_namespace =
      namespaces_table.create!(name: "-project-namespace-#{path}", path: "project-namespace-#{path}", type: 'Project')
    project = projects_table
                .create!(name: path, path: path, namespace_id: namespace.id, project_namespace_id: project_namespace.id)

    size_in_bytes = 1.megabyte * repo_size
    project_statistics_table.create!(project_id: project.id, namespace_id: namespace.id, repository_size: size_in_bytes)
    project_settings_table.create!(project_id: project.id, legacy_open_source_license_available: true)
  end

  def migrated_attribute(project_setting)
    project_settings_table.find(project_setting.project_id).legacy_open_source_license_available
  end
end

@@ -0,0 +1,27 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe SystemCheck::App::GitlabCableConfigExistsCheck, feature_category: :redis do
  subject(:system_check) { described_class.new }

  describe '#check?' do
    subject { system_check.check? }

    context 'when config/cable.yml exists' do
      before do
        allow(File).to receive(:exist?).and_return(true)
      end

      it { is_expected.to eq(true) }
    end

    context 'when config/cable.yml does not exist' do
      before do
        allow(File).to receive(:exist?).and_return(false)
      end

      it { is_expected.to eq(false) }
    end
  end
end

@@ -0,0 +1,27 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe SystemCheck::App::GitlabResqueConfigExistsCheck, feature_category: :redis do
  subject(:system_check) { described_class.new }

  describe '#check?' do
    subject { system_check.check? }

    context 'when config/resque.yml exists' do
      before do
        allow(File).to receive(:exist?).and_return(true)
      end

      it { is_expected.to eq(true) }
    end

    context 'when config/resque.yml does not exist' do
      before do
        allow(File).to receive(:exist?).and_return(false)
      end

      it { is_expected.to eq(false) }
    end
  end
end

@ -0,0 +1,62 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
require_migration!
|
||||
|
||||
RSpec.describe ScheduleDisableLegacyOpenSourceLicenseForProjectsLessThanFiveMb do
|
||||
let_it_be(:migration) { described_class.new }
|
||||
let_it_be(:post_migration) { described_class::MIGRATION }
|
||||
|
||||
context 'when on gitlab.com' do
|
||||
before do
|
||||
allow(Gitlab).to receive(:com?).and_return(true)
|
||||
end
|
||||
|
||||
describe '#up' do
|
||||
it 'schedules background jobs for each batch of project_settings' do
|
||||
migration.up
|
||||
|
||||
expect(post_migration).to(
|
||||
have_scheduled_batched_migration(
|
||||
table_name: :project_settings,
|
||||
column_name: :project_id,
|
||||
interval: described_class::INTERVAL,
|
||||
batch_size: described_class::BATCH_SIZE,
|
||||
max_batch_size: described_class::MAX_BATCH_SIZE,
|
||||
sub_batch_size: described_class::SUB_BATCH_SIZE
|
||||
)
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#down' do
|
||||
it 'deletes all batched migration records' do
|
||||
migration.down
|
||||
|
||||
expect(post_migration).not_to have_scheduled_batched_migration
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when on self-managed instance' do
|
||||
before do
|
||||
allow(Gitlab).to receive(:com?).and_return(false)
|
||||
end
|
||||
|
||||
describe '#up' do
|
||||
it 'does not schedule background job' do
|
||||
expect(migration).not_to receive(:queue_batched_background_migration)
|
||||
|
||||
migration.up
|
||||
end
|
||||
end
|
||||
|
||||
describe '#down' do
|
||||
it 'does not delete background job' do
|
||||
expect(migration).not_to receive(:delete_batched_background_migration)
|
||||
|
||||
migration.down
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
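For orientation, the spec above targets a standard batched background migration. A plausible shape for the migration itself is sketched below; the base class version, the schema restriction, and the constant values are assumptions for illustration, not code taken from this commit.

# Hypothetical sketch of the post-deployment migration exercised by the spec
# above; constant values and the Gitlab::Database::Migration[2.0] base class
# are illustrative assumptions.
class ScheduleDisableLegacyOpenSourceLicenseForProjectsLessThanFiveMb < Gitlab::Database::Migration[2.0]
  MIGRATION = 'DisableLegacyOpenSourceLicenseForProjectsLessThanFiveMb'
  INTERVAL = 2.minutes
  BATCH_SIZE = 4_000
  MAX_BATCH_SIZE = 50_000
  SUB_BATCH_SIZE = 250

  restrict_gitlab_migration gitlab_schema: :gitlab_main

  def up
    # The spec expects scheduling to happen only on GitLab.com.
    return unless Gitlab.com?

    queue_batched_background_migration(
      MIGRATION,
      :project_settings,
      :project_id,
      job_interval: INTERVAL,
      batch_size: BATCH_SIZE,
      max_batch_size: MAX_BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    return unless Gitlab.com?

    delete_batched_background_migration(MIGRATION, :project_settings, :project_id)
  end
end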
@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe Ci::Bridge do
RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
  let_it_be(:project) { create(:project) }
  let_it_be(:target_project) { create(:project, name: 'project', namespace: create(:namespace, name: 'my')) }
  let_it_be(:pipeline) { create(:ci_pipeline, project: project) }

@@ -34,6 +34,24 @@ RSpec.describe Ci::Bridge do
    expect(bridge).to have_one(:downstream_pipeline)
  end

  describe '#sourced_pipelines' do
    subject { bridge.sourced_pipelines }

    it 'raises error' do
      expect { subject }.to raise_error RuntimeError, 'Ci::Bridge does not have sourced_pipelines association'
    end

    context 'when ci_bridge_remove_sourced_pipelines is disabled' do
      before do
        stub_feature_flags(ci_bridge_remove_sourced_pipelines: false)
      end

      it 'returns the sourced_pipelines association' do
        expect(bridge.sourced_pipelines).to eq([])
      end
    end
  end

  describe '#retryable?' do
    let(:bridge) { create(:ci_bridge, :success) }
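The examples above toggle behavior with GitLab's `stub_feature_flags` spec helper. A minimal, self-contained usage, assuming a spec that already has a `project` available, might look like this sketch:

# Sketch only: the flag name is real, the surrounding example is illustrative.
it 'reflects the stubbed flag state' do
  stub_feature_flags(ci_bridge_remove_sourced_pipelines: false)

  expect(Feature.enabled?(:ci_bridge_remove_sourced_pipelines, project)).to be(false)
end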
@@ -44,6 +44,8 @@ RSpec.describe API::BulkImports, feature_category: :importers do
  end

  describe 'POST /bulk_imports' do
    let(:api_response) { { 'scopes' => %w[api read_repository] } }

    before do
      allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
        allow(instance)

@@ -53,6 +55,9 @@ RSpec.describe API::BulkImports, feature_category: :importers do
        allow(instance)
          .to receive(:instance_enterprise)
          .and_return(false)
        allow(instance)
          .to receive(:get)
          .and_return(api_response)
      end
    end

@@ -164,6 +164,19 @@ RSpec.describe 'getting an issue list at root level' do
    end
  end

  context 'when fetching issues from multiple projects' do
    it 'avoids N+1 queries' do
      post_query # warm-up

      control = ActiveRecord::QueryRecorder.new { post_query }

      new_private_project = create(:project, :private).tap { |project| project.add_developer(current_user) }
      create(:issue, project: new_private_project)

      expect { post_query }.not_to exceed_query_limit(control)
    end
  end

  def execute_query
    post_query
  end
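The N+1 guard above follows the usual QueryRecorder recipe: warm up, record a baseline, add more data, then assert the same request issues no extra queries. A generic sketch of the pattern follows; the `run_request` helper is a stand-in of my own, not from this commit:

# run_request stands in for whatever triggers the queries (a GraphQL post,
# a REST call, a page visit); the warm-up keeps lazy initialization out of
# the baseline count.
it 'avoids N+1 queries' do
  run_request # warm-up

  control = ActiveRecord::QueryRecorder.new { run_request }

  create_list(:issue, 3, project: project)

  expect { run_request }.not_to exceed_query_limit(control)
end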
@@ -9,13 +9,15 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu
  let_it_be(:developer) { create(:user, developer_projects: [project]) }
  let_it_be(:maintainer) { create(:user, maintainer_projects: [project]) }

  let!(:state) { create(:terraform_state, :with_version, project: project) }

  let(:current_user) { maintainer }
  let(:auth_header) { user_basic_auth_header(current_user) }
  let(:project_id) { project.id }
  let(:state_name) { state.name }

  let(:state_name) { "some-state" }
  let(:state_path) { "/projects/#{project_id}/terraform/state/#{state_name}" }
  let!(:state) do
    create(:terraform_state, :with_version, project: project, name: URI.decode_www_form_component(state_name))
  end

  before do
    stub_terraform_state_object_storage

@@ -91,15 +93,24 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu
    end
  end

  context 'personal acceess token authentication' do
  shared_examples 'can access terraform state' do
    it 'returns terraform state of a project of given state name' do
      request

      expect(response).to have_gitlab_http_status(:ok)
      expect(response.body).to eq(state.reload.latest_file.read)
    end
  end

  context 'personal access token authentication' do
    context 'with maintainer permissions' do
      let(:current_user) { maintainer }

      it 'returns terraform state belonging to a project of given state name' do
        request

        expect(response).to have_gitlab_http_status(:ok)
        expect(response.body).to eq(state.reload.latest_file.read)
      where(given_state_name: %w[test-state test.state test%2Ffoo])
      with_them do
        it_behaves_like 'can access terraform state' do
          let(:state_name) { given_state_name }
        end
      end

      context 'for a project that does not exist' do

@@ -112,18 +123,23 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu
        end
      end

      context 'with invalid state name' do
        let(:state_name) { 'foo/bar' }

        it 'returns a 404 error' do
          request

          expect(response).to have_gitlab_http_status(:not_found)
        end
      end

      it_behaves_like 'cannot access a state that is scheduled for deletion'
    end

    context 'with developer permissions' do
      let(:current_user) { developer }

      it 'returns terraform state belonging to a project of given state name' do
        request

        expect(response).to have_gitlab_http_status(:ok)
        expect(response.body).to eq(state.reload.latest_file.read)
      end
      it_behaves_like 'can access terraform state'
    end
  end

@@ -133,12 +149,7 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu
    context 'with maintainer permissions' do
      let(:job) { create(:ci_build, status: :running, project: project, user: maintainer) }

      it 'returns terraform state belonging to a project of given state name' do
        request

        expect(response).to have_gitlab_http_status(:ok)
        expect(response.body).to eq(state.reload.latest_file.read)
      end
      it_behaves_like 'can access terraform state'

      it 'returns unauthorized if the the job is not running' do
        job.update!(status: :failed)

@@ -161,12 +172,7 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu
    context 'with developer permissions' do
      let(:job) { create(:ci_build, status: :running, project: project, user: developer) }

      it 'returns terraform state belonging to a project of given state name' do
        request

        expect(response).to have_gitlab_http_status(:ok)
        expect(response.body).to eq(state.reload.latest_file.read)
      end
      it_behaves_like 'can access terraform state'
    end
  end
end

@@ -182,11 +188,26 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu
    context 'with maintainer permissions' do
      let(:current_user) { maintainer }

      it 'updates the state' do
        expect { request }.to change { Terraform::State.count }.by(0)
      where(given_state_name: %w[test-state test.state test%2Ffoo])
      with_them do
        let(:state_name) { given_state_name }

        expect(response).to have_gitlab_http_status(:ok)
        expect(Gitlab::Json.parse(response.body)).to be_empty
        it 'updates the state' do
          expect { request }.to change { Terraform::State.count }.by(0)

          expect(response).to have_gitlab_http_status(:ok)
          expect(Gitlab::Json.parse(response.body)).to be_empty
        end
      end

      context 'with invalid state name' do
        let(:state_name) { 'foo/bar' }

        it 'returns a 404 error' do
          request

          expect(response).to have_gitlab_http_status(:not_found)
        end
      end

      context 'when serial already exists' do

@@ -224,16 +245,24 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu
    end

    context 'when there is no terraform state of a given name' do
      let(:state_name) { 'example2' }
      let(:non_existing_state_name) { 'non-existing-state' }
      let(:non_existing_state_path) { "/projects/#{project_id}/terraform/state/#{non_existing_state_name}" }

      subject(:request) { post api(non_existing_state_path), headers: auth_header, as: :json, params: params }

      context 'with maintainer permissions' do
        let(:current_user) { maintainer }

        it 'creates a new state' do
          expect { request }.to change { Terraform::State.count }.by(1)
        where(given_state_name: %w[test-state test.state test%2Ffoo])
        with_them do
          let(:state_name) { given_state_name }

          expect(response).to have_gitlab_http_status(:ok)
          expect(Gitlab::Json.parse(response.body)).to be_empty
          it 'creates a new state' do
            expect { request }.to change { Terraform::State.count }.by(1)

            expect(response).to have_gitlab_http_status(:ok)
            expect(Gitlab::Json.parse(response.body)).to be_empty
          end
        end
      end

@@ -280,14 +309,29 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu
      let(:current_user) { maintainer }
      let(:deletion_service) { instance_double(Terraform::States::TriggerDestroyService) }

      it 'schedules the state for deletion and returns empty body' do
        expect(Terraform::States::TriggerDestroyService).to receive(:new).and_return(deletion_service)
        expect(deletion_service).to receive(:execute).once
      where(given_state_name: %w[test-state test.state test%2Ffoo])
      with_them do
        let(:state_name) { given_state_name }

        request
        it 'schedules the state for deletion and returns empty body' do
          expect(Terraform::States::TriggerDestroyService).to receive(:new).and_return(deletion_service)
          expect(deletion_service).to receive(:execute).once

        expect(response).to have_gitlab_http_status(:ok)
        expect(Gitlab::Json.parse(response.body)).to be_empty
          request

          expect(response).to have_gitlab_http_status(:ok)
          expect(Gitlab::Json.parse(response.body)).to be_empty
        end
      end

      context 'with invalid state name' do
        let(:state_name) { 'foo/bar' }

        it 'returns a 404 error' do
          request

          expect(response).to have_gitlab_http_status(:not_found)
        end
      end

      it_behaves_like 'cannot access a state that is scheduled for deletion'

@@ -322,10 +366,25 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu
    it_behaves_like 'endpoint with unique user tracking'
    it_behaves_like 'cannot access a state that is scheduled for deletion'

    it 'locks the terraform state' do
      request
    where(given_state_name: %w[test-state test.state test%2Ffoo])
    with_them do
      let(:state_name) { given_state_name }

      expect(response).to have_gitlab_http_status(:ok)
      it 'locks the terraform state' do
        request

        expect(response).to have_gitlab_http_status(:ok)
      end
    end

    context 'with invalid state name' do
      let(:state_name) { 'foo/bar' }

      it 'returns a 404 error' do
        request

        expect(response).to have_gitlab_http_status(:not_found)
      end
    end

    context 'state is already locked' do

@@ -379,23 +438,39 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu
      let(:lock_id) { 'irrelevant to this test, just needs to be present' }
    end

    context 'with the correct lock id' do
      let(:lock_id) { '123-456' }
    where(given_state_name: %w[test-state test.state test%2Ffoo])
    with_them do
      let(:state_name) { given_state_name }

      it 'removes the terraform state lock' do
        request
      context 'with the correct lock id' do
        let(:lock_id) { '123-456' }

        expect(response).to have_gitlab_http_status(:ok)
        it 'removes the terraform state lock' do
          request

          expect(response).to have_gitlab_http_status(:ok)
        end
      end

      context 'with no lock id (force-unlock)' do
        let(:params) { {} }

        it 'removes the terraform state lock' do
          request

          expect(response).to have_gitlab_http_status(:ok)
        end
      end
    end

    context 'with no lock id (force-unlock)' do
      let(:params) { {} }
    context 'with invalid state name' do
      let(:lock_id) { '123-456' }
      let(:state_name) { 'foo/bar' }

      it 'removes the terraform state lock' do
      it 'returns a 404 error' do
        request

        expect(response).to have_gitlab_http_status(:ok)
        expect(response).to have_gitlab_http_status(:not_found)
      end
    end

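The repeated `where(given_state_name: %w[...])` / `with_them` blocks in the hunks above come from the rspec-parameterized gem, which runs the enclosed examples once per value. A standalone sketch of the same mechanism follows; the assertion is illustrative, not from this commit:

require 'rspec-parameterized'

RSpec.describe 'percent-encoded state names' do
  # Each example below runs once per array element.
  where(given_state_name: %w[test-state test.state test%2Ffoo])

  with_them do
    it 'decodes the name the way the API specs above expect' do
      decoded = URI.decode_www_form_component(given_state_name)

      expect(decoded).to eq(given_state_name.gsub('%2F', '/'))
    end
  end
end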
@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe BulkImports::CreateService do
RSpec.describe BulkImports::CreateService, feature_category: :importers do
  let(:user) { create(:user) }
  let(:credentials) { { url: 'http://gitlab.example', access_token: 'token' } }
  let(:destination_group) { create(:group, path: 'destination1') }

@@ -38,170 +38,195 @@ RSpec.describe BulkImports::CreateService do
                              ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
    end

    before do
      allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
        allow(instance).to receive(:instance_version).and_return(source_version)
        allow(instance).to receive(:instance_enterprise).and_return(false)
      end
    end
    context 'when token validation fails' do
      let(:invalid_scopes_api_response) { { 'scopes' => %w[read_user] } }

    it 'creates bulk import' do
      parent_group.add_owner(user)
      expect { subject.execute }.to change { BulkImport.count }.by(1)

      last_bulk_import = BulkImport.last

      expect(last_bulk_import.user).to eq(user)
      expect(last_bulk_import.source_version).to eq(source_version.to_s)
      expect(last_bulk_import.user).to eq(user)
      expect(last_bulk_import.source_enterprise).to eq(false)

      expect_snowplow_event(
        category: 'BulkImports::CreateService',
        action: 'create',
        label: 'bulk_import_group'
      )

      expect_snowplow_event(
        category: 'BulkImports::CreateService',
        action: 'create',
        label: 'import_access_level',
        user: user,
        extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
      )
    end

    it 'creates bulk import entities' do
      expect { subject.execute }.to change { BulkImports::Entity.count }.by(3)
    end

    it 'creates bulk import configuration' do
      expect { subject.execute }.to change { BulkImports::Configuration.count }.by(1)
    end

    it 'enqueues BulkImportWorker' do
      expect(BulkImportWorker).to receive(:perform_async)

      subject.execute
    end

    it 'returns success ServiceResponse' do
      result = subject.execute

      expect(result).to be_a(ServiceResponse)
      expect(result).to be_success
    end

    it 'returns ServiceResponse with error if validation fails' do
      params[0][:source_full_path] = nil

      result = subject.execute

      expect(result).to be_a(ServiceResponse)
      expect(result).to be_error
      expect(result.message).to eq("Validation failed: Source full path can't be blank")
    end

    describe '#user-role' do
      context 'when there is a parent_namespace and the user is a member' do
        let(:group2) { create(:group, path: 'destination200', source_id: parent_group.id ) }
        let(:params) do
          [
            {
              source_type: 'group_entity',
              source_full_path: 'full/path/to/group1',
              destination_slug: 'destination200',
              destination_namespace: 'parent-group'
            }
          ]
        end

        it 'defines access_level from parent namespace membership' do
          parent_group.add_guest(user)
          subject.execute

          expect_snowplow_event(
            category: 'BulkImports::CreateService',
            action: 'create',
            label: 'import_access_level',
            user: user,
            extra: { user_role: 'Guest', import_type: 'bulk_import_group' }
          )
      before do
        allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
          allow(instance).to receive(:instance_version).and_return(source_version)
          allow(instance).to receive(:instance_enterprise).and_return(false)
          allow(instance).to receive(:get).and_return(invalid_scopes_api_response)
        end
      end

      context 'when there is a parent_namespace and the user is not a member' do
        let(:params) do
          [
            {
              source_type: 'group_entity',
              source_full_path: 'full/path/to/group1',
              destination_slug: 'destination-group-1',
              destination_namespace: 'parent-group'
            }
          ]
        end
      it 'returns ServiceResponse with error if token does not have api scope' do
        result = subject.execute

        it 'defines access_level as not a member' do
          subject.execute
          expect_snowplow_event(
            category: 'BulkImports::CreateService',
            action: 'create',
            label: 'import_access_level',
            user: user,
            extra: { user_role: 'Not a member', import_type: 'bulk_import_group' }
          )
        expect(result).to be_a(ServiceResponse)
        expect(result).to be_error
        expect(result.message).to eq("Migration aborted as the provided personal access token is no longer valid.")
      end
    end

    context 'when token validation succeeds' do
      let(:valid_scopes_response) { { 'scopes' => %w[api read_repository] } }

      before do
        allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
          allow(instance).to receive(:instance_version).and_return(source_version)
          allow(instance).to receive(:instance_enterprise).and_return(false)
          allow(instance).to receive(:get).and_return(valid_scopes_response)
        end
      end

      context 'when there is a destination_namespace but no parent_namespace' do
        let(:params) do
          [
            {
              source_type: 'group_entity',
              source_full_path: 'full/path/to/group1',
              destination_slug: 'destination-group-1',
              destination_namespace: 'destination1'
            }
          ]
        end
      it 'creates bulk import' do
        parent_group.add_owner(user)
        expect { subject.execute }.to change { BulkImport.count }.by(1)

        it 'defines access_level from destination_namespace' do
          destination_group.add_developer(user)
          subject.execute
        last_bulk_import = BulkImport.last

          expect_snowplow_event(
            category: 'BulkImports::CreateService',
            action: 'create',
            label: 'import_access_level',
            user: user,
            extra: { user_role: 'Developer', import_type: 'bulk_import_group' }
          )
        end
        expect(last_bulk_import.user).to eq(user)
        expect(last_bulk_import.source_version).to eq(source_version.to_s)
        expect(last_bulk_import.user).to eq(user)
        expect(last_bulk_import.source_enterprise).to eq(false)

        expect_snowplow_event(
          category: 'BulkImports::CreateService',
          action: 'create',
          label: 'bulk_import_group'
        )

        expect_snowplow_event(
          category: 'BulkImports::CreateService',
          action: 'create',
          label: 'import_access_level',
          user: user,
          extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
        )
      end

      context 'when there is no destination_namespace or parent_namespace' do
        let(:params) do
          [
            {
              source_type: 'group_entity',
              source_full_path: 'full/path/to/group1',
              destination_slug: 'destinationational mcdestiny',
              destination_namespace: 'destinational-mcdestiny'
            }
          ]
      it 'creates bulk import entities' do
        expect { subject.execute }.to change { BulkImports::Entity.count }.by(3)
      end

      it 'creates bulk import configuration' do
        expect { subject.execute }.to change { BulkImports::Configuration.count }.by(1)
      end

      it 'enqueues BulkImportWorker' do
        expect(BulkImportWorker).to receive(:perform_async)

        subject.execute
      end

      it 'returns success ServiceResponse' do
        result = subject.execute

        expect(result).to be_a(ServiceResponse)
        expect(result).to be_success
      end

      it 'returns ServiceResponse with error if path validation fails' do
        params[0][:source_full_path] = nil

        result = subject.execute

        expect(result).to be_a(ServiceResponse)
        expect(result).to be_error
        expect(result.message).to eq("Validation failed: Source full path can't be blank")
      end

      describe '#user-role' do
        context 'when there is a parent_namespace and the user is a member' do
          let(:group2) { create(:group, path: 'destination200', source_id: parent_group.id ) }
          let(:params) do
            [
              {
                source_type: 'group_entity',
                source_full_path: 'full/path/to/group1',
                destination_slug: 'destination200',
                destination_namespace: 'parent-group'
              }
            ]
          end

          it 'defines access_level from parent namespace membership' do
            parent_group.add_guest(user)
            subject.execute

            expect_snowplow_event(
              category: 'BulkImports::CreateService',
              action: 'create',
              label: 'import_access_level',
              user: user,
              extra: { user_role: 'Guest', import_type: 'bulk_import_group' }
            )
          end
        end

    it 'defines access_level as owner' do
      subject.execute
        context 'when there is a parent_namespace and the user is not a member' do
          let(:params) do
            [
              {
                source_type: 'group_entity',
                source_full_path: 'full/path/to/group1',
                destination_slug: 'destination-group-1',
                destination_namespace: 'parent-group'
              }
            ]
          end

      expect_snowplow_event(
        category: 'BulkImports::CreateService',
        action: 'create',
        label: 'import_access_level',
        user: user,
        extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
      )
          it 'defines access_level as not a member' do
            subject.execute
            expect_snowplow_event(
              category: 'BulkImports::CreateService',
              action: 'create',
              label: 'import_access_level',
              user: user,
              extra: { user_role: 'Not a member', import_type: 'bulk_import_group' }
            )
          end
        end

        context 'when there is a destination_namespace but no parent_namespace' do
          let(:params) do
            [
              {
                source_type: 'group_entity',
                source_full_path: 'full/path/to/group1',
                destination_slug: 'destination-group-1',
                destination_namespace: 'destination1'
              }
            ]
          end

          it 'defines access_level from destination_namespace' do
            destination_group.add_developer(user)
            subject.execute

            expect_snowplow_event(
              category: 'BulkImports::CreateService',
              action: 'create',
              label: 'import_access_level',
              user: user,
              extra: { user_role: 'Developer', import_type: 'bulk_import_group' }
            )
          end
        end

        context 'when there is no destination_namespace or parent_namespace' do
          let(:params) do
            [
              {
                source_type: 'group_entity',
                source_full_path: 'full/path/to/group1',
                destination_slug: 'destinationational mcdestiny',
                destination_namespace: 'destinational-mcdestiny'
              }
            ]
          end

          it 'defines access_level as owner' do
            subject.execute

            expect_snowplow_event(
              category: 'BulkImports::CreateService',
              action: 'create',
              label: 'import_access_level',
              user: user,
              extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
            )
          end
        end
      end
    end
@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category: :continuous_integration do
  include Ci::SourcePipelineHelpers

  # Using let_it_be on user and projects for these specs can cause

@@ -41,6 +41,12 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do

  subject { service.execute(bridge) }

  shared_context 'when ci_bridge_remove_sourced_pipelines is disabled' do
    before do
      stub_feature_flags(ci_bridge_remove_sourced_pipelines: false)
    end
  end

  context 'when downstream project has not been found' do
    let(:trigger) do
      { trigger: { project: 'unknown/project' } }

@@ -116,12 +122,25 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
    it 'creates a new pipeline in a downstream project' do
      expect(pipeline.user).to eq bridge.user
      expect(pipeline.project).to eq downstream_project
      expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
      expect(bridge.reload.sourced_pipeline.pipeline).to eq pipeline
      expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
      expect(pipeline.source_bridge).to eq bridge
      expect(pipeline.source_bridge).to be_a ::Ci::Bridge
    end

    context 'when ci_bridge_remove_sourced_pipelines is disabled' do
      include_context 'when ci_bridge_remove_sourced_pipelines is disabled'

      it 'creates a new pipeline in a downstream project' do
        expect(pipeline.user).to eq bridge.user
        expect(pipeline.project).to eq downstream_project
        expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
        expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
        expect(pipeline.source_bridge).to eq bridge
        expect(pipeline.source_bridge).to be_a ::Ci::Bridge
      end
    end

    it_behaves_like 'logs downstream pipeline creation' do
      let(:downstream_pipeline) { pipeline }
      let(:expected_root_pipeline) { upstream_pipeline }

@@ -150,9 +169,9 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
    end
  end

  context 'when bridge job has already any downstream pipelines' do
  context 'when bridge job has already any downstream pipeline' do
    before do
      bridge.sourced_pipelines.create!(
      bridge.create_sourced_pipeline!(
        source_pipeline: bridge.pipeline,
        source_project: bridge.project,
        project: bridge.project,

@@ -171,6 +190,31 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
      expect(subject).to be_error
      expect(subject.message).to eq("Already has a downstream pipeline")
    end

    context 'when ci_bridge_remove_sourced_pipelines is disabled' do
      include_context 'when ci_bridge_remove_sourced_pipelines is disabled'

      before do
        bridge.sourced_pipelines.create!(
          source_pipeline: bridge.pipeline,
          source_project: bridge.project,
          project: bridge.project,
          pipeline: create(:ci_pipeline, project: bridge.project)
        )
      end

      it 'logs an error and exits' do
        expect(Gitlab::ErrorTracking)
          .to receive(:track_exception)
          .with(
            instance_of(described_class::DuplicateDownstreamPipelineError),
            bridge_id: bridge.id, project_id: bridge.project.id)
          .and_call_original
        expect(Ci::CreatePipelineService).not_to receive(:new)
        expect(subject).to be_error
        expect(subject.message).to eq("Already has a downstream pipeline")
      end
    end
  end

  context 'when target ref is not specified' do

@@ -198,12 +242,25 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
    it 'creates a new pipeline in a downstream project' do
      expect(pipeline.user).to eq bridge.user
      expect(pipeline.project).to eq downstream_project
      expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
      expect(bridge.reload.sourced_pipeline.pipeline).to eq pipeline
      expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
      expect(pipeline.source_bridge).to eq bridge
      expect(pipeline.source_bridge).to be_a ::Ci::Bridge
    end

    context 'when ci_bridge_remove_sourced_pipelines is disabled' do
      include_context 'when ci_bridge_remove_sourced_pipelines is disabled'

      it 'creates a new pipeline in a downstream project' do
        expect(pipeline.user).to eq bridge.user
        expect(pipeline.project).to eq downstream_project
        expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
        expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
        expect(pipeline.source_bridge).to eq bridge
        expect(pipeline.source_bridge).to be_a ::Ci::Bridge
      end
    end

    it 'updates the bridge status when downstream pipeline gets processed' do
      expect(pipeline.reload).to be_failed
      expect(bridge.reload).to be_failed

@@ -249,12 +306,26 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
      expect(pipeline.builds.map(&:name)).to match_array(%w[rspec echo])
      expect(pipeline.user).to eq bridge.user
      expect(pipeline.project).to eq bridge.project
      expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
      expect(bridge.reload.sourced_pipeline.pipeline).to eq pipeline
      expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
      expect(pipeline.source_bridge).to eq bridge
      expect(pipeline.source_bridge).to be_a ::Ci::Bridge
    end

    context 'when ci_bridge_remove_sourced_pipelines is disabled' do
      include_context 'when ci_bridge_remove_sourced_pipelines is disabled'

      it 'creates a child pipeline in the same project' do
        expect(pipeline.builds.map(&:name)).to match_array(%w[rspec echo])
        expect(pipeline.user).to eq bridge.user
        expect(pipeline.project).to eq bridge.project
        expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
        expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
        expect(pipeline.source_bridge).to eq bridge
        expect(pipeline.source_bridge).to be_a ::Ci::Bridge
      end
    end

    it 'updates bridge status when downstream pipeline gets processed' do
      expect(pipeline.reload).to be_created
      expect(bridge.reload).to be_success
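The assertion change from `bridge.sourced_pipelines.first.pipeline` to `bridge.reload.sourced_pipeline.pipeline`, together with `bridge.create_sourced_pipeline!` in the setup, implies the bridge now exposes a singular association. A sketch of the inferred shape follows; treat it as an assumption, not the literal model code from this commit:

# Inferred: a has_one mirroring the legacy has_many :sourced_pipelines.
class Ci::Bridge < ::Ci::Processable
  has_one :sourced_pipeline, class_name: '::Ci::Sources::Pipeline',
    foreign_key: :source_job_id,
    inverse_of: :source_bridge
end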
@@ -2,21 +2,50 @@

require 'spec_helper'

RSpec.describe BulkImports::ExportRequestWorker do
RSpec.describe BulkImports::ExportRequestWorker, feature_category: :importers do
  let_it_be(:bulk_import) { create(:bulk_import) }
  let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import) }
  let_it_be(:version_url) { 'https://gitlab.example/api/v4/version' }

  let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
  let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
  let(:job_args) { [entity.id] }
  let(:response_headers) { { 'Content-Type' => 'application/json' } }
  let(:request_query) { { page: 1, per_page: 30, private_token: 'token' } }
  let(:personal_access_tokens_response) do
    {
      scopes: %w[api read_repository]
    }
  end

  let_it_be(:source_version) do
    Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
                            ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
  end

  describe '#perform' do
    before do
      allow(Gitlab::HTTP)
        .to receive(:get)
        .with(version_url, anything)
        .and_return(double(code: 200, success?: true, parsed_response: { 'version' => Gitlab::VERSION }))
      allow(Gitlab::HTTP).to receive(:post).and_return(response_double)
      stub_request(:get, 'https://gitlab.example/api/v4/version').with(query: request_query)
        .to_return(status: 200, body: { 'version' => Gitlab::VERSION }.to_json, headers: response_headers)
      stub_request(:get, 'https://gitlab.example/api/v4/personal_access_tokens/self').with(query: request_query)
        .to_return(status: 200, body: personal_access_tokens_response.to_json, headers: response_headers)
    end

    context 'when scope validation fails' do
      let(:personal_access_tokens_response) { { scopes: ['read_user'] } }

      it 'creates a failure record' do
        expect(BulkImports::Failure)
          .to receive(:create)
          .with(
            a_hash_including(
              bulk_import_entity_id: entity.id,
              pipeline_class: 'ExportRequestWorker',
              exception_class: 'BulkImports::Error',
              exception_message: 'Migration aborted as the provided personal access token is no longer valid.',
              correlation_id_value: anything
            )
          ).twice

        perform_multiple(job_args)
      end
    end

    shared_examples 'requests relations export for api resource' do

@@ -161,22 +190,22 @@ RSpec.describe BulkImports::ExportRequestWorker do
          end
        end
      end
    end

    context 'when entity is group' do
      let(:entity) { create(:bulk_import_entity, :group_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
      let(:expected) { "/groups/#{entity.source_xid}/export_relations" }
      let(:full_path_url) { '/groups/foo%2Fbar/export_relations' }
    context 'when entity is group' do
      let(:entity) { create(:bulk_import_entity, :group_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
      let(:expected) { "/groups/#{entity.source_xid}/export_relations" }
      let(:full_path_url) { '/groups/foo%2Fbar/export_relations' }

      it_behaves_like 'requests relations export for api resource'
    end
      it_behaves_like 'requests relations export for api resource'
    end

    context 'when entity is project' do
      let(:entity) { create(:bulk_import_entity, :project_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
      let(:expected) { "/projects/#{entity.source_xid}/export_relations" }
      let(:full_path_url) { '/projects/foo%2Fbar/export_relations' }
    context 'when entity is project' do
      let(:entity) { create(:bulk_import_entity, :project_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
      let(:expected) { "/projects/#{entity.source_xid}/export_relations" }
      let(:full_path_url) { '/projects/foo%2Fbar/export_relations' }

      it_behaves_like 'requests relations export for api resource'
      it_behaves_like 'requests relations export for api resource'
    end
    end
  end
end
@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe BulkImports::PipelineWorker do
RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
  let(:pipeline_class) do
    Class.new do
      def initialize(_); end

@@ -18,94 +18,32 @@ RSpec.describe BulkImports::PipelineWorker do
  let_it_be(:bulk_import) { create(:bulk_import) }
  let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import) }
  let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }

  before do
    stub_const('FakePipeline', pipeline_class)

    allow(entity).to receive(:pipeline_exists?).with('FakePipeline').and_return(true)
    allow_next_instance_of(BulkImports::Groups::Stage) do |instance|
      allow(instance).to receive(:pipelines)
        .and_return([{ stage: 0, pipeline: pipeline_class }])
    end
  let_it_be(:source_version) do
    Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
                            ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
  end

  shared_examples 'successfully runs the pipeline' do
    it 'runs the given pipeline successfully' do
      expect_next_instance_of(Gitlab::Import::Logger) do |logger|
        expect(logger)
          .to receive(:info)
          .with(
            hash_including(
              'pipeline_name' => 'FakePipeline',
              'bulk_import_id' => entity.bulk_import_id,
              'bulk_import_entity_id' => entity.id,
              'bulk_import_entity_type' => entity.source_type,
              'source_full_path' => entity.source_full_path
            )
          )
  context 'when scope validation fails' do
    let(:invalid_scope_api_response) { { 'scopes' => %w[read_user] } }

    before do
      stub_const('FakePipeline', pipeline_class)

      allow(entity).to receive(:pipeline_exists?).with('FakePipeline').and_return(true)

      allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
        allow(instance).to receive(:instance_version).and_return(source_version)
        allow(instance).to receive(:instance_enterprise).and_return(false)
        allow(instance).to receive(:get).and_return(invalid_scope_api_response)
      end

      expect(BulkImports::EntityWorker)
        .to receive(:perform_async)
        .with(entity.id, pipeline_tracker.stage)

      allow(subject).to receive(:jid).and_return('jid')

      subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)

      pipeline_tracker.reload

      expect(pipeline_tracker.status_name).to eq(:finished)
      expect(pipeline_tracker.jid).to eq('jid')
    end
  end

  it_behaves_like 'successfully runs the pipeline' do
    let(:pipeline_tracker) do
      create(
        :bulk_import_tracker,
        entity: entity,
        pipeline_name: 'FakePipeline',
        status_event: 'enqueue'
      )
    end
  end

  context 'when the pipeline cannot be found' do
    it 'logs the error' do
      pipeline_tracker = create(
        :bulk_import_tracker,
        :finished,
        entity: entity,
        pipeline_name: 'FakePipeline'
      )

      expect_next_instance_of(Gitlab::Import::Logger) do |logger|
        expect(logger)
          .to receive(:error)
          .with(
            hash_including(
              'pipeline_tracker_id' => pipeline_tracker.id,
              'bulk_import_entity_id' => entity.id,
              'bulk_import_id' => entity.bulk_import_id,
              'bulk_import_entity_type' => entity.source_type,
              'source_full_path' => entity.source_full_path,
              'source_version' => entity.bulk_import.source_version_info.to_s,
              'message' => 'Unstarted pipeline not found'
            )
          )
      allow_next_instance_of(BulkImports::Groups::Stage) do |instance|
        allow(instance).to receive(:pipelines)
          .and_return([{ stage: 0, pipeline: pipeline_class }])
      end
    end

      expect(BulkImports::EntityWorker)
        .to receive(:perform_async)
        .with(entity.id, pipeline_tracker.stage)

      subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
    end
  end

  context 'when the pipeline raises an exception' do
    it 'logs the error' do
    it 'creates a failure record' do
      pipeline_tracker = create(
        :bulk_import_tracker,
        entity: entity,
|
|||
status_event: 'enqueue'
|
||||
)
|
||||
|
||||
allow(subject).to receive(:jid).and_return('jid')
|
||||
|
||||
expect_next_instance_of(pipeline_class) do |pipeline|
|
||||
expect(pipeline)
|
||||
.to receive(:run)
|
||||
.and_raise(StandardError, 'Error!')
|
||||
end
|
||||
|
||||
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
|
||||
expect(logger)
|
||||
.to receive(:error)
|
||||
.with(
|
||||
hash_including(
|
||||
'pipeline_name' => 'FakePipeline',
|
||||
'bulk_import_entity_id' => entity.id,
|
||||
'bulk_import_id' => entity.bulk_import_id,
|
||||
'bulk_import_entity_type' => entity.source_type,
|
||||
'source_full_path' => entity.source_full_path,
|
||||
'class' => 'BulkImports::PipelineWorker',
|
||||
'exception.backtrace' => anything,
|
||||
'exception.message' => 'Error!',
|
||||
'message' => 'Pipeline failed',
|
||||
'source_version' => entity.bulk_import.source_version_info.to_s,
|
||||
'importer' => 'gitlab_migration'
|
||||
)
|
||||
)
|
||||
end
|
||||
|
||||
expect(Gitlab::ErrorTracking)
|
||||
.to receive(:track_exception)
|
||||
.with(
|
||||
instance_of(StandardError),
|
||||
bulk_import_entity_id: entity.id,
|
||||
bulk_import_id: entity.bulk_import.id,
|
||||
bulk_import_entity_type: entity.source_type,
|
||||
source_full_path: entity.source_full_path,
|
||||
pipeline_name: pipeline_tracker.pipeline_name,
|
||||
importer: 'gitlab_migration',
|
||||
source_version: entity.bulk_import.source_version_info.to_s
|
||||
)
|
||||
|
||||
expect(BulkImports::EntityWorker)
|
||||
.to receive(:perform_async)
|
||||
.with(entity.id, pipeline_tracker.stage)
|
||||
|
||||
expect(BulkImports::Failure)
|
||||
.to receive(:create)
|
||||
.with(
|
||||
|
|
@@ -165,22 +58,102 @@ RSpec.describe BulkImports::PipelineWorker do
          bulk_import_entity_id: entity.id,
          pipeline_class: 'FakePipeline',
          pipeline_step: 'pipeline_worker_run',
          exception_class: 'StandardError',
          exception_message: 'Error!',
          exception_class: 'BulkImports::Error',
          exception_message: 'Migration aborted as the provided personal access token is no longer valid.',
          correlation_id_value: anything
        )
      )

      subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
    end
  end

      pipeline_tracker.reload
  context 'when scope validation succeeds' do
    let(:valid_scope_api_response) { { 'scopes' => %w[api read_repository] } }

      expect(pipeline_tracker.status_name).to eq(:failed)
      expect(pipeline_tracker.jid).to eq('jid')
    before do
      stub_const('FakePipeline', pipeline_class)

      allow(entity).to receive(:pipeline_exists?).with('FakePipeline').and_return(true)

      allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
        allow(instance).to receive(:instance_version).and_return(source_version)
        allow(instance).to receive(:instance_enterprise).and_return(false)
        allow(instance).to receive(:get).and_return(valid_scope_api_response)
      end

      allow_next_instance_of(BulkImports::Groups::Stage) do |instance|
        allow(instance).to receive(:pipelines)
          .and_return([{ stage: 0, pipeline: pipeline_class }])
      end
    end

    context 'when entity is failed' do
      it 'marks tracker as skipped and logs the skip' do
    shared_examples 'successfully runs the pipeline' do
      it 'runs the given pipeline successfully' do
        expect_next_instance_of(Gitlab::Import::Logger) do |logger|
          expect(logger)
            .to receive(:info)
            .with(
              hash_including(
                'pipeline_name' => 'FakePipeline',
                'bulk_import_id' => entity.bulk_import_id,
                'bulk_import_entity_id' => entity.id,
                'bulk_import_entity_type' => entity.source_type,
                'source_full_path' => entity.source_full_path
              )
            )
        end

        expect(BulkImports::EntityWorker)
          .to receive(:perform_async)
          .with(entity.id, pipeline_tracker.stage)

        allow(subject).to receive(:jid).and_return('jid')

        subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)

        pipeline_tracker.reload

        expect(pipeline_tracker.status_name).to eq(:finished)
        expect(pipeline_tracker.jid).to eq('jid')
      end
    end

    context 'when the pipeline cannot be found' do
      it 'logs the error' do
        pipeline_tracker = create(
          :bulk_import_tracker,
          :finished,
          entity: entity,
          pipeline_name: 'FakePipeline'
        )

        expect_next_instance_of(Gitlab::Import::Logger) do |logger|
          expect(logger)
            .to receive(:error)
            .with(
              hash_including(
                'pipeline_tracker_id' => pipeline_tracker.id,
                'bulk_import_entity_id' => entity.id,
                'bulk_import_id' => entity.bulk_import_id,
                'bulk_import_entity_type' => entity.source_type,
                'source_full_path' => entity.source_full_path,
                'source_version' => entity.bulk_import.source_version_info.to_s,
                'message' => 'Unstarted pipeline not found'
              )
            )
        end

        expect(BulkImports::EntityWorker)
          .to receive(:perform_async)
          .with(entity.id, pipeline_tracker.stage)

        subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
      end
    end

    context 'when the pipeline raises an exception' do
      it 'logs the error' do
        pipeline_tracker = create(
          :bulk_import_tracker,
          entity: entity,
@@ -188,13 +161,17 @@ RSpec.describe BulkImports::PipelineWorker do
          status_event: 'enqueue'
        )

        entity.update!(status: -1)
        allow(subject).to receive(:jid).and_return('jid')

        expect_next_instance_of(pipeline_class) do |pipeline|
          expect(pipeline)
            .to receive(:run)
            .and_raise(StandardError, 'Error!')
        end

        expect_next_instance_of(Gitlab::Import::Logger) do |logger|
          allow(logger).to receive(:info)

          expect(logger)
            .to receive(:info)
            .to receive(:error)
            .with(
              hash_including(
                'pipeline_name' => 'FakePipeline',
@@ -202,161 +179,204 @@ RSpec.describe BulkImports::PipelineWorker do
                'bulk_import_id' => entity.bulk_import_id,
                'bulk_import_entity_type' => entity.source_type,
                'source_full_path' => entity.source_full_path,
                'message' => 'Skipping pipeline due to failed entity'
                'class' => 'BulkImports::PipelineWorker',
                'exception.backtrace' => anything,
                'exception.message' => 'Error!',
                'message' => 'Pipeline failed',
                'source_version' => entity.bulk_import.source_version_info.to_s,
                'importer' => 'gitlab_migration'
              )
            )
        end

        subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)

        expect(pipeline_tracker.reload.status_name).to eq(:skipped)
      end
    end

    context 'when retry pipeline error is raised' do
      let(:pipeline_tracker) do
        create(
          :bulk_import_tracker,
          entity: entity,
          pipeline_name: 'FakePipeline',
          status_event: 'enqueue'
        )
      end

      let(:exception) do
        BulkImports::RetryPipelineError.new('Error!', 60)
      end

      before do
        allow(subject).to receive(:jid).and_return('jid')

        expect_next_instance_of(pipeline_class) do |pipeline|
          expect(pipeline)
            .to receive(:run)
            .and_raise(exception)
        end
      end

      it 'reenqueues the worker' do
        expect_any_instance_of(BulkImports::Tracker) do |tracker|
          expect(tracker).to receive(:retry).and_call_original
        end

        expect_next_instance_of(Gitlab::Import::Logger) do |logger|
          expect(logger)
            .to receive(:info)
            .with(
              hash_including(
                'pipeline_name' => 'FakePipeline',
                'bulk_import_entity_id' => entity.id,
                'bulk_import_id' => entity.bulk_import_id,
                'bulk_import_entity_type' => entity.source_type,
                'source_full_path' => entity.source_full_path
              )
            )
        end

        expect(described_class)
          .to receive(:perform_in)
        expect(Gitlab::ErrorTracking)
          .to receive(:track_exception)
          .with(
            60.seconds,
            pipeline_tracker.id,
            pipeline_tracker.stage,
            pipeline_tracker.entity.id
            instance_of(StandardError),
            bulk_import_entity_id: entity.id,
            bulk_import_id: entity.bulk_import.id,
            bulk_import_entity_type: entity.source_type,
            source_full_path: entity.source_full_path,
            pipeline_name: pipeline_tracker.pipeline_name,
            importer: 'gitlab_migration',
            source_version: entity.bulk_import.source_version_info.to_s
          )

        expect(BulkImports::EntityWorker)
          .to receive(:perform_async)
          .with(entity.id, pipeline_tracker.stage)

        expect(BulkImports::Failure)
          .to receive(:create)
          .with(
            a_hash_including(
              bulk_import_entity_id: entity.id,
              pipeline_class: 'FakePipeline',
              pipeline_step: 'pipeline_worker_run',
              exception_class: 'StandardError',
              exception_message: 'Error!',
              correlation_id_value: anything
            )
          )

        subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)

        pipeline_tracker.reload

        expect(pipeline_tracker.enqueued?).to be_truthy
        expect(pipeline_tracker.status_name).to eq(:failed)
        expect(pipeline_tracker.jid).to eq('jid')
      end
    end
  end

    context 'when file extraction pipeline' do
      let(:file_extraction_pipeline) do
        Class.new do
          def initialize(_); end
    context 'when entity is failed' do
      it 'marks tracker as skipped and logs the skip' do
        pipeline_tracker = create(
          :bulk_import_tracker,
          entity: entity,
          pipeline_name: 'FakePipeline',
          status_event: 'enqueue'
        )

          def run; end
        entity.update!(status: -1)

          def self.file_extraction_pipeline?
            true
        expect_next_instance_of(Gitlab::Import::Logger) do |logger|
          allow(logger).to receive(:info)

          expect(logger)
            .to receive(:info)
            .with(
              hash_including(
                'pipeline_name' => 'FakePipeline',
                'bulk_import_entity_id' => entity.id,
                'bulk_import_id' => entity.bulk_import_id,
                'bulk_import_entity_type' => entity.source_type,
                'source_full_path' => entity.source_full_path,
                'message' => 'Skipping pipeline due to failed entity'
              )
            )
        end

        subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)

        expect(pipeline_tracker.reload.status_name).to eq(:skipped)
      end
    end

    context 'when retry pipeline error is raised' do
      let(:pipeline_tracker) do
        create(
          :bulk_import_tracker,
          entity: entity,
          pipeline_name: 'FakePipeline',
          status_event: 'enqueue'
        )
      end

          def self.relation
            'test'
      let(:exception) do
        BulkImports::RetryPipelineError.new('Error!', 60)
      end

      before do
        allow(subject).to receive(:jid).and_return('jid')

        expect_next_instance_of(pipeline_class) do |pipeline|
          expect(pipeline)
            .to receive(:run)
            .and_raise(exception)
        end
      end

      it 'reenqueues the worker' do
        expect_any_instance_of(BulkImports::Tracker) do |tracker|
          expect(tracker).to receive(:retry).and_call_original
        end

        expect_next_instance_of(Gitlab::Import::Logger) do |logger|
          expect(logger)
            .to receive(:info)
            .with(
              hash_including(
                'pipeline_name' => 'FakePipeline',
                'bulk_import_entity_id' => entity.id,
                'bulk_import_id' => entity.bulk_import_id,
                'bulk_import_entity_type' => entity.source_type,
                'source_full_path' => entity.source_full_path
              )
            )
        end

        expect(described_class)
          .to receive(:perform_in)
          .with(
            60.seconds,
            pipeline_tracker.id,
            pipeline_tracker.stage,
            pipeline_tracker.entity.id
          )

        subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)

        pipeline_tracker.reload

        expect(pipeline_tracker.enqueued?).to be_truthy
      end
    end
  end

      let(:pipeline_tracker) do
        create(
          :bulk_import_tracker,
          entity: entity,
          pipeline_name: 'NdjsonPipeline',
          status_event: 'enqueue'
        )
      end
  context 'when file extraction pipeline' do
    let(:file_extraction_pipeline) do
      Class.new do
        def initialize(_); end

      before do
        stub_const('NdjsonPipeline', file_extraction_pipeline)
        def run; end

        allow_next_instance_of(BulkImports::Groups::Stage) do |instance|
          allow(instance).to receive(:pipelines)
            .and_return([{ stage: 0, pipeline: file_extraction_pipeline }])
        end
      end
        def self.file_extraction_pipeline?
          true
        end

      it 'runs the pipeline successfully' do
        allow_next_instance_of(BulkImports::ExportStatus) do |status|
          allow(status).to receive(:started?).and_return(false)
          allow(status).to receive(:empty?).and_return(false)
          allow(status).to receive(:failed?).and_return(false)
        def self.relation
          'test'
        end
      end
    end

        subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
    let(:pipeline_tracker) do
      create(
        :bulk_import_tracker,
        entity: entity,
        pipeline_name: 'NdjsonPipeline',
        status_event: 'enqueue'
      )
    end

        expect(pipeline_tracker.reload.status_name).to eq(:finished)
      end
    before do
      stub_const('NdjsonPipeline', file_extraction_pipeline)

      context 'when export status is started' do
        it 'reenqueues pipeline worker' do
      allow_next_instance_of(BulkImports::Groups::Stage) do |instance|
        allow(instance).to receive(:pipelines)
          .and_return([{ stage: 0, pipeline: file_extraction_pipeline }])
      end
    end

    it 'runs the pipeline successfully' do
      allow_next_instance_of(BulkImports::ExportStatus) do |status|
        allow(status).to receive(:started?).and_return(true)
        allow(status).to receive(:started?).and_return(false)
        allow(status).to receive(:empty?).and_return(false)
        allow(status).to receive(:failed?).and_return(false)
      end

          expect(described_class)
            .to receive(:perform_in)
            .with(
              described_class::FILE_EXTRACTION_PIPELINE_PERFORM_DELAY,
              pipeline_tracker.id,
              pipeline_tracker.stage,
              entity.id
            )

      subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
        end
      end

      context 'when export status is empty' do
        before do
          allow_next_instance_of(BulkImports::ExportStatus) do |status|
            allow(status).to receive(:started?).and_return(false)
            allow(status).to receive(:empty?).and_return(true)
            allow(status).to receive(:failed?).and_return(false)
          end

          entity.update!(created_at: entity_created_at)
      expect(pipeline_tracker.reload.status_name).to eq(:finished)
    end

        context 'when timeout is not reached' do
          let(:entity_created_at) { 1.minute.ago }

    context 'when export status is started' do
      it 'reenqueues pipeline worker' do
        allow_next_instance_of(BulkImports::ExportStatus) do |status|
          allow(status).to receive(:started?).and_return(true)
          allow(status).to receive(:empty?).and_return(false)
          allow(status).to receive(:failed?).and_return(false)
        end

        expect(described_class)
          .to receive(:perform_in)
          .with(
@ -367,15 +387,76 @@ RSpec.describe BulkImports::PipelineWorker do
|
|||
)
|
||||
|
||||
subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
|
||||
|
||||
expect(pipeline_tracker.reload.status_name).to eq(:enqueued)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when timeout is reached' do
|
||||
let(:entity_created_at) { 10.minutes.ago }
|
||||
context 'when export status is empty' do
|
||||
before do
|
||||
allow_next_instance_of(BulkImports::ExportStatus) do |status|
|
||||
allow(status).to receive(:started?).and_return(false)
|
||||
allow(status).to receive(:empty?).and_return(true)
|
||||
allow(status).to receive(:failed?).and_return(false)
|
||||
end
|
||||
|
||||
entity.update!(created_at: entity_created_at)
|
||||
end
|
||||
|
||||
context 'when timeout is not reached' do
|
||||
let(:entity_created_at) { 1.minute.ago }
|
||||
|
||||
it 'reenqueues pipeline worker' do
|
||||
expect(described_class)
|
||||
.to receive(:perform_in)
|
||||
.with(
|
||||
described_class::FILE_EXTRACTION_PIPELINE_PERFORM_DELAY,
|
||||
pipeline_tracker.id,
|
||||
pipeline_tracker.stage,
|
||||
entity.id
|
||||
)
|
||||
|
||||
subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
|
||||
|
||||
expect(pipeline_tracker.reload.status_name).to eq(:enqueued)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when timeout is reached' do
|
||||
let(:entity_created_at) { 10.minutes.ago }
|
||||
|
||||
it 'marks as failed and logs the error' do
|
||||
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
|
||||
expect(logger)
|
||||
.to receive(:error)
|
||||
.with(
|
||||
hash_including(
|
||||
'pipeline_name' => 'NdjsonPipeline',
|
||||
'bulk_import_entity_id' => entity.id,
|
||||
'bulk_import_id' => entity.bulk_import_id,
|
||||
'bulk_import_entity_type' => entity.source_type,
|
||||
'source_full_path' => entity.source_full_path,
|
||||
'class' => 'BulkImports::PipelineWorker',
|
||||
'exception.backtrace' => anything,
|
||||
'exception.class' => 'BulkImports::Pipeline::ExpiredError',
|
||||
'exception.message' => 'Empty export status on source instance',
|
||||
'importer' => 'gitlab_migration',
|
||||
'message' => 'Pipeline failed',
|
||||
'source_version' => entity.bulk_import.source_version_info.to_s
|
||||
)
|
||||
)
|
||||
end
|
||||
|
||||
subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
|
||||
|
||||
expect(pipeline_tracker.reload.status_name).to eq(:failed)
|
||||
end
|
||||
end
|
||||
end
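
    # Entities older than BulkImports::Pipeline::NDJSON_EXPORT_TIMEOUT are
    # presumably expired outright, independent of the export status stubs used
    # in the contexts above.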
    context 'when job reaches timeout' do
      it 'marks as failed and logs the error' do
        old_created_at = entity.created_at
        entity.update!(created_at: (BulkImports::Pipeline::NDJSON_EXPORT_TIMEOUT + 1.hour).ago)

        expect_next_instance_of(Gitlab::Import::Logger) do |logger|
          expect(logger)
            .to receive(:error)
            .with(
              hash_including(
                'pipeline_name' => 'NdjsonPipeline',
                'bulk_import_entity_id' => entity.id,
                'bulk_import_id' => entity.bulk_import_id,
                'bulk_import_entity_type' => entity.source_type,
                'source_full_path' => entity.source_full_path,
                'class' => 'BulkImports::PipelineWorker',
                'exception.backtrace' => anything,
                'exception.class' => 'BulkImports::Pipeline::ExpiredError',
                'exception.message' => 'Pipeline timeout',
                'importer' => 'gitlab_migration',
                'message' => 'Pipeline failed',
                'source_version' => entity.bulk_import.source_version_info.to_s
              )
            )
        end

        subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)

        expect(pipeline_tracker.reload.status_name).to eq(:failed)

        entity.update!(created_at: old_created_at)
      end
    end
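
    # A failed export on the source surfaces as a Pipeline::FailedError whose
    # message wraps the error reported by the source instance.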
    context 'when export status is failed' do
      it 'marks as failed and logs the error' do
        allow_next_instance_of(BulkImports::ExportStatus) do |status|
          allow(status).to receive(:failed?).and_return(true)
          allow(status).to receive(:error).and_return('Error!')
        end

        expect_next_instance_of(Gitlab::Import::Logger) do |logger|
          expect(logger)
            .to receive(:error)
            .with(
              hash_including(
                'pipeline_name' => 'NdjsonPipeline',
                'bulk_import_entity_id' => entity.id,
                'bulk_import_id' => entity.bulk_import_id,
                'bulk_import_entity_type' => entity.source_type,
                'source_full_path' => entity.source_full_path,
                'exception.backtrace' => anything,
                'exception.class' => 'BulkImports::Pipeline::FailedError',
                'exception.message' => 'Export from source instance failed: Error!',
                'importer' => 'gitlab_migration',
                'source_version' => entity.bulk_import.source_version_info.to_s
              )
            )
        end

        subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)

        expect(pipeline_tracker.reload.status_name).to eq(:failed)
      end
    end
  end