Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-12-13 12:13:10 +00:00
parent c283bdb7bc
commit 8ebd99a81f
65 changed files with 404 additions and 295 deletions

View File

@ -67,6 +67,7 @@ include:
RECORD_DEPRECATIONS: "true"
GEO_SECONDARY_PROXY: 0
SUCCESSFULLY_RETRIED_TEST_EXIT_CODE: 137
EVENT_PROF: "sql.active_record"
needs:
- job: "setup-test-env"
- job: "retrieve-tests-metadata"

View File

@ -19,7 +19,7 @@ class Oauth::AuthorizedApplicationsController < Doorkeeper::AuthorizedApplicatio
Doorkeeper::Application.revoke_tokens_and_grants_for(params[:id], current_resource_owner)
end
redirect_to applications_profile_url,
redirect_to user_settings_applications_url,
status: :found,
notice: I18n.t(:notice, scope: [:doorkeeper, :flash, :authorized_applications, :destroy])
end

View File

@ -26,7 +26,7 @@ class Projects::DeployKeysController < Projects::ApplicationController
respond_to do |format|
format.json do
enabled_keys = find_keys(filter: :enabled_keys)
render json: serialize(enabled_keys)
render json: { keys: serialize(enabled_keys) }
end
end
end
@ -35,7 +35,7 @@ class Projects::DeployKeysController < Projects::ApplicationController
respond_to do |format|
format.json do
available_project_keys = find_keys(filter: :available_project_keys)
render json: serialize(available_project_keys)
render json: { keys: serialize(available_project_keys) }
end
end
end
@ -45,7 +45,7 @@ class Projects::DeployKeysController < Projects::ApplicationController
format.json do
available_public_keys = find_keys(filter: :available_public_keys)
render json: serialize(available_public_keys)
render json: { keys: serialize(available_public_keys) }
end
end
end

View File

@ -18,12 +18,12 @@ module Mutations
required: true,
description: 'Full path of the project where a protection rule is located.'
argument :container_path_pattern,
argument :repository_path_pattern,
GraphQL::Types::String,
required: true,
description:
'ContainerRegistryname protected by the protection rule. For example `@my-scope/my-container-*`. ' \
'Wildcard character `*` allowed.'
'Container repository path pattern protected by the protection rule. ' \
'For example `my-project/my-container-*`. Wildcard character `*` allowed.'
argument :push_protected_up_to_access_level,
Types::ContainerRegistry::Protection::RuleAccessLevelEnum,

View File

@ -15,12 +15,12 @@ module Types
null: false,
description: 'ID of the container registry protection rule.'
field :container_path_pattern,
field :repository_path_pattern,
GraphQL::Types::String,
null: false,
description:
'Container repository path pattern protected by the protection rule. ' \
'For example `@my-scope/my-container-*`. Wildcard character `*` allowed.'
'For example `my-project/my-container-*`. Wildcard character `*` allowed.'
field :push_protected_up_to_access_level,
Types::ContainerRegistry::Protection::RuleAccessLevelEnum,

View File

@ -17,6 +17,7 @@ class BulkImport < ApplicationRecord
enum source_type: { gitlab: 0 }
scope :stale, -> { where('updated_at < ?', 24.hours.ago).where(status: [0, 1]) }
scope :order_by_updated_at_and_id, ->(direction) { order(updated_at: direction, id: :asc) }
scope :order_by_created_at, ->(direction) { order(created_at: direction) }
state_machine :status, initial: :created do

View File

@ -57,6 +57,8 @@ class BulkImports::Entity < ApplicationRecord
scope :stale, -> { where('updated_at < ?', 24.hours.ago).where(status: [0, 1]) }
scope :by_bulk_import_id, ->(bulk_import_id) { where(bulk_import_id: bulk_import_id) }
scope :order_by_created_at, ->(direction) { order(created_at: direction) }
scope :order_by_updated_at_and_id, ->(direction) { order(updated_at: direction, id: :asc) }
scope :with_trackers, -> { includes(:trackers) }
alias_attribute :destination_slug, :destination_name

View File

@ -3,6 +3,10 @@
module ContainerRegistry
module Protection
class Rule < ApplicationRecord
include IgnorableColumns
ignore_column :container_path_pattern, remove_with: '16.8', remove_after: '2023-12-22'
enum delete_protected_up_to_access_level:
Gitlab::Access.sym_options_with_owner.slice(:maintainer, :owner, :developer),
_prefix: :delete_protected_up_to
@ -12,7 +16,7 @@ module ContainerRegistry
belongs_to :project, inverse_of: :container_registry_protection_rules
validates :container_path_pattern, presence: true, uniqueness: { scope: :project_id }, length: { maximum: 255 }
validates :repository_path_pattern, presence: true, uniqueness: { scope: :project_id }, length: { maximum: 255 }
validates :delete_protected_up_to_access_level, presence: true
validates :push_protected_up_to_access_level, presence: true
end

View File

@ -2,6 +2,8 @@
module DeployKeys
class BasicDeployKeyEntity < Grape::Entity
include RequestAwareEntity
expose :id
expose :user_id
expose :title
@ -14,6 +16,17 @@ module DeployKeys
expose :updated_at
expose :can_edit
expose :user, as: :owner, using: ::API::Entities::UserBasic, if: -> (_, opts) { can_read_owner?(opts) }
expose :edit_path, if: -> (_, opts) { opts[:project] } do |deploy_key|
edit_project_deploy_key_path(options[:project], deploy_key)
end
expose :enable_path, if: -> (_, opts) { opts[:project] } do |deploy_key|
enable_project_deploy_key_path(options[:project], deploy_key)
end
expose :disable_path, if: -> (_, opts) { opts[:project] } do |deploy_key|
disable_project_deploy_key_path(options[:project], deploy_key)
end
private

View File

@ -4,7 +4,7 @@ module ContainerRegistry
module Protection
class CreateRuleService < BaseService
ALLOWED_ATTRIBUTES = %i[
container_path_pattern
repository_path_pattern
push_protected_up_to_access_level
delete_protected_up_to_access_level
].freeze

View File

@ -16,7 +16,7 @@ module Import
track_access_level('github')
if project.persisted?
store_import_settings(project, access_params)
store_import_settings(project)
success(project)
elsif project.errors[:import_source_disabled].present?
error(project.errors[:import_source_disabled], :forbidden)
@ -134,13 +134,12 @@ module Import
error(translated_message, http_status)
end
def store_import_settings(project, access_params)
def store_import_settings(project)
Gitlab::GithubImport::Settings
.new(project)
.write(
timeout_strategy: params[:timeout_strategy] || ProjectImportData::PESSIMISTIC_TIMEOUT,
optional_stages: params[:optional_stages],
additional_access_tokens: access_params[:additional_access_tokens]
optional_stages: params[:optional_stages]
)
end
end

View File

@ -12,22 +12,27 @@ module BulkImports
feature_category :importers
# Using Keyset pagination for scopes that involve timestamp indexes
def perform
BulkImport.stale.find_each do |import|
logger.error(message: 'BulkImport stale', bulk_import_id: import.id)
import.cleanup_stale
Gitlab::Pagination::Keyset::Iterator.new(scope: bulk_import_scope).each_batch do |imports|
imports.each do |import|
logger.error(message: 'BulkImport stale', bulk_import_id: import.id)
import.cleanup_stale
end
end
BulkImports::Entity.includes(:trackers).stale.find_each do |entity| # rubocop: disable CodeReuse/ActiveRecord
ApplicationRecord.transaction do
logger.with_entity(entity).error(
message: 'BulkImports::Entity stale'
)
Gitlab::Pagination::Keyset::Iterator.new(scope: entity_scope).each_batch do |entities|
entities.each do |entity|
ApplicationRecord.transaction do
logger.with_entity(entity).error(
message: 'BulkImports::Entity stale'
)
entity.cleanup_stale
entity.cleanup_stale
entity.trackers.find_each do |tracker|
tracker.cleanup_stale
entity.trackers.find_each do |tracker|
tracker.cleanup_stale
end
end
end
end
@ -36,5 +41,13 @@ module BulkImports
def logger
@logger ||= Logger.build
end
def bulk_import_scope
BulkImport.stale.order_by_updated_at_and_id(:asc)
end
def entity_scope
BulkImports::Entity.with_trackers.stale.order_by_updated_at_and_id(:asc)
end
end
end

View File

@ -57,12 +57,7 @@ module Gitlab
end
info(project.id, message: 'importer finished')
rescue NoMethodError => e
# This exception will be more useful in development when a new
# Representation is created but the developer forgot to add a
# `#github_identifiers` method.
track_and_raise_exception(project, e, fail_import: true)
rescue ActiveRecord::RecordInvalid, NotRetriableError => e
rescue ActiveRecord::RecordInvalid, NotRetriableError, NoMethodError => e
# We do not raise exception to prevent job retry
track_exception(project, e)
rescue StandardError => e

View File

@ -1,8 +0,0 @@
---
name: custom_roles_in_members_page
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128491
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/422897
milestone: '16.3'
type: development
group: group::authorization
default_enabled: false

View File

@ -6,7 +6,7 @@ devise_for :emails, path: 'profile/emails', controllers: { confirmations: :confi
resource :profile, only: [:show, :update] do
member do
get :audit_log, to: redirect('-/user_settings/authentication_log')
get :applications, to: 'oauth/applications#index'
get :applications, to: redirect('-/user_settings/applications')
put :reset_incoming_email_token
put :reset_feed_token

View File

@ -3,6 +3,7 @@
namespace :user_settings do
scope module: 'user_settings' do
get :authentication_log
get :applications, to: '/oauth/applications#index'
end
resources :active_sessions, only: [:index, :destroy]
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class RenameContainerRegistryProtectionRulesContainerPathPattern < Gitlab::Database::Migration[2.2]
milestone '16.7'
disable_ddl_transaction!
def up
rename_column_concurrently :container_registry_protection_rules, :container_path_pattern, :repository_path_pattern
end
def down
undo_rename_column_concurrently :container_registry_protection_rules, :container_path_pattern,
:repository_path_pattern
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class RenameIndexIContainerProtectionUniqueProjectIdContainerPathPattern < Gitlab::Database::Migration[2.2]
milestone '16.7'
disable_ddl_transaction!
def up
rename_index :container_registry_protection_rules, :idx_copy_d01a85dee8,
:i_container_protection_unique_project_repository_path_pattern
end
def down
rename_index :container_registry_protection_rules, :i_container_protection_unique_project_repository_path_pattern,
:idx_copy_d01a85dee8
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
class CleanupContainerRegistryProtectionRulesContainerPathPatternAtRename < Gitlab::Database::Migration[2.2]
milestone '16.7'
disable_ddl_transaction!
def up
cleanup_concurrent_column_rename :container_registry_protection_rules, :container_path_pattern,
:repository_path_pattern
end
def down
undo_cleanup_concurrent_column_rename :container_registry_protection_rules, :container_path_pattern,
:repository_path_pattern
# Restoring the old index name `:i_container_protection_unique_project_id_container_path_pattern`
# that was changed in the following migrations:
# - `db/migrate/20231126200903_rename_container_registry_protection_rules_container_path_pattern.rb`
# - `db/migrate/20231126200904_rename_index_i_container_protection_unique_project_id_container_path_pattern.rb`
if index_exists?(:container_registry_protection_rules, [:project_id, :container_path_pattern],
name: :i_container_protection_unique_project_container_path_pattern)
rename_index :container_registry_protection_rules, :i_container_protection_unique_project_container_path_pattern,
:i_container_protection_unique_project_id_container_path_pattern
end
end
end

View File

@ -0,0 +1,53 @@
# frozen_string_literal: true
class CleanupCiStagesPipelineIdBigintForSelfHost < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone "16.7"
TABLE = :ci_stages
REFERENCING_TABLE = :ci_pipelines
COLUMN = :pipeline_id
OLD_COLUMN = :pipeline_id_convert_to_bigint
INDEXES = {
'index_ci_stages_on_pipeline_id_convert_to_bigint_and_name' => [
[:pipeline_id_convert_to_bigint, :name], { unique: true }
],
'index_ci_stages_on_pipeline_id_convert_to_bigint' => [
[:pipeline_id_convert_to_bigint], {}
],
'index_ci_stages_on_pipeline_id_convert_to_bigint_and_id' => [
[:pipeline_id_convert_to_bigint, :id], { where: 'status = ANY (ARRAY[0, 1, 2, 8, 9, 10])' }
],
'index_ci_stages_on_pipeline_id_convert_to_bigint_and_position' => [
[:pipeline_id_convert_to_bigint, :position], {}
]
}
def up
return unless column_exists?(TABLE, OLD_COLUMN)
with_lock_retries(raise_on_exhaustion: true) do
lock_tables(REFERENCING_TABLE, TABLE)
cleanup_conversion_of_integer_to_bigint(TABLE, [COLUMN])
end
end
def down
return if column_exists?(TABLE, OLD_COLUMN)
# See db/post_migrate/20231120070345_cleanup_ci_stages_pipeline_id_bigint.rb
# Both Gitlab.com and dev/test environments will be handled in that migration.
return if Gitlab.com_except_jh? || Gitlab.dev_or_test_env?
restore_conversion_of_integer_to_bigint(TABLE, [COLUMN])
INDEXES.each do |index_name, (columns, options)|
add_concurrent_index(TABLE, columns, name: index_name, **options)
end
add_concurrent_foreign_key(
TABLE, REFERENCING_TABLE,
column: OLD_COLUMN,
on_delete: :cascade, validate: true, reverse_lock_order: true
)
end
end

View File

@ -0,0 +1 @@
89c33f31982aa26a63cdbd1fd35d51c984006d6ae66dc3cac8e88f3a8fadf461

View File

@ -0,0 +1 @@
9b1a9d983f5feebe9a7a64a653ff300dbb7b5d12520d751d875527755ca15c61

View File

@ -0,0 +1 @@
f841d351a89d3ed9b2fea8d386b528aff5f1a267c214f6f0a150281377522d44

View File

@ -0,0 +1 @@
b59e995833c187b21e561f3be24d53e1e6e56cee1f7a5933546b55f8a3a731c8

View File

@ -15203,8 +15203,9 @@ CREATE TABLE container_registry_protection_rules (
updated_at timestamp with time zone NOT NULL,
delete_protected_up_to_access_level smallint NOT NULL,
push_protected_up_to_access_level smallint NOT NULL,
container_path_pattern text NOT NULL,
CONSTRAINT check_96811ef9dc CHECK ((char_length(container_path_pattern) <= 255))
repository_path_pattern text,
CONSTRAINT check_3658b31291 CHECK ((repository_path_pattern IS NOT NULL)),
CONSTRAINT check_d53a270af5 CHECK ((char_length(repository_path_pattern) <= 255))
);
CREATE SEQUENCE container_registry_protection_rules_id_seq
@ -31408,7 +31409,7 @@ CREATE INDEX i_compliance_violations_on_project_id_severity_and_id ON merge_requ
CREATE INDEX i_compliance_violations_on_project_id_title_and_id ON merge_requests_compliance_violations USING btree (target_project_id, title, id);
CREATE UNIQUE INDEX i_container_protection_unique_project_id_container_path_pattern ON container_registry_protection_rules USING btree (project_id, container_path_pattern);
CREATE UNIQUE INDEX i_container_protection_unique_project_repository_path_pattern ON container_registry_protection_rules USING btree (project_id, repository_path_pattern);
CREATE INDEX i_custom_email_verifications_on_triggered_at_and_state_started ON service_desk_custom_email_verifications USING btree (triggered_at) WHERE (state = 0);

View File

@ -2555,10 +2555,10 @@ Input type: `CreateContainerRegistryProtectionRuleInput`
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationcreatecontainerregistryprotectionruleclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationcreatecontainerregistryprotectionrulecontainerpathpattern"></a>`containerPathPattern` | [`String!`](#string) | ContainerRegistryname protected by the protection rule. For example `@my-scope/my-container-*`. Wildcard character `*` allowed. |
| <a id="mutationcreatecontainerregistryprotectionruledeleteprotecteduptoaccesslevel"></a>`deleteProtectedUpToAccessLevel` | [`ContainerRegistryProtectionRuleAccessLevel!`](#containerregistryprotectionruleaccesslevel) | Max GitLab access level to prevent from deleting container images in the container registry. For example `DEVELOPER`, `MAINTAINER`, `OWNER`. |
| <a id="mutationcreatecontainerregistryprotectionruleprojectpath"></a>`projectPath` | [`ID!`](#id) | Full path of the project where a protection rule is located. |
| <a id="mutationcreatecontainerregistryprotectionrulepushprotecteduptoaccesslevel"></a>`pushProtectedUpToAccessLevel` | [`ContainerRegistryProtectionRuleAccessLevel!`](#containerregistryprotectionruleaccesslevel) | Max GitLab access level to prevent from pushing container images to the container registry. For example `DEVELOPER`, `MAINTAINER`, `OWNER`. |
| <a id="mutationcreatecontainerregistryprotectionrulerepositorypathpattern"></a>`repositoryPathPattern` | [`String!`](#string) | Container repository path pattern protected by the protection rule. For example `my-project/my-container-*`. Wildcard character `*` allowed. |
#### Fields
@ -16496,10 +16496,10 @@ A container registry protection rule designed to prevent users with a certain ac
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="containerregistryprotectionrulecontainerpathpattern"></a>`containerPathPattern` | [`String!`](#string) | Container repository path pattern protected by the protection rule. For example `@my-scope/my-container-*`. Wildcard character `*` allowed. |
| <a id="containerregistryprotectionruledeleteprotecteduptoaccesslevel"></a>`deleteProtectedUpToAccessLevel` | [`ContainerRegistryProtectionRuleAccessLevel!`](#containerregistryprotectionruleaccesslevel) | Max GitLab access level to prevent from deleting container images in the container registry. For example `DEVELOPER`, `MAINTAINER`, `OWNER`. |
| <a id="containerregistryprotectionruleid"></a>`id` | [`ContainerRegistryProtectionRuleID!`](#containerregistryprotectionruleid) | ID of the container registry protection rule. |
| <a id="containerregistryprotectionrulepushprotecteduptoaccesslevel"></a>`pushProtectedUpToAccessLevel` | [`ContainerRegistryProtectionRuleAccessLevel!`](#containerregistryprotectionruleaccesslevel) | Max GitLab access level to prevent from pushing container images to the container registry. For example `DEVELOPER`, `MAINTAINER`, `OWNER`. |
| <a id="containerregistryprotectionrulerepositorypathpattern"></a>`repositoryPathPattern` | [`String!`](#string) | Container repository path pattern protected by the protection rule. For example `my-project/my-container-*`. Wildcard character `*` allowed. |
### `ContainerRepository`

View File

@ -64,10 +64,6 @@ The status can be one of the following:
- `1`: `finished`
- `-1`: `failed`
- `0` - `started`
- `1` - `finished`
- `-1` - `failed`
```json
[
{
@ -76,6 +72,7 @@ The status can be one of the following:
"error": null,
"updated_at": "2021-05-04T11:25:20.423Z",
"batched": true,
"batches_count": 1,
"batches": [
{
"status": 1,
@ -91,7 +88,8 @@ The status can be one of the following:
"status": 1,
"error": null,
"updated_at": "2021-05-04T11:25:20.085Z",
"batched": false
"batched": false,
"batches_count": 0
}
]
```

View File

@ -53,7 +53,6 @@ curl --request POST \
"attachments_import": true,
"collaborators_import": true
},
"additional_access_tokens": "foo,bar"
}'
```
@ -66,8 +65,6 @@ The following keys are available for `optional_stages`:
For more information, see [Select additional items to import](../user/project/import/github.md#select-additional-items-to-import).
You can supply multiple personal access tokens in `additional_access_tokens` from different user accounts to import projects faster.
Example response:
```json

View File

@ -53,7 +53,7 @@ Refer to the [OAuth RFC](https://www.rfc-editor.org/rfc/rfc6749) to find out
how all those flows work and pick the right one for your use case.
Authorization code (with or without PKCE) flow requires `application` to be
registered first via the `/profile/applications` page in your user's account.
registered first via the `/user_settings/applications` page in your user's account.
During registration, by enabling proper scopes, you can limit the range of
resources which the `application` can access. Upon creation, you obtain the
`application` credentials: _Application ID_ and _Client Secret_. The _Client Secret_

View File

@ -75,6 +75,7 @@ The status can be one of the following:
"error": null,
"updated_at": "2021-05-04T11:25:20.423Z",
"batched": true,
"batches_count": 1,
"batches": [
{
"status": 1,
@ -90,7 +91,8 @@ The status can be one of the following:
"status": 1,
"error": null,
"updated_at": "2021-05-04T11:25:20.085Z",
"batched": false
"batched": false,
"batches_count": 0
}
]
```

View File

@ -232,6 +232,8 @@ Before the interval expires, runners automatically request a new runner authenti
To ensure runners don't reveal sensitive information, you can configure them to only run jobs
on [protected branches](../../user/project/protected_branches.md), or jobs that have [protected tags](../../user/project/protected_tags.md).
Runners configured to run jobs on protected branches cannot run jobs in [merge request pipelines](../pipelines/merge_request_pipelines.md).
### For a shared runner
Prerequisites:

View File

@ -214,6 +214,7 @@ the default value [is the same as for self-managed instances](../../administrati
|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------|
| [Repository size including LFS](../../administration/settings/account_and_limit_settings.md#repository-size-limit) | 10 GB |
| [Maximum import size](../project/settings/import_export.md#import-a-project-and-its-data) | 5 GiB |
| [Maximum export size](../project/settings/import_export.md#export-a-project-and-its-data) | 40 GiB |
| [Maximum remote file size for imports from external object storages](../../administration/settings/import_and_export_settings.md#maximum-remote-file-size-for-imports) | 10 GiB |
| [Maximum download file size when importing from source GitLab instances by direct transfer](../../administration/settings/import_and_export_settings.md#maximum-download-file-size-for-imports-by-direct-transfer) | 5 GiB |
| Maximum attachment size | 100 MiB |

View File

@ -9,8 +9,7 @@ module API
def access_params
{
github_access_token: params[:personal_access_token],
additional_access_tokens: params[:additional_access_tokens]
github_access_token: params[:personal_access_token]
}
end

View File

@ -33,11 +33,6 @@ module API
optional :optional_stages, type: Hash, desc: 'Optional stages of import to be performed'
optional :timeout_strategy, type: String, values: ::ProjectImportData::TIMEOUT_STRATEGIES,
desc: 'Strategy for behavior on timeouts'
optional :additional_access_tokens,
type: Array[String],
coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce,
desc: 'Additional list of personal access tokens',
documentation: { example: 'foo,bar' }
end
post 'import/github' do
result = Import::GithubService.new(client, current_user, params).execute(access_params, provider)

View File

@ -8,18 +8,12 @@ module Gitlab
def self.new_client_for(project, token: nil, host: nil, parallel: true)
token_to_use = token || project.import_data&.credentials&.fetch(:user)
token_pool = project.import_data&.credentials&.dig(:additional_access_tokens)
options = {
Client.new(
token_to_use,
host: host.presence || self.formatted_import_url(project),
per_page: self.per_page(project),
parallel: parallel
}
if token_pool
ClientPool.new(token_pool: token_pool.append(token_to_use), **options)
else
Client.new(token_to_use, **options)
end
)
end
# Returns the ID of the ghost user.

View File

@ -1,39 +0,0 @@
# frozen_string_literal: true
module Gitlab
module GithubImport
class ClientPool
delegate_missing_to :best_client
def initialize(token_pool:, per_page:, parallel:, host: nil)
@token_pool = token_pool
@host = host
@per_page = per_page
@parallel = parallel
end
# Returns the client with the most remaining requests, or the client with
# the closest rate limit reset time, if all clients are rate limited.
def best_client
clients_with_requests_remaining = clients.select(&:requests_remaining?)
return clients_with_requests_remaining.max_by(&:remaining_requests) if clients_with_requests_remaining.any?
clients.min_by(&:rate_limit_resets_in)
end
private
def clients
@clients ||= @token_pool.map do |token|
Client.new(
token,
host: @host,
per_page: @per_page,
parallel: @parallel
)
end
end
end
end
end

View File

@ -57,16 +57,13 @@ module Gitlab
user_settings = user_settings.to_h.with_indifferent_access
optional_stages = fetch_stages_from_params(user_settings[:optional_stages])
credentials = project.import_data&.credentials&.merge(
additional_access_tokens: user_settings[:additional_access_tokens]
)
import_data = project.build_or_assign_import_data(
data: {
optional_stages: optional_stages,
timeout_strategy: user_settings[:timeout_strategy]
},
credentials: credentials
credentials: project.import_data&.credentials
)
import_data.save!

View File

@ -8,7 +8,7 @@ module Sidebars
override :link
def link
applications_profile_path
user_settings_applications_path
end
override :title

View File

@ -6,8 +6,7 @@ module QA
attr_accessor :issue_events_import,
:full_notes_import,
:attachments_import,
:allow_partial_import,
:additional_access_tokens
:allow_partial_import
attribute :github_repo_id do
github_client.repository(github_repository_path).id
@ -62,7 +61,6 @@ module QA
new_name: name,
target_namespace: @personal_namespace || group.full_path,
personal_access_token: github_personal_access_token,
additional_access_tokens: additional_access_tokens,
ci_cd_only: false,
optional_stages: {
single_endpoint_issue_events_import: issue_events_import,

View File

@ -502,10 +502,6 @@ module QA
ENV['QA_GITHUB_ACCESS_TOKEN'].to_s.strip
end
def github_additional_access_tokens
ENV['QA_ADDITIONAL_GITHUB_ACCESS_TOKENS']
end
def require_github_access_token!
return unless github_access_token.empty?

View File

@ -203,7 +203,6 @@ module QA
project.add_name_uuid = false
project.name = 'imported-project'
project.github_personal_access_token = Runtime::Env.github_access_token
project.additional_access_tokens = Runtime::Env.github_additional_access_tokens
project.github_repository_path = github_repo
project.personal_namespace = user.username
project.api_client = Runtime::API::Client.new(user: user)

View File

@ -76,7 +76,7 @@ RSpec.describe Projects::DeployKeysController, feature_category: :continuous_del
it 'returns only enabled keys' do
get :enabled_keys, params: params.merge(format: :json)
expect(json_response.pluck("id")).to match_array([deploy_key_for_target_project.deploy_key_id])
expect(json_response['keys'].pluck("id")).to match_array([deploy_key_for_target_project.deploy_key_id])
end
end
@ -88,7 +88,7 @@ RSpec.describe Projects::DeployKeysController, feature_category: :continuous_del
it 'returns available project keys' do
get :available_project_keys, params: params.merge(format: :json)
expect(json_response.pluck("id")).to match_array([deploy_key_for_accessible_project.deploy_key_id])
expect(json_response['keys'].pluck("id")).to match_array([deploy_key_for_accessible_project.deploy_key_id])
end
end
@ -100,7 +100,7 @@ RSpec.describe Projects::DeployKeysController, feature_category: :continuous_del
it 'returns available public keys' do
get :available_public_keys, params: params.merge(format: :json)
expect(json_response.pluck("id")).to match_array([deploy_key_public.id])
expect(json_response['keys'].pluck("id")).to match_array([deploy_key_public.id])
end
end
end

View File

@ -3,7 +3,7 @@
FactoryBot.define do
factory :container_registry_protection_rule, class: 'ContainerRegistry::Protection::Rule' do
project
container_path_pattern { '@my_scope/my_container' }
repository_path_pattern { 'my_project/my_container' }
delete_protected_up_to_access_level { :developer }
push_protected_up_to_access_level { :developer }
end

View File

@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'User manages applications', feature_category: :user_profile do
let_it_be(:user) { create(:user) }
let_it_be(:new_application_path) { applications_profile_path }
let_it_be(:new_application_path) { user_settings_applications_path }
let_it_be(:index_path) { oauth_applications_path }
before do

View File

@ -12,12 +12,19 @@ RSpec.describe Projects::DeployKeysController, '(JavaScript fixtures)', type: :c
let(:project2) { create(:project, :internal) }
let(:project3) { create(:project, :internal) }
let(:project4) { create(:project, :internal) }
let(:project_key) { create(:deploy_key) }
let(:internal_key) { create(:deploy_key) }
before do
# Using an admin for these fixtures because they are used for verifying a frontend
# component that would normally get its data from `Admin::DeployKeysController`
sign_in(admin)
enable_admin_mode!(admin)
create(:rsa_deploy_key_5120, public: true)
create(:deploy_keys_project, project: project, deploy_key: project_key)
create(:deploy_keys_project, project: project2, deploy_key: internal_key)
create(:deploy_keys_project, project: project3, deploy_key: project_key)
create(:deploy_keys_project, project: project4, deploy_key: project_key)
end
after do
@ -27,14 +34,6 @@ RSpec.describe Projects::DeployKeysController, '(JavaScript fixtures)', type: :c
render_views
it 'deploy_keys/keys.json' do
create(:rsa_deploy_key_5120, public: true)
project_key = create(:deploy_key)
internal_key = create(:deploy_key)
create(:deploy_keys_project, project: project, deploy_key: project_key)
create(:deploy_keys_project, project: project2, deploy_key: internal_key)
create(:deploy_keys_project, project: project3, deploy_key: project_key)
create(:deploy_keys_project, project: project4, deploy_key: project_key)
get :index, params: {
namespace_id: project.namespace.to_param,
project_id: project
@ -42,4 +41,31 @@ RSpec.describe Projects::DeployKeysController, '(JavaScript fixtures)', type: :c
expect(response).to be_successful
end
it 'deploy_keys/enabled_keys.json' do
get :enabled_keys, params: {
namespace_id: project.namespace.to_param,
project_id: project
}, format: :json
expect(response).to be_successful
end
it 'deploy_keys/available_project_keys.json' do
get :available_project_keys, params: {
namespace_id: project.namespace.to_param,
project_id: project
}, format: :json
expect(response).to be_successful
end
it 'deploy_keys/available_public_keys.json' do
get :available_public_keys, params: {
namespace_id: project.namespace.to_param,
project_id: project
}, format: :json
expect(response).to be_successful
end
end

View File

@ -15,8 +15,8 @@ RSpec.describe GitlabSchema.types['ContainerRegistryProtectionRule'], feature_ca
it { is_expected.to have_non_null_graphql_type(::Types::GlobalIDType[::ContainerRegistry::Protection::Rule]) }
end
describe 'container_path_pattern' do
subject { described_class.fields['containerPathPattern'] }
describe 'repository_path_pattern' do
subject { described_class.fields['repositoryPathPattern'] }
it { is_expected.to have_non_null_graphql_type(GraphQL::Types::String) }
end

View File

@ -7,7 +7,6 @@ RSpec.describe API::Helpers::ImportGithubHelpers, feature_category: :importers d
helper = Class.new.include(described_class).new
def helper.params = {
personal_access_token: 'foo',
additional_access_tokens: 'bar',
github_hostname: 'github.example.com'
}
helper
@ -21,7 +20,7 @@ RSpec.describe API::Helpers::ImportGithubHelpers, feature_category: :importers d
describe '#access_params' do
it 'makes the passed in personal access token and extra tokens accessible' do
expect(subject.access_params).to eq({ github_access_token: 'foo', additional_access_tokens: 'bar' })
expect(subject.access_params).to eq({ github_access_token: 'foo' })
end
end

View File

@ -1,41 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::ClientPool, feature_category: :importers do
subject(:pool) { described_class.new(token_pool: %w[foo bar], per_page: 1, parallel: true) }
describe '#best_client' do
it 'returns the client with the most remaining requests' do
allow(Gitlab::GithubImport::Client).to receive(:new).and_return(
instance_double(
Gitlab::GithubImport::Client,
requests_remaining?: true, remaining_requests: 10, rate_limit_resets_in: 1
),
instance_double(
Gitlab::GithubImport::Client,
requests_remaining?: true, remaining_requests: 20, rate_limit_resets_in: 2
)
)
expect(pool.best_client.remaining_requests).to eq(20)
end
context 'when all clients are rate limited' do
it 'returns the client with the closest rate limit reset time' do
allow(Gitlab::GithubImport::Client).to receive(:new).and_return(
instance_double(
Gitlab::GithubImport::Client,
requests_remaining?: false, remaining_requests: 10, rate_limit_resets_in: 10
),
instance_double(
Gitlab::GithubImport::Client,
requests_remaining?: false, remaining_requests: 20, rate_limit_resets_in: 20
)
)
expect(pool.best_client.rate_limit_resets_in).to eq(10)
end
end
end
end

View File

@ -62,12 +62,11 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
collaborators_import: false,
foo: :bar
},
timeout_strategy: "optimistic",
additional_access_tokens: %w[foo bar]
timeout_strategy: "optimistic"
}.stringify_keys
end
it 'puts optional steps, timeout strategy & access tokens into projects import_data' do
it 'puts optional steps and timeout strategy into projects import_data' do
project.build_or_assign_import_data(credentials: { user: 'token' })
settings.write(data_input)
@ -76,8 +75,6 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
.to eq optional_stages.stringify_keys
expect(project.import_data.data['timeout_strategy'])
.to eq("optimistic")
expect(project.import_data.credentials.fetch(:additional_access_tokens))
.to eq(data_input['additional_access_tokens'])
end
end

View File

@ -11,8 +11,6 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
let(:project) { double(:project, import_url: 'http://t0ken@github.com/user/repo.git', id: 1, group: nil) }
it 'returns a new Client with a custom token' do
allow(project).to receive(:import_data)
expect(described_class::Client)
.to receive(:new)
.with('123', host: nil, parallel: true, per_page: 100)
@ -26,7 +24,6 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
expect(project)
.to receive(:import_data)
.and_return(import_data)
.twice
expect(described_class::Client)
.to receive(:new)
@ -49,31 +46,12 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
described_class.ghost_user_id
end
end
context 'when there are additional access tokens' do
it 'returns a new ClientPool containing all tokens' do
import_data = double(:import_data, credentials: { user: '123', additional_access_tokens: %w[foo bar] })
expect(project)
.to receive(:import_data)
.and_return(import_data)
.twice
expect(described_class::ClientPool)
.to receive(:new)
.with(token_pool: %w[foo bar 123], host: nil, parallel: true, per_page: 100)
described_class.new_client_for(project)
end
end
end
context 'GitHub Enterprise' do
let(:project) { double(:project, import_url: 'http://t0ken@github.another-domain.com/repo-org/repo.git', group: nil) }
it 'returns a new Client with a custom token' do
allow(project).to receive(:import_data)
expect(described_class::Client)
.to receive(:new)
.with('123', host: 'http://github.another-domain.com/api/v3', parallel: true, per_page: 100)
@ -87,7 +65,6 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
expect(project)
.to receive(:import_data)
.and_return(import_data)
.twice
expect(described_class::Client)
.to receive(:new)

View File

@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Sidebars::UserSettings::Menus::ApplicationsMenu, feature_category: :navigation do
it_behaves_like 'User settings menu',
link: '/-/profile/applications',
link: '/-/user_settings/applications',
title: _('Applications'),
icon: 'applications',
active_routes: { controller: 'oauth/applications' }

View File

@ -3,12 +3,12 @@
require 'spec_helper'
RSpec.describe BulkImport, type: :model, feature_category: :importers do
let_it_be(:created_bulk_import) { create(:bulk_import, :created) }
let_it_be(:started_bulk_import) { create(:bulk_import, :started) }
let_it_be(:finished_bulk_import) { create(:bulk_import, :finished) }
let_it_be(:created_bulk_import) { create(:bulk_import, :created, updated_at: 2.hours.ago) }
let_it_be(:started_bulk_import) { create(:bulk_import, :started, updated_at: 3.hours.ago) }
let_it_be(:finished_bulk_import) { create(:bulk_import, :finished, updated_at: 1.hour.ago) }
let_it_be(:failed_bulk_import) { create(:bulk_import, :failed) }
let_it_be(:stale_created_bulk_import) { create(:bulk_import, :created, updated_at: 3.days.ago) }
let_it_be(:stale_started_bulk_import) { create(:bulk_import, :started, updated_at: 3.days.ago) }
let_it_be(:stale_started_bulk_import) { create(:bulk_import, :started, updated_at: 2.days.ago) }
describe 'associations' do
it { is_expected.to belong_to(:user).required }
@ -23,10 +23,27 @@ RSpec.describe BulkImport, type: :model, feature_category: :importers do
it { is_expected.to define_enum_for(:source_type).with_values(%i[gitlab]) }
end
describe '.stale scope' do
subject { described_class.stale }
describe 'scopes' do
describe '.stale' do
subject { described_class.stale }
it { is_expected.to contain_exactly(stale_created_bulk_import, stale_started_bulk_import) }
it { is_expected.to contain_exactly(stale_created_bulk_import, stale_started_bulk_import) }
end
describe '.order_by_updated_at_and_id' do
subject { described_class.order_by_updated_at_and_id(:desc) }
it 'sorts by given direction' do
is_expected.to eq([
failed_bulk_import,
finished_bulk_import,
created_bulk_import,
started_bulk_import,
stale_started_bulk_import,
stale_created_bulk_import
])
end
end
end
describe '.all_human_statuses' do

View File

@ -200,6 +200,15 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
expect(described_class.stale).to contain_exactly(entity_1)
end
end
describe '.order_by_updated_at_and_id' do
it 'returns entities ordered by updated_at and id' do
entity_1 = create(:bulk_import_entity, updated_at: 3.days.ago)
entity_2 = create(:bulk_import_entity, updated_at: 2.days.ago)
expect(described_class.order_by_updated_at_and_id(:desc)).to eq([entity_2, entity_1])
end
end
end
describe '.all_human_statuses' do

View File

@ -38,9 +38,9 @@ RSpec.describe ContainerRegistry::Protection::Rule, type: :model, feature_catego
describe 'validations' do
subject { build(:container_registry_protection_rule) }
describe '#container_path_pattern' do
it { is_expected.to validate_presence_of(:container_path_pattern) }
it { is_expected.to validate_length_of(:container_path_pattern).is_at_most(255) }
describe '#repository_path_pattern' do
it { is_expected.to validate_presence_of(:repository_path_pattern) }
it { is_expected.to validate_length_of(:repository_path_pattern).is_at_most(255) }
end
describe '#delete_protected_up_to_access_level' do

View File

@ -15,7 +15,7 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
let(:kwargs) do
{
project_path: project.full_path,
container_path_pattern: container_registry_protection_rule_attributes.container_path_pattern,
repository_path_pattern: container_registry_protection_rule_attributes.repository_path_pattern,
push_protected_up_to_access_level: 'MAINTAINER',
delete_protected_up_to_access_level: 'MAINTAINER'
}
@ -26,7 +26,7 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
<<~QUERY
containerRegistryProtectionRule {
id
containerPathPattern
repositoryPathPattern
}
clientMutationId
errors
@ -48,7 +48,7 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
'errors' => be_blank,
'containerRegistryProtectionRule' => {
'id' => be_present,
'containerPathPattern' => kwargs[:container_path_pattern]
'repositoryPathPattern' => kwargs[:repository_path_pattern]
}
)
end
@ -57,7 +57,7 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
expect { subject }.to change { ::ContainerRegistry::Protection::Rule.count }.by(1)
expect(::ContainerRegistry::Protection::Rule.where(project: project,
container_path_pattern: kwargs[:container_path_pattern])).to exist
repository_path_pattern: kwargs[:repository_path_pattern])).to exist
end
end
@ -84,9 +84,9 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
}
end
context 'with invalid input field `containerPathPattern`' do
context 'with invalid input field `repositoryPathPattern`' do
let(:kwargs) do
super().merge(container_path_pattern: '')
super().merge(repository_path_pattern: '')
end
it_behaves_like 'an erroneous response'
@ -95,7 +95,7 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
it {
subject.tap do
expect(mutation_response['errors']).to eq ["Container path pattern can't be blank"]
expect(mutation_response['errors']).to eq ["Repository path pattern can't be blank"]
end
}
end
@ -108,9 +108,9 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
context 'when container name pattern is slightly different' do
let(:kwargs) do
# The field `container_path_pattern` is unique; this is why we change the value in a minimum way
# The field `repository_path_pattern` is unique; this is why we change the value in a minimum way
super().merge(
container_path_pattern: "#{existing_container_registry_protection_rule.container_path_pattern}-unique"
repository_path_pattern: "#{existing_container_registry_protection_rule.repository_path_pattern}-unique"
)
end
@ -121,9 +121,9 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
end
end
context 'when field `container_path_pattern` is taken' do
context 'when field `repository_path_pattern` is taken' do
let(:kwargs) do
super().merge(container_path_pattern: existing_container_registry_protection_rule.container_path_pattern,
super().merge(repository_path_pattern: existing_container_registry_protection_rule.repository_path_pattern,
push_protected_up_to_access_level: 'MAINTAINER')
end
@ -134,12 +134,12 @@ RSpec.describe 'Creating the container registry protection rule', :aggregate_fai
it 'returns without error' do
subject
expect(mutation_response['errors']).to eq ['Container path pattern has already been taken']
expect(mutation_response['errors']).to eq ['Repository path pattern has already been taken']
end
it 'does not create new container protection rules' do
expect(::ContainerRegistry::Protection::Rule.where(project: project,
container_path_pattern: kwargs[:container_path_pattern],
repository_path_pattern: kwargs[:repository_path_pattern],
push_protected_up_to_access_level: Gitlab::Access::MAINTAINER)).not_to exist
end
end

View File

@ -41,7 +41,7 @@ RSpec.describe 'Deleting a container registry protection rule', :aggregate_failu
'errors' => be_blank,
'containerRegistryProtectionRule' => {
'id' => container_protection_rule.to_global_id.to_s,
'containerPathPattern' => container_protection_rule.container_path_pattern,
'repositoryPathPattern' => container_protection_rule.repository_path_pattern,
'deleteProtectedUpToAccessLevel' => container_protection_rule.delete_protected_up_to_access_level.upcase,
'pushProtectedUpToAccessLevel' => container_protection_rule.push_protected_up_to_access_level.upcase
}
@ -50,7 +50,7 @@ RSpec.describe 'Deleting a container registry protection rule', :aggregate_failu
context 'with existing container registry protection rule belonging to other project' do
let_it_be(:container_protection_rule) do
create(:container_registry_protection_rule, container_path_pattern: 'protection_rule_other_project')
create(:container_registry_protection_rule, repository_path_pattern: 'protection_rule_other_project')
end
it_behaves_like 'an erroneous reponse'
@ -61,7 +61,7 @@ RSpec.describe 'Deleting a container registry protection rule', :aggregate_failu
context 'with deleted container registry protection rule' do
let!(:container_protection_rule) do
create(:container_registry_protection_rule, project: project,
container_path_pattern: 'protection_rule_deleted').destroy!
repository_path_pattern: 'protection_rule_deleted').destroy!
end
it_behaves_like 'an erroneous reponse'

View File

@ -311,6 +311,8 @@ RSpec.describe API::GroupExport, feature_category: :importers do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.pluck('relation')).to contain_exactly('labels', 'milestones', 'badges')
expect(json_response.pluck('status')).to contain_exactly(-1, 0, 1)
expect(json_response.pluck('batched')).to all(eq(false))
expect(json_response.pluck('batches_count')).to all(eq(0))
end
context 'when relation is specified' do
@ -322,6 +324,36 @@ RSpec.describe API::GroupExport, feature_category: :importers do
expect(json_response['status']).to eq(0)
end
end
context 'when there is a batched export' do
let_it_be(:batched_export) do
create(:bulk_import_export, :started, :batched, group: group, relation: 'boards', batches_count: 1)
end
let_it_be(:batch) { create(:bulk_import_export_batch, objects_count: 5, export: batched_export) }
it 'returns a list of batched relation export statuses' do
get api(status_path, user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include(
hash_including(
'relation' => batched_export.relation,
'batched' => true,
'batches_count' => 1,
'batches' => contain_exactly(
{
'batch_number' => 1,
'error' => nil,
'objects_count' => batch.objects_count,
'status' => batch.status,
'updated_at' => batch.updated_at.as_json
}
)
)
)
end
end
end
context 'when bulk import is disabled' do

View File

@ -5,8 +5,7 @@ require 'spec_helper'
RSpec.describe API::ImportGithub, feature_category: :importers do
let(:token) { "asdasd12345" }
let(:provider) { :github }
let(:access_params) { { github_access_token: token, additional_access_tokens: additional_access_tokens } }
let(:additional_access_tokens) { nil }
let(:access_params) { { github_access_token: token } }
let(:provider_username) { user.username }
let(:provider_user) { double('provider', login: provider_username).as_null_object }
let(:provider_repo) do
@ -134,28 +133,6 @@ RSpec.describe API::ImportGithub, feature_category: :importers do
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'when additional access tokens are provided' do
let(:additional_access_tokens) { 'token1,token2' }
it 'returns 201' do
expected_access_params = { github_access_token: token, additional_access_tokens: %w[token1 token2] }
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new)
.with(provider_repo, provider_repo[:name], user.namespace, user, type: provider, **expected_access_params)
.and_return(double(execute: project))
post api("/import/github", user), params: {
target_namespace: user.namespace_path,
personal_access_token: token,
repo_id: non_existing_record_id,
additional_access_tokens: 'token1,token2'
}
expect(response).to have_gitlab_http_status(:created)
end
end
end
describe "POST /import/github/cancel" do

View File

@ -688,6 +688,8 @@ RSpec.describe API::ProjectExport, :aggregate_failures, :clean_gitlab_redis_cach
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.pluck('relation')).to contain_exactly('labels', 'milestones', 'project_badges')
expect(json_response.pluck('status')).to contain_exactly(-1, 0, 1)
expect(json_response.pluck('batched')).to all(eq(false))
expect(json_response.pluck('batches_count')).to all(eq(0))
end
context 'when relation is specified' do
@ -699,6 +701,36 @@ RSpec.describe API::ProjectExport, :aggregate_failures, :clean_gitlab_redis_cach
expect(json_response['status']).to eq(0)
end
end
context 'when there is a batched export' do
let_it_be(:batched_export) do
create(:bulk_import_export, :started, :batched, project: project, relation: 'issues', batches_count: 1)
end
let_it_be(:batch) { create(:bulk_import_export_batch, objects_count: 5, export: batched_export) }
it 'returns a list of batched relation export statuses' do
get api(status_path, user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include(
hash_including(
'relation' => batched_export.relation,
'batched' => true,
'batches_count' => 1,
'batches' => contain_exactly(
{
'batch_number' => 1,
'error' => nil,
'objects_count' => batch.objects_count,
'status' => batch.status,
'updated_at' => batch.updated_at.as_json
}
)
)
)
end
end
end
context 'with bulk_import is disabled' do

View File

@ -13,4 +13,14 @@ RSpec.describe "Legacy routes", type: :request, feature_category: :system_access
get "/-/profile/audit_log"
expect(response).to redirect_to('/-/user_settings/authentication_log')
end
it "/-/profile/active_sessions" do
get "/-/profile/active_sessions"
expect(response).to redirect_to('/-/user_settings/active_sessions')
end
it "/-/profile/applications" do
get "/-/profile/applications"
expect(response).to redirect_to('/-/user_settings/applications')
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe DeployKeys::BasicDeployKeyEntity do
RSpec.describe DeployKeys::BasicDeployKeyEntity, feature_category: :continuous_delivery do
include RequestAwareEntity
let(:user) { create(:user) }
@ -56,7 +56,18 @@ RSpec.describe DeployKeys::BasicDeployKeyEntity do
end
context 'project deploy key' do
let(:options) { { user: user, project: project } }
it { expect(entity.as_json).to include(can_edit: true) }
it { expect(entity.as_json).to include(edit_path: edit_project_deploy_key_path(options[:project], deploy_key)) }
it do
expect(entity.as_json).to include(enable_path: enable_project_deploy_key_path(options[:project], deploy_key))
end
it do
expect(entity.as_json).to include(disable_path: disable_project_deploy_key_path(options[:project], deploy_key))
end
end
context 'public deploy key' do

View File

@ -22,7 +22,7 @@ RSpec.describe ContainerRegistry::Protection::CreateRuleService, '#execute', fea
container_registry_protection_rule:
be_a(ContainerRegistry::Protection::Rule)
.and(have_attributes(
container_path_pattern: params[:container_path_pattern],
repository_path_pattern: params[:repository_path_pattern],
push_protected_up_to_access_level: params[:push_protected_up_to_access_level].to_s,
delete_protected_up_to_access_level: params[:delete_protected_up_to_access_level].to_s
))
@ -36,7 +36,7 @@ RSpec.describe ContainerRegistry::Protection::CreateRuleService, '#execute', fea
expect(
ContainerRegistry::Protection::Rule.where(
project: project,
container_path_pattern: params[:container_path_pattern],
repository_path_pattern: params[:repository_path_pattern],
push_protected_up_to_access_level: params[:push_protected_up_to_access_level]
)
).to exist
@ -57,7 +57,7 @@ RSpec.describe ContainerRegistry::Protection::CreateRuleService, '#execute', fea
expect(
ContainerRegistry::Protection::Rule.where(
project: project,
container_path_pattern: params[:container_path_pattern],
repository_path_pattern: params[:repository_path_pattern],
push_protected_up_to_access_level: params[:push_protected_up_to_access_level]
)
).not_to exist
@ -67,12 +67,12 @@ RSpec.describe ContainerRegistry::Protection::CreateRuleService, '#execute', fea
it_behaves_like 'a successful service response'
context 'when fields are invalid' do
context 'when container_path_pattern is invalid' do
let(:params) { super().merge(container_path_pattern: '') }
context 'when repository_path_pattern is invalid' do
let(:params) { super().merge(repository_path_pattern: '') }
it_behaves_like 'an erroneous service response'
it { is_expected.to have_attributes(message: match(/Container path pattern can't be blank/)) }
it { is_expected.to have_attributes(message: match(/Repository path pattern can't be blank/)) }
end
context 'when delete_protected_up_to_access_level is invalid' do
@ -100,8 +100,8 @@ RSpec.describe ContainerRegistry::Protection::CreateRuleService, '#execute', fea
context 'when container registry name pattern is slightly different' do
let(:params) do
super().merge(
# The field `container_path_pattern` is unique; this is why we change the value in a minimum way
container_path_pattern: "#{existing_container_registry_protection_rule.container_path_pattern}-unique",
# The field `repository_path_pattern` is unique; this is why we change the value in a minimum way
repository_path_pattern: "#{existing_container_registry_protection_rule.repository_path_pattern}-unique",
push_protected_up_to_access_level:
existing_container_registry_protection_rule.push_protected_up_to_access_level
)
@ -110,17 +110,17 @@ RSpec.describe ContainerRegistry::Protection::CreateRuleService, '#execute', fea
it_behaves_like 'a successful service response'
end
context 'when field `container_path_pattern` is taken' do
context 'when field `repository_path_pattern` is taken' do
let(:params) do
super().merge(
container_path_pattern: existing_container_registry_protection_rule.container_path_pattern,
repository_path_pattern: existing_container_registry_protection_rule.repository_path_pattern,
push_protected_up_to_access_level: :maintainer
)
end
it_behaves_like 'an erroneous service response'
it { is_expected.to have_attributes(errors: ['Container path pattern has already been taken']) }
it { is_expected.to have_attributes(errors: ['Repository path pattern has already been taken']) }
it { expect { subject }.not_to change { existing_container_registry_protection_rule.updated_at } }
end

View File

@ -58,7 +58,7 @@ RSpec.describe ContainerRegistry::Protection::DeleteRuleService, '#execute', fea
context 'with deleted container registry protection rule' do
let!(:container_registry_protection_rule) do
create(:container_registry_protection_rule, project: project,
container_path_pattern: 'protection_rule_deleted').destroy!
repository_path_pattern: 'protection_rule_deleted').destroy!
end
it_behaves_like 'a successful service response'

View File

@ -5,12 +5,7 @@ require 'spec_helper'
RSpec.describe Import::GithubService, feature_category: :importers do
let_it_be(:user) { create(:user) }
let_it_be(:token) { 'complex-token' }
let_it_be(:access_params) do
{
github_access_token: 'github-complex-token',
additional_access_tokens: %w[foo bar]
}
end
let_it_be(:access_params) { { github_access_token: 'github-complex-token' } }
let(:settings) { instance_double(Gitlab::GithubImport::Settings) }
let(:user_namespace_path) { user.namespace_path }
@ -37,7 +32,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to receive(:write)
.with(
optional_stages: optional_stages,
additional_access_tokens: access_params[:additional_access_tokens],
timeout_strategy: timeout_strategy
)
end
@ -98,7 +92,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
expect(settings)
.to have_received(:write)
.with(optional_stages: nil,
additional_access_tokens: access_params[:additional_access_tokens],
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@ -124,7 +117,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: nil,
additional_access_tokens: access_params[:additional_access_tokens],
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@ -157,7 +149,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: nil,
additional_access_tokens: access_params[:additional_access_tokens],
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@ -194,7 +185,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
additional_access_tokens: access_params[:additional_access_tokens],
timeout_strategy: timeout_strategy
)
end
@ -210,7 +200,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
additional_access_tokens: access_params[:additional_access_tokens],
timeout_strategy: timeout_strategy
)
end
@ -224,7 +213,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
additional_access_tokens: %w[foo bar],
timeout_strategy: timeout_strategy
)
end

View File

@ -201,25 +201,22 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
expect(project.import_failures.last.exception_message).to eq('some error')
end
context 'without github_identifiers defined' do
context 'when a NoMethod error is raised' do
let(:stubbed_representation) { representation_class.instance_eval { undef_method :github_identifiers } }
it 'logs error when representation does not have a github_id' do
expect(importer_class).not_to receive(:new)
it 'logs the error but does not re-raise it, so the worker does not retry' do
expect(Gitlab::Import::ImportFailureService)
.to receive(:track)
.with(
project_id: project.id,
exception: a_kind_of(NoMethodError),
error_source: 'klass_name',
fail_import: true,
fail_import: false,
external_identifiers: { object_type: 'dummy' }
)
.and_call_original
expect { worker.import(project, client, { 'number' => 10 }) }
.to raise_error(NoMethodError, /^undefined method `github_identifiers/)
worker.import(project, client, { 'number' => 10 })
end
end
@ -237,7 +234,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
.and_raise(exception)
end
it 'logs an error' do
it 'logs the error but does not re-raise it, so the worker does not retry' do
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(