Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent 8f306a7dc5
commit 7379b60d2c
@@ -99,8 +99,11 @@ export default {
   setOriginalDropdownData() {
     const $labelSelect = $('.bulk-update .js-label-select');
+    const dirtyLabelIds = $labelSelect.data('marked') || [];
+    const chosenLabelIds = [...this.getOriginalMarkedIds(), ...dirtyLabelIds];
 
     $labelSelect.data('common', this.getOriginalCommonIds());
-    $labelSelect.data('marked', this.getOriginalMarkedIds());
+    $labelSelect.data('marked', chosenLabelIds);
     $labelSelect.data('indeterminate', this.getOriginalIndeterminateIds());
   },
@@ -97,11 +97,6 @@
   padding-top: 16px;
 }
 
-.gl-shim-mx-2 {
-  margin-left: 4px;
-  margin-right: 4px;
-}
-
 .gl-text-purple { color: $purple; }
 .gl-text-gray-800 { color: $gray-800; }
 .gl-bg-purple-light { background-color: $purple-light; }
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module WorkhorseImportExportUpload
+  extend ActiveSupport::Concern
+  include WorkhorseRequest
+
+  included do
+    skip_before_action :verify_authenticity_token, only: %i[authorize]
+    before_action :verify_workhorse_api!, only: %i[authorize]
+  end
+
+  def authorize
+    set_workhorse_internal_api_content_type
+
+    authorized = ImportExportUploader.workhorse_authorize(
+      has_length: false,
+      maximum_size: ImportExportUpload::MAXIMUM_IMPORT_FILE_SIZE
+    )
+
+    render json: authorized
+  rescue SocketError
+    render json: _("Error uploading file"), status: :internal_server_error
+  end
+
+  private
+
+  def file_is_valid?(file)
+    return false unless file.is_a?(::UploadedFile)
+
+    ImportExportUploader::EXTENSION_WHITELIST
+      .include?(File.extname(file.original_filename).delete('.'))
+  end
+end
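A note on the extension check above: Ruby's File.extname returns only the final extension, which is why the whitelist match works on "gz" rather than "tar.gz". A quick illustration (not part of the commit):

    File.extname('group_export.tar.gz')             # => ".gz"
    File.extname('group_export.tar.gz').delete('.') # => "gz"
    # the upload is accepted as long as "gz" appears in EXTENSION_WHITELIST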
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+class Import::GitlabGroupsController < ApplicationController
+  include WorkhorseImportExportUpload
+
+  before_action :ensure_group_import_enabled
+  before_action :import_rate_limit, only: %i[create]
+
+  def create
+    unless file_is_valid?(group_params[:file])
+      return redirect_back_or_default(options: { alert: _('Unable to process group import file') })
+    end
+
+    group_data = group_params.except(:file).merge(
+      visibility_level: closest_allowed_visibility_level,
+      import_export_upload: ImportExportUpload.new(import_file: group_params[:file])
+    )
+
+    group = ::Groups::CreateService.new(current_user, group_data).execute
+
+    if group.persisted?
+      Groups::ImportExport::ImportService.new(group: group, user: current_user).async_execute
+
+      redirect_to(
+        group_path(group),
+        notice: _("Group '%{group_name}' is being imported.") % { group_name: group.name }
+      )
+    else
+      redirect_back_or_default(
+        options: { alert: _("Group could not be imported: %{errors}") % { errors: group.errors.full_messages.to_sentence } }
+      )
+    end
+  end
+
+  private
+
+  def group_params
+    params.permit(:path, :name, :parent_id, :file)
+  end
+
+  def closest_allowed_visibility_level
+    if group_params[:parent_id].present?
+      parent_group = Group.find(group_params[:parent_id])
+
+      Gitlab::VisibilityLevel.closest_allowed_level(parent_group.visibility_level)
+    else
+      Gitlab::VisibilityLevel::PRIVATE
+    end
+  end
+
+  def ensure_group_import_enabled
+    render_404 unless Feature.enabled?(:group_import_export, @group, default_enabled: true)
+  end
+
+  def import_rate_limit
+    if Gitlab::ApplicationRateLimiter.throttled?(:group_import, scope: current_user)
+      Gitlab::ApplicationRateLimiter.log_request(request, :group_import_request_limit, current_user)
+
+      flash[:alert] = _('This endpoint has been requested too many times. Try again later.')
+      redirect_to new_group_path
+    end
+  end
+end
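The visibility fallback in closest_allowed_visibility_level is worth spelling out. A minimal sketch, assuming GitLab's standard level constants:

    Gitlab::VisibilityLevel::PRIVATE  # => 0
    Gitlab::VisibilityLevel::INTERNAL # => 10
    Gitlab::VisibilityLevel::PUBLIC   # => 20
    # With no parent_id the import defaults to PRIVATE; with a parent it is
    # capped at the closest level the parent (and instance settings) allow:
    Gitlab::VisibilityLevel.closest_allowed_level(parent_group.visibility_level)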
@@ -1,14 +1,11 @@
 # frozen_string_literal: true
 
 class Import::GitlabProjectsController < Import::BaseController
-  include WorkhorseRequest
+  include WorkhorseImportExportUpload
 
   before_action :whitelist_query_limiting, only: [:create]
   before_action :verify_gitlab_project_import_enabled
 
-  skip_before_action :verify_authenticity_token, only: [:authorize]
-  before_action :verify_workhorse_api!, only: [:authorize]
-
   def new
     @namespace = Namespace.find(project_params[:namespace_id])
     return render_404 unless current_user.can?(:create_projects, @namespace)
@@ -17,7 +14,7 @@ class Import::GitlabProjectsController < Import::BaseController
   end
 
   def create
-    unless file_is_valid?
+    unless file_is_valid?(project_params[:file])
       return redirect_back_or_default(options: { alert: _("You need to upload a GitLab project export archive (ending in .gz).") })
     end
 
@@ -33,28 +30,8 @@ class Import::GitlabProjectsController < Import::BaseController
     end
   end
 
-  def authorize
-    set_workhorse_internal_api_content_type
-
-    authorized = ImportExportUploader.workhorse_authorize(
-      has_length: false,
-      maximum_size: Gitlab::CurrentSettings.max_attachment_size.megabytes.to_i)
-
-    render json: authorized
-  rescue SocketError
-    render json: _("Error uploading file"), status: :internal_server_error
-  end
-
   private
 
-  def file_is_valid?
-    return false unless project_params[:file].is_a?(::UploadedFile)
-
-    filename = project_params[:file].original_filename
-
-    ImportExportUploader::EXTENSION_WHITELIST.include?(File.extname(filename).delete('.'))
-  end
-
   def verify_gitlab_project_import_enabled
     render_404 unless gitlab_project_import_enabled?
   end
@@ -15,6 +15,12 @@ module Types
             description: 'User that started the Jira import'
       field :jira_project_key, GraphQL::STRING_TYPE, null: false,
             description: 'Project key for the imported Jira project'
+      field :imported_issues_count, GraphQL::INT_TYPE, null: false,
+            description: 'Count of issues that were successfully imported'
+      field :failed_to_import_count, GraphQL::INT_TYPE, null: false,
+            description: 'Count of issues that failed to import'
+      field :total_issue_count, GraphQL::INT_TYPE, null: false,
+            description: 'Total count of issues that were attempted to import'
     end
     # rubocop: enable Graphql/AuthorizeTypes
   end
|
@ -37,11 +37,12 @@ module TodosHelper
|
|||
end
|
||||
|
||||
def todo_target_title(todo)
|
||||
if todo.target
|
||||
"\"#{todo.target.title}\""
|
||||
else
|
||||
""
|
||||
end
|
||||
# Design To Dos' filenames are displayed in `#todo_target_link` (see `Design#to_reference`),
|
||||
# so to avoid displaying duplicate filenames in the To Do list for designs,
|
||||
# we return an empty string here.
|
||||
return "" if todo.target.blank? || todo.for_design?
|
||||
|
||||
"\"#{todo.target.title}\""
|
||||
end
|
||||
|
||||
def todo_parent_path(todo)
|
||||
|
|
|
|||
|
|
@@ -4,6 +4,8 @@ class ImportExportUpload < ApplicationRecord
   include WithUploads
   include ObjectStorage::BackgroundMove
 
+  MAXIMUM_IMPORT_FILE_SIZE = 50.megabytes.freeze
+
   belongs_to :project
   belongs_to :group
 
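For reference, the new constant resolves via ActiveSupport's numeric extensions:

    50.megabytes # => 52428800 (50 * 1024 * 1024 bytes)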
@@ -11,6 +11,10 @@ module Groups
       @shared = Gitlab::ImportExport::Shared.new(@group)
     end
 
+    def async_execute
+      GroupImportWorker.perform_async(current_user.id, group.id)
+    end
+
     def execute
       if valid_user_permissions? && import_file && restorer.restore
         notify_success
|
@ -9,7 +9,7 @@
|
|||
= _('Expand')
|
||||
%p
|
||||
= _('Action to take when receiving an alert.')
|
||||
= link_to help_page_path('user/project/integrations/prometheus', anchor: 'taking-action-on-an-alert-ultimate') do
|
||||
= link_to help_page_path('user/project/integrations/prometheus', anchor: 'taking-action-on-incidents-ultimate') do
|
||||
= _('More information')
|
||||
.settings-content
|
||||
= form_for @project, url: project_settings_operations_path(@project), method: :patch do |f|
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,5 @@
+---
+title: Expose Jira imported issues count in GraphQL
+merge_request: 32580
+author:
+type: added
@@ -0,0 +1,5 @@
+---
+title: Fix duplicate filename displayed in design todos
+merge_request: 32274
+author: Arun Kumar Mohan
+type: fixed
@@ -0,0 +1,5 @@
+---
+title: Allow different in bulk editing issues
+merge_request: 32734
+author:
+type: fixed
@@ -0,0 +1,5 @@
+---
+title: Fix broken help link on operations settings page
+merge_request: 32722
+author:
+type: fixed
@@ -63,6 +63,10 @@ namespace :import do
       post :authorize
     end
 
+    resource :gitlab_group, only: [:create] do
+      post :authorize
+    end
+
     resource :manifest, only: [:create, :new], controller: :manifest do
       get :status
       get :jobs
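These routes generate the helpers exercised by the new controller spec further down; a quick sketch assuming standard Rails singular-resource routing:

    import_gitlab_group_path            # POST /import/gitlab_group
    authorize_import_gitlab_group_path  # POST /import/gitlab_group/authorize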
@@ -46,6 +46,8 @@ Line breaks have been added to this example for legibility:
   "gitaly_duration_s":0.16,
   "redis_calls":115,
   "redis_duration_s":0.13,
+  "redis_read_bytes":1507378,
+  "redis_write_bytes":2920,
   "correlation_id":"O1SdybnnIq7",
   "cpu_s":17.50,
   "db_duration_s":0.08,
@@ -56,7 +58,7 @@ Line breaks have been added to this example for legibility:
 
 This example was a GET request for a specific
 issue. Each line also contains performance data, with times in
-milliseconds:
+seconds:
 
 1. `duration_s`: total time taken to retrieve the request
 1. `queue_duration_s`: total time that the request was queued inside GitLab Workhorse
@@ -67,6 +69,8 @@ milliseconds:
 1. `gitaly_duration_s`: total time taken by Gitaly calls
 1. `gitaly_calls`: total number of calls made to Gitaly
 1. `redis_calls`: total number of calls made to Redis
+1. `redis_read_bytes`: total bytes read from Redis
+1. `redis_write_bytes`: total bytes written to Redis
 
 User clone and fetch activity using HTTP transport appears in this log as `action: git_upload_pack`.
 
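Since each log line is a single JSON object, the new fields can be read back with the Ruby stdlib; a quick sketch using values from the example above:

    require 'json'
    line = '{"redis_calls":115,"redis_read_bytes":1507378,"redis_write_bytes":2920}'
    entry = JSON.parse(line)
    entry.values_at('redis_read_bytes', 'redis_write_bytes') # => [1507378, 2920]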
@@ -5420,6 +5420,16 @@ type JiraImport {
   """
   createdAt: Time
 
+  """
+  Count of issues that failed to import
+  """
+  failedToImportCount: Int!
+
+  """
+  Count of issues that were successfully imported
+  """
+  importedIssuesCount: Int!
+
   """
   Project key for the imported Jira project
   """
@@ -5434,6 +5444,11 @@ type JiraImport {
   User that started the Jira import
   """
   scheduledBy: User
+
+  """
+  Total count of issues that were attempted to import
+  """
+  totalIssueCount: Int!
 }
 
 """
@@ -15028,6 +15028,42 @@
           "isDeprecated": false,
           "deprecationReason": null
         },
+        {
+          "name": "failedToImportCount",
+          "description": "Count of issues that failed to import",
+          "args": [
+
+          ],
+          "type": {
+            "kind": "NON_NULL",
+            "name": null,
+            "ofType": {
+              "kind": "SCALAR",
+              "name": "Int",
+              "ofType": null
+            }
+          },
+          "isDeprecated": false,
+          "deprecationReason": null
+        },
+        {
+          "name": "importedIssuesCount",
+          "description": "Count of issues that were successfully imported",
+          "args": [
+
+          ],
+          "type": {
+            "kind": "NON_NULL",
+            "name": null,
+            "ofType": {
+              "kind": "SCALAR",
+              "name": "Int",
+              "ofType": null
+            }
+          },
+          "isDeprecated": false,
+          "deprecationReason": null
+        },
+        {
           "name": "jiraProjectKey",
           "description": "Project key for the imported Jira project",
@@ -15073,6 +15109,24 @@
           },
           "isDeprecated": false,
           "deprecationReason": null
-        }
+        },
+        {
+          "name": "totalIssueCount",
+          "description": "Total count of issues that were attempted to import",
+          "args": [
+
+          ],
+          "type": {
+            "kind": "NON_NULL",
+            "name": null,
+            "ofType": {
+              "kind": "SCALAR",
+              "name": "Int",
+              "ofType": null
+            }
+          },
+          "isDeprecated": false,
+          "deprecationReason": null
+        }
       ],
       "inputFields": null,
@@ -801,9 +801,12 @@ Represents an iteration object.
 | Name | Type | Description |
 | --- | ---- | ---------- |
 | `createdAt` | Time | Timestamp of when the Jira import was created |
+| `failedToImportCount` | Int! | Count of issues that failed to import |
+| `importedIssuesCount` | Int! | Count of issues that were successfully imported |
 | `jiraProjectKey` | String! | Project key for the imported Jira project |
 | `scheduledAt` | Time | Timestamp of when the Jira import was scheduled |
 | `scheduledBy` | User | User that started the Jira import |
+| `totalIssueCount` | Int! | Total count of issues that were attempted to import |
 
 ## JiraImportStartPayload
 
@@ -418,14 +418,14 @@ The following are some available Rake tasks:
 | [`sudo gitlab-rake gitlab:elastic:index_projects`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Iterates over all projects and queues Sidekiq jobs to index them in the background. |
 | [`sudo gitlab-rake gitlab:elastic:index_projects_status`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Determines the overall status of the indexing. It is done by counting the total number of indexed projects, dividing by a count of the total number of projects, then multiplying by 100. |
 | [`sudo gitlab-rake gitlab:elastic:clear_index_status`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Deletes all instances of IndexStatus for all projects. |
-| [`sudo gitlab-rake gitlab:elastic:create_empty_index[<INDEX_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Generates an empty index on the Elasticsearch side only if it doesn't already exist. |
-| [`sudo gitlab-rake gitlab:elastic:delete_index[<INDEX_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Removes the GitLab index on the Elasticsearch instance. |
-| [`sudo gitlab-rake gitlab:elastic:recreate_index[<INDEX_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Wrapper task for `gitlab:elastic:delete_index[<INDEX_NAME>]` and `gitlab:elastic:create_empty_index[<INDEX_NAME>]`. |
+| [`sudo gitlab-rake gitlab:elastic:create_empty_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Generates an empty index and assigns an alias for it on the Elasticsearch side only if it doesn't already exist. |
+| [`sudo gitlab-rake gitlab:elastic:delete_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Removes the GitLab index and alias (if exists) on the Elasticsearch instance. |
+| [`sudo gitlab-rake gitlab:elastic:recreate_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Wrapper task for `gitlab:elastic:delete_index[<TARGET_NAME>]` and `gitlab:elastic:create_empty_index[<TARGET_NAME>]`. |
 | [`sudo gitlab-rake gitlab:elastic:index_snippets`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Performs an Elasticsearch import that indexes the snippets data. |
 | [`sudo gitlab-rake gitlab:elastic:projects_not_indexed`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Displays which projects are not indexed. |
 
 NOTE: **Note:**
-The `INDEX_NAME` parameter is optional and will use the default index name from the current `RAILS_ENV` if not set.
+The `TARGET_NAME` parameter is optional and will use the default index/alias name from the current `RAILS_ENV` if not set.
 
 ### Environment variables
@@ -2,8 +2,6 @@
 
 module API
   class GroupImport < Grape::API
-    MAXIMUM_FILE_SIZE = 50.megabytes.freeze
-
     helpers do
       def parent_group
         find_group!(params[:parent_id]) if params[:parent_id].present?
@@ -38,7 +36,10 @@ module API
         status 200
         content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
 
-        ImportExportUploader.workhorse_authorize(has_length: false, maximum_size: MAXIMUM_FILE_SIZE)
+        ImportExportUploader.workhorse_authorize(
+          has_length: false,
+          maximum_size: ImportExportUpload::MAXIMUM_IMPORT_FILE_SIZE
+        )
       end
 
       desc 'Create a new group import' do
@@ -76,7 +77,7 @@ module API
         group = ::Groups::CreateService.new(current_user, group_params).execute
 
         if group.persisted?
-          GroupImportWorker.perform_async(current_user.id, group.id) # rubocop:disable CodeReuse/Worker
+          ::Groups::ImportExport::ImportService.new(group: group, user: current_user).async_execute
 
           accepted!
         else
@@ -28,7 +28,8 @@ module Gitlab
         play_pipeline_schedule: { threshold: 1, interval: 1.minute },
         show_raw_controller: { threshold: -> { Gitlab::CurrentSettings.current_application_settings.raw_blob_request_limit }, interval: 1.minute },
         group_export: { threshold: 1, interval: 5.minutes },
-        group_download_export: { threshold: 10, interval: 10.minutes }
+        group_download_export: { threshold: 10, interval: 10.minutes },
+        group_import: { threshold: 30, interval: 5.minutes }
       }.freeze
     end
 
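The new entry reads as: at most 30 group imports per scope in any 5-minute window. A sketch of the consuming side, mirroring the controller's import_rate_limit filter earlier in this commit:

    # scope here is the requesting user
    if Gitlab::ApplicationRateLimiter.throttled?(:group_import, scope: current_user)
      # the 31st request inside the interval is rejected with an alert
    end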
@@ -23,6 +23,8 @@ module Gitlab
       REDIS_REQUEST_COUNT = :redis_request_count
       REDIS_CALL_DURATION = :redis_call_duration
       REDIS_CALL_DETAILS = :redis_call_details
+      REDIS_READ_BYTES = :redis_read_bytes
+      REDIS_WRITE_BYTES = :redis_write_bytes
 
       # Milliseconds represented in seconds (from 1 to 500 milliseconds).
       QUERY_TIME_BUCKETS = [0.001, 0.0025, 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5].freeze
@@ -36,6 +38,24 @@ module Gitlab
         ::RequestStore[REDIS_REQUEST_COUNT] += 1
       end
 
+      def self.increment_read_bytes(num_bytes)
+        ::RequestStore[REDIS_READ_BYTES] ||= 0
+        ::RequestStore[REDIS_READ_BYTES] += num_bytes
+      end
+
+      def self.increment_write_bytes(num_bytes)
+        ::RequestStore[REDIS_WRITE_BYTES] ||= 0
+        ::RequestStore[REDIS_WRITE_BYTES] += num_bytes
+      end
+
+      def self.read_bytes
+        ::RequestStore[REDIS_READ_BYTES] || 0
+      end
+
+      def self.write_bytes
+        ::RequestStore[REDIS_WRITE_BYTES] || 0
+      end
+
       def self.detail_store
         ::RequestStore[REDIS_CALL_DETAILS] ||= []
       end
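A console-style sketch of the new counters (byte values illustrative, assuming an active RequestStore):

    Gitlab::Instrumentation::Redis.increment_read_bytes(1507)
    Gitlab::Instrumentation::Redis.increment_write_bytes(29)
    Gitlab::Instrumentation::Redis.read_bytes  # => 1507
    Gitlab::Instrumentation::Redis.write_bytes # => 29
    # both readers fall back to 0 when nothing has been counted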
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'redis'
+
+module Gitlab
+  module Instrumentation
+    class RedisDriver < ::Redis::Connection::Ruby
+      def write(command)
+        measure_write_size(command) if ::RequestStore.active?
+        super
+      end
+
+      def read
+        result = super
+        measure_read_size(result) if ::RequestStore.active?
+        result
+      end
+
+      private
+
+      def measure_write_size(command)
+        size = 0
+
+        # Mimic what happens in
+        # https://github.com/redis/redis-rb/blob/f597f21a6b954b685cf939febbc638f6c803e3a7/lib/redis/connection/command_helper.rb#L8.
+        # This count is an approximation that omits the Redis protocol overhead
+        # of type prefixes, length prefixes and line endings.
+        command.each do |x|
+          size += begin
+            if x.is_a? Array
+              x.inject(0) { |sum, y| sum + y.to_s.bytesize }
+            else
+              x.to_s.bytesize
+            end
+          end
+        end
+
+        ::Gitlab::Instrumentation::Redis.increment_write_bytes(size)
+      end
+
+      def measure_read_size(result)
+        # The superclass can return one of five types of results from read:
+        # https://github.com/redis/redis-rb/blob/f597f21a6b954b685cf939febbc638f6c803e3a7/lib/redis/connection/ruby.rb#L406
+        #
+        # 1. Error (exception, will not reach this line)
+        # 2. Status (string)
+        # 3. Integer (will be converted to string by to_s.bytesize and thrown away)
+        # 4. "Binary" string (i.e. may contain zero byte)
+        # 5. Array of binary string (recurses back into read)
+
+        # Avoid double-counting array responses: the array elements themselves
+        # are retrieved with 'read'.
+        unless result.is_a? Array
+          # This count is an approximation that omits the Redis protocol overhead
+          # of type prefixes, length prefixes and line endings.
+          ::Gitlab::Instrumentation::Redis.increment_read_bytes(result.to_s.bytesize)
+        end
+      end
+    end
+  end
+end
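For illustration only: a bare redis-rb client could be handed this driver directly via the client's driver option; inside GitLab the wiring instead happens through the config change below (config[:driver] ||= ...):

    require 'redis'
    # assumes a local Redis; byte counts accrue only while RequestStore is active
    redis = Redis.new(url: 'redis://localhost:6379',
                      driver: ::Gitlab::Instrumentation::RedisDriver)
    redis.set('foo', 'bar')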
@@ -4,7 +4,7 @@ module Gitlab
   module InstrumentationHelper
     extend self
 
-    KEYS = %i(gitaly_calls gitaly_duration_s rugged_calls rugged_duration_s redis_calls redis_duration_s).freeze
+    KEYS = %i(gitaly_calls gitaly_duration_s rugged_calls rugged_duration_s redis_calls redis_duration_s redis_read_bytes redis_write_bytes).freeze
     DURATION_PRECISION = 6 # microseconds
 
     def add_instrumentation_data(payload)
@@ -28,6 +28,13 @@ module Gitlab
         payload[:redis_calls] = redis_calls
         payload[:redis_duration_s] = Gitlab::Instrumentation::Redis.query_time
       end
+
+      redis_read_bytes = Gitlab::Instrumentation::Redis.read_bytes
+      redis_write_bytes = Gitlab::Instrumentation::Redis.write_bytes
+      if redis_read_bytes > 0 || redis_write_bytes > 0
+        payload[:redis_read_bytes] = redis_read_bytes
+        payload[:redis_write_bytes] = redis_write_bytes
+      end
     end
 
     # Returns the queuing duration for a Sidekiq job in seconds, as a float, if the
@@ -100,6 +100,8 @@ module Gitlab
         redis_url = config.delete(:url)
         redis_uri = URI.parse(redis_url)
 
+        config[:driver] ||= ::Gitlab::Instrumentation::RedisDriver
+
         if redis_uri.scheme == 'unix'
           # Redis::Store does not handle Unix sockets well, so let's do it for them
           config[:path] = redis_uri.path
@@ -97,7 +97,7 @@ module Gitlab
       def configuration_toml(gitaly_dir, storage_paths)
         nodes = [{ storage: 'default', address: "unix:#{gitaly_dir}/gitaly.socket", primary: true, token: 'secret' }]
         storages = [{ name: 'default', node: nodes }]
-        config = { socket_path: "#{gitaly_dir}/praefect.socket", virtual_storage: storages }
+        config = { socket_path: "#{gitaly_dir}/praefect.socket", memory_queue_enabled: true, virtual_storage: storages }
         config[:token] = 'secret' if Rails.env.test?
 
         TomlRB.dump(config)
@@ -10662,6 +10662,9 @@ msgstr ""
 msgid "Group %{group_name} was successfully created."
 msgstr ""
 
+msgid "Group '%{group_name}' is being imported."
+msgstr ""
+
 msgid "Group Audit Events"
 msgstr ""
 
@@ -10695,6 +10698,9 @@ msgstr ""
 msgid "Group avatar"
 msgstr ""
 
+msgid "Group could not be imported: %{errors}"
+msgstr ""
+
 msgid "Group description"
 msgstr ""
 
@@ -23104,6 +23110,9 @@ msgstr ""
 msgid "Unable to load the merge request widget. Try reloading the page."
 msgstr ""
 
+msgid "Unable to process group import file"
+msgstr ""
+
 msgid "Unable to resolve"
 msgstr ""
 
@@ -302,7 +302,23 @@ describe 'Issues > Labels bulk assignment' do
       sleep 1 # needed
 
       expect(find("#issue_#{issue1.id}")).to have_content 'bug'
-      expect(find("#issue_#{issue1.id}")).not_to have_content 'feature'
+      expect(find("#issue_#{issue1.id}")).to have_content 'feature'
     end
   end
 
+  context 'mark previously toggled label' do
+    before do
+      enable_bulk_update
+    end
+
+    it do
+      open_labels_dropdown ['feature']
+
+      check_issue issue1
+
+      update_issues
+
+      expect(find("#issue_#{issue1.id}")).to have_content 'feature'
+    end
+  end
+
@@ -6,6 +6,9 @@ describe GitlabSchema.types['JiraImport'] do
   specify { expect(described_class.graphql_name).to eq('JiraImport') }
 
   it 'has the expected fields' do
-    expect(described_class).to have_graphql_fields(:jira_project_key, :createdAt, :scheduled_at, :scheduled_by)
+    expect(described_class).to have_graphql_fields(
+      :jira_project_key, :created_at, :scheduled_at, :scheduled_by,
+      :failed_to_import_count, :imported_issues_count, :total_issue_count
+    )
   end
 end
@@ -5,7 +5,7 @@ require 'spec_helper'
 describe TodosHelper do
   let_it_be(:user) { create(:user) }
   let_it_be(:author) { create(:user) }
-  let_it_be(:issue) { create(:issue) }
+  let_it_be(:issue) { create(:issue, title: 'Issue 1') }
   let_it_be(:design) { create(:design, issue: issue) }
   let_it_be(:note) do
     create(:note,
@@ -68,6 +68,41 @@ describe TodosHelper do
     end
   end
 
+  describe '#todo_target_title' do
+    context 'when the target does not exist' do
+      let(:todo) { double('Todo', target: nil) }
+
+      it 'returns an empty string' do
+        title = helper.todo_target_title(todo)
+        expect(title).to eq("")
+      end
+    end
+
+    context 'when given a design todo' do
+      let(:todo) { design_todo }
+
+      it 'returns an empty string' do
+        title = helper.todo_target_title(todo)
+        expect(title).to eq("")
+      end
+    end
+
+    context 'when given a non-design todo' do
+      let(:todo) do
+        create(:todo, :assigned,
+               user: user,
+               project: issue.project,
+               target: issue,
+               author: author)
+      end
+
+      it 'returns the title' do
+        title = helper.todo_target_title(todo)
+        expect(title).to eq("\"Issue 1\"")
+      end
+    end
+  end
+
   describe '#todo_target_path' do
     context 'when given a design' do
       let(:todo) { design_todo }
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'rspec-parameterized'
+
+describe Gitlab::Instrumentation::RedisDriver, :clean_gitlab_redis_shared_state, :request_store do
+  using RSpec::Parameterized::TableSyntax
+
+  describe 'read and write' do
+    where(:setup, :command, :expect_write, :expect_read) do
+      # The response is 'OK', the request size is the combined size of array
+      # elements. Exercise counting of a status reply.
+      [] | [:set, 'foo', 'bar'] | 3 + 3 + 3 | 2
+
+      # The response is 1001, so 4 bytes. Exercise counting an integer reply.
+      [[:set, 'foobar', 1000]] | [:incr, 'foobar'] | 4 + 6 | 4
+
+      # Exercise counting empty multi bulk reply
+      [] | [:hgetall, 'foobar'] | 7 + 6 | 0
+
+      # Hgetall response length is combined length of keys and values in the
+      # hash. Exercises counting of a multi bulk reply
+      [[:hset, 'myhash', 'field', 'hello world']] | [:hgetall, 'myhash'] | 7 + 6 | 5 + 11
+
+      # Exercise counting of a bulk reply
+      [[:set, 'foo', 'bar' * 100]] | [:get, 'foo'] | 3 + 3 | 3 * 100
+    end
+
+    with_them do
+      it 'counts bytes read and written' do
+        Gitlab::Redis::SharedState.with do |redis|
+          setup.each { |cmd| redis.call(cmd) }
+          RequestStore.clear!
+          redis.call(command)
+        end
+
+        expect(Gitlab::Instrumentation::Redis.read_bytes).to eq(expect_read)
+        expect(Gitlab::Instrumentation::Redis.write_bytes).to eq(expect_write)
+      end
+    end
+  end
+end
@@ -34,12 +34,14 @@ describe Gitlab::InstrumentationHelper do
 
     context 'when Redis calls are made' do
       it 'adds Redis data and omits Gitaly data' do
-        Gitlab::Redis::Cache.with { |redis| redis.get('test-instrumentation') }
+        Gitlab::Redis::Cache.with { |redis| redis.set('test-instrumentation', 123) }
 
         subject
 
         expect(payload[:redis_calls]).to eq(1)
         expect(payload[:redis_duration_s]).to be >= 0
+        expect(payload[:redis_read_bytes]).to be >= 0
+        expect(payload[:redis_write_bytes]).to be >= 0
         expect(payload[:gitaly_calls]).to be_nil
         expect(payload[:gitaly_duration]).to be_nil
       end
@@ -7,9 +7,30 @@ describe 'query Jira import data' do
 
   let_it_be(:current_user) { create(:user) }
   let_it_be(:project) { create(:project, :private, :import_started, import_type: 'jira') }
-  let_it_be(:jira_import1) { create(:jira_import_state, :finished, project: project, jira_project_key: 'AA', user: current_user, created_at: 2.days.ago) }
-  let_it_be(:jira_import2) { create(:jira_import_state, :finished, project: project, jira_project_key: 'BB', user: current_user, created_at: 5.days.ago) }
-
+  let_it_be(:jira_import1) do
+    create(
+      :jira_import_state, :finished,
+      project: project,
+      jira_project_key: 'AA',
+      user: current_user,
+      created_at: 2.days.ago,
+      failed_to_import_count: 2,
+      imported_issues_count: 2,
+      total_issue_count: 4
+    )
+  end
+  let_it_be(:jira_import2) do
+    create(
+      :jira_import_state, :finished,
+      project: project,
+      jira_project_key: 'BB',
+      user: current_user,
+      created_at: 5.days.ago,
+      failed_to_import_count: 1,
+      imported_issues_count: 2,
+      total_issue_count: 3
+    )
+  end
   let(:query) do
     %(
       query {
@@ -23,6 +44,9 @@ describe 'query Jira import data' do
           scheduledBy {
             username
           }
+          importedIssuesCount
+          failedToImportCount
+          totalIssueCount
         }
       }
     }
@@ -64,10 +88,16 @@ describe 'query Jira import data' do
     it 'returns list of jira imports' do
       jira_project_keys = jira_imports.map { |ji| ji['jiraProjectKey'] }
       usernames = jira_imports.map { |ji| ji.dig('scheduledBy', 'username') }
+      imported_issues_count = jira_imports.map { |ji| ji.dig('importedIssuesCount') }
+      failed_issues_count = jira_imports.map { |ji| ji.dig('failedToImportCount') }
+      total_issue_count = jira_imports.map { |ji| ji.dig('totalIssueCount') }
 
       expect(jira_imports.size).to eq 2
       expect(jira_project_keys).to eq %w(BB AA)
       expect(usernames).to eq [current_user.username, current_user.username]
+      expect(imported_issues_count).to eq [2, 2]
+      expect(failed_issues_count).to eq [1, 2]
+      expect(total_issue_count).to eq [3, 4]
     end
   end
 
@@ -0,0 +1,258 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Import::GitlabGroupsController do
+  include WorkhorseHelpers
+
+  let(:import_path) { "#{Dir.tmpdir}/gitlab_groups_controller_spec" }
+  let(:workhorse_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
+  let(:workhorse_headers) do
+    { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token }
+  end
+
+  before do
+    allow_next_instance_of(Gitlab::ImportExport) do |import_export|
+      expect(import_export).to receive(:storage_path).and_return(import_path)
+    end
+
+    stub_uploads_object_storage(ImportExportUploader)
+  end
+
+  after do
+    FileUtils.rm_rf(import_path, secure: true)
+  end
+
+  describe 'POST create' do
+    subject(:import_request) { upload_archive(file_upload, workhorse_headers, request_params) }
+
+    let_it_be(:user) { create(:user) }
+
+    let(:file) { File.join('spec', %w[fixtures group_export.tar.gz]) }
+    let(:file_upload) { fixture_file_upload(file) }
+
+    before do
+      login_as(user)
+    end
+
+    def upload_archive(file, headers = {}, params = {})
+      workhorse_finalize(
+        import_gitlab_group_path,
+        method: :post,
+        file_key: :file,
+        params: params.merge(file: file),
+        headers: headers,
+        send_rewritten_field: true
+      )
+    end
+
+    context 'when importing without a parent group' do
+      let(:request_params) { { path: 'test-group-import', name: 'test-group-import' } }
+
+      it 'successfully creates the group' do
+        expect { import_request }.to change { Group.count }.by 1
+
+        group = Group.find_by(name: 'test-group-import')
+
+        expect(response).to have_gitlab_http_status(:found)
+        expect(response).to redirect_to(group_path(group))
+        expect(flash[:notice]).to include('is being imported')
+      end
+
+      it 'imports the group data', :sidekiq_inline do
+        allow(GroupImportWorker).to receive(:perform_async).and_call_original
+
+        import_request
+
+        group = Group.find_by(name: 'test-group-import')
+
+        expect(GroupImportWorker).to have_received(:perform_async).with(user.id, group.id)
+
+        expect(group.description).to eq 'A voluptate non sequi temporibus quam at.'
+        expect(group.visibility_level).to eq Gitlab::VisibilityLevel::PRIVATE
+      end
+    end
+
+    context 'when importing to a parent group' do
+      let(:request_params) { { path: 'test-group-import', name: 'test-group-import', parent_id: parent_group.id } }
+      let(:parent_group) { create(:group) }
+
+      before do
+        parent_group.add_owner(user)
+      end
+
+      it 'creates a new group under the parent' do
+        expect { import_request }
+          .to change { parent_group.children.reload.size }.by 1
+
+        expect(response).to have_gitlab_http_status(:found)
+      end
+
+      shared_examples 'is created with the parent visibility level' do |visibility_level|
+        before do
+          parent_group.update!(visibility_level: visibility_level)
+        end
+
+        it "imports a #{Gitlab::VisibilityLevel.level_name(visibility_level)} group" do
+          import_request
+
+          group = parent_group.children.find_by(name: 'test-group-import')
+          expect(group.visibility_level).to eq visibility_level
+        end
+      end
+
+      [
+        Gitlab::VisibilityLevel::PUBLIC,
+        Gitlab::VisibilityLevel::INTERNAL,
+        Gitlab::VisibilityLevel::PRIVATE
+      ].each do |visibility_level|
+        context "when the parent is #{Gitlab::VisibilityLevel.level_name(visibility_level)}" do
+          include_examples 'is created with the parent visibility level', visibility_level
+        end
+      end
+    end
+
+    context 'when supplied invalid params' do
+      subject(:import_request) do
+        upload_archive(
+          file_upload,
+          workhorse_headers,
+          { path: '', name: '' }
+        )
+      end
+
+      it 'responds with an error' do
+        expect { import_request }.not_to change { Group.count }
+
+        expect(flash[:alert])
+          .to include('Group could not be imported', "Name can't be blank", 'Group URL is too short')
+      end
+    end
+
+    context 'when the user is not authorized to create groups' do
+      let(:request_params) { { path: 'test-group-import', name: 'test-group-import' } }
+      let(:user) { create(:user, can_create_group: false) }
+
+      it 'returns an error' do
+        expect { import_request }.not_to change { Group.count }
+
+        expect(flash[:alert]).to eq 'Group could not be imported: You don’t have permission to create groups.'
+      end
+    end
+
+    context 'when the requests exceed the rate limit' do
+      let(:request_params) { { path: 'test-group-import', name: 'test-group-import' } }
+
+      before do
+        allow(Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
+      end
+
+      it 'throttles the requests' do
+        import_request
+
+        expect(response).to have_gitlab_http_status(:found)
+        expect(flash[:alert]).to eq 'This endpoint has been requested too many times. Try again later.'
+      end
+    end
+
+    context 'when group import FF is disabled' do
+      let(:request_params) { { path: 'test-group-import', name: 'test-group-import' } }
+
+      before do
+        stub_feature_flags(group_import_export: false)
+      end
+
+      it 'returns an error' do
+        expect { import_request }.not_to change { Group.count }
+
+        expect(response).to have_gitlab_http_status(:not_found)
+      end
+    end
+
+    context 'when the parent group is invalid' do
+      let(:request_params) { { path: 'test-group-import', name: 'test-group-import', parent_id: -1 } }
+
+      it 'does not create a new group' do
+        expect { import_request }.not_to change { Group.count }
+
+        expect(response).to have_gitlab_http_status(:not_found)
+      end
+    end
+
+    context 'when the user is not an owner of the parent group' do
+      let(:request_params) { { path: 'test-group-import', name: 'test-group-import', parent_id: parent_group.id } }
+      let(:parent_group) { create(:group) }
+
+      it 'returns an error' do
+        expect { import_request }.not_to change { parent_group.children.reload.count }
+
+        expect(flash[:alert]).to include "You don’t have permission to create a subgroup in this group"
+      end
+    end
+  end
+
+  describe 'POST authorize' do
+    let_it_be(:user) { create(:user) }
+
+    before do
+      login_as(user)
+    end
+
+    context 'when using a workhorse header' do
+      subject(:authorize_request) { post authorize_import_gitlab_group_path, headers: workhorse_headers }
+
+      it 'authorizes the request' do
+        authorize_request
+
+        expect(response).to have_gitlab_http_status :ok
+        expect(response.media_type).to eq Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
+        expect(json_response['TempPath']).to eq ImportExportUploader.workhorse_local_upload_path
+      end
+    end
+
+    context 'when the request bypasses gitlab-workhorse' do
+      subject(:authorize_request) { post authorize_import_gitlab_group_path }
+
+      it 'rejects the request' do
+        expect { authorize_request }.to raise_error(JWT::DecodeError)
+      end
+    end
+
+    context 'when direct upload is enabled' do
+      subject(:authorize_request) { post authorize_import_gitlab_group_path, headers: workhorse_headers }
+
+      before do
+        stub_uploads_object_storage(ImportExportUploader, enabled: true, direct_upload: true)
+      end
+
+      it 'accepts the request and stores the files' do
+        authorize_request
+
+        expect(response).to have_gitlab_http_status :ok
+        expect(response.media_type).to eq Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
+        expect(json_response).not_to have_key 'TempPath'
+
+        expect(json_response['RemoteObject'].keys)
+          .to include('ID', 'GetURL', 'StoreURL', 'DeleteURL', 'MultipartUpload')
+      end
+    end
+
+    context 'when direct upload is disabled' do
+      subject(:authorize_request) { post authorize_import_gitlab_group_path, headers: workhorse_headers }
+
+      before do
+        stub_uploads_object_storage(ImportExportUploader, enabled: true, direct_upload: false)
+      end
+
+      it 'handles the local file' do
+        authorize_request
+
+        expect(response).to have_gitlab_http_status :ok
+        expect(response.media_type).to eq Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
+
+        expect(json_response['TempPath']).to eq ImportExportUploader.workhorse_local_upload_path
+        expect(json_response['RemoteObject']).to be_nil
+      end
+    end
+  end
+end
@@ -3,6 +3,37 @@
 require 'spec_helper'
 
 describe Groups::ImportExport::ImportService do
+  describe '#async_execute' do
+    let_it_be(:user) { create(:user) }
+    let_it_be(:group) { create(:group) }
+
+    context 'when the job can be successfully scheduled' do
+      subject(:import_service) { described_class.new(group: group, user: user) }
+
+      it 'enqueues an import job' do
+        expect(GroupImportWorker).to receive(:perform_async).with(user.id, group.id)
+
+        import_service.async_execute
+      end
+
+      it 'returns truthy' do
+        expect(import_service.async_execute).to be_truthy
+      end
+    end
+
+    context 'when the job cannot be scheduled' do
+      subject(:import_service) { described_class.new(group: group, user: user) }
+
+      before do
+        allow(GroupImportWorker).to receive(:perform_async).and_return(nil)
+      end
+
+      it 'returns falsey' do
+        expect(import_service.async_execute).to be_falsey
+      end
+    end
+  end
+
   context 'with group_import_ndjson feature flag disabled' do
     let(:user) { create(:admin) }
     let(:group) { create(:group) }