Setup Phabricator import
This sets up the basics for importing Phabricator tasks into GitLab issues. All tasks from a Phabricator instance are imported into a new project whose repository is disabled. The import hooks into the regular ProjectImport setup but, like the GitHub parallel importer, takes care of all the work itself. In this iteration, each page of tasks is imported in a separate Sidekiq job. The first thing we do when requesting a new page of tasks is schedule the import of the next page. To avoid deadlocks, only a single job per worker type is allowed to run at the same time for a given project. For now only basic issue information is imported; this should be extended to richer information later.
This commit is contained in:
parent 6189c869b8
commit 589b2db06c
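The scheduling flow described in the commit message can be sketched roughly as follows, using the classes added in this commit (a simplified sketch, not the exact production call path; `project` and the cursor value are placeholders):

```ruby
# Kicking off an import sets a custom JID and schedules the first page of tasks.
importer = Gitlab::PhabricatorImport::Importer.new(project)
importer.execute

# Each page is handled by its own Sidekiq job. A job first schedules the next
# page (when the Conduit response has one) and then imports the tasks on the
# current page; only one job of this type runs at a time for the project.
Gitlab::PhabricatorImport::ImportTasksWorker.schedule(project.id, 'after-cursor')
```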
@@ -42,7 +42,7 @@ class ApplicationController < ActionController::Base
    :bitbucket_server_import_enabled?,
    :google_code_import_enabled?, :fogbugz_import_enabled?,
    :git_import_enabled?, :gitlab_project_import_enabled?,
    :manifest_import_enabled?
    :manifest_import_enabled?, :phabricator_import_enabled?

  # Adds `no-store` to the DEFAULT_CACHE_CONTROL, to prevent security
  # concerns due to caching private data.
@@ -424,6 +424,10 @@ class ApplicationController < ActionController::Base
    Group.supports_nested_objects? && Gitlab::CurrentSettings.import_sources.include?('manifest')
  end

  def phabricator_import_enabled?
    Gitlab::PhabricatorImport.available?
  end

  # U2F (universal 2nd factor) devices need a unique identifier for the application
  # to perform authentication.
  # https://developers.yubico.com/U2F/App_ID.html
@@ -0,0 +1,35 @@
# frozen_string_literal: true

class Import::PhabricatorController < Import::BaseController
  include ImportHelper

  before_action :verify_import_enabled

  def new
  end

  def create
    @project = Gitlab::PhabricatorImport::ProjectCreator
                 .new(current_user, import_params).execute

    if @project&.persisted?
      redirect_to @project
    else
      @name = params[:name]
      @path = params[:path]
      @errors = @project&.errors&.full_messages || [_("Invalid import params")]

      render :new
    end
  end

  def verify_import_enabled
    render_404 unless phabricator_import_enabled?
  end

  private

  def import_params
    params.permit(:path, :phabricator_server_url, :api_token, :name, :namespace_id)
  end
end
@@ -7,28 +7,7 @@
%hr

= form_tag import_gitlab_project_path, class: 'new_project', multipart: true do
  .row
    .form-group.project-name.col-sm-12
      = label_tag :name, _('Project name'), class: 'label-bold'
      = text_field_tag :name, @name, placeholder: "My awesome project", class: "js-project-name form-control input-lg", autofocus: true
    .form-group.col-12.col-sm-6
      = label_tag :namespace_id, _('Project URL'), class: 'label-bold'
      .form-group
        .input-group
          - if current_user.can_select_namespace?
            .input-group-prepend.has-tooltip{ title: root_url }
              .input-group-text
                = root_url
            = select_tag :namespace_id, namespaces_options(namespace_id_from(params) || :current_user, display_path: true, extra_group: namespace_id_from(params)), class: 'select2 js-select-namespace', tabindex: 1

          - else
            .input-group-prepend.static-namespace.has-tooltip{ title: user_url(current_user.username) + '/' }
              .input-group-text.border-0
                #{user_url(current_user.username)}/
            = hidden_field_tag :namespace_id, value: current_user.namespace_id
    .form-group.col-12.col-sm-6.project-path
      = label_tag :path, _('Project slug'), class: 'label-bold'
      = text_field_tag :path, @path, placeholder: "my-awesome-project", class: "js-path-name form-control", tabindex: 2, required: true
  = render 'import/shared/new_project_form'

  .row
    .form-group.col-md-12
@@ -4,9 +4,5 @@
%h3.page-title
  = _('Manifest file import')

- if @errors.present?
  .alert.alert-danger
    - @errors.each do |error|
      = error

= render 'import/shared/errors'
= render 'form'
@@ -0,0 +1,25 @@
- title = _('Phabricator Server Import')
- page_title title
- breadcrumb_title title
- header_title _("Projects"), root_path

%h3.page-title
  = icon 'issues', text: _('Import tasks from Phabricator into issues')

= render 'import/shared/errors'

= form_tag import_phabricator_path, class: 'new_project', method: :post do
  = render 'import/shared/new_project_form'

  %h4.prepend-top-0= _('Enter in your Phabricator Server URL and personal access token below')

  .form-group.row
    = label_tag :phabricator_server_url, _('Phabricator Server URL'), class: 'col-form-label col-md-2'
    .col-md-4
      = text_field_tag :phabricator_server_url, params[:phabricator_server_url], class: 'form-control append-right-8', placeholder: 'https://your-phabricator-server', size: 40
  .form-group.row
    = label_tag :api_token, _('API Token'), class: 'col-form-label col-md-2'
    .col-md-4
      = password_field_tag :api_token, params[:api_token], class: 'form-control append-right-8', placeholder: _('Personal Access Token'), size: 40
  .form-actions
    = submit_tag _('Import tasks'), class: 'btn btn-success'
@@ -0,0 +1,4 @@
- if @errors.present?
  .alert.alert-danger
    - @errors.each do |error|
      = error
@@ -0,0 +1,21 @@
.row
  .form-group.project-name.col-sm-12
    = label_tag :name, _('Project name'), class: 'label-bold'
    = text_field_tag :name, @name, placeholder: "My awesome project", class: "js-project-name form-control input-lg", autofocus: true
  .form-group.col-12.col-sm-6
    = label_tag :namespace_id, _('Project URL'), class: 'label-bold'
    .form-group
      .input-group.flex-nowrap
        - if current_user.can_select_namespace?
          .input-group-prepend.flex-shrink-0.has-tooltip{ title: root_url }
            .input-group-text
              = root_url
          = select_tag :namespace_id, namespaces_options(namespace_id_from(params) || :current_user, display_path: true, extra_group: namespace_id_from(params)), class: 'select2 js-select-namespace', tabindex: 1
        - else
          .input-group-prepend.static-namespace.has-tooltip{ title: user_url(current_user.username) + '/' }
            .input-group-text.border-0
              #{user_url(current_user.username)}/
          = hidden_field_tag :namespace_id, value: current_user.namespace_id
  .form-group.col-12.col-sm-6.project-path
    = label_tag :path, _('Project slug'), class: 'label-bold'
    = text_field_tag :path, @path, placeholder: "my-awesome-project", class: "js-path-name form-control", tabindex: 2, required: true
@@ -63,6 +63,13 @@
      = link_to new_import_manifest_path, class: 'btn import_manifest', data: { track_label: "#{track_label}", track_event: "click_button", track_property: "manifest_file" } do
        = icon('file-text-o', text: 'Manifest file')

    - if phabricator_import_enabled?
      %div
        = link_to new_import_phabricator_path, class: 'btn import_phabricator', data: { track_label: "#{track_label}", track_event: "click_button", track_property: "phabricator" } do
          = custom_icon('issues')
          = _("Phabricator Tasks")


  .js-toggle-content.toggle-import-form{ class: ('hide' if active_tab != 'import') }
    = form_for @project, html: { class: 'new_project' } do |f|
      %hr
@@ -67,4 +67,6 @@ namespace :import do
    get :jobs
    post :upload
  end

  resource :phabricator, only: [:create, :new], controller: :phabricator
end
@@ -91,3 +91,4 @@
  - [chat_notification, 2]
  - [migrate_external_diffs, 1]
  - [update_project_statistics, 1]
  - [phabricator_import_import_tasks, 1]
@@ -14,6 +14,7 @@
1. [From repo by URL](repo_by_url.md)
1. [By uploading a manifest file (AOSP)](manifest.md)
1. [From Gemnasium](gemnasium.md)
1. [From Phabricator](phabricator.md)

In addition to the specific migration documentation above, you can import any
Git repository via HTTP from the New Project page. Be aware that if the
@@ -0,0 +1,32 @@
# Import Phabricator tasks into a GitLab project

> [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/issues/60562) in
GitLab 12.0.

GitLab allows you to import all tasks from a Phabricator instance into
GitLab issues. The import creates a single project with the
repository disabled.

Currently, only the following basic fields are imported:

- Title
- Description
- State (open or closed)
- Created at
- Closed at


## Enabling this feature

While this feature is incomplete, a feature flag is required to enable it so that
we can gain early feedback before releasing it for everyone. To enable it:

1. Enable Phabricator as an [import source](../../admin_area/settings/visibility_and_access_controls.md#import-sources) in the Admin area.

``` {.ruby}
Feature.enable(:phabricator_import)
```

The [import
source](../../admin_area/settings/visibility_and_access_controls.md#import-sources)
also needs to be activated by an admin in the admin interface.
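As a quick sanity check (a hedged example, run from a Rails console), the helper added in this commit reports whether both conditions are satisfied:

```ruby
# true only when the :phabricator_import feature flag is enabled and
# 'phabricator' is included in the configured import sources.
Gitlab::PhabricatorImport.available?
```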
@@ -29,29 +29,13 @@ module Gitlab
      end

      def execute
        jid = generate_jid

        # The original import JID is the JID of the RepositoryImportWorker job,
        # which will be removed once that job completes. Reusing that JID could
        # result in StuckImportJobsWorker marking the job as stuck before we get
        # to running Stage::ImportRepositoryWorker.
        #
        # We work around this by setting the JID to a custom generated one, then
        # refreshing it in the various stages whenever necessary.
        Gitlab::SidekiqStatus
          .set(jid, StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION)

        project.import_state.update_column(:jid, jid)
        Gitlab::Import::SetAsyncJid.set_jid(project)

        Stage::ImportRepositoryWorker
          .perform_async(project.id)

        true
      end

      def generate_jid
        "github-importer/#{project.id}"
      end
    end
  end
end
@@ -9,6 +9,13 @@ module Gitlab
    BlockedUrlError = Class.new(StandardError)
    RedirectionTooDeep = Class.new(StandardError)

    HTTP_ERRORS = [
      SocketError, OpenSSL::SSL::SSLError, Errno::ECONNRESET,
      Errno::ECONNREFUSED, Errno::EHOSTUNREACH, Net::OpenTimeout,
      Net::ReadTimeout, Gitlab::HTTP::BlockedUrlError,
      Gitlab::HTTP::RedirectionTooDeep
    ].freeze

    include HTTParty # rubocop:disable Gitlab/HTTParty

    connection_adapter ProxyHTTPConnectionAdapter
@@ -0,0 +1,27 @@
# frozen_string_literal: true

# The original import JID is the JID of the RepositoryImportWorker job,
# which will be removed once that job completes. Reusing that JID could
# result in StuckImportJobsWorker marking the job as stuck before we get
# to running Stage::ImportRepositoryWorker.
#
# We work around this by setting the JID to a custom generated one, then
# refreshing it in the various stages whenever necessary.
module Gitlab
  module Import
    module SetAsyncJid
      def self.set_jid(project)
        jid = generate_jid(project)

        Gitlab::SidekiqStatus
          .set(jid, StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION)

        project.import_state.update_column(:jid, jid)
      end

      def self.generate_jid(project)
        "async-import/#{project.id}"
      end
    end
  end
end
@@ -20,7 +20,8 @@ module Gitlab
      ImportSource.new('git', 'Repo by URL', nil),
      ImportSource.new('gitlab_project', 'GitLab export', Gitlab::ImportExport::Importer),
      ImportSource.new('gitea', 'Gitea', Gitlab::LegacyGithubImport::Importer),
      ImportSource.new('manifest', 'Manifest file', nil)
      ImportSource.new('manifest', 'Manifest file', nil),
      ImportSource.new('phabricator', 'Phabricator', Gitlab::PhabricatorImport::Importer)
    ].freeze

    class << self
@@ -0,0 +1,12 @@
# frozen_string_literal: true

module Gitlab
  module PhabricatorImport
    BaseError = Class.new(StandardError)

    def self.available?
      Feature.enabled?(:phabricator_import) &&
        Gitlab::CurrentSettings.import_sources.include?('phabricator')
    end
  end
end
@@ -0,0 +1,80 @@
# frozen_string_literal: true

# All workers within a Phabricator import should inherit from this worker and
# implement the `#import` method. The jobs should then be scheduled using the
# `.schedule` class method instead of `.perform_async`
#
# Doing this makes sure that only one job of that type is running at the same time
# for a certain project. This will avoid deadlocks. When a job is already running
# we'll wait for it for 10 times 5 seconds to restart. If the running job hasn't
# finished by then, we'll retry in 30 seconds.
#
# It also makes sure that we keep the import state of the project up to date:
# - It keeps track of the jobs so we know how many jobs are running for the
#   project
# - It refreshes the import jid, so it doesn't get cleaned up by the
#   `StuckImportJobsWorker`
# - It marks the import as failed if a job failed too many times
# - It marks the import as finished when all remaining jobs are done
module Gitlab
  module PhabricatorImport
    class BaseWorker
      include ApplicationWorker
      include ProjectImportOptions # This marks the project as failed after too many tries
      include Gitlab::ExclusiveLeaseHelpers

      class << self
        def schedule(project_id, *args)
          perform_async(project_id, *args)
          add_job(project_id)
        end

        def add_job(project_id)
          worker_state(project_id).add_job
        end

        def remove_job(project_id)
          worker_state(project_id).remove_job
        end

        def worker_state(project_id)
          Gitlab::PhabricatorImport::WorkerState.new(project_id)
        end
      end

      def perform(project_id, *args)
        in_lock("#{self.class.name.underscore}/#{project_id}/#{args}", ttl: 2.hours, sleep_sec: 5.seconds) do
          project = Project.find_by_id(project_id)
          next unless project

          # Bail if the import job already failed
          next unless project.import_state&.in_progress?

          project.import_state.refresh_jid_expiration

          import(project, *args)

          # If this is the last running job, finish the import
          project.after_import if self.class.worker_state(project_id).running_count < 2

          self.class.remove_job(project_id)
        end
      rescue Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError
        # Reschedule a job if there was already a running one
        # Running them at the same time could cause a deadlock updating the same
        # resource
        self.class.perform_in(30.seconds, project_id, *args)
      end

      private

      def import(project, *args)
        importer_class.new(project, *args).execute
      end

      def importer_class
        raise NotImplementedError, "Implement `#{__method__}` on #{self.class}"
      end
    end
  end
end
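A minimal sketch of how a concrete worker plugs into this base class (it mirrors the `ImportTasksWorker` added later in this commit; the class name here is hypothetical and `project.id` is a placeholder):

```ruby
module Gitlab
  module PhabricatorImport
    # Hypothetical example worker; a real worker only needs to point at an importer.
    class ExampleImportWorker < BaseWorker
      def importer_class
        Gitlab::PhabricatorImport::Issues::Importer
      end
    end
  end
end

# Jobs must go through `.schedule` (not `.perform_async`) so the per-project
# job counter in WorkerState stays accurate.
Gitlab::PhabricatorImport::ExampleImportWorker.schedule(project.id)
```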
@@ -0,0 +1,65 @@
# frozen_string_literal: true
module Gitlab
  module PhabricatorImport
    module Cache
      class Map
        def initialize(project)
          @project = project
        end

        def get_gitlab_model(phabricator_id)
          cached_info = get(phabricator_id)
          return unless cached_info[:classname] && cached_info[:database_id]

          cached_info[:classname].constantize.find_by_id(cached_info[:database_id])
        end

        def set_gitlab_model(object, phabricator_id)
          set(object.class, object.id, phabricator_id)
        end

        private

        attr_reader :project

        def set(klass_name, object_id, phabricator_id)
          key = cache_key_for_phabricator_id(phabricator_id)

          redis.with do |r|
            r.multi do |multi|
              multi.mapped_hmset(key,
                                 { classname: klass_name, database_id: object_id })
              multi.expire(key, timeout)
            end
          end
        end

        def get(phabricator_id)
          key = cache_key_for_phabricator_id(phabricator_id)

          redis.with do |r|
            r.pipelined do |pipe|
              # Extend the TTL when a key was read
              pipe.expire(key, timeout)
              pipe.mapped_hmget(key, :classname, :database_id)
            end.last
          end
        end

        def cache_key_for_phabricator_id(phabricator_id)
          "#{Redis::Cache::CACHE_NAMESPACE}/phabricator-import/#{project.id}/#{phabricator_id}"
        end

        def redis
          Gitlab::Redis::Cache
        end

        def timeout
          # Setting the timeout to the same one as we do for clearing stuck jobs
          # this makes sure all cache is available while the import is running.
          StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION
        end
      end
    end
  end
end
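Usage of the map is symmetric, as the specs further down in this commit show (a sketch; `project`, `issue`, and the PHID value are placeholders taken from the fixture data):

```ruby
map = Gitlab::PhabricatorImport::Cache::Map.new(project)

# Remember which GitLab record a Phabricator object was imported into...
map.set_gitlab_model(issue, 'PHID-TASK-fswfs3wkowjb6cyyxtyx')

# ...and look it up again on a later page or retry; reads also extend the TTL.
map.get_gitlab_model('PHID-TASK-fswfs3wkowjb6cyyxtyx') # => the Issue, or nil
```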
@@ -0,0 +1,9 @@
# frozen_string_literal: true
module Gitlab
  module PhabricatorImport
    module Conduit
      ApiError = Class.new(Gitlab::PhabricatorImport::BaseError)
      ResponseError = Class.new(ApiError)
    end
  end
end
@@ -0,0 +1,41 @@
# frozen_string_literal: true
module Gitlab
  module PhabricatorImport
    module Conduit
      class Client
        def initialize(phabricator_url, api_token)
          @phabricator_url = phabricator_url
          @api_token = api_token
        end

        def get(path, params: {})
          response = Gitlab::HTTP.get(build_url(path), body: build_params(params), headers: headers)
          Response.parse!(response)
        rescue *Gitlab::HTTP::HTTP_ERRORS => e
          # Wrap all errors from the API into an API-error.
          raise ApiError.new(e)
        end

        private

        attr_reader :phabricator_url, :api_token

        def headers
          { "Accept" => 'application/json' }
        end

        def build_url(path)
          URI.join(phabricator_url, '/api/', path).to_s
        end

        def build_params(params)
          params = params.dup
          params.compact!
          params.reverse_merge!("api.token" => api_token)

          CGI.unescape(params.to_query)
        end
      end
    end
  end
end
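A hedged usage sketch of the Conduit client above (the server URL and token are placeholders):

```ruby
client = Gitlab::PhabricatorImport::Conduit::Client
           .new('https://phabricator.example.com', 'api-token')

# `get` returns a parsed Conduit::Response or raises an ApiError/ResponseError.
response = client.get('maniphest.search', params: { attachments: { projects: 1 } })
response.data                 # => array of task hashes
response.pagination.next_page # => cursor for the next page, if any
```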
@@ -0,0 +1,28 @@
# frozen_string_literal: true
module Gitlab
  module PhabricatorImport
    module Conduit
      class Maniphest
        def initialize(phabricator_url:, api_token:)
          @client = Client.new(phabricator_url, api_token)
        end

        def tasks(after: nil)
          TasksResponse.new(get_tasks(after))
        end

        private

        def get_tasks(after)
          client.get('maniphest.search',
                     params: {
                       after: after,
                       attachments: { projects: 1, subscribers: 1, columns: 1 }
                     })
        end

        attr_reader :client
      end
    end
  end
end
@@ -0,0 +1,24 @@
# frozen_string_literal: true
module Gitlab
  module PhabricatorImport
    module Conduit
      class Pagination
        def initialize(cursor_json)
          @cursor_json = cursor_json
        end

        def has_next_page?
          next_page.present?
        end

        def next_page
          cursor_json["after"]
        end

        private

        attr_reader :cursor_json
      end
    end
  end
end
@@ -0,0 +1,60 @@
# frozen_string_literal: true
module Gitlab
  module PhabricatorImport
    module Conduit
      class Response
        def self.parse!(http_response)
          unless http_response.success?
            raise Gitlab::PhabricatorImport::Conduit::ResponseError,
                  "Phabricator responded with #{http_response.status}"
          end

          response = new(JSON.parse(http_response.body))

          unless response.success?
            raise ResponseError,
                  "Phabricator Error: #{response.error_code}: #{response.error_info}"
          end

          response
        rescue JSON::JSONError => e
          raise ResponseError.new(e)
        end

        def initialize(json)
          @json = json
        end

        def success?
          error_code.nil?
        end

        def error_code
          json['error_code']
        end

        def error_info
          json['error_info']
        end

        def data
          json_result&.fetch('data')
        end

        def pagination
          return unless cursor_info = json_result&.fetch('cursor')

          @pagination ||= Pagination.new(cursor_info)
        end

        private

        attr_reader :json

        def json_result
          json['result']
        end
      end
    end
  end
end
@@ -0,0 +1,24 @@
# frozen_string_literal: true
module Gitlab
  module PhabricatorImport
    module Conduit
      class TasksResponse
        def initialize(conduit_response)
          @conduit_response = conduit_response
        end

        delegate :pagination, to: :conduit_response

        def tasks
          @tasks ||= conduit_response.data.map do |task_json|
            Gitlab::PhabricatorImport::Representation::Task.new(task_json)
          end
        end

        private

        attr_reader :conduit_response
      end
    end
  end
end
@@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
  module PhabricatorImport
    class ImportTasksWorker < BaseWorker
      def importer_class
        Gitlab::PhabricatorImport::Issues::Importer
      end
    end
  end
end
@@ -0,0 +1,44 @@
# frozen_string_literal: true

module Gitlab
  module PhabricatorImport
    class Importer
      def self.async?
        true
      end

      def self.imports_repository?
        # This does not really import a repository, but we want to skip all
        # repository related tasks in the `Projects::ImportService`
        true
      end

      def initialize(project)
        @project = project
      end

      def execute
        Gitlab::Import::SetAsyncJid.set_jid(project)
        schedule_first_tasks_page

        true
      rescue => e
        fail_import(e.message)

        false
      end

      private

      attr_reader :project

      def schedule_first_tasks_page
        ImportTasksWorker.schedule(project.id)
      end

      def fail_import(message)
        project.import_state.mark_as_failed(message)
      end
    end
  end
end
@@ -0,0 +1,42 @@
# frozen_string_literal: true
module Gitlab
  module PhabricatorImport
    module Issues
      class Importer
        def initialize(project, after = nil)
          @project, @after = project, after
        end

        def execute
          schedule_next_batch

          tasks_response.tasks.each do |task|
            TaskImporter.new(project, task).execute
          end
        end

        private

        attr_reader :project, :after

        def schedule_next_batch
          return unless tasks_response.pagination.has_next_page?

          Gitlab::PhabricatorImport::ImportTasksWorker
            .schedule(project.id, tasks_response.pagination.next_page)
        end

        def tasks_response
          @tasks_response ||= client.tasks(after: after)
        end

        def client
          @client ||=
            Gitlab::PhabricatorImport::Conduit::Maniphest
              .new(phabricator_url: project.import_data.data['phabricator_url'],
                   api_token: project.import_data.credentials[:api_token])
        end
      end
    end
  end
end
@@ -0,0 +1,54 @@
# frozen_string_literal: true
module Gitlab
  module PhabricatorImport
    module Issues
      class TaskImporter
        def initialize(project, task)
          @project, @task = project, task
        end

        def execute
          # TODO: get the user from the project namespace from the username loaded by Phab-id
          # https://gitlab.com/gitlab-org/gitlab-ce/issues/60565
          issue.author = User.ghost

          # TODO: Reformat the description with attachments, escaping accidental
          # links and add attachments
          # https://gitlab.com/gitlab-org/gitlab-ce/issues/60603
          issue.assign_attributes(task.issue_attributes)

          save!

          issue
        end

        private

        attr_reader :project, :task

        def save!
          # Just avoiding an extra redis call, we've already updated the expiry
          # when reading the id from the map
          was_persisted = issue.persisted?

          issue.save! if issue.changed?

          object_map.set_gitlab_model(issue, task.phabricator_id) unless was_persisted
        end

        def issue
          @issue ||= find_issue_by_phabricator_id(task.phabricator_id) ||
                     project.issues.new
        end

        def find_issue_by_phabricator_id(phabricator_id)
          object_map.get_gitlab_model(phabricator_id)
        end

        def object_map
          Gitlab::PhabricatorImport::Cache::Map.new(project)
        end
      end
    end
  end
end
@@ -0,0 +1,78 @@
# frozen_string_literal: true

module Gitlab
  module PhabricatorImport
    class ProjectCreator
      def initialize(current_user, params)
        @current_user = current_user
        @params = params.dup
      end

      def execute
        return unless import_url.present? && api_token.present?

        project = Projects::CreateService.new(current_user, create_params).execute
        return project unless project.persisted?

        project.project_feature.update!(project_feature_attributes)

        project
      end

      private

      attr_reader :current_user, :params

      def create_params
        {
          name: project_name,
          path: project_path,
          namespace_id: namespace_id,
          import_type: 'phabricator',
          import_url: Project::UNKNOWN_IMPORT_URL,
          import_data: import_data
        }
      end

      def project_name
        params[:name]
      end

      def project_path
        params[:path]
      end

      def namespace_id
        params[:namespace_id] || current_user.namespace_id
      end

      def import_url
        params[:phabricator_server_url]
      end

      def api_token
        params[:api_token]
      end

      def project_feature_attributes
        @project_features_attributes ||= begin
          # everything disabled except for issues
          ProjectFeature::FEATURES.map do |feature|
            [ProjectFeature.access_level_attribute(feature), ProjectFeature::DISABLED]
          end.to_h.merge(ProjectFeature.access_level_attribute(:issues) => ProjectFeature::ENABLED)
        end
      end

      def import_data
        {
          data: {
            phabricator_url: import_url
          },
          credentials: {
            api_token: params.fetch(:api_token)
          }
        }
      end
    end
  end
end
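The controller above drives this class; calling it directly looks roughly like this (a sketch with placeholder values):

```ruby
params = {
  name: 'Phab import',
  path: 'phab-import',
  namespace_id: current_user.namespace_id,
  phabricator_server_url: 'https://phabricator.example.com',
  api_token: 'api-token'
}

project = Gitlab::PhabricatorImport::ProjectCreator.new(current_user, params).execute
# => nil when the URL or token is blank, an unpersisted project when creation
#    fails validation, or a persisted issues-only project otherwise.
```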
@@ -0,0 +1,60 @@
# frozen_string_literal: true
module Gitlab
  module PhabricatorImport
    module Representation
      class Task
        def initialize(json)
          @json = json
        end

        def phabricator_id
          json['phid']
        end

        def issue_attributes
          @issue_attributes ||= {
            title: issue_title,
            description: issue_description,
            state: issue_state,
            created_at: issue_created_at,
            closed_at: issue_closed_at
          }
        end

        private

        attr_reader :json

        def issue_title
          # The 255 limit is the validation we impose on the Issue title in
          # Issuable
          @issue_title ||= json['fields']['name'].truncate(255)
        end

        def issue_description
          json['fields']['description']['raw']
        end

        def issue_state
          issue_closed_at.present? ? :closed : :opened
        end

        def issue_created_at
          return unless json['fields']['dateCreated']

          @issue_created_at ||= cast_datetime(json['fields']['dateCreated'])
        end

        def issue_closed_at
          return unless json['fields']['dateClosed']

          @issue_closed_at ||= cast_datetime(json['fields']['dateClosed'])
        end

        def cast_datetime(value)
          Time.at(value.to_i)
        end
      end
    end
  end
end
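Fed with a task hash from `maniphest.search` (like the fixture further down in this commit), the representation maps Phabricator fields onto issue attributes; a small sketch:

```ruby
task = Gitlab::PhabricatorImport::Representation::Task.new(
  'phid' => 'PHID-TASK-fswfs3wkowjb6cyyxtyx',
  'fields' => {
    'name' => 'Things are slow',
    'description' => { 'raw' => 'Things are slow but should be fast!' },
    'dateCreated' => '1374616241',
    'dateClosed' => '1374657042'
  }
)

task.issue_attributes
# => { title: 'Things are slow', description: 'Things are slow but should be fast!',
#      state: :closed, created_at: Time.at(1374616241), closed_at: Time.at(1374657042) }
```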
@@ -0,0 +1,47 @@
# frozen_string_literal: true
module Gitlab
  module PhabricatorImport
    class WorkerState
      def initialize(project_id)
        @project_id = project_id
      end

      def add_job
        redis.with do |r|
          r.pipelined do |pipe|
            pipe.incr(all_jobs_key)
            pipe.expire(all_jobs_key, timeout)
          end
        end
      end

      def remove_job
        redis.with do |r|
          r.decr(all_jobs_key)
        end
      end

      def running_count
        redis.with { |r| r.get(all_jobs_key) }.to_i
      end

      private

      attr_reader :project_id

      def redis
        Gitlab::Redis::SharedState
      end

      def all_jobs_key
        @all_jobs_key ||= "phabricator-import/jobs/project-#{project_id}/job-count"
      end

      def timeout
        # Make sure we get rid of all the information after a job is marked
        # as failed/succeeded
        StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION
      end
    end
  end
end
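This counter is what `BaseWorker` consults to decide when the import is finished (a sketch; `project.id` is a placeholder):

```ruby
state = Gitlab::PhabricatorImport::WorkerState.new(project.id)

state.add_job        # called by `.schedule` when a job is enqueued
state.running_count  # => number of jobs still scheduled or running
state.remove_job     # called by `#perform` once a job has done its work
```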
@@ -451,6 +451,9 @@ msgstr ""
msgid "API Help"
msgstr ""

msgid "API Token"
msgstr ""

msgid "About GitLab"
msgstr ""

@@ -3808,6 +3811,9 @@ msgstr ""
msgid "Enter in your Bitbucket Server URL and personal access token below"
msgstr ""

msgid "Enter in your Phabricator Server URL and personal access token below"
msgstr ""

msgid "Enter the issue description"
msgstr ""

@@ -5171,6 +5177,12 @@ msgstr ""
msgid "Import repository"
msgstr ""

msgid "Import tasks"
msgstr ""

msgid "Import tasks from Phabricator into issues"
msgstr ""

msgid "Import timed out. Import took longer than %{import_jobs_expiration} seconds"
msgstr ""

@@ -5321,6 +5333,9 @@ msgstr ""
msgid "Invalid file."
msgstr ""

msgid "Invalid import params"
msgstr ""

msgid "Invalid input, please avoid emojis"
msgstr ""

@@ -6907,6 +6922,15 @@ msgstr ""
msgid "Personal project creation is not allowed. Please contact your administrator with questions"
msgstr ""

msgid "Phabricator Server Import"
msgstr ""

msgid "Phabricator Server URL"
msgstr ""

msgid "Phabricator Tasks"
msgstr ""

msgid "Pick a name"
msgstr ""
@ -0,0 +1,92 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
describe Import::PhabricatorController do
|
||||
let(:current_user) { create(:user) }
|
||||
|
||||
before do
|
||||
sign_in current_user
|
||||
end
|
||||
|
||||
describe 'GET #new' do
|
||||
subject { get :new }
|
||||
|
||||
context 'when the import source is not available' do
|
||||
before do
|
||||
stub_feature_flags(phabricator_import: true)
|
||||
stub_application_setting(import_sources: [])
|
||||
end
|
||||
|
||||
it { is_expected.to have_gitlab_http_status(404) }
|
||||
end
|
||||
|
||||
context 'when the feature is disabled' do
|
||||
before do
|
||||
stub_feature_flags(phabricator_import: false)
|
||||
stub_application_setting(import_sources: ['phabricator'])
|
||||
end
|
||||
|
||||
it { is_expected.to have_gitlab_http_status(404) }
|
||||
end
|
||||
|
||||
context 'when the import is available' do
|
||||
before do
|
||||
stub_feature_flags(phabricator_import: true)
|
||||
stub_application_setting(import_sources: ['phabricator'])
|
||||
end
|
||||
|
||||
it { is_expected.to have_gitlab_http_status(200) }
|
||||
end
|
||||
end
|
||||
|
||||
describe 'POST #create' do
|
||||
subject(:post_create) { post :create, params: params }
|
||||
|
||||
context 'with valid params' do
|
||||
let(:params) do
|
||||
{ path: 'phab-import',
|
||||
name: 'Phab import',
|
||||
phabricator_server_url: 'https://phabricator.example.com',
|
||||
api_token: 'hazaah',
|
||||
namespace_id: current_user.namespace_id }
|
||||
end
|
||||
|
||||
it 'creates a project to import' do
|
||||
expect_next_instance_of(Gitlab::PhabricatorImport::Importer) do |importer|
|
||||
expect(importer).to receive(:execute)
|
||||
end
|
||||
|
||||
expect { post_create }.to change { current_user.namespace.projects.reload.size }.from(0).to(1)
|
||||
|
||||
expect(current_user.namespace.projects.last).to be_import
|
||||
end
|
||||
end
|
||||
|
||||
context 'when an import param is missing' do
|
||||
let(:params) do
|
||||
{ path: 'phab-import',
|
||||
name: 'Phab import',
|
||||
phabricator_server_url: nil,
|
||||
api_token: 'hazaah',
|
||||
namespace_id: current_user.namespace_id }
|
||||
end
|
||||
|
||||
it 'does not create the project' do
|
||||
expect { post_create }.not_to change { current_user.namespace.projects.reload.size }
|
||||
end
|
||||
end
|
||||
|
||||
context 'when a project param is missing' do
|
||||
let(:params) do
|
||||
{ phabricator_server_url: 'https://phabricator.example.com',
|
||||
api_token: 'hazaah',
|
||||
namespace_id: current_user.namespace_id }
|
||||
end
|
||||
|
||||
it 'does not create the project' do
|
||||
expect { post_create }.not_to change { current_user.namespace.projects.reload.size }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@@ -0,0 +1 @@
{"result":null,"error_code":"ERR-INVALID-AUTH","error_info":"API token \"api-token\" has the wrong length. API tokens should be 32 characters long."}
@ -0,0 +1,98 @@
|
|||
{
|
||||
"result": {
|
||||
"data": [
|
||||
{
|
||||
"id": 283,
|
||||
"type": "TASK",
|
||||
"phid": "PHID-TASK-fswfs3wkowjb6cyyxtyx",
|
||||
"fields": {
|
||||
"name": "Things are slow",
|
||||
"description": {
|
||||
"raw": "Things are slow but should be fast!"
|
||||
},
|
||||
"authorPHID": "PHID-USER-nrtht5wijwbxquens3qr",
|
||||
"ownerPHID": "PHID-USER-nrtht5wijwbxquens3qr",
|
||||
"status": {
|
||||
"value": "resolved",
|
||||
"name": "Resolved",
|
||||
"color": null
|
||||
},
|
||||
"priority": {
|
||||
"value": 100,
|
||||
"subpriority": 8589934592,
|
||||
"name": "Super urgent",
|
||||
"color": "pink"
|
||||
},
|
||||
"points": null,
|
||||
"subtype": "default",
|
||||
"closerPHID": "PHID-USER-nrtht5wijwbxquens3qr",
|
||||
"dateClosed": 1374657042,
|
||||
"spacePHID": null,
|
||||
"dateCreated": 1374616241,
|
||||
"dateModified": 1374657044,
|
||||
"policy": {
|
||||
"view": "users",
|
||||
"interact": "users",
|
||||
"edit": "users"
|
||||
},
|
||||
"custom.field-1": null,
|
||||
"custom.field-2": null,
|
||||
"custom.field-3": null
|
||||
},
|
||||
"attachments": {}
|
||||
},
|
||||
{
|
||||
"id": 284,
|
||||
"type": "TASK",
|
||||
"phid": "PHID-TASK-5f73nyq5sjeh4cbmcsnb",
|
||||
"fields": {
|
||||
"name": "Things are broken",
|
||||
"description": {
|
||||
"raw": "Things are broken and should be fixed"
|
||||
},
|
||||
"authorPHID": "PHID-USER-nrtht5wijwbxquens3qr",
|
||||
"ownerPHID": "PHID-USER-h425fsrixz4gjxiyr7ot",
|
||||
"status": {
|
||||
"value": "resolved",
|
||||
"name": "Resolved",
|
||||
"color": null
|
||||
},
|
||||
"priority": {
|
||||
"value": 100,
|
||||
"subpriority": 8589803520,
|
||||
"name": "Super urgent",
|
||||
"color": "pink"
|
||||
},
|
||||
"points": null,
|
||||
"subtype": "default",
|
||||
"closerPHID": "PHID-USER-h425fsrixz4gjxiyr7ot",
|
||||
"dateClosed": 1375049556,
|
||||
"spacePHID": null,
|
||||
"dateCreated": 1374616578,
|
||||
"dateModified": 1375049556,
|
||||
"policy": {
|
||||
"view": "users",
|
||||
"interact": "users",
|
||||
"edit": "users"
|
||||
},
|
||||
"custom.field-1": null,
|
||||
"custom.field-2": null,
|
||||
"custom.field-3": null
|
||||
},
|
||||
"attachments": {}
|
||||
}
|
||||
],
|
||||
"maps": {},
|
||||
"query": {
|
||||
"queryKey": null
|
||||
},
|
||||
"cursor": {
|
||||
"limit": "2",
|
||||
"after": "284",
|
||||
"before": null,
|
||||
"order": null
|
||||
}
|
||||
},
|
||||
"error_code": null,
|
||||
"error_info": null
|
||||
}
|
||||
|
|
@@ -25,18 +25,9 @@ describe Gitlab::GithubImport::ParallelImporter do
    end

    it 'sets the JID in Redis' do
      expect(Gitlab::SidekiqStatus)
        .to receive(:set)
        .with("github-importer/#{project.id}", StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION)
        .and_call_original
      expect(Gitlab::Import::SetAsyncJid).to receive(:set_jid).with(project).and_call_original

      importer.execute
    end

    it 'updates the import JID of the project' do
      importer.execute

      expect(project.import_state.reload.jid).to eq("github-importer/#{project.id}")
    end
  end
end
@@ -0,0 +1,23 @@
# frozen_string_literal: true
require 'spec_helper'

describe Gitlab::Import::SetAsyncJid do
  describe '.set_jid', :clean_gitlab_redis_shared_state do
    let(:project) { create(:project, :import_scheduled) }

    it 'sets the JID in Redis' do
      expect(Gitlab::SidekiqStatus)
        .to receive(:set)
        .with("async-import/#{project.id}", StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION)
        .and_call_original

      described_class.set_jid(project)
    end

    it 'updates the import JID of the project' do
      described_class.set_jid(project)

      expect(project.import_state.reload.jid).to eq("async-import/#{project.id}")
    end
  end
end
@ -14,7 +14,8 @@ describe Gitlab::ImportSources do
|
|||
'Repo by URL' => 'git',
|
||||
'GitLab export' => 'gitlab_project',
|
||||
'Gitea' => 'gitea',
|
||||
'Manifest file' => 'manifest'
|
||||
'Manifest file' => 'manifest',
|
||||
'Phabricator' => 'phabricator'
|
||||
}
|
||||
|
||||
expect(described_class.options).to eq(expected)
|
||||
|
|
@ -35,6 +36,7 @@ describe Gitlab::ImportSources do
|
|||
gitlab_project
|
||||
gitea
|
||||
manifest
|
||||
phabricator
|
||||
)
|
||||
|
||||
expect(described_class.values).to eq(expected)
|
||||
|
|
@ -53,6 +55,7 @@ describe Gitlab::ImportSources do
|
|||
fogbugz
|
||||
gitlab_project
|
||||
gitea
|
||||
phabricator
|
||||
)
|
||||
|
||||
expect(described_class.importer_names).to eq(expected)
|
||||
|
|
@ -70,7 +73,8 @@ describe Gitlab::ImportSources do
|
|||
'git' => nil,
|
||||
'gitlab_project' => Gitlab::ImportExport::Importer,
|
||||
'gitea' => Gitlab::LegacyGithubImport::Importer,
|
||||
'manifest' => nil
|
||||
'manifest' => nil,
|
||||
'phabricator' => Gitlab::PhabricatorImport::Importer
|
||||
}
|
||||
|
||||
import_sources.each do |name, klass|
|
||||
|
|
@ -91,7 +95,8 @@ describe Gitlab::ImportSources do
|
|||
'git' => 'Repo by URL',
|
||||
'gitlab_project' => 'GitLab export',
|
||||
'gitea' => 'Gitea',
|
||||
'manifest' => 'Manifest file'
|
||||
'manifest' => 'Manifest file',
|
||||
'phabricator' => 'Phabricator'
|
||||
}
|
||||
|
||||
import_sources.each do |name, title|
|
||||
|
|
@ -102,7 +107,7 @@ describe Gitlab::ImportSources do
|
|||
end
|
||||
|
||||
describe 'imports_repository? checker' do
|
||||
let(:allowed_importers) { %w[github gitlab_project bitbucket_server] }
|
||||
let(:allowed_importers) { %w[github gitlab_project bitbucket_server phabricator] }
|
||||
|
||||
it 'fails if any importer other than the allowed ones implements this method' do
|
||||
current_importers = described_class.values.select { |kind| described_class.importer(kind).try(:imports_repository?) }
|
||||
|
|
|
|||
|
|
@ -0,0 +1,74 @@
|
|||
# frozen_string_literal: true
|
||||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::PhabricatorImport::BaseWorker do
|
||||
let(:subclass) do
|
||||
# Creating an anonymous class for a worker is complicated, as we generate the
|
||||
# queue name from the class name.
|
||||
Gitlab::PhabricatorImport::ImportTasksWorker
|
||||
end
|
||||
|
||||
describe '.schedule' do
|
||||
let(:arguments) { %w[project_id the_next_page] }
|
||||
|
||||
it 'schedules the job' do
|
||||
expect(subclass).to receive(:perform_async).with(*arguments)
|
||||
|
||||
subclass.schedule(*arguments)
|
||||
end
|
||||
|
||||
it 'counts the scheduled job', :clean_gitlab_redis_shared_state do
|
||||
state = Gitlab::PhabricatorImport::WorkerState.new('project_id')
|
||||
|
||||
allow(subclass).to receive(:remove_job) # otherwise the job is removed before we saw it
|
||||
|
||||
expect { subclass.schedule(*arguments) }.to change { state.running_count }.by(1)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#perform' do
|
||||
let(:project) { create(:project, :import_started, import_url: "https://a.phab.instance") }
|
||||
let(:worker) { subclass.new }
|
||||
let(:state) { Gitlab::PhabricatorImport::WorkerState.new(project.id) }
|
||||
|
||||
before do
|
||||
allow(worker).to receive(:import)
|
||||
end
|
||||
|
||||
it 'does not break for a non-existing project' do
|
||||
expect { worker.perform('not a thing') }.not_to raise_error
|
||||
end
|
||||
|
||||
it 'does not do anything when the import is not in progress' do
|
||||
project = create(:project, :import_failed)
|
||||
|
||||
expect(worker).not_to receive(:import)
|
||||
|
||||
worker.perform(project.id)
|
||||
end
|
||||
|
||||
it 'calls import for the project' do
|
||||
expect(worker).to receive(:import).with(project, 'other_arg')
|
||||
|
||||
worker.perform(project.id, 'other_arg')
|
||||
end
|
||||
|
||||
it 'marks the project as imported if there was only one job running' do
|
||||
worker.perform(project.id)
|
||||
|
||||
expect(project.import_state.reload).to be_finished
|
||||
end
|
||||
|
||||
it 'does not mark the job as finished when there are more scheduled jobs' do
|
||||
2.times { state.add_job }
|
||||
|
||||
worker.perform(project.id)
|
||||
|
||||
expect(project.import_state.reload).to be_in_progress
|
||||
end
|
||||
|
||||
it 'decrements the job counter' do
|
||||
expect { worker.perform(project.id) }.to change { state.running_count }.by(-1)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,66 @@
|
|||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::PhabricatorImport::Cache::Map, :clean_gitlab_redis_cache do
|
||||
set(:project) { create(:project) }
|
||||
let(:redis) { Gitlab::Redis::Cache }
|
||||
subject(:map) { described_class.new(project) }
|
||||
|
||||
describe '#get_gitlab_model' do
|
||||
it 'returns nil if there was nothing cached for the phabricator id' do
|
||||
expect(map.get_gitlab_model('does not exist')).to be_nil
|
||||
end
|
||||
|
||||
it 'returns the object if it was set in redis' do
|
||||
issue = create(:issue, project: project)
|
||||
set_in_redis('exists', issue)
|
||||
|
||||
expect(map.get_gitlab_model('exists')).to eq(issue)
|
||||
end
|
||||
|
||||
it 'extends the TTL for the cache key' do
|
||||
set_in_redis('extend', create(:issue, project: project)) do |redis|
|
||||
redis.expire(cache_key('extend'), 10.seconds.to_i)
|
||||
end
|
||||
|
||||
map.get_gitlab_model('extend')
|
||||
|
||||
ttl = redis.with { |redis| redis.ttl(cache_key('extend')) }
|
||||
|
||||
expect(ttl).to be > 10.seconds
|
||||
end
|
||||
end
|
||||
|
||||
describe '#set_gitlab_model' do
|
||||
around do |example|
|
||||
Timecop.freeze { example.run }
|
||||
end
|
||||
|
||||
it 'sets the class and id in redis with a ttl' do
|
||||
issue = create(:issue, project: project)
|
||||
|
||||
map.set_gitlab_model(issue, 'it is set')
|
||||
|
||||
set_data, ttl = redis.with do |redis|
|
||||
redis.pipelined do |p|
|
||||
p.mapped_hmget(cache_key('it is set'), :classname, :database_id)
|
||||
p.ttl(cache_key('it is set'))
|
||||
end
|
||||
end
|
||||
|
||||
expect(set_data).to eq({ classname: 'Issue', database_id: issue.id.to_s })
|
||||
expect(ttl).to be_within(1.second).of(StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION)
|
||||
end
|
||||
end
|
||||
|
||||
def set_in_redis(key, object)
|
||||
redis.with do |redis|
|
||||
redis.mapped_hmset(cache_key(key),
|
||||
{ classname: object.class, database_id: object.id })
|
||||
yield(redis) if block_given?
|
||||
end
|
||||
end
|
||||
|
||||
def cache_key(phabricator_id)
|
||||
subject.__send__(:cache_key_for_phabricator_id, phabricator_id)
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,59 @@
|
|||
# frozen_string_literal: true
|
||||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::PhabricatorImport::Conduit::Client do
|
||||
let(:client) do
|
||||
described_class.new('https://see-ya-later.phabricator', 'api-token')
|
||||
end
|
||||
|
||||
describe '#get' do
|
||||
it 'performs and parses a request' do
|
||||
params = { some: 'extra', values: %w[are passed] }
|
||||
stub_valid_request(params)
|
||||
|
||||
response = client.get('test', params: params)
|
||||
|
||||
expect(response).to be_a(Gitlab::PhabricatorImport::Conduit::Response)
|
||||
expect(response).to be_success
|
||||
end
|
||||
|
||||
it 'wraps request errors in an `ApiError`' do
|
||||
stub_timeout
|
||||
|
||||
expect { client.get('test') }.to raise_error(Gitlab::PhabricatorImport::Conduit::ApiError)
|
||||
end
|
||||
|
||||
it 'raises response error' do
|
||||
stub_error_response
|
||||
|
||||
expect { client.get('test') }
|
||||
.to raise_error(Gitlab::PhabricatorImport::Conduit::ResponseError, /has the wrong length/)
|
||||
end
|
||||
end
|
||||
|
||||
def stub_valid_request(params = {})
|
||||
WebMock.stub_request(
|
||||
:get, 'https://see-ya-later.phabricator/api/test'
|
||||
).with(
|
||||
body: CGI.unescape(params.reverse_merge('api.token' => 'api-token').to_query)
|
||||
).and_return(
|
||||
status: 200,
|
||||
body: fixture_file('phabricator_responses/maniphest.search.json')
|
||||
)
|
||||
end
|
||||
|
||||
def stub_timeout
|
||||
WebMock.stub_request(
|
||||
:get, 'https://see-ya-later.phabricator/api/test'
|
||||
).to_timeout
|
||||
end
|
||||
|
||||
def stub_error_response
|
||||
WebMock.stub_request(
|
||||
:get, 'https://see-ya-later.phabricator/api/test'
|
||||
).and_return(
|
||||
status: 200,
|
||||
body: fixture_file('phabricator_responses/auth_failed.json')
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
# frozen_string_literal: true
|
||||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::PhabricatorImport::Conduit::Maniphest do
|
||||
let(:maniphest) do
|
||||
described_class.new(phabricator_url: 'https://see-ya-later.phabricator', api_token: 'api-token')
|
||||
end
|
||||
|
||||
describe '#tasks' do
|
||||
let(:fake_client) { double('Phabricator client') }
|
||||
|
||||
before do
|
||||
allow(maniphest).to receive(:client).and_return(fake_client)
|
||||
end
|
||||
|
||||
it 'calls the api with the correct params' do
|
||||
expected_params = {
|
||||
after: '123',
|
||||
attachments: {
|
||||
projects: 1, subscribers: 1, columns: 1
|
||||
}
|
||||
}
|
||||
|
||||
expect(fake_client).to receive(:get).with('maniphest.search',
|
||||
params: expected_params)
|
||||
|
||||
maniphest.tasks(after: '123')
|
||||
end
|
||||
|
||||
it 'returns a parsed response' do
|
||||
response = Gitlab::PhabricatorImport::Conduit::Response
|
||||
.new(fixture_file('phabricator_responses/maniphest.search.json'))
|
||||
|
||||
allow(fake_client).to receive(:get).and_return(response)
|
||||
|
||||
expect(maniphest.tasks).to be_a(Gitlab::PhabricatorImport::Conduit::TasksResponse)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,79 @@
|
|||
# frozen_string_literal: true
|
||||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::PhabricatorImport::Conduit::Response do
|
||||
let(:response) { described_class.new(JSON.parse(fixture_file('phabricator_responses/maniphest.search.json')))}
|
||||
let(:error_response) { described_class.new(JSON.parse(fixture_file('phabricator_responses/auth_failed.json'))) }
|
||||
|
||||
describe '.parse!' do
|
||||
it 'raises a ResponseError if the http response was not successfull' do
|
||||
fake_response = double(:http_response, success?: false, status: 401)
|
||||
|
||||
expect { described_class.parse!(fake_response) }
|
||||
.to raise_error(Gitlab::PhabricatorImport::Conduit::ResponseError, /responded with 401/)
|
||||
end
|
||||
|
||||
it 'raises a ResponseError if the response contained a Phabricator error' do
|
||||
fake_response = double(:http_response,
|
||||
success?: true,
|
||||
status: 200,
|
||||
body: fixture_file('phabricator_responses/auth_failed.json'))
|
||||
|
||||
expect { described_class.parse!(fake_response) }
|
||||
.to raise_error(Gitlab::PhabricatorImport::Conduit::ResponseError, /ERR-INVALID-AUTH: API token/)
|
||||
end
|
||||
|
||||
it 'raises a ResponseError if JSON parsing failed' do
|
||||
fake_response = double(:http_response,
|
||||
success?: true,
|
||||
status: 200,
|
||||
body: 'This is no JSON')
|
||||
|
||||
expect { described_class.parse!(fake_response) }
|
||||
.to raise_error(Gitlab::PhabricatorImport::Conduit::ResponseError, /unexpected token at/)
|
||||
end
|
||||
|
||||
it 'returns a parsed response for valid input' do
|
||||
fake_response = double(:http_response,
|
||||
success?: true,
|
||||
status: 200,
|
||||
body: fixture_file('phabricator_responses/maniphest.search.json'))
|
||||
|
||||
expect(described_class.parse!(fake_response)).to be_a(described_class)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#success?' do
|
||||
it { expect(response).to be_success }
|
||||
it { expect(error_response).not_to be_success }
|
||||
end
|
||||
|
||||
describe '#error_code' do
|
||||
it { expect(error_response.error_code).to eq('ERR-INVALID-AUTH') }
|
||||
it { expect(response.error_code).to be_nil }
|
||||
end
|
||||
|
||||
describe '#error_info' do
|
||||
it 'returns the correct error info' do
|
||||
expected_message = 'API token "api-token" has the wrong length. API tokens should be 32 characters long.'
|
||||
|
||||
expect(error_response.error_info).to eq(expected_message)
|
||||
end
|
||||
|
||||
it { expect(response.error_info).to be_nil }
|
||||
end
|
||||
|
||||
describe '#data' do
|
||||
it { expect(error_response.data).to be_nil }
|
||||
it { expect(response.data).to be_an(Array) }
|
||||
end
|
||||
|
||||
describe '#pagination' do
|
||||
it { expect(error_response.pagination).to be_nil }
|
||||
|
||||
it 'builds the pagination correctly' do
|
||||
expect(response.pagination).to be_a(Gitlab::PhabricatorImport::Conduit::Pagination)
|
||||
expect(response.pagination.next_page).to eq('284')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
# frozen_string_literal: true
|
||||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::PhabricatorImport::Conduit::TasksResponse do
|
||||
let(:conduit_response) do
|
||||
Gitlab::PhabricatorImport::Conduit::Response
|
||||
.new(JSON.parse(fixture_file('phabricator_responses/maniphest.search.json')))
|
||||
end
|
||||
|
||||
subject(:response) { described_class.new(conduit_response) }
|
||||
|
||||
describe '#pagination' do
|
||||
it 'delegates to the conduit reponse' do
|
||||
expect(response.pagination).to eq(conduit_response.pagination)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#tasks' do
|
||||
it 'builds the correct tasks representation' do
|
||||
tasks = response.tasks
|
||||
|
||||
titles = tasks.map(&:issue_attributes).map { |attrs| attrs[:title] }
|
||||
|
||||
expect(titles).to contain_exactly('Things are slow', 'Things are broken')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::PhabricatorImport::ImportTasksWorker do
|
||||
describe '#perform' do
|
||||
it 'calls the correct importer' do
|
||||
project = create(:project, :import_started, import_url: "https://the.phab.ulr")
|
||||
fake_importer = instance_double(Gitlab::PhabricatorImport::Issues::Importer)
|
||||
|
||||
expect(Gitlab::PhabricatorImport::Issues::Importer).to receive(:new).with(project).and_return(fake_importer)
|
||||
expect(fake_importer).to receive(:execute)
|
||||
|
||||
described_class.new.perform(project.id)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::PhabricatorImport::Importer do
|
||||
it { expect(described_class).to be_async }
|
||||
|
||||
it "acts like it's importing repositories" do
|
||||
expect(described_class).to be_imports_repository
|
||||
end
|
||||
|
||||
describe '#execute' do
|
||||
let(:project) { create(:project, :import_scheduled) }
|
||||
subject(:importer) { described_class.new(project) }
|
||||
|
||||
it 'sets a custom jid that will be kept up to date' do
|
||||
expect { importer.execute }.to change { project.import_state.reload.jid }
|
||||
end
|
||||
|
||||
it 'starts importing tasks' do
|
||||
expect(Gitlab::PhabricatorImport::ImportTasksWorker).to receive(:schedule).with(project.id)
|
||||
|
||||
importer.execute
|
||||
end
|
||||
|
||||
it 'marks the import as failed when something goes wrong' do
|
||||
allow(importer).to receive(:schedule_first_tasks_page).and_raise('Stuff is broken')
|
||||
|
||||
importer.execute
|
||||
|
||||
expect(project.import_state).to be_failed
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,53 @@
|
|||
# frozen_string_literal: true
|
||||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::PhabricatorImport::Issues::Importer do
|
||||
set(:project) { create(:project) }
|
||||
|
||||
let(:response) do
|
||||
Gitlab::PhabricatorImport::Conduit::TasksResponse.new(
|
||||
Gitlab::PhabricatorImport::Conduit::Response
|
||||
.new(JSON.parse(fixture_file('phabricator_responses/maniphest.search.json')))
|
||||
)
|
||||
end
|
||||
|
||||
subject(:importer) { described_class.new(project, nil) }
|
||||
|
||||
before do
|
||||
client = instance_double(Gitlab::PhabricatorImport::Conduit::Maniphest)
|
||||
|
||||
allow(client).to receive(:tasks).and_return(response)
|
||||
allow(importer).to receive(:client).and_return(client)
|
||||
end
|
||||
|
||||
describe '#execute' do
|
||||
it 'imports each task in the response' do
|
||||
response.tasks.each do |task|
|
||||
task_importer = instance_double(Gitlab::PhabricatorImport::Issues::TaskImporter)
|
||||
|
||||
expect(task_importer).to receive(:execute)
|
||||
expect(Gitlab::PhabricatorImport::Issues::TaskImporter)
|
||||
.to receive(:new).with(project, task)
|
||||
.and_return(task_importer)
|
||||
end
|
||||
|
||||
importer.execute
|
||||
end
|
||||
|
||||
it 'schedules the next batch if there is one' do
|
||||
expect(Gitlab::PhabricatorImport::ImportTasksWorker)
|
||||
.to receive(:schedule).with(project.id, response.pagination.next_page)
|
||||
|
||||
importer.execute
|
||||
end
|
||||
|
||||
it 'does not reschedule when there is no next page' do
|
||||
allow(response.pagination).to receive(:has_next_page?).and_return(false)
|
||||
|
||||
expect(Gitlab::PhabricatorImport::ImportTasksWorker)
|
||||
.not_to receive(:schedule)
|
||||
|
||||
importer.execute
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,54 @@
|
|||
# frozen_string_literal: true
|
||||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::PhabricatorImport::Issues::TaskImporter do
|
||||
set(:project) { create(:project) }
|
||||
let(:task) do
|
||||
Gitlab::PhabricatorImport::Representation::Task.new(
|
||||
{
|
||||
'phid' => 'the-phid',
|
||||
'fields' => {
|
||||
'name' => 'Title',
|
||||
'description' => {
|
||||
'raw' => '# This is markdown\n it can contain more text.'
|
||||
},
|
||||
'dateCreated' => '1518688921',
|
||||
'dateClosed' => '1518789995'
|
||||
}
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
describe '#execute' do
|
||||
it 'creates the issue with the expected attributes' do
|
||||
issue = described_class.new(project, task).execute
|
||||
|
||||
expect(issue.project).to eq(project)
|
||||
expect(issue).to be_persisted
|
||||
expect(issue.author).to eq(User.ghost)
|
||||
expect(issue.title).to eq('Title')
|
||||
expect(issue.description).to eq('# This is markdown\n it can contain more text.')
|
||||
expect(issue).to be_closed
|
||||
expect(issue.created_at).to eq(Time.at(1518688921))
|
||||
expect(issue.closed_at).to eq(Time.at(1518789995))
|
||||
end
|
||||
|
||||
it 'does not recreate the issue when called multiple times' do
|
||||
expect { described_class.new(project, task).execute }
|
||||
.to change { project.issues.reload.size }.from(0).to(1)
|
||||
expect { described_class.new(project, task).execute }
|
||||
.not_to change { project.issues.reload.size }
|
||||
end
|
||||
|
||||
it 'does not trigger a save when the object did not change' do
|
||||
existing_issue = create(:issue,
|
||||
task.issue_attributes.merge(author: User.ghost))
|
||||
importer = described_class.new(project, task)
|
||||
allow(importer).to receive(:issue).and_return(existing_issue)
|
||||
|
||||
expect(existing_issue).not_to receive(:save!)
|
||||
|
||||
importer.execute
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,58 @@
|
|||
# frozen_string_literal: true
|
||||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::PhabricatorImport::ProjectCreator do
|
||||
let(:user) { create(:user) }
|
||||
let(:params) do
|
||||
{ path: 'new-phab-import',
|
||||
phabricator_server_url: 'http://phab.example.com',
|
||||
api_token: 'the-token' }
|
||||
end
|
||||
subject(:creator) { described_class.new(user, params) }
|
||||
|
||||
describe '#execute' do
|
||||
it 'creates a project correctly and schedule an import' do
|
||||
expect_next_instance_of(Gitlab::PhabricatorImport::Importer) do |importer|
|
||||
expect(importer).to receive(:execute)
|
||||
end
|
||||
|
||||
project = creator.execute
|
||||
|
||||
expect(project).to be_persisted
|
||||
expect(project).to be_import
|
||||
expect(project.import_type).to eq('phabricator')
|
||||
expect(project.import_data.credentials).to match(a_hash_including(api_token: 'the-token'))
|
||||
expect(project.import_data.data).to match(a_hash_including('phabricator_url' => 'http://phab.example.com'))
|
||||
expect(project.import_url).to eq(Project::UNKNOWN_IMPORT_URL)
|
||||
expect(project.namespace).to eq(user.namespace)
|
||||
end
|
||||
|
||||
context 'when import params are missing' do
|
||||
let(:params) do
|
||||
{ path: 'new-phab-import',
|
||||
phabricator_server_url: 'http://phab.example.com',
|
||||
api_token: '' }
|
||||
end
|
||||
|
||||
it 'returns nil' do
|
||||
expect(creator.execute).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when import params are invalid' do
|
||||
let(:params) do
|
||||
{ path: 'new-phab-import',
|
||||
namespace_id: '-1',
|
||||
phabricator_server_url: 'http://phab.example.com',
|
||||
api_token: 'the-token' }
|
||||
end
|
||||
|
||||
it 'returns an unpersisted project' do
|
||||
project = creator.execute
|
||||
|
||||
expect(project).not_to be_persisted
|
||||
expect(project).not_to be_valid
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,33 @@
|
|||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::PhabricatorImport::Representation::Task do
|
||||
subject(:task) do
|
||||
described_class.new(
|
||||
{
|
||||
'phid' => 'the-phid',
|
||||
'fields' => {
|
||||
'name' => 'Title'.ljust(257, '.'), # A string padded to 257 chars
|
||||
'description' => {
|
||||
'raw' => '# This is markdown\n it can contain more text.'
|
||||
},
|
||||
'dateCreated' => '1518688921',
|
||||
'dateClosed' => '1518789995'
|
||||
}
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
describe '#issue_attributes' do
|
||||
it 'contains the expected values' do
|
||||
expected_attributes = {
|
||||
title: 'Title'.ljust(255, '.'),
|
||||
description: '# This is markdown\n it can contain more text.',
|
||||
state: :closed,
|
||||
created_at: Time.at(1518688921),
|
||||
closed_at: Time.at(1518789995)
|
||||
}
|
||||
|
||||
expect(task.issue_attributes).to eq(expected_attributes)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,46 @@
|
|||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::PhabricatorImport::WorkerState, :clean_gitlab_redis_shared_state do
|
||||
subject(:state) { described_class.new('weird-project-id') }
|
||||
let(:key) { 'phabricator-import/jobs/project-weird-project-id/job-count' }
|
||||
|
||||
describe '#add_job' do
|
||||
it 'increments the counter for jobs' do
|
||||
set_value(3)
|
||||
|
||||
expect { state.add_job }.to change { get_value }.from('3').to('4')
|
||||
end
|
||||
end
|
||||
|
||||
describe '#remove_job' do
|
||||
it 'decrements the counter for jobs' do
|
||||
set_value(3)
|
||||
|
||||
expect { state.remove_job }.to change { get_value }.from('3').to('2')
|
||||
end
|
||||
end
|
||||
|
||||
describe '#running_count' do
|
||||
it 'reads the value' do
|
||||
set_value(9)
|
||||
|
||||
expect(state.running_count).to eq(9)
|
||||
end
|
||||
|
||||
it 'returns 0 when nothing was set' do
|
||||
expect(state.running_count).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
def set_value(value)
|
||||
redis.with { |r| r.set(key, value) }
|
||||
end
|
||||
|
||||
def get_value
|
||||
redis.with { |r| r.get(key) }
|
||||
end
|
||||
|
||||
def redis
|
||||
Gitlab::Redis::SharedState
|
||||
end
|
||||
end
|
||||
|
|
@@ -174,3 +174,15 @@ describe Import::GitlabProjectsController, 'routing' do
    expect(get('/import/gitlab_project/new')).to route_to('import/gitlab_projects#new')
  end
end

# new_import_phabricator GET /import/phabricator/new(.:format) import/phabricator#new
#     import_phabricator POST /import/phabricator(.:format) import/phabricator#create
describe Import::PhabricatorController, 'routing' do
  it 'to #create' do
    expect(post("/import/phabricator")).to route_to("import/phabricator#create")
  end

  it 'to #new' do
    expect(get("/import/phabricator/new")).to route_to("import/phabricator#new")
  end
end