diff --git a/README.md b/README.md
index ee46c4ccdf6..9fd168f208e 100644
--- a/README.md
+++ b/README.md
@@ -47,7 +47,7 @@ On [about.gitlab.com](https://about.gitlab.com/) you can find more information a
- [Community](https://about.gitlab.com/community/)
- [Hosted GitLab.com](https://about.gitlab.com/gitlab-com/) use GitLab as a free service
- [GitLab Enterprise Edition](https://about.gitlab.com/features/#enterprise) with additional features aimed at larger organizations.
-- [GitLab CI](https://about.gitlab.com/gitlab-ci/) a continuous integration (CI) server that is easy to integrate with GitLab.
+- [GitLab CI](https://about.gitlab.com/solutions/continuous-integration/) a continuous integration (CI) server that is easy to integrate with GitLab.
## Requirements
diff --git a/app/policies/project_member_policy.rb b/app/policies/project_member_policy.rb
index ace74dca448..9d6a8c22e6d 100644
--- a/app/policies/project_member_policy.rb
+++ b/app/policies/project_member_policy.rb
@@ -37,3 +37,5 @@ class ProjectMemberPolicy < BasePolicy
enable :withdraw_member_access_request
end
end
+
+ProjectMemberPolicy.prepend_mod_with('ProjectMemberPolicy')
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index e29f67308a2..8bc33fb46be 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -2352,6 +2352,15 @@
:weight: 1
:idempotent: false
:tags: []
+- :name: bitbucket_import_import_pull_request
+ :worker_name: Gitlab::BitbucketImport::ImportPullRequestWorker
+ :feature_category: :importers
+ :has_external_dependencies: true
+ :urgency: :low
+ :resource_boundary: :unknown
+ :weight: 1
+ :idempotent: false
+ :tags: []
- :name: bitbucket_import_stage_finish_import
:worker_name: Gitlab::BitbucketImport::Stage::FinishImportWorker
:feature_category: :importers
@@ -2361,6 +2370,15 @@
:weight: 1
:idempotent: false
:tags: []
+- :name: bitbucket_import_stage_import_pull_requests
+ :worker_name: Gitlab::BitbucketImport::Stage::ImportPullRequestsWorker
+ :feature_category: :importers
+ :has_external_dependencies: true
+ :urgency: :low
+ :resource_boundary: :unknown
+ :weight: 1
+ :idempotent: false
+ :tags: []
- :name: bitbucket_import_stage_import_repository
:worker_name: Gitlab::BitbucketImport::Stage::ImportRepositoryWorker
:feature_category: :importers
diff --git a/app/workers/concerns/gitlab/bitbucket_import/object_importer.rb b/app/workers/concerns/gitlab/bitbucket_import/object_importer.rb
new file mode 100644
index 00000000000..26e6e2675ed
--- /dev/null
+++ b/app/workers/concerns/gitlab/bitbucket_import/object_importer.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BitbucketImport
+ # ObjectImporter defines the base behaviour for every Sidekiq worker that
+ # imports a single resource such as a note or pull request.
+ module ObjectImporter
+ extend ActiveSupport::Concern
+
+ included do
+ include ApplicationWorker
+
+ data_consistency :always
+
+ feature_category :importers
+
+ worker_has_external_dependencies!
+
+ sidekiq_retries_exhausted do |msg|
+ args = msg['args']
+ jid = msg['jid']
+
+ # If a job's retries are exhausted, we still want to notify the
+ # Gitlab::Import::AdvanceStageWorker to prevent the entire import from getting stuck
+ key = args.last
+ JobWaiter.notify(key, jid) if args.length == 3 && key.is_a?(String)
+ end
+ end
+
+ def perform(project_id, hash, notify_key)
+ project = Project.find_by_id(project_id)
+
+ return unless project
+
+ if project.import_state&.canceled?
+ info(project.id, message: 'project import canceled')
+ return
+ end
+
+ import(project, hash)
+ ensure
+ notify_waiter(notify_key)
+ end
+
+ private
+
+ # project - An instance of `Project` to import the data into.
+ # hash - A Hash containing the details of the object to import.
+ def import(project, hash)
+ info(project.id, message: 'importer started')
+
+ importer_class.new(project, hash).execute
+
+ info(project.id, message: 'importer finished')
+ rescue ActiveRecord::RecordInvalid => e
+ # We do not raise exception to prevent job retry
+ track_exception(project, e)
+ rescue StandardError => e
+ track_and_raise_exception(project, e)
+ end
+
+ def notify_waiter(key)
+ JobWaiter.notify(key, jid)
+ end
+
+ # Returns the class to use for importing the object.
+ def importer_class
+ raise NotImplementedError
+ end
+
+ def info(project_id, extra = {})
+ Logger.info(log_attributes(project_id, extra))
+ end
+
+ def log_attributes(project_id, extra = {})
+ extra.merge(
+ project_id: project_id,
+ importer: importer_class.name
+ )
+ end
+
+ def track_exception(project, exception, fail_import: false)
+ Gitlab::Import::ImportFailureService.track(
+ project_id: project.id,
+ error_source: importer_class.name,
+ exception: exception,
+ fail_import: fail_import
+ )
+ end
+
+ def track_and_raise_exception(project, exception, fail_import: false)
+ track_exception(project, exception, fail_import: fail_import)
+
+ raise(exception)
+ end
+ end
+ end
+end
diff --git a/app/workers/gitlab/bitbucket_import/import_pull_request_worker.rb b/app/workers/gitlab/bitbucket_import/import_pull_request_worker.rb
new file mode 100644
index 00000000000..5b06ddf7079
--- /dev/null
+++ b/app/workers/gitlab/bitbucket_import/import_pull_request_worker.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BitbucketImport
+ class ImportPullRequestWorker # rubocop:disable Scalability/IdempotentWorker
+ include ObjectImporter
+
+ def importer_class
+ Importers::PullRequestImporter
+ end
+ end
+ end
+end
diff --git a/app/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker.rb b/app/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker.rb
new file mode 100644
index 00000000000..e1f3b5ab79a
--- /dev/null
+++ b/app/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BitbucketImport
+ module Stage
+ class ImportPullRequestsWorker # rubocop:disable Scalability/IdempotentWorker
+ include StageMethods
+
+ private
+
+ # project - An instance of Project.
+ def import(project)
+ waiter = importer_class.new(project).execute
+
+ project.import_state.refresh_jid_expiration
+
+ AdvanceStageWorker.perform_async(
+ project.id,
+ { waiter.key => waiter.jobs_remaining },
+ :finish
+ )
+ end
+
+ def importer_class
+ Importers::PullRequestsImporter
+ end
+ end
+ end
+ end
+end
diff --git a/app/workers/gitlab/bitbucket_import/stage/import_repository_worker.rb b/app/workers/gitlab/bitbucket_import/stage/import_repository_worker.rb
index e311b837311..7c6503ae38f 100644
--- a/app/workers/gitlab/bitbucket_import/stage/import_repository_worker.rb
+++ b/app/workers/gitlab/bitbucket_import/stage/import_repository_worker.rb
@@ -13,7 +13,7 @@ module Gitlab
importer.execute
- FinishImportWorker.perform_async(project.id)
+ ImportPullRequestsWorker.perform_async(project.id)
end
def importer_class
diff --git a/config/sidekiq_queues.yml b/config/sidekiq_queues.yml
index a6e139ba7cf..4957e5f4979 100644
--- a/config/sidekiq_queues.yml
+++ b/config/sidekiq_queues.yml
@@ -81,8 +81,12 @@
- 1
- - bitbucket_import_advance_stage
- 1
+- - bitbucket_import_import_pull_request
+ - 1
- - bitbucket_import_stage_finish_import
- 1
+- - bitbucket_import_stage_import_pull_requests
+ - 1
- - bitbucket_import_stage_import_repository
- 1
- - bitbucket_server_import_advance_stage
diff --git a/danger/architecture/Dangerfile b/danger/architecture/Dangerfile
index 148180247fe..a8cfbab394f 100644
--- a/danger/architecture/Dangerfile
+++ b/danger/architecture/Dangerfile
@@ -7,7 +7,7 @@ return unless helper.ci?
blueprint_changes = helper.changed_files(%r{^doc/architecture/blueprints/.*})
BLUEPRINT_SHORT_MESSAGE = <<~MSG
-This merge request requires a review from an [Architecture Evolution Coach](https://about.gitlab.com/handbook/engineering/architecture/workflow/).
+This merge request might require a review from a [Coach Engineer](https://about.gitlab.com/handbook/engineering/architecture/workflow/).
MSG
BLUEPRINT_LONG_MESSAGE = <<~MSG
diff --git a/doc/api/users.md b/doc/api/users.md
index 58d852961d3..e5e8f7be759 100644
--- a/doc/api/users.md
+++ b/doc/api/users.md
@@ -2032,7 +2032,7 @@ POST /users/:user_id/impersonation_tokens
| ------------ | ------- | -------- | --------------------------------------------------------------------------- |
| `user_id` | integer | yes | ID of the user |
| `name` | string | yes | Name of the impersonation token |
-| `expires_at` | date | no | Expiration date of the impersonation token in ISO format (`YYYY-MM-DD`) |
+| `expires_at` | date | yes | Expiration date of the impersonation token in ISO format (`YYYY-MM-DD`) |
| `scopes` | array | yes | Array of scopes of the impersonation token (`api`, `read_user`) |
```shell
diff --git a/doc/user/ai_features.md b/doc/user/ai_features.md
index a31206c32f3..feea06666dc 100644
--- a/doc/user/ai_features.md
+++ b/doc/user/ai_features.md
@@ -11,7 +11,7 @@ GitLab is creating AI-assisted features across our DevSecOps platform. These fea
| Feature | Purpose | Large Language Model | Current availability | Maturity |
|-|-|-|-|-|
-| [Suggested Reviewers](project/merge_requests/reviews/index.md#suggested-reviewers) | Assists in creating faster and higher-quality reviews by automatically suggesting reviewers for your merge request. | GitLab creates a machine learning model for each project, which is used to generate reviewers
[View the issue](https://gitlab.com/gitlab-org/modelops/applied-ml/applied-ml-updates/-/issues/10) | SaaS only | [Generally Available (GA)](../policy/experiment-beta-support.md#generally-available-ga) |
+| [Suggested Reviewers](project/merge_requests/reviews/index.md#gitlab-duo-suggested-reviewers) | Assists in creating faster and higher-quality reviews by automatically suggesting reviewers for your merge request. | GitLab creates a machine learning model for each project, which is used to generate reviewers
[View the issue](https://gitlab.com/gitlab-org/modelops/applied-ml/applied-ml-updates/-/issues/10) | SaaS only | [Generally Available (GA)](../policy/experiment-beta-support.md#generally-available-ga) |
| [Code Suggestions](project/repository/code_suggestions/index.md) | Helps you write code more efficiently by viewing code suggestions as you type. | [Google Vertex Codey APIs](https://cloud.google.com/vertex-ai/docs/generative-ai/code/code-models-overview) | SaaS
Self-managed | [Beta](../policy/experiment-beta-support.md#beta) |
| [Vulnerability summary](application_security/vulnerabilities/index.md#explaining-a-vulnerability) | Helps you remediate vulnerabilities more efficiently, uplevel your skills, and write more secure code. | [Google Vertex Codey APIs](https://cloud.google.com/vertex-ai/docs/generative-ai/code/code-models-overview)
Anthropic's claude model if degraded performance | SaaS only
Ultimate tier | [Beta](../policy/experiment-beta-support.md#beta) |
| [Code explanation](#explain-code-in-the-web-ui-with-code-explanation) | Helps you understand code by explaining it in English language. | [Google Vertex Codey APIs](https://cloud.google.com/vertex-ai/docs/generative-ai/code/code-models-overview) | SaaS only
Ultimate tier | [Experiment](../policy/experiment-beta-support.md#experiment) |
diff --git a/doc/user/packages/container_registry/troubleshoot_container_registry.md b/doc/user/packages/container_registry/troubleshoot_container_registry.md
index 70a4a3c6542..13e14dfdeb4 100644
--- a/doc/user/packages/container_registry/troubleshoot_container_registry.md
+++ b/doc/user/packages/container_registry/troubleshoot_container_registry.md
@@ -127,3 +127,12 @@ time is set to 15 minutes.
If you are using self-managed GitLab, an administrator can
[increase the token duration](../../../administration/packages/container_registry.md#increase-token-duration).
+
+## Slow uploads when using `kaniko` to push large images
+
+When you push large images with `kaniko`, you might experience uncharacteristically long delays.
+
+This is typically a result of [a performance issue with `kaniko` and HTTP/2](https://github.com/GoogleContainerTools/kaniko/issues/2751).
+The current workaround is to use HTTP/1.1 when pushing with `kaniko`.
+
+To use HTTP/1.1, set the `GODEBUG` environment variable to `"http2client=0"`.
diff --git a/doc/user/project/merge_requests/reviews/index.md b/doc/user/project/merge_requests/reviews/index.md
index 3b8b3009c38..c09071e856c 100644
--- a/doc/user/project/merge_requests/reviews/index.md
+++ b/doc/user/project/merge_requests/reviews/index.md
@@ -21,7 +21,7 @@ review merge requests in Visual Studio Code.
For an overview, see [Merge request review](https://www.youtube.com/watch?v=2MayfXKpU08&list=PLFGfElNsQthYDx0A_FaNNfUm9NHsK6zED&index=183).
-## Suggested reviewers **(ULTIMATE SAAS)**
+## GitLab Duo Suggested Reviewers **(ULTIMATE SAAS)**
> - [Introduced](https://gitlab.com/groups/gitlab-org/modelops/applied-ml/review-recommender/-/epics/3) in GitLab 15.4 as a [Beta](../../../../policy/experiment-beta-support.md#beta) feature [with a flag](../../../../administration/feature_flags.md) named `suggested_reviewers_control`. Disabled by default.
> - [Enabled on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues/368356) in GitLab 15.6.
@@ -34,11 +34,11 @@ To suggest reviewers, GitLab uses:
- The changes in the merge request
- The project's contribution graph
-Suggested Reviewers also integrates with Code Owners, profile status, and merge request rules, helping you make a more informed decision when choosing reviewers that can meet your review criteria.
+GitLab Duo Suggested Reviewers also integrates with Code Owners, profile status, and merge request rules, helping you make a more informed decision when choosing reviewers that can meet your review criteria.
-
+
-For more information, see [Data usage in Suggested Reviewers](data_usage.md).
+For more information, see [Data usage in GitLab Duo Suggested Reviewers](data_usage.md).
### Enable suggested reviewers
diff --git a/doc/user/project/repository/code_suggestions/self_managed.md b/doc/user/project/repository/code_suggestions/self_managed.md
index b6de4e632b9..3c149604086 100644
--- a/doc/user/project/repository/code_suggestions/self_managed.md
+++ b/doc/user/project/repository/code_suggestions/self_managed.md
@@ -50,7 +50,8 @@ To enable Code Suggestions for your self-managed GitLab instance:
1. Select **Admin Area**.
1. On the left sidebar, select **Settings > General**.
1. Expand **Code Suggestions** and select **Turn on Code Suggestions for this instance**.
- You do not need to enter anything into the **Personal access token** field.
+ In GitLab 16.3, you do not need to enter anything into the **Personal access token** field.
+ In GitLab 16.4 and later, there is no **Personal access token** field.
1. Select **Save changes**.
This setting is visible only in self-managed GitLab instances.
diff --git a/doc/user/project/settings/index.md b/doc/user/project/settings/index.md
index c7d78beb4ef..d9c114c0a59 100644
--- a/doc/user/project/settings/index.md
+++ b/doc/user/project/settings/index.md
@@ -119,7 +119,7 @@ Configure your project's merge request settings:
- [Merge only if pipeline succeeds](../merge_requests/merge_when_pipeline_succeeds.md).
- [Merge only when all threads are resolved](../merge_requests/index.md#prevent-merge-unless-all-threads-are-resolved).
- [Required associated issue from Jira](../../../integration/jira/issues.md#require-associated-jira-issue-for-merge-requests-to-be-merged).
- - [Suggested Reviewers](../merge_requests/reviews/index.md#suggested-reviewers)
+ - [GitLab Duo Suggested Reviewers](../merge_requests/reviews/index.md#gitlab-duo-suggested-reviewers)
- [**Delete source branch when merge request is accepted** option by default](#delete-the-source-branch-on-merge-by-default).
- Configure:
- [Suggested changes commit messages](../merge_requests/reviews/suggestions.md#configure-the-commit-message-for-applied-suggestions).
diff --git a/lib/bitbucket/representation/pull_request.rb b/lib/bitbucket/representation/pull_request.rb
index 91a55f9bd2d..6451b8f5d1f 100644
--- a/lib/bitbucket/representation/pull_request.rb
+++ b/lib/bitbucket/representation/pull_request.rb
@@ -39,19 +39,41 @@ module Bitbucket
end
def source_branch_name
- source_branch.dig('branch', 'name')
+ source_branch&.dig('branch', 'name')
end
def source_branch_sha
- source_branch.dig('commit', 'hash')
+ source_branch&.dig('commit', 'hash')
end
def target_branch_name
- target_branch.dig('branch', 'name')
+ target_branch&.dig('branch', 'name')
end
def target_branch_sha
- target_branch.dig('commit', 'hash')
+ target_branch&.dig('commit', 'hash')
+ end
+
+ def reviewers
+ raw['reviewers']&.pluck('username')
+ end
+
+ def to_hash
+ {
+ iid: iid,
+ author: author,
+ description: description,
+ created_at: created_at,
+ updated_at: updated_at,
+ state: state,
+ title: title,
+ source_branch_name: source_branch_name,
+ source_branch_sha: source_branch_sha,
+ merge_commit_sha: merge_commit_sha,
+ target_branch_name: target_branch_name,
+ target_branch_sha: target_branch_sha,
+ reviewers: reviewers
+ }
end
def merge_commit_sha
diff --git a/lib/gitlab/bitbucket_import/importers/pull_request_importer.rb b/lib/gitlab/bitbucket_import/importers/pull_request_importer.rb
new file mode 100644
index 00000000000..d76e08e1039
--- /dev/null
+++ b/lib/gitlab/bitbucket_import/importers/pull_request_importer.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BitbucketImport
+ module Importers
+ class PullRequestImporter
+ include Loggable
+
+ def initialize(project, hash)
+ @project = project
+ @formatter = Gitlab::ImportFormatter.new
+ @user_finder = UserFinder.new(project)
+ @object = hash.with_indifferent_access
+ end
+
+ def execute
+ log_info(import_stage: 'import_pull_request', message: 'starting', iid: object[:iid])
+
+ description = ''
+ description += author_line
+ description += object[:description] if object[:description]
+
+ attributes = {
+ iid: object[:iid],
+ title: object[:title],
+ description: description,
+ source_project_id: project.id,
+ source_branch: Gitlab::Git.ref_name(object[:source_branch_name]),
+ source_branch_sha: source_branch_sha,
+ target_project_id: project.id,
+ target_branch: Gitlab::Git.ref_name(object[:target_branch_name]),
+ target_branch_sha: object[:target_branch_sha],
+ state_id: MergeRequest.available_states[object[:state]],
+ author_id: author_id,
+ created_at: object[:created_at],
+ updated_at: object[:updated_at]
+ }
+
+ creator = Gitlab::Import::MergeRequestCreator.new(project)
+
+ merge_request = creator.execute(attributes)
+
+ if merge_request
+ merge_request.assignee_ids = [author_id]
+ merge_request.reviewer_ids = reviewers
+ merge_request.save!
+ end
+
+ log_info(import_stage: 'import_pull_request', message: 'finished', iid: object[:iid])
+ rescue StandardError => e
+ Gitlab::Import::ImportFailureService.track(project_id: project.id, exception: e)
+ end
+
+ private
+
+ attr_reader :object, :project, :formatter, :user_finder
+
+ def author_line
+ return '' if find_user_id
+
+ formatter.author_line(object[:author])
+ end
+
+ def find_user_id
+ user_finder.find_user_id(object[:author])
+ end
+
+ def author_id
+ user_finder.gitlab_user_id(project, object[:author])
+ end
+
+ def reviewers
+ return [] if object[:reviewers].blank?
+
+ object[:reviewers].filter_map do |reviewer|
+ user_finder.find_user_id(reviewer)
+ end
+ end
+
+ def source_branch_sha
+ project.repository.commit(object[:source_branch_sha])&.sha ||
+ project.repository.commit(object[:merge_commit_sha])&.sha ||
+ object[:source_branch_sha]
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/bitbucket_import/importers/pull_requests_importer.rb b/lib/gitlab/bitbucket_import/importers/pull_requests_importer.rb
new file mode 100644
index 00000000000..1c7ce7f2f3a
--- /dev/null
+++ b/lib/gitlab/bitbucket_import/importers/pull_requests_importer.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BitbucketImport
+ module Importers
+ class PullRequestsImporter
+ include ParallelScheduling
+
+ def execute
+ log_info(import_stage: 'import_pull_requests', message: 'importing pull requests')
+
+ pull_requests = client.pull_requests(project.import_source)
+
+ pull_requests.each do |pull_request|
+ job_waiter.jobs_remaining += 1
+
+ next if already_enqueued?(pull_request)
+
+ job_delay = calculate_job_delay(job_waiter.jobs_remaining)
+
+ sidekiq_worker_class.perform_in(job_delay, project.id, pull_request.to_hash, job_waiter.key)
+
+ mark_as_enqueued(pull_request)
+ end
+
+ job_waiter
+ rescue StandardError => e
+ track_import_failure!(project, exception: e)
+ end
+
+ private
+
+ def sidekiq_worker_class
+ ImportPullRequestWorker
+ end
+
+ def collection_method
+ :pull_requests
+ end
+
+ def id_for_already_enqueued_cache(object)
+ object.iid
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/bitbucket_import/parallel_scheduling.rb b/lib/gitlab/bitbucket_import/parallel_scheduling.rb
new file mode 100644
index 00000000000..f4df9a35526
--- /dev/null
+++ b/lib/gitlab/bitbucket_import/parallel_scheduling.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BitbucketImport
+ module ParallelScheduling
+ include Loggable
+
+ attr_reader :project, :already_enqueued_cache_key, :job_waiter_cache_key
+
+ # The base cache key to use for tracking already enqueued objects.
+ ALREADY_ENQUEUED_CACHE_KEY =
+ 'bitbucket-importer/already-enqueued/%{project}/%{collection}'
+
+ # The base cache key to use for storing job waiter key
+ JOB_WAITER_CACHE_KEY =
+ 'bitbucket-importer/job-waiter/%{project}/%{collection}'
+
+ BATCH_SIZE = 100
+
+ # project - An instance of `Project`.
+ def initialize(project)
+ @project = project
+
+ @already_enqueued_cache_key =
+ format(ALREADY_ENQUEUED_CACHE_KEY, project: project.id, collection: collection_method)
+ @job_waiter_cache_key =
+ format(JOB_WAITER_CACHE_KEY, project: project.id, collection: collection_method)
+ end
+
+ private
+
+ def client
+ @client ||= Bitbucket::Client.new(project.import_data.credentials)
+ end
+
+ # Returns the ID to use for the cache used for checking if an object has
+ # already been enqueued or not.
+ #
+ # object - The object we may want to import.
+ def id_for_already_enqueued_cache(object)
+ raise NotImplementedError
+ end
+
+ # The Sidekiq worker class used for scheduling the importing of objects in
+ # parallel.
+ def sidekiq_worker_class
+ raise NotImplementedError
+ end
+
+ # The name of the method to call to retrieve the data to import.
+ def collection_method
+ raise NotImplementedError
+ end
+
+ def job_waiter
+ @job_waiter ||= begin
+ key = Gitlab::Cache::Import::Caching.read(job_waiter_cache_key)
+ key ||= Gitlab::Cache::Import::Caching.write(job_waiter_cache_key, JobWaiter.generate_key)
+
+ JobWaiter.new(0, key)
+ end
+ end
+
+ def already_enqueued?(object)
+ id = id_for_already_enqueued_cache(object)
+
+ Gitlab::Cache::Import::Caching.set_includes?(already_enqueued_cache_key, id)
+ end
+
+ # Marks the given object as "already enqueued".
+ def mark_as_enqueued(object)
+ id = id_for_already_enqueued_cache(object)
+
+ Gitlab::Cache::Import::Caching.set_add(already_enqueued_cache_key, id)
+ end
+
+ def calculate_job_delay(job_index)
+ multiplier = (job_index / BATCH_SIZE)
+
+ (multiplier * 1.minute) + 1.second
+ end
+
+ def track_import_failure!(project, exception:, **args)
+ Gitlab::Import::ImportFailureService.track(
+ project_id: project.id,
+ error_source: self.class.name,
+ exception: exception,
+ **args
+ )
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/bitbucket_import/user_finder.rb b/lib/gitlab/bitbucket_import/user_finder.rb
new file mode 100644
index 00000000000..70ed94351d5
--- /dev/null
+++ b/lib/gitlab/bitbucket_import/user_finder.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BitbucketImport
+ class UserFinder
+ USER_ID_FOR_AUTHOR_CACHE_KEY = 'bitbucket-importer/user-finder/%{project_id}/%{author}'
+ CACHE_USER_ID_NOT_FOUND = -1
+
+ attr_reader :project
+
+ def initialize(project)
+ @project = project
+ end
+
+ def find_user_id(author)
+ return unless author
+
+ cache_key = build_cache_key(author)
+ cached_id = cache.read_integer(cache_key)
+
+ return if cached_id == CACHE_USER_ID_NOT_FOUND
+ return cached_id if cached_id
+
+ id = User.by_provider_and_extern_uid(:bitbucket, author).select(:id).first&.id
+
+ cache.write(cache_key, id || CACHE_USER_ID_NOT_FOUND)
+
+ id
+ end
+
+ def gitlab_user_id(project, username)
+ find_user_id(username) || project.creator_id
+ end
+
+ private
+
+ def cache
+ Cache::Import::Caching
+ end
+
+ def build_cache_key(author)
+ format(USER_ID_FOR_AUTHOR_CACHE_KEY, project_id: project.id, author: author)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/prometheus/queries/matched_metric_query.rb b/lib/gitlab/prometheus/queries/matched_metric_query.rb
new file mode 100644
index 00000000000..73de5a11998
--- /dev/null
+++ b/lib/gitlab/prometheus/queries/matched_metric_query.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Prometheus
+ module Queries
+ class MatchedMetricQuery < BaseQuery
+ MAX_QUERY_ITEMS = 40
+
+ def query
+ groups_data.map do |group, data|
+ {
+ group: group.name,
+ priority: group.priority,
+ active_metrics: data[:active_metrics],
+ metrics_missing_requirements: data[:metrics_missing_requirements]
+ }
+ end
+ end
+
+ private
+
+ def groups_data
+ metrics_groups = groups_with_active_metrics(Gitlab::Prometheus::MetricGroup.common_metrics)
+ lookup = active_series_lookup(metrics_groups)
+
+ groups = {}
+
+ metrics_groups.each do |group|
+ groups[group] ||= { active_metrics: 0, metrics_missing_requirements: 0 }
+ active_metrics = group.metrics.count { |metric| metric.required_metrics.all?(&lookup.method(:has_key?)) }
+
+ groups[group][:active_metrics] += active_metrics
+ groups[group][:metrics_missing_requirements] += group.metrics.count - active_metrics
+ end
+
+ groups
+ end
+
+ def active_series_lookup(metric_groups)
+ timeframe_start = 8.hours.ago
+ timeframe_end = Time.now
+
+ series = metric_groups.flat_map(&:metrics).flat_map(&:required_metrics).uniq
+
+ lookup = series.each_slice(MAX_QUERY_ITEMS).flat_map do |batched_series|
+ client_series(*batched_series, start_time: timeframe_start, end_time: timeframe_end)
+ .select(&method(:has_matching_label?))
+ .map { |series_info| [series_info['__name__'], true] }
+ end
+ lookup.to_h
+ end
+
+ def has_matching_label?(series_info)
+ series_info.key?('environment')
+ end
+
+ def available_metrics
+ @available_metrics ||= client_label_values || []
+ end
+
+ def filter_active_metrics(metric_group)
+ metric_group.metrics.select! do |metric|
+ metric.required_metrics.all?(&available_metrics.method(:include?))
+ end
+ metric_group
+ end
+
+ def groups_with_active_metrics(metric_groups)
+ metric_groups.map(&method(:filter_active_metrics)).select { |group| group.metrics.any? }
+ end
+
+ def metrics_with_required_series(metric_groups)
+ metric_groups.flat_map do |group|
+ group.metrics.select do |metric|
+ metric.required_metrics.all?(&available_metrics.method(:include?))
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/prometheus/queries/validate_query.rb b/lib/gitlab/prometheus/queries/validate_query.rb
new file mode 100644
index 00000000000..160db7d44bc
--- /dev/null
+++ b/lib/gitlab/prometheus/queries/validate_query.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Prometheus
+ module Queries
+ class ValidateQuery < BaseQuery
+ def query(query)
+ client_query(query)
+ { valid: true }
+ rescue Gitlab::PrometheusClient::QueryError, Gitlab::PrometheusClient::ConnectionError => ex
+ { valid: false, error: ex.message }
+ end
+
+ def self.transform_reactive_result(result)
+ result[:query] = result.delete(:data)
+ result
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/bitbucket/representation/pull_request_spec.rb b/spec/lib/bitbucket/representation/pull_request_spec.rb
index 544dd498aea..9ebf59ecf82 100644
--- a/spec/lib/bitbucket/representation/pull_request_spec.rb
+++ b/spec/lib/bitbucket/representation/pull_request_spec.rb
@@ -61,4 +61,50 @@ RSpec.describe Bitbucket::Representation::PullRequest, feature_category: :import
it { expect(described_class.new('merge_commit' => { 'hash' => 'SHA' }).merge_commit_sha).to eq('SHA') }
it { expect(described_class.new({}).merge_commit_sha).to be_nil }
end
+
+ describe '#to_hash' do
+ it do
+ raw = {
+ 'id' => 11,
+ 'description' => 'description',
+ 'author' => { 'nickname' => 'user-1' },
+ 'state' => 'MERGED',
+ 'created_on' => 'created-at',
+ 'updated_on' => 'updated-at',
+ 'title' => 'title',
+ 'source' => {
+ 'branch' => { 'name' => 'source-branch-name' },
+ 'commit' => { 'hash' => 'source-commit-hash' }
+ },
+ 'destination' => {
+ 'branch' => { 'name' => 'destination-branch-name' },
+ 'commit' => { 'hash' => 'destination-commit-hash' }
+ },
+ 'merge_commit' => { 'hash' => 'merge-commit-hash' },
+ 'reviewers' => [
+ {
+ 'username' => 'user-2'
+ }
+ ]
+ }
+
+ expected_hash = {
+ author: 'user-1',
+ created_at: 'created-at',
+ description: 'description',
+ iid: 11,
+ source_branch_name: 'source-branch-name',
+ source_branch_sha: 'source-commit-hash',
+ merge_commit_sha: 'merge-commit-hash',
+ state: 'merged',
+ target_branch_name: 'destination-branch-name',
+ target_branch_sha: 'destination-commit-hash',
+ title: 'title',
+ updated_at: 'updated-at',
+ reviewers: ['user-2']
+ }
+
+ expect(described_class.new(raw).to_hash).to eq(expected_hash)
+ end
+ end
end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb
new file mode 100644
index 00000000000..2eca6bb47d6
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb
@@ -0,0 +1,166 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestImporter, :clean_gitlab_redis_cache, feature_category: :importers do
+ include AfterNextHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:bitbucket_user) { create(:user) }
+ let_it_be(:user_2) { create(:user) }
+ let_it_be(:user_3) { create(:user) }
+ let_it_be(:identity) { create(:identity, user: bitbucket_user, extern_uid: 'bitbucket_user', provider: :bitbucket) }
+ let_it_be(:identity_2) { create(:identity, user: user_2, extern_uid: 'user_2', provider: :bitbucket) }
+ let(:source_branch_sha) { project.repository.commit.sha }
+ let(:target_branch_sha) { project.repository.commit('refs/heads/master').sha }
+
+ let(:hash) do
+ {
+ author: 'bitbucket_user',
+ created_at: Date.today,
+ description: 'description',
+ iid: 11,
+ source_branch_name: 'source-branch-name',
+ source_branch_sha: source_branch_sha,
+ state: 'merged',
+ target_branch_name: 'destination-branch-name',
+ target_branch_sha: target_branch_sha,
+ title: 'title',
+ updated_at: Date.today,
+ reviewers: %w[user_2 user_3]
+ }
+ end
+
+ subject(:importer) { described_class.new(project, hash) }
+
+ describe '#execute' do
+ it 'calls MergeRequestCreator' do
+ expect(Gitlab::Import::MergeRequestCreator).to receive_message_chain(:new, :execute)
+
+ importer.execute
+ end
+
+ it 'creates a merge request with the correct attributes' do
+ expect { importer.execute }.to change { project.merge_requests.count }.from(0).to(1)
+
+ merge_request = project.merge_requests.first
+
+ expect(merge_request.iid).to eq(11)
+ expect(merge_request.author).to eq(bitbucket_user)
+ expect(merge_request.title).to eq('title')
+ expect(merge_request.merged?).to be_truthy
+ expect(merge_request.created_at).to eq(Date.today)
+ expect(merge_request.description).to eq('description')
+ expect(merge_request.source_project_id).to eq(project.id)
+ expect(merge_request.target_project_id).to eq(project.id)
+ expect(merge_request.source_branch).to eq('source-branch-name')
+ expect(merge_request.target_branch).to eq('destination-branch-name')
+ expect(merge_request.assignee_ids).to eq([bitbucket_user.id])
+ expect(merge_request.reviewer_ids).to eq([user_2.id])
+ expect(merge_request.merge_request_diffs.first.base_commit_sha).to eq(source_branch_sha)
+ expect(merge_request.merge_request_diffs.first.head_commit_sha).to eq(target_branch_sha)
+ end
+
+ context 'when the state is closed' do
+ it 'marks merge request as closed' do
+ described_class.new(project, hash.merge(state: 'closed')).execute
+
+ expect(project.merge_requests.first.closed?).to be_truthy
+ end
+ end
+
+ context 'when the state is opened' do
+ it 'marks merge request as opened' do
+ described_class.new(project, hash.merge(state: 'opened')).execute
+
+ expect(project.merge_requests.first.opened?).to be_truthy
+ end
+ end
+
+ context 'when the author does not have a bitbucket identity' do
+ before do
+ identity.update!(provider: :github)
+ end
+
+ it 'sets the author and assignee to the project creator and adds the author to the description' do
+ importer.execute
+
+ merge_request = project.merge_requests.first
+
+ expect(merge_request.author).to eq(project.creator)
+ expect(merge_request.assignee).to eq(project.creator)
+ expect(merge_request.description).to eq("*Created by: bitbucket_user*\n\ndescription")
+ end
+ end
+
+ context 'when none of the reviewers have an identity' do
+ before do
+ identity_2.destroy!
+ end
+
+ it 'does not set reviewer_ids' do
+ importer.execute
+
+ merge_request = project.merge_requests.first
+
+ expect(merge_request.reviewer_ids).to be_empty
+ end
+ end
+
+ describe 'head_commit_sha for merge request diff' do
+ let(:diff) { project.merge_requests.first.merge_request_diffs.first }
+ let(:min_length) { Commit::MIN_SHA_LENGTH }
+
+ context 'when the source commit hash from Bitbucket is found on the repo' do
+ it 'is set to the source commit hash' do
+ described_class.new(project, hash.merge(source_branch_sha: source_branch_sha)).execute
+
+ expect(diff.head_commit_sha).to eq(source_branch_sha)
+ end
+ end
+
+ context 'when the source commit hash is not found but the merge commit hash is found' do
+ it 'is set to the merge commit hash' do
+ attrs = { source_branch_sha: 'x' * min_length, merge_commit_sha: source_branch_sha }
+
+ described_class.new(project, hash.merge(attrs)).execute
+
+ expect(diff.head_commit_sha).to eq(source_branch_sha)
+ end
+ end
+
+ context 'when both the source commit and merge commit hash are not found' do
+ it 'is nil' do
+ attrs = { source_branch_sha: 'x' * min_length, merge_commit_sha: 'y' * min_length }
+
+ described_class.new(project, hash.merge(attrs)).execute
+
+ expect(diff.head_commit_sha).to be_nil
+ end
+ end
+ end
+
+ context 'when an error is raised' do
+ before do
+ allow(Gitlab::Import::MergeRequestCreator).to receive(:new).and_raise(StandardError)
+ end
+
+ it 'tracks the failure and does not fail' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ importer.execute
+ end
+ end
+
+ it 'logs its progress' do
+ allow(Gitlab::Import::MergeRequestCreator).to receive_message_chain(:new, :execute)
+
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(include(message: 'starting', iid: anything)).and_call_original
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(include(message: 'finished', iid: anything)).and_call_original
+
+ importer.execute
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb
new file mode 100644
index 00000000000..46bf099de0c
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestsImporter, feature_category: :importers do
+ let_it_be(:project) do
+ create(:project, :import_started,
+ import_data_attributes: {
+ data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
+ credentials: { 'base_uri' => 'http://bitbucket.org/', 'user' => 'bitbucket', 'password' => 'password' }
+ }
+ )
+ end
+
+ subject(:importer) { described_class.new(project) }
+
+ describe '#execute', :clean_gitlab_redis_cache do
+ before do
+ allow_next_instance_of(Bitbucket::Client) do |client|
+ allow(client).to receive(:pull_requests).and_return(
+ [
+ Bitbucket::Representation::PullRequest.new({ 'id' => 1, 'state' => 'OPENED' }),
+ Bitbucket::Representation::PullRequest.new({ 'id' => 2, 'state' => 'DECLINED' }),
+ Bitbucket::Representation::PullRequest.new({ 'id' => 3, 'state' => 'MERGED' })
+ ],
+ []
+ )
+ end
+ end
+
+ it 'imports each pull request in parallel', :aggregate_failures do
+ expect(Gitlab::BitbucketImport::ImportPullRequestWorker).to receive(:perform_in).exactly(3).times
+
+ waiter = importer.execute
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(3)
+ expect(Gitlab::Cache::Import::Caching.values_from_set(importer.already_enqueued_cache_key))
+ .to match_array(%w[1 2 3])
+ end
+
+ context 'when the client raises an error' do
+ before do
+ allow_next_instance_of(Bitbucket::Client) do |client|
+ allow(client).to receive(:pull_requests).and_raise(StandardError)
+ end
+ end
+
+ it 'tracks the failure and does not fail' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ importer.execute
+ end
+ end
+
+ context 'when pull request was already enqueued' do
+ before do
+ Gitlab::Cache::Import::Caching.set_add(importer.already_enqueued_cache_key, 1)
+ end
+
+ it 'does not schedule job for enqueued pull requests', :aggregate_failures do
+ expect(Gitlab::BitbucketImport::ImportPullRequestWorker).to receive(:perform_in).twice
+
+ waiter = importer.execute
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(3)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_import/user_finder_spec.rb b/spec/lib/gitlab/bitbucket_import/user_finder_spec.rb
new file mode 100644
index 00000000000..4ac4c2e4813
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_import/user_finder_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::UserFinder, :clean_gitlab_redis_cache, feature_category: :importers do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:identity) { create(:identity, user: user, extern_uid: 'uid', provider: :bitbucket) }
+ let(:created_id) { 1 }
+ let(:project) { instance_double(Project, creator_id: created_id, id: 1) }
+ let(:author) { 'uid' }
+ let(:cache_key) { format(described_class::USER_ID_FOR_AUTHOR_CACHE_KEY, project_id: project.id, author: author) }
+
+ subject(:user_finder) { described_class.new(project) }
+
+ describe '#find_user_id' do
+ it 'returns the user id' do
+ expect(User).to receive(:by_provider_and_extern_uid).and_call_original.once
+
+ expect(user_finder.find_user_id(author)).to eq(user.id)
+ expect(user_finder.find_user_id(author)).to eq(user.id)
+ end
+
+ context 'when the id is cached' do
+ before do
+ Gitlab::Cache::Import::Caching.write(cache_key, user.id)
+ end
+
+ it 'does not attempt to find the user' do
+ expect(User).not_to receive(:by_provider_and_extern_uid)
+
+ expect(user_finder.find_user_id(author)).to eq(user.id)
+ end
+ end
+
+ context 'when -1 is cached' do
+ before do
+ Gitlab::Cache::Import::Caching.write(cache_key, -1)
+ end
+
+ it 'does not attempt to find the user and returns nil' do
+ expect(User).not_to receive(:by_provider_and_extern_uid)
+
+ expect(user_finder.find_user_id(author)).to be_nil
+ end
+ end
+
+ context 'when the user does not have a matching bitbucket identity' do
+ before do
+ identity.update!(provider: :github)
+ end
+
+ it 'returns nil' do
+ expect(user_finder.find_user_id(author)).to be_nil
+ end
+ end
+ end
+
+ describe '#gitlab_user_id' do
+ context 'when find_user_id returns a user' do
+ it 'returns the user id' do
+ expect(user_finder.gitlab_user_id(project, author)).to eq(user.id)
+ end
+ end
+
+ context 'when find_user_id does not return a user' do
+ before do
+ allow(user_finder).to receive(:find_user_id).and_return(nil)
+ end
+
+ it 'returns the project creator' do
+ expect(user_finder.gitlab_user_id(project, author)).to eq(created_id)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb b/spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb
new file mode 100644
index 00000000000..60449aeef7d
--- /dev/null
+++ b/spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb
@@ -0,0 +1,137 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Prometheus::Queries::MatchedMetricQuery do
+ include Prometheus::MetricBuilders
+
+ let(:metric_group_class) { Gitlab::Prometheus::MetricGroup }
+ let(:metric_class) { Gitlab::Prometheus::Metric }
+
+ def series_info_with_environment(*more_metrics)
+ %w{metric_a metric_b}.concat(more_metrics).map { |metric_name| { '__name__' => metric_name, 'environment' => '' } }
+ end
+
+ let(:metric_names) { %w{metric_a metric_b} }
+ let(:series_info_without_environment) do
+ [{ '__name__' => 'metric_a' },
+ { '__name__' => 'metric_b' }]
+ end
+
+ let(:partially_empty_series_info) { [{ '__name__' => 'metric_a', 'environment' => '' }] }
+ let(:empty_series_info) { [] }
+
+ let(:client) { double('prometheus_client') }
+
+ subject { described_class.new(client) }
+
+ context 'with one group where two metrics is found' do
+ before do
+ allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group])
+ allow(client).to receive(:label_values).and_return(metric_names)
+ end
+
+ context 'both metrics in the group pass requirements' do
+ before do
+ allow(client).to receive(:series).and_return(series_info_with_environment)
+ end
+
+      it 'responds with both metrics as active' do
+ expect(subject.query).to eq([{ group: 'name', priority: 1, active_metrics: 2, metrics_missing_requirements: 0 }])
+ end
+ end
+
+ context 'none of the metrics pass requirements' do
+ before do
+ allow(client).to receive(:series).and_return(series_info_without_environment)
+ end
+
+ it 'responds with both metrics missing requirements' do
+ expect(subject.query).to eq([{ group: 'name', priority: 1, active_metrics: 0, metrics_missing_requirements: 2 }])
+ end
+ end
+
+ context 'no series information found about the metrics' do
+ before do
+ allow(client).to receive(:series).and_return(empty_series_info)
+ end
+
+ it 'responds with both metrics missing requirements' do
+ expect(subject.query).to eq([{ group: 'name', priority: 1, active_metrics: 0, metrics_missing_requirements: 2 }])
+ end
+ end
+
+ context 'one of the series info was not found' do
+ before do
+ allow(client).to receive(:series).and_return(partially_empty_series_info)
+ end
+ it 'responds with one active and one missing metric' do
+ expect(subject.query).to eq([{ group: 'name', priority: 1, active_metrics: 1, metrics_missing_requirements: 1 }])
+ end
+ end
+ end
+
+ context 'with one group where only one metric is found' do
+ before do
+ allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group])
+ allow(client).to receive(:label_values).and_return('metric_a')
+ end
+
+ context 'both metrics in the group pass requirements' do
+ before do
+ allow(client).to receive(:series).and_return(series_info_with_environment)
+ end
+
+      it 'responds with one metric as active and no missing requirements' do
+ expect(subject.query).to eq([{ group: 'name', priority: 1, active_metrics: 1, metrics_missing_requirements: 0 }])
+ end
+ end
+
+ context 'no metrics in group pass requirements' do
+ before do
+ allow(client).to receive(:series).and_return(series_info_without_environment)
+ end
+
+      it 'responds with no active metrics and one metric missing requirements' do
+ expect(subject.query).to eq([{ group: 'name', priority: 1, active_metrics: 0, metrics_missing_requirements: 1 }])
+ end
+ end
+ end
+
+ context 'with two groups where metrics are found in each group' do
+ let(:second_metric_group) { simple_metric_group(name: 'nameb', metrics: simple_metrics(added_metric_name: 'metric_c')) }
+
+ before do
+ allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group, second_metric_group])
+ allow(client).to receive(:label_values).and_return('metric_c')
+ end
+
+ context 'all metrics in both groups pass requirements' do
+ before do
+ allow(client).to receive(:series).and_return(series_info_with_environment('metric_c'))
+ end
+
+      it 'responds with all metrics as active and no missing requirements' do
+ expect(subject.query).to eq([
+ { group: 'name', priority: 1, active_metrics: 1, metrics_missing_requirements: 0 },
+ { group: 'nameb', priority: 1, active_metrics: 2, metrics_missing_requirements: 0 }
+ ]
+ )
+ end
+ end
+
+ context 'no metrics in groups pass requirements' do
+ before do
+ allow(client).to receive(:series).and_return(series_info_without_environment)
+ end
+
+      it 'responds with all metrics missing requirements' do
+ expect(subject.query).to eq([
+ { group: 'name', priority: 1, active_metrics: 0, metrics_missing_requirements: 1 },
+ { group: 'nameb', priority: 1, active_metrics: 0, metrics_missing_requirements: 2 }
+ ]
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/prometheus/queries/validate_query_spec.rb b/spec/lib/gitlab/prometheus/queries/validate_query_spec.rb
new file mode 100644
index 00000000000..f09fa3548f8
--- /dev/null
+++ b/spec/lib/gitlab/prometheus/queries/validate_query_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Prometheus::Queries::ValidateQuery do
+ include PrometheusHelpers
+
+ let(:api_url) { 'https://prometheus.example.com' }
+ let(:client) { Gitlab::PrometheusClient.new(api_url) }
+ let(:query) { 'avg(metric)' }
+
+ subject { described_class.new(client) }
+
+ context 'valid query' do
+ before do
+ allow(client).to receive(:query).with(query)
+ end
+
+    it 'passes query to prometheus' do
+ expect(subject.query(query)).to eq(valid: true)
+
+ expect(client).to have_received(:query).with(query)
+ end
+ end
+
+ context 'invalid query' do
+ let(:query) { 'invalid query' }
+ let(:error_message) { "invalid parameter 'query': 1:9: parse error: unexpected identifier \"query\"" }
+
+ it 'returns invalid' do
+ freeze_time do
+ stub_prometheus_query_error(
+ prometheus_query_with_time_url(query, Time.now),
+ error_message
+ )
+
+ expect(subject.query(query)).to eq(valid: false, error: error_message)
+ end
+ end
+ end
+
+ context 'when exceptions occur' do
+ context 'Gitlab::HTTP::BlockedUrlError' do
+ let(:api_url) { 'http://192.168.1.1' }
+
+ let(:message) { "URL is blocked: Requests to the local network are not allowed" }
+
+ before do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
+ end
+
+ it 'catches exception and returns invalid' do
+ freeze_time do
+ expect(subject.query(query)).to eq(valid: false, error: message)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/prometheus_adapter_spec.rb b/spec/models/concerns/prometheus_adapter_spec.rb
index abb89832174..a3f2e99f3da 100644
--- a/spec/models/concerns/prometheus_adapter_spec.rb
+++ b/spec/models/concerns/prometheus_adapter_spec.rb
@@ -18,6 +18,29 @@ RSpec.describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
let(:environment_query) { Gitlab::Prometheus::Queries::EnvironmentQuery }
describe '#query' do
+ describe 'validate_query' do
+ let(:environment) { build_stubbed(:environment, slug: 'env-slug') }
+ let(:validation_query) { Gitlab::Prometheus::Queries::ValidateQuery.name }
+ let(:query) { 'avg(response)' }
+    let(:validation_response) { { data: { valid: true } } }
+
+    around do |example|
+      freeze_time { example.run }
+    end
+
+    context 'with valid data' do
+      subject { integration.query(:validate, query) }
+
+      before do
+        stub_reactive_cache(integration, validation_response, validation_query, query)
+ end
+
+ it 'returns query data' do
+ is_expected.to eq(query: { valid: true })
+ end
+ end
+ end
+
describe 'environment' do
let(:environment) { build_stubbed(:environment, slug: 'env-slug') }
@@ -38,6 +61,25 @@ RSpec.describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
end
end
+ describe 'matched_metrics' do
+ let(:matched_metrics_query) { Gitlab::Prometheus::Queries::MatchedMetricQuery }
+ let(:prometheus_client) { double(:prometheus_client, label_values: nil) }
+
+ context 'with valid data' do
+ subject { integration.query(:matched_metrics) }
+
+ before do
+ allow(integration).to receive(:prometheus_client).and_return(prometheus_client)
+ synchronous_reactive_cache(integration)
+ end
+
+ it 'returns reactive data' do
+ expect(subject[:success]).to be_truthy
+ expect(subject[:data]).to eq([])
+ end
+ end
+ end
+
describe 'deployment' do
let(:deployment) { build_stubbed(:deployment) }
let(:deployment_query) { Gitlab::Prometheus::Queries::DeploymentQuery }
diff --git a/spec/support/shared_examples/lib/gitlab/bitbucket_import/object_import_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/bitbucket_import/object_import_shared_examples.rb
new file mode 100644
index 00000000000..3dbe43d822f
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/bitbucket_import/object_import_shared_examples.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples Gitlab::BitbucketImport::ObjectImporter do
+ include AfterNextHelpers
+
+ describe '.sidekiq_retries_exhausted' do
+ let(:job) { { 'args' => [1, {}, 'key'], 'jid' => 'jid' } }
+
+ it 'notifies the waiter' do
+ expect(Gitlab::JobWaiter).to receive(:notify).with('key', 'jid')
+
+ described_class.sidekiq_retries_exhausted_block.call(job, StandardError.new)
+ end
+ end
+
+ describe '#perform' do
+ let_it_be(:import_started_project) { create(:project, :import_started) }
+
+    let(:project_id) { import_started_project.id }
+ let(:waiter_key) { 'key' }
+
+ shared_examples 'notifies the waiter' do
+ specify do
+ allow_next(worker.importer_class).to receive(:execute)
+
+ expect(Gitlab::JobWaiter).to receive(:notify).with(waiter_key, anything)
+
+ worker.perform(project_id, {}, waiter_key)
+ end
+ end
+
+ context 'when project does not exist' do
+ let(:project_id) { non_existing_record_id }
+
+ it_behaves_like 'notifies the waiter'
+ end
+
+ context 'when project has import started' do
+ let_it_be(:project) do
+ create(:project, :import_started, import_data_attributes: {
+ data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
+ credentials: { 'token' => 'token' }
+ })
+ end
+
+ let(:project_id) { project.id }
+
+ it 'calls the importer' do
+ expect(Gitlab::BitbucketImport::Logger).to receive(:info).twice
+ expect_next(worker.importer_class, project, kind_of(Hash)).to receive(:execute)
+
+ worker.perform(project_id, {}, waiter_key)
+ end
+
+ it_behaves_like 'notifies the waiter'
+
+ context 'when the importer raises an ActiveRecord::RecordInvalid error' do
+ before do
+ allow_next(worker.importer_class).to receive(:execute).and_raise(ActiveRecord::RecordInvalid)
+ end
+
+ it 'tracks the error' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ worker.perform(project_id, {}, waiter_key)
+ end
+ end
+
+ context 'when the importer raises a StandardError' do
+ before do
+ allow_next(worker.importer_class).to receive(:execute).and_raise(StandardError)
+ end
+
+ it 'tracks the error and raises the error' do
+ expect(Gitlab::Import::ImportFailureService).to receive(:track).once
+
+ expect { worker.perform(project_id, {}, waiter_key) }.to raise_error(StandardError)
+ end
+ end
+ end
+
+ context 'when project import has been cancelled' do
+ let_it_be(:project_id) { create(:project, :import_canceled).id }
+
+ it 'does not call the importer' do
+ expect_next(worker.importer_class).not_to receive(:execute)
+
+ worker.perform(project_id, {}, waiter_key)
+ end
+
+ it_behaves_like 'notifies the waiter'
+ end
+ end
+
+ describe '#importer_class' do
+ it 'does not raise a NotImplementedError' do
+ expect(worker.importer_class).not_to be_nil
+ end
+ end
+end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 612aed8b523..9a94a836d60 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -258,6 +258,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'GeoRepositoryDestroyWorker' => 3,
'Gitlab::BitbucketImport::AdvanceStageWorker' => 3,
'Gitlab::BitbucketImport::Stage::FinishImportWorker' => 3,
+ 'Gitlab::BitbucketImport::Stage::ImportPullRequestsWorker' => 3,
'Gitlab::BitbucketImport::Stage::ImportRepositoryWorker' => 3,
'Gitlab::BitbucketServerImport::AdvanceStageWorker' => 3,
'Gitlab::BitbucketServerImport::Stage::FinishImportWorker' => 3,
diff --git a/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
index 97c38dd429c..16e3a3dc481 100644
--- a/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
+++ b/spec/workers/gitlab/bitbucket_import/advance_stage_worker_spec.rb
@@ -2,11 +2,102 @@
require 'spec_helper'
-RSpec.describe Gitlab::BitbucketImport::AdvanceStageWorker, feature_category: :importers do
+RSpec.describe Gitlab::BitbucketImport::AdvanceStageWorker, :clean_gitlab_redis_shared_state, feature_category: :importers do
let(:project) { create(:project) }
let(:import_state) { create(:import_state, project: project, jid: '123') }
let(:worker) { described_class.new }
+ describe '#perform' do
+ context 'when the project no longer exists' do
+ it 'does not perform any work' do
+ expect(worker).not_to receive(:wait_for_jobs)
+
+ worker.perform(-1, { '123' => 2 }, :finish)
+ end
+ end
+
+ context 'when there are remaining jobs' do
+ before do
+ allow(worker)
+ .to receive(:find_import_state)
+ .and_return(import_state)
+ end
+
+ it 'reschedules itself' do
+ expect(worker)
+ .to receive(:wait_for_jobs)
+ .with({ '123' => 2 })
+ .and_return({ '123' => 1 })
+
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(described_class::INTERVAL, project.id, { '123' => 1 }, :finish)
+
+ worker.perform(project.id, { '123' => 2 }, :finish)
+ end
+ end
+
+ context 'when there are no remaining jobs' do
+ before do
+ allow(worker)
+ .to receive(:find_import_state)
+ .and_return(import_state)
+
+ allow(worker)
+ .to receive(:wait_for_jobs)
+ .with({ '123' => 2 })
+ .and_return({})
+ end
+
+ it 'schedules the next stage' do
+ expect(import_state)
+ .to receive(:refresh_jid_expiration)
+
+ expect(Gitlab::BitbucketImport::Stage::FinishImportWorker)
+ .to receive(:perform_async)
+ .with(project.id)
+
+ worker.perform(project.id, { '123' => 2 }, :finish)
+ end
+
+ it 'raises KeyError when the stage name is invalid' do
+ expect { worker.perform(project.id, { '123' => 2 }, :kittens) }
+ .to raise_error(KeyError)
+ end
+ end
+ end
+
+ describe '#wait_for_jobs' do
+ it 'waits for jobs to complete and returns a new pair of keys to wait for' do
+ waiter1 = instance_double(Gitlab::JobWaiter, jobs_remaining: 1, key: '123')
+ waiter2 = instance_double(Gitlab::JobWaiter, jobs_remaining: 0, key: '456')
+
+ expect(Gitlab::JobWaiter)
+ .to receive(:new)
+ .ordered
+ .with(2, '123')
+ .and_return(waiter1)
+
+ expect(Gitlab::JobWaiter)
+ .to receive(:new)
+ .ordered
+ .with(1, '456')
+ .and_return(waiter2)
+
+ expect(waiter1)
+ .to receive(:wait)
+ .with(described_class::BLOCKING_WAIT_TIME)
+
+ expect(waiter2)
+ .to receive(:wait)
+ .with(described_class::BLOCKING_WAIT_TIME)
+
+ new_waiters = worker.wait_for_jobs({ '123' => 2, '456' => 1 })
+
+ expect(new_waiters).to eq({ '123' => 1 })
+ end
+ end
+
describe '#find_import_state' do
it 'returns a ProjectImportState' do
import_state.update_column(:status, 'started')
diff --git a/spec/workers/gitlab/bitbucket_import/import_pull_request_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/import_pull_request_worker_spec.rb
new file mode 100644
index 00000000000..082499be515
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/import_pull_request_worker_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::ImportPullRequestWorker, feature_category: :importers do
+ subject(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketImport::ObjectImporter
+end
diff --git a/spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker_spec.rb
new file mode 100644
index 00000000000..8f425066160
--- /dev/null
+++ b/spec/workers/gitlab/bitbucket_import/stage/import_pull_requests_worker_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketImport::Stage::ImportPullRequestsWorker, feature_category: :importers do
+ let_it_be(:project) { create(:project, :import_started) }
+
+ subject(:worker) { described_class.new }
+
+ it_behaves_like Gitlab::BitbucketImport::StageMethods
+
+ describe '#perform' do
+ context 'when the import succeeds' do
+ before do
+ allow_next_instance_of(Gitlab::BitbucketImport::Importers::PullRequestsImporter) do |importer|
+ allow(importer).to receive(:execute).and_return(Gitlab::JobWaiter.new(2, '123'))
+ end
+ end
+
+ it 'schedules the next stage' do
+ expect(Gitlab::BitbucketImport::AdvanceStageWorker).to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :finish)
+
+ worker.perform(project.id)
+ end
+
+ it 'logs stage start and finish' do
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(hash_including(message: 'starting stage', project_id: project.id))
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info).with(hash_including(message: 'stage finished', project_id: project.id))
+
+ worker.perform(project.id)
+ end
+ end
+
+    context 'when project does not exist' do
+ it 'does not call the importer' do
+ expect(Gitlab::BitbucketImport::Importers::PullRequestsImporter).not_to receive(:new)
+
+ worker.perform(-1)
+ end
+ end
+
+ context 'when project import state is not `started`' do
+ it 'does not call the importer' do
+ project = create(:project, :import_canceled)
+
+ expect(Gitlab::BitbucketImport::Importers::PullRequestsImporter).not_to receive(:new)
+
+ worker.perform(project.id)
+ end
+ end
+
+ context 'when the importer fails' do
+ it 'does not schedule the next stage and raises error' do
+ exception = StandardError.new('Error')
+
+ allow_next_instance_of(Gitlab::BitbucketImport::Importers::PullRequestsImporter) do |importer|
+ allow(importer).to receive(:execute).and_raise(exception)
+ end
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track).with(
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name,
+ fail_import: false
+ ).and_call_original
+
+ expect { worker.perform(project.id) }
+ .to change { Gitlab::BitbucketImport::AdvanceStageWorker.jobs.size }.by(0)
+ .and raise_error(exception)
+ end
+ end
+ end
+end
diff --git a/spec/workers/gitlab/bitbucket_import/stage/import_repository_worker_spec.rb b/spec/workers/gitlab/bitbucket_import/stage/import_repository_worker_spec.rb
index 164542142c1..2234a49d66c 100644
--- a/spec/workers/gitlab/bitbucket_import/stage/import_repository_worker_spec.rb
+++ b/spec/workers/gitlab/bitbucket_import/stage/import_repository_worker_spec.rb
@@ -9,11 +9,11 @@ RSpec.describe Gitlab::BitbucketImport::Stage::ImportRepositoryWorker, feature_c
it_behaves_like Gitlab::BitbucketImport::StageMethods
- it 'executes the importer and enqueues FinishImportWorker' do
+ it 'executes the importer and enqueues ImportPullRequestsWorker' do
expect(Gitlab::BitbucketImport::Importers::RepositoryImporter).to receive_message_chain(:new, :execute)
.and_return(true)
- expect(Gitlab::BitbucketImport::Stage::FinishImportWorker).to receive(:perform_async).with(project.id)
+ expect(Gitlab::BitbucketImport::Stage::ImportPullRequestsWorker).to receive(:perform_async).with(project.id)
.and_return(true).once
worker.perform(project.id)