Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-04-28 06:09:31 +00:00
parent a892b70e5d
commit 64a5bf8e84
35 changed files with 191 additions and 920 deletions

View File

@ -602,6 +602,7 @@ Metrics are gathered by `Geo::MetricsUpdateWorker`, persisted in `GeoNodeStatus`
| `geo_cool_widgets_verified` | Gauge | XX.Y | Number of Cool Widgets successfully verified on secondary | `url` |
| `geo_cool_widgets_verification_failed` | Gauge | XX.Y | Number of Cool Widgets that failed verification on secondary | `url` |
```
- [ ] Run the rake task `geo:dev:ssf_metrics` and commit the changes to `ee/config/metrics/object_schemas/geo_node_usage.json`
Cool Widget replication and verification metrics should now be available in the API, the `Admin > Geo > Sites` view, and Prometheus.

View File

@ -570,6 +570,7 @@ Metrics are gathered by `Geo::MetricsUpdateWorker`, persisted in `GeoNodeStatus`
| `geo_cool_widgets_verified` | Gauge | XX.Y | Number of Cool Widgets successfully verified on secondary | `url` |
| `geo_cool_widgets_verification_failed` | Gauge | XX.Y | Number of Cool Widgets that failed verification on secondary | `url` |
```
- [ ] Run the rake task `geo:dev:ssf_metrics` and commit the changes to `ee/config/metrics/object_schemas/geo_node_usage.json`
Cool Widget replication and verification metrics should now be available in the API, the `Admin > Geo > Sites` view, and Prometheus.

View File

@ -546,7 +546,6 @@ Gitlab/StrongMemoizeAttr:
- 'lib/gitlab/auth/otp/strategies/forti_token_cloud.rb'
- 'lib/gitlab/auth/request_authenticator.rb'
- 'lib/gitlab/background_migration/legacy_upload_mover.rb'
- 'lib/gitlab/bare_repository_import/repository.rb'
- 'lib/gitlab/blob_helper.rb'
- 'lib/gitlab/cache/ci/project_pipeline_status.rb'
- 'lib/gitlab/chat/command.rb'

View File

@ -1633,7 +1633,6 @@ Layout/ArgumentAlignment:
- 'lib/gitlab/background_migration/fix_incoherent_packages_size_on_project_statistics.rb'
- 'lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings.rb'
- 'lib/gitlab/background_migration/reset_status_on_container_repositories.rb'
- 'lib/gitlab/bare_repository_import/importer.rb'
- 'lib/gitlab/bitbucket_server_import/importer.rb'
- 'lib/gitlab/chat/command.rb'
- 'lib/gitlab/ci/ansi2json/line.rb'
@ -2072,7 +2071,6 @@ Layout/ArgumentAlignment:
- 'spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb'
- 'spec/lib/gitlab/background_migration/update_delayed_project_removal_to_null_for_user_namespaces_spec.rb'
- 'spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb'
- 'spec/lib/gitlab/bare_repository_import/importer_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/project_creator_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/wiki_formatter_spec.rb'

View File

@ -3882,7 +3882,6 @@ Layout/LineLength:
- 'spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb'
- 'spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb'
- 'spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb'
- 'spec/lib/gitlab/bare_repository_import/importer_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- 'spec/lib/gitlab/bitbucket_server_import/importer_spec.rb'
- 'spec/lib/gitlab/buffered_io_spec.rb'

View File

@ -35,7 +35,6 @@ Layout/SpaceInLambdaLiteral:
- 'app/models/concerns/issuable.rb'
- 'app/models/concerns/mentionable.rb'
- 'app/models/concerns/milestoneable.rb'
- 'app/models/concerns/protected_ref_access.rb'
- 'app/models/concerns/reactive_caching.rb'
- 'app/models/concerns/shardable.rb'
- 'app/models/concerns/timebox.rb'

View File

@ -1656,8 +1656,6 @@ RSpec/ContextWording:
- 'spec/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex_spec.rb'
- 'spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb'
- 'spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb'
- 'spec/lib/gitlab/bare_repository_import/importer_spec.rb'
- 'spec/lib/gitlab/bare_repository_import/repository_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- 'spec/lib/gitlab/bitbucket_server_import/importer_spec.rb'
- 'spec/lib/gitlab/blame_spec.rb'

View File

@ -3160,8 +3160,6 @@ RSpec/MissingFeatureCategory:
- 'spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb'
- 'spec/lib/gitlab/background_migration_spec.rb'
- 'spec/lib/gitlab/backtrace_cleaner_spec.rb'
- 'spec/lib/gitlab/bare_repository_import/importer_spec.rb'
- 'spec/lib/gitlab/bare_repository_import/repository_spec.rb'
- 'spec/lib/gitlab/batch_worker_context_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/project_creator_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/wiki_formatter_spec.rb'

View File

@ -746,8 +746,6 @@ Style/IfUnlessModifier:
- 'lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata.rb'
- 'lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb'
- 'lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects.rb'
- 'lib/gitlab/bare_repository_import/importer.rb'
- 'lib/gitlab/bare_repository_import/repository.rb'
- 'lib/gitlab/bullet/exclusions.rb'
- 'lib/gitlab/cache/ci/project_pipeline_status.rb'
- 'lib/gitlab/changelog/config.rb'

View File

@ -456,7 +456,6 @@ Style/PercentLiteralDelimiters:
- 'lib/gitlab/auth/ldap/adapter.rb'
- 'lib/gitlab/auth/ldap/config.rb'
- 'lib/gitlab/background_migration/backfill_note_discussion_id.rb'
- 'lib/gitlab/bare_repository_import/importer.rb'
- 'lib/gitlab/ci/ansi2html.rb'
- 'lib/gitlab/ci/config/entry/bridge.rb'
- 'lib/gitlab/ci/reports/codequality_reports.rb'
@ -704,7 +703,6 @@ Style/PercentLiteralDelimiters:
- 'spec/lib/gitlab/auth/saml/user_spec.rb'
- 'spec/lib/gitlab/background_migration/batched_migration_job_spec.rb'
- 'spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb'
- 'spec/lib/gitlab/bare_repository_import/importer_spec.rb'
- 'spec/lib/gitlab/batch_worker_context_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- 'spec/lib/gitlab/cache_spec.rb'

View File

@ -90,7 +90,6 @@ Style/StringConcatenation:
- 'lib/feature/definition.rb'
- 'lib/gitlab/auth/ldap/dn.rb'
- 'lib/gitlab/background_migration/backfill_project_repositories.rb'
- 'lib/gitlab/bare_repository_import/importer.rb'
- 'lib/gitlab/ci/build/artifacts/metadata/entry.rb'
- 'lib/gitlab/ci/status/build/retried.rb'
- 'lib/gitlab/config/entry/validators.rb'
@ -182,7 +181,6 @@ Style/StringConcatenation:
- 'spec/lib/container_registry/tag_spec.rb'
- 'spec/lib/gitlab/auth/o_auth/user_spec.rb'
- 'spec/lib/gitlab/auth/otp/strategies/forti_token_cloud_spec.rb'
- 'spec/lib/gitlab/bare_repository_import/importer_spec.rb'
- 'spec/lib/gitlab/ci/build/artifacts/metadata_spec.rb'
- 'spec/lib/gitlab/ci/config/entry/artifacts_spec.rb'
- 'spec/lib/gitlab/ci/config/external/mapper_spec.rb'

View File

@ -28,11 +28,7 @@ module ProtectedRefAccess
included do
scope :maintainer, -> { where(access_level: Gitlab::Access::MAINTAINER) }
scope :developer, -> { where(access_level: Gitlab::Access::DEVELOPER) }
scope :by_user, -> (user) { where(user_id: user) }
scope :by_group, -> (group) { where(group_id: group) }
scope :for_role, -> { where(user_id: nil, group_id: nil) }
scope :for_user, -> { where.not(user_id: nil) }
scope :for_group, -> { where.not(group_id: nil) }
validates :access_level, presence: true, if: :role?, inclusion: { in: allowed_access_levels }
end

View File

@ -1393,7 +1393,7 @@ class Project < ApplicationRecord
end
def import?
external_import? || forked? || gitlab_project_import? || jira_import? || bare_repository_import? || gitlab_project_migration?
external_import? || forked? || gitlab_project_import? || jira_import? || gitlab_project_migration?
end
def external_import?
@ -1404,10 +1404,6 @@ class Project < ApplicationRecord
Gitlab::UrlSanitizer.new(import_url).masked_url
end
def bare_repository_import?
import_type == 'bare_repository'
end
def jira_import?
import_type == 'jira' && latest_jira_import.present?
end

View File

@ -5,7 +5,7 @@ module Projects
include ValidatesClassificationLabel
ImportSourceDisabledError = Class.new(StandardError)
INTERNAL_IMPORT_SOURCES = %w[bare_repository gitlab_custom_project_template gitlab_project_migration].freeze
INTERNAL_IMPORT_SOURCES = %w[gitlab_custom_project_template gitlab_project_migration].freeze
def initialize(user, params)
@current_user = user
@ -299,7 +299,7 @@ module Projects
def import_schedule
if @project.errors.empty?
@project.import_state.schedule if @project.import? && !@project.bare_repository_import? && !@project.gitlab_project_migration?
@project.import_state.schedule if @project.import? && !@project.gitlab_project_migration?
else
fail(error: @project.errors.full_messages.join(', '))
end

View File

@ -1,37 +1,21 @@
- merge_access_levels = protected_branch.merge_access_levels.for_role
- push_access_levels = protected_branch.push_access_levels.for_role
- user_merge_access_levels = protected_branch.merge_access_levels.for_user
- user_push_access_levels = protected_branch.push_access_levels.for_user
- group_merge_access_levels = protected_branch.merge_access_levels.for_group
- group_push_access_levels = protected_branch.push_access_levels.for_group
%td.merge_access_levels-container
= hidden_field_tag "allowed_to_merge_#{protected_branch.id}", merge_access_levels.first&.access_level
= dropdown_tag((merge_access_levels.first&.humanize || 'Select') ,
options: { toggle_class: 'js-allowed-to-merge', dropdown_class: 'dropdown-menu-selectable js-allowed-to-merge-container capitalize-header',
data: { field_name: "allowed_to_merge_#{protected_branch.id}", preselected_items: access_levels_data(merge_access_levels) }})
- if user_merge_access_levels.any?
%p.small
= _('The following %{user} can also merge into this branch: %{branch}') % { user: 'user'.pluralize(user_merge_access_levels.size), branch: user_merge_access_levels.map(&:humanize).to_sentence }
- if group_merge_access_levels.any?
%p.small
= _('Members of %{group} can also merge into this branch: %{branch}') % { group: (group_merge_access_levels.size > 1 ? 'these groups' : 'this group'), branch: group_merge_access_levels.map(&:humanize).to_sentence }
= render_if_exists 'protected_branches/shared/user_merge_access_levels', protected_branch: protected_branch
= render_if_exists 'protected_branches/shared/group_merge_access_levels', protected_branch: protected_branch
%td.push_access_levels-container
= hidden_field_tag "allowed_to_push_#{protected_branch.id}", push_access_levels.first&.access_level
= dropdown_tag((push_access_levels.first&.humanize || 'Select') ,
options: { toggle_class: "js-allowed-to-push js-multiselect", dropdown_class: 'dropdown-menu-selectable js-allowed-to-push-container capitalize-header',
data: { field_name: "allowed_to_push_#{protected_branch.id}", preselected_items: access_levels_data(push_access_levels) }})
- if user_push_access_levels.any?
%p.small
= _('The following %{user} can also push to this branch: %{branch}') % { user: 'user'.pluralize(user_push_access_levels.size), branch: user_push_access_levels.map(&:humanize).to_sentence }
- if group_push_access_levels.any?
%p.small
= _('Members of %{group} can also push to this branch: %{branch}') % { group: (group_push_access_levels.size > 1 ? 'these groups' : 'this group'), branch: group_push_access_levels.map(&:humanize).to_sentence }
= render_if_exists 'protected_branches/shared/user_push_access_levels', protected_branch: protected_branch
= render_if_exists 'protected_branches/shared/group_push_access_levels', protected_branch: protected_branch
%td
= render Pajamas::ToggleComponent.new(classes: 'js-force-push-toggle',

View File

@ -12,6 +12,8 @@ class RemoveClustersApplicationsJobInstances < Gitlab::Database::Migration[2.0]
ClusterWaitForIngressIpAddressWorker
]
disable_ddl_transaction!
def up
sidekiq_remove_jobs(job_klasses: DEPRECATED_JOB_CLASSES)
end

View File

@ -226,3 +226,8 @@ For example in [`spec/tooling/danger/specs_spec.rb`](https://gitlab.com/gitlab-o
For features that support developers and are not specific to a product group, we use `feature_category: :shared`.
For example [`spec/lib/gitlab/job_waiter_spec.rb`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/spec/lib/gitlab/job_waiter_spec.rb)
### Admin section
Adding feature categories is equally important when adding new parts to the Admin section. Historically, Admin sections were often marked as `not_owned` in the code. Now, you must ensure that each new addition to the Admin section is properly annotated using the `feature_category` notation.

View File

@ -36,7 +36,6 @@ Each event is defined in a separate YAML file consisting of the following fields
| `product_section` | yes | The [section](https://gitlab.com/gitlab-com/www-gitlab-com/-/blob/master/data/sections.yml). |
| `product_stage` | no | The [stage](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/stages.yml) for the event. |
| `product_group` | yes | The [group](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/stages.yml) that owns the event. |
| `product_category` | no | The [product category](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/categories.yml) for the event. |
| `milestone` | no | The milestone when the event is introduced. |
| `introduced_by_url` | no | The URL to the merge request that introduced the event. |
| `distributions` | yes | The [distributions](https://about.gitlab.com/handbook/marketing/brand-and-product-marketing/product-and-solution-marketing/tiers/#definitions) where the tracked feature is available. Can be set to one or more of `ce` or `ee`. |
@ -64,7 +63,6 @@ identifiers:
product_section: dev
product_stage: plan
product_group: group::product planning
product_category: epics
milestone: "11.10"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/10537
distributions:

View File

@ -2,175 +2,10 @@
stage: Systems
group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
remove_date: '2023-05-22'
---
# Import bare repositories (deprecated) **(FREE SELF)**
# Import bare repositories (removed) **(FREE SELF)**
WARNING:
The Rake task for importing bare repositories was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108507)
in GitLab 15.8 and will be removed in GitLab 16.0.
Alternatives to using the `gitlab:import:repos` Rake task include:
- Migrating projects using either [an export file](../user/project/settings/import_export.md) or [direct transfer](../user/group/import/index.md#migrate-groups-by-direct-transfer-recommended) migrate repositories as well.
- Importing a [repository by URL](../user/project/import/repo_by_url.md).
- Importing a [repositories from a non-GitLab source](../user/project/import/index.md).
Rake tasks are available to import bare repositories into a GitLab instance.
When migrating from an existing GitLab instance, and to preserve ownership by users and their namespaces,
migrate projects using either:
- [Direct transfer](../user/group/import/index.md#migrate-groups-by-direct-transfer-recommended).
- [An export file](../user/project/settings/import_export.md).
When you import a repository:
- The owner of the project is the first administrator.
- The groups are created as needed, including subgroups.
- The owner of the group is the first administrator.
- Existing projects are skipped.
- Projects in hashed storage may be skipped. For more information, see
[Importing bare repositories from hashed storage](#importing-bare-repositories-from-hashed-storage).
- The existing Git repositories are moved from disk (removed from the original path).
- You must manually [push Git LFS objects](#push-git-lfs-objects).
To import bare repositories into a GitLab instance:
1. Create a new folder to import your Git repositories from.
You can also import projects into a (sub)group's namespace,
instead of the administrator's namespace. To do so, create subfolders and
give ownership and read/write/execute permissions of those subfolders to the
`git` user and its group:
```shell
sudo -u git mkdir -p /var/opt/gitlab/git-data/repository-import-$(date "+%Y-%m-%d")/<optional_groupname>/<optional_subgroup>
```
1. Copy your bare repositories inside this newly created folder. Note:
- Any `.git` repositories found on any of the subfolders are imported as projects.
- Groups are created as needed, these could be nested folders.
For example, if we copy the repositories to `/var/opt/gitlab/git-data/repository-import-2020-08-22`,
and repository `A` must be under the groups `G1` and `G2`, it must be created under those folders:
`/var/opt/gitlab/git-data/repository-import-2020-08-22/G1/G2/A.git`.
```shell
sudo cp -r /old/git/foo.git /var/opt/gitlab/git-data/repository-import-$(date "+%Y-%m-%d")/<optional_groupname>/<optional_subgroup>
# Do this once when you are done copying git repositories
sudo chown -R git:git /var/opt/gitlab/git-data/repository-import-$(date "+%Y-%m-%d")
```
`foo.git` must be owned by the `git` user and `git` users group.
If you are using an installation from source, replace `/var/opt/gitlab/` with `/home/git`.
1. Run the following command depending on your type of installation:
- Omnibus Installation
```shell
sudo gitlab-rake gitlab:import:repos["/var/opt/gitlab/git-data/repository-import-$(date "+%Y-%m-%d")"]
```
- Installation from source. Before running this command you must change to the directory where
your GitLab installation is located:
```shell
cd /home/git/gitlab
sudo -u git -H bundle exec rake gitlab:import:repos["/var/opt/gitlab/git-data/repository-import-$(date "+%Y-%m-%d")"] RAILS_ENV=production
```
## Example output
```plaintext
Processing /var/opt/gitlab/git-data/repository-import-1/a/b/c/blah.git
* Using namespace: a/b/c
* Created blah (a/b/c/blah)
* Skipping repo /var/opt/gitlab/git-data/repository-import-1/a/b/c/blah.wiki.git
Processing /var/opt/gitlab/git-data/repository-import-1/abcd.git
* Created abcd (abcd.git)
Processing /var/opt/gitlab/git-data/repository-import-1/group/xyz.git
* Using namespace: group (2)
* Created xyz (group/xyz.git)
* Skipping repo /var/opt/gitlab/git-data/repository-import-1/@shared/a/b/abcd.git
[...]
```
## Importing bare repositories from hashed storage
Projects in legacy storage have a directory structure that mirrors their full
project path in GitLab, including their namespace structure. This information is
leveraged by the bare repository importer to import projects into their proper
locations. Each project and its parent namespaces are meaningfully named.
However, the directory structure of projects in hashed storage do not contain
this information. This is beneficial for a variety of reasons, especially
improved performance and data integrity. See
[Repository Storage Types](../administration/repository_storage_types.md) for
more details.
The repositories that are importable depends on the version of GitLab.
### GitLab 10.3 or earlier
Importing bare repositories from hashed storage is unsupported.
### GitLab 10.4 and later
To support importing bare repositories from hashed storage, GitLab 10.4 and
later stores the full project path with each repository, in a special section of
the Git repository's configuration file. This section is formatted as follows:
```ini
[gitlab]
fullpath = gitlab-org/gitlab
```
However, existing repositories were not migrated to include this path.
Bare repositories are importable if the following events occurred to the
repository in GitLab 10.4 and later:
- Created
- Migrated to hashed storage
- Renamed
- Transferred to another namespace
- Ancestor renamed
- Ancestor transferred to another namespace
Bare repositories are **not** importable by GitLab 10.4 to GitLab 11.6, if all the following are true about the repository:
- It was created in GitLab 10.3 or earlier.
- It was not renamed, transferred, or migrated to [hashed storage](../administration/repository_storage_types.md#hashed-storage) in GitLab 10.4 to GitLab 11.6.
- Its ancestor namespaces were not renamed or transferred in GitLab 10.4 to GitLab 11.6.
[In GitLab 11.6](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/41776) and later, all
bare repositories are importable.
To manually migrate repositories yourself (for GitLab 10.4 to GitLab 11.6), you can use the
[Rails console](../administration/operations/rails_console.md#starting-a-rails-console-session)
to do so. In a Rails console session, run the following to migrate a project:
```ruby
project = Project.find_by_full_path('gitlab-org/gitlab')
project.set_full_path
```
In a Rails console session, run the following to migrate all of a namespace's
projects (this may take a while if there are thousands of projects in a namespace):
```ruby
namespace = Namespace.find_by_full_path('gitlab-org')
namespace.send(:write_projects_repository_config)
```
## Push Git LFS objects
The import task doesn't import Git LFS objects. You must manually push the LFS objects to the newly
created GitLab repository using the following command:
```shell
git lfs push --all
```
in GitLab 15.8 and [removed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/118676) in GitLab 16.0.

View File

@ -116,7 +116,7 @@ continue to accept connections but mark their respective health check
endpoints to be unhealthy. On seeing this, the load balancer should disconnect
them gracefully.
Puma restarts only after completing all the currently processing requests.
Puma restarts only after completing all the currently-processing requests.
This ensures data and service integrity. Once they have restarted, the health
check end points are marked healthy.
@ -180,6 +180,9 @@ Before you update the main GitLab application you must (in order):
1. Upgrade the Gitaly nodes that reside on separate servers.
1. Upgrade Praefect if using Gitaly Cluster.
Because of a [known issue](https://gitlab.com/groups/gitlab-org/-/epics/10328), Gitaly and Gitaly Cluster upgrades
cause some downtime.
#### Upgrade Gitaly nodes
[Upgrade the GitLab package](package/index.md#upgrade-to-a-specific-version-using-the-official-repositories) on the Gitaly nodes one at a time to ensure access to Git repositories is maintained.
@ -465,7 +468,8 @@ Log in to your **primary** node, executing the following:
sudo SKIP_POST_DEPLOYMENT_MIGRATIONS=true gitlab-rake db:migrate
```
1. Copy the `/etc/gitlab/gitlab-secrets.json` file from the primary site to the secondary site if they're different. The file must be the same on all of a sites nodes.
1. Copy the `/etc/gitlab/gitlab-secrets.json` file from the primary site to the secondary site if they're different.
The file must be the same on all of a site's nodes.
### Update the Geo secondary site
@ -667,7 +671,8 @@ sudo gitlab-ctl hup puma
sudo gitlab-ctl restart sidekiq
```
1. Copy the `/etc/gitlab/gitlab-secrets.json` file from the primary site to the secondary site if they're different. The file must be the same on all of a sites nodes.
1. Copy the `/etc/gitlab/gitlab-secrets.json` file from the primary site to the secondary site if they're different. The
file must be the same on all of a site's nodes.
### Step 3: Update each Geo secondary multi-node deployment

View File

@ -13,7 +13,6 @@ identifiers:
product_section:
product_stage:
product_group:
product_category:
milestone: "<%= milestone %>"
introduced_by_url:
distributions:

View File

@ -1,132 +0,0 @@
# frozen_string_literal: true

module Gitlab
  module BareRepositoryImport
    # Imports bare git repositories found under a directory tree into GitLab
    # projects, creating any missing groups/subgroups along the way.
    #
    # NOTE(review): the import is destructive — each source repository is
    # bundled into the new project and then removed from disk (see #mv_repo).
    class Importer
      # Raised when the instance has no admin user to own the imported projects.
      NoAdminError = Class.new(StandardError)

      # Scans `import_path` recursively for `*.git` directories and imports each
      # processable one as a project owned by the first admin user.
      #
      # import_path - root directory to scan; a trailing slash is appended if missing.
      #
      # Raises NoAdminError when no admin user exists.
      def self.execute(import_path)
        # Normalize to a trailing slash so Repository can compute paths
        # relative to the import root.
        unless import_path.ends_with?('/')
          import_path = "#{import_path}/"
        end

        repos_to_import = Dir.glob(import_path + '**/*.git')

        unless user = User.admins.order_id_asc.first
          raise NoAdminError, 'No admin user found to import repositories'
        end

        repos_to_import.each do |repo_path|
          bare_repo = Gitlab::BareRepositoryImport::Repository.new(import_path, repo_path)

          # Wiki repos and hashed-storage repos without a recoverable full
          # path are skipped (see Repository#processable?).
          unless bare_repo.processable?
            log " * Skipping repo #{bare_repo.repo_path}".color(:yellow)

            next
          end

          log "Processing #{repo_path}".color(:yellow)

          new(user, bare_repo).create_project_if_needed
        end
      end

      # This is called from within a rake task only used by Admins, so allow writing
      # to STDOUT
      def self.log(message)
        puts message # rubocop:disable Rails/Output
      end

      attr_reader :user, :project_name, :bare_repo

      delegate :log, to: :class
      delegate :project_name, :project_full_path, :group_path, :repo_path, :wiki_path, to: :bare_repo

      # user      - User that will own the created projects (an admin).
      # bare_repo - Gitlab::BareRepositoryImport::Repository describing the repo on disk.
      def initialize(user, bare_repo)
        @user = user
        @bare_repo = bare_repo
      end

      # Returns the existing project at the computed full path, or creates one.
      def create_project_if_needed
        if project = Project.find_by_full_path(project_full_path)
          log " * #{project.name} (#{project_full_path}) exists"

          return project
        end

        create_project
      end

      private

      # Creates the project record and moves the on-disk repository into it.
      # Returns the project (possibly unpersisted, with errors) in either case.
      def create_project
        group = find_or_create_groups

        project = Projects::CreateService.new(user,
          name: project_name,
          path: project_name,
          skip_disk_validation: true,
          skip_wiki: bare_repo.wiki_exists?,
          import_type: 'bare_repository',
          namespace_id: group&.id).execute

        if project.persisted? && mv_repositories(project)
          log " * Created #{project.name} (#{project_full_path})".color(:green)

          project.set_full_path

          ProjectCacheWorker.perform_async(project.id)
        else
          log " * Failed trying to create #{project.name} (#{project_full_path})".color(:red)
          log " Errors: #{project.errors.messages}".color(:red) if project.errors.any?
        end

        project
      end

      # Moves the project repository (and the wiki repository when one exists)
      # into the project's storage. Returns true on success, false on failure.
      def mv_repositories(project)
        mv_repo(bare_repo.repo_path, project.repository)

        if bare_repo.wiki_exists?
          mv_repo(bare_repo.wiki_path, project.wiki.repository)
        end

        true
      rescue StandardError => e
        log " * Failed to move repo: #{e.message}".color(:red)

        false
      end

      # Recreates `repository` from a bundle of `path`, then deletes the
      # source directory — this is the step that removes the original repo.
      def mv_repo(path, repository)
        repository.create_from_bundle(bundle(path))
        FileUtils.rm_rf(path)
      end

      # NOTE(review): appears unused within this class — candidate for removal.
      def storage_path_for_shard(shard)
        Gitlab.config.repositories.storages[shard].legacy_disk_path
      end

      # Ensures the (possibly nested) group for `group_path` exists.
      # Returns nil when the project belongs at the root namespace.
      def find_or_create_groups
        return unless group_path.present?

        log " * Using namespace: #{group_path}"

        Groups::NestedCreateService.new(user, group_path: group_path).execute
      end

      # Writes a `#{repo_path}.bundle` file containing all refs of the bare
      # repository and returns its path. Raises the git output on failure.
      def bundle(repo_path)
        # TODO: we could save some time and disk space by using
        # `git bundle create - --all` and streaming the bundle directly to
        # Gitaly, rather than writing it on disk first
        bundle_path = "#{repo_path}.bundle"
        cmd = %W(#{Gitlab.config.git.bin_path} --git-dir=#{repo_path} bundle create #{bundle_path} --all)
        output, status = Gitlab::Popen.popen(cmd)

        raise output unless status == 0

        bundle_path
      end
    end
  end
end

View File

@ -1,72 +0,0 @@
# frozen_string_literal: true

module Gitlab
  module BareRepositoryImport
    # Wraps a bare repository discovered on disk during a bulk import and
    # derives the group path / project name it should be imported under.
    class Repository
      include ::Gitlab::Utils::StrongMemoize

      attr_reader :group_path, :project_name, :repo_path

      # root_path - directory the import scan started from (trailing slash optional)
      # repo_path - absolute path of the discovered `*.git` directory
      def initialize(root_path, repo_path)
        @root_path = root_path.end_with?('/') ? root_path : "#{root_path}/"
        @repo_path = repo_path

        # Hashed-storage repositories record their human-readable path in the
        # git config; legacy repositories encode it in their on-disk location.
        source_path =
          if hashed? && !wiki?
            repository.config.get('gitlab.fullpath')
          else
            repo_relative_path
          end

        # 'all/the/namespaces/project_name' -> ['all/the/namespaces', 'project_name']
        @group_path, _separator, @project_name = source_path.to_s.rpartition('/')
      end

      # True when a sibling `*.wiki.git` repository exists on disk.
      def wiki_exists?
        File.exist?(wiki_path)
      end

      # Path of the companion wiki repository for this project repository.
      def wiki_path
        @wiki_path ||= repo_path.sub(/\.git$/, '.wiki.git')
      end

      def project_full_path
        @project_full_path ||= "#{group_path}/#{project_name}"
      end

      # A repository is importable unless it is a wiki, or it lives in hashed
      # storage without a recorded full path to recover its namespace from.
      def processable?
        return false if wiki?

        !(hashed? && (group_path.blank? || project_name.blank?))
      end

      private

      def wiki?
        strong_memoize(:wiki) do
          repo_path.end_with?('.wiki.git')
        end
      end

      def hashed?
        strong_memoize(:hashed) do
          repo_relative_path.include?('@hashed')
        end
      end

      # Path relative to the import root, minus the trailing '.git'.
      def repo_relative_path
        repo_path[@root_path.size...-4]
      end

      def repository
        @repository ||= Rugged::Repository.new(repo_path)
      end
    end
  end
end

View File

@ -25,9 +25,14 @@ module Gitlab
times_in_a_row: DEFAULT_TIMES_IN_A_ROW,
max_attempts: DEFAULT_MAX_ATTEMPTS
)
kwargs = { times_in_a_row: times_in_a_row, max_attempts: max_attempts }
if transaction_open?
raise 'sidekiq_remove_jobs can not be run inside a transaction, ' \
'you can disable transactions by calling disable_ddl_transaction! ' \
'in the body of your migration class'
end
job_klasses_queues = job_klasses
.select { |job_klass| job_klass.to_s.safe_constantize.present? }
.map { |job_klass| job_klass.safe_constantize.queue }

View File

@ -9,7 +9,6 @@ module Gitlab
module ImportSources
ImportSource = Struct.new(:name, :title, :importer)
# We exclude `bare_repository` here as it has no import class associated
IMPORT_TABLE = [
ImportSource.new('github', 'GitHub', Gitlab::GithubImport::ParallelImporter),
ImportSource.new('bitbucket', 'Bitbucket Cloud', Gitlab::BitbucketImport::Importer),

View File

@ -1,24 +0,0 @@
# frozen_string_literal: true

namespace :gitlab do
  namespace :import do
    # Bulk-imports bare git repositories into GitLab projects.
    #
    # Usage:
    #   1. Copy the bare repos under a path whose folder layout mirrors the
    #      desired group / subgroup structure.
    #   2. Run: bundle exec rake gitlab:import:repos[/path/to/repos] RAILS_ENV=production
    #
    # Notes:
    #   * The first administrator of the instance becomes the project owner.
    #   * Repositories for projects that already exist are skipped.
    desc "GitLab | Import | Import bare repositories from repositories -> storages into GitLab project instance"
    task :repos, [:import_path] => :environment do |_t, args|
      import_path = args.import_path

      if import_path
        Gitlab::BareRepositoryImport::Importer.execute(import_path)
      else
        puts 'Please specify an import path that contains the repositories'.color(:red)

        exit 1
      end
    end
  end
end

View File

@ -5,9 +5,6 @@ ALLOWED = [
# https://gitlab.com/gitlab-org/gitaly/issues/760
'lib/elasticsearch/git/repository.rb',
# Needed to handle repositories that are not in any storage
'lib/gitlab/bare_repository_import/repository.rb',
# Needed to avoid using the git binary to validate a branch name
'lib/gitlab/git_ref_validator.rb',

View File

@ -13,7 +13,6 @@ identifiers:
product_section:
product_stage:
product_group:
product_category:
milestone: "13.11"
introduced_by_url:
distributions:

View File

@ -13,7 +13,6 @@ identifiers:
product_section:
product_stage:
product_group:
product_category:
milestone: "13.11"
introduced_by_url:
distributions:

View File

@ -1,196 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

# Specs for Gitlab::BareRepositoryImport::Importer, which creates GitLab
# projects for bare Git repositories found on disk under a base directory.
RSpec.describe Gitlab::BareRepositoryImport::Importer do
  let!(:admin) { create(:admin) }
  # Temporary root directory holding the bare repositories; removed in `after`.
  let!(:base_dir) { Dir.mktmpdir + '/' }
  let(:bare_repository) { Gitlab::BareRepositoryImport::Repository.new(base_dir, File.join(base_dir, "#{project_path}.git")) }
  let(:gitlab_shell) { Gitlab::Shell.new }
  let(:source_project) { TestEnv.factory_repo_bundle_path }

  subject(:importer) { described_class.new(admin, bare_repository) }

  before do
    # Silence importer logging during the examples.
    allow(described_class).to receive(:log)
  end

  after do
    FileUtils.rm_rf(base_dir)
  end

  # Shared behavior exercised for several project-path layouts
  # (with/without groups and subgroups, with a wiki).
  shared_examples 'importing a repository' do
    describe '.execute' do
      it 'creates a project for a repository in storage' do
        FileUtils.mkdir_p(File.join(base_dir, "#{project_path}.git"))
        fake_importer = double

        expect(described_class).to receive(:new).and_return(fake_importer)
        expect(fake_importer).to receive(:create_project_if_needed)

        described_class.execute(base_dir)
      end

      it 'skips wiki repos' do
        repo_dir = File.join(base_dir, 'the-group', 'the-project.wiki.git')
        FileUtils.mkdir_p(File.join(repo_dir))

        expect(described_class).to receive(:log).with(" * Skipping repo #{repo_dir}")
        expect(described_class).not_to receive(:new)

        described_class.execute(base_dir)
      end

      context 'without admin users' do
        let(:admin) { nil }

        it 'raises an error' do
          expect { described_class.execute(base_dir) }.to raise_error(Gitlab::BareRepositoryImport::Importer::NoAdminError)
        end
      end
    end

    describe '#create_project_if_needed' do
      it 'starts an import for a project that did not exist' do
        expect(importer).to receive(:create_project)

        importer.create_project_if_needed
      end

      it 'skips importing when the project already exists' do
        project = create(:project, path: 'a-project', namespace: existing_group)

        expect(importer).not_to receive(:create_project)
        expect(importer).to receive(:log).with(" * #{project.name} (#{project_path}) exists")

        importer.create_project_if_needed
      end

      it 'creates a project with the correct path in the database' do
        importer.create_project_if_needed

        expect(Project.find_by_full_path(project_path)).not_to be_nil
      end

      it 'does not schedule an import' do
        project = Project.find_by_full_path(project_path)
        expect(project).not_to receive(:import_schedule)

        importer.create_project_if_needed
      end

      it 'creates the Git repo on disk' do
        prepare_repository("#{project_path}.git", source_project)

        importer.create_project_if_needed

        project = Project.find_by_full_path(project_path)
        repo_path = "#{project.disk_path}.git"

        expect(gitlab_shell.repository_exists?(project.repository_storage, repo_path)).to be(true)
      end

      context 'hashed storage enabled' do
        it 'creates a project with the correct path in the database' do
          stub_application_setting(hashed_storage_enabled: true)

          importer.create_project_if_needed

          expect(Project.find_by_full_path(project_path)).not_to be_nil
        end
      end
    end
  end

  context 'with subgroups' do
    let(:project_path) { 'a-group/a-sub-group/a-project' }

    let(:existing_group) do
      group = create(:group, path: 'a-group')
      create(:group, path: 'a-sub-group', parent: group)
    end

    it_behaves_like 'importing a repository'
  end

  context 'without subgroups' do
    let(:project_path) { 'a-group/a-project' }
    let(:existing_group) { create(:group, path: 'a-group') }

    it_behaves_like 'importing a repository'
  end

  context 'without groups' do
    let(:project_path) { 'a-project' }

    it 'starts an import for a project that did not exist' do
      expect(importer).to receive(:create_project)

      importer.create_project_if_needed
    end

    it 'creates a project with the correct path in the database' do
      importer.create_project_if_needed

      # With no group in the path the project lands under the admin's namespace.
      expect(Project.find_by_full_path("#{admin.full_path}/#{project_path}")).not_to be_nil
    end

    it 'creates the Git repo in disk' do
      prepare_repository("#{project_path}.git", source_project)

      importer.create_project_if_needed

      project = Project.find_by_full_path("#{admin.full_path}/#{project_path}")

      expect(gitlab_shell.repository_exists?(project.repository_storage, project.disk_path + '.git')).to be(true)
      expect(gitlab_shell.repository_exists?(project.repository_storage, project.disk_path + '.wiki.git')).to be(true)
    end

    context 'with a repository already on disk' do
      # This is a quick way to get a valid repository instead of copying an
      # existing one. Since it's not persisted, the importer will try to
      # create the project.
      let(:project) { build(:project, :legacy_storage, :repository) }
      let(:project_path) { project.full_path }

      it 'moves an existing project to the correct path' do
        original_commit_count = project.repository.commit_count

        expect(importer).to receive(:create_project).and_call_original

        new_project = importer.create_project_if_needed

        expect(new_project.repository.commit_count).to eq(original_commit_count)
      end
    end
  end

  context 'with Wiki' do
    let(:project_path) { 'a-group/a-project' }
    let(:existing_group) { create(:group, path: 'a-group') }

    it_behaves_like 'importing a repository'

    it 'creates the Wiki git repo in disk' do
      prepare_repository("#{project_path}.git", source_project)
      prepare_repository("#{project_path}.wiki.git", source_project)

      # The wiki repo already exists on disk, so project creation must skip
      # creating a fresh wiki.
      expect(Projects::CreateService).to receive(:new).with(admin, hash_including(skip_wiki: true,
        import_type: 'bare_repository')).and_call_original

      importer.create_project_if_needed

      project = Project.find_by_full_path(project_path)

      expect(gitlab_shell.repository_exists?(project.repository_storage, project.disk_path + '.wiki.git')).to be(true)
    end
  end

  # Clones `source_project` as a bare repository into `base_dir` at
  # `project_path`, discarding git's output.
  def prepare_repository(project_path, source_project)
    repo_path = File.join(base_dir, project_path)

    cmd = %W(#{Gitlab.config.git.bin_path} clone --bare #{source_project} #{repo_path})
    # NOTE(review): `git_env` is presumably provided by a test helper — confirm.
    system(git_env, *cmd, chdir: base_dir, out: '/dev/null', err: '/dev/null')
  end
end

View File

@ -1,123 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

# Specs for Gitlab::BareRepositoryImport::Repository, which derives the group
# path, project name and wiki path from a bare repository's location on disk.
RSpec.describe ::Gitlab::BareRepositoryImport::Repository do
  context 'legacy storage' do
    # Legacy storage: the full project path is encoded in the directory layout.
    subject { described_class.new('/full/path/', '/full/path/to/repo.git') }

    it 'stores the repo path' do
      expect(subject.repo_path).to eq('/full/path/to/repo.git')
    end

    it 'stores the group path' do
      expect(subject.group_path).to eq('to')
    end

    it 'stores the project name' do
      expect(subject.project_name).to eq('repo')
    end

    it 'stores the wiki path' do
      expect(subject.wiki_path).to eq('/full/path/to/repo.wiki.git')
    end

    describe '#processable?' do
      it 'returns false if it is a wiki' do
        subject = described_class.new('/full/path/', '/full/path/to/a/b/my.wiki.git')

        expect(subject).not_to be_processable
      end

      it 'returns true if group path is missing' do
        subject = described_class.new('/full/path/', '/full/path/repo.git')

        expect(subject).to be_processable
      end

      it 'returns true when group path and project name are present' do
        expect(subject).to be_processable
      end
    end

    describe '#project_full_path' do
      it 'returns the project full path with trailing slash in the root path' do
        expect(subject.project_full_path).to eq('to/repo')
      end

      it 'returns the project full path with no trailing slash in the root path' do
        subject = described_class.new('/full/path', '/full/path/to/repo.git')

        expect(subject.project_full_path).to eq('to/repo')
      end
    end
  end

  context 'hashed storage' do
    # Hashed storage: the on-disk name is a hash, so the full path has to be
    # read back from the repository's .git/config instead of the directory name.
    let(:hashed_path) { "@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b" }
    let(:root_path) { Gitlab::GitalyClient::StorageSettings.allow_disk_access { TestEnv.repos_path } }
    let(:repo_path) { File.join(root_path, "#{hashed_path}.git") }
    let(:wiki_path) { File.join(root_path, "#{hashed_path}.wiki.git") }
    let(:raw_repository) { Gitlab::Git::Repository.new('default', "#{hashed_path}.git", nil, nil) }
    let(:full_path) { 'to/repo' }

    before do
      # Create a real repository and record its full path in .git/config
      # (skipped when `full_path` is nil to cover the missing-path case).
      raw_repository.create_repository
      raw_repository.set_full_path(full_path: full_path) if full_path
    end

    after do
      raw_repository.remove
    end

    subject { described_class.new(root_path, repo_path) }

    it 'stores the repo path' do
      expect(subject.repo_path).to eq(repo_path)
    end

    it 'stores the wiki path' do
      expect(subject.wiki_path).to eq(wiki_path)
    end

    it 'reads the group path from .git/config' do
      expect(subject.group_path).to eq('to')
    end

    it 'reads the project name from .git/config' do
      expect(subject.project_name).to eq('repo')
    end

    describe '#processable?' do
      it 'returns false if it is a wiki' do
        subject = described_class.new(root_path, wiki_path)

        expect(subject).not_to be_processable
      end

      it 'returns true when group path and project name are present' do
        expect(subject).to be_processable
      end

      context 'group and project name are missing' do
        let(:full_path) { nil }

        it 'returns false' do
          expect(subject).not_to be_processable
        end
      end
    end

    describe '#project_full_path' do
      it 'returns the project full path with trailing slash in the root path' do
        expect(subject.project_full_path).to eq('to/repo')
      end

      it 'returns the project full path with no trailing slash in the root path' do
        subject = described_class.new(root_path[0...-1], repo_path)

        expect(subject.project_full_path).to eq('to/repo')
      end
    end
  end
end

View File

@ -78,158 +78,174 @@ RSpec.describe Gitlab::Database::Migrations::SidekiqHelpers do
clear_queues
end
context "when the constant is not defined" do
it "doesn't try to delete it" do
my_non_constant = +"SomeThingThatIsNotAConstant"
context 'when inside a transaction' do
it 'raises RuntimeError' do
expect(model).to receive(:transaction_open?).and_return(true)
expect(Sidekiq::Queue).not_to receive(:new).with(any_args)
model.sidekiq_remove_jobs(job_klasses: [my_non_constant])
expect { model.sidekiq_remove_jobs(job_klasses: [worker.name]) }
.to raise_error(RuntimeError)
end
end
context "when the constant is defined" do
it "will use it find job instances to delete" do
my_constant = worker.name
expect(Sidekiq::Queue)
.to receive(:new)
.with(worker.queue)
.and_call_original
model.sidekiq_remove_jobs(job_klasses: [my_constant])
end
end
it "removes all related job instances from the job classes' queues" do
worker.perform_async
worker_two.perform_async
same_queue_different_worker.perform_async
unrelated_worker.perform_async
worker_queue = Sidekiq::Queue.new(worker.queue)
worker_two_queue = Sidekiq::Queue.new(worker_two.queue)
unrelated_queue = Sidekiq::Queue.new(unrelated_worker.queue)
expect(worker_queue.size).to eq(2)
expect(worker_two_queue.size).to eq(1)
expect(unrelated_queue.size).to eq(1)
model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name])
expect(worker_queue.size).to eq(1)
expect(worker_two_queue.size).to eq(0)
expect(worker_queue.map(&:klass)).not_to include(worker.name)
expect(worker_queue.map(&:klass)).to include(
same_queue_different_worker.name
)
expect(worker_two_queue.map(&:klass)).not_to include(worker_two.name)
expect(unrelated_queue.size).to eq(1)
end
context "when job instances are in the scheduled set" do
it "removes all related job instances from the scheduled set" do
worker.perform_in(1.hour)
worker_two.perform_in(1.hour)
unrelated_worker.perform_in(1.hour)
scheduled = Sidekiq::ScheduledSet.new
expect(scheduled.size).to eq(3)
expect(scheduled.map(&:klass)).to include(
worker.name,
worker_two.name,
unrelated_worker.name
)
model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name])
expect(scheduled.size).to eq(1)
expect(scheduled.map(&:klass)).not_to include(worker.name)
expect(scheduled.map(&:klass)).not_to include(worker_two.name)
expect(scheduled.map(&:klass)).to include(unrelated_worker.name)
end
end
context "when job instances are in the retry set" do
include_context "when handling retried jobs"
it "removes all related job instances from the retry set" do
retry_in(worker, 1.hour)
retry_in(worker, 2.hours)
retry_in(worker, 3.hours)
retry_in(worker_two, 4.hours)
retry_in(unrelated_worker, 5.hours)
retries = Sidekiq::RetrySet.new
expect(retries.size).to eq(5)
expect(retries.map(&:klass)).to include(
worker.name,
worker_two.name,
unrelated_worker.name
)
model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name])
expect(retries.size).to eq(1)
expect(retries.map(&:klass)).not_to include(worker.name)
expect(retries.map(&:klass)).not_to include(worker_two.name)
expect(retries.map(&:klass)).to include(unrelated_worker.name)
end
end
# Imitate job deletion returning zero and then non zero.
context "when job fails to be deleted" do
let(:job_double) do
instance_double(
"Sidekiq::JobRecord",
klass: worker.name
)
context 'when outside a transaction' do
before do
allow(model).to receive(:transaction_open?).and_return(false)
allow(model).to receive(:disable_statement_timeout).and_call_original
end
context "and does not work enough times in a row before max attempts" do
it "tries the max attempts without succeeding" do
worker.perform_async
context "when the constant is not defined" do
it "doesn't try to delete it" do
my_non_constant = +"SomeThingThatIsNotAConstant"
allow(job_double).to receive(:delete).and_return(true)
# Scheduled set runs last so only need to stub out its values.
allow(Sidekiq::ScheduledSet)
.to receive(:new)
.and_return([job_double])
expect(model.sidekiq_remove_jobs(job_klasses: [worker.name]))
.to eq(
{
attempts: 5,
success: false
}
)
expect(Sidekiq::Queue).not_to receive(:new).with(any_args)
model.sidekiq_remove_jobs(job_klasses: [my_non_constant])
end
end
context "and then it works enough times in a row before max attempts" do
it "succeeds" do
worker.perform_async
# attempt 1: false will increment the streak once to 1
# attempt 2: true resets it back to 0
# attempt 3: false will increment the streak once to 1
# attempt 4: false will increment the streak once to 2, loop breaks
allow(job_double).to receive(:delete).and_return(false, true, false)
worker.perform_async
# Scheduled set runs last so only need to stub out its values.
allow(Sidekiq::ScheduledSet)
context "when the constant is defined" do
it "will use it find job instances to delete" do
my_constant = worker.name
expect(Sidekiq::Queue)
.to receive(:new)
.and_return([job_double])
.with(worker.queue)
.and_call_original
model.sidekiq_remove_jobs(job_klasses: [my_constant])
end
end
expect(model.sidekiq_remove_jobs(job_klasses: [worker.name]))
.to eq(
{
attempts: 4,
success: true
}
)
it "removes all related job instances from the job classes' queues" do
worker.perform_async
worker_two.perform_async
same_queue_different_worker.perform_async
unrelated_worker.perform_async
worker_queue = Sidekiq::Queue.new(worker.queue)
worker_two_queue = Sidekiq::Queue.new(worker_two.queue)
unrelated_queue = Sidekiq::Queue.new(unrelated_worker.queue)
expect(worker_queue.size).to eq(2)
expect(worker_two_queue.size).to eq(1)
expect(unrelated_queue.size).to eq(1)
model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name])
expect(worker_queue.size).to eq(1)
expect(worker_two_queue.size).to eq(0)
expect(worker_queue.map(&:klass)).not_to include(worker.name)
expect(worker_queue.map(&:klass)).to include(
same_queue_different_worker.name
)
expect(worker_two_queue.map(&:klass)).not_to include(worker_two.name)
expect(unrelated_queue.size).to eq(1)
end
context "when job instances are in the scheduled set" do
it "removes all related job instances from the scheduled set" do
worker.perform_in(1.hour)
worker_two.perform_in(1.hour)
unrelated_worker.perform_in(1.hour)
scheduled = Sidekiq::ScheduledSet.new
expect(scheduled.size).to eq(3)
expect(scheduled.map(&:klass)).to include(
worker.name,
worker_two.name,
unrelated_worker.name
)
model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name])
expect(scheduled.size).to eq(1)
expect(scheduled.map(&:klass)).not_to include(worker.name)
expect(scheduled.map(&:klass)).not_to include(worker_two.name)
expect(scheduled.map(&:klass)).to include(unrelated_worker.name)
end
end
context "when job instances are in the retry set" do
include_context "when handling retried jobs"
it "removes all related job instances from the retry set" do
retry_in(worker, 1.hour)
retry_in(worker, 2.hours)
retry_in(worker, 3.hours)
retry_in(worker_two, 4.hours)
retry_in(unrelated_worker, 5.hours)
retries = Sidekiq::RetrySet.new
expect(retries.size).to eq(5)
expect(retries.map(&:klass)).to include(
worker.name,
worker_two.name,
unrelated_worker.name
)
model.sidekiq_remove_jobs(job_klasses: [worker.name, worker_two.name])
expect(retries.size).to eq(1)
expect(retries.map(&:klass)).not_to include(worker.name)
expect(retries.map(&:klass)).not_to include(worker_two.name)
expect(retries.map(&:klass)).to include(unrelated_worker.name)
end
end
# Imitate job deletion returning zero and then non zero.
context "when job fails to be deleted" do
let(:job_double) do
instance_double(
"Sidekiq::JobRecord",
klass: worker.name
)
end
context "and does not work enough times in a row before max attempts" do
it "tries the max attempts without succeeding" do
worker.perform_async
allow(job_double).to receive(:delete).and_return(true)
# Scheduled set runs last so only need to stub out its values.
allow(Sidekiq::ScheduledSet)
.to receive(:new)
.and_return([job_double])
expect(model.sidekiq_remove_jobs(job_klasses: [worker.name]))
.to eq(
{
attempts: 5,
success: false
}
)
end
end
context "and then it works enough times in a row before max attempts" do
it "succeeds" do
worker.perform_async
# attempt 1: false will increment the streak once to 1
# attempt 2: true resets it back to 0
# attempt 3: false will increment the streak once to 1
# attempt 4: false will increment the streak once to 2, loop breaks
allow(job_double).to receive(:delete).and_return(false, true, false)
worker.perform_async
# Scheduled set runs last so only need to stub out its values.
allow(Sidekiq::ScheduledSet)
.to receive(:new)
.and_return([job_double])
expect(model.sidekiq_remove_jobs(job_klasses: [worker.name]))
.to eq(
{
attempts: 4,
success: true
}
)
end
end
end
end

View File

@ -12,7 +12,6 @@ RSpec.describe Gitlab::Tracking::EventDefinition do
property_description: 'The string "issue_id"',
value_description: 'ID of the issue',
extra_properties: { confidential: false },
product_category: 'collection',
product_stage: 'growth',
product_section: 'dev',
product_group: 'group::product analytics',
@ -47,7 +46,6 @@ RSpec.describe Gitlab::Tracking::EventDefinition do
:property_description | 1
:value_description | 1
:extra_properties | 'smth'
:product_category | 1
:product_stage | 1
:product_section | nil
:product_group | nil

View File

@ -145,8 +145,8 @@ RSpec.describe Ci::ArchiveTraceService, '#execute', feature_category: :continuou
expect(Gitlab::ErrorTracking)
.to receive(:track_and_raise_for_dev_exception)
.with(::Gitlab::Ci::Trace::ArchiveError,
issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/51502',
job_id: job.id).once
issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/51502',
job_id: job.id).once
expect(Sidekiq.logger).to receive(:warn).with(
class: Ci::ArchiveTraceWorker.name,

View File

@ -5657,8 +5657,6 @@
- './spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb'
- './spec/lib/gitlab/background_task_spec.rb'
- './spec/lib/gitlab/backtrace_cleaner_spec.rb'
- './spec/lib/gitlab/bare_repository_import/importer_spec.rb'
- './spec/lib/gitlab/bare_repository_import/repository_spec.rb'
- './spec/lib/gitlab/batch_worker_context_spec.rb'
- './spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- './spec/lib/gitlab/bitbucket_import/project_creator_spec.rb'