Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
903ccf7c93
commit
c6b3ec3f56
|
|
@ -14,7 +14,7 @@ module Projects
|
|||
token = extract_alert_manager_token(request)
|
||||
result = notify_service.execute(token)
|
||||
|
||||
head(response_status(result))
|
||||
head result.http_status
|
||||
end
|
||||
|
||||
private
|
||||
|
|
@ -33,12 +33,6 @@ module Projects
|
|||
.new(project, current_user, notification_payload)
|
||||
end
|
||||
|
||||
def response_status(result)
|
||||
return :ok if result.success?
|
||||
|
||||
result.http_status
|
||||
end
|
||||
|
||||
def notification_payload
|
||||
params.permit![:notification]
|
||||
end
|
||||
|
|
|
|||
|
|
@ -26,12 +26,9 @@ module Projects
|
|||
|
||||
def notify
|
||||
token = extract_alert_manager_token(request)
|
||||
result = notify_service.execute(token)
|
||||
|
||||
if notify_service.execute(token)
|
||||
head :ok
|
||||
else
|
||||
head :unprocessable_entity
|
||||
end
|
||||
head result.http_status
|
||||
end
|
||||
|
||||
def create
|
||||
|
|
|
|||
|
|
@ -72,7 +72,7 @@ module SubmoduleHelper
|
|||
project].join('')
|
||||
|
||||
url_with_dotgit = url_no_dotgit + '.git'
|
||||
url_with_dotgit == Gitlab::Shell.url_to_repo([namespace, '/', project].join(''))
|
||||
url_with_dotgit == Gitlab::RepositoryUrlBuilder.build([namespace, '/', project].join(''))
|
||||
end
|
||||
|
||||
def relative_self_url?(url)
|
||||
|
|
|
|||
|
|
@ -87,26 +87,15 @@ module HasRepository
|
|||
end
|
||||
|
||||
def url_to_repo
|
||||
Gitlab::Shell.url_to_repo(full_path)
|
||||
ssh_url_to_repo
|
||||
end
|
||||
|
||||
def ssh_url_to_repo
|
||||
url_to_repo
|
||||
Gitlab::RepositoryUrlBuilder.build(repository.full_path, protocol: :ssh)
|
||||
end
|
||||
|
||||
def http_url_to_repo
|
||||
custom_root = Gitlab::CurrentSettings.custom_http_clone_url_root
|
||||
|
||||
url = if custom_root.present?
|
||||
Gitlab::Utils.append_path(
|
||||
custom_root,
|
||||
web_url(only_path: true)
|
||||
)
|
||||
else
|
||||
web_url
|
||||
end
|
||||
|
||||
"#{url}.git"
|
||||
Gitlab::RepositoryUrlBuilder.build(repository.full_path, protocol: :http)
|
||||
end
|
||||
|
||||
def web_url(only_path: nil)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,38 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module UpdateHighestRole
|
||||
extend ActiveSupport::Concern
|
||||
|
||||
HIGHEST_ROLE_LEASE_TIMEOUT = 10.minutes.to_i
|
||||
HIGHEST_ROLE_JOB_DELAY = 10.minutes
|
||||
|
||||
included do
|
||||
after_commit :update_highest_role
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# Schedule a Sidekiq job to update the highest role for a User
|
||||
#
|
||||
# The job will be called outside of a transaction in order to ensure the changes
|
||||
# to be commited before attempting to update the highest role.
|
||||
# The exlusive lease will not be released after completion to prevent multiple jobs
|
||||
# being executed during the defined timeout.
|
||||
def update_highest_role
|
||||
return unless update_highest_role?
|
||||
|
||||
run_after_commit_or_now do
|
||||
lease_key = "update_highest_role:#{update_highest_role_attribute}"
|
||||
lease = Gitlab::ExclusiveLease.new(lease_key, timeout: HIGHEST_ROLE_LEASE_TIMEOUT)
|
||||
|
||||
if lease.try_obtain
|
||||
UpdateHighestRoleWorker.perform_in(HIGHEST_ROLE_JOB_DELAY, update_highest_role_attribute)
|
||||
else
|
||||
# use same logging as ExclusiveLeaseGuard
|
||||
# rubocop:disable Gitlab/RailsLogger
|
||||
Rails.logger.error('Cannot obtain an exclusive lease. There must be another instance already in execution.')
|
||||
# rubocop:enable Gitlab/RailsLogger
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -16,7 +16,6 @@ module VersionedDescription
|
|||
def save_description_version
|
||||
self.saved_description_version = nil
|
||||
|
||||
return unless Feature.enabled?(:save_description_versions, issuing_parent, default_enabled: true)
|
||||
return unless saved_change_to_description?
|
||||
|
||||
unless description_versions.exists?
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ class Member < ApplicationRecord
|
|||
include Presentable
|
||||
include Gitlab::Utils::StrongMemoize
|
||||
include FromUnion
|
||||
include UpdateHighestRole
|
||||
|
||||
attr_accessor :raw_invite_token
|
||||
|
||||
|
|
@ -100,7 +101,6 @@ class Member < ApplicationRecord
|
|||
after_destroy :destroy_notification_setting
|
||||
after_destroy :post_destroy_hook, unless: :pending?
|
||||
after_commit :refresh_member_authorized_projects
|
||||
after_commit :update_highest_role
|
||||
|
||||
default_value_for :notification_level, NotificationSetting.levels[:global]
|
||||
|
||||
|
|
@ -463,21 +463,15 @@ class Member < ApplicationRecord
|
|||
end
|
||||
end
|
||||
|
||||
# Triggers the service to schedule a Sidekiq job to update the highest role
|
||||
# for a User
|
||||
#
|
||||
# The job will be called outside of a transaction in order to ensure the changes
|
||||
# for a Member to be commited before attempting to update the highest role.
|
||||
# rubocop: disable CodeReuse/ServiceClass
|
||||
def update_highest_role
|
||||
def update_highest_role?
|
||||
return unless user_id.present?
|
||||
return unless previous_changes[:access_level].present?
|
||||
|
||||
run_after_commit_or_now do
|
||||
Members::UpdateHighestRoleService.new(user_id).execute
|
||||
end
|
||||
previous_changes[:access_level].present?
|
||||
end
|
||||
|
||||
def update_highest_role_attribute
|
||||
user_id
|
||||
end
|
||||
# rubocop: enable CodeReuse/ServiceClass
|
||||
end
|
||||
|
||||
Member.prepend_if_ee('EE::Member')
|
||||
|
|
|
|||
|
|
@ -49,15 +49,15 @@ class ProjectWiki
|
|||
end
|
||||
|
||||
def url_to_repo
|
||||
Gitlab::Shell.url_to_repo(full_path)
|
||||
ssh_url_to_repo
|
||||
end
|
||||
|
||||
def ssh_url_to_repo
|
||||
url_to_repo
|
||||
Gitlab::RepositoryUrlBuilder.build(repository.full_path, protocol: :ssh)
|
||||
end
|
||||
|
||||
def http_url_to_repo
|
||||
@project.http_url_to_repo.sub(%r{git\z}, 'wiki.git')
|
||||
Gitlab::RepositoryUrlBuilder.build(repository.full_path, protocol: :http)
|
||||
end
|
||||
|
||||
def wiki_base_path
|
||||
|
|
|
|||
|
|
@ -258,10 +258,12 @@ class Snippet < ApplicationRecord
|
|||
super
|
||||
end
|
||||
|
||||
override :repository
|
||||
def repository
|
||||
@repository ||= Repository.new(full_path, self, shard: repository_storage, disk_path: disk_path, repo_type: Gitlab::GlRepository::SNIPPET)
|
||||
end
|
||||
|
||||
override :repository_size_checker
|
||||
def repository_size_checker
|
||||
strong_memoize(:repository_size_checker) do
|
||||
::Gitlab::RepositorySizeChecker.new(
|
||||
|
|
@ -271,6 +273,7 @@ class Snippet < ApplicationRecord
|
|||
end
|
||||
end
|
||||
|
||||
override :storage
|
||||
def storage
|
||||
@storage ||= Storage::Hashed.new(self, prefix: Storage::Hashed::SNIPPET_REPOSITORY_PATH_PREFIX)
|
||||
end
|
||||
|
|
@ -278,6 +281,7 @@ class Snippet < ApplicationRecord
|
|||
# This is the full_path used to identify the
|
||||
# the snippet repository. It will be used mostly
|
||||
# for logging purposes.
|
||||
override :full_path
|
||||
def full_path
|
||||
return unless persisted?
|
||||
|
||||
|
|
@ -290,10 +294,6 @@ class Snippet < ApplicationRecord
|
|||
end
|
||||
end
|
||||
|
||||
def url_to_repo
|
||||
Gitlab::Shell.url_to_repo(full_path.delete('@'))
|
||||
end
|
||||
|
||||
def repository_storage
|
||||
snippet_repository&.shard_name || self.class.pick_repository_storage
|
||||
end
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ class User < ApplicationRecord
|
|||
include BatchDestroyDependentAssociations
|
||||
include HasUniqueInternalUsers
|
||||
include IgnorableColumns
|
||||
include UpdateHighestRole
|
||||
|
||||
DEFAULT_NOTIFICATION_LEVEL = :participating
|
||||
|
||||
|
|
@ -238,7 +239,6 @@ class User < ApplicationRecord
|
|||
end
|
||||
end
|
||||
end
|
||||
after_commit :update_highest_role, on: [:create, :update]
|
||||
|
||||
after_initialize :set_projects_limit
|
||||
|
||||
|
|
@ -1854,20 +1854,15 @@ class User < ApplicationRecord
|
|||
last_active_at.to_i <= MINIMUM_INACTIVE_DAYS.days.ago.to_i
|
||||
end
|
||||
|
||||
# Triggers the service to schedule a Sidekiq job to update the highest role
|
||||
# for a User
|
||||
#
|
||||
# The job will be called outside of a transaction in order to ensure the changes
|
||||
# for a Member to be commited before attempting to update the highest role.
|
||||
# rubocop: disable CodeReuse/ServiceClass
|
||||
def update_highest_role
|
||||
return unless (previous_changes.keys & %w(state user_type ghost)).any?
|
||||
def update_highest_role?
|
||||
return false unless persisted?
|
||||
|
||||
run_after_commit_or_now do
|
||||
Members::UpdateHighestRoleService.new(id).execute
|
||||
end
|
||||
(previous_changes.keys & %w(state user_type ghost)).any?
|
||||
end
|
||||
|
||||
def update_highest_role_attribute
|
||||
id
|
||||
end
|
||||
# rubocop: enable CodeReuse/ServiceClass
|
||||
end
|
||||
|
||||
User.prepend_if_ee('EE::User')
|
||||
|
|
|
|||
|
|
@ -96,7 +96,7 @@ class MergeRequestWidgetEntity < Grape::Entity
|
|||
|
||||
def can_add_ci_config_path?(merge_request)
|
||||
merge_request.source_project&.uses_default_ci_config? &&
|
||||
merge_request.all_pipelines.none? &&
|
||||
!merge_request.source_project.has_ci? &&
|
||||
merge_request.commits_count.positive? &&
|
||||
can?(current_user, :read_build, merge_request.source_project) &&
|
||||
can?(current_user, :create_pipeline, merge_request.source_project)
|
||||
|
|
|
|||
|
|
@ -1,39 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Members
|
||||
class UpdateHighestRoleService < ::BaseService
|
||||
include ExclusiveLeaseGuard
|
||||
|
||||
LEASE_TIMEOUT = 10.minutes.to_i
|
||||
DELAY = 10.minutes
|
||||
|
||||
attr_reader :user_id
|
||||
|
||||
def initialize(user_id)
|
||||
@user_id = user_id
|
||||
end
|
||||
|
||||
def execute
|
||||
try_obtain_lease do
|
||||
UpdateHighestRoleWorker.perform_in(DELAY, user_id)
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def lease_key
|
||||
"update_highest_role:#{user_id}"
|
||||
end
|
||||
|
||||
def lease_timeout
|
||||
LEASE_TIMEOUT
|
||||
end
|
||||
|
||||
# Do not release the lease before the timeout to
|
||||
# prevent multiple jobs being executed during the
|
||||
# defined timeout
|
||||
def lease_release?
|
||||
false
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -46,15 +46,15 @@ module Projects
|
|||
end
|
||||
|
||||
def bad_request
|
||||
ServiceResponse.error(message: 'Bad Request', http_status: 400)
|
||||
ServiceResponse.error(message: 'Bad Request', http_status: :bad_request)
|
||||
end
|
||||
|
||||
def unauthorized
|
||||
ServiceResponse.error(message: 'Unauthorized', http_status: 401)
|
||||
ServiceResponse.error(message: 'Unauthorized', http_status: :unauthorized)
|
||||
end
|
||||
|
||||
def forbidden
|
||||
ServiceResponse.error(message: 'Forbidden', http_status: 403)
|
||||
ServiceResponse.error(message: 'Forbidden', http_status: :forbidden)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -58,12 +58,7 @@ module Projects
|
|||
end
|
||||
|
||||
def tree_saver_class
|
||||
if ::Feature.enabled?(:streaming_serializer, project, default_enabled: true)
|
||||
Gitlab::ImportExport::Project::TreeSaver
|
||||
else
|
||||
# Once we remove :streaming_serializer feature flag, Project::LegacyTreeSaver should be removed as well
|
||||
Gitlab::ImportExport::Project::LegacyTreeSaver
|
||||
end
|
||||
Gitlab::ImportExport::Project::TreeSaver
|
||||
end
|
||||
|
||||
def uploads_saver
|
||||
|
|
|
|||
|
|
@ -8,15 +8,15 @@ module Projects
|
|||
include IncidentManagement::Settings
|
||||
|
||||
def execute(token)
|
||||
return false unless valid_payload_size?
|
||||
return false unless valid_version?
|
||||
return false unless valid_alert_manager_token?(token)
|
||||
return bad_request unless valid_payload_size?
|
||||
return unprocessable_entity unless valid_version?
|
||||
return unauthorized unless valid_alert_manager_token?(token)
|
||||
|
||||
persist_events
|
||||
send_alert_email if send_email?
|
||||
process_incident_issues if process_issues?
|
||||
|
||||
true
|
||||
ServiceResponse.success
|
||||
end
|
||||
|
||||
private
|
||||
|
|
@ -118,6 +118,18 @@ module Projects
|
|||
def persist_events
|
||||
CreateEventsService.new(project, nil, params).execute
|
||||
end
|
||||
|
||||
def bad_request
|
||||
ServiceResponse.error(message: 'Bad Request', http_status: :bad_request)
|
||||
end
|
||||
|
||||
def unauthorized
|
||||
ServiceResponse.error(message: 'Unauthorized', http_status: :unauthorized)
|
||||
end
|
||||
|
||||
def unprocessable_entity
|
||||
ServiceResponse.error(message: 'Unprocessable Entity', http_status: :unprocessable_entity)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@
|
|||
|
||||
.form-group
|
||||
.collapse.js-well-changing-auth
|
||||
.changing-auth-method= icon('spinner spin lg')
|
||||
.changing-auth-method
|
||||
.well-password-auth.collapse.js-well-password-auth
|
||||
= f.label :password, _("Password"), class: "label-bold"
|
||||
= f.password_field :password, value: mirror.password, class: 'form-control qa-password', autocomplete: 'new-password'
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@
|
|||
|
||||
.form-group.js-ssh-host-keys-section{ class: ('collapse' unless mirror.ssh_mirror_url?) }
|
||||
%button.btn.btn-inverted.btn-secondary.inline.js-detect-host-keys.append-right-10{ type: 'button', data: { qa_selector: 'detect_host_keys' } }
|
||||
= icon('spinner spin', class: 'js-spinner d-none')
|
||||
.js-spinner.d-none.spinner.mr-1
|
||||
= _('Detect host keys')
|
||||
.fingerprint-ssh-info.js-fingerprint-ssh-info.prepend-top-10.append-bottom-10{ class: ('collapse' unless mirror.ssh_mirror_url?) }
|
||||
%label.label-bold
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@
|
|||
.row
|
||||
.form-group.col-md-6
|
||||
= f.label :name, _('Name'), class: 'label-bold'
|
||||
= f.text_field :name, class: "form-control qa-personal-access-token-name-field", required: true
|
||||
= f.text_field :name, class: "form-control", required: true, data: { qa_selector: 'personal_access_token_name_field' }
|
||||
|
||||
.row
|
||||
.form-group.col-md-6
|
||||
|
|
@ -21,11 +21,11 @@
|
|||
|
||||
= render_if_exists 'personal_access_tokens/callout_max_personal_access_token_lifetime'
|
||||
|
||||
= f.text_field :expires_at, class: "datepicker form-control", placeholder: 'YYYY-MM-DD'
|
||||
= f.text_field :expires_at, class: "datepicker form-control", placeholder: 'YYYY-MM-DD', data: { qa_selector: 'expiry_date_field' }
|
||||
|
||||
.form-group
|
||||
= f.label :scopes, _('Scopes'), class: 'label-bold'
|
||||
= render 'shared/tokens/scopes_form', prefix: 'personal_access_token', token: token, scopes: scopes
|
||||
|
||||
.prepend-top-default
|
||||
= f.submit _('Create %{type} token') % { type: type }, class: "btn btn-success qa-create-token-button"
|
||||
= f.submit _('Create %{type} token') % { type: type }, class: "btn btn-success", data: { qa_selector: 'create_token_button' }
|
||||
|
|
|
|||
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Add added_lines and removed_lines columns to merge_request_metrics table
|
||||
merge_request: 28658
|
||||
author:
|
||||
type: added
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Use faster streaming serializer for project exports
|
||||
merge_request: 28925
|
||||
author:
|
||||
type: performance
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Migrate .fa-spinner to .spinner for app/views/projects/mirrors
|
||||
merge_request: 25041
|
||||
author: nuwe1
|
||||
type: other
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Use concern instead of service to update highest role
|
||||
merge_request: 28791
|
||||
author:
|
||||
type: other
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddLineMetricsToMrMetrics < ActiveRecord::Migration[6.0]
|
||||
include Gitlab::Database::MigrationHelpers
|
||||
|
||||
DOWNTIME = false
|
||||
|
||||
def up
|
||||
with_lock_retries do
|
||||
add_column :merge_request_metrics, :added_lines, :integer
|
||||
add_column :merge_request_metrics, :removed_lines, :integer
|
||||
end
|
||||
end
|
||||
|
||||
def down
|
||||
with_lock_retries do
|
||||
remove_column :merge_request_metrics, :added_lines, :integer
|
||||
remove_column :merge_request_metrics, :removed_lines, :integer
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -3749,7 +3749,9 @@ CREATE TABLE public.merge_request_metrics (
|
|||
modified_paths_size integer,
|
||||
commits_count integer,
|
||||
first_approved_at timestamp with time zone,
|
||||
first_reassigned_at timestamp with time zone
|
||||
first_reassigned_at timestamp with time zone,
|
||||
added_lines integer,
|
||||
removed_lines integer
|
||||
);
|
||||
|
||||
CREATE SEQUENCE public.merge_request_metrics_id_seq
|
||||
|
|
@ -12936,6 +12938,7 @@ COPY "schema_migrations" (version) FROM STDIN;
|
|||
20200330123739
|
||||
20200330132913
|
||||
20200331220930
|
||||
20200402123926
|
||||
20200402135250
|
||||
20200403184110
|
||||
20200403185127
|
||||
|
|
|
|||
|
|
@ -1,32 +1,5 @@
|
|||
---
|
||||
type: reference
|
||||
redirect_to: '../object_storage.md'
|
||||
---
|
||||
|
||||
# Cloud Object Storage
|
||||
|
||||
GitLab supports utilizing a Cloud Object Storage service rather than [NFS](nfs.md) for holding
|
||||
numerous types of data. This is recommended in larger setups as object storage is
|
||||
typically much more performant, reliable, and scalable.
|
||||
|
||||
For configuring GitLab to use Object Storage refer to the following guides:
|
||||
|
||||
1. Make sure the [`git` user home directory](https://docs.gitlab.com/omnibus/settings/configuration.html#moving-the-home-directory-for-a-user) is on local disk.
|
||||
1. Configure [database lookup of SSH keys](../operations/fast_ssh_key_lookup.md)
|
||||
to eliminate the need for a shared `authorized_keys` file.
|
||||
1. Configure [object storage for backups](../../raketasks/backup_restore.md#uploading-backups-to-a-remote-cloud-storage).
|
||||
1. Configure [object storage for job artifacts](../job_artifacts.md#using-object-storage)
|
||||
including [incremental logging](../job_logs.md#new-incremental-logging-architecture).
|
||||
1. Configure [object storage for LFS objects](../lfs/lfs_administration.md#storing-lfs-objects-in-remote-object-storage).
|
||||
1. Configure [object storage for uploads](../uploads.md#using-object-storage-core-only).
|
||||
1. Configure [object storage for merge request diffs](../merge_request_diffs.md#using-object-storage).
|
||||
1. Configure [object storage for container registry](../packages/container_registry.md#container-registry-storage-driver) (optional feature).
|
||||
1. Configure [object storage for Mattermost](https://docs.mattermost.com/administration/config-settings.html#file-storage) (optional feature).
|
||||
1. Configure [object storage for packages](../packages/index.md#using-object-storage) (optional feature). **(PREMIUM ONLY)**
|
||||
1. Configure [object storage for dependency proxy](../packages/dependency_proxy.md#using-object-storage) (optional feature). **(PREMIUM ONLY)**
|
||||
1. Configure [object storage for Pseudonymizer](../pseudonymizer.md#configuration) (optional feature). **(ULTIMATE ONLY)**
|
||||
|
||||
NOTE: **Note:**
|
||||
One current feature of GitLab that still requires a shared directory (NFS) is
|
||||
[GitLab Pages](../../user/project/pages/index.md).
|
||||
There is [work in progress](https://gitlab.com/gitlab-org/gitlab-pages/issues/196)
|
||||
to eliminate the need for NFS to support GitLab Pages.
|
||||
This document was moved to [another location](../object_storage.md).
|
||||
|
|
|
|||
|
|
@ -90,7 +90,7 @@ This configuration relies on valid AWS credentials to be configured already.
|
|||
Use an object storage option like AWS S3 to store job artifacts.
|
||||
|
||||
DANGER: **Danger:**
|
||||
If you're enabling S3 in [GitLab HA](high_availability/README.md), you will need to have an [NFS mount set up for CI logs and artifacts](high_availability/nfs.md#a-single-nfs-mount) or enable [incremental logging](job_logs.md#new-incremental-logging-architecture). If these settings are not set, you will risk job logs disappearing or not being saved.
|
||||
If you configure GitLab to store CI logs and artifacts on object storage, you must also enable [incremental logging](job_logs.md#new-incremental-logging-architecture). Otherwise, job logs will disappear or not be saved.
|
||||
|
||||
#### Object Storage Settings
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,140 @@
|
|||
---
|
||||
type: reference
|
||||
---
|
||||
|
||||
# Object Storage
|
||||
|
||||
GitLab supports using an object storage service for holding numerous types of data.
|
||||
In a high availability setup, it's recommended over [NFS](high_availability/nfs.md) and
|
||||
in general it's better in larger setups as object storage is
|
||||
typically much more performant, reliable, and scalable.
|
||||
|
||||
## Options
|
||||
|
||||
Object storage options that GitLab has tested, or is aware of customers using include:
|
||||
|
||||
- SaaS/Cloud solutions such as [Amazon S3](https://aws.amazon.com/s3/), [Google cloud storage](https://cloud.google.com/storage).
|
||||
- On-premises hardware and appliances from various storage vendors.
|
||||
- MinIO. We have [a guide to deploying this](https://docs.gitlab.com/charts/advanced/external-object-storage/minio.html) within our Helm Chart documentation.
|
||||
|
||||
## Configuration guides
|
||||
|
||||
For configuring GitLab to use Object Storage refer to the following guides:
|
||||
|
||||
1. Make sure the [`git` user home directory](https://docs.gitlab.com/omnibus/settings/configuration.html#moving-the-home-directory-for-a-user) is on local disk.
|
||||
1. Configure [database lookup of SSH keys](operations/fast_ssh_key_lookup.md)
|
||||
to eliminate the need for a shared `authorized_keys` file.
|
||||
1. Configure [object storage for backups](../raketasks/backup_restore.md#uploading-backups-to-a-remote-cloud-storage).
|
||||
1. Configure [object storage for job artifacts](job_artifacts.md#using-object-storage)
|
||||
including [incremental logging](job_logs.md#new-incremental-logging-architecture).
|
||||
1. Configure [object storage for LFS objects](lfs/lfs_administration.md#storing-lfs-objects-in-remote-object-storage).
|
||||
1. Configure [object storage for uploads](uploads.md#using-object-storage-core-only).
|
||||
1. Configure [object storage for merge request diffs](merge_request_diffs.md#using-object-storage).
|
||||
1. Configure [object storage for Container Registry](packages/container_registry.md#container-registry-storage-driver) (optional feature).
|
||||
1. Configure [object storage for Mattermost](https://docs.mattermost.com/administration/config-settings.html#file-storage) (optional feature).
|
||||
1. Configure [object storage for packages](packages/index.md#using-object-storage) (optional feature). **(PREMIUM ONLY)**
|
||||
1. Configure [object storage for Dependency Proxy](packages/dependency_proxy.md#using-object-storage) (optional feature). **(PREMIUM ONLY)**
|
||||
1. Configure [object storage for Pseudonymizer](pseudonymizer.md#configuration) (optional feature). **(ULTIMATE ONLY)**
|
||||
1. Configure [object storage for autoscale Runner caching](https://docs.gitlab.com/runner/configuration/autoscale.html#distributed-runners-caching) (optional - for improved performance).
|
||||
|
||||
## Warnings, limitations, and known issues
|
||||
|
||||
### Use separate buckets
|
||||
|
||||
Using separate buckets for each data type is the recommended approach for GitLab.
|
||||
|
||||
A limitation of our configuration is that each use of object storage is separately configured.
|
||||
[We have an issue for improving this](https://gitlab.com/gitlab-org/gitlab/-/issues/23345)
|
||||
and easily using one bucket with separate folders is one improvement that this might bring.
|
||||
|
||||
There is at least one specific issue with using the same bucket:
|
||||
when GitLab is deployed with the Helm chart restore from backup
|
||||
[will not properly function](https://docs.gitlab.com/charts/advanced/external-object-storage/#lfs-artifacts-uploads-packages-external-diffs-pseudonymizer)
|
||||
unless separate buckets are used.
|
||||
|
||||
One risk of using a single bucket would be that if your organisation decided to
|
||||
migrate GitLab to the Helm deployment in the future. GitLab would run, but the situation with
|
||||
backups might not be realised until the organisation had a critical requirement for the backups to work.
|
||||
|
||||
### S3 API compatability issues
|
||||
|
||||
Not all S3 providers [are fully compatible](../raketasks/backup_restore.md#other-s3-providers)
|
||||
with the Fog library that GitLab uses. Symptoms include:
|
||||
|
||||
```plaintext
|
||||
411 Length Required
|
||||
```
|
||||
|
||||
### GitLab Pages requires NFS
|
||||
|
||||
If you're working to [scale out](high_availability/README.md) your GitLab implementation and
|
||||
one of your requirements is [GitLab Pages](../user/project/pages/index.md) this currently requires
|
||||
NFS. There is [work in progress](https://gitlab.com/gitlab-org/gitlab-pages/issues/196)
|
||||
to remove this dependency. In the future, GitLab Pages may use
|
||||
[object storage](https://gitlab.com/gitlab-org/gitlab/-/issues/208135).
|
||||
|
||||
The dependency on disk storage also prevents Pages being deployed using the
|
||||
[GitLab Helm chart](https://gitlab.com/gitlab-org/charts/gitlab/-/issues/37).
|
||||
|
||||
### Incremental logging is required for CI to use object storage
|
||||
|
||||
If you configure GitLab to use object storage for CI logs and artifacts,
|
||||
[you must also enable incremental logging](job_artifacts.md#using-object-storage).
|
||||
|
||||
### Proxy Download
|
||||
|
||||
A number of the use cases for object storage allow client traffic to be redirected to the
|
||||
object storage back end, like when Git clients request large files via LFS or when
|
||||
downloading CI artifacts and logs.
|
||||
|
||||
When the files are stored on local block storage or NFS, GitLab has to act as a proxy.
|
||||
This is not the default behaviour with object storage.
|
||||
|
||||
The `proxy_download` setting controls this behaviour: the default is generally `false`.
|
||||
Verify this in the documentation for each use case. Set it to `true` so that GitLab proxies
|
||||
the files.
|
||||
|
||||
When not proxying files, GitLab returns an
|
||||
[HTTP 302 redirect with a pre-signed, time-limited object storage URL](https://gitlab.com/gitlab-org/gitlab/-/issues/32117#note_218532298).
|
||||
This can result in some of the following problems:
|
||||
|
||||
- If GitLab is using non-secure HTTP to access the object storage, clients may generate
|
||||
`https->http` downgrade errors and refuse to process the redirect. The solution to this
|
||||
is for GitLab to use HTTPS. LFS, for example, will generate this error:
|
||||
|
||||
```plaintext
|
||||
LFS: lfsapi/client: refusing insecure redirect, https->http
|
||||
```
|
||||
|
||||
- Clients will need to trust the certificate authority that issued the object storage
|
||||
certificate, or may return common TLS errors such as:
|
||||
|
||||
```plaintext
|
||||
x509: certificate signed by unknown authority
|
||||
```
|
||||
|
||||
- Clients will need network access to the object storage. Errors that might result
|
||||
if this access is not in place include:
|
||||
|
||||
```plaintext
|
||||
Received status code 403 from server: Forbidden
|
||||
```
|
||||
|
||||
Getting a `403 Forbidden` response is specifically called out on the
|
||||
[package repository documentation](packages/index.md#using-object-storage)
|
||||
as a side effect of how some build tools work.
|
||||
|
||||
### ETag mismatch
|
||||
|
||||
Using the default GitLab settings, some object storage back-ends such as
|
||||
[MinIO](https://gitlab.com/gitlab-org/gitlab/-/issues/23188)
|
||||
and [Alibaba](https://gitlab.com/gitlab-org/charts/gitlab/-/issues/1564)
|
||||
might generate `ETag mismatch` errors.
|
||||
|
||||
When using GitLab direct upload, the
|
||||
[workaround for MinIO](https://gitlab.com/gitlab-org/charts/gitlab/-/issues/1564#note_244497658)
|
||||
is to use the `--compat` parameter on the server.
|
||||
|
||||
We are working on a fix to GitLab component Workhorse, and also
|
||||
a workaround, in the mean time, to
|
||||
[allow ETag verification to be disabled](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/18175).
|
||||
|
|
@ -915,7 +915,7 @@ nicely on different mobile devices.
|
|||
- When providing a command without output, don't prefix the shell command with `$`.
|
||||
- If you need to include triple backticks inside a code block, use four backticks
|
||||
for the codeblock fences instead of three.
|
||||
- For regular code blocks, always use a highlighting class corresponding to the
|
||||
- For regular fenced code blocks, always use a highlighting class corresponding to the
|
||||
language for better readability. Examples:
|
||||
|
||||
````markdown
|
||||
|
|
@ -936,7 +936,7 @@ nicely on different mobile devices.
|
|||
```
|
||||
````
|
||||
|
||||
Syntax highlighting is required for code blocks added to the GitLab documentation.
|
||||
Syntax highlighting is required for fenced code blocks added to the GitLab documentation.
|
||||
Refer to the table below for the most common language classes, or check the
|
||||
[complete list](https://github.com/rouge-ruby/rouge/wiki/List-of-supported-languages-and-lexers)
|
||||
of language classes available.
|
||||
|
|
|
|||
|
|
@ -73,7 +73,7 @@ The following items will be exported:
|
|||
- Project and wiki repositories
|
||||
- Project uploads
|
||||
- Project configuration, including services
|
||||
- Issues with comments, merge requests with diffs and comments, labels, milestones, snippets,
|
||||
- Issues with comments, merge requests with diffs and comments, labels, milestones, snippets, time tracking,
|
||||
and other project entities
|
||||
- Design Management files and data
|
||||
- LFS objects
|
||||
|
|
|
|||
|
|
@ -1,68 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module ImportExport
|
||||
module Project
|
||||
# Serializes a project (plus a subset of its group members) into the
# legacy single-file JSON export format.
#
# NOTE(review): relies on ImportExport / Gitlab::ImportExport helpers and
# finders defined elsewhere in the project.
class LegacyTreeSaver
  # Absolute path of the JSON export file this saver writes.
  attr_reader :full_path

  # @param project [Project] project being exported
  # @param current_user [User] user performing the export
  # @param shared [Gitlab::ImportExport::Shared] shared export state (paths, error sink)
  # @param params [Hash] optional overrides, e.g. +:description+
  def initialize(project:, current_user:, shared:, params: {})
    @project = project
    @current_user = current_user
    @shared = shared
    @params = params
    @full_path = File.join(@shared.export_path, ImportExport.project_filename)
  end

  # Serializes the project tree, post-processes it, and writes it to disk.
  #
  # @return [Boolean] true on success; false after recording the raised
  #   error on the shared export state.
  def save
    tree = tree_saver.serialize(@project, reader.project_tree)
    fix_project_tree(tree)
    tree_saver.save(tree, @shared.export_path, ImportExport.project_filename)

    true
  rescue => e
    @shared.error(e)
    false
  end

  private

  # Post-processes the serialized tree in place. The result must remain a
  # pure hash (no ActiveRecord objects) so it can be dumped via `.to_json`.
  def fix_project_tree(tree)
    tree['description'] = @params[:description] if @params[:description].present?

    tree['project_members'] += group_members_array
  end

  def reader
    @reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
  end

  # Group members serialized as hashes, retagged so the importer treats
  # them as direct project members.
  def group_members_array
    group_members.as_json(reader.group_members_tree).each do |member|
      member['source_type'] = 'Project' # Make group members project members of the future import
    end
  end

  # Group members that are not already project members. Empty unless the
  # current user can administer the project's group.
  def group_members
    return [] unless @current_user.can?(:admin_group, @project.group)

    # We need `.where.not(user_id: nil)` here otherwise when a group has an
    # invitee, it would make the following query return 0 rows since a NULL
    # user_id would be present in the subquery
    # See http://stackoverflow.com/questions/129077/not-in-clause-and-null-values
    member_user_ids = @project.project_members.where.not(user_id: nil).select(:user_id)

    GroupMembersFinder.new(@project.group).execute.where.not(user_id: member_user_ids)
  end

  def tree_saver
    @tree_saver ||= Gitlab::ImportExport::LegacyRelationTreeSaver.new
  end
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,33 @@
|
|||
# frozen_string_literal: true

module Gitlab
  # Builds clone URLs for a repository path, independent of the
  # container type (project, wiki, project/personal snippet, ...).
  module RepositoryUrlBuilder
    class << self
      # Returns the clone URL of the repository at +path+.
      #
      # @param path [String] full repository path, e.g. "group/project"
      # @param protocol [Symbol] :ssh or :http
      # @return [String] clone URL ending in ".git"
      # @raise [NotImplementedError] for any other protocol
      def build(path, protocol: :ssh)
        # TODO: See https://gitlab.com/gitlab-org/gitlab/-/issues/213021
        path = path.sub('@snippets', 'snippets')

        case protocol
        when :ssh
          ssh_url(path)
        when :http
          http_url(path)
        else
          # Style/RaiseArgs: raise with class + message, not Exception.new
          raise NotImplementedError, "No URL builder defined for protocol #{protocol}"
        end
      end

      private

      def ssh_url(path)
        Gitlab.config.gitlab_shell.ssh_path_prefix + "#{path}.git"
      end

      def http_url(path)
        # Prefer the admin-configured clone root; fall back to the instance URL.
        root = Gitlab::CurrentSettings.custom_http_clone_url_root.presence || Gitlab::Routing.url_helpers.root_url

        Gitlab::Utils.append_path(root, "#{path}.git")
      end
    end
  end
end
|
||||
|
|
@ -47,14 +47,6 @@ module Gitlab
|
|||
@version ||= File.read(gitlab_shell_version_file).chomp if File.readable?(gitlab_shell_version_file)
|
||||
end
|
||||
|
||||
# Return a SSH url for a given project path
|
||||
#
|
||||
# @param [String] full_path project path (URL)
|
||||
# @return [String] SSH URL
|
||||
def url_to_repo(full_path)
|
||||
Gitlab.config.gitlab_shell.ssh_path_prefix + "#{full_path}.git"
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def gitlab_shell_path
|
||||
|
|
|
|||
|
|
@ -1,10 +1,13 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'date'
|
||||
|
||||
module QA
|
||||
module Page
|
||||
module Profile
|
||||
class PersonalAccessTokens < Page::Base
|
||||
view 'app/views/shared/_personal_access_tokens_form.html.haml' do
|
||||
element :expiry_date_field
|
||||
element :personal_access_token_name_field
|
||||
element :create_token_button
|
||||
end
|
||||
|
|
@ -36,6 +39,13 @@ module QA
|
|||
find_element(:created_personal_access_token, wait: 30).value
|
||||
end
|
||||
|
||||
# Fills the token expiry date field after validating the date format.
#
# @param date [Date, String] expiry date; Date instances are converted to
#   their default YYYY-MM-DD string form.
# @raise [RuntimeError] when the string is not in YYYY-MM-DD format.
def fill_expiry_date(date)
  date = date.to_s if date.is_a?(Date)

  # `expr rescue ArgumentError raise "..."` (original) is invalid: the
  # rescue modifier cannot name an exception class. Use an explicit block.
  begin
    Date.strptime(date, '%Y-%m-%d')
  rescue ArgumentError
    raise "Expiry date must be in YYYY-MM-DD format"
  end

  fill_element(:expiry_date_field, date)
end
|
||||
|
||||
# Returns true when a table row containing +token_name+ is rendered on the
# page, i.e. the personal access token appears in the tokens list.
# The short 1-second wait keeps the negative case fast.
def has_token_row_for_name?(token_name)
  page.has_css?('tr', text: token_name, wait: 1.0)
end
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'date'
|
||||
|
||||
module QA
|
||||
module Resource
|
||||
##
|
||||
|
|
@ -19,6 +21,8 @@ module QA
|
|||
Page::Profile::PersonalAccessTokens.perform do |token_page|
|
||||
token_page.fill_token_name(name || 'api-test-token')
|
||||
token_page.check_api
|
||||
# Expire in 2 days just in case the token is created just before midnight
|
||||
token_page.fill_expiry_date(Date.today + 2)
|
||||
token_page.click_create_token_button
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -48,7 +48,7 @@ describe Projects::Alerting::NotificationsController do
|
|||
end
|
||||
|
||||
context 'when notification service fails' do
|
||||
let(:service_response) { ServiceResponse.error(message: 'Unauthorized', http_status: 401) }
|
||||
let(:service_response) { ServiceResponse.error(message: 'Unauthorized', http_status: :unauthorized) }
|
||||
|
||||
it 'responds with the service response' do
|
||||
make_request
|
||||
|
|
|
|||
|
|
@ -158,7 +158,8 @@ describe Projects::Prometheus::AlertsController do
|
|||
end
|
||||
|
||||
describe 'POST #notify' do
|
||||
let(:notify_service) { spy }
|
||||
let(:service_response) { ServiceResponse.success }
|
||||
let(:notify_service) { instance_double(Projects::Prometheus::Alerts::NotifyService, execute: service_response) }
|
||||
|
||||
before do
|
||||
sign_out(user)
|
||||
|
|
@ -170,7 +171,7 @@ describe Projects::Prometheus::AlertsController do
|
|||
end
|
||||
|
||||
it 'returns ok if notification succeeds' do
|
||||
expect(notify_service).to receive(:execute).and_return(true)
|
||||
expect(notify_service).to receive(:execute).and_return(ServiceResponse.success)
|
||||
|
||||
post :notify, params: project_params, session: { as: :json }
|
||||
|
||||
|
|
@ -178,7 +179,9 @@ describe Projects::Prometheus::AlertsController do
|
|||
end
|
||||
|
||||
it 'returns unprocessable entity if notification fails' do
|
||||
expect(notify_service).to receive(:execute).and_return(false)
|
||||
expect(notify_service).to receive(:execute).and_return(
|
||||
ServiceResponse.error(message: 'Unprocessable Entity', http_status: :unprocessable_entity)
|
||||
)
|
||||
|
||||
post :notify, params: project_params, session: { as: :json }
|
||||
|
||||
|
|
|
|||
|
|
@ -38,7 +38,11 @@ describe 'Import/Export - project export integration test', :js do
|
|||
sign_in(user)
|
||||
end
|
||||
|
||||
shared_examples 'export file without sensitive words' do
|
||||
context "with streaming serializer" do
|
||||
before do
|
||||
stub_feature_flags(project_export_as_ndjson: false)
|
||||
end
|
||||
|
||||
it 'exports a project successfully', :sidekiq_inline do
|
||||
export_project_and_download_file(page, project)
|
||||
|
||||
|
|
@ -59,27 +63,8 @@ describe 'Import/Export - project export integration test', :js do
|
|||
end
|
||||
end
|
||||
|
||||
context "with legacy export" do
|
||||
before do
|
||||
stub_feature_flags(streaming_serializer: false)
|
||||
stub_feature_flags(project_export_as_ndjson: false)
|
||||
end
|
||||
|
||||
it_behaves_like "export file without sensitive words"
|
||||
end
|
||||
|
||||
context "with streaming serializer" do
|
||||
before do
|
||||
stub_feature_flags(streaming_serializer: true)
|
||||
stub_feature_flags(project_export_as_ndjson: false)
|
||||
end
|
||||
|
||||
it_behaves_like "export file without sensitive words"
|
||||
end
|
||||
|
||||
context "with ndjson" do
|
||||
before do
|
||||
stub_feature_flags(streaming_serializer: true)
|
||||
stub_feature_flags(project_export_as_ndjson: true)
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -1,352 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::ImportExport::Project::LegacyTreeSaver do
|
||||
describe 'saves the project tree into a json object' do
|
||||
let(:shared) { project.import_export_shared }
|
||||
let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared) }
|
||||
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
|
||||
let(:user) { create(:user) }
|
||||
let!(:project) { setup_project }
|
||||
|
||||
before do
|
||||
project.add_maintainer(user)
|
||||
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
|
||||
allow_any_instance_of(MergeRequest).to receive(:source_branch_sha).and_return('ABCD')
|
||||
allow_any_instance_of(MergeRequest).to receive(:target_branch_sha).and_return('DCBA')
|
||||
end
|
||||
|
||||
after do
|
||||
FileUtils.rm_rf(export_path)
|
||||
end
|
||||
|
||||
it 'saves project successfully' do
|
||||
expect(project_tree_saver.save).to be true
|
||||
end
|
||||
|
||||
# It is mostly duplicated in
|
||||
# `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
|
||||
# except:
|
||||
# context 'with description override' do
|
||||
# context 'group members' do
|
||||
# ^ These are specific for the Project::TreeSaver
|
||||
context 'JSON' do
|
||||
let(:saved_project_json) do
|
||||
project_tree_saver.save
|
||||
project_json(project_tree_saver.full_path)
|
||||
end
|
||||
|
||||
# It is not duplicated in
|
||||
# `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
|
||||
context 'with description override' do
|
||||
let(:params) { { description: 'Foo Bar' } }
|
||||
let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared, params: params) }
|
||||
|
||||
it 'overrides the project description' do
|
||||
expect(saved_project_json).to include({ 'description' => params[:description] })
|
||||
end
|
||||
end
|
||||
|
||||
it 'saves the correct json' do
|
||||
expect(saved_project_json).to include({ 'description' => 'description', 'visibility_level' => 20 })
|
||||
end
|
||||
|
||||
it 'has approvals_before_merge set' do
|
||||
expect(saved_project_json['approvals_before_merge']).to eq(1)
|
||||
end
|
||||
|
||||
it 'has milestones' do
|
||||
expect(saved_project_json['milestones']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has merge requests' do
|
||||
expect(saved_project_json['merge_requests']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has merge request\'s milestones' do
|
||||
expect(saved_project_json['merge_requests'].first['milestone']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has merge request\'s source branch SHA' do
|
||||
expect(saved_project_json['merge_requests'].first['source_branch_sha']).to eq('ABCD')
|
||||
end
|
||||
|
||||
it 'has merge request\'s target branch SHA' do
|
||||
expect(saved_project_json['merge_requests'].first['target_branch_sha']).to eq('DCBA')
|
||||
end
|
||||
|
||||
it 'has events' do
|
||||
expect(saved_project_json['merge_requests'].first['milestone']['events']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has snippets' do
|
||||
expect(saved_project_json['snippets']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has snippet notes' do
|
||||
expect(saved_project_json['snippets'].first['notes']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has releases' do
|
||||
expect(saved_project_json['releases']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has no author on releases' do
|
||||
expect(saved_project_json['releases'].first['author']).to be_nil
|
||||
end
|
||||
|
||||
it 'has the author ID on releases' do
|
||||
expect(saved_project_json['releases'].first['author_id']).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has issues' do
|
||||
expect(saved_project_json['issues']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has issue comments' do
|
||||
notes = saved_project_json['issues'].first['notes']
|
||||
|
||||
expect(notes).not_to be_empty
|
||||
expect(notes.first['type']).to eq('DiscussionNote')
|
||||
end
|
||||
|
||||
it 'has issue assignees' do
|
||||
expect(saved_project_json['issues'].first['issue_assignees']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has author on issue comments' do
|
||||
expect(saved_project_json['issues'].first['notes'].first['author']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has project members' do
|
||||
expect(saved_project_json['project_members']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has merge requests diffs' do
|
||||
expect(saved_project_json['merge_requests'].first['merge_request_diff']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has merge request diff files' do
|
||||
expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_files']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has merge request diff commits' do
|
||||
expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_commits']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has merge requests comments' do
|
||||
expect(saved_project_json['merge_requests'].first['notes']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has author on merge requests comments' do
|
||||
expect(saved_project_json['merge_requests'].first['notes'].first['author']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has pipeline stages' do
|
||||
expect(saved_project_json.dig('ci_pipelines', 0, 'stages')).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has pipeline statuses' do
|
||||
expect(saved_project_json.dig('ci_pipelines', 0, 'stages', 0, 'statuses')).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has pipeline builds' do
|
||||
builds_count = saved_project_json
|
||||
.dig('ci_pipelines', 0, 'stages', 0, 'statuses')
|
||||
.count { |hash| hash['type'] == 'Ci::Build' }
|
||||
|
||||
expect(builds_count).to eq(1)
|
||||
end
|
||||
|
||||
it 'has no when YML attributes but only the DB column' do
|
||||
expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)
|
||||
|
||||
saved_project_json
|
||||
end
|
||||
|
||||
it 'has pipeline commits' do
|
||||
expect(saved_project_json['ci_pipelines']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has ci pipeline notes' do
|
||||
expect(saved_project_json['ci_pipelines'].first['notes']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has labels with no associations' do
|
||||
expect(saved_project_json['labels']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has labels associated to records' do
|
||||
expect(saved_project_json['issues'].first['label_links'].first['label']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has project and group labels' do
|
||||
label_types = saved_project_json['issues'].first['label_links'].map { |link| link['label']['type'] }
|
||||
|
||||
expect(label_types).to match_array(%w(ProjectLabel GroupLabel))
|
||||
end
|
||||
|
||||
it 'has priorities associated to labels' do
|
||||
priorities = saved_project_json['issues'].first['label_links'].flat_map { |link| link['label']['priorities'] }
|
||||
|
||||
expect(priorities).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has issue resource label events' do
|
||||
expect(saved_project_json['issues'].first['resource_label_events']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has merge request resource label events' do
|
||||
expect(saved_project_json['merge_requests'].first['resource_label_events']).not_to be_empty
|
||||
end
|
||||
|
||||
it 'saves the correct service type' do
|
||||
expect(saved_project_json['services'].first['type']).to eq('CustomIssueTrackerService')
|
||||
end
|
||||
|
||||
it 'saves the properties for a service' do
|
||||
expect(saved_project_json['services'].first['properties']).to eq('one' => 'value')
|
||||
end
|
||||
|
||||
it 'has project feature' do
|
||||
project_feature = saved_project_json['project_feature']
|
||||
expect(project_feature).not_to be_empty
|
||||
expect(project_feature["issues_access_level"]).to eq(ProjectFeature::DISABLED)
|
||||
expect(project_feature["wiki_access_level"]).to eq(ProjectFeature::ENABLED)
|
||||
expect(project_feature["builds_access_level"]).to eq(ProjectFeature::PRIVATE)
|
||||
end
|
||||
|
||||
it 'has custom attributes' do
|
||||
expect(saved_project_json['custom_attributes'].count).to eq(2)
|
||||
end
|
||||
|
||||
it 'has badges' do
|
||||
expect(saved_project_json['project_badges'].count).to eq(2)
|
||||
end
|
||||
|
||||
it 'does not complain about non UTF-8 characters in MR diff files' do
|
||||
ActiveRecord::Base.connection.execute("UPDATE merge_request_diff_files SET diff = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")
|
||||
|
||||
expect(project_tree_saver.save).to be true
|
||||
end
|
||||
|
||||
context 'group members' do
|
||||
let(:user2) { create(:user, email: 'group@member.com') }
|
||||
let(:member_emails) do
|
||||
saved_project_json['project_members'].map do |pm|
|
||||
pm['user']['email']
|
||||
end
|
||||
end
|
||||
|
||||
before do
|
||||
Group.first.add_developer(user2)
|
||||
end
|
||||
|
||||
it 'does not export group members if it has no permission' do
|
||||
Group.first.add_developer(user)
|
||||
|
||||
expect(member_emails).not_to include('group@member.com')
|
||||
end
|
||||
|
||||
it 'does not export group members as maintainer' do
|
||||
Group.first.add_maintainer(user)
|
||||
|
||||
expect(member_emails).not_to include('group@member.com')
|
||||
end
|
||||
|
||||
it 'exports group members as group owner' do
|
||||
Group.first.add_owner(user)
|
||||
|
||||
expect(member_emails).to include('group@member.com')
|
||||
end
|
||||
|
||||
context 'as admin' do
|
||||
let(:user) { create(:admin) }
|
||||
|
||||
it 'exports group members as admin' do
|
||||
expect(member_emails).to include('group@member.com')
|
||||
end
|
||||
|
||||
it 'exports group members as project members' do
|
||||
member_types = saved_project_json['project_members'].map { |pm| pm['source_type'] }
|
||||
|
||||
expect(member_types).to all(eq('Project'))
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'project attributes' do
|
||||
it 'does not contain the runners token' do
|
||||
expect(saved_project_json).not_to include("runners_token" => 'token')
|
||||
end
|
||||
end
|
||||
|
||||
it 'has a board and a list' do
|
||||
expect(saved_project_json['boards'].first['lists']).not_to be_empty
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def setup_project
|
||||
release = create(:release)
|
||||
group = create(:group)
|
||||
|
||||
project = create(:project,
|
||||
:public,
|
||||
:repository,
|
||||
:issues_disabled,
|
||||
:wiki_enabled,
|
||||
:builds_private,
|
||||
description: 'description',
|
||||
releases: [release],
|
||||
group: group,
|
||||
approvals_before_merge: 1
|
||||
)
|
||||
allow(project).to receive(:commit).and_return(Commit.new(RepoHelpers.sample_commit, project))
|
||||
|
||||
issue = create(:issue, assignees: [user], project: project)
|
||||
snippet = create(:project_snippet, project: project)
|
||||
project_label = create(:label, project: project)
|
||||
group_label = create(:group_label, group: group)
|
||||
create(:label_link, label: project_label, target: issue)
|
||||
create(:label_link, label: group_label, target: issue)
|
||||
create(:label_priority, label: group_label, priority: 1)
|
||||
milestone = create(:milestone, project: project)
|
||||
merge_request = create(:merge_request, source_project: project, milestone: milestone)
|
||||
|
||||
ci_build = create(:ci_build, project: project, when: nil)
|
||||
ci_build.pipeline.update(project: project)
|
||||
create(:commit_status, project: project, pipeline: ci_build.pipeline)
|
||||
|
||||
create(:milestone, project: project)
|
||||
create(:discussion_note, noteable: issue, project: project)
|
||||
create(:note, noteable: merge_request, project: project)
|
||||
create(:note, noteable: snippet, project: project)
|
||||
create(:note_on_commit,
|
||||
author: user,
|
||||
project: project,
|
||||
commit_id: ci_build.pipeline.sha)
|
||||
|
||||
create(:resource_label_event, label: project_label, issue: issue)
|
||||
create(:resource_label_event, label: group_label, merge_request: merge_request)
|
||||
|
||||
create(:event, :created, target: milestone, project: project, author: user)
|
||||
create(:service, project: project, type: 'CustomIssueTrackerService', category: 'issue_tracker', properties: { one: 'value' })
|
||||
|
||||
create(:project_custom_attribute, project: project)
|
||||
create(:project_custom_attribute, project: project)
|
||||
|
||||
create(:project_badge, project: project)
|
||||
create(:project_badge, project: project)
|
||||
|
||||
board = create(:board, project: project, name: 'TestBoard')
|
||||
create(:list, board: board, position: 0, label: project_label)
|
||||
|
||||
project
|
||||
end
|
||||
|
||||
def project_json(filename)
|
||||
::JSON.parse(IO.read(filename))
|
||||
end
|
||||
end
|
||||
|
|
@ -283,6 +283,8 @@ MergeRequest::Metrics:
|
|||
- commits_count
|
||||
- first_approved_at
|
||||
- first_reassigned_at
|
||||
- added_lines
|
||||
- removed_lines
|
||||
Ci::Pipeline:
|
||||
- id
|
||||
- project_id
|
||||
|
|
|
|||
|
|
@ -0,0 +1,56 @@
|
|||
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::RepositoryUrlBuilder do
  describe '.build' do
    using RSpec::Parameterized::TableSyntax

    # Each row pairs a repository container factory with a lambda deriving
    # the expected repository path for that container type.
    where(:factory, :path_generator) do
      :project | ->(project) { project.full_path }
      :project_snippet | ->(snippet) { "#{snippet.project.full_path}/snippets/#{snippet.id}" }
      :project_wiki | ->(wiki) { "#{wiki.project.full_path}.wiki" }

      :personal_snippet | ->(snippet) { "snippets/#{snippet.id}" }
    end

    with_them do
      let(:container) { build_stubbed(factory) }
      let(:repository) { container.repository }
      let(:path) { path_generator.call(container) }
      # NOTE: `subject` here is the described module itself (RSpec does not
      # instantiate modules), so this calls Gitlab::RepositoryUrlBuilder.build.
      let(:url) { subject.build(repository.full_path, protocol: protocol) }

      context 'when passing SSH protocol' do
        let(:protocol) { :ssh }

        it 'returns the SSH URL to the repository' do
          expect(url).to eq("#{Gitlab.config.gitlab_shell.ssh_path_prefix}#{path}.git")
        end
      end

      context 'when passing HTTP protocol' do
        let(:protocol) { :http }

        it 'returns the HTTP URL to the repo without a username' do
          expect(url).to eq("#{Gitlab.config.gitlab.url}/#{path}.git")
          expect(url).not_to include('@')
        end

        it 'includes the custom HTTP clone root if set' do
          clone_root = 'https://git.example.com:51234/mygitlab'
          stub_application_setting(custom_http_clone_url_root: clone_root)

          expect(url).to eq("#{clone_root}/#{path}.git")
        end
      end

      context 'when passing an unsupported protocol' do
        let(:protocol) { :ftp }

        it 'raises an exception' do
          expect { url }.to raise_error(NotImplementedError)
        end
      end
    end
  end
end
|
||||
|
|
@ -10,14 +10,6 @@ describe Gitlab::Shell do
|
|||
|
||||
it { is_expected.to respond_to :remove_repository }
|
||||
|
||||
describe '.url_to_repo' do
|
||||
let(:full_path) { 'diaspora/disaspora-rails' }
|
||||
|
||||
subject { described_class.url_to_repo(full_path) }
|
||||
|
||||
it { is_expected.to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + full_path + '.git') }
|
||||
end
|
||||
|
||||
describe 'memoized secret_token' do
|
||||
let(:secret_file) { 'tmp/tests/.secret_shell_test' }
|
||||
let(:link_file) { 'tmp/tests/shell-secret-test/.gitlab_shell_secret' }
|
||||
|
|
|
|||
|
|
@ -3,6 +3,8 @@
|
|||
require 'spec_helper'
|
||||
|
||||
describe Member do
|
||||
include ExclusiveLeaseHelpers
|
||||
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
||||
describe "Associations" do
|
||||
|
|
@ -593,6 +595,9 @@ describe Member do
|
|||
end
|
||||
|
||||
context 'when after_commit :update_highest_role' do
|
||||
let!(:user) { create(:user) }
|
||||
let(:user_id) { user.id }
|
||||
|
||||
where(:member_type, :source_type) do
|
||||
:project_member | :project
|
||||
:group_member | :group
|
||||
|
|
@ -600,43 +605,34 @@ describe Member do
|
|||
|
||||
with_them do
|
||||
describe 'create member' do
|
||||
it 'initializes a new Members::UpdateHighestRoleService object' do
|
||||
source = create(source_type) # source owner initializes a new service object too
|
||||
user = create(:user)
|
||||
let!(:source) { create(source_type) }
|
||||
|
||||
expect(Members::UpdateHighestRoleService).to receive(:new).with(user.id).and_call_original
|
||||
subject { create(member_type, :guest, user: user, source_type => source) }
|
||||
|
||||
create(member_type, :guest, user: user, source_type => source)
|
||||
end
|
||||
include_examples 'update highest role with exclusive lease'
|
||||
end
|
||||
|
||||
context 'when member exists' do
|
||||
let!(:member) { create(member_type) }
|
||||
let!(:member) { create(member_type, user: user) }
|
||||
|
||||
describe 'update member' do
|
||||
context 'when access level was changed' do
|
||||
it 'initializes a new Members::UpdateHighestRoleService object' do
|
||||
expect(Members::UpdateHighestRoleService).to receive(:new).with(member.user_id).and_call_original
|
||||
subject { member.update(access_level: Gitlab::Access::GUEST) }
|
||||
|
||||
member.update(access_level: Gitlab::Access::GUEST)
|
||||
end
|
||||
include_examples 'update highest role with exclusive lease'
|
||||
end
|
||||
|
||||
context 'when access level was not changed' do
|
||||
it 'does not initialize a new Members::UpdateHighestRoleService object' do
|
||||
expect(Members::UpdateHighestRoleService).not_to receive(:new).with(member.user_id)
|
||||
subject { member.update(notification_level: NotificationSetting.levels[:disabled]) }
|
||||
|
||||
member.update(notification_level: NotificationSetting.levels[:disabled])
|
||||
end
|
||||
include_examples 'does not update the highest role'
|
||||
end
|
||||
end
|
||||
|
||||
describe 'destroy member' do
|
||||
it 'initializes a new Members::UpdateHighestRoleService object' do
|
||||
expect(Members::UpdateHighestRoleService).to receive(:new).with(member.user_id).and_call_original
|
||||
subject { member.destroy }
|
||||
|
||||
member.destroy
|
||||
end
|
||||
include_examples 'update highest role with exclusive lease'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -34,7 +34,7 @@ describe ProjectWiki do
|
|||
|
||||
describe "#url_to_repo" do
|
||||
it "returns the correct ssh url to the repo" do
|
||||
expect(subject.url_to_repo).to eq(Gitlab::Shell.url_to_repo(subject.full_path))
|
||||
expect(subject.url_to_repo).to eq(Gitlab::RepositoryUrlBuilder.build(subject.repository.full_path, protocol: :ssh))
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -45,27 +45,8 @@ describe ProjectWiki do
|
|||
end
|
||||
|
||||
describe "#http_url_to_repo" do
|
||||
let(:project) { create :project }
|
||||
|
||||
context 'when a custom HTTP clone URL root is not set' do
|
||||
it 'returns the full http url to the repo' do
|
||||
expected_url = "#{Gitlab.config.gitlab.url}/#{subject.full_path}.git"
|
||||
|
||||
expect(project_wiki.http_url_to_repo).to eq(expected_url)
|
||||
expect(project_wiki.http_url_to_repo).not_to include('@')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when a custom HTTP clone URL root is set' do
|
||||
before do
|
||||
stub_application_setting(custom_http_clone_url_root: 'https://git.example.com:51234')
|
||||
end
|
||||
|
||||
it 'returns the full http url to the repo, with the root replaced with the custom one' do
|
||||
expected_url = "https://git.example.com:51234/#{subject.full_path}.git"
|
||||
|
||||
expect(project_wiki.http_url_to_repo).to eq(expected_url)
|
||||
end
|
||||
it "returns the correct http url to the repo" do
|
||||
expect(subject.http_url_to_repo).to eq(Gitlab::RepositoryUrlBuilder.build(subject.repository.full_path, protocol: :http))
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -735,22 +735,6 @@ describe Snippet do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#url_to_repo' do
|
||||
subject { snippet.url_to_repo }
|
||||
|
||||
context 'with personal snippet' do
|
||||
let(:snippet) { create(:personal_snippet) }
|
||||
|
||||
it { is_expected.to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + "snippets/#{snippet.id}.git") }
|
||||
end
|
||||
|
||||
context 'with project snippet' do
|
||||
let(:snippet) { create(:project_snippet) }
|
||||
|
||||
it { is_expected.to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + "#{snippet.project.full_path}/snippets/#{snippet.id}.git") }
|
||||
end
|
||||
end
|
||||
|
||||
describe '#versioned_enabled_for?' do
|
||||
let_it_be(:user) { create(:user) }
|
||||
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ require 'spec_helper'
|
|||
describe User, :do_not_mock_admin_mode do
|
||||
include ProjectForksHelper
|
||||
include TermsHelper
|
||||
include ExclusiveLeaseHelpers
|
||||
|
||||
it_behaves_like 'having unique enum values'
|
||||
|
||||
|
|
@ -4535,17 +4536,22 @@ describe User, :do_not_mock_admin_mode do
|
|||
|
||||
context 'when after_commit :update_highest_role' do
|
||||
describe 'create user' do
|
||||
it 'initializes a new Members::UpdateHighestRoleService object' do
|
||||
expect_next_instance_of(Members::UpdateHighestRoleService) do |service|
|
||||
expect(service).to receive(:execute)
|
||||
subject { create(:user) }
|
||||
|
||||
it 'schedules a job in the future', :aggregate_failures, :clean_gitlab_redis_shared_state do
|
||||
allow_next_instance_of(Gitlab::ExclusiveLease) do |instance|
|
||||
allow(instance).to receive(:try_obtain).and_return('uuid')
|
||||
end
|
||||
|
||||
create(:user)
|
||||
expect(UpdateHighestRoleWorker).to receive(:perform_in).and_call_original
|
||||
|
||||
expect { subject }.to change(UpdateHighestRoleWorker.jobs, :size).by(1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user already exists' do
|
||||
let!(:user) { create(:user) }
|
||||
let(:user_id) { user.id }
|
||||
|
||||
describe 'update user' do
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
|
@ -4560,24 +4566,24 @@ describe User, :do_not_mock_admin_mode do
|
|||
|
||||
with_them do
|
||||
context 'when state was changed' do
|
||||
it 'initializes a new Members::UpdateHighestRoleService object' do
|
||||
expect_next_instance_of(Members::UpdateHighestRoleService) do |service|
|
||||
expect(service).to receive(:execute)
|
||||
end
|
||||
subject { user.update(attributes) }
|
||||
|
||||
user.update(attributes)
|
||||
end
|
||||
include_examples 'update highest role with exclusive lease'
|
||||
end
|
||||
end
|
||||
|
||||
context 'when state was not changed' do
|
||||
it 'does not initialize a new Members::UpdateHighestRoleService object' do
|
||||
expect(Members::UpdateHighestRoleService).not_to receive(:new)
|
||||
subject { user.update(email: 'newmail@example.com') }
|
||||
|
||||
user.update(email: 'newmail@example.com')
|
||||
end
|
||||
include_examples 'does not update the highest role'
|
||||
end
|
||||
end
|
||||
|
||||
describe 'destroy user' do
|
||||
subject { user.destroy }
|
||||
|
||||
include_examples 'does not update the highest role'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -5,9 +5,9 @@ require 'spec_helper'
|
|||
describe MergeRequestWidgetEntity do
|
||||
include ProjectForksHelper
|
||||
|
||||
let(:project) { create :project, :repository }
|
||||
let(:project) { create :project, :repository }
|
||||
let(:resource) { create(:merge_request, source_project: project, target_project: project) }
|
||||
let(:user) { create(:user) }
|
||||
let(:user) { create(:user) }
|
||||
|
||||
let(:request) { double('request', current_user: user, project: project) }
|
||||
|
||||
|
|
@ -54,15 +54,17 @@ describe MergeRequestWidgetEntity do
|
|||
end
|
||||
|
||||
describe 'merge_request_add_ci_config_path' do
|
||||
let!(:project_auto_devops) { create(:project_auto_devops, :disabled, project: project) }
|
||||
|
||||
before do
|
||||
project.add_role(user, role)
|
||||
end
|
||||
|
||||
context 'when there are pipelines' do
|
||||
context 'when there is a standard ci config file in the source project' do
|
||||
let(:role) { :developer }
|
||||
|
||||
before do
|
||||
create(:ci_empty_pipeline, project: project, sha: resource.all_commit_shas.first, ref: resource.source_branch)
|
||||
project.repository.create_file(user, Gitlab::FileDetector::PATTERNS[:gitlab_ci], 'CONTENT', message: 'Add .gitlab-ci.yml', branch_name: 'master')
|
||||
end
|
||||
|
||||
it 'no ci config path' do
|
||||
|
|
@ -70,7 +72,7 @@ describe MergeRequestWidgetEntity do
|
|||
end
|
||||
end
|
||||
|
||||
context 'when there are no pipelines' do
|
||||
context 'when there is no standard ci config file in the source project' do
|
||||
context 'when user has permissions' do
|
||||
let(:role) { :developer }
|
||||
|
||||
|
|
@ -80,6 +82,16 @@ describe MergeRequestWidgetEntity do
|
|||
expect(subject[:merge_request_add_ci_config_path]).to eq(expected_path)
|
||||
end
|
||||
|
||||
context 'when auto devops is enabled' do
|
||||
before do
|
||||
project_auto_devops.enabled = true
|
||||
end
|
||||
|
||||
it 'returns a blank ci config path' do
|
||||
expect(subject[:merge_request_add_ci_config_path]).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when source project is missing' do
|
||||
before do
|
||||
resource.source_project = nil
|
||||
|
|
|
|||
|
|
@ -1,44 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
require 'sidekiq/testing'
|
||||
|
||||
describe Members::UpdateHighestRoleService, :clean_gitlab_redis_shared_state do
|
||||
include ExclusiveLeaseHelpers
|
||||
|
||||
let_it_be(:user) { create(:user) }
|
||||
let_it_be(:lease_key) { "update_highest_role:#{user.id}" }
|
||||
let(:service) { described_class.new(user.id) }
|
||||
|
||||
describe '#perform' do
|
||||
subject { service.execute }
|
||||
|
||||
context 'when lease is obtained' do
|
||||
it 'takes the lease but does not release it', :aggregate_failures do
|
||||
expect_to_obtain_exclusive_lease(lease_key, 'uuid', timeout: described_class::LEASE_TIMEOUT)
|
||||
|
||||
subject
|
||||
|
||||
expect(service.exclusive_lease.exists?).to be_truthy
|
||||
end
|
||||
|
||||
it 'schedules a job in the future', :aggregate_failures do
|
||||
expect(UpdateHighestRoleWorker).to receive(:perform_in).with(described_class::DELAY, user.id).and_call_original
|
||||
|
||||
Sidekiq::Testing.fake! do
|
||||
expect { subject }.to change(UpdateHighestRoleWorker.jobs, :size).by(1)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when lease cannot be obtained' do
|
||||
it 'only schedules one job' do
|
||||
Sidekiq::Testing.fake! do
|
||||
stub_exclusive_lease_taken(lease_key, timeout: described_class::LEASE_TIMEOUT)
|
||||
|
||||
expect { subject }.not_to change(UpdateHighestRoleWorker.jobs, :size)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -20,7 +20,7 @@ describe Projects::Alerting::NotifyService do
|
|||
.exactly(amount).times
|
||||
|
||||
Sidekiq::Testing.inline! do
|
||||
expect(subject.status).to eq(:success)
|
||||
expect(subject).to be_success
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -36,7 +36,7 @@ describe Projects::Alerting::NotifyService do
|
|||
expect(notification_service)
|
||||
.to receive_message_chain(:async, :prometheus_alerts_fired)
|
||||
|
||||
expect(subject.status).to eq(:success)
|
||||
expect(subject).to be_success
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -45,7 +45,7 @@ describe Projects::Alerting::NotifyService do
|
|||
expect(IncidentManagement::ProcessAlertWorker)
|
||||
.not_to receive(:perform_async)
|
||||
|
||||
expect(subject.status).to eq(:success)
|
||||
expect(subject).to be_success
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -54,7 +54,7 @@ describe Projects::Alerting::NotifyService do
|
|||
expect(IncidentManagement::ProcessAlertWorker)
|
||||
.not_to receive(:perform_async)
|
||||
|
||||
expect(subject.status).to eq(:error)
|
||||
expect(subject).to be_error
|
||||
expect(subject.http_status).to eq(http_status)
|
||||
end
|
||||
end
|
||||
|
|
@ -102,7 +102,7 @@ describe Projects::Alerting::NotifyService do
|
|||
.and_raise(Gitlab::Alerting::NotificationPayloadParser::BadPayloadError)
|
||||
end
|
||||
|
||||
it_behaves_like 'does not process incident issues due to error', http_status: 400
|
||||
it_behaves_like 'does not process incident issues due to error', http_status: :bad_request
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -114,13 +114,13 @@ describe Projects::Alerting::NotifyService do
|
|||
end
|
||||
|
||||
context 'with invalid token' do
|
||||
it_behaves_like 'does not process incident issues due to error', http_status: 401
|
||||
it_behaves_like 'does not process incident issues due to error', http_status: :unauthorized
|
||||
end
|
||||
|
||||
context 'with deactivated Alerts Service' do
|
||||
let!(:alerts_service) { create(:alerts_service, :inactive, project: project) }
|
||||
|
||||
it_behaves_like 'does not process incident issues due to error', http_status: 403
|
||||
it_behaves_like 'does not process incident issues due to error', http_status: :forbidden
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -26,28 +26,10 @@ describe Projects::ImportExport::ExportService do
|
|||
service.execute
|
||||
end
|
||||
|
||||
context 'when :streaming_serializer feature is enabled' do
|
||||
before do
|
||||
stub_feature_flags(streaming_serializer: true)
|
||||
end
|
||||
it 'saves the models' do
|
||||
expect(Gitlab::ImportExport::Project::TreeSaver).to receive(:new).and_call_original
|
||||
|
||||
it 'saves the models' do
|
||||
expect(Gitlab::ImportExport::Project::TreeSaver).to receive(:new).and_call_original
|
||||
|
||||
service.execute
|
||||
end
|
||||
end
|
||||
|
||||
context 'when :streaming_serializer feature is disabled' do
|
||||
before do
|
||||
stub_feature_flags(streaming_serializer: false)
|
||||
end
|
||||
|
||||
it 'saves the models' do
|
||||
expect(Gitlab::ImportExport::Project::LegacyTreeSaver).to receive(:new).and_call_original
|
||||
|
||||
service.execute
|
||||
end
|
||||
service.execute
|
||||
end
|
||||
|
||||
it 'saves the uploads' do
|
||||
|
|
|
|||
|
|
@ -30,7 +30,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
|
|||
expect(notification_service)
|
||||
.to receive_message_chain(:async, :prometheus_alerts_fired)
|
||||
|
||||
expect(subject).to eq(true)
|
||||
expect(subject).to be_success
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -44,7 +44,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
|
|||
.exactly(amount).times
|
||||
|
||||
Sidekiq::Testing.inline! do
|
||||
expect(subject).to eq(true)
|
||||
expect(subject).to be_success
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -54,7 +54,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
|
|||
expect(IncidentManagement::ProcessPrometheusAlertWorker)
|
||||
.not_to receive(:perform_async)
|
||||
|
||||
expect(subject).to eq(true)
|
||||
expect(subject).to be_success
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -69,7 +69,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
|
|||
expect(create_events_service)
|
||||
.to receive(:execute)
|
||||
|
||||
expect(subject).to eq(true)
|
||||
expect(subject).to be_success
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -78,7 +78,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
|
|||
it_behaves_like 'persists events'
|
||||
end
|
||||
|
||||
shared_examples 'no notifications' do
|
||||
shared_examples 'no notifications' do |http_status:|
|
||||
let(:notification_service) { spy }
|
||||
let(:create_events_service) { spy }
|
||||
|
||||
|
|
@ -86,7 +86,8 @@ describe Projects::Prometheus::Alerts::NotifyService do
|
|||
expect(notification_service).not_to receive(:async)
|
||||
expect(create_events_service).not_to receive(:execute)
|
||||
|
||||
expect(subject).to eq(false)
|
||||
expect(subject).to be_error
|
||||
expect(subject.http_status).to eq(http_status)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -130,7 +131,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
|
|||
when :success
|
||||
it_behaves_like 'notifies alerts'
|
||||
when :failure
|
||||
it_behaves_like 'no notifications'
|
||||
it_behaves_like 'no notifications', http_status: :unauthorized
|
||||
else
|
||||
raise "invalid result: #{result.inspect}"
|
||||
end
|
||||
|
|
@ -140,7 +141,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
|
|||
context 'without project specific cluster' do
|
||||
let!(:cluster) { create(:cluster, enabled: true) }
|
||||
|
||||
it_behaves_like 'no notifications'
|
||||
it_behaves_like 'no notifications', http_status: :unauthorized
|
||||
end
|
||||
|
||||
context 'with manual prometheus installation' do
|
||||
|
|
@ -171,7 +172,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
|
|||
when :success
|
||||
it_behaves_like 'notifies alerts'
|
||||
when :failure
|
||||
it_behaves_like 'no notifications'
|
||||
it_behaves_like 'no notifications', http_status: :unauthorized
|
||||
else
|
||||
raise "invalid result: #{result.inspect}"
|
||||
end
|
||||
|
|
@ -193,7 +194,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
|
|||
expect_any_instance_of(NotificationService)
|
||||
.not_to receive(:async)
|
||||
|
||||
expect(subject).to eq(true)
|
||||
expect(subject).to be_success
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -211,7 +212,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
|
|||
it 'does not send notification' do
|
||||
expect(NotificationService).not_to receive(:new)
|
||||
|
||||
expect(subject).to eq(true)
|
||||
expect(subject).to be_success
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -260,19 +261,19 @@ describe Projects::Prometheus::Alerts::NotifyService do
|
|||
context 'without version' do
|
||||
let(:payload) { {} }
|
||||
|
||||
it_behaves_like 'no notifications'
|
||||
it_behaves_like 'no notifications', http_status: :unprocessable_entity
|
||||
end
|
||||
|
||||
context 'when version is not "4"' do
|
||||
let(:payload) { { 'version' => '5' } }
|
||||
|
||||
it_behaves_like 'no notifications'
|
||||
it_behaves_like 'no notifications', http_status: :unprocessable_entity
|
||||
end
|
||||
|
||||
context 'with missing alerts' do
|
||||
let(:payload) { { 'version' => '4' } }
|
||||
|
||||
it_behaves_like 'no notifications'
|
||||
it_behaves_like 'no notifications', http_status: :unauthorized
|
||||
end
|
||||
|
||||
context 'when the payload is too big' do
|
||||
|
|
@ -283,7 +284,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
|
|||
allow(Gitlab::Utils::DeepSize).to receive(:new).and_return(deep_size_object)
|
||||
end
|
||||
|
||||
it_behaves_like 'no notifications'
|
||||
it_behaves_like 'no notifications', http_status: :bad_request
|
||||
|
||||
it 'does not process issues' do
|
||||
expect(IncidentManagement::ProcessPrometheusAlertWorker)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,52 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# requires a subject and a user_id
|
||||
RSpec.shared_examples 'update highest role with exclusive lease' do
|
||||
include ExclusiveLeaseHelpers
|
||||
|
||||
let(:lease_key) { "update_highest_role:#{user_id}" }
|
||||
|
||||
before do
|
||||
allow(Gitlab::ExclusiveLease).to receive(:new).and_call_original
|
||||
end
|
||||
|
||||
context 'when lease is obtained', :clean_gitlab_redis_shared_state do
|
||||
it 'takes the lease but does not release it', :aggregate_failures do
|
||||
expect_to_obtain_exclusive_lease(lease_key, 'uuid', timeout: described_class::HIGHEST_ROLE_LEASE_TIMEOUT)
|
||||
expect(Gitlab::ExclusiveLease).not_to receive(:cancel).with(lease_key, 'uuid')
|
||||
|
||||
subject
|
||||
end
|
||||
|
||||
it 'schedules a job in the future', :aggregate_failures do
|
||||
allow_next_instance_of(Gitlab::ExclusiveLease) do |instance|
|
||||
allow(instance).to receive(:try_obtain).and_return('uuid')
|
||||
end
|
||||
|
||||
expect(UpdateHighestRoleWorker).to receive(:perform_in).with(described_class::HIGHEST_ROLE_JOB_DELAY, user_id).and_call_original
|
||||
|
||||
expect { subject }.to change(UpdateHighestRoleWorker.jobs, :size).by(1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when lease cannot be obtained', :clean_gitlab_redis_shared_state do
|
||||
it 'only schedules one job' do
|
||||
stub_exclusive_lease_taken(lease_key, timeout: described_class::HIGHEST_ROLE_LEASE_TIMEOUT)
|
||||
|
||||
expect { subject }.not_to change(UpdateHighestRoleWorker.jobs, :size)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# requires a subject and a user_id
|
||||
RSpec.shared_examples 'does not update the highest role' do
|
||||
it 'does not obtain an exclusive lease' do
|
||||
allow(Gitlab::ExclusiveLease).to receive(:new).and_call_original
|
||||
|
||||
lease = stub_exclusive_lease("update_highest_role:#{user_id}", 'uuid', timeout: described_class::HIGHEST_ROLE_LEASE_TIMEOUT)
|
||||
|
||||
expect(lease).not_to receive(:try_obtain)
|
||||
|
||||
subject
|
||||
end
|
||||
end
|
||||
|
|
@ -9,54 +9,36 @@ RSpec.shared_examples 'versioned description' do
|
|||
let(:factory_name) { described_class.name.underscore.to_sym }
|
||||
let!(:model) { create(factory_name, description: 'Original description') }
|
||||
|
||||
context 'when feature is enabled' do
|
||||
context 'when description was changed' do
|
||||
before do
|
||||
stub_feature_flags(save_description_versions: true)
|
||||
model.update!(description: 'New description')
|
||||
end
|
||||
|
||||
context 'when description was changed' do
|
||||
before do
|
||||
model.update!(description: 'New description')
|
||||
end
|
||||
|
||||
it 'saves the old and new description for the first update' do
|
||||
expect(model.description_versions.first.description).to eq('Original description')
|
||||
expect(model.description_versions.last.description).to eq('New description')
|
||||
end
|
||||
|
||||
it 'only saves the new description for subsequent updates' do
|
||||
expect { model.update!(description: 'Another description') }.to change { model.description_versions.count }.by(1)
|
||||
|
||||
expect(model.description_versions.last.description).to eq('Another description')
|
||||
end
|
||||
|
||||
it 'sets the new description version to `saved_description_version`' do
|
||||
expect(model.saved_description_version).to eq(model.description_versions.last)
|
||||
end
|
||||
|
||||
it 'clears `saved_description_version` after another save that does not change description' do
|
||||
model.save!
|
||||
|
||||
expect(model.saved_description_version).to be_nil
|
||||
end
|
||||
it 'saves the old and new description for the first update' do
|
||||
expect(model.description_versions.first.description).to eq('Original description')
|
||||
expect(model.description_versions.last.description).to eq('New description')
|
||||
end
|
||||
|
||||
context 'when description was not changed' do
|
||||
it 'does not save any description version' do
|
||||
expect { model.save! }.not_to change { model.description_versions.count }
|
||||
it 'only saves the new description for subsequent updates' do
|
||||
expect { model.update!(description: 'Another description') }.to change { model.description_versions.count }.by(1)
|
||||
|
||||
expect(model.saved_description_version).to be_nil
|
||||
end
|
||||
expect(model.description_versions.last.description).to eq('Another description')
|
||||
end
|
||||
|
||||
it 'sets the new description version to `saved_description_version`' do
|
||||
expect(model.saved_description_version).to eq(model.description_versions.last)
|
||||
end
|
||||
|
||||
it 'clears `saved_description_version` after another save that does not change description' do
|
||||
model.save!
|
||||
|
||||
expect(model.saved_description_version).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when feature is disabled' do
|
||||
before do
|
||||
stub_feature_flags(save_description_versions: false)
|
||||
end
|
||||
|
||||
context 'when description was not changed' do
|
||||
it 'does not save any description version' do
|
||||
expect { model.update!(description: 'New description') }.not_to change { model.description_versions.count }
|
||||
expect { model.save! }.not_to change { model.description_versions.count }
|
||||
|
||||
expect(model.saved_description_version).to be_nil
|
||||
end
|
||||
|
|
|
|||
Loading…
Reference in New Issue