Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
7d8cc770b1
commit
f0224e32ca
@@ -62,6 +62,7 @@ export default {
},
showTip() {
return (
this.showLinksToggle &&
this.showLinks &&
this.showLinksActive &&
!this.tipPreviouslyDismissed &&
@@ -662,6 +662,8 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
validates :inactive_projects_send_warning_email_after_months,
numericality: { only_integer: true, greater_than: 0, less_than: :inactive_projects_delete_after_months }

validates :database_apdex_settings, json_schema: { filename: 'application_setting_database_apdex_settings' }, allow_nil: true

attr_encrypted :asset_proxy_secret_key,
mode: :per_attribute_iv,
key: Settings.attr_encrypted_db_key_base_truncated,
@@ -325,6 +325,12 @@ module Ci
end
end

after_transition running: ::Ci::Pipeline.completed_statuses + [:manual] do |pipeline|
pipeline.run_after_commit do
::Ci::UnlockRefArtifactsOnPipelineStopWorker.perform_async(pipeline.id)
end
end

after_transition any => [:success, :failed] do |pipeline|
ref_status = pipeline.ci_ref&.update_status_by!(pipeline)
@@ -30,15 +30,6 @@ module Ci
state :fixed, value: 3
state :broken, value: 4
state :still_failing, value: 5

after_transition any => [:fixed, :success] do |ci_ref|
# Do not try to unlock if no artifacts are locked
next unless ci_ref.artifacts_locked?

ci_ref.run_after_commit do
Ci::PipelineSuccessUnlockArtifactsWorker.perform_async(ci_ref.last_finished_pipeline_id)
end
end
end

class << self

@@ -55,6 +46,10 @@ module Ci
Ci::Pipeline.last_finished_for_ref_id(self.id)&.id
end

def last_successful_pipeline
pipelines.ci_sources.success.order(id: :desc).take
end

def artifacts_locked?
self.pipelines.where(locked: :artifacts_locked).exists?
end
@@ -39,14 +39,18 @@ module Ci

return success if sha256_matches_existing_artifact?(params[:artifact_type], artifacts_file)

artifact, artifact_metadata = build_artifact(artifacts_file, params, metadata_file)
result = parse_artifact(artifact)
build_result = build_artifact(artifacts_file, params, metadata_file)
return build_result unless build_result[:status] == :success

artifact = build_result[:artifact]
artifact_metadata = build_result[:artifact_metadata]

track_artifact_uploader(artifact)

return result unless result[:status] == :success
parse_result = parse_artifact(artifact)
return parse_result unless parse_result[:status] == :success

persist_artifact(artifact, artifact_metadata, params)
persist_artifact(artifact, artifact_metadata)
end

private
@@ -76,40 +80,44 @@ module Ci
end

def build_artifact(artifacts_file, params, metadata_file)
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in

artifact_attributes = {
job: job,
project: project,
expire_in: expire_in
expire_in: expire_in(params),
accessibility: accessibility(params),
locked: pipeline.locked
}

artifact_attributes[:locked] = pipeline.locked
file_attributes = {
file_type: params[:artifact_type],
file_format: params[:artifact_format],
file_sha256: artifacts_file.sha256,
file: artifacts_file
}

artifact = Ci::JobArtifact.new(
artifact_attributes.merge(
file: artifacts_file,
file_type: params[:artifact_type],
file_format: params[:artifact_format],
file_sha256: artifacts_file.sha256,
accessibility: accessibility(params)
)
artifact = Ci::JobArtifact.new(artifact_attributes.merge(file_attributes))

artifact_metadata = build_metadata_artifact(artifact, metadata_file) if metadata_file

success(artifact: artifact, artifact_metadata: artifact_metadata)
end

def build_metadata_artifact(job_artifact, metadata_file)
Ci::JobArtifact.new(
job: job_artifact.job,
project: job_artifact.project,
expire_at: job_artifact.expire_at,
locked: job_artifact.locked,
file: metadata_file,
file_type: :metadata,
file_format: :gzip,
file_sha256: metadata_file.sha256,
accessibility: job_artifact.accessibility
)
end

artifact_metadata = if metadata_file
Ci::JobArtifact.new(
artifact_attributes.merge(
file: metadata_file,
file_type: :metadata,
file_format: :gzip,
file_sha256: metadata_file.sha256,
accessibility: accessibility(params)
)
)
end

[artifact, artifact_metadata]
def expire_in(params)
params['expire_in'] || Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
end

def accessibility(params)
@@ -129,8 +137,8 @@ module Ci
end
end

def persist_artifact(artifact, artifact_metadata, params)
Ci::JobArtifact.transaction do
def persist_artifact(artifact, artifact_metadata)
job.transaction do
# NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
# Running it first because in migrations we lock the `ci_builds` table
# first and then the others. This reduces the chances of deadlocks.
@@ -142,13 +150,13 @@ module Ci

success(artifact: artifact)
rescue ActiveRecord::RecordNotUnique => error
track_exception(error, params)
track_exception(error, artifact.file_type)
error('another artifact of the same type already exists', :bad_request)
rescue *OBJECT_STORAGE_ERRORS => error
track_exception(error, params)
track_exception(error, artifact.file_type)
error(error.message, :service_unavailable)
rescue StandardError => error
track_exception(error, params)
track_exception(error, artifact.file_type)
error(error.message, :bad_request)
end
@@ -159,11 +167,12 @@ module Ci
existing_artifact.file_sha256 == artifacts_file.sha256
end

def track_exception(error, params)
Gitlab::ErrorTracking.track_exception(error,
def track_exception(error, artifact_type)
Gitlab::ErrorTracking.track_exception(
error,
job_id: job.id,
project_id: job.project_id,
uploading_type: params[:artifact_type]
uploading_type: artifact_type
)
end
@@ -4,6 +4,11 @@ module Ci
class UnlockArtifactsService < ::BaseService
BATCH_SIZE = 100

# This service performs either one of the following,
# depending on whether `before_pipeline` is given.
# 1. Without `before_pipeline`, it unlocks all the pipelines belonging to the given `ci_ref`
# 2. With `before_pipeline`, it unlocks all the pipelines in the `ci_ref` that were created
# before the given `before_pipeline`, with the exception of the last successful pipeline.
def execute(ci_ref, before_pipeline = nil)
results = {
unlocked_pipelines: 0,
@@ -51,15 +56,15 @@ module Ci
def unlock_pipelines_query(ci_ref, before_pipeline)
ci_pipelines = ::Ci::Pipeline.arel_table

pipelines_scope = ci_ref.pipelines.artifacts_locked
pipelines_scope = pipelines_scope.before_pipeline(before_pipeline) if before_pipeline
pipelines_scope = pipelines_scope.select(:id).limit(BATCH_SIZE).lock('FOR UPDATE SKIP LOCKED')
pipelines_to_unlock = ci_ref.pipelines.artifacts_locked
pipelines_to_unlock = exclude_last_successful_pipeline(pipelines_to_unlock, ci_ref, before_pipeline)
pipelines_to_unlock = pipelines_to_unlock.select(:id).limit(BATCH_SIZE).lock('FOR UPDATE SKIP LOCKED')

returning = Arel::Nodes::Grouping.new(ci_pipelines[:id])

Arel::UpdateManager.new
.table(ci_pipelines)
.where(ci_pipelines[:id].in(Arel.sql(pipelines_scope.to_sql)))
.where(ci_pipelines[:id].in(Arel.sql(pipelines_to_unlock.to_sql)))
.set([[ci_pipelines[:locked], ::Ci::Pipeline.lockeds[:unlocked]]])
.to_sql + " RETURNING #{returning.to_sql}"
end
@@ -67,6 +72,22 @@ module Ci

private

# rubocop:disable CodeReuse/ActiveRecord
def exclude_last_successful_pipeline(pipelines_to_unlock, ci_ref, before_pipeline)
return pipelines_to_unlock if before_pipeline.nil?

pipelines_to_unlock = pipelines_to_unlock.before_pipeline(before_pipeline)

last_successful_pipeline = ci_ref.last_successful_pipeline

if last_successful_pipeline.present?
pipelines_to_unlock = pipelines_to_unlock.outside_pipeline_family(last_successful_pipeline)
end

pipelines_to_unlock
end
# rubocop:enable CodeReuse/ActiveRecord

def unlock_job_artifacts(pipelines)
return if pipelines.empty?
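For orientation, a minimal sketch of how this service is invoked. The call shape mirrors the worker added later in this commit; the counter keys shown are assumptions taken from the `results` hash and the worker's log lines, not a verbatim excerpt.

```ruby
# Illustrative only.
service = Ci::UnlockArtifactsService.new(pipeline.project, pipeline.user)

# 1. Without `before_pipeline`: unlock every locked pipeline on the ref.
service.execute(pipeline.ci_ref)

# 2. With `before_pipeline`: unlock pipelines created before the given one,
#    keeping the last successful pipeline (and its family) locked.
results = service.execute(pipeline.ci_ref, pipeline)
results[:unlocked_pipelines]     # number of pipelines unlocked in this run
results[:unlocked_job_artifacts] # number of job artifacts unlocked in this run
```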
@@ -0,0 +1,34 @@
{
  "description": "Database Apdex Settings",
  "type": "object",
  "properties": {
    "prometheus_api_url": {
      "type": "string"
    },
    "apdex_sli_query": {
      "type": "object",
      "properties": {
        "main": {
          "type": "string"
        },
        "ci": {
          "type": "string"
        }
      }
    },
    "apdex_slo": {
      "type": "object",
      "properties": {
        "main": {
          "type": "number",
          "format": "float"
        },
        "ci": {
          "type": "number",
          "format": "float"
        }
      }
    }
  },
  "additionalProperties": false
}
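For reference, a value this schema accepts looks like the hash used in the ApplicationSetting model spec later in this commit. A sketch of setting it from a Rails console (all values illustrative):

```ruby
# Store Patroni apdex settings on the instance-level application settings.
ApplicationSetting.current.update!(
  database_apdex_settings: {
    prometheus_api_url: 'http://thanos:9090',
    apdex_sli_query: { main: 'apdex query for main', ci: 'apdex query for ci' },
    apdex_slo: { main: 0.99, ci: 0.95 }
  }
)
```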
@@ -1866,6 +1866,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: pipeline_background:ci_unlock_ref_artifacts_on_pipeline_stop
  :worker_name: Ci::UnlockRefArtifactsOnPipelineStopWorker
  :feature_category: :continuous_integration
  :has_external_dependencies: false
  :urgency: :low
  :resource_boundary: :unknown
  :weight: 1
  :idempotent: true
  :tags: []
- :name: pipeline_creation:ci_external_pull_requests_create_pipeline
  :worker_name: Ci::ExternalPullRequests::CreatePipelineWorker
  :feature_category: :continuous_integration
@@ -1,6 +1,11 @@
# frozen_string_literal: true

module Ci
# TODO: Clean up this worker in a subsequent release.
# The process to unlock job artifacts has been moved to
# be triggered by the pipeline state transitions and
# to use UnlockRefArtifactsOnPipelineStopWorker.
# https://gitlab.com/gitlab-org/gitlab/-/issues/397491
class PipelineSuccessUnlockArtifactsWorker
include ApplicationWorker

@@ -0,0 +1,33 @@
# frozen_string_literal: true

module Ci
  # This worker is triggered when the pipeline state
  # changes into one of the stopped statuses
  # `Ci::Pipeline.stopped_statuses`.
  # It unlocks the previous pipelines on the same ref
  # as the pipeline that has just completed
  # using `Ci::UnlockArtifactsService`.
  class UnlockRefArtifactsOnPipelineStopWorker
    include ApplicationWorker

    data_consistency :always

    include PipelineBackgroundQueue

    idempotent!

    def perform(pipeline_id)
      pipeline = ::Ci::Pipeline.find_by_id(pipeline_id)

      return if pipeline.nil?
      return if pipeline.ci_ref.nil?

      results = ::Ci::UnlockArtifactsService
        .new(pipeline.project, pipeline.user)
        .execute(pipeline.ci_ref, pipeline)

      log_extra_metadata_on_done(:unlocked_pipelines, results[:unlocked_pipelines])
      log_extra_metadata_on_done(:unlocked_job_artifacts, results[:unlocked_job_artifacts])
    end
  end
end
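A rough sketch of the end-to-end flow introduced here, simplified and not a literal excerpt: the pipeline state machine enqueues the worker, and the worker delegates to the service.

```ruby
# Enqueued from the Ci::Pipeline `after_transition running: ...` hook above.
Ci::UnlockRefArtifactsOnPipelineStopWorker.perform_async(pipeline.id)

# Running the worker inline (e.g. from a console) exercises the same path:
Ci::UnlockRefArtifactsOnPipelineStopWorker.new.perform(pipeline.id)
```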
@@ -0,0 +1,8 @@
---
name: batched_migrations_health_status_patroni_apdex
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97087
rollout_issue_url:
milestone: '15.10'
type: ops
group: group::database
default_enabled: false
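The indicator added below is gated behind this `ops` flag, so it stays inert until the flag is turned on. A sketch of toggling it from a Rails console (the actual rollout procedure may differ):

```ruby
# Enable the flag so the Patroni apdex indicator starts evaluating.
Feature.enable(:batched_migrations_health_status_patroni_apdex)

# Disable it to fall back to the existing indicators only.
Feature.disable(:batched_migrations_health_status_patroni_apdex)
```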
@@ -0,0 +1,7 @@
# frozen_string_literal: true

class AddDatabaseApdexSettingsToApplicationSettings < Gitlab::Database::Migration[2.1]
  def change
    add_column :application_settings, :database_apdex_settings, :jsonb
  end
end
@@ -0,0 +1 @@
7599c99edcb87dad69c8721de9179f32f99236fad928a07734f3c66f174b8546
@@ -11744,6 +11744,7 @@ CREATE TABLE application_settings (
lock_memberships_to_saml boolean DEFAULT false NOT NULL,
gitlab_dedicated_instance boolean DEFAULT false NOT NULL,
update_runner_versions_enabled boolean DEFAULT true NOT NULL,
database_apdex_settings jsonb,
CONSTRAINT app_settings_container_reg_cleanup_tags_max_list_size_positive CHECK ((container_registry_cleanup_tags_service_max_list_size >= 0)),
CONSTRAINT app_settings_container_registry_pre_import_tags_rate_positive CHECK ((container_registry_pre_import_tags_rate >= (0)::numeric)),
CONSTRAINT app_settings_dep_proxy_ttl_policies_worker_capacity_positive CHECK ((dependency_proxy_ttl_group_policy_worker_capacity >= 0)),
@@ -220,7 +220,7 @@ module Gitlab
end

def health_context
HealthStatus::Context.new(connection, [table_name])
HealthStatus::Context.new(connection, [table_name], gitlab_schema.to_sym)
end

def hold!(until_time: 10.minutes.from_now)
@@ -6,11 +6,12 @@ module Gitlab
module HealthStatus
DEFAULT_INIDICATORS = [
Indicators::AutovacuumActiveOnTable,
Indicators::WriteAheadLog
Indicators::WriteAheadLog,
Indicators::PatroniApdex
].freeze

# Rather than passing along the migration, we use a more explicitly defined context
Context = Struct.new(:connection, :tables)
Context = Struct.new(:connection, :tables, :gitlab_schema)

def self.evaluate(migration, indicators = DEFAULT_INIDICATORS)
indicators.map do |indicator|
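A small sketch of what the widened `Context` means for callers: the struct now also carries the GitLab schema, which the new indicator uses to pick the per-database query and SLO. The shape below follows the indicator spec added later in this commit.

```ruby
# Build a context for a table on the main database and evaluate the indicator.
context = Gitlab::Database::BackgroundMigration::HealthStatus::Context
  .new(connection, ['users'], :gitlab_main)

Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::PatroniApdex
  .new(context)
  .evaluate # => Signals::Normal, Signals::Stop, Signals::Unknown or Signals::NotAvailable
```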
@@ -0,0 +1,90 @@
# frozen_string_literal: true

module Gitlab
  module Database
    module BackgroundMigration
      module HealthStatus
        module Indicators
          class PatroniApdex
            include Gitlab::Utils::StrongMemoize

            def initialize(context)
              @context = context
            end

            def evaluate
              return Signals::NotAvailable.new(self.class, reason: 'indicator disabled') unless enabled?

              connection_error_message = fetch_connection_error_message
              return unknown_signal(connection_error_message) if connection_error_message.present?

              apdex_sli = fetch_sli(apdex_sli_query)
              return unknown_signal('Patroni service apdex can not be calculated') unless apdex_sli.present?

              if apdex_sli.to_f > apdex_slo.to_f
                Signals::Normal.new(self.class, reason: 'Patroni service apdex is above SLO')
              else
                Signals::Stop.new(self.class, reason: 'Patroni service apdex is below SLO')
              end
            end

            private

            attr_reader :context

            def enabled?
              Feature.enabled?(:batched_migrations_health_status_patroni_apdex, type: :ops)
            end

            def unknown_signal(reason)
              Signals::Unknown.new(self.class, reason: reason)
            end

            def fetch_connection_error_message
              return 'Patroni Apdex Settings not configured' unless database_apdex_settings.present?
              return 'Prometheus client is not ready' unless client.ready?
              return 'Apdex SLI query is not configured' unless apdex_sli_query
              return 'Apdex SLO is not configured' unless apdex_slo
            end

            def client
              @client ||= Gitlab::PrometheusClient.new(
                database_apdex_settings[:prometheus_api_url],
                allow_local_requests: true,
                verify: true
              )
            end

            def database_apdex_settings
              @database_apdex_settings ||= Gitlab::CurrentSettings.database_apdex_settings&.with_indifferent_access
            end

            def apdex_sli_query
              {
                gitlab_main: database_apdex_settings[:apdex_sli_query][:main],
                gitlab_ci: database_apdex_settings[:apdex_sli_query][:ci]
              }.fetch(context.gitlab_schema.to_sym)
            end
            strong_memoize_attr :apdex_sli_query

            def apdex_slo
              {
                gitlab_main: database_apdex_settings[:apdex_slo][:main],
                gitlab_ci: database_apdex_settings[:apdex_slo][:ci]
              }.fetch(context.gitlab_schema.to_sym)
            end
            strong_memoize_attr :apdex_slo

            def fetch_sli(query)
              response = client.query(query)
              metric = response&.first || {}
              value = metric.fetch('value', [])

              Array.wrap(value).second
            end
          end
        end
      end
    end
  end
end
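For clarity, `fetch_sli` expects the Prometheus instant-query shape used in the spec fixtures below, where each metric carries a `[timestamp, value]` pair and the second element is the SLI. A hedged sketch of that extraction:

```ruby
# Example response shape, mirroring the spec fixtures (values illustrative).
response = [{ 'value' => [1662423310.878, 0.993] }]

metric = response&.first || {}
value = metric.fetch('value', [])
Array.wrap(value).second # => 0.993, compared against the configured SLO
```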
@@ -0,0 +1,90 @@
# frozen_string_literal: true

require "spec_helper"

RSpec.describe "E-Mails > Issues", :js, feature_category: :team_planning do
let_it_be(:project) { create(:project_empty_repo, :public, name: 'Long Earth') }
let_it_be(:assignee) { create(:user, username: 'assignee', name: 'Joshua Valienté') }
let_it_be(:author) { create(:user, username: 'author', name: 'Sally Linsay') }
let_it_be(:current_user) { create(:user, username: 'current_user', name: 'Shi-mi') }
let_it_be(:issue_with_assignee) do
create(
:issue, project: project, author: author, assignees: [assignee],
title: 'All your base are belong to us')
end

let_it_be(:issue_without_assignee) { create(:issue, project: project, author: author, title: 'No milk today!') }

before do
project.add_developer(current_user)
sign_in(current_user)
end

it 'sends confirmation e-mail for assigning' do
synchronous_notifications
expect(Notify).to receive(:reassigned_issue_email)
.with(author.id, issue_without_assignee.id, [], current_user.id, nil)
.once
.and_call_original
expect(Notify).to receive(:reassigned_issue_email)
.with(assignee.id, issue_without_assignee.id, [], current_user.id, NotificationReason::ASSIGNED)
.once
.and_call_original

visit issue_path(issue_without_assignee)
assign_to(assignee)

expect(find('#notes-list')).to have_text("Shi-mi assigned to @assignee just now")
end

it 'sends confirmation e-mail for reassigning' do
synchronous_notifications
expect(Notify).to receive(:reassigned_issue_email)
.with(author.id, issue_with_assignee.id, [assignee.id], current_user.id, NotificationReason::ASSIGNED)
.once
.and_call_original
expect(Notify).to receive(:reassigned_issue_email)
.with(assignee.id, issue_with_assignee.id, [assignee.id], current_user.id, nil)
.once
.and_call_original

visit issue_path(issue_with_assignee)
assign_to(author)

expect(find('#notes-list')).to have_text("Shi-mi assigned to @author and unassigned @assignee just now")
end

it 'sends confirmation e-mail for unassigning' do
synchronous_notifications
expect(Notify).to receive(:reassigned_issue_email)
.with(author.id, issue_with_assignee.id, [assignee.id], current_user.id, nil)
.once
.and_call_original
expect(Notify).to receive(:reassigned_issue_email)
.with(assignee.id, issue_with_assignee.id, [assignee.id], current_user.id, nil)
.once
.and_call_original

visit issue_path(issue_with_assignee)
quick_action('/unassign')

expect(find('#notes-list')).to have_text("Shi-mi unassigned @assignee just now")
end

private

def assign_to(user)
quick_action("/assign @#{user.username}")
end

def quick_action(command)
fill_in 'note[note]', with: command
click_button 'Comment'
end

def synchronous_notifications
expect_next_instance_of(NotificationService) do |service|
expect(service).to receive(:async).and_return(service)
end
end
end
@@ -144,6 +144,7 @@ describe('the graph view selector component', () => {
createComponent({
props: {
showLinks: true,
type: LAYER_VIEW,
},
data: {
showLinksActive: true,

@@ -162,6 +163,18 @@ describe('the graph view selector component', () => {
await findHoverTip().find('button').trigger('click');
expect(wrapper.emitted().dismissHoverTip).toHaveLength(1);
});

it('is displayed at first then hidden on swith to STAGE_VIEW then displayed on switch to LAYER_VIEW', async () => {
expect(findHoverTip().exists()).toBe(true);
expect(findHoverTip().text()).toBe(wrapper.vm.$options.i18n.hoverTipText);

await findStageViewButton().trigger('click');
expect(findHoverTip().exists()).toBe(false);

await findLayerViewButton().trigger('click');
expect(findHoverTip().exists()).toBe(true);
expect(findHoverTip().text()).toBe(wrapper.vm.$options.i18n.hoverTipText);
});
});

describe('when links are live and it has been previously dismissed', () => {
@@ -170,6 +183,7 @@ describe('the graph view selector component', () => {
props: {
showLinks: true,
tipPreviouslyDismissed: true,
type: LAYER_VIEW,
},
data: {
showLinksActive: true,
@@ -187,6 +201,7 @@ describe('the graph view selector component', () => {
createComponent({
props: {
showLinks: true,
type: LAYER_VIEW,
},
data: {
showLinksActive: false,
@@ -0,0 +1,148 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::PatroniApdex, :aggregate_failures, feature_category: :database do # rubocop:disable Layout/LineLength
let(:schema) { :main }
let(:connection) { Gitlab::Database.database_base_models[schema].connection }

around do |example|
Gitlab::Database::SharedModel.using_connection(connection) do
example.run
end
end

describe '#evaluate' do
let(:prometheus_url) { 'http://thanos:9090' }
let(:prometheus_config) { [prometheus_url, { allow_local_requests: true, verify: true }] }

let(:prometheus_client) { instance_double(Gitlab::PrometheusClient) }

let(:context) do
Gitlab::Database::BackgroundMigration::HealthStatus::Context
.new(connection, ['users'], gitlab_schema)
end

let(:gitlab_schema) { "gitlab_#{schema}" }
let(:client_ready) { true }
let(:database_apdex_sli_query_main) { 'Apdex query for main' }
let(:database_apdex_sli_query_ci) { 'Apdex query for ci' }
let(:database_apdex_slo_main) { 0.99 }
let(:database_apdex_slo_ci) { 0.95 }
let(:database_apdex_settings) do
{
prometheus_api_url: prometheus_url,
apdex_sli_query: {
main: database_apdex_sli_query_main,
ci: database_apdex_sli_query_ci
},
apdex_slo: {
main: database_apdex_slo_main,
ci: database_apdex_slo_ci
}
}
end

subject(:evaluate) { described_class.new(context).evaluate }

before do
stub_application_setting(database_apdex_settings: database_apdex_settings)

allow(Gitlab::PrometheusClient).to receive(:new).with(*prometheus_config).and_return(prometheus_client)
allow(prometheus_client).to receive(:ready?).and_return(client_ready)
end

shared_examples 'Patroni Apdex Evaluator' do |schema|
context "with #{schema} schema" do
let(:schema) { schema }
let(:apdex_slo_above_sli) { { main: 0.991, ci: 0.951 } }
let(:apdex_slo_below_sli) { { main: 0.989, ci: 0.949 } }

it 'returns NoSignal signal in case the feature flag is disabled' do
stub_feature_flags(batched_migrations_health_status_patroni_apdex: false)

expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::NotAvailable)
expect(evaluate.reason).to include('indicator disabled')
end

context 'without database_apdex_settings' do
let(:database_apdex_settings) { nil }

it 'returns Unknown signal' do
expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Patroni Apdex Settings not configured')
end
end

context 'when Prometheus client is not ready' do
let(:client_ready) { false }

it 'returns Unknown signal' do
expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Prometheus client is not ready')
end
end

context 'when apdex SLI query is not configured' do
let(:"database_apdex_sli_query_#{schema}") { nil }

it 'returns Unknown signal' do
expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Apdex SLI query is not configured')
end
end

context 'when slo is not configured' do
let(:"database_apdex_slo_#{schema}") { nil }

it 'returns Unknown signal' do
expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Apdex SLO is not configured')
end
end

it 'returns Normal signal when Patroni apdex SLI is above SLO' do
expect(prometheus_client).to receive(:query)
.with(send("database_apdex_sli_query_#{schema}"))
.and_return([{ "value" => [1662423310.878, apdex_slo_above_sli[schema]] }])
expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal)
expect(evaluate.reason).to include('Patroni service apdex is above SLO')
end

it 'returns Stop signal when Patroni apdex is below SLO' do
expect(prometheus_client).to receive(:query)
.with(send("database_apdex_sli_query_#{schema}"))
.and_return([{ "value" => [1662423310.878, apdex_slo_below_sli[schema]] }])
expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Stop)
expect(evaluate.reason).to include('Patroni service apdex is below SLO')
end

context 'when Patroni apdex can not be calculated' do
where(:result) do
[
nil,
[],
[{}],
[{ 'value' => 1 }],
[{ 'value' => [1] }]
]
end

with_them do
it 'returns Unknown signal' do
expect(prometheus_client).to receive(:query).and_return(result)
expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Patroni service apdex can not be calculated')
end
end
end
end
end

Gitlab::Database.database_base_models.each do |database_base_model, connection|
next unless connection.present?

it_behaves_like 'Patroni Apdex Evaluator', database_base_model.to_sym
end
end
end
@@ -19,8 +19,10 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus do
let(:health_status) { Gitlab::Database::BackgroundMigration::HealthStatus }
let(:autovacuum_indicator_class) { health_status::Indicators::AutovacuumActiveOnTable }
let(:wal_indicator_class) { health_status::Indicators::WriteAheadLog }
let(:patroni_apdex_indicator_class) { health_status::Indicators::PatroniApdex }
let(:autovacuum_indicator) { instance_double(autovacuum_indicator_class) }
let(:wal_indicator) { instance_double(wal_indicator_class) }
let(:patroni_apdex_indicator) { instance_double(patroni_apdex_indicator_class) }

before do
allow(autovacuum_indicator_class).to receive(:new).with(migration.health_context).and_return(autovacuum_indicator)

@@ -36,8 +38,11 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus do
expect(autovacuum_indicator).to receive(:evaluate).and_return(normal_signal)
expect(wal_indicator_class).to receive(:new).with(migration.health_context).and_return(wal_indicator)
expect(wal_indicator).to receive(:evaluate).and_return(not_available_signal)
expect(patroni_apdex_indicator_class).to receive(:new).with(migration.health_context)
.and_return(patroni_apdex_indicator)
expect(patroni_apdex_indicator).to receive(:evaluate).and_return(not_available_signal)

expect(evaluate).to contain_exactly(normal_signal, not_available_signal)
expect(evaluate).to contain_exactly(normal_signal, not_available_signal, not_available_signal)
end
end
@@ -45,6 +45,20 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
let(:ftp) { 'ftp://example.com' }
let(:javascript) { 'javascript:alert(window.opener.document.location)' }

let_it_be(:valid_database_apdex_settings) do
{
prometheus_api_url: 'Prometheus URL',
apdex_sli_query: {
main: 'Apdex SLI query main',
ci: 'Apdex SLI query ci'
},
apdex_slo: {
main: 0.99,
ci: 0.98
}
}
end

it { is_expected.to allow_value(nil).for(:home_page_url) }
it { is_expected.to allow_value(http).for(:home_page_url) }
it { is_expected.to allow_value(https).for(:home_page_url) }
@@ -256,6 +270,10 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to allow_value(false).for(:gitlab_dedicated_instance) }
it { is_expected.not_to allow_value(nil).for(:gitlab_dedicated_instance) }

it { is_expected.not_to allow_value(random: :value).for(:database_apdex_settings) }
it { is_expected.to allow_value(nil).for(:database_apdex_settings) }
it { is_expected.to allow_value(valid_database_apdex_settings).for(:database_apdex_settings) }

context 'when deactivate_dormant_users is enabled' do
before do
stub_application_setting(deactivate_dormant_users: true)
@@ -153,6 +153,38 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
pipeline.succeed!
end
end

describe 'unlocking artifacts on after a running pipeline succeeds, skipped, canceled, failed or blocked' do
shared_examples 'scheduling Ci::UnlockRefArtifactsOnPipelineStopWorker' do |event:|
let(:pipeline) { create(:ci_pipeline, :running) }

it 'schedules Ci::UnlockRefArtifactsOnPipelineStopWorker' do
expect(Ci::UnlockRefArtifactsOnPipelineStopWorker).to receive(:perform_async).with(pipeline.id)

pipeline.fire_status_event(event)
end
end

context 'when running pipeline is successful' do
it_behaves_like 'scheduling Ci::UnlockRefArtifactsOnPipelineStopWorker', event: :succeed
end

context 'when running pipeline is skipped' do
it_behaves_like 'scheduling Ci::UnlockRefArtifactsOnPipelineStopWorker', event: :skip
end

context 'when running pipeline is canceled' do
it_behaves_like 'scheduling Ci::UnlockRefArtifactsOnPipelineStopWorker', event: :cancel
end

context 'when running pipeline failed' do
it_behaves_like 'scheduling Ci::UnlockRefArtifactsOnPipelineStopWorker', event: :drop
end

context 'when running pipeline is blocked' do
it_behaves_like 'scheduling Ci::UnlockRefArtifactsOnPipelineStopWorker', event: :block
end
end
end

describe 'pipeline age metric' do
@@ -7,61 +7,6 @@ RSpec.describe Ci::Ref do

it { is_expected.to belong_to(:project) }

describe 'state machine transitions' do
context 'unlock artifacts transition' do
let(:ci_ref) { create(:ci_ref) }
let(:unlock_artifacts_worker_spy) { class_spy(::Ci::PipelineSuccessUnlockArtifactsWorker) }

before do
stub_const('Ci::PipelineSuccessUnlockArtifactsWorker', unlock_artifacts_worker_spy)
end

context 'pipline is locked' do
let!(:pipeline) { create(:ci_pipeline, ci_ref_id: ci_ref.id, locked: :artifacts_locked) }

where(:initial_state, :action, :count) do
:unknown | :succeed! | 1
:unknown | :do_fail! | 0
:success | :succeed! | 1
:success | :do_fail! | 0
:failed | :succeed! | 1
:failed | :do_fail! | 0
:fixed | :succeed! | 1
:fixed | :do_fail! | 0
:broken | :succeed! | 1
:broken | :do_fail! | 0
:still_failing | :succeed | 1
:still_failing | :do_fail | 0
end

with_them do
context "when transitioning states" do
before do
status_value = Ci::Ref.state_machines[:status].states[initial_state].value
ci_ref.update!(status: status_value)
end

it 'calls unlock artifacts service' do
ci_ref.send(action)

expect(unlock_artifacts_worker_spy).to have_received(:perform_async).exactly(count).times
end
end
end
end

context 'pipeline is unlocked' do
let!(:pipeline) { create(:ci_pipeline, ci_ref_id: ci_ref.id, locked: :unlocked) }

it 'does not call unlock artifacts service' do
ci_ref.succeed!

expect(unlock_artifacts_worker_spy).not_to have_received(:perform_async)
end
end
end
end

describe '.ensure_for' do
let_it_be(:project) { create(:project, :repository) }

@@ -141,7 +86,7 @@ RSpec.describe Ci::Ref do
expect(ci_ref.last_finished_pipeline_id).to eq(pipeline.id)
end

context 'when the pipeline a dangling pipeline' do
context 'when the pipeline is a dangling pipeline' do
let(:pipeline_source) { Enums::Ci::Pipeline.sources[:ondemand_dast_scan] }

it 'returns nil' do
@@ -151,6 +96,47 @@ RSpec.describe Ci::Ref do
end
end

describe '#last_successful_pipeline' do
let_it_be(:ci_ref) { create(:ci_ref) }

let(:pipeline_source) { Enums::Ci::Pipeline.sources[:push] }

context 'when there are no successful pipelines' do
let!(:pipeline) { create(:ci_pipeline, :running, ci_ref: ci_ref, source: pipeline_source) }

it 'returns nil' do
expect(ci_ref.last_successful_pipeline).to be_nil
end
end

context 'when there are successful pipelines' do
let!(:successful_pipeline) { create(:ci_pipeline, :success, ci_ref: ci_ref, source: pipeline_source) }
let!(:last_successful_pipeline) { create(:ci_pipeline, :success, ci_ref: ci_ref, source: pipeline_source) }

it 'returns the latest successful pipeline id' do
expect(ci_ref.last_successful_pipeline).to eq(last_successful_pipeline)
end
end

context 'when there are non-successful pipelines' do
let!(:last_successful_pipeline) { create(:ci_pipeline, :success, ci_ref: ci_ref, source: pipeline_source) }
let!(:failed_pipeline) { create(:ci_pipeline, :failed, ci_ref: ci_ref, source: pipeline_source) }

it 'returns the latest successful pipeline id' do
expect(ci_ref.last_successful_pipeline).to eq(last_successful_pipeline)
end
end

context 'when the pipeline is a dangling pipeline' do
let(:pipeline_source) { Enums::Ci::Pipeline.sources[:ondemand_dast_scan] }
let!(:pipeline) { create(:ci_pipeline, :running, ci_ref: ci_ref, source: pipeline_source) }

it 'returns nil' do
expect(ci_ref.last_finished_pipeline_id).to be_nil
end
end
end

describe '#update_status_by!' do
subject { ci_ref.update_status_by!(pipeline) }

@@ -238,7 +238,7 @@ RSpec.describe API::BroadcastMessages, :aggregate_failures, feature_category: :o
end

it_behaves_like '412 response' do
let(:request) { api(path, admin, admin_mode: true) }
let(:request) { api("/broadcast_messages/#{message.id}", admin, admin_mode: true) }
end

it 'deletes the broadcast message for admins' do
@@ -556,7 +556,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do
end

it_behaves_like '412 response' do
let(:request) { api("/runners/#{shared_runner.id}", admin) }
let(:request) { api("/runners/#{shared_runner.id}", admin, admin_mode: true) }
end
end

@@ -361,7 +361,7 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category:
context 'with upload strategy' do
context 'when params invalid' do
it_behaves_like '400 response' do
let(:request) { post(api(path, user), params: { 'upload[url]' => 'whatever' }) }
let(:request) { post(api(path, user, admin_mode: user.admin?), params: { 'upload[url]' => 'whatever' }) }
end
end

@@ -38,7 +38,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d

context 'with snippets disabled' do
it_behaves_like '403 response' do
let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}/user_agent_detail", admin) }
let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}/user_agent_detail", admin, admin_mode: true) }
end
end
end
@@ -127,7 +127,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d
let(:file_params) { { files: [{ file_path: file_path, content: file_content }] } }
let(:params) { base_params.merge(file_params) }

subject { post api("/projects/#{project.id}/snippets/", actor), params: params }
subject { post api("/projects/#{project.id}/snippets/", actor, admin_mode: actor.admin?), params: params }

shared_examples 'project snippet repository actions' do
let(:snippet) { ProjectSnippet.find(json_response['id']) }
@@ -356,7 +356,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d

context 'with snippets disabled' do
it_behaves_like '403 response' do
let(:request) { put api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}", admin), params: { description: 'foo' } }
let(:request) { put api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}", admin, admin_mode: true), params: { description: 'foo' } }
end
end

@@ -382,12 +382,12 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d
end

it_behaves_like '412 response' do
let(:request) { api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/", admin) }
let(:request) { api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/", admin, admin_mode: true) }
end

context 'with snippets disabled' do
it_behaves_like '403 response' do
let(:request) { delete api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}", admin) }
let(:request) { delete api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}", admin, admin_mode: true) }
end
end
end
@@ -416,14 +416,15 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d

context 'with snippets disabled' do
it_behaves_like '403 response' do
let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}/raw", admin) }
let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}/raw", admin, admin_mode: true) }
end
end

it_behaves_like 'snippet blob content' do
let_it_be(:snippet_with_empty_repo) { create(:project_snippet, :empty_repo, author: admin, project: project) }
let_it_be(:admin_mode) { snippet.author.admin? }

subject { get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/raw", snippet.author) }
subject { get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/raw", snippet.author, admin_mode: admin_mode) }
end
end

@@ -3235,14 +3235,14 @@ RSpec.describe API::Projects, feature_category: :projects do

context 'for a forked project' do
before do
post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", admin)
post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", admin, admin_mode: true)
project_fork_target.reload
expect(project_fork_target.forked_from_project).to be_present
expect(project_fork_target).to be_forked
end

it 'makes forked project unforked' do
delete api("/projects/#{project_fork_target.id}/fork", admin)
delete api("/projects/#{project_fork_target.id}/fork", admin, admin_mode: true)

expect(response).to have_gitlab_http_status(:no_content)
project_fork_target.reload
@@ -3251,7 +3251,7 @@ RSpec.describe API::Projects, feature_category: :projects do
end

it_behaves_like '412 response' do
let(:request) { api("/projects/#{project_fork_target.id}/fork", admin) }
let(:request) { api("/projects/#{project_fork_target.id}/fork", admin, admin_mode: true) }
end
end

@@ -4526,7 +4526,7 @@ RSpec.describe API::Projects, feature_category: :projects do

it_behaves_like '412 response' do
let(:success_status) { 202 }
let(:request) { api("/projects/#{project.id}", admin) }
let(:request) { api("/projects/#{project.id}", admin, admin_mode: true) }
end
end
end
@@ -251,7 +251,7 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
context 'as an admin' do
context 'accesses their own profile' do
it 'contains the note of the user' do
get api("/user", admin)
get api("/user", admin, admin_mode: true)

expect(json_response).to have_key('note')
expect(json_response['note']).to eq(admin.note)
@@ -477,7 +477,6 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
context 'exclude_internal param' do
let_it_be(:internal_user) { User.alert_bot }

# why is this working without admin_mode?
it 'returns all users when it is not set' do
get api("/users?exclude_internal=false", admin)

@@ -2741,7 +2740,7 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
end

context 'with admin' do
let(:admin_personal_access_token) { create(:personal_access_token, user: admin).token }
let(:admin_personal_access_token) { create(:personal_access_token, :admin_mode, user: admin).token }

context 'with personal access token' do
it 'returns 403 without private token when sudo defined' do
@@ -2750,7 +2749,6 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
expect(response).to have_gitlab_http_status(:forbidden)
end

# why does this work without admin_mode?
it 'returns initial current user without private token but with is_admin when sudo not defined' do
get api("/user?private_token=#{admin_personal_access_token}", version: version)

@@ -4010,8 +4008,10 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
context 'requested by admin user' do
let(:requesting_user) { create(:user, :admin) }

subject { get api("/users/#{user.id}/memberships", requesting_user, admin_mode: true) }

it "responses successfully" do
get api("/users/#{user.id}/memberships", requesting_user, admin_mode: true)
subject

aggregate_failures 'expect successful response including groups and projects' do
expect(response).to have_gitlab_http_status(:ok)
@@ -4024,19 +4024,19 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
end
end

# why does this work without admin_mode?
it 'does not submit N+1 DB queries' do
# Avoid setup queries
get api("/users/#{user.id}/memberships", requesting_user)
subject
expect(response).to have_gitlab_http_status(:ok)

control = ActiveRecord::QueryRecorder.new do
get api("/users/#{user.id}/memberships", requesting_user)
subject
end

create_list(:project, 5).map { |project| project.add_guest(user) }

expect do
get api("/users/#{user.id}/memberships", requesting_user)
subject
end.not_to exceed_query_limit(control)
end

@ -2,160 +2,187 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Ci::JobArtifacts::CreateService, feature_category: :build_artifacts do
|
||||
RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
|
||||
include WorkhorseHelpers
|
||||
include Gitlab::Utils::Gzip
|
||||
|
||||
let_it_be(:project) { create(:project) }
|
||||
|
||||
let(:service) { described_class.new(job) }
|
||||
let(:job) { create(:ci_build, project: project) }
|
||||
let(:artifacts_sha256) { '0' * 64 }
|
||||
let(:metadata_file) { nil }
|
||||
|
||||
let(:artifacts_file) do
|
||||
file_to_upload('spec/fixtures/ci_build_artifacts.zip', sha256: artifacts_sha256)
|
||||
end
|
||||
describe '#authorize', :aggregate_failures do
|
||||
let(:artifact_type) { 'archive' }
|
||||
let(:filesize) { nil }
|
||||
|
||||
let(:params) do
|
||||
{
|
||||
'artifact_type' => 'archive',
|
||||
'artifact_format' => 'zip'
|
||||
}.with_indifferent_access
|
||||
end
|
||||
subject(:authorize) { service.authorize(artifact_type: artifact_type, filesize: filesize) }
|
||||
|
||||
def file_to_upload(path, params = {})
|
||||
upload = Tempfile.new('upload')
|
||||
FileUtils.copy(path, upload.path)
|
||||
# This is a workaround for https://github.com/docker/for-linux/issues/1015
|
||||
FileUtils.touch(upload.path)
|
||||
shared_examples_for 'handling lsif artifact' do
|
||||
context 'when artifact is lsif' do
|
||||
let(:artifact_type) { 'lsif' }
|
||||
|
||||
UploadedFile.new(upload.path, **params)
|
||||
it 'includes ProcessLsif in the headers' do
|
||||
expect(authorize[:headers][:ProcessLsif]).to eq(true)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples_for 'validating requirements' do
|
||||
context 'when filesize is specified' do
|
||||
let(:max_artifact_size) { 10 }
|
||||
|
||||
before do
|
||||
allow(Ci::JobArtifact)
|
||||
.to receive(:max_artifact_size)
|
||||
.with(type: artifact_type, project: project)
|
||||
.and_return(max_artifact_size)
|
||||
end
|
||||
|
||||
context 'and filesize exceeds the limit' do
|
||||
let(:filesize) { max_artifact_size + 1 }
|
||||
|
||||
it 'returns error' do
|
||||
expect(authorize[:status]).to eq(:error)
|
||||
end
|
||||
end
|
||||
|
||||
context 'and filesize does not exceed the limit' do
|
||||
let(:filesize) { max_artifact_size - 1 }
|
||||
|
||||
it 'returns success' do
|
||||
expect(authorize[:status]).to eq(:success)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples_for 'uploading to temp location' do |store_type|
|
||||
# We are not testing the entire headers here because this is fully tested
|
||||
# in workhorse_authorize's spec. We just want to confirm that it indeed used the temp path
|
||||
# by checking some indicators in the headers returned.
|
||||
if store_type == :object_storage
|
||||
it 'includes the authorize headers' do
|
||||
expect(authorize[:status]).to eq(:success)
|
||||
expect(authorize[:headers][:RemoteObject][:StoreURL]).to include(ObjectStorage::TMP_UPLOAD_PATH)
|
||||
end
|
||||
else
|
||||
it 'includes the authorize headers' do
|
||||
expect(authorize[:status]).to eq(:success)
|
||||
expect(authorize[:headers][:TempPath]).to include(ObjectStorage::TMP_UPLOAD_PATH)
|
||||
end
|
||||
end
|
||||
|
||||
it_behaves_like 'handling lsif artifact'
|
||||
it_behaves_like 'validating requirements'
|
||||
end
|
||||
|
||||
context 'when object storage is enabled' do
|
||||
context 'and direct upload is enabled' do
|
||||
before do
|
||||
stub_artifacts_object_storage(JobArtifactUploader, direct_upload: true)
|
||||
end
|
||||
|
||||
it_behaves_like 'uploading to temp location', :object_storage
|
||||
end
|
||||
|
||||
context 'and direct upload is disabled' do
|
||||
before do
|
||||
stub_artifacts_object_storage(JobArtifactUploader, direct_upload: false)
|
||||
end
|
||||
|
||||
it_behaves_like 'uploading to temp location', :local_storage
|
||||
end
|
||||
end
|
||||
|
||||
context 'when object storage is disabled' do
|
||||
it_behaves_like 'uploading to temp location', :local_storage
|
||||
end
|
||||
end
|
||||
|
||||
describe '#execute' do
|
||||
subject { service.execute(artifacts_file, params, metadata_file: metadata_file) }
|
||||
let(:artifacts_sha256) { '0' * 64 }
|
||||
let(:metadata_file) { nil }
|
||||
|
||||
def expect_accessibility_be(accessibility)
|
||||
if accessibility == :public
|
||||
expect(job.job_artifacts).to all be_public_accessibility
|
||||
else
|
||||
expect(job.job_artifacts).to all be_private_accessibility
|
||||
end
|
||||
let(:params) do
|
||||
{
|
||||
'artifact_type' => 'archive',
|
||||
'artifact_format' => 'zip'
|
||||
}.with_indifferent_access
|
||||
end
|
||||
|
||||
shared_examples 'job does not have public artifacts in the CI config' do |expected_artifacts_count, accessibility|
|
||||
it "sets accessibility by default to #{accessibility}" do
|
||||
expect { subject }.to change { Ci::JobArtifact.count }.by(expected_artifacts_count)
|
||||
subject(:execute) { service.execute(artifacts_file, params, metadata_file: metadata_file) }
|
||||
|
||||
expect_accessibility_be(accessibility)
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples 'job artifact set as private in the CI config' do |expected_artifacts_count, accessibility|
|
||||
let!(:job) { create(:ci_build, :with_private_artifacts_config, project: project) }
|
||||
|
||||
it "sets accessibility to #{accessibility}" do
|
||||
expect { subject }.to change { Ci::JobArtifact.count }.by(expected_artifacts_count)
|
||||
|
||||
expect_accessibility_be(accessibility)
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples 'job artifact set as public in the CI config' do |expected_artifacts_count, accessibility|
|
||||
let!(:job) { create(:ci_build, :with_public_artifacts_config, project: project) }
|
||||
|
||||
it "sets accessibility to #{accessibility}" do
|
||||
expect { subject }.to change { Ci::JobArtifact.count }.by(expected_artifacts_count)
|
||||
|
||||
expect_accessibility_be(accessibility)
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples 'when accessibility level passed as private' do |expected_artifacts_count, accessibility|
|
||||
before do
|
||||
params.merge!('accessibility' => 'private')
|
||||
shared_examples_for 'handling accessibility' do
|
||||
shared_examples 'public accessibility' do
|
||||
it 'sets accessibility to public level' do
|
||||
expect(job.job_artifacts).to all be_public_accessibility
|
||||
end
|
||||
end
|
||||
|
||||
it 'sets accessibility to private level' do
|
||||
expect { subject }.to change { Ci::JobArtifact.count }.by(expected_artifacts_count)
|
||||
|
||||
expect_accessibility_be(accessibility)
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples 'when accessibility passed as public' do |expected_artifacts_count|
|
||||
before do
|
||||
params.merge!('accessibility' => 'public')
|
||||
shared_examples 'private accessibility' do
|
||||
it 'sets accessibility to private level' do
|
||||
expect(job.job_artifacts).to all be_private_accessibility
|
||||
end
|
||||
end
|
||||
|
||||
it 'sets accessibility level to public' do
|
||||
expect { subject }.to change { Ci::JobArtifact.count }.by(expected_artifacts_count)
|
||||
|
||||
expect(job.job_artifacts).to all be_public_accessibility
|
||||
end
|
||||
end
|
||||
|
||||
context 'when artifacts file is uploaded' do
|
||||
it 'logs the created artifact' do
|
||||
expect(Gitlab::Ci::Artifacts::Logger)
|
||||
.to receive(:log_created)
|
||||
.with(an_instance_of(Ci::JobArtifact))
|
||||
|
||||
subject
|
||||
end
|
||||
|
||||
it 'returns artifact in the response' do
|
||||
response = subject
|
||||
new_artifact = job.job_artifacts.last
|
||||
|
||||
expect(response[:artifact]).to eq(new_artifact)
|
||||
end
|
||||
|
||||
it 'saves artifact for the given type' do
|
||||
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
|
||||
|
||||
new_artifact = job.job_artifacts.last
|
||||
expect(new_artifact.project).to eq(job.project)
|
||||
expect(new_artifact.file).to be_present
|
||||
expect(new_artifact.file_type).to eq(params['artifact_type'])
|
||||
expect(new_artifact.file_format).to eq(params['artifact_format'])
|
||||
expect(new_artifact.file_sha256).to eq(artifacts_sha256)
|
||||
expect(new_artifact.locked).to eq(job.pipeline.locked)
|
||||
end
|
||||
|
||||
context 'when non_public_artifacts feature flag is disabled' do
|
||||
context 'when non_public_artifacts flag is disabled' do
|
||||
before do
|
||||
stub_feature_flags(non_public_artifacts: false)
|
||||
end
|
||||
|
||||
context 'when accessibility level not passed to the service' do
|
||||
it_behaves_like 'job does not have public artifacts in the CI config', 1, :public
|
||||
it_behaves_like 'job artifact set as private in the CI config', 1, :public
|
||||
it_behaves_like 'job artifact set as public in the CI config', 1, :public
|
||||
it_behaves_like 'public accessibility'
|
||||
end
|
||||
|
||||
context 'when non_public_artifacts flag is enabled' do
|
||||
context 'and accessibility is defined in the params' do
|
||||
context 'and is passed as private' do
|
||||
before do
|
||||
params.merge!('accessibility' => 'private')
|
||||
end
|
||||
|
||||
it_behaves_like 'private accessibility'
|
||||
end
|
||||
|
||||
context 'and is passed as public' do
|
||||
before do
|
||||
params.merge!('accessibility' => 'public')
|
||||
end
|
||||
|
||||
it_behaves_like 'public accessibility'
|
||||
end
|
||||
end
|
||||
|
||||
it_behaves_like 'when accessibility level passed as private', 1, :public
|
||||
it_behaves_like 'when accessibility passed as public', 1
|
||||
context 'and accessibility is not defined in the params' do
|
||||
context 'and job has no public artifacts defined in its CI config' do
|
||||
it_behaves_like 'public accessibility'
|
||||
end
|
||||
|
||||
context 'and job artifacts defined as private in the CI config' do
|
||||
let(:job) { create(:ci_build, :with_private_artifacts_config, project: project) }
|
||||
|
||||
it_behaves_like 'private accessibility'
|
||||
end
|
||||
|
||||
context 'and job artifacts defined as public in the CI config' do
|
||||
let(:job) { create(:ci_build, :with_public_artifacts_config, project: project) }
|
||||
|
||||
it_behaves_like 'public accessibility'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when accessibility passed as invalid value' do
|
||||
before do
|
||||
params.merge!('accessibility' => 'invalid_value')
|
||||
params.merge!('accessibility' => 'foo')
|
||||
end
|
||||
|
||||
it 'fails with argument error' do
|
||||
expect { subject }.to raise_error(ArgumentError)
|
||||
expect { execute }.to raise_error(ArgumentError, "'foo' is not a valid accessibility")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when accessibility level not passed to the service' do
|
||||
it_behaves_like 'job does not have public artifacts in the CI config', 1, :public
|
||||
it_behaves_like 'job artifact set as private in the CI config', 1, :private
|
||||
it_behaves_like 'job artifact set as public in the CI config', 1, :public
|
||||
end
|
||||
|
||||
it_behaves_like 'when accessibility level passed as private', 1, :private
|
||||
|
||||
it_behaves_like 'when accessibility passed as public', 1
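A minimal sketch of how the accessibility parameter exercised by these shared examples reaches the service. The constructor and call shape are assumed from the surrounding spec rather than shown in this hunk, and the archive/zip values are illustrative.
params = {
  'artifact_type' => 'archive',   # illustrative type/format pair
  'artifact_format' => 'zip',
  'accessibility' => 'private'    # or 'public'; when omitted, accessibility is derived from the job's CI config
}.with_indifferent_access
result = Ci::JobArtifacts::CreateService.new(job).execute(artifacts_file, params)
result[:artifact].accessibility   # expected to be "private" here when non_public_artifacts is enabled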
|
||||
|
||||
shared_examples_for 'handling metadata file' do
|
||||
context 'when metadata file is also uploaded' do
|
||||
let(:metadata_file) do
|
||||
file_to_upload('spec/fixtures/ci_build_artifacts_metadata.gz', sha256: artifacts_sha256)
|
||||
|
|
@ -165,8 +192,8 @@ RSpec.describe Ci::JobArtifacts::CreateService, feature_category: :build_artifac
|
|||
stub_application_setting(default_artifacts_expire_in: '1 day')
|
||||
end
|
||||
|
||||
it 'saves metadata artifact' do
|
||||
expect { subject }.to change { Ci::JobArtifact.count }.by(2)
|
||||
it 'creates a new metadata job artifact' do
|
||||
expect { execute }.to change { Ci::JobArtifact.where(file_type: :metadata).count }.by(1)
|
||||
|
||||
new_artifact = job.job_artifacts.last
|
||||
expect(new_artifact.project).to eq(job.project)
|
||||
|
|
@ -177,16 +204,6 @@ RSpec.describe Ci::JobArtifacts::CreateService, feature_category: :build_artifac
|
|||
expect(new_artifact.locked).to eq(job.pipeline.locked)
|
||||
end
|
||||
|
||||
context 'when accessibility level not passed to the service' do
|
||||
it_behaves_like 'job does not have public artifacts in the CI config', 2, :public
|
||||
it_behaves_like 'job artifact set as private in the CI config', 2, :private
|
||||
it_behaves_like 'job artifact set as public in the CI config', 2, :public
|
||||
end
|
||||
|
||||
it_behaves_like 'when accessibility level passed as private', 2, :private
|
||||
|
||||
it_behaves_like 'when accessibility passed as public', 2
|
||||
|
||||
it 'logs the created artifact and metadata' do
|
||||
expect(Gitlab::Ci::Artifacts::Logger)
|
||||
.to receive(:log_created)
|
||||
|
|
@ -195,10 +212,12 @@ RSpec.describe Ci::JobArtifacts::CreateService, feature_category: :build_artifac
|
|||
subject
|
||||
end
|
||||
|
||||
it_behaves_like 'handling accessibility'
|
||||
|
||||
it 'sets expiration date according to application settings' do
|
||||
expected_expire_at = 1.day.from_now
|
||||
|
||||
expect(subject).to match(a_hash_including(status: :success, artifact: anything))
|
||||
expect(execute).to match(a_hash_including(status: :success, artifact: anything))
|
||||
archive_artifact, metadata_artifact = job.job_artifacts.last(2)
|
||||
|
||||
expect(job.artifacts_expire_at).to be_within(1.minute).of(expected_expire_at)
|
||||
|
|
@ -214,7 +233,7 @@ RSpec.describe Ci::JobArtifacts::CreateService, feature_category: :build_artifac
|
|||
it 'sets expiration date according to the parameter' do
|
||||
expected_expire_at = 2.hours.from_now
|
||||
|
||||
expect(subject).to match(a_hash_including(status: :success, artifact: anything))
|
||||
expect(execute).to match(a_hash_including(status: :success, artifact: anything))
|
||||
archive_artifact, metadata_artifact = job.job_artifacts.last(2)
|
||||
|
||||
expect(job.artifacts_expire_at).to be_within(1.minute).of(expected_expire_at)
|
||||
|
|
@ -231,7 +250,7 @@ RSpec.describe Ci::JobArtifacts::CreateService, feature_category: :build_artifac
|
|||
it 'sets expiration date according to the parameter' do
|
||||
expected_expire_at = nil
|
||||
|
||||
expect(subject).to be_truthy
|
||||
expect(execute).to be_truthy
|
||||
archive_artifact, metadata_artifact = job.job_artifacts.last(2)
|
||||
|
||||
expect(job.artifacts_expire_at).to eq(expected_expire_at)
|
||||
|
|
@ -242,96 +261,237 @@ RSpec.describe Ci::JobArtifacts::CreateService, feature_category: :build_artifac
|
|||
end
|
||||
end
|
||||
|
||||
context 'when artifacts file already exists' do
|
||||
let!(:existing_artifact) do
|
||||
create(:ci_job_artifact, :archive, file_sha256: existing_sha256, job: job)
|
||||
end
|
||||
shared_examples_for 'handling dotenv' do |storage_type|
|
||||
context 'when artifact type is dotenv' do
|
||||
let(:params) do
|
||||
{
|
||||
'artifact_type' => 'dotenv',
|
||||
'artifact_format' => 'gzip'
|
||||
}.with_indifferent_access
|
||||
end
|
||||
|
||||
context 'when sha256 of uploading artifact is the same of the existing one' do
|
||||
let(:existing_sha256) { artifacts_sha256 }
|
||||
if storage_type == :object_storage
|
||||
let(:object_body) { File.read('spec/fixtures/build.env.gz') }
|
||||
let(:upload_filename) { 'build.env.gz' }
|
||||
|
||||
it 'ignores the changes' do
|
||||
expect { subject }.not_to change { Ci::JobArtifact.count }
|
||||
expect(subject).to match(a_hash_including(status: :success))
|
||||
before do
|
||||
stub_request(:get, %r{s3.amazonaws.com/#{remote_path}})
|
||||
.to_return(status: 200, body: File.read('spec/fixtures/build.env.gz'))
|
||||
end
|
||||
else
|
||||
let(:artifacts_file) do
|
||||
file_to_upload('spec/fixtures/build.env.gz', sha256: artifacts_sha256)
|
||||
end
|
||||
end
|
||||
|
||||
it 'calls parse service' do
|
||||
expect_next_instance_of(Ci::ParseDotenvArtifactService) do |service|
|
||||
expect(service).to receive(:execute).once.and_call_original
|
||||
end
|
||||
|
||||
expect(execute[:status]).to eq(:success)
|
||||
expect(job.job_variables.as_json(only: [:key, :value, :source])).to contain_exactly(
|
||||
hash_including('key' => 'KEY1', 'value' => 'VAR1', 'source' => 'dotenv'),
|
||||
hash_including('key' => 'KEY2', 'value' => 'VAR2', 'source' => 'dotenv'))
|
||||
end
|
||||
end
|
||||
end
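For reference, the dotenv expectations above imply fixture contents along these lines; the file contents are inferred from the assertions, not read from spec/fixtures/build.env.gz.
# build.env (gzipped as build.env.gz), as implied by the expectations:
#   KEY1=VAR1
#   KEY2=VAR2
job.job_variables.map { |v| [v.key, v.value, v.source] }
# => [["KEY1", "VAR1", "dotenv"], ["KEY2", "VAR2", "dotenv"]]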
|
||||
|
||||
context 'when sha256 of uploading artifact is different than the existing one' do
|
||||
let(:existing_sha256) { '1' * 64 }
|
||||
shared_examples_for 'handling object storage errors' do
|
||||
shared_examples 'rescues object storage error' do |klass, message, expected_message|
|
||||
it "handles #{klass}" do
|
||||
allow_next_instance_of(JobArtifactUploader) do |uploader|
|
||||
allow(uploader).to receive(:store!).and_raise(klass, message)
|
||||
end
|
||||
|
||||
it 'returns error status' do
|
||||
expect(Gitlab::ErrorTracking).to receive(:track_exception).and_call_original
|
||||
expect(Gitlab::ErrorTracking)
|
||||
.to receive(:track_exception)
|
||||
.and_call_original
|
||||
|
||||
expect { subject }.not_to change { Ci::JobArtifact.count }
|
||||
expect(subject).to match(
|
||||
expect(execute).to match(
|
||||
a_hash_including(
|
||||
http_status: :bad_request, message: 'another artifact of the same type already exists', status: :error))
|
||||
http_status: :service_unavailable,
|
||||
message: expected_message || message,
|
||||
status: :error))
|
||||
end
|
||||
end
|
||||
|
||||
it_behaves_like 'rescues object storage error',
|
||||
Errno::EIO, 'some/path', 'Input/output error - some/path'
|
||||
|
||||
it_behaves_like 'rescues object storage error',
|
||||
Google::Apis::ServerError, 'Server error'
|
||||
|
||||
it_behaves_like 'rescues object storage error',
|
||||
Signet::RemoteServerError, 'The service is currently unavailable'
|
||||
end
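A rough sketch of the behaviour these examples pin down; the rescue itself lives in the service, so the local names uploader and artifacts_file are assumptions.
begin
  uploader.store!(artifacts_file)
rescue Errno::EIO, Google::Apis::ServerError, Signet::RemoteServerError => e
  # failures while pushing to object storage are tracked and surfaced as a 503-style error hash
  Gitlab::ErrorTracking.track_exception(e)
  { status: :error, http_status: :service_unavailable, message: e.message }
end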
|
||||
|
||||
shared_examples_for 'validating requirements' do
|
||||
context 'when filesize is specified' do
|
||||
let(:max_artifact_size) { 10 }
|
||||
|
||||
before do
|
||||
allow(Ci::JobArtifact)
|
||||
.to receive(:max_artifact_size)
|
||||
.with(type: 'archive', project: project)
|
||||
.and_return(max_artifact_size)
|
||||
|
||||
allow(artifacts_file).to receive(:size).and_return(filesize)
|
||||
end
|
||||
|
||||
context 'and filesize exceeds the limit' do
|
||||
let(:filesize) { max_artifact_size + 1 }
|
||||
|
||||
it 'returns error' do
|
||||
expect(execute[:status]).to eq(:error)
|
||||
end
|
||||
end
|
||||
|
||||
context 'and filesize does not exceed the limit' do
|
||||
let(:filesize) { max_artifact_size - 1 }
|
||||
|
||||
it 'returns success' do
|
||||
expect(execute[:status]).to eq(:success)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
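The requirement check covered above amounts to a size comparison against the per-type, per-project limit. This sketch assumes the check runs before anything is persisted, and the error message text is illustrative.
max_size = Ci::JobArtifact.max_artifact_size(type: params[:artifact_type], project: project)
if artifacts_file.size > max_size
  { status: :error, message: 'artifact size exceeds the limit' } # wording illustrative
else
  { status: :success } # proceed with building and persisting the artifact
end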
|
||||
|
||||
context 'when artifact type is dotenv' do
|
||||
shared_examples_for 'handling existing artifact' do
|
||||
context 'when job already has an artifact of the same file type' do
|
||||
let!(:existing_artifact) do
|
||||
create(:ci_job_artifact, params[:artifact_type], file_sha256: existing_sha256, job: job)
|
||||
end
|
||||
|
||||
context 'when sha256 of uploading artifact is the same of the existing one' do
|
||||
let(:existing_sha256) { artifacts_sha256 }
|
||||
|
||||
it 'ignores the changes' do
|
||||
expect { execute }.not_to change { Ci::JobArtifact.count }
|
||||
expect(execute).to match(a_hash_including(status: :success))
|
||||
end
|
||||
end
|
||||
|
||||
context 'when sha256 of uploading artifact is different than the existing one' do
|
||||
let(:existing_sha256) { '1' * 64 }
|
||||
|
||||
it 'returns error status' do
|
||||
expect(Gitlab::ErrorTracking).to receive(:track_exception).and_call_original
|
||||
|
||||
expect { execute }.not_to change { Ci::JobArtifact.count }
|
||||
expect(execute).to match(
|
||||
a_hash_including(
|
||||
http_status: :bad_request,
|
||||
message: 'another artifact of the same type already exists',
|
||||
status: :error
|
||||
)
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
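The idempotency rule exercised above, sketched under the assumption that the lookup is by file type on the job's artifacts: a matching sha256 is a no-op success, a different one is rejected as a duplicate.
existing = job.job_artifacts.find_by(file_type: params[:artifact_type]) # assumed lookup
if existing && existing.file_sha256 == artifacts_file.sha256
  { status: :success } # nothing new to persist
elsif existing
  { status: :error, http_status: :bad_request,
    message: 'another artifact of the same type already exists' }
end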
|
||||
|
||||
shared_examples_for 'logging artifact' do
|
||||
it 'logs the created artifact' do
|
||||
expect(Gitlab::Ci::Artifacts::Logger)
|
||||
.to receive(:log_created)
|
||||
.with(an_instance_of(Ci::JobArtifact))
|
||||
|
||||
execute
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples_for 'handling remote uploads to temporary location' do
|
||||
context 'when artifacts file is uploaded' do
|
||||
it 'creates a new job artifact' do
|
||||
expect { execute }.to change { Ci::JobArtifact.count }.by(1)
|
||||
|
||||
new_artifact = execute[:artifact]
|
||||
expect(new_artifact).to eq(job.job_artifacts.last)
|
||||
expect(new_artifact.project).to eq(job.project)
|
||||
expect(new_artifact.file.filename).to eq(artifacts_file.original_filename)
|
||||
expect(new_artifact.file_identifier).to eq(artifacts_file.original_filename)
|
||||
expect(new_artifact.file_type).to eq(params['artifact_type'])
|
||||
expect(new_artifact.file_format).to eq(params['artifact_format'])
|
||||
expect(new_artifact.file_sha256).to eq(artifacts_sha256)
|
||||
expect(new_artifact.locked).to eq(job.pipeline.locked)
|
||||
expect(new_artifact.size).to eq(artifacts_file.size)
|
||||
|
||||
expect(execute[:status]).to eq(:success)
|
||||
end
|
||||
|
||||
it_behaves_like 'handling accessibility'
|
||||
it_behaves_like 'handling metadata file'
|
||||
it_behaves_like 'handling partitioning'
|
||||
it_behaves_like 'logging artifact'
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples_for 'handling partitioning' do
|
||||
context 'with job partitioned', :ci_partitionable do
|
||||
let(:pipeline) { create(:ci_pipeline, project: project, partition_id: ci_testing_partition_id) }
|
||||
let(:job) { create(:ci_build, pipeline: pipeline) }
|
||||
|
||||
it 'sets partition_id on artifacts' do
|
||||
expect { execute }.to change { Ci::JobArtifact.count }
|
||||
|
||||
artifacts_partitions = job.job_artifacts.map(&:partition_id).uniq
|
||||
|
||||
expect(artifacts_partitions).to eq([ci_testing_partition_id])
|
||||
end
|
||||
end
|
||||
end
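The invariant checked by the partitioning example, stated directly:
job.job_artifacts.map(&:partition_id).uniq == [job.pipeline.partition_id] # => true after execute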
|
||||
|
||||
context 'when object storage and direct upload is enabled' do
|
||||
let(:fog_connection) { stub_artifacts_object_storage(JobArtifactUploader, direct_upload: true) }
|
||||
let(:remote_path) { File.join(remote_store_path, remote_id) }
|
||||
let(:object_body) { File.open('spec/fixtures/ci_build_artifacts.zip') }
|
||||
let(:upload_filename) { 'artifacts.zip' }
|
||||
let(:object) do
|
||||
fog_connection.directories
|
||||
.new(key: 'artifacts')
|
||||
.files
|
||||
.create( # rubocop:disable Rails/SaveBang
|
||||
key: remote_path,
|
||||
body: object_body
|
||||
)
|
||||
end
|
||||
|
||||
let(:artifacts_file) do
|
||||
file_to_upload('spec/fixtures/build.env.gz', sha256: artifacts_sha256)
|
||||
fog_to_uploaded_file(
|
||||
object,
|
||||
filename: upload_filename,
|
||||
sha256: artifacts_sha256,
|
||||
remote_id: remote_id
|
||||
)
|
||||
end
|
||||
|
||||
let(:params) do
|
||||
{
|
||||
'artifact_type' => 'dotenv',
|
||||
'artifact_format' => 'gzip'
|
||||
}.with_indifferent_access
|
||||
end
|
||||
let(:remote_id) { 'generated-remote-id-12345' }
|
||||
let(:remote_store_path) { ObjectStorage::TMP_UPLOAD_PATH }
|
||||
|
||||
it 'calls parse service' do
|
||||
expect_next_instance_of(Ci::ParseDotenvArtifactService) do |service|
|
||||
expect(service).to receive(:execute).once.and_call_original
|
||||
end
|
||||
|
||||
expect(subject[:status]).to eq(:success)
|
||||
expect(job.job_variables.as_json(only: [:key, :value, :source])).to contain_exactly(
|
||||
hash_including('key' => 'KEY1', 'value' => 'VAR1', 'source' => 'dotenv'),
|
||||
hash_including('key' => 'KEY2', 'value' => 'VAR2', 'source' => 'dotenv'))
|
||||
end
|
||||
it_behaves_like 'handling remote uploads to temporary location'
|
||||
it_behaves_like 'handling dotenv', :object_storage
|
||||
it_behaves_like 'handling object storage errors'
|
||||
it_behaves_like 'validating requirements'
|
||||
end
|
||||
|
||||
context 'with job partitioning', :ci_partitionable do
|
||||
let(:pipeline) { create(:ci_pipeline, project: project, partition_id: ci_testing_partition_id) }
|
||||
let(:job) { create(:ci_build, pipeline: pipeline) }
|
||||
|
||||
it 'sets partition_id on artifacts' do
|
||||
expect { subject }.to change { Ci::JobArtifact.count }
|
||||
|
||||
artifacts_partitions = job.job_artifacts.map(&:partition_id).uniq
|
||||
|
||||
expect(artifacts_partitions).to eq([ci_testing_partition_id])
|
||||
context 'when using local storage' do
|
||||
let(:artifacts_file) do
|
||||
file_to_upload('spec/fixtures/ci_build_artifacts.zip', sha256: artifacts_sha256)
|
||||
end
|
||||
|
||||
it_behaves_like 'handling remote uploads to temporary location'
|
||||
it_behaves_like 'handling dotenv', :local_storage
|
||||
it_behaves_like 'validating requirements'
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples 'rescues object storage error' do |klass, message, expected_message|
|
||||
it "handles #{klass}" do
|
||||
allow_next_instance_of(JobArtifactUploader) do |uploader|
|
||||
allow(uploader).to receive(:store!).and_raise(klass, message)
|
||||
end
|
||||
def file_to_upload(path, params = {})
|
||||
upload = Tempfile.new('upload')
|
||||
FileUtils.copy(path, upload.path)
|
||||
# This is a workaround for https://github.com/docker/for-linux/issues/1015
|
||||
FileUtils.touch(upload.path)
|
||||
|
||||
expect(Gitlab::ErrorTracking)
|
||||
.to receive(:track_exception)
|
||||
.and_call_original
|
||||
|
||||
expect(subject).to match(
|
||||
a_hash_including(
|
||||
http_status: :service_unavailable,
|
||||
message: expected_message || message,
|
||||
status: :error))
|
||||
end
|
||||
end
|
||||
|
||||
it_behaves_like 'rescues object storage error',
|
||||
Errno::EIO, 'some/path', 'Input/output error - some/path'
|
||||
|
||||
it_behaves_like 'rescues object storage error',
|
||||
Google::Apis::ServerError, 'Server error'
|
||||
|
||||
it_behaves_like 'rescues object storage error',
|
||||
Signet::RemoteServerError, 'The service is currently unavailable'
|
||||
UploadedFile.new(upload.path, **params)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -3,6 +3,16 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integration do
|
||||
let_it_be(:ref) { 'master' }
|
||||
let_it_be(:project) { create(:project) }
|
||||
let_it_be(:tag_ref_path) { "#{::Gitlab::Git::TAG_REF_PREFIX}#{ref}" }
|
||||
let_it_be(:ci_ref_tag) { create(:ci_ref, ref_path: tag_ref_path, project: project) }
|
||||
let_it_be(:branch_ref_path) { "#{::Gitlab::Git::BRANCH_REF_PREFIX}#{ref}" }
|
||||
let_it_be(:ci_ref_branch) { create(:ci_ref, ref_path: branch_ref_path, project: project) }
|
||||
let_it_be(:new_ref) { 'new_ref' }
|
||||
let_it_be(:new_ref_path) { "#{::Gitlab::Git::BRANCH_REF_PREFIX}#{new_ref}" }
|
||||
let_it_be(:new_ci_ref) { create(:ci_ref, ref_path: new_ref_path, project: project) }
|
||||
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
||||
where(:tag) do
|
||||
|
|
@ -13,31 +23,31 @@ RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integra
|
|||
end
|
||||
|
||||
with_them do
|
||||
let(:ref) { 'master' }
|
||||
let(:ref_path) { tag ? "#{::Gitlab::Git::TAG_REF_PREFIX}#{ref}" : "#{::Gitlab::Git::BRANCH_REF_PREFIX}#{ref}" }
|
||||
let(:ci_ref) { create(:ci_ref, ref_path: ref_path) }
|
||||
let(:project) { ci_ref.project }
|
||||
let(:target_ref) { tag ? ci_ref_tag : ci_ref_branch }
|
||||
let(:source_job) { create(:ci_build, pipeline: pipeline) }
|
||||
|
||||
let!(:old_unlocked_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :unlocked) }
|
||||
let!(:older_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
|
||||
let!(:older_ambiguous_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: !tag, project: project, locked: :artifacts_locked) }
|
||||
let!(:code_coverage_pipeline) { create(:ci_pipeline, :with_coverage_report_artifact, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
|
||||
let!(:pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
|
||||
let!(:child_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, child_of: pipeline, project: project, locked: :artifacts_locked) }
|
||||
let!(:newer_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
|
||||
let!(:successful_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
|
||||
let!(:child_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, child_of: successful_pipeline, project: project, locked: :artifacts_locked) }
|
||||
let!(:last_successful_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
|
||||
let!(:last_successful_child_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: ref, tag: tag, child_of: last_successful_pipeline, project: project, locked: :artifacts_locked) }
|
||||
let!(:older_failed_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, status: :failed, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
|
||||
let!(:latest_failed_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, status: :failed, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
|
||||
let!(:blocked_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, status: :manual, ref: ref, tag: tag, project: project, locked: :artifacts_locked) }
|
||||
let!(:other_ref_pipeline) { create(:ci_pipeline, :with_persisted_artifacts, ref: 'other_ref', tag: tag, project: project, locked: :artifacts_locked) }
|
||||
let!(:sources_pipeline) { create(:ci_sources_pipeline, source_job: source_job, source_project: project, pipeline: child_pipeline, project: project) }
|
||||
|
||||
before do
|
||||
stub_const("#{described_class}::BATCH_SIZE", 1)
|
||||
end
|
||||
|
||||
describe '#execute' do
|
||||
subject(:execute) { described_class.new(pipeline.project, pipeline.user).execute(ci_ref, before_pipeline) }
|
||||
subject(:execute) { described_class.new(successful_pipeline.project, successful_pipeline.user).execute(target_ref, before_pipeline) }
|
||||
|
||||
context 'when running on a ref before a pipeline' do
|
||||
let(:before_pipeline) { pipeline }
|
||||
let(:before_pipeline) { successful_pipeline }
|
||||
|
||||
it 'unlocks artifacts from older pipelines' do
|
||||
expect { execute }.to change { older_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
|
||||
|
|
@ -48,15 +58,15 @@ RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integra
|
|||
end
|
||||
|
||||
it 'does not unlock artifacts from newer pipelines' do
|
||||
expect { execute }.not_to change { newer_pipeline.reload.locked }.from('artifacts_locked')
|
||||
expect { execute }.not_to change { last_successful_pipeline.reload.locked }.from('artifacts_locked')
|
||||
end
|
||||
|
||||
it 'does not lock artifacts from old unlocked pipelines' do
|
||||
expect { execute }.not_to change { old_unlocked_pipeline.reload.locked }.from('unlocked')
|
||||
end
|
||||
|
||||
it 'does not unlock artifacts from the same pipeline' do
|
||||
expect { execute }.not_to change { pipeline.reload.locked }.from('artifacts_locked')
|
||||
it 'does not unlock artifacts from the successful pipeline' do
|
||||
expect { execute }.not_to change { successful_pipeline.reload.locked }.from('artifacts_locked')
|
||||
end
|
||||
|
||||
it 'does not unlock artifacts for other refs' do
|
||||
|
|
@ -74,6 +84,60 @@ RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integra
|
|||
it 'unlocks pipeline artifact records' do
|
||||
expect { execute }.to change { ::Ci::PipelineArtifact.artifact_unlocked.count }.from(0).to(1)
|
||||
end
|
||||
|
||||
context 'when before_pipeline is a failed pipeline' do
|
||||
let(:before_pipeline) { latest_failed_pipeline }
|
||||
|
||||
it 'unlocks artifacts from older failed pipeline' do
|
||||
expect { execute }.to change { older_failed_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
|
||||
end
|
||||
|
||||
it 'does not unlock artifact from the latest failed pipeline' do
|
||||
expect { execute }.not_to change { latest_failed_pipeline.reload.locked }.from('artifacts_locked')
|
||||
end
|
||||
|
||||
it 'does not unlock artifacts from the last successful pipeline' do
|
||||
expect { execute }.not_to change { last_successful_pipeline.reload.locked }.from('artifacts_locked')
|
||||
end
|
||||
|
||||
it 'does not unlock artifacts from the child of last successful pipeline' do
|
||||
expect { execute }.not_to change { last_successful_child_pipeline.reload.locked }.from('artifacts_locked')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when before_pipeline is a blocked pipeline' do
|
||||
let(:before_pipeline) { blocked_pipeline }
|
||||
|
||||
it 'unlocks artifacts from failed pipeline' do
|
||||
expect { execute }.to change { latest_failed_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
|
||||
end
|
||||
|
||||
it 'does not unlock artifact from the latest blocked pipeline' do
|
||||
expect { execute }.not_to change { blocked_pipeline.reload.locked }.from('artifacts_locked')
|
||||
end
|
||||
|
||||
it 'does not unlock artifacts from the last successful pipeline' do
|
||||
expect { execute }.not_to change { last_successful_pipeline.reload.locked }.from('artifacts_locked')
|
||||
end
|
||||
end
|
||||
|
||||
# rubocop:todo RSpec/MultipleMemoizedHelpers
|
||||
context 'when the ref has no successful pipeline' do
|
||||
let!(:target_ref) { new_ci_ref }
|
||||
let!(:failed_pipeline_1) { create(:ci_pipeline, :with_persisted_artifacts, status: :failed, ref: new_ref, project: project, locked: :artifacts_locked) }
|
||||
let!(:failed_pipeline_2) { create(:ci_pipeline, :with_persisted_artifacts, status: :failed, ref: new_ref, project: project, locked: :artifacts_locked) }
|
||||
|
||||
let(:before_pipeline) { failed_pipeline_2 }
|
||||
|
||||
it 'unlocks earliest failed pipeline' do
|
||||
expect { execute }.to change { failed_pipeline_1.reload.locked }.from('artifacts_locked').to('unlocked')
|
||||
end
|
||||
|
||||
it 'does not unlock latest failed pipeline' do
|
||||
expect { execute }.not_to change { failed_pipeline_2.reload.locked }.from('artifacts_locked')
|
||||
end
|
||||
end
|
||||
# rubocop:enable RSpec/MultipleMemoizedHelpers
|
||||
end
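Taken together, these examples encode roughly the following rule; the helper names below (descendants_of, id_before, artifacts_locked) are placeholders inferred from the expectations, not taken from the service code.
# pipelines that must stay locked: the reference pipeline and the ref's last
# successful pipeline, each together with their child pipelines (when they exist)
keep_locked = descendants_of(before_pipeline) + descendants_of(target_ref.last_successful_pipeline)
# everything older than before_pipeline on the ref that is still artifacts_locked gets unlocked
unlockable = target_ref.pipelines.artifacts_locked.id_before(before_pipeline.id) - keep_locked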
|
||||
|
||||
context 'when running on just the ref' do
|
||||
|
|
@ -84,11 +148,11 @@ RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integra
|
|||
end
|
||||
|
||||
it 'unlocks artifacts from newer pipelines' do
|
||||
expect { execute }.to change { newer_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
|
||||
expect { execute }.to change { last_successful_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
|
||||
end
|
||||
|
||||
it 'unlocks artifacts from the same pipeline' do
|
||||
expect { execute }.to change { pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
|
||||
it 'unlocks artifacts from the successful pipeline' do
|
||||
expect { execute }.to change { successful_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
|
||||
end
|
||||
|
||||
it 'does not unlock artifacts for tag or branch with same name as ref' do
|
||||
|
|
@ -104,7 +168,7 @@ RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integra
|
|||
end
|
||||
|
||||
it 'unlocks job artifact records' do
|
||||
expect { execute }.to change { ::Ci::JobArtifact.artifact_unlocked.count }.from(0).to(8)
|
||||
expect { execute }.to change { ::Ci::JobArtifact.artifact_unlocked.count }.from(0).to(16)
|
||||
end
|
||||
|
||||
it 'unlocks pipeline artifact records' do
|
||||
|
|
@ -114,10 +178,10 @@ RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integra
|
|||
end
|
||||
|
||||
describe '#unlock_pipelines_query' do
|
||||
subject { described_class.new(pipeline.project, pipeline.user).unlock_pipelines_query(ci_ref, before_pipeline) }
|
||||
subject { described_class.new(successful_pipeline.project, successful_pipeline.user).unlock_pipelines_query(target_ref, before_pipeline) }
|
||||
|
||||
context 'when running on a ref before a pipeline' do
|
||||
let(:before_pipeline) { pipeline }
|
||||
let(:before_pipeline) { successful_pipeline }
|
||||
|
||||
it 'produces the expected SQL string' do
|
||||
expect(subject.squish).to eq <<~SQL.squish
|
||||
|
|
@ -132,7 +196,7 @@ RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integra
|
|||
FROM
|
||||
"ci_pipelines"
|
||||
WHERE
|
||||
"ci_pipelines"."ci_ref_id" = #{ci_ref.id}
|
||||
"ci_pipelines"."ci_ref_id" = #{target_ref.id}
|
||||
AND "ci_pipelines"."locked" = 1
|
||||
AND "ci_pipelines"."id" < #{before_pipeline.id}
|
||||
AND "ci_pipelines"."id" NOT IN
|
||||
|
|
@ -162,6 +226,33 @@ RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integra
|
|||
"base_and_descendants"
|
||||
AS
|
||||
"ci_pipelines")
|
||||
AND "ci_pipelines"."id" NOT IN
|
||||
(WITH RECURSIVE
|
||||
"base_and_descendants"
|
||||
AS
|
||||
((SELECT
|
||||
"ci_pipelines".*
|
||||
FROM
|
||||
"ci_pipelines"
|
||||
WHERE
|
||||
"ci_pipelines"."id" = #{target_ref.last_successful_pipeline.id})
|
||||
UNION
|
||||
(SELECT
|
||||
"ci_pipelines".*
|
||||
FROM
|
||||
"ci_pipelines",
|
||||
"base_and_descendants",
|
||||
"ci_sources_pipelines"
|
||||
WHERE
|
||||
"ci_sources_pipelines"."pipeline_id" = "ci_pipelines"."id"
|
||||
AND "ci_sources_pipelines"."source_pipeline_id" = "base_and_descendants"."id"
|
||||
AND "ci_sources_pipelines"."source_project_id" = "ci_sources_pipelines"."project_id"))
|
||||
SELECT
|
||||
"id"
|
||||
FROM
|
||||
"base_and_descendants"
|
||||
AS
|
||||
"ci_pipelines")
|
||||
LIMIT 1
|
||||
FOR UPDATE
|
||||
SKIP LOCKED)
|
||||
|
|
@ -186,7 +277,7 @@ RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integra
|
|||
FROM
|
||||
"ci_pipelines"
|
||||
WHERE
|
||||
"ci_pipelines"."ci_ref_id" = #{ci_ref.id}
|
||||
"ci_pipelines"."ci_ref_id" = #{target_ref.id}
|
||||
AND "ci_pipelines"."locked" = 1
|
||||
LIMIT 1
|
||||
FOR UPDATE
|
||||
|
|
@ -199,7 +290,7 @@ RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integra
|
|||
end
|
||||
|
||||
describe '#unlock_job_artifacts_query' do
|
||||
subject { described_class.new(pipeline.project, pipeline.user).unlock_job_artifacts_query(pipeline_ids) }
|
||||
subject { described_class.new(successful_pipeline.project, successful_pipeline.user).unlock_job_artifacts_query(pipeline_ids) }
|
||||
|
||||
context 'when given a single pipeline ID' do
|
||||
let(:pipeline_ids) { [older_pipeline.id] }
|
||||
|
|
@ -226,7 +317,7 @@ RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integra
|
|||
end
|
||||
|
||||
context 'when given multiple pipeline IDs' do
|
||||
let(:pipeline_ids) { [older_pipeline.id, newer_pipeline.id, pipeline.id] }
|
||||
let(:pipeline_ids) { [older_pipeline.id, last_successful_pipeline.id, successful_pipeline.id] }
|
||||
|
||||
it 'produces the expected SQL string' do
|
||||
expect(subject.squish).to eq <<~SQL.squish
|
||||
|
|
|
|||
|
|
@ -362,27 +362,13 @@ RSpec.configure do |config|
|
|||
./spec/requests/api/project_snapshots_spec.rb
|
||||
./spec/requests/api/project_snippets_spec.rb
|
||||
./spec/requests/api/projects_spec.rb
|
||||
./spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb
|
||||
./spec/support/shared_examples/requests/api/hooks_shared_examples.rb
|
||||
./spec/support/shared_examples/requests/api/notes_shared_examples.rb
|
||||
./spec/support/shared_examples/requests/api/pipelines/visibility_table_shared_examples.rb
|
||||
./spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb
|
||||
./spec/support/shared_examples/requests/api/snippets_shared_examples.rb
|
||||
./spec/support/shared_examples/requests/api/status_shared_examples.rb
|
||||
./spec/support/shared_examples/requests/clusters/certificate_based_clusters_feature_flag_shared_examples.rb
|
||||
./spec/support/shared_examples/requests/snippet_shared_examples.rb
|
||||
./ee/spec/requests/api/group_push_rule_spec.rb
|
||||
./ee/spec/requests/api/group_repository_storage_moves_spec.rb
|
||||
./ee/spec/requests/api/groups_spec.rb
|
||||
./ee/spec/requests/api/internal/upcoming_reconciliations_spec.rb
|
||||
./ee/spec/requests/api/invitations_spec.rb
|
||||
./ee/spec/requests/api/license_spec.rb
|
||||
./ee/spec/requests/api/merge_request_approvals_spec.rb
|
||||
./ee/spec/requests/api/namespaces_spec.rb
|
||||
./ee/spec/requests/api/notes_spec.rb
|
||||
./ee/spec/requests/api/settings_spec.rb
|
||||
./ee/spec/requests/api/users_spec.rb
|
||||
./ee/spec/support/shared_examples/requests/api/project_approval_rules_api_shared_examples.rb
|
||||
]
|
||||
|
||||
if example.metadata[:file_path].start_with?(*admin_mode_for_api_feature_flag_paths)
|
||||
|
|
|
|||
|
|
@ -118,14 +118,15 @@ module WorkhorseHelpers
|
|||
end
|
||||
end
|
||||
|
||||
def fog_to_uploaded_file(file, sha256: nil)
|
||||
filename = File.basename(file.key)
|
||||
def fog_to_uploaded_file(file, filename: nil, sha256: nil, remote_id: nil)
|
||||
filename ||= File.basename(file.key)
|
||||
|
||||
UploadedFile.new(nil,
|
||||
filename: filename,
|
||||
remote_id: filename,
|
||||
size: file.content_length,
|
||||
sha256: sha256
|
||||
)
|
||||
UploadedFile.new(
|
||||
nil,
|
||||
filename: filename,
|
||||
remote_id: remote_id || filename,
|
||||
size: file.content_length,
|
||||
sha256: sha256
|
||||
)
|
||||
end
|
||||
end
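Usage mirroring the call added in the artifacts spec above; the values come from that spec, and the remote_id reader is assumed to be exposed by UploadedFile.
file = fog_to_uploaded_file(
  object,
  filename: 'artifacts.zip',
  sha256: artifacts_sha256,
  remote_id: 'generated-remote-id-12345'
)
file.remote_id # the generated id is kept instead of falling back to File.basename(object.key)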
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
let!(:custom_attribute1) { attributable.custom_attributes.create! key: 'foo', value: 'foo' }
|
||||
let!(:custom_attribute2) { attributable.custom_attributes.create! key: 'bar', value: 'bar' }
|
||||
|
||||
describe "GET /#{attributable_name} with custom attributes filter" do
|
||||
describe "GET /#{attributable_name} with custom attributes filter", :aggregate_failures do
|
||||
before do
|
||||
other_attributable
|
||||
end
|
||||
|
|
@ -20,7 +20,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
|
||||
context 'with an authorized user' do
|
||||
it 'filters by custom attributes' do
|
||||
get api("/#{attributable_name}", admin), params: { custom_attributes: { foo: 'foo', bar: 'bar' } }
|
||||
get api("/#{attributable_name}", admin, admin_mode: true), params: { custom_attributes: { foo: 'foo', bar: 'bar' } }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response.size).to be 1
|
||||
|
|
@ -29,7 +29,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
end
|
||||
end
|
||||
|
||||
describe "GET /#{attributable_name} with custom attributes" do
|
||||
describe "GET /#{attributable_name} with custom attributes", :aggregate_failures do
|
||||
before do
|
||||
other_attributable
|
||||
end
|
||||
|
|
@ -46,7 +46,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
|
||||
context 'with an authorized user' do
|
||||
it 'does not include custom attributes by default' do
|
||||
get api("/#{attributable_name}", admin)
|
||||
get api("/#{attributable_name}", admin, admin_mode: true)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response).not_to be_empty
|
||||
|
|
@ -54,7 +54,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
end
|
||||
|
||||
it 'includes custom attributes if requested' do
|
||||
get api("/#{attributable_name}", admin), params: { with_custom_attributes: true }
|
||||
get api("/#{attributable_name}", admin, admin_mode: true), params: { with_custom_attributes: true }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response).not_to be_empty
|
||||
|
|
@ -72,7 +72,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
end
|
||||
end
|
||||
|
||||
describe "GET /#{attributable_name}/:id with custom attributes" do
|
||||
describe "GET /#{attributable_name}/:id with custom attributes", :aggregate_failures do
|
||||
context 'with an unauthorized user' do
|
||||
it 'does not include custom attributes' do
|
||||
get api("/#{attributable_name}/#{attributable.id}", user), params: { with_custom_attributes: true }
|
||||
|
|
@ -84,14 +84,14 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
|
||||
context 'with an authorized user' do
|
||||
it 'does not include custom attributes by default' do
|
||||
get api("/#{attributable_name}/#{attributable.id}", admin)
|
||||
get api("/#{attributable_name}/#{attributable.id}", admin, admin_mode: true)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response).not_to include 'custom_attributes'
|
||||
end
|
||||
|
||||
it 'includes custom attributes if requested' do
|
||||
get api("/#{attributable_name}/#{attributable.id}", admin), params: { with_custom_attributes: true }
|
||||
get api("/#{attributable_name}/#{attributable.id}", admin, admin_mode: true), params: { with_custom_attributes: true }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response['custom_attributes']).to contain_exactly(
|
||||
|
|
@ -102,7 +102,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
end
|
||||
end
|
||||
|
||||
describe "GET /#{attributable_name}/:id/custom_attributes" do
|
||||
describe "GET /#{attributable_name}/:id/custom_attributes", :aggregate_failures do
|
||||
context 'with an unauthorized user' do
|
||||
subject { get api("/#{attributable_name}/#{attributable.id}/custom_attributes", user) }
|
||||
|
||||
|
|
@ -111,7 +111,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
|
||||
context 'with an authorized user' do
|
||||
it 'returns all custom attributes' do
|
||||
get api("/#{attributable_name}/#{attributable.id}/custom_attributes", admin)
|
||||
get api("/#{attributable_name}/#{attributable.id}/custom_attributes", admin, admin_mode: true)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response).to contain_exactly(
|
||||
|
|
@ -122,7 +122,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
end
|
||||
end
|
||||
|
||||
describe "GET /#{attributable_name}/:id/custom_attributes/:key" do
|
||||
describe "GET /#{attributable_name}/:id/custom_attributes/:key", :aggregate_failures do
|
||||
context 'with an unauthorized user' do
|
||||
subject { get api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", user) }
|
||||
|
||||
|
|
@ -131,7 +131,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
|
||||
context 'with an authorized user' do
|
||||
it 'returns a single custom attribute' do
|
||||
get api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin)
|
||||
get api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin, admin_mode: true)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response).to eq({ 'key' => 'foo', 'value' => 'foo' })
|
||||
|
|
@ -139,7 +139,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
end
|
||||
end
|
||||
|
||||
describe "PUT /#{attributable_name}/:id/custom_attributes/:key" do
|
||||
describe "PUT /#{attributable_name}/:id/custom_attributes/:key", :aggregate_failures do
|
||||
context 'with an unauthorized user' do
|
||||
subject { put api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", user), params: { value: 'new' } }
|
||||
|
||||
|
|
@ -149,7 +149,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
context 'with an authorized user' do
|
||||
it 'creates a new custom attribute' do
|
||||
expect do
|
||||
put api("/#{attributable_name}/#{attributable.id}/custom_attributes/new", admin), params: { value: 'new' }
|
||||
put api("/#{attributable_name}/#{attributable.id}/custom_attributes/new", admin, admin_mode: true), params: { value: 'new' }
|
||||
end.to change { attributable.custom_attributes.count }.by(1)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
|
|
@ -159,7 +159,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
|
||||
it 'updates an existing custom attribute' do
|
||||
expect do
|
||||
put api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin), params: { value: 'new' }
|
||||
put api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin, admin_mode: true), params: { value: 'new' }
|
||||
end.not_to change { attributable.custom_attributes.count }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
|
|
@ -169,7 +169,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
end
|
||||
end
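The recurring change in these shared examples is the extra admin_mode keyword on the api helper, presumably so the requests still pass when admin mode is enforced for the REST API. A representative call, with parameters taken from the examples above:
put api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin, admin_mode: true),
    params: { value: 'new' }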
|
||||
|
||||
describe "DELETE /#{attributable_name}/:id/custom_attributes/:key" do
|
||||
describe "DELETE /#{attributable_name}/:id/custom_attributes/:key", :aggregate_failures do
|
||||
context 'with an unauthorized user' do
|
||||
subject { delete api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", user) }
|
||||
|
||||
|
|
@ -179,7 +179,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name|
|
|||
context 'with an authorized user' do
|
||||
it 'deletes an existing custom attribute' do
|
||||
expect do
|
||||
delete api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin)
|
||||
delete api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin, admin_mode: true)
|
||||
end.to change { attributable.custom_attributes.count }.by(-1)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:no_content)
|
||||
|
|
|
|||
|
|
@ -1,13 +1,13 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
RSpec.shared_examples 'web-hook API endpoints test hook' do |prefix|
|
||||
describe "POST #{prefix}/:hook_id" do
|
||||
describe "POST #{prefix}/:hook_id", :aggregate_failures do
|
||||
it 'tests the hook' do
|
||||
expect(WebHookService)
|
||||
.to receive(:new).with(hook, anything, String, force: false)
|
||||
.and_return(instance_double(WebHookService, execute: nil))
|
||||
|
||||
post api(hook_uri, user)
|
||||
post api(hook_uri, user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
end
|
||||
|
|
@ -17,7 +17,7 @@ end
|
|||
RSpec.shared_examples 'web-hook API endpoints with branch-filter' do |prefix|
|
||||
describe "POST #{prefix}/hooks" do
|
||||
it "returns a 422 error if branch filter is not valid" do
|
||||
post api(collection_uri, user),
|
||||
post api(collection_uri, user, admin_mode: user.admin?),
|
||||
params: { url: "http://example.com", push_events_branch_filter: '~badbranchname/' }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:unprocessable_entity)
|
||||
|
|
@ -58,10 +58,10 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
|
||||
let(:default_values) { {} }
|
||||
|
||||
describe "GET #{prefix}/hooks" do
|
||||
describe "GET #{prefix}/hooks", :aggregate_failures do
|
||||
context "authorized user" do
|
||||
it "returns all hooks" do
|
||||
get api(collection_uri, user)
|
||||
get api(collection_uri, user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(response).to match_collection_schema
|
||||
|
|
@ -70,7 +70,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
|
||||
context "when user is forbidden" do
|
||||
it "prevents access to hooks" do
|
||||
get api(collection_uri, unauthorized_user)
|
||||
get api(collection_uri, unauthorized_user, admin_mode: true)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:forbidden)
|
||||
end
|
||||
|
|
@ -90,7 +90,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
end
|
||||
|
||||
it 'returns the names of the url variables' do
|
||||
get api(collection_uri, user)
|
||||
get api(collection_uri, user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response).to contain_exactly(
|
||||
|
|
@ -102,10 +102,10 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
end
|
||||
end
|
||||
|
||||
describe "GET #{prefix}/hooks/:hook_id" do
|
||||
describe "GET #{prefix}/hooks/:hook_id", :aggregate_failures do
|
||||
context "authorized user" do
|
||||
it "returns a project hook" do
|
||||
get api(hook_uri, user)
|
||||
get api(hook_uri, user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(response).to match_hook_schema
|
||||
|
|
@ -114,7 +114,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
end
|
||||
|
||||
it "returns a 404 error if hook id is not available" do
|
||||
get api(hook_uri(non_existing_record_id), user)
|
||||
get api(hook_uri(non_existing_record_id), user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
|
|
@ -125,7 +125,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
end
|
||||
|
||||
it "has the correct alert status", :aggregate_failures do
|
||||
get api(hook_uri, user)
|
||||
get api(hook_uri, user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
|
||||
|
|
@ -140,7 +140,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
end
|
||||
|
||||
it "has the correct alert status", :aggregate_failures do
|
||||
get api(hook_uri, user)
|
||||
get api(hook_uri, user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
|
||||
|
|
@ -156,7 +156,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
|
||||
context "when user is forbidden" do
|
||||
it "does not access an existing hook" do
|
||||
get api(hook_uri, unauthorized_user)
|
||||
get api(hook_uri, unauthorized_user, admin_mode: true)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:forbidden)
|
||||
end
|
||||
|
|
@ -171,12 +171,12 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
end
|
||||
end
|
||||
|
||||
describe "POST #{prefix}/hooks" do
|
||||
describe "POST #{prefix}/hooks", :aggregate_failures do
|
||||
let(:hook_creation_params) { hook_params }
|
||||
|
||||
it "adds hook", :aggregate_failures do
|
||||
expect do
|
||||
post api(collection_uri, user),
|
||||
post api(collection_uri, user, admin_mode: user.admin?),
|
||||
params: hook_creation_params
|
||||
end.to change { hooks_count }.by(1)
|
||||
|
||||
|
|
@ -201,7 +201,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
token = "secret token"
|
||||
|
||||
expect do
|
||||
post api(collection_uri, user),
|
||||
post api(collection_uri, user, admin_mode: user.admin?),
|
||||
params: { url: "http://example.com", token: token }
|
||||
end.to change { hooks_count }.by(1)
|
||||
|
||||
|
|
@ -216,19 +216,19 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
end
|
||||
|
||||
it "returns a 400 error if url not given" do
|
||||
post api(collection_uri, user), params: { event_names.first => true }
|
||||
post api(collection_uri, user, admin_mode: user.admin?), params: { event_names.first => true }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:bad_request)
|
||||
end
|
||||
|
||||
it "returns a 400 error if no parameters are provided" do
|
||||
post api(collection_uri, user)
|
||||
post api(collection_uri, user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:bad_request)
|
||||
end
|
||||
|
||||
it 'sets default values for events', :aggregate_failures do
|
||||
post api(collection_uri, user), params: { url: 'http://mep.mep' }
|
||||
post api(collection_uri, user, admin_mode: user.admin?), params: { url: 'http://mep.mep' }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
expect(response).to match_hook_schema
|
||||
|
|
@ -239,22 +239,22 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
end
|
||||
|
||||
it "returns a 422 error if token not valid" do
|
||||
post api(collection_uri, user),
|
||||
post api(collection_uri, user, admin_mode: user.admin?),
|
||||
params: { url: "http://example.com", token: "foo\nbar" }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:unprocessable_entity)
|
||||
end
|
||||
|
||||
it "returns a 422 error if url not valid" do
|
||||
post api(collection_uri, user), params: { url: "ftp://example.com" }
|
||||
post api(collection_uri, user, admin_mode: user.admin?), params: { url: "ftp://example.com" }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:unprocessable_entity)
|
||||
end
|
||||
end
|
||||
|
||||
describe "PUT #{prefix}/hooks/:hook_id" do
|
||||
describe "PUT #{prefix}/hooks/:hook_id", :aggregate_failures do
|
||||
it "updates an existing hook" do
|
||||
put api(hook_uri, user), params: update_params
|
||||
put api(hook_uri, user, admin_mode: user.admin?), params: update_params
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(response).to match_hook_schema
|
||||
|
|
@ -267,7 +267,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
it 'updates the URL variables' do
|
||||
hook.update!(url_variables: { 'abc' => 'some value' })
|
||||
|
||||
put api(hook_uri, user),
|
||||
put api(hook_uri, user, admin_mode: user.admin?),
|
||||
params: { url_variables: [{ key: 'def', value: 'other value' }] }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
|
|
@ -280,7 +280,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
it "adds the token without including it in the response" do
|
||||
token = "secret token"
|
||||
|
||||
put api(hook_uri, user), params: { url: "http://example.org", token: token }
|
||||
put api(hook_uri, user, admin_mode: user.admin?), params: { url: "http://example.org", token: token }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response["url"]).to eq("http://example.org")
|
||||
|
|
@ -291,67 +291,67 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
end
|
||||
|
||||
it "returns 404 error if hook id not found" do
|
||||
put api(hook_uri(non_existing_record_id), user), params: { url: 'http://example.org' }
|
||||
put api(hook_uri(non_existing_record_id), user, admin_mode: user.admin?), params: { url: 'http://example.org' }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
|
||||
it "returns 400 error if no parameters are provided" do
|
||||
put api(hook_uri, user)
|
||||
put api(hook_uri, user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:bad_request)
|
||||
end
|
||||
|
||||
it "returns a 422 error if url is not valid" do
|
||||
put api(hook_uri, user), params: { url: 'ftp://example.com' }
|
||||
put api(hook_uri, user, admin_mode: user.admin?), params: { url: 'ftp://example.com' }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:unprocessable_entity)
|
||||
end
|
||||
|
||||
it "returns a 422 error if token is not valid" do
|
||||
put api(hook_uri, user), params: { token: %w[foo bar].join("\n") }
|
||||
put api(hook_uri, user, admin_mode: user.admin?), params: { token: %w[foo bar].join("\n") }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:unprocessable_entity)
|
||||
end
|
||||
end
|
||||
|
||||
describe "DELETE /projects/:id/hooks/:hook_id" do
|
||||
describe "DELETE /projects/:id/hooks/:hook_id", :aggregate_failures do
|
||||
it "deletes hook from project" do
|
||||
expect do
|
||||
delete api(hook_uri, user)
|
||||
delete api(hook_uri, user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:no_content)
|
||||
end.to change { hooks_count }.by(-1)
|
||||
end
|
||||
|
||||
it "returns a 404 error when deleting non existent hook" do
|
||||
delete api(hook_uri(non_existing_record_id), user)
|
||||
delete api(hook_uri(non_existing_record_id), user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
|
||||
it "returns a 404 error if hook id not given" do
|
||||
delete api(collection_uri, user)
|
||||
delete api(collection_uri, user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
|
||||
it "returns forbidden if a user attempts to delete hooks they do not own" do
|
||||
delete api(hook_uri, unauthorized_user)
|
||||
delete api(hook_uri, unauthorized_user, admin_mode: true)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:forbidden)
|
||||
expect(WebHook.exists?(hook.id)).to be_truthy
|
||||
end
|
||||
|
||||
it_behaves_like '412 response' do
|
||||
let(:request) { api(hook_uri, user) }
|
||||
let(:request) { api(hook_uri, user, admin_mode: user.admin?) }
|
||||
end
|
||||
end
|
||||
|
||||
describe "PUT #{prefix}/hooks/:hook_id/url_variables/:key", :aggregate_failures do
|
||||
it 'sets the variable' do
|
||||
expect do
|
||||
put api("#{hook_uri}/url_variables/abc", user),
|
||||
put api("#{hook_uri}/url_variables/abc", user, admin_mode: user.admin?),
|
||||
params: { value: 'some secret value' }
|
||||
end.to change { hook.reload.url_variables }.to(eq('abc' => 'some secret value'))
|
||||
|
||||
|
|
@ -361,7 +361,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
it 'overwrites existing values' do
|
||||
hook.update!(url_variables: { 'abc' => 'xyz', 'def' => 'other value' })
|
||||
|
||||
put api("#{hook_uri}/url_variables/abc", user),
|
||||
put api("#{hook_uri}/url_variables/abc", user, admin_mode: user.admin?),
|
||||
params: { value: 'some secret value' }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:no_content)
|
||||
|
|
@ -369,21 +369,21 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
end
|
||||
|
||||
it "returns a 404 error when editing non existent hook" do
|
||||
put api("#{hook_uri(non_existing_record_id)}/url_variables/abc", user),
|
||||
put api("#{hook_uri(non_existing_record_id)}/url_variables/abc", user, admin_mode: user.admin?),
|
||||
params: { value: 'xyz' }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
|
||||
it "returns a 422 error when the key is illegal" do
|
||||
put api("#{hook_uri}/url_variables/abc%20def", user),
|
||||
put api("#{hook_uri}/url_variables/abc%20def", user, admin_mode: user.admin?),
|
||||
params: { value: 'xyz' }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:unprocessable_entity)
|
||||
end
|
||||
|
||||
it "returns a 422 error when the value is illegal" do
|
||||
put api("#{hook_uri}/url_variables/abc", user),
|
||||
put api("#{hook_uri}/url_variables/abc", user, admin_mode: user.admin?),
|
||||
params: { value: '' }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:unprocessable_entity)
|
||||
|
|
@ -397,7 +397,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
|
||||
it 'unsets the variable' do
|
||||
expect do
|
||||
delete api("#{hook_uri}/url_variables/abc", user)
|
||||
delete api("#{hook_uri}/url_variables/abc", user, admin_mode: user.admin?)
|
||||
end.to change { hook.reload.url_variables }.to(eq({ 'def' => 'other value' }))
|
||||
|
||||
expect(response).to have_gitlab_http_status(:no_content)
|
||||
|
|
@ -406,13 +406,13 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
|
|||
it 'returns 404 for keys that do not exist' do
|
||||
hook.update!(url_variables: { 'def' => 'other value' })
|
||||
|
||||
delete api("#{hook_uri}/url_variables/abc", user)
|
||||
delete api("#{hook_uri}/url_variables/abc", user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
|
||||
it "returns a 404 error when deleting a variable from a non existent hook" do
|
||||
delete api(hook_uri(non_existing_record_id) + "/url_variables/abc", user)
|
||||
delete api(hook_uri(non_existing_record_id) + "/url_variables/abc", user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
||||
describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes" do
|
||||
describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes", :aggregate_failures do
|
||||
context 'sorting' do
|
||||
before do
|
||||
params = { noteable: noteable, author: user }
|
||||
|
|
@ -12,7 +12,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
|
||||
context 'without sort params' do
|
||||
it 'sorts by created_at in descending order by default' do
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user)
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?)
|
||||
|
||||
response_dates = json_response.map { |note| note['created_at'] }
|
||||
|
||||
|
|
@ -23,7 +23,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
it 'fetches notes using parent path as id parameter' do
|
||||
parent_id = CGI.escape(parent.full_path)
|
||||
|
||||
get api("/#{parent_type}/#{parent_id}/#{noteable_type}/#{noteable[id_name]}/notes", user)
|
||||
get api("/#{parent_type}/#{parent_id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
end
|
||||
|
|
@ -40,7 +40,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
end
|
||||
|
||||
it 'page breaks first page correctly' do
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?per_page=4", user)
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?per_page=4", user, admin_mode: user.admin?)
|
||||
|
||||
response_ids = json_response.map { |note| note['id'] }
|
||||
|
||||
|
|
@ -49,7 +49,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
end
|
||||
|
||||
it 'page breaks second page correctly' do
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?per_page=4&page=2", user)
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?per_page=4&page=2", user, admin_mode: user.admin?)
|
||||
|
||||
response_ids = json_response.map { |note| note['id'] }
|
||||
|
||||
|
|
@ -60,7 +60,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
end
|
||||
|
||||
it 'sorts by ascending order when requested' do
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?sort=asc", user)
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?sort=asc", user, admin_mode: user.admin?)
|
||||
|
||||
response_dates = json_response.map { |note| note['created_at'] }
|
||||
|
||||
|
|
@ -69,7 +69,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
end
|
||||
|
||||
it 'sorts by updated_at in descending order when requested' do
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?order_by=updated_at", user)
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?order_by=updated_at", user, admin_mode: user.admin?)
|
||||
|
||||
response_dates = json_response.map { |note| note['updated_at'] }
|
||||
|
||||
|
|
@ -78,7 +78,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
end
|
||||
|
||||
it 'sorts by updated_at in ascending order when requested' do
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?order_by=updated_at&sort=asc", user)
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?order_by=updated_at&sort=asc", user, admin_mode: user.admin?)
|
||||
|
||||
response_dates = json_response.map { |note| note['updated_at'] }
|
||||
|
||||
|
|
@ -88,7 +88,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
end
|
||||
|
||||
it "returns an array of notes" do
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user)
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(response).to include_pagination_headers
|
||||
|
|
@ -97,7 +97,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
end
|
||||
|
||||
it "returns a 404 error when noteable id not found" do
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{non_existing_record_id}/notes", user)
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{non_existing_record_id}/notes", user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
|
|
@ -105,36 +105,36 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
it "returns 404 when not authorized" do
|
||||
parent.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
|
||||
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", private_user)
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", private_user, admin_mode: private_user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
end
|
||||
|
||||
describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes/:note_id" do
|
||||
describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes/:note_id", :aggregate_failures do
|
||||
it "returns a note by id" do
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{note.id}", user)
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{note.id}", user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response['body']).to eq(note.note)
|
||||
end
|
||||
|
||||
it "returns a 404 error if note not found" do
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{non_existing_record_id}", user)
|
||||
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{non_existing_record_id}", user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
end
|
||||
|
||||
describe "POST /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes" do
|
||||
describe "POST /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes", :aggregate_failures do
|
||||
let(:params) { { body: 'hi!' } }
|
||||
|
||||
subject do
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: params
|
||||
end
|
||||
|
||||
it "creates a new note" do
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: { body: 'hi!' }
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: { body: 'hi!' }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
expect(json_response['body']).to eq('hi!')
|
||||
|
|
@ -143,7 +143,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
end
|
||||
|
||||
it "returns a 400 bad request error if body not given" do
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user)
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:bad_request)
|
||||
end
|
||||
|
|
@ -158,7 +158,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
uri = "/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes"
|
||||
|
||||
expect do
|
||||
post api(uri, user), params: { body: 'hi!' }
|
||||
post api(uri, user, admin_mode: user.admin?), params: { body: 'hi!' }
|
||||
end.to change { Event.count }.by(1)
|
||||
end
|
||||
|
||||
|
|
@ -169,7 +169,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
context 'by an admin' do
|
||||
it 'sets the creation time on the new note' do
|
||||
admin = create(:admin)
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", admin), params: params
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", admin, admin_mode: true), params: params
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
expect(json_response['body']).to eq('hi!')
|
||||
|
|
@ -185,7 +185,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
let(:user) { project.first_owner }
|
||||
|
||||
it 'sets the creation time on the new note' do
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: params
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
expect(json_response['body']).to eq('hi!')
|
||||
|
|
@ -215,7 +215,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
when 'groups'
|
||||
context 'by a group owner' do
|
||||
it 'sets the creation time on the new note' do
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: params
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
expect(json_response['body']).to eq('hi!')
|
||||
|
|
@ -253,7 +253,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
|
||||
context 'when the user is posting an award emoji on their own noteable' do
|
||||
it 'creates a new note' do
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: { body: ':+1:' }
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: { body: ':+1:' }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
expect(json_response['body']).to eq(':+1:')
|
||||
|
|
@ -266,7 +266,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
end
|
||||
|
||||
it 'responds with 404' do
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", private_user),
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", private_user, admin_mode: private_user.admin?),
|
||||
params: { body: 'Foo' }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
|
|
@ -299,11 +299,11 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
end
|
||||
end
|
||||
|
||||
describe "PUT /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes/:note_id" do
|
||||
describe "PUT /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes/:note_id", :aggregate_failures do
|
||||
let(:params) { { body: 'Hello!' } }
|
||||
|
||||
subject do
|
||||
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{note.id}", user), params: params
|
||||
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{note.id}", user, admin_mode: user.admin?), params: params
|
||||
end
|
||||
|
||||
context 'when only body param is present' do
|
||||
|
|
@ -329,7 +329,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
end
|
||||
|
||||
it 'returns a 404 error when note id not found' do
|
||||
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{non_existing_record_id}", user),
|
||||
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{non_existing_record_id}", user, admin_mode: user.admin?),
|
||||
params: { body: 'Hello!' }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
|
|
@ -337,32 +337,32 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
|
|||
|
||||
it 'returns a 400 bad request error if body is empty' do
|
||||
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
|
||||
"notes/#{note.id}", user), params: { body: '' }
|
||||
"notes/#{note.id}", user, admin_mode: user.admin?), params: { body: '' }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:bad_request)
|
||||
end
|
||||
end
|
||||
|
||||
describe "DELETE /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes/:note_id" do
|
||||
describe "DELETE /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes/:note_id", :aggregate_failures do
|
||||
it 'deletes a note' do
|
||||
delete api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
|
||||
"notes/#{note.id}", user)
|
||||
"notes/#{note.id}", user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:no_content)
|
||||
# Check if note is really deleted
|
||||
delete api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
|
||||
"notes/#{note.id}", user)
|
||||
"notes/#{note.id}", user, admin_mode: user.admin?)
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
|
||||
it 'returns a 404 error when note id not found' do
|
||||
delete api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{non_existing_record_id}", user)
|
||||
delete api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{non_existing_record_id}", user, admin_mode: user.admin?)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
|
||||
it_behaves_like '412 response' do
|
||||
let(:request) { api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{note.id}", user) }
|
||||
let(:request) { api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{note.id}", user, admin_mode: user.admin?) }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -370,16 +370,16 @@ end
|
|||
RSpec.shared_examples 'noteable API with confidential notes' do |parent_type, noteable_type, id_name|
|
||||
it_behaves_like 'noteable API', parent_type, noteable_type, id_name
|
||||
|
||||
describe "POST /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes" do
|
||||
describe "POST /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes", :aggregate_failures do
|
||||
let(:params) { { body: 'hi!' } }
|
||||
|
||||
subject do
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: params
|
||||
end
|
||||
|
||||
context 'with internal param' do
|
||||
it "creates a confidential note if internal is set to true" do
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params.merge(internal: true)
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: params.merge(internal: true)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
expect(json_response['body']).to eq('hi!')
|
||||
|
|
@ -391,7 +391,7 @@ RSpec.shared_examples 'noteable API with confidential notes' do |parent_type, no
|
|||
|
||||
context 'with deprecated confidential param' do
|
||||
it "creates a confidential note if confidential is set to true" do
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params.merge(confidential: true)
|
||||
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: params.merge(confidential: true)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
expect(json_response['body']).to eq('hi!')
|
||||
|
|
|
|||
|
|
@ -224,10 +224,10 @@ RSpec.shared_examples 'pipelines visibility table' do
project.project_feature.update!(project_feature_attributes)
project.add_role(ci_user, user_role) if user_role && user_role != :non_member
get api(pipelines_api_path, api_user)
get api(pipelines_api_path, api_user, admin_mode: is_admin)
end
it do
specify do
expect(response).to have_gitlab_http_status(response_status)
expect(api_response).to match(expected_response)
end
@ -9,7 +9,7 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type|
let(:repository_storage_move_id) { storage_move.id }
def get_container_repository_storage_move
get api(url, user)
get api(url, user, admin_mode: user.admin?)
end
it 'returns a container repository storage move', :aggregate_failures do

@ -39,7 +39,7 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type|
shared_examples 'get container repository storage move list' do
def get_container_repository_storage_moves
get api(url, user)
get api(url, user, admin_mode: user.admin?)
end
it 'returns container repository storage moves', :aggregate_failures do

@ -90,7 +90,7 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type|
let(:container_id) { non_existing_record_id }
it 'returns not found' do
get api(url, user)
get api(url, user, admin_mode: user.admin?)
expect(response).to have_gitlab_http_status(:not_found)
end

@ -108,7 +108,7 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type|
let(:repository_storage_move_id) { storage_move.id }
it 'returns not found' do
get api(url, user)
get api(url, user, admin_mode: user.admin?)
expect(response).to have_gitlab_http_status(:not_found)
end

@ -127,20 +127,20 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type|
end
end
describe "POST /#{container_type}/:id/repository_storage_moves" do
describe "POST /#{container_type}/:id/repository_storage_moves", :aggregate_failures do
let(:container_id) { container.id }
let(:url) { "/#{container_type}/#{container_id}/repository_storage_moves" }
let(:destination_storage_name) { 'test_second_storage' }
def create_container_repository_storage_move
post api(url, user), params: { destination_storage_name: destination_storage_name }
post api(url, user, admin_mode: user.admin?), params: { destination_storage_name: destination_storage_name }
end
before do
stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
end
it 'schedules a container repository storage move', :aggregate_failures do
it 'schedules a container repository storage move' do
create_container_repository_storage_move
storage_move = container.repository_storage_moves.last

@ -158,7 +158,7 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type|
it { expect { create_container_repository_storage_move }.to be_denied_for(:user) }
end
context 'destination_storage_name is missing', :aggregate_failures do
context 'destination_storage_name is missing' do
let(:destination_storage_name) { nil }
it 'schedules a container repository storage move' do

@ -192,7 +192,7 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type|
let(:destination_storage_name) { 'test_second_storage' }
def create_container_repository_storage_moves
post api(url, user), params: {
post api(url, user, admin_mode: user.admin?), params: {
source_storage_name: source_storage_name,
destination_storage_name: destination_storage_name
}
@ -1,12 +1,19 @@
# frozen_string_literal: true

RSpec.shared_examples 'raw snippet files' do
let_it_be(:user_token) { create(:personal_access_token, user: snippet.author) }
let(:snippet_id) { snippet.id }
let(:user) { snippet.author }
let_it_be(:user) { snippet.author }
let(:file_path) { '%2Egitattributes' }
let(:ref) { 'master' }
let_it_be(:user_token) do
if user.admin?
create(:personal_access_token, :admin_mode, user: user)
else
create(:personal_access_token, user: user)
end
end
subject { get api(api_path, personal_access_token: user_token) }
context 'with an invalid snippet ID' do

@ -15,8 +22,10 @@ RSpec.shared_examples 'raw snippet files' do
it 'returns 404' do
subject
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
aggregate_failures do
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
end
end

@ -185,7 +194,7 @@ RSpec.shared_examples 'snippet individual non-file updates' do
end
RSpec.shared_examples 'invalid snippet updates' do
it 'returns 404 for invalid snippet id' do
it 'returns 404 for invalid snippet id', :aggregate_failures do
update_snippet(snippet_id: non_existing_record_id, params: { title: 'foo' })
expect(response).to have_gitlab_http_status(:not_found)

@ -204,7 +213,7 @@ RSpec.shared_examples 'invalid snippet updates' do
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 if title is blank' do
it 'returns 400 if title is blank', :aggregate_failures do
update_snippet(params: { title: '' })
expect(response).to have_gitlab_http_status(:bad_request)

@ -236,7 +245,9 @@ RSpec.shared_examples 'snippet access with different users' do
it 'returns the correct response' do
request_user = user_for(requester)
get api(path, request_user)
admin_mode = requester == :admin
get api(path, request_user, admin_mode: admin_mode)
expect(response).to have_gitlab_http_status(status)
end
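The snippet examples above build the token with a `:admin_mode` trait on the personal access token factory when the author is an admin. As a rough sketch only, assuming FactoryBot and a `scopes` attribute on the token (this is not the actual GitLab factory definition), such a trait could look like:

# Illustrative sketch of a :admin_mode trait that widens the token's scopes.
# The real factory and the exact scope names may differ.
FactoryBot.define do
  factory :personal_access_token do
    user
    name { 'Test token' }
    scopes { [:api] }

    trait :admin_mode do
      scopes { [:api, :admin_mode] }
    end
  end
end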
@ -0,0 +1,64 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Ci::UnlockRefArtifactsOnPipelineStopWorker, feature_category: :build_artifacts do
describe '#perform' do
subject(:perform) { described_class.new.perform(pipeline_id) }
include_examples 'an idempotent worker' do
subject(:idempotent_perform) { perform_multiple(pipeline.id, exec_times: 2) }
let!(:older_pipeline) do
create(:ci_pipeline, :success, :with_job, locked: :artifacts_locked).tap do |pipeline|
create(:ci_job_artifact, job: pipeline.builds.first)
end
end
let!(:pipeline) do
create(:ci_pipeline, :success, :with_job, ref: older_pipeline.ref, tag: older_pipeline.tag,
project: older_pipeline.project, locked: :unlocked).tap do |pipeline|
create(:ci_job_artifact, job: pipeline.builds.first)
end
end
it 'unlocks the artifacts from older pipelines' do
expect { idempotent_perform }.to change { older_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
end
end
context 'when pipeline exists' do
let(:pipeline) { create(:ci_pipeline, :success, :with_job) }
let(:pipeline_id) { pipeline.id }
it 'calls the Ci::UnlockArtifactsService with the ref and pipeline' do
expect_next_instance_of(Ci::UnlockArtifactsService) do |service|
expect(service).to receive(:execute).with(pipeline.ci_ref, pipeline).and_call_original
end
perform
end
end
context 'when pipeline does not exist' do
let(:pipeline_id) { non_existing_record_id }
it 'does not call the service' do
expect(Ci::UnlockArtifactsService).not_to receive(:new)
perform
end
end
context 'when the ref no longer exists' do
let(:pipeline) { create(:ci_pipeline, :success, :with_job, ci_ref_presence: false) }
let(:pipeline_id) { pipeline.id }
it 'does not call the service' do
expect(Ci::UnlockArtifactsService).not_to receive(:new)
perform
end
end
end
end
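This spec exercises the worker that the new `after_transition running: ... + [:manual]` hook on `Ci::Pipeline` enqueues when a pipeline stops. A hedged sketch of the behaviour the spec implies, not the production class itself (the mixin, `data_consistency`-style options, and the `Ci::UnlockArtifactsService.new(project, user)` constructor arguments are assumptions):

# Sketch: load the pipeline, bail out if it or its ci_ref is gone,
# otherwise unlock ref artifacts for it. Constructor args are assumed.
module Ci
  class UnlockRefArtifactsOnPipelineStopWorker
    include ApplicationWorker

    idempotent!

    def perform(pipeline_id)
      pipeline = ::Ci::Pipeline.find_by_id(pipeline_id)
      return unless pipeline
      return unless pipeline.ci_ref

      ::Ci::UnlockArtifactsService
        .new(pipeline.project, pipeline.user)
        .execute(pipeline.ci_ref, pipeline)
    end
  end
end

Guarding on both the pipeline and its `ci_ref` matches the two "does not call the service" contexts in the spec, and `idempotent!` matches the shared idempotent-worker example.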
@ -32,10 +32,10 @@ require (
gocloud.dev v0.29.0
golang.org/x/image v0.5.0
golang.org/x/lint v0.0.0-20210508222113-6edffad5e616
golang.org/x/net v0.7.0
golang.org/x/net v0.8.0
golang.org/x/oauth2 v0.5.0
golang.org/x/tools v0.6.0
google.golang.org/grpc v1.53.0
google.golang.org/grpc v1.54.0
google.golang.org/protobuf v1.30.0
honnef.co/go/tools v0.3.3
)

@ -112,7 +112,7 @@ require (
golang.org/x/mod v0.8.0 // indirect
golang.org/x/sync v0.1.0 // indirect
golang.org/x/sys v0.6.0 // indirect
golang.org/x/text v0.7.0 // indirect
golang.org/x/text v0.8.0 // indirect
golang.org/x/time v0.3.0 // indirect
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect
google.golang.org/api v0.110.0 // indirect
@ -2222,8 +2222,8 @@ golang.org/x/net v0.3.1-0.20221206200815-1e63c2f08a10/go.mod h1:MBQ8lrhLObU/6UmL
golang.org/x/net v0.4.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE=
golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.8.0 h1:Zrh2ngAOFYneWTAIAPethzeaQLuHwhuBkuV6ZiRnUaQ=
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
golang.org/x/oauth2 v0.0.0-20170912212905-13449ad91cb2/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=

@ -2453,8 +2453,9 @@ golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68=
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/time v0.0.0-20170424234030-8be79e1e0910/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=

@ -2831,8 +2832,9 @@ google.golang.org/grpc v1.50.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCD
google.golang.org/grpc v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI=
google.golang.org/grpc v1.51.0/go.mod h1:wgNDFcnuBGmxLKI/qn4T+m5BtEBYXJPvibbUPsAIPww=
google.golang.org/grpc v1.52.1/go.mod h1:pu6fVzoFb+NBYNAvQL08ic+lvB2IojljRYuun5vorUY=
google.golang.org/grpc v1.53.0 h1:LAv2ds7cmFV/XTS3XG1NneeENYrXGmorPxsBbptIjNc=
google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwSMQpw=
google.golang.org/grpc v1.54.0 h1:EhTqbhiYeixwWQtAEZAxmV9MGqcjEU2mFx52xCzNyag=
google.golang.org/grpc v1.54.0/go.mod h1:PUSEXI6iWghWaB6lXM4knEgpJNu2qUcKfDtNci3EC2g=
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=