Add latest changes from gitlab-org/gitlab@master

parent 3ca896b640
commit 4e175ca905
@@ -103,6 +103,19 @@ instance-ff-inverse:
# ------------------------------------------
# Jobs with parallel variant
# ------------------------------------------

# ========== instance ===========
instance:
  extends:
    - .parallel
    - .qa
  variables:
    QA_SCENARIO: Test::Instance::Image
  rules:
    - !reference [.rules:test:feature-flags-set, rules] # always run instance to validate ff change
    - !reference [.rules:test:qa-parallel, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/

instance-selective:
  extends: .qa
  variables:

@@ -110,12 +123,29 @@ instance-selective:
  rules:
    - !reference [.rules:test:qa-selective, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/
instance:

instance-selective-parallel:
  extends:
    - .parallel
    - instance-selective
    - .qa
  variables:
    QA_SCENARIO: Test::Instance::Image
  rules:
    - !reference [.rules:test:qa-selective-parallel, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/
      variables:
        QA_TESTS: ""

# ========== praefect ===========

praefect:
  extends:
    - .parallel
    - .qa
  variables:
    QA_SCENARIO: Test::Integration::Praefect
    QA_CAN_TEST_PRAEFECT: "true"
  rules:
    - !reference [.rules:test:feature-flags-set, rules] # always run instance to validate ff change
    - !reference [.rules:test:qa-parallel, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/

@@ -127,10 +157,28 @@ praefect-selective:
  rules:
    - !reference [.rules:test:qa-selective, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/
praefect:

praefect-selective-parallel:
  extends:
    - .qa
    - .parallel
    - praefect-selective
  variables:
    QA_SCENARIO: Test::Integration::Praefect
    QA_CAN_TEST_PRAEFECT: "true"
  rules:
    - !reference [.rules:test:qa-selective-parallel, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/
      variables:
        QA_TESTS: ""

# ========== relative-url ===========

relative-url:
  extends:
    - .qa
    - .parallel
  variables:
    QA_SCENARIO: Test::Instance::RelativeUrl
  rules:
    - !reference [.rules:test:qa-parallel, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/

@@ -142,10 +190,28 @@ relative-url-selective:
  rules:
    - !reference [.rules:test:qa-selective, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/
relative-url:

relative-url-selective-parallel:
  extends:
    - .qa
    - .parallel
    - relative-url-selective
  variables:
    QA_SCENARIO: Test::Instance::RelativeUrl
  rules:
    - !reference [.rules:test:qa-selective-parallel, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/
      variables:
        QA_TESTS: ""

# ========== decomposition-single-db ===========

decomposition-single-db:
  extends:
    - .qa
    - .parallel
  variables:
    QA_SCENARIO: Test::Instance::Image
    GITLAB_QA_OPTS: --omnibus-config decomposition_single_db $EXTRA_GITLAB_QA_OPTS
  rules:
    - !reference [.rules:test:qa-parallel, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/

@@ -158,10 +224,30 @@ decomposition-single-db-selective:
  rules:
    - !reference [.rules:test:qa-selective, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/
decomposition-single-db:

decomposition-single-db-selective-parallel:
  extends:
    - .qa
    - .parallel
    - decomposition-single-db-selective
  variables:
    QA_SCENARIO: Test::Instance::Image
    GITLAB_QA_OPTS: --omnibus-config decomposition_single_db $EXTRA_GITLAB_QA_OPTS
  rules:
    - !reference [.rules:test:qa-selective-parallel, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/
      variables:
        QA_TESTS: ""

# ========== decomposition-multiple-db ===========

decomposition-multiple-db:
  extends:
    - .qa
    - .parallel
  variables:
    QA_SCENARIO: Test::Instance::Image
    GITLAB_ALLOW_SEPARATE_CI_DATABASE: "true"
    GITLAB_QA_OPTS: --omnibus-config decomposition_multiple_db $EXTRA_GITLAB_QA_OPTS
  rules:
    - !reference [.rules:test:qa-parallel, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/

@@ -175,13 +261,33 @@ decomposition-multiple-db-selective:
  rules:
    - !reference [.rules:test:qa-selective, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/
decomposition-multiple-db:

decomposition-multiple-db-selective-parallel:
  extends:
    - .qa
    - .parallel
    - decomposition-multiple-db-selective
  variables:
    QA_SCENARIO: Test::Instance::Image
    GITLAB_ALLOW_SEPARATE_CI_DATABASE: "true"
    GITLAB_QA_OPTS: --omnibus-config decomposition_multiple_db $EXTRA_GITLAB_QA_OPTS
  rules:
    - !reference [.rules:test:qa-selective-parallel, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/
      variables:
        QA_TESTS: ""

# ========== object-storage ===========

object-storage:
  extends: .qa
  parallel: 2
  variables:
    QA_SCENARIO: Test::Instance::Image
    QA_RSPEC_TAGS: --tag object_storage
    GITLAB_QA_OPTS: --omnibus-config object_storage $EXTRA_GITLAB_QA_OPTS
  rules:
    - !reference [.rules:test:qa-parallel, rules]
    - if: $QA_SUITES =~ /Test::Instance::All/
    - if: $QA_SUITES =~ /Test::Instance::ObjectStorage/

object-storage-selective:
  extends: .qa

@@ -192,12 +298,30 @@ object-storage-selective:
  rules:
    - !reference [.rules:test:qa-selective, rules]
    - if: $QA_SUITES =~ /Test::Instance::ObjectStorage/
object-storage:
  extends: object-storage-selective

object-storage-selective-parallel:
  extends: .qa
  parallel: 2
  variables:
    QA_SCENARIO: Test::Instance::Image
    QA_RSPEC_TAGS: --tag object_storage
    GITLAB_QA_OPTS: --omnibus-config object_storage $EXTRA_GITLAB_QA_OPTS
  rules:
    - !reference [.rules:test:qa-parallel, rules]
    - !reference [.rules:test:qa-selective-parallel, rules]
    - if: $QA_SUITES =~ /Test::Instance::ObjectStorage/
      variables:
        QA_TESTS: ""

# ========== object-storage-aws ===========

object-storage-aws:
  extends: object-storage
  variables:
    AWS_S3_ACCESS_KEY: $QA_AWS_S3_ACCESS_KEY
    AWS_S3_BUCKET_NAME: $QA_AWS_S3_BUCKET_NAME
    AWS_S3_KEY_ID: $QA_AWS_S3_KEY_ID
    AWS_S3_REGION: $QA_AWS_S3_REGION
    GITLAB_QA_OPTS: --omnibus-config object_storage_aws $EXTRA_GITLAB_QA_OPTS

object-storage-aws-selective:
  extends: object-storage-selective

@@ -207,11 +331,27 @@ object-storage-aws-selective:
    AWS_S3_KEY_ID: $QA_AWS_S3_KEY_ID
    AWS_S3_REGION: $QA_AWS_S3_REGION
    GITLAB_QA_OPTS: --omnibus-config object_storage_aws $EXTRA_GITLAB_QA_OPTS
object-storage-aws:
  extends: object-storage-aws-selective
  parallel: 2
  rules:
    - !reference [object-storage, rules]

object-storage-aws-selective-parallel:
  extends: object-storage-selective-parallel
  variables:
    AWS_S3_ACCESS_KEY: $QA_AWS_S3_ACCESS_KEY
    AWS_S3_BUCKET_NAME: $QA_AWS_S3_BUCKET_NAME
    AWS_S3_KEY_ID: $QA_AWS_S3_KEY_ID
    AWS_S3_REGION: $QA_AWS_S3_REGION
    GITLAB_QA_OPTS: --omnibus-config object_storage_aws $EXTRA_GITLAB_QA_OPTS


# ========== object-storage-gcs ===========

object-storage-gcs:
  extends: object-storage
  variables:
    GCS_BUCKET_NAME: $QA_GCS_BUCKET_NAME
    GOOGLE_PROJECT: $QA_GOOGLE_PROJECT
    GOOGLE_JSON_KEY: $QA_GOOGLE_JSON_KEY
    GOOGLE_CLIENT_EMAIL: $QA_GOOGLE_CLIENT_EMAIL
    GITLAB_QA_OPTS: --omnibus-config object_storage_gcs $EXTRA_GITLAB_QA_OPTS

object-storage-gcs-selective:
  extends: object-storage-selective

@@ -221,11 +361,28 @@ object-storage-gcs-selective:
    GOOGLE_JSON_KEY: $QA_GOOGLE_JSON_KEY
    GOOGLE_CLIENT_EMAIL: $QA_GOOGLE_CLIENT_EMAIL
    GITLAB_QA_OPTS: --omnibus-config object_storage_gcs $EXTRA_GITLAB_QA_OPTS
object-storage-gcs:
  extends: object-storage-gcs-selective

object-storage-gcs-selective-parallel:
  extends: object-storage-selective-parallel
  variables:
    GCS_BUCKET_NAME: $QA_GCS_BUCKET_NAME
    GOOGLE_PROJECT: $QA_GOOGLE_PROJECT
    GOOGLE_JSON_KEY: $QA_GOOGLE_JSON_KEY
    GOOGLE_CLIENT_EMAIL: $QA_GOOGLE_CLIENT_EMAIL
    GITLAB_QA_OPTS: --omnibus-config object_storage_gcs $EXTRA_GITLAB_QA_OPTS

# ========== packages ===========

packages:
  extends: .qa
  parallel: 2
  variables:
    QA_SCENARIO: Test::Instance::Image
    QA_RSPEC_TAGS: --tag packages
    GITLAB_QA_OPTS: --omnibus-config packages $EXTRA_GITLAB_QA_OPTS
  rules:
    - !reference [object-storage, rules]
    - !reference [.rules:test:qa-parallel, rules]
    - if: $QA_SUITES =~ /Test::Instance::Packages/

packages-selective:
  extends: .qa

@@ -236,12 +393,19 @@ packages-selective:
  rules:
    - !reference [.rules:test:qa-selective, rules]
    - if: $QA_SUITES =~ /Test::Instance::Packages/
packages:
  extends: packages-selective

packages-selective-parallel:
  extends: .qa
  parallel: 2
  variables:
    QA_SCENARIO: Test::Instance::Image
    QA_RSPEC_TAGS: --tag packages
    GITLAB_QA_OPTS: --omnibus-config packages $EXTRA_GITLAB_QA_OPTS
  rules:
    - !reference [.rules:test:qa-parallel, rules]
    - !reference [.rules:test:qa-selective-parallel, rules]
    - if: $QA_SUITES =~ /Test::Instance::Packages/
      variables:
        QA_TESTS: ""

# ------------------------------------------
# Non parallel jobs

@@ -474,7 +638,7 @@ elasticsearch:
    - !reference [.rules:test:manual, rules]

registry-object-storage-tls:
  extends: object-storage-aws-selective
  extends: object-storage-aws-selective-parallel
  variables:
    QA_SCENARIO: Test::Integration::RegistryTLS
    QA_RSPEC_TAGS: ""
@@ -166,8 +166,10 @@ stages:
    KNAPSACK_DIR: ${CI_PROJECT_DIR}/qa/knapsack
    GIT_STRATEGY: none
  script:
    - echo "KNAPSACK_TEST_FILE_PATTERN is ${KNAPSACK_TEST_FILE_PATTERN}"
    # when using qa-image, code runs in /home/gitlab/qa folder
    - bundle exec rake "knapsack:download[test]"
    - '[ -n "$QA_TESTS" ] && bundle exec rake "knapsack:create_reports_for_selective"'
    - mkdir -p "$KNAPSACK_DIR" && cp knapsack/*.json "${KNAPSACK_DIR}/"
  allow_failure: true
  artifacts:
@@ -1,5 +1,5 @@
# Specific specs passed
.specific-specs: &specific-specs
.specs-specified: &specs-specified
  if: $QA_TESTS != ""

# No specific specs passed

@@ -10,6 +10,14 @@
.feature-flags-set: &feature-flags-set
  if: $QA_FEATURE_FLAGS =~ /enabled|disabled/

# Specific specs specified
.spec-file-specified: &spec-file-specified
  if: $QA_TESTS =~ /_spec\.rb/

# Specs directory specified
.spec-directory-specified: &spec-directory-specified
  if: $QA_TESTS != "" && $QA_TESTS !~ /_spec\.rb/

# Manually trigger job on ff changes but with default ff state instead of inverted
.feature-flags-set-manual: &feature-flags-set-manual
  <<: *feature-flags-set

@@ -96,11 +104,25 @@
      when: never
    - <<: *feature-flags-set
      when: never
    - <<: *spec-directory-specified
      when: never

.rules:test:qa-selective-parallel:
  rules:
    # always run parallel with full suite when framework changes present or ff state changed
    - <<: *qa-run-all-tests
      when: never
    - <<: *all-specs
      when: never
    - <<: *feature-flags-set
      when: never
    - <<: *spec-file-specified
      when: never

.rules:test:qa-parallel:
  rules:
    - *qa-run-all-tests
    - <<: *specific-specs
    - <<: *specs-specified
      when: manual
      allow_failure: true
      variables:
@@ -171,3 +171,4 @@ e2e-test-pipeline-generate:
    expire_in: 1 day
    paths:
      - '*-pipeline.yml'
      - "${CI_PROJECT_DIR}/qa_tests_vars.env"
@@ -244,7 +244,6 @@ Gitlab/NamespacedClass:
    - 'app/models/notification_setting.rb'
    - 'app/models/oauth_access_grant.rb'
    - 'app/models/oauth_access_token.rb'
    - 'app/models/organization.rb'
    - 'app/models/out_of_context_discussion.rb'
    - 'app/models/pages_deployment.rb'
    - 'app/models/pages_domain.rb'
@@ -2,6 +2,12 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.

## 16.0.1 (2023-05-22)

### Security (1 change)

- [Fix arbitary file read via filename param](gitlab-org/security/gitlab@2ddbf5464954addce7b8c82102377f0f137b604f) ([merge request](gitlab-org/security/gitlab!3265))

## 16.0.0 (2023-05-18)

### Added (168 changes)
@@ -10,6 +10,10 @@ module UploadsActions
  included do
    prepend_before_action :set_request_format_from_path_extension
    rescue_from FileUploader::InvalidSecret, with: :render_404

    rescue_from ::Gitlab::Utils::PathTraversalAttackError do
      head :bad_request
    end
  end

  def create

@@ -33,6 +37,8 @@ module UploadsActions
  # - or redirect to its URL
  #
  def show
    Gitlab::Utils.check_path_traversal!(params[:filename])

    return render_404 unless uploader&.exists?

    ttl, directives = *cache_settings
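A minimal sketch of the guard added above (the filenames are hypothetical, not from this commit): based on the `rescue_from` hook, `check_path_traversal!` is expected to raise `Gitlab::Utils::PathTraversalAttackError` when the `filename` parameter tries to escape the upload directory, which the controller now maps to a 400 response.

```ruby
Gitlab::Utils.check_path_traversal!("avatar.png") # a normal filename raises nothing

begin
  Gitlab::Utils.check_path_traversal!("../../../../Gemfile.lock")
rescue Gitlab::Utils::PathTraversalAttackError
  # the controller's rescue_from turns this into `head :bad_request`
end
```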
@@ -6,7 +6,7 @@ class ReleasesFinder
  attr_reader :parent, :current_user, :params

  def initialize(parent, current_user = nil, params = {})
    @parent = parent
    @parent = Array.wrap(parent)
    @current_user = current_user
    @params = params

@@ -15,7 +15,7 @@ class ReleasesFinder
  end

  def execute(preload: true)
    return Release.none if projects.empty?
    return Release.none if authorized_projects.empty?

    releases = get_releases
    releases = by_tag(releases)

@@ -26,16 +26,17 @@ class ReleasesFinder
  private

  def get_releases
    Release.where(project_id: projects).where.not(tag: nil) # rubocop: disable CodeReuse/ActiveRecord
    Release.where(project_id: authorized_projects).where.not(tag: nil) # rubocop: disable CodeReuse/ActiveRecord
  end

  def projects
    strong_memoize(:projects) do
      if parent.is_a?(Project)
        Ability.allowed?(current_user, :read_release, parent) ? [parent] : []
      end
    end
  def authorized_projects
    # Preload policy for all projects to avoid N+1 queries
    projects = Project.id_in(parent.map(&:id)).include_project_feature
    Preloaders::ProjectPolicyPreloader.new(projects, current_user).execute

    projects.select { |project| authorized?(project) }
  end
  strong_memoize_attr :authorized_projects

  # rubocop: disable CodeReuse/ActiveRecord
  def by_tag(releases)

@@ -48,4 +49,8 @@ class ReleasesFinder
  def order_releases(releases)
    releases.sort_by_attribute("#{params[:order_by]}_#{params[:sort]}")
  end

  def authorized?(project)
    Ability.allowed?(current_user, :read_release, project)
  end
end
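A minimal usage sketch of the reworked finder (the variables below are hypothetical): it now accepts a single project or an array, and only releases from projects on which the user has `:read_release` are returned.

```ruby
# Hypothetical call site; project_a, project_b and current_user are assumed to exist.
finder = ReleasesFinder.new([project_a, project_b], current_user, order_by: 'created_at', sort: 'asc')
finder.execute # => releases from the authorized projects only, ordered per params
```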
@@ -8,8 +8,6 @@ module Resolvers
      required: false, default_value: :released_at_desc,
      description: 'Sort releases by given criteria.'

    alias_method :project, :object

    # This resolver has a custom singular resolver
    def self.single
      Resolvers::ReleaseResolver

@@ -23,11 +21,24 @@ module Resolvers
    }.freeze

    def resolve(sort:)
      ReleasesFinder.new(
        project,
        current_user,
        SORT_TO_PARAMS_MAP[sort]
      ).execute
      BatchLoader::GraphQL.for(project).batch do |projects, loader|
        releases = ReleasesFinder.new(
          projects,
          current_user,
          SORT_TO_PARAMS_MAP[sort]
        ).execute

        # group_by will not cause N+1 queries here because ReleasesFinder preloads projects
        releases.group_by(&:project).each do |project, versions|
          loader.call(project, versions)
        end
      end
    end

    private

    def project
      object.respond_to?(:project) ? object.project : object
    end
  end
end
@@ -20,6 +20,11 @@ module Types

        field :icon, GraphQL::Types::String, null: true, description: 'Icon for the catalog resource.',
          method: :avatar_path, alpha: { milestone: '15.11' }

        field :versions, Types::ReleaseType.connection_type, null: true,
          description: 'Versions of the catalog resource.',
          resolver: Resolvers::ReleasesResolver,
          alpha: { milestone: '16.1' }
      end
      # rubocop: enable Graphql/AuthorizeTypes
    end
@@ -57,7 +57,7 @@ class Namespace < ApplicationRecord
  # This should _not_ be `inverse_of: :namespace`, because that would also set
  # `user.namespace` when this user creates a group with themselves as `owner`.
  belongs_to :owner, class_name: 'User'
  belongs_to :organization
  belongs_to :organization, class_name: 'Organizations::Organization'

  belongs_to :parent, class_name: "Namespace"
  has_many :children, -> { where(type: Group.sti_name) }, class_name: "Namespace", foreign_key: :parent_id
@@ -1,29 +0,0 @@
# frozen_string_literal: true

class Organization < ApplicationRecord
  DEFAULT_ORGANIZATION_ID = 1

  scope :without_default, -> { where.not(id: DEFAULT_ORGANIZATION_ID) }

  before_destroy :check_if_default_organization

  has_many :namespaces
  has_many :groups

  validates :name,
    presence: true,
    length: { maximum: 255 },
    uniqueness: { case_sensitive: false }

  def default?
    id == DEFAULT_ORGANIZATION_ID
  end

  private

  def check_if_default_organization
    return unless default?

    raise ActiveRecord::RecordNotDestroyed, _('Cannot delete the default organization')
  end
end
@@ -0,0 +1,31 @@
# frozen_string_literal: true

module Organizations
  class Organization < ApplicationRecord
    DEFAULT_ORGANIZATION_ID = 1

    scope :without_default, -> { where.not(id: DEFAULT_ORGANIZATION_ID) }

    before_destroy :check_if_default_organization

    has_many :namespaces
    has_many :groups

    validates :name,
      presence: true,
      length: { maximum: 255 },
      uniqueness: { case_sensitive: false }

    def default?
      id == DEFAULT_ORGANIZATION_ID
    end

    private

    def check_if_default_organization
      return unless default?

      raise ActiveRecord::RecordNotDestroyed, _('Cannot delete the default organization')
    end
  end
end
@@ -426,6 +426,8 @@ module ObjectStorage
    end

    def retrieve_from_store!(identifier)
      Gitlab::Utils.check_path_traversal!(identifier)

      # We need to force assign the value of @filename so that we will still
      # get the original_filename in cases wherein the file points to a random generated
      # path format. This happens for direct uploaded files to final location.
@@ -3027,6 +3027,15 @@
  :weight: 1
  :idempotent: true
  :tags: []
- :name: merge_requests_mergeability_check_batch
  :worker_name: MergeRequests::MergeabilityCheckBatchWorker
  :feature_category: :code_review_workflow
  :has_external_dependencies: false
  :urgency: :low
  :resource_boundary: :unknown
  :weight: 1
  :idempotent: true
  :tags: []
- :name: merge_requests_resolve_todos
  :worker_name: MergeRequests::ResolveTodosWorker
  :feature_category: :code_review_workflow
@@ -0,0 +1,34 @@
# frozen_string_literal: true

module MergeRequests
  class MergeabilityCheckBatchWorker
    include ApplicationWorker

    data_consistency :sticky

    sidekiq_options retry: 3

    feature_category :code_review_workflow
    idempotent!

    def logger
      @logger ||= Sidekiq.logger
    end

    def perform(merge_request_ids)
      merge_requests = MergeRequest.id_in(merge_request_ids)

      merge_requests.each do |merge_request|
        result = merge_request.check_mergeability

        next unless result&.error?

        logger.error(
          worker: self.class.name,
          message: "Failed to check mergeability of merge request: #{result.message}",
          merge_request_id: merge_request.id
        )
      end
    end
  end
end
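A sketch of how such a worker would be enqueued (the call site below is hypothetical and not part of this commit): a caller collects the IDs of merge requests whose mergeability is stale and hands them to Sidekiq as a single batch job.

```ruby
# Hypothetical enqueue site; `stale_merge_requests` is assumed to be a
# collection of merge requests that need re-checking.
MergeRequests::MergeabilityCheckBatchWorker.perform_async(stale_merge_requests.map(&:id))
```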
@@ -0,0 +1,8 @@
---
name: vsd_graphql_dora_and_flow_metrics
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/116216
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/409499
milestone: '16.1'
type: development
group: group::optimize
default_enabled: false
@@ -361,6 +361,8 @@
  - 1
- - merge_requests_llm_summarize_merge_request
  - 1
- - merge_requests_mergeability_check_batch
  - 1
- - merge_requests_resolve_todos
  - 1
- - merge_requests_resolve_todos_after_approval
@@ -1025,6 +1025,7 @@ Input type: `AiActionInput`
| <a id="mutationaiactiongeneratetestfile"></a>`generateTestFile` | [`GenerateTestFileInput`](#generatetestfileinput) | Input for generate_test_file AI action. |
| <a id="mutationaiactionmarkupformat"></a>`markupFormat` | [`MarkupFormat`](#markupformat) | Indicates the response format. |
| <a id="mutationaiactionsummarizecomments"></a>`summarizeComments` | [`AiSummarizeCommentsInput`](#aisummarizecommentsinput) | Input for summarize_comments AI action. |
| <a id="mutationaiactionsummarizereview"></a>`summarizeReview` | [`AiSummarizeReviewInput`](#aisummarizereviewinput) | Input for summarize_review AI action. |
| <a id="mutationaiactiontanukibot"></a>`tanukiBot` | [`AiTanukiBotInput`](#aitanukibotinput) | Input for tanuki_bot AI action. |

#### Fields

@@ -12345,6 +12346,28 @@ Represents the total number of issues and their weights for a particular day.
| <a id="cicatalogresourceid"></a>`id` **{warning-solid}** | [`ID!`](#id) | **Introduced** in 15.11. This feature is an Experiment. It can be changed or removed at any time. ID of the catalog resource. |
| <a id="cicatalogresourcename"></a>`name` **{warning-solid}** | [`String`](#string) | **Introduced** in 15.11. This feature is an Experiment. It can be changed or removed at any time. Name of the catalog resource. |

#### Fields with arguments

##### `CiCatalogResource.versions`

Versions of the catalog resource.

WARNING:
**Introduced** in 16.1.
This feature is an Experiment. It can be changed or removed at any time.

Returns [`ReleaseConnection`](#releaseconnection).

This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.

###### Arguments

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="cicatalogresourceversionssort"></a>`sort` | [`ReleaseSort`](#releasesort) | Sort releases by given criteria. |

### `CiConfig`

#### Fields

@@ -27714,6 +27737,14 @@ see the associated mutation type above.
| ---- | ---- | ----------- |
| <a id="aisummarizecommentsinputresourceid"></a>`resourceId` | [`AiModelID!`](#aimodelid) | Global ID of the resource to mutate. |

### `AiSummarizeReviewInput`

#### Arguments

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="aisummarizereviewinputresourceid"></a>`resourceId` | [`AiModelID!`](#aimodelid) | Global ID of the resource to mutate. |

### `AiTanukiBotInput`

#### Arguments
@@ -165,8 +165,8 @@ In order to limit amount of tests executed in a merge request, dynamic selection
on changed files and merge request labels. Following criteria determine which tests will run:

1. Changes in `qa` framework code would execute the full suite
1. Changes in particular `_spec.rb` file in `qa` folder would execute only that particular test
1. Merge request with backend changes and label `devops::manage` would execute all e2e tests related to `manage` stage
1. Changes in particular `_spec.rb` file in `qa` folder would execute only that particular test. In this case knapsack will not be used to run jobs in parallel.
1. Merge request with backend changes and label `devops::manage` would execute all e2e tests related to `manage` stage. Jobs will be run in parallel in this case using knapsack.
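To make the selective/parallel split above concrete, here is a small sketch mirroring the `namespace :ci` rake change later in this commit: when `QA_TESTS` contains only folders (no `_spec.rb` files), the folders are expanded into a glob for `KNAPSACK_TEST_FILE_PATTERN`. The `qa_tests` value is illustrative.

```ruby
# Illustrative value; the real rake task derives qa_tests from changed files.
qa_tests = "qa/specs/features/api/3_create qa/specs/features/browser_ui/3_create/"

if qa_tests.split(' ').none? { |item| item.include?('_spec') }
  test_paths = qa_tests.split(' ').map { |item| "#{item}**/*" }
  files_pattern = "{#{test_paths.join(',')}}"
  # => "{qa/specs/features/api/3_create**/*,qa/specs/features/browser_ui/3_create/**/*}"
end
```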
#### Overriding selective test execution
@@ -164,7 +164,7 @@ programming languages from [The Pile](https://pile.eleuther.ai/) and the
[Google BigQuery source code dataset](https://cloud.google.com/blog/topics/public-datasets/github-on-bigquery-analyze-all-the-open-source-code).
We then process this raw dataset against heuristics that aim to increase the quality of the dataset.

The Code Suggestions model is not trained on GitLab customer data.
The Code Suggestions model is not trained on GitLab customer or user data.

## Progressive enhancement
@@ -4,7 +4,7 @@ module Bitbucket
  module Representation
    class PullRequest < Representation::Base
      def author
        raw.fetch('author', {}).fetch('nickname', nil)
        raw.dig('author', 'nickname')
      end

      def description

@@ -39,19 +39,19 @@ module Bitbucket
      end

      def source_branch_name
        source_branch.fetch('branch', {}).fetch('name', nil)
        source_branch.dig('branch', 'name')
      end

      def source_branch_sha
        source_branch.fetch('commit', {}).fetch('hash', nil)
        source_branch.dig('commit', 'hash')
      end

      def target_branch_name
        target_branch.fetch('branch', {}).fetch('name', nil)
        target_branch.dig('branch', 'name')
      end

      def target_branch_sha
        target_branch.fetch('commit', {}).fetch('hash', nil)
        target_branch.dig('commit', 'hash')
      end

      private
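Why the switch from chained `fetch` to `dig` matters, in a standalone plain-Ruby sketch with illustrative data: when the key is present but its value is `nil`, the old chain raised, while `dig` simply returns `nil` — exactly the case the new spec below covers.

```ruby
raw = { 'author' => nil }

begin
  # Old form: the key exists, so the {} default is ignored and nil.fetch raises.
  raw.fetch('author', {}).fetch('nickname', nil)
rescue NoMethodError => e
  puts e.message # NoMethodError from calling fetch on nil
end

raw.dig('author', 'nickname') # => nil (new form handles a missing or nil author)
```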
@@ -1,5 +1,5 @@
variables:
  AUTO_BUILD_IMAGE_VERSION: 'v1.32.0'
  AUTO_BUILD_IMAGE_VERSION: 'v1.33.0'

build:
  stage: build

@@ -1,5 +1,5 @@
variables:
  AUTO_BUILD_IMAGE_VERSION: 'v1.32.0'
  AUTO_BUILD_IMAGE_VERSION: 'v1.33.0'

build:
  stage: build
@@ -127,7 +127,7 @@ module Gitlab
      def to_sentry_error(error)
        Gitlab::ErrorTracking::Error.new(
          id: error.fingerprint.to_s,
          title: error.name,
          title: "#{error.name}: #{error.description}",
          message: error.description,
          culprit: error.actor,
          first_seen: error.first_seen_at,

@@ -141,7 +141,7 @@ module Gitlab
      def to_sentry_detailed_error(error)
        Gitlab::ErrorTracking::DetailedError.new(
          id: error.fingerprint.to_s,
          title: error.name,
          title: "#{error.name}: #{error.description}",
          message: error.description,
          culprit: error.actor,
          first_seen: error.first_seen_at.to_s,
|
|||
prepend SubMenus::SuperSidebar::Main
|
||||
prepend SubMenus::SuperSidebar::Build
|
||||
prepend SubMenus::SuperSidebar::Operate
|
||||
prepend SubMenus::SuperSidebar::Deploy
|
||||
end
|
||||
|
||||
def click_group_members_item
|
||||
|
|
|
|||
|
|
@ -0,0 +1,23 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module QA
|
||||
module Page
|
||||
module Group
|
||||
module SubMenus
|
||||
module SuperSidebar
|
||||
module Deploy
|
||||
extend QA::Page::PageConcern
|
||||
|
||||
def self.prepended(base)
|
||||
super
|
||||
|
||||
base.class_eval do
|
||||
include QA::Page::SubMenus::SuperSidebar::Deploy
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -8,7 +8,7 @@ module QA
|
|||
module Main
|
||||
extend QA::Page::PageConcern
|
||||
|
||||
def self.included(base)
|
||||
def self.prepended(base)
|
||||
super
|
||||
|
||||
base.class_eval do
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ module QA
|
|||
|
||||
if Runtime::Env.super_sidebar_enabled?
|
||||
include Page::SubMenus::SuperSidebar::Manage
|
||||
include Page::SubMenus::SuperSidebar::Deploy
|
||||
include SubMenus::SuperSidebar::Plan
|
||||
include SubMenus::SuperSidebar::Settings
|
||||
include SubMenus::SuperSidebar::Code
|
||||
|
|
|
|||
|
|
@ -0,0 +1,31 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module QA
|
||||
module Page
|
||||
module Project
|
||||
module SubMenus
|
||||
module SuperSidebar
|
||||
module Deploy
|
||||
extend QA::Page::PageConcern
|
||||
|
||||
def self.included(base)
|
||||
super
|
||||
|
||||
base.class_eval do
|
||||
include QA::Page::SubMenus::SuperSidebar::Deploy
|
||||
end
|
||||
end
|
||||
|
||||
def go_to_releases
|
||||
open_deploy_submenu("Releases")
|
||||
end
|
||||
|
||||
def go_to_feature_flags
|
||||
open_deploy_submenu("Feature flags")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module QA
|
||||
module Page
|
||||
module SubMenus
|
||||
module SuperSidebar
|
||||
module Deploy
|
||||
extend QA::Page::PageConcern
|
||||
|
||||
def go_to_package_registry
|
||||
open_deploy_submenu("Package Registry")
|
||||
end
|
||||
|
||||
def go_to_container_registry
|
||||
open_deploy_submenu('Container Registry')
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def open_deploy_submenu(sub_menu)
|
||||
open_submenu("Deploy", sub_menu)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -7,14 +7,6 @@ module QA
|
|||
module Operate
|
||||
extend QA::Page::PageConcern
|
||||
|
||||
def go_to_package_registry
|
||||
open_operate_submenu('Package Registry')
|
||||
end
|
||||
|
||||
def go_to_container_registry
|
||||
open_operate_submenu('Container Registry')
|
||||
end
|
||||
|
||||
def go_to_dependency_proxy
|
||||
open_operate_submenu('Dependency Proxy')
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -24,7 +24,7 @@ module QA
        :title,
        :description,
        :merge_when_pipeline_succeeds,
        :merge_status,
        :detailed_merge_status,
        :state,
        :reviewers

@@ -151,13 +151,11 @@ module QA
      end

      def merge_via_api!
        Support::Waiter.wait_until(sleep_interval: 1) do
          QA::Runtime::Logger.debug("Waiting until merge request with id '#{iid}' can be merged")
        QA::Runtime::Logger.info("Merging via PUT #{api_merge_path}")

          reload!.merge_status == 'can_be_merged'
        end
        wait_until_mergable

        Support::Retrier.retry_on_exception do
        Support::Retrier.retry_on_exception(max_attempts: 10, sleep_interval: 5) do
          response = put(Runtime::API::Request.new(api_client, api_merge_path).url)

          unless response.code == HTTP_STATUS_OK

@@ -205,7 +203,7 @@ module QA
        :project_id,
        :source_project_id,
        :target_project_id,
        :merge_status,
        :detailed_merge_status,
        # we consider mr to still be the same even if users changed
        :author,
        :reviewers,

@@ -250,6 +248,18 @@ module QA
      def create_target?
        !(project.initialize_with_readme && target_branch == project.default_branch) && target_new_branch
      end

      # Wait until the merge request can be merged. Raises WaitExceededError if the MR can't be merged within 60 seconds
      #
      # @return [void]
      def wait_until_mergable
        return if Support::Waiter.wait_until(sleep_interval: 1, raise_on_failure: false, log: false) do
          reload!.detailed_merge_status == 'mergeable'
        end

        raise Support::Repeater::WaitExceededError,
          "Timed out waiting for merge of MR with id '#{iid}'. Final status was '#{detailed_merge_status}'"
      end
    end
  end
end
@@ -128,7 +128,12 @@ module QA
        # If a project is being imported, wait until it completes before we let the test continue.
        # Otherwise we see Git repository errors
        # See https://gitlab.com/gitlab-org/gitlab/-/issues/356101
        Support::Retrier.retry_until(max_duration: 60, sleep_interval: 5, retry_on_exception: true) do
        Support::Retrier.retry_until(
          max_duration: 60,
          sleep_interval: 5,
          retry_on_exception: true,
          message: "Wait for project to be imported"
        ) do
          reload!.api_resource[:import_status] == "finished"
        end
@@ -3,7 +3,9 @@
require 'airborne'

module QA
  RSpec.describe 'Package', :reliable, only: { subdomain: %i[staging staging-canary pre] }, product_group: :container_registry do
  RSpec.describe 'Package', :reliable, product_group: :container_registry, only: {
    subdomain: %i[staging staging-canary pre]
  } do
    include Support::API
    include Support::Helpers::MaskToken

@@ -76,26 +78,32 @@ module QA
        YAML
      end

      after do
        registry&.remove_via_api!
      end

      it 'pushes, pulls image to the registry and deletes tag', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/348001' do
        Support::Retrier.retry_on_exception(max_attempts: 3, sleep_interval: 2) do
      it 'pushes, pulls image to the registry and deletes tag',
        testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/348001' do
        Support::Retrier.retry_on_exception(max_attempts: 3, sleep_interval: 2, message: "Commit push") do
          Resource::Repository::Commit.fabricate_via_api! do |commit|
            commit.api_client = api_client
            commit.commit_message = 'Add .gitlab-ci.yml'
            commit.project = project
            commit.add_files([{
              file_path: '.gitlab-ci.yml',
              content: gitlab_ci_yaml
            }])
                              file_path: '.gitlab-ci.yml',
                              content: gitlab_ci_yaml
                            }])
          end
        end

        Support::Waiter.wait_until(max_duration: 10) { pipeline_is_triggered? }

        Support::Retrier.retry_until(max_duration: 300, sleep_interval: 5) do
        Support::Retrier.retry_until(
          max_duration: 10,
          sleep_interval: 1,
          message: "Waiting for pipeline to start"
        ) do
          pipeline_is_triggered?
        end
        Support::Retrier.retry_until(
          max_duration: 300,
          sleep_interval: 5,
          message: "Waiting for pipeline to succeed"
        ) do
          latest_pipeline_succeed?
        end
      end
@@ -46,6 +46,28 @@ module QA
      logger.warn("Falling back to '#{FALLBACK_REPORT}'")
    end

    # Create a copy of the report that contains the selective tests and has '-selective' suffix
    #
    # @param [String] qa_tests
    # @return [void]
    def create_for_selective(qa_tests)
      timed_specs = JSON.parse(File.read(report_path))

      qa_tests_array = qa_tests.split(' ')
      filtered_timed_specs = timed_specs.select { |k, _| qa_tests_array.any? { |qa_test| k.include? qa_test } }
      File.write(selective_path, filtered_timed_specs.to_json)
    end

    # Add '-selective-parallel' suffix to report name
    #
    # @return [String]
    def selective_path
      extension = File.extname(report_path)
      directory = File.dirname(report_path)
      file_name = File.basename(report_path, extension)
      File.join(directory, "#{file_name}-selective-parallel#{extension}")
    end

    # Rename and move new regenerated report to a separate folder used to indicate report name
    #
    # @return [void]
@@ -0,0 +1,5 @@
{
  "qa/specs/features/ee/browser_ui/3_create/repository/code_owners_with_protected_branch_and_squashed_commits_spec.rb": 18.85673829499956,
  "qa/specs/features/api/3_create/repository/files_spec.rb": 3.180753622999873,
  "qa/specs/features/browser_ui/3_create/web_ide_old/server_hooks_custom_error_message_spec.rb": 0.010764157999801682
}

@@ -0,0 +1,7 @@
{
  "qa/specs/features/ee/browser_ui/3_create/repository/code_owners_with_protected_branch_and_squashed_commits_spec.rb": 18.85673829499956,
  "qa/specs/features/api/3_create/repository/files_spec.rb": 3.180753622999873,
  "qa/specs/features/browser_ui/3_create/web_ide_old/server_hooks_custom_error_message_spec.rb": 0.010764157999801682,
  "qa/specs/features/api/9_data_stores/users_spec.rb": 0.2808277129997805,
  "qa/specs/features/api/2_plan/closes_issue_via_pushing_a_commit_spec.rb": 4.882168451000325
}
@@ -0,0 +1,39 @@
# frozen_string_literal: true

RSpec.describe QA::Support::KnapsackReport do
  subject(:knapsack_report) { described_class.new('instance') }

  describe '#create_for_selective' do
    let(:qa_tests) do
      <<~CMD
        qa/specs/features/api/3_create
        qa/specs/features/browser_ui/3_create/
        qa/specs/features/ee/api/3_create/
        qa/specs/features/ee/browser_ui/3_create/
      CMD
    end

    let(:fixtures_path) { 'spec/fixtures/knapsack_report' }
    let(:expected_output) { JSON.parse(File.read(File.join(fixtures_path, 'instance-selective-parallel.json'))) }

    before do
      allow(File).to receive(:read).and_call_original
      allow(File).to receive(:read)
        .with('knapsack/instance.json')
        .and_return(File.read(File.join(fixtures_path, 'instance.json')))
    end

    it 'creates a filtered file based on qa_tests' do
      expect(File).to receive(:write)
        .with('knapsack/instance-selective-parallel.json', expected_output.to_json)

      knapsack_report.create_for_selective(qa_tests)
    end
  end

  describe '#selective_path' do
    it 'returns the path with file name suffixed with -selective-parallel' do
      expect(knapsack_report.selective_path).to eq('knapsack/instance-selective-parallel.json')
    end
  end
end
@@ -30,6 +30,18 @@ namespace :ci do
    # on run-all label of framework changes do not infer specific tests
    tests = run_all_label_present || qa_changes.framework_changes? ? nil : qa_changes.qa_tests

    # When QA_TESTS only contain folders and no specific specs, populate KNAPSACK_TEST_FILE_PATTERN
    if tests && tests.split(' ').none? { |item| item.include?('_spec') }
      test_paths = tests.split(' ').map { |item| "#{item}**/*" }

      files_pattern = "{#{test_paths.join(',')}}"

      logger.info(" Files pattern for tests: #{files_pattern}")
      append_to_file(env_file, <<~TXT)
        KNAPSACK_TEST_FILE_PATTERN='#{files_pattern}'
      TXT
    end

    if run_all_label_present
      logger.info(" merge request has pipeline:run-all-e2e label, full test suite will be executed")
      append_to_file(env_file, "QA_RUN_ALL_E2E_LABEL=true\n")
@@ -42,6 +42,19 @@ namespace :knapsack do
    end
  end

  desc "Create knapsack reports from existing reports for selective jobs"
  task :create_reports_for_selective do
    reports = Dir.glob("knapsack/*").map { |file| file.match(%r{.*/(.*)?\.json})[1] }

    reports.each do |report_name|
      unless report_name.include?('-selective-parallel')
        QA::Support::KnapsackReport.new(report_name).create_for_selective(ENV['QA_TESTS'])
      end
    rescue StandardError => e
      QA::Runtime::Logger.error(e)
    end
  end

  desc "Merge and upload knapsack report"
  task :upload, [:glob] do |_task, args|
    QA::Support::KnapsackReport.configure!

@@ -49,6 +49,7 @@ variables:
    QA_SAVE_TEST_METRICS: "${QA_SAVE_TEST_METRICS:-false}"
    QA_SUITES: "$QA_SUITES"
    QA_TESTS: "$QA_TESTS"
    KNAPSACK_TEST_FILE_PATTERN: "$KNAPSACK_TEST_FILE_PATTERN"
YML
)

@@ -63,3 +64,4 @@ for key in "${!qa_pipelines[@]}"; do
done

echoinfo "Successfully generated qa pipeline files"
echo "$variables"
@@ -1,16 +0,0 @@
# frozen_string_literal: true

FactoryBot.define do
  factory :organization do
    sequence(:name) { |n| "Organization ##{n}" }

    trait :default do
      id { Organization::DEFAULT_ORGANIZATION_ID }
      name { 'Default' }
      initialize_with do
        # Ensure we only use one default organization
        Organization.find_by(id: Organization::DEFAULT_ORGANIZATION_ID) || new(**attributes)
      end
    end
  end
end

@@ -0,0 +1,20 @@
# frozen_string_literal: true

FactoryBot.define do
  factory :organization, class: 'Organizations::Organization' do
    sequence(:name) { |n| "Organization ##{n}" }

    trait :default do
      id { Organizations::Organization::DEFAULT_ORGANIZATION_ID }
      name { 'Default' }
      initialize_with do
        # Ensure we only use one default organization
        default_org = Organizations::Organization
          .where(id: Organizations::Organization::DEFAULT_ORGANIZATION_ID)
          .first_or_initialize
        default_org.attributes = attributes.except(:id)
        default_org
      end
    end
  end
end
@@ -2,20 +2,15 @@

require 'spec_helper'

RSpec.describe ReleasesFinder do
  let(:user) { create(:user) }
  let(:group) { create :group }
  let(:project) { create(:project, :repository, group: group) }
RSpec.describe ReleasesFinder, feature_category: :release_orchestration do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create :group }
  let_it_be(:project) { create(:project, :repository, group: group) }
  let(:params) { {} }
  let(:args) { {} }
  let(:repository) { project.repository }
  let(:v1_0_0) { create(:release, project: project, tag: 'v1.0.0') }
  let(:v1_1_0) { create(:release, project: project, tag: 'v1.1.0') }

  before do
    v1_0_0.update_attribute(:released_at, 2.days.ago)
    v1_1_0.update_attribute(:released_at, 1.day.ago)
  end
  let_it_be(:v1_0_0) { create(:release, project: project, tag: 'v1.0.0') }
  let_it_be(:v1_1_0) { create(:release, project: project, tag: 'v1.1.0') }

  shared_examples_for 'when the user is not part of the project' do
    it 'returns no releases' do

@@ -67,21 +62,20 @@ RSpec.describe ReleasesFinder do
    context 'when the user is a project guest' do
      before do
        project.add_guest(user)

        v1_0_0.update!(released_at: 2.days.ago, created_at: 1.day.ago)
        v1_1_0.update!(released_at: 1.day.ago, created_at: 2.days.ago)
      end

      it 'sorts by release date' do
      it 'returns the releases' do
        is_expected.to be_present
        expect(subject.size).to eq(2)
        expect(subject).to eq([v1_1_0, v1_0_0])
        expect(subject).to match_array([v1_1_0, v1_0_0])
      end

      context 'with sorting parameters' do
        before do
          v1_1_0.update_attribute(:created_at, 3.days.ago)
        end

        context 'by default is released_at in descending order' do
          it { is_expected.to eq([v1_1_0, v1_0_0]) }
        it 'sorted by released_at in descending order by default' do
          is_expected.to eq([v1_1_0, v1_0_0])
        end

        context 'released_at in ascending order' do

@@ -107,4 +101,73 @@ RSpec.describe ReleasesFinder do
      it_behaves_like 'when a tag parameter is passed'
    end
  end

  describe 'when parent is an array of projects' do
    let_it_be(:project2) { create(:project, :repository, group: group) }
    let_it_be(:v2_0_0) { create(:release, project: project2, tag: 'v2.0.0') }
    let_it_be(:v2_1_0) { create(:release, project: project2, tag: 'v2.1.0') }

    subject { described_class.new([project, project2], user, params).execute(**args) }

    context 'when the user is not part of any project' do
      it_behaves_like 'when the user is not part of the project'
    end

    context 'when the user is only part of one project' do
      before do
        project.add_guest(user)
      end

      it 'returns the releases of only the authorized project' do
        is_expected.to be_present
        expect(subject.size).to eq(2)
        expect(subject).to match_array([v1_1_0, v1_0_0])
      end
    end

    context 'when the user is a guest on all projects' do
      before do
        project.add_guest(user)
        project2.add_guest(user)

        v1_0_0.update!(released_at: 4.days.ago, created_at: 1.day.ago)
        v1_1_0.update!(released_at: 3.days.ago, created_at: 2.days.ago)
        v2_0_0.update!(released_at: 2.days.ago, created_at: 3.days.ago)
        v2_1_0.update!(released_at: 1.day.ago, created_at: 4.days.ago)
      end

      it 'returns the releases of all projects' do
        is_expected.to be_present
        expect(subject.size).to eq(4)
        expect(subject).to match_array([v2_1_0, v2_0_0, v1_1_0, v1_0_0])
      end

      it_behaves_like 'preload'
      it_behaves_like 'when a tag parameter is passed'

      context 'with sorting parameters' do
        it 'sorted by released_at in descending order by default' do
          is_expected.to eq([v2_1_0, v2_0_0, v1_1_0, v1_0_0])
        end

        context 'released_at in ascending order' do
          let(:params) { { sort: 'asc' } }

          it { is_expected.to eq([v1_0_0, v1_1_0, v2_0_0, v2_1_0]) }
        end

        context 'order by created_at in descending order' do
          let(:params) { { order_by: 'created_at' } }

          it { is_expected.to eq([v1_0_0, v1_1_0, v2_0_0, v2_1_0]) }
        end

        context 'order by created_at in ascending order' do
          let(:params) { { order_by: 'created_at', sort: 'asc' } }

          it { is_expected.to eq([v2_1_0, v2_0_0, v1_1_0, v1_0_0]) }
        end
      end
    end
  end
end
@@ -54,7 +54,9 @@ RSpec.describe Resolvers::ReleasesResolver, feature_category: :release_orchestra
  private

  def resolve_releases
    context = { current_user: current_user }
    resolve(described_class, obj: project, args: args, ctx: context, arg_style: :internal)
    batch_sync do
      context = { current_user: current_user }
      resolve(described_class, obj: project, args: args, ctx: context, arg_style: :internal)
    end
  end
end

@@ -11,6 +11,7 @@ RSpec.describe Types::Ci::Catalog::ResourceType, feature_category: :pipeline_com
      name
      description
      icon
      versions
    ]

    expect(described_class).to have_graphql_fields(*expected_fields)
@@ -2,7 +2,7 @@

require 'fast_spec_helper'

RSpec.describe Bitbucket::Representation::PullRequest do
RSpec.describe Bitbucket::Representation::PullRequest, feature_category: :importers do
  describe '#iid' do
    it { expect(described_class.new('id' => 1).iid).to eq(1) }
  end

@@ -10,6 +10,7 @@ RSpec.describe Bitbucket::Representation::PullRequest do
  describe '#author' do
    it { expect(described_class.new({ 'author' => { 'nickname' => 'Ben' } }).author).to eq('Ben') }
    it { expect(described_class.new({}).author).to be_nil }
    it { expect(described_class.new({ 'author' => nil }).author).to be_nil }
  end

  describe '#description' do

@@ -47,4 +48,12 @@ RSpec.describe Bitbucket::Representation::PullRequest do
    it { expect(described_class.new({ destination: { commit: { hash: 'abcd123' } } }.with_indifferent_access).target_branch_sha).to eq('abcd123') }
    it { expect(described_class.new({ destination: {} }.with_indifferent_access).target_branch_sha).to be_nil }
  end

  describe '#created_at' do
    it { expect(described_class.new('created_on' => '2023-01-01').created_at).to eq('2023-01-01') }
  end

  describe '#updated_at' do
    it { expect(described_class.new('updated_on' => '2023-01-01').updated_at).to eq('2023-01-01') }
  end
end
|
|||
title: 'This is a title',
|
||||
description: 'This is a test pull request',
|
||||
state: 'merged',
|
||||
author: 'other',
|
||||
author: pull_request_author,
|
||||
created_at: Time.now,
|
||||
updated_at: Time.now)
|
||||
end
|
||||
|
||||
let(:pull_request_author) { 'other' }
|
||||
|
||||
let(:author_line) { "*Created by: someuser*\n\n" }
|
||||
|
||||
before do
|
||||
|
|
@ -168,6 +170,16 @@ RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :importers d
|
|||
expect(reply_note.note).to include(author_line)
|
||||
end
|
||||
|
||||
context 'when author is blank' do
|
||||
let(:pull_request_author) { nil }
|
||||
|
||||
it 'adds created by anonymous in the description', :aggregate_failures do
|
||||
expect { subject.execute }.to change { MergeRequest.count }.by(1)
|
||||
|
||||
expect(MergeRequest.first.description).to include('Created by: Anonymous')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user exists in GitLab' do
|
||||
let!(:existing_user) { create(:user, username: 'someuser') }
|
||||
let!(:identity) { create(:identity, provider: 'bitbucket', extern_uid: existing_user.username, user: existing_user) }
|
||||
|
|
@ -218,6 +230,17 @@ RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :importers d
|
|||
end
|
||||
end
|
||||
|
||||
context "when target_branch_sha is blank" do
|
||||
let(:target_branch_sha) { nil }
|
||||
|
||||
it 'creates the merge request with no target branch', :aggregate_failures do
|
||||
expect { subject.execute }.to change { MergeRequest.count }.by(1)
|
||||
|
||||
merge_request = MergeRequest.first
|
||||
expect(merge_request.target_branch_sha).to eq(nil)
|
||||
end
|
||||
end
|
||||
|
||||
context 'metrics' do
|
||||
before do
|
||||
allow(Gitlab::Metrics).to receive(:counter) { counter }
|
||||
|
|
|
|||
|
|
@@ -85,7 +85,7 @@ RSpec.describe Gitlab::ErrorTracking::ErrorRepository::OpenApiStrategy do
    it 'returns detailed error' do
      is_expected.to have_attributes(
        id: error.fingerprint.to_s,
        title: error.name,
        title: "#{error.name}: #{error.description}",
        message: error.description,
        culprit: error.actor,
        first_seen: error.first_seen_at.to_s,

@@ -187,7 +187,7 @@ RSpec.describe Gitlab::ErrorTracking::ErrorRepository::OpenApiStrategy do
      expect(result_errors).to all(
        have_attributes(
          id: error.fingerprint.to_s,
          title: error.name,
          title: "#{error.name}: #{error.description}",
          message: error.description,
          culprit: error.actor,
          first_seen: error.first_seen_at,
@@ -10,13 +10,9 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
  let_it_be(:resource_2) { create(:catalog_resource, project: project_2) }
  let_it_be(:resource_3) { create(:catalog_resource, project: project_3) }

  let_it_be(:releases) do
    [
      create(:release, project: project, released_at: Time.zone.now - 2.days),
      create(:release, project: project, released_at: Time.zone.now - 1.day),
      create(:release, project: project, released_at: Time.zone.now)
    ]
  end
  let_it_be(:release1) { create(:release, project: project, released_at: Time.zone.now - 2.days) }
  let_it_be(:release2) { create(:release, project: project, released_at: Time.zone.now - 1.day) }
  let_it_be(:release3) { create(:release, project: project, released_at: Time.zone.now) }

  it { is_expected.to belong_to(:project) }

@@ -58,13 +54,13 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do

  describe '#versions' do
    it 'returns releases ordered by released date descending' do
      expect(resource.versions).to eq(releases.reverse)
      expect(resource.versions).to eq([release3, release2, release1])
    end
  end

  describe '#latest_version' do
    it 'returns the latest release' do
      expect(resource.latest_version).to eq(releases.last)
      expect(resource.latest_version).to eq(release3)
    end
  end
end
@@ -15,7 +15,7 @@ RSpec.describe Namespace, feature_category: :subgroups do
  let(:repository_storage) { 'default' }

  describe 'associations' do
    it { is_expected.to belong_to :organization }
    it { is_expected.to belong_to(:organization).class_name('Organizations::Organization') }
    it { is_expected.to have_many :projects }
    it { is_expected.to have_many :project_statistics }
    it { is_expected.to belong_to :parent }

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe Organization, type: :model, feature_category: :cell do
RSpec.describe Organizations::Organization, type: :model, feature_category: :cell do
  let_it_be(:organization) { create(:organization) }
  let_it_be(:default_organization) { create(:organization, :default) }
@@ -0,0 +1,14 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Groups::UploadsController, feature_category: :shared do
  include WorkhorseHelpers

  it_behaves_like 'uploads actions' do
    let_it_be(:model) { create(:group, :public) }
    let_it_be(:upload) { create(:upload, :namespace_upload, :with_file, model: model) }

    let(:show_path) { show_group_uploads_path(model, upload.secret, File.basename(upload.path)) }
  end
end

@@ -0,0 +1,14 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Projects::UploadsController, feature_category: :shared do
  include WorkhorseHelpers

  it_behaves_like 'uploads actions' do
    let_it_be(:model) { create(:project, :public) }
    let_it_be(:upload) { create(:upload, :issuable_upload, :with_file, model: model) }

    let(:show_path) { show_project_uploads_path(model, upload.secret, File.basename(upload.path)) }
  end
end

@@ -0,0 +1,17 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe UploadsController, feature_category: :shared do
  include WorkhorseHelpers

  it_behaves_like 'uploads actions' do
    let_it_be(:model) { create(:personal_snippet, :public) }
    let_it_be(:upload) { create(:upload, :personal_snippet_upload, :with_file, model: model) }

    # See config/routes/uploads.rb
    let(:show_path) do
      "/uploads/-/system/#{model.model_name.singular}/#{model.to_param}/#{upload.secret}/#{File.basename(upload.path)}"
    end
  end
end
@@ -4,8 +4,8 @@ RSpec.shared_examples 'releases and group releases resolver' do
  context 'when the user does not have access to the project' do
    let(:current_user) { public_user }

    it 'returns an empty array' do
      expect(resolve_releases).to be_empty
    it 'returns an empty response' do
      expect(resolve_releases).to be_blank
    end
  end

@@ -0,0 +1,25 @@
# frozen_string_literal: true

RSpec.shared_examples 'uploads actions' do
  describe "GET #show" do
    context 'with file traversal in filename parameter' do
      # Uploads in tests are stored in directories like:
      # tmp/tests/public/uploads/@hashed/AB/CD/ABCD/SECRET
      let(:filename) { "../../../../../../../../../Gemfile.lock" }
      let(:escaped_filename) { CGI.escape filename }

      it 'responds with status 400' do
        # Check files do indeed exists
        upload_absolute_path = Pathname(upload.absolute_path)
        expect(upload_absolute_path).to be_exist
        attacked_file_path = upload_absolute_path.dirname.join(filename)
        expect(attacked_file_path).to be_exist

        # Need to escape, otherwise we get `ActionController::UrlGenerationError Exception: No route matches`
        get show_path.sub(File.basename(upload.path), escaped_filename)

        expect(response).to have_gitlab_http_status(:bad_request)
      end
    end
  end
end
@@ -365,6 +365,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
    'MergeRequests::DeleteSourceBranchWorker' => 3,
    'MergeRequests::FetchSuggestedReviewersWorker' => 3,
    'MergeRequests::HandleAssigneesChangeWorker' => 3,
    'MergeRequests::MergeabilityCheckBatchWorker' => 3,
    'MergeRequests::ResolveTodosWorker' => 3,
    'MergeRequests::SyncCodeOwnerApprovalRulesWorker' => 3,
    'MergeTrains::RefreshWorker' => 3,
@@ -0,0 +1,63 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe MergeRequests::MergeabilityCheckBatchWorker, feature_category: :code_review_workflow do
  subject { described_class.new }

  describe '#perform' do
    context 'when some merge_requests do not exist' do
      it 'ignores unknown merge request ids' do
        expect(MergeRequests::MergeabilityCheckService).not_to receive(:new)

        expect(Sidekiq.logger).not_to receive(:error)

        subject.perform([1234, 5678])
      end
    end

    context 'when some merge_requests needs mergeability checks' do
      let(:merge_request_1) { create(:merge_request, merge_status: :unchecked) }
      let(:merge_request_2) { create(:merge_request, merge_status: :cannot_be_merged_rechecking) }
      let(:merge_request_3) { create(:merge_request, merge_status: :can_be_merged) }

      it 'executes MergeabilityCheckService on merge requests that needs to be checked' do
        expect_next_instance_of(MergeRequests::MergeabilityCheckService, merge_request_1) do |service|
          expect(service).to receive(:execute).and_return(ServiceResponse.success)
        end
        expect_next_instance_of(MergeRequests::MergeabilityCheckService, merge_request_2) do |service|
          expect(service).to receive(:execute).and_return(ServiceResponse.success)
        end
        expect(MergeRequests::MergeabilityCheckService).not_to receive(:new).with(merge_request_3.id)
        expect(MergeRequests::MergeabilityCheckService).not_to receive(:new).with(1234)

        subject.perform([merge_request_1.id, merge_request_2.id, merge_request_3.id, 1234])
      end

      it 'structurally logs a failed mergeability check' do
        expect_next_instance_of(MergeRequests::MergeabilityCheckService, merge_request_1) do |service|
          expect(service).to receive(:execute).and_return(ServiceResponse.error(message: "solar flares"))
        end

        expect(Sidekiq.logger).to receive(:error).once
          .with(
            merge_request_id: merge_request_1.id,
            worker: described_class.to_s,
            message: 'Failed to check mergeability of merge request: solar flares')

        subject.perform([merge_request_1.id])
      end
    end

    it_behaves_like 'an idempotent worker' do
      let(:merge_request) { create(:merge_request) }
      let(:job_args) { [merge_request.id] }

      it 'is mergeable' do
        subject

        expect(merge_request).to be_mergeable
      end
    end
  end
end