Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-07-20 12:10:29 +00:00
parent f5f1f221ba
commit a84766a28a
89 changed files with 934 additions and 608 deletions

View File

@ -2,6 +2,15 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 14.0.6 (2021-07-20)
### Fixed (4 changes)
- [Fix validation method regarding MIME type keys](gitlab-org/gitlab@2cc6d89cc77368b9472c8ec22e97bb3481409fb3) ([merge request](gitlab-org/gitlab!66403))
- [Geo: Fix snippet verification by replicating the HEAD ref](gitlab-org/gitlab@4dbf36af8553775603c170784ad8bfcdc436a669) ([merge request](gitlab-org/gitlab!66403)) **GitLab Enterprise Edition**
- [Fix LFS objects not downloading with Bitbucket](gitlab-org/gitlab@161776f9a4975dfeb2760b06e83160def902c61f) ([merge request](gitlab-org/gitlab!66403))
- [Replace Excon with Faraday for requesting object storage](gitlab-org/gitlab@a223d526d5b97f248c8810ef0b968d2c3b0323e0) ([merge request](gitlab-org/gitlab!66403))
## 14.0.5 (2021-07-08)
### Fixed (4 changes)

View File

@ -132,6 +132,9 @@ export default {
} else if (e.keyCode === ENTER_KEY_CODE && this.currentHighlightItem > -1) {
this.updateSelectedLabels([this.visibleLabels[this.currentHighlightItem]]);
this.searchKey = '';
// Prevent parent form submission upon hitting enter.
e.preventDefault();
} else if (e.keyCode === ESC_KEY_CODE) {
this.toggleDropdownContents();
}

View File

@ -315,7 +315,7 @@ export default {
</dropdown-value>
<dropdown-button v-show="dropdownButtonVisible" class="gl-mt-2" />
<dropdown-contents
v-show="dropdownButtonVisible && showDropdownContents"
v-if="dropdownButtonVisible && showDropdownContents"
ref="dropdownContents"
:render-on-top="!contentIsOnViewport"
/>

View File

@ -20,7 +20,11 @@ export const receiveLabelsFailure = ({ commit }) => {
message: __('Error fetching labels.'),
});
};
export const fetchLabels = ({ state, dispatch }) => {
export const fetchLabels = ({ state, dispatch }, options) => {
if (state.labelsFetched && (!options || !options.refetch)) {
return Promise.resolve();
}
dispatch('requestLabels');
return axios
.get(state.labelsFetchPath)
@ -46,6 +50,7 @@ export const createLabel = ({ state, dispatch }, label) => {
})
.then(({ data }) => {
if (data.id) {
dispatch('fetchLabels', { refetch: true });
dispatch('receiveCreateLabelSuccess');
dispatch('toggleDropdownContentsCreateView');
} else {

View File

@ -36,6 +36,7 @@ export default {
// selectedLabels array.
const selectedLabelIds = state.selectedLabels.map((label) => label.id);
state.labelsFetchInProgress = false;
state.labelsFetched = true;
state.labels = labels.reduce((allLabels, label) => {
allLabels.push({
...label,

View File

@ -1,6 +1,7 @@
export default () => ({
// Initial Data
labels: [],
labelsFetched: false,
selectedLabels: [],
labelsListTitle: '',
labelsCreateTitle: '',

View File

@ -330,7 +330,7 @@ export default {
</dropdown-value>
<dropdown-button v-show="dropdownButtonVisible" class="gl-mt-2" />
<dropdown-contents
v-show="dropdownButtonVisible && showDropdownContents"
v-if="dropdownButtonVisible && showDropdownContents"
ref="dropdownContents"
:render-on-top="!contentIsOnViewport"
:labels-create-title="labelsCreateTitle"

View File

@ -87,12 +87,6 @@
padding-bottom: $gl-spacing-scale-8;
}
// Will be moved to @gitlab/ui in https://gitlab.com/gitlab-org/gitlab-ui/-/issues/1495
.gl-py-13 {
padding-top: $gl-spacing-scale-13;
padding-bottom: $gl-spacing-scale-13;
}
.gl-transition-property-stroke-opacity {
transition-property: stroke-opacity;
}

View File

@ -68,20 +68,22 @@ class Projects::PipelinesController < Projects::ApplicationController
end
def create
@pipeline = Ci::CreatePipelineService
service_response = Ci::CreatePipelineService
.new(project, current_user, create_params)
.execute(:web, ignore_skip_ci: true, save_on_errors: false)
@pipeline = service_response.payload
respond_to do |format|
format.html do
if @pipeline.created_successfully?
if service_response.success?
redirect_to project_pipeline_path(project, @pipeline)
else
render 'new', status: :bad_request
end
end
format.json do
if @pipeline.created_successfully?
if service_response.success?
render json: PipelineSerializer
.new(project: project, current_user: current_user)
.represent(@pipeline),

View File

@ -38,11 +38,7 @@ module Security
def execute
return [] if @job_types.empty?
if Feature.enabled?(:ci_build_metadata_config, pipeline.project, default_enabled: :yaml)
find_jobs
else
find_jobs_legacy
end
find_jobs
end
private
@ -51,19 +47,6 @@ module Security
@pipeline.builds.with_secure_reports_from_config_options(@job_types)
end
def find_jobs_legacy
# the query doesn't guarantee accuracy, so we verify it here
legacy_jobs_query.select do |job|
@job_types.find { |job_type| job.options.dig(:artifacts, :reports, job_type) }
end
end
def legacy_jobs_query
@job_types.map do |job_type|
@pipeline.builds.with_secure_reports_from_options(job_type)
end.reduce(&:or)
end
def valid_job_types?(job_types)
(job_types - self.class.allowed_job_types).empty?
end

View File

@ -76,14 +76,8 @@ module Ci
end
def write_metadata_attribute(legacy_key, metadata_key, value)
# save to metadata or this model depending on the state of feature flag
if Feature.enabled?(:ci_build_metadata_config, project, default_enabled: :yaml)
ensure_metadata.write_attribute(metadata_key, value)
write_attribute(legacy_key, nil)
else
write_attribute(legacy_key, value)
metadata&.write_attribute(metadata_key, nil)
end
ensure_metadata.write_attribute(metadata_key, value)
write_attribute(legacy_key, nil)
end
end
end
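With the feature flag gone, the config attribute is always written to the metadata record and the legacy column is cleared unconditionally. A sketch of the resulting write path — the `options=` setter shown is how `Ci::Metadatable` wires an attribute pair into this helper; treat the exact names as an assumption:

```ruby
# A setter funnels the legacy/metadata attribute pair into the helper:
def options=(value)
  write_metadata_attribute(:options, :config_options, value)
end

# ...which, after this change, always does the equivalent of:
ensure_metadata.write_attribute(:config_options, value) # canonical copy on metadata
write_attribute(:options, nil)                          # legacy column cleared
```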

View File

@ -33,8 +33,9 @@ module Ci
current_user,
pipeline_params.fetch(:target_revision))
downstream_pipeline = service.execute(
pipeline_params.fetch(:source), **pipeline_params[:execute_params])
downstream_pipeline = service
.execute(pipeline_params.fetch(:source), **pipeline_params[:execute_params])
.payload
downstream_pipeline.tap do |pipeline|
update_bridge_status!(@bridge, pipeline)

View File

@ -87,12 +87,16 @@ module Ci
if pipeline.persisted?
schedule_head_pipeline_update
create_namespace_onboarding_action
else
# If pipeline is not persisted, try to recover IID
pipeline.reset_project_iid
end
# If pipeline is not persisted, try to recover IID
pipeline.reset_project_iid unless pipeline.persisted?
pipeline
if error_message = pipeline.full_error_messages.presence || pipeline.failure_reason.presence
ServiceResponse.error(message: error_message, payload: pipeline)
else
ServiceResponse.success(payload: pipeline)
end
end
# rubocop: enable Metrics/ParameterLists
@ -100,8 +104,8 @@ module Ci
source = args[0]
params = Hash(args[1])
execute(source, **params, &block).tap do |pipeline|
unless pipeline.persisted?
execute(source, **params, &block).tap do |response|
unless response.payload.persisted?
raise CreateError, response.payload.full_error_messages
end
end
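The service now returns a `ServiceResponse` envelope instead of a bare `Ci::Pipeline`. A minimal sketch of the new calling convention, based only on the methods visible in this commit (`success?`, `message`, `payload`); the surrounding variables are placeholders:

```ruby
# Sketch: consuming the ServiceResponse returned by Ci::CreatePipelineService.
response = Ci::CreatePipelineService
  .new(project, current_user, ref: 'main')
  .execute(:web, ignore_skip_ci: true, save_on_errors: false)

pipeline = response.payload # the pipeline always travels in the payload

if response.success?
  # pipeline persisted without errors
else
  # creation failed; the error text is carried in the envelope
  Rails.logger.warn("Pipeline creation failed: #{response.message}")
end
```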

View File

@ -7,7 +7,8 @@ module Ci
module ExternalPullRequests
class CreatePipelineService < BaseService
def execute(pull_request)
return unless pull_request.open? && pull_request.actual_branch_head?
return pull_request_not_open_error unless pull_request.open?
return pull_request_branch_error unless pull_request.actual_branch_head?
create_pipeline_for(pull_request)
end
@ -26,6 +27,14 @@ module Ci
target_sha: pull_request.target_sha
}
end
def pull_request_not_open_error
ServiceResponse.error(message: 'The pull request is not opened', payload: nil)
end
def pull_request_branch_error
ServiceResponse.error(message: 'The source sha is not the head of the source branch', payload: nil)
end
end
end
end

View File

@ -27,13 +27,13 @@ module Ci
# this check is to not leak the presence of the project if user cannot read it
return unless trigger.project == project
pipeline = Ci::CreatePipelineService
response = Ci::CreatePipelineService
.new(project, trigger.owner, ref: params[:ref], variables_attributes: variables)
.execute(:trigger, ignore_skip_ci: true) do |pipeline|
pipeline.trigger_requests.build(trigger: trigger)
end
pipeline_service_response(pipeline)
pipeline_service_response(response.payload)
end
def pipeline_service_response(pipeline)
@ -57,7 +57,7 @@ module Ci
# this check is to not leak the presence of the project if user cannot read it
return unless can?(job.user, :read_project, project)
pipeline = Ci::CreatePipelineService
response = Ci::CreatePipelineService
.new(project, job.user, ref: params[:ref], variables_attributes: variables)
.execute(:pipeline, ignore_skip_ci: true) do |pipeline|
source = job.sourced_pipelines.build(
@ -69,7 +69,7 @@ module Ci
pipeline.source_pipeline = source
end
pipeline_service_response(pipeline)
pipeline_service_response(response.payload)
end
def job_from_token

View File

@ -3,7 +3,7 @@
module MergeRequests
class CreatePipelineService < MergeRequests::BaseService
def execute(merge_request)
return unless can_create_pipeline_for?(merge_request)
return cannot_create_pipeline_error unless can_create_pipeline_for?(merge_request)
create_detached_merge_request_pipeline(merge_request)
end
@ -60,6 +60,10 @@ module MergeRequests
::Gitlab::UserAccess.new(current_user, container: merge_request.target_project)
.can_update_branch?(merge_request.source_branch_ref)
end
def cannot_create_pipeline_error
ServiceResponse.error(message: 'Cannot create a pipeline for this merge request.', payload: nil)
end
end
end

View File

@ -31,7 +31,7 @@ module Projects
#
LfsDownloadLinkListService
.new(project, remote_uri: current_endpoint_uri)
.execute(lfs_pointers_in_repository)
.execute(missing_lfs_files)
rescue LfsDownloadLinkListService::DownloadLinksError => e
raise LfsObjectDownloadListError, "The LFS objects download list couldn't be imported. Error: #{e.message}"
end
@ -53,6 +53,22 @@ module Projects
@lfs_pointers_in_repository ||= LfsListService.new(project).execute
end
def existing_lfs_objects
project.lfs_objects
end
def existing_lfs_objects_hash
{}.tap do |hash|
existing_lfs_objects.find_each do |lfs_object|
hash[lfs_object.oid] = lfs_object.size
end
end
end
def missing_lfs_files
lfs_pointers_in_repository.except(*existing_lfs_objects_hash.keys)
end
def lfsconfig_endpoint_uri
strong_memoize(:lfsconfig_endpoint_uri) do
# Retrieving the blob data from the .lfsconfig file
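The download list is now narrowed to pointers whose OIDs are not already stored for the project. A toy illustration of the `Hash#except` step in `missing_lfs_files`, with made-up OIDs (the `oid => size` shape mirrors the hashes built in this service):

```ruby
# Pointers found in the repository (oid => size).
lfs_pointers_in_repository = { 'abc123' => 1024, 'def456' => 2048 }

# OIDs already present as LfsObject records for the project.
existing_lfs_objects_hash = { 'abc123' => 1024 }

# Hash#except drops the OIDs we already have, leaving only files to fetch.
missing_lfs_files = lfs_pointers_in_repository.except(*existing_lfs_objects_hash.keys)
# => { 'def456' => 2048 }
```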

View File

@ -7,12 +7,12 @@
= _('External storage URL')
= f.text_field :static_objects_external_storage_url, class: 'form-control gl-form-input'
%span.form-text.text-muted#static_objects_external_storage_url_help_block
= _('URL of the external storage that will serve the repository static objects (e.g. archives, blobs, ...).')
= _('URL of the external storage to serve the repository static objects.')
.form-group
= f.label :static_objects_external_storage_auth_token, class: 'label-bold' do
= _('External storage authentication token')
= f.text_field :static_objects_external_storage_auth_token, class: 'form-control gl-form-input'
%span.form-text.text-muted#static_objects_external_storage_auth_token_help_block
= _('A secure token that identifies an external storage request.')
= _('Secure token that identifies an external storage request.')
= f.submit _('Save changes'), class: "gl-button btn btn-confirm"

View File

@ -55,10 +55,11 @@
%section.settings.as-repository-static-objects.no-animate#js-repository-static-objects-settings{ class: ('expanded' if expanded_by_default?) }
.settings-header
%h4
= _('Repository static objects')
= _('External storage for repository static objects')
%button.btn.gl-button.btn-default.js-settings-toggle{ type: 'button' }
= expanded_by_default? ? _('Collapse') : _('Expand')
%p
= _('Serve repository static objects (e.g. archives, blobs, ...) from an external storage (e.g. a CDN).')
= _('Serve repository static objects (for example, archives and blobs) from external storage.')
= link_to s_('Learn more.'), help_page_path('administration/static_objects_external_storage.md'), target: '_blank', rel: 'noopener noreferrer'
.settings-content
= render 'repository_static_objects'

View File

@ -54,6 +54,10 @@ module ApplicationWorker
subclass.after_set_class_attribute { subclass.set_queue }
end
def generated_queue_name
Gitlab::SidekiqConfig::WorkerRouter.queue_name_from_worker_name(self)
end
override :validate_worker_attributes!
def validate_worker_attributes!
super
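With the stored `queue` attribute gone, the queue name is always derived from the worker class. A hedged sketch of the derivation — the exact transformation lives in `Gitlab::SidekiqConfig::WorkerRouter.queue_name_from_worker_name`, and the result shown is an assumption:

```ruby
# Assumption: the router underscores the class name and strips a "Worker" suffix.
Gitlab::SidekiqConfig::WorkerRouter.queue_name_from_worker_name(ProjectExportWorker)
# => "project_export" (illustrative output)
```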

View File

@ -1,8 +0,0 @@
---
name: ci_build_metadata_config
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/7238
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/330954
milestone: '11.7'
type: development
group: group::pipeline execution
default_enabled: false

View File

@ -25,7 +25,7 @@ The following table outlines the major differences between Gitaly Cluster and Ge
| Tool | Nodes | Locations | Latency tolerance | Failover | Consistency | Provides redundancy for |
|:---------------|:---------|:----------|:-------------------|:----------------------------------------------------------------------------|:-----------------------------------------|:------------------------|
| Gitaly Cluster | Multiple | Single | Approximately 1 ms | [Automatic](praefect.md#automatic-failover-and-primary-election-strategies) | [Strong](praefect.md#strong-consistency) | Data storage in Git |
| Gitaly Cluster | Multiple | Single | Approximately 1 ms | [Automatic](praefect.md#automatic-failover-and-primary-election-strategies) | [Strong](index.md#strong-consistency) | Data storage in Git |
| Geo | Multiple | Multiple | Up to one minute | [Manual](../geo/disaster_recovery/index.md) | Eventual | Entire GitLab instance |
For more information, see:
@ -40,7 +40,7 @@ Yes! For more information, see [Migrate to Gitaly Cluster](praefect.md#migrate-t
## What are some repository storage recommendations?
The size of the required storage can vary between instances and depends on the set
[replication factor](praefect.md#replication-factor). You might want to include implementing
[replication factor](index.md#replication-factor). You might want to include implementing
repository storage redundancy.
For a replication factor:

View File

@ -30,8 +30,8 @@ repository storage is either:
- A Gitaly storage with direct access to repositories using [storage paths](../repository_storage_paths.md),
where each repository is stored on a single Gitaly node. All requests are routed to this node.
- A virtual storage provided by [Gitaly Cluster](#gitaly-cluster), where each repository can be
stored on multiple Gitaly nodes for fault tolerance. In a Gitaly Cluster:
- A [virtual storage](#virtual-storage) provided by [Gitaly Cluster](#gitaly-cluster), where each
repository can be stored on multiple Gitaly nodes for fault tolerance. In a Gitaly Cluster:
- Read requests are distributed between multiple Gitaly nodes, which can improve performance.
- Write requests are broadcast to repository replicas.
@ -39,32 +39,6 @@ WARNING:
Engineering support for NFS for Git repositories is deprecated. Read the
[deprecation notice](#nfs-deprecation-notice).
## Virtual storage
Virtual storage makes it viable to have a single repository storage in GitLab to simplify repository
management.
Virtual storage with Gitaly Cluster can usually replace direct Gitaly storage configurations.
However, this is at the expense of additional storage space needed to store each repository on multiple
Gitaly nodes. The benefit of using Gitaly Cluster virtual storage over direct Gitaly storage is:
- Improved fault tolerance, because each Gitaly node has a copy of every repository.
- Improved resource utilization, reducing the need for over-provisioning for shard-specific peak
loads, because read loads are distributed across Gitaly nodes.
- Manual rebalancing for performance is not required, because read loads are distributed across
Gitaly nodes.
- Simpler management, because all Gitaly nodes are identical.
The number of repository replicas can be configured using a
[replication factor](praefect.md#replication-factor).
It can
be uneconomical to have the same replication factor for all repositories.
[Variable replication factor](https://gitlab.com/groups/gitlab-org/-/epics/3372) is planned to
provide greater flexibility for extremely large GitLab instances.
As with normal Gitaly storages, virtual storages can be sharded.
## Gitaly
The following shows GitLab set up to use direct access to Gitaly:
@ -160,7 +134,7 @@ In this example:
- Repositories are stored on a virtual storage called `storage-1`.
- Three Gitaly nodes provide `storage-1` access: `gitaly-1`, `gitaly-2`, and `gitaly-3`.
- The three Gitaly nodes share data in three separate hashed storage locations.
- The [replication factor](praefect.md#replication-factor) is `3`. There are three copies maintained
- The [replication factor](#replication-factor) is `3`. There are three copies maintained
of each repository.
The availability objectives for Gitaly clusters are:
@ -170,7 +144,7 @@ The availability objectives for Gitaly clusters are:
Writes are replicated asynchronously. Any writes that have not been replicated
to the newly promoted primary are lost.
[Strong consistency](praefect.md#strong-consistency) can be used to avoid loss in some
[Strong consistency](#strong-consistency) can be used to avoid loss in some
circumstances.
- **Recovery Time Objective (RTO):** Less than 10 seconds.
@ -181,17 +155,31 @@ The availability objectives for Gitaly clusters are:
[Faster outage detection](https://gitlab.com/gitlab-org/gitaly/-/issues/2608)
is planned to improve this to less than 1 second.
Gitaly Cluster supports:
### Virtual storage
- [Strong consistency](praefect.md#strong-consistency) of the secondary replicas.
- [Automatic failover](praefect.md#automatic-failover-and-primary-election-strategies) from the primary to the secondary.
- Reporting of possible data loss if replication queue is non-empty.
- From GitLab 13.0 to GitLab 14.0, marking repositories as [read-only](praefect.md#read-only-mode)
if data loss is detected to prevent data inconsistencies.
Virtual storage makes it viable to have a single repository storage in GitLab to simplify repository
management.
Follow the [Gitaly Cluster epic](https://gitlab.com/groups/gitlab-org/-/epics/1489)
for improvements including
[horizontally distributing reads](https://gitlab.com/groups/gitlab-org/-/epics/2013).
Virtual storage with Gitaly Cluster can usually replace direct Gitaly storage configurations.
However, this is at the expense of additional storage space needed to store each repository on multiple
Gitaly nodes. The benefits of using Gitaly Cluster virtual storage over direct Gitaly storage are:
- Improved fault tolerance, because each Gitaly node has a copy of every repository.
- Improved resource utilization, reducing the need for over-provisioning for shard-specific peak
loads, because read loads are distributed across Gitaly nodes.
- Manual rebalancing for performance is not required, because read loads are distributed across
Gitaly nodes.
- Simpler management, because all Gitaly nodes are identical.
The number of repository replicas can be configured using a
[replication factor](#replication-factor).
It can
be uneconomical to have the same replication factor for all repositories.
[Variable replication factor](https://gitlab.com/groups/gitlab-org/-/epics/3372) is planned to
provide greater flexibility for extremely large GitLab instances.
As with normal Gitaly storages, virtual storages can be sharded.
### Moving beyond NFS
@ -220,7 +208,7 @@ Further reading:
- Blog post: [The road to Gitaly v1.0 (aka, why GitLab doesn't require NFS for storing Git data anymore)](https://about.gitlab.com/blog/2018/09/12/the-road-to-gitaly-1-0/)
- Blog post: [How we spent two weeks hunting an NFS bug in the Linux kernel](https://about.gitlab.com/blog/2018/11/14/how-we-spent-two-weeks-hunting-an-nfs-bug/)
### Components of Gitaly Cluster
### Components
Gitaly Cluster consists of multiple components:
@ -240,6 +228,86 @@ component for running a Gitaly Cluster.
For more information, see [Gitaly High Availability (HA) Design](https://gitlab.com/gitlab-org/gitaly/-/blob/master/doc/design_ha.md).
### Features
Gitaly Cluster provides the following features:
- [Distributed reads](#distributed-reads) among Gitaly nodes.
- [Strong consistency](#strong-consistency) of the secondary replicas.
- [Replication factor](#replication-factor) of repositories for increased redundancy.
- [Automatic failover](praefect.md#automatic-failover-and-primary-election-strategies) from the
primary Gitaly node to secondary Gitaly nodes.
- Reporting of possible [data loss](praefect.md#check-for-data-loss) if replication queue is
non-empty.
Follow the [Gitaly Cluster epic](https://gitlab.com/groups/gitlab-org/-/epics/1489) for improvements
including [horizontally distributing reads](https://gitlab.com/groups/gitlab-org/-/epics/2013).
#### Distributed reads
> - Introduced in GitLab 13.1 in [beta](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga) with feature flag `gitaly_distributed_reads` set to disabled.
> - [Made generally available and enabled by default](https://gitlab.com/gitlab-org/gitaly/-/issues/2951) in GitLab 13.3.
> - [Disabled by default](https://gitlab.com/gitlab-org/gitaly/-/issues/3178) in GitLab 13.5.
> - [Enabled by default](https://gitlab.com/gitlab-org/gitaly/-/issues/3334) in GitLab 13.8.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitaly/-/issues/3383) in GitLab 13.11.
Gitaly Cluster supports distribution of read operations across Gitaly nodes that are configured for
the [virtual storage](#virtual-storage).
All RPCs marked with the `ACCESSOR` option are redirected to an up to date and healthy Gitaly node.
For example, [`GetBlob`](https://gitlab.com/gitlab-org/gitaly/-/blob/v12.10.6/proto/blob.proto#L16).
_Up to date_ in this context means that:
- There are no replication operations scheduled for this Gitaly node.
- The last replication operation is in _completed_ state.
The primary node is chosen to serve the request if:
- There are no up to date nodes.
- Any other error occurs during node selection.
To track distribution of read operations, you can use the `gitaly_praefect_read_distribution`
Prometheus counter metric. It has two labels:
- `virtual_storage`.
- `storage`.
They reflect configuration defined for this instance of Praefect.
#### Strong consistency
> - Introduced in GitLab 13.1 in [alpha](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga), disabled by default.
> - Entered [beta](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga) in GitLab 13.2, disabled by default.
> - In GitLab 13.3, disabled unless primary-wins voting strategy is disabled.
> - From GitLab 13.4, enabled by default.
> - From GitLab 13.5, you must use Git v2.28.0 or higher on Gitaly nodes to enable strong consistency.
> - From GitLab 13.6, primary-wins voting strategy and `gitaly_reference_transactions_primary_wins` feature flag were removed from the source code.
By default, Gitaly Cluster guarantees eventual consistency by replicating all writes to secondary
Gitaly nodes after the write to the primary Gitaly node has happened.
Praefect can instead provide strong consistency by creating a transaction and writing changes to all
Gitaly nodes at once.
If enabled, transactions are only available for a subset of RPCs. For more information, see the
[strong consistency epic](https://gitlab.com/groups/gitlab-org/-/epics/1189).
For configuration information, see [Configure strong consistency](praefect.md#configure-strong-consistency).
#### Replication factor
Replication factor is the number of copies Gitaly Cluster maintains of a given repository. A higher
replication factor:
- Offers better redundancy and distribution of read workload.
- Results in higher storage cost.
By default, Gitaly Cluster replicates repositories to every storage in a
[virtual storage](#virtual-storage).
For configuration information, see [Configure replication factor](praefect.md#configure-replication-factor).
### Configure Gitaly Cluster
For more information on configuring Gitaly Cluster, see [Configure Gitaly Cluster](praefect.md).
@ -253,8 +321,8 @@ your assumptions, resulting in performance degradation, instability, and even da
- Gitaly has optimizations such as the [`info/refs` advertisement cache](https://gitlab.com/gitlab-org/gitaly/blob/master/doc/design_diskcache.md),
that rely on Gitaly controlling and monitoring access to repositories by using the official gRPC
interface.
- [Gitaly Cluster](praefect.md) has optimizations, such as fault tolerance and
[distributed reads](praefect.md#distributed-reads), that depend on the gRPC interface and database
- [Gitaly Cluster](#gitaly-cluster) has optimizations, such as fault tolerance and
[distributed reads](#distributed-reads), that depend on the gRPC interface and database
to determine repository state.
WARNING:

View File

@ -24,7 +24,7 @@ NOTE:
Upgrade instructions for Omnibus GitLab installations
[are available](https://docs.gitlab.com/omnibus/update/#gitaly-cluster).
## Requirements for configuring a Gitaly Cluster
## Requirements
The minimum recommended configuration for a Gitaly Cluster requires:
@ -33,14 +33,33 @@ The minimum recommended configuration for a Gitaly Cluster requires:
- 3 Praefect nodes
- 3 Gitaly nodes (1 primary, 2 secondary)
See the [design
document](https://gitlab.com/gitlab-org/gitaly/-/blob/master/doc/design_ha.md)
See the [design document](https://gitlab.com/gitlab-org/gitaly/-/blob/master/doc/design_ha.md)
for implementation details.
NOTE:
If not set in GitLab, feature flags are read as false from the console and Praefect uses their
default value. The default value depends on the GitLab version.
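For example, a flag can be set explicitly from the Rails console so Praefect sees a definite value rather than the version-dependent default (a sketch using the standard `Feature` console interface; the flag name is a placeholder):

```ruby
# From the GitLab Rails console (`sudo gitlab-rails console`):
Feature.enable(:some_gitaly_feature_flag)   # placeholder flag name
Feature.enabled?(:some_gitaly_feature_flag) # => true once set explicitly
```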
### Network connectivity
Gitaly Cluster [components](index.md#components) need to communicate with each other over many
routes. Your firewall rules must allow the following for Gitaly Cluster to function properly:
| From | To | Default port / TLS port |
|:-----------------------|:------------------------|:------------------------|
| GitLab | Praefect load balancer | `2305` / `3305` |
| Praefect load balancer | Praefect | `2305` / `3305` |
| Praefect | Gitaly | `8075` / `9999` |
| Gitaly | GitLab (internal API) | `80` / `443` |
| Gitaly | Praefect load balancer | `2305` / `3305` |
| Gitaly | Praefect | `2305` / `3305` |
| Gitaly | Gitaly | `8075` / `9999` |
NOTE:
Gitaly does not directly connect to Praefect. However, requests from Gitaly to the Praefect
load balancer may still be blocked unless firewalls on the Praefect nodes allow traffic from
the Gitaly nodes.
## Setup Instructions
If you [installed](https://about.gitlab.com/install/) GitLab using the Omnibus GitLab package
@ -129,7 +148,7 @@ The following options are available:
- For non-Geo installations, either:
- Use one of the documented [PostgreSQL setups](../postgresql/index.md).
- Use your own third-party database setup. This will require [manual setup](#manual-database-setup).
- Use your own third-party database setup. This requires [manual setup](#manual-database-setup).
- For Geo instances, either:
- Set up a separate [PostgreSQL instance](https://www.postgresql.org/docs/11/high-availability.html).
- Use a cloud-managed PostgreSQL service. AWS
@ -457,7 +476,7 @@ On the **Praefect** node:
In [GitLab 13.8 and earlier](https://gitlab.com/gitlab-org/omnibus-gitlab/-/merge_requests/4988),
Gitaly nodes were configured directly under the virtual storage, and not under the `nodes` key.
1. [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/2013) in GitLab 13.1 and later, enable [distribution of reads](#distributed-reads).
1. [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/2013) in GitLab 13.1 and later, enable [distribution of reads](index.md#distributed-reads).
1. Save the changes to `/etc/gitlab/gitlab.rb` and [reconfigure
Praefect](../restart_gitlab.md#omnibus-gitlab-reconfigure):
@ -1053,75 +1072,9 @@ To get started quickly:
Congratulations! You've configured an observable fault-tolerant Praefect
cluster.
## Network connectivity requirements
## Configure strong consistency
Gitaly Cluster components need to communicate with each other over many routes.
Your firewall rules must allow the following for Gitaly Cluster to function properly:
| From | To | Default port / TLS port |
|:-----------------------|:------------------------|:------------------------|
| GitLab | Praefect load balancer | `2305` / `3305` |
| Praefect load balancer | Praefect | `2305` / `3305` |
| Praefect | Gitaly | `8075` / `9999` |
| Gitaly | GitLab (internal API) | `80` / `443` |
| Gitaly | Praefect load balancer | `2305` / `3305` |
| Gitaly | Praefect | `2305` / `3305` |
| Gitaly | Gitaly | `8075` / `9999` |
NOTE:
Gitaly does not directly connect to Praefect. However, requests from Gitaly to the Praefect
load balancer may still be blocked unless firewalls on the Praefect nodes allow traffic from
the Gitaly nodes.
## Distributed reads
> - Introduced in GitLab 13.1 in [beta](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga) with feature flag `gitaly_distributed_reads` set to disabled.
> - [Made generally available and enabled by default](https://gitlab.com/gitlab-org/gitaly/-/issues/2951) in GitLab 13.3.
> - [Disabled by default](https://gitlab.com/gitlab-org/gitaly/-/issues/3178) in GitLab 13.5.
> - [Enabled by default](https://gitlab.com/gitlab-org/gitaly/-/issues/3334) in GitLab 13.8.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitaly/-/issues/3383) in GitLab 13.11.
Praefect supports distribution of read operations across Gitaly nodes that are
configured for the virtual node.
All RPCs marked with `ACCESSOR` option like
[GetBlob](https://gitlab.com/gitlab-org/gitaly/-/blob/v12.10.6/proto/blob.proto#L16)
are redirected to an up to date and healthy Gitaly node.
_Up to date_ in this context means that:
- There is no replication operations scheduled for this node.
- The last replication operation is in _completed_ state.
If there is no such nodes, or any other error occurs during node selection, the primary
node is chosen to serve the request.
To track distribution of read operations, you can use the `gitaly_praefect_read_distribution`
Prometheus counter metric. It has two labels:
- `virtual_storage`.
- `storage`.
They reflect configuration defined for this instance of Praefect.
## Strong consistency
> - Introduced in GitLab 13.1 in [alpha](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga), disabled by default.
> - Entered [beta](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga) in GitLab 13.2, disabled by default.
> - In GitLab 13.3, disabled unless primary-wins voting strategy is disabled.
> - From GitLab 13.4, enabled by default.
> - From GitLab 13.5, you must use Git v2.28.0 or higher on Gitaly nodes to enable strong consistency.
> - From GitLab 13.6, primary-wins voting strategy and `gitaly_reference_transactions_primary_wins` feature flag were removed from the source code.
Praefect guarantees eventual consistency by replicating all writes to secondary nodes
after the write to the primary Gitaly node has happened.
Praefect can instead provide strong consistency by creating a transaction and writing
changes to all Gitaly nodes at once.
If enabled, transactions are only available for a subset of RPCs. For more
information, see the [strong consistency epic](https://gitlab.com/groups/gitlab-org/-/epics/1189).
To enable strong consistency:
To enable [strong consistency](index.md#strong-consistency):
- In GitLab 13.5, you must use Git v2.28.0 or higher on Gitaly nodes to enable strong consistency.
- In GitLab 13.4 and later, the strong consistency voting strategy has been improved and enabled by default.
@ -1155,14 +1108,7 @@ To monitor strong consistency, you can use the following Prometheus metrics:
- `gitaly_hook_transaction_voting_delay_seconds`: Client-side delay introduced
by waiting for the transaction to be committed.
## Replication factor
Replication factor is the number of copies Praefect maintains of a given repository. A higher
replication factor offers better redundancy and distribution of read workload, but also results
in a higher storage cost. By default, Praefect replicates repositories to every storage in a
virtual storage.
### Configure replication factor
## Configure replication factor
WARNING:
Configurable replication factors require [repository-specific primary nodes](#repository-specific-primary-nodes) to be used.

View File

@ -95,14 +95,14 @@ To view the Sidekiq processes in GitLab:
## Negate settings
To have the additional Sidekiq processes work on every queue **except** the ones
To have the Sidekiq process work on every queue **except** the ones
you list, enable the `negate` setting. In this example, we exclude all import-related jobs from a Sidekiq node:
1. After you follow the steps for [starting extra processes](#start-multiple-processes),
edit `/etc/gitlab/gitlab.rb` and add:
1. Edit `/etc/gitlab/gitlab.rb` and add:
```ruby
sidekiq['negate'] = true
sidekiq['queue_selector'] = true
sidekiq['queue_groups'] = [
"feature_category=importers"
]
@ -140,50 +140,6 @@ sidekiq['queue_groups'] = [
]
```
### Disable Sidekiq cluster
WARNING:
Sidekiq cluster is [scheduled](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/240)
to be the only way to start Sidekiq in GitLab 14.0.
By default, the Sidekiq service runs `sidekiq-cluster`. To disable this behavior,
add the following to the Sidekiq configuration:
```ruby
sidekiq['enable'] = true
sidekiq['cluster'] = false
```
All of the aforementioned configuration options for `sidekiq`
are available. By default, they are configured as follows:
```ruby
sidekiq['queue_selector'] = false
sidekiq['interval'] = nil
sidekiq['max_concurrency'] = 50
sidekiq['min_concurrency'] = nil
sidekiq['negate'] = false
sidekiq['queue_groups'] = ['*']
sidekiq['shutdown_timeout'] = 25
```
`sidekiq_cluster` must be disabled if you decide to configure the
cluster as above.
When disabling `sidekiq_cluster`, you must copy your configuration for
`sidekiq_cluster` over to `sidekiq`. Anything configured for
`sidekiq_cluster` is overridden by the options for `sidekiq` when
setting `sidekiq['cluster'] = true`.
When using this feature, the service called `sidekiq` is now
running `sidekiq-cluster`.
The [concurrency](#manage-concurrency) and other options configured
for Sidekiq are respected.
By default, logs for `sidekiq-cluster` go to `/var/log/gitlab/sidekiq`
like regular Sidekiq logs.
## Ignore all import queues
When [importing from GitHub](../../user/project/import/github.md) or
@ -264,7 +220,7 @@ being equal to `max_concurrency`.
Running a single Sidekiq process is the default in GitLab 12.10 and earlier.
WARNING:
Running Sidekiq directly is scheduled to be removed in GitLab
Running Sidekiq directly was removed in GitLab
[14.0](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/240).
1. Edit `/etc/gitlab/gitlab.rb` and add:

View File

@ -103,9 +103,11 @@ based on a subset of worker attributes:
- `worker_name` - the worker name. The other attributes are typically more useful as
they are more general, but this is available in case a particular worker needs
to be selected.
- `name` - the queue name. The other attributes are typically more useful as
they are more general, but this is available in case a particular queue needs
to be selected.
- `name` - the queue name generated from the worker name. The other attributes
are typically more useful as they are more general, but this is available in
case a particular queue needs to be selected. Because this is generated from
the worker name, it does not change based on the result of other routing
rules.
- `resource_boundary` - if the queue is bound by `cpu`, `memory`, or
`unknown`. For example, the `ProjectExportWorker` is memory bound as it has
to load data in memory before saving it for export.
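These attributes form the matching side of a routing rule. A hedged sketch of what a rule set might look like in `/etc/gitlab/gitlab.rb` — the `sidekiq_routing_rules` key and the queue names are illustrative assumptions, not values from this commit:

```ruby
# Each pair is [worker-matching query, destination queue].
gitlab_rails['sidekiq_routing_rules'] = [
  ['resource_boundary=cpu', 'cpu_boundary'],   # route CPU-bound workers together
  ['feature_category=importers', 'importers'], # all importer workers
  ['*', 'default']                             # catch-all for everything else
]
```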

View File

@ -5,22 +5,23 @@ info: "To determine the technical writer assigned to the Stage/Group associated
type: reference
---
# Static objects external storage **(FREE)**
# External storage for static objects **(FREE)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/31025) in GitLab 12.3.
You can configure GitLab to serve repository static objects, like archives or raw blobs,
from an external storage, such as a Content Delivery Network (CDN).
Configure GitLab to serve repository static objects (such as archives or raw blobs) from external
storage such as a content delivery network (CDN).
## Configuring
## Configure external storage
To configure external storage for static objects:
1. On the top bar, select **Menu >** **{admin}** **Admin**.
1. In the left sidebar, select **Settings > Repository**.
1. Expand the **Repository static objects** section.
1. Expand the **External storage for repository static objects** section.
1. Enter the base URL and an arbitrary token. When you [set up external storage](#set-up-external-storage),
use a script that sets these values as `ORIGIN_HOSTNAME` and `STORAGE_TOKEN`.
1. Select **Save changes**.
The token is required to distinguish requests coming from the external storage, so users don't
circumvent the external storage and access the application directly. GitLab expects
@ -29,18 +30,23 @@ originating from the external storage.
## Serving private static objects
GitLab appends a user-specific token for static object URLs belonging to private projects,
so an external storage can be authenticated on the user's behalf. When processing requests originating
from the external storage, GitLab checks the following places to confirm the user may
access the requested object:
GitLab appends a user-specific token for static object URLs belonging to private projects so
external storage can be authenticated on the user's behalf.
When processing requests originating
from the external storage, GitLab checks the following to confirm the user may access the requested
object:
- The `token` query parameter.
- The `X-Gitlab-Static-Object-Token` header.
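In other words, the external storage must echo the user's token back on every request. A minimal sketch of the check, under the assumption that either source is accepted and compared against the user's token; this is illustrative, not the actual GitLab implementation:

```ruby
# Accept the token from either the query string or the header.
presented = params[:token].presence ||
  request.headers['X-Gitlab-Static-Object-Token'].presence

# Constant-time comparison against the user's token (assumed accessor).
valid = ActiveSupport::SecurityUtils.secure_compare(
  presented.to_s,
  user.static_object_token.to_s
)
```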
## Requests flow example
The following example shows a sequence of requests and responses between the user,
GitLab, and the CDN:
The following example shows a sequence of requests and responses between:
- The user.
- GitLab.
- The content delivery network.
```mermaid
sequenceDiagram
@ -72,7 +78,7 @@ other CDNs or Function as a Service (FaaS) systems should work using the same pr
- `ORIGIN_HOSTNAME`: the hostname of your GitLab installation.
- `STORAGE_TOKEN`: any arbitrary secure token. You can get a token by running
`pwgen -cn1 64` on a UNIX machine. Save this token for the Admin Area, as
described in the [configuring](#configuring) section.
described in the [Configure external storage](#configure-external-storage) section.
```javascript
const ORIGIN_HOSTNAME = 'gitlab.installation.com' // FIXME: SET CORRECT VALUE
@ -229,4 +235,4 @@ other CDNs or Function as a Service (FaaS) systems should work using the same pr
1. Create a new worker with this script.
1. Copy your values for `ORIGIN_HOSTNAME` and `STORAGE_TOKEN`.
Use those values [to configure external storage for static objects](#configuring).
Use those values [to configure external storage for static objects](#configure-external-storage).

View File

@ -16,7 +16,7 @@ This is a partial list of the [RSpec metadata](https://relishapp.com/rspec/rspec
| `:elasticsearch` | The test requires an Elasticsearch service. It is used by the [instance-level scenario](https://gitlab.com/gitlab-org/gitlab-qa#definitions) [`Test::Integration::Elasticsearch`](https://gitlab.com/gitlab-org/gitlab/-/blob/72b62b51bdf513e2936301cb6c7c91ec27c35b4d/qa/qa/ee/scenario/test/integration/elasticsearch.rb) to include only tests that require Elasticsearch. |
| `:except` | The test is to be run in their typical execution contexts _except_ as specified. See [test execution context selection](execution_context_selection.md) for more information. |
| `:geo` | The test requires two GitLab Geo instances - a primary and a secondary - to be spun up. |
| `:gitaly_cluster` | The test runs against a GitLab instance where repositories are stored on redundant Gitaly nodes behind a Praefect node. All nodes are [separate containers](../../../administration/gitaly/praefect.md#requirements-for-configuring-a-gitaly-cluster). Tests that use this tag have a longer setup time since there are three additional containers that need to be started. |
| `:gitaly_cluster` | The test runs against a GitLab instance where repositories are stored on redundant Gitaly nodes behind a Praefect node. All nodes are [separate containers](../../../administration/gitaly/praefect.md#requirements). Tests that use this tag have a longer setup time since there are three additional containers that need to be started. |
| `:github` | The test requires a GitHub personal access token. |
| `:group_saml` | The test requires a GitLab instance that has SAML SSO enabled at the group level. Interacts with an external SAML identity provider. Paired with the `:orchestrated` tag. |
| `:instance_saml` | The test requires a GitLab instance that has SAML SSO enabled at the instance level. Interacts with an external SAML identity provider. Paired with the `:orchestrated` tag. |

View File

@ -54,7 +54,7 @@ To access the default page for Admin Area settings:
| [Repository mirror](visibility_and_access_controls.md#allow-mirrors-to-be-set-up-for-projects) | Configure repository mirroring. |
| [Repository storage](../../../administration/repository_storage_types.md) | Configure storage path settings. |
| Repository maintenance | ([Repository checks](../../../administration/repository_checks.md) and [Housekeeping](../../../administration/housekeeping.md)). Configure automatic Git checks and housekeeping on repositories. |
| [Repository static objects](../../../administration/static_objects_external_storage.md) | Serve repository static objects (for example, archives, blobs, ...) from an external storage (for example, a CDN). |
| [Repository static objects](../../../administration/static_objects_external_storage.md) | Serve repository static objects (for example, archives and blobs) from an external storage (for example, a CDN). |
## Templates **(PREMIUM SELF)**

View File

@ -78,12 +78,11 @@ module API
.merge(variables_attributes: params[:variables])
.except(:variables)
new_pipeline = ::Ci::CreatePipelineService.new(user_project,
current_user,
pipeline_params)
.execute(:api, ignore_skip_ci: true, save_on_errors: false)
response = ::Ci::CreatePipelineService.new(user_project, current_user, pipeline_params)
.execute(:api, ignore_skip_ci: true, save_on_errors: false)
new_pipeline = response.payload
if new_pipeline.persisted?
if response.success?
present new_pipeline, with: Entities::Ci::Pipeline
else
render_validation_error!(new_pipeline)

View File

@ -404,6 +404,7 @@ module API
pipeline = ::MergeRequests::CreatePipelineService
.new(project: user_project, current_user: current_user, params: { allow_duplicate: true })
.execute(find_merge_request_with_access(params[:merge_request_iid]))
.payload
if pipeline.nil?
not_allowed!

View File

@ -89,6 +89,32 @@ module Gitlab
job.user
end
def find_user_from_basic_auth_password
return unless has_basic_credentials?(current_request)
login, password = user_name_and_password(current_request)
return if ::Gitlab::Auth::CI_JOB_USER == login
Gitlab::Auth.find_with_user_password(login, password)
end
def find_user_from_lfs_token
return unless has_basic_credentials?(current_request)
login, token = user_name_and_password(current_request)
user = User.by_login(login)
user if user && Gitlab::LfsToken.new(user).token_valid?(token)
end
def find_user_from_personal_access_token
return unless access_token
validate_access_token!
access_token&.user || raise(UnauthorizedError)
end
# We allow Private Access Tokens with `api` scope to be used by web
# requests on RSS feeds or ICS files for backwards compatibility.
# It is also used by GraphQL/API requests.
@ -308,6 +334,10 @@ module Gitlab
current_request.path.starts_with?(Gitlab::Utils.append_path(Gitlab.config.gitlab.relative_url_root, '/api/'))
end
def git_request?
Gitlab::PathRegex.repository_git_route_regex.match?(current_request.path)
end
def archive_request?
current_request.path.include?('/-/archive/')
end

View File

@ -34,7 +34,10 @@ module Gitlab
find_user_from_feed_token(request_format) ||
find_user_from_static_object_token(request_format) ||
find_user_from_basic_auth_job ||
find_user_from_job_token
find_user_from_job_token ||
find_user_from_lfs_token ||
find_user_from_personal_access_token ||
find_user_from_basic_auth_password
rescue Gitlab::Auth::AuthenticationError
nil
end
@ -58,7 +61,7 @@ module Gitlab
def route_authentication_setting
@route_authentication_setting ||= {
job_token_allowed: api_request?,
basic_auth_personal_access_token: api_request?
basic_auth_personal_access_token: api_request? || git_request?
}
end
end
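With the extended chain, a Git-over-HTTP request carrying Basic credentials can now resolve a user without a session. A rough sketch of the lookup (the `request` object is hypothetical; the order follows the finders listed above):

```ruby
authenticator = Gitlab::Auth::RequestAuthenticator.new(request)

# Tries, in order: web access token, feed token, static object token,
# basic-auth job token, job token, then the finders added in this commit:
# LFS token, personal access token, and username/password over Basic auth.
user = authenticator.find_sessionless_user(:api)
```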

View File

@ -54,10 +54,12 @@ module Gitlab
}
)
service.execute(:chat) do |pipeline|
response = service.execute(:chat) do |pipeline|
build_environment_variables(pipeline)
build_chat_data(pipeline)
end
response.payload
end
# pipeline - The `Ci::Pipeline` to create the environment variables for.

View File

@ -38,6 +38,7 @@ module Gitlab
pipeline = ::Ci::CreatePipelineService
.new(@project, @current_user, ref: @project.default_branch)
.execute(:push, dry_run: true, content: content)
.payload
Result.new(
jobs: dry_run_convert_to_jobs(pipeline.stages),

View File

@ -5,7 +5,6 @@ module Gitlab
# For queues that don't have explicit workers - default and mailers
class DummyWorker
ATTRIBUTE_METHODS = {
queue: :queue,
name: :name,
feature_category: :get_feature_category,
has_external_dependencies: :worker_has_external_dependencies?,
@ -20,6 +19,10 @@ module Gitlab
@attributes = attributes
end
def generated_queue_name
@attributes[:queue]
end
def queue_namespace
nil
end

View File

@ -6,9 +6,11 @@ module Gitlab
include Comparable
attr_reader :klass
delegate :feature_category_not_owned?, :get_feature_category, :get_sidekiq_options,
:get_tags, :get_urgency, :get_weight, :get_worker_resource_boundary,
:idempotent?, :queue, :queue_namespace, :worker_has_external_dependencies?,
delegate :feature_category_not_owned?, :generated_queue_name, :get_feature_category,
:get_sidekiq_options, :get_tags, :get_urgency, :get_weight,
:get_worker_resource_boundary, :idempotent?, :queue_namespace,
:worker_has_external_dependencies?,
to: :klass
def initialize(klass, ee:)
@ -35,7 +37,7 @@ module Gitlab
# Put namespaced queues first
def to_sort
[queue_namespace ? 0 : 1, queue]
[queue_namespace ? 0 : 1, generated_queue_name]
end
# YAML representation
@ -45,7 +47,7 @@ module Gitlab
def to_yaml
{
name: queue,
name: generated_queue_name,
worker_name: klass.name,
feature_category: get_feature_category,
has_external_dependencies: worker_has_external_dependencies?,
@ -62,7 +64,7 @@ module Gitlab
end
def queue_and_weight
[queue, get_weight]
[generated_queue_name, get_weight]
end
def retries

View File

@ -1501,9 +1501,6 @@ msgstr ""
msgid "A rebase is already in progress."
msgstr ""
msgid "A secure token that identifies an external storage request."
msgstr ""
msgid "A sign-in to your account has been made from the following IP address: %{ip}"
msgstr ""
@ -13389,6 +13386,9 @@ msgstr ""
msgid "External storage authentication token"
msgstr ""
msgid "External storage for repository static objects"
msgstr ""
msgid "ExternalAuthorizationService|Classification label"
msgstr ""
@ -27713,9 +27713,6 @@ msgstr ""
msgid "Repository size is above the limit."
msgstr ""
msgid "Repository static objects"
msgstr ""
msgid "Repository storage"
msgstr ""
@ -28829,6 +28826,9 @@ msgstr ""
msgid "Secret token"
msgstr ""
msgid "Secure token that identifies an external storage request."
msgstr ""
msgid "Security"
msgstr ""
@ -29627,7 +29627,7 @@ msgstr ""
msgid "SeriesFinalConjunction|and"
msgstr ""
msgid "Serve repository static objects (e.g. archives, blobs, ...) from an external storage (e.g. a CDN)."
msgid "Serve repository static objects (for example, archives and blobs) from external storage."
msgstr ""
msgid "Server supports batch API only, please update your Git LFS client to version 1.0.1 and up."
@ -34815,7 +34815,7 @@ msgstr ""
msgid "URL of the external Spam Check endpoint"
msgstr ""
msgid "URL of the external storage that will serve the repository static objects (e.g. archives, blobs, ...)."
msgid "URL of the external storage to serve the repository static objects."
msgstr ""
msgid "URL or request ID"

View File

@ -3,7 +3,7 @@
require 'airborne'
module QA
RSpec.describe 'Package', only: { subdomain: :staging } do
RSpec.describe 'Package', only: { subdomain: %i[staging pre] } do
include Support::Api
describe 'Container Registry' do
@ -13,6 +13,7 @@ module QA
Resource::Project.fabricate_via_api! do |project|
project.name = 'project-with-registry-api'
project.template_name = 'express'
project.api_client = api_client
end
end
@ -37,6 +38,12 @@ module QA
- docker:19.03.12-dind
variables:
IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
DOCKER_HOST: tcp://docker:2376
DOCKER_TLS_CERTDIR: "/certs"
DOCKER_TLS_VERIFY: 1
DOCKER_CERT_PATH: "$DOCKER_TLS_CERTDIR/client"
before_script:
- until docker info; do sleep 1; done
script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker build -t $IMAGE_TAG .
@ -50,6 +57,7 @@ module QA
MEDIA_TYPE: 'application/vnd.docker.distribution.manifest.v2+json'
before_script:
- token=$(curl -u "$CI_REGISTRY_USER:$CI_REGISTRY_PASSWORD" "https://$CI_SERVER_HOST/jwt/auth?service=container_registry&scope=repository:$CI_PROJECT_PATH:pull,push,delete" | jq -r '.token')
- echo $token
script:
- 'digest=$(curl -L -H "Authorization: Bearer $token" -H "Accept: $MEDIA_TYPE" "https://$CI_REGISTRY/v2/$CI_PROJECT_PATH/manifests/master" | jq -r ".layers[0].digest")'
- 'curl -L -X DELETE -H "Authorization: Bearer $token" -H "Accept: $MEDIA_TYPE" "https://$CI_REGISTRY/v2/$CI_PROJECT_PATH/blobs/$digest"'
@ -57,7 +65,6 @@ module QA
- 'digest=$(curl -L -H "Authorization: Bearer $token" -H "Accept: $MEDIA_TYPE" "https://$CI_REGISTRY/v2/$CI_PROJECT_PATH/manifests/master" | jq -r ".config.digest")'
- 'curl -L -X DELETE -H "Authorization: Bearer $token" -H "Accept: $MEDIA_TYPE" "https://$CI_REGISTRY/v2/$CI_PROJECT_PATH/manifests/$digest"'
- 'curl -L --head -H "Authorization: Bearer $token" -H "Accept: $MEDIA_TYPE" "https://$CI_REGISTRY/v2/$CI_PROJECT_PATH/manifests/$digest"'
YAML
end
@ -67,8 +74,9 @@ module QA
it 'pushes, pulls image to the registry and deletes image blob, manifest and tag', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/1738' do
Resource::Repository::Commit.fabricate_via_api! do |commit|
commit.project = project
commit.api_client = api_client
commit.commit_message = 'Add .gitlab-ci.yml'
commit.project = project
commit.add_files([{
file_path: '.gitlab-ci.yml',
content: gitlab_ci_yaml
@ -77,7 +85,7 @@ module QA
Support::Waiter.wait_until(max_duration: 10) { pipeline_is_triggered? }
Support::Retrier.retry_until(max_duration: 260, sleep_interval: 5) do
Support::Retrier.retry_until(max_duration: 300, sleep_interval: 5) do
latest_pipeline_succeed?
end

View File

@ -43,12 +43,14 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
let!(:push_pipeline) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
.execute(:push)
.execute(:push)
.payload
end
let!(:detached_merge_request_pipeline) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
.execute(:merge_request_event, merge_request: merge_request)
.execute(:merge_request_event, merge_request: merge_request)
.payload
end
before do
@ -77,12 +79,14 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when a user updated a merge request in the parent project', :sidekiq_might_not_need_inline do
let!(:push_pipeline_2) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
.execute(:push)
.execute(:push)
.payload
end
let!(:detached_merge_request_pipeline_2) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
.execute(:merge_request_event, merge_request: merge_request)
.execute(:merge_request_event, merge_request: merge_request)
.payload
end
before do
@ -222,12 +226,14 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
let!(:push_pipeline) do
Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
.execute(:push)
.execute(:push)
.payload
end
let!(:detached_merge_request_pipeline) do
Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
.execute(:merge_request_event, merge_request: merge_request)
.execute(:merge_request_event, merge_request: merge_request)
.payload
end
let(:forked_project) { fork_project(project, user2, repository: true) }
@ -267,12 +273,14 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when a user updated a merge request from a forked project to the parent project' do
let!(:push_pipeline_2) do
Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
.execute(:push)
.execute(:push)
.payload
end
let!(:detached_merge_request_pipeline_2) do
Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
.execute(:merge_request_event, merge_request: merge_request)
.execute(:merge_request_event, merge_request: merge_request)
.payload
end
before do

View File

@ -245,7 +245,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
threads << Thread.new do
Sidekiq::Worker.skipping_transaction_check do
@pipeline = Ci::CreatePipelineService.new(project, user, build_push_data).execute(:push)
@pipeline = Ci::CreatePipelineService.new(project, user, build_push_data).execute(:push).payload
end
end

View File

@ -116,6 +116,8 @@ describe('DropdownContentsLabelsView', () => {
});
describe('methods', () => {
const fakePreventDefault = jest.fn();
describe('isLabelSelected', () => {
it('returns true when provided `label` param is one of the selected labels', () => {
expect(wrapper.vm.isLabelSelected(mockRegularLabel)).toBe(true);
@ -191,9 +193,11 @@ describe('DropdownContentsLabelsView', () => {
wrapper.vm.handleKeyDown({
keyCode: ENTER_KEY_CODE,
preventDefault: fakePreventDefault,
});
expect(wrapper.vm.searchKey).toBe('');
expect(fakePreventDefault).toHaveBeenCalled();
});
it('calls action `updateSelectedLabels` with currently highlighted label when Enter key is pressed', () => {
@ -204,6 +208,7 @@ describe('DropdownContentsLabelsView', () => {
wrapper.vm.handleKeyDown({
keyCode: ENTER_KEY_CODE,
preventDefault: fakePreventDefault,
});
expect(wrapper.vm.updateSelectedLabels).toHaveBeenCalledWith([

View File

@ -214,7 +214,7 @@ describe('LabelsSelect Actions', () => {
});
describe('on success', () => {
it('dispatches `requestCreateLabel`, `receiveCreateLabelSuccess` & `toggleDropdownContentsCreateView` actions', (done) => {
it('dispatches `requestCreateLabel`, `fetchLabels`, `receiveCreateLabelSuccess` & `toggleDropdownContentsCreateView` actions', (done) => {
const label = { id: 1 };
mock.onPost(/labels.json/).replyOnce(200, label);
@ -225,6 +225,7 @@ describe('LabelsSelect Actions', () => {
[],
[
{ type: 'requestCreateLabel' },
{ payload: { refetch: true }, type: 'fetchLabels' },
{ type: 'receiveCreateLabelSuccess' },
{ type: 'toggleDropdownContentsCreateView' },
],

View File

@ -708,6 +708,122 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
end
describe '#find_user_from_basic_auth_password' do
subject { find_user_from_basic_auth_password }
context 'when the request does not have AUTHORIZATION header' do
it { is_expected.to be_nil }
end
it 'returns nil without user and password' do
set_basic_auth_header(nil, nil)
is_expected.to be_nil
end
it 'returns nil without password' do
set_basic_auth_header('some-user', nil)
is_expected.to be_nil
end
it 'returns nil without user' do
set_basic_auth_header(nil, 'password')
is_expected.to be_nil
end
it 'returns nil with CI username' do
set_basic_auth_header(::Gitlab::Auth::CI_JOB_USER, 'password')
is_expected.to be_nil
end
it 'returns nil with wrong password' do
set_basic_auth_header(user.username, 'wrong-password')
is_expected.to be_nil
end
it 'returns user with correct credentials' do
set_basic_auth_header(user.username, user.password)
is_expected.to eq(user)
end
end
describe '#find_user_from_lfs_token' do
subject { find_user_from_lfs_token }
context 'when the request does not have AUTHORIZATION header' do
it { is_expected.to be_nil }
end
it 'returns nil without user and token' do
set_basic_auth_header(nil, nil)
is_expected.to be_nil
end
it 'returns nil without token' do
set_basic_auth_header('some-user', nil)
is_expected.to be_nil
end
it 'returns nil without user' do
set_basic_auth_header(nil, 'token')
is_expected.to be_nil
end
it 'returns nil with wrong token' do
set_basic_auth_header(user.username, 'wrong-token')
is_expected.to be_nil
end
it 'returns user with correct user and correct token' do
lfs_token = Gitlab::LfsToken.new(user).token
set_basic_auth_header(user.username, lfs_token)
is_expected.to eq(user)
end
it 'returns nil with wrong user and correct token' do
lfs_token = Gitlab::LfsToken.new(user).token
other_user = create(:user)
set_basic_auth_header(other_user.username, lfs_token)
is_expected.to be_nil
end
end
describe '#find_user_from_personal_access_token' do
subject { find_user_from_personal_access_token }
it 'returns nil without access token' do
allow_any_instance_of(described_class).to receive(:access_token).and_return(nil)
is_expected.to be_nil
end
it 'returns user with correct access token' do
personal_access_token = create(:personal_access_token, user: user)
allow_any_instance_of(described_class).to receive(:access_token).and_return(personal_access_token)
is_expected.to eq(user)
end
it 'raises an exception if the access token has no user' do
personal_access_token = create(:personal_access_token, user: user)
allow_any_instance_of(described_class).to receive(:access_token).and_return(personal_access_token)
allow_any_instance_of(PersonalAccessToken).to receive(:user).and_return(nil)
expect { subject }.to raise_error(Gitlab::Auth::UnauthorizedError)
end
end
describe '#validate_access_token!' do
subject { validate_access_token! }
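
The basic-auth examples above lean on a set_basic_auth_header spec helper whose definition is not part of this diff. A plausible sketch, assuming Rails' built-in Basic credentials encoder:

# Assumed implementation; the real helper lives in the spec support code.
def set_basic_auth_header(username, password)
  env['HTTP_AUTHORIZATION'] =
    ActionController::HttpAuthentication::Basic.encode_credentials(username, password)
end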

View File

@ -45,6 +45,9 @@ RSpec.describe Gitlab::Auth::RequestAuthenticator do
let!(:feed_token_user) { build(:user) }
let!(:static_object_token_user) { build(:user) }
let!(:job_token_user) { build(:user) }
let!(:lfs_token_user) { build(:user) }
let!(:basic_auth_access_token_user) { build(:user) }
let!(:basic_auth_password_user) { build(:user) }
it 'returns access_token user first' do
allow_any_instance_of(described_class).to receive(:find_user_from_web_access_token)
@ -78,6 +81,30 @@ RSpec.describe Gitlab::Auth::RequestAuthenticator do
expect(subject.find_sessionless_user(:api)).to eq job_token_user
end
it 'returns lfs_token user if no job_token user found' do
allow_any_instance_of(described_class)
.to receive(:find_user_from_lfs_token)
.and_return(lfs_token_user)
expect(subject.find_sessionless_user(:api)).to eq lfs_token_user
end
it 'returns basic_auth_access_token user if no lfs_token user found' do
allow_any_instance_of(described_class)
.to receive(:find_user_from_personal_access_token)
.and_return(basic_auth_access_token_user)
expect(subject.find_sessionless_user(:api)).to eq basic_auth_access_token_user
end
it 'returns basic_auth_access_password user if no basic_auth_access_token user found' do
allow_any_instance_of(described_class)
.to receive(:find_user_from_basic_auth_password)
.and_return(basic_auth_password_user)
expect(subject.find_sessionless_user(:api)).to eq basic_auth_password_user
end
it 'returns nil if no user found' do
expect(subject.find_sessionless_user(:api)).to be_blank
end
@ -194,4 +221,27 @@ RSpec.describe Gitlab::Auth::RequestAuthenticator do
expect(subject.runner).to be_blank
end
end
describe '#route_authentication_setting' do
using RSpec::Parameterized::TableSyntax
where(:script_name, :expected_job_token_allowed, :expected_basic_auth_personal_access_token) do
'/api/endpoint' | true | true
'/namespace/project.git' | false | true
'/web/endpoint' | false | false
end
with_them do
before do
env['SCRIPT_NAME'] = script_name
end
it 'returns correct settings' do
expect(subject.send(:route_authentication_setting)).to eql({
job_token_allowed: expected_job_token_allowed,
basic_auth_personal_access_token: expected_basic_auth_personal_access_token
})
end
end
end
end
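
Taken together, these examples pin down a precedence chain for sessionless authentication. The implementation itself is not in this diff; the order the specs imply is roughly:

# Pseudocode reconstructed from the examples above. The stubbed method names
# are taken from the specs; the feed and static-object steps and the chaining
# itself are assumptions.
find_user_from_web_access_token(request_format) ||
  find_user_from_feed_token(request_format) ||
  find_user_from_static_object_token(request_format) ||
  find_user_from_job_token ||
  find_user_from_lfs_token ||
  find_user_from_personal_access_token ||
  find_user_from_basic_auth_password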

View File

@ -53,24 +53,8 @@ RSpec.describe Gitlab::Ci::Build::AutoRetry do
context 'with retries max config option' do
let(:build) { create(:ci_build, options: { retry: { max: 1 } }) }
context 'when build_metadata_config is set' do
before do
stub_feature_flags(ci_build_metadata_config: true)
end
it 'returns the number of configured max retries' do
expect(result).to eq 1
end
end
context 'when build_metadata_config is not set' do
before do
stub_feature_flags(ci_build_metadata_config: false)
end
it 'returns the number of configured max retries' do
expect(result).to eq 1
end
it 'returns the number of configured max retries' do
expect(result).to eq 1
end
end

View File

@ -12,7 +12,7 @@ RSpec.describe '5-Minute-Production-App.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_branch) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do

View File

@ -11,7 +11,7 @@ RSpec.describe 'Deploy-ECS.gitlab-ci.yml' do
let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
let(:platform_target) { 'ECS' }

View File

@ -12,7 +12,7 @@ RSpec.describe 'Jobs/Build.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
let(:pipeline) { service.execute!(:push) }
let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@ -47,7 +47,7 @@ RSpec.describe 'Jobs/Build.gitlab-ci.yml' do
context 'on merge request' do
let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
let(:pipeline) { service.execute(merge_request).payload }
it 'has no jobs' do
expect(pipeline).to be_merge_request_event

View File

@ -12,7 +12,7 @@ RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
let(:pipeline) { service.execute!(:push) }
let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@ -47,7 +47,7 @@ RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
context 'on merge request' do
let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
let(:pipeline) { service.execute(merge_request).payload }
it 'has no jobs' do
expect(pipeline).to be_merge_request_event

View File

@ -33,7 +33,7 @@ RSpec.describe 'Jobs/Deploy.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
let(:pipeline) { service.execute!(:push) }
let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@ -210,7 +210,7 @@ RSpec.describe 'Jobs/Deploy.gitlab-ci.yml' do
context 'on merge request' do
let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
let(:pipeline) { service.execute(merge_request).payload }
it 'has no jobs' do
expect(pipeline).to be_merge_request_event

View File

@ -12,7 +12,7 @@ RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
let(:pipeline) { service.execute!(:push) }
let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@ -47,7 +47,7 @@ RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
context 'on merge request' do
let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
let(:pipeline) { service.execute(merge_request).payload }
it 'has no jobs' do
expect(pipeline).to be_merge_request_event

View File

@ -11,7 +11,7 @@ RSpec.describe 'Terraform/Base.latest.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do

View File

@ -25,7 +25,7 @@ RSpec.describe 'Verify/Load-Performance-Testing.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
let(:pipeline) { service.execute!(:push) }
let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@ -64,7 +64,7 @@ RSpec.describe 'Verify/Load-Performance-Testing.gitlab-ci.yml' do
context 'on merge request' do
let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
let(:pipeline) { service.execute(merge_request).payload }
it 'has no jobs' do
expect(pipeline).to be_merge_request_event

View File

@ -17,7 +17,7 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@ -264,7 +264,7 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, files: files) }
let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: default_branch ) }
let(:pipeline) { service.execute(:push) }
let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do

View File

@ -10,7 +10,7 @@ RSpec.describe 'Flutter.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do

View File

@ -10,7 +10,7 @@ RSpec.describe 'Managed-Cluster-Applications.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, namespace: user.namespace, files: { 'README.md' => '' }) }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
let(:default_branch) { project.default_branch_or_main }
let(:pipeline_branch) { default_branch }

View File

@ -14,7 +14,7 @@ RSpec.describe 'npm.gitlab-ci.yml' do
let(:pipeline_tag) { 'v1.2.1' }
let(:pipeline_ref) { pipeline_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref ) }
let(:pipeline) { service.execute!(:push) }
let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
def create_branch(name:)

View File

@ -15,7 +15,7 @@ RSpec.describe 'Terraform.latest.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:pipeline) { service.execute!(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do

View File

@ -114,6 +114,13 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerRouter do
['resource_boundary=cpu', 'queue_b'],
['tags=expensive', 'queue_c']
] | 'queue_foo'
# Match by generated queue name
[
['name=foo_bar', 'queue_foo'],
['feature_category=feature_a|urgency=low', 'queue_a'],
['resource_boundary=cpu', 'queue_b'],
['tags=expensive', 'queue_c']
] | 'queue_foo'
end
end
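
The router examples use rspec-parameterized table syntax: each row pairs a list of routing rules with the queue the worker is expected to land in, and the multi-row fixtures suggest the first matching rule wins. A cut-down sketch of the shape (the route call signature is an assumption):

# Each rule is a [query, queue] pair; anything beyond first-match semantics is assumed.
where(:routing_rules, :expected_queue) do
  [['name=foo_bar', 'queue_foo'], ['tags=expensive', 'queue_c']] | 'queue_foo'
end

with_them do
  specify { expect(described_class.new(routing_rules).route(worker)).to eq(expected_queue) }
end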

View File

@ -7,7 +7,7 @@ RSpec.describe Gitlab::SidekiqConfig::Worker do
namespace = queue.include?(':') && queue.split(':').first
inner_worker = double(
name: attributes[:worker_name] || 'Foo::BarWorker',
queue: queue,
generated_queue_name: queue,
queue_namespace: namespace,
get_feature_category: attributes[:feature_category],
get_weight: attributes[:weight],
@ -48,9 +48,9 @@ RSpec.describe Gitlab::SidekiqConfig::Worker do
describe 'delegations' do
[
:feature_category_not_owned?, :get_feature_category, :get_weight,
:get_worker_resource_boundary, :get_urgency, :queue,
:queue_namespace, :worker_has_external_dependencies?
:feature_category_not_owned?, :generated_queue_name,
:get_feature_category, :get_weight, :get_worker_resource_boundary,
:get_urgency, :queue_namespace, :worker_has_external_dependencies?
].each do |meth|
it "delegates #{meth} to the worker class" do
worker = double

View File

@ -28,7 +28,7 @@ RSpec.describe Gitlab::SidekiqConfig do
describe '.workers_for_all_queues_yml' do
it 'returns a tuple with FOSS workers first' do
expect(described_class.workers_for_all_queues_yml.first)
.to include(an_object_having_attributes(queue: 'post_receive'))
.to include(an_object_having_attributes(generated_queue_name: 'post_receive'))
end
end
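
These two Sidekiq hunks, and the 'Every Sidekiq worker' hunk at the end of this diff, apply one rename: the queue name derived from a worker's attributes is now exposed as generated_queue_name instead of the overloaded queue. In use:

# The rename is shown above; the constructor signature here is an assumption.
worker = Gitlab::SidekiqConfig::Worker.new(PostReceive, ee: false)
worker.generated_queue_name # => 'post_receive'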

View File

@ -2214,34 +2214,12 @@ RSpec.describe Ci::Build do
expect(build.options['image']).to be_nil
end
context 'when ci_build_metadata_config is set' do
before do
stub_feature_flags(ci_build_metadata_config: true)
end
it 'persist data in build metadata' do
expect(build.metadata.read_attribute(:config_options)).to eq(options.symbolize_keys)
end
it 'does not persist data in build' do
expect(build.read_attribute(:options)).to be_nil
end
it 'persist data in build metadata' do
expect(build.metadata.read_attribute(:config_options)).to eq(options.symbolize_keys)
end
context 'when ci_build_metadata_config is disabled' do
let(:build) { create(:ci_build, pipeline: pipeline) }
before do
stub_feature_flags(ci_build_metadata_config: false)
end
it 'persist data in build' do
expect(build.read_attribute(:options)).to eq(options.symbolize_keys)
end
it 'does not persist data in build metadata' do
expect(build.metadata.read_attribute(:config_options)).to be_nil
end
it 'does not persist data in build' do
expect(build.read_attribute(:options)).to be_nil
end
context 'when options include artifacts:expose_as' do
@ -3613,36 +3591,14 @@ RSpec.describe Ci::Build do
end
end
context 'when ci_build_metadata_config is set' do
before do
stub_feature_flags(ci_build_metadata_config: true)
end
it_behaves_like 'having consistent representation'
it_behaves_like 'having consistent representation'
it 'persist data in build metadata' do
expect(build.metadata.read_attribute(:config_variables)).not_to be_nil
end
it 'does not persist data in build' do
expect(build.read_attribute(:yaml_variables)).to be_nil
end
it 'persist data in build metadata' do
expect(build.metadata.read_attribute(:config_variables)).not_to be_nil
end
context 'when ci_build_metadata_config is disabled' do
before do
stub_feature_flags(ci_build_metadata_config: false)
end
it_behaves_like 'having consistent representation'
it 'persist data in build' do
expect(build.read_attribute(:yaml_variables)).not_to be_nil
end
it 'does not persist data in build metadata' do
expect(build.metadata.read_attribute(:config_variables)).to be_nil
end
it 'does not persist data in build' do
expect(build.read_attribute(:yaml_variables)).to be_nil
end
end
@ -4788,51 +4744,21 @@ RSpec.describe Ci::Build do
subject { build.send(:write_metadata_attribute, :options, :config_options, options) }
context 'when ci_build_metadata_config is set' do
context 'when data in build is already set' do
before do
stub_feature_flags(ci_build_metadata_config: true)
build.write_attribute(:options, existing_options)
end
context 'when data in build is already set' do
before do
build.write_attribute(:options, existing_options)
end
it 'does set metadata options' do
subject
it 'does set metadata options' do
subject
expect(build.metadata.read_attribute(:config_options)).to eq(options)
end
it 'does reset build options' do
subject
expect(build.read_attribute(:options)).to be_nil
end
end
end
context 'when ci_build_metadata_config is disabled' do
before do
stub_feature_flags(ci_build_metadata_config: false)
expect(build.metadata.read_attribute(:config_options)).to eq(options)
end
context 'when data in build metadata is already set' do
before do
build.ensure_metadata.write_attribute(:config_options, existing_options)
end
it 'does reset build options' do
subject
it 'does set metadata options' do
subject
expect(build.read_attribute(:options)).to eq(options)
end
it 'does reset build options' do
subject
expect(build.metadata.read_attribute(:config_options)).to be_nil
end
expect(build.read_attribute(:options)).to be_nil
end
end
end
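
With the ci_build_metadata_config flag removed, the metadata record is the only storage path left, and the surviving expectations describe it completely:

# Reconstructed from the expectations above, not from the model code itself.
build.send(:write_metadata_attribute, :options, :config_options, options)

build.metadata.read_attribute(:config_options) # => options
build.read_attribute(:options)                 # => nil (the build row is reset)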

View File

@ -677,4 +677,118 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
it_behaves_like 'reject requests over the rate limit'
end
end
describe 'Gitlab::RackAttack::Request#unauthenticated?' do
let_it_be(:url) { "/api/v4/projects" }
let_it_be(:user) { create(:user) }
def expect_unauthenticated_request
expect_next_instance_of(Rack::Attack::Request) do |instance|
expect(instance.unauthenticated?).to be true
end
end
def expect_authenticated_request
expect_next_instance_of(Rack::Attack::Request) do |instance|
expect(instance.unauthenticated?).to be false
end
end
before do
settings_to_set[:throttle_unauthenticated_enabled] = true
stub_application_setting(settings_to_set)
end
context 'without authentication' do
it 'request is unauthenticated' do
expect_unauthenticated_request
get url
end
end
context 'authenticated by a runner token' do
let_it_be(:runner) { create(:ci_runner) }
it 'request is authenticated' do
expect_authenticated_request
get url, params: { token: runner.token }
end
end
context 'authenticated with personal access token' do
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
it 'request is authenticated by token in query string' do
expect_authenticated_request
get url, params: { private_token: personal_access_token.token }
end
it 'request is authenticated by token in the headers' do
expect_authenticated_request
get url, headers: personal_access_token_headers(personal_access_token)
end
it 'request is authenticated by token in the OAuth headers' do
expect_authenticated_request
get url, headers: oauth_token_headers(personal_access_token)
end
it 'request is authenticated by token in basic auth' do
expect_authenticated_request
get url, headers: basic_auth_headers(user, personal_access_token)
end
end
context 'authenticated with OAuth token' do
let(:application) { Doorkeeper::Application.create!(name: "MyApp", redirect_uri: "https://app.com", owner: user) }
let(:oauth_token) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: "api") }
it 'request is authenticated by token in query string' do
expect_authenticated_request
get url, params: { access_token: oauth_token.token }
end
it 'request is authenticated by token in the headers' do
expect_authenticated_request
get url, headers: oauth_token_headers(oauth_token)
end
end
context 'authenticated with lfs token' do
it 'request is authenticated by token in basic auth' do
lfs_token = Gitlab::LfsToken.new(user)
encoded_login = ["#{user.username}:#{lfs_token.token}"].pack('m0')
expect_authenticated_request
get url, headers: { 'AUTHORIZATION' => "Basic #{encoded_login}" }
end
end
context 'authenticated with regular login' do
it 'request is authenticated after login' do
login_as(user)
expect_authenticated_request
get url
end
it 'request is authenticated by credentials in basic auth' do
encoded_login = ["#{user.username}:#{user.password}"].pack('m0')
expect_authenticated_request
get url, headers: { 'AUTHORIZATION' => "Basic #{encoded_login}" }
end
end
end
end
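
The basic-auth examples here encode credentials with Array#pack('m0'), which is strict Base64 without a trailing newline. Spelled out, the equivalent is:

require 'base64'

# Produces exactly the same header value as ["user:pass"].pack('m0').
encoded_login = Base64.strict_encode64("#{user.username}:#{user.password}")
headers = { 'AUTHORIZATION' => "Basic #{encoded_login}" }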

View File

@ -624,6 +624,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
let(:primary_pipeline) do
Ci::CreatePipelineService.new(upstream_project, upstream_project.owner, { ref: 'master' })
.execute(:push, save_on_errors: false)
.payload
end
let(:bridge) { primary_pipeline.processables.find_by(name: 'bridge-job') }

View File

@ -9,7 +9,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
let(:pipeline) { service.execute(source) }
let(:pipeline) { service.execute(source).payload }
let(:job) { pipeline.builds.find_by(name: 'job') }
before do

View File

@ -10,7 +10,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
let(:pipeline) { service.execute(source) }
let(:pipeline) { service.execute(source).payload }
before do
stub_ci_pipeline_yaml_file(config)

View File

@ -79,7 +79,7 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
end
def create_pipeline!
service.execute(:push)
service.execute(:push).payload
end
def create_gitlab_ci_yml(project, content)

View File

@ -11,7 +11,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:upstream_pipeline) { create(:ci_pipeline, project: project) }
let(:bridge) { create(:ci_bridge, pipeline: upstream_pipeline) }
subject { service.execute(:push, bridge: bridge) }
subject { service.execute(:push, bridge: bridge).payload }
context 'custom config content' do
let(:bridge) do

View File

@ -9,7 +9,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
let(:pipeline) { service.execute(source) }
let(:pipeline) { service.execute(source).payload }
before do
stub_ci_pipeline_yaml_file(config)

View File

@ -9,7 +9,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
let(:service) { described_class.new(project, user, { ref: ref }) }
subject { service.execute(:push, dry_run: true) }
subject { service.execute(:push, dry_run: true).payload }
before do
stub_ci_pipeline_yaml_file(config)

View File

@ -14,7 +14,7 @@ RSpec.describe Ci::CreatePipelineService do
end
describe '#execute' do
subject { service.execute(:push) }
subject { service.execute(:push).payload }
context 'with deployment tier' do
before do

View File

@ -9,7 +9,7 @@ RSpec.describe Ci::CreatePipelineService do
let_it_be(:user) { create(:user) }
let(:service) { described_class.new(project, user, ref: 'master') }
let(:pipeline) { service.execute(:push) }
let(:pipeline) { service.execute(:push).payload }
let(:job) { pipeline.builds.find_by(name: 'job') }
before do

View File

@ -10,7 +10,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/feature' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
let(:pipeline) { service.execute(source) }
let(:pipeline) { service.execute(source).payload }
before do
stub_ci_pipeline_yaml_file <<-EOS

View File

@ -10,7 +10,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
let(:pipeline) { service.execute(source) }
let(:pipeline) { service.execute(source).payload }
before do
stub_ci_pipeline_yaml_file(config)

View File

@ -6,7 +6,7 @@ RSpec.describe Ci::CreatePipelineService do
let_it_be(:user) { project.owner }
let(:service) { described_class.new(project, user, { ref: 'master' }) }
let(:pipeline) { service.execute(:push) }
let(:pipeline) { service.execute(:push).payload }
before do
stub_ci_pipeline_yaml_file(config)

View File

@ -30,7 +30,7 @@ RSpec.describe Ci::CreatePipelineService do
describe '#execute' do
context 'when source is a dangling build' do
subject { service.execute(:ondemand_dast_scan, content: content) }
subject { service.execute(:ondemand_dast_scan, content: content).payload }
context 'parameter config content' do
it 'creates a pipeline' do

View File

@ -369,6 +369,6 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
end
def create_pipeline!
service.execute(:push)
service.execute(:push).payload
end
end

View File

@ -8,7 +8,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
let(:pipeline) { service.execute(source) }
let(:pipeline) { service.execute(source).payload }
let(:config) do
<<~YAML

View File

@ -7,7 +7,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
let(:pipeline) { service.execute(source) }
let(:pipeline) { service.execute(source).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
context 'job:rules' do

View File

@ -49,12 +49,16 @@ RSpec.describe Ci::CreatePipelineService do
# rubocop:enable Metrics/ParameterLists
context 'valid params' do
let(:pipeline) { execute_service }
let(:pipeline) { execute_service.payload }
let(:pipeline_on_previous_commit) do
execute_service(
after: previous_commit_sha_from_ref('master')
)
).payload
end
it 'responds with success' do
expect(execute_service).to be_success
end
it 'creates a pipeline' do
@ -128,7 +132,7 @@ RSpec.describe Ci::CreatePipelineService do
merge_request_1
merge_request_2
head_pipeline = execute_service(ref: 'feature', after: nil)
head_pipeline = execute_service(ref: 'feature', after: nil).payload
expect(merge_request_1.reload.head_pipeline).to eq(head_pipeline)
expect(merge_request_2.reload.head_pipeline).to eq(head_pipeline)
@ -157,7 +161,7 @@ RSpec.describe Ci::CreatePipelineService do
target_branch: "branch_1",
source_project: project)
head_pipeline = execute_service
head_pipeline = execute_service.payload
expect(merge_request.reload.head_pipeline).not_to eq(head_pipeline)
end
@ -178,7 +182,7 @@ RSpec.describe Ci::CreatePipelineService do
source_project: project,
target_project: target_project)
head_pipeline = execute_service(ref: 'feature', after: nil)
head_pipeline = execute_service(ref: 'feature', after: nil).payload
expect(merge_request.reload.head_pipeline).to eq(head_pipeline)
end
@ -209,7 +213,7 @@ RSpec.describe Ci::CreatePipelineService do
target_branch: 'feature',
source_project: project)
head_pipeline = execute_service
head_pipeline = execute_service.payload
expect(head_pipeline).to be_persisted
expect(head_pipeline.yaml_errors).to be_present
@ -230,7 +234,7 @@ RSpec.describe Ci::CreatePipelineService do
target_branch: 'feature',
source_project: project)
head_pipeline = execute_service
head_pipeline = execute_service.payload
expect(head_pipeline).to be_skipped
expect(head_pipeline).to be_persisted
@ -260,7 +264,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'cancels running outdated pipelines', :sidekiq_inline do
pipeline_on_previous_commit.reload.run
head_pipeline = execute_service
head_pipeline = execute_service.payload
expect(pipeline_on_previous_commit.reload).to have_attributes(status: 'canceled', auto_canceled_by_id: head_pipeline.id)
end
@ -276,7 +280,8 @@ RSpec.describe Ci::CreatePipelineService do
new_pipeline = execute_service(
ref: 'refs/heads/feature',
after: previous_commit_sha_from_ref('feature')
)
).payload
pipeline
expect(new_pipeline.reload).to have_attributes(status: 'created', auto_canceled_by_id: nil)
@ -290,7 +295,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'is cancelable' do
pipeline = execute_service
pipeline = execute_service.payload
expect(pipeline.builds.find_by(name: 'rspec').interruptible).to be_nil
end
@ -303,7 +308,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'is cancelable' do
pipeline = execute_service
pipeline = execute_service.payload
expect(pipeline.builds.find_by(name: 'rspec').interruptible).to be_truthy
end
@ -316,7 +321,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'is not cancelable' do
pipeline = execute_service
pipeline = execute_service.payload
expect(pipeline.builds.find_by(name: 'rspec').interruptible).to be_falsy
end
@ -476,21 +481,25 @@ RSpec.describe Ci::CreatePipelineService do
context "skip tag if there is no build for it" do
it "creates commit if there is appropriate job" do
expect(execute_service).to be_persisted
expect(execute_service.payload).to be_persisted
end
it "creates commit if there is no appropriate job but deploy job has right ref setting" do
config = YAML.dump({ deploy: { script: "ls", only: ["master"] } })
stub_ci_pipeline_yaml_file(config)
expect(execute_service).to be_persisted
expect(execute_service.payload).to be_persisted
end
end
it 'skips creating pipeline for refs without .gitlab-ci.yml' do
it 'skips creating pipeline for refs without .gitlab-ci.yml', :aggregate_failures do
stub_ci_pipeline_yaml_file(nil)
expect(execute_service).not_to be_persisted
response = execute_service
expect(response).to be_error
expect(response.message).to eq('Missing CI config file')
expect(response.payload).not_to be_persisted
expect(Ci::Pipeline.count).to eq(0)
expect(Namespaces::OnboardingPipelineCreatedWorker).not_to receive(:perform_async)
end
@ -499,7 +508,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'creates failed pipeline' do
stub_ci_pipeline_yaml_file(ci_yaml)
pipeline = execute_service(message: message)
pipeline = execute_service(message: message).payload
expect(pipeline).to be_persisted
expect(pipeline.builds.any?).to be false
@ -516,7 +525,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'pulls it from the repository' do
pipeline = execute_service
pipeline = execute_service.payload
expect(pipeline).to be_repository_source
expect(pipeline.builds.map(&:name)).to eq ['rspec']
end
@ -530,7 +539,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'pulls it from Auto-DevOps' do
pipeline = execute_service
pipeline = execute_service.payload
expect(pipeline).to be_auto_devops_source
expect(pipeline.builds.map(&:name)).to match_array(%w[brakeman-sast build code_quality eslint-sast secret_detection semgrep-sast test])
end
@ -541,11 +550,12 @@ RSpec.describe Ci::CreatePipelineService do
stub_ci_pipeline_yaml_file(nil)
end
it 'attaches errors to the pipeline' do
pipeline = execute_service
it 'responds with error message', :aggregate_failures do
response = execute_service
expect(pipeline.errors.full_messages).to eq ['Missing CI config file']
expect(pipeline).not_to be_persisted
expect(response).to be_error
expect(response.message).to eq('Missing CI config file')
expect(response.payload).not_to be_persisted
end
end
@ -556,7 +566,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'saves error in pipeline' do
pipeline = execute_service
pipeline = execute_service.payload
expect(pipeline.yaml_errors).to include('Undefined error')
end
@ -648,7 +658,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'saves error in pipeline' do
pipeline = execute_service
pipeline = execute_service.payload
expect(pipeline.yaml_errors).to include('Undefined error')
end
@ -680,7 +690,7 @@ RSpec.describe Ci::CreatePipelineService do
ci_messages.each do |ci_message|
it "skips builds creation if the commit message is #{ci_message}" do
pipeline = execute_service(message: ci_message)
pipeline = execute_service(message: ci_message).payload
expect(pipeline).to be_persisted
expect(pipeline.builds.any?).to be false
@ -692,7 +702,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'does not skip pipeline creation' do
allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { commit_message }
pipeline = execute_service(message: commit_message)
pipeline = execute_service(message: commit_message).payload
expect(pipeline).to be_persisted
expect(pipeline.builds.first.name).to eq("rspec")
@ -724,7 +734,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'creates a pipeline in the skipped state' do
pipeline = execute_service(push_options: push_options)
pipeline = execute_service(push_options: push_options).payload
# TODO: DRY these up with "skips builds creation if the commit message"
expect(pipeline).to be_persisted
@ -739,10 +749,12 @@ RSpec.describe Ci::CreatePipelineService do
stub_ci_pipeline_yaml_file(config)
end
it 'does not create a new pipeline' do
it 'does not create a new pipeline', :aggregate_failures do
result = execute_service
expect(result).not_to be_persisted
expect(result).to be_error
expect(result.message).to eq('No stages / jobs for this pipeline.')
expect(result.payload).not_to be_persisted
expect(Ci::Build.all).to be_empty
expect(Ci::Pipeline.count).to eq(0)
end
@ -757,10 +769,11 @@ RSpec.describe Ci::CreatePipelineService do
.and_call_original
end
it 'rewinds iid' do
it 'rewinds iid', :aggregate_failures do
result = execute_service
expect(result).not_to be_persisted
expect(result).to be_error
expect(result.payload).not_to be_persisted
expect(internal_id.last_value).to eq(0)
end
end
@ -773,7 +786,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'does not create a new pipeline', :sidekiq_inline do
result = execute_service
result = execute_service.payload
expect(result).to be_persisted
expect(result.manual_actions).not_to be_empty
@ -793,7 +806,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'creates the environment with tags' do
result = execute_service
result = execute_service.payload
expect(result).to be_persisted
expect(Environment.find_by(name: "review/master")).to be_present
@ -815,7 +828,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'creates the environment with auto stop in' do
result = execute_service
result = execute_service.payload
expect(result).to be_persisted
expect(result.builds.first.options[:environment][:auto_stop_in]).to eq('1 day')
@ -835,7 +848,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'skips persisted variables in environment name' do
result = execute_service
result = execute_service.payload
expect(result).to be_persisted
expect(Environment.find_by(name: "review/id1/id2")).to be_present
@ -860,7 +873,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'stores the requested namespace' do
result = execute_service
result = execute_service.payload
build = result.builds.first
expect(result).to be_persisted
@ -876,7 +889,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'does not create an environment' do
expect do
result = execute_service
result = execute_service.payload
expect(result).to be_persisted
end.not_to change { Environment.count }
@ -896,7 +909,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'creates a pipeline with the environment' do
result = execute_service
result = execute_service.payload
expect(result).to be_persisted
expect(Environment.find_by(name: 'production')).to be_present
@ -906,7 +919,7 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'when builds with auto-retries are configured' do
let(:pipeline) { execute_service }
let(:pipeline) { execute_service.payload }
let(:rspec_job) { pipeline.builds.find_by(name: 'rspec') }
before do
@ -946,7 +959,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:resource_group_key) { 'iOS' }
it 'persists the association correctly' do
result = execute_service
result = execute_service.payload
deploy_job = result.builds.find_by_name!(:test)
resource_group = project.resource_groups.find_by_key!(resource_group_key)
@ -962,7 +975,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:resource_group_key) { '$CI_COMMIT_REF_NAME-$CI_JOB_NAME' }
it 'interpolates the variables into the key correctly' do
result = execute_service
result = execute_service.payload
expect(result).to be_persisted
expect(project.resource_groups.exists?(key: 'master-test')).to eq(true)
@ -979,7 +992,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'correctly creates builds with custom timeout value configured' do
pipeline = execute_service
pipeline = execute_service.payload
expect(pipeline).to be_persisted
expect(pipeline.builds.find_by(name: 'rspec').options[:job_timeout]).to eq 123
@ -994,7 +1007,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'is valid config' do
pipeline = execute_service
pipeline = execute_service.payload
build = pipeline.builds.first
expect(pipeline).to be_kind_of(Ci::Pipeline)
expect(pipeline).to be_valid
@ -1059,14 +1072,14 @@ RSpec.describe Ci::CreatePipelineService do
project.add_developer(user)
end
it 'does not create a pipeline' do
expect(execute_service).not_to be_persisted
it 'does not create a pipeline', :aggregate_failures do
expect(execute_service.payload).not_to be_persisted
expect(Ci::Pipeline.count).to eq(0)
end
end
context 'when user is maintainer' do
let(:pipeline) { execute_service }
let(:pipeline) { execute_service.payload }
before do
project.add_maintainer(user)
@ -1083,9 +1096,11 @@ RSpec.describe Ci::CreatePipelineService do
let(:user) {}
let(:trigger_request) { create(:ci_trigger_request) }
it 'does not create a pipeline' do
expect(execute_service(trigger_request: trigger_request))
.not_to be_persisted
it 'does not create a pipeline', :aggregate_failures do
response = execute_service(trigger_request: trigger_request)
expect(response).to be_error
expect(response.payload).not_to be_persisted
expect(Ci::Pipeline.count).to eq(0)
end
end
@ -1099,9 +1114,11 @@ RSpec.describe Ci::CreatePipelineService do
project.add_developer(user)
end
it 'does not create a pipeline' do
expect(execute_service(trigger_request: trigger_request))
.not_to be_persisted
it 'does not create a pipeline', :aggregate_failures do
response = execute_service(trigger_request: trigger_request)
expect(response).to be_error
expect(response.payload).not_to be_persisted
expect(Ci::Pipeline.count).to eq(0)
end
end
@ -1116,7 +1133,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'creates a pipeline' do
expect(execute_service(trigger_request: trigger_request))
expect(execute_service(trigger_request: trigger_request).payload)
.to be_persisted
expect(Ci::Pipeline.count).to eq(1)
end
@ -1150,7 +1167,7 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'creates a tagged pipeline' do
pipeline = execute_service(ref: 'v1.0.0')
pipeline = execute_service(ref: 'v1.0.0').payload
expect(pipeline.tag?).to be true
end
@ -1162,16 +1179,16 @@ RSpec.describe Ci::CreatePipelineService do
{ key: 'second', secret_value: 'second_world' }]
end
subject { execute_service(variables_attributes: variables_attributes) }
subject(:pipeline) { execute_service(variables_attributes: variables_attributes).payload }
it 'creates a pipeline with specified variables' do
expect(subject.variables.map { |var| var.slice(:key, :secret_value) })
expect(pipeline.variables.map { |var| var.slice(:key, :secret_value) })
.to eq variables_attributes.map(&:with_indifferent_access)
end
end
context 'when pipeline has a job with environment' do
let(:pipeline) { execute_service }
let(:pipeline) { execute_service.payload }
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
@ -1219,7 +1236,7 @@ RSpec.describe Ci::CreatePipelineService do
end
describe 'Pipeline for external pull requests' do
let(:pipeline) do
let(:response) do
execute_service(source: source,
external_pull_request: pull_request,
ref: ref_name,
@ -1227,6 +1244,8 @@ RSpec.describe Ci::CreatePipelineService do
target_sha: target_sha)
end
let(:pipeline) { response.payload }
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
@ -1275,9 +1294,11 @@ RSpec.describe Ci::CreatePipelineService do
context 'when ref is tag' do
let(:ref_name) { 'refs/tags/v1.1.0' }
it 'does not create an external pull request pipeline' do
it 'does not create an external pull request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('Tag is not included in the list and Failed to build the pipeline!')
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:tag]).to eq(["is not included in the list"])
expect(pipeline.errors[:tag]).to eq(['is not included in the list'])
end
end
@ -1296,9 +1317,11 @@ RSpec.describe Ci::CreatePipelineService do
}
end
it 'does not create a detached merge request pipeline' do
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('No stages / jobs for this pipeline.')
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base]).to eq(["No stages / jobs for this pipeline."])
expect(pipeline.errors[:base]).to eq(['No stages / jobs for this pipeline.'])
end
end
end
@ -1306,7 +1329,9 @@ RSpec.describe Ci::CreatePipelineService do
context 'when external pull request is not specified' do
let(:pull_request) { nil }
it 'does not create an external pull request pipeline' do
it 'does not create an external pull request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq("External pull request can't be blank and Failed to build the pipeline!")
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:external_pull_request]).to eq(["can't be blank"])
end
@ -1353,11 +1378,11 @@ RSpec.describe Ci::CreatePipelineService do
context 'when external pull request is not specified' do
let(:pull_request) { nil }
it 'does not create an external pull request pipeline' do
it 'does not create an external pull request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq("External pull request can't be blank and Failed to build the pipeline!")
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base])
.to eq(['Failed to build the pipeline!'])
expect(pipeline.errors[:base]).to eq(['Failed to build the pipeline!'])
end
end
end
@ -1365,7 +1390,7 @@ RSpec.describe Ci::CreatePipelineService do
end
describe 'Pipelines for merge requests' do
let(:pipeline) do
let(:response) do
execute_service(source: source,
merge_request: merge_request,
ref: ref_name,
@ -1373,6 +1398,8 @@ RSpec.describe Ci::CreatePipelineService do
target_sha: target_sha)
end
let(:pipeline) { response.payload }
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
@ -1455,9 +1482,11 @@ RSpec.describe Ci::CreatePipelineService do
context 'when ref is tag' do
let(:ref_name) { 'refs/tags/v1.1.0' }
it 'does not create a merge request pipeline' do
it 'does not create a merge request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('Tag is not included in the list and Failed to build the pipeline!')
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:tag]).to eq(["is not included in the list"])
expect(pipeline.errors[:tag]).to eq(['is not included in the list'])
end
end
@ -1497,9 +1526,10 @@ RSpec.describe Ci::CreatePipelineService do
}
end
it 'does not create a detached merge request pipeline' do
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('No stages / jobs for this pipeline.')
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base]).to eq(["No stages / jobs for this pipeline."])
end
end
end
@ -1532,11 +1562,10 @@ RSpec.describe Ci::CreatePipelineService do
target_branch: 'master')
end
it 'does not create a detached merge request pipeline' do
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('No stages / jobs for this pipeline.')
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base])
.to eq(['No stages / jobs for this pipeline.'])
end
end
end
@ -1561,11 +1590,10 @@ RSpec.describe Ci::CreatePipelineService do
target_branch: 'master')
end
it 'does not create a detached merge request pipeline' do
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('No stages / jobs for this pipeline.')
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base])
.to eq(['No stages / jobs for this pipeline.'])
end
end
end
@ -1592,11 +1620,10 @@ RSpec.describe Ci::CreatePipelineService do
target_branch: 'master')
end
it 'does not create a detached merge request pipeline' do
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('No stages / jobs for this pipeline.')
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base])
.to eq(['No stages / jobs for this pipeline.'])
end
end
end
@ -1621,11 +1648,10 @@ RSpec.describe Ci::CreatePipelineService do
target_branch: 'master')
end
it 'does not create a detached merge request pipeline' do
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq('No stages / jobs for this pipeline.')
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base])
.to eq(['No stages / jobs for this pipeline.'])
end
end
end
@ -1666,7 +1692,8 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'when needs is used' do
let(:pipeline) { execute_service }
let(:response) { execute_service }
let(:pipeline) { response.payload }
let(:config) do
{
@ -1712,7 +1739,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref_name) { 'refs/heads/feature' }
shared_examples 'has errors' do
it 'contains the expected errors' do
it 'contains the expected errors', :aggregate_failures do
expect(pipeline.builds).to be_empty
error_message = "'test_a' job needs 'build_a' job, but 'build_a' is not in any previous stage"
@ -1723,9 +1750,12 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'when save_on_errors is enabled' do
let(:pipeline) { execute_service(save_on_errors: true) }
let(:response) { execute_service(save_on_errors: true) }
let(:pipeline) { response.payload }
it 'does create a pipeline as test_a depends on build_a' do
it 'does create a pipeline as test_a depends on build_a', :aggregate_failures do
expect(response).to be_error
expect(response.message).to eq("'test_a' job needs 'build_a' job, but 'build_a' is not in any previous stage")
expect(pipeline).to be_persisted
end
@ -1733,9 +1763,11 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'when save_on_errors is disabled' do
let(:pipeline) { execute_service(save_on_errors: false) }
let(:response) { execute_service(save_on_errors: false) }
let(:pipeline) { response.payload }
it 'does not create a pipeline as test_a depends on build_a' do
it 'does not create a pipeline as test_a depends on build_a', :aggregate_failures do
expect(response).to be_error
expect(pipeline).not_to be_persisted
end
@ -1755,7 +1787,8 @@ RSpec.describe Ci::CreatePipelineService do
context 'when rules are used' do
let(:ref_name) { 'refs/heads/master' }
let(:pipeline) { execute_service }
let(:response) { execute_service }
let(:pipeline) { response.payload }
let(:build_names) { pipeline.builds.pluck(:name) }
let(:regular_job) { find_job('regular-job') }
let(:rules_job) { find_job('rules-job') }
@ -2312,8 +2345,9 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'when inside freeze period' do
it 'does not create the pipeline' do
it 'does not create the pipeline', :aggregate_failures do
Timecop.freeze(2020, 4, 10, 23, 1) do
expect(response).to be_error
expect(pipeline).not_to be_persisted
end
end
@ -2343,7 +2377,8 @@ RSpec.describe Ci::CreatePipelineService do
context 'with no matches' do
let(:ref_name) { 'refs/heads/feature' }
it 'does not create a pipeline' do
it 'does not create a pipeline', :aggregate_failures do
expect(response).to be_error
expect(pipeline).not_to be_persisted
end
end
@ -2351,7 +2386,7 @@ RSpec.describe Ci::CreatePipelineService do
context 'with workflow rules with pipeline variables' do
let(:pipeline) do
execute_service(variables_attributes: variables_attributes)
execute_service(variables_attributes: variables_attributes).payload
end
let(:config) do
@ -2379,7 +2414,8 @@ RSpec.describe Ci::CreatePipelineService do
context 'with no matches' do
let(:variables_attributes) { {} }
it 'does not create a pipeline' do
it 'does not create a pipeline', :aggregate_failures do
expect(response).to be_error
expect(pipeline).not_to be_persisted
end
end
@ -2389,7 +2425,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:pipeline) do
execute_service do |pipeline|
pipeline.variables.build(variables)
end
end.payload
end
let(:config) do
@ -2447,7 +2483,8 @@ RSpec.describe Ci::CreatePipelineService do
context 'with no matches' do
let(:variables) { {} }
it 'does not create a pipeline' do
it 'does not create a pipeline', :aggregate_failures do
expect(response).to be_error
expect(pipeline).not_to be_persisted
end
@ -2475,7 +2512,8 @@ RSpec.describe Ci::CreatePipelineService do
EOY
end
it 'does not create a pipeline' do
it 'does not create a pipeline', :aggregate_failures do
expect(response).to be_error
expect(pipeline).not_to be_persisted
end
end
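
Most negative examples in this file gain the :aggregate_failures metadata, which is stock RSpec: every expectation in the example still runs after one fails, so an error response can be checked for status, message and payload in a single report. The recurring shape, taken from the hunks above:

it 'does not create a pipeline', :aggregate_failures do
  expect(response).to be_error
  expect(response.message).to eq('No stages / jobs for this pipeline.')
  expect(response.payload).not_to be_persisted
end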

View File

@ -6,14 +6,13 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
describe '#execute' do
let_it_be(:project) { create(:project, :auto_devops, :repository) }
let_it_be(:user) { create(:user) }
let(:pull_request) { create(:external_pull_request, project: project) }
let_it_be_with_reload(:pull_request) { create(:external_pull_request, project: project) }
before do
project.add_maintainer(user)
end
subject { described_class.new(project, user).execute(pull_request) }
subject(:response) { described_class.new(project, user).execute(pull_request) }
context 'when pull request is open' do
before do
@ -28,17 +27,20 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
pull_request.update!(source_branch: source_branch.name, source_sha: source_branch.target)
end
it 'creates a pipeline for external pull request' do
expect(subject).to be_valid
expect(subject).to be_persisted
expect(subject).to be_external_pull_request_event
expect(subject).to eq(project.ci_pipelines.last)
expect(subject.external_pull_request).to eq(pull_request)
expect(subject.user).to eq(user)
expect(subject.status).to eq('created')
expect(subject.ref).to eq(pull_request.source_branch)
expect(subject.sha).to eq(pull_request.source_sha)
expect(subject.source_sha).to eq(pull_request.source_sha)
it 'creates a pipeline for external pull request', :aggregate_failures do
pipeline = response.payload
expect(response).to be_success
expect(pipeline).to be_valid
expect(pipeline).to be_persisted
expect(pipeline).to be_external_pull_request_event
expect(pipeline).to eq(project.ci_pipelines.last)
expect(pipeline.external_pull_request).to eq(pull_request)
expect(pipeline.user).to eq(user)
expect(pipeline.status).to eq('created')
expect(pipeline.ref).to eq(pull_request.source_branch)
expect(pipeline.sha).to eq(pull_request.source_sha)
expect(pipeline.source_sha).to eq(pull_request.source_sha)
end
end
@ -50,10 +52,12 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
pull_request.update!(source_branch: source_branch.name, source_sha: commit.sha)
end
it 'does nothing' do
it 'does nothing', :aggregate_failures do
expect(Ci::CreatePipelineService).not_to receive(:new)
expect(subject).to be_nil
expect(response).to be_error
expect(response.message).to eq('The source sha is not the head of the source branch')
expect(response.payload).to be_nil
end
end
end
@ -63,10 +67,12 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
pull_request.update!(status: :closed)
end
it 'does nothing' do
it 'does nothing', :aggregate_failures do
expect(Ci::CreatePipelineService).not_to receive(:new)
expect(subject).to be_nil
expect(response).to be_error
expect(response.message).to eq('The pull request is not opened')
expect(response.payload).to be_nil
end
end
end

View File

@ -871,7 +871,7 @@ RSpec.shared_examples 'Pipeline Processing Service' do
end
let(:pipeline) do
Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push)
Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
end
before do

View File

@ -10,7 +10,7 @@ RSpec.shared_context 'Pipeline Processing Service Tests With Yaml' do
with_them do
let(:test_file) { YAML.load_file(test_file_path) }
let(:pipeline) { Ci::CreatePipelineService.new(project, user, ref: 'master').execute(:pipeline) }
let(:pipeline) { Ci::CreatePipelineService.new(project, user, ref: 'master').execute(:pipeline).payload }
before do
stub_ci_pipeline_yaml_file(YAML.dump(test_file['config']))

View File

@ -24,9 +24,11 @@ RSpec.describe Ci::PipelineTriggerService do
context 'when the pipeline was not created successfully' do
let(:fail_pipeline) do
receive(:execute).and_wrap_original do |original, *args|
pipeline = original.call(*args)
response = original.call(*args)
pipeline = response.payload
pipeline.update!(failure_reason: 'unknown_failure')
pipeline
response
end
end

View File

@ -18,7 +18,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
end
describe '#execute' do
subject { service.execute(merge_request) }
subject(:response) { service.execute(merge_request) }
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
@ -39,14 +39,15 @@ RSpec.describe MergeRequests::CreatePipelineService do
let(:source_project) { project }
it 'creates a detached merge request pipeline' do
expect { subject }.to change { Ci::Pipeline.count }.by(1)
expect { response }.to change { Ci::Pipeline.count }.by(1)
expect(subject).to be_persisted
expect(subject).to be_detached_merge_request_pipeline
expect(response).to be_success
expect(response.payload).to be_persisted
expect(response.payload).to be_detached_merge_request_pipeline
end
it 'defaults to merge_request_event' do
expect(subject.source).to eq('merge_request_event')
expect(response.payload.source).to eq('merge_request_event')
end
context 'with fork merge request' do
@ -58,7 +59,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
let(:actor) { user }
it 'creates a pipeline in the target project' do
expect(subject.project).to eq(project)
expect(response.payload.project).to eq(project)
end
context 'when source branch is protected' do
@ -66,7 +67,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
let!(:protected_branch) { create(:protected_branch, name: '*', project: project) }
it 'creates a pipeline in the source project' do
expect(subject.project).to eq(source_project)
expect(response.payload.project).to eq(source_project)
end
end
@ -74,7 +75,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
let!(:protected_branch) { create(:protected_branch, :developers_can_merge, name: '*', project: project) }
it 'creates a pipeline in the target project' do
expect(subject.project).to eq(project)
expect(response.payload.project).to eq(project)
end
end
end
@ -85,7 +86,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
end
it 'creates a pipeline in the source project' do
expect(subject.project).to eq(source_project)
expect(response.payload.project).to eq(source_project)
end
end
end
@ -99,15 +100,16 @@ RSpec.describe MergeRequests::CreatePipelineService do
end
it 'creates a pipeline in the source project' do
expect(subject.project).to eq(source_project)
expect(response.payload.project).to eq(source_project)
end
end
context 'when actor does not have permission to create pipelines' do
let(:actor) { create(:user) }
it 'returns nothing' do
expect(subject.full_error_messages).to include('Insufficient permissions to create a new pipeline')
it 'responds with error' do
expect(response).to be_error
expect(response.message).to include('Insufficient permissions to create a new pipeline')
end
end
end
@ -139,7 +141,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
end
it 'does not create a pipeline' do
expect { subject }.not_to change { Ci::Pipeline.count }
expect { response }.not_to change { Ci::Pipeline.count }
end
end
@ -154,7 +156,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
end
it 'does not create a pipeline' do
expect { subject }.not_to change { Ci::Pipeline.count }
expect { response }.not_to change { Ci::Pipeline.count }
end
end
end
@ -170,11 +172,12 @@ RSpec.describe MergeRequests::CreatePipelineService do
}
end
it 'creates a detached merge request pipeline' do
expect { subject }.to change { Ci::Pipeline.count }.by(1)
it 'creates a detached merge request pipeline', :aggregate_failures do
expect { response }.to change { Ci::Pipeline.count }.by(1)
expect(subject).to be_persisted
expect(subject).to be_detached_merge_request_pipeline
expect(response).to be_success
expect(response.payload).to be_persisted
expect(response.payload).to be_detached_merge_request_pipeline
end
end
@ -188,10 +191,25 @@ RSpec.describe MergeRequests::CreatePipelineService do
}
end
it 'does not create a pipeline' do
expect { subject }.not_to change { Ci::Pipeline.count }
it 'does not create a pipeline', :aggregate_failures do
expect { response }.not_to change { Ci::Pipeline.count }
expect(response).to be_error
end
end
end
context 'when merge request has no commits' do
before do
allow(merge_request).to receive(:has_no_commits?).and_return(true)
end
it 'does not create a pipeline', :aggregate_failures do
expect { response }.not_to change { Ci::Pipeline.count }
expect(response).to be_error
expect(response.message).to eq('Cannot create a pipeline for this merge request.')
expect(response.payload).to be_nil
end
end
end
end
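
The new 'no commits' context implies a guard clause in MergeRequests::CreatePipelineService. A sketch of the assumed shape, with the message copied verbatim from the example:

# Assumed guard; only the behaviour, not the code, is shown in this diff.
if merge_request.has_no_commits?
  return ServiceResponse.error(message: 'Cannot create a pipeline for this merge request.')
end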

View File

@ -34,10 +34,24 @@ RSpec.describe Projects::LfsPointers::LfsObjectDownloadListService do
subject.execute
end
it 'retrieves the download links of non existent objects' do
expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute).with(all_oids)
context 'when no LFS objects exist' do
before do
project.lfs_objects.delete_all
end
subject.execute
it 'retrieves all LFS objects' do
expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute).with(all_oids)
subject.execute
end
end
context 'when some LFS objects already exist' do
it 'retrieves the download links of non-existent objects' do
expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute).with(oids)
subject.execute
end
end
end
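
The restructured LFS examples split one behaviour into two contexts: with no LfsObject rows the service requests download links for every pointer (all_oids), otherwise only for the missing ones (oids). The selection they describe amounts to:

# Assumed fixture shape: oid => size hashes, as the link-list service receives them.
missing_oids = all_oids.reject { |oid, _size| project.lfs_objects.exists?(oid: oid) }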

View File

@ -68,20 +68,6 @@ RSpec.shared_examples ::Security::JobsFinder do |default_job_types|
end
end
context 'when using legacy CI build metadata config storage' do
before do
stub_feature_flags(ci_build_metadata_config: false)
end
it_behaves_like 'JobsFinder core functionality'
end
context 'when using the new CI build metadata config storage' do
before do
stub_feature_flags(ci_build_metadata_config: true)
end
it_behaves_like 'JobsFinder core functionality'
end
it_behaves_like 'JobsFinder core functionality'
end
end

View File

@ -8,17 +8,17 @@ RSpec.describe 'Every Sidekiq worker' do
end
it 'does not use the default queue' do
expect(workers_without_defaults.map(&:queue)).not_to include('default')
expect(workers_without_defaults.map(&:generated_queue_name)).not_to include('default')
end
it 'uses the cronjob queue when the worker runs as a cronjob' do
expect(Gitlab::SidekiqConfig.cron_workers.map(&:queue)).to all(start_with('cronjob:'))
expect(Gitlab::SidekiqConfig.cron_workers.map(&:generated_queue_name)).to all(start_with('cronjob:'))
end
it 'has its queue in Gitlab::SidekiqConfig::QUEUE_CONFIG_PATHS', :aggregate_failures do
file_worker_queues = Gitlab::SidekiqConfig.worker_queues.to_set
worker_queues = Gitlab::SidekiqConfig.workers.map(&:queue).to_set
worker_queues = Gitlab::SidekiqConfig.workers.map(&:generated_queue_name).to_set
worker_queues << ActionMailer::MailDeliveryJob.new.queue_name
worker_queues << 'default'
@ -33,7 +33,7 @@ RSpec.describe 'Every Sidekiq worker' do
config_queues = Gitlab::SidekiqConfig.config_queues.to_set
Gitlab::SidekiqConfig.workers.each do |worker|
queue = worker.queue
queue = worker.generated_queue_name
queue_namespace = queue.split(':').first
expect(config_queues).to include(queue).or(include(queue_namespace))