Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-11-04 09:12:56 +00:00
parent 5bfd7a344b
commit 191020103b
68 changed files with 1224 additions and 195 deletions

View File

@ -77,7 +77,10 @@ Are there any other stages or teams involved that need to be kept in the loop?
### Global rollout on production
All `/chatops` commands that target production should be done in the `#production` slack channel for visibility.
For visibility, all `/chatops` commands that target production should be:
- Executed in the `#production` slack channel.
- Cross-posted (with the command results) to the responsible team's slack channel (`#g_TEAM_NAME`).
- [ ] [Incrementally roll out](https://docs.gitlab.com/ee/development/feature_flags/controls.html#process) the feature.
- If the feature flag in code has [an actor](https://docs.gitlab.com/ee/development/feature_flags/#feature-actors), perform **actor-based** rollout.

View File

@ -68,6 +68,11 @@ class Explore::ProjectsController < Explore::ApplicationController
end
# rubocop: enable CodeReuse/ActiveRecord
def topics
load_project_counts
load_topics
end
def topic
load_topic
@ -95,6 +100,10 @@ class Explore::ProjectsController < Explore::ApplicationController
prepare_projects_for_rendering(projects)
end
def load_topics
@topics = Projects::TopicsFinder.new(params: params.permit(:search)).execute.page(params[:page]).without_count
end
def load_topic
@topic = Projects::Topic.find_by_name(params[:topic_name])
end

View File

@ -11,8 +11,8 @@ class Groups::DependencyProxyForContainersController < ::Groups::DependencyProxy
before_action :ensure_token_granted!, only: [:blob, :manifest]
before_action :ensure_feature_enabled!
before_action :verify_workhorse_api!, only: [:authorize_upload_blob, :upload_blob]
skip_before_action :verify_authenticity_token, only: [:authorize_upload_blob, :upload_blob]
before_action :verify_workhorse_api!, only: [:authorize_upload_blob, :upload_blob, :authorize_upload_manifest, :upload_manifest]
skip_before_action :verify_authenticity_token, only: [:authorize_upload_blob, :upload_blob, :authorize_upload_manifest, :upload_manifest]
attr_reader :token
@ -22,20 +22,11 @@ class Groups::DependencyProxyForContainersController < ::Groups::DependencyProxy
result = DependencyProxy::FindOrCreateManifestService.new(group, image, tag, token).execute
if result[:status] == :success
response.headers['Docker-Content-Digest'] = result[:manifest].digest
response.headers['Content-Length'] = result[:manifest].size
response.headers['Docker-Distribution-Api-Version'] = DependencyProxy::DISTRIBUTION_API_VERSION
response.headers['Etag'] = "\"#{result[:manifest].digest}\""
content_type = result[:manifest].content_type
event_name = tracking_event_name(object_type: :manifest, from_cache: result[:from_cache])
track_package_event(event_name, :dependency_proxy, namespace: group, user: auth_user)
send_upload(
result[:manifest].file,
proxy: true,
redirect_params: { query: { 'response-content-type' => content_type } },
send_params: { type: content_type }
)
if result[:manifest]
send_manifest(result[:manifest], from_cache: result[:from_cache])
else
send_dependency(manifest_header, DependencyProxy::Registry.manifest_url(image, tag), manifest_file_name)
end
else
render status: result[:http_status], json: result[:message]
end
@ -59,7 +50,7 @@ class Groups::DependencyProxyForContainersController < ::Groups::DependencyProxy
def authorize_upload_blob
set_workhorse_internal_api_content_type
render json: DependencyProxy::FileUploader.workhorse_authorize(has_length: false, maximum_size: 5.gigabytes)
render json: DependencyProxy::FileUploader.workhorse_authorize(has_length: false, maximum_size: DependencyProxy::Blob::MAX_FILE_SIZE)
end
def upload_blob
@ -75,6 +66,27 @@ class Groups::DependencyProxyForContainersController < ::Groups::DependencyProxy
head :ok
end
def authorize_upload_manifest
set_workhorse_internal_api_content_type
render json: DependencyProxy::FileUploader.workhorse_authorize(has_length: false, maximum_size: DependencyProxy::Manifest::MAX_FILE_SIZE)
end
def upload_manifest
@group.dependency_proxy_manifests.create!(
file_name: manifest_file_name,
content_type: request.headers[Gitlab::Workhorse::SEND_DEPENDENCY_CONTENT_TYPE_HEADER],
digest: request.headers['Docker-Content-Digest'],
file: params[:file],
size: params[:file].size
)
event_name = tracking_event_name(object_type: :manifest, from_cache: false)
track_package_event(event_name, :dependency_proxy, namespace: group, user: auth_user)
head :ok
end
private
def blob_via_workhorse
@ -86,14 +98,38 @@ class Groups::DependencyProxyForContainersController < ::Groups::DependencyProxy
send_upload(blob.file)
else
send_dependency(token, DependencyProxy::Registry.blob_url(image, params[:sha]), blob_file_name)
send_dependency(token_header, DependencyProxy::Registry.blob_url(image, params[:sha]), blob_file_name)
end
end
def send_manifest(manifest, from_cache:)
# Technical debt: change to read_at https://gitlab.com/gitlab-org/gitlab/-/issues/341536
manifest.touch
response.headers['Docker-Content-Digest'] = manifest.digest
response.headers['Content-Length'] = manifest.size
response.headers['Docker-Distribution-Api-Version'] = DependencyProxy::DISTRIBUTION_API_VERSION
response.headers['Etag'] = "\"#{manifest.digest}\""
content_type = manifest.content_type
event_name = tracking_event_name(object_type: :manifest, from_cache: from_cache)
track_package_event(event_name, :dependency_proxy, namespace: group, user: auth_user)
send_upload(
manifest.file,
proxy: true,
redirect_params: { query: { 'response-content-type' => content_type } },
send_params: { type: content_type }
)
end
def blob_file_name
@blob_file_name ||= params[:sha].sub('sha256:', '') + '.gz'
end
def manifest_file_name
@manifest_file_name ||= "#{image}:#{tag}.json"
end
def group
strong_memoize(:group) do
Group.find_by_full_path(params[:group_id], follow_redirects: true)
@ -137,4 +173,12 @@ class Groups::DependencyProxyForContainersController < ::Groups::DependencyProxy
render status: result[:http_status], json: result[:message]
end
end
def token_header
{ Authorization: ["Bearer #{token}"] }
end
def manifest_header
token_header.merge(Accept: ::ContainerRegistry::Client::ACCEPTED_TYPES)
end
end

View File

@ -267,6 +267,7 @@ module Nav
builder.add_primary_menu_item(id: 'your', title: _('Your projects'), href: dashboard_projects_path)
builder.add_primary_menu_item(id: 'starred', title: _('Starred projects'), href: starred_dashboard_projects_path)
builder.add_primary_menu_item(id: 'explore', title: _('Explore projects'), href: explore_root_path)
builder.add_primary_menu_item(id: 'topics', title: _('Explore topics'), href: topics_explore_projects_path)
builder.add_secondary_menu_item(id: 'create', title: _('Create new project'), href: new_project_path)
builder.build
end

View File

@ -41,8 +41,8 @@ module WorkhorseHelper
head :ok
end
def send_dependency(token, url, filename)
headers.store(*Gitlab::Workhorse.send_dependency(token, url))
def send_dependency(dependency_headers, url, filename)
headers.store(*Gitlab::Workhorse.send_dependency(dependency_headers, url))
headers['Content-Disposition'] =
ActionDispatch::Http::ContentDisposition.format(disposition: 'attachment', filename: filename)
headers['Content-Type'] = 'application/gzip'

View File

@ -11,6 +11,12 @@ module Analytics
alias_attribute :state, :state_id
enum state: Issue.available_states, _suffix: true
scope :assigned_to, ->(user) do
assignees_class = IssueAssignee
condition = assignees_class.where(user_id: user).where(arel_table[:issue_id].eq(assignees_class.arel_table[:issue_id]))
where(condition.arel.exists)
end
def self.issuable_id_column
:issue_id
end

View File

@ -11,6 +11,12 @@ module Analytics
alias_attribute :state, :state_id
enum state: MergeRequest.available_states, _suffix: true
scope :assigned_to, ->(user) do
assignees_class = MergeRequestAssignee
condition = assignees_class.where(user_id: user).where(arel_table[:merge_request_id].eq(assignees_class.arel_table[:merge_request_id]))
where(condition.arel.exists)
end
def self.issuable_id_column
:merge_request_id
end

View File

@ -5,6 +5,19 @@ module Analytics
module StageEventModel
extend ActiveSupport::Concern
included do
scope :by_stage_event_hash_id, ->(id) { where(stage_event_hash_id: id) }
scope :by_project_id, ->(id) { where(project_id: id) }
scope :by_group_id, ->(id) { where(group_id: id) }
scope :end_event_timestamp_after, -> (date) { where(arel_table[:end_event_timestamp].gteq(date)) }
scope :end_event_timestamp_before, -> (date) { where(arel_table[:end_event_timestamp].lteq(date)) }
scope :start_event_timestamp_after, -> (date) { where(arel_table[:start_event_timestamp].gteq(date)) }
scope :start_event_timestamp_before, -> (date) { where(arel_table[:start_event_timestamp].lteq(date)) }
scope :authored, ->(user) { where(author_id: user) }
scope :with_milestone_id, ->(milestone_id) { where(milestone_id: milestone_id) }
scope :end_event_is_not_happened_yet, -> { where(end_event_timestamp: nil) }
end
class_methods do
def upsert_data(data)
upsert_values = data.map do |row|

View File

@ -7,6 +7,8 @@ class DependencyProxy::Blob < ApplicationRecord
belongs_to :group
MAX_FILE_SIZE = 5.gigabytes.freeze
validates :group, presence: true
validates :file, presence: true
validates :file_name, presence: true

View File

@ -7,6 +7,8 @@ class DependencyProxy::Manifest < ApplicationRecord
belongs_to :group
MAX_FILE_SIZE = 10.megabytes.freeze
validates :group, presence: true
validates :file, presence: true
validates :file_name, presence: true
@ -14,10 +16,7 @@ class DependencyProxy::Manifest < ApplicationRecord
mount_file_store_uploader DependencyProxy::FileUploader
def self.find_or_initialize_by_file_name_or_digest(file_name:, digest:)
result = find_by(file_name: file_name) || find_by(digest: digest)
return result if result
new(file_name: file_name, digest: digest)
def self.find_by_file_name_or_digest(file_name:, digest:)
find_by(file_name: file_name) || find_by(digest: digest)
end
end

View File

@ -14,18 +14,18 @@ module DependencyProxy
def execute
@manifest = @group.dependency_proxy_manifests
.active
.find_or_initialize_by_file_name_or_digest(file_name: @file_name, digest: @tag)
.find_by_file_name_or_digest(file_name: @file_name, digest: @tag)
head_result = DependencyProxy::HeadManifestService.new(@image, @tag, @token).execute
if cached_manifest_matches?(head_result)
@manifest.touch
return respond if cached_manifest_matches?(head_result)
return success(manifest: @manifest, from_cache: true)
if Feature.enabled?(:dependency_proxy_manifest_workhorse, @group, default_enabled: :yaml)
success(manifest: nil, from_cache: false)
else
pull_new_manifest
respond(from_cache: false)
end
pull_new_manifest
respond(from_cache: false)
rescue Timeout::Error, *Gitlab::HTTP::HTTP_ERRORS
respond
end
@ -34,12 +34,19 @@ module DependencyProxy
def pull_new_manifest
DependencyProxy::PullManifestService.new(@image, @tag, @token).execute_with_manifest do |new_manifest|
@manifest.update!(
params = {
file_name: @file_name,
content_type: new_manifest[:content_type],
digest: new_manifest[:digest],
file: new_manifest[:file],
size: new_manifest[:file].size
)
}
if @manifest
@manifest.update!(params)
else
@manifest = @group.dependency_proxy_manifests.create!(params)
end
end
end
@ -50,10 +57,7 @@ module DependencyProxy
end
def respond(from_cache: true)
if @manifest.persisted?
# Technical debt: change to read_at https://gitlab.com/gitlab-org/gitlab/-/issues/341536
@manifest.touch if from_cache
if @manifest
success(manifest: @manifest, from_cache: from_cache)
else
error('Failed to download the manifest from the external registry', 503)

View File

@ -14,19 +14,7 @@
.top-area.scrolling-tabs-container.inner-page-scroll-tabs
.fade-left= sprite_icon('chevron-lg-left', size: 12)
.fade-right= sprite_icon('chevron-lg-right', size: 12)
%ul.nav-links.scrolling-tabs.mobile-separator.nav.nav-tabs{ class: ('border-0' if feature_project_list_filter_bar) }
= nav_link(page: [dashboard_projects_path, root_path]) do
= link_to dashboard_projects_path, class: 'shortcuts-activity', data: {placement: 'right'} do
= _("Your projects")
%span.badge.gl-tab-counter-badge.badge-muted.badge-pill.gl-badge.sm= limited_counter_with_delimiter(@total_user_projects_count)
= nav_link(page: starred_dashboard_projects_path) do
= link_to starred_dashboard_projects_path, data: {placement: 'right'} do
= _("Starred projects")
%span.badge.gl-tab-counter-badge.badge-muted.badge-pill.gl-badge.sm= limited_counter_with_delimiter(@total_starred_projects_count)
= nav_link(page: [explore_root_path, trending_explore_projects_path, starred_explore_projects_path, explore_projects_path]) do
= link_to explore_root_path, data: {placement: 'right'} do
= _("Explore projects")
= render_if_exists "dashboard/removed_projects_tab", removed_projects_count: @removed_projects_count
= render 'dashboard/projects_nav'
- unless feature_project_list_filter_bar
.nav-controls
= render 'shared/projects/search_form'

View File

@ -0,0 +1,18 @@
- feature_project_list_filter_bar = Feature.enabled?(:project_list_filter_bar)
%ul.nav-links.scrolling-tabs.mobile-separator.nav.nav-tabs{ class: ('gl-border-0!' if feature_project_list_filter_bar) }
= nav_link(page: [dashboard_projects_path, root_path]) do
= link_to dashboard_projects_path, class: 'shortcuts-activity', data: {placement: 'right'} do
= _("Your projects")
%span.badge.gl-tab-counter-badge.badge-muted.badge-pill.gl-badge.sm= limited_counter_with_delimiter(@total_user_projects_count)
= nav_link(page: starred_dashboard_projects_path) do
= link_to starred_dashboard_projects_path, data: {placement: 'right'} do
= _("Starred projects")
%span.badge.gl-tab-counter-badge.badge-muted.badge-pill.gl-badge.sm= limited_counter_with_delimiter(@total_starred_projects_count)
= nav_link(page: [explore_root_path, trending_explore_projects_path, starred_explore_projects_path, explore_projects_path]) do
= link_to explore_root_path, data: {placement: 'right'} do
= _("Explore projects")
= nav_link(page: topics_explore_projects_path) do
= link_to topics_explore_projects_path, data: {placement: 'right'} do
= _("Explore topics")
= render_if_exists "dashboard/removed_projects_tab", removed_projects_count: @removed_projects_count

View File

@ -0,0 +1,12 @@
- @hide_top_links = true
- page_title _("Topics")
- header_title _("Topics"), topics_explore_projects_path
= render_dashboard_ultimate_trial(current_user)
- if current_user
= render 'explore/topics/head'
- else
= render 'explore/head'
= render partial: 'shared/topics/list'

View File

@ -0,0 +1,9 @@
.page-title-holder.d-flex.align-items-center
%h1.page-title= _('Projects')
.top-area.scrolling-tabs-container.inner-page-scroll-tabs
.fade-left= sprite_icon('chevron-lg-left', size: 12)
.fade-right= sprite_icon('chevron-lg-right', size: 12)
= render 'dashboard/projects_nav'
.nav-controls
= render 'shared/topics/search_form'

View File

@ -1,5 +1,6 @@
- @hide_breadcrumbs = true
- @hide_top_links = true
- @content_class = 'limit-container-width'
- page_title _('New branch')
.js-jira-connect-create-branch{ data: @new_branch_data }

View File

@ -0,0 +1,9 @@
- remote = local_assigns.fetch(:remote, false)
- if @topics.empty?
= render 'shared/empty_states/topics'
- else
.row.gl-mt-3
= render partial: 'shared/topics/topic', collection: @topics
= paginate_collection @topics, remote: remote

View File

@ -0,0 +1,16 @@
- max_topic_name_length = 30
- detail_page_link = topic_explore_projects_path(topic_name: topic.name)
.col-lg-3.col-md-4.col-sm-12
.gl-card.gl-mb-5
.gl-card-body.gl-display-flex.gl-align-items-center
.avatar-container.rect-avatar.s40.gl-flex-shrink-0
= link_to detail_page_link do
= topic_icon(topic, class: "avatar s40")
= link_to detail_page_link do
- if topic.name.length > max_topic_name_length
%h5.str-truncated.has-tooltip{ title: topic.name }
= truncate(topic.name, length: max_topic_name_length)
- else
%h5
= topic.name

View File

@ -1,8 +1,8 @@
---
name: new_customersdot_staging_url
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/71827
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/342513
name: dependency_proxy_manifest_workhorse
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/73033
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/344216
milestone: '14.4'
type: development
group: group::fulfillment
group: group::package
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: use_vsa_aggregated_tables
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/72978
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/343429
milestone: '14.5'
type: development
group: group::optimize
default_enabled: false

View File

@ -5,6 +5,7 @@ namespace :explore do
collection do
get :trending
get :starred
get :topics
get 'topics/:topic_name', action: :topic, as: :topic, constraints: { topic_name: /.+/ }
end
end

View File

@ -155,5 +155,7 @@ scope format: false do
get 'v2/*group_id/dependency_proxy/containers/*image/blobs/:sha' => 'groups/dependency_proxy_for_containers#blob' # rubocop:todo Cop/PutGroupRoutesUnderScope
post 'v2/*group_id/dependency_proxy/containers/*image/blobs/:sha/upload/authorize' => 'groups/dependency_proxy_for_containers#authorize_upload_blob' # rubocop:todo Cop/PutGroupRoutesUnderScope
post 'v2/*group_id/dependency_proxy/containers/*image/blobs/:sha/upload' => 'groups/dependency_proxy_for_containers#upload_blob' # rubocop:todo Cop/PutGroupRoutesUnderScope
post 'v2/*group_id/dependency_proxy/containers/*image/manifests/*tag/upload/authorize' => 'groups/dependency_proxy_for_containers#authorize_upload_manifest' # rubocop:todo Cop/PutGroupRoutesUnderScope
post 'v2/*group_id/dependency_proxy/containers/*image/manifests/*tag/upload' => 'groups/dependency_proxy_for_containers#upload_manifest' # rubocop:todo Cop/PutGroupRoutesUnderScope
end
end

View File

@ -831,6 +831,17 @@ Set the limit to `0` to allow any file size.
When asking for versions of a given NuGet package name, the GitLab Package Registry returns a maximum of 300 versions.
## Dependency Proxy Limits
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/6396) in GitLab 14.5.
The maximum file size for an image cached in the
[Dependency Proxy](../user/packages/dependency_proxy/index.md)
varies by file type:
- Image blob: 5 GB
- Image manifest: 10 MB
## Branch retargeting on merge
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/320902) in GitLab 13.9.

View File

@ -12,9 +12,10 @@ full list of reference architectures, see
> - **Supported users (approximate):** 10,000
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Cloud Native Hybrid:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Test requests per second (RPS) rates:** API: 200 RPS, Web: 20 RPS, Git (Pull): 20 RPS, Git (Push): 4 RPS
> - **[Latest 10k weekly performance testing results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/10k)**
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Performance tested daily with the [GitLab Performance Tool](https://gitlab.com/gitlab-org/quality/performance)**:
> - **Test requests per second (RPS) rates:** API: 200 RPS, Web: 20 RPS, Git (Pull): 20 RPS, Git (Push): 4 RPS
> - **[Latest Results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/10k)**
| Service | Nodes | Configuration | GCP | AWS | Azure |
|-----------------------------------------------------|-------------|-------------------------|------------------|--------------|-----------|

View File

@ -20,8 +20,9 @@ many organizations.
> follow a modified [3K reference architecture](3k_users.md#supported-modifications-for-lower-user-counts-ha).
> - **Cloud Native Hybrid:** No. For a cloud native hybrid environment, you
> can follow a [modified hybrid reference architecture](#cloud-native-hybrid-reference-architecture-with-helm-charts).
> - **Test requests per second (RPS) rates:** API: 20 RPS, Web: 2 RPS, Git (Pull): 2 RPS, Git (Push): 1 RPS
> - **[Latest 1k weekly performance testing results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/1k)**
> - **Performance tested daily with the [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance)**:
> - **Test requests per second (RPS) rates:** API: 20 RPS, Web: 2 RPS, Git (Pull): 2 RPS, Git (Push): 1 RPS
> - **[Latest Results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/1k)**
| Users | Configuration | GCP | AWS | Azure |
|--------------|-------------------------|----------------|--------------|----------|

View File

@ -12,9 +12,10 @@ full list of reference architectures, see
> - **Supported users (approximate):** 25,000
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Cloud Native Hybrid:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Test requests per second (RPS) rates:** API: 500 RPS, Web: 50 RPS, Git (Pull): 50 RPS, Git (Push): 10 RPS
> - **[Latest 25k weekly performance testing results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/25k)**
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Performance tested weekly with the [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance)**:
> - **Test requests per second (RPS) rates:** API: 500 RPS, Web: 50 RPS, Git (Pull): 50 RPS, Git (Push): 10 RPS
> - **[Latest Results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/25k)**
| Service | Nodes | Configuration | GCP | AWS | Azure |
|---------------------------------------------------|-------------|-------------------------|------------------|--------------|-----------|

View File

@ -14,8 +14,9 @@ For a full list of reference architectures, see
> - **High Availability:** No. For a highly-available environment, you can
> follow a modified [3K reference architecture](3k_users.md#supported-modifications-for-lower-user-counts-ha).
> - **Cloud Native Hybrid:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Test requests per second (RPS) rates:** API: 40 RPS, Web: 4 RPS, Git (Pull): 4 RPS, Git (Push): 1 RPS
> - **[Latest 2k weekly performance testing results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/2k)**
> - **Performance tested daily with the [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance)**:
> - **Test requests per second (RPS) rates:** API: 40 RPS, Web: 4 RPS, Git (Pull): 4 RPS, Git (Push): 1 RPS
> - **[Latest Results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/2k)**
| Service | Nodes | Configuration | GCP | AWS | Azure |
|------------------------------------------|--------|-------------------------|-----------------|--------------|----------|

View File

@ -22,9 +22,10 @@ For a full list of reference architectures, see
> - **Supported users (approximate):** 3,000
> - **High Availability:** Yes, although [Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution
> - **Cloud Native Hybrid:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Test requests per second (RPS) rates:** API: 60 RPS, Web: 6 RPS, Git (Pull): 6 RPS, Git (Push): 1 RPS
> - **[Latest 3k weekly performance testing results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/3k)**
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Performance tested weekly with the [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance)**:
> - **Test requests per second (RPS) rates:** API: 60 RPS, Web: 6 RPS, Git (Pull): 6 RPS, Git (Push): 1 RPS
> - **[Latest Results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/3k)**
| Service | Nodes | Configuration | GCP | AWS | Azure |
|--------------------------------------------|-------------|-----------------------|-----------------|--------------|----------|

View File

@ -12,9 +12,10 @@ full list of reference architectures, see
> - **Supported users (approximate):** 50,000
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Cloud Native Hybrid:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Test requests per second (RPS) rates:** API: 1000 RPS, Web: 100 RPS, Git (Pull): 100 RPS, Git (Push): 20 RPS
> - **[Latest 50k weekly performance testing results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/50k)**
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Performance tested weekly with the [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance)**:
> - **Test requests per second (RPS) rates:** API: 1000 RPS, Web: 100 RPS, Git (Pull): 100 RPS, Git (Push): 20 RPS
> - **[Latest Results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/50k)**
| Service | Nodes | Configuration | GCP | AWS | Azure |
|---------------------------------------------------|-------------|-------------------------|------------------|---------------|-----------|

View File

@ -19,9 +19,10 @@ costly-to-operate environment by using the
> - **Supported users (approximate):** 5,000
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Cloud Native Hybrid:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Test requests per second (RPS) rates:** API: 100 RPS, Web: 10 RPS, Git (Pull): 10 RPS, Git (Push): 2 RPS
> - **[Latest 5k weekly performance testing results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/5k)**
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Performance tested weekly with the [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance)**:
> - **Test requests per second (RPS) rates:** API: 100 RPS, Web: 10 RPS, Git (Pull): 10 RPS, Git (Push): 2 RPS
> - **[Latest Results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/5k)**
| Service | Nodes | Configuration | GCP | AWS | Azure |
|--------------------------------------------|-------------|-------------------------|-----------------|--------------|----------|

View File

@ -86,10 +86,10 @@ GitLab CI/CD features, grouped by DevOps stage, include:
| [Browser Performance Testing](../user/project/merge_requests/browser_performance_testing.md) | Quickly determine the browser performance impact of pending code changes. |
| [Load Performance Testing](../user/project/merge_requests/load_performance_testing.md) | Quickly determine the server performance impact of pending code changes. |
| [CI services](services/index.md) | Link Docker containers with your base image. |
| [Code Quality](../user/project/merge_requests/code_quality.md) | Analyze your source code quality. |
| [GitLab CI/CD for external repositories](ci_cd_for_external_repos/index.md) **(PREMIUM)** | Get the benefits of GitLab CI/CD combined with repositories in GitHub and Bitbucket Cloud. |
| [Interactive Web Terminals](interactive_web_terminal/index.md) **(FREE SELF)** | Open an interactive web terminal to debug the running jobs. |
| [Unit test reports](unit_test_reports.md) | Identify script failures directly on merge requests. |
| [Review Apps](review_apps/index.md) | Configure GitLab CI/CD to preview code changes. |
| [Unit test reports](unit_test_reports.md) | Identify test failures directly on merge requests. |
| [Using Docker images](docker/using_docker_images.md) | Use GitLab and GitLab Runner with Docker to build and test applications. |
|-------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------|
| **Release** | |
@ -100,10 +100,10 @@ GitLab CI/CD features, grouped by DevOps stage, include:
| [Feature Flags](../operations/feature_flags.md) | Deploy your features behind Feature Flags. |
| [GitLab Pages](../user/project/pages/index.md) | Deploy static websites. |
| [GitLab Releases](../user/project/releases/index.md) | Add release notes to Git tags. |
| [Review Apps](review_apps/index.md) | Configure GitLab CI/CD to preview code changes. |
| [Cloud deployment](cloud_deployment/index.md) | Deploy your application to a main cloud provider. |
|-------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------|
| **Secure** | |
| [Code Quality](../user/project/merge_requests/code_quality.md) | Analyze your source code quality. |
| [Container Scanning](../user/application_security/container_scanning/index.md) **(ULTIMATE)** | Check your Docker containers for known vulnerabilities. |
| [Dependency Scanning](../user/application_security/dependency_scanning/index.md) **(ULTIMATE)** | Analyze your dependencies for known vulnerabilities. |
| [License Compliance](../user/compliance/license_compliance/index.md) **(ULTIMATE)** | Search your project dependencies for their licenses. |
@ -148,6 +148,10 @@ See also the [Why CI/CD?](https://docs.google.com/presentation/d/1OGgk2Tcxbpl7DJ
As GitLab CI/CD has evolved, certain breaking changes have
been necessary.
#### 14.0
- No breaking changes.
#### 13.0
- [Remove Backported `os.Expand`](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4915).

View File

@ -2755,51 +2755,42 @@ artifacts are restored after [caches](#cache).
#### `dependencies`
By default, all `artifacts` from previous stages
are passed to each job. However, you can use the `dependencies` keyword to
define a limited list of jobs to fetch artifacts from. You can also set a job to download no artifacts at all.
Use the `dependencies` keyword to define a list of jobs to fetch artifacts from.
You can also set a job to download no artifacts at all.
To use this feature, define `dependencies` in context of the job and pass
a list of all previous jobs the artifacts should be downloaded from.
If you do not use `dependencies`, all `artifacts` from previous stages are passed to each job.
You can define jobs from stages that were executed before the current one.
An error occurs if you define jobs from the current or an upcoming stage.
**Keyword type**: Job keyword. You can use it only as part of a job.
To prevent a job from downloading artifacts, define an empty array.
**Possible inputs**:
When you use `dependencies`, the status of the previous job is not considered.
If a job fails or it's a manual job that isn't triggered, no error occurs.
- The names of jobs to fetch artifacts from.
- An empty array (`[]`), to configure the job to not download any artifacts.
The following example defines two jobs with artifacts: `build:osx` and
`build:linux`. When the `test:osx` is executed, the artifacts from `build:osx`
are downloaded and extracted in the context of the build. The same happens
for `test:linux` and artifacts from `build:linux`.
The job `deploy` downloads artifacts from all previous jobs because of
the [stage](#stages) precedence:
**Example of `dependencies`**:
```yaml
build:osx:
build osx:
stage: build
script: make build:osx
artifacts:
paths:
- binaries/
build:linux:
build linux:
stage: build
script: make build:linux
artifacts:
paths:
- binaries/
test:osx:
test osx:
stage: test
script: make test:osx
dependencies:
- build:osx
test:linux:
test linux:
stage: test
script: make test:linux
dependencies:
@ -2810,14 +2801,18 @@ deploy:
script: make deploy
```
##### When a dependent job fails
In this example, two jobs have artifacts: `build osx` and `build linux`. When `test osx` is executed,
the artifacts from `build osx` are downloaded and extracted in the context of the build.
The same thing happens for `test linux` and artifacts from `build linux`.
> Introduced in GitLab 10.3.
The `deploy` job downloads artifacts from all previous jobs because of
the [stage](#stages) precedence.
If the artifacts of the job that is set as a dependency are
[expired](#artifactsexpire_in) or
[deleted](../pipelines/job_artifacts.md#delete-job-artifacts), then
the dependent job fails.
**Additional details**:
- The job status does not matter. If a job fails or it's a manual job that isn't triggered, no error occurs.
- If the artifacts of a dependent job are [expired](#artifactsexpire_in) or
[deleted](../pipelines/job_artifacts.md#delete-job-artifacts), then the job fails.
#### `artifacts:exclude`

View File

@ -86,6 +86,7 @@ you can set up this integration with your own account instead.
`https://<gitlab.example.com>/-/jira/login/oauth/callback`.
1. For **Scopes**, select `api` and clear any other checkboxes.
- The connector requires a _write-enabled_ `api` scope to automatically create and manage required webhooks.
1. Select **Submit**.
1. GitLab displays the generated **Application ID**
and **Secret** values. Copy these values, as you need them to configure Jira.

View File

@ -40,8 +40,14 @@ including:
## Group webhooks **(PREMIUM)**
You can configure a webhook for a group to ensure all projects in the group
receive the same webhook settings.
You can configure a group webhook, which is triggered by events
that occur across all projects in the group.
Group webhooks can also be configured to listen for events that are
specific to a group, including:
- [Group member events](webhook_events.md#group-member-events)
- [Subgroup events](webhook_events.md#subgroup-events)
## Configure a webhook

View File

@ -0,0 +1,125 @@
# frozen_string_literal: true

module Gitlab
  module Analytics
    module CycleAnalytics
      module Aggregated
        # rubocop: disable CodeReuse/ActiveRecord
        # Builds the base ActiveRecord relation for aggregated Value Stream
        # Analytics stage queries. The relation targets the pre-aggregated
        # stage event tables (IssueStageEvent / MergeRequestStageEvent),
        # scoped to one stage and narrowed by the optional filter params.
        class BaseQueryBuilder
          include StageQueryHelpers

          # Maps the stage's subject class name to the aggregated stage
          # event model that stores its start/end event timestamps.
          MODEL_CLASSES = {
            MergeRequest.to_s => ::Analytics::CycleAnalytics::MergeRequestStageEvent,
            Issue.to_s => ::Analytics::CycleAnalytics::IssueStageEvent
          }.freeze

          # Allowed params:
          # * from - stage end date filter start date
          # * to - stage end date filter to date
          # * author_username
          # * milestone_title
          # * label_name (array)
          # * assignee_username (array)
          # * project_ids (array)
          def initialize(stage:, params: {})
            @stage = stage
            @params = params
            # Milestone/label lookups are resolved against the top-level group.
            @root_ancestor = stage.parent.root_ancestor
            # Raises KeyError for an unsupported subject class (fail fast).
            @stage_event_model = MODEL_CLASSES.fetch(stage.subject_class.to_s)
          end

          # Returns the fully filtered relation. Each filter_* step receives
          # the relation built so far and returns a (possibly) narrowed one.
          def build
            query = base_query
            query = filter_by_stage_parent(query)

            query = filter_author(query)
            query = filter_milestone_ids(query)
            query = filter_label_names(query)
            filter_assignees(query)
          end

          # Narrows to records authored by the given username.
          # Unknown usernames yield an empty relation (query.none).
          def filter_author(query)
            return query if params[:author_username].blank?

            user = User.by_username(params[:author_username]).first

            return query.none if user.blank?

            query.authored(user)
          end

          # Narrows to records associated with the milestone matching
          # params[:milestone_title] within the root ancestor's hierarchy.
          # Unknown titles yield an empty relation.
          def filter_milestone_ids(query)
            return query if params[:milestone_title].blank?

            milestone = MilestonesFinder
              .new(group_ids: root_ancestor.self_and_descendant_ids, project_ids: root_ancestor.all_projects.select(:id), title: params[:milestone_title])
              .execute
              .first

            return query.none if milestone.blank?

            query.with_milestone_id(milestone.id)
          end

          # Narrows to records whose issuable carries ALL of the given labels.
          # Each label name group becomes an EXISTS subquery against
          # LabelLink, so only full matches survive.
          def filter_label_names(query)
            return query if params[:label_name].blank?

            all_label_ids = Issuables::LabelFilter
              .new(group: root_ancestor, project: nil, params: { label_name: params[:label_name] })
              .find_label_ids(params[:label_name])

            # If some of the requested label names could not be resolved,
            # no record can match all of them.
            return query.none if params[:label_name].size != all_label_ids.size

            all_label_ids.each do |label_ids|
              # Correlated EXISTS: a LabelLink for this issuable with one of
              # the ids resolved for this label name.
              relation = LabelLink
                .where(target_type: stage.subject_class.name)
                .where(LabelLink.arel_table['target_id'].eq(query.model.arel_table[query.model.issuable_id_column]))
              relation = relation.where(label_id: label_ids)
              query = query.where(relation.arel.exists)
            end

            query
          end

          # Narrows to records assigned to the given usernames.
          def filter_assignees(query)
            return query if params[:assignee_username].blank?

            Issuables::AssigneeFilter
              .new(params: { assignee_username: params[:assignee_username] })
              .filter(query)
          end

          # Scopes the relation to the stage's parent project.
          def filter_by_stage_parent(query)
            query.by_project_id(stage.parent_id)
          end

          # Relation scoped to this stage's event hash, with the time-range
          # filter applied. Defaults to the last 30 days when :from is absent.
          def base_query
            query = stage_event_model
              .by_stage_event_hash_id(stage.stage_event_hash_id)

            from = params[:from] || 30.days.ago
            if in_progress?
              # In-progress mode: items whose end event has not happened yet;
              # the time range applies to the START event instead.
              query = query
                .end_event_is_not_happened_yet
                .opened_state
                .start_event_timestamp_after(from)
              query = query.start_event_timestamp_before(params[:to]) if params[:to]
            else
              # Finished mode: the time range applies to the END event.
              query = query.end_event_timestamp_after(from)
              query = query.end_event_timestamp_before(params[:to]) if params[:to]
            end

            query
          end

          private

          attr_reader :stage, :params, :root_ancestor, :stage_event_model
        end
        # rubocop: enable CodeReuse/ActiveRecord
      end
    end
  end
end

Gitlab::Analytics::CycleAnalytics::Aggregated::BaseQueryBuilder.prepend_mod_with('Gitlab::Analytics::CycleAnalytics::Aggregated::BaseQueryBuilder')

View File

@ -0,0 +1,48 @@
# frozen_string_literal: true

module Gitlab
  module Analytics
    module CycleAnalytics
      module Aggregated
        # Arguments:
        #   stage - an instance of CycleAnalytics::ProjectStage or CycleAnalytics::GroupStage
        #   params:
        #     current_user: an instance of User
        #     from: DateTime
        #     to: DateTime
        #
        # Facade over the aggregated stage event tables: exposes the median
        # duration calculator and a capped record count for one stage.
        class DataCollector
          include Gitlab::Utils::StrongMemoize

          # Upper bound for #count so a huge stage cannot trigger an
          # unbounded COUNT(*).
          MAX_COUNT = 10001

          # NOTE(review): records_fetcher is not defined in this file —
          # presumably provided elsewhere (prepend/EE); confirm before use.
          delegate :serialized_records, to: :records_fetcher

          def initialize(stage:, params: {})
            @stage = stage
            @params = params
          end

          # Median duration calculator for this stage (built once).
          def median
            strong_memoize(:median) do
              Median.new(stage: stage, query: query, params: params)
            end
          end

          # Number of matching stage event records, capped at MAX_COUNT.
          def count
            strong_memoize(:count) do
              query.limit(MAX_COUNT).count
            end
          end

          private

          attr_reader :stage, :params

          # Filtered relation over the aggregated stage event table.
          def query
            BaseQueryBuilder.new(stage: stage, params: params).build
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,36 @@
# frozen_string_literal: true

module Gitlab
  module Analytics
    module CycleAnalytics
      module Aggregated
        # Computes the median stage duration over an aggregated stage event
        # relation, using PostgreSQL's percentile_cont(0.5) (via
        # StageQueryHelpers#median_duration_in_seconds).
        class Median
          include StageQueryHelpers

          # stage  - the stage the query was built for
          # query  - relation from Aggregated::BaseQueryBuilder#build
          # params - filter params; :end_event_filter toggles in-progress mode
          def initialize(stage:, query:, params:)
            @stage = stage
            @query = query
            @params = params
          end

          # rubocop: disable CodeReuse/ActiveRecord
          # Median duration in seconds, or nil when the relation is empty.
          # Note: replaces the relation's SELECT list and drops any ORDER BY.
          def seconds
            @query = @query.select(median_duration_in_seconds.as('median')).reorder(nil)

            result = @query.take || {}

            # percentile_cont returns NULL for an empty set; the former
            # `|| nil` was redundant.
            result['median']
          end
          # rubocop: enable CodeReuse/ActiveRecord

          # Median duration in fractional days; nil when there is no median.
          def days
            seconds&.fdiv(1.day)
          end

          private

          attr_reader :stage, :query, :params
        end
      end
    end
  end
end

View File

@ -0,0 +1,41 @@
# frozen_string_literal: true

module Gitlab
  module Analytics
    module CycleAnalytics
      module Aggregated
        # Arel helpers shared by the aggregated VSA query objects.
        # Includers must expose `query` (an ActiveRecord relation over a
        # stage event model) and `params` (filter hash).
        module StageQueryHelpers
          # Arel node for PostgreSQL's
          #   percentile_cont(0.5) WITHIN GROUP (ORDER BY <duration>)
          # i.e. the median of the per-record durations.
          def percentile_cont
            percentile_cont_ordering = Arel::Nodes::UnaryOperation.new(Arel::Nodes::SqlLiteral.new('ORDER BY'), duration)
            Arel::Nodes::NamedFunction.new(
              'percentile_cont(0.5) WITHIN GROUP',
              [percentile_cont_ordering]
            )
          end

          # Arel expression for a record's stage duration.
          # In-progress records have no end event yet, so their duration is
          # measured from the start event to "now" (TO_TIMESTAMP of the
          # current epoch); finished records use end - start.
          def duration
            if in_progress?
              Arel::Nodes::Subtraction.new(
                Arel::Nodes::NamedFunction.new('TO_TIMESTAMP', [Time.current.to_i]),
                query.model.arel_table[:start_event_timestamp]
              )
            else
              Arel::Nodes::Subtraction.new(
                query.model.arel_table[:end_event_timestamp],
                query.model.arel_table[:start_event_timestamp]
              )
            end
          end

          # Median duration as seconds: EXTRACT(epoch FROM percentile_cont(...)).
          def median_duration_in_seconds
            Arel::Nodes::Extract.new(percentile_cont, :epoch)
          end

          # True when the caller asked for items still inside the stage
          # (no end event yet).
          def in_progress?
            params[:end_event_filter] == :in_progress
          end
        end
      end
    end
  end
end

View File

@ -29,7 +29,11 @@ module Gitlab
def median
strong_memoize(:median) do
Median.new(stage: stage, query: query, params: params)
if use_aggregated_data_collector?
aggregated_data_collector.median
else
Median.new(stage: stage, query: query, params: params)
end
end
end
@ -41,7 +45,11 @@ module Gitlab
def count
strong_memoize(:count) do
limit_count
if use_aggregated_data_collector?
aggregated_data_collector.count
else
limit_count
end
end
end
@ -59,6 +67,14 @@ module Gitlab
def limit_count
query.limit(MAX_COUNT).count
end
def aggregated_data_collector
@aggregated_data_collector ||= Aggregated::DataCollector.new(stage: stage, params: params)
end
def use_aggregated_data_collector?
params.fetch(:use_aggregated_data_collector, false)
end
end
end
end

View File

@ -79,7 +79,8 @@ module Gitlab
sort: sort&.to_sym,
direction: direction&.to_sym,
page: page,
end_event_filter: end_event_filter.to_sym
end_event_filter: end_event_filter.to_sym,
use_aggregated_data_collector: Feature.enabled?(:use_vsa_aggregated_tables, group || project, default_enabled: :yaml)
}.merge(attributes.symbolize_keys.slice(*FINDER_PARAM_NAMES))
end

View File

@ -54,7 +54,10 @@ module Gitlab
connection = host.connection
return yield connection
rescue StandardError => error
if serialization_failure?(error)
if primary_only?
# If we only have primary configured, retrying is pointless
raise error
elsif serialization_failure?(error)
# This error can occur when a query conflicts. See
# https://www.postgresql.org/docs/current/static/hot-standby.html#HOT-STANDBY-CONFLICT
# for more information.

View File

@ -45,14 +45,12 @@ module Gitlab
::Gitlab::GraphqlLogger.info(info)
end
def query_variables_for_logging(query)
clean_variables(query.provided_variables)
end
def clean_variables(variables)
ActiveSupport::ParameterFilter
filtered = ActiveSupport::ParameterFilter
.new(::Rails.application.config.filter_parameters)
.filter(variables)
filtered&.to_s
end
end
end

View File

@ -4,11 +4,7 @@ module Gitlab
module SubscriptionPortal
def self.default_subscriptions_url
if ::Gitlab.dev_or_test_env?
if Feature.enabled?(:new_customersdot_staging_url, default_enabled: :yaml)
'https://customers.staging.gitlab.com'
else
'https://customers.stg.gitlab.com'
end
'https://customers.staging.gitlab.com'
else
'https://customers.gitlab.com'
end

View File

@ -8,6 +8,7 @@ require 'uri'
module Gitlab
class Workhorse
SEND_DATA_HEADER = 'Gitlab-Workhorse-Send-Data'
SEND_DEPENDENCY_CONTENT_TYPE_HEADER = 'Workhorse-Proxy-Content-Type'
VERSION_FILE = 'GITLAB_WORKHORSE_VERSION'
INTERNAL_API_CONTENT_TYPE = 'application/vnd.gitlab-workhorse+json'
INTERNAL_API_REQUEST_HEADER = 'Gitlab-Workhorse-Api-Request'
@ -170,9 +171,9 @@ module Gitlab
]
end
def send_dependency(token, url)
def send_dependency(headers, url)
params = {
'Header' => { Authorization: ["Bearer #{token}"] },
'Header' => headers,
'Url' => url
}

View File

@ -13997,6 +13997,9 @@ msgstr ""
msgid "Explore public groups"
msgstr ""
msgid "Explore topics"
msgstr ""
msgid "Export"
msgstr ""

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true

require_relative '../../qa_helpers'

module RuboCop
  module Cop
    module QA
      # This cop checks for duplicate testcase links across e2e specs
      #
      # @example
      #
      #   # bad
      #   it 'some test', testcase: '(...)/quality/test_cases/1892'
      #   it 'another test', testcase: '(...)/quality/test_cases/1892'
      #
      #   # good
      #   it 'some test', testcase: '(...)/quality/test_cases/1892'
      #   it 'another test', testcase: '(...)/quality/test_cases/1894'
      class DuplicateTestcaseLink < RuboCop::Cop::Cop
        include QAHelpers

        MESSAGE = "Don't reuse the same testcase link in different tests. Replace one of `%s`."

        # Class-level instance variable: shared across every file inspected
        # in one RuboCop run so duplicates are detected CROSS-file.
        # NOTE(review): this set is never reset, so a long-lived process
        # re-running the cop would report false positives — confirm intended.
        @testcase_set = Set.new

        # Matches a block call (e.g. `it ... do`) whose send carries a hash
        # argument with a `testcase:` string pair; captures the link string.
        def_node_matcher :duplicate_testcase_link, <<~PATTERN
          (block
            (send nil? ...
            ...
              (hash
                (pair
                  (sym :testcase)
                  (str $_))...)...)...)
        PATTERN

        def on_block(node)
          return unless in_qa_file?(node)

          duplicate_testcase_link(node) do |link|
            # First sighting of a link records it and stops; only a repeat
            # sighting falls through to the offense.
            break unless self.class.duplicate?(link)

            add_offense(node, message: MESSAGE % link)
          end
        end

        # Returns true when the link was already registered (Set#add?
        # returns nil for an existing member).
        def self.duplicate?(link)
          !@testcase_set.add?(link)
        end
      end
    end
  end
end

View File

@ -124,6 +124,34 @@ RSpec.describe Groups::DependencyProxyForContainersController do
end
end
shared_examples 'authorize action with permission' do
context 'with a valid user' do
before do
group.add_guest(user)
end
it 'sends Workhorse local file instructions', :aggregate_failures do
subject
expect(response.headers['Content-Type']).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).to eq(DependencyProxy::FileUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to be_nil
expect(json_response['MaximumSize']).to eq(maximum_size)
end
it 'sends Workhorse remote object instructions', :aggregate_failures do
stub_dependency_proxy_object_storage(direct_upload: true)
subject
expect(response.headers['Content-Type']).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).to be_nil
expect(json_response['RemoteObject']).not_to be_nil
expect(json_response['MaximumSize']).to eq(maximum_size)
end
end
end
before do
allow(Gitlab.config.dependency_proxy)
.to receive(:enabled).and_return(true)
@ -136,9 +164,10 @@ RSpec.describe Groups::DependencyProxyForContainersController do
end
describe 'GET #manifest' do
let_it_be(:manifest) { create(:dependency_proxy_manifest) }
let_it_be(:manifest) { create(:dependency_proxy_manifest, group: group) }
let(:pull_response) { { status: :success, manifest: manifest, from_cache: false } }
let(:tag) { 'latest1' }
before do
allow_next_instance_of(DependencyProxy::FindOrCreateManifestService) do |instance|
@ -146,7 +175,7 @@ RSpec.describe Groups::DependencyProxyForContainersController do
end
end
subject { get_manifest }
subject { get_manifest(tag) }
context 'feature enabled' do
before do
@ -207,11 +236,26 @@ RSpec.describe Groups::DependencyProxyForContainersController do
it_behaves_like 'a successful manifest pull'
it_behaves_like 'a package tracking event', described_class.name, 'pull_manifest'
context 'with a cache entry' do
let(:pull_response) { { status: :success, manifest: manifest, from_cache: true } }
context 'with workhorse response' do
let(:pull_response) { { status: :success, manifest: nil, from_cache: false } }
it_behaves_like 'returning response status', :success
it_behaves_like 'a package tracking event', described_class.name, 'pull_manifest_from_cache'
it 'returns Workhorse send-dependency instructions', :aggregate_failures do
subject
send_data_type, send_data = workhorse_send_data
header, url = send_data.values_at('Header', 'Url')
expect(send_data_type).to eq('send-dependency')
expect(header).to eq(
"Authorization" => ["Bearer abcd1234"],
"Accept" => ::ContainerRegistry::Client::ACCEPTED_TYPES
)
expect(url).to eq(DependencyProxy::Registry.manifest_url('alpine', tag))
expect(response.headers['Content-Type']).to eq('application/gzip')
expect(response.headers['Content-Disposition']).to eq(
ActionDispatch::Http::ContentDisposition.format(disposition: 'attachment', filename: manifest.file_name)
)
end
end
end
@ -237,8 +281,8 @@ RSpec.describe Groups::DependencyProxyForContainersController do
it_behaves_like 'not found when disabled'
def get_manifest
get :manifest, params: { group_id: group.to_param, image: 'alpine', tag: '3.9.2' }
def get_manifest(tag)
get :manifest, params: { group_id: group.to_param, image: 'alpine', tag: tag }
end
end
@ -383,40 +427,16 @@ RSpec.describe Groups::DependencyProxyForContainersController do
describe 'GET #authorize_upload_blob' do
let(:blob_sha) { 'a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4' }
let(:maximum_size) { DependencyProxy::Blob::MAX_FILE_SIZE }
subject(:authorize_upload_blob) do
subject do
request.headers.merge!(workhorse_internal_api_request_header)
get :authorize_upload_blob, params: { group_id: group.to_param, image: 'alpine', sha: blob_sha }
end
it_behaves_like 'without permission'
context 'with a valid user' do
before do
group.add_guest(user)
end
it 'sends Workhorse local file instructions', :aggregate_failures do
authorize_upload_blob
expect(response.headers['Content-Type']).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).to eq(DependencyProxy::FileUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to be_nil
expect(json_response['MaximumSize']).to eq(5.gigabytes)
end
it 'sends Workhorse remote object instructions', :aggregate_failures do
stub_dependency_proxy_object_storage(direct_upload: true)
authorize_upload_blob
expect(response.headers['Content-Type']).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).to be_nil
expect(json_response['RemoteObject']).not_to be_nil
expect(json_response['MaximumSize']).to eq(5.gigabytes)
end
end
it_behaves_like 'authorize action with permission'
end
describe 'GET #upload_blob' do
@ -449,6 +469,48 @@ RSpec.describe Groups::DependencyProxyForContainersController do
end
end
describe 'GET #authorize_upload_manifest' do
let(:maximum_size) { DependencyProxy::Manifest::MAX_FILE_SIZE }
subject do
request.headers.merge!(workhorse_internal_api_request_header)
get :authorize_upload_manifest, params: { group_id: group.to_param, image: 'alpine', tag: 'latest' }
end
it_behaves_like 'without permission'
it_behaves_like 'authorize action with permission'
end
describe 'GET #upload_manifest' do
let(:file) { fixture_file_upload("spec/fixtures/dependency_proxy/manifest", 'application/json') }
subject do
request.headers.merge!(workhorse_internal_api_request_header)
get :upload_manifest, params: {
group_id: group.to_param,
image: 'alpine',
tag: 'latest',
file: file
}
end
it_behaves_like 'without permission'
context 'with a valid user' do
before do
group.add_guest(user)
expect_next_found_instance_of(Group) do |instance|
expect(instance).to receive_message_chain(:dependency_proxy_manifests, :create!)
end
end
it_behaves_like 'a package tracking event', described_class.name, 'pull_manifest'
end
end
def enable_dependency_proxy
group.create_dependency_proxy_setting!(enabled: true)
end

View File

@ -16,6 +16,7 @@ RSpec.describe Projects::Analytics::CycleAnalytics::StagesController do
end
before do
stub_feature_flags(use_vsa_aggregated_tables: false)
sign_in(user)
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true

FactoryBot.define do
  # Row in the aggregated VSA issue stage event table. Specs are expected to
  # override stage_event_hash_id / issue_id / group_id / project_id with real
  # ids; the defaults only make the record valid.
  factory :cycle_analytics_issue_stage_event, class: 'Analytics::CycleAnalytics::IssueStageEvent' do
    sequence(:stage_event_hash_id) { |n| n }
    # NOTE(review): these sequences ignore the counter, so every record gets
    # 0 unless the caller overrides — presumably intentional placeholders;
    # TODO confirm (plain `issue_id { 0 }` would express the same thing).
    sequence(:issue_id) { 0 }
    sequence(:group_id) { 0 }
    sequence(:project_id) { 0 }
    start_event_timestamp { 3.weeks.ago.to_date }
    end_event_timestamp { 2.weeks.ago.to_date }
  end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true

FactoryBot.define do
  # Row in the aggregated VSA merge request stage event table. Specs are
  # expected to override the id attributes with real ids; the defaults only
  # make the record valid.
  factory :cycle_analytics_merge_request_stage_event, class: 'Analytics::CycleAnalytics::MergeRequestStageEvent' do
    sequence(:stage_event_hash_id) { |n| n }
    # NOTE(review): these sequences ignore the counter, so every record gets
    # 0 unless the caller overrides — presumably intentional placeholders;
    # TODO confirm (plain `merge_request_id { 0 }` would express the same).
    sequence(:merge_request_id) { 0 }
    sequence(:group_id) { 0 }
    sequence(:project_id) { 0 }
    start_event_timestamp { 3.weeks.ago.to_date }
    end_event_timestamp { 2.weeks.ago.to_date }
  end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true

require 'spec_helper'

# Feature spec for the Explore > Topics page
# (Explore::ProjectsController#topics).
RSpec.describe 'Explore Topics' do
  context 'when no topics exist' do
    it 'renders empty message', :aggregate_failures do
      visit topics_explore_projects_path

      expect(current_path).to eq topics_explore_projects_path
      expect(page).to have_content('There are no topics to show.')
    end
  end

  context 'when topics exist' do
    # let! so the topic exists before the page is visited
    let!(:topic) { create(:topic, name: 'topic1') }

    it 'renders topic list' do
      visit topics_explore_projects_path

      expect(current_path).to eq topics_explore_projects_path
      expect(page).to have_content('topic1')
    end
  end
end

View File

@ -188,6 +188,11 @@ RSpec.describe Nav::TopNavHelper do
href: '/explore',
id: 'explore',
title: 'Explore projects'
),
::Gitlab::Nav::TopNavMenuItem.build(
href: '/explore/projects/topics',
id: 'topics',
title: 'Explore topics'
)
]
expect(projects_view[:linksPrimary]).to eq(expected_links_primary)

View File

@ -0,0 +1,150 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::BaseQueryBuilder do
  # Fixture graph: one project with three issues plus one issue outside the
  # project, and a matching aggregated stage event row per in-project issue.
  let_it_be(:group) { create(:group) }
  let_it_be(:project) { create(:project, group: group) }
  let_it_be(:milestone) { create(:milestone, project: project) }
  let_it_be(:user_1) { create(:user) }
  let_it_be(:label_1) { create(:label, project: project) }
  let_it_be(:label_2) { create(:label, project: project) }

  # issue_1: authored + both labels; issue_2: milestone + assignee;
  # issue_3: still in progress (no end event).
  let_it_be(:issue_1) { create(:issue, project: project, author: project.creator, labels: [label_1, label_2]) }
  let_it_be(:issue_2) { create(:issue, project: project, milestone: milestone, assignees: [user_1]) }
  let_it_be(:issue_3) { create(:issue, project: project) }
  let_it_be(:issue_outside_project) { create(:issue) }

  let_it_be(:stage) do
    create(:cycle_analytics_project_stage,
      project: project,
      start_event_identifier: :issue_created,
      end_event_identifier: :issue_deployed_to_production
    )
  end

  # Finished event, ended 8 months ago (used by timestamp filtering).
  let_it_be(:stage_event_1) do
    create(:cycle_analytics_issue_stage_event,
      stage_event_hash_id: stage.stage_event_hash_id,
      group_id: group.id,
      project_id: project.id,
      issue_id: issue_1.id,
      author_id: project.creator.id,
      milestone_id: nil,
      state_id: issue_1.state_id,
      end_event_timestamp: 8.months.ago
    )
  end

  # Finished event with default (recent) timestamps.
  let_it_be(:stage_event_2) do
    create(:cycle_analytics_issue_stage_event,
      stage_event_hash_id: stage.stage_event_hash_id,
      group_id: group.id,
      project_id: project.id,
      issue_id: issue_2.id,
      author_id: nil,
      milestone_id: milestone.id,
      state_id: issue_2.state_id
    )
  end

  # In-progress event: end_event_timestamp is nil.
  let_it_be(:stage_event_3) do
    create(:cycle_analytics_issue_stage_event,
      stage_event_hash_id: stage.stage_event_hash_id,
      group_id: group.id,
      project_id: project.id,
      issue_id: issue_3.id,
      author_id: nil,
      milestone_id: milestone.id,
      state_id: issue_3.state_id,
      start_event_timestamp: 8.months.ago,
      end_event_timestamp: nil
    )
  end

  let(:params) do
    {
      from: 1.year.ago.to_date,
      to: Date.today
    }
  end

  subject(:issue_ids) { described_class.new(stage: stage, params: params).build.pluck(:issue_id) }

  it 'scopes the query for the given project' do
    expect(issue_ids).to match_array([issue_1.id, issue_2.id])
    expect(issue_ids).not_to include([issue_outside_project.id])
  end

  describe 'author_username param' do
    it 'returns stage events associated with the given author' do
      params[:author_username] = project.creator.username

      expect(issue_ids).to eq([issue_1.id])
    end

    it 'returns empty result when unknown author is given' do
      params[:author_username] = 'no one'

      expect(issue_ids).to be_empty
    end
  end

  describe 'milestone_title param' do
    it 'returns stage events associated with the milestone' do
      params[:milestone_title] = milestone.title

      expect(issue_ids).to eq([issue_2.id])
    end

    it 'returns empty result when unknown milestone is given' do
      params[:milestone_title] = 'unknown milestone'

      expect(issue_ids).to be_empty
    end
  end

  describe 'label_name param' do
    it 'returns stage events associated with multiple labels' do
      params[:label_name] = [label_1.name, label_2.name]

      expect(issue_ids).to eq([issue_1.id])
    end

    it 'does not include records with partial label match' do
      params[:label_name] = [label_1.name, 'other label']

      expect(issue_ids).to be_empty
    end
  end

  describe 'assignee_username param' do
    it 'returns stage events associated assignee' do
      params[:assignee_username] = [user_1.username]

      expect(issue_ids).to eq([issue_2.id])
    end
  end

  describe 'timestamp filtering' do
    before do
      params[:from] = 1.year.ago
      params[:to] = 6.months.ago
    end

    # Default (finished) mode filters on end_event_timestamp.
    it 'filters by the end event time range' do
      expect(issue_ids).to eq([issue_1.id])
    end

    # In-progress mode filters on start_event_timestamp instead.
    context 'when in_progress items are requested' do
      before do
        params[:end_event_filter] = :in_progress
      end

      it 'filters by the start event time range' do
        expect(issue_ids).to eq([issue_3.id])
      end
    end
  end
end

View File

@ -141,6 +141,24 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
lb.read { raise conflict_error }
end
context 'only primary is configured' do
let(:lb) do
config = Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base)
allow(config).to receive(:load_balancing_enabled?).and_return(false)
described_class.new(config)
end
it 'does not retry a query on connection error if only the primary is configured' do
host = double(:host, query_cache_enabled: true)
allow(lb).to receive(:host).and_return(host)
allow(host).to receive(:connection).and_raise(PG::UnableToSend)
expect { lb.read }.to raise_error(PG::UnableToSend)
end
end
it 'uses the primary if no secondaries are available' do
allow(lb).to receive(:connection_error?).and_return(true)

View File

@ -44,7 +44,7 @@ RSpec.describe Gitlab::Graphql::Tracers::LoggerTracer do
query_fingerprint: query.fingerprint,
query_string: query_string,
trace_type: "execute_query",
variables: variables
variables: variables.to_s
})
dummy_schema.execute(query_string, variables: variables)

View File

@ -9,14 +9,13 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
before do
stub_env('CUSTOMER_PORTAL_URL', env_value)
stub_feature_flags(new_customersdot_staging_url: false)
end
describe '.default_subscriptions_url' do
where(:test, :development, :result) do
false | false | 'https://customers.gitlab.com'
false | true | 'https://customers.stg.gitlab.com'
true | false | 'https://customers.stg.gitlab.com'
false | true | 'https://customers.staging.gitlab.com'
true | false | 'https://customers.staging.gitlab.com'
end
before do
@ -35,7 +34,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
subject { described_class.subscriptions_url }
context 'when CUSTOMER_PORTAL_URL ENV is unset' do
it { is_expected.to eq('https://customers.stg.gitlab.com') }
it { is_expected.to eq('https://customers.staging.gitlab.com') }
end
context 'when CUSTOMER_PORTAL_URL ENV is set' do
@ -55,15 +54,15 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
context 'url methods' do
where(:method_name, :result) do
:default_subscriptions_url | 'https://customers.stg.gitlab.com'
:payment_form_url | 'https://customers.stg.gitlab.com/payment_forms/cc_validation'
:subscriptions_graphql_url | 'https://customers.stg.gitlab.com/graphql'
:subscriptions_more_minutes_url | 'https://customers.stg.gitlab.com/buy_pipeline_minutes'
:subscriptions_more_storage_url | 'https://customers.stg.gitlab.com/buy_storage'
:subscriptions_manage_url | 'https://customers.stg.gitlab.com/subscriptions'
:subscriptions_plans_url | 'https://customers.stg.gitlab.com/plans'
:subscriptions_instance_review_url | 'https://customers.stg.gitlab.com/instance_review'
:subscriptions_gitlab_plans_url | 'https://customers.stg.gitlab.com/gitlab_plans'
:default_subscriptions_url | 'https://customers.staging.gitlab.com'
:payment_form_url | 'https://customers.staging.gitlab.com/payment_forms/cc_validation'
:subscriptions_graphql_url | 'https://customers.staging.gitlab.com/graphql'
:subscriptions_more_minutes_url | 'https://customers.staging.gitlab.com/buy_pipeline_minutes'
:subscriptions_more_storage_url | 'https://customers.staging.gitlab.com/buy_storage'
:subscriptions_manage_url | 'https://customers.staging.gitlab.com/subscriptions'
:subscriptions_plans_url | 'https://customers.staging.gitlab.com/plans'
:subscriptions_instance_review_url | 'https://customers.staging.gitlab.com/instance_review'
:subscriptions_gitlab_plans_url | 'https://customers.staging.gitlab.com/gitlab_plans'
end
with_them do
@ -78,7 +77,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
let(:group_id) { 153 }
it { is_expected.to eq("https://customers.stg.gitlab.com/gitlab/namespaces/#{group_id}/extra_seats") }
it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/extra_seats") }
end
describe '.upgrade_subscription_url' do
@ -87,7 +86,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
let(:group_id) { 153 }
let(:plan_id) { 5 }
it { is_expected.to eq("https://customers.stg.gitlab.com/gitlab/namespaces/#{group_id}/upgrade/#{plan_id}") }
it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/upgrade/#{plan_id}") }
end
describe '.renew_subscription_url' do
@ -95,6 +94,6 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
let(:group_id) { 153 }
it { is_expected.to eq("https://customers.stg.gitlab.com/gitlab/namespaces/#{group_id}/renew") }
it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/renew") }
end
end

View File

@ -512,6 +512,24 @@ RSpec.describe Gitlab::Workhorse do
end
end
describe '.send_dependency' do
let(:headers) { { Accept: 'foo', Authorization: 'Bearer asdf1234' } }
let(:url) { 'https://foo.bar.com/baz' }
subject { described_class.send_dependency(headers, url) }
it 'sets the header correctly', :aggregate_failures do
key, command, params = decode_workhorse_header(subject)
expect(key).to eq("Gitlab-Workhorse-Send-Data")
expect(command).to eq("send-dependency")
expect(params).to eq({
'Header' => headers,
'Url' => url
}.deep_stringify_keys)
end
end
describe '.send_git_snapshot' do
let(:url) { 'http://example.com' }

View File

@ -13,5 +13,8 @@ RSpec.describe Analytics::CycleAnalytics::IssueStageEvent do
expect(described_class.states).to eq(Issue.available_states)
end
it_behaves_like 'StageEventModel'
it_behaves_like 'StageEventModel' do
let_it_be(:stage_event_factory) { :cycle_analytics_issue_stage_event }
let_it_be(:issuable_factory) { :issue }
end
end

View File

@ -13,5 +13,8 @@ RSpec.describe Analytics::CycleAnalytics::MergeRequestStageEvent do
expect(described_class.states).to eq(MergeRequest.available_states)
end
it_behaves_like 'StageEventModel'
it_behaves_like 'StageEventModel' do
let_it_be(:stage_event_factory) { :cycle_analytics_merge_request_stage_event }
let_it_be(:issuable_factory) { :merge_request }
end
end

View File

@ -31,18 +31,14 @@ RSpec.describe DependencyProxy::Manifest, type: :model do
end
end
describe '.find_or_initialize_by_file_name_or_digest' do
describe '.find_by_file_name_or_digest' do
let_it_be(:file_name) { 'foo' }
let_it_be(:digest) { 'bar' }
subject { DependencyProxy::Manifest.find_or_initialize_by_file_name_or_digest(file_name: file_name, digest: digest) }
subject { DependencyProxy::Manifest.find_by_file_name_or_digest(file_name: file_name, digest: digest) }
context 'no manifest exists' do
it 'initializes a manifest' do
expect(DependencyProxy::Manifest).to receive(:new).with(file_name: file_name, digest: digest)
subject
end
it { is_expected.to be_nil }
end
context 'manifest exists and matches file_name' do

View File

@ -32,7 +32,7 @@ RSpec.describe 'GraphQL' do
# operation_fingerprint starts with operation name
operation_fingerprint: %r{^anonymous\/},
is_mutation: false,
variables: variables,
variables: variables.to_s,
query_string: query
}
end

View File

@ -517,11 +517,15 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
let(:path) { "/v2/#{group.path}/dependency_proxy/containers/alpine/manifests/latest" }
let(:other_path) { "/v2/#{other_group.path}/dependency_proxy/containers/alpine/manifests/latest" }
let(:pull_response) { { status: :success, manifest: manifest, from_cache: false } }
let(:head_response) { { status: :success } }
before do
allow_next_instance_of(DependencyProxy::FindOrCreateManifestService) do |instance|
allow(instance).to receive(:execute).and_return(pull_response)
end
allow_next_instance_of(DependencyProxy::HeadManifestService) do |instance|
allow(instance).to receive(:execute).and_return(head_response)
end
end
it_behaves_like 'rate-limited token-authenticated requests'

View File

@ -85,6 +85,26 @@ RSpec.describe "Groups", "routing" do
expect(get('/v2')).to route_to('groups/dependency_proxy_auth#authenticate')
end
it 'routes to #upload_manifest' do
expect(post('v2/gitlabhq/dependency_proxy/containers/alpine/manifests/latest/upload'))
.to route_to('groups/dependency_proxy_for_containers#upload_manifest', group_id: 'gitlabhq', image: 'alpine', tag: 'latest')
end
it 'routes to #upload_blob' do
expect(post('v2/gitlabhq/dependency_proxy/containers/alpine/blobs/abc12345/upload'))
.to route_to('groups/dependency_proxy_for_containers#upload_blob', group_id: 'gitlabhq', image: 'alpine', sha: 'abc12345')
end
it 'routes to #upload_manifest_authorize' do
expect(post('v2/gitlabhq/dependency_proxy/containers/alpine/manifests/latest/upload/authorize'))
.to route_to('groups/dependency_proxy_for_containers#authorize_upload_manifest', group_id: 'gitlabhq', image: 'alpine', tag: 'latest')
end
it 'routes to #upload_blob_authorize' do
expect(post('v2/gitlabhq/dependency_proxy/containers/alpine/blobs/abc12345/upload/authorize'))
.to route_to('groups/dependency_proxy_for_containers#authorize_upload_blob', group_id: 'gitlabhq', image: 'alpine', sha: 'abc12345')
end
context 'image name without namespace' do
it 'routes to #manifest' do
expect(get('/v2/gitlabhq/dependency_proxy/containers/ruby/manifests/2.3.6'))

View File

@ -0,0 +1,36 @@
# frozen_string_literal: true

# Spec for the custom QA cop that forbids reusing the same testcase
# management link (`testcase:` metadata) across different examples.
require 'fast_spec_helper'
require_relative '../../../../rubocop/cop/qa/duplicate_testcase_link'
RSpec.describe RuboCop::Cop::QA::DuplicateTestcaseLink do
let(:source_file) { 'qa/page.rb' }
subject(:cop) { described_class.new }
context 'in a QA file' do
before do
# The cop only applies inside qa/ — force that check to pass so the
# examples don't depend on the inspected file's real path.
allow(cop).to receive(:in_qa_file?).and_return(true)
end
# The heredoc below is the exact source handed to expect_offense; the
# caret line must stay aligned under the second (duplicate) `it` call.
it "registers an offense for a duplicate testcase link" do
expect_offense(<<-RUBY)
it 'some test', testcase: '/quality/test_cases/1892' do
end
it 'another test', testcase: '/quality/test_cases/1892' do
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Don't reuse the same testcase link in different tests. Replace one of `/quality/test_cases/1892`.
end
RUBY
end
it "doesnt offend if testcase link is unique" do
expect_no_offenses(<<-RUBY)
it 'some test', testcase: '/quality/test_cases/1893' do
end
it 'another test', testcase: '/quality/test_cases/1894' do
end
RUBY
end
end
end

View File

@ -31,6 +31,14 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
end
end
shared_examples 'returning no manifest' do
# Success with a nil manifest: the workhorse-backed flow defers the
# download instead of returning a cached/downloaded manifest record.
it 'returns a nil manifest' do
expect(subject[:status]).to eq(:success)
expect(subject[:from_cache]).to eq false
expect(subject[:manifest]).to be_nil
end
end
context 'when no manifest exists' do
let_it_be(:image) { 'new-image' }
@ -40,7 +48,15 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
stub_manifest_download(image, tag, headers: headers)
end
it_behaves_like 'downloading the manifest'
it_behaves_like 'returning no manifest'
context 'with dependency_proxy_manifest_workhorse feature disabled' do
before do
stub_feature_flags(dependency_proxy_manifest_workhorse: false)
end
it_behaves_like 'downloading the manifest'
end
end
context 'failed head request' do
@ -49,7 +65,15 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
stub_manifest_download(image, tag, headers: headers)
end
it_behaves_like 'downloading the manifest'
it_behaves_like 'returning no manifest'
context 'with dependency_proxy_manifest_workhorse feature disabled' do
before do
stub_feature_flags(dependency_proxy_manifest_workhorse: false)
end
it_behaves_like 'downloading the manifest'
end
end
end
@ -60,7 +84,7 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
shared_examples 'using the cached manifest' do
it 'uses cached manifest instead of downloading one', :aggregate_failures do
expect { subject }.to change { dependency_proxy_manifest.reload.updated_at }
subject
expect(subject[:status]).to eq(:success)
expect(subject[:manifest]).to be_a(DependencyProxy::Manifest)
@ -80,12 +104,20 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
stub_manifest_download(image, tag, headers: { 'docker-content-digest' => digest, 'content-type' => content_type })
end
it 'downloads the new manifest and updates the existing record', :aggregate_failures do
expect(subject[:status]).to eq(:success)
expect(subject[:manifest]).to eq(dependency_proxy_manifest)
expect(subject[:manifest].content_type).to eq(content_type)
expect(subject[:manifest].digest).to eq(digest)
expect(subject[:from_cache]).to eq false
it_behaves_like 'returning no manifest'
context 'with dependency_proxy_manifest_workhorse feature disabled' do
before do
stub_feature_flags(dependency_proxy_manifest_workhorse: false)
end
it 'downloads the new manifest and updates the existing record', :aggregate_failures do
expect(subject[:status]).to eq(:success)
expect(subject[:manifest]).to eq(dependency_proxy_manifest)
expect(subject[:manifest].content_type).to eq(content_type)
expect(subject[:manifest].digest).to eq(digest)
expect(subject[:from_cache]).to eq false
end
end
end
@ -96,7 +128,15 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
stub_manifest_download(image, tag, headers: headers)
end
it_behaves_like 'downloading the manifest'
it_behaves_like 'returning no manifest'
context 'with dependency_proxy_manifest_workhorse feature disabled' do
before do
stub_feature_flags(dependency_proxy_manifest_workhorse: false)
end
it_behaves_like 'downloading the manifest'
end
end
context 'failed connection' do

View File

@ -74,4 +74,108 @@ RSpec.shared_examples 'StageEventModel' do
expect(input_data.map(&:values).sort).to eq(output_data)
end
end
# Scope coverage for models including StageEventModel. Expects the host
# spec to provide `issuable_factory` and `stage_event_factory`.
describe 'scopes' do
  # Maps records to their raw attribute hashes so result sets can be
  # compared by value rather than by object identity.
  def attributes(array)
    array.map(&:attributes)
  end

  # Order-sensitive, attribute-based equality matcher for scope results.
  RSpec::Matchers.define :match_attributes do |expected|
    match do |actual|
      actual.map(&:attributes) == expected.map(&:attributes)
    end
  end

  let_it_be(:user) { create(:user) }
  # Must be a project, not a user: the examples below read project.id
  # (as project_id) and project.namespace_id. Previously `create(:user)`.
  let_it_be(:project) { create(:project) }
  let_it_be(:milestone) { create(:milestone) }
  let_it_be(:issuable_with_assignee) { create(issuable_factory, assignees: [user]) }

  # Timestamps are spread years apart so the range-filter examples can
  # assert on records strictly inside or outside the window.
  let_it_be(:record) { create(stage_event_factory, start_event_timestamp: 3.years.ago.to_date, end_event_timestamp: 2.years.ago.to_date) }
  let_it_be(:record_with_author) { create(stage_event_factory, author_id: user.id) }
  let_it_be(:record_with_project) { create(stage_event_factory, project_id: project.id) }
  let_it_be(:record_with_group) { create(stage_event_factory, group_id: project.namespace_id) }
  let_it_be(:record_with_assigned_issuable) { create(stage_event_factory, described_class.issuable_id_column => issuable_with_assignee.id) }
  let_it_be(:record_with_milestone) { create(stage_event_factory, milestone_id: milestone.id) }

  it 'filters by stage_event_hash_id' do
    records = described_class.by_stage_event_hash_id(record.stage_event_hash_id)

    expect(records).to match_attributes([record])
  end

  it 'filters by project_id' do
    records = described_class.by_project_id(project.id)

    expect(records).to match_attributes([record_with_project])
  end

  it 'filters by group_id' do
    records = described_class.by_group_id(project.namespace_id)

    expect(records).to match_attributes([record_with_group])
  end

  it 'filters by author_id' do
    records = described_class.authored(user)

    expect(records).to match_attributes([record_with_author])
  end

  it 'filters by assignee' do
    records = described_class.assigned_to(user)

    expect(records).to match_attributes([record_with_assigned_issuable])
  end

  it 'filters by milestone_id' do
    records = described_class.with_milestone_id(milestone.id)

    expect(records).to match_attributes([record_with_milestone])
  end

  describe 'start_event_timestamp filtering' do
    it 'when range is given' do
      records = described_class
        .start_event_timestamp_after(4.years.ago)
        .start_event_timestamp_before(2.years.ago)

      expect(records).to match_attributes([record])
    end

    # One-sided bounds match additional fixture records, so only
    # inclusion (not exact equality) is asserted.
    it 'when specifying upper bound' do
      records = described_class.start_event_timestamp_before(2.years.ago)

      expect(attributes(records)).to include(attributes([record]).first)
    end

    it 'when specifying the lower bound' do
      records = described_class.start_event_timestamp_after(4.years.ago)

      expect(attributes(records)).to include(attributes([record]).first)
    end
  end

  describe 'end_event_timestamp filtering' do
    it 'when range is given' do
      records = described_class
        .end_event_timestamp_after(3.years.ago)
        .end_event_timestamp_before(1.year.ago)

      expect(records).to match_attributes([record])
    end

    it 'when specifying upper bound' do
      records = described_class.end_event_timestamp_before(1.year.ago)

      expect(attributes(records)).to include(attributes([record]).first)
    end

    it 'when specifying the lower bound' do
      records = described_class.end_event_timestamp_after(3.years.ago)

      expect(attributes(records)).to include(attributes([record]).first)
    end
  end
end
end

View File

@ -75,6 +75,19 @@ func (p *Injector) Inject(w http.ResponseWriter, r *http.Request, sendData strin
helper.Fail500(w, r, fmt.Errorf("dependency proxy: failed to create request: %w", err))
}
saveFileRequest.Header = helper.HeaderClone(r.Header)
// forward headers from dependencyResponse to rails and client
for key, values := range dependencyResponse.Header {
saveFileRequest.Header.Del(key)
w.Header().Del(key)
for _, value := range values {
saveFileRequest.Header.Add(key, value)
w.Header().Add(key, value)
}
}
// workhorse hijack overwrites the Content-Type header, but we need this header value
saveFileRequest.Header.Set("Workhorse-Proxy-Content-Type", dependencyResponse.Header.Get("Content-Type"))
saveFileRequest.ContentLength = dependencyResponse.ContentLength
nrw := &nullResponseWriter{header: make(http.Header)}

View File

@ -113,8 +113,14 @@ func TestInject(t *testing.T) {
func TestSuccessfullRequest(t *testing.T) {
content := []byte("result")
contentLength := strconv.Itoa(len(content))
contentType := "foo"
dockerContentDigest := "sha256:asdf1234"
overriddenHeader := "originResourceServer"
originResourceServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Length", contentLength)
w.Header().Set("Content-Type", contentType)
w.Header().Set("Docker-Content-Digest", dockerContentDigest)
w.Header().Set("Overridden-Header", overriddenHeader)
w.Write(content)
}))
@ -131,12 +137,16 @@ func TestSuccessfullRequest(t *testing.T) {
require.Equal(t, "/target/upload", uploadHandler.request.URL.Path)
require.Equal(t, int64(6), uploadHandler.request.ContentLength)
require.Equal(t, contentType, uploadHandler.request.Header.Get("Workhorse-Proxy-Content-Type"))
require.Equal(t, dockerContentDigest, uploadHandler.request.Header.Get("Docker-Content-Digest"))
require.Equal(t, overriddenHeader, uploadHandler.request.Header.Get("Overridden-Header"))
require.Equal(t, content, uploadHandler.body)
require.Equal(t, 200, response.Code)
require.Equal(t, string(content), response.Body.String())
require.Equal(t, contentLength, response.Header().Get("Content-Length"))
require.Equal(t, dockerContentDigest, response.Header().Get("Docker-Content-Digest"))
}
func TestIncorrectSendData(t *testing.T) {
@ -177,6 +187,7 @@ func TestFailedOriginServer(t *testing.T) {
func makeRequest(injector *Injector, data string) *httptest.ResponseRecorder {
w := httptest.NewRecorder()
r := httptest.NewRequest("GET", "/target", nil)
r.Header.Set("Overridden-Header", "request")
sendData := base64.StdEncoding.EncodeToString([]byte(data))
injector.Inject(w, r, sendData)