Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-01-20 00:10:22 +00:00
parent 9bc3ee9ad4
commit 2f37c1fa00
67 changed files with 390 additions and 259 deletions

View File

@ -402,6 +402,7 @@ export default {
>
<gl-link
data-testid="incident-link"
data-qa-selector="incident_link"
:href="showIncidentLink(item)"
class="gl-min-w-0"
>

View File

@ -196,14 +196,13 @@ export default {
<div class="ci-widget-content">
<div class="media-body">
<div
class="gl-font-weight-bold"
data-testid="pipeline-info-container"
data-qa-selector="merge_request_pipeline_info_content"
>
{{ pipeline.details.event_type_name || pipeline.details.name }}
<gl-link
:href="pipeline.path"
class="pipeline-id gl-font-weight-normal pipeline-number"
class="pipeline-id"
data-testid="pipeline-id"
data-qa-selector="pipeline_link"
>#{{ pipeline.id }}</gl-link

View File

@ -688,8 +688,7 @@ code {
}
.commit-sha,
.ref-name,
.pipeline-number {
.ref-name {
font-family: $monospace-font;
font-size: 95%;
}

View File

@ -8,7 +8,7 @@ module IssuableCollectionsAction
included do
before_action :check_search_rate_limit!, only: [:issues, :merge_requests], if: -> {
params[:search].present? && Feature.enabled?(:rate_limit_issuable_searches)
params[:search].present?
}
end

View File

@ -28,8 +28,7 @@ class Projects::IssuesController < Projects::ApplicationController
SET_ISSUABLES_INDEX_ONLY_ACTIONS.include?(c.action_name.to_sym) && !index_html_request?
}
before_action :check_search_rate_limit!, if: ->(c) {
SET_ISSUABLES_INDEX_ONLY_ACTIONS.include?(c.action_name.to_sym) && !index_html_request? &&
params[:search].present? && Feature.enabled?(:rate_limit_issuable_searches)
SET_ISSUABLES_INDEX_ONLY_ACTIONS.include?(c.action_name.to_sym) && !index_html_request? && params[:search].present?
}
# Allow write(create) issue

View File

@ -28,9 +28,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
:codequality_mr_diff_reports
]
before_action :set_issuables_index, only: [:index]
before_action :check_search_rate_limit!, only: [:index], if: -> {
params[:search].present? && Feature.enabled?(:rate_limit_issuable_searches)
}
before_action :check_search_rate_limit!, only: [:index], if: -> { params[:search].present? }
before_action :authenticate_user!, only: [:assign_related_issues]
before_action :check_user_can_push_to_source_branch!, only: [:rebase]

View File

@ -41,7 +41,7 @@ module SearchArguments
end
def validate_search_rate_limit!(args)
return if args[:search].blank? || context[:request].nil? || Feature.disabled?(:rate_limit_issuable_searches)
return if args[:search].blank? || context[:request].nil?
if current_user.present?
rate_limiter_key = :search_rate_limit

View File

@ -4,7 +4,6 @@ module Packages
module Debian
class FileEntry
include ActiveModel::Model
include ::Packages::FIPS
DIGESTS = %i[md5 sha1 sha256].freeze
FILENAME_REGEX = %r{\A[a-zA-Z0-9][a-zA-Z0-9_.~+-]*\z}.freeze
@ -32,8 +31,6 @@ module Packages
private
def valid_package_file_digests
raise DisabledError, 'Debian registry is not FIPS compliant' if Gitlab::FIPS.enabled?
DIGESTS.each do |digest|
package_file_digest = package_file["file_#{digest}"]
sum = public_send("#{digest}sum") # rubocop:disable GitlabSecurity/PublicSend

View File

@ -167,6 +167,20 @@ module Notes
if Feature.enabled?(:notes_create_service_tracking, project)
Gitlab::Tracking.event('Notes::CreateService', 'execute', **tracking_data_for(note))
end
if Feature.enabled?(:route_hll_to_snowplow_phase4, project&.namespace) && note.for_commit?
metric_key_path = 'counts.commit_comment'
Gitlab::Tracking.event(
'Notes::CreateService',
'create_commit_comment',
project: project,
namespace: project&.namespace,
user: user,
label: metric_key_path,
context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: metric_key_path).to_context]
)
end
end
def tracking_data_for(note)

View File

@ -3,8 +3,6 @@
module Packages
module Debian
class CreatePackageFileService
include ::Packages::FIPS
def initialize(package:, current_user:, params: {})
@package = package
@current_user = current_user
@ -12,7 +10,6 @@ module Packages
end
def execute
raise DisabledError, 'Debian registry is not FIPS compliant' if Gitlab::FIPS.enabled?
raise ArgumentError, "Invalid package" unless package.present?
raise ArgumentError, "Invalid user" unless current_user.present?

View File

@ -4,7 +4,6 @@ module Packages
module Debian
class ExtractChangesMetadataService
include Gitlab::Utils::StrongMemoize
include ::Packages::FIPS
ExtractionError = Class.new(StandardError)
@ -14,8 +13,6 @@ module Packages
end
def execute
raise DisabledError, 'Debian registry is not FIPS compliant' if Gitlab::FIPS.enabled?
{
file_type: file_type,
architecture: metadata[:architecture],

View File

@ -4,7 +4,6 @@ module Packages
module Debian
class GenerateDistributionService
include Gitlab::Utils::StrongMemoize
include ::Packages::FIPS
include ExclusiveLeaseGuard
ONE_HOUR = 1.hour.freeze
@ -66,13 +65,10 @@ module Packages
def initialize(distribution)
@distribution = distribution
@oldest_kept_generated_at = nil
@md5sum = []
@sha256 = []
end
def execute
raise DisabledError, 'Debian registry is not FIPS compliant' if Gitlab::FIPS.enabled?
try_obtain_lease do
@distribution.transaction do
# We consider `apt-get update` can take at most one hour
@ -143,10 +139,10 @@ module Packages
rfc822_field('Directory', package_dirname(package_file))
]
else
# NB: MD5sum was removed for FIPS compliance
[
rfc822_field('Filename', "#{package_dirname(package_file)}/#{package_file.file_name}"),
rfc822_field('Size', package_file.size),
rfc822_field('MD5sum', package_file.file_md5),
rfc822_field('SHA256', package_file.file_sha256)
]
end
@ -190,7 +186,6 @@ module Packages
)
end
@md5sum.append(" #{file_md5} #{component_file.size.to_s.rjust(8)} #{component_file.relative_path}")
@sha256.append(" #{file_sha256} #{component_file.size.to_s.rjust(8)} #{component_file.relative_path}")
end
@ -234,7 +229,8 @@ module Packages
end
def release_sums
["MD5Sum:", @md5sum, "SHA256:", @sha256].flatten.compact.join("\n") + "\n"
# NB: MD5Sum was removed for FIPS compliance
["SHA256:", @sha256].flatten.compact.join("\n") + "\n"
end
def rfc822_field(name, value, condition = true)

View File

@ -4,7 +4,6 @@ module Packages
module Debian
class GenerateDistributionWorker
include ApplicationWorker
include ::Packages::FIPS
data_consistency :always
include Gitlab::Utils::StrongMemoize
@ -21,8 +20,6 @@ module Packages
loggable_arguments 0
def perform(container_type, distribution_id)
raise DisabledError, 'Debian registry is not FIPS compliant' if Gitlab::FIPS.enabled?
@container_type = container_type
@distribution_id = distribution_id

View File

@ -4,7 +4,6 @@ module Packages
module Debian
class ProcessChangesWorker
include ApplicationWorker
include ::Packages::FIPS
data_consistency :always
include Gitlab::Utils::StrongMemoize
@ -16,8 +15,6 @@ module Packages
feature_category :package_registry
def perform(package_file_id, user_id)
raise DisabledError, 'Debian registry is not FIPS compliant' if Gitlab::FIPS.enabled?
@package_file_id = package_file_id
@user_id = user_id
@ -25,8 +22,6 @@ module Packages
::Packages::Debian::ProcessChangesService.new(package_file, user).execute
rescue StandardError => e
raise if e.instance_of?(DisabledError)
Gitlab::ErrorTracking.log_exception(e, package_file_id: @package_file_id, user_id: @user_id)
package_file.destroy!
end

View File

@ -4,7 +4,6 @@ module Packages
module Debian
class ProcessPackageFileWorker
include ApplicationWorker
include ::Packages::FIPS
include Gitlab::Utils::StrongMemoize
data_consistency :always
@ -16,8 +15,6 @@ module Packages
feature_category :package_registry
def perform(package_file_id, user_id, distribution_name, component_name)
raise DisabledError, 'Debian registry is not FIPS compliant' if Gitlab::FIPS.enabled?
@package_file_id = package_file_id
@user_id = user_id
@distribution_name = distribution_name
@ -29,8 +26,6 @@ module Packages
::Packages::Debian::ProcessPackageFileService.new(package_file, user, distribution_name, component_name).execute
rescue StandardError => e
raise if e.instance_of?(DisabledError)
Gitlab::ErrorTracking.log_exception(e, package_file_id: @package_file_id, user_id: @user_id,
distribution_name: @distribution_name, component_name: @component_name)
package_file.destroy!

View File

@ -0,0 +1,26 @@
---
description: Mirrored `counts.commit_comment` Service Ping metric as a Snowplow event. Emitted when a note for a commit is created.
category: Notes::CreateService
action: create_commit_comment
label_description: Mirrored Service Ping metric key path
property_description:
value_description:
extra_properties:
identifiers:
- project
- user
- namespace
product_section: dev
product_stage: create
product_group: source_code
product_category: source_code_management
milestone: "15.8"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/105148
distributions:
- ce
- ee
tiers:
- free
- premium
- ultimate

View File

@ -1,8 +0,0 @@
---
name: rate_limit_issuable_searches
introduced_by_url: "https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104208"
rollout_issue_url:
milestone: '15.8'
type: development
group: group::project management
default_enabled: false

View File

@ -1,20 +1,16 @@
- title: "Changing merge request approvals with the `/approvals` API endpoint"
announcement_milestone: "12.3"
announcement_date: "2019-09-22"
announcement_milestone: "14.0"
removal_milestone: "16.0"
removal_date: "2023-05-22"
breaking_change: true
reporter: tlinz
stage: Create
issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/353097
body: | # (required) Do not modify this line, instead modify the lines below.
To change the approvals required for a merge request, you should no longer use the `/approvals` API endpoint, which was deprecated in GitLab 12.3.
To change the approvals required for a merge request, you should no longer use the `/approvals` API endpoint, which was deprecated in GitLab 14.0.
Instead, use the [`/approval_rules` endpoint](https://docs.gitlab.com/ee/api/merge_request_approvals.html#merge-request-level-mr-approvals) to [create](https://docs.gitlab.com/ee/api/merge_request_approvals.html#create-merge-request-level-rule) or [update](https://docs.gitlab.com/ee/api/merge_request_approvals.html#update-merge-request-level-rule) the approval rules for a merge request.
#
# OPTIONAL FIELDS
#
end_of_support_milestone:
tiers: Premium
documentation_url: https://docs.gitlab.com/ee/api/merge_request_approvals.html
image_url: # (optional) This is a link to a thumbnail image depicting the feature
video_url: # (optional) Use the youtube thumbnail URL with the structure of https://img.youtube.com/vi/UNIQUEID/hqdefault.jpg
image_url:
video_url:

View File

@ -0,0 +1,12 @@
---
table_name: namespaces_storage_limit_exclusions
classes:
- Namespaces::Storage::LimitExclusion
feature_categories:
- subscription_cost_management
description: |
Stores namespaces that are excluded from the storage limit.
Any namespaces that are included in this table will not have storage limitations applied.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108449
milestone: '15.8'
gitlab_schema: gitlab_main
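
As a hypothetical illustration of how the new table might be used (the `namespace` association name is assumed; only the table and class names above come from this commit):

```ruby
# Hypothetical sketch: record a namespace that should bypass storage limits.
Namespaces::Storage::LimitExclusion.create!(
  namespace: Namespace.find_by_full_path('my-group'), # assumed association name
  reason: 'Contractual exception'                      # reason is limited to 255 characters
)
```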

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
class CreateNamespacesStorageLimitExclusions < Gitlab::Database::Migration[2.1]
enable_lock_retries!
def up
create_table :namespaces_storage_limit_exclusions do |t|
t.references :namespace,
foreign_key: { on_delete: :cascade },
index: true,
null: false
t.text :reason, null: false, limit: 255
t.timestamps_with_timezone null: false
end
end
def down
drop_table :namespaces_storage_limit_exclusions
end
end

View File

@ -0,0 +1 @@
98252d08d480287f1014d7b7c46eafdaa53b9582607e87d5169ec7c314b56ccc

View File

@ -18337,6 +18337,24 @@ CREATE SEQUENCE namespaces_id_seq
ALTER SEQUENCE namespaces_id_seq OWNED BY namespaces.id;
CREATE TABLE namespaces_storage_limit_exclusions (
id bigint NOT NULL,
namespace_id bigint NOT NULL,
reason text NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
CONSTRAINT check_81640b2ee2 CHECK ((char_length(reason) <= 255))
);
CREATE SEQUENCE namespaces_storage_limit_exclusions_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE namespaces_storage_limit_exclusions_id_seq OWNED BY namespaces_storage_limit_exclusions.id;
CREATE TABLE namespaces_sync_events (
id bigint NOT NULL,
namespace_id bigint NOT NULL
@ -24382,6 +24400,8 @@ ALTER TABLE ONLY namespace_statistics ALTER COLUMN id SET DEFAULT nextval('names
ALTER TABLE ONLY namespaces ALTER COLUMN id SET DEFAULT nextval('namespaces_id_seq'::regclass);
ALTER TABLE ONLY namespaces_storage_limit_exclusions ALTER COLUMN id SET DEFAULT nextval('namespaces_storage_limit_exclusions_id_seq'::regclass);
ALTER TABLE ONLY namespaces_sync_events ALTER COLUMN id SET DEFAULT nextval('namespaces_sync_events_id_seq'::regclass);
ALTER TABLE ONLY note_diff_files ALTER COLUMN id SET DEFAULT nextval('note_diff_files_id_seq'::regclass);
@ -26508,6 +26528,9 @@ ALTER TABLE ONLY namespace_statistics
ALTER TABLE ONLY namespaces
ADD CONSTRAINT namespaces_pkey PRIMARY KEY (id);
ALTER TABLE ONLY namespaces_storage_limit_exclusions
ADD CONSTRAINT namespaces_storage_limit_exclusions_pkey PRIMARY KEY (id);
ALTER TABLE ONLY namespaces_sync_events
ADD CONSTRAINT namespaces_sync_events_pkey PRIMARY KEY (id);
@ -30293,6 +30316,8 @@ CREATE INDEX index_namespaces_on_type_and_visibility_and_parent_id ON namespaces
CREATE INDEX index_namespaces_public_groups_name_id ON namespaces USING btree (name, id) WHERE (((type)::text = 'Group'::text) AND (visibility_level = 20));
CREATE INDEX index_namespaces_storage_limit_exclusions_on_namespace_id ON namespaces_storage_limit_exclusions USING btree (namespace_id);
CREATE INDEX index_namespaces_sync_events_on_namespace_id ON namespaces_sync_events USING btree (namespace_id);
CREATE INDEX index_non_requested_project_members_on_source_id_and_type ON members USING btree (source_id, source_type) WHERE ((requested_at IS NULL) AND ((type)::text = 'ProjectMember'::text));
@ -34264,6 +34289,9 @@ ALTER TABLE ONLY diff_note_positions
ALTER TABLE ONLY analytics_cycle_analytics_aggregations
ADD CONSTRAINT fk_rails_13c8374c7a FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY namespaces_storage_limit_exclusions
ADD CONSTRAINT fk_rails_14e8f7b0e0 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY users_security_dashboard_projects
ADD CONSTRAINT fk_rails_150cd5682c FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;

View File

@ -156,16 +156,17 @@ Set the limit to `0` to disable it.
### Search rate limit
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/80631) in GitLab 14.9.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/80631) in GitLab 14.9.
> - [Changed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104208) to include issue, merge request, and epic searches in the rate limit in GitLab 15.9.
This setting limits global search requests as follows:
This setting limits search requests as follows:
| Limit | Default (requests per minute) |
|-------------------------|-------------------------------|
| Authenticated user | 30 |
| Unauthenticated user | 10 |
Depending on the number of enabled [scopes](../user/search/index.md#global-search-scopes), a global search request can consume two to seven requests per minute. You may want to disable one or more scopes to use fewer requests. Global search requests that exceed the search rate limit per minute return the following error:
Depending on the number of enabled [scopes](../user/search/index.md#global-search-scopes), a global search request can consume two to seven requests per minute. You may want to disable one or more scopes to use fewer requests. Search requests that exceed the search rate limit per minute return the following error:
```plaintext
This endpoint has been requested too many times. Try again later.

View File

@ -21,10 +21,6 @@ for production use due to limited functionality.
For instructions on how to upload and install Debian packages from the GitLab
package registry, see the [Debian registry documentation](../../user/packages/debian_repository/index.md).
NOTE:
The Debian registry is not FIPS compliant and is disabled when [FIPS mode](../../development/fips_compliance.md) is enabled.
These endpoints will all return `404 Not Found`.
NOTE:
These endpoints do not adhere to the standard API authentication methods.
See the [Debian registry documentation](../../user/packages/debian_repository/index.md)

View File

@ -18,10 +18,6 @@ This API is under development and is not meant for production use.
For more information about working with Debian packages, see the
[Debian package registry documentation](../../user/packages/debian_repository/index.md).
NOTE:
The Debian registry is not FIPS compliant and is disabled when [FIPS mode](../../development/fips_compliance.md) is enabled.
These endpoints will all return `404 Not Found`.
## Enable the Debian group API
Debian group repository support is still a work in progress. It's gated behind a feature flag that's

View File

@ -18,10 +18,6 @@ This API is under development and is not meant for production use.
For more information about working with Debian packages, see the
[Debian package registry documentation](../../user/packages/debian_repository/index.md).
NOTE:
The Debian registry is not FIPS compliant and is disabled when [FIPS mode](../../development/fips_compliance.md) is enabled.
These endpoints will all return `404 Not Found`.
## Enable the Debian API
The Debian API is behind a feature flag that is disabled by default.

View File

@ -118,6 +118,22 @@ To deprecate a page or topic:
You can add any additional context-specific details that might help users.
1. Add the following HTML comments above and below the content.
For the `remove_date`, set a date three months after the release where it
was deprecated.
```markdown
<!--- start_remove The following content will be removed on remove_date: 'YYYY-MM-DD' -->
## Title (deprecated) **(ULTIMATE SELF)**
WARNING:
This feature was [deprecated](<link-to-issue>) in GitLab 14.8
and is planned for removal in 15.4. Use [feature X](<link-to-issue>) instead.
<!--- end_remove -->
```
1. Open a merge request to add the word `(deprecated)` to the left nav, after the page title.
### Remove a page

View File

@ -172,7 +172,7 @@ The jobs are separated into stages:
are allowed to fail in the test stage:
- The `test` job runs unit and integration tests by detecting the language and
framework ([Auto Test](../stages.md#auto-test))
framework ([Auto Test](../stages.md#auto-test-deprecated))
- The `code_quality` job checks the code quality and is allowed to fail
([Auto Code Quality](../stages.md#auto-code-quality))
- The `container_scanning` job checks the Docker container if it has any

View File

@ -52,7 +52,13 @@ Specify either:
- The CI/CD variable `BUILDPACK_URL` with any of [`pack`'s URI specification formats](https://buildpacks.io/docs/app-developer-guide/specify-buildpacks/).
- A [`project.toml` project descriptor](https://buildpacks.io/docs/app-developer-guide/using-project-descriptor/) with the buildpacks you would like to include.
### Customize buildpacks with Herokuish
<!--- start_remove The following content will be removed on remove_date: '2024-08-22' -->
### Customize buildpacks with Herokuish (deprecated)
WARNING:
Support for Herokuish was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108234) in GitLab 15.8,
and is planned for removal in 17.0. Use [Cloud Native Buildpacks](stages.md#moving-from-herokuish-to-cloud-native-buildpacks) instead.
Specify either:
@ -69,6 +75,8 @@ reference:
- The branch `mybranch`: `https://github.com/heroku/heroku-buildpack-ruby.git#mybranch`.
- The commit SHA `f97d8a8ab49`: `https://github.com/heroku/heroku-buildpack-ruby.git#f97d8a8ab49`.
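
For illustration, a minimal `.gitlab-ci.yml` sketch using the branch form above (the buildpack URL is only an example):

```yaml
# Sketch only: point Auto Build at a specific buildpack branch via a CI/CD variable.
variables:
  BUILDPACK_URL: "https://github.com/heroku/heroku-buildpack-ruby.git#mybranch"
```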
<!--- end_remove -->
### Multiple buildpacks
Because Auto Test cannot use the `.buildpacks` file, Auto DevOps does

View File

@ -32,7 +32,7 @@ Auto DevOps supports development during each of the [DevOps stages](stages.md).
|---------|-------------|
| Build | [Auto Build](stages.md#auto-build) |
| Build | [Auto Dependency Scanning](stages.md#auto-dependency-scanning) |
| Test | [Auto Test](stages.md#auto-test) |
| Test | [Auto Test](stages.md#auto-test-deprecated) |
| Test | [Auto Browser Performance Testing](stages.md#auto-browser-performance-testing) |
| Test | [Auto Code Intelligence](stages.md#auto-code-intelligence) |
| Test | [Auto Code Quality](stages.md#auto-code-quality) |

View File

@ -89,10 +89,16 @@ buildjob:
Read more about defining volumes in the [`pack build` documentation](https://buildpacks.io/docs/tools/pack/cli/pack_build/).
### Auto Build using Herokuish
<!--- start_remove The following content will be removed on remove_date: '2024-08-22' -->
### Auto Build using Herokuish (deprecated)
> [Replaced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63351) with Cloud Native Buildpacks in GitLab 14.0.
WARNING:
Support for Herokuish was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108234) in GitLab 15.8,
and is planned for removal in 17.0. Use [Cloud Native Buildpacks](#moving-from-herokuish-to-cloud-native-buildpacks) instead.
Prior to GitLab 14.0, [Herokuish](https://github.com/gliderlabs/herokuish) was
the default build method for projects without a `Dockerfile`. Herokuish can
still be used by setting the CI/CD variable `AUTO_DEVOPS_BUILD_IMAGE_CNB_ENABLED`
@ -103,6 +109,8 @@ If Auto Build fails despite the project meeting the buildpack requirements, set
a project CI/CD variable `TRACE=true` to enable verbose logging, which may help you
troubleshoot.
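
As a rough sketch (the value that keeps Herokuish enabled is assumed here, since it falls outside this hunk):

```yaml
# Assumed values, for illustration only.
variables:
  AUTO_DEVOPS_BUILD_IMAGE_CNB_ENABLED: "false"  # assumed opt-out value to keep building with Herokuish
  TRACE: "true"                                 # verbose Auto Build logging for troubleshooting
```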
<!--- end_remove -->
### Moving from Herokuish to Cloud Native Buildpacks
Builds using Cloud Native Buildpacks support the same options as builds using
@ -118,7 +126,15 @@ Herokuish, with the following caveats:
Instead, custom commands should be prefixed with `/cnb/lifecycle/launcher`
to receive the correct execution environment.
## Auto Test
<!--- start_remove The following content will be removed on remove_date: '2024-08-22' -->
## Auto Test (deprecated)
WARNING:
Support for Herokuish was
[deprecated](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108234)
in GitLab 15.8, and is planned for removal in 17.0. Because Auto Test uses
Herokuish, Auto Test is also deprecated.
Auto Test runs the appropriate tests for your application using
[Herokuish](https://github.com/gliderlabs/herokuish) and
@ -140,6 +156,7 @@ Cloud Native Buildpacks, and only buildpacks that implement the
[Testpack API](https://devcenter.heroku.com/articles/testpack-api) are supported.
<!-- vale gitlab.Spelling = YES -->
<!--- end_remove -->
### Currently supported languages
@ -502,7 +519,7 @@ For example, in a Rails application in an image built with
Unless your repository contains a `Dockerfile`, your image is built with
Cloud Native Buildpacks, and you must prefix commands run in these images with
`/cnb/lifecycle/launcher`, (or `/bin/herokuish procfile exec` when
using [Herokuish](#auto-build-using-herokuish))
using [Herokuish](#auto-build-using-herokuish-deprecated))
to replicate the environment where your
application runs.

View File

@ -3046,6 +3046,22 @@ This will result in the rename of the sub-chart: `gitlab/task-runner` to `gitlab
## Announced in 14.0
<div class="deprecation removal-160 breaking-change">
### Changing merge request approvals with the `/approvals` API endpoint
Planned removal: GitLab <span class="removal-milestone">16.0</span> <span class="removal-date"></span>
WARNING:
This is a [breaking change](https://docs.gitlab.com/ee/development/deprecation_guidelines/).
Review the details carefully before upgrading.
To change the approvals required for a merge request, you should no longer use the `/approvals` API endpoint, which was deprecated in GitLab 14.0.
Instead, use the [`/approval_rules` endpoint](https://docs.gitlab.com/ee/api/merge_request_approvals.html#merge-request-level-mr-approvals) to [create](https://docs.gitlab.com/ee/api/merge_request_approvals.html#create-merge-request-level-rule) or [update](https://docs.gitlab.com/ee/api/merge_request_approvals.html#update-merge-request-level-rule) the approval rules for a merge request.
</div>
<div class="deprecation removal-156">
### NFS for Git repository storage

View File

@ -34,18 +34,6 @@ For removal reviewers (Technical Writers only):
https://about.gitlab.com/handbook/marketing/blog/release-posts/#update-the-removals-doc
-->
## Removed in 16.0
### Changing merge request approvals with the `/approvals` API endpoint
WARNING:
This is a [breaking change](https://docs.gitlab.com/ee/development/deprecation_guidelines/).
Review the details carefully before upgrading.
To change the approvals required for a merge request, you should no longer use the `/approvals` API endpoint, which was deprecated in GitLab 12.3.
Instead, use the [`/approval_rules` endpoint](https://docs.gitlab.com/ee/api/merge_request_approvals.html#merge-request-level-mr-approvals) to [create](https://docs.gitlab.com/ee/api/merge_request_approvals.html#create-merge-request-level-rule) or [update](https://docs.gitlab.com/ee/api/merge_request_approvals.html#update-merge-request-level-rule) the approval rules for a merge request.
## Removed in 15.8
### CiliumNetworkPolicy within the auto deploy Helm chart is removed

View File

@ -95,7 +95,7 @@ Our criteria for the separation of duties is as follows:
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/213364) in GitLab 13.3.
> - Chain of Custody reports sent using email [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/342594) in GitLab 15.3 with a flag named `async_chain_of_custody_report`. Disabled by default.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/370100) in GitLab 15.5. Feature flag `async_chain_of_custody_report` removed.
> - Chain of Custody report includes all commits (instead of just merge commits) [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/267601) in GitLab 15.8 with a flag named `all_commits_compliance_report`. Disabled by default.
> - Chain of Custody report includes all commits (instead of just merge commits) [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/267601) in GitLab 15.9 with a flag named `all_commits_compliance_report`. Disabled by default.
FLAG:
On self-managed GitLab, by default the Chain of Custody report only contains information on merge commits. To make the report contain information on all commits to projects within a group, ask an administrator to [enable the feature flag](../../../administration/feature_flags.md) named `all_commits_compliance_report`. On GitLab.com, this feature is not available.
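
For instance, a minimal sketch of enabling the flag from a Rails console on a self-managed instance:

```ruby
# Sketch only: enable the feature flag named in the note above.
Feature.enable(:all_commits_compliance_report)
```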

View File

@ -15,9 +15,6 @@ The Debian package registry for GitLab is under development and isn't ready for
limited functionality. This [epic](https://gitlab.com/groups/gitlab-org/-/epics/6057) details the remaining
work and timelines to make it production ready.
NOTE:
The Debian registry is not FIPS compliant and is disabled when [FIPS mode](../../../development/fips_compliance.md) is enabled.
Publish Debian packages in your project's Package Registry. Then install the
packages whenever you need to use them as a dependency.

View File

@ -56,6 +56,9 @@ To add your custom domain to GitLab Pages:
1. On the top bar, select **Main menu > Projects** and find your project.
1. On the left sidebar, select **Settings > Pages**.
If this path is not visible, select **Deployments > Pages**.
[This location is part of an experiment](../index.md#menu-position-test).
1. In the top right, select **New Domain**.
1. In **Domain**, enter your domain.
1. Optional. In **Certificate**, turn off the **Automatic certificate management using Let's Encrypt** toggle to add an [SSL/TLS certificate](#adding-an-ssltls-certificate-to-pages). You can also add the certificate and key later.

View File

@ -127,7 +127,7 @@ Items that are **not** exported include:
- Pipeline triggers
- Webhooks
- Any encrypted tokens
- [Number of required approvals](https://gitlab.com/gitlab-org/gitlab/-/issues/221088)
- [Number of required approvals](https://gitlab.com/gitlab-org/gitlab/-/issues/221087)
- Repository size limits
- Deploy keys allowed to push to protected branches
- Secure Files

View File

@ -6,10 +6,6 @@ module API
project_id: %r{[0-9]+}.freeze
).freeze
before do
not_found! if Gitlab::FIPS.enabled?
end
resource :groups, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
helpers do
def project_or_group

View File

@ -14,10 +14,6 @@ module API
file_name: API::NO_SLASH_URL_PART_REGEX
}.freeze
before do
not_found! if Gitlab::FIPS.enabled?
end
resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
helpers do
def project_or_group

View File

@ -6,10 +6,6 @@ module API
requires :id, types: [String, Integer], desc: 'The ID or URL-encoded path of the group'
end
before do
not_found! if Gitlab::FIPS.enabled?
end
resource :groups, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
after_validation do
require_packages_enabled!

View File

@ -698,8 +698,6 @@ module API
end
def validate_search_rate_limit!
return unless Feature.enabled?(:rate_limit_issuable_searches)
if current_user
check_rate_limit!(:search_rate_limit, scope: [current_user])
else

View File

@ -6,10 +6,6 @@ module API
requires :id, types: [String, Integer], desc: 'The ID or URL-encoded path of the project'
end
before do
not_found! if Gitlab::FIPS.enabled?
end
resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
after_validation do
require_packages_enabled!

View File

@ -5,13 +5,11 @@ module QA
module AlertSettings
extend self
def setup_http_endpoint(
integration_name: random_word,
payload: { title: random_word, description: random_word },
send: true
)
credentials = {}
def go_to_monitor_settings
Page::Project::Menu.perform(&:go_to_monitor_settings)
end
def setup_http_endpoint_integration(integration_name: random_word)
Page::Project::Settings::Monitor.perform do |setting|
setting.expand_alerts do |alert|
alert.add_new_integration
@ -19,23 +17,11 @@ module QA
alert.enter_integration_name(integration_name)
alert.activate_integration
alert.save_and_create_alert
if send
alert.fill_in_test_payload(payload.to_json)
alert.send_test_alert
else
alert.go_to_view_credentials
credentials = { url: alert.webhook_url, auth_key: alert.authorization_key }
end
end
end
credentials
end
def setup_prometheus(payload: { title: random_word, description: random_word }, send: true)
credentials = {}
Page::Project::Menu.perform(&:go_to_monitor_settings)
def setup_prometheus_integration
Page::Project::Settings::Monitor.perform do |setting|
setting.expand_alerts do |alert|
alert.add_new_integration
@ -43,20 +29,38 @@ module QA
alert.activate_integration
alert.fill_in_prometheus_url
alert.save_and_create_alert
if send
alert.fill_in_test_payload(payload.to_json)
alert.send_test_alert
else
alert.go_to_view_credentials
credentials = { url: alert.webhook_url, auth_key: alert.authorization_key }
end
end
end
end
def send_test_alert(payload: { title: random_word, description: random_word })
Page::Project::Settings::Alerts.perform do |alert|
alert.fill_in_test_payload(payload.to_json)
alert.send_test_alert
end
end
def integration_credentials
credentials = {}
Page::Project::Settings::Alerts.perform do |alert|
alert.go_to_view_credentials
credentials = { url: alert.webhook_url, auth_key: alert.authorization_key }
end
credentials
end
def enable_create_incident
Page::Project::Settings::Monitor.perform do |setting|
setting.expand_alerts do |alert|
alert.go_to_alert_settings
alert.enable_incident_for_alert
alert.save_alert_settings
alert.click_button('Collapse')
end
end
end
private
def random_word

View File

@ -8,11 +8,16 @@ module QA
class Index < Page::Base
view 'app/assets/javascripts/incidents/components/incidents_list.vue' do
element :create_incident_button
element :incident_link
end
def create_incident
click_element :create_incident_button
end
def has_incident?(wait: Support::Repeater::DEFAULT_MAX_WAIT_TIME)
wait_until(max_duration: wait) { has_element?(:incident_link) }
end
end
end
end

View File

@ -26,8 +26,12 @@ module QA
element :prometheus_url_field
end
def go_to_alert_settings
click_link_with_text('Alert settings')
end
def enable_incident_for_alert
check_element(:create_incident_checkbox)
check_element(:create_incident_checkbox, true)
end
def select_issue_template(template)
@ -37,7 +41,7 @@ module QA
end
end
def save_incident_settings
def save_alert_settings
click_element :save_changes_button
end

View File

@ -10,9 +10,9 @@ module QA
end
attributes :id,
:status,
:ref,
:sha
:status,
:ref,
:sha
# array in form
# [
@ -49,6 +49,10 @@ module QA
"/projects/#{project.id}/pipelines/#{id}"
end
def api_pipeline_jobs_path
"#{api_get_path}/jobs"
end
def api_post_path
"/projects/#{project.id}/pipeline"
end
@ -93,6 +97,10 @@ module QA
result[:downstream_pipeline][:id]
end
def pipeline_jobs
parse_body(api_get_from(api_pipeline_jobs_path))
end
end
end
end

View File

@ -198,6 +198,10 @@ module QA
"#{api_get_path}/pipelines"
end
def api_latest_pipeline_path
"#{api_pipelines_path}/latest"
end
def api_pipeline_schedules_path
"#{api_get_path}/pipeline_schedules"
end
@ -400,6 +404,10 @@ module QA
auto_paginated_response(request_url(api_pipelines_path, per_page: '100'), attempts: attempts)
end
def latest_pipeline
parse_body(api_get_from(api_latest_pipeline_path))
end
def jobs
response = get(request_url(api_jobs_path))
parse_body(response)

View File

@ -29,6 +29,9 @@ module QA
{
file_path: '.gitlab-ci.yml',
content: <<~YAML
default:
tags: ["#{executor}"]
stages:
- Stage1
- Stage2
@ -36,26 +39,22 @@ module QA
Prep:
stage: Stage1
tags: ["#{executor}"]
script: exit 0
when: manual
Build:
stage: Stage2
tags: ["#{executor}"]
needs: ['Prep']
script: exit 0
parallel: 6
Test:
stage: Stage3
tags: ["#{executor}"]
needs: ['Build']
script: exit 0
Deploy:
stage: Stage3
tags: ["#{executor}"]
needs: ['Test']
script: exit 0
parallel: 6
@ -67,6 +66,8 @@ module QA
end
before do
make_sure_to_have_a_skipped_pipeline
Flow::Login.sign_in
project.visit!
Flow::Pipeline.visit_latest_pipeline(status: 'skipped')
@ -84,7 +85,7 @@ module QA
show.click_job_action('Prep') # Trigger pipeline manually
show.wait_until(max_duration: 300, sleep_interval: 2, reload: false) do
project.pipelines.last[:status] == 'success'
project.latest_pipeline[:status] == 'success'
end
aggregate_failures do
@ -99,6 +100,50 @@ module QA
end
end
end
private
# Wait for the first pipeline to finish and have "skipped" status
# If it takes too long, create a new pipeline and retry (2 times)
def make_sure_to_have_a_skipped_pipeline
attempts ||= 1
Runtime::Logger.info('Waiting for pipeline to have status "skipped"...')
Support::Waiter.wait_until(max_duration: 120, sleep_interval: 3) do
project.latest_pipeline[:status] == 'skipped'
end
rescue Support::Repeater::WaitExceededError
raise 'Failed to create skipped pipeline after 3 attempts.' unless (attempts += 1) < 4
Runtime::Logger.debug(
"Previous pipeline took too long to finish. Potential jobs with problems:\n#{problematic_jobs}"
)
Runtime::Logger.info("Triggering a new pipeline...")
trigger_new_pipeline
retry
end
def trigger_new_pipeline
original_count = project.pipelines.length
Resource::Pipeline.fabricate_via_api! do |pipeline|
pipeline.project = project
end
Support::Waiter.wait_until(sleep_interval: 1) { project.pipelines.length > original_count }
end
# We know that all the jobs in the pipeline are purposely skipped
# The pipeline should have status "skipped" almost right away after being created
# If the pipeline is held up, it is likely because there are some jobs that
# don't have either "skipped" or "manual" status
def problematic_jobs
pipeline = Resource::Pipeline.fabricate_via_api! do |pipeline|
pipeline.project = project
pipeline.id = project.latest_pipeline[:id]
end
acceptable_statuses = %w[skipped manual]
pipeline.pipeline_jobs.select { |job| !(acceptable_statuses.include? job[:status]) }
end
end
end
end

View File

@ -24,6 +24,7 @@ module QA
before do
Flow::Login.sign_in
project.visit!
Flow::AlertSettings.go_to_monitor_settings
end
context(
@ -35,7 +36,8 @@ module QA
end
before do
Flow::AlertSettings.setup_http_endpoint(payload: payload)
Flow::AlertSettings.setup_http_endpoint_integration
Flow::AlertSettings.send_test_alert(payload: payload)
end
it_behaves_like 'sends test alert'
@ -73,7 +75,8 @@ module QA
end
before do
Flow::AlertSettings.setup_prometheus(payload: payload)
Flow::AlertSettings.setup_prometheus_integration
Flow::AlertSettings.send_test_alert(payload: payload)
end
it_behaves_like 'sends test alert'

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Monitor', product_group: :respond do
describe 'Alert' do
let(:project) do
Resource::Project.fabricate_via_api! do |project|
project.name = 'project-for-alerts'
project.description = 'Project for alerts'
end
end
before do
Flow::Login.sign_in
project.visit!
Flow::AlertSettings.go_to_monitor_settings
Flow::AlertSettings.enable_create_incident
Flow::AlertSettings.setup_http_endpoint_integration
Flow::AlertSettings.send_test_alert
end
it(
'can automatically create incident',
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/388469'
) do
Page::Project::Menu.perform(&:go_to_monitor_incidents)
Page::Project::Monitor::Incidents::Index.perform do |index|
expect(index).to have_incident
end
end
end
end
end

View File

@ -36,9 +36,14 @@ module QA
let(:alert_title) { Faker::Lorem.word }
let(:credentials) do
Flow::AlertSettings.integration_credentials
end
before do
Flow::Login.sign_in
project.visit!
Flow::AlertSettings.go_to_monitor_settings
end
context(
@ -49,8 +54,8 @@ module QA
{ title: alert_title, description: alert_title }
end
let(:credentials) do
Flow::AlertSettings.setup_http_endpoint(send: false)
before do
Flow::AlertSettings.setup_http_endpoint_integration
end
it_behaves_like 'sends test alert using authorization key', 'http'
@ -87,8 +92,8 @@ module QA
}
end
let(:credentials) do
Flow::AlertSettings.setup_prometheus(send: false)
before do
Flow::AlertSettings.setup_prometheus_integration
end
it_behaves_like 'sends test alert using authorization key'

View File

@ -31,13 +31,6 @@ RSpec.describe Packages::Debian::FileEntry, type: :model do
describe 'validations' do
it { is_expected.to be_valid }
context 'with FIPS mode', :fips_mode do
it 'raises an error' do
expect { subject.validate! }
.to raise_error(::Packages::FIPS::DisabledError, 'Debian registry is not FIPS compliant')
end
end
describe '#filename' do
it { is_expected.to validate_presence_of(:filename) }
it { is_expected.not_to allow_value('Hé').for(:filename) }

View File

@ -116,6 +116,35 @@ RSpec.describe Notes::CreateService do
end
end
context 'in a commit', :snowplow do
let_it_be(:commit) { create(:commit, project: project) }
let(:opts) { { note: 'Awesome comment', noteable_type: 'Commit', commit_id: commit.id } }
let(:counter) { Gitlab::UsageDataCounters::NoteCounter }
let(:execute_create_service) { described_class.new(project, user, opts).execute }
before do
stub_feature_flags(notes_create_service_tracking: false)
end
it 'tracks commit comment usage data', :clean_gitlab_redis_shared_state do
expect(counter).to receive(:count).with(:create, 'Commit').and_call_original
expect do
execute_create_service
end.to change { counter.read(:create, 'Commit') }.by(1)
end
it_behaves_like 'Snowplow event tracking with Redis context' do
let(:category) { described_class.name }
let(:action) { 'create_commit_comment' }
let(:label) { 'counts.commit_comment' }
let(:namespace) { project.namespace }
let(:feature_flag_name) { :route_hll_to_snowplow_phase4 }
end
end
describe 'event tracking', :snowplow do
let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_ADDED }
let(:execute_create_service) { described_class.new(project, user, opts).execute }

View File

@ -137,13 +137,5 @@ RSpec.describe Packages::Debian::CreatePackageFileService do
expect { package_file }.to raise_error(ActiveRecord::RecordInvalid)
end
end
context 'when FIPS mode enabled', :fips_mode do
let(:file) { nil }
it 'raises an error' do
expect { package_file }.to raise_error(::Packages::FIPS::DisabledError)
end
end
end
end

View File

@ -13,12 +13,6 @@ RSpec.describe Packages::Debian::ExtractChangesMetadataService do
subject { service.execute }
context 'with FIPS mode enabled', :fips_mode do
it 'raises an error' do
expect { subject }.to raise_error(::Packages::FIPS::DisabledError)
end
end
context 'with valid package file' do
it 'extract metadata', :aggregate_failures do
expected_fields = { 'Architecture' => 'source amd64', 'Binary' => 'libsample0 sample-dev sample-udeb' }

View File

@ -15,12 +15,6 @@ RSpec.describe Packages::Debian::GenerateDistributionService do
context "for #{container_type}" do
include_context 'with Debian distribution', container_type
context 'with FIPS mode enabled', :fips_mode do
it 'raises an error' do
expect { subject }.to raise_error(::Packages::FIPS::DisabledError)
end
end
it_behaves_like 'Generate Debian Distribution and component files'
end
end

View File

@ -52,3 +52,12 @@ RSpec.shared_examples 'Snowplow event tracking with RedisHLL context' do |overri
end
end
end
RSpec.shared_examples 'Snowplow event tracking with Redis context' do |overrides: {}|
it_behaves_like 'Snowplow event tracking', overrides: overrides do
let(:context) do
key_path = try(:label) || action
[Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: key_path).to_context.to_json]
end
end
end

View File

@ -15,9 +15,3 @@ RSpec.shared_examples 'rejects Debian access with unknown container id' do |anon
end
end
end
RSpec.shared_examples 'Debian API FIPS mode' do
context 'when FIPS mode is enabled', :fips_mode do
it_behaves_like 'returning response status', :not_found
end
end

View File

@ -3,8 +3,6 @@
RSpec.shared_examples 'Debian distributions GET request' do |status, body = nil|
and_body = body.nil? ? '' : ' and expected body'
it_behaves_like 'Debian API FIPS mode'
it "returns #{status}#{and_body}" do
subject
@ -19,8 +17,6 @@ end
RSpec.shared_examples 'Debian distributions PUT request' do |status, body|
and_body = body.nil? ? '' : ' and expected body'
it_behaves_like 'Debian API FIPS mode'
if status == :success
it 'updates distribution', :aggregate_failures do
expect(::Packages::Debian::UpdateDistributionService).to receive(:new).with(distribution, api_params.except(:codename)).and_call_original
@ -53,8 +49,6 @@ end
RSpec.shared_examples 'Debian distributions DELETE request' do |status, body|
and_body = body.nil? ? '' : ' and expected body'
it_behaves_like 'Debian API FIPS mode'
if status == :success
it 'updates distribution', :aggregate_failures do
expect { subject }

View File

@ -3,8 +3,6 @@
RSpec.shared_examples 'Debian packages GET request' do |status, body = nil|
and_body = body.nil? ? '' : ' and expected body'
it_behaves_like 'Debian API FIPS mode'
it "returns #{status}#{and_body}" do
subject
@ -19,8 +17,6 @@ end
RSpec.shared_examples 'Debian packages upload request' do |status, body = nil|
and_body = body.nil? ? '' : ' and expected body'
it_behaves_like 'Debian API FIPS mode'
if status == :created
it 'creates package files', :aggregate_failures do
expect(::Packages::Debian::FindOrCreateIncomingService).to receive(:new).with(container, user).and_call_original

View File

@ -49,20 +49,4 @@ RSpec.shared_examples 'issuable API rate-limited search' do
get api(url), params: { scope: 'all', search: issuable.title }
end
end
context 'when rate_limit_issuable_searches is disabled', :freeze_time, :clean_gitlab_redis_rate_limiting do
before do
stub_feature_flags(rate_limit_issuable_searches: false)
allow(Gitlab::ApplicationRateLimiter).to receive(:threshold)
.with(:search_rate_limit_unauthenticated).and_return(1)
end
it 'does not enforce the rate limit' do
get api(url), params: { scope: 'all', search: issuable.title }
get api(url), params: { scope: 'all', search: issuable.title }
expect(response).to have_gitlab_http_status(:ok)
end
end
end

View File

@ -4,6 +4,8 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do
def check_release_files(expected_release_content)
distribution.reload
expect(expected_release_content).not_to include('MD5')
distribution.file.use_file do |file_path|
expect(File.read(file_path)).to eq(expected_release_content)
end
@ -45,6 +47,7 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do
expect(component_file.updated_at).to eq(release_date)
unless expected_content.nil?
expect(expected_content).not_to include('MD5')
component_file.file.use_file do |file_path|
expect(File.read(file_path)).to eq(expected_content)
end
@ -93,7 +96,6 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do
Priority: optional
Filename: #{pool_prefix}/libsample0_1.2.3~alpha2_amd64.deb
Size: 409600
MD5sum: #{package_files[2].file_md5}
SHA256: #{package_files[2].file_sha256}
Package: sample-dev
@ -113,7 +115,6 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do
Priority: optional
Filename: #{pool_prefix}/sample-dev_1.2.3~binary_amd64.deb
Size: 409600
MD5sum: #{package_files[3].file_md5}
SHA256: #{package_files[3].file_sha256}
EOF
@ -122,7 +123,6 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do
Priority: extra
Filename: #{pool_prefix}/sample-udeb_1.2.3~alpha2_amd64.udeb
Size: 409600
MD5sum: #{package_files[4].file_md5}
SHA256: #{package_files[4].file_sha256}
EOF
@ -171,19 +171,15 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do
check_component_file(current_time.round, 'contrib', :sources, nil, nil)
main_amd64_size = expected_main_amd64_content.length
main_amd64_md5sum = Digest::MD5.hexdigest(expected_main_amd64_content)
main_amd64_sha256 = Digest::SHA256.hexdigest(expected_main_amd64_content)
contrib_all_size = component_file1.size
contrib_all_md5sum = component_file1.file_md5
contrib_all_sha256 = component_file1.file_sha256
main_amd64_di_size = expected_main_amd64_di_content.length
main_amd64_di_md5sum = Digest::MD5.hexdigest(expected_main_amd64_di_content)
main_amd64_di_sha256 = Digest::SHA256.hexdigest(expected_main_amd64_di_content)
main_sources_size = expected_main_sources_content.length
main_sources_md5sum = Digest::MD5.hexdigest(expected_main_sources_content)
main_sources_sha256 = Digest::SHA256.hexdigest(expected_main_sources_content)
expected_release_content = <<~EOF
@ -193,21 +189,6 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do
Acquire-By-Hash: yes
Architectures: all amd64 arm64
Components: contrib main
MD5Sum:
#{contrib_all_md5sum} #{contrib_all_size} contrib/binary-all/Packages
d41d8cd98f00b204e9800998ecf8427e 0 contrib/debian-installer/binary-all/Packages
d41d8cd98f00b204e9800998ecf8427e 0 contrib/binary-amd64/Packages
d41d8cd98f00b204e9800998ecf8427e 0 contrib/debian-installer/binary-amd64/Packages
d41d8cd98f00b204e9800998ecf8427e 0 contrib/binary-arm64/Packages
d41d8cd98f00b204e9800998ecf8427e 0 contrib/debian-installer/binary-arm64/Packages
d41d8cd98f00b204e9800998ecf8427e 0 contrib/source/Sources
d41d8cd98f00b204e9800998ecf8427e 0 main/binary-all/Packages
d41d8cd98f00b204e9800998ecf8427e 0 main/debian-installer/binary-all/Packages
#{main_amd64_md5sum} #{main_amd64_size} main/binary-amd64/Packages
#{main_amd64_di_md5sum} #{main_amd64_di_size} main/debian-installer/binary-amd64/Packages
d41d8cd98f00b204e9800998ecf8427e 0 main/binary-arm64/Packages
d41d8cd98f00b204e9800998ecf8427e 0 main/debian-installer/binary-arm64/Packages
#{main_sources_md5sum} #{main_sources_size} main/source/Sources
SHA256:
#{contrib_all_sha256} #{contrib_all_size} contrib/binary-all/Packages
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/debian-installer/binary-all/Packages
@ -251,7 +232,6 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do
Date: Sat, 25 Jan 2020 15:17:18 +0000
Valid-Until: Mon, 27 Jan 2020 15:17:18 +0000
Acquire-By-Hash: yes
MD5Sum:
SHA256:
EOF

View File

@ -18,12 +18,6 @@ RSpec.describe Packages::Debian::GenerateDistributionWorker, type: :worker do
context "for #{container_type}" do
include_context 'with Debian distribution', container_type
context 'with FIPS mode enabled', :fips_mode do
it 'raises an error' do
expect { subject }.to raise_error(::Packages::FIPS::DisabledError)
end
end
context 'with mocked service' do
it 'calls GenerateDistributionService' do
expect(Gitlab::ErrorTracking).not_to receive(:log_exception)

View File

@ -16,12 +16,6 @@ RSpec.describe Packages::Debian::ProcessChangesWorker, type: :worker do
subject { worker.perform(package_file_id, user_id) }
context 'with FIPS mode enabled', :fips_mode do
it 'raises an error' do
expect { subject }.to raise_error(::Packages::FIPS::DisabledError)
end
end
context 'with mocked service' do
it 'calls ProcessChangesService' do
expect(Gitlab::ErrorTracking).not_to receive(:log_exception)

View File

@ -116,12 +116,6 @@ RSpec.describe Packages::Debian::ProcessPackageFileWorker, type: :worker, featur
let(:package_file) { incoming.package_files.with_file_name('libsample0_1.2.3~alpha2_amd64.deb').first }
let(:component_name) { 'main' }
context 'with FIPS mode enabled', :fips_mode do
it 'raises an error' do
expect { subject }.to raise_error(::Packages::FIPS::DisabledError)
end
end
context 'with non existing package file' do
let(:package_file_id) { non_existing_record_id }

View File

@ -26,7 +26,7 @@ require (
github.com/sirupsen/logrus v1.9.0
github.com/smartystreets/goconvey v1.7.2
github.com/stretchr/testify v1.8.1
gitlab.com/gitlab-org/gitaly/v15 v15.7.0
gitlab.com/gitlab-org/gitaly/v15 v15.7.5
gitlab.com/gitlab-org/golang-archive-zip v0.1.1
gitlab.com/gitlab-org/labkit v1.17.0
gocloud.dev v0.28.0

View File

@ -1842,8 +1842,8 @@ github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX
github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA=
github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg=
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
gitlab.com/gitlab-org/gitaly/v15 v15.7.0 h1:dpcupsBqQSjp+AJ3Wy2UdNSIZKUAtyXbUcktq2uRakw=
gitlab.com/gitlab-org/gitaly/v15 v15.7.0/go.mod h1:s37u+W94lg3T7cv+i+v5WtstyHvuKV1JlwYJNznZVJE=
gitlab.com/gitlab-org/gitaly/v15 v15.7.5 h1:qdPq0AfRDZuf7lelYorWWGOUAnxbz67XGQ5VPXhnsmM=
gitlab.com/gitlab-org/gitaly/v15 v15.7.5/go.mod h1:s37u+W94lg3T7cv+i+v5WtstyHvuKV1JlwYJNznZVJE=
gitlab.com/gitlab-org/golang-archive-zip v0.1.1 h1:35k9giivbxwF03+8A05Cm8YoxoakU8FBCj5gysjCTCE=
gitlab.com/gitlab-org/golang-archive-zip v0.1.1/go.mod h1:ZDtqpWPGPB9qBuZnZDrKQjIdJtkN7ZAoVwhT6H2o2kE=
gitlab.com/gitlab-org/labkit v1.17.0 h1:mEkoLzXorLNdt8NkfgYS5xMDhdqCsIJaeEVtSf7d8cU=