Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-02-09 00:07:17 +00:00
parent 8c5f330a63
commit 7a7db6c00d
30 changed files with 445 additions and 287 deletions

View File

@ -149,15 +149,27 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
Gitlab::PollingInterval.set_header(response, interval: 10_000)
serializer_options = {
disable_coverage: true,
disable_failed_builds: true,
preload: true
}
if Feature.enabled?(:skip_status_preload_in_pipeline_lists, @project, type: :gitlab_com_derisk)
serializer_options.merge!(
disable_manual_and_scheduled_actions: true,
preload_statuses: false,
preload_downstream_statuses: false
)
end
render json: {
pipelines: PipelineSerializer
.new(project: @project, current_user: @current_user)
.with_pagination(request, response)
.represent(
@pipelines,
preload: true,
disable_failed_builds: true,
disable_coverage: true
**serializer_options
),
count: {
all: @pipelines.count

View File

@ -12,7 +12,8 @@ module Projects
package_file = project.package_files.find(params[:id])
package_file.package.touch_last_downloaded_at
send_upload(package_file.file, attachment: package_file.file_name)
send_upload(package_file.file, attachment: package_file.file_name_for_download)
end
end
end

View File

@ -226,14 +226,26 @@ class Projects::PipelinesController < Projects::ApplicationController
private
def serialize_pipelines
serializer_options = {
disable_coverage: true,
disable_manual_and_scheduled_actions: true,
preload: true
}
if Feature.enabled?(:skip_status_preload_in_pipeline_lists, @project, type: :gitlab_com_derisk)
serializer_options.merge!(
disable_failed_builds: true,
preload_statuses: false,
preload_downstream_statuses: false
)
end
PipelineSerializer
.new(project: @project, current_user: @current_user)
.with_pagination(request, response)
.represent(
@pipelines,
disable_coverage: true,
preload: true,
disable_manual_and_scheduled_actions: true
**serializer_options
)
end

View File

@ -31,6 +31,10 @@ module Types
object.helm_file_metadatum
end
end
def file_name
URI.decode_uri_component(object.file_name)
end
end
end
end

View File

@ -7,6 +7,7 @@ class Packages::PackageFile < ApplicationRecord
include Packages::Destructible
INSTALLABLE_STATUSES = [:default].freeze
ENCODED_SLASH = "%2F"
delegate :project, :project_id, to: :package
delegate :conan_file_type, to: :conan_file_metadatum
@ -136,6 +137,10 @@ class Packages::PackageFile < ApplicationRecord
Gitlab::Routing.url_helpers.download_project_package_file_path(project, self)
end
def file_name_for_download
file_name.split(ENCODED_SLASH)[-1]
end
private
def update_size_from_file

View File

@ -6,17 +6,9 @@ class PipelineSerializer < BaseSerializer
# rubocop: disable CodeReuse/ActiveRecord
def represent(resource, opts = {})
if resource.is_a?(ActiveRecord::Relation)
resource = resource.preload(preloaded_relations)
end
if paginated?
resource = paginator.paginate(resource)
end
if opts.delete(:preload)
resource = Gitlab::Ci::Pipeline::Preloader.preload!(resource)
end
resource = resource.preload(preloaded_relations(**opts)) if resource.is_a?(ActiveRecord::Relation)
resource = paginator.paginate(resource) if paginated?
resource = Gitlab::Ci::Pipeline::Preloader.preload!(resource) if opts.delete(:preload)
super(resource, opts)
end
@ -38,15 +30,15 @@ class PipelineSerializer < BaseSerializer
private
def preloaded_relations
def preloaded_relations(preload_statuses: true, preload_downstream_statuses: true, **)
[
:pipeline_metadata,
:cancelable_statuses,
:retryable_builds,
:stages,
:latest_statuses,
:trigger_requests,
:user,
(:latest_statuses if preload_statuses),
{
manual_actions: :metadata,
scheduled_actions: :metadata,
@ -59,15 +51,15 @@ class PipelineSerializer < BaseSerializer
project: [:route, { namespace: :route }],
triggered_by_pipeline: [{ project: [:route, { namespace: :route }] }, :user],
triggered_pipelines: [
(:latest_statuses if preload_downstream_statuses),
{
project: [:route, { namespace: :route }]
},
:source_job,
:latest_statuses,
:user
]
].compact
}
]
].compact
end
end

View File

@ -29,7 +29,7 @@ module Packages
file: params[:file],
size: params[:file].size,
file_sha256: params[:file].sha256,
file_name: params[:file_name],
file_name: URI.encode_uri_component(params[:file_name]),
build: params[:build]
}

View File

@ -10,18 +10,7 @@ module WorkItems
link = set_parent(issuable, work_item)
link.move_to_end
if link.changed? && link.save
relate_child_note = create_notes(work_item)
ResourceLinkEvent.create(
user: current_user,
work_item: link.work_item_parent,
child_work_item: link.work_item,
action: ResourceLinkEvent.actions[:add],
system_note_metadata_id: relate_child_note&.system_note_metadata&.id
)
end
create_notes_and_resource_event(work_item, link) if link.changed? && link.save
link
end
@ -30,6 +19,20 @@ module WorkItems
def extract_references
params[:issuable_references]
end
def create_notes_and_resource_event(work_item, link)
relate_child_note = create_notes(work_item)
ResourceLinkEvent.create(
user: current_user,
work_item: link.work_item_parent,
child_work_item: link.work_item,
action: ResourceLinkEvent.actions[:add],
system_note_metadata_id: relate_child_note&.system_note_metadata&.id
)
end
end
end
end
WorkItems::ParentLinks::CreateService.prepend_mod

View File

@ -36,3 +36,5 @@ module WorkItems
end
end
end
WorkItems::ParentLinks::DestroyService.prepend_mod

View File

@ -0,0 +1,9 @@
---
name: skip_status_preload_in_pipeline_lists
feature_issue_url:
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/142947
rollout_issue_url: https://gitlab.com/gitlab-com/gl-infra/production/-/issues/17482
milestone: '16.9'
group: group::pipeline execution
type: gitlab_com_derisk
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: gitlab_sidekiq_enable_semi_reliable_fetcher
introduced_by_url: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/23854
rollout_issue_url:
milestone: '11.6'
type: ops
group: group::geo
default_enabled: true

View File

@ -1,8 +0,0 @@
---
name: gitlab_sidekiq_reliable_fetcher
introduced_by_url:
rollout_issue_url:
milestone:
type: ops
group: group::scalability
default_enabled: true

View File

@ -15,18 +15,6 @@ def load_cron_jobs!
end
end
def enable_reliable_fetch?
return true unless Feature::FlipperFeature.table_exists?
Feature.enabled?(:gitlab_sidekiq_reliable_fetcher, type: :ops)
end
def enable_semi_reliable_fetch_mode?
return true unless Feature::FlipperFeature.table_exists?
Feature.enabled?(:gitlab_sidekiq_enable_semi_reliable_fetcher, type: :ops)
end
# Custom Queues configuration
queues_config_hash = Gitlab::Redis::Queues.redis_client_params
@ -99,10 +87,9 @@ Sidekiq.configure_server do |config|
Gitlab::Cluster::LifecycleEvents.do_worker_stop
end
if enable_reliable_fetch?
config[:semi_reliable_fetch] = enable_semi_reliable_fetch_mode?
Sidekiq::ReliableFetch.setup_reliable_fetch!(config)
end
config[:semi_reliable_fetch] = true # Default value is false
Sidekiq::ReliableFetch.setup_reliable_fetch!(config)
Gitlab::SidekiqVersioning.install!

View File

@ -0,0 +1,10 @@
---
table_name: audit_events_instance_external_streaming_destinations
classes:
- AuditEvents::Instance::ExternalStreamingDestination
feature_categories:
- audit_events
description: Stores external audit event destination configurations for the instance.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141739
milestone: '16.9'
gitlab_schema: gitlab_main_clusterwide

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class CreateAuditEventsInstanceExternalStreamingDestinations < Gitlab::Database::Migration[2.2]
milestone '16.9'
def change
create_table :audit_events_instance_external_streaming_destinations do |t|
t.timestamps_with_timezone null: false
t.integer :type, null: false, limit: 2
t.text :name, null: false, limit: 72
t.jsonb :config, null: false
t.binary :encrypted_secret_token, null: false
t.binary :encrypted_secret_token_iv, null: false
end
end
end

View File

@ -0,0 +1 @@
37166b2aa9addf6d1effd34da3242866a13c9a24636f0be677d72c89bc09376d

View File

@ -13226,6 +13226,27 @@ CREATE SEQUENCE audit_events_instance_external_audit_event_destinations_id_seq
ALTER SEQUENCE audit_events_instance_external_audit_event_destinations_id_seq OWNED BY audit_events_instance_external_audit_event_destinations.id;
CREATE TABLE audit_events_instance_external_streaming_destinations (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
type smallint NOT NULL,
name text NOT NULL,
config jsonb NOT NULL,
encrypted_secret_token bytea NOT NULL,
encrypted_secret_token_iv bytea NOT NULL,
CONSTRAINT check_219decfb51 CHECK ((char_length(name) <= 72))
);
CREATE SEQUENCE audit_events_instance_external_streaming_destinations_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE audit_events_instance_external_streaming_destinations_id_seq OWNED BY audit_events_instance_external_streaming_destinations.id;
CREATE TABLE audit_events_instance_google_cloud_logging_configurations (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@ -27053,6 +27074,8 @@ ALTER TABLE ONLY audit_events_instance_amazon_s3_configurations ALTER COLUMN id
ALTER TABLE ONLY audit_events_instance_external_audit_event_destinations ALTER COLUMN id SET DEFAULT nextval('audit_events_instance_external_audit_event_destinations_id_seq'::regclass);
ALTER TABLE ONLY audit_events_instance_external_streaming_destinations ALTER COLUMN id SET DEFAULT nextval('audit_events_instance_external_streaming_destinations_id_seq'::regclass);
ALTER TABLE ONLY audit_events_instance_google_cloud_logging_configurations ALTER COLUMN id SET DEFAULT nextval('audit_events_instance_google_cloud_logging_configuration_id_seq'::regclass);
ALTER TABLE ONLY audit_events_streaming_event_type_filters ALTER COLUMN id SET DEFAULT nextval('audit_events_streaming_event_type_filters_id_seq'::regclass);
@ -29028,6 +29051,9 @@ ALTER TABLE ONLY audit_events_instance_amazon_s3_configurations
ALTER TABLE ONLY audit_events_instance_external_audit_event_destinations
ADD CONSTRAINT audit_events_instance_external_audit_event_destinations_pkey PRIMARY KEY (id);
ALTER TABLE ONLY audit_events_instance_external_streaming_destinations
ADD CONSTRAINT audit_events_instance_external_streaming_destinations_pkey PRIMARY KEY (id);
ALTER TABLE ONLY audit_events_instance_google_cloud_logging_configurations
ADD CONSTRAINT audit_events_instance_google_cloud_logging_configurations_pkey PRIMARY KEY (id);

View File

@ -118,8 +118,8 @@ There is a feature proposal to add the ability to back up repositories directly
1. Spin up a VM with 8 vCPU and 7.2 GB memory. This node will be used to back up Git repositories. Note that
[a Praefect node cannot be used to back up Git data](https://gitlab.com/gitlab-org/gitlab/-/issues/396343#note_1385950340).
1. Configure the node as another **GitLab Rails** node as defined in your [reference architecture](../reference_architectures/index.md).
As with other GitLab Rails nodes, this node must have access to your main PostgreSQL database, Redis, object storage, and Gitaly Cluster.
1. Configure the node as another **GitLab Rails (webservice)** node as defined in your [reference architecture](../reference_architectures/index.md).
As with other GitLab Rails nodes, this node must have access to your main PostgreSQL database, Redis, object storage, and Gitaly Cluster. Find your reference architecture and see its "Configure GitLab Rails" section for an example of how to set up the server. You might need to translate some [Helm chart values](https://docs.gitlab.com/charts/charts/globals.html) to their Linux package equivalents.
1. Ensure the GitLab application isn't running on this node by disabling most services:
1. Edit `/etc/gitlab/gitlab.rb` to ensure the following services are disabled.
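As a rough sketch, assuming a Linux package (Omnibus) install, the kinds of `/etc/gitlab/gitlab.rb` settings this step refers to could look like the following; the exact list of services to disable depends on your reference architecture and continues beyond this excerpt:

    # Illustrative sketch only: disable the services the backup node does not need,
    # keeping just enough of GitLab Rails to run the backup Rake tasks.
    puma['enable'] = false
    sidekiq['enable'] = false
    nginx['enable'] = false
    gitaly['enable'] = false
    postgresql['enable'] = false
    redis['enable'] = false
    prometheus_monitoring['enable'] = false

After editing `/etc/gitlab/gitlab.rb`, run `gitlab-ctl reconfigure` to apply the settings.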

View File

@ -20,10 +20,9 @@ migrate or import any types of groups or organizations from GitHub to GitLab.
The namespace is a user or group in GitLab, such as `gitlab.com/sidney-jones` or
`gitlab.com/customer-success`.
If you are importing from GitHub Enterprise to GitLab.com, use the
[GitLab Import API](../../../api/import.md#import-repository-from-github) GitHub endpoint instead. The API allows you to
provide a different domain to import the project from. Using the UI, the GitHub importer always imports from the
`github.com` domain.
Using the GitLab UI, the GitHub importer always imports from the
`github.com` domain. If you are importing from a self-hosted GitHub Enterprise Server domain, use the
[GitLab Import API](#use-the-rest-api) GitHub endpoint.
When importing projects:
@ -59,25 +58,9 @@ To import projects from GitHub:
matching email addresses are required.
- GitHub accounts must have a GitHub public-facing email address so that all comments and contributions can be properly mapped to
the same user in GitLab. GitHub Enterprise does not require this field to be populated so you might have to add it on existing accounts.
### Importing from GitHub Enterprise to self-managed GitLab
If you are importing from GitHub Enterprise to a self-managed GitLab instance:
- You must first enable the [GitHub integration](../../../integration/github.md).
- GitHub must be enabled as an import source in the
[Admin Area](../../../administration/settings/import_and_export_settings.md#configure-allowed-import-sources).
- For GitLab 15.10 and earlier, you must add `github.com` and `api.github.com` entries in the
- For self-managed GitLab 15.10 and earlier, you must add `github.com` and `api.github.com` entries in the
[allowlist for local requests](../../../security/webhooks.md#allow-outbound-requests-to-certain-ip-addresses-and-domains).
### Importing from GitHub.com to self-managed GitLab
If you are importing from GitHub.com to a self-managed GitLab instance:
- You don't need to enable the [GitHub integration](../../../integration/github.md).
- GitHub must be enabled as an import source in the
[Admin Area](../../../administration/settings/import_and_export_settings.md#configure-allowed-import-sources).
### Known issues
- GitHub pull request comments (known as diff notes in GitLab) created before 2017 are imported in separate threads.
@ -98,30 +81,67 @@ If a GitHub user's public email address doesn't match any GitLab user email
address, the user's activity is associated with the user account that is
performing the import.
### Use the GitHub integration
You can import your GitHub repository by either:
- [Using GitHub OAuth](#use-github-oauth)
- [Using a GitHub Personal Access Token](#use-a-github-personal-access-token)
- [Using the API](#use-the-rest-api)
### Use GitHub OAuth
If you are importing to GitLab.com or to a self-managed GitLab that has GitHub OAuth [configured](../../../integration/github.md), you can use GitHub OAuth to import your repository.
This method has an advantage over using a [Personal Access Token (PAT)](#use-a-github-personal-access-token)
because the backend exchanges the access token with the appropriate permissions.
1. On the left sidebar, at the top, select **Create new** (**{plus}**) and **New project/repository**.
1. Select **Import project** and then **GitHub**.
1. Now you can either:
- If GitHub OAuth is [configured](../../../integration/github.md) for the instance, select **Authorize with GitHub**.
- Use a GitHub personal access token:
1. Go to <https://github.com/settings/tokens/new>.
1. In the **Note** field, enter a token description.
1. Select the `repo` scope.
1. Optional. To [import collaborators](#select-additional-items-to-import), select the `read:org` scope.
1. Select **Generate token**.
1. On the GitLab import page, in the **Personal Access Token** field, paste the GitHub personal access token.
1. Select **Authenticate**.
1. Continue on to [selecting which repositories to import](#select-which-repositories-to-import).
1. Select **Authorize with GitHub**.
1. Proceed to [selecting which repositories to import](#select-which-repositories-to-import).
To use a different token to perform an imports after previously performing
To use a different method to perform an import after previously performing
these steps, sign out of your GitLab account and sign in again.
### Use a GitHub Personal Access Token
To import your GitHub repository using a GitHub Personal Access Token:
1. Generate a GitHub Personal Access Token:
1. Go to <https://github.com/settings/tokens/new>.
1. In the **Note** field, enter a token description.
1. Select the `repo` scope.
1. Optional. To [import collaborators](#select-additional-items-to-import), select the `read:org` scope.
1. Select **Generate token**.
1. On the GitLab left sidebar, at the top, select **Create new** (**{plus}**) and **New project/repository**.
1. Select **Import project** and then **GitHub**.
1. Select **Authorize with GitHub**.
1. In the **Personal Access Token** field, paste the GitHub Personal Access Token.
1. Select **Authenticate**.
1. Proceed to [selecting which repositories to import](#select-which-repositories-to-import).
To use a different token to perform an import after previously performing
these steps, sign out of your GitLab account and sign in again, or revoke the
older token in GitHub.
### Use the REST API
You can also import a repository from GitHub using the
[GitLab REST API](../../../api/import.md#import-repository-from-github).
The [GitLab REST API](../../../api/import.md#import-repository-from-github) can be used to import a GitHub repository. It has some advantages over using the GitLab UI:
- It can be used to import public GitHub repositories that you do not own.
- It can be used to import from a self-hosted GitHub Enterprise Server.
- It can be used to set the `timeout_strategy` option, which is not available in the UI.
The REST API is limited to authenticating with GitLab Personal Access Tokens.
To import your GitHub repository using the GitLab REST API:
1. Generate a GitHub Personal Access Token:
1. Go to <https://github.com/settings/tokens/new>.
1. In the **Note** field, enter a token description.
1. Select the `repo` scope.
1. Optional. To [import collaborators](#select-additional-items-to-import), select the `read:org` scope.
1. Select **Generate token**.
1. Use the [GitLab REST API](../../../api/import.md#import-repository-from-github) to import your GitHub repository.
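As a sketch, assuming a GitLab instance at `gitlab.example.com` and placeholder tokens and repository ID, such an import request could be made from Ruby like this (the endpoint and parameter names follow the Import API documentation linked above):

    require 'net/http'
    require 'json'
    require 'uri'

    uri = URI('https://gitlab.example.com/api/v4/import/github')

    request = Net::HTTP::Post.new(uri)
    request['PRIVATE-TOKEN'] = ENV['GITLAB_TOKEN']   # GitLab personal access token
    request['Content-Type'] = 'application/json'
    request.body = {
      personal_access_token: ENV['GITHUB_TOKEN'],    # GitHub personal access token with the repo scope
      repo_id: 123456,                               # numeric ID of the GitHub repository
      target_namespace: 'customer-success',          # GitLab group or user namespace to import into
      new_name: 'imported-repo',                     # optional: rename the project on import
      timeout_strategy: 'pessimistic'                # optional: not available in the UI
    }.to_json

    response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
    puts response.code

A successful request returns details of the newly created GitLab project, and the import then proceeds in the background.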
### Filter repositories list
@ -240,25 +260,20 @@ Increasing the number of Sidekiq workers does *not* reduce the time spent clonin
### Enable GitHub OAuth using a GitHub Enterprise Cloud OAuth App
You can use a personal access token to make API requests. Additionally, you can
authorize a GitHub App or OAuth app, which can then make API requests on your
behalf.
If you belong to a [GitHub Enterprise Cloud organization](https://docs.github.com/en/enterprise-cloud@latest/get-started/onboarding) you can configure your self-managed GitLab instance to obtain a higher [GitHub API rate limit](https://docs.github.com/en/rest/overview/rate-limits-for-the-rest-api?apiVersion=2022-11-28#primary-rate-limit-for-authenticated-users).
API requests to GitHub are [subject to rate limits](https://docs.github.com/en/rest/overview/rate-limits-for-the-rest-api?apiVersion=2022-11-28#primary-rate-limit-for-authenticated-users).
GitHub API requests are usually subject to a rate limit of 5,000 requests per hour. By following the steps below, you obtain a higher rate limit of 15,000 requests per hour, resulting in a faster overall import time.
For GitHub repositories with tens of thousands of issues, pull requests, and
comments on those issues and pull requests, a higher rate limit results in a
faster overall import time because the GitLab importer must pause when a
GitHub rate limit is reached. To enable a higher rate limit for your
self-managed or dedicated GitLab instance:
Prerequisites:
- Ensure that you have access to a
[GitHub Enterprise Cloud organization](https://docs.github.com/en/enterprise-cloud@latest/get-started/onboarding/getting-started-with-github-enterprise-cloud)
- [Create an OAuth app in GitHub](../../../integration/github.md#create-an-oauth-app-in-github).
- Ensure that the OAuth app is owned by the Enterprise Cloud Organization, not
your personal GitHub account.
- [Configure GitHub OAuth in GitLab](../../../integration/github.md#enable-github-oauth-in-gitlab).
- Perform the project import using the [GitHub integration](#use-the-github-integration) and select **Authorize with GitHub** to use the OAuth authorization method.
- You have access to a
[GitHub Enterprise Cloud organization](https://docs.github.com/en/enterprise-cloud@latest/get-started/onboarding/getting-started-with-github-enterprise-cloud).
- GitLab is configured to enable [GitHub OAuth](../../../integration/github.md#enable-github-oauth-in-gitlab).
To enable a higher rate limit:
- [Create an OAuth app in GitHub](../../../integration/github.md#create-an-oauth-app-in-github). Ensure that the OAuth app is owned by the Enterprise Cloud Organization, not your personal GitHub account.
- Perform the project import using [GitHub OAuth](#use-github-oauth).
- Optional. By default, sign-in is enabled for all configured OAuth providers.
If you want to enable GitHub OAuth for imports but you want to
prevent the ability for users to sign in to your GitLab instance with GitHub,

View File

@ -28,17 +28,17 @@ module API
expose_url(model_version_uri || generic_package_uri)
end
# Example: http://127.0.0.1:3000/api/v4/projects/20/packages/ml_models/my-model-name-4/3.0.0
# Example: http://127.0.0.1:3000/api/v4/projects/20/packages/ml_models/1/files/
def model_version_uri
return unless object.model_version_id
model_version = object.model_version
path = api_v4_projects_packages_ml_models_model_version_path(
id: object.project.id, model_name: model_version.model.name, model_version: '', file_name: ''
path = api_v4_projects_packages_ml_models_files___path___path(
id: object.project.id, model_version_id: model_version.id, path: '', file_name: ''
)
path.sub('/model_version', "/#{model_version.version}")
path.delete_suffix('(/path/)')
end
# Example: http://127.0.0.1:3000/api/v4/projects/20/packages/generic/ml_experiment_1/1/

View File

@ -40,8 +40,7 @@ module API
end
def find_model_version!
::Ml::ModelVersion.by_project_id_name_and_version(project.id, params[:model_name], params[:model_version]) ||
not_found!
::Ml::ModelVersion.by_project_id_and_id(user_project.id, params[:model_version_id]) || not_found!
end
def model_version
@ -52,92 +51,92 @@ module API
params do
requires :id, types: [String, Integer], desc: 'The ID or URL-encoded path of the project'
end
resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
namespace ':id/packages/ml_models' do
params do
requires :model_name, type: String, desc: 'Model name', regexp: Gitlab::Regex.ml_model_name_regex,
file_path: true
requires :model_version, type: String, desc: 'Model version',
regexp: Gitlab::Regex.semver_regex
requires :file_name, type: String, desc: 'Package file name',
regexp: Gitlab::Regex.ml_model_file_name_regex, file_path: true
optional :status, type: String, values: ALLOWED_STATUSES, desc: 'Package status'
params do
requires :model_version_id, type: Integer, desc: 'Model version id'
requires :file_name, type: String, desc: 'File name', file_path: true,
regexp: Gitlab::Regex.ml_model_file_name_regex
optional :path, type: String, desc: 'File directory path'
optional :status, type: String, values: ALLOWED_STATUSES, desc: 'Package status'
end
namespace ':id/packages/ml_models/:model_version_id/files/(*path/):file_name',
requirements: ML_MODEL_PACKAGES_REQUIREMENTS do
desc 'Workhorse authorize model package file' do
detail 'Introduced in GitLab 16.8'
success code: 200
failure [
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not Found' }
]
tags %w[ml_model_registry]
end
put 'authorize' do
authorize_workhorse!(subject: project, maximum_size: project.actual_limits.ml_model_max_file_size)
end
namespace ':model_name/*model_version/:file_name', requirements: ML_MODEL_PACKAGES_REQUIREMENTS do
desc 'Workhorse authorize model package file' do
detail 'Introduced in GitLab 16.1'
success code: 200
failure [
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not Found' }
]
tags %w[ml_model_registry]
end
put 'authorize' do
authorize_workhorse!(subject: project, maximum_size: project.actual_limits.ml_model_max_file_size)
end
desc 'Workhorse upload model package file' do
detail 'Introduced in GitLab 16.2'
success code: 201
failure [
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not Found' }
]
tags %w[ml_model_registry]
end
params do
requires :file,
type: ::API::Validations::Types::WorkhorseFile,
desc: 'The package file to be published (generated by Multipart middleware)',
documentation: { type: 'file' }
end
put do
authorize_upload!(project)
not_found! unless can?(current_user, :write_model_registry, project)
desc 'Workhorse upload model package file' do
detail 'Introduced in GitLab 16.8'
success code: 201
failure [
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not Found' }
]
tags %w[ml_model_registry]
end
params do
requires :file,
type: ::API::Validations::Types::WorkhorseFile,
desc: 'The package file to be published (generated by Multipart middleware)',
documentation: { type: 'file' }
end
put do
authorize_upload!(project)
not_found! unless can?(current_user, :write_model_registry, project)
bad_request!('File is too large') if max_file_size_exceeded?
bad_request!('File is too large') if max_file_size_exceeded?
create_package_file_params = declared(params).merge(
model_version: model_version,
build: current_authenticated_job,
package_name: params[:model_name],
package_version: params[:model_version]
)
create_package_file_params = declared(params).merge(
model_version: model_version,
build: current_authenticated_job,
package_name: model_version.name,
package_version: model_version.version,
file_name: [params[:path], params[:file_name]].compact.join('/')
)
package_file = ::Packages::MlModel::CreatePackageFileService
.new(project, current_user, create_package_file_params)
.execute
package_file = ::Packages::MlModel::CreatePackageFileService
.new(project, current_user, create_package_file_params)
.execute
bad_request!('Package creation failed') unless package_file
bad_request!('Package creation failed') unless package_file
created!
rescue ObjectStorage::RemoteStoreError => e
Gitlab::ErrorTracking.track_exception(e, extra: { file_name: params[:file_name], project_id: project.id })
created!
rescue ObjectStorage::RemoteStoreError => e
Gitlab::ErrorTracking.track_exception(e, extra: { file_name: params[:file_name], project_id: project.id })
forbidden!
end
forbidden!
end
desc 'Download an ml_model package file' do
detail 'This feature was introduced in GitLab 16.2'
success code: 200
failure [
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not Found' }
]
tags %w[ml_model_registry]
end
get do
authorize_read_package!(project)
desc 'Download an ml_model package file' do
detail 'This feature was introduced in GitLab 16.8'
success code: 200
failure [
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not Found' }
]
tags %w[ml_model_registry]
end
package_file = ::Packages::PackageFileFinder.new(model_version.package, params[:file_name]).execute!
get do
authorize_read_package!(project)
present_package_file!(package_file)
end
file_name = URI.encode_uri_component([params[:path], params[:file_name]].compact.join('/'))
package_file = ::Packages::PackageFileFinder.new(model_version.package, file_name).execute!
present_package_file!(package_file)
end
end
end

View File

@ -44613,6 +44613,9 @@ msgstr ""
msgid "SecurityOrchestration|For large groups, there may be a significant delay in applying policy changes to pre-existing merge requests. Policy changes typically apply almost immediately for newly created merge requests."
msgstr ""
msgid "SecurityOrchestration|Granularly scope your policies to selected projects. Enforce custom CI with pipeline execution action for scan execution policies. %{linkStart}How do I implement these features?%{linkEnd}"
msgstr ""
msgid "SecurityOrchestration|Groups"
msgstr ""
@ -44634,6 +44637,9 @@ msgstr ""
msgid "SecurityOrchestration|Inherited from %{namespace}"
msgstr ""
msgid "SecurityOrchestration|Introducing Policy Scoping and Pipeline Execution Policy Action experimental features"
msgstr ""
msgid "SecurityOrchestration|Invalid Compliance Framework ID(s)"
msgstr ""
@ -44846,6 +44852,9 @@ msgstr ""
msgid "SecurityOrchestration|Severity is %{severity}."
msgstr ""
msgid "SecurityOrchestration|Share feedback"
msgstr ""
msgid "SecurityOrchestration|Show all included projects"
msgstr ""

View File

@ -3,46 +3,6 @@
require 'spec_helper'
RSpec.describe 'sidekiq', feature_category: :build do
describe 'enable_reliable_fetch?' do
subject { enable_reliable_fetch? }
context 'when gitlab_sidekiq_reliable_fetcher is enabled' do
before do
stub_feature_flags(gitlab_sidekiq_reliable_fetcher: true)
end
it { is_expected.to be_truthy }
end
context 'when gitlab_sidekiq_reliable_fetcher is disabled' do
before do
stub_feature_flags(gitlab_sidekiq_reliable_fetcher: false)
end
it { is_expected.to be_falsey }
end
end
describe 'enable_semi_reliable_fetch_mode?' do
subject { enable_semi_reliable_fetch_mode? }
context 'when gitlab_sidekiq_enable_semi_reliable_fetcher is enabled' do
before do
stub_feature_flags(gitlab_sidekiq_enable_semi_reliable_fetcher: true)
end
it { is_expected.to be_truthy }
end
context 'when gitlab_sidekiq_enable_semi_reliable_fetcher is disabled' do
before do
stub_feature_flags(gitlab_sidekiq_enable_semi_reliable_fetcher: false)
end
it { is_expected.to be_falsey }
end
end
describe 'load_cron_jobs!' do
subject { load_cron_jobs! }

View File

@ -77,7 +77,7 @@ RSpec.describe API::Entities::Ml::Mlflow::RunInfo, feature_category: :mlops do
let!(:candidate) { version.candidate }
it 'returns the model version format of the artifact_uri' do
expect(subject[:artifact_uri]).to eq("http://localhost/api/v4/projects/#{candidate.project_id}/packages/ml_models/#{version.model.name}/#{version.version}")
expect(subject[:artifact_uri]).to eq("http://localhost/api/v4/projects/#{candidate.project_id}/packages/ml_models/#{version.id}/files/")
end
end
end

View File

@ -9,6 +9,7 @@ RSpec.describe Packages::PackageFile, type: :model do
let_it_be(:package_file2) { create(:package_file, :xml, file_name: 'ThisIsATest') }
let_it_be(:package_file3) { create(:package_file, :xml, file_name: 'formatted.zip') }
let_it_be(:package_file4) { create(:package_file, :nuget) }
let_it_be(:package_file5) { create(:package_file, :xml, file_name: 'my_dir%2Fformatted') }
let_it_be(:debian_package) { create(:debian_package, project: project, with_changes_file: true) }
it_behaves_like 'having unique enum values'
@ -420,4 +421,22 @@ RSpec.describe Packages::PackageFile, type: :model do
it { is_expected.to contain_exactly(pending_destruction_package_file) }
end
end
describe '#file_name_for_download' do
subject { package_file.file_name_for_download }
context 'with a simple file name' do
let(:package_file) { package_file1 }
it { is_expected.to eq(package_file.file_name) }
end
context 'with a file name with encoded slashes' do
let(:package_file) { package_file5 }
it 'returns the last component of the file name' do
is_expected.to eq('formatted')
end
end
end
end

View File

@ -118,19 +118,23 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
project.send("add_#{user_role}", user) if member && user_role != :anonymous
end
describe 'PUT /api/v4/projects/:id/packages/ml_models/:model_name/:model_version/:file_name/authorize' do
describe 'PUT /api/v4/projects/:id/packages/ml_models/:model_version_id/files/(*path)/:file_name/authorize' do
include_context 'ml model authorize permissions table'
let_it_be(:file_name) { 'model.md5' }
let(:token) { tokens[:personal_access_token] }
let(:user_headers) { { 'Authorization' => "Bearer #{token}" } }
let(:headers) { user_headers.merge(workhorse_headers) }
let(:request) { authorize_upload_file(headers) }
let(:model_name) { model_version.name }
let(:version) { model_version.version }
let(:file_name) { 'myfile.tar.gz' }
let(:file_path) { '' }
let(:full_path) { "#{file_path}#{file_name}" }
subject(:api_response) do
url = "/projects/#{project.id}/packages/ml_models/#{model_name}/#{version}/#{file_name}/authorize"
url = "/projects/#{project.id}/packages/ml_models/#{model_version.id}/files/#{full_path}/authorize"
put api(url), headers: headers
@ -150,21 +154,32 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value(visibility.to_s))
end
it { is_expected.to have_gitlab_http_status(expected_status) }
context 'when file does not have path' do
it { is_expected.to have_gitlab_http_status(expected_status) }
end
context 'when file has path' do
let(:file_path) { 'my_dir' }
it { is_expected.to have_gitlab_http_status(expected_status) }
end
end
it_behaves_like 'Endpoint not found if read_model_registry not available'
end
describe 'application security' do
where(:model_name, :file_name) do
'my-package/../' | 'myfile.tar.gz'
'my-package%2f%2e%2e%2f' | 'myfile.tar.gz'
'my_package' | '../.ssh%2fauthorized_keys'
'my_package' | '%2e%2e%2f.ssh%2fauthorized_keys'
context 'when path has back directory' do
let(:file_name) { '../.ssh%2fauthorized_keys' }
it 'rejects malicious request' do
is_expected.to have_gitlab_http_status(:bad_request)
end
end
with_them do
context 'when path has invalid characters' do
let(:file_name) { '%2e%2e%2f.ssh%2fauthorized_keys' }
it 'rejects malicious request' do
is_expected.to have_gitlab_http_status(:bad_request)
end
@ -172,7 +187,7 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
end
end
describe 'PUT /api/v4/projects/:id/packages/ml_models/:model_name/:model_version/:file_name' do
describe 'PUT /api/v4/projects/:id/packages/ml_models/:model_version_id/(*path)/files/:file_name' do
include_context 'ml model authorize permissions table'
let_it_be(:file_name) { 'model.md5' }
@ -183,11 +198,14 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
let(:params) { { file: temp_file(file_name) } }
let(:file_key) { :file }
let(:send_rewritten_field) { true }
let(:model_name) { model_version.name }
let(:version) { model_version.version }
let(:version_id) { model_version.id }
let(:file_path) { '' }
let(:full_path) { "#{file_path}#{file_name}" }
let(:saved_file_name) { file_name }
subject(:api_response) do
url = "/projects/#{project.id}/packages/ml_models/#{model_name}/#{version}/#{file_name}"
url = "/projects/#{project.id}/packages/ml_models/#{version_id}/files/#{full_path}"
workhorse_finalize(
api(url),
@ -201,13 +219,6 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
response
end
describe 'success' do
it 'creates a new package' do
expect { api_response }.to change { Packages::PackageFile.count }.by(1)
expect(api_response).to have_gitlab_http_status(:created)
end
end
describe 'user access' do
where(:visibility, :user_role, :member, :token_type, :valid_token, :expected_status) do
authorize_permissions_table
@ -222,9 +233,26 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
end
if params[:expected_status] == :success
it_behaves_like 'process ml model package upload'
context 'when file does not have path' do
it_behaves_like 'process ml model package upload'
end
context 'when file has path' do
let(:file_path) { 'my_dir/' }
let(:saved_file_name) { "my_dir%2F#{file_name}" }
it_behaves_like 'process ml model package upload'
end
else
it { is_expected.to have_gitlab_http_status(expected_status) }
context 'when file does not have path' do
it { is_expected.to have_gitlab_http_status(expected_status) }
end
context 'when file has path' do
let(:file_path) { 'my_dir/' }
it { is_expected.to have_gitlab_http_status(expected_status) }
end
end
end
@ -234,22 +262,26 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
end
end
describe 'GET /api/v4/projects/:project_id/packages/ml_models/:model_name/:model_version/:file_name' do
describe 'GET /api/v4/projects/:project_id/packages/ml_models/:model_version_id/files/(*path)/:file_name' do
include_context 'ml model authorize permissions table'
let_it_be(:file_name) { 'model.md5' }
let_it_be(:package) { model_version.package }
let_it_be(:package_file) { create(:package_file, :generic, package: package, file_name: 'model.md5') }
let_it_be(:package_file_1) { create(:package_file, :generic, package: package, file_name: 'model.md5') }
let_it_be(:package_file_2) { create(:package_file, :generic, package: package, file_name: 'my_dir%2Fmodel.md5') }
let(:model_name) { model_version.name }
let(:version) { model_version.version }
let(:file_name) { package_file.file_name }
let(:file_path) { '' }
let(:full_path) { "#{file_path}#{file_name}" }
let(:saved_file_name) { file_name }
let(:version_id) { model_version.id }
let(:token) { tokens[:personal_access_token] }
let(:user_headers) { { 'Authorization' => "Bearer #{token}" } }
let(:headers) { user_headers.merge(workhorse_headers) }
subject(:api_response) do
url = "/projects/#{project.id}/packages/ml_models/#{model_name}/#{version}/#{file_name}"
url = "/projects/#{project.id}/packages/ml_models/#{version_id}/files/#{full_path}"
get api(url), headers: headers
@ -269,10 +301,22 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value(visibility.to_s))
end
if params[:expected_status] == :success
it_behaves_like 'process ml model package download'
else
it { is_expected.to have_gitlab_http_status(expected_status) }
context 'when file does not have path' do
if params[:expected_status] == :success
it_behaves_like 'process ml model package download'
else
it { is_expected.to have_gitlab_http_status(expected_status) }
end
end
context 'when file has path' do
let(:file_path) { 'my_dir/' }
if params[:expected_status] == :success
it_behaves_like 'process ml model package download'
else
it { is_expected.to have_gitlab_http_status(expected_status) }
end
end
end

View File

@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe Projects::Packages::PackageFilesController, feature_category: :package_registry do
let_it_be(:project) { create(:project, :public) }
let_it_be(:package) { create(:package, project: project) }
let_it_be(:package_file) { create(:package_file, package: package) }
let(:filename) { package_file.file_name }
let(:filename) { 'file.zip' }
let(:package_file) { create(:package_file, package: package, file_name: filename) }
describe 'GET download' do
subject do
@ -25,6 +25,17 @@ RSpec.describe Projects::Packages::PackageFilesController, feature_category: :pa
.to eq(%(attachment; filename="#{filename}"; filename*=UTF-8''#{filename}))
end
context 'when file name has directory structure' do
let(:filename) { 'dir%2Ffile.zip' }
it 'sends the package file only with the last component of the name' do
subject
expect(response.headers['Content-Disposition'])
.to eq(%(attachment; filename="file.zip"; filename*=UTF-8''file.zip))
end
end
it_behaves_like 'bumping the package last downloaded at field'
end
end

View File

@ -7,12 +7,12 @@ RSpec.describe Packages::MlModel::CreatePackageFileService, feature_category: :m
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:pipeline) { create(:ci_pipeline, user: user, project: project) }
let_it_be(:file_name) { 'myfile.tar.gz.1' }
let_it_be(:model) { create(:ml_models, user: user, project: project) }
let_it_be(:model_version) { create(:ml_model_versions, :with_package, model: model, version: '0.1.0') }
let(:build) { instance_double(Ci::Build, pipeline: pipeline) }
let(:file_name) { 'myfile.tar.gz.1' }
let(:sha256) { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
let(:temp_file) { Tempfile.new("test") }
let(:file) { UploadedFile.new(temp_file.path, sha256: sha256) }
@ -42,6 +42,23 @@ RSpec.describe Packages::MlModel::CreatePackageFileService, feature_category: :m
end
end
context 'when file name has slashes' do
let(:file_name) { 'my_dir/myfile.tar.gz.1' }
let(:params) do
{
model_version: model_version,
file: file,
file_name: file_name,
status: :hidden
}
end
it 'url encodes the file name' do
expect(execute_service.file_name).to eq('my_dir%2Fmyfile.tar.gz.1')
end
end
context 'with existing model version' do
let(:params) do
{

View File

@ -9,8 +9,14 @@ RSpec.shared_examples 'Endpoint not found if read_model_registry not available'
.and_return(false)
end
it "is not found" do
is_expected.to have_gitlab_http_status(:not_found)
context 'when file has path' do
let(:file_path) { 'my_dir/' }
it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'when file does not have path' do
it { is_expected.to have_gitlab_http_status(:not_found) }
end
end
end
@ -24,13 +30,21 @@ RSpec.shared_examples 'Endpoint not found if write_model_registry not available'
.and_return(false)
end
it { is_expected.to have_gitlab_http_status(:not_found) }
context 'when file has path' do
let(:file_path) { 'my_dir/' }
it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'when file does not have path' do
it { is_expected.to have_gitlab_http_status(:not_found) }
end
end
end
RSpec.shared_examples 'Not found when model version does not exist' do
context 'when model version does not exist' do
let(:version) { "#{non_existing_record_id}.0.0" }
let(:version_id) { non_existing_record_id }
it { is_expected.to have_gitlab_http_status(:not_found) }
end
@ -43,7 +57,7 @@ RSpec.shared_examples 'creates package files for model versions' do
expect(api_response).to have_gitlab_http_status(:created)
package_file = project.packages.last.package_files.reload.last
expect(package_file.file_name).to eq(file_name)
expect(package_file.file_name).to eq(saved_file_name)
end
it 'returns bad request if package creation fails' do

View File

@ -26,11 +26,17 @@ func TestS3SessionSetup(t *testing.T) {
require.Equal(t, "us-west-1", s3Config.SigningRegion)
require.True(t, aws.BoolValue(sess.Config.S3ForcePathStyle))
require.Equal(t, len(sessionCache.sessions), 1)
sessionCache.Lock()
require.Equal(t, 1, len(sessionCache.sessions))
sessionCache.Unlock()
anotherConfig := cfg
_, err = setupS3Session(credentials, anotherConfig)
require.NoError(t, err)
require.Equal(t, len(sessionCache.sessions), 1)
sessionCache.Lock()
require.Equal(t, 1, len(sessionCache.sessions))
sessionCache.Unlock()
}
func TestS3SessionEndpointSetup(t *testing.T) {
@ -62,7 +68,7 @@ func TestS3SessionExpiry(t *testing.T) {
sess, err := setupS3Session(credentials, cfg)
require.NoError(t, err)
require.Equal(t, aws.StringValue(sess.Config.Region), "us-west-1")
require.Equal(t, "us-west-1", aws.StringValue(sess.Config.Region))
require.True(t, aws.BoolValue(sess.Config.S3ForcePathStyle))
firstSession, ok := getS3Session(cfg)