Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-10-04 06:13:01 +00:00
parent 21b3a9bd2a
commit 81a3506236
71 changed files with 1226 additions and 721 deletions

View File

@ -1 +1 @@
7dc3b9589e45477d86ae9a37cae61ce03aa984a1
ec2662d798239ece807c2a0d689a7b7ccbecd03c

View File

@ -161,7 +161,7 @@ export default {
currentPath,
);
this.updateGroups(res);
this.updateGroups(res, Boolean(filterGroupsBy));
});
},
toggleChildren(group) {

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true
module StreamDiffs
extend ActiveSupport::Concern
include ActionController::Live
def diffs
return render_404 unless rapid_diffs_enabled?
stream_headers
offset = { offset_index: params.permit(:offset)[:offset].to_i }
stream_diff_files(options.merge(offset))
rescue StandardError => e
Gitlab::AppLogger.error("Error streaming diffs: #{e.message}")
response.stream.write e.message
ensure
response.stream.close
end
private
def rapid_diffs_enabled?
::Feature.enabled?(:rapid_diffs, current_user, type: :wip)
end
def resource
raise NotImplementedError
end
def options
{}
end
def view
helpers.diff_view
end
def stream_diff_files(options)
resource.diffs_for_streaming(options).diff_files.each do |diff_file|
response.stream.write(render_diff_file(diff_file))
end
end
def render_diff_file(diff_file)
render_to_string(
::RapidDiffs::DiffFileComponent.new(diff_file: diff_file, parallel_view: view == :parallel),
layout: false
)
end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
module Projects
class CommitDiffsStreamController < Projects::CommitController
include StreamDiffs
private
def resource
commit
end
def options
opts = diff_options
opts[:offset_index] = params.permit(:offset)[:offset].to_i
opts[:ignore_whitespace_change] = true if params.permit(:format)[:format] == 'diff'
opts[:use_extra_viewer_as_main] = false
opts
end
end
end

View File

@ -3,57 +3,33 @@
module Projects
module MergeRequests
class DiffsStreamController < Projects::MergeRequests::ApplicationController
include ActionController::Live
urgency :low, [:diffs]
def diffs
return render_404 unless ::Feature.enabled?(:rapid_diffs, current_user, type: :wip)
stream_headers
offset = params[:offset].to_i
# NOTE: This is a temporary flag to test out the new diff_blobs
if !!ActiveModel::Type::Boolean.new.cast(params[:diff_blobs])
stream_diff_blobs(offset)
else
stream_diff_files(offset)
end
rescue StandardError => e
Gitlab::AppLogger.error("Error streaming diffs: #{e.message}")
response.stream.write e.message
ensure
response.stream.close
end
include StreamDiffs
private
def view
helpers.diff_view
def resource
@merge_request
end
def stream_diff_blobs(offset)
@merge_request.diffs_for_streaming(offset_index: offset) do |diff_files_batch|
def options
{}
end
def stream_diff_files(options)
if !!ActiveModel::Type::Boolean.new.cast(params[:diff_blobs])
stream_diff_blobs(options)
else
super
end
end
def stream_diff_blobs(options)
@merge_request.diffs_for_streaming(options) do |diff_files_batch|
diff_files_batch.each do |diff_file|
response.stream.write(render_diff_file(diff_file))
end
end
end
def stream_diff_files(offset)
@merge_request.diffs_for_streaming(offset_index: offset).diff_files.each do |diff_file|
response.stream.write(render_diff_file(diff_file))
end
end
def render_diff_file(diff_file)
render_to_string(
::RapidDiffs::DiffFileComponent.new(diff_file: diff_file, parallel_view: view == :parallel),
layout: false
)
end
end
end
end

View File

@ -51,7 +51,7 @@ module Packages
end
def packages_class
@params.fetch(:packages_class, ::Packages::Package)
::Packages::Package
end
end
end

View File

@ -5,14 +5,6 @@ module Packages
class PackageFinder < ::Packages::GroupOrProjectPackageFinder
extend ::Gitlab::Utils::Override
def initialize(current_user, project_or_group, params = {})
if Feature.enabled?(:pypi_extract_pypi_package_model, Feature.current_request)
params[:packages_class] = ::Packages::Pypi::Package
end
super
end
def execute
packages.by_file_name_and_sha256(@params[:filename], @params[:sha256])
end
@ -20,11 +12,7 @@ module Packages
private
def packages
if Feature.enabled?(:pypi_extract_pypi_package_model, Feature.current_request)
base.has_version
else
base.pypi.has_version
end
base.has_version
end
override :group_packages
@ -37,6 +25,11 @@ module Packages
@project_or_group.all_projects.select(:id)
).installable
end
override :packages_class
def packages_class
::Packages::Pypi::Package
end
end
end
end

View File

@ -5,14 +5,6 @@ module Packages
class PackagesFinder < ::Packages::GroupOrProjectPackageFinder
extend ::Gitlab::Utils::Override
def initialize(current_user, project_or_group, params = {})
if Feature.enabled?(:pypi_extract_pypi_package_model, Feature.current_request)
params[:packages_class] = ::Packages::Pypi::Package
end
super
end
def execute
return packages unless @params[:package_name]
@ -22,11 +14,7 @@ module Packages
private
def packages
if Feature.enabled?(:pypi_extract_pypi_package_model, Feature.current_request)
base.has_version
else
base.pypi.has_version
end
base.has_version
end
override :group_packages
@ -37,6 +25,11 @@ module Packages
with_package_registry_enabled: true
)
end
override :packages_class
def packages_class
::Packages::Pypi::Package
end
end
end
end

View File

@ -515,6 +515,7 @@ class Commit
def diffs(diff_options = {})
Gitlab::Diff::FileCollection::Commit.new(self, diff_options: diff_options)
end
alias_method :diffs_for_streaming, :diffs
def persisted?
true

View File

@ -42,7 +42,7 @@ module Ml
def stop_destroy
return unless model_id
errors[:base] << "Cannot delete an experiment associated to a model"
errors.add(:base, "Cannot delete an experiment associated to a model")
# According to docs, throw is the correct way to stop on a callback
# https://api.rubyonrails.org/classes/ActiveRecord/Callbacks.html#module-ActiveRecord::Callbacks-label-Canceling+callbacks
throw :abort # rubocop:disable Cop/BanCatchThrow

View File

@ -44,10 +44,6 @@ class Packages::Package < ApplicationRecord
has_many :dependency_links, inverse_of: :package, class_name: 'Packages::DependencyLink'
has_many :tags, inverse_of: :package, class_name: 'Packages::Tag'
# TODO: Remove with the rollout of the FF pypi_extract_pypi_package_model
# https://gitlab.com/gitlab-org/gitlab/-/issues/480692
has_one :pypi_metadatum, inverse_of: :package, class_name: 'Packages::Pypi::Metadatum'
has_one :maven_metadatum, inverse_of: :package, class_name: 'Packages::Maven::Metadatum'
has_one :nuget_metadatum, inverse_of: :package, class_name: 'Packages::Nuget::Metadatum'
has_many :nuget_symbols, inverse_of: :package, class_name: 'Packages::Nuget::Symbol'
@ -79,10 +75,6 @@ class Packages::Package < ApplicationRecord
validates :version, format: { with: Gitlab::Regex.nuget_version_regex }, if: :nuget?
validates :version, format: { with: Gitlab::Regex.maven_version_regex }, if: -> { version? && maven? }
# TODO: Remove with the rollout of the FF pypi_extract_pypi_package_model
# https://gitlab.com/gitlab-org/gitlab/-/issues/480692
validates :version, format: { with: Gitlab::Regex.pypi_version_regex }, if: :pypi?
validates :version, format: { with: Gitlab::Regex.semver_regex, message: Gitlab::Regex.semver_regex_message },
if: -> { npm? || terraform_module? }
@ -90,16 +82,6 @@ class Packages::Package < ApplicationRecord
scope :with_name, ->(name) { where(name: name) }
scope :with_name_like, ->(name) { where(arel_table[:name].matches(name)) }
# TODO: Remove with the rollout of the FF pypi_extract_pypi_package_model
# https://gitlab.com/gitlab-org/gitlab/-/issues/480692
scope :with_normalized_pypi_name, ->(name) do
where(
"LOWER(regexp_replace(name, ?, '-', 'g')) = ?",
Gitlab::Regex::Packages::PYPI_NORMALIZED_NAME_REGEX_STRING,
name.downcase
)
end
scope :with_case_insensitive_version, ->(version) do
where('LOWER(version) = ?', version.downcase)
end
@ -135,10 +117,6 @@ class Packages::Package < ApplicationRecord
scope :preload_npm_metadatum, -> { preload(:npm_metadatum) }
scope :preload_nuget_metadatum, -> { preload(:nuget_metadatum) }
# TODO: Remove with the rollout of the FF pypi_extract_pypi_package_model
# https://gitlab.com/gitlab-org/gitlab/-/issues/480692
scope :preload_pypi_metadatum, -> { preload(:pypi_metadatum) }
scope :with_npm_scope, ->(scope) do
npm.where("position('/' in packages_packages.name) > 0 AND split_part(packages_packages.name, '/', 1) = :package_scope", package_scope: "@#{sanitize_sql_like(scope)}")
end
@ -189,7 +167,7 @@ class Packages::Package < ApplicationRecord
def self.inheritance_column = 'package_type'
def self.inheritance_column_to_class_map
hash = {
{
ml_model: 'Packages::MlModel::Package',
golang: 'Packages::Go::Package',
rubygems: 'Packages::Rubygems::Package',
@ -198,14 +176,9 @@ class Packages::Package < ApplicationRecord
debian: 'Packages::Debian::Package',
composer: 'Packages::Composer::Package',
helm: 'Packages::Helm::Package',
generic: 'Packages::Generic::Package'
}
if Feature.enabled?(:pypi_extract_pypi_package_model, Feature.current_request)
hash[:pypi] = 'Packages::Pypi::Package'
end
hash
generic: 'Packages::Generic::Package',
pypi: 'Packages::Pypi::Package'
}.freeze
end
def self.only_maven_packages_with_path(path, use_cte: false)
@ -329,16 +302,6 @@ class Packages::Package < ApplicationRecord
::Packages::MarkPackageFilesForDestructionWorker.perform_async(id)
end
# TODO: Remove with the rollout of the FF pypi_extract_pypi_package_model
# https://gitlab.com/gitlab-org/gitlab/-/issues/480692
#
# As defined in PEP 503 https://peps.python.org/pep-0503/#normalized-names
def normalized_pypi_name
return name unless pypi?
name.gsub(/#{Gitlab::Regex::Packages::PYPI_NORMALIZED_NAME_REGEX_STRING}/o, '-').downcase
end
def normalized_nuget_version
return unless nuget?

View File

@ -15,17 +15,7 @@ class Packages::Pypi::Metadatum < ApplicationRecord
belongs_to :package, class_name: 'Packages::Pypi::Package', inverse_of: :pypi_metadatum
# TODO: Remove with the rollout of the FF pypi_extract_pypi_package_model
# https://gitlab.com/gitlab-org/gitlab/-/issues/480692
belongs_to :legacy_package, -> {
where(package_type: :pypi)
}, inverse_of: :pypi_metadatum, class_name: 'Packages::Package', foreign_key: :package_id
validates :package, presence: true, if: -> { pypi_extract_pypi_package_model_enabled? }
# TODO: Remove with the rollout of the FF pypi_extract_pypi_package_model
# https://gitlab.com/gitlab-org/gitlab/-/issues/480692
validates :legacy_package, presence: true, unless: -> { pypi_extract_pypi_package_model_enabled? }
validates :package, presence: true
with_options allow_nil: true do
validates :keywords, length: { maximum: MAX_KEYWORDS_LENGTH }
@ -36,19 +26,4 @@ class Packages::Pypi::Metadatum < ApplicationRecord
validates :description_content_type, length: { maximum: MAX_DESCRIPTION_CONTENT_TYPE_LENGTH }
end
validates :required_python, length: { maximum: MAX_REQUIRED_PYTHON_LENGTH }, allow_nil: false
validate :pypi_package_type, unless: -> { pypi_extract_pypi_package_model_enabled? }
private
def pypi_package_type
unless legacy_package&.pypi?
errors.add(:base, _('Package type must be PyPi'))
end
end
def pypi_extract_pypi_package_model_enabled?
Feature.enabled?(:pypi_extract_pypi_package_model, Feature.current_request)
end
strong_memoize_attr :pypi_extract_pypi_package_model_enabled?
end

View File

@ -9,11 +9,13 @@ module Import
presents ::BulkImport, as: :bulk_import
def show_alert?
Feature.enabled?(:importer_user_mapping, current_user) && groups_awaiting_placeholder_assignment.any?
Feature.enabled?(:importer_user_mapping, current_user) &&
Feature.enabled?(:bulk_import_importer_user_mapping, current_user) &&
groups_awaiting_placeholder_assignment.any?
end
def groups_awaiting_placeholder_assignment
return [] unless bulk_import
return [] unless bulk_import&.finished?
namespaces = bulk_import.namespaces_with_unassigned_placeholders
namespaces.select do |namespace|

View File

@ -15,7 +15,7 @@ module BulkImports
return unless bulk_import
return if bulk_import.completed?
return bulk_import.fail_op! if all_entities_failed?
return bulk_import.finish! if all_entities_processed? && bulk_import.started?
return bulk_import.finish! if all_entities_processed? && bulk_import.started? && placeholder_references_loaded?
return re_enqueue if max_batch_size_exceeded? # Do not start more jobs if max allowed are already running
process_bulk_import
@ -54,6 +54,25 @@ module BulkImports
entities.all?(&:failed?)
end
def placeholder_references_loaded?
return true unless importer_user_mapping_enabled?
store = Import::PlaceholderReferences::Store.new(
import_source: Import::SOURCE_DIRECT_TRANSFER,
import_uid: bulk_import.id
)
return true if store.empty?
logger.info(
message: 'Placeholder references not finished loading to database',
bulk_import_id: bulk_import.id,
placeholder_reference_store_count: store.count
)
false
end
# A new BulkImportWorker job is enqueued to either
# - Process the new BulkImports::Entity created during import (e.g. for the subgroups)
# - Or to mark the `bulk_import` as finished
@ -69,6 +88,10 @@ module BulkImports
started_entities.count >= DEFAULT_BATCH_SIZE
end
def importer_user_mapping_enabled?
Import::BulkImports::EphemeralData.new(bulk_import.id).importer_user_mapping_enabled?
end
def next_batch_size
[DEFAULT_BATCH_SIZE - started_entities.count, 0].max
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
module Ml
class DestroyExperimentService
def initialize(experiment)
@experiment = experiment
end
def execute
if @experiment.destroy
ServiceResponse.success(payload: payload)
else
ServiceResponse.error(message: @experiment.errors.full_messages, payload: payload)
end
end
private
def payload
{ experiment: @experiment }
end
end
end

View File

@ -8,6 +8,7 @@ module Packages
def execute
::Packages::Package.transaction do
meta = Packages::Pypi::Metadatum.new(
package: created_package,
required_python: params[:requires_python] || '',
metadata_version: params[:metadata_version],
author_email: params[:author_email],
@ -17,12 +18,6 @@ module Packages
keywords: params[:keywords]
)
if Feature.enabled?(:pypi_extract_pypi_package_model, Feature.current_request)
meta.package = created_package
else
meta.legacy_package = created_package
end
truncate_fields(meta)
raise ActiveRecord::RecordInvalid, meta unless meta.valid?

View File

@ -1,9 +0,0 @@
---
name: pypi_extract_pypi_package_model
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/435827
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165694
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/480692
milestone: '17.5'
group: group::package registry
type: gitlab_com_derisk
default_enabled: false

View File

@ -96,6 +96,7 @@ resources :commit, only: [:show], constraints: { id: Gitlab::Git::Commit::SHA_PA
member do
get :show, to: 'commit#rapid_diffs',
constraints: ->(params) { params[:rapid_diffs] == 'true' }
get :diffs_stream, to: 'commit_diffs_stream#diffs'
get :branches
get :pipelines
post :revert

View File

@ -0,0 +1,9 @@
---
migration_job_name: BackfillPackagesRubygemsMetadataProjectId
description: Backfills sharding key `packages_rubygems_metadata.project_id` from `packages_packages`.
feature_category: package_registry
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167611
milestone: '17.5'
queued_migration_version: 20240930122643
finalize_after: '2024-10-22'
finalized_by: # version of the migration that finalized this BBM

View File

@ -19,3 +19,4 @@ desired_sharding_key:
table: packages_packages
sharding_key: project_id
belongs_to: package
desired_sharding_key_migration_job_name: BackfillPackagesRubygemsMetadataProjectId

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddProjectIdToPackagesRubygemsMetadata < Gitlab::Database::Migration[2.2]
milestone '17.5'
def change
add_column :packages_rubygems_metadata, :project_id, :bigint
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class IndexPackagesRubygemsMetadataOnProjectId < Gitlab::Database::Migration[2.2]
milestone '17.5'
disable_ddl_transaction!
INDEX_NAME = 'index_packages_rubygems_metadata_on_project_id'
def up
add_concurrent_index :packages_rubygems_metadata, :project_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :packages_rubygems_metadata, INDEX_NAME
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddPackagesRubygemsMetadataProjectIdFk < Gitlab::Database::Migration[2.2]
milestone '17.5'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :packages_rubygems_metadata, :projects, column: :project_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :packages_rubygems_metadata, column: :project_id
end
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class AddPackagesRubygemsMetadataProjectIdTrigger < Gitlab::Database::Migration[2.2]
milestone '17.5'
def up
install_sharding_key_assignment_trigger(
table: :packages_rubygems_metadata,
sharding_key: :project_id,
parent_table: :packages_packages,
parent_sharding_key: :project_id,
foreign_key: :package_id
)
end
def down
remove_sharding_key_assignment_trigger(
table: :packages_rubygems_metadata,
sharding_key: :project_id,
parent_table: :packages_packages,
parent_sharding_key: :project_id,
foreign_key: :package_id
)
end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
class QueueBackfillPackagesRubygemsMetadataProjectId < Gitlab::Database::Migration[2.2]
milestone '17.5'
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
MIGRATION = "BackfillPackagesRubygemsMetadataProjectId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:packages_rubygems_metadata,
:package_id,
:project_id,
:packages_packages,
:project_id,
:package_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(
MIGRATION,
:packages_rubygems_metadata,
:package_id,
[
:project_id,
:packages_packages,
:project_id,
:package_id
]
)
end
end

View File

@ -0,0 +1 @@
fa54d0f7fd89d3b9fa64d76fd48228070bdd55dc4840c37e60fd7b42c12ea327

View File

@ -0,0 +1 @@
a432dae36492a800316b32c86343fb1f6a0164c08596c62935e68879c8332159

View File

@ -0,0 +1 @@
7ec41a97fa56b1d6ea95eb95603f9547bbae01f832e0d4f4a0ec6e1b9d552b07

View File

@ -0,0 +1 @@
9ea93c14618c93e0d3b7e825c95b5b1d37159df9eff1580eb10f8e12c0bc75ad

View File

@ -0,0 +1 @@
b7280f94dcd97acbc5b48ad26d2c1907bd5a82a40ff33d44c476f3ae1c92c39f

View File

@ -1609,6 +1609,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_700f29b1312e() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."project_id" IS NULL THEN
SELECT "project_id"
INTO NEW."project_id"
FROM "packages_packages"
WHERE "packages_packages"."id" = NEW."package_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_70d3f0bba1de() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -15644,6 +15660,7 @@ CREATE TABLE packages_rubygems_metadata (
requirements text,
rubygems_version text,
signing_key text,
project_id bigint,
CONSTRAINT check_0154a18c82 CHECK ((char_length(description) <= 1024)),
CONSTRAINT check_22814c771b CHECK ((char_length(email) <= 255)),
CONSTRAINT check_242293030e CHECK ((char_length(extensions) <= 255)),
@ -30051,6 +30068,8 @@ CREATE INDEX index_packages_rpm_metadata_on_project_id ON packages_rpm_metadata
CREATE INDEX index_packages_rpm_repository_files_on_project_id_and_file_name ON packages_rpm_repository_files USING btree (project_id, file_name);
CREATE INDEX index_packages_rubygems_metadata_on_project_id ON packages_rubygems_metadata USING btree (project_id);
CREATE INDEX index_packages_tags_on_package_id_and_updated_at ON packages_tags USING btree (package_id, updated_at DESC);
CREATE INDEX index_packages_tags_on_project_id ON packages_tags USING btree (project_id);
@ -33523,6 +33542,8 @@ CREATE TRIGGER trigger_6cdea9559242 BEFORE INSERT OR UPDATE ON issue_links FOR E
CREATE TRIGGER trigger_6d6c79ce74e1 BEFORE INSERT OR UPDATE ON protected_environment_deploy_access_levels FOR EACH ROW EXECUTE FUNCTION trigger_6d6c79ce74e1();
CREATE TRIGGER trigger_700f29b1312e BEFORE INSERT OR UPDATE ON packages_rubygems_metadata FOR EACH ROW EXECUTE FUNCTION trigger_700f29b1312e();
CREATE TRIGGER trigger_70d3f0bba1de BEFORE INSERT OR UPDATE ON compliance_framework_security_policies FOR EACH ROW EXECUTE FUNCTION trigger_70d3f0bba1de();
CREATE TRIGGER trigger_740afa9807b8 BEFORE INSERT OR UPDATE ON subscription_user_add_on_assignments FOR EACH ROW EXECUTE FUNCTION trigger_740afa9807b8();
@ -34760,6 +34781,9 @@ ALTER TABLE ONLY bulk_import_entities
ALTER TABLE ONLY security_policy_requirements
ADD CONSTRAINT fk_b6e48e3428 FOREIGN KEY (compliance_framework_security_policy_id) REFERENCES compliance_framework_security_policies(id) ON DELETE CASCADE;
ALTER TABLE ONLY packages_rubygems_metadata
ADD CONSTRAINT fk_b73c052149 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY compliance_management_frameworks
ADD CONSTRAINT fk_b74c45b71f FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;

View File

@ -0,0 +1,170 @@
---
stage: Verify
group: Pipeline Execution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# External pipeline validation
DETAILS:
**Tier:** Free, Premium, Ultimate
**Offering:** Self-managed
You can use an external service to validate a pipeline before it's created.
GitLab sends a POST request to the external service URL with the pipeline
data as payload. The response code from the external service determines if GitLab
should accept or reject the pipeline. If the response is:
- `200`, the pipeline is accepted.
- `406`, the pipeline is rejected.
- Other codes, the pipeline is accepted and logged.
If there's an error or the request times out, the pipeline is accepted.
Pipelines rejected by the external validation service aren't created, and don't
appear in pipeline lists in the GitLab UI or API. If you create a pipeline in the
UI that is rejected, `Pipeline cannot be run. External validation failed` is displayed.
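For illustration only, here is a minimal sketch of what such a validation service could look like, written in plain Ruby with WEBrick; the port, the `/validate` path, and the acceptance rule are assumptions, not part of GitLab:
```ruby
# Minimal sketch of an external validation service (illustrative assumptions:
# WEBrick, port 8000, and a made-up rule that rejects pipelines started by
# users created in the last 24 hours).
require 'json'
require 'time'
require 'webrick'

server = WEBrick::HTTPServer.new(Port: 8000)

server.mount_proc '/validate' do |request, response|
  payload = JSON.parse(request.body)

  created_at = payload.dig('user', 'created_at')
  too_new = created_at && Time.parse(created_at) > Time.now - (24 * 60 * 60)

  # 200 accepts the pipeline, 406 rejects it; any other code accepts and is logged.
  response.status = too_new ? 406 : 200
  response['Content-Type'] = 'application/json'
  response.body = '{}'
end

trap('INT') { server.shutdown }
server.start
```
The pipeline data POSTed to the service follows the [payload schema](#payload-schema) below.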
## Configure external pipeline validation
To configure external pipeline validation, add the
[`EXTERNAL_VALIDATION_SERVICE_URL` environment variable](../environment_variables.md)
and set it to the external service URL.
By default, requests to the external service time out after five seconds. To override
the default, set the `EXTERNAL_VALIDATION_SERVICE_TIMEOUT` environment variable to the
required number of seconds.
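For an Omnibus installation, one way to set these variables is through `gitlab_rails['env']` in `/etc/gitlab/gitlab.rb`, followed by `gitlab-ctl reconfigure`; the URL and timeout below are placeholders:
```ruby
# /etc/gitlab/gitlab.rb — placeholder values for illustration.
gitlab_rails['env'] = {
  'EXTERNAL_VALIDATION_SERVICE_URL' => 'https://validation.example.com/validate',
  'EXTERNAL_VALIDATION_SERVICE_TIMEOUT' => '3'
}
```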
## Payload schema
> - `tag_list` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/335904) in GitLab 16.11.
```json
{
"type": "object",
"required" : [
"project",
"user",
"credit_card",
"pipeline",
"builds",
"total_builds_count",
"namespace"
],
"properties" : {
"project": {
"type": "object",
"required": [
"id",
"path",
"created_at",
"shared_runners_enabled",
"group_runners_enabled"
],
"properties": {
"id": { "type": "integer" },
"path": { "type": "string" },
"created_at": { "type": ["string", "null"], "format": "date-time" },
"shared_runners_enabled": { "type": "boolean" },
"group_runners_enabled": { "type": "boolean" }
}
},
"user": {
"type": "object",
"required": [
"id",
"username",
"email",
"created_at"
],
"properties": {
"id": { "type": "integer" },
"username": { "type": "string" },
"email": { "type": "string" },
"created_at": { "type": ["string", "null"], "format": "date-time" },
"current_sign_in_ip": { "type": ["string", "null"] },
"last_sign_in_ip": { "type": ["string", "null"] },
"sign_in_count": { "type": "integer" }
}
},
"credit_card": {
"type": "object",
"required": [
"similar_cards_count",
"similar_holder_names_count"
],
"properties": {
"similar_cards_count": { "type": "integer" },
"similar_holder_names_count": { "type": "integer" }
}
},
"pipeline": {
"type": "object",
"required": [
"sha",
"ref",
"type"
],
"properties": {
"sha": { "type": "string" },
"ref": { "type": "string" },
"type": { "type": "string" }
}
},
"builds": {
"type": "array",
"items": {
"type": "object",
"required": [
"name",
"stage",
"image",
"tag_list",
"services",
"script"
],
"properties": {
"name": { "type": "string" },
"stage": { "type": "string" },
"image": { "type": ["string", "null"] },
"tag_list": { "type": ["array", "null"] },
"services": {
"type": ["array", "null"],
"items": { "type": "string" }
},
"script": {
"type": "array",
"items": { "type": "string" }
}
}
}
},
"total_builds_count": { "type": "integer" },
"namespace": {
"type": "object",
"required": [
"plan",
"trial"
],
"properties": {
"plan": { "type": "string" },
"trial": { "type": "boolean" }
}
},
"provisioning_group": {
"type": "object",
"required": [
"plan",
"trial"
],
"properties": {
"plan": { "type": "string" },
"trial": { "type": "boolean" }
}
}
}
}
```
The `namespace` field is only available in [GitLab Premium and Ultimate](https://about.gitlab.com/pricing/).

View File

@ -22,29 +22,29 @@ You can use the following environment variables to override certain values:
## Supported environment variables
| Variable | Type | Description |
|--------------------------------------------|---------|---------------------------------------------------------------------------------------------------------|
| `DATABASE_URL` | string | The database URL; is of the form: `postgresql://localhost/blog_development`. |
| Variable | Type | Description |
|--------------------------------------------|---------|-------------|
| `DATABASE_URL` | string | The database URL; is of the form: `postgresql://localhost/blog_development`. |
| `ENABLE_BOOTSNAP` | string | Toggles [Bootsnap](https://github.com/Shopify/bootsnap) for speeding up initial Rails boot. Enabled by default for non-production environments. Set to `0` to disable. |
| `EXTERNAL_URL` | string | Specify the external URL at the [time of installation](https://docs.gitlab.com/omnibus/settings/configuration.html#specifying-the-external-url-at-the-time-of-installation). |
| `EXTERNAL_VALIDATION_SERVICE_TIMEOUT` | integer | Timeout, in seconds, for an [external CI/CD pipeline validation service](external_pipeline_validation.md). Default is `5`. |
| `EXTERNAL_VALIDATION_SERVICE_URL` | string | URL to an [external CI/CD pipeline validation service](external_pipeline_validation.md). |
| `EXTERNAL_VALIDATION_SERVICE_TOKEN` | string | The `X-Gitlab-Token` for authentication with an [external CI/CD pipeline validation service](external_pipeline_validation.md). |
| `EXTERNAL_VALIDATION_SERVICE_TIMEOUT` | integer | Timeout, in seconds, for an [external CI/CD pipeline validation service](cicd/external_pipeline_validation.md). Default is `5`. |
| `EXTERNAL_VALIDATION_SERVICE_URL` | string | URL to an [external CI/CD pipeline validation service](cicd/external_pipeline_validation.md). |
| `EXTERNAL_VALIDATION_SERVICE_TOKEN` | string | The `X-Gitlab-Token` for authentication with an [external CI/CD pipeline validation service](cicd/external_pipeline_validation.md). |
| `GITLAB_CDN_HOST` | string | Sets the base URL for a CDN to serve static assets (for example, `https://mycdnsubdomain.fictional-cdn.com`). |
| `GITLAB_EMAIL_DISPLAY_NAME` | string | The name used in the **From** field in emails sent by GitLab. |
| `GITLAB_EMAIL_FROM` | string | The email address used in the **From** field in emails sent by GitLab. |
| `GITLAB_EMAIL_REPLY_TO` | string | The email address used in the **Reply-To** field in emails sent by GitLab. |
| `GITLAB_EMAIL_SUBJECT_SUFFIX` | string | The email subject suffix used in emails sent by GitLab. |
| `GITLAB_HOST` | string | The full URL of the GitLab server (including `http://` or `https://`). |
| `GITLAB_EMAIL_DISPLAY_NAME` | string | The name used in the **From** field in emails sent by GitLab. |
| `GITLAB_EMAIL_FROM` | string | The email address used in the **From** field in emails sent by GitLab. |
| `GITLAB_EMAIL_REPLY_TO` | string | The email address used in the **Reply-To** field in emails sent by GitLab. |
| `GITLAB_EMAIL_SUBJECT_SUFFIX` | string | The email subject suffix used in emails sent by GitLab. |
| `GITLAB_HOST` | string | The full URL of the GitLab server (including `http://` or `https://`). |
| `GITLAB_MARKUP_TIMEOUT` | string | Timeout, in seconds, for `rest2html` and `pod2html` commands executed by the [`gitlab-markup` gem](https://gitlab.com/gitlab-org/gitlab-markup/). Default is `10`. |
| `GITLAB_ROOT_PASSWORD` | string | Sets the password for the `root` user on installation. |
| `GITLAB_ROOT_PASSWORD` | string | Sets the password for the `root` user on installation. |
| `GITLAB_SHARED_RUNNERS_REGISTRATION_TOKEN` | string | Sets the initial registration token used for runners. [Deprecated in GitLab 16.11](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/148310). |
| `RAILS_ENV` | string | The Rails environment; can be one of `production`, `development`, `staging`, or `test`. |
| `RAILS_ENV` | string | The Rails environment; can be one of `production`, `development`, `staging`, or `test`. |
| `GITLAB_RAILS_CACHE_DEFAULT_TTL_SECONDS` | integer | The default TTL used for entries stored in the Rails-cache. Default is `28800`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/95042) in 15.3. |
| `GITLAB_CI_CONFIG_FETCH_TIMEOUT_SECONDS` | integer | Timeout for resolving remote includes in CI config in seconds. Must be between `0` and `60`. Default is `30`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/116383) in 15.11. |
| `GITLAB_DISABLE_TOKEN_EXPIRATION_BANNER` | string | If set to `true`, `1`, or `yes`, the token expiration banner is not shown. Default is `false`. |
| `GITLAB_DISABLE_MARKDOWN_TIMEOUT` | string | If set to `true`, `1`, or `yes`, Markdown rendering on the backend does not time out. Default is `false`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/163662) in 17.4.|
| `GITLAB_LFS_MAX_OID_TO_FETCH` | integer | Sets the maximum number of LFS objects to link. Default is `100,000`. |
| `GITLAB_DISABLE_MARKDOWN_TIMEOUT` | string | If set to `true`, `1`, or `yes`, Markdown rendering on the backend does not time out. Default is `false`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/163662) in 17.4. |
| `GITLAB_LFS_MAX_OID_TO_FETCH` | integer | Sets the maximum number of LFS objects to link. Default is `100,000`. |
| `SIDEKIQ_SEMI_RELIABLE_FETCH_TIMEOUT` | integer | Sets the timeout for Sidekiq semi-reliable fetch. Default is `5`. [Before GitLab 16.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/139583), default was `3`. If you experience high Redis CPU consumption on GitLab 16.6 and earlier, or if you have customized this variable, you should update this variable to `5`. |
## Adding more variables

View File

@ -1,170 +1,13 @@
---
stage: Verify
group: Pipeline Execution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
redirect_to: 'cicd/external_pipeline_validation.md'
remove_date: '2025-01-03'
---
# External pipeline validation
<!-- markdownlint-disable -->
DETAILS:
**Tier:** Free, Premium, Ultimate
**Offering:** Self-managed
This document was moved to [another location](cicd/external_pipeline_validation.md).
You can use an external service to validate a pipeline before it's created.
GitLab sends a POST request to the external service URL with the pipeline
data as payload. The response code from the external service determines if GitLab
should accept or reject the pipeline. If the response is:
- `200`, the pipeline is accepted.
- `406`, the pipeline is rejected.
- Other codes, the pipeline is accepted and logged.
If there's an error or the request times out, the pipeline is accepted.
Pipelines rejected by the external validation service aren't created, and don't
appear in pipeline lists in the GitLab UI or API. If you create a pipeline in the
UI that is rejected, `Pipeline cannot be run. External validation failed` is displayed.
## Configure external pipeline validation
To configure external pipeline validation, add the
[`EXTERNAL_VALIDATION_SERVICE_URL` environment variable](environment_variables.md)
and set it to the external service URL.
By default, requests to the external service time out after five seconds. To override
the default, set the `EXTERNAL_VALIDATION_SERVICE_TIMEOUT` environment variable to the
required number of seconds.
## Payload schema
> - `tag_list` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/335904) in GitLab 16.11.
```json
{
"type": "object",
"required" : [
"project",
"user",
"credit_card",
"pipeline",
"builds",
"total_builds_count",
"namespace"
],
"properties" : {
"project": {
"type": "object",
"required": [
"id",
"path",
"created_at",
"shared_runners_enabled",
"group_runners_enabled"
],
"properties": {
"id": { "type": "integer" },
"path": { "type": "string" },
"created_at": { "type": ["string", "null"], "format": "date-time" },
"shared_runners_enabled": { "type": "boolean" },
"group_runners_enabled": { "type": "boolean" }
}
},
"user": {
"type": "object",
"required": [
"id",
"username",
"email",
"created_at"
],
"properties": {
"id": { "type": "integer" },
"username": { "type": "string" },
"email": { "type": "string" },
"created_at": { "type": ["string", "null"], "format": "date-time" },
"current_sign_in_ip": { "type": ["string", "null"] },
"last_sign_in_ip": { "type": ["string", "null"] },
"sign_in_count": { "type": "integer" }
}
},
"credit_card": {
"type": "object",
"required": [
"similar_cards_count",
"similar_holder_names_count"
],
"properties": {
"similar_cards_count": { "type": "integer" },
"similar_holder_names_count": { "type": "integer" }
}
},
"pipeline": {
"type": "object",
"required": [
"sha",
"ref",
"type"
],
"properties": {
"sha": { "type": "string" },
"ref": { "type": "string" },
"type": { "type": "string" }
}
},
"builds": {
"type": "array",
"items": {
"type": "object",
"required": [
"name",
"stage",
"image",
"tag_list",
"services",
"script"
],
"properties": {
"name": { "type": "string" },
"stage": { "type": "string" },
"image": { "type": ["string", "null"] },
"tag_list": { "type": ["array", "null"] },
"services": {
"type": ["array", "null"],
"items": { "type": "string" }
},
"script": {
"type": "array",
"items": { "type": "string" }
}
}
}
},
"total_builds_count": { "type": "integer" },
"namespace": {
"type": "object",
"required": [
"plan",
"trial"
],
"properties": {
"plan": { "type": "string" },
"trial": { "type": "boolean" }
}
},
"provisioning_group": {
"type": "object",
"required": [
"plan",
"trial"
],
"properties": {
"plan": { "type": "string" },
"trial": { "type": "boolean" }
}
}
}
}
```
The `namespace` field is only available in [GitLab Premium and Ultimate](https://about.gitlab.com/pricing/).
<!-- This redirect file can be deleted after <2025-01-03>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (for example, link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->

View File

@ -355,12 +355,14 @@ Plan.default.actual_limits.update!(import_placeholder_user_limit_tier_1: 200)
Set the limit to `0` to disable it.
## Pull Mirroring Interval
## Pull mirroring interval
The [minimum wait time between pull refreshes](../user/project/repository/mirror/index.md)
defaults to 300 seconds (5 minutes). For example, a pull refresh only runs once in a given 300 second period, regardless of how many times you trigger it.
This setting applies in the context of pull refreshes invoked via the [projects API](../api/projects.md#start-the-pull-mirroring-process-for-a-project), or when forcing an update by selecting **Update now** (**{retry}**) in **Settings > Repository > Mirroring repositories**. This setting has no effect on the automatic 30 minute interval schedule used by Sidekiq for [pull mirroring](../user/project/repository/mirror/pull.md).
This setting applies in the context of pull refreshes invoked by using the [projects API](../api/project_pull_mirroring.md#start-the-pull-mirroring-process-for-a-project),
or when forcing an update by selecting **Update now** (**{retry}**) in **Settings > Repository > Mirroring repositories**.
This setting has no effect on the automatic 30 minute interval schedule used by Sidekiq for [pull mirroring](../user/project/repository/mirror/pull.md).
To change this limit for a self-managed installation, run the following in the
[GitLab Rails console](operations/rails_console.md#starting-a-rails-console-session):

View File

@ -0,0 +1,123 @@
---
stage: Data Stores
group: Tenant Scale
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Pull mirroring API
DETAILS:
**Tier:** Premium, Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
You can manage project [pull mirroring](../user/project/repository/mirror/pull.md) by using the REST API.
## Get a project's pull mirror details
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/354506) in GitLab 15.6.
Return the details of a project's [pull mirror](../user/project/repository/mirror/index.md).
```plaintext
GET /projects/:id/mirror/pull
```
Supported attributes:
| Attribute | Type | Required | Description |
|:----------|:------------------|:---------|:------------|
| `id` | integer or string | Yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-paths). |
Example request:
```shell
curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/:id/mirror/pull"
```
Example response:
```json
{
"id": 101486,
"last_error": null,
"last_successful_update_at": "2020-01-06T17:32:02.823Z",
"last_update_at": "2020-01-06T17:32:02.823Z",
"last_update_started_at": "2020-01-06T17:31:55.864Z",
"update_status": "finished",
"url": "https://*****:*****@gitlab.com/gitlab-org/security/gitlab.git"
}
```
## Configure pull mirroring for a project
> - Field `mirror_branch_regex` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/381667) in GitLab 15.8 [with a flag](../administration/feature_flags.md) named `mirror_only_branches_match_regex`. Disabled by default.
> - [Enabled by default](https://gitlab.com/gitlab-org/gitlab/-/issues/381667) in GitLab 16.0.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/410354) in GitLab 16.2. Feature flag `mirror_only_branches_match_regex` removed.
Configure pull mirroring while [creating a new project](projects.md#create-a-project) or
[updating an existing project](projects.md#edit-a-project) by using the API if the remote repository is accessible publicly or by
using `username:token` authentication.
If your HTTP repository is not publicly accessible, you can add the authentication information to the URL. For example,
`https://username:token@gitlab.company.com/group/project.git` where `token` is a
[personal access token](../user/profile/personal_access_tokens.md) with the `api` scope enabled.
Supported attributes:
| Attribute | Type | Required | Description |
|:---------------------------------|:--------|:---------|:------------|
| `import_url` | string | Yes | URL of remote repository being mirrored (with `user:token` if needed). |
| `mirror` | boolean | Yes | Enables pull mirroring on project when set to `true`. |
| `mirror_trigger_builds` | boolean | No | Trigger pipelines for mirror updates when set to `true`. |
| `only_mirror_protected_branches` | boolean | No | Limits mirroring to only protected branches when set to `true`. |
| `mirror_branch_regex` | String | No | Contains a regular expression. Only branches with names matching the regex are mirrored. Requires `only_mirror_protected_branches` to be disabled. |
Example creating a project with pull mirroring:
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
--header "Content-Type: application/json" \
--data '{
"name": "new_project",
"namespace_id": "1",
"mirror": true,
"import_url": "https://username:token@gitlab.example.com/group/project.git"
}' \
--url "https://gitlab.example.com/api/v4/projects/"
```
Example adding pull mirroring:
```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
--url "https://gitlab.example.com/api/v4/projects/:id" \
--data "mirror=true&import_url=https://username:token@gitlab.example.com/group/project.git"
```
Example removing pull mirroring:
```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
--url "https://gitlab.example.com/api/v4/projects/:id" \
--data "mirror=false"
```
## Start the pull mirroring process for a project
Start the pull mirroring process for a project.
```plaintext
POST /projects/:id/mirror/pull
```
Supported attributes:
| Attribute | Type | Required | Description |
|:----------|:------------------|:---------|:------------|
| `id` | integer or string | Yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-paths). |
Example request:
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/:id/mirror/pull"
```

View File

@ -3003,127 +3003,6 @@ Example response:
}
```
## Get a project's pull mirror details
DETAILS:
**Tier:** Premium, Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/354506) in GitLab 15.6.
Return the details of a project's [pull mirror](../user/project/repository/mirror/index.md).
```plaintext
GET /projects/:id/mirror/pull
```
Supported attributes:
| Attribute | Type | Required | Description |
|:----------|:------------------|:---------|:------------|
| `id` | integer or string | Yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-paths). |
Example request:
```shell
curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/:id/mirror/pull"
```
Example response:
```json
{
"id": 101486,
"last_error": null,
"last_successful_update_at": "2020-01-06T17:32:02.823Z",
"last_update_at": "2020-01-06T17:32:02.823Z",
"last_update_started_at": "2020-01-06T17:31:55.864Z",
"update_status": "finished",
"url": "https://*****:*****@gitlab.com/gitlab-org/security/gitlab.git"
}
```
## Configure pull mirroring for a project
DETAILS:
**Tier:** Premium, Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
> - Field `mirror_branch_regex` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/381667) in GitLab 15.8 [with a flag](../administration/feature_flags.md) named `mirror_only_branches_match_regex`. Disabled by default.
> - [Enabled by default](https://gitlab.com/gitlab-org/gitlab/-/issues/381667) in GitLab 16.0.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/410354) in GitLab 16.2. Feature flag `mirror_only_branches_match_regex` removed.
Configure pull mirroring while [creating a new project](#create-a-project) or [updating an existing project](#edit-a-project)
by using the API if the remote repository is accessible publicly or by using `username:token` authentication.
If your HTTP repository is not publicly accessible, you can add the authentication information to the URL. For example,
`https://username:token@gitlab.company.com/group/project.git` where `token` is a
[personal access token](../user/profile/personal_access_tokens.md) with the `api` scope enabled.
Supported attributes:
| Attribute | Type | Required | Description |
|:---------------------------------|:--------|:---------|:------------|
| `import_url` | string | Yes | URL of remote repository being mirrored (with `user:token` if needed). |
| `mirror` | boolean | Yes | Enables pull mirroring on project when set to `true`. |
| `mirror_trigger_builds` | boolean | No | Trigger pipelines for mirror updates when set to `true`. |
| `only_mirror_protected_branches` | boolean | No | Limits mirroring to only protected branches when set to `true`. |
| `mirror_branch_regex` | String | No | Contains a regular expression. Only branches with names matching the regex are mirrored. Requires `only_mirror_protected_branches` to be disabled. |
Example creating a project with pull mirroring:
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
--header "Content-Type: application/json" \
--data '{
"name": "new_project",
"namespace_id": "1",
"mirror": true,
"import_url": "https://username:token@gitlab.example.com/group/project.git"
}' \
--url "https://gitlab.example.com/api/v4/projects/"
```
Example adding pull mirroring:
```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
--url "https://gitlab.example.com/api/v4/projects/:id" \
--data "mirror=true&import_url=https://username:token@gitlab.example.com/group/project.git"
```
Example removing pull mirroring:
```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
--url "https://gitlab.example.com/api/v4/projects/:id" \
--data "mirror=false"
```
## Start the pull mirroring process for a project
DETAILS:
**Tier:** Premium, Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
Start the pull mirroring process for a project.
```plaintext
POST /projects/:id/mirror/pull
```
Supported attributes:
| Attribute | Type | Required | Description |
|:----------|:------------------|:---------|:------------|
| `id` | integer or string | Yes | The ID or [URL-encoded path of the project](rest/index.md#namespaced-paths). |
Example request:
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/:id/mirror/pull"
```
## Download snapshot of a Git repository
This endpoint may only be accessed by an administrative user.

View File

@ -19,7 +19,7 @@ and password information.
NOTE:
[Pull mirrors](../user/project/repository/mirror/pull.md) use
[a different API endpoint](projects.md#configure-pull-mirroring-for-a-project) to
[a different API endpoint](project_pull_mirroring.md#configure-pull-mirroring-for-a-project) to
display and update them.
## List a project's remote mirrors
@ -89,7 +89,8 @@ Example response:
## Create a pull mirror
Learn how to [configure a pull mirror](projects.md#configure-pull-mirroring-for-a-project) using the Projects API.
Learn how to [configure a pull mirror](project_pull_mirroring.md#configure-pull-mirroring-for-a-project) by using the
project pull mirroring API.
## Create a push mirror

View File

@ -48,7 +48,7 @@ repositories:
GitLab:
1. Imports the project.
1. Enables [Pull Mirroring](../../user/project/repository/mirror/pull.md).
1. Enables [pull mirroring](../../user/project/repository/mirror/pull.md).
1. Enables [GitHub project integration](../../user/project/integrations/github.md).
1. Creates a web hook on GitHub to notify GitLab of new commits.
@ -80,7 +80,7 @@ To manually enable GitLab CI/CD for your repository:
new commits.
The web hook URL should be set to the GitLab API to
[trigger pull mirroring](../../api/projects.md#start-the-pull-mirroring-process-for-a-project),
[trigger pull mirroring](../../api/project_pull_mirroring.md#start-the-pull-mirroring-process-for-a-project),
using the GitLab personal access token we just created:
```plaintext
@ -90,15 +90,3 @@ To manually enable GitLab CI/CD for your repository:
Select the **Let me select individual events** option, then check the **Pull requests** and **Pushes** checkboxes. These settings are needed for [pipelines for external pull requests](index.md#pipelines-for-external-pull-requests).
1. In GitHub, add a `.gitlab-ci.yml` to configure GitLab CI/CD.
<!-- ## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
one might have when setting this up, or when something is changed, or on upgrading, it's
important to describe those, too. Think of things that may go wrong and include them here.
This is important to minimize requests for support, and to avoid doc comments with
questions that you know someone might ask.
Each scenario can be a third-level heading, for example `### Getting error message X`.
If you have none to add when creating a doc, leave this section in place
but commented out to help encourage others to add to it in the future. -->

View File

@ -22,7 +22,7 @@ Specific details may have changed since then, but it should still serve as a goo
## Explanation of mirroring process
GitLab performs these steps when an
[API call](../api/projects.md#start-the-pull-mirroring-process-for-a-project)
[API call](../api/project_pull_mirroring.md#start-the-pull-mirroring-process-for-a-project)
triggers a pull mirror. Scheduled mirror updates are similar, but do not start with the API call:
1. The request originates from an API call, and triggers the `start_pull_mirroring_service` in

View File

@ -247,6 +247,9 @@ The OpenSSL 3 upgrade has been postponed to GitLab 17.7.0.
- Git 2.46.0 and later is required by Gitaly. For installations from source, you should use the [Git version provided by Gitaly](../../install/installation.md#git).
- S3 object storage uploads in Workhorse are now handled by default using the [AWS SDK v2 for Go](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/164597). If you experience issues
with S3 object storage uploads, you can downgrade to v1 by disabling the `workhorse_use_aws_sdk_v2` [feature flag](../../administration/feature_flags.md#enable-or-disable-the-feature).
- When you upgrade to GitLab 17.4, an OAuth application is generated for the Web IDE.
If your GitLab server's external URL configuration in the `GitLab.rb` file contains uppercase letters, the Web IDE might fail to load.
To resolve this issue, see [update the OAuth callback URL](../../user/project/web_ide/index.md#update-the-oauth-callback-url).
## 17.3.0

View File

@ -619,10 +619,117 @@ The following are Docker image-related CI/CD variables.
#### Vulnerability filters
| CI/CD variable | Default value | Description |
|------------------------------|--------------------------|-------------|
| `SAST_EXCLUDED_PATHS` | `spec, test, tests, tmp` | Exclude vulnerabilities from output based on the paths. This is a comma-separated list of patterns. Patterns can be globs (see [`doublestar.Match`](https://pkg.go.dev/github.com/bmatcuk/doublestar/v4@v4.0.2#Match) for supported patterns), or file or folder paths (for example, `doc,spec`). Parent directories also match patterns. You might need to exclude temporary directories used by your build tool as these can generate false positives. To exclude paths, copy and paste the default excluded paths, then **add** your own paths to be excluded. If you don't specify the default excluded paths, you override the defaults and _only_ paths you specify are excluded from the SAST scans. |
| `SEARCH_MAX_DEPTH` | 20 for [Advanced SAST](gitlab_advanced_sast.md) and [Semgrep](https://gitlab.com/gitlab-org/security-products/analyzers/semgrep); 4 for all other SAST analyzers | SAST searches the repository to detect the programming languages used, and selects the matching analyzers. Set the value of `SEARCH_MAX_DEPTH` to specify how many directory levels the search phase should span. After the analyzers have been selected, the _entire_ repository is analyzed. |
<table class="sast-table">
<thead>
<tr>
<th>CI/CD variable</th>
<th>Description</th>
<th>Default Value</th>
<th>Analyzer</th>
</tr>
</thead>
<tbody>
<tr>
<td rowspan="3">
<code>SAST_EXCLUDED_PATHS</code>
</td>
<td rowspan="3">
Comma-separated list of paths for excluding vulnerabilities. The exact handling of this variable depends on which analyzer is used.<sup><b><a href="#sast-excluded-paths-description">1</a></b></sup>
</td>
<td rowspan="3">
<code>
<a href="https://gitlab.com/gitlab-org/gitlab/blob/v17.3.0-ee/lib/gitlab/ci/templates/Jobs/SAST.gitlab-ci.yml#L13">spec, test, tests, tmp</a>
</code>
</td>
<td>
<a href="https://gitlab.com/gitlab-org/security-products/analyzers/semgrep">Semgrep</a><sup><b><a href="#sast-excluded-paths-semgrep">2</a></b>,</sup><sup><b><a href="#sast-excluded-paths-all-other-sast-analyzers">3</a></b></sup>
</td>
</tr>
<tr>
<td>
<a href="gitlab_advanced_sast.md">GitLab Advanced SAST</a><sup><b><a href="#sast-excluded-paths-semgrep">2</a></b>,</sup><sup><b><a href="#sast-excluded-paths-all-other-sast-analyzers">3</a></b></sup>
</td>
</tr>
<tr>
<td>
All other SAST analyzers<sup><b><a href="#sast-excluded-paths-all-other-sast-analyzers">3</a></b></sup>
</td>
</tr>
<tr>
<td rowspan="3">
<code>SEARCH_MAX_DEPTH</code>
</td>
<td rowspan="3">
The number of directory levels the analyzer will descend into when searching for matching files to scan.<sup><b><a href="#search-max-depth-description">4</a></b></sup>
</td>
<td rowspan="2">
<code>
<a href="https://gitlab.com/gitlab-org/gitlab/-/blob/v17.3.0-ee/lib/gitlab/ci/templates/Jobs/SAST.gitlab-ci.yml#L54">20</a>
</code>
</td>
<td>
<a href="https://gitlab.com/gitlab-org/security-products/analyzers/semgrep">Semgrep</a>
</td>
</tr>
<tr>
<td>
<a href="gitlab_advanced_sast.md">GitLab Advanced SAST</a>
</td>
</tr>
<tr>
<td>
<code>
<a href="https://gitlab.com/gitlab-org/gitlab/blob/v17.3.0-ee/lib/gitlab/ci/templates/Jobs/SAST.gitlab-ci.yml#L26">4</a>
</code>
</td>
<td>
All other SAST analyzers
</td>
</tr>
</tbody>
</table>
**Footnotes:**
1. <a id="sast-excluded-paths-description"></a>You might need to exclude temporary directories used by your build tool as
these can generate false positives. To exclude paths, copy and paste the default excluded paths, then **add** your
own paths to be excluded. If you don't specify the default excluded paths, the defaults are overridden and _only_ the
paths you specify are excluded from SAST scans.
1. <a id="sast-excluded-paths-semgrep"></a>For these analyzers, `SAST_EXCLUDED_PATHS` is implemented as a **pre-filter**,
which is applied _before_ the scan is executed.
The analyzer skips any files or directories whose path matches one of the comma-separated patterns.
For example, if `SAST_EXCLUDED_PATHS` is set to `*.py,tests`:
- `*.py` ignores the following:
- `foo.py`
- `src/foo.py`
- `foo.py/bar.sh`
- `tests` ignores:
- `tests/foo.py`
- `a/b/tests/c/foo.py`
Each pattern is a glob-style pattern that uses the same syntax as [gitignore](https://git-scm.com/docs/gitignore#_pattern_format); a minimal sketch of this segment-matching behavior appears after these footnotes.
1. <a id="sast-excluded-paths-all-other-sast-analyzers"></a>For these analyzers, `SAST_EXCLUDED_PATHS` is implemented as
a **post-filter**, which is applied _after_ the scan is executed.
Patterns can be globs (see [`doublestar.Match`](https://pkg.go.dev/github.com/bmatcuk/doublestar/v4@v4.0.2#Match) for supported
patterns), or file or folder paths (for example, `doc,spec`). Parent directories also match patterns.
The post-filter implementation of `SAST_EXCLUDED_PATHS` is available for all SAST analyzers. Some
SAST analyzers such as those with superscript **[2](#sast-excluded-paths-semgrep)** implement `SAST_EXCLUDED_PATHS`
as both a pre-filter and post-filter. A pre-filter is more efficient because it reduces the number of files
to be scanned.
For analyzers that support `SAST_EXCLUDED_PATHS` as both a pre-filter and post-filter, the pre-filter is applied first,
then the post-filter is applied to any vulnerabilities that remain.
1. <a id="search-max-depth-description"></a>The [SAST CI/CD template](https://gitlab.com/gitlab-org/gitlab/blob/v17.4.1-ee/lib/gitlab/ci/templates/Jobs/SAST.gitlab-ci.yml)
searches the repository to detect the programming languages
used, and selects the matching analyzers. Then, each analyzer searches the codebase to find the specific files or directories
it should scan. Set the value of `SEARCH_MAX_DEPTH` to specify how many directory levels the analyzer's search phase should span.
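The segment-matching behavior described in footnote 2 can be approximated with a short Ruby sketch; `excluded?` is a hypothetical helper for illustration and is not the analyzers' actual implementation:
```ruby
# Rough illustration of a gitignore-style pre-filter: a pattern excludes a
# path when it matches any individual path segment.
require 'pathname'

def excluded?(path, patterns)
  segments = Pathname(path).each_filename.to_a
  patterns.any? do |pattern|
    segments.any? { |segment| File.fnmatch?(pattern, segment) }
  end
end

excluded?('src/foo.py', ['*.py', 'tests'])     # => true  ("foo.py" matches "*.py")
excluded?('a/b/tests/c/foo.rb', ['tests'])     # => true  ("tests" segment matches)
excluded?('lib/bar.rb', ['*.py', 'tests'])     # => false
```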
#### Analyzer settings

View File

@ -285,6 +285,7 @@ of the methods below are also supported with the same caveats.
|--------------------------|-----------------|---------------|----------------------------------------------------------------------------------------------|
| `get_experiment` | Yes | 15.11 | |
| `get_experiment_by_name` | Yes | 15.11 | |
| `delete_experiment` | Yes | 17.5 | |
| `set_experiment` | Yes | 15.11 | |
| `get_run` | Yes | 15.11 | |
| `start_run` | Yes | 15.11 | (16.3) If a name is not provided, the candidate receives a random nickname. |

View File

@ -52,7 +52,7 @@ To create the webhook in the downstream instance:
1. On the left sidebar, select **Search or go to** and find your project.
1. Select **Settings > Webhooks**.
1. Add the webhook **URL**, which (in this case) uses the
[Pull Mirror API](../../../../api/projects.md#start-the-pull-mirroring-process-for-a-project)
[Pull Mirror API](../../../../api/project_pull_mirroring.md#start-the-pull-mirroring-process-for-a-project)
request to trigger an immediate pull after a repository update:
```plaintext
View File
@ -114,11 +114,11 @@ assigned when you set up pull mirroring.
Pull mirroring uses polling to detect new branches and commits added upstream,
often minutes afterwards. You can notify GitLab using an
[API call](../../../../api/projects.md#start-the-pull-mirroring-process-for-a-project),
[API call](../../../../api/project_pull_mirroring.md#start-the-pull-mirroring-process-for-a-project),
but the [minimum interval for pull mirroring limits](index.md#force-an-update) is still enforced.
For more information, read
[Start the pull mirroring process for a project](../../../../api/projects.md#start-the-pull-mirroring-process-for-a-project).
[Start the pull mirroring process for a project](../../../../api/project_pull_mirroring.md#start-the-pull-mirroring-process-for-a-project).
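As a rough illustration of the API call mentioned above, the following sketch triggers an immediate pull-mirror update. It assumes the `POST /projects/:id/mirror/pull` endpoint from the linked Pull Mirror API page, a hypothetical project ID, and a personal access token with the `api` scope stored in `GITLAB_TOKEN`.

```ruby
# Sketch: ask GitLab to start a pull-mirror update right away.
require 'net/http'
require 'uri'

project_id = 42 # hypothetical project ID
uri = URI("https://gitlab.example.com/api/v4/projects/#{project_id}/mirror/pull")

request = Net::HTTP::Post.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN') # token with api scope

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
  http.request(request)
end

# Even when this request succeeds, the minimum pull-mirroring interval still applies.
puts response.code
```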
## Fix hard failures when mirroring
@ -151,4 +151,4 @@ end
- [Troubleshooting](troubleshooting.md) for repository mirroring.
- [Pull mirroring intervals](../../../../administration/instance_limits.md#pull-mirroring-interval)
- [Pull mirroring API](../../../../api/projects.md#configure-pull-mirroring-for-a-project)
- [Project pull mirroring API](../../../../api/project_pull_mirroring.md#configure-pull-mirroring-for-a-project)
View File
@ -261,7 +261,8 @@ When mirroring fails due to Silent Mode the following are the debug steps:
- [Triggering the mirror using the API](pull.md#trigger-pipelines-for-mirror-updates) shows: `The project is not mirrored`.
- If pull or push mirror was already set up but there are no further updates on the mirrored repository,
confirm the [project's pull and push mirror details and status](../../../../api/projects.md#get-a-projects-pull-mirror-details) are not recent as shown below. This indicates mirroring was paused and disabling GitLab Silent Mode restarts it automatically.
confirm the [project's pull and push mirror details and status](../../../../api/project_pull_mirroring.md#get-a-projects-pull-mirror-details)
are not recent as shown below. This indicates mirroring was paused and disabling GitLab Silent Mode restarts it automatically.
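As a rough sketch of how to check this, the mirror details can be fetched over the API. This assumes the `GET /projects/:id/mirror/pull` endpoint from the linked API page, a hypothetical project ID, a token in `GITLAB_TOKEN`, and field names (`update_status`, `last_update_at`) that should be confirmed against that page.

```ruby
# Sketch: fetch a project's pull mirror details and flag a stale mirror.
require 'net/http'
require 'uri'
require 'json'
require 'time'

project_id = 42 # hypothetical project ID
uri = URI("https://gitlab.example.com/api/v4/projects/#{project_id}/mirror/pull")

request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN')

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
  http.request(request)
end

details = JSON.parse(response.body)
# Assumed field names; confirm against the linked API documentation.
last_update = details['last_update_at']
puts "update_status: #{details['update_status']}, last_update_at: #{last_update}"
puts 'mirror details are not recent' if last_update && Time.now - Time.parse(last_update) > 60 * 60
```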
For example, if Silent Mode is what is impeding your imports, the output is similar to the following:
View File
@ -114,6 +114,23 @@ module API
{}
end
desc 'Delete an experiment.' do
summary 'Delete an experiment.'
detail 'https://mlflow.org/docs/latest/rest-api.html#delete-experiment'
end
params do
requires :experiment_id, type: String, desc: 'ID of the experiment.'
end
post 'delete', urgency: :low do
destroy = ::Ml::DestroyExperimentService.new(experiment).execute
if destroy.success?
present({})
else
render_api_error!(destroy.message.first, 400)
end
end
end
end
end
View File
@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillPackagesRubygemsMetadataProjectId < BackfillDesiredShardingKeyJob
operation_name :backfill_packages_rubygems_metadata_project_id
feature_category :package_registry
end
end
end
View File
@ -162,9 +162,7 @@ module Gitlab
end
def each_gitaly_patch
i = @array.length
@iterator.each do |raw|
@iterator.each_with_index do |raw, iterator_index|
@empty = false
options = { expanded: expand_diff? }
@ -182,8 +180,10 @@ module Gitlab
end
end
yield @array[i] = diff
i += 1
if iterator_index >= @offset_index
@array << diff
yield diff
end
end
end
View File
@ -38629,9 +38629,6 @@ msgstr ""
msgid "Package type must be NuGet"
msgstr ""
msgid "Package type must be PyPi"
msgstr ""
msgid "Package type must be Terraform Module"
msgstr ""
View File
@ -0,0 +1,31 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe StreamDiffs, type: :controller, feature_category: :source_code_management do
subject(:controller) do
Class.new(ApplicationController) do
include StreamDiffs
def call_resource
resource
end
def call_options
options
end
end
end
describe '#resource' do
it 'raises NotImplementedError' do
expect { controller.new.call_resource }.to raise_error(NotImplementedError)
end
end
describe '#options' do
it 'returns empty hash' do
expect(controller.new.call_options).to eq({})
end
end
end
View File
@ -121,26 +121,6 @@ FactoryBot.define do
end
end
# TODO: Remove with the rollout of the FF pypi_extract_pypi_package_model
# https://gitlab.com/gitlab-org/gitlab/-/issues/480692
factory :pypi_package_legacy do
sequence(:name) { |n| "pypi-package-#{n}" }
sequence(:version) { |n| "1.0.#{n}" }
package_type { :pypi }
transient do
without_loaded_metadatum { false }
end
after :create do |package, evaluator|
create :package_file, :pypi, package: package, file_name: "#{package.name}-#{package.version}.tar.gz"
unless evaluator.without_loaded_metadatum
create :pypi_metadatum, legacy_package: package, package: nil
end
end
end
factory :ml_model_package, class: 'Packages::MlModel::Package' do
sequence(:name) { |n| "mlmodel-package-#{n}" }
sequence(:version) { |n| "1.0.#{n}" }
View File
@ -33,27 +33,5 @@ RSpec.describe Packages::Pypi::PackageFinder, feature_category: :package_registr
it { is_expected.to eq(package2) }
end
context 'when pypi_extract_package_model is disabled' do
before do
stub_feature_flags(pypi_extract_pypi_package_model: false)
end
context 'within a project' do
let(:scope) { project }
# rubocop:disable Cop/AvoidBecomes -- implementing inheritance for PyPi packages https://gitlab.com/gitlab-org/gitlab/-/issues/435827
it { is_expected.to eq(package2.becomes(::Packages::Package)) }
# rubocop:enable Cop/AvoidBecomes
end
context 'within a group' do
let(:scope) { group }
# rubocop:disable Cop/AvoidBecomes -- implementing inheritance for PyPi packages https://gitlab.com/gitlab-org/gitlab/-/issues/435827
it { is_expected.to eq(package2.becomes(::Packages::Package)) }
# rubocop:enable Cop/AvoidBecomes
end
end
end
end
View File
@ -39,19 +39,6 @@ RSpec.describe Packages::Pypi::PackagesFinder, feature_category: :package_regist
it { is_expected.to contain_exactly(package2, package3) }
context 'when pypi_extract_package_model is disabled' do
before do
stub_feature_flags(pypi_extract_pypi_package_model: false)
end
# rubocop:disable Cop/AvoidBecomes -- implementing inheritance for PyPi packages https://gitlab.com/gitlab-org/gitlab/-/issues/435827
it do
is_expected
.to contain_exactly(package2.becomes(::Packages::Package), package3.becomes(::Packages::Package))
end
# rubocop:enable Cop/AvoidBecomes
end
it_behaves_like 'when no package is found'
it_behaves_like 'when package_name param is a non-normalized name'
end
@ -68,16 +55,6 @@ RSpec.describe Packages::Pypi::PackagesFinder, feature_category: :package_regist
it { is_expected.to contain_exactly(package4) }
context 'when pypi_extract_package_model is disabled' do
before do
stub_feature_flags(pypi_extract_pypi_package_model: false)
end
# rubocop:disable Cop/AvoidBecomes -- implementing inheritance for PyPi packages https://gitlab.com/gitlab-org/gitlab/-/issues/435827
it { is_expected.to contain_exactly(package4.becomes(::Packages::Package)) }
# rubocop:enable Cop/AvoidBecomes
end
it_behaves_like 'when no package is found'
it_behaves_like 'when package_name param is a non-normalized name'
View File
@ -165,10 +165,11 @@ describe('AppComponent', () => {
jest.spyOn(vm, 'updateGroups');
});
it('should fetch groups for provided page details and update window state', () => {
it('without filter should fetch groups for provided page details, update window state, and call setGroups', () => {
jest.spyOn(urlUtilities, 'mergeUrlParams');
jest.spyOn(window.history, 'replaceState').mockImplementation(() => {});
jest.spyOn(window, 'scrollTo').mockImplementation(() => {});
jest.spyOn(vm.store, 'setGroups').mockImplementation(() => {});
const fetchPagePromise = vm.fetchPage({
page: 2,
@ -196,7 +197,43 @@ describe('AppComponent', () => {
expect.any(String),
);
expect(vm.updateGroups).toHaveBeenCalled();
expect(vm.store.setGroups).toHaveBeenCalledWith(mockGroups);
});
});
it('with filter should fetch groups for provided page details, update window state, and call setSearchedGroups', () => {
jest.spyOn(urlUtilities, 'mergeUrlParams');
jest.spyOn(window.history, 'replaceState').mockImplementation(() => {});
jest.spyOn(window, 'scrollTo').mockImplementation(() => {});
jest.spyOn(vm.store, 'setSearchedGroups').mockImplementation(() => {});
const fetchPagePromise = vm.fetchPage({
page: 2,
filterGroupsBy: 'search',
sortBy: null,
});
expect(vm.isLoading).toBe(true);
expect(vm.fetchGroups).toHaveBeenCalledWith({
page: 2,
filterGroupsBy: 'search',
sortBy: null,
updatePagination: true,
});
return fetchPagePromise.then(() => {
expect(vm.isLoading).toBe(false);
expect(window.scrollTo).toHaveBeenCalledWith({ behavior: 'smooth', top: 0 });
expect(urlUtilities.mergeUrlParams).toHaveBeenCalledWith({ page: 2 }, expect.any(String));
expect(window.history.replaceState).toHaveBeenCalledWith(
{
page: expect.any(String),
},
expect.any(String),
expect.any(String),
);
expect(vm.store.setSearchedGroups).toHaveBeenCalledWith(mockGroups);
});
});
});
View File
@ -0,0 +1,16 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillPackagesRubygemsMetadataProjectId,
feature_category: :package_registry,
schema: 20240930122639 do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :packages_rubygems_metadata }
let(:batch_column) { :package_id }
let(:backfill_column) { :project_id }
let(:backfill_via_table) { :packages_packages }
let(:backfill_via_column) { :project_id }
let(:backfill_via_foreign_key) { :package_id }
end
end
View File
@ -532,6 +532,7 @@ RSpec.describe Gitlab::Git::DiffCollection, feature_category: :source_code_manag
describe '#each' do
context 'with Gitlab::GitalyClient::DiffStitcher' do
let(:offset_index) { 0 }
let(:collection) do
described_class.new(
iterator,
@ -539,12 +540,13 @@ RSpec.describe Gitlab::Git::DiffCollection, feature_category: :source_code_manag
max_lines: max_lines,
limits: limits,
expanded: expanded,
generated_files: generated_files
generated_files: generated_files,
offset_index: offset_index
)
end
let(:iterator) { Gitlab::GitalyClient::DiffStitcher.new(diff_params) }
let(:diff_params) { [diff_1, diff_2] }
let(:diff_params) { [diff_1, diff_2, diff_3] }
let(:diff_1) do
OpenStruct.new(
to_path: ".gitmodules",
@ -573,6 +575,20 @@ RSpec.describe Gitlab::Git::DiffCollection, feature_category: :source_code_manag
)
end
let(:diff_3) do
OpenStruct.new(
to_path: "README",
from_path: "README",
old_mode: 0100644,
new_mode: 0100644,
from_id: '357406f3075a57708d0163752905cc1576fceacc',
to_id: '8e5177d718c561d36efde08bad36b43687ee6bf0',
patch: 'a' * 100,
raw_patch_data: 'a' * 100,
end_of_patch: true
)
end
context 'with generated_files' do
let(:generated_files) { [diff_1.from_path] }
@ -608,6 +624,45 @@ RSpec.describe Gitlab::Git::DiffCollection, feature_category: :source_code_manag
it { is_expected.to be_falsey }
end
end
context 'when offset_index is given' do
let(:generated_files) { nil }
context 'when offset_index is 0' do
let(:offset_index) { 0 }
it 'yields all diffs' do
expect(collection.to_a.map(&:diff)).to eq(
[
diff_1.patch,
diff_2.patch,
diff_3.patch
]
)
end
end
context 'when offset index is 1' do
let(:offset_index) { 1 }
it 'does not yield diffs before the offset' do
expect(collection.to_a.map(&:diff)).to eq(
[
diff_2.patch,
diff_3.patch
]
)
end
end
context 'when offset_index is the same as the number of diffs' do
let(:offset_index) { 3 }
it 'yields no diffs' do
expect(collection.to_a).to be_empty
end
end
end
end
context 'with existing generated value in the hash' do
View File
@ -0,0 +1,33 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillPackagesRubygemsMetadataProjectId, feature_category: :package_registry do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :packages_rubygems_metadata,
column_name: :package_id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE,
gitlab_schema: :gitlab_main_cell,
job_arguments: [
:project_id,
:packages_packages,
:project_id,
:package_id
]
)
}
end
end
end
View File
@ -30,6 +30,7 @@ RSpec.describe Ml::Experiment, feature_category: :mlops do
experiment = create(:ml_models, project: exp.project).default_experiment
expect { experiment.destroy! }.to raise_error(ActiveRecord::ActiveRecordError)
expect(experiment.errors.full_messages).to include('Cannot delete an experiment associated to a model')
end
end
View File
@ -182,75 +182,6 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) }
end
# TODO: Remove with the rollout of the FF pypi_extract_pypi_package_model
# https://gitlab.com/gitlab-org/gitlab/-/issues/480692
context 'pypi package' do
subject { create(:pypi_package_legacy) }
it { is_expected.to allow_value('0.1').for(:version) }
it { is_expected.to allow_value('2.0').for(:version) }
it { is_expected.to allow_value('1.2.0').for(:version) }
it { is_expected.to allow_value('0100!0.0').for(:version) }
it { is_expected.to allow_value('00!1.2').for(:version) }
it { is_expected.to allow_value('1.0a').for(:version) }
it { is_expected.to allow_value('1.0-a').for(:version) }
it { is_expected.to allow_value('1.0.a1').for(:version) }
it { is_expected.to allow_value('1.0a1').for(:version) }
it { is_expected.to allow_value('1.0-a1').for(:version) }
it { is_expected.to allow_value('1.0alpha1').for(:version) }
it { is_expected.to allow_value('1.0b1').for(:version) }
it { is_expected.to allow_value('1.0beta1').for(:version) }
it { is_expected.to allow_value('1.0rc1').for(:version) }
it { is_expected.to allow_value('1.0pre1').for(:version) }
it { is_expected.to allow_value('1.0preview1').for(:version) }
it { is_expected.to allow_value('1.0.dev1').for(:version) }
it { is_expected.to allow_value('1.0.DEV1').for(:version) }
it { is_expected.to allow_value('1.0.post1').for(:version) }
it { is_expected.to allow_value('1.0.rev1').for(:version) }
it { is_expected.to allow_value('1.0.r1').for(:version) }
it { is_expected.to allow_value('1.0c2').for(:version) }
it { is_expected.to allow_value('2012.15').for(:version) }
it { is_expected.to allow_value('1.0+5').for(:version) }
it { is_expected.to allow_value('1.0+abc.5').for(:version) }
it { is_expected.to allow_value('1!1.1').for(:version) }
it { is_expected.to allow_value('1.0c3').for(:version) }
it { is_expected.to allow_value('1.0rc2').for(:version) }
it { is_expected.to allow_value('1.0c1').for(:version) }
it { is_expected.to allow_value('1.0b2-346').for(:version) }
it { is_expected.to allow_value('1.0b2.post345').for(:version) }
it { is_expected.to allow_value('1.0b2.post345.dev456').for(:version) }
it { is_expected.to allow_value('1.2.rev33+123456').for(:version) }
it { is_expected.to allow_value('1.1.dev1').for(:version) }
it { is_expected.to allow_value('1.0b1.dev456').for(:version) }
it { is_expected.to allow_value('1.0a12.dev456').for(:version) }
it { is_expected.to allow_value('1.0b2').for(:version) }
it { is_expected.to allow_value('1.0.dev456').for(:version) }
it { is_expected.to allow_value('1.0c1.dev456').for(:version) }
it { is_expected.to allow_value('1.0.post456').for(:version) }
it { is_expected.to allow_value('1.0.post456.dev34').for(:version) }
it { is_expected.to allow_value('1.2+123abc').for(:version) }
it { is_expected.to allow_value('1.2+abc').for(:version) }
it { is_expected.to allow_value('1.2+abc123').for(:version) }
it { is_expected.to allow_value('1.2+abc123def').for(:version) }
it { is_expected.to allow_value('1.2+1234.abc').for(:version) }
it { is_expected.to allow_value('1.2+123456').for(:version) }
it { is_expected.to allow_value('1.2.r32+123456').for(:version) }
it { is_expected.to allow_value('1!1.2.rev33+123456').for(:version) }
it { is_expected.to allow_value('1.0a12').for(:version) }
it { is_expected.to allow_value('1.2.3-45+abcdefgh').for(:version) }
it { is_expected.to allow_value('v1.2.3').for(:version) }
it { is_expected.not_to allow_value('1.2.3-45-abcdefgh').for(:version) }
it { is_expected.not_to allow_value('..1.2.3').for(:version) }
it { is_expected.not_to allow_value(' 1.2.3').for(:version) }
it { is_expected.not_to allow_value("1.2.3 \r\t").for(:version) }
it { is_expected.not_to allow_value("\r\t 1.2.3").for(:version) }
it { is_expected.not_to allow_value('1./2.3').for(:version) }
it { is_expected.not_to allow_value('1.2.3-4/../../').for(:version) }
it { is_expected.not_to allow_value('1.2.3-4%2e%2e%').for(:version) }
it { is_expected.not_to allow_value('../../../../../1.2.3').for(:version) }
it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) }
end
it_behaves_like 'validating version to be SemVer compliant for', :npm_package
it_behaves_like 'validating version to be SemVer compliant for', :terraform_module_package
@ -644,16 +575,6 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
it { is_expected.to match_array([package1, package2]) }
end
# TODO: Remove with the rollout of the FF pypi_extract_pypi_package_model
# https://gitlab.com/gitlab-org/gitlab/-/issues/480692
describe '.with_normalized_pypi_name' do
let_it_be(:pypi_package) { create(:pypi_package, name: 'Foo.bAr---BAZ_buz') }
subject { described_class.with_normalized_pypi_name('foo-bar-baz-buz') }
it { is_expected.to match_array([pypi_package]) }
end
describe '.with_case_insensitive_version' do
let_it_be(:nuget_package) { create(:nuget_package, version: '1.0.0-ABC') }
@ -1105,28 +1026,6 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
end
end
# TODO: Remove with the rollout of the FF pypi_extract_pypi_package_model
# https://gitlab.com/gitlab-org/gitlab/-/issues/480692
describe '#normalized_pypi_name' do
let_it_be(:package) { create(:pypi_package) }
subject { package.normalized_pypi_name }
where(:package_name, :normalized_name) do
'ASDF' | 'asdf'
'a.B_c-d' | 'a-b-c-d'
'a-------b....c___d' | 'a-b-c-d'
end
with_them do
before do
package.update_column(:name, package_name)
end
it { is_expected.to eq(normalized_name) }
end
end
describe '#normalized_nuget_version' do
let_it_be(:package) { create(:nuget_package, :with_metadatum, version: '1.0') }
let(:normalized_version) { '1.0.0' }
View File
@ -5,31 +5,11 @@ require 'spec_helper'
RSpec.describe Packages::Pypi::Metadatum, type: :model, feature_category: :package_registry do
describe 'relationships' do
it { is_expected.to belong_to(:package) }
# TODO: Remove with the rollout of the FF pypi_extract_pypi_package_model
# https://gitlab.com/gitlab-org/gitlab/-/issues/480692
it do
is_expected.to belong_to(:legacy_package).conditions(package_type: :pypi).class_name('Packages::Package')
.inverse_of(:pypi_metadatum).with_foreign_key(:package_id)
end
end
describe 'validations' do
it { is_expected.to validate_presence_of(:package) }
# TODO: Remove with the rollout of the FF pypi_extract_pypi_package_model
# https://gitlab.com/gitlab-org/gitlab/-/issues/480692
it { is_expected.not_to validate_presence_of(:legacy_package) }
context 'when pypi_extract_package_model is disabled' do
before do
stub_feature_flags(pypi_extract_pypi_package_model: false)
end
it { is_expected.to validate_presence_of(:legacy_package) }
it { is_expected.not_to validate_presence_of(:package) }
end
it { is_expected.to allow_value('').for(:required_python) }
it { is_expected.to validate_length_of(:required_python).is_at_most(described_class::MAX_REQUIRED_PYTHON_LENGTH) }
it { is_expected.to allow_value('').for(:keywords) }
@ -55,7 +35,7 @@ RSpec.describe Packages::Pypi::Metadatum, type: :model, feature_category: :packa
.is_at_most(described_class::MAX_DESCRIPTION_CONTENT_TYPE_LENGTH)
}
describe '#pypi_package_type', :aggregate_failures do
describe '#package_type', :aggregate_failures do
subject(:pypi_metadatum) { build(:pypi_metadatum) }
it 'builds a valid metadatum' do
@ -69,19 +49,6 @@ RSpec.describe Packages::Pypi::Metadatum, type: :model, feature_category: :packa
it 'raises the error' do
expect { build(:pypi_metadatum, package: package) }.to raise_error(ActiveRecord::AssociationTypeMismatch)
end
context 'when pypi_extract_package_model is disabled' do
before do
stub_feature_flags(pypi_extract_pypi_package_model: false)
end
it 'adds the validation error' do
pypi_metadatum = build(:pypi_metadatum, legacy_package: package, package: nil)
expect(pypi_metadatum).not_to be_valid
expect(pypi_metadatum.errors.to_a).to include('Package type must be PyPi')
end
end
end
end
end
View File
@ -6,7 +6,7 @@ RSpec.describe Import::PendingReassignmentAlertPresenter, :aggregate_failures, f
include SafeFormatHelper
let_it_be(:user) { build_stubbed(:user) }
let_it_be(:bulk_import) { build_stubbed(:bulk_import, :with_configuration) }
let(:bulk_import) { build_stubbed(:bulk_import, :with_configuration, :finished) }
let(:presenter) { described_class.new(bulk_import, current_user: user) }
let_it_be(:namespaces) { [] }
@ -44,7 +44,7 @@ RSpec.describe Import::PendingReassignmentAlertPresenter, :aggregate_failures, f
context 'with no top level groups' do
let_it_be(:namespaces) { [] }
it 'presents the import values' do
it 'does not present the import values' do
expect(presenter.show_alert?).to eq(false)
end
end
@ -68,7 +68,27 @@ RSpec.describe Import::PendingReassignmentAlertPresenter, :aggregate_failures, f
stub_feature_flags(importer_user_mapping: false)
end
it 'presents the import values' do
it 'does not present the import values' do
expect(presenter.show_alert?).to eq(false)
end
end
context 'when bulk_import_importer_user_mapping feature flag is disabled' do
before do
stub_feature_flags(bulk_import_importer_user_mapping: false)
end
it 'does not present the import values' do
expect(presenter.show_alert?).to eq(false)
end
end
context 'when import has not finished' do
before do
bulk_import.status = 1
end
it 'does not present the import values' do
expect(presenter.show_alert?).to eq(false)
end
end
View File
@ -11,7 +11,7 @@ RSpec.describe ::Packages::Pypi::SimpleIndexPresenter, :aggregate_failures, feat
let_it_be(:package1) { create(:pypi_package, project: project, name: package_name, version: '1.0.0') }
let_it_be(:package2) { create(:pypi_package, project: project, name: package_name, version: '2.0.0') }
let(:packages) { project.packages }
let(:packages) { Packages::Pypi::Package.for_projects(project) }
describe '#body' do
subject(:presenter) { described_class.new(packages, project_or_group).body }
@ -44,12 +44,12 @@ RSpec.describe ::Packages::Pypi::SimpleIndexPresenter, :aggregate_failures, feat
it 'avoids n+1 database queries', :use_sql_query_cache do
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
described_class.new(project.packages.reload, project_or_group).body
described_class.new(Packages::Pypi::Package.for_projects(project).reload, project_or_group).body
end
create_list(:pypi_package, 5, project: project)
expect { described_class.new(project.packages.reload, project_or_group).body }
expect { described_class.new(Packages::Pypi::Package.for_projects(project).reload, project_or_group).body }
.to issue_same_number_of_queries_as(control)
end
end
View File
@ -13,7 +13,7 @@ RSpec.describe ::Packages::Pypi::SimplePackageVersionsPresenter, :aggregate_fail
let(:file) { package.package_files.first }
let(:filename) { file.file_name }
let(:packages) { project.packages }
let(:packages) { Packages::Pypi::Package.for_projects(project) }
describe '#body' do
subject(:presenter) { described_class.new(packages, project_or_group).body }
@ -42,7 +42,8 @@ RSpec.describe ::Packages::Pypi::SimplePackageVersionsPresenter, :aggregate_fail
create(:pypi_package, project: project, name: package_name)
expect { described_class.new(project.packages, project_or_group).body }.not_to exceed_query_limit(control)
expect { described_class.new(Packages::Pypi::Package.for_projects(project), project_or_group).body }
.not_to exceed_query_limit(control)
end
end
View File
@ -284,4 +284,45 @@ RSpec.describe API::Ml::Mlflow::Experiments, feature_category: :mlops do
it_behaves_like 'MLflow|Requires api scope and write permission'
end
end
describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/experiments/delete' do
let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/delete" }
let(:default_params) { { experiment_id: experiment.iid.to_s } }
let(:params) { default_params }
let(:request) { post api(route), params: params, headers: headers }
it 'deletes the experiment', :aggregate_failures do
is_expected.to have_gitlab_http_status(:ok)
expect { experiment.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
describe 'Error States' do
context 'when experiment does not exist' do
let(:params) { default_params.merge(experiment_id: non_existing_record_iid.to_s) }
it_behaves_like 'MLflow|Not Found - Resource Does Not Exist'
end
context 'when experiment has a model_id' do
let(:model) { create(:ml_models, project: project) }
let(:experiment) { create(:ml_experiments, :with_metadata, project: project, model_id: model.id) }
it 'returns an error' do
is_expected.to have_gitlab_http_status(:bad_request)
expect(json_response).to include({ 'message' => 'Cannot delete an experiment associated to a model' })
end
it_behaves_like 'MLflow|Bad Request'
end
context 'when experiment_id is not passed' do
let(:params) { {} }
it_behaves_like 'MLflow|Bad Request'
end
it_behaves_like 'MLflow|shared error cases'
it_behaves_like 'MLflow|Requires api scope and write permission'
end
end
end
View File
@ -328,7 +328,7 @@ RSpec.describe API::PypiPackages, feature_category: :package_registry do
end
it 'returns 422 and does not create a package' do
expect { subject }.not_to change { project.packages.pypi.count }
expect { subject }.not_to change { Packages::Pypi::Package.for_projects(project).count }
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
@ -424,7 +424,7 @@ RSpec.describe API::PypiPackages, feature_category: :package_registry do
it 'does not create a new package', :aggregate_failures do
expect { subject }
.to change { project.packages.pypi.count }.by(0)
.to change { Packages::Pypi::Package.for_projects(project).count }.by(0)
.and change { Packages::PackageFile.count }.by(1)
.and change { Packages::Pypi::Metadatum.count }.by(0)
expect(response).to have_gitlab_http_status(:created)
@ -434,7 +434,7 @@ RSpec.describe API::PypiPackages, feature_category: :package_registry do
it 'does create a new package', :aggregate_failures do
existing_package.pending_destruction!
expect { subject }
.to change { project.packages.pypi.count }.by(1)
.to change { Packages::Pypi::Package.for_projects(project).count }.by(1)
.and change { Packages::PackageFile.count }.by(1)
.and change { Packages::Pypi::Metadatum.count }.by(1)
expect(response).to have_gitlab_http_status(:created)
View File
@ -0,0 +1,92 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Commit diffs stream', feature_category: :source_code_management do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:user) { create(:user, maintainer_of: project) }
let(:commit_with_two_diffs) { project.commit("874797c3a73b60d2187ed6e2fcabd289ff75171e") }
let(:offset) { 0 }
before do
sign_in(user)
end
describe 'GET diffs_stream' do
def send_request(**extra_params)
params = {
namespace_id: project.namespace,
project_id: project,
id: commit_with_two_diffs.id,
offset: offset
}
get diffs_stream_namespace_project_commit_path(params.merge(extra_params))
end
it 'streams the response' do
send_request
expect(response).to have_gitlab_http_status(:success)
end
it 'includes all diffs' do
send_request
streamed_content = response.body
commit_with_two_diffs.diffs.diff_files.each do |diff_file|
expect(streamed_content).to include(diff_file.new_path)
end
end
context 'when offset is given' do
context 'when offset is 1' do
let(:offset) { 1 }
it 'streams diffs except the offset' do
send_request
diff_files = commit_with_two_diffs.diffs.diff_files.to_a
expect(response.body).not_to include(diff_files.first.new_path)
expect(response.body).to include(diff_files.last.new_path)
end
end
context 'when offset is same as number of diffs' do
let(:offset) { commit_with_two_diffs.diffs.size }
it 'no diffs are streamed' do
send_request
expect(response.body).to be_empty
end
end
end
context 'when an exception occurs' do
before do
allow(::RapidDiffs::DiffFileComponent)
.to receive(:new).and_raise(StandardError.new('something went wrong'))
end
it 'prints out error message' do
send_request
expect(response.body).to include('something went wrong')
end
end
context 'when the rapid_diffs feature flag is disabled' do
before do
stub_feature_flags(rapid_diffs: false)
end
it 'returns a 404 status' do
send_request
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
end
View File
@ -45,15 +45,55 @@ RSpec.describe BulkImports::ProcessService, feature_category: :importers do
end
context 'when all entities are processed' do
it 'marks bulk import as finished' do
before do
bulk_import.update!(status: 1)
create(:bulk_import_entity, :finished, bulk_import: bulk_import)
create(:bulk_import_entity, :failed, bulk_import: bulk_import)
end
it 'marks bulk import as finished' do
subject.execute
expect(bulk_import.reload.finished?).to eq(true)
end
context 'when placeholder references have not finished being loaded to the database' do
before do
allow_next_instance_of(Import::PlaceholderReferences::Store) do |store|
allow(store).to receive(:empty?).and_return(false)
allow(store).to receive(:count).and_return(1)
end
end
it 'marks bulk import as finished' do
subject.execute
expect(bulk_import.reload.finished?).to eq(true)
end
context 'when importer_user_mapping_enabled is enabled' do
before do
allow_next_instance_of(Import::BulkImports::EphemeralData) do |ephemeral_data|
allow(ephemeral_data).to receive(:importer_user_mapping_enabled?).and_return(true)
end
end
it 'logs and re-enqueues the worker' do
expect(BulkImportWorker).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
expect_next_instance_of(BulkImports::Logger) do |logger|
expect(logger).to receive(:info).with(
message: 'Placeholder references not finished loading to database',
bulk_import_id: bulk_import.id,
placeholder_reference_store_count: 1
)
end
subject.execute
expect(bulk_import.reload.started?).to eq(true)
end
end
end
end
context 'when all entities are failed' do
View File
@ -0,0 +1,51 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Ml::DestroyExperimentService, feature_category: :mlops do
let_it_be(:project) { create(:project) }
let_it_be(:model) { create(:ml_models, project: project) }
let(:experiment) { create(:ml_experiments, project: project) }
let(:experiment_with_model) { create(:ml_experiments, project: project, model_id: model.id) }
let(:service) { described_class.new(experiment) }
describe '#execute' do
subject(:destroy_result) { service.execute }
context 'when experiment is successfully destroyed' do
it 'returns a success response' do
expect(destroy_result).to be_success
end
it 'destroys the experiment' do
expect(destroy_result).to be_success
expect(destroy_result.payload[:experiment]).to eq(experiment)
expect(Ml::Experiment.find_by(id: experiment.id)).to be_nil
end
end
context 'when experiment fails to destroy' do
before do
allow(experiment).to receive(:destroy).and_return(false)
end
it 'returns an error response' do
expect(destroy_result).to be_error
end
end
context 'when experiment is associated with a model' do
let(:experiment) { experiment_with_model }
it 'returns an error response' do
expect(destroy_result).to be_error
expect(destroy_result.message[0]).to eq('Cannot delete an experiment associated to a model')
end
it 'does not destroy the experiment' do
expect(Ml::Experiment.find_by(id: experiment.id)).to eq(experiment)
end
end
end
end
View File
@ -42,16 +42,6 @@ RSpec.describe Packages::Pypi::CreatePackageService, :aggregate_failures, featur
expect(created_package.package_files.first.file_sha256).to eq sha256
expect(created_package.package_files.first.file_md5).to eq md5
end
context 'when pypi_extract_package_model is disabled' do
before do
stub_feature_flags(pypi_extract_pypi_package_model: false)
end
it 'creates the package' do
expect { execute_service }.to change { Packages::Package.pypi.count }.by(1)
end
end
end
context 'with FIPS mode', :fips_mode do
View File
@ -4,7 +4,7 @@ RSpec.shared_examples 'PyPI package creation' do |user_type, status, add_member
RSpec.shared_examples 'creating pypi package files' do
it 'creates package files' do
expect { subject }
.to change { project.packages.pypi.count }.by(1)
.to change { Packages::Pypi::Package.for_projects(project).count }.by(1)
.and change { Packages::PackageFile.count }.by(1)
.and change { Packages::Pypi::Metadatum.count }.by(1)
expect(response).to have_gitlab_http_status(status)
@ -37,7 +37,7 @@ RSpec.shared_examples 'PyPI package creation' do |user_type, status, add_member
create(:package_file, :pypi, package: existing_package, file_name: params[:content].original_filename)
expect { subject }
.to change { project.packages.pypi.count }.by(0)
.to change { Packages::Pypi::Package.for_projects(project).count }.by(0)
.and change { Packages::PackageFile.count }.by(0)
.and change { Packages::Pypi::Metadatum.count }.by(0)