Add latest changes from gitlab-org/gitlab@master
parent 277496b843
commit dfc8a99695

@@ -45,10 +45,10 @@
  if: '($CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE == "detached") && $CI_MERGE_REQUEST_LABELS =~ /pipeline:mr-approved/'

.if-merge-request-approved-and-specific-devops-stage: &if-merge-request-approved-and-specific-devops-stage
  if: '($CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE == "detached") && ($CI_MERGE_REQUEST_LABELS =~ /pipeline:mr-approved/ && $CI_MERGE_REQUEST_LABELS =~ /devops::create/)'
  if: '($CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE == "detached") && ($CI_MERGE_REQUEST_LABELS =~ /pipeline:mr-approved/ && $CI_MERGE_REQUEST_LABELS =~ /devops::(create|govern|manage)/)'

.if-merge-request-and-specific-devops-stage: &if-merge-request-and-specific-devops-stage
  if: '($CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE == "detached") && $CI_MERGE_REQUEST_LABELS =~ /devops::create/'
  if: '($CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE == "detached") && $CI_MERGE_REQUEST_LABELS =~ /devops::(create|govern|manage)/'

.if-merge-request-not-approved: &if-merge-request-not-approved
  if: '($CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE == "detached") && $CI_MERGE_REQUEST_LABELS !~ /pipeline:mr-approved/'
@@ -8,6 +8,18 @@ class CommitStatus < Ci::ApplicationRecord
  include Presentable
  include BulkInsertableAssociations
  include TaggableQueries
  include IgnorableColumns

  ignore_columns %i[
    auto_canceled_by_id_convert_to_bigint
    commit_id_convert_to_bigint
    erased_by_id_convert_to_bigint
    project_id_convert_to_bigint
    runner_id_convert_to_bigint
    trigger_request_id_convert_to_bigint
    upstream_pipeline_id_convert_to_bigint
    user_id_convert_to_bigint
  ], remove_with: '17.0', remove_after: '2024-04-22'

  self.table_name = :p_ci_builds
  self.sequence_name = :ci_builds_id_seq
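For context, a minimal sketch of the plain Rails mechanism that GitLab's `IgnorableColumns` concern builds on (the `remove_with`/`remove_after` bookkeeping is GitLab-specific; treat this mapping as an assumption, not the concern's implementation):

```ruby
# Sketch only: plain ActiveRecord equivalent of ignoring the shadow columns.
# The model behaves as if these *_convert_to_bigint columns do not exist (no
# attribute accessors, not written on insert/update), so they can be backfilled
# and later swapped for the integer originals.
class CommitStatus < Ci::ApplicationRecord
  self.ignored_columns += %w[
    auto_canceled_by_id_convert_to_bigint
    commit_id_convert_to_bigint
  ]
end
```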
@@ -12,12 +12,14 @@ module Ml

    validates :eid, :experiment, presence: true
    validates :status, inclusion: { in: statuses.keys }
    validates :model_version_id, uniqueness: { allow_nil: true }

    belongs_to :experiment, class_name: 'Ml::Experiment'
    belongs_to :user
    belongs_to :package, class_name: 'Packages::Package'
    belongs_to :project
    belongs_to :ci_build, class_name: 'Ci::Build', optional: true
    belongs_to :model_version, class_name: 'Ml::ModelVersion', optional: true, inverse_of: :candidate
    has_many :metrics, class_name: 'Ml::CandidateMetric'
    has_many :params, class_name: 'Ml::CandidateParam'
    has_many :metadata, class_name: 'Ml::CandidateMetadata'
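A spec-style sketch of what the new `model_version_id` uniqueness validation allows and rejects (illustrative only; it assumes the `:ml_candidates` and `:ml_model_versions` factories used by the specs later in this commit):

```ruby
# allow_nil lets many candidates carry no model version, but at most one
# candidate may reference a given model version.
version = create(:ml_model_versions)

create(:ml_candidates, model_version: nil)            # valid
create(:ml_candidates, model_version: nil)            # still valid, nil is not checked
create(:ml_candidates, model_version: version)        # valid, takes the version
build(:ml_candidates, model_version: version).valid?  # => false, model_version_id already taken
```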
@@ -20,6 +20,7 @@ module Ml
    belongs_to :model, class_name: 'Ml::Model'
    belongs_to :project
    belongs_to :package, class_name: 'Packages::MlModel::Package', optional: true
    has_one :candidate, class_name: 'Ml::Candidate'

    delegate :name, to: :model
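The `delegate :name, to: :model` line is roughly equivalent to hand-writing the following method (a sketch of standard Rails delegation, not code from this change):

```ruby
# Approximately what ActiveSupport generates for `delegate :name, to: :model`:
# the model version reports its model's name.
def name
  model.name
end
```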
@@ -0,0 +1,37 @@
# frozen_string_literal: true

module Ml
  class CreateCandidateService
    def initialize(experiment, params = {})
      @experiment = experiment
      @name = params[:name]
      @user = params[:user]
      @start_time = params[:start_time]
      @model_version = params[:model_version]
    end

    def execute
      Ml::Candidate.create!(
        experiment: experiment,
        project: experiment.project,
        name: candidate_name,
        start_time: start_time || 0,
        user: user,
        model_version: model_version
      )
    end

    private

    def candidate_name
      name.presence || random_candidate_name
    end

    def random_candidate_name
      parts = Array.new(3).map { FFaker::Animal.common_name.downcase.delete(' ') } << rand(10000)
      parts.join('-').truncate(255)
    end

    attr_reader :name, :user, :experiment, :start_time, :model_version
  end
end
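Callers elsewhere in this commit invoke the service exactly like the sketch below; the parameter values and `current_user` are placeholders, not taken from the change:

```ruby
# Minimal usage sketch: the service only reads name, user, start_time and
# model_version from params. A blank name falls back to the random
# "<animal>-<animal>-<animal>-<n>" name, and start_time defaults to 0.
candidate = Ml::CreateCandidateService.new(
  experiment,
  { name: 'my-run', user: current_user, start_time: 1234 }
).execute

candidate.experiment == experiment       # => true
candidate.project == experiment.project  # => true
```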
@@ -15,12 +15,13 @@ module Ml
      end

      def create!(experiment, start_time, tags = nil, name = nil)
        candidate = experiment.candidates.create!(
        create_params = {
          start_time: start_time,
          user: user,
          name: candidate_name(name, tags),
          project: project,
          start_time: start_time || 0
        )
          name: candidate_name(name, tags)
        }

        candidate = Ml::CreateCandidateService.new(experiment, create_params).execute

        add_tags(candidate, tags)
@@ -103,17 +104,12 @@ module Ml
      end

      def candidate_name(name, tags)
        name.presence || candidate_name_from_tags(tags) || random_candidate_name
        name.presence || candidate_name_from_tags(tags)
      end

      def candidate_name_from_tags(tags)
        tags&.detect { |t| t[:key] == 'mlflow.runName' }&.dig(:value)
      end

      def random_candidate_name
        parts = Array.new(3).map { FFaker::Animal.common_name.downcase.delete(' ') } << rand(10000)
        parts.join('-').truncate(255)
      end
    end
  end
end
@@ -12,7 +12,15 @@ module Ml

    def execute
      model = Ml::FindOrCreateModelService.new(project, name).execute
      Ml::ModelVersion.find_or_create!(model, version, package, description)

      model_version = Ml::ModelVersion.find_or_create!(model, version, package, description)

      model_version.candidate = ::Ml::CreateCandidateService.new(
        model.default_experiment,
        { model_version: model_version }
      ).execute

      model_version
    end

    private
@@ -1,9 +1,9 @@
---
table_name: approval_group_rules
classes:
- ApprovalRules::ApprovalGroupRule
- ApprovalRules::ApprovalGroupRule
feature_categories:
- source_code_management
- source_code_management
description: Keeps approval group rules
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/132651
milestone: '16.5'
@@ -0,0 +1,7 @@
# frozen_string_literal: true

class AddColumnModelVersionIdToMlCandidates < Gitlab::Database::Migration[2.1]
  def change
    add_column :ml_candidates, :model_version_id, :bigint, null: true
  end
end
@@ -0,0 +1,15 @@
# frozen_string_literal: true

class AddIndexOnModelVersionIdToMlCandidates < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  INDEX_NAME = 'index_ml_candidates_on_model_version_id'

  def up
    add_concurrent_index :ml_candidates, :model_version_id, name: INDEX_NAME, unique: true
  end

  def down
    remove_concurrent_index_by_name :ml_candidates, name: INDEX_NAME
  end
end
@@ -0,0 +1,15 @@
# frozen_string_literal: true

class AddFkOnMlCandidatesToMlModelVersions < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  def up
    add_concurrent_foreign_key(:ml_candidates, :ml_model_versions, column: :model_version_id, on_delete: :cascade)
  end

  def down
    with_lock_retries do
      remove_foreign_key_if_exists(:ml_model_versions, column: :model_version_id, on_delete: :cascade)
    end
  end
end
@@ -0,0 +1,37 @@
# frozen_string_literal: true

class InitConversionForPCiBuilds < Gitlab::Database::Migration[2.1]
  include ::Gitlab::Database::MigrationHelpers::WraparoundAutovacuum

  disable_ddl_transaction!

  TABLE_NAME = :p_ci_builds
  COLUMN_NAMES = %i[
    auto_canceled_by_id
    commit_id
    erased_by_id
    project_id
    runner_id
    trigger_request_id
    upstream_pipeline_id
    user_id
  ]

  def up
    return if should_skip?

    initialize_conversion_of_integer_to_bigint(TABLE_NAME, COLUMN_NAMES)
  end

  def down
    return if should_skip?

    revert_initialize_conversion_of_integer_to_bigint(TABLE_NAME, COLUMN_NAMES)
  end

  private

  def should_skip?
    !can_execute_on?(TABLE_NAME)
  end
end
@@ -0,0 +1 @@
16a0b32619e6b28c49fc2e9e609970ac61582c295c2ed281c531e407de2af216

@@ -0,0 +1 @@
1b0cd52ccf99a477f39168cdb6b719d5b64f6110a7fd9df0a6f200c6ff9c0237

@@ -0,0 +1 @@
030809f5519906dbdcdf3b8fd35a7181ca2c9ec1bdca745997aa14f24ee6ac6d

@@ -0,0 +1 @@
df2937c8e70fde85677ef150ed6c2445d06b8a2f7113f58a8908a81ece449d75
@@ -298,6 +298,22 @@ BEGIN
END;
$$;

CREATE FUNCTION trigger_10ee1357e825() RETURNS trigger
    LANGUAGE plpgsql
    AS $$
BEGIN
  NEW."auto_canceled_by_id_convert_to_bigint" := NEW."auto_canceled_by_id";
  NEW."commit_id_convert_to_bigint" := NEW."commit_id";
  NEW."erased_by_id_convert_to_bigint" := NEW."erased_by_id";
  NEW."project_id_convert_to_bigint" := NEW."project_id";
  NEW."runner_id_convert_to_bigint" := NEW."runner_id";
  NEW."trigger_request_id_convert_to_bigint" := NEW."trigger_request_id";
  NEW."upstream_pipeline_id_convert_to_bigint" := NEW."upstream_pipeline_id";
  NEW."user_id_convert_to_bigint" := NEW."user_id";
  RETURN NEW;
END;
$$;

CREATE FUNCTION trigger_1bd97da9c1a4() RETURNS trigger
    LANGUAGE plpgsql
    AS $$
@@ -13379,6 +13395,14 @@ CREATE TABLE p_ci_builds (
    stage_id bigint,
    partition_id bigint NOT NULL,
    auto_canceled_by_partition_id bigint DEFAULT 100 NOT NULL,
    auto_canceled_by_id_convert_to_bigint bigint,
    commit_id_convert_to_bigint bigint,
    erased_by_id_convert_to_bigint bigint,
    project_id_convert_to_bigint bigint,
    runner_id_convert_to_bigint bigint,
    trigger_request_id_convert_to_bigint bigint,
    upstream_pipeline_id_convert_to_bigint bigint,
    user_id_convert_to_bigint bigint,
    CONSTRAINT check_1e2fbd1b39 CHECK ((lock_version IS NOT NULL))
)
PARTITION BY LIST (partition_id);
@@ -13429,6 +13453,14 @@ CREATE TABLE ci_builds (
    stage_id bigint,
    partition_id bigint NOT NULL,
    auto_canceled_by_partition_id bigint DEFAULT 100 NOT NULL,
    auto_canceled_by_id_convert_to_bigint bigint,
    commit_id_convert_to_bigint bigint,
    erased_by_id_convert_to_bigint bigint,
    project_id_convert_to_bigint bigint,
    runner_id_convert_to_bigint bigint,
    trigger_request_id_convert_to_bigint bigint,
    upstream_pipeline_id_convert_to_bigint bigint,
    user_id_convert_to_bigint bigint,
    CONSTRAINT check_1e2fbd1b39 CHECK ((lock_version IS NOT NULL))
);
@@ -18883,6 +18915,7 @@ CREATE TABLE ml_candidates (
    project_id bigint,
    internal_id bigint,
    ci_build_id bigint,
    model_version_id bigint,
    CONSTRAINT check_25e6c65051 CHECK ((char_length(name) <= 255)),
    CONSTRAINT check_cd160587d4 CHECK ((eid IS NOT NULL))
);
@@ -33229,6 +33262,8 @@ CREATE INDEX index_ml_candidates_on_ci_build_id ON ml_candidates USING btree (ci

CREATE UNIQUE INDEX index_ml_candidates_on_experiment_id_and_eid ON ml_candidates USING btree (experiment_id, eid);

CREATE UNIQUE INDEX index_ml_candidates_on_model_version_id ON ml_candidates USING btree (model_version_id);

CREATE INDEX index_ml_candidates_on_package_id ON ml_candidates USING btree (package_id);

CREATE INDEX index_ml_candidates_on_project_id ON ml_candidates USING btree (project_id);
@@ -36747,6 +36782,8 @@ CREATE TRIGGER tags_loose_fk_trigger AFTER DELETE ON tags REFERENCING OLD TABLE

CREATE TRIGGER trigger_07bc3c48f407 BEFORE INSERT OR UPDATE ON ci_stages FOR EACH ROW EXECUTE FUNCTION trigger_07bc3c48f407();

CREATE TRIGGER trigger_10ee1357e825 BEFORE INSERT OR UPDATE ON p_ci_builds FOR EACH ROW EXECUTE FUNCTION trigger_10ee1357e825();

CREATE TRIGGER trigger_1bd97da9c1a4 BEFORE INSERT OR UPDATE ON ci_pipelines FOR EACH ROW EXECUTE FUNCTION trigger_1bd97da9c1a4();

CREATE TRIGGER trigger_68d7b6653c7d BEFORE INSERT OR UPDATE ON ci_sources_pipelines FOR EACH ROW EXECUTE FUNCTION trigger_68d7b6653c7d();
@@ -37828,6 +37865,9 @@ ALTER TABLE ONLY namespaces
ALTER TABLE ONLY fork_networks
    ADD CONSTRAINT fk_e7b436b2b5 FOREIGN KEY (root_project_id) REFERENCES projects(id) ON DELETE SET NULL;

ALTER TABLE ONLY ml_candidates
    ADD CONSTRAINT fk_e86e0bfa5a FOREIGN KEY (model_version_id) REFERENCES ml_model_versions(id) ON DELETE CASCADE;

ALTER TABLE ONLY integrations
    ADD CONSTRAINT fk_e8fe908a34 FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
@@ -361,7 +361,7 @@ Configure Gitaly server in one of two ways:
WARNING:
If directly copying repository data from a GitLab server to Gitaly, ensure that the metadata file,
default path `/var/opt/gitlab/git-data/repositories/.gitaly-metadata`, is not included in the transfer.
Copying this file causes GitLab to use the [Rugged patches](index.md#direct-access-to-git-in-gitlab) for repositories hosted on the Gitaly server,
Copying this file causes GitLab to use direct disk access to repositories hosted on the Gitaly server,
leading to `Error creating pipeline` and `Commit not found` errors, or stale data.

### Configure Gitaly clients
@@ -587,92 +587,6 @@ off Gitaly Cluster to a sharded Gitaly instance:
1. [Move the repositories](../operations/moving_repositories.md#moving-repositories) to the newly created storage. You can
   move them by shard or by group, which gives you the opportunity to spread them over multiple Gitaly servers.

## Direct access to Git in GitLab

Direct access to Git uses code in GitLab known as the "Rugged patches".

Before Gitaly existed, what are now Gitaly clients accessed Git repositories directly, either:

- On a local disk in the case of a single-machine Linux package installation.
- Using NFS in the case of a horizontally-scaled GitLab installation.

In addition to running plain `git` commands, GitLab used a Ruby library called
[Rugged](https://github.com/libgit2/rugged). Rugged is a wrapper around
[libgit2](https://libgit2.org/), a stand-alone implementation of Git in the form of a C library.

Over time it became clear that Rugged, particularly in combination with
[Unicorn](https://yhbt.net/unicorn/), is extremely efficient. Because `libgit2` is a library and
not an external process, there was very little overhead between:

- GitLab application code that tried to look up data in Git repositories.
- The Git implementation itself.

Because the combination of Rugged and Unicorn was so efficient, the GitLab application code ended up
with lots of duplicate Git object lookups. For example, looking up the default branch commit a dozen
times in one request. We could write inefficient code without poor performance.

When we migrated these Git lookups to Gitaly calls, we suddenly had a much higher fixed cost per Git
lookup. Even when Gitaly is able to re-use an already-running `git` process (for example, to look up
a commit), you still have:

- The cost of a network roundtrip to Gitaly.
- Inside Gitaly, a write/read roundtrip on the Unix pipes that connect Gitaly to the `git` process.

Using GitLab.com to measure, we reduced the number of Gitaly calls per request until we no longer felt
the efficiency loss of losing Rugged. It also helped that we run Gitaly itself directly on the Git
file servers, rather than by using NFS mounts. This gave us a speed boost that counteracted the
negative effect of not using Rugged anymore.

Unfortunately, other deployments of GitLab could not remove NFS like we did on GitLab.com, and they
got the worst of both worlds:

- The slowness of NFS.
- The increased inherent overhead of Gitaly.

The code removed from GitLab during the Gitaly migration project affected these deployments. As a
performance workaround for these NFS-based deployments, we re-introduced some of the old Rugged
code. This re-introduced code is informally referred to as the "Rugged patches".

### Automatic detection

> Automatic detection for Rugged [disabled](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/95445) in GitLab 15.3.

FLAG:
On self-managed GitLab, by default automatic detection of whether Rugged should be used (per storage) is not available.
To make it available, an administrator can [disable the feature flag](../../administration/feature_flags.md) named
`skip_rugged_auto_detect`.

The Ruby methods that perform direct Git access are behind
[feature flags](../../development/gitaly.md#legacy-rugged-code), disabled by default. It wasn't
convenient to set feature flags to get the best performance, so we added an automatic mechanism that
enables direct Git access.

When GitLab calls a function that has a "Rugged patch", it performs two checks:

- Is the feature flag for this patch set in the database? If so, the feature flag setting controls
  the GitLab use of "Rugged patch" code.
- If the feature flag is not set, GitLab tries accessing the file system underneath the
  Gitaly server directly. If it can, it uses the "Rugged patch":
  - If using Puma and [thread count](../../install/requirements.md#puma-threads) is set
    to `1`.

The result of these checks is cached.

To see if GitLab can access the repository file system directly, we use the following heuristic:

- Gitaly ensures that the file system has a metadata file in its root with a UUID in it.
- Gitaly reports this UUID to GitLab by using the `ServerInfo` RPC.
- GitLab Rails tries to read the metadata file directly. If it exists, and if the UUIDs match,
  assume we have direct access.

Direct Git access is:

- [Disabled](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/95445) by default in GitLab 15.3 and later for
  compatibility with [Praefect-generated replica paths](#praefect-generated-replica-paths-gitlab-150-and-later). It
  can be enabled if Rugged [feature flags](../../development/gitaly.md#legacy-rugged-code) are enabled.
- Enabled by default in GitLab 15.2 and earlier because it fills in the correct repository paths in the GitLab
  configuration file `config/gitlab.yml`. This satisfies the UUID check.

### Transition to Gitaly Cluster

For the sake of removing complexity, we must remove direct Git access in GitLab. However, we can't
@@ -243,7 +243,6 @@ configuration option in `gitlab.yml`. These metrics are served from the
| `geo_cursor_last_event_timestamp` | Gauge | 10.2 | Last UNIX timestamp of the event log processed by the secondary | `url` |
| `geo_status_failed_total` | Counter | 10.2 | Number of times retrieving the status from the Geo Node failed | `url` |
| `geo_last_successful_status_check_timestamp` | Gauge | 10.2 | Last timestamp when the status was successfully updated | `url` |
| `geo_job_artifacts_synced_missing_on_primary` | Gauge | 10.7 | Number of job artifacts marked as synced due to the file missing on the primary | `url` |
| `geo_package_files` | Gauge | 13.0 | Number of package files on primary | `url` |
| `geo_package_files_checksummed` | Gauge | 13.0 | Number of package files checksummed on primary | `url` |
| `geo_package_files_checksum_failed` | Gauge | 13.0 | Number of package files failed to calculate the checksum on primary | `url` |
@@ -140,37 +140,6 @@ When running Puma in single mode, some features are not supported:

For more information, see [epic 5303](https://gitlab.com/groups/gitlab-org/-/epics/5303).

## Performance caveat when using Puma with Rugged

For deployments where NFS is used to store Git repositories, GitLab uses
[direct Git access](../gitaly/index.md#direct-access-to-git-in-gitlab) to improve performance by using
[Rugged](https://github.com/libgit2/rugged).

Rugged usage is automatically enabled if direct Git access [is available](../gitaly/index.md#automatic-detection) and
Puma is running single threaded, unless it is disabled by a [feature flag](../../development/gitaly.md#legacy-rugged-code).

MRI Ruby uses a Global VM Lock (GVL). GVL allows MRI Ruby to be multi-threaded, but running at
most on a single core.

Git includes intensive I/O operations. When Rugged uses a thread for a long period of time,
other threads that might be processing requests can starve. Puma running in single thread mode
does not have this issue, because concurrently at most one request is being processed.

GitLab is working to remove Rugged usage. Even though performance without Rugged
is acceptable today, in some cases it might be still beneficial to run with it.

Given the caveat of running Rugged with multi-threaded Puma, and acceptable
performance of Gitaly, we disable Rugged usage if Puma multi-threaded is
used (when Puma is configured to run with more than one thread).

This default behavior may not be the optimal configuration in some situations. If Rugged
plays an important role in your deployment, we suggest you benchmark to find the
optimal configuration:

- The safest option is to start with single-threaded Puma.
- To force Rugged to be used with multi-threaded Puma, you can use a
  [feature flag](../../development/gitaly.md#legacy-rugged-code).

## Configuring Puma to listen over SSL

Puma, when deployed with a Linux package installation, listens over a Unix socket by
@@ -356,20 +356,6 @@ To enable LDAP with the synchronization worker for Sidekiq:

If you use [SAML Group Sync](../../user/group/saml_sso/group_sync.md), you must configure [SAML Groups](../../integration/saml.md#configure-users-based-on-saml-group-membership) on all your Sidekiq nodes.

## Disable Rugged

Calls into Rugged, Ruby bindings for `libgit2`, [lock the Sidekiq processes (GVL)](https://silverhammermba.github.io/emberb/c/#c-in-ruby-threads),
blocking all jobs on that worker from proceeding. If Rugged calls performed by Sidekiq are slow, this can cause significant delays in
background task processing.

By default, Rugged is used when Git repository data is stored on local storage or on an NFS mount.
Using Rugged is recommended when using NFS, but if
you are using local storage, disabling Rugged can improve Sidekiq performance:

```shell
sudo gitlab-rake gitlab:features:disable_rugged
```

## Related topics

- [Extra Sidekiq processes](extra_sidekiq_processes.md)
@@ -332,7 +332,6 @@ Example response:
  "job_artifacts_count": 2,
  "job_artifacts_synced_count": null,
  "job_artifacts_failed_count": null,
  "job_artifacts_synced_missing_on_primary_count": 0,
  "job_artifacts_synced_in_percentage": "0.00%",
  "projects_count": 41,
  "repositories_count": 41,

@@ -470,7 +469,6 @@ Example response:
  "job_artifacts_verification_failed_count": 0,
  "job_artifacts_synced_in_percentage": "100.00%",
  "job_artifacts_verified_in_percentage": "100.00%",
  "job_artifacts_synced_missing_on_primary_count": 0,
  "ci_secure_files_count": 5,
  "ci_secure_files_checksum_total_count": 5,
  "ci_secure_files_checksummed_count": 5,

@@ -483,7 +481,6 @@ Example response:
  "ci_secure_files_verification_failed_count": 0,
  "ci_secure_files_synced_in_percentage": "100.00%",
  "ci_secure_files_verified_in_percentage": "100.00%",
  "ci_secure_files_synced_missing_on_primary_count": 0,
  "dependency_proxy_blobs_count": 5,
  "dependency_proxy_blobs_checksum_total_count": 5,
  "dependency_proxy_blobs_checksummed_count": 5,

@@ -496,13 +493,11 @@ Example response:
  "dependency_proxy_blobs_verification_failed_count": 0,
  "dependency_proxy_blobs_synced_in_percentage": "100.00%",
  "dependency_proxy_blobs_verified_in_percentage": "100.00%",
  "dependency_proxy_blobs_synced_missing_on_primary_count": 0,
  "container_repositories_count": 5,
  "container_repositories_synced_count": 5,
  "container_repositories_failed_count": 0,
  "container_repositories_registry_count": 5,
  "container_repositories_synced_in_percentage": "100.00%",
  "container_repositories_synced_missing_on_primary_count": 0,
  "container_repositories_checksum_total_count": 0,
  "container_repositories_checksummed_count": 0,
  "container_repositories_checksum_failed_count": 0,

@@ -569,7 +564,6 @@ Example response:
  "job_artifacts_count": 2,
  "job_artifacts_synced_count": 1,
  "job_artifacts_failed_count": 1,
  "job_artifacts_synced_missing_on_primary_count": 0,
  "job_artifacts_synced_in_percentage": "50.00%",
  "design_management_repositories_count": 5,
  "design_management_repositories_synced_count": 5,

@@ -695,7 +689,6 @@ Example response:
  "job_artifacts_verification_failed_count": 0,
  "job_artifacts_synced_in_percentage": "100.00%",
  "job_artifacts_verified_in_percentage": "100.00%",
  "job_artifacts_synced_missing_on_primary_count": 0,
  "dependency_proxy_blobs_count": 5,
  "dependency_proxy_blobs_checksum_total_count": 5,
  "dependency_proxy_blobs_checksummed_count": 5,

@@ -708,13 +701,11 @@ Example response:
  "dependency_proxy_blobs_verification_failed_count": 0,
  "dependency_proxy_blobs_synced_in_percentage": "100.00%",
  "dependency_proxy_blobs_verified_in_percentage": "100.00%",
  "dependency_proxy_blobs_synced_missing_on_primary_count": 0,
  "container_repositories_count": 5,
  "container_repositories_synced_count": 5,
  "container_repositories_failed_count": 0,
  "container_repositories_registry_count": 5,
  "container_repositories_synced_in_percentage": "100.00%",
  "container_repositories_synced_missing_on_primary_count": 0,
  "container_repositories_checksum_total_count": 0,
  "container_repositories_checksummed_count": 0,
  "container_repositories_checksum_failed_count": 0,

@@ -785,7 +776,6 @@ Example response:
  "job_artifacts_count": 2,
  "job_artifacts_synced_count": 1,
  "job_artifacts_failed_count": 1,
  "job_artifacts_synced_missing_on_primary_count": 0,
  "job_artifacts_synced_in_percentage": "50.00%",
  "projects_count": 41,
  "repositories_count": 41,

@@ -896,7 +886,6 @@ Example response:
  "job_artifacts_verification_failed_count": 0,
  "job_artifacts_synced_in_percentage": "100.00%",
  "job_artifacts_verified_in_percentage": "100.00%",
  "job_artifacts_synced_missing_on_primary_count": 0,
  "ci_secure_files_count": 5,
  "ci_secure_files_checksum_total_count": 5,
  "ci_secure_files_checksummed_count": 5,

@@ -909,7 +898,6 @@ Example response:
  "ci_secure_files_verification_failed_count": 0,
  "ci_secure_files_synced_in_percentage": "100.00%",
  "ci_secure_files_verified_in_percentage": "100.00%",
  "ci_secure_files_synced_missing_on_primary_count": 0,
  "dependency_proxy_blobs_count": 5,
  "dependency_proxy_blobs_checksum_total_count": 5,
  "dependency_proxy_blobs_checksummed_count": 5,

@@ -922,13 +910,11 @@ Example response:
  "dependency_proxy_blobs_verification_failed_count": 0,
  "dependency_proxy_blobs_synced_in_percentage": "100.00%",
  "dependency_proxy_blobs_verified_in_percentage": "100.00%",
  "dependency_proxy_blobs_synced_missing_on_primary_count": 0,
  "container_repositories_count": 5,
  "container_repositories_synced_count": 5,
  "container_repositories_failed_count": 0,
  "container_repositories_registry_count": 5,
  "container_repositories_synced_in_percentage": "100.00%",
  "container_repositories_synced_missing_on_primary_count": 0,
  "container_repositories_checksum_total_count": 0,
  "container_repositories_checksummed_count": 0,
  "container_repositories_checksum_failed_count": 0,
@@ -292,7 +292,6 @@ Example response:
[
  {
    "geo_node_id": 1,
    "job_artifacts_synced_missing_on_primary_count": null,
    "projects_count": 19,
    "container_repositories_replication_enabled": null,
    "lfs_objects_count": 0,

@@ -510,7 +509,6 @@ Example response:
  },
  {
    "geo_node_id": 2,
    "job_artifacts_synced_missing_on_primary_count": null,
    "projects_count": 19,
    "container_repositories_replication_enabled": null,
    "lfs_objects_count": 0,

@@ -744,7 +742,6 @@ Example response:
```json
{
  "geo_node_id": 2,
  "job_artifacts_synced_missing_on_primary_count": null,
  "projects_count": 19,
  "container_repositories_replication_enabled": null,
  "lfs_objects_count": 0,
@@ -19,9 +19,3 @@ All access to Gitaly from other parts of GitLab are through Create: Source Code
After a call is made to Gitaly, Git `commit` information is stored in memory. This information is wrapped by the [Ruby `Commit` Model](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/models/commit.rb), which is a wrapper around [`Gitlab::Git::Commit`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/git/commit.rb).

The `Commit` model acts like an ActiveRecord object, but it does not have a PostgreSQL backend. Instead, it maps back to Gitaly RPCs.

## Rugged Patches

Historically in GitLab, access to the server-based `git` repositories was provided through the [rugged](https://github.com/libgit2/rugged) RubyGem, which provides Ruby bindings to `libgit2`. This was further extended by what is termed "Rugged Patches", [a set of extensions to the Rugged library](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/57317). Rugged implementations of some of the most commonly-used RPCs can be [enabled via feature flags](../../gitaly.md#legacy-rugged-code).

Rugged access requires the use of an NFS file system, a direction GitLab is moving away from in favor of Gitaly Cluster. Rugged has been proposed for [deprecation and removal](https://gitlab.com/gitlab-org/gitaly/-/issues/1690). Several large customers are still using NFS, and a specific removal date is not planned at this point.
@@ -64,45 +64,6 @@ rm -rf tmp/tests/gitaly

During RSpec tests, the Gitaly instance writes logs to `gitlab/log/gitaly-test.log`.

## Legacy Rugged code

While Gitaly can handle all Git access, many GitLab customers still
run Gitaly atop NFS. The legacy Rugged implementation for Git calls may
be faster than the Gitaly RPC due to N+1 Gitaly calls and other
reasons. See [the issue](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/57317) for more
details.

Until GitLab has eliminated most of these inefficiencies or the use of
NFS is discontinued for Git data, Rugged implementations of some of the
most commonly-used RPCs can be enabled via feature flags:

- `rugged_find_commit`
- `rugged_get_tree_entries`
- `rugged_tree_entry`
- `rugged_commit_is_ancestor`
- `rugged_commit_tree_entry`
- `rugged_list_commits_by_oid`

A convenience Rake task can be used to enable or disable these flags
all together. To enable:

```shell
bundle exec rake gitlab:features:enable_rugged
```

To disable:

```shell
bundle exec rake gitlab:features:disable_rugged
```

Most of this code exists in the `lib/gitlab/git/rugged_impl` directory.

NOTE:
You should *not* have to add or modify code related to Rugged unless explicitly discussed with the
[Gitaly Team](https://gitlab.com/groups/gl-gitaly/group_members). This code does not work on GitLab.com or other GitLab
instances that do not use NFS.

## `TooManyInvocationsError` errors

During development and testing, you may experience `Gitlab::GitalyClient::TooManyInvocationsError` failures.
@@ -28,9 +28,6 @@ Some notable gems that are used for wikis are:
| Component | Description | Gem name | GitLab project | Upstream project |
|:--------------|:-----------------------------------------------|:-------------------------------|:--------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------|
| `gitlab` | Markup renderer, depends on various other gems | `gitlab-markup` | [`gitlab-org/gitlab-markup`](https://gitlab.com/gitlab-org/gitlab-markup) | [`github/markup`](https://github.com/github/markup) |
| `gollum-lib` | Main Gollum library | `gitlab-gollum-lib` | [`gitlab-org/gollum-lib`](https://gitlab.com/gitlab-org/gollum-lib) | [`gollum/gollum-lib`](https://github.com/gollum/gollum-lib) |
| | Gollum Git adapter for Rugged | `gitlab-gollum-rugged_adapter` | [`gitlab-org/gitlab-gollum-rugged_adapter`](https://gitlab.com/gitlab-org/gitlab-gollum-rugged_adapter) | [`gollum/rugged_adapter`](https://github.com/gollum/rugged_adapter) |
| | Rugged (also used in Gitaly itself) | `rugged` | - | [`libgit2/rugged`](https://github.com/libgit2/rugged) |

### Notes on Gollum
@@ -227,8 +227,7 @@ optimal settings for your infrastructure.

### Puma threads

The recommended number of threads is dependent on several factors, including total memory, and use
of [legacy Rugged code](../administration/gitaly/index.md#direct-access-to-git-in-gitlab).
The recommended number of threads is dependent on several factors, including total memory.

- If the operating system has a maximum 2 GB of memory, the recommended number of threads is `1`.
  A higher value results in excess swapping, and decreases performance.
@@ -276,6 +276,7 @@ including a large number of false positives.
| `CS_REGISTRY_USER` | `$CI_REGISTRY_USER` | Username for accessing a Docker registry requiring authentication. The default is only set if `$CS_IMAGE` resides at [`$CI_REGISTRY`](../../../ci/variables/predefined_variables.md). Not supported when [FIPS mode](../../../development/fips_compliance.md#enable-fips-mode) is enabled. | All |
| `CS_DOCKERFILE_PATH` | `Dockerfile` | The path to the `Dockerfile` to use for generating remediations. By default, the scanner looks for a file named `Dockerfile` in the root directory of the project. You should configure this variable only if your `Dockerfile` is in a non-standard location, such as a subdirectory. See [Solutions for vulnerabilities](#solutions-for-vulnerabilities-auto-remediation) for more details. | All |
| `CS_QUIET` | `""` | If set, this variable disables output of the [vulnerabilities table](#container-scanning-job-log-format) in the job log. [Introduced](https://gitlab.com/gitlab-org/security-products/analyzers/container-scanning/-/merge_requests/50) in GitLab 15.1. | All |
| `CS_TRIVY_JAVA_DB` | `"ghcr.io/aquasecurity/trivy-java-db"` | Specify an alternate location for the [trivy-java-db](https://github.com/aquasecurity/trivy-java-db) vulnerability database. | Trivy |
| `SECURE_LOG_LEVEL` | `info` | Set the minimum logging level. Messages of this logging level or higher are output. From highest to lowest severity, the logging levels are: `fatal`, `error`, `warn`, `info`, `debug`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/10880) in GitLab 13.1. | All |

### Supported distributions

@@ -658,6 +659,32 @@ Also:

Scanning images in external private registries is not supported when [FIPS mode](../../../development/fips_compliance.md#enable-fips-mode) is enabled.

#### Create and use a Trivy Java database mirror

When the `trivy` scanner is used and a `jar` file is encountered in a container image being scanned, `trivy` downloads an additional `trivy-java-db` vulnerability database. By default, the `trivy-java-db` database is hosted as an [OCI artifact](https://oras.land/docs/quickstart) at `ghcr.io/aquasecurity/trivy-java-db:1`. If this registry is not accessible, for example in a network-isolated offline GitLab instance, one solution is to mirror the `trivy-java-db` to a container registry that can be accessed in the offline instance:

```yaml
mirror trivy java db:
  image:
    name: ghcr.io/oras-project/oras:v1.1.0
    entrypoint: [""]
  script:
    - oras login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
    - oras pull ghcr.io/aquasecurity/trivy-java-db:1
    - oras push $CI_REGISTRY_IMAGE:1 --config /dev/null:application/vnd.aquasec.trivy.config.v1+json javadb.tar.gz:application/vnd.aquasec.trivy.javadb.layer.v1.tar+gzip
```

If the above container registry is `gitlab.example.com/trivy-java-db-mirror`, then the container scanning job should be configured in the following way:

```yaml
include:
  - template: Security/Container-Scanning.gitlab-ci.yml

container_scanning:
  variables:
    CS_TRIVY_JAVA_DB: gitlab.example.com/trivy-java-db-mirror:1
```

## Running the standalone container scanning tool

It's possible to run the [GitLab container scanning tool](https://gitlab.com/gitlab-org/security-products/analyzers/container-scanning)
@@ -16,6 +16,7 @@ RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops d
    it { is_expected.to belong_to(:user) }
    it { is_expected.to belong_to(:package) }
    it { is_expected.to belong_to(:ci_build).class_name('Ci::Build') }
    it { is_expected.to belong_to(:model_version).class_name('Ml::ModelVersion') }
    it { is_expected.to have_many(:params) }
    it { is_expected.to have_many(:metrics) }
    it { is_expected.to have_many(:metadata) }

@@ -35,6 +36,45 @@ RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops d
    it { expect(described_class.new.eid).to be_present }
  end

  describe 'validation' do
    let_it_be(:model) { create(:ml_models, project: candidate.project) }
    let_it_be(:model_version1) { create(:ml_model_versions, model: model) }
    let_it_be(:model_version2) { create(:ml_model_versions, model: model) }
    let_it_be(:validation_candidate) do
      create(:ml_candidates, model_version: model_version1, project: candidate.project)
    end

    let(:params) do
      {
        model_version: nil
      }
    end

    subject(:errors) do
      candidate = described_class.new(**params)
      candidate.validate
      candidate.errors
    end

    describe 'model_version' do
      context 'when model_version is nil' do
        it { expect(errors).not_to include(:model_version_id) }
      end

      context 'when no other candidate is associated to the model_version' do
        let(:params) { { model_version: model_version2 } }

        it { expect(errors).not_to include(:model_version_id) }
      end

      context 'when another candidate has model_version_id' do
        let(:params) { { model_version: validation_candidate.model_version } }

        it { expect(errors).to include(:model_version_id) }
      end
    end
  end

  describe '.destroy' do
    let_it_be(:candidate_to_destroy) do
      create(:ml_candidates, :with_metrics_and_params, :with_metadata, :with_artifact)
@@ -18,6 +18,7 @@ RSpec.describe Ml::ModelVersion, feature_category: :mlops do
    it { is_expected.to belong_to(:project) }
    it { is_expected.to belong_to(:model) }
    it { is_expected.to belong_to(:package).class_name('Packages::MlModel::Package') }
    it { is_expected.to have_one(:candidate).class_name('Ml::Candidate') }
  end

  describe 'validation' do
@@ -0,0 +1,57 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe ::Ml::CreateCandidateService, feature_category: :mlops do
  describe '#execute' do
    let_it_be(:model_version) { create(:ml_model_versions) }
    let_it_be(:experiment) { create(:ml_experiments, project: model_version.project) }

    let(:params) { {} }

    subject(:candidate) { described_class.new(experiment, params).execute }

    context 'with default parameters' do
      it 'creates a candidate' do
        expect { candidate }.to change { experiment.candidates.count }.by(1)
      end

      it 'gives a fake name' do
        expect(candidate.name).to match(/[a-z]+-[a-z]+-[a-z]+-\d+/)
      end

      it 'sets the correct values', :aggregate_failures do
        expect(candidate.start_time).to eq(0)
        expect(candidate.experiment).to be(experiment)
        expect(candidate.project).to be(experiment.project)
        expect(candidate.user).to be_nil
      end
    end

    context 'when parameters are passed' do
      let(:params) do
        {
          start_time: 1234,
          name: 'candidate_name',
          model_version: model_version,
          user: experiment.user
        }
      end

      context 'with default parameters' do
        it 'creates a candidate' do
          expect { candidate }.to change { experiment.candidates.count }.by(1)
        end

        it 'sets the correct values', :aggregate_failures do
          expect(candidate.start_time).to eq(1234)
          expect(candidate.experiment).to be(experiment)
          expect(candidate.project).to be(experiment.project)
          expect(candidate.user).to be(experiment.user)
          expect(candidate.name).to eq('candidate_name')
          expect(candidate.model_version_id).to eq(model_version.id)
        end
      end
    end
  end
end
@@ -28,6 +28,7 @@ RSpec.describe ::Ml::FindOrCreateModelVersionService, feature_category: :mlops d

      it 'returns existing model version', :aggregate_failures do
        expect { model_version }.to change { Ml::ModelVersion.count }.by(0)
        expect { model_version }.to change { Ml::Candidate.count }.by(0)
        expect(model_version).to eq(existing_version)
      end
    end

@@ -41,11 +42,12 @@ RSpec.describe ::Ml::FindOrCreateModelVersionService, feature_category: :mlops d
      let(:package) { create(:ml_model_package, project: project, name: name, version: version) }

      it 'creates a new model version', :aggregate_failures do
        expect { model_version }.to change { Ml::ModelVersion.count }
        expect { model_version }.to change { Ml::ModelVersion.count }.by(1).and change { Ml::Candidate.count }.by(1)

        expect(model_version.name).to eq(name)
        expect(model_version.version).to eq(version)
        expect(model_version.package).to eq(package)
        expect(model_version.candidate.model_version_id).to eq(model_version.id)
        expect(model_version.description).to eq(description)
      end
    end
@@ -15,10 +15,15 @@ RSpec.describe Tooling::Danger::RubocopInlineDisableSuggestion, feature_category

  let(:template) do
    <<~SUGGESTION_MARKDOWN.chomp
      ```suggestion
      %<suggested_line>s
      ```

      Consider removing this inline disabling and adhering to the rubocop rule.
      If that isn't possible, please provide context as a reply for reviewers.
      See [rubocop best practices](https://docs.gitlab.com/ee/development/rubocop_development_guide.html).

      If that isn't possible, please provide the reason as a code comment in the
      same line where the rule is disabled separated by ` -- `.
      See [rubocop best practices](https://docs.gitlab.com/ee/development/rubocop_development_guide.html#disabling-rules-inline).

      ----

@@ -77,6 +82,9 @@ RSpec.describe Tooling::Danger::RubocopInlineDisableSuggestion, feature_category

      def show_my_new_dot?(project, namespace)
        return false unless ::Gitlab.com? # rubocop: todo Gitlab/AvoidGitlabInstanceChecks -- Reason for disabling
        thatsfine = "".dup # rubocop:disable Lint/UselessAssignment,Performance/UnfreezeString -- That's OK
        me = "".dup # rubocop:disable Lint/UselessAssignment,Performance/UnfreezeString
        test = "".dup # rubocop:disable Lint/UselessAssignment, Performance/UnfreezeString
        return false if notification_dot_acknowledged?

        show_out_of_pipeline_minutes_notification?(project, namespace)

@@ -102,6 +110,8 @@ RSpec.describe Tooling::Danger::RubocopInlineDisableSuggestion, feature_category
      + return false unless ::Gitlab.com? # rubocop:todo Gitlab/AvoidGitlabInstanceChecks
      + return false unless ::Gitlab.com? # rubocop: todo Gitlab/AvoidGitlabInstanceChecks
      + return false unless ::Gitlab.com? # rubocop: todo Gitlab/AvoidGitlabInstanceChecks -- Reason for disabling
      + me = "".dup # rubocop:disable Lint/UselessAssignment,Performance/UnfreezeString
      + test = "".dup # rubocop:disable Lint/UselessAssignment, Performance/UnfreezeString
      + return false unless ::Gitlab.com? # rubocop: todo Gitlab/AvoidGitlabInstanceChecks --
    DIFF
  end

@@ -119,8 +129,12 @@ RSpec.describe Tooling::Danger::RubocopInlineDisableSuggestion, feature_category
    end

    it 'adds comments at the correct lines', :aggregate_failures do
      [3, 7, 13, 20, 27, 34, 41, 55].each do |line_number|
        expect(rubocop).to receive(:markdown).with(template, file: filename, line: line_number)
      [3, 7, 13, 20, 27, 34, 41, 50, 51, 58].each do |line_number|
        existing_line = file_lines[line_number - 1].sub(/ --\s*$/, '')
        suggested_line = "#{existing_line} -- TODO: Reason why the rule must be disabled"
        comment = format(template, suggested_line: suggested_line)

        expect(rubocop).to receive(:markdown).with(comment, file: filename, line: line_number)
      end

      rubocop.add_suggestions_for(filename)
@@ -5,13 +5,15 @@ require_relative 'suggestion'
module Tooling
  module Danger
    class RubocopInlineDisableSuggestion < Suggestion
      MATCH = /^\+.*#\s*rubocop\s*:\s*(?:disable|todo)\s+(?!.*\s--\s\S)/
      REPLACEMENT = nil
      MATCH = %r{^(?<line>.*#\s*rubocop\s*:\s*(?:disable|todo)\s+(?:[\w/]+(?:\s*,\s*[\w/]+)*))\s*(?!.*\s*--\s\S).*}
      REPLACEMENT = '\k<line> -- TODO: Reason why the rule must be disabled'

      SUGGESTION = <<~MESSAGE_MARKDOWN
        Consider removing this inline disabling and adhering to the rubocop rule.
        If that isn't possible, please provide context as a reply for reviewers.
        See [rubocop best practices](https://docs.gitlab.com/ee/development/rubocop_development_guide.html).

        If that isn't possible, please provide the reason as a code comment in the
        same line where the rule is disabled separated by ` -- `.
        See [rubocop best practices](https://docs.gitlab.com/ee/development/rubocop_development_guide.html#disabling-rules-inline).

        ----
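A quick standalone check of the new `MATCH`/`REPLACEMENT` pair (plain Ruby, not part of this change; the first sample line is taken from the spec fixture above):

```ruby
# The named capture keeps everything up to the cop list; the negative lookahead
# skips lines that already carry a ` -- reason`, so only bare disables get the
# TODO suffix appended.
MATCH = %r{^(?<line>.*#\s*rubocop\s*:\s*(?:disable|todo)\s+(?:[\w/]+(?:\s*,\s*[\w/]+)*))\s*(?!.*\s*--\s\S).*}
REPLACEMENT = '\k<line> -- TODO: Reason why the rule must be disabled'

bare = 'me = "".dup # rubocop:disable Lint/UselessAssignment,Performance/UnfreezeString'
explained = 'x = 1 # rubocop:disable Lint/UselessAssignment -- kept for the example'

puts bare.gsub(MATCH, REPLACEMENT)
# => me = "".dup # rubocop:disable Lint/UselessAssignment,Performance/UnfreezeString -- TODO: Reason why the rule must be disabled
puts explained.gsub(MATCH, REPLACEMENT)
# => unchanged, because the lookahead sees the existing ` -- kept for the example`
```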