Add latest changes from gitlab-org/gitlab@master
parent a303eb5d32
commit 611897b987
@@ -42,7 +42,7 @@ review-docs-cleanup:
docs-lint links:
  extends:
    - .docs:rules:docs-lint
  image: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-docs/lint-html:alpine-3.18-ruby-3.2.2-6a53d93b
  image: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-docs/lint-html:alpine-3.18-ruby-3.2.2-08fa6df8
  stage: lint
  needs: []
  script:
@@ -1,6 +1,6 @@
include:
  - project: gitlab-org/quality/pipeline-common
    ref: 7.10.2
    ref: 7.10.3
    file:
      - /ci/danger-review.yml
@@ -1412,20 +1412,6 @@ Layout/ArgumentAlignment:
    - 'spec/lib/gitlab/changelog/config_spec.rb'
    - 'spec/lib/gitlab/checks/changes_access_spec.rb'
    - 'spec/lib/gitlab/checks/single_change_access_spec.rb'
    - 'spec/lib/gitlab/ci/badge/pipeline/status_spec.rb'
    - 'spec/lib/gitlab/ci/build/artifacts/metadata/entry_spec.rb'
    - 'spec/lib/gitlab/ci/build/artifacts/metadata_spec.rb'
    - 'spec/lib/gitlab/ci/build/hook_spec.rb'
    - 'spec/lib/gitlab/ci/build/policy/changes_spec.rb'
    - 'spec/lib/gitlab/ci/build/policy/variables_spec.rb'
    - 'spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb'
    - 'spec/lib/gitlab/ci/config/entry/bridge_spec.rb'
    - 'spec/lib/gitlab/ci/config/entry/job_spec.rb'
    - 'spec/lib/gitlab/ci/config/entry/policy_spec.rb'
    - 'spec/lib/gitlab/ci/config/extendable/entry_spec.rb'
    - 'spec/lib/gitlab/ci/config/external/mapper_spec.rb'
    - 'spec/lib/gitlab/ci/config/external/rules_spec.rb'
    - 'spec/lib/gitlab/ci/parsers/security/common_spec.rb'
    - 'spec/lib/gitlab/ci/pipeline/chain/command_spec.rb'
    - 'spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb'
    - 'spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb'
@@ -3,63 +3,6 @@
Style/PercentLiteralDelimiters:
  Exclude:
    - 'metrics_server/metrics_server.rb'
    - 'spec/requests/api/badges_spec.rb'
    - 'spec/requests/api/ci/jobs_spec.rb'
    - 'spec/requests/api/ci/pipelines_spec.rb'
    - 'spec/requests/api/ci/runner/jobs_request_post_spec.rb'
    - 'spec/requests/api/ci/runner/runners_post_spec.rb'
    - 'spec/requests/api/ci/triggers_spec.rb'
    - 'spec/requests/api/container_repositories_spec.rb'
    - 'spec/requests/api/deployments_spec.rb'
    - 'spec/requests/api/geo_spec.rb'
    - 'spec/requests/api/graphql/ci/manual_variables_spec.rb'
    - 'spec/requests/api/graphql/gitlab_schema_spec.rb'
    - 'spec/requests/api/graphql/group/container_repositories_spec.rb'
    - 'spec/requests/api/graphql/group/milestones_spec.rb'
    - 'spec/requests/api/graphql/mutations/design_management/delete_spec.rb'
    - 'spec/requests/api/graphql/mutations/snippets/create_spec.rb'
    - 'spec/requests/api/graphql/mutations/snippets/destroy_spec.rb'
    - 'spec/requests/api/graphql/project/base_service_spec.rb'
    - 'spec/requests/api/graphql/project/container_repositories_spec.rb'
    - 'spec/requests/api/graphql/project/issue/design_collection/versions_spec.rb'
    - 'spec/requests/api/graphql/project/issue_spec.rb'
    - 'spec/requests/api/graphql/project/jira_import_spec.rb'
    - 'spec/requests/api/graphql/project/jira_projects_spec.rb'
    - 'spec/requests/api/graphql/project/release_spec.rb'
    - 'spec/requests/api/graphql/project/terraform/state_spec.rb'
    - 'spec/requests/api/graphql/project/terraform/states_spec.rb'
    - 'spec/requests/api/internal/base_spec.rb'
    - 'spec/requests/api/issues/get_group_issues_spec.rb'
    - 'spec/requests/api/issues/get_project_issues_spec.rb'
    - 'spec/requests/api/issues/issues_spec.rb'
    - 'spec/requests/api/issues/post_projects_issues_spec.rb'
    - 'spec/requests/api/issues/put_projects_issues_spec.rb'
    - 'spec/requests/api/merge_requests_spec.rb'
    - 'spec/requests/api/metadata_spec.rb'
    - 'spec/requests/api/project_container_repositories_spec.rb'
    - 'spec/requests/api/project_templates_spec.rb'
    - 'spec/requests/api/projects_spec.rb'
    - 'spec/requests/api/releases_spec.rb'
    - 'spec/requests/api/repositories_spec.rb'
    - 'spec/requests/api/search_spec.rb'
    - 'spec/requests/api/settings_spec.rb'
    - 'spec/requests/api/tags_spec.rb'
    - 'spec/requests/api/task_completion_status_spec.rb'
    - 'spec/requests/api/unleash_spec.rb'
    - 'spec/requests/api/users_spec.rb'
    - 'spec/requests/api/wikis_spec.rb'
    - 'spec/requests/jwt_controller_spec.rb'
    - 'spec/requests/lfs_locks_api_spec.rb'
    - 'spec/requests/users_controller_spec.rb'
    - 'spec/routing/uploads_routing_spec.rb'
    - 'spec/rubocop/cop/migration/migration_record_spec.rb'
    - 'spec/rubocop/cop/migration/prevent_index_creation_spec.rb'
    - 'spec/rubocop/cop/migration/sidekiq_queue_migrate_spec.rb'
    - 'spec/rubocop/cop/performance/readlines_each_spec.rb'
    - 'spec/serializers/build_details_entity_spec.rb'
    - 'spec/serializers/container_repositories_serializer_spec.rb'
    - 'spec/serializers/diff_file_entity_spec.rb'
    - 'spec/serializers/group_child_entity_spec.rb'
    - 'spec/services/award_emojis/copy_service_spec.rb'
    - 'spec/services/bulk_imports/file_download_service_spec.rb'
    - 'spec/services/bulk_imports/lfs_objects_export_service_spec.rb'
@@ -14,6 +14,7 @@ import {
import allRunnersQuery from 'ee_else_ce/ci/runner/graphql/list/all_runners.query.graphql';
import allRunnersCountQuery from 'ee_else_ce/ci/runner/graphql/list/all_runners_count.query.graphql';

import RunnerListHeader from '../components/runner_list_header.vue';
import RegistrationDropdown from '../components/registration/registration_dropdown.vue';
import RunnerFilteredSearchBar from '../components/runner_filtered_search_bar.vue';
import RunnerList from '../components/runner_list.vue';
@@ -42,6 +43,7 @@ export default {
  components: {
    GlButton,
    GlLink,
    RunnerListHeader,
    RegistrationDropdown,
    RunnerFilteredSearchBar,
    RunnerList,
@@ -175,11 +177,9 @@ export default {
</script>
<template>
  <div>
    <header class="gl-my-5 gl-display-flex gl-justify-content-space-between">
      <h2 class="gl-my-0 header-title">
        {{ s__('Runners|Runners') }}
      </h2>
      <div class="gl-display-flex gl-gap-3">
    <runner-list-header>
      <template #title>{{ s__('Runners|Runners') }}</template>
      <template #actions>
        <runner-dashboard-link />
        <gl-button :href="newRunnerPath" variant="confirm">
          {{ s__('Runners|New instance runner') }}
@@ -189,8 +189,9 @@ export default {
          :type="$options.INSTANCE_TYPE"
          placement="right"
        />
      </div>
    </header>
      </template>
    </runner-list-header>

    <div
      class="gl-display-flex gl-align-items-center gl-flex-direction-column-reverse gl-md-flex-direction-row gl-mt-3 gl-md-mt-0"
    >
@@ -0,0 +1,17 @@
<script>
export default {
  name: 'RunnerListHeader',
};
</script>
<template>
  <header
    class="gl-my-5 gl-display-flex gl-align-items-flex-start gl-flex-wrap gl-justify-content-space-between"
  >
    <h1 v-if="$scopedSlots.title" class="gl-my-0 gl-font-size-h1 header-title">
      <slot name="title"></slot>
    </h1>
    <div v-if="$scopedSlots.actions" class="gl-display-flex gl-gap-3">
      <slot name="actions"></slot>
    </div>
  </header>
</template>
@@ -14,6 +14,7 @@ import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import groupRunnersCountQuery from 'ee_else_ce/ci/runner/graphql/list/group_runners_count.query.graphql';
import groupRunnersQuery from 'ee_else_ce/ci/runner/graphql/list/group_runners.query.graphql';

import RunnerListHeader from '../components/runner_list_header.vue';
import RegistrationDropdown from '../components/registration/registration_dropdown.vue';
import RunnerFilteredSearchBar from '../components/runner_filtered_search_bar.vue';
import RunnerList from '../components/runner_list.vue';
@@ -44,6 +45,7 @@ export default {
  components: {
    GlButton,
    GlLink,
    RunnerListHeader,
    RegistrationDropdown,
    RunnerFilteredSearchBar,
    RunnerList,
@@ -209,11 +211,9 @@ export default {

<template>
  <div>
    <header class="gl-my-5 gl-display-flex gl-justify-content-space-between">
      <h2 class="gl-my-0 header-title">
        {{ s__('Runners|Runners') }}
      </h2>
      <div class="gl-display-flex gl-gap-3">
    <runner-list-header>
      <template #title>{{ s__('Runners|Runners') }}</template>
      <template #actions>
        <gl-button
          v-if="newRunnerPath"
          :href="newRunnerPath"
@@ -228,8 +228,9 @@ export default {
          :type="$options.GROUP_TYPE"
          placement="right"
        />
      </div>
    </header>
      </template>
    </runner-list-header>

    <div
      class="gl-display-flex gl-align-items-center gl-flex-direction-column-reverse gl-md-flex-direction-row gl-mt-3 gl-md-mt-0"
    >
@@ -10,7 +10,6 @@ class MergeRequestContextCommitDiffFile < ApplicationRecord
  belongs_to :merge_request_context_commit, inverse_of: :diff_files

  sha_attribute :sha
  alias_attribute :id, :sha

  # create MergeRequestContextCommitDiffFile by given diff file record(s)
  def self.bulk_insert(*args)
@@ -33,7 +33,6 @@ class MergeRequestDiffCommit < ApplicationRecord
  belongs_to :committer, class_name: 'MergeRequest::DiffCommitUser'

  sha_attribute :sha
  alias_attribute :id, :sha

  attribute :trailers, :ind_jsonb
  validates :trailers, json_schema: { filename: 'git_trailers' }
@@ -129,4 +128,8 @@ class MergeRequestDiffCommit < ApplicationRecord
  def committer_email
    committer&.email
  end

  def to_hash
    super.merge({ 'id' => sha })
  end
end
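With `alias_attribute :id, :sha` removed, the SHA now reaches serialized output through the explicit `#to_hash` override above. A minimal sketch of the effect, assuming a Rails console on this codebase with at least one persisted record (the lookup is illustrative, not part of the commit):

```ruby
# Hypothetical console check: the serialized hash carries the SHA under the
# 'id' key via the #to_hash override rather than the removed alias_attribute.
commit = MergeRequestDiffCommit.last     # assumes at least one record exists
commit.to_hash['id'] == commit.sha       # => true
```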
@@ -4,6 +4,7 @@ class SystemNoteMetadata < ApplicationRecord
  include Importable
  include IgnorableColumns

  ignore_column :id_convert_to_bigint, remove_with: '16.9', remove_after: '2024-01-13'
  ignore_column :note_id_convert_to_bigint, remove_with: '16.7', remove_after: '2023-11-16'

  # These notes's action text might contain a reference that is external.
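The `IgnorableColumns` concern builds on Rails' `ignored_columns`, so the newly ignored `id_convert_to_bigint` column stays in the database for the bigint conversion migrations further down while being dropped from the model's attribute set. A rough console sketch of the effect (an assumption about how it surfaces, not code from the commit):

```ruby
# Hypothetical Rails console check of the ignored column's effect. The column
# still exists in the schema until its scheduled removal (remove_with: '16.9'),
# which is what lets the bigint backfill run while the model ignores it.
SystemNoteMetadata.ignored_columns.include?('id_convert_to_bigint')  # => true
SystemNoteMetadata.new.attributes.key?('id_convert_to_bigint')       # => false
```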
@@ -0,0 +1,16 @@
# frozen_string_literal: true

module Ml
  class UpdateModelService
    def initialize(model, description)
      @model = model
      @description = description
    end

    def execute
      @model.update!(description: @description)

      @model
    end
  end
end
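A minimal usage sketch of the new service, assuming a Rails console and an existing `Ml::Model` record (the lookup and description text are illustrative, not taken from the commit):

```ruby
# Hypothetical usage: update a machine learning model's description in place.
model = Ml::Model.find_by(name: 'my-model')   # assumes such a record exists
updated = Ml::UpdateModelService.new(model, 'A better description').execute
updated.description                           # => "A better description"
```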
@@ -0,0 +1,21 @@
# frozen_string_literal: true

module VsCode
  module Settings
    class DeleteService
      def initialize(current_user:)
        @current_user = current_user
      end

      def execute
        VsCodeSetting.by_user(current_user).delete_all

        ServiceResponse.success
      end

      private

      attr_reader :current_user
    end
  end
end
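A minimal usage sketch of the new settings-deletion service, assuming a Rails console and an existing user (the lookup is illustrative, not taken from the commit):

```ruby
# Hypothetical usage: remove all persisted VS Code settings for one user.
user = User.find_by(username: 'example-user')   # assumes such a user exists
response = VsCode::Settings::DeleteService.new(current_user: user).execute
response.success?                               # => true
```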
@@ -0,0 +1,8 @@
---
name: claude_description_generation
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135706
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/430077
milestone: '16.6'
type: development
group: group::project management
default_enabled: false
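In GitLab, a `development` feature flag like this is normally checked at runtime with `Feature.enabled?`. A hedged sketch of such a check follows; the actor and call site are assumptions, not taken from this commit:

```ruby
# Hypothetical guard: the AI-backed description generation path only runs when
# the flag is enabled for the given project; disabled by default per the YAML.
if Feature.enabled?(:claude_description_generation, project)
  # generate the description with the new path
else
  # fall back to the existing behaviour
end
```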
@@ -0,0 +1,8 @@
---
name: reduce_duplicate_job_key_ttl
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135910
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/430345
milestone: '16.6'
type: development
group: group::scalability
default_enabled: false
@@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/426549
milestone: '16.5'
type: development
group: group::pipeline authoring
default_enabled: false
default_enabled: true
@@ -94,7 +94,7 @@ module WikiCloth
      data << "\n" if data.last(1) != "\n"
      data << "garbage"

      buffer = WikiBuffer.new("",options)
      buffer = WikiBuffer.new(+'',options)

      begin
        if self.options[:fast]
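The switch from `WikiBuffer.new("", options)` to `WikiBuffer.new(+'', options)` matters once string literals are frozen: the unary `+` returns a mutable copy that the buffer can append to. A small standalone sketch of the underlying Ruby behaviour (illustrative, not part of the patch):

```ruby
# frozen_string_literal: true

# With the magic comment above, bare string literals are frozen; the unary +
# operator returns an unfrozen (mutable) copy that supports in-place appends.
''.frozen?         # => true
(+'').frozen?      # => false

buffer = +''       # same idea as passing +'' into WikiBuffer.new
buffer << 'text'   # works; '' << 'text' would raise FrozenError here
```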
@@ -0,0 +1,18 @@
# frozen_string_literal: true

class InitializeConversionOfSystemNoteMetadataToBigint < Gitlab::Database::Migration[2.2]
  disable_ddl_transaction!

  TABLE = :system_note_metadata
  COLUMNS = %i[id]

  milestone '16.6'

  def up
    initialize_conversion_of_integer_to_bigint(TABLE, COLUMNS)
  end

  def down
    revert_initialize_conversion_of_integer_to_bigint(TABLE, COLUMNS)
  end
end
@@ -0,0 +1,18 @@
# frozen_string_literal: true

class BackfillSystemNoteMetadataIdForBigintConversion < Gitlab::Database::Migration[2.2]
  restrict_gitlab_migration gitlab_schema: :gitlab_main

  TABLE = :system_note_metadata
  COLUMNS = %i[id]

  milestone '16.6'

  def up
    backfill_conversion_of_integer_to_bigint(TABLE, COLUMNS, sub_batch_size: 100)
  end

  def down
    revert_backfill_conversion_of_integer_to_bigint(TABLE, COLUMNS)
  end
end
@@ -0,0 +1,34 @@
# frozen_string_literal: true

class DropRepositoriesColumnsFromGeoNodeStatusTable < Gitlab::Database::Migration[2.2]
  enable_lock_retries!
  milestone '16.6'

  def up
    [
      :repositories_synced_count,
      :repositories_failed_count,
      :repositories_verified_count,
      :repositories_verification_failed_count,
      :repositories_checksummed_count,
      :repositories_checksum_failed_count,
      :repositories_checksum_mismatch_count,
      :repositories_retrying_verification_count
    ].each do |column_name|
      remove_column :geo_node_statuses, column_name, if_exists: true
    end
  end

  def down
    change_table(:geo_node_statuses) do |t|
      t.integer :repositories_synced_count
      t.integer :repositories_failed_count
      t.integer :repositories_verified_count
      t.integer :repositories_verification_failed_count
      t.integer :repositories_checksummed_count
      t.integer :repositories_checksum_failed_count
      t.integer :repositories_checksum_mismatch_count
      t.integer :repositories_retrying_verification_count
    end
  end
end
@@ -0,0 +1 @@
399e9a19e9436dc077e9b107daf3397a6be2efe574981265758c082deb2c19ce
@@ -0,0 +1 @@
f3ce119c5ded9fae2f94168455379eb3a8d7d7bc1eff3e555a2a77011a6309fb
@@ -0,0 +1 @@
050d1a1a44af5f93902c6a715434ce8144bb6644a891a890d381ae85e6cda9d7
@@ -341,6 +341,15 @@ BEGIN
END;
$$;

CREATE FUNCTION trigger_eaec934fe6b2() RETURNS trigger
    LANGUAGE plpgsql
    AS $$
BEGIN
  NEW."id_convert_to_bigint" := NEW."id";
  RETURN NEW;
END;
$$;

CREATE FUNCTION unset_has_issues_on_vulnerability_reads() RETURNS trigger
    LANGUAGE plpgsql
    AS $$
@@ -16528,8 +16537,6 @@ CREATE TABLE geo_node_statuses (
    id integer NOT NULL,
    geo_node_id integer NOT NULL,
    db_replication_lag_seconds integer,
    repositories_synced_count integer,
    repositories_failed_count integer,
    lfs_objects_count integer,
    lfs_objects_synced_count integer,
    lfs_objects_failed_count integer,
@@ -16549,15 +16556,9 @@ CREATE TABLE geo_node_statuses (
    job_artifacts_failed_count integer,
    version character varying,
    revision character varying,
    repositories_verified_count integer,
    repositories_verification_failed_count integer,
    lfs_objects_synced_missing_on_primary_count integer,
    job_artifacts_synced_missing_on_primary_count integer,
    repositories_checksummed_count integer,
    repositories_checksum_failed_count integer,
    repositories_checksum_mismatch_count integer,
    storage_configuration_digest bytea,
    repositories_retrying_verification_count integer,
    projects_count integer,
    container_repositories_count integer,
    container_repositories_synced_count integer,
@@ -23747,7 +23748,8 @@ CREATE TABLE system_note_metadata (
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL,
    description_version_id bigint,
    note_id bigint NOT NULL
    note_id bigint NOT NULL,
    id_convert_to_bigint bigint DEFAULT 0 NOT NULL
);

CREATE SEQUENCE system_note_metadata_id_seq
@@ -36831,6 +36833,8 @@ CREATE TRIGGER trigger_b2d852e1e2cb BEFORE INSERT OR UPDATE ON ci_pipelines FOR

CREATE TRIGGER trigger_delete_project_namespace_on_project_delete AFTER DELETE ON projects FOR EACH ROW WHEN ((old.project_namespace_id IS NOT NULL)) EXECUTE FUNCTION delete_associated_project_namespace();

CREATE TRIGGER trigger_eaec934fe6b2 BEFORE INSERT OR UPDATE ON system_note_metadata FOR EACH ROW EXECUTE FUNCTION trigger_eaec934fe6b2();

CREATE TRIGGER trigger_has_external_issue_tracker_on_delete AFTER DELETE ON integrations FOR EACH ROW WHEN ((((old.category)::text = 'issue_tracker'::text) AND (old.active = true) AND (old.project_id IS NOT NULL))) EXECUTE FUNCTION set_has_external_issue_tracker();

CREATE TRIGGER trigger_has_external_issue_tracker_on_insert AFTER INSERT ON integrations FOR EACH ROW WHEN ((((new.category)::text = 'issue_tracker'::text) AND (new.active = true) AND (new.project_id IS NOT NULL))) EXECUTE FUNCTION set_has_external_issue_tracker();
@@ -6,18 +6,21 @@ info: To determine the technical writer assigned to the Stage/Group associated w

# Reference architecture: up to 10,000 users **(PREMIUM SELF)**

This page describes GitLab reference architecture for up to 10,000 users. For a
full list of reference architectures, see
This page describes the GitLab reference architecture designed for the load of up to 10,000 users
with notable headroom.

For a full list of reference architectures, see
[Available reference architectures](index.md#available-reference-architectures).

> - **Supported users (approximate):** 10,000
NOTE:
Before deploying this architecture it's recommended to read through the [main documentation](index.md) first,
specifically the [Before you start](index.md#before-you-start) and [Deciding which architecture to use](index.md#deciding-which-architecture-to-use) sections.

> - **Target load:** API: 200 RPS, Web: 20 RPS, Git (Pull): 20 RPS, Git (Push): 4 RPS
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Estimated Costs:** [See cost table](index.md#cost-to-run)
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Validation and test results:** The Quality Engineering team does [regular smoke and performance tests](index.md#validation-and-test-results) to ensure the reference architectures remain compliant
> - **Test requests per second (RPS) rates:** API: 200 RPS, Web: 20 RPS, Git (Pull): 20 RPS, Git (Push): 4 RPS
> - **[Latest Results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/10k)**
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](index.md#deciding-which-architecture-to-use).
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](index.md#deciding-which-architecture-to-use)

| Service | Nodes | Configuration | GCP | AWS |
|------------------------------------------|-------|-------------------------|------------------|----------------|
@@ -144,6 +147,27 @@ monitor .[#7FFFD4,norank]u--> elb

Before starting, see the [requirements](index.md#requirements) for reference architectures.

## Testing methodology

The 10k architecture is designed to cover a large majority of workflows and is regularly
[smoke and performance tested](index.md#validation-and-test-results) by the Quality Engineering team
against the following endpoint throughput targets:

- API: 200 RPS
- Web: 20 RPS
- Git (Pull): 20 RPS
- Git (Push): 4 RPS

The above targets were selected based on real customer data of total environmental loads corresponding to the user count,
including CI and other workloads along with additional substantial headroom added.

If you have metrics to suggest that you regularly have higher throughput against the above endpoint targets, [large monorepos](index.md#large-monorepos),
or notable [additional workloads](index.md#additional-workloads), these can notably impact the performance of the environment and [further adjustments may be required](index.md#scaling-an-environment).
If this applies to you, we strongly recommend referring to the linked documentation as well as reaching out to your [Customer Success Manager](https://handbook.gitlab.com/job-families/sales/customer-success-management/) or our [Support team](https://about.gitlab.com/support/) for further guidance.

Testing is done regularly via our [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance) and its dataset, which is available for anyone to use.
The results of this testing are [available publicly on the GPT wiki](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest). For more information on our testing strategy, [refer to this section of the documentation](index.md#validation-and-test-results).

## Setup components

To set up GitLab and its components to accommodate up to 10,000 users:
@@ -6,24 +6,18 @@ info: To determine the technical writer assigned to the Stage/Group associated w

# Reference architecture: up to 1,000 users **(FREE SELF)**

This page describes GitLab reference architecture for up to 1,000 users. For a
full list of reference architectures, see
This page describes the GitLab reference architecture designed for the load of up to 1,000 users
with notable headroom (non-HA standalone).

For a full list of reference architectures, see
[Available reference architectures](index.md#available-reference-architectures).

If you are serving up to 1,000 users, and you don't have strict availability
requirements, a [standalone](index.md#standalone-non-ha) single-node solution with
frequent backups is appropriate for
many organizations.

> - **Supported users (approximate):** 1,000
> - **Target Load:** API: 20 RPS, Web: 2 RPS, Git (Pull): 2 RPS, Git (Push): 1 RPS
> - **High Availability:** No. For a highly-available environment, you can
>   follow a modified [3K reference architecture](3k_users.md#supported-modifications-for-lower-user-counts-ha).
> - **Estimated Costs:** [See cost table](index.md#cost-to-run)
> - **Cloud Native Hybrid:** No. For a cloud native hybrid environment, you
>   can follow a [modified hybrid reference architecture](#cloud-native-hybrid-reference-architecture-with-helm-charts).
> - **Validation and test results:** The Quality Engineering team does [regular smoke and performance tests](index.md#validation-and-test-results) to ensure the reference architectures remain compliant
> - **Test requests per second (RPS) rates:** API: 20 RPS, Web: 2 RPS, Git (Pull): 2 RPS, Git (Push): 1 RPS
> - **[Latest Results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/1k)**
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](index.md#deciding-which-architecture-to-use).

| Users | Configuration | GCP | AWS | Azure |
@@ -73,6 +67,27 @@ WARNING:
**However, if you have [large monorepos](index.md#large-monorepos) (larger than several gigabytes) or [additional workloads](index.md#additional-workloads), these can *significantly* impact the performance of the environment and further adjustments may be required.**
If this applies to you, we strongly recommend referring to the linked documentation as well as reaching out to your [Customer Success Manager](https://handbook.gitlab.com/job-families/sales/customer-success-management/) or our [Support team](https://about.gitlab.com/support/) for further guidance.

## Testing methodology

The 1k architecture is designed to cover a large majority of workflows and is regularly
[smoke and performance tested](index.md#validation-and-test-results) by the Quality Engineering team
against the following endpoint throughput targets:

- API: 20 RPS
- Web: 2 RPS
- Git (Pull): 2 RPS
- Git (Push): 1 RPS

The above targets were selected based on real customer data of total environmental loads corresponding to the user count,
including CI and other workloads along with additional substantial headroom added.

If you have metrics to suggest that you regularly have higher throughput against the above endpoint targets, [large monorepos](index.md#large-monorepos),
or notable [additional workloads](index.md#additional-workloads), these can notably impact the performance of the environment and [further adjustments may be required](index.md#scaling-an-environment).
If this applies to you, we strongly recommend referring to the linked documentation as well as reaching out to your [Customer Success Manager](https://handbook.gitlab.com/job-families/sales/customer-success-management/) or our [Support team](https://about.gitlab.com/support/) for further guidance.

Testing is done regularly via our [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance) and its dataset, which is available for anyone to use.
The results of this testing are [available publicly on the GPT wiki](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest). For more information on our testing strategy, [refer to this section of the documentation](index.md#validation-and-test-results).

## Setup instructions

To install GitLab for this default reference architecture, use the standard
@@ -6,18 +6,21 @@ info: To determine the technical writer assigned to the Stage/Group associated w

# Reference architecture: up to 25,000 users **(PREMIUM SELF)**

This page describes GitLab reference architecture for up to 25,000 users. For a
full list of reference architectures, see
This page describes the GitLab reference architecture designed for the load of up to 25,000 users
with notable headroom.

For a full list of reference architectures, see
[Available reference architectures](index.md#available-reference-architectures).

> - **Supported users (approximate):** 25,000
NOTE:
Before deploying this architecture it's recommended to read through the [main documentation](index.md) first,
specifically the [Before you start](index.md#before-you-start) and [Deciding which architecture to use](index.md#deciding-which-architecture-to-use) sections.

> - **Target load:** API: 500 RPS, Web: 50 RPS, Git (Pull): 50 RPS, Git (Push): 10 RPS
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Estimated Costs:** [See cost table](index.md#cost-to-run)
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Validation and test results:** The Quality Engineering team does [regular smoke and performance tests](index.md#validation-and-test-results) to ensure the reference architectures remain compliant
> - **Test requests per second (RPS) rates:** API: 500 RPS, Web: 50 RPS, Git (Pull): 50 RPS, Git (Push): 10 RPS
> - **[Latest Results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/25k)**
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](index.md#deciding-which-architecture-to-use).
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](index.md#deciding-which-architecture-to-use)

| Service | Nodes | Configuration | GCP | AWS |
|------------------------------------------|-------|-------------------------|------------------|--------------|
@@ -144,6 +147,27 @@ monitor .[#7FFFD4,norank]u--> elb

Before starting, see the [requirements](index.md#requirements) for reference architectures.

## Testing methodology

The 25k architecture is designed to cover a large majority of workflows and is regularly
[smoke and performance tested](index.md#validation-and-test-results) by the Quality Engineering team
against the following endpoint throughput targets:

- API: 500 RPS
- Web: 50 RPS
- Git (Pull): 50 RPS
- Git (Push): 10 RPS

The above targets were selected based on real customer data of total environmental loads corresponding to the user count,
including CI and other workloads along with additional substantial headroom added.

If you have metrics to suggest that you regularly have higher throughput against the above endpoint targets, [large monorepos](index.md#large-monorepos),
or notable [additional workloads](index.md#additional-workloads), these can notably impact the performance of the environment and [further adjustments may be required](index.md#scaling-an-environment).
If this applies to you, we strongly recommend referring to the linked documentation as well as reaching out to your [Customer Success Manager](https://handbook.gitlab.com/job-families/sales/customer-success-management/) or our [Support team](https://about.gitlab.com/support/) for further guidance.

Testing is done regularly via our [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance) and its dataset, which is available for anyone to use.
The results of this testing are [available publicly on the GPT wiki](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest). For more information on our testing strategy, [refer to this section of the documentation](index.md#validation-and-test-results).

## Setup components

To set up GitLab and its components to accommodate up to 25,000 users:
@@ -6,18 +6,17 @@ info: To determine the technical writer assigned to the Stage/Group associated w

# Reference architecture: up to 2,000 users **(FREE SELF)**

This page describes GitLab reference architecture for up to 2,000 users.
This page describes the GitLab reference architecture designed for the load of up to 2,000 users
with notable headroom (non-HA).

For a full list of reference architectures, see
[Available reference architectures](index.md#available-reference-architectures).

> - **Supported users (approximate):** 2,000
> - **Target Load:** API: 40 RPS, Web: 4 RPS, Git (Pull): 4 RPS, Git (Push): 1 RPS
> - **High Availability:** No. For a highly-available environment, you can
>   follow a modified [3K reference architecture](3k_users.md#supported-modifications-for-lower-user-counts-ha).
> - **Estimated Costs:** [See cost table](index.md#cost-to-run)
> - **Cloud Native Hybrid:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Validation and test results:** The Quality Engineering team does [regular smoke and performance tests](index.md#validation-and-test-results) to ensure the reference architectures remain compliant
> - **Test requests per second (RPS) rates:** API: 40 RPS, Web: 4 RPS, Git (Pull): 4 RPS, Git (Push): 1 RPS
> - **[Latest Results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/2k)**
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](index.md#deciding-which-architecture-to-use).

| Service | Nodes | Configuration | GCP | AWS | Azure |
@@ -81,6 +80,27 @@ monitor .[#7FFFD4,norank]u--> elb

Before starting, see the [requirements](index.md#requirements) for reference architectures.

## Testing methodology

The 2k architecture is designed to cover a large majority of workflows and is regularly
[smoke and performance tested](index.md#validation-and-test-results) by the Quality Engineering team
against the following endpoint throughput targets:

- API: 40 RPS
- Web: 4 RPS
- Git (Pull): 4 RPS
- Git (Push): 1 RPS

The above targets were selected based on real customer data of total environmental loads corresponding to the user count,
including CI and other workloads along with additional substantial headroom added.

If you have metrics to suggest that you regularly have higher throughput against the above endpoint targets, [large monorepos](index.md#large-monorepos),
or notable [additional workloads](index.md#additional-workloads), these can notably impact the performance of the environment and [further adjustments may be required](index.md#scaling-an-environment).
If this applies to you, we strongly recommend referring to the linked documentation as well as reaching out to your [Customer Success Manager](https://handbook.gitlab.com/job-families/sales/customer-success-management/) or our [Support team](https://about.gitlab.com/support/) for further guidance.

Testing is done regularly via our [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance) and its dataset, which is available for anyone to use.
The results of this testing are [available publicly on the GPT wiki](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest). For more information on our testing strategy, [refer to this section of the documentation](index.md#validation-and-test-results).

## Setup components

To set up GitLab and its components to accommodate up to 2,000 users:
@@ -6,27 +6,20 @@ info: To determine the technical writer assigned to the Stage/Group associated w

# Reference architecture: up to 3,000 users **(PREMIUM SELF)**

This GitLab reference architecture can help you deploy GitLab to up to 3,000
users, and then maintain uptime and access for those users. You can also use
this architecture to provide improved GitLab uptime and availability for fewer
than 3,000 users. For fewer users, reduce the stated node sizes as needed.
This page describes the GitLab reference architecture designed for the load of up to 3,000 users
with notable headroom.

If maintaining a high level of uptime for your GitLab environment isn't a
requirement, or if you don't have the expertise to maintain this sort of
environment, we recommend using the non-HA [2,000-user reference architecture](2k_users.md)
for your GitLab installation. If HA is still a requirement, there's several supported
tweaks you can make to this architecture to reduce complexity as detailed here.
This architecture is the smallest one available with HA built in. If you require HA but
have a lower user count or total load, the [Supported Modifications for lower user counts](#supported-modifications-for-lower-user-counts-ha)
section details how to reduce this architecture's size while maintaining HA.

For a full list of reference architectures, see
[Available reference architectures](index.md#available-reference-architectures).

> - **Supported users (approximate):** 3,000
> - **Target Load:** API: 60 RPS, Web: 6 RPS, Git (Pull): 6 RPS, Git (Push): 1 RPS
> - **High Availability:** Yes, although [Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution
> - **Estimated Costs:** [See cost table](index.md#cost-to-run)
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Validation and test results:** The Quality Engineering team does [regular smoke and performance tests](index.md#validation-and-test-results) to ensure the reference architectures remain compliant
> - **Test requests per second (RPS) rates:** API: 60 RPS, Web: 6 RPS, Git (Pull): 6 RPS, Git (Push): 1 RPS
> - **[Latest Results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/3k)**
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](index.md#deciding-which-architecture-to-use).

| Service | Nodes | Configuration | GCP | AWS |
@@ -149,6 +142,27 @@ monitor .[#7FFFD4,norank]u--> elb

Before starting, see the [requirements](index.md#requirements) for reference architectures.

## Testing methodology

The 3k architecture is designed to cover a large majority of workflows and is regularly
[smoke and performance tested](index.md#validation-and-test-results) by the Quality Engineering team
against the following endpoint throughput targets:

- API: 60 RPS
- Web: 6 RPS
- Git (Pull): 6 RPS
- Git (Push): 1 RPS

The above targets were selected based on real customer data of total environmental loads corresponding to the user count,
including CI and other workloads along with additional substantial headroom added.

If you have metrics to suggest that you regularly have higher throughput against the above endpoint targets, [large monorepos](index.md#large-monorepos),
or notable [additional workloads](index.md#additional-workloads), these can notably impact the performance of the environment and [further adjustments may be required](index.md#scaling-an-environment).
If this applies to you, we strongly recommend referring to the linked documentation as well as reaching out to your [Customer Success Manager](https://handbook.gitlab.com/job-families/sales/customer-success-management/) or our [Support team](https://about.gitlab.com/support/) for further guidance.

Testing is done regularly via our [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance) and its dataset, which is available for anyone to use.
The results of this testing are [available publicly on the GPT wiki](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest). For more information on our testing strategy, [refer to this section of the documentation](index.md#validation-and-test-results).

## Setup components

To set up GitLab and its components to accommodate up to 3,000 users:
@@ -6,18 +6,21 @@ info: To determine the technical writer assigned to the Stage/Group associated w

# Reference architecture: up to 50,000 users **(PREMIUM SELF)**

This page describes GitLab reference architecture for up to 50,000 users. For a
full list of reference architectures, see
This page describes the GitLab reference architecture designed for the load of up to 50,000 users
with notable headroom.

For a full list of reference architectures, see
[Available reference architectures](index.md#available-reference-architectures).

> - **Supported users (approximate):** 50,000
NOTE:
Before deploying this architecture it's recommended to read through the [main documentation](index.md) first,
specifically the [Before you start](index.md#before-you-start) and [Deciding which architecture to use](index.md#deciding-which-architecture-to-use) sections.

> - **Target load:** API: 1000 RPS, Web: 100 RPS, Git (Pull): 100 RPS, Git (Push): 20 RPS
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Estimated Costs:** [See cost table](index.md#cost-to-run)
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Validation and test results:** The Quality Engineering team does [regular smoke and performance tests](index.md#validation-and-test-results) to ensure the reference architectures remain compliant
> - **Test requests per second (RPS) rates:** API: 1000 RPS, Web: 100 RPS, Git (Pull): 100 RPS, Git (Push): 20 RPS
> - **[Latest Results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/50k)**
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](index.md#deciding-which-architecture-to-use).
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](index.md#deciding-which-architecture-to-use)

| Service | Nodes | Configuration | GCP | AWS |
|------------------------------------------|-------|-------------------------|------------------|---------------|
@@ -144,6 +147,27 @@ monitor .[#7FFFD4,norank]u--> elb

Before starting, see the [requirements](index.md#requirements) for reference architectures.

## Testing methodology

The 50k architecture is designed to cover a large majority of workflows and is regularly
[smoke and performance tested](index.md#validation-and-test-results) by the Quality Engineering team
against the following endpoint throughput targets:

- API: 1000 RPS
- Web: 100 RPS
- Git (Pull): 100 RPS
- Git (Push): 20 RPS

The above targets were selected based on real customer data of total environmental loads corresponding to the user count,
including CI and other workloads along with additional substantial headroom added.

If you have metrics to suggest that you regularly have higher throughput against the above endpoint targets, [large monorepos](index.md#large-monorepos),
or notable [additional workloads](index.md#additional-workloads), these can notably impact the performance of the environment and [further adjustments may be required](index.md#scaling-an-environment).
If this applies to you, we strongly recommend referring to the linked documentation as well as reaching out to your [Customer Success Manager](https://handbook.gitlab.com/job-families/sales/customer-success-management/) or our [Support team](https://about.gitlab.com/support/) for further guidance.

Testing is done regularly via our [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance) and its dataset, which is available for anyone to use.
The results of this testing are [available publicly on the GPT wiki](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest). For more information on our testing strategy, [refer to this section of the documentation](index.md#validation-and-test-results).

## Setup components

To set up GitLab and its components to accommodate up to 50,000 users:
@@ -6,25 +6,21 @@ info: To determine the technical writer assigned to the Stage/Group associated w

# Reference architecture: up to 5,000 users **(PREMIUM SELF)**

This page describes GitLab reference architecture for up to 5,000 users. For a
full list of reference architectures, see
This page describes the GitLab reference architecture designed for the load of up to 5,000 users
with notable headroom.

For a full list of reference architectures, see
[Available reference architectures](index.md#available-reference-architectures).

NOTE:
This reference architecture is designed to help your organization achieve a
highly-available GitLab deployment. If you do not have the expertise or need to
maintain a highly-available environment, you can have a simpler and less
costly-to-operate environment by using the
[2,000-user reference architecture](2k_users.md).
Before deploying this architecture it's recommended to read through the [main documentation](index.md) first,
specifically the [Before you start](index.md#before-you-start) and [Deciding which architecture to use](index.md#deciding-which-architecture-to-use) sections.

> - **Supported users (approximate):** 5,000
> - **Target load:** API: 100 RPS, Web: 10 RPS, Git (Pull): 10 RPS, Git (Push): 2 RPS
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Estimated Costs:** [See cost table](index.md#cost-to-run)
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Validation and test results:** The Quality Engineering team does [regular smoke and performance tests](index.md#validation-and-test-results) to ensure the reference architectures remain compliant
> - **Test requests per second (RPS) rates:** API: 100 RPS, Web: 10 RPS, Git (Pull): 10 RPS, Git (Push): 2 RPS
> - **[Latest Results](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/5k)**
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](index.md#deciding-which-architecture-to-use).
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](index.md#deciding-which-architecture-to-use)

| Service | Nodes | Configuration | GCP | AWS |
|-------------------------------------------|-------|-------------------------|-----------------|--------------|
@@ -146,6 +142,27 @@ monitor .[#7FFFD4,norank]u--> elb

Before starting, see the [requirements](index.md#requirements) for reference architectures.

## Testing methodology

The 5k architecture is designed to cover a large majority of workflows and is regularly
[smoke and performance tested](index.md#validation-and-test-results) by the Quality Engineering team
against the following endpoint throughput targets:

- API: 100 RPS
- Web: 10 RPS
- Git (Pull): 10 RPS
- Git (Push): 2 RPS

The above targets were selected based on real customer data of total environmental loads corresponding to the user count,
including CI and other workloads along with additional substantial headroom added.

If you have metrics to suggest that you regularly have higher throughput against the above endpoint targets, [large monorepos](index.md#large-monorepos),
or notable [additional workloads](index.md#additional-workloads), these can notably impact the performance of the environment and [further adjustments may be required](index.md#scaling-an-environment).
If this applies to you, we strongly recommend referring to the linked documentation as well as reaching out to your [Customer Success Manager](https://handbook.gitlab.com/job-families/sales/customer-success-management/) or our [Support team](https://about.gitlab.com/support/) for further guidance.

Testing is done regularly via our [GitLab Performance Tool (GPT)](https://gitlab.com/gitlab-org/quality/performance) and its dataset, which is available for anyone to use.
The results of this testing are [available publicly on the GPT wiki](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest). For more information on our testing strategy, [refer to this section of the documentation](index.md#validation-and-test-results).

## Setup components

To set up GitLab and its components to accommodate up to 5,000 users:
@@ -12,36 +12,37 @@ GitLab Quality Engineering and Support teams to provide recommended deployments

## Available reference architectures

Depending on your workflow, the following recommended reference architectures
may need to be adapted accordingly. Your workload is influenced by factors
including how active your users are, how much automation you use, mirroring,
and repository/change size. Additionally, the displayed memory values are
provided by [GCP machine types](https://cloud.google.com/compute/docs/machine-resource).
For different cloud vendors, attempt to select options that best match the
provided architecture.
The following Reference Architectures are available as recommended starting points for your environment.

The architectures are named in terms of user count, which in this case means the architecture is designed against
the _total_ load that comes with such a user count, based on real data, along with substantial headroom added to cover most scenarios such as CI or other automated workloads.

However, it should be noted that in some cases, known heavy scenarios such as [large monorepos](#large-monorepos) or notable [additional workloads](#additional-workloads) may require adjustments to be made.

For each Reference Architecture, the details of what it has been tested against can be found in the "Testing Methodology" section of each page.

### GitLab package (Omnibus)

The following reference architectures, where the GitLab package is used, are available:
Below is a list of Linux package-based architectures:

- [Up to 1,000 users](1k_users.md)
- [Up to 2,000 users](2k_users.md)
- [Up to 3,000 users](3k_users.md)
- [Up to 5,000 users](5k_users.md)
- [Up to 10,000 users](10k_users.md)
- [Up to 25,000 users](25k_users.md)
- [Up to 50,000 users](50k_users.md)
- [Up to 1,000 users](1k_users.md) <span style="color: darkgrey;">_API: 20 RPS, Web: 2 RPS, Git (Pull): 2 RPS, Git (Push): 1 RPS_</span>
- [Up to 2,000 users](2k_users.md) <span style="color: darkgrey;">_API: 40 RPS, Web: 4 RPS, Git (Pull): 4 RPS, Git (Push): 1 RPS_</span>
- [Up to 3,000 users](3k_users.md) <span style="color: darkgrey;">_API: 60 RPS, Web: 6 RPS, Git (Pull): 6 RPS, Git (Push): 1 RPS_</span>
- [Up to 5,000 users](5k_users.md) <span style="color: darkgrey;">_API: 100 RPS, Web: 10 RPS, Git (Pull): 10 RPS, Git (Push): 2 RPS_</span>
- [Up to 10,000 users](10k_users.md) <span style="color: darkgrey;">_API: 200 RPS, Web: 20 RPS, Git (Pull): 20 RPS, Git (Push): 4 RPS_</span>
- [Up to 25,000 users](25k_users.md) <span style="color: darkgrey;">_API: 500 RPS, Web: 50 RPS, Git (Pull): 50 RPS, Git (Push): 10 RPS_</span>
- [Up to 50,000 users](50k_users.md) <span style="color: darkgrey;">_API: 1000 RPS, Web: 100 RPS, Git (Pull): 100 RPS, Git (Push): 20 RPS_</span>

### Cloud native hybrid

The following Cloud Native Hybrid reference architectures, where select recommended components can be run in Kubernetes, are available:
Below is a list of Cloud Native Hybrid reference architectures, where select recommended components can be run in Kubernetes:

- [Up to 2,000 users](2k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
- [Up to 3,000 users](3k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
- [Up to 5,000 users](5k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
- [Up to 10,000 users](10k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
- [Up to 25,000 users](25k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
- [Up to 50,000 users](50k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
- [Up to 2,000 users](2k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative) <span style="color: darkgrey;">_API: 40 RPS, Web: 4 RPS, Git (Pull): 4 RPS, Git (Push): 1 RPS_</span>
- [Up to 3,000 users](3k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative) <span style="color: darkgrey;">_API: 60 RPS, Web: 6 RPS, Git (Pull): 6 RPS, Git (Push): 1 RPS_</span>
- [Up to 5,000 users](5k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative) <span style="color: darkgrey;">_API: 100 RPS, Web: 10 RPS, Git (Pull): 10 RPS, Git (Push): 2 RPS_</span>
- [Up to 10,000 users](10k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative) <span style="color: darkgrey;">_API: 200 RPS, Web: 20 RPS, Git (Pull): 20 RPS, Git (Push): 4 RPS_</span>
- [Up to 25,000 users](25k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative) <span style="color: darkgrey;">_API: 500 RPS, Web: 50 RPS, Git (Pull): 50 RPS, Git (Push): 10 RPS_</span>
- [Up to 50,000 users](50k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative) <span style="color: darkgrey;">_API: 1000 RPS, Web: 100 RPS, Git (Pull): 100 RPS, Git (Push): 20 RPS_</span>

## Before you start
@@ -63,6 +64,19 @@ As a general guide, **the more performant and/or resilient you want your environ

This section explains the designs you can choose from. It begins with the least complexity, goes to the most, and ends with a decision tree.

### Expected Load (RPS)

The first thing to check is the load your environment is expected to serve.

The Reference Architectures have been designed with substantial headroom by default, but it's recommended to also check the
load each architecture has been tested against under the "Testing Methodology" section found on each page,
comparing those values with the load you are expecting on your existing GitLab environment to help select the right Reference Architecture
size.

Load is given in terms of Requests per Second (RPS) for each endpoint type (API, Web, Git). This information on your existing infrastructure
can typically be surfaced by most reputable monitoring solutions or in other ways such as load balancer metrics. For example, on existing GitLab environments,
[Prometheus metrics](../monitoring/prometheus/gitlab_metrics.md) such as `gitlab_transaction_duration_seconds` can be used to see this data.

### Standalone (non-HA)

For environments serving 2,000 or fewer users, we generally recommend a standalone approach by deploying a non-highly available single or multi-node environment. With this approach, you can employ strategies such as [automated backups](../../administration/backup_restore/backup_gitlab.md#configuring-cron-to-make-daily-backups) for recovery to provide a good level of RPO / RTO while avoiding the complexities that come with HA.
@ -144,10 +158,11 @@ Below you can find the above guidance in the form of a decision tree. It's recom
|
|||
```mermaid
|
||||
%%{init: { 'theme': 'base' } }%%
|
||||
graph TD
|
||||
L1A(<b>What Reference Architecture should I use?</b>)
|
||||
L0A(<b>What Reference Architecture should I use?</b>)
|
||||
L1A(<b>What is your <a href=#expected-load-rps>expected load</a>?</b>)
|
||||
|
||||
L2A(3,000 users or more?)
|
||||
L2B(2,000 users or less?)
|
||||
L2A("Equivalent to <a href=3k_users.md#testing-methodology>3,000 users</a> or more?")
|
||||
L2B("Equivalent to <a href=2k_users.md#testing-methodology>2,000 users</a> or less?")
|
||||
|
||||
L3A("<a href=#do-you-need-high-availability-ha>Do you need HA?</a><br>(or Zero-Downtime Upgrades)")
|
||||
L3B[Do you have experience with<br/>and want additional resilience<br/>with select components in Kubernetes?]
|
||||
|
|
@ -157,6 +172,7 @@ graph TD
|
|||
L4C><b>Recommendation</b><br><br>Cloud Native Hybrid architecture<br>closest to user count]
|
||||
L4D>"<b>Recommendation</b><br><br>Standalone 1K or 2K<br/>architecture with Backups"]
|
||||
|
||||
L0A --> L1A
|
||||
L1A --> L2A
|
||||
L1A --> L2B
|
||||
L2A -->|Yes| L3B
|
||||
|
|
@ -664,15 +680,28 @@ You should upgrade a Reference Architecture in the same order as you created it.
|
|||
|
||||
### Scaling an environment
|
||||
|
||||
Scaling a GitLab environment is designed to be as seamless as possible.
|
||||
Scaling a GitLab environment is designed to be as flexible and seamless as possible.
|
||||
|
||||
In terms of the Reference Architectures, you would look to the next size and adjust accordingly.
|
||||
Most setups would only need vertical scaling, but there are some specific areas that can be adjusted depending on the setup:
|
||||
Depending on your circumstances, this can be done iteratively, or wholesale by moving to the next size of architecture.
|
||||
For example, if any of your GitLab Rails, Sidekiq, Gitaly, Redis or PostgreSQL nodes are consistently oversaturated, then increase their resources accordingly while leaving the rest of the environment as is.
|
||||
|
||||
If expecting a large increase in users, you may elect to scale up the whole environment to the next
|
||||
size of architecture.
|
||||
|
||||
If the overall design is being followed, you can scale the environment vertically as required.
|
||||
|
||||
If robust metrics are in place that show the environment is over-provisioned, you can apply the same process to
scale downwards. You should take an iterative approach when doing so to ensure there are no issues.
|
||||
|
||||
#### Scaling from a non-HA to an HA architecture
|
||||
|
||||
While in most cases only vertical scaling is required to increase an environment's resources, if you are moving to an HA environment,
there may be some additional steps required, as shown below:
|
||||
|
||||
- If you're scaling from a non-HA environment to an HA environment, various components are recommended to be deployed in their HA forms:
|
||||
- Redis to multi-node Redis w/ Redis Sentinel
|
||||
- Postgres to multi-node Postgres w/ Consul + PgBouncer
|
||||
- Gitaly to Gitaly Cluster w/ Praefect
|
||||
- [Redis to multi-node Redis w/ Redis Sentinel](../redis/replication_and_failover.md#switching-from-an-existing-single-machine-installation)
|
||||
- [Postgres to multi-node Postgres w/ Consul + PgBouncer](../postgresql/moving.md)
|
||||
- [Gitaly to Gitaly Cluster w/ Praefect](../gitaly/index.md#migrate-to-gitaly-cluster)
|
||||
- From 10,000 users and higher, it's recommended to split Redis into multiple HA servers because Redis is single-threaded.
|
||||
|
||||
Conversely, if you have robust metrics in place that show the environment is over-provisioned, you can apply the same process for
|
||||
|
|
|
|||
|
|
@ -69,6 +69,7 @@ In the following table, you can see:
|
|||
| [Service Level Agreement countdown timer](../../operations/incident_management/incidents.md#service-level-agreement-countdown-timer) | GitLab 16.6 and later |
|
||||
| [Lock project membership to group](../../user/group/access_and_permissions.md#prevent-members-from-being-added-to-projects-in-a-group) | GitLab 16.6 and later |
|
||||
| [Users and permissions report](../../administration/admin_area.md#user-permission-export) | GitLab 16.6 and later |
|
||||
| [Advanced search](../../user/search/advanced_search.md) | GitLab 16.6 and later |
|
||||
|
||||
### Enable registration features
|
||||
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
|
||||
# Install GitLab under a relative URL **(FREE SELF)**
|
||||
|
||||
While we recommend to install GitLab on its own (sub)domain, sometimes
|
||||
While you should install GitLab on its own (sub)domain, sometimes
|
||||
this is not possible due to a variety of reasons. In that case, GitLab can also
|
||||
be installed under a relative URL, for example `https://example.com/gitlab`.
|
||||
|
||||
|
|
|
|||
|
|
@ -45,16 +45,43 @@ module API
|
|||
detail 'https://www.mlflow.org/docs/1.28.0/rest-api.html#get-registeredmodel'
|
||||
end
|
||||
params do
|
||||
# The name param is actually required, however it is listed as optional here
|
||||
# so we can send a custom error response, as required by MLflow
|
||||
optional :name, type: String, default: '',
|
||||
desc: 'Registered model unique name identifier, in reference to the project'
|
||||
end
|
||||
get 'get', urgency: :low do
|
||||
resource_not_found! unless params[:name]
|
||||
|
||||
model = ::Ml::FindModelService.new(user_project, params[:name]).execute
|
||||
|
||||
resource_not_found! if model.nil?
|
||||
|
||||
present model, with: Entities::Ml::Mlflow::RegisteredModel, root: :registered_model
|
||||
end
|
||||
|
||||
desc 'Update a Registered Model by Name' do
|
||||
success Entities::Ml::Mlflow::RegisteredModel
|
||||
detail 'https://mlflow.org/docs/2.6.0/rest-api.html#update-registeredmodel'
|
||||
end
|
||||
params do
|
||||
# The name param is actually required, however it is listed as optional here
|
||||
# so we can send a custom error response, as required by MLflow
|
||||
optional :name, type: String,
|
||||
desc: 'Registered model unique name identifier, in reference to the project'
|
||||
optional :description, type: String,
|
||||
desc: 'Optional description for registered model.'
|
||||
end
|
||||
patch 'update', urgency: :low do
|
||||
resource_not_found! unless params[:name]
|
||||
|
||||
model = ::Ml::FindModelService.new(user_project, params[:name]).execute
|
||||
|
||||
resource_not_found! if model.nil?
|
||||
|
||||
present ::Ml::UpdateModelService.new(model, params[:description]).execute,
|
||||
with: Entities::Ml::Mlflow::RegisteredModel, root: :registered_model
|
||||
end
|
||||
end
|
||||
end
|
||||
end
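
A hedged usage sketch for the update endpoint above. The mount path, the JSON body shape, and the token header are
assumptions (GitLab's MLflow-compatible API is typically reached through the project-level `ml/mlflow` prefix), so treat
this as an illustration rather than the documented client call.

```ruby
# Sketch only: the base URL, project ID, auth header, and mount path are
# hypothetical examples — adjust them for your instance.
require 'net/http'
require 'json'
require 'uri'

gitlab     = ENV.fetch('GITLAB_URL', 'https://gitlab.example.com')
project_id = 42
uri = URI("#{gitlab}/api/v4/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/registered-models/update")

request = Net::HTTP::Patch.new(uri, 'Content-Type' => 'application/json',
                                    'Authorization' => "Bearer #{ENV['GITLAB_TOKEN']}")
request.body = { name: 'my-model', description: 'Updated description' }.to_json

response = Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
  http.request(request)
end

puts "#{response.code}: #{response.body}"
```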
|
||||
|
|
|
|||
|
|
@ -121,6 +121,19 @@ module API
|
|||
error!(response.message, 400)
|
||||
end
|
||||
end
|
||||
|
||||
desc 'Deletes all user vscode setting resources' do
|
||||
success [{ code: 200, message: 'OK' }]
|
||||
failure [
|
||||
{ code: 401, message: '401 Unauthorized' }
|
||||
]
|
||||
tags %w[vscode]
|
||||
end
|
||||
delete '/v1/collection' do
|
||||
DeleteService.new(current_user: current_user).execute
|
||||
|
||||
present "OK"
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -21,6 +21,7 @@ module Gitlab
|
|||
include Gitlab::Utils::StrongMemoize
|
||||
|
||||
DEFAULT_DUPLICATE_KEY_TTL = 6.hours
|
||||
SHORT_DUPLICATE_KEY_TTL = 10.minutes
|
||||
DEFAULT_STRATEGY = :until_executing
|
||||
STRATEGY_NONE = :none
|
||||
|
||||
|
|
@ -134,7 +135,7 @@ module Gitlab
|
|||
jid != existing_jid
|
||||
end
|
||||
|
||||
def set_deduplicated_flag!(expiry = duplicate_key_ttl)
|
||||
def set_deduplicated_flag!
|
||||
return unless reschedulable?
|
||||
|
||||
with_redis { |redis| redis.eval(DEDUPLICATED_SCRIPT, keys: [cookie_key]) }
|
||||
|
|
@ -173,7 +174,7 @@ module Gitlab
|
|||
end
|
||||
|
||||
def duplicate_key_ttl
|
||||
options[:ttl] || DEFAULT_DUPLICATE_KEY_TTL
|
||||
options[:ttl] || default_duplicate_key_ttl
|
||||
end
|
||||
|
||||
private
|
||||
|
|
@ -182,6 +183,12 @@ module Gitlab
|
|||
attr_reader :queue_name, :job
|
||||
attr_writer :existing_jid
|
||||
|
||||
def default_duplicate_key_ttl
|
||||
return SHORT_DUPLICATE_KEY_TTL if Feature.enabled?(:reduce_duplicate_job_key_ttl)
|
||||
|
||||
DEFAULT_DUPLICATE_KEY_TTL
|
||||
end
|
||||
|
||||
def worker_klass
|
||||
@worker_klass ||= worker_class_name.to_s.safe_constantize
|
||||
end
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ module Gitlab
|
|||
|
||||
if duplicate_job.idempotent?
|
||||
duplicate_job.update_latest_wal_location!
|
||||
duplicate_job.set_deduplicated_flag!(expiry)
|
||||
duplicate_job.set_deduplicated_flag!
|
||||
|
||||
Gitlab::SidekiqLogging::DeduplicationLogger.instance.deduplicated_log(
|
||||
job, strategy_name, duplicate_job.options)
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ module VsCode
|
|||
}
|
||||
]
|
||||
}.freeze
|
||||
SETTINGS_TYPES = %w[settings extensions globalState machines keybindings snippets tasks].freeze
|
||||
SETTINGS_TYPES = %w[settings extensions globalState machines keybindings snippets tasks profiles].freeze
|
||||
DEFAULT_SESSION = "1"
|
||||
NO_CONTENT_ETAG = "0"
|
||||
end
|
||||
|
|
|
|||
|
|
@ -41701,6 +41701,9 @@ msgstr ""
|
|||
msgid "Runners|View metrics"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|View runners list"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|Wait time (secs)"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@ -255,7 +255,7 @@ module QA
|
|||
|
||||
Page::Main::Menu.validate_elements_present!
|
||||
|
||||
# validate_canary!
|
||||
validate_canary!
|
||||
end
|
||||
|
||||
def fill_in_credential(user)
|
||||
|
|
|
|||
|
|
@ -40,7 +40,9 @@ def metric_definitions_from_args
|
|||
end
|
||||
|
||||
def red(text)
|
||||
"\e[31m#{text}\e[0m"
|
||||
@pastel ||= Pastel.new
|
||||
|
||||
@pastel.red(text)
|
||||
end
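
For reference, a minimal sketch of what the switch to Pastel amounts to, assuming the `pastel` gem is available; when
colors are enabled it produces the same ANSI red wrapping as the previous hand-rolled escape string.

```ruby
# Sketch: Pastel builds the same ANSI escape sequence the old string literal did.
require 'pastel'

pastel = Pastel.new
pastel.red('metric missing') # => "\e[31mmetric missing\e[0m" (when colors are enabled)
```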
|
||||
|
||||
def snowplow_data
|
||||
|
|
@ -142,15 +144,13 @@ rescue Errno::ECONNREFUSED
|
|||
exit 1
|
||||
end
|
||||
|
||||
print "\e[?1049h" # Stores the original screen buffer
|
||||
print "\e[H" # Moves the cursor home
|
||||
begin
|
||||
loop do
|
||||
metrics_table = generate_metrics_table
|
||||
events_table = generate_snowplow_table
|
||||
|
||||
print "\e[H" # Moves the cursor home
|
||||
print "\e[2J" # Clears the screen buffer
|
||||
print TTY::Cursor.clear_screen
|
||||
print TTY::Cursor.move_to(0, 0)
|
||||
|
||||
puts "Updated at #{Time.current}"
|
||||
puts "Monitored events: #{ARGV.join(', ')}"
|
||||
|
|
@ -164,7 +164,4 @@ begin
|
|||
end
|
||||
rescue Interrupt
|
||||
# Quietly shut down
|
||||
ensure
|
||||
print "\e[?1049l" # Restores the original screen buffer
|
||||
print "\e[H" # Moves the cursor home
|
||||
end
|
||||
|
|
|
|||
|
|
@ -22,6 +22,22 @@ FactoryBot.define do
|
|||
data_store { :redis }
|
||||
end
|
||||
|
||||
trait :redis_trace_chunks_with_data do
|
||||
data_store { :redis_trace_chunks }
|
||||
|
||||
transient do
|
||||
initial_data { 'test data' }
|
||||
end
|
||||
|
||||
after(:create) do |build_trace_chunk, evaluator|
|
||||
Ci::BuildTraceChunks::RedisTraceChunks.new.set_data(build_trace_chunk, evaluator.initial_data)
|
||||
end
|
||||
end
|
||||
|
||||
trait :redis_trace_chunks_without_data do
|
||||
data_store { :redis_trace_chunks }
|
||||
end
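
A hedged usage sketch of the new trait in a spec, assuming `RedisTraceChunks` exposes the same `#data` reader the
shared examples exercise; the values are examples only.

```ruby
# Sketch: create a chunk whose data lives in the TraceChunks Redis store.
# `initial_data` is the transient attribute defined by the trait above.
chunk = create(:ci_build_trace_chunk, :redis_trace_chunks_with_data, initial_data: 'hello')

# Assumes the data store exposes a #data reader, as exercised by the
# 'CI build trace chunk redis' shared examples.
Ci::BuildTraceChunks::RedisTraceChunks.new.data(chunk) # => "hello"
```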
|
||||
|
||||
trait :database_with_data do
|
||||
data_store { :database }
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,51 @@
|
|||
{
|
||||
"type": "object",
|
||||
"required": [
|
||||
"registered_model"
|
||||
],
|
||||
"properties": {
|
||||
"model": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name",
|
||||
"description",
|
||||
"user_id"
|
||||
],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"user_id": {
|
||||
"type": "integer"
|
||||
},
|
||||
"creation_timestamp": {
|
||||
"type": "string"
|
||||
},
|
||||
"last_updated_timestamp": {
|
||||
"type": "string"
|
||||
},
|
||||
"tags": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"key",
|
||||
"value"
|
||||
],
|
||||
"properties": {
|
||||
"key": {
|
||||
"type": "string"
|
||||
},
|
||||
"value": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import RunnerListHeader from '~/ci/runner/components/runner_list_header.vue';
|
||||
|
||||
describe('RunnerListHeader', () => {
|
||||
let wrapper;
|
||||
const createWrapper = (options) => {
|
||||
wrapper = shallowMountExtended(RunnerListHeader, {
|
||||
...options,
|
||||
});
|
||||
};
|
||||
|
||||
it('shows title', () => {
|
||||
createWrapper({
|
||||
scopedSlots: {
|
||||
title: () => 'My title',
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.find('h1').text()).toBe('My title');
|
||||
});
|
||||
|
||||
it('shows actions', () => {
|
||||
createWrapper({
|
||||
scopedSlots: {
|
||||
actions: () => 'My actions',
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('My actions');
|
||||
});
|
||||
});
|
||||
|
|
@ -117,10 +117,7 @@ RSpec.describe Gitlab::Ci::Badge::Pipeline::Status do
|
|||
end
|
||||
|
||||
def create_pipeline(project, sha, branch)
|
||||
pipeline = create(:ci_empty_pipeline,
|
||||
project: project,
|
||||
sha: sha,
|
||||
ref: branch)
|
||||
pipeline = create(:ci_empty_pipeline, project: project, sha: sha, ref: branch)
|
||||
|
||||
create(:ci_build, pipeline: pipeline, stage: 'notify')
|
||||
end
|
||||
|
|
|
|||
|
|
@ -69,9 +69,11 @@ RSpec.describe Gitlab::Ci::Build::Artifacts::Metadata::Entry do
|
|||
it { is_expected.to all(be_an_instance_of(described_class)) }
|
||||
|
||||
it do
|
||||
is_expected.to contain_exactly entry('path/dir_1/file_1'),
|
||||
entry('path/dir_1/file_b'),
|
||||
entry('path/dir_1/subdir/')
|
||||
is_expected.to contain_exactly(
|
||||
entry('path/dir_1/file_1'),
|
||||
entry('path/dir_1/file_b'),
|
||||
entry('path/dir_1/subdir/')
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -82,8 +84,10 @@ RSpec.describe Gitlab::Ci::Build::Artifacts::Metadata::Entry do
|
|||
it { is_expected.to all(be_an_instance_of(described_class)) }
|
||||
|
||||
it do
|
||||
is_expected.to contain_exactly entry('path/dir_1/file_1'),
|
||||
entry('path/dir_1/file_b')
|
||||
is_expected.to contain_exactly(
|
||||
entry('path/dir_1/file_1'),
|
||||
entry('path/dir_1/file_b')
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -103,8 +107,10 @@ RSpec.describe Gitlab::Ci::Build::Artifacts::Metadata::Entry do
|
|||
it { is_expected.to all(be_an_instance_of(described_class)) }
|
||||
|
||||
it do
|
||||
is_expected.to contain_exactly entry('path/dir_1/subdir/'),
|
||||
entry('path/')
|
||||
is_expected.to contain_exactly(
|
||||
entry('path/dir_1/subdir/'),
|
||||
entry('path/')
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -4,8 +4,10 @@ require 'spec_helper'
|
|||
|
||||
RSpec.describe Gitlab::Ci::Build::Hook, feature_category: :pipeline_composition do
|
||||
let_it_be(:build1) do
|
||||
FactoryBot.build(:ci_build,
|
||||
options: { hooks: { pre_get_sources_script: ["echo 'hello pre_get_sources_script'"] } })
|
||||
build(
|
||||
:ci_build,
|
||||
options: { hooks: { pre_get_sources_script: ["echo 'hello pre_get_sources_script'"] } }
|
||||
)
|
||||
end
|
||||
|
||||
describe '.from_hooks' do
|
||||
|
|
|
|||
|
|
@ -8,11 +8,14 @@ RSpec.describe Gitlab::Ci::Build::Policy::Changes do
|
|||
describe '#satisfied_by?' do
|
||||
describe 'paths matching' do
|
||||
let(:pipeline) do
|
||||
build(:ci_empty_pipeline, project: project,
|
||||
ref: 'master',
|
||||
source: :push,
|
||||
sha: '1234abcd',
|
||||
before_sha: '0123aabb')
|
||||
build(
|
||||
:ci_empty_pipeline,
|
||||
project: project,
|
||||
ref: 'master',
|
||||
source: :push,
|
||||
sha: '1234abcd',
|
||||
before_sha: '0123aabb'
|
||||
)
|
||||
end
|
||||
|
||||
let(:ci_build) do
|
||||
|
|
@ -92,11 +95,14 @@ RSpec.describe Gitlab::Ci::Build::Policy::Changes do
|
|||
let_it_be(:project) { create(:project, :repository) }
|
||||
|
||||
let(:pipeline) do
|
||||
create(:ci_empty_pipeline, project: project,
|
||||
ref: 'master',
|
||||
source: :push,
|
||||
sha: '498214d',
|
||||
before_sha: '281d3a7')
|
||||
create(
|
||||
:ci_empty_pipeline,
|
||||
project: project,
|
||||
ref: 'master',
|
||||
source: :push,
|
||||
sha: '498214d',
|
||||
before_sha: '281d3a7'
|
||||
)
|
||||
end
|
||||
|
||||
let(:build) do
|
||||
|
|
@ -122,12 +128,15 @@ RSpec.describe Gitlab::Ci::Build::Policy::Changes do
|
|||
let_it_be(:project) { create(:project, :repository) }
|
||||
|
||||
let(:pipeline) do
|
||||
create(:ci_empty_pipeline, project: project,
|
||||
ref: 'feature',
|
||||
source: source,
|
||||
sha: '0b4bc9a4',
|
||||
before_sha: Gitlab::Git::BLANK_SHA,
|
||||
merge_request: merge_request)
|
||||
create(
|
||||
:ci_empty_pipeline,
|
||||
project: project,
|
||||
ref: 'feature',
|
||||
source: source,
|
||||
sha: '0b4bc9a4',
|
||||
before_sha: Gitlab::Git::BLANK_SHA,
|
||||
merge_request: merge_request
|
||||
)
|
||||
end
|
||||
|
||||
let(:ci_build) do
|
||||
|
|
@ -140,11 +149,13 @@ RSpec.describe Gitlab::Ci::Build::Policy::Changes do
|
|||
let(:source) { :merge_request_event }
|
||||
|
||||
let(:merge_request) do
|
||||
create(:merge_request,
|
||||
source_project: project,
|
||||
source_branch: 'feature',
|
||||
target_project: project,
|
||||
target_branch: 'master')
|
||||
create(
|
||||
:merge_request,
|
||||
source_project: project,
|
||||
source_branch: 'feature',
|
||||
target_project: project,
|
||||
target_branch: 'master'
|
||||
)
|
||||
end
|
||||
|
||||
it 'is satisfied by changes in the merge request' do
|
||||
|
|
|
|||
|
|
@ -104,23 +104,32 @@ RSpec.describe Gitlab::Ci::Build::Policy::Variables do
|
|||
|
||||
context 'when using project ci variables in environment scope' do
|
||||
let(:ci_build) do
|
||||
build(:ci_build, pipeline: pipeline,
|
||||
project: project,
|
||||
ref: 'master',
|
||||
stage: 'review',
|
||||
environment: 'test/$CI_JOB_STAGE/1',
|
||||
ci_stage: build(:ci_stage, name: 'review', project: project, pipeline: pipeline))
|
||||
build(
|
||||
:ci_build,
|
||||
pipeline: pipeline,
|
||||
project: project,
|
||||
ref: 'master',
|
||||
stage: 'review',
|
||||
environment: 'test/$CI_JOB_STAGE/1',
|
||||
ci_stage: build(:ci_stage, name: 'review', project: project, pipeline: pipeline)
|
||||
)
|
||||
end
|
||||
|
||||
before do
|
||||
create(:ci_variable, project: project,
|
||||
key: 'SCOPED_VARIABLE',
|
||||
value: 'my-value-1')
|
||||
create(
|
||||
:ci_variable,
|
||||
project: project,
|
||||
key: 'SCOPED_VARIABLE',
|
||||
value: 'my-value-1'
|
||||
)
|
||||
|
||||
create(:ci_variable, project: project,
|
||||
key: 'SCOPED_VARIABLE',
|
||||
value: 'my-value-2',
|
||||
environment_scope: 'test/review/*')
|
||||
create(
|
||||
:ci_variable,
|
||||
project: project,
|
||||
key: 'SCOPED_VARIABLE',
|
||||
value: 'my-value-2',
|
||||
environment_scope: 'test/review/*'
|
||||
)
|
||||
end
|
||||
|
||||
it 'is satisfied by scoped variable match' do
|
||||
|
|
|
|||
|
|
@ -95,8 +95,11 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
|
|||
|
||||
context 'when using compare_to' do
|
||||
let_it_be(:project) do
|
||||
create(:project, :custom_repo,
|
||||
files: { 'README.md' => 'readme' })
|
||||
create(
|
||||
:project,
|
||||
:custom_repo,
|
||||
files: { 'README.md' => 'readme' }
|
||||
)
|
||||
end
|
||||
|
||||
let_it_be(:user) { project.owner }
|
||||
|
|
|
|||
|
|
@ -101,14 +101,16 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
|
|||
|
||||
describe '#value' do
|
||||
it 'returns a bridge job configuration' do
|
||||
expect(subject.value).to eq(name: :my_bridge,
|
||||
trigger: { project: 'some/project' },
|
||||
ignore: false,
|
||||
stage: 'test',
|
||||
only: { refs: %w[branches tags] },
|
||||
job_variables: {},
|
||||
root_variables_inheritance: true,
|
||||
scheduling_type: :stage)
|
||||
expect(subject.value).to eq(
|
||||
name: :my_bridge,
|
||||
trigger: { project: 'some/project' },
|
||||
ignore: false,
|
||||
stage: 'test',
|
||||
only: { refs: %w[branches tags] },
|
||||
job_variables: {},
|
||||
root_variables_inheritance: true,
|
||||
scheduling_type: :stage
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -124,15 +126,16 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
|
|||
|
||||
describe '#value' do
|
||||
it 'returns a bridge job configuration hash' do
|
||||
expect(subject.value).to eq(name: :my_bridge,
|
||||
trigger: { project: 'some/project',
|
||||
branch: 'feature' },
|
||||
ignore: false,
|
||||
stage: 'test',
|
||||
only: { refs: %w[branches tags] },
|
||||
job_variables: {},
|
||||
root_variables_inheritance: true,
|
||||
scheduling_type: :stage)
|
||||
expect(subject.value).to eq(
|
||||
name: :my_bridge,
|
||||
trigger: { project: 'some/project', branch: 'feature' },
|
||||
ignore: false,
|
||||
stage: 'test',
|
||||
only: { refs: %w[branches tags] },
|
||||
job_variables: {},
|
||||
root_variables_inheritance: true,
|
||||
scheduling_type: :stage
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -305,15 +308,16 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
|
|||
|
||||
describe '#value' do
|
||||
it 'returns a bridge job configuration hash' do
|
||||
expect(subject.value).to eq(name: :my_bridge,
|
||||
trigger: { project: 'some/project',
|
||||
forward: { pipeline_variables: true } },
|
||||
ignore: false,
|
||||
stage: 'test',
|
||||
only: { refs: %w[branches tags] },
|
||||
job_variables: {},
|
||||
root_variables_inheritance: true,
|
||||
scheduling_type: :stage)
|
||||
expect(subject.value).to eq(
|
||||
name: :my_bridge,
|
||||
trigger: { project: 'some/project', forward: { pipeline_variables: true } },
|
||||
ignore: false,
|
||||
stage: 'test',
|
||||
only: { refs: %w[branches tags] },
|
||||
job_variables: {},
|
||||
root_variables_inheritance: true,
|
||||
scheduling_type: :stage
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -790,19 +790,20 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo
|
|||
end
|
||||
|
||||
it 'returns correct value' do
|
||||
expect(entry.value)
|
||||
.to eq(name: :rspec,
|
||||
before_script: %w[ls pwd],
|
||||
script: %w[rspec],
|
||||
stage: 'test',
|
||||
ignore: false,
|
||||
after_script: %w[cleanup],
|
||||
hooks: { pre_get_sources_script: ['echo hello'] },
|
||||
only: { refs: %w[branches tags] },
|
||||
job_variables: {},
|
||||
root_variables_inheritance: true,
|
||||
scheduling_type: :stage,
|
||||
id_tokens: { TEST_ID_TOKEN: { aud: 'https://gitlab.com' } })
|
||||
expect(entry.value).to eq(
|
||||
name: :rspec,
|
||||
before_script: %w[ls pwd],
|
||||
script: %w[rspec],
|
||||
stage: 'test',
|
||||
ignore: false,
|
||||
after_script: %w[cleanup],
|
||||
hooks: { pre_get_sources_script: ['echo hello'] },
|
||||
only: { refs: %w[branches tags] },
|
||||
job_variables: {},
|
||||
root_variables_inheritance: true,
|
||||
scheduling_type: :stage,
|
||||
id_tokens: { TEST_ID_TOKEN: { aud: 'https://gitlab.com' } }
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -221,8 +221,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Policy, feature_category: :continuous_
|
|||
let(:config) { { variables: %w[$VARIABLE] } }
|
||||
|
||||
it 'includes default values' do
|
||||
expect(entry.value).to eq(refs: %w[branches tags],
|
||||
variables: %w[$VARIABLE])
|
||||
expect(entry.value).to eq(refs: %w[branches tags], variables: %w[$VARIABLE])
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -128,8 +128,7 @@ RSpec.describe Gitlab::Ci::Config::Extendable::Entry do
|
|||
|
||||
it 'raises an error' do
|
||||
expect { subject.extend! }
|
||||
.to raise_error(described_class::InvalidExtensionError,
|
||||
/invalid base hash/)
|
||||
.to raise_error(described_class::InvalidExtensionError, /invalid base hash/)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -140,8 +139,7 @@ RSpec.describe Gitlab::Ci::Config::Extendable::Entry do
|
|||
|
||||
it 'raises an error' do
|
||||
expect { subject.extend! }
|
||||
.to raise_error(described_class::InvalidExtensionError,
|
||||
/unknown key/)
|
||||
.to raise_error(described_class::InvalidExtensionError, /unknown key/)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -230,8 +228,7 @@ RSpec.describe Gitlab::Ci::Config::Extendable::Entry do
|
|||
|
||||
it 'raises an error' do
|
||||
expect { subject.extend! }
|
||||
.to raise_error(described_class::CircularDependencyError,
|
||||
/circular dependency detected/)
|
||||
.to raise_error(described_class::CircularDependencyError, /circular dependency detected/)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -406,8 +406,10 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline
|
|||
end
|
||||
|
||||
it 'includes the matched local files' do
|
||||
expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Local),
|
||||
an_instance_of(Gitlab::Ci::Config::External::File::Local))
|
||||
expect(subject).to contain_exactly(
|
||||
an_instance_of(Gitlab::Ci::Config::External::File::Local),
|
||||
an_instance_of(Gitlab::Ci::Config::External::File::Local)
|
||||
)
|
||||
|
||||
expect(subject.map(&:location)).to contain_exactly('myfolder/file1.yml', 'myfolder/file2.yml')
|
||||
end
|
||||
|
|
@ -424,8 +426,10 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline
|
|||
let(:project_id) { project.id }
|
||||
|
||||
it 'includes the file' do
|
||||
expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Remote),
|
||||
an_instance_of(Gitlab::Ci::Config::External::File::Local))
|
||||
expect(subject).to contain_exactly(
|
||||
an_instance_of(Gitlab::Ci::Config::External::File::Remote),
|
||||
an_instance_of(Gitlab::Ci::Config::External::File::Local)
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -335,7 +335,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera
|
|||
|
||||
expect(flags).to contain_exactly(
|
||||
have_attributes(type: 'flagged-as-likely-false-positive', origin: 'post analyzer X', description: 'static string to sink'),
|
||||
have_attributes(type: 'flagged-as-likely-false-positive', origin: 'post analyzer Y', description: 'integer to sink')
|
||||
have_attributes(type: 'flagged-as-likely-false-positive', origin: 'post analyzer Y', description: 'integer to sink')
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -33,30 +33,40 @@ RSpec.describe Gitlab::Database::TablesLocker, :suppress_gitlab_schemas_validate
|
|||
FOR VALUES IN (0)
|
||||
SQL
|
||||
|
||||
ApplicationRecord.connection.execute(create_partition_sql)
|
||||
Ci::ApplicationRecord.connection.execute(create_partition_sql)
|
||||
|
||||
create_detached_partition_sql = <<~SQL
|
||||
CREATE TABLE IF NOT EXISTS #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_gitlab_main_part_202201 (
|
||||
id bigserial primary key not null
|
||||
)
|
||||
SQL
|
||||
|
||||
ApplicationRecord.connection.execute(create_detached_partition_sql)
|
||||
Ci::ApplicationRecord.connection.execute(create_detached_partition_sql)
|
||||
[ApplicationRecord, Ci::ApplicationRecord]
|
||||
.map(&:connection)
|
||||
.each do |conn|
|
||||
conn.execute(create_partition_sql)
|
||||
conn.execute(
|
||||
"DROP TABLE IF EXISTS #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_gitlab_main_part_202201"
|
||||
)
|
||||
conn.execute(create_detached_partition_sql)
|
||||
|
||||
Gitlab::Database::SharedModel.using_connection(ApplicationRecord.connection) do
|
||||
Postgresql::DetachedPartition.create!(
|
||||
table_name: '_test_gitlab_main_part_20220101',
|
||||
drop_after: Time.current
|
||||
)
|
||||
end
|
||||
Gitlab::Database::SharedModel.using_connection(Ci::ApplicationRecord.connection) do
|
||||
Postgresql::DetachedPartition.create!(
|
||||
table_name: '_test_gitlab_main_part_20220101',
|
||||
drop_after: Time.current
|
||||
)
|
||||
end
|
||||
Gitlab::Database::SharedModel.using_connection(conn) do
|
||||
Postgresql::DetachedPartition.delete_all
|
||||
Postgresql::DetachedPartition.create!(
|
||||
table_name: '_test_gitlab_main_part_20220101',
|
||||
drop_after: Time.current
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
after(:all) do
|
||||
[ApplicationRecord, Ci::ApplicationRecord]
|
||||
.map(&:connection)
|
||||
.each do |conn|
|
||||
conn.execute(
|
||||
"DROP TABLE IF EXISTS #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_gitlab_main_part_202201"
|
||||
)
|
||||
Gitlab::Database::SharedModel.using_connection(conn) { Postgresql::DetachedPartition.delete_all }
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples "lock tables" do |gitlab_schema, database_name|
|
||||
|
|
|
|||
|
|
@ -59,6 +59,31 @@ RSpec.describe Gitlab::OtherMarkup, feature_category: :wiki do
|
|||
end
|
||||
end
|
||||
|
||||
context 'when mediawiki content' do
|
||||
links = {
|
||||
'p' => {
|
||||
file: 'file.mediawiki',
|
||||
input: 'Red Bridge (JRuby Embed)',
|
||||
output: "\n<p>Red Bridge (JRuby Embed)</p>"
|
||||
},
|
||||
'h1' => {
|
||||
file: 'file.mediawiki',
|
||||
input: '= Red Bridge (JRuby Embed) =',
|
||||
output: "\n\n<h1>\n<a name=\"Red_Bridge_JRuby_Embed\"></a><span>Red Bridge (JRuby Embed)</span>\n</h1>\n"
|
||||
},
|
||||
'h2' => {
|
||||
file: 'file.mediawiki',
|
||||
input: '== Red Bridge (JRuby Embed) ==',
|
||||
output: "\n\n<h2>\n<a name=\"Red_Bridge_JRuby_Embed\"></a><span>Red Bridge (JRuby Embed)</span>\n</h2>\n"
|
||||
}
|
||||
}
|
||||
links.each do |name, data|
|
||||
it "does render into #{name} element" do
|
||||
expect(render(data[:file], data[:input], context)).to eq(data[:output])
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when rendering takes too long' do
|
||||
let_it_be(:file_name) { 'foo.bar' }
|
||||
let_it_be(:project) { create(:project, :repository) }
|
||||
|
|
|
|||
|
|
@ -106,9 +106,21 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob,
|
|||
end
|
||||
|
||||
context 'when TTL option is not set' do
|
||||
let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL }
|
||||
context 'when reduce_duplicate_job_key_ttl is enabled' do
|
||||
let(:expected_ttl) { described_class::SHORT_DUPLICATE_KEY_TTL }
|
||||
|
||||
it_behaves_like 'sets Redis keys with correct TTL'
|
||||
it_behaves_like 'sets Redis keys with correct TTL'
|
||||
end
|
||||
|
||||
context 'when reduce_duplicate_job_key_ttl is disabled' do
|
||||
before do
|
||||
stub_feature_flags(reduce_duplicate_job_key_ttl: false)
|
||||
end
|
||||
|
||||
let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL }
|
||||
|
||||
it_behaves_like 'sets Redis keys with correct TTL'
|
||||
end
|
||||
end
|
||||
|
||||
context 'when TTL option is set' do
|
||||
|
|
|
|||
|
|
@ -4,223 +4,8 @@ require 'spec_helper'
|
|||
|
||||
RSpec.describe Ci::BuildTraceChunks::Redis, :clean_gitlab_redis_shared_state do
|
||||
let(:data_store) { described_class.new }
|
||||
let(:store_trait_with_data) { :redis_with_data }
|
||||
let(:store_trait_without_data) { :redis_without_data }
|
||||
|
||||
describe '#data' do
|
||||
subject { data_store.data(model) }
|
||||
|
||||
context 'when data exists' do
|
||||
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') }
|
||||
|
||||
it 'returns the data' do
|
||||
is_expected.to eq('sample data in redis')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when data does not exist' do
|
||||
let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
|
||||
|
||||
it 'returns nil' do
|
||||
is_expected.to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#set_data' do
|
||||
subject { data_store.set_data(model, data) }
|
||||
|
||||
let(:data) { 'abc123' }
|
||||
|
||||
context 'when data exists' do
|
||||
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') }
|
||||
|
||||
it 'overwrites data' do
|
||||
expect(data_store.data(model)).to eq('sample data in redis')
|
||||
|
||||
subject
|
||||
|
||||
expect(data_store.data(model)).to eq('abc123')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when data does not exist' do
|
||||
let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
|
||||
|
||||
it 'sets new data' do
|
||||
expect(data_store.data(model)).to be_nil
|
||||
|
||||
subject
|
||||
|
||||
expect(data_store.data(model)).to eq('abc123')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#append_data' do
|
||||
context 'when valid offset is used with existing data' do
|
||||
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'abcd') }
|
||||
|
||||
it 'appends data' do
|
||||
expect(data_store.data(model)).to eq('abcd')
|
||||
|
||||
length = data_store.append_data(model, '12345', 4)
|
||||
|
||||
expect(length).to eq 9
|
||||
expect(data_store.data(model)).to eq('abcd12345')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when data does not exist yet' do
|
||||
let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
|
||||
|
||||
it 'sets new data' do
|
||||
expect(data_store.data(model)).to be_nil
|
||||
|
||||
length = data_store.append_data(model, 'abc', 0)
|
||||
|
||||
expect(length).to eq 3
|
||||
expect(data_store.data(model)).to eq('abc')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when data needs to be truncated' do
|
||||
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: '12345678') }
|
||||
|
||||
it 'appends data and truncates stored value' do
|
||||
expect(data_store.data(model)).to eq('12345678')
|
||||
|
||||
length = data_store.append_data(model, 'ab', 4)
|
||||
|
||||
expect(length).to eq 6
|
||||
expect(data_store.data(model)).to eq('1234ab')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when invalid offset is provided' do
|
||||
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'abc') }
|
||||
|
||||
it 'raises an exception' do
|
||||
length = data_store.append_data(model, '12345', 4)
|
||||
|
||||
expect(length).to be_negative
|
||||
end
|
||||
end
|
||||
|
||||
context 'when trace contains multi-byte UTF8 characters' do
|
||||
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'aüc') }
|
||||
|
||||
it 'appends data' do
|
||||
length = data_store.append_data(model, '1234', 4)
|
||||
|
||||
data_store.data(model).then do |new_data|
|
||||
expect(new_data.bytesize).to eq 8
|
||||
expect(new_data).to eq 'aüc1234'
|
||||
end
|
||||
|
||||
expect(length).to eq 8
|
||||
end
|
||||
end
|
||||
|
||||
context 'when trace contains non-UTF8 characters' do
|
||||
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: "a\255c") }
|
||||
|
||||
it 'appends data' do
|
||||
length = data_store.append_data(model, '1234', 3)
|
||||
|
||||
data_store.data(model).then do |new_data|
|
||||
expect(new_data.bytesize).to eq 7
|
||||
end
|
||||
|
||||
expect(length).to eq 7
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#delete_data' do
|
||||
subject { data_store.delete_data(model) }
|
||||
|
||||
context 'when data exists' do
|
||||
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') }
|
||||
|
||||
it 'deletes data' do
|
||||
expect(data_store.data(model)).to eq('sample data in redis')
|
||||
|
||||
subject
|
||||
|
||||
expect(data_store.data(model)).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when data does not exist' do
|
||||
let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
|
||||
|
||||
it 'does nothing' do
|
||||
expect(data_store.data(model)).to be_nil
|
||||
|
||||
subject
|
||||
|
||||
expect(data_store.data(model)).to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#size' do
|
||||
context 'when data exists' do
|
||||
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'üabcd') }
|
||||
|
||||
it 'returns data bytesize correctly' do
|
||||
expect(data_store.size(model)).to eq 6
|
||||
end
|
||||
end
|
||||
|
||||
context 'when data does not exist' do
|
||||
let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
|
||||
|
||||
it 'returns zero' do
|
||||
expect(data_store.size(model)).to be_zero
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#keys' do
|
||||
subject { data_store.keys(relation) }
|
||||
|
||||
let(:build) { create(:ci_build) }
|
||||
let(:relation) { build.trace_chunks }
|
||||
|
||||
before do
|
||||
create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 0, build: build)
|
||||
create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 1, build: build)
|
||||
end
|
||||
|
||||
it 'returns keys' do
|
||||
is_expected.to eq([[build.id, 0], [build.id, 1]])
|
||||
end
|
||||
end
|
||||
|
||||
describe '#delete_keys' do
|
||||
subject { data_store.delete_keys(keys) }
|
||||
|
||||
let(:build) { create(:ci_build) }
|
||||
let(:relation) { build.trace_chunks }
|
||||
let(:keys) { data_store.keys(relation) }
|
||||
|
||||
before do
|
||||
create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 0, build: build)
|
||||
create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 1, build: build)
|
||||
end
|
||||
|
||||
it 'deletes multiple data' do
|
||||
Gitlab::Redis::SharedState.with do |redis|
|
||||
expect(redis.exists?("gitlab:ci:trace:#{build.id}:chunks:0")).to eq(true)
|
||||
expect(redis.exists?("gitlab:ci:trace:#{build.id}:chunks:1")).to eq(true)
|
||||
end
|
||||
|
||||
subject
|
||||
|
||||
Gitlab::Redis::SharedState.with do |redis|
|
||||
expect(redis.exists?("gitlab:ci:trace:#{build.id}:chunks:0")).to eq(false)
|
||||
expect(redis.exists?("gitlab:ci:trace:#{build.id}:chunks:1")).to eq(false)
|
||||
end
|
||||
end
|
||||
end
|
||||
it_behaves_like 'CI build trace chunk redis', Gitlab::Redis::SharedState
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,12 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Ci::BuildTraceChunks::RedisTraceChunks, :clean_gitlab_redis_trace_chunks,
|
||||
feature_category: :continuous_integration do
|
||||
let(:data_store) { described_class.new }
|
||||
let(:store_trait_with_data) { :redis_trace_chunks_with_data }
|
||||
let(:store_trait_without_data) { :redis_trace_chunks_without_data }
|
||||
|
||||
it_behaves_like 'CI build trace chunk redis', Gitlab::Redis::TraceChunks
|
||||
end
|
||||
|
|
@ -363,7 +363,7 @@ RSpec.describe API::Badges, feature_category: :groups_and_projects do
|
|||
end
|
||||
|
||||
describe 'Endpoints' do
|
||||
%w(project group).each do |source_type|
|
||||
%w[project group].each do |source_type|
|
||||
it_behaves_like 'GET /:sources/:id/badges', source_type
|
||||
it_behaves_like 'GET /:sources/:id/badges/:badge_id', source_type
|
||||
it_behaves_like 'GET /:sources/:id/badges/render', source_type
|
||||
|
|
|
|||
|
|
@ -266,7 +266,7 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
|
|||
expect(json_response.dig('project', 'groups')).to match_array([{ 'id' => group.id }])
|
||||
expect(json_response.dig('user', 'id')).to eq(api_user.id)
|
||||
expect(json_response.dig('user', 'username')).to eq(api_user.username)
|
||||
expect(json_response.dig('user', 'roles_in_project')).to match_array %w(guest reporter developer)
|
||||
expect(json_response.dig('user', 'roles_in_project')).to match_array %w[guest reporter developer]
|
||||
expect(json_response).not_to include('environment')
|
||||
end
|
||||
|
||||
|
|
@ -450,7 +450,7 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
|
|||
end
|
||||
|
||||
context 'filter project with array of scope elements' do
|
||||
let(:query) { { scope: %w(pending running) } }
|
||||
let(:query) { { scope: %w[pending running] } }
|
||||
|
||||
it do
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
|
|
@ -459,7 +459,7 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
|
|||
end
|
||||
|
||||
context 'respond 400 when scope contains invalid state' do
|
||||
let(:query) { { scope: %w(unknown running) } }
|
||||
let(:query) { { scope: %w[unknown running] } }
|
||||
|
||||
it { expect(response).to have_gitlab_http_status(:bad_request) }
|
||||
end
|
||||
|
|
|
|||
|
|
@ -436,7 +436,7 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
|
|||
end
|
||||
|
||||
context 'filter jobs with array of scope elements' do
|
||||
let(:query) { { scope: %w(pending running) } }
|
||||
let(:query) { { scope: %w[pending running] } }
|
||||
|
||||
it :aggregate_failures do
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
|
|
@ -445,7 +445,7 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
|
|||
end
|
||||
|
||||
context 'respond 400 when scope contains invalid state' do
|
||||
let(:query) { { scope: %w(unknown running) } }
|
||||
let(:query) { { scope: %w[unknown running] } }
|
||||
|
||||
it { expect(response).to have_gitlab_http_status(:bad_request) }
|
||||
end
|
||||
|
|
@ -615,7 +615,7 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
|
|||
end
|
||||
|
||||
context 'with array of scope elements' do
|
||||
let(:query) { { scope: %w(pending running) } }
|
||||
let(:query) { { scope: %w[pending running] } }
|
||||
|
||||
it :skip_before_request, :aggregate_failures do
|
||||
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
|
||||
|
|
@ -623,14 +623,14 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
|
|||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response).to be_an Array
|
||||
expect(json_response.count).to eq 2
|
||||
json_response.each { |r| expect(%w(pending running).include?(r['status'])).to be true }
|
||||
json_response.each { |r| expect(%w[pending running].include?(r['status'])).to be true }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'respond 400 when scope contains invalid state' do
|
||||
context 'in an array' do
|
||||
let(:query) { { scope: %w(unknown running) } }
|
||||
let(:query) { { scope: %w[unknown running] } }
|
||||
|
||||
it { expect(response).to have_gitlab_http_status(:bad_request) }
|
||||
end
|
||||
|
|
|
|||
|
|
@ -202,12 +202,12 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
|
|||
|
||||
let(:expected_steps) do
|
||||
[{ 'name' => 'script',
|
||||
'script' => %w(echo),
|
||||
'script' => %w[echo],
|
||||
'timeout' => job.metadata_timeout,
|
||||
'when' => 'on_success',
|
||||
'allow_failure' => false },
|
||||
{ 'name' => 'after_script',
|
||||
'script' => %w(ls date),
|
||||
'script' => %w[ls date],
|
||||
'timeout' => job.metadata_timeout,
|
||||
'when' => 'always',
|
||||
'allow_failure' => true }]
|
||||
|
|
@ -226,7 +226,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
|
|||
let(:expected_artifacts) do
|
||||
[{ 'name' => 'artifacts_file',
|
||||
'untracked' => false,
|
||||
'paths' => %w(out/),
|
||||
'paths' => %w[out/],
|
||||
'when' => 'always',
|
||||
'expire_in' => '7d',
|
||||
"artifact_type" => "archive",
|
||||
|
|
@ -461,7 +461,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
|
|||
expect { request_job }.to change { runner.reload.contacted_at }
|
||||
end
|
||||
|
||||
%w(version revision platform architecture).each do |param|
|
||||
%w[version revision platform architecture].each do |param|
|
||||
context "when info parameter '#{param}' is present" do
|
||||
let(:value) { "#{param}_value" }
|
||||
|
||||
|
|
|
|||
|
|
@ -48,7 +48,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
|
|||
description: 'server.hostname',
|
||||
maintenance_note: 'Some maintainer notes',
|
||||
run_untagged: false,
|
||||
tag_list: %w(tag1 tag2),
|
||||
tag_list: %w[tag1 tag2],
|
||||
locked: true,
|
||||
active: true,
|
||||
access_level: 'ref_protected',
|
||||
|
|
@ -167,7 +167,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
|
|||
allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
|
||||
end
|
||||
|
||||
%w(name version revision platform architecture).each do |param|
|
||||
%w[name version revision platform architecture].each do |param|
|
||||
context "when info parameter '#{param}' info is present" do
|
||||
let(:value) { "#{param}_value" }
|
||||
|
||||
|
|
|
|||
|
|
@ -81,7 +81,7 @@ RSpec.describe API::Ci::Triggers, feature_category: :continuous_integration do
|
|||
end
|
||||
|
||||
it 'validates variables needs to be a map of key-valued strings' do
|
||||
post api("/projects/#{project.id}/trigger/pipeline"), params: options.merge(variables: { 'TRIGGER_KEY' => %w(1 2) }, ref: 'master')
|
||||
post api("/projects/#{project.id}/trigger/pipeline"), params: options.merge(variables: { 'TRIGGER_KEY' => %w[1 2] }, ref: 'master')
|
||||
|
||||
expect(response).to have_gitlab_http_status(:bad_request)
|
||||
expect(json_response['message']).to eq('variables needs to be a map of key-valued strings')
|
||||
|
|
|
|||
|
|
@ -67,7 +67,7 @@ RSpec.describe API::ContainerRepositories, feature_category: :container_registry
|
|||
let(:url) { "/registry/repositories/#{repository.id}?tags=true" }
|
||||
|
||||
before do
|
||||
stub_container_registry_tags(repository: repository.path, tags: %w(rootA latest), with_manifest: true)
|
||||
stub_container_registry_tags(repository: repository.path, tags: %w[rootA latest], with_manifest: true)
|
||||
end
|
||||
|
||||
it 'returns a repository and its tags' do
|
||||
|
|
@ -102,7 +102,7 @@ RSpec.describe API::ContainerRepositories, feature_category: :container_registry
|
|||
let(:url) { "/registry/repositories/#{repository.id}?tags_count=true" }
|
||||
|
||||
before do
|
||||
stub_container_registry_tags(repository: repository.path, tags: %w(rootA latest), with_manifest: true)
|
||||
stub_container_registry_tags(repository: repository.path, tags: %w[rootA latest], with_manifest: true)
|
||||
end
|
||||
|
||||
it 'returns a repository and its tags_count' do
|
||||
|
|
|
|||
|
|
@ -424,7 +424,7 @@ RSpec.describe API::Deployments, feature_category: :continuous_delivery do
|
|||
)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:bad_request)
|
||||
expect(json_response['message']['status']).to include(%{cannot transition via \"run\"})
|
||||
expect(json_response['message']['status']).to include(%(cannot transition via \"run\"))
|
||||
end
|
||||
|
||||
it 'links merge requests when the deployment status changes to success', :sidekiq_inline do
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ RSpec.describe API::Geo, feature_category: :geo_replication do
|
|||
{
|
||||
'type' => 'object',
|
||||
'additionalProperties' => false,
|
||||
'required' => %w(geo_enabled),
|
||||
'required' => %w[geo_enabled],
|
||||
'properties' => {
|
||||
'geo_enabled' => { 'type' => 'boolean' }
|
||||
}
|
||||
|
|
|
|||
|
|
@ -90,6 +90,6 @@ RSpec.describe 'Query.project(fullPath).pipelines.jobs.manualVariables', feature
|
|||
|
||||
variables_data = graphql_data.dig('project', 'pipelines', 'nodes').first
|
||||
.dig('jobs', 'nodes').flat_map { |job| job.dig('manualVariables', 'nodes') }
|
||||
expect(variables_data.map { |var| var['key'] }).to match_array(%w(MANUAL_TEST_VAR_1 MANUAL_TEST_VAR_2))
|
||||
expect(variables_data.map { |var| var['key'] }).to match_array(%w[MANUAL_TEST_VAR_1 MANUAL_TEST_VAR_2])
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -113,7 +113,7 @@ RSpec.describe 'GitlabSchema configurations', feature_category: :integrations do
|
|||
|
||||
context 'regular queries' do
|
||||
subject do
|
||||
query = graphql_query_for('project', { 'fullPath' => project.full_path }, %w(id name description))
|
||||
query = graphql_query_for('project', { 'fullPath' => project.full_path }, %w[id name description])
|
||||
post_graphql(query)
|
||||
end
|
||||
|
||||
|
|
@ -125,7 +125,7 @@ RSpec.describe 'GitlabSchema configurations', feature_category: :integrations do
|
|||
|
||||
subject do
|
||||
queries = [
|
||||
{ query: graphql_query_for('project', { 'fullPath' => '$fullPath' }, %w(id name description)) }, # Complexity 4
|
||||
{ query: graphql_query_for('project', { 'fullPath' => '$fullPath' }, %w[id name description]) }, # Complexity 4
|
||||
{ query: graphql_query_for('echo', { 'text' => "$test" }, []), variables: { "test" => "Hello world" } }, # Complexity 1
|
||||
{ query: graphql_query_for('project', { 'fullPath' => project.full_path }, "userPermissions { createIssue }") } # Complexity 3
|
||||
]
|
||||
|
|
@ -215,7 +215,7 @@ RSpec.describe 'GitlabSchema configurations', feature_category: :integrations do
|
|||
|
||||
context "global id's" do
|
||||
it 'uses GlobalID to expose ids' do
|
||||
post_graphql(graphql_query_for('project', { 'fullPath' => project.full_path }, %w(id)),
|
||||
post_graphql(graphql_query_for('project', { 'fullPath' => project.full_path }, %w[id]),
|
||||
current_user: project.first_owner)
|
||||
|
||||
parsed_id = GlobalID.parse(graphql_data['project']['id'])
|
||||
|
|
|
|||
|
|
@ -49,7 +49,7 @@ RSpec.describe 'getting container repositories in a group', feature_category: :s
|
|||
group.add_owner(owner)
|
||||
stub_container_registry_config(enabled: true)
|
||||
container_repositories.each do |repository|
|
||||
stub_container_registry_tags(repository: repository.path, tags: %w(tag1 tag2 tag3), with_manifest: false)
|
||||
stub_container_registry_tags(repository: repository.path, tags: %w[tag1 tag2 tag3], with_manifest: false)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -142,7 +142,7 @@ RSpec.describe 'getting container repositories in a group', feature_category: :s
|
|||
end
|
||||
|
||||
before do
|
||||
stub_container_registry_tags(repository: container_repository.path, tags: %w(tag4 tag5 tag6), with_manifest: false)
|
||||
stub_container_registry_tags(repository: container_repository.path, tags: %w[tag4 tag5 tag6], with_manifest: false)
|
||||
end
|
||||
|
||||
it 'returns the searched container repository' do
|
||||
|
|
|
|||
|
|
@ -136,7 +136,7 @@ RSpec.describe 'Milestones through GroupQuery', feature_category: :team_planning
|
|||
let_it_be(:closed_issue) { create(:issue, :closed, project: project, milestone: milestone) }
|
||||
|
||||
let(:milestone_query) do
|
||||
%{
|
||||
%(
|
||||
id
|
||||
title
|
||||
description
|
||||
|
|
@ -149,7 +149,7 @@ RSpec.describe 'Milestones through GroupQuery', feature_category: :team_planning
|
|||
projectMilestone
|
||||
groupMilestone
|
||||
subgroupMilestone
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
def post_query
|
||||
|
|
@ -180,12 +180,12 @@ RSpec.describe 'Milestones through GroupQuery', feature_category: :team_planning
|
|||
|
||||
context 'milestone statistics' do
|
||||
let(:milestone_query) do
|
||||
%{
|
||||
%(
|
||||
stats {
|
||||
totalIssuesCount
|
||||
closedIssuesCount
|
||||
}
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns the correct milestone statistics' do
|
||||
|
|
|
|||
|
|
@ -47,28 +47,28 @@ RSpec.describe "deleting designs", feature_category: :design_management do
|
|||
context 'the designs list is empty' do
|
||||
it_behaves_like 'a failed request' do
|
||||
let(:designs) { [] }
|
||||
let(:the_error) { a_string_matching %r/no filenames/ }
|
||||
let(:the_error) { a_string_matching %r{no filenames} }
|
||||
end
|
||||
end
|
||||
|
||||
context 'the designs list contains filenames we cannot find' do
|
||||
it_behaves_like 'a failed request' do
|
||||
let(:designs) { %w/foo bar baz/.map { |fn| double('file', filename: fn) } }
|
||||
let(:the_error) { a_string_matching %r/filenames were not found/ }
|
||||
let(:designs) { %w[foo bar baz].map { |fn| double('file', filename: fn) } }
|
||||
let(:the_error) { a_string_matching %r{filenames were not found} }
|
||||
end
|
||||
end
|
||||
|
||||
context 'the current user does not have developer access' do
|
||||
it_behaves_like 'a failed request' do
|
||||
let(:current_user) { create(:user) }
|
||||
let(:the_error) { a_string_matching %r/you don't have permission/ }
|
||||
let(:the_error) { a_string_matching %r{you don't have permission} }
|
||||
end
|
||||
end
|
||||
|
||||
context "when the issue does not exist" do
|
||||
it_behaves_like 'a failed request' do
|
||||
let(:variables) { { iid: "1234567890" } }
|
||||
let(:the_error) { a_string_matching %r/does not exist/ }
|
||||
let(:the_error) { a_string_matching %r{does not exist} }
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -169,8 +169,8 @@ RSpec.describe 'Creating a Snippet', feature_category: :source_code_management d
|
|||
end
|
||||
|
||||
it_behaves_like 'expected files argument', nil, nil
|
||||
it_behaves_like 'expected files argument', %w(foo bar), %w(foo bar)
|
||||
it_behaves_like 'expected files argument', 'foo', %w(foo)
|
||||
it_behaves_like 'expected files argument', %w[foo bar], %w[foo bar]
|
||||
it_behaves_like 'expected files argument', 'foo', %w[foo]
|
||||
|
||||
context 'when files has an invalid value' do
|
||||
let(:uploaded_files) { [1] }
|
||||
|
|
|
|||
|
|
@ -53,7 +53,7 @@ RSpec.describe 'Destroying a Snippet', feature_category: :source_code_management
|
|||
let!(:snippet_gid) { project.to_gid.to_s }
|
||||
|
||||
it 'returns an error' do
|
||||
err_message = %["#{snippet_gid}" does not represent an instance of Snippet]
|
||||
err_message = %("#{snippet_gid}" does not represent an instance of Snippet)
|
||||
|
||||
post_graphql_mutation(mutation, current_user: current_user)
|
||||
|
||||
|
|
|
|||
|
|
@ -41,7 +41,7 @@ RSpec.describe 'query Jira service', feature_category: :system_access do
|
|||
it 'returns list of jira imports' do
|
||||
service_types = services.map { |s| s['type'] }
|
||||
|
||||
expect(service_types).to match_array(%w(BugzillaService JiraService RedmineService))
|
||||
expect(service_types).to match_array(%w[BugzillaService JiraService RedmineService])
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -46,7 +46,7 @@ RSpec.describe 'getting container repositories in a project', feature_category:
|
|||
before do
|
||||
stub_container_registry_config(enabled: true)
|
||||
container_repositories.each do |repository|
|
||||
stub_container_registry_tags(repository: repository.path, tags: %w(tag1 tag2 tag3), with_manifest: false)
|
||||
stub_container_registry_tags(repository: repository.path, tags: %w[tag1 tag2 tag3], with_manifest: false)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -141,7 +141,7 @@ RSpec.describe 'getting container repositories in a project', feature_category:
|
|||
end
|
||||
|
||||
before do
|
||||
stub_container_registry_tags(repository: container_repository.path, tags: %w(tag4 tag5 tag6), with_manifest: false)
|
||||
stub_container_registry_tags(repository: container_repository.path, tags: %w[tag4 tag5 tag6], with_manifest: false)
|
||||
end
|
||||
|
||||
it 'returns the searched container repository' do
|
||||
|
|
@ -175,11 +175,11 @@ RSpec.describe 'getting container repositories in a project', feature_category:
|
|||
let_it_be(:container_repository5) { create(:container_repository, name: 'e', project: sort_project) }
|
||||
|
||||
before do
|
||||
stub_container_registry_tags(repository: container_repository1.path, tags: %w(tag1 tag1 tag3), with_manifest: false)
|
||||
stub_container_registry_tags(repository: container_repository2.path, tags: %w(tag4 tag5 tag6), with_manifest: false)
|
||||
stub_container_registry_tags(repository: container_repository3.path, tags: %w(tag7 tag8), with_manifest: false)
|
||||
stub_container_registry_tags(repository: container_repository4.path, tags: %w(tag9), with_manifest: false)
|
||||
stub_container_registry_tags(repository: container_repository5.path, tags: %w(tag10 tag11), with_manifest: false)
|
||||
stub_container_registry_tags(repository: container_repository1.path, tags: %w[tag1 tag1 tag3], with_manifest: false)
|
||||
stub_container_registry_tags(repository: container_repository2.path, tags: %w[tag4 tag5 tag6], with_manifest: false)
|
||||
stub_container_registry_tags(repository: container_repository3.path, tags: %w[tag7 tag8], with_manifest: false)
|
||||
stub_container_registry_tags(repository: container_repository4.path, tags: %w[tag9], with_manifest: false)
|
||||
stub_container_registry_tags(repository: container_repository5.path, tags: %w[tag10 tag11], with_manifest: false)
|
||||
end
|
||||
|
||||
def pagination_query(params)
|
||||
|
|
|
|||
|
|
@@ -73,7 +73,7 @@ RSpec.describe 'Getting versions related to an issue', feature_category: :design
 post_graphql(query, current_user: current_user)

 keys = graphql_data.dig(*edges_path).first['node'].keys
-expect(keys).to match_array(%w(id sha createdAt author))
+expect(keys).to match_array(%w[id sha createdAt author])
 end
 end
@@ -108,7 +108,7 @@ RSpec.describe 'Query.project(fullPath).issue(iid)', feature_category: :team_pla
 let(:object_field_name) { :design }

 let(:no_argument_error) do
-custom_graphql_error(path, a_string_matching(%r/id or filename/))
+custom_graphql_error(path, a_string_matching(%r{id or filename}))
 end

 let_it_be(:object_on_other_issue) { create(:design, issue: issue_b) }
@@ -134,7 +134,7 @@ RSpec.describe 'Query.project(fullPath).issue(iid)', feature_category: :team_pla
 it 'raises an error' do
 post_query

-expect(graphql_errors).to include(custom_graphql_error(path, a_string_matching(%r/id or sha/)))
+expect(graphql_errors).to include(custom_graphql_error(path, a_string_matching(%r{id or sha})))
 end
 end
@@ -96,7 +96,7 @@ RSpec.describe 'query Jira import data', feature_category: :importers do
 total_issue_count = jira_imports.map { |ji| ji.dig('totalIssueCount') }

 expect(jira_imports.size).to eq 2
-expect(jira_proket_keys).to eq %w(BB AA)
+expect(jira_proket_keys).to eq %w[BB AA]
 expect(usernames).to eq [current_user.username, current_user.username]
 expect(imported_issues_count).to eq [2, 2]
 expect(failed_issues_count).to eq [1, 2]
@@ -55,8 +55,8 @@ RSpec.describe 'query Jira projects', feature_category: :integrations do
 project_ids = jira_projects.map { |jp| jp['projectId'] }

 expect(jira_projects.size).to eq(2)
-expect(project_keys).to eq(%w(EX ABC))
-expect(project_names).to eq(%w(Example Alphabetical))
+expect(project_keys).to eq(%w[EX ABC])
+expect(project_names).to eq(%w[Example Alphabetical])
 expect(project_ids).to eq([10000, 10001])
 end
@@ -69,8 +69,8 @@ RSpec.describe 'query Jira projects', feature_category: :integrations do
 project_ids = jira_projects.map { |jp| jp['projectId'] }

 expect(jira_projects.size).to eq(1)
-expect(project_keys).to eq(%w(EX))
-expect(project_names).to eq(%w(Example))
+expect(project_keys).to eq(%w[EX])
+expect(project_names).to eq(%w[Example])
 expect(project_ids).to eq([10000])
 end
 end
@@ -36,7 +36,7 @@ RSpec.describe 'Query.project(fullPath).release(tagName)', feature_category: :re
 let(:path) { path_prefix }

 let(:release_fields) do
-%{
+%(
 tagName
 tagPath
 description
@@ -45,7 +45,7 @@ RSpec.describe 'Query.project(fullPath).release(tagName)', feature_category: :re
 createdAt
 releasedAt
 upcomingRelease
-}
+)
 end

 before do
@@ -176,14 +176,14 @@ RSpec.describe 'Query.project(fullPath).release(tagName)', feature_category: :re
 let(:path) { path_prefix + %w[links] }

 let(:release_fields) do
-query_graphql_field(:links, nil, %{
+query_graphql_field(:links, nil, %(
 selfUrl
 openedMergeRequestsUrl
 mergedMergeRequestsUrl
 closedMergeRequestsUrl
 openedIssuesUrl
 closedIssuesUrl
-})
+))
 end

 it 'finds all release links' do
@@ -225,7 +225,7 @@ RSpec.describe 'Query.project(fullPath).release(tagName)', feature_category: :re
 let(:path) { path_prefix }

 let(:release_fields) do
-%{
+%(
 tagName
 tagPath
 description
@@ -234,7 +234,7 @@ RSpec.describe 'Query.project(fullPath).release(tagName)', feature_category: :re
 createdAt
 releasedAt
 upcomingRelease
-}
+)
 end

 before do
@@ -358,14 +358,14 @@ RSpec.describe 'Query.project(fullPath).release(tagName)', feature_category: :re
 let(:path) { path_prefix + %w[links] }

 let(:release_fields) do
-query_graphql_field(:links, nil, %{
+query_graphql_field(:links, nil, %(
 selfUrl
 openedMergeRequestsUrl
 mergedMergeRequestsUrl
 closedMergeRequestsUrl
 openedIssuesUrl
 closedIssuesUrl
-})
+))
 end

 it 'finds only selfUrl' do
@@ -547,10 +547,10 @@ RSpec.describe 'Query.project(fullPath).release(tagName)', feature_category: :re
 let(:current_user) { developer }

 let(:release_fields) do
-%{
+%(
 releasedAt
 upcomingRelease
-}
+)
 end

 before do
@@ -588,13 +588,13 @@ RSpec.describe 'Query.project(fullPath).release(tagName)', feature_category: :re
 let_it_be_with_reload(:release) { create(:release, project: project) }

 let(:release_fields) do
-%{
+%(
 milestones {
 nodes {
 title
 }
 }
-}
+)
 end

 let(:actual_milestone_title_order) do
@@ -20,7 +20,7 @@ RSpec.describe 'query a single terraform state', feature_category: :infrastructu
 query_graphql_field(
 :terraformState,
 { name: terraform_state.name },
-%{
+%(
 id
 name
 lockedAt
@@ -45,7 +45,7 @@ RSpec.describe 'query a single terraform state', feature_category: :infrastructu
 lockedByUser {
 id
 }
-}
+)
 )
 )
 end
@@ -14,7 +14,7 @@ RSpec.describe 'query terraform states', feature_category: :infrastructure_as_co
 graphql_query_for(
 :project,
 { fullPath: project.full_path },
-%{
+%(
 terraformStates {
 count
 nodes {
@@ -45,7 +45,7 @@ RSpec.describe 'query terraform states', feature_category: :infrastructure_as_co
 }
 }
 }
-}
+)
 )
 end

@@ -111,12 +111,12 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
 it 'returns new recovery codes when the user exists' do
 allow_any_instance_of(User).to receive(:two_factor_enabled?).and_return(true)
 allow_any_instance_of(User)
-.to receive(:generate_otp_backup_codes!).and_return(%w(119135e5a3ebce8e 34bd7b74adbc8861))
+.to receive(:generate_otp_backup_codes!).and_return(%w[119135e5a3ebce8e 34bd7b74adbc8861])

 subject

 expect(json_response['success']).to be_truthy
-expect(json_response['recovery_codes']).to match_array(%w(119135e5a3ebce8e 34bd7b74adbc8861))
+expect(json_response['recovery_codes']).to match_array(%w[119135e5a3ebce8e 34bd7b74adbc8861])
 end
 end
@@ -200,7 +200,7 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
 params: {
 key_id: key.id,
 name: 'newtoken',
-scopes: %w(read_api badscope read_repository)
+scopes: %w[read_api badscope read_repository]
 },
 headers: gitlab_shell_internal_api_request_header
@@ -216,14 +216,14 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
 params: {
 key_id: key.id,
 name: 'newtoken',
-scopes: %w(read_api read_repository),
+scopes: %w[read_api read_repository],
 expires_at: max_pat_access_token_lifetime
 },
 headers: gitlab_shell_internal_api_request_header

 expect(json_response['success']).to be_truthy
 expect(json_response['token']).to match(/\A\S{#{token_size}}\z/)
-expect(json_response['scopes']).to match_array(%w(read_api read_repository))
+expect(json_response['scopes']).to match_array(%w[read_api read_repository])
 expect(json_response['expires_at']).to eq(max_pat_access_token_lifetime.iso8601)
 end
 end
@@ -236,14 +236,14 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
 params: {
 key_id: key.id,
 name: 'newtoken',
-scopes: %w(read_api read_repository),
+scopes: %w[read_api read_repository],
 expires_at: 365.days.from_now
 },
 headers: gitlab_shell_internal_api_request_header

 expect(json_response['success']).to be_truthy
 expect(json_response['token']).to match(/\A\S{#{token_size}}\z/)
-expect(json_response['scopes']).to match_array(%w(read_api read_repository))
+expect(json_response['scopes']).to match_array(%w[read_api read_repository])
 expect(json_response['expires_at']).to eq(max_pat_access_token_lifetime.iso8601)
 end
 end
@@ -897,7 +897,7 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
 {
 'action' => 'geo_proxy_to_primary',
 'data' => {
-'api_endpoints' => %w{geo/proxy_git_ssh/info_refs_receive_pack geo/proxy_git_ssh/receive_pack},
+'api_endpoints' => %w[geo/proxy_git_ssh/info_refs_receive_pack geo/proxy_git_ssh/receive_pack],
 'gl_username' => 'testuser',
 'primary_repo' => 'http://localhost:3000/testuser/repo.git'
 }
@@ -350,7 +350,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
 get api(base_url, admin)

 expect(response).to have_gitlab_http_status(:ok)
-expect(json_response.last.keys).to include(*%w(id iid project_id title description))
+expect(json_response.last.keys).to include(*%w[id iid project_id title description])
 expect(json_response.last).not_to have_key('subscribed')
 end
@@ -530,7 +530,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
 get api("#{base_url}/issues", user)

 expect(response).to have_gitlab_http_status(:ok)
-expect(json_response.last.keys).to include(*%w(id iid project_id title description))
+expect(json_response.last.keys).to include(*%w[id iid project_id title description])
 expect(json_response.last).not_to have_key('subscribed')
 end
@@ -638,7 +638,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
 end

 it 'returns an empty array if no issue matches labels with labels param as array' do
-get api('/issues', user), params: { labels: %w(foo bar) }
+get api('/issues', user), params: { labels: %w[foo bar] }

 expect_paginated_array_response([])
 end
@@ -914,7 +914,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
 end

 it 'fails to sort with non predefined options' do
-%w(milestone abracadabra).each do |sort_opt|
+%w[milestone abracadabra].each do |sort_opt|
 get api('/issues', user), params: { order_by: sort_opt, sort: 'asc' }
 expect(response).to have_gitlab_http_status(:bad_request)
 end
@@ -183,7 +183,7 @@ RSpec.describe API::Issues, :aggregate_failures, feature_category: :team_plannin
 expect(response).to have_gitlab_http_status(:created)
 expect(json_response['title']).to eq('new issue')
 expect(json_response['description']).to be_nil
-expect(json_response['labels']).to eq(%w(label label2))
+expect(json_response['labels']).to eq(%w[label label2])
 expect(json_response['confidential']).to be_falsy
 expect(json_response['assignee']['name']).to eq(user2.name)
 expect(json_response['assignees'].first['name']).to eq(user2.name)
@@ -191,12 +191,12 @@ RSpec.describe API::Issues, :aggregate_failures, feature_category: :team_plannin

 it 'creates a new project issue with labels param as array' do
 post api("/projects/#{project.id}/issues", user),
-params: { title: 'new issue', labels: %w(label label2), weight: 3, assignee_ids: [user2.id] }
+params: { title: 'new issue', labels: %w[label label2], weight: 3, assignee_ids: [user2.id] }

 expect(response).to have_gitlab_http_status(:created)
 expect(json_response['title']).to eq('new issue')
 expect(json_response['description']).to be_nil
-expect(json_response['labels']).to eq(%w(label label2))
+expect(json_response['labels']).to eq(%w[label label2])
 expect(json_response['confidential']).to be_falsy
 expect(json_response['assignee']['name']).to eq(user2.name)
 expect(json_response['assignees'].first['name']).to eq(user2.name)
@@ -391,7 +391,7 @@ RSpec.describe API::Issues, :aggregate_failures, feature_category: :team_plannin

 it 'cannot create new labels with labels param as array' do
 expect do
-post api("/projects/#{project.id}/issues", non_member), params: { title: 'new issue', labels: %w(label label2) }
+post api("/projects/#{project.id}/issues", non_member), params: { title: 'new issue', labels: %w[label label2] }
 end.not_to change { project.labels.count }
 end
 end
@@ -359,7 +359,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do

 it 'updates labels and touches the record with labels param as array', :aggregate_failures do
 travel_to(2.minutes.from_now) do
-put api_for_user, params: { labels: %w(foo bar) }
+put api_for_user, params: { labels: %w[foo bar] }
 end

 expect(response).to have_gitlab_http_status(:ok)
@@ -229,7 +229,7 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
 merge_request_closed.id,
 merge_request.id
 ])
-expect(json_response.last.keys).to match_array(%w(id iid title web_url created_at description project_id state updated_at))
+expect(json_response.last.keys).to match_array(%w[id iid title web_url created_at description project_id state updated_at])
 expect(json_response.last['iid']).to eq(merge_request.iid)
 expect(json_response.last['title']).to eq(merge_request.title)
 expect(json_response.last).to have_key('web_url')
@@ -2175,7 +2175,7 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc

 expect(response).to have_gitlab_http_status(:created)
 expect(json_response['title']).to eq('Test merge_request')
-expect(json_response['labels']).to eq(%w(label label2))
+expect(json_response['labels']).to eq(%w[label label2])
 expect(json_response['milestone']['id']).to eq(milestone.id)
 expect(json_response['squash']).to be_truthy
 expect(json_response['force_remove_source_branch']).to be_falsy
@@ -2187,11 +2187,11 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
 end

 it_behaves_like 'creates merge request with labels' do
-let(:labels) { %w(label label2) }
+let(:labels) { %w[label label2] }
 end

 it_behaves_like 'creates merge request with labels' do
-let(:labels) { %w(label label2) }
+let(:labels) { %w[label label2] }
 end

 it 'creates merge request with special label names' do
@@ -26,7 +26,7 @@ RSpec.describe API::Metadata, feature_category: :shared do
 let(:personal_access_token) { create(:personal_access_token, scopes: scopes) }

 context 'with api scope' do
-let(:scopes) { %i(api) }
+let(:scopes) { %i[api] }

 it 'returns the metadata information' do
 get api(endpoint, personal_access_token: personal_access_token)
@@ -42,7 +42,7 @@ RSpec.describe API::Metadata, feature_category: :shared do
 end

 context 'with ai_features scope' do
-let(:scopes) { %i(ai_features) }
+let(:scopes) { %i[ai_features] }

 it 'returns the metadata information' do
 get api(endpoint, personal_access_token: personal_access_token)
@@ -58,7 +58,7 @@ RSpec.describe API::Metadata, feature_category: :shared do
 end

 context 'with read_user scope' do
-let(:scopes) { %i(read_user) }
+let(:scopes) { %i[read_user] }

 it 'returns the metadata information' do
 get api(endpoint, personal_access_token: personal_access_token)
@@ -74,7 +74,7 @@ RSpec.describe API::Metadata, feature_category: :shared do
 end

 context 'with neither api, ai_features nor read_user scope' do
-let(:scopes) { %i(read_repository) }
+let(:scopes) { %i[read_repository] }

 it 'returns authorization error' do
 get api(endpoint, personal_access_token: personal_access_token)
@@ -131,4 +131,37 @@ RSpec.describe API::Ml::Mlflow::RegisteredModels, feature_category: :mlops do
 it_behaves_like 'MLflow|Requires api scope and write permission'
 end
 end
+
+describe 'PATCH /projects/:id/ml/mlflow/api/2.0/mlflow/registered-models/update' do
+let(:model_name) { model.name }
+let(:model_description) { 'updated model description' }
+let(:params) { { name: model_name, description: model_description } }
+let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/registered-models/update" }
+let(:request) { patch api(route), params: params, headers: headers }
+
+it 'returns the updated model', :aggregate_failures do
+is_expected.to have_gitlab_http_status(:ok)
+is_expected.to match_response_schema('ml/update_model')
+expect(json_response["registered_model"]["description"]).to eq(model_description)
+end
+
+describe 'Error States' do
+context 'when has access' do
+context 'and model does not exist' do
+let(:model_name) { 'foo' }
+
+it_behaves_like 'MLflow|Not Found - Resource Does Not Exist'
+end
+
+context 'and name is not passed' do
+let(:params) { { description: model_description } }
+
+it_behaves_like 'MLflow|Not Found - Resource Does Not Exist'
+end
+end
+
+it_behaves_like 'MLflow|shared error cases'
+it_behaves_like 'MLflow|Requires read_api scope'
+end
+end
 end
@@ -168,7 +168,7 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r
 let(:api_user) { reporter }

 before do
-stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA latest))
+stub_container_registry_tags(repository: root_repository.path, tags: %w[rootA latest])
 end

 it_behaves_like 'a package tracking event', described_class.name, 'list_tags'
@@ -177,7 +177,7 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r
 subject

 expect(json_response.length).to eq(2)
-expect(json_response.map { |repository| repository['name'] }).to eq %w(latest rootA)
+expect(json_response.map { |repository| repository['name'] }).to eq %w[latest rootA]
 end

 it 'returns a matching schema' do
@@ -362,7 +362,7 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r
 let(:api_user) { reporter }

 before do
-stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA), with_manifest: true)
+stub_container_registry_tags(repository: root_repository.path, tags: %w[rootA], with_manifest: true)
 end

 it 'returns a details of tag' do
@@ -408,7 +408,7 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r

 context 'when there are multiple tags' do
 before do
-stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA rootB), with_manifest: true)
+stub_container_registry_tags(repository: root_repository.path, tags: %w[rootA rootB], with_manifest: true)
 end

 it 'properly removes tag' do
@@ -427,7 +427,7 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r

 context 'when there\'s only one tag' do
 before do
-stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA), with_manifest: true)
+stub_container_registry_tags(repository: root_repository.path, tags: %w[rootA], with_manifest: true)
 end

 it 'properly removes tag' do
@@ -69,7 +69,7 @@ RSpec.describe API::ProjectTemplates, feature_category: :source_code_management
 expect(response).to have_gitlab_http_status(:ok)
 expect(response).to include_pagination_headers
 expect(response).to match_response_schema('public_api/v4/template_list')
-expect(json_response.map { |t| t['key'] }).to match_array(%w(bug feature_proposal template_test))
+expect(json_response.map { |t| t['key'] }).to match_array(%w[bug feature_proposal template_test])
 end

 it 'returns merge request templates' do
@@ -78,7 +78,7 @@ RSpec.describe API::ProjectTemplates, feature_category: :source_code_management
 expect(response).to have_gitlab_http_status(:ok)
 expect(response).to include_pagination_headers
 expect(response).to match_response_schema('public_api/v4/template_list')
-expect(json_response.map { |t| t['key'] }).to match_array(%w(bug feature_proposal template_test))
+expect(json_response.map { |t| t['key'] }).to match_array(%w[bug feature_proposal template_test])
 end

 it 'returns 400 for an unknown template type' do
@@ -325,7 +325,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and

 context 'filter by topic (column topic_list)' do
 before do
-project.update!(topic_list: %w(ruby javascript))
+project.update!(topic_list: %w[ruby javascript])
 end

 it 'returns no projects' do
@@ -894,7 +894,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and

 context 'sorting' do
 context 'by project statistics' do
-%w(repository_size storage_size wiki_size packages_size).each do |order_by|
+%w[repository_size storage_size wiki_size packages_size].each do |order_by|
 context "sorting by #{order_by}" do
 before do
 ProjectStatistics.update_all(order_by => 100)
@@ -2560,7 +2560,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and

 context 'and the project has a private repository' do
 let(:project) { create(:project, :repository, :public, :repository_private) }
-let(:protected_attributes) { %w(default_branch ci_config_path) }
+let(:protected_attributes) { %w[default_branch ci_config_path] }

 it 'hides protected attributes of private repositories if user is not a member' do
 get api(path, user)
@@ -3940,7 +3940,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
 expect(Project.find_by(path: project[:path]).analytics_access_level).to eq(ProjectFeature::PRIVATE)
 end

-%i(releases_access_level environments_access_level feature_flags_access_level infrastructure_access_level monitor_access_level model_experiments_access_level).each do |field|
+%i[releases_access_level environments_access_level feature_flags_access_level infrastructure_access_level monitor_access_level model_experiments_access_level].each do |field|
 it "sets #{field}" do
 put api(path, user), params: { field => 'private' }
@@ -1492,7 +1492,7 @@ RSpec.describe API::Releases, :aggregate_failures, feature_category: :release_or
 subject

 expect(response).to have_gitlab_http_status(:ok)
-expect(returned_milestones).to match_array(%w(milestone2 milestone3))
+expect(returned_milestones).to match_array(%w[milestone2 milestone3])
 end
 end
@@ -742,7 +742,7 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do

 describe 'GET :id/repository/merge_base' do
 let(:refs) do
-%w(304d257dcb821665ab5110318fc58a007bd104ed 0031876facac3f2b2702a0e53a26e89939a42209 570e7b2abdd848b95f2f578043fc23bd6f6fd24d)
+%w[304d257dcb821665ab5110318fc58a007bd104ed 0031876facac3f2b2702a0e53a26e89939a42209 570e7b2abdd848b95f2f578043fc23bd6f6fd24d]
 end

 subject(:request) do
@@ -786,7 +786,7 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do

 context 'when passing refs that do not exist' do
 it_behaves_like '400 response' do
-let(:refs) { %w(304d257dcb821665ab5110318fc58a007bd104ed missing) }
+let(:refs) { %w[304d257dcb821665ab5110318fc58a007bd104ed missing] }
 let(:current_user) { user }
 let(:message) { 'Could not find ref: missing' }
 end
@@ -801,7 +801,7 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do
 end

 context 'when not enough refs are passed' do
-let(:refs) { %w(only-one) }
+let(:refs) { %w[only-one] }
 let(:current_user) { user }

 it 'renders a bad request error' do