Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent 24d07e7552
commit 0e89b9f2e0

@ -1 +1 @@
0.0.26
0.0.27

Gemfile
@ -384,7 +384,7 @@ gem 'terser', '1.0.2', feature_category: :shared
|
|||
gem 'click_house-client', path: 'gems/click_house-client', require: 'click_house/client', feature_category: :database
|
||||
gem 'addressable', '~> 2.8', feature_category: :shared
|
||||
gem 'gon', '~> 6.4.0', feature_category: :shared
|
||||
gem 'request_store', '~> 1.5.1', feature_category: :shared
|
||||
gem 'request_store', '~> 1.7.0', feature_category: :shared
|
||||
gem 'base32', '~> 0.3.0', feature_category: :shared
|
||||
gem 'gitlab-license', '~> 2.6', feature_category: :shared
|
||||
|
||||
|
|
|
|||
|
|
@ -597,7 +597,7 @@
|
|||
{"name":"regexp_property_values","version":"1.0.0","platform":"java","checksum":"5e26782b01241616855c4ee7bb8a62fce9387e484f2d3eaf04f2a0633708222e"},
|
||||
{"name":"regexp_property_values","version":"1.0.0","platform":"ruby","checksum":"162499dc0bba1e66d334273a059f207a61981cc8cc69d2ca743594e7886d080f"},
|
||||
{"name":"representable","version":"3.2.0","platform":"ruby","checksum":"cc29bf7eebc31653586849371a43ffe36c60b54b0a6365b5f7d95ec34d1ebace"},
|
||||
{"name":"request_store","version":"1.5.1","platform":"ruby","checksum":"07a204d161590789f2b1d27f9f0eadcdecd6d868cb2f03240250e1bc747df78e"},
|
||||
{"name":"request_store","version":"1.7.0","platform":"ruby","checksum":"e1b75d5346a315f452242a68c937ef8e48b215b9453a77a6c0acdca2934c88cb"},
|
||||
{"name":"responders","version":"3.0.1","platform":"ruby","checksum":"613fe28e498987f4feaa3230aa6313ca4bd5f0563a3da83511b0dd6cd8f47292"},
|
||||
{"name":"rest-client","version":"2.1.0","platform":"ruby","checksum":"35a6400bdb14fae28596618e312776c158f7ebbb0ccad752ff4fa142bf2747e3"},
|
||||
{"name":"rest-client","version":"2.1.0","platform":"x64-mingw32","checksum":"7cd156496196d90b7d8f5b8de521ef67d8a9e03f06862da80b9b5912ab05a470"},
|
||||
|
|
|
|||
|
|
@ -1593,7 +1593,7 @@ GEM
|
|||
declarative (< 0.1.0)
|
||||
trailblazer-option (>= 0.1.1, < 0.2.0)
|
||||
uber (< 0.2.0)
|
||||
request_store (1.5.1)
|
||||
request_store (1.7.0)
|
||||
rack (>= 1.4)
|
||||
responders (3.0.1)
|
||||
actionpack (>= 5.0)
|
||||
|
|
@ -2282,7 +2282,7 @@ DEPENDENCIES
|
|||
redis (~> 5.4.0)
|
||||
redis-actionpack (~> 5.5.0)
|
||||
redis-clustering (~> 5.4.0)
|
||||
request_store (~> 1.5.1)
|
||||
request_store (~> 1.7.0)
|
||||
responders (~> 3.0)
|
||||
retriable (~> 3.1.2)
|
||||
rexml (~> 3.4.0)
|
||||
|
|
|
|||
|
|
@ -607,7 +607,7 @@
|
|||
{"name":"regexp_property_values","version":"1.0.0","platform":"ruby","checksum":"162499dc0bba1e66d334273a059f207a61981cc8cc69d2ca743594e7886d080f"},
|
||||
{"name":"reline","version":"0.6.0","platform":"ruby","checksum":"57620375dcbe56ec09bac7192bfb7460c716bbf0054dc94345ecaa5438e539d2"},
|
||||
{"name":"representable","version":"3.2.0","platform":"ruby","checksum":"cc29bf7eebc31653586849371a43ffe36c60b54b0a6365b5f7d95ec34d1ebace"},
|
||||
{"name":"request_store","version":"1.5.1","platform":"ruby","checksum":"07a204d161590789f2b1d27f9f0eadcdecd6d868cb2f03240250e1bc747df78e"},
|
||||
{"name":"request_store","version":"1.7.0","platform":"ruby","checksum":"e1b75d5346a315f452242a68c937ef8e48b215b9453a77a6c0acdca2934c88cb"},
|
||||
{"name":"responders","version":"3.0.1","platform":"ruby","checksum":"613fe28e498987f4feaa3230aa6313ca4bd5f0563a3da83511b0dd6cd8f47292"},
|
||||
{"name":"rest-client","version":"2.1.0","platform":"ruby","checksum":"35a6400bdb14fae28596618e312776c158f7ebbb0ccad752ff4fa142bf2747e3"},
|
||||
{"name":"rest-client","version":"2.1.0","platform":"x64-mingw32","checksum":"7cd156496196d90b7d8f5b8de521ef67d8a9e03f06862da80b9b5912ab05a470"},
|
||||
|
|
|
|||
|
|
@ -1625,7 +1625,7 @@ GEM
|
|||
declarative (< 0.1.0)
|
||||
trailblazer-option (>= 0.1.1, < 0.2.0)
|
||||
uber (< 0.2.0)
|
||||
request_store (1.5.1)
|
||||
request_store (1.7.0)
|
||||
rack (>= 1.4)
|
||||
responders (3.0.1)
|
||||
actionpack (>= 5.0)
|
||||
|
|
@ -2316,7 +2316,7 @@ DEPENDENCIES
|
|||
redis (~> 5.4.0)
|
||||
redis-actionpack (~> 5.5.0)
|
||||
redis-clustering (~> 5.4.0)
|
||||
request_store (~> 1.5.1)
|
||||
request_store (~> 1.7.0)
|
||||
responders (~> 3.0)
|
||||
retriable (~> 3.1.2)
|
||||
rexml (~> 3.4.0)
|
||||
|
|
|
|||
|
|
@ -51,7 +51,7 @@ export default {
|
|||
class="gl-border-0 gl-bg-transparent gl-p-0 gl-outline-none"
|
||||
:aria-label="statusConfig.label"
|
||||
>
|
||||
<gl-badge :variant="statusConfig.variant">
|
||||
<gl-badge :icon="statusConfig.icon" :variant="statusConfig.variant">
|
||||
{{ statusConfig.label }}
|
||||
</gl-badge>
|
||||
</button>
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ export const verificationStatuses = {
|
|||
MULTIPLE_SIGNATURES: 'MULTIPLE_SIGNATURES',
|
||||
REVOKED_KEY: 'REVOKED_KEY',
|
||||
VERIFIED_SYSTEM: 'VERIFIED_SYSTEM',
|
||||
UNVERIFIED_AUTHOR_EMAIL: 'UNVERIFIED_AUTHOR_EMAIL',
|
||||
};
|
||||
|
||||
export const signatureTypes = {
|
||||
|
|
@ -29,6 +30,13 @@ const UNVERIFIED_CONFIG = {
|
|||
description: __('This commit was signed with an unverified signature.'),
|
||||
};
|
||||
|
||||
export const REVERIFIED_CONFIG = {
|
||||
variant: 'warning',
|
||||
icon: 'warning',
|
||||
label: __('Verified'),
|
||||
title: __('Verified commit with unverified email'),
|
||||
};
|
||||
|
||||
export const VERIFIED_CONFIG = {
|
||||
variant: 'success',
|
||||
label: __('Verified'),
|
||||
|
|
@ -42,6 +50,12 @@ export const statusConfig = {
|
|||
'This commit was signed with a verified signature and the committer email was verified to belong to the same user.',
|
||||
),
|
||||
},
|
||||
[verificationStatuses.UNVERIFIED_AUTHOR_EMAIL]: {
|
||||
...REVERIFIED_CONFIG,
|
||||
description: __(
|
||||
'This commit was previously signed with a verified signature and verified committer email address. However the committer email address is no longer verified to the same user.',
|
||||
),
|
||||
},
|
||||
[verificationStatuses.VERIFIED_SYSTEM]: {
|
||||
...VERIFIED_CONFIG,
|
||||
description: __(
|
||||
|
|
|
|||
|
|
@ -3,14 +3,18 @@ import initBlob from '~/pages/projects/init_blob';
|
|||
import redirectToCorrectPage from '~/blame/blame_redirect';
|
||||
import BlamePreferences from '~/blame/preferences/blame_preferences.vue';
|
||||
import { initFindFileShortcut } from '~/projects/behaviors';
|
||||
import { parseBoolean } from '~/lib/utils/common_utils';
|
||||
|
||||
const initBlamePreferences = () => {
|
||||
const el = document.getElementById('js-blame-preferences');
|
||||
if (!el) return false;
|
||||
|
||||
const { hasRevsFile } = el.dataset;
|
||||
|
||||
return new Vue({
|
||||
el,
|
||||
render: (createElement) => createElement(BlamePreferences, { props: { hasRevsFile: true } }), // TODO: replace `hasRevsFile` with real data once API is ready
|
||||
render: (createElement) =>
|
||||
createElement(BlamePreferences, { props: { hasRevsFile: parseBoolean(hasRevsFile) } }),
|
||||
});
|
||||
};
|
||||
|
||||
|
|
|
|||
|
|
@ -176,7 +176,7 @@
|
|||
}
|
||||
|
||||
.job-log-line-number {
|
||||
@apply gl-text-subtle;
|
||||
@apply gl-text-neutral-400 dark:gl-text-neutral-300;
|
||||
padding-right: $gl-padding-8;
|
||||
margin-right: $gl-padding-8;
|
||||
min-width: $job-line-number-width;
|
||||
|
|
@ -187,14 +187,14 @@
|
|||
&:active,
|
||||
&:visited {
|
||||
text-decoration: underline;
|
||||
@apply gl-text-subtle;
|
||||
@apply gl-text-neutral-400 dark:gl-text-neutral-300;
|
||||
}
|
||||
}
|
||||
|
||||
.job-log-time {
|
||||
padding-right: $gl-padding-8;
|
||||
margin-right: $gl-padding-8;
|
||||
@apply gl-text-subtle;
|
||||
@apply gl-text-neutral-400 dark:gl-text-neutral-300;
|
||||
user-select: none;
|
||||
flex-shrink: 0;
|
||||
|
||||
|
|
|
|||
|
|
@ -10,7 +10,8 @@ module Types
|
|||
|
||||
field :verification_status, CommitSignatures::VerificationStatusEnum,
|
||||
null: true,
|
||||
description: 'Indicates verification status of the associated key or certificate.'
|
||||
description: 'Indicates verification status of the associated key or certificate.',
|
||||
calls_gitaly: true
|
||||
|
||||
field :commit_sha, GraphQL::Types::String,
|
||||
null: true,
|
||||
|
|
|
|||
|
|
@ -32,4 +32,10 @@ module BlameHelper
|
|||
def entire_blame_path(id, project)
|
||||
namespace_project_blame_streaming_path(namespace_id: project.namespace, project_id: project, id: id)
|
||||
end
|
||||
|
||||
def blame_preferences(project)
|
||||
{
|
||||
has_revs_file: (!project.repository.ignore_revs_file_blob.nil?).to_json
|
||||
}
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -42,13 +42,16 @@ module CommitSignature
|
|||
raise NoMethodError, 'must implement `signed_by_user` method'
|
||||
end
|
||||
|
||||
def reverified_status
|
||||
return verification_status unless Feature.enabled?(:check_for_mailmapped_commit_emails, project)
|
||||
return verification_status unless verified? || verified_system?
|
||||
# If commit is persisted as verified, check that commit email is still correct.
|
||||
def verification_status
|
||||
persisted_status = read_attribute(:verification_status)
|
||||
return persisted_status unless Feature.enabled?(:check_for_mailmapped_commit_emails, project)
|
||||
return persisted_status unless verified? || verified_system?
|
||||
return persisted_status unless commit
|
||||
|
||||
return 'unverified_author_email' if emails_for_verification&.exclude?(commit&.author_email)
|
||||
return 'unverified_author_email' if emails_for_verification&.exclude?(commit.author_email)
|
||||
|
||||
verification_status
|
||||
persisted_status
|
||||
end
|
||||
|
||||
private
|
||||
|
|
|
|||
|
|
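For context on how the renamed method is consumed, a minimal sketch (not part of the diff): the signature badge partial updated later in this commit resolves its partial name from the same status string, so the `unverified_author_email` fallback above flows straight through to the UI. The stand-in `Signature` struct below is illustrative only.

```ruby
# Illustrative sketch, not from the commit: shows how the status string returned by
# verification_status drives the badge partial lookup, mirroring the
# _signature_badge partial change later in this commit.
Signature = Struct.new(:verification_status)

signature = Signature.new('unverified_author_email')
partial = "projects/commit/#{signature.verification_status}_signature_badge"
puts partial # => projects/commit/unverified_author_email_signature_badge
```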
@ -11,7 +11,8 @@ module Enums
|
|||
unknown_key: 5,
|
||||
multiple_signatures: 6,
|
||||
revoked_key: 7,
|
||||
verified_system: 8
|
||||
verified_system: 8,
|
||||
unverified_author_email: 9
|
||||
# EE adds more values in ee/app/models/concerns/ee/enums/commit_signature.rb
|
||||
}.freeze
|
||||
|
||||
|
|
|
|||
|
|
@ -1344,6 +1344,13 @@ class Repository
|
|||
end
|
||||
end
|
||||
|
||||
def ignore_revs_file_blob
|
||||
return unless Feature.enabled?(:blame_ignore_revs, project)
|
||||
return unless project&.default_branch
|
||||
|
||||
blob_at(project.default_branch, Gitlab::Blame::IGNORE_REVS_FILE_NAME, limit: 0)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# Increase the limit by number of excluded refs
|
||||
|
|
|
|||
|
|
@ -15,11 +15,8 @@ module Clusters
|
|||
|
||||
def execute
|
||||
filtered_authorizations = filter_by_environment(authorizations)
|
||||
if Feature.enabled?(:kubernetes_agent_protected_branches, project)
|
||||
filtered_authorizations = filter_protected_ref(filtered_authorizations)
|
||||
end
|
||||
|
||||
filtered_authorizations
|
||||
filter_protected_ref(filtered_authorizations)
|
||||
end
|
||||
|
||||
private
|
||||
|
|
|
|||
|
|
@ -38,7 +38,7 @@
|
|||
= s_("Blame|Older")
|
||||
|
||||
- if Feature.enabled?(:blame_ignore_revs, @project)
|
||||
#js-blame-preferences
|
||||
#js-blame-preferences{ data: blame_preferences(@project) }
|
||||
|
||||
.table-responsive.blame-table
|
||||
.blame-table-wrapper
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
- if signature
|
||||
- uri = "projects/commit/#{'x509/' if signature.x509?}"
|
||||
= render partial: "#{uri}#{signature.reverified_status}_signature_badge", locals: { signature: signature }
|
||||
= render partial: "#{uri}#{signature.verification_status}_signature_badge", locals: { signature: signature }
|
||||
- else
|
||||
= render partial: 'projects/commit/unverified_signature_badge', locals: { signature: nil }
|
||||
|
|
|
|||
|
|
@ -1,9 +0,0 @@
|
|||
---
|
||||
name: kubernetes_agent_protected_branches
|
||||
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/388323
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156626
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/517093
|
||||
milestone: '17.3'
|
||||
group: group::environments
|
||||
type: beta
|
||||
default_enabled: false
|
||||
|
|
@ -729,6 +729,10 @@ vulnerability_namespace_historical_statistics:
|
|||
- table: namespaces
|
||||
column: namespace_id
|
||||
on_delete: async_delete
|
||||
vulnerability_namespace_statistics:
|
||||
- table: namespaces
|
||||
column: namespace_id
|
||||
on_delete: async_delete
|
||||
vulnerability_occurrences:
|
||||
- table: ci_pipelines
|
||||
column: initial_pipeline_id
|
||||
|
|
|
|||
|
|
@ -0,0 +1,12 @@
|
|||
---
|
||||
table_name: vulnerability_namespace_statistics
|
||||
classes:
|
||||
- Vulnerabilities::NamespaceStatistic
|
||||
feature_categories:
|
||||
- security_asset_inventories
|
||||
description: Stores vulnerability statistics per namespace
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/183138
|
||||
milestone: '17.10'
|
||||
gitlab_schema: gitlab_sec
|
||||
sharding_key:
|
||||
namespace_id: namespaces
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class CreateVulnerabilityNamespaceStatistics < Gitlab::Database::Migration[2.2]
|
||||
milestone '17.10'
|
||||
|
||||
TRAVERSAL_IDS_INDEX_NAME = 'index_vuln_namespace_statistics_gin_traversal_ids'
|
||||
NAMESPACE_ID_FK_INDEX_NAME = 'index_vuln_namespace_statistics_on_namespace_id'
|
||||
|
||||
def up
|
||||
# rubocop:disable Migration/EnsureFactoryForTable -- False Positive
|
||||
create_table :vulnerability_namespace_statistics do |t|
|
||||
t.timestamps_with_timezone null: false
|
||||
t.bigint :namespace_id, null: false, index: { name: NAMESPACE_ID_FK_INDEX_NAME, unique: true }
|
||||
t.integer :total, default: 0, null: false
|
||||
t.integer :critical, default: 0, null: false
|
||||
t.integer :high, default: 0, null: false
|
||||
t.integer :medium, default: 0, null: false
|
||||
t.integer :low, default: 0, null: false
|
||||
t.integer :unknown, default: 0, null: false
|
||||
t.integer :info, default: 0, null: false
|
||||
t.bigint :traversal_ids, array: true, default: [], null: false
|
||||
t.index :traversal_ids, using: :gin, name: TRAVERSAL_IDS_INDEX_NAME
|
||||
end
|
||||
# rubocop:enable Migration/EnsureFactoryForTable
|
||||
end
|
||||
|
||||
def down
|
||||
drop_table :vulnerability_namespace_statistics
|
||||
end
|
||||
end
|
||||
|
|
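As a usage note (an assumption about how the new table is meant to be queried, not code from the commit): the `db/docs` entry earlier in this commit names the model `Vulnerabilities::NamespaceStatistic`, and the GIN index on `traversal_ids` created above supports array-containment lookups over a namespace subtree, for example:

```ruby
# Assumption: model name taken from the db/docs entry in this commit; the query
# pattern is illustrative. The GIN index on traversal_ids serves the @> containment
# operator used here; `namespace` is any namespace record.
Vulnerabilities::NamespaceStatistic
  .where('traversal_ids @> ARRAY[?]::bigint[]', namespace.id)
```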
@ -0,0 +1,18 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class IncreaseDuoWorkflowEventsMessageLimit < Gitlab::Database::Migration[2.2]
|
||||
disable_ddl_transaction!
|
||||
|
||||
milestone '17.10'
|
||||
|
||||
def up
|
||||
add_text_limit :duo_workflows_events, :message, 4096,
|
||||
constraint_name: check_constraint_name(:duo_workflows_events, :message, 'max_length_4K')
|
||||
remove_text_limit :duo_workflows_events, :message,
|
||||
constraint_name: check_constraint_name(:duo_workflows_events, :message, 'max_length')
|
||||
end
|
||||
|
||||
def down
|
||||
# no-op: Danger of failing if there are records with length(message) > 255
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1 @@
|
|||
f8955bd71299a40b0fb228f60d44a082b5438705507d2f7bb59e995a2f826905
|
||||
|
|
@ -0,0 +1 @@
|
|||
731fc4f0705c0586cfe3599b510cdc5309292049c9879f2f543b3782c30ce128
|
||||
|
|
@ -13339,7 +13339,7 @@ CREATE TABLE duo_workflows_events (
|
|||
message text,
|
||||
correlation_id_value text,
|
||||
CONSTRAINT check_5e35596b00 CHECK ((char_length(correlation_id_value) <= 128)),
|
||||
CONSTRAINT check_d96965e118 CHECK ((char_length(message) <= 255))
|
||||
CONSTRAINT check_9422e6deb0 CHECK ((char_length(message) <= 4096))
|
||||
);
|
||||
|
||||
CREATE SEQUENCE duo_workflows_events_id_seq
|
||||
|
|
@ -23794,6 +23794,30 @@ CREATE SEQUENCE vulnerability_namespace_historical_statistics_id_seq
|
|||
|
||||
ALTER SEQUENCE vulnerability_namespace_historical_statistics_id_seq OWNED BY vulnerability_namespace_historical_statistics.id;
|
||||
|
||||
CREATE TABLE vulnerability_namespace_statistics (
|
||||
id bigint NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
updated_at timestamp with time zone NOT NULL,
|
||||
namespace_id bigint NOT NULL,
|
||||
total integer DEFAULT 0 NOT NULL,
|
||||
critical integer DEFAULT 0 NOT NULL,
|
||||
high integer DEFAULT 0 NOT NULL,
|
||||
medium integer DEFAULT 0 NOT NULL,
|
||||
low integer DEFAULT 0 NOT NULL,
|
||||
unknown integer DEFAULT 0 NOT NULL,
|
||||
info integer DEFAULT 0 NOT NULL,
|
||||
traversal_ids bigint[] DEFAULT '{}'::bigint[] NOT NULL
|
||||
);
|
||||
|
||||
CREATE SEQUENCE vulnerability_namespace_statistics_id_seq
|
||||
START WITH 1
|
||||
INCREMENT BY 1
|
||||
NO MINVALUE
|
||||
NO MAXVALUE
|
||||
CACHE 1;
|
||||
|
||||
ALTER SEQUENCE vulnerability_namespace_statistics_id_seq OWNED BY vulnerability_namespace_statistics.id;
|
||||
|
||||
CREATE TABLE vulnerability_occurrence_identifiers (
|
||||
id bigint NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
|
|
@ -26517,6 +26541,8 @@ ALTER TABLE ONLY vulnerability_merge_request_links ALTER COLUMN id SET DEFAULT n
|
|||
|
||||
ALTER TABLE ONLY vulnerability_namespace_historical_statistics ALTER COLUMN id SET DEFAULT nextval('vulnerability_namespace_historical_statistics_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY vulnerability_namespace_statistics ALTER COLUMN id SET DEFAULT nextval('vulnerability_namespace_statistics_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY vulnerability_occurrence_identifiers ALTER COLUMN id SET DEFAULT nextval('vulnerability_occurrence_identifiers_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY vulnerability_occurrences ALTER COLUMN id SET DEFAULT nextval('vulnerability_occurrences_id_seq'::regclass);
|
||||
|
|
@ -29551,6 +29577,9 @@ ALTER TABLE ONLY vulnerability_merge_request_links
|
|||
ALTER TABLE ONLY vulnerability_namespace_historical_statistics
|
||||
ADD CONSTRAINT vulnerability_namespace_historical_statistics_pkey PRIMARY KEY (id);
|
||||
|
||||
ALTER TABLE ONLY vulnerability_namespace_statistics
|
||||
ADD CONSTRAINT vulnerability_namespace_statistics_pkey PRIMARY KEY (id);
|
||||
|
||||
ALTER TABLE ONLY vulnerability_occurrence_identifiers
|
||||
ADD CONSTRAINT vulnerability_occurrence_identifiers_pkey PRIMARY KEY (id);
|
||||
|
||||
|
|
@ -35826,6 +35855,10 @@ CREATE INDEX index_vuln_namespace_hist_statistics_for_traversal_ids_update ON vu
|
|||
|
||||
CREATE UNIQUE INDEX index_vuln_namespace_historical_statistics_traversal_ids_date ON vulnerability_namespace_historical_statistics USING btree (traversal_ids, date);
|
||||
|
||||
CREATE INDEX index_vuln_namespace_statistics_gin_traversal_ids ON vulnerability_namespace_statistics USING gin (traversal_ids);
|
||||
|
||||
CREATE UNIQUE INDEX index_vuln_namespace_statistics_on_namespace_id ON vulnerability_namespace_statistics USING btree (namespace_id);
|
||||
|
||||
CREATE INDEX index_vuln_reads_common_query_on_resolved_on_default_branch ON vulnerability_reads USING btree (project_id, state, report_type, vulnerability_id DESC) WHERE (resolved_on_default_branch IS TRUE);
|
||||
|
||||
CREATE INDEX index_vuln_reads_on_casted_cluster_agent_id_where_it_is_null ON vulnerability_reads USING btree (casted_cluster_agent_id) WHERE (casted_cluster_agent_id IS NOT NULL);
|
||||
|
|
|
|||
|
|
@ -54,7 +54,7 @@ the following:
|
|||
main: # 'main' is the GitLab 'provider ID' of this LDAP server
|
||||
label: 'LDAP'
|
||||
host: 'ldap.example.com'
|
||||
...
|
||||
# ...
|
||||
group_base: 'cn=my_group,ou=groups,dc=example,dc=com'
|
||||
admin_group: 'my_admin_group'
|
||||
```
|
||||
|
|
@ -363,14 +363,14 @@ memberof: cn=admin_staff,ou=people,dc=example,dc=com
|
|||
uid: John
|
||||
```
|
||||
|
||||
If the user wasn't found in LDAP with either the DN or email, you may see the
|
||||
If the user wasn't found in LDAP with either the DN or email, you might see the
|
||||
following message instead:
|
||||
|
||||
```shell
|
||||
LDAP search error: No Such Object
|
||||
```
|
||||
|
||||
...in which case the user is blocked:
|
||||
In this case, the user is blocked:
|
||||
|
||||
```shell
|
||||
User Update (0.4ms) UPDATE "users" SET "state" = $1, "updated_at" = $2 WHERE "users"."id" = $3 [["state", "ldap_blocked"], ["updated_at", "2019-10-18 15:46:22.902177"], ["id", 20]]
|
||||
|
|
|
|||
|
|
@ -833,7 +833,7 @@ Example configuration for self-compiled installations (file path: `config/gitlab
|
|||
label: 'Casdoor', # optional label for login button, defaults to "Openid Connect"
|
||||
args: {
|
||||
name: 'openid_connect',
|
||||
scope: ['openid','profile','email'],
|
||||
scope: ['openid', 'profile', 'email'],
|
||||
response_type: 'code',
|
||||
issuer: 'https://<CASDOOR_HOSTNAME>',
|
||||
discovery: true,
|
||||
|
|
@ -919,7 +919,7 @@ For self-compiled installations:
|
|||
args: {
|
||||
name: 'openid_connect',
|
||||
strategy_class: "OmniAuth::Strategies::OpenIDConnect",
|
||||
scope: ['openid','profile','email'],
|
||||
scope: ['openid', 'profile', 'email'],
|
||||
response_type: 'code',
|
||||
issuer: '<your_oidc_url>',
|
||||
discovery: true,
|
||||
|
|
@ -940,7 +940,7 @@ For self-compiled installations:
|
|||
args: {
|
||||
name: 'openid_connect_2fa',
|
||||
strategy_class: "OmniAuth::Strategies::OpenIDConnect",
|
||||
scope: ['openid','profile','email'],
|
||||
scope: ['openid', 'profile', 'email'],
|
||||
response_type: 'code',
|
||||
issuer: '<your_oidc_url>',
|
||||
discovery: true,
|
||||
|
|
|
|||
|
|
@ -67,7 +67,7 @@ The following example is a `gitlab.rb` configuration for Linux package installat
|
|||
```ruby
|
||||
gitlab_rails['ldap_servers'] = {
|
||||
'main' => {
|
||||
# ... other LDAP settings ...
|
||||
# Additional LDAP settings
|
||||
'duo_add_on_groups' => ['duo_users', 'admins'],
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -66,7 +66,7 @@ Example response:
|
|||
"created": 1739421415,
|
||||
"owned_by": "vllm",
|
||||
"root": "mistralai/Mixtral-8x22B-Instruct-v0.1",
|
||||
// ... other fields ...
|
||||
// Additional fields
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -79,6 +79,6 @@ All trademarks, materials, documentation, and other intellectual property remain
|
|||
Use of GitLab Trademarks must be in compliance with the standards set forth in [our guidelines](https://handbook.gitlab.com/handbook/marketing/brand-and-product-marketing/brand/brand-activation/trademark-guidelines/) (as updated from time to time).
|
||||
CHEF® and all Chef marks are owned by Progress Software Corporation and must be used in accordance with the [Progress Software Trademark Usage Policy](https://www.progress.com/legal/trademarks).
|
||||
|
||||
When using a GitLab or 3rd party trademark in documentation, include the (R) symbol in the first instance, for example, "Chef(R) is used for configuring...." You may omit the symbol in subsequent instances.
|
||||
When using a GitLab or third-party trademark in documentation, include the (R) symbol in the first instance, for example, "Chef(R) is used for configuring…" You may omit the symbol in subsequent instances.
|
||||
|
||||
If a trademark owner requires a particular notice or trademark requirement, such notice or requirement should be stated above.
|
||||
|
|
|
|||
|
|
@ -60,9 +60,9 @@ global:
|
|||
dependencyProxy:
|
||||
enabled: false
|
||||
bucket: gitlab-dependency-proxy
|
||||
connection: {}
|
||||
secret:
|
||||
key:
|
||||
connection:
|
||||
secret:
|
||||
key:
|
||||
```
|
||||
|
||||
For more information, see [Configure Charts using Globals](https://docs.gitlab.com/charts/charts/globals.html#configure-appconfig-settings).
|
||||
|
|
|
|||
|
|
@ -90,7 +90,7 @@ main:
|
|||
host: '_your_ldap_server'
|
||||
port: 389
|
||||
uid: 'sAMAccountName'
|
||||
...
|
||||
# ...
|
||||
```
|
||||
|
||||
`main` is the LDAP server ID. Together, the unique provider is `ldapmain`.
|
||||
|
|
|
|||
|
|
@ -258,7 +258,7 @@ docker run --cap-add SYS_PTRACE [...]
|
|||
```yaml
|
||||
services:
|
||||
ruby_container_name:
|
||||
...
|
||||
# ...
|
||||
cap_add:
|
||||
- SYS_PTRACE
|
||||
```
|
||||
|
|
|
|||
|
|
@ -40,6 +40,8 @@ Example response:
|
|||
{
|
||||
"id" : 1,
|
||||
"name": "board1",
|
||||
"hide_backlog_list": false,
|
||||
"hide_closed_list": false,
|
||||
"project": {
|
||||
"id": 5,
|
||||
"name": "Diaspora Project Site",
|
||||
|
|
@ -124,6 +126,8 @@ Example response:
|
|||
{
|
||||
"id": 1,
|
||||
"name": "project issue board",
|
||||
"hide_backlog_list": false,
|
||||
"hide_closed_list": false,
|
||||
"project": {
|
||||
"id": 5,
|
||||
"name": "Diaspora Project Site",
|
||||
|
|
@ -200,6 +204,9 @@ Example response:
|
|||
```json
|
||||
{
|
||||
"id": 1,
|
||||
"name": "newboard",
|
||||
"hide_backlog_list": false,
|
||||
"hide_closed_list": false,
|
||||
"project": {
|
||||
"id": 5,
|
||||
"name": "Diaspora Project Site",
|
||||
|
|
@ -209,7 +216,6 @@ Example response:
|
|||
"http_url_to_repo": "http://example.com/diaspora/diaspora-project-site.git",
|
||||
"web_url": "http://example.com/diaspora/diaspora-project-site"
|
||||
},
|
||||
"name": "newboard",
|
||||
"lists" : [],
|
||||
"group": null,
|
||||
"milestone": null,
|
||||
|
|
@ -232,6 +238,8 @@ PUT /projects/:id/boards/:board_id
|
|||
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](rest/_index.md#namespaced-paths). |
|
||||
| `board_id` | integer | yes | The ID of a board. |
|
||||
| `name` | string | no | The new name of the board. |
|
||||
| `hide_backlog_list` | boolean | no | Hide the Open list. |
|
||||
| `hide_closed_list` | boolean | no | Hide the Closed list. |
|
||||
| `assignee_id` | integer | no | The assignee the board should be scoped to. Premium and Ultimate only. |
|
||||
| `milestone_id` | integer | no | The milestone the board should be scoped to. Premium and Ultimate only. |
|
||||
| `labels` | string | no | Comma-separated list of label names which the board should be scoped to. Premium and Ultimate only. |
|
||||
|
|
@ -246,6 +254,9 @@ Example response:
|
|||
```json
|
||||
{
|
||||
"id": 1,
|
||||
"name": "new_name",
|
||||
"hide_backlog_list": false,
|
||||
"hide_closed_list": false,
|
||||
"project": {
|
||||
"id": 5,
|
||||
"name": "Diaspora Project Site",
|
||||
|
|
@ -266,7 +277,6 @@ Example response:
|
|||
"last_activity_at": "2018-07-03T05:48:49.982Z"
|
||||
},
|
||||
"lists": [],
|
||||
"name": "new_name",
|
||||
"group": null,
|
||||
"milestone": {
|
||||
"id": 43,
|
||||
|
|
@ -427,6 +437,7 @@ POST /projects/:id/boards/:board_id/lists
|
|||
| `label_id` | integer | no | The ID of a label. |
|
||||
| `assignee_id` | integer | no | The ID of a user. Premium and Ultimate only. |
|
||||
| `milestone_id` | integer | no | The ID of a milestone. Premium and Ultimate only. |
|
||||
| `iteration_id` | integer | no | The ID of a iteration. Premium and Ultimate only. |
|
||||
|
||||
{{< alert type="note" >}}
|
||||
|
||||
|
|
|
|||
|
|
@ -44000,6 +44000,7 @@ Verification status of a GPG, X.509 or SSH signature for a commit.
|
|||
| <a id="verificationstatussame_user_different_email"></a>`SAME_USER_DIFFERENT_EMAIL` | same_user_different_email verification status. |
|
||||
| <a id="verificationstatusunknown_key"></a>`UNKNOWN_KEY` | unknown_key verification status. |
|
||||
| <a id="verificationstatusunverified"></a>`UNVERIFIED` | unverified verification status. |
|
||||
| <a id="verificationstatusunverified_author_email"></a>`UNVERIFIED_AUTHOR_EMAIL` | unverified_author_email verification status. |
|
||||
| <a id="verificationstatusunverified_key"></a>`UNVERIFIED_KEY` | unverified_key verification status. |
|
||||
| <a id="verificationstatusverified"></a>`VERIFIED` | verified verification status. |
|
||||
| <a id="verificationstatusverified_ca"></a>`VERIFIED_CA` | verified_ca verification status. |
|
||||
|
|
|
|||
|
|
@ -40,6 +40,8 @@ Example response:
|
|||
{
|
||||
"id": 1,
|
||||
"name": "group issue board",
|
||||
"hide_backlog_list": false,
|
||||
"hide_closed_list": false,
|
||||
"group": {
|
||||
"id": 5,
|
||||
"name": "Documentcloud",
|
||||
|
|
@ -92,6 +94,8 @@ Example response:
|
|||
{
|
||||
"id": 1,
|
||||
"name": "group issue board",
|
||||
"hide_backlog_list": false,
|
||||
"hide_closed_list": false,
|
||||
"group": {
|
||||
"id": 5,
|
||||
"name": "Documentcloud",
|
||||
|
|
@ -157,6 +161,8 @@ Example response:
|
|||
{
|
||||
"id": 1,
|
||||
"name": "group issue board",
|
||||
"hide_backlog_list": false,
|
||||
"hide_closed_list": false,
|
||||
"group": {
|
||||
"id": 5,
|
||||
"name": "Documentcloud",
|
||||
|
|
@ -207,6 +213,8 @@ Example response:
|
|||
{
|
||||
"id": 1,
|
||||
"name": "group issue board",
|
||||
"hide_backlog_list": false,
|
||||
"hide_closed_list": false,
|
||||
"group": {
|
||||
"id": 5,
|
||||
"name": "Documentcloud",
|
||||
|
|
@ -278,6 +286,8 @@ Example response:
|
|||
{
|
||||
"id": 1,
|
||||
"name": "newboard",
|
||||
"hide_backlog_list": false,
|
||||
"hide_closed_list": false,
|
||||
"project": null,
|
||||
"lists" : [],
|
||||
"group": {
|
||||
|
|
@ -321,9 +331,11 @@ Example response:
|
|||
```json
|
||||
{
|
||||
"id": 1,
|
||||
"name": "new_name",
|
||||
"hide_backlog_list": false,
|
||||
"hide_closed_list": false,
|
||||
"project": null,
|
||||
"lists": [],
|
||||
"name": "new_name",
|
||||
"group": {
|
||||
"id": 5,
|
||||
"name": "Documentcloud",
|
||||
|
|
@ -480,10 +492,13 @@ POST /groups/:id/boards/:board_id/lists
|
|||
| --------- | ---- | -------- | ----------- |
|
||||
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](rest/_index.md#namespaced-paths). |
|
||||
| `board_id` | integer | yes | The ID of a board. |
|
||||
| `label_id` | integer | yes | The ID of a label. |
|
||||
| `label_id` | integer | no | The ID of a label. |
|
||||
| `assignee_id` | integer | no | The ID of a user. Premium and Ultimate only. |
|
||||
| `milestone_id` | integer | no | The ID of a milestone. Premium and Ultimate only. |
|
||||
| `iteration_id` | integer | no | The ID of a iteration. Premium and Ultimate only. |
|
||||
|
||||
```shell
|
||||
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/4/boards/12/lists?milestone_id=7"
|
||||
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/5/boards/12/lists?milestone_id=7"
|
||||
```
|
||||
|
||||
Example response:
|
||||
|
|
|
|||
|
|
@ -46,6 +46,8 @@ Example response:
|
|||
{
|
||||
"id": 1,
|
||||
"name": "group epic board",
|
||||
"hide_backlog_list": false,
|
||||
"hide_closed_list": false,
|
||||
"group": {
|
||||
"id": 5,
|
||||
"name": "Documentcloud",
|
||||
|
|
@ -129,6 +131,8 @@ Example response:
|
|||
{
|
||||
"id": 1,
|
||||
"name": "group epic board",
|
||||
"hide_backlog_list": false,
|
||||
"hide_closed_list": false,
|
||||
"group": {
|
||||
"id": 5,
|
||||
"name": "Documentcloud",
|
||||
|
|
|
|||
|
|
@ -32,17 +32,16 @@ Prerequisites:
|
|||
There are a few ways to view a list of environments for a given project:
|
||||
|
||||
- On the project's overview page, if at least one environment is available (that is, not stopped).
|
||||

|
||||

|
||||
|
||||
- On the left sidebar, select **Operate > Environments**.
|
||||
The environments are displayed.
|
||||
|
||||

|
||||

|
||||
|
||||
- To view a list of deployments for an environment, select the environment name,
|
||||
for example, `staging`.
|
||||
|
||||

|
||||

|
||||
|
||||
Deployments show up in this list only after a deployment job has created them.
|
||||
|
||||
|
|
|
|||
|
|
@ -383,20 +383,15 @@ With this configuration, GitLab adds **artifact 1** as a link to `file.txt` to t
|
|||
|
||||
{{< /history >}}
|
||||
|
||||
By default artifacts are always kept for successful pipelines for the most recent commit on each ref.
|
||||
Any [`expire_in`](#with-an-expiry) configuration does not apply to the most recent artifacts.
|
||||
By default, artifacts are always kept for the most recent successful pipeline on each ref. Any `expire_in` configuration does not apply to the most recent artifacts.
|
||||
|
||||
A pipeline's artifacts are only deleted according to the `expire_in` configuration
|
||||
if a new pipeline runs for the same ref and:
|
||||
When a new pipeline on the same ref completes successfully, the previous pipeline's artifacts are deleted according to the `expire_in` configuration. The artifacts of the new pipeline are kept automatically.
|
||||
|
||||
A pipeline’s artifacts are only deleted according to the `expire_in` configuration if a new pipeline runs for the same ref and:
|
||||
|
||||
- Succeeds.
|
||||
- Fails.
|
||||
- Stops running due to being blocked by a manual job.
|
||||
|
||||
Additionally, artifacts are kept for the ref's last successful pipeline even if it
|
||||
is not the latest pipeline. As a result, if a new pipeline run fails, the last successful pipeline's
|
||||
artifacts are still kept.
|
||||
|
||||
Keeping the latest artifacts can use a large amount of storage space in projects
|
||||
with a lot of jobs or large artifacts. If the latest artifacts are not needed in
|
||||
a project, you can disable this behavior to save space:
|
||||
|
|
|
|||
|
|
@ -521,15 +521,15 @@ For example, in a Bamboo build plan:
|
|||
```yaml
|
||||
version: 2
|
||||
# ...
|
||||
Build:
|
||||
# ...
|
||||
artifacts:
|
||||
-
|
||||
name: Test Reports
|
||||
- name: Test Reports
|
||||
location: target/reports
|
||||
pattern: '*.xml'
|
||||
required: false
|
||||
shared: false
|
||||
-
|
||||
name: Special Reports
|
||||
- name: Special Reports
|
||||
location: target/reports
|
||||
pattern: 'special/*.xml'
|
||||
shared: true
|
||||
|
|
@ -543,8 +543,7 @@ for example:
|
|||
```yaml
|
||||
Test app:
|
||||
artifact-subscriptions:
|
||||
-
|
||||
artifact: Test Reports
|
||||
- artifact: Test Reports
|
||||
destination: deploy
|
||||
```
|
||||
|
||||
|
|
@ -553,6 +552,8 @@ Test app:
|
|||
```yaml
|
||||
version: 2
|
||||
# ...
|
||||
Build:
|
||||
# ...
|
||||
tasks:
|
||||
- artifact-download:
|
||||
source-plan: PROJECTKEY-PLANKEY
|
||||
|
|
|
|||
|
|
@ -53,7 +53,7 @@ To ensure that a `deploy` job runs once at a time, you can specify
|
|||
|
||||
```yaml
|
||||
deploy:
|
||||
...
|
||||
# ...
|
||||
resource_group: production
|
||||
```
|
||||
|
||||
|
|
|
|||
|
|
@ -1063,7 +1063,7 @@ The following fields are populated by default:
|
|||
|
||||
An example of provenance metadata that the GitLab Runner might generate is as follows:
|
||||
|
||||
```yaml
|
||||
```json
|
||||
{
|
||||
"_type": "https://in-toto.io/Statement/v0.1",
|
||||
"predicateType": "https://slsa.dev/provenance/v1",
|
||||
|
|
|
|||
|
|
@ -223,7 +223,7 @@ When several CI/CD jobs run concurrently, the fair usage algorithm assigns jobs
|
|||
1. Job 6 is next, because 6 is now the lowest job number from projects with no running jobs (Projects 1 and 2 have jobs running).
|
||||
1. Job 2 is next, because, of projects with the lowest number of jobs running (each has 1), it is the lowest job number.
|
||||
1. Job 5 is next, because Project 1 now has 2 jobs running and Job 5 is the lowest remaining job number between Projects 2 and 3.
|
||||
1. Finally is Job 3... because it's the only job left.
|
||||
1. Finally is Job 3 because it's the only job left.
|
||||
|
||||
When only one job runs at a time, the fair usage algorithm assigns jobs in this order:
|
||||
|
||||
|
|
@ -234,7 +234,7 @@ When only one job runs at a time, the fair usage algorithm assigns jobs in this
|
|||
1. We finish Job 4.
|
||||
1. Job 5 is next, because having finished Job 4, Project 2 has no jobs running again.
|
||||
1. Job 6 is next, because Project 3 is the only project left with no running jobs.
|
||||
1. Lastly we choose Job 3... because, again, it's the only job left.
|
||||
1. Lastly we choose Job 3, because, again, it's the only job left.
|
||||
|
||||
## Group runners
|
||||
|
||||
|
|
|
|||
|
|
@ -454,7 +454,7 @@ when you include the file. For example:
|
|||
include:
|
||||
- component: $CI_SERVER_FQDN/project/path/component@1.0.0
|
||||
inputs:
|
||||
test_job_needs: [ my-other-job ]
|
||||
test_job_needs: [my-other-job]
|
||||
|
||||
my-other-job:
|
||||
script:
|
||||
|
|
|
|||
|
|
@ -111,7 +111,7 @@ workflow:
|
|||
rules:
|
||||
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_PIPELINE_SOURCE == "push"
|
||||
when: never
|
||||
- ... # Previously defined workflow rules here
|
||||
- # Previously defined workflow rules here
|
||||
```
|
||||
|
||||
[Triggered pipelines](../triggers/_index.md) that run on a branch have a `$CI_COMMIT_BRANCH`
|
||||
|
|
|
|||
|
|
@ -182,7 +182,7 @@ table_name: security_findings
|
|||
classes:
|
||||
- Security::Finding
|
||||
|
||||
...
|
||||
# ...
|
||||
|
||||
desired_sharding_key:
|
||||
project_id:
|
||||
|
|
|
|||
|
|
@ -70,7 +70,7 @@ steps:
|
|||
widget: text
|
||||
target: $BUILD_IMAGE
|
||||
required: true
|
||||
pattern: "^(?:(?=[^:\/]{1,253})(?!-)[a-zA-Z0-9-]{1,63}(?<!-)(?:\.(?!-)[a-zA-Z0-9-]{1,63}(?<!-))*(?::[0-9]{1,5})?\/)?((?![._-])(?:[a-z0-9._-]*)(?<![._-])(?:\/(?![._-])[a-z0-9._-]*(?<![._-]))*)(?::(?![.-])[a-zA-Z0-9_.-]{1,128})?$"
|
||||
pattern: '^(?:(?=[^:\/]{1,253})(?!-)[a-zA-Z0-9-]{1,63}(?<!-)(?:\.(?!-)[a-zA-Z0-9-]{1,63}(?<!-))*(?::[0-9]{1,5})?\/)?((?![._-])(?:[a-z0-9._-]*)(?<![._-])(?:\/(?![._-])[a-z0-9._-]*(?<![._-]))*)(?::(?![.-])[a-zA-Z0-9_.-]{1,128})?$'
|
||||
invalid-feedback: Please enter a valid docker image
|
||||
|
||||
# Second input widget
|
||||
|
|
|
|||
|
|
@ -0,0 +1,65 @@
|
|||
---
|
||||
stage: none
|
||||
group: unassigned
|
||||
info: Any user with at least the Maintainer role can merge updates to this content. For details, see https://docs.gitlab.com/ee/development/development_processes.html#development-guidelines-review.
|
||||
title: Data deletion guidelines
|
||||
---
|
||||
|
||||
In order to minimize the risk of accidental data loss, GitLab provides guidelines for how to safely use deletion operations in the codebase.
|
||||
|
||||
Generally, there are two ways to delete data:
|
||||
|
||||
- Mark for deletion: Identifies data for removal at a future date. This is the preferred approach.
|
||||
- Hard deletion: Immediately and permanently removes data.
|
||||
|
||||
## Mark for deletion
|
||||
|
||||
You should avoid direct calls to hard delete classes, as this can lead to unintended data loss.
|
||||
|
||||
{{< tabs >}}
|
||||
|
||||
{{< tab title="Projects" >}}
|
||||
|
||||
```ruby
|
||||
Projects::MarkForDeletionService.new(project, current_user).execute
|
||||
```
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab title="Groups" >}}
|
||||
|
||||
```ruby
|
||||
Groups::MarkForDeletionService.new(group, current_user).execute
|
||||
```
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< /tabs >}}
|
||||
|
||||
## Hard deletion
|
||||
|
||||
If you must delete data, use the following classes to hard delete from the codebase.
|
||||
|
||||
{{< tabs >}}
|
||||
|
||||
{{< tab title="Projects" >}}
|
||||
|
||||
```ruby
|
||||
Projects::DestroyService.new(project, user, {}).execute
|
||||
|
||||
ProjectDestroyWorker.perform_async(project_id, user_id, params)
|
||||
```
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab title="Groups" >}}
|
||||
|
||||
```ruby
|
||||
Groups::MarkForDeletionService.new(group, current_user).execute
|
||||
|
||||
GroupDestroyWorker.new.perform(group_id, user_id)
|
||||
```
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< /tabs >}}
|
||||
|
|
@ -268,7 +268,7 @@ Charts variables:
|
|||
```yaml
|
||||
all:
|
||||
vars:
|
||||
...
|
||||
# ...
|
||||
gitlab_charts_custom_config_file: '/path/to/gitlab-environment-toolkit/ansible/environments/gitlab-10k/inventory/charts.yml'
|
||||
```
|
||||
|
||||
|
|
|
|||
|
|
@ -349,8 +349,8 @@ file for the right database ID. For example, for an LFS object, the
|
|||
request must also include the SHA256 sum of the file. An example JWT
|
||||
payload looks like:
|
||||
|
||||
```yaml
|
||||
{"data": {sha256: "31806bb23580caab78040f8c45d329f5016b0115"}, iat: "1234567890"}
|
||||
```json
|
||||
{"data": {"sha256": "31806bb23580caab78040f8c45d329f5016b0115"}, "iat": "1234567890"}
|
||||
```
|
||||
|
||||
If the requested file matches the requested SHA256 sum, then the Geo
|
||||
|
|
@ -366,8 +366,8 @@ the Git repository request. This ensures that the **secondary** site downloads
|
|||
the right Git repository for the right database ID. An example JWT
|
||||
payload looks like:
|
||||
|
||||
```yaml
|
||||
{"data": {scope: "mygroup/myproject"}, iat: "1234567890"}
|
||||
```json
|
||||
{"data": {"scope": "mygroup/myproject"}, "iat": "1234567890"}
|
||||
```
|
||||
|
||||
## Git Push to Geo secondary
|
||||
|
|
|
|||
|
|
@ -163,7 +163,7 @@ A typical job definition in one of the e2e test pipeline definition YAML files w
|
|||
|
||||
```yaml
|
||||
my-new-test-job:
|
||||
...
|
||||
# ...
|
||||
variables:
|
||||
QA_SCENARIO: Test::Integration::MyNewTestScenario
|
||||
```
|
||||
|
|
|
|||
|
|
@ -54,7 +54,7 @@ If you are on a GitLab Self-Managed instance, create a personal access token.
|
|||
|
||||
After you configure the plugin in your IDE, connect it to your GitLab account:
|
||||
|
||||
1. In your IDE, select **Eclipse > Settings...**.
|
||||
1. In your IDE, select **Eclipse > Settings**.
|
||||
1. On the left sidebar, select **GitLab**.
|
||||
1. Provide the **Connection URL**. For GitLab.com, use `https://gitlab.com`.
|
||||
1. For **GitLab Personal Access Token**, paste in the personal access token you created.
|
||||
|
|
|
|||
|
|
@ -35,7 +35,7 @@ The Workspace log file, named `.log` is located in the directory `<your-eclipse-
|
|||
|
||||
To enable GitLab Language Server debug logs:
|
||||
|
||||
1. In your IDE, select **Eclipse > Settings...**.
|
||||
1. In your IDE, select **Eclipse > Settings**.
|
||||
1. On the left sidebar, select **GitLab**.
|
||||
1. In **Language Server Log Level**, enter `debug`.
|
||||
1. Select **Apply and Close**.
|
||||
|
|
|
|||
|
|
@ -8,10 +8,10 @@ title: Install and set up the GitLab extension for Visual Studio
|
|||
|
||||
To get the extension, use any of these methods:
|
||||
|
||||
- Inside Visual Studio, go to **Extensions > Manage extensions... > Browse**, and search for `GitLab`.
|
||||
- Inside Visual Studio, select **Extensions** from the activity bar, and search for `GitLab`.
|
||||
- From the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=GitLab.GitLabExtensionForVisualStudio).
|
||||
- From GitLab, either from the
|
||||
[list of releases](https://gitlab.com/gitlab-org/editor-extensions/gitlab-visual-studio-extension/-/releases), or by
|
||||
[list of releases](https://gitlab.com/gitlab-org/editor-extensions/gitlab-visual-studio-extension/-/releases), or by
|
||||
[downloading the latest version](https://gitlab.com/gitlab-org/editor-extensions/gitlab-visual-studio-extension/-/releases/permalink/latest/downloads/GitLab.Extension.vsix)
|
||||
directly.
|
||||
|
||||
|
|
|
|||
|
|
@ -236,15 +236,15 @@ gitlab_rails['omniauth_providers'] = [
|
|||
|
||||
```yaml
|
||||
- { name: 'PROVIDER_NAME',
|
||||
...
|
||||
# ...
|
||||
args: { gitlab_username_claim: 'sub' }
|
||||
}
|
||||
- { name: 'github',
|
||||
...
|
||||
# ...
|
||||
args: { gitlab_username_claim: 'name' }
|
||||
}
|
||||
- { name: 'kerberos',
|
||||
...
|
||||
# ...
|
||||
args: { gitlab_username_claim: 'uid' }
|
||||
}
|
||||
```
|
||||
|
|
@ -607,9 +607,9 @@ then override the icon in one of two ways:
|
|||
```yaml
|
||||
omniauth:
|
||||
providers:
|
||||
- { name: '...'
|
||||
- { name: 'myIcon'
|
||||
icon: 'data:image/png;base64,<base64-data>'
|
||||
...
|
||||
# ...
|
||||
}
|
||||
```
|
||||
|
||||
|
|
|
|||
|
|
@ -441,7 +441,7 @@ To set up multiple SAML IdPs:
|
|||
name: 'saml', # This is mandatory and must match the provider name
|
||||
assertion_consumer_service_url: 'https://gitlab.example.com/users/auth/saml/callback', # URL must match the name of the provider
|
||||
strategy_class: 'OmniAuth::Strategies::SAML',
|
||||
... # Put here all the required arguments similar to a single provider
|
||||
# Include all required arguments similar to a single provider
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -451,7 +451,7 @@ To set up multiple SAML IdPs:
|
|||
name: 'saml_2', # This is mandatory and must match the provider name
|
||||
assertion_consumer_service_url: 'https://gitlab.example.com/users/auth/saml_2/callback', # URL must match the name of the provider
|
||||
strategy_class: 'OmniAuth::Strategies::SAML',
|
||||
... # Put here all the required arguments similar to a single provider
|
||||
# Include all required arguments similar to a single provider
|
||||
},
|
||||
}
|
||||
]
|
||||
|
|
@ -485,7 +485,7 @@ To set up multiple SAML IdPs:
|
|||
name: 'saml' # This is mandatory and must match the provider name
|
||||
assertion_consumer_service_url: 'https://gitlab.example.com/users/auth/saml/callback' # URL must match the name of the provider
|
||||
strategy_class: 'OmniAuth::Strategies::SAML' # Mandatory
|
||||
... # Put here all the required arguments similar to a single provider
|
||||
# Include all required arguments similar to a single provider
|
||||
```
|
||||
|
||||
1. Put the following content in a file named `saml_2.yaml` to be used as a
|
||||
|
|
@ -499,7 +499,7 @@ To set up multiple SAML IdPs:
|
|||
name: 'saml_2' # This is mandatory and must match the provider name
|
||||
assertion_consumer_service_url: 'https://gitlab.example.com/users/auth/saml_2/callback' # URL must match the name of the provider
|
||||
strategy_class: 'OmniAuth::Strategies::SAML' # Mandatory
|
||||
... # Put here all the required arguments similar to a single provider
|
||||
# Include all required arguments similar to a single provider
|
||||
```
|
||||
|
||||
1. Optional. Set additional SAML providers by following the same steps.
|
||||
|
|
@ -567,7 +567,7 @@ To set up multiple SAML IdPs:
|
|||
name: 'saml', # This is mandatory and must match the provider name
|
||||
assertion_consumer_service_url: 'https://gitlab.example.com/users/auth/saml/callback', # URL must match the name of the provider
|
||||
strategy_class: 'OmniAuth::Strategies::SAML',
|
||||
... # Put here all the required arguments similar to a single provider
|
||||
# Include all required arguments similar to a single provider
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -577,7 +577,7 @@ To set up multiple SAML IdPs:
|
|||
name: 'saml_2', # This is mandatory and must match the provider name
|
||||
assertion_consumer_service_url: 'https://gitlab.example.com/users/auth/saml_2/callback', # URL must match the name of the provider
|
||||
strategy_class: 'OmniAuth::Strategies::SAML',
|
||||
... # Put here all the required arguments similar to a single provider
|
||||
# Include all required arguments similar to a single provider
|
||||
},
|
||||
}
|
||||
]
|
||||
|
|
@ -618,7 +618,7 @@ To set up multiple SAML IdPs:
|
|||
name: 'saml', # This is mandatory and must match the provider name
|
||||
assertion_consumer_service_url: 'https://gitlab.example.com/users/auth/saml/callback', # URL must match the name of the provider
|
||||
strategy_class: 'OmniAuth::Strategies::SAML',
|
||||
... # Put here all the required arguments similar to a single provider
|
||||
# Include all required arguments similar to a single provider
|
||||
},
|
||||
}
|
||||
- {
|
||||
|
|
@ -628,7 +628,7 @@ To set up multiple SAML IdPs:
|
|||
name: 'saml_2', # This is mandatory and must match the provider name
|
||||
strategy_class: 'OmniAuth::Strategies::SAML',
|
||||
assertion_consumer_service_url: 'https://gitlab.example.com/users/auth/saml_2/callback', # URL must match the name of the provider
|
||||
... # Put here all the required arguments similar to a single provider
|
||||
# Include all required arguments similar to a single provider
|
||||
},
|
||||
}
|
||||
```
|
||||
|
|
|
|||
|
|
@ -50,7 +50,7 @@ The rules can be defined using `regex`
|
|||
|
||||
The extended rules for PII data element detection
|
||||
|
||||
```yaml
|
||||
```toml
|
||||
[[rules]]
|
||||
id = "ssn"
|
||||
description = "Social Security Number"
|
||||
|
|
@ -63,7 +63,7 @@ keywords = ["ssn"]
|
|||
|
||||
The extended rules for password in plain text
|
||||
|
||||
```yaml
|
||||
```toml
|
||||
[[rules]]
|
||||
id = "password-secret"
|
||||
description = "Detect secrets starting with Password or PASSWORD"
|
||||
|
|
|
|||
|
|
@ -93,7 +93,7 @@ For example, to build a Docker image based on based on the
|
|||
ARG RUBY_VERSION=latest
|
||||
FROM ruby:$RUBY_VERSION
|
||||
|
||||
# ... put your stuff here
|
||||
# Include your content here
|
||||
```
|
||||
|
||||
To pass complex values like spaces and newlines, use Base64 encoding.
|
||||
|
|
|
|||
|
|
@ -485,7 +485,7 @@ tests:
|
|||
script:
|
||||
- command_1
|
||||
- command_2
|
||||
...
|
||||
# ...
|
||||
- command_n
|
||||
tags:
|
||||
- my-custom-tag
|
||||
|
|
@ -505,7 +505,7 @@ tests:
|
|||
script:
|
||||
- cpu_intensive_command_1
|
||||
- cpu_intensive_command_2
|
||||
...
|
||||
# ...
|
||||
- cpu_intensive_command_n
|
||||
tags:
|
||||
- my-custom-tag
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@ At the top of the left sidebar are several shortcuts. Use these shortcuts to
|
|||
show and hide the left sidebar, create new items, search, and view your profile. You can also view your list of issues,
|
||||
merge requests, and to-do items.
|
||||
|
||||

|
||||

|
||||
|
||||
{{< alert type="note" >}}
|
||||
|
||||
|
|
@ -39,7 +39,7 @@ The next area of the left sidebar changes based on the information you're viewin
|
|||
you might be viewing a project, exploring projects or groups, or viewing your profile.
|
||||
To switch to other areas of the left sidebar, use **Search or go to**.
|
||||
|
||||

|
||||

|
||||
|
||||
The rest of the left sidebar is populated based on the option you choose. For example,
|
||||
if you're in a project, the sidebar is project-specific.
|
||||
|
|
|
|||
|
|
@ -92,7 +92,7 @@ Use auto-instrumentation to instrument the application:
|
|||
|
||||
The OpenTelemetry autoconfigure libraries read their configuration from environment variables.
|
||||
|
||||
1. From the top-right menu, select **Edit Configurations...**:
|
||||
1. From the top-right menu, select **Edit Configurations**:
|
||||
|
||||

|
||||
|
||||
|
|
|
|||
|
|
@ -351,7 +351,7 @@ To enable filters, in the `.yaml` configuration file set the filter's `enabled`
|
|||
|
||||
```yaml
|
||||
title: My dashboard
|
||||
...
|
||||
# ...
|
||||
filters:
|
||||
excludeAnonymousUsers:
|
||||
enabled: true
|
||||
|
|
|
|||
|
|
@ -122,15 +122,14 @@ The rules we are using in the `apifuzzer_v1` and `apifuzzer_v2` jobs are copied
|
|||
# Disable the main apifuzzer_fuzz job
|
||||
apifuzzer_fuzz:
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH
|
||||
when: never
|
||||
- if: $CI_COMMIT_BRANCH
|
||||
when: never
|
||||
|
||||
apifuzzer_v1:
|
||||
extends: apifuzzer_fuzz
|
||||
variables:
|
||||
FUZZAPI_EXCLUDE_PATHS: /api/v1/**
|
||||
rules:
|
||||
rules:
|
||||
- if: $API_FUZZING_DISABLED == 'true' || $API_FUZZING_DISABLED == '1'
|
||||
when: never
|
||||
- if: $API_FUZZING_DISABLED_FOR_DEFAULT_BRANCH == 'true' &&
|
||||
|
|
@ -149,7 +148,6 @@ apifuzzer_v2:
|
|||
variables:
|
||||
FUZZAPI_EXCLUDE_PATHS: /api/v2/**
|
||||
rules:
|
||||
rules:
|
||||
- if: $API_FUZZING_DISABLED == 'true' || $API_FUZZING_DISABLED == '1'
|
||||
when: never
|
||||
- if: $API_FUZZING_DISABLED_FOR_DEFAULT_BRANCH &&
|
||||
|
|
@ -185,8 +183,8 @@ To verify the operation is excluded, run the API Fuzzing job and review the job
|
|||
# different names
|
||||
apifuzzer_fuzz:
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH
|
||||
when: never
|
||||
- if: $CI_COMMIT_BRANCH
|
||||
when: never
|
||||
|
||||
# API Fuzzing for feature branch work, excludes /api/large_response_json
|
||||
apifuzzer_branch:
|
||||
|
|
@ -194,7 +192,6 @@ apifuzzer_branch:
|
|||
variables:
|
||||
FUZZAPI_EXCLUDE_PATHS: /api/large_response_json
|
||||
rules:
|
||||
rules:
|
||||
- if: $API_FUZZING_DISABLED == 'true' || $API_FUZZING_DISABLED == '1'
|
||||
when: never
|
||||
- if: $API_FUZZING_DISABLED_FOR_DEFAULT_BRANCH &&
|
||||
|
|
@ -212,7 +209,7 @@ apifuzzer_branch:
|
|||
# Includes the long running operations
|
||||
apifuzzer_main:
|
||||
extends: apifuzzer_fuzz
|
||||
rules:
|
||||
rules:
|
||||
- if: $API_FUZZING_DISABLED == 'true' || $API_FUZZING_DISABLED == '1'
|
||||
when: never
|
||||
- if: $API_FUZZING_DISABLED_FOR_DEFAULT_BRANCH &&
|
||||
|
|
|
|||
|
|
@ -256,7 +256,7 @@ dast:
|
|||
DAST_TARGET_URL: https://target.example.com
|
||||
DAST_AUTH_URL: https://target.example.com
|
||||
DAST_AUTH_TYPE: basic-digest
|
||||
DAST_AUTH_NEGOTIATE_DELEGATION: *.example.com,example.com,*.EXAMPLE.COM,EXAMPLE.COM
|
||||
DAST_AUTH_NEGOTIATE_DELEGATION: '*.example.com,example.com,*.EXAMPLE.COM,EXAMPLE.COM'
|
||||
# Not shown -- DAST_AUTH_USERNAME, DAST_AUTH_PASSWORD set via Settings -> CI -> Variables
|
||||
before_script:
|
||||
- KRB5_CONF='
|
||||
|
|
|
|||
|
|
@ -233,7 +233,7 @@ analysis.
|
|||
Example of what the Job Summary output looks like:
|
||||
|
||||
```plaintext
|
||||
# ... job output ... #
|
||||
# Job output #
|
||||
|
||||
[=== libbehave: New packages detected ===]
|
||||
🔺 4 new packages have been detected in this MR.
|
||||
|
|
|
|||
|
|
@ -175,7 +175,7 @@ template:
|
|||
|
||||
| CI/CD variable | Description | Default value |
|
||||
|-------------------------------------------|-----------------------------------------------|-----------------------------------|
|
||||
| `SECURE_BINARIES_ANALYZERS` | Comma-separated list of analyzers to download | `"bandit, brakeman, gosec, and so on..."` |
|
||||
| `SECURE_BINARIES_ANALYZERS` | Comma-separated list of analyzers to download | `"bandit, brakeman, gosec, ..."` |
|
||||
| `SECURE_BINARIES_DOWNLOAD_IMAGES` | Used to disable jobs | `"true"` |
|
||||
| `SECURE_BINARIES_PUSH_IMAGES` | Push files to the project registry | `"true"` |
|
||||
| `SECURE_BINARIES_SAVE_ARTIFACTS` | Also save image archives as artifacts | `"false"` |
|
||||
|
|
|
|||
|
|
@ -148,15 +148,15 @@ to be compatible with each other:
|
|||
Valid configuration example:
|
||||
|
||||
```yaml
|
||||
- `override-policy-1` stages: `[build, test, policy-test, deploy]`
|
||||
- `override-policy-2` stages: `[test, deploy]`
|
||||
- override-policy-1 stages: [build, test, policy-test, deploy]
|
||||
- override-policy-2 stages: [test, deploy]
|
||||
```
|
||||
|
||||
Invalid configuration example:
|
||||
|
||||
```yaml
|
||||
- `override-policy-1` stages: `[build, test, policy-test, deploy]`
|
||||
- `override-policy-2` stages: `[deploy, test]`
|
||||
- override-policy-1 stages: [build, test, policy-test, deploy]
|
||||
- override-policy-2 stages: [deploy, test]
|
||||
```
|
||||
|
||||
The pipeline fails if one or more `override_project_ci` policies has an invalid `stages` configuration.
|
||||
|
|
|
|||
|
|
@ -186,13 +186,13 @@ deploy:
|
|||
AGENT_ID: 1234 # replace with your agent's numeric ID
|
||||
K8S_PROXY_URL: https://<KAS_DOMAIN>/k8s-proxy/ # For agent server (KAS) deployed in Kubernetes cluster (for gitlab.com use kas.gitlab.com); replace with your URL
|
||||
# K8S_PROXY_URL: https://<GITLAB_DOMAIN>/-/kubernetes-agent/k8s-proxy/ # For agent server (KAS) in Omnibus
|
||||
# ... any other variables you have configured
|
||||
# Include any additional variables
|
||||
before_script:
|
||||
- kubectl config set-credentials agent:$AGENT_ID --token="ci:${AGENT_ID}:${CI_JOB_TOKEN}"
|
||||
- kubectl config set-cluster gitlab --server="${K8S_PROXY_URL}"
|
||||
- kubectl config set-context "$KUBE_CONTEXT" --cluster=gitlab --user="agent:${AGENT_ID}"
|
||||
- kubectl config use-context "$KUBE_CONTEXT"
|
||||
# ... rest of your job configuration
|
||||
# Include the remaining job configuration
|
||||
```
|
||||
|
||||
### Environments with KAS that use self-signed certificates
|
||||
|
|
@ -371,6 +371,7 @@ In this example:
|
|||
{{< history >}}
|
||||
|
||||
- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/467936) in GitLab 17.3 [with a flag](../../../administration/feature_flags.md) named `kubernetes_agent_protected_branches`. Disabled by default.
|
||||
- [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/467936) in GitLab 17.10. Feature flag `kubernetes_agent_protected_branches` removed.
|
||||
|
||||
{{< /history >}}
|
||||
|
||||
|
|
|
|||
|
|
@ -93,8 +93,9 @@ Add `--set-file config.kasCaCert=my-custom-ca.pem` to the `helm install` command
|
|||
|
||||
When you deploy `agentk` with a set `config.kasCaCert` value, the certificate is added to `configmap` and the certificate file is mounted in `/etc/ssl/certs`.
|
||||
|
||||
For example, with the command `kubectl get configmap -lapp=gitlab-agent -o yaml`:
|
||||
|
||||
```yaml
|
||||
$ kubectl get configmap -lapp=gitlab-agent -o yaml
|
||||
apiVersion: v1
|
||||
items:
|
||||
- apiVersion: v1
|
||||
|
|
|
|||
|
|
@ -96,7 +96,7 @@ In the `script` section of the `.gitlab-ci.yml` file, enter the following comman
|
|||
cosign sign "$IMAGE_DIGEST" \
|
||||
--annotations "com.gitlab.ci.user.name=$GITLAB_USER_NAME" \
|
||||
--annotations "com.gitlab.ci.pipeline.id=$CI_PIPELINE_ID" \
|
||||
# ... (other annotations) ...
|
||||
# Additional annotations
|
||||
--annotations "tag=$IMAGE_TAG"
|
||||
```
|
||||
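Signing is only half of the workflow; consumers still need to verify the signature. A hedged sketch of a keyless verification command follows; the identity and issuer values are placeholders that you must replace with the values matching your signing job and GitLab instance, and are not taken from this page.

```shell
cosign verify "$IMAGE_DIGEST" \
  --certificate-identity "<expected-signing-identity>" \
  --certificate-oidc-issuer "<your-gitlab-instance-url>"
```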
|
||||
|
|
|
|||
|
|
@ -619,12 +619,12 @@ If the log doesn't appear in the `.npm/_logs/` directory, you can copy the
|
|||
log to your root directory and view it there:
|
||||
|
||||
```yaml
|
||||
script:
|
||||
script:
|
||||
- npm install --loglevel verbose
|
||||
- cp -r /root/.npm/_logs/ .
|
||||
artifacts:
|
||||
paths:
|
||||
- './_logs
|
||||
paths:
|
||||
- './_logs'
|
||||
```
|
||||
|
||||
The npm log is copied from `/root/.npm/_logs/` into the project directory and saved as an artifact.
|
||||
|
|
|
|||
|
|
@ -72,15 +72,12 @@ After you publish the package for `ChildProject`, you should see the package in
|
|||
A package is associated with a project on GitLab, but it is not associated
|
||||
with the code in that project.
|
||||
|
||||
For example, when configuring a package for npm or Maven, the `project_id` sets the registry URL that the package publishes to:
|
||||
For example, when configuring a package for npm or Maven, the `project_id` sets the registry URL that the package publishes to.
|
||||
|
||||
```yaml
|
||||
# npm
|
||||
https://gitlab.example.com/api/v4/projects/<project_id>/packages/npm/
|
||||
For example:
|
||||
|
||||
# maven
|
||||
https://gitlab.example.com/api/v4/projects/<project_id>/packages/maven/
|
||||
```
|
||||
- npm: `https://gitlab.example.com/api/v4/projects/<project_id>/packages/npm/`
|
||||
- maven: `https://gitlab.example.com/api/v4/projects/<project_id>/packages/maven/`
|
||||
|
||||
If you change the `project_id` in the registry URL to another project, your package publishes to that project.
|
||||
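As a sketch of this behavior in CI (the scope `@my-scope` is a placeholder; `CI_PROJECT_ID` and `CI_JOB_TOKEN` are predefined CI/CD variables), a job can point npm at the current project's registry URL and publish there:

```yaml
publish-npm:
  image: node:20
  stage: deploy
  script:
    # Point @my-scope packages at this project's npm registry and authenticate with the job token
    - echo "@my-scope:registry=https://gitlab.example.com/api/v4/projects/${CI_PROJECT_ID}/packages/npm/" > .npmrc
    - echo "//gitlab.example.com/api/v4/projects/${CI_PROJECT_ID}/packages/npm/:_authToken=${CI_JOB_TOKEN}" >> .npmrc
    - npm publish
```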
|
||||
|
|
|
|||
|
|
@ -76,6 +76,32 @@ To do this:
|
|||
|
||||
GitLab Pages uses a cache for efficiency. Changes to access settings typically take effect within one minute when the cache updates.
|
||||
|
||||
## Authenticate with an access token
|
||||
|
||||
{{< history >}}
|
||||
|
||||
- [Introduced](https://gitlab.com/gitlab-org/gitlab-pages/-/issues/388) in GitLab 17.10.
|
||||
|
||||
{{< /history >}}
|
||||
|
||||
To authenticate against a restricted GitLab Pages site, you can provide the `Authorization` header with an access token.
|
||||
|
||||
Prerequisites:
|
||||
|
||||
- You must have one of the following access tokens with the `read_api` scope:
|
||||
- [Personal access token](../../profile/personal_access_tokens.md#create-a-personal-access-token)
|
||||
- [Project access token](../settings/project_access_tokens.md#create-a-project-access-token)
|
||||
- [Group access token](../../group/settings/group_access_tokens.md#create-a-group-access-token)
|
||||
- [OAuth 2.0 token](../../../api/oauth2.md)
|
||||
|
||||
For example, to use an access token with OAuth-compliant headers:
|
||||
|
||||
```shell
|
||||
curl --header "Authorization: Bearer <your_access_token>" <published_pages_url>
|
||||
```
|
||||
|
||||
For invalid or unauthorized access tokens, the request returns [`404`](../../../api/rest/troubleshooting.md#status-codes).
|
||||
|
||||
## Terminating a Pages session
|
||||
|
||||
To sign out of your GitLab Pages website, revoke the application access token
|
||||
|
|
|
|||
|
|
@ -202,14 +202,14 @@ deploy-pages:
|
|||
script:
|
||||
- echo "Pages accessible through ${CI_PAGES_URL}"
|
||||
variables:
|
||||
PAGES_PREFIX: "" # no prefix by default (master)
|
||||
PAGES_PREFIX: "" # no prefix by default (run on the default branch)
|
||||
pages: # specifies that this is a Pages job and publishes the default public directory
|
||||
path_prefix: "$PAGES_PREFIX"
|
||||
environment:
|
||||
name: "Pages ${PAGES_PREFIX}"
|
||||
url: $CI_PAGES_URL
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "staging" # ensure to run on master (with default PAGES_PREFIX)
|
||||
- if: $CI_COMMIT_BRANCH == "staging" # ensure to run on the default branch (with default PAGES_PREFIX)
|
||||
variables:
|
||||
PAGES_PREFIX: '_stg' # prefix with _stg for the staging branch
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event" # conditionally change the prefix on Merge Requests
|
||||
|
|
|
|||
|
|
@ -219,7 +219,7 @@ requirements, add a rule which checks `CI_HAS_OPEN_REQUIREMENTS` CI/CD variable.
|
|||
```yaml
|
||||
requirements_confirmation:
|
||||
rules:
|
||||
- if: "$CI_HAS_OPEN_REQUIREMENTS" == "true"
|
||||
- if: '$CI_HAS_OPEN_REQUIREMENTS == "true"'
|
||||
when: manual
|
||||
- when: never
|
||||
allow_failure: false
|
||||
|
|
|
|||
|
|
@ -158,7 +158,7 @@ Rather than attempting to push all changes at once, this workaround:
|
|||
git gc
|
||||
SIZE=$(git count-objects -v 2> /dev/null | grep size-pack | awk '{print $2}')
|
||||
|
||||
# Be conservative... and try to push 2GB at a time
|
||||
# Be conservative and try to push 2GB at a time
|
||||
# (given this assumes each commit is the same size - which is wrong)
|
||||
BATCHES=$(($SIZE / 500000))
|
||||
TOTAL_COMMITS=$(git rev-list --count HEAD)
|
||||
|
|
|
|||
|
|
@ -72,7 +72,7 @@ Supported input rules:
|
|||
|
||||
| Input rule syntax | Content inserted |
|
||||
| --------------------------------------------------------- | -------------------- |
|
||||
| `# Heading 1` <br>... <br> `###### Heading 6` | Headings 1 through 6 |
|
||||
| `# Heading 1` through `###### Heading 6` | Headings 1 through 6 |
|
||||
| `**bold**` or `__bold__` | Bold text |
|
||||
| `_italics_` or `*italics*` | Italicized text |
|
||||
| `~~strike~~` | Strikethrough |
|
||||
|
|
|
|||
|
|
@ -505,7 +505,7 @@ When the collection loops remove the object locks, the script deletes the job ar
|
|||
# age = calculate_age(job.created_at)
|
||||
|
||||
for a in artifacts:
|
||||
# ... removed analysis collection code for readability
|
||||
# Analysis collection code removed for readability
|
||||
|
||||
# Advanced filtering: match job artifacts age and size against thresholds
|
||||
if (float(age) > float(threshold_age)) or (float(a['size']) > float(threshold_size)):
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ module API
|
|||
class Run < Grape::Entity
|
||||
expose :itself, using: RunInfo, as: :info
|
||||
expose :data do
|
||||
expose :metrics, using: Metric
|
||||
expose :latest_metrics, as: :metrics, using: Metric
|
||||
expose :params, using: KeyValue
|
||||
expose :metadata, as: :tags, using: KeyValue
|
||||
end
|
||||
|
|
|
|||
|
|
@ -52,7 +52,6 @@ module Gitlab
|
|||
|
||||
:unverified
|
||||
end
|
||||
alias_method :reverified_status, :verification_status
|
||||
|
||||
private
|
||||
|
||||
|
|
|
|||
|
|
@ -1049,7 +1049,7 @@ msgstr ""
|
|||
msgid "%{labelStart}Project:%{labelEnd} %{project}"
|
||||
msgstr ""
|
||||
|
||||
msgid "%{labelStart}Report Type:%{labelEnd} %{reportType}"
|
||||
msgid "%{labelStart}Report type:%{labelEnd} %{reportType}"
|
||||
msgstr ""
|
||||
|
||||
msgid "%{labelStart}Scanner:%{labelEnd} %{scanner}"
|
||||
|
|
@ -48861,7 +48861,7 @@ msgstr ""
|
|||
msgid "Reports|New"
|
||||
msgstr ""
|
||||
|
||||
msgid "Reports|Report Type"
|
||||
msgid "Reports|Report type"
|
||||
msgstr ""
|
||||
|
||||
msgid "Reports|See test results while the pipeline is running"
|
||||
|
|
@ -53921,10 +53921,10 @@ msgstr ""
|
|||
msgid "SecurityReports|Remove project from dashboard"
|
||||
msgstr ""
|
||||
|
||||
msgid "SecurityReports|Report Type"
|
||||
msgid "SecurityReports|Report has expired"
|
||||
msgstr ""
|
||||
|
||||
msgid "SecurityReports|Report has expired"
|
||||
msgid "SecurityReports|Report type"
|
||||
msgstr ""
|
||||
|
||||
msgid "SecurityReports|Results show vulnerability findings from the latest successful %{helpPageLinkStart}pipeline%{helpPageLinkEnd}."
|
||||
|
|
|
|||
|
|
@ -86,7 +86,6 @@ ee/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_containe
|
|||
ee/spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js
|
||||
ee/spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js
|
||||
ee/spec/frontend/vulnerabilities/generic_report/report_item_graphql_spec.js
|
||||
ee/spec/frontend/vulnerabilities/related_issues_spec.js
|
||||
spec/frontend/__helpers__/vue_test_utils_helper_spec.js
|
||||
spec/frontend/access_tokens/index_spec.js
|
||||
spec/frontend/admin/abuse_report/components/reported_content_spec.js
|
||||
|
|
@ -211,7 +210,6 @@ spec/frontend/sidebar/components/confidential/confidentiality_dropdown_spec.js
|
|||
spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js
|
||||
spec/frontend/sidebar/components/incidents/escalation_status_spec.js
|
||||
spec/frontend/sidebar/components/labels/labels_select_vue/dropdown_contents_labels_view_spec.js
|
||||
spec/frontend/sidebar/components/labels/labels_select_vue/labels_select_root_spec.js
|
||||
spec/frontend/sidebar/components/milestone/milestone_dropdown_spec.js
|
||||
spec/frontend/sidebar/components/subscriptions/subscriptions_dropdown_spec.js
|
||||
spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js
|
||||
|
|
|
|||
|
|
@ -44,9 +44,12 @@ describe('Commit signature', () => {
|
|||
${signatureTypes.GPG} | ${verificationStatuses.OTHER_USER}
|
||||
${signatureTypes.GPG} | ${verificationStatuses.SAME_USER_DIFFERENT_EMAIL}
|
||||
${signatureTypes.GPG} | ${verificationStatuses.MULTIPLE_SIGNATURES}
|
||||
${signatureTypes.GPG} | ${verificationStatuses.UNVERIFIED_AUTHOR_EMAIL}
|
||||
${signatureTypes.X509} | ${verificationStatuses.VERIFIED}
|
||||
${signatureTypes.X509} | ${verificationStatuses.UNVERIFIED_AUTHOR_EMAIL}
|
||||
${signatureTypes.SSH} | ${verificationStatuses.VERIFIED}
|
||||
${signatureTypes.SSH} | ${verificationStatuses.REVOKED_KEY}
|
||||
${signatureTypes.SSH} | ${verificationStatuses.UNVERIFIED_AUTHOR_EMAIL}
|
||||
`(
|
||||
'For a specified `$signatureType` and `$verificationStatus` it renders component correctly',
|
||||
({ signatureType, verificationStatus }) => {
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ import Vue, { nextTick } from 'vue';
|
|||
import Vuex from 'vuex';
|
||||
|
||||
import { isInViewport } from '~/lib/utils/common_utils';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import DropdownButton from '~/sidebar/components/labels/labels_select_vue/dropdown_button.vue';
|
||||
import DropdownContents from '~/sidebar/components/labels/labels_select_vue/dropdown_contents.vue';
|
||||
import DropdownTitle from '~/sidebar/components/labels/labels_select_vue/dropdown_title.vue';
|
||||
|
|
@ -185,7 +186,7 @@ describe('LabelsSelectRoot', () => {
|
|||
isInViewport.mockImplementation(() => false);
|
||||
wrapper.vm.setContentIsOnViewport(wrapper.vm.$store.state);
|
||||
|
||||
await nextTick();
|
||||
await waitForPromises();
|
||||
expect(wrapper.findComponent(DropdownContents).props('renderOnTop')).toBe(true);
|
||||
});
|
||||
|
||||
|
|
@ -193,7 +194,7 @@ describe('LabelsSelectRoot', () => {
|
|||
isInViewport.mockImplementation(() => true);
|
||||
wrapper.vm.setContentIsOnViewport(wrapper.vm.$store.state);
|
||||
|
||||
await nextTick();
|
||||
await waitForPromises();
|
||||
expect(wrapper.findComponent(DropdownContents).props('renderOnTop')).toBe(false);
|
||||
});
|
||||
},
|
||||
|
|
|
|||
|
|
@ -77,4 +77,28 @@ RSpec.describe BlameHelper, feature_category: :source_code_management do
|
|||
|
||||
it { is_expected.to eq "/#{project.full_path}/-/blame/#{id}/streaming" }
|
||||
end
|
||||
|
||||
describe '#blame_preferences' do
|
||||
subject { helper.blame_preferences(project) }
|
||||
|
||||
let_it_be(:project) { build_stubbed(:project) }
|
||||
|
||||
let(:repo_double) { instance_double(Repository, ignore_revs_file_blob: blob) }
|
||||
|
||||
before do
|
||||
allow(project).to receive(:repository).and_return(repo_double)
|
||||
end
|
||||
|
||||
context 'when there is no ignore revs file' do
|
||||
let(:blob) { nil }
|
||||
|
||||
it { is_expected.to eq(has_revs_file: 'false') }
|
||||
end
|
||||
|
||||
context 'when there is a revs file' do
|
||||
let(:blob) { 'not_nil' }
|
||||
|
||||
it { is_expected.to eq(has_revs_file: 'true') }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -3,7 +3,8 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe API::Entities::Ml::Mlflow::GetRun, feature_category: :mlops do
|
||||
let_it_be(:candidate) { build(:ml_candidates, :with_metrics_and_params) }
|
||||
let_it_be(:candidate) { create(:ml_candidates, :with_metrics_and_params) }
|
||||
let_it_be(:metrics) { candidate.latest_metrics }
|
||||
|
||||
subject { described_class.new(candidate).as_json }
|
||||
|
||||
|
|
@ -16,12 +17,12 @@ RSpec.describe API::Entities::Ml::Mlflow::GetRun, feature_category: :mlops do
|
|||
end
|
||||
|
||||
it 'presents the metrics' do
|
||||
expect(subject.dig(:run, :data, :metrics).size).to eq(candidate.metrics.size)
|
||||
expect(subject.dig(:run, :data, :metrics).size).to eq(metrics.size)
|
||||
end
|
||||
|
||||
it 'presents metrics correctly' do
|
||||
presented_metric = subject.dig(:run, :data, :metrics)[0]
|
||||
metric = candidate.metrics[0]
|
||||
metric = metrics[0]
|
||||
|
||||
expect(presented_metric[:key]).to eq(metric.name)
|
||||
expect(presented_metric[:value]).to eq(metric.value)
|
||||
|
|
@ -43,7 +44,7 @@ RSpec.describe API::Entities::Ml::Mlflow::GetRun, feature_category: :mlops do
|
|||
|
||||
context 'when candidate has no metrics' do
|
||||
before do
|
||||
allow(candidate).to receive(:metrics).and_return([])
|
||||
allow(candidate).to receive(:latest_metrics).and_return([])
|
||||
end
|
||||
|
||||
it 'returns empty data' do
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe API::Entities::Ml::Mlflow::Run do
|
||||
let_it_be(:candidate) { build(:ml_candidates, :with_metrics_and_params) }
|
||||
let_it_be(:candidate) { create(:ml_candidates, :with_metrics_and_params) }
|
||||
|
||||
subject { described_class.new(candidate).as_json }
|
||||
|
||||
|
|
@ -12,12 +12,12 @@ RSpec.describe API::Entities::Ml::Mlflow::Run do
|
|||
end
|
||||
|
||||
it 'presents the metrics' do
|
||||
expect(subject.dig(:data, :metrics).size).to eq(candidate.metrics.size)
|
||||
expect(subject.dig(:data, :metrics).size).to eq(candidate.latest_metrics.size)
|
||||
end
|
||||
|
||||
it 'presents metrics correctly' do
|
||||
presented_metric = subject.dig(:data, :metrics)[0]
|
||||
metric = candidate.metrics[0]
|
||||
metric = candidate.latest_metrics[0]
|
||||
|
||||
expect(presented_metric[:key]).to eq(metric.name)
|
||||
expect(presented_metric[:value]).to eq(metric.value)
|
||||
|
|
@ -39,7 +39,7 @@ RSpec.describe API::Entities::Ml::Mlflow::Run do
|
|||
|
||||
context 'when candidate has no metrics' do
|
||||
before do
|
||||
allow(candidate).to receive(:metrics).and_return([])
|
||||
allow(candidate).to receive(:latest_metrics).and_return([])
|
||||
end
|
||||
|
||||
it 'returns empty data' do
|
||||
|
|
|
|||
|
|
@ -3,8 +3,8 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe API::Entities::Ml::Mlflow::SearchRuns, feature_category: :mlops do
|
||||
let_it_be(:candidates) { [build_stubbed(:ml_candidates, :with_metrics_and_params), build_stubbed(:ml_candidates)] }
|
||||
|
||||
let_it_be(:candidates) { [create(:ml_candidates, :with_metrics_and_params), create(:ml_candidates)] }
|
||||
let_it_be(:metrics) { candidates[0].latest_metrics }
|
||||
let(:next_page_token) { 'abcdef' }
|
||||
|
||||
subject { described_class.new({ candidates: candidates, next_page_token: next_page_token }).as_json }
|
||||
|
|
@ -16,11 +16,11 @@ RSpec.describe API::Entities::Ml::Mlflow::SearchRuns, feature_category: :mlops d
|
|||
end
|
||||
|
||||
it 'presents metrics', :aggregate_failures do
|
||||
expect(subject.dig(:runs, 0, :data, :metrics).size).to eq(candidates[0].metrics.size)
|
||||
expect(subject.dig(:runs, 0, :data, :metrics).size).to eq(metrics.size)
|
||||
expect(subject.dig(:runs, 1, :data, :metrics).size).to eq(0)
|
||||
|
||||
presented_metric = subject.dig(:runs, 0, :data, :metrics, 0, :key)
|
||||
metric = candidates[0].metrics[0].name
|
||||
metric = metrics[0].name
|
||||
|
||||
expect(presented_metric).to eq(metric)
|
||||
end
|
||||
|
|
|
|||
|
|
@ -215,6 +215,7 @@ RSpec.describe Gitlab::Database::TablesLocker, :suppress_gitlab_schemas_validate
|
|||
context 'when running on multiple databases' do
|
||||
before do
|
||||
skip_if_shared_database(:ci)
|
||||
skip_if_shared_database(:sec)
|
||||
end
|
||||
|
||||
describe '#lock_writes' do
|
||||
|
|
@ -309,14 +310,11 @@ RSpec.describe Gitlab::Database::TablesLocker, :suppress_gitlab_schemas_validate
|
|||
subject { described_class.new.lock_writes }
|
||||
|
||||
before do
|
||||
# Some spec in this file currently fails when a sec database is configured. We plan to ensure it all functions
|
||||
# and passes prior to the sec db rollout.
|
||||
# Consult https://gitlab.com/gitlab-org/gitlab/-/issues/520270 for more info.
|
||||
skip_if_multiple_databases_are_setup(:sec)
|
||||
|
||||
allow(::Gitlab::Database).to receive(:db_config_share_with).and_return(nil)
|
||||
ci_db_config = Ci::ApplicationRecord.connection_db_config
|
||||
allow(::Gitlab::Database).to receive(:db_config_share_with).with(ci_db_config).and_return('main')
|
||||
(Gitlab::Database.database_base_models.values - [ActiveRecord::Base]).each do |db_record_class|
|
||||
db_config = db_record_class.connection_db_config
|
||||
allow(::Gitlab::Database).to receive(:db_config_share_with).with(db_config).and_return('main')
|
||||
end
|
||||
end
|
||||
|
||||
it 'does not lock any tables if the ci database is shared with main database' do
|
||||
|
|
|
|||
|
|
@ -19,7 +19,8 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
|
|||
:raw_commit,
|
||||
signature: signature,
|
||||
sha: commit_sha,
|
||||
committer_email: committer_email
|
||||
committer_email: committer_email,
|
||||
author_email: committer_email
|
||||
)
|
||||
|
||||
allow(raw_commit).to receive :save!
|
||||
|
|
|
|||
|
|
@ -26,6 +26,10 @@ RSpec.describe Gitlab::Ssh::Commit, feature_category: :source_code_management do
|
|||
.with(Gitlab::Git::Repository, commit.sha)
|
||||
.and_return(signature_data)
|
||||
|
||||
allow_next_instance_of(Commit) do |instance|
|
||||
allow(instance).to receive(:author_email).and_return(user_author.email)
|
||||
end
|
||||
|
||||
allow(verifier).to receive_messages({
|
||||
verification_status: verification_status,
|
||||
signed_by_key: signed_by_key,
|
||||
|
|
|
|||
|
|
@ -116,7 +116,7 @@ RSpec.describe CommitSignatures::GpgSignature do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#reverified_status' do
|
||||
describe '#verification_status' do
|
||||
let(:verification_status) { :verified }
|
||||
let(:signature) do
|
||||
create(:gpg_signature, commit_sha: commit_sha, gpg_key: gpg_key, project: project,
|
||||
|
|
@ -128,16 +128,16 @@ RSpec.describe CommitSignatures::GpgSignature do
|
|||
end
|
||||
|
||||
# verified is used for user signed gpg commits.
|
||||
context 'when verification_status is verified' do
|
||||
it 'returns existing verification status' do
|
||||
expect(signature.reverified_status).to eq('verified')
|
||||
context 'when persisted verification_status is verified' do
|
||||
it 'returns persisted verification status' do
|
||||
expect(signature.verification_status).to eq('verified')
|
||||
end
|
||||
|
||||
context 'when commit author does not match the gpg_key author' do
|
||||
let(:commit_author) { create(:user) }
|
||||
|
||||
it 'returns unverified_author_email' do
|
||||
expect(signature.reverified_status).to eq('unverified_author_email')
|
||||
expect(signature.verification_status).to eq('unverified_author_email')
|
||||
end
|
||||
|
||||
context 'when check_for_mailmapped_commit_emails feature flag is disabled' do
|
||||
|
|
@ -146,33 +146,33 @@ RSpec.describe CommitSignatures::GpgSignature do
|
|||
end
|
||||
|
||||
it 'verification status is unmodified' do
|
||||
expect(signature.reverified_status).to eq('verified')
|
||||
expect(signature.verification_status).to eq('verified')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when verification_status not verified' do
|
||||
context 'when persisted verification_status not verified' do
|
||||
let(:verification_status) { :unverified }
|
||||
|
||||
it 'returns the signature verification status' do
|
||||
expect(signature.reverified_status).to eq('unverified')
|
||||
expect(signature.verification_status).to eq('unverified')
|
||||
end
|
||||
end
|
||||
|
||||
# verified_system is used for ui signed commits.
|
||||
context 'when verification_status is verified_system' do
|
||||
context 'when persisted verification_status is verified_system' do
|
||||
let(:verification_status) { :verified_system }
|
||||
|
||||
it 'returns existing verification status' do
|
||||
expect(signature.reverified_status).to eq('verified_system')
|
||||
expect(signature.verification_status).to eq('verified_system')
|
||||
end
|
||||
|
||||
context 'when commit author does not match the gpg_key author' do
|
||||
let(:commit_author) { create(:user) }
|
||||
|
||||
it 'returns existing verification status' do
|
||||
expect(signature.reverified_status).to eq('unverified_author_email')
|
||||
it 'returns unverified_author_email' do
|
||||
expect(signature.verification_status).to eq('unverified_author_email')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -76,21 +76,21 @@ RSpec.describe CommitSignatures::SshSignature, feature_category: :source_code_ma
|
|||
end
|
||||
end
|
||||
|
||||
describe '#reverified_status' do
|
||||
describe '#verification_status' do
|
||||
before do
|
||||
allow(signature.project).to receive(:commit).with(commit_sha).and_return(commit)
|
||||
end
|
||||
|
||||
context 'when verification_status is verified' do
|
||||
context 'when persisted verification_status is verified' do
|
||||
it 'returns verified' do
|
||||
expect(signature.reverified_status).to eq('verified')
|
||||
expect(signature.verification_status).to eq('verified')
|
||||
end
|
||||
|
||||
context 'and the author email does not belong to the signed by user' do
|
||||
let(:user) { create(:user) }
|
||||
|
||||
it 'returns unverified_author_email' do
|
||||
expect(signature.reverified_status).to eq('unverified_author_email')
|
||||
expect(signature.verification_status).to eq('unverified_author_email')
|
||||
end
|
||||
|
||||
context 'when check_for_mailmapped_commit_emails feature flag is disabled' do
|
||||
|
|
@ -99,17 +99,17 @@ RSpec.describe CommitSignatures::SshSignature, feature_category: :source_code_ma
|
|||
end
|
||||
|
||||
it 'verification status is unmodified' do
|
||||
expect(signature.reverified_status).to eq('verified')
|
||||
expect(signature.verification_status).to eq('verified')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when verification_status not verified' do
|
||||
context 'when persisted verification_status not verified' do
|
||||
let(:signature) { create(:ssh_signature, verification_status: 'unverified') }
|
||||
|
||||
it 'returns the signature verification status' do
|
||||
expect(signature.reverified_status).to eq('unverified')
|
||||
expect(signature.verification_status).to eq('unverified')
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -117,14 +117,14 @@ RSpec.describe CommitSignatures::SshSignature, feature_category: :source_code_ma
|
|||
let(:verification_status) { :verified_system }
|
||||
|
||||
it 'returns the signature verification status' do
|
||||
expect(signature.reverified_status).to eq('verified_system')
|
||||
expect(signature.verification_status).to eq('verified_system')
|
||||
end
|
||||
|
||||
context 'and the author email does not belong to the signed by user' do
|
||||
let(:user) { create(:user) }
|
||||
|
||||
it 'returns unverified_author_email' do
|
||||
expect(signature.reverified_status).to eq('unverified_author_email')
|
||||
expect(signature.verification_status).to eq('unverified_author_email')
|
||||
end
|
||||
|
||||
context 'when check_for_mailmapped_commit_emails feature flag is disabled' do
|
||||
|
|
@ -133,7 +133,7 @@ RSpec.describe CommitSignatures::SshSignature, feature_category: :source_code_ma
|
|||
end
|
||||
|
||||
it 'verification status is unmodified' do
|
||||
expect(signature.reverified_status).to eq('verified_system')
|
||||
expect(signature.verification_status).to eq('verified_system')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ RSpec.describe CommitSignatures::X509CommitSignature do
|
|||
let_it_be(:commit_sha) { '189a6c924013fc3fe40d6f1ec1dc20214183bc97' }
|
||||
let_it_be(:project) { create(:project, :public, :repository) }
|
||||
let_it_be(:commit) { create(:commit, project: project, sha: commit_sha) }
|
||||
let_it_be(:x509_certificate) { create(:x509_certificate) }
|
||||
let_it_be(:x509_certificate) { create(:x509_certificate, email: 'r.meier@siemens.com') }
|
||||
let_it_be(:verification_status) { "verified" }
|
||||
|
||||
let(:attributes) do
|
||||
|
|
@ -49,16 +49,16 @@ RSpec.describe CommitSignatures::X509CommitSignature do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#reverified_status' do
|
||||
describe '#verification_status' do
|
||||
let_it_be(:matching_email) { 'r.meier@siemens.com' }
|
||||
|
||||
subject(:reverified_status) { described_class.safe_create!(attributes).reverified_status }
|
||||
subject(:signature) { described_class.safe_create!(attributes) }
|
||||
|
||||
context 'when the commit email matches the x509 certificate emails' do
|
||||
let_it_be(:x509_certificate) { create(:x509_certificate, email: matching_email) }
|
||||
|
||||
it 'returns verified' do
|
||||
expect(reverified_status).to eq('verified')
|
||||
expect(signature.verification_status).to eq('verified')
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -71,7 +71,7 @@ RSpec.describe CommitSignatures::X509CommitSignature do
|
|||
end
|
||||
|
||||
it 'returns verified' do
|
||||
expect(reverified_status).to eq('verified')
|
||||
expect(signature.verification_status).to eq('verified')
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -84,7 +84,7 @@ RSpec.describe CommitSignatures::X509CommitSignature do
|
|||
end
|
||||
|
||||
it 'returns unverified_author_email' do
|
||||
expect(reverified_status).to eq('unverified_author_email')
|
||||
expect(signature.verification_status).to eq('unverified_author_email')
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -94,7 +94,7 @@ RSpec.describe CommitSignatures::X509CommitSignature do
|
|||
end
|
||||
|
||||
it 'verification status is unmodified' do
|
||||
expect(reverified_status).to eq('verified')
|
||||
expect(signature.verification_status).to eq('verified')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ RSpec.describe Repository, feature_category: :source_code_management do
|
|||
end
|
||||
|
||||
let_it_be(:user) { create(:user) }
|
||||
let_it_be(:project) { create(:project, :repository) }
|
||||
let_it_be_with_refind(:project) { create(:project, :repository) }
|
||||
|
||||
let(:repository) { project.repository }
|
||||
let(:broken_repository) { create(:project, :broken_storage).repository }
|
||||
|
|
@ -4432,4 +4432,33 @@ RSpec.describe Repository, feature_category: :source_code_management do
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#ignore_revs_file_blob' do
|
||||
subject { repository.ignore_revs_file_blob }
|
||||
|
||||
context 'when there is an ignore revs file on the default branch' do
|
||||
let(:file_content) { project.commit.id }
|
||||
let(:project_files) do
|
||||
{ Gitlab::Blame::IGNORE_REVS_FILE_NAME => file_content }
|
||||
end
|
||||
|
||||
around do |example|
|
||||
create_and_delete_files(project, project_files) do
|
||||
example.run
|
||||
end
|
||||
end
|
||||
|
||||
it { is_expected.to be_a_kind_of(Blob) }
|
||||
|
||||
context 'when the blame_ignore_revs is not enabled' do
|
||||
before do
|
||||
stub_feature_flags(blame_ignore_revs: false)
|
||||
end
|
||||
|
||||
it { is_expected.to be_nil }
|
||||
end
|
||||
end
|
||||
|
||||
it { is_expected.to be_nil }
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -158,16 +158,6 @@ RSpec.describe Clusters::Agents::Authorizations::CiAccess::FilterService, featur
|
|||
it 'does not return any authorizations' do
|
||||
expect(execute_filter).to eq []
|
||||
end
|
||||
|
||||
context 'when kubernetes_agent_protected_branches is disabled' do
|
||||
before do
|
||||
stub_feature_flags(kubernetes_agent_protected_branches: false)
|
||||
end
|
||||
|
||||
it 'does not filter for protected_ref' do
|
||||
expect(execute_filter).to match_array agent_authorizations_with_protected_agent
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,21 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
RSpec.shared_examples 'validates jsonb integer field' do |field, settings_attribute|
|
||||
it { is_expected.to allow_value({ field => 0 }).for(settings_attribute) }
|
||||
it { is_expected.to allow_value({ field => 100 }).for(settings_attribute) }
|
||||
it { is_expected.to allow_value({ field => 9999999 }).for(settings_attribute) }
|
||||
it { is_expected.not_to allow_value({ field => "string" }).for(settings_attribute) }
|
||||
it { is_expected.not_to allow_value({ field => true }).for(settings_attribute) }
|
||||
it { is_expected.not_to allow_value({ field => nil }).for(settings_attribute) }
|
||||
it { is_expected.not_to allow_value({ field => 1.5 }).for(settings_attribute) }
|
||||
end
|
||||
|
||||
RSpec.shared_examples 'validates jsonb boolean field' do |field, settings_attribute|
|
||||
it { is_expected.to allow_value({ field => true }).for(settings_attribute) }
|
||||
it { is_expected.to allow_value({ field => false }).for(settings_attribute) }
|
||||
it { is_expected.not_to allow_value({ field => "true" }).for(settings_attribute) }
|
||||
it { is_expected.not_to allow_value({ field => 1 }).for(settings_attribute) }
|
||||
it { is_expected.not_to allow_value({ field => nil }).for(settings_attribute) }
|
||||
it { is_expected.not_to allow_value({ field => "false" }).for(settings_attribute) }
|
||||
it { is_expected.not_to allow_value({ field => 0 }).for(settings_attribute) }
|
||||
end
|
||||
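These shared examples are intended to be pulled into a model spec with `it_behaves_like`; for instance (hypothetical field and attribute names), `it_behaves_like 'validates jsonb integer field', :max_retries, :settings`.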