Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2025-02-20 15:07:23 +00:00
parent 1f3fdb1a6b
commit 73e9ee9758
47 changed files with 1059 additions and 195 deletions

View File

@ -1709,10 +1709,11 @@ ee/lib/ee/api/entities/project.rb
/ee/app/models/concerns/audit_events/
/config/audit_events/types/type_schema.json
[Duo Chat] @gitlab-org/ai-powered/duo-chat
ee/lib/gitlab/llm/chain/agents/zero_shot/
ee/lib/gitlab/llm/chain/agents/single_action_executor.rb
ee/lib/gitlab/llm/chain/streamed_zero_shot_answer.rb
[Category:Duo Chat] @gitlab-org/ai-powered/duo-chat
ee/lib/gitlab/duo/chat/
ee/lib/gitlab/llm/chain/tools/
ee/lib/gitlab/llm/chain/utils/chat_authorizer.rb
ee/lib/gitlab/llm/chain/utils/chat_conversation.rb
ee/lib/gitlab/llm/completions/chat.rb
[Category:Code Suggestions] @jprovaznik @ck3g @acook.gitlab @partiaga @lma-git @missy-gitlab @tgao3701908 @emeraldjayde @squadri

View File

@ -1,9 +0,0 @@
---
# Cop supports --autocorrect.
Gitlab/DocumentationLinks/Link:
Exclude:
- 'ee/lib/ee/gitlab/namespace_storage_size_error_message.rb'
- 'ee/lib/gitlab/checks/secrets_check.rb'
- 'ee/spec/support/shared_contexts/secrets_check_shared_contexts.rb'
- 'lib/backup/tasks/database.rb'
- 'lib/system_check/helpers.rb'

View File

@ -50,7 +50,6 @@ Layout/LineEndStringConcatenationIndentation:
- 'ee/app/controllers/concerns/insights_actions.rb'
- 'ee/app/controllers/ee/ldap/omniauth_callbacks_controller.rb'
- 'ee/app/finders/geo/framework_registry_finder.rb'
- 'ee/app/graphql/ee/mutations/ci/project_ci_cd_settings_update.rb'
- 'ee/app/graphql/ee/mutations/issues/create.rb'
- 'ee/app/graphql/ee/mutations/merge_requests/update.rb'
- 'ee/app/graphql/ee/resolvers/ci/all_jobs_resolver.rb'

View File

@ -39,7 +39,6 @@ RSpec/BeEq:
- 'ee/spec/finders/group_saml_identity_finder_spec.rb'
- 'ee/spec/finders/llm/extra_resource_finder_spec.rb'
- 'ee/spec/graphql/api/validate_code_owner_file_spec.rb'
- 'ee/spec/graphql/ee/mutations/ci/project_ci_cd_settings_update_spec.rb'
- 'ee/spec/graphql/ee/resolvers/namespace_projects_resolver_spec.rb'
- 'ee/spec/graphql/ee/types/group_type_spec.rb'
- 'ee/spec/graphql/ee/types/namespace_type_spec.rb'
@ -1204,7 +1203,6 @@ RSpec/BeEq:
- 'spec/requests/api/graphql/mutations/achievements/update_user_achievement_spec.rb'
- 'spec/requests/api/graphql/mutations/alert_management/http_integration/destroy_spec.rb'
- 'spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb'
- 'spec/requests/api/graphql/mutations/ci/project_ci_cd_settings_update_spec.rb'
- 'spec/requests/api/graphql/mutations/dependency_proxy/group_settings/update_spec.rb'
- 'spec/requests/api/graphql/mutations/import/source_users/cancel_reassignment_spec.rb'
- 'spec/requests/api/graphql/mutations/merge_requests/set_locked_spec.rb'

View File

@ -1041,7 +1041,6 @@ RSpec/BeforeAllRoleAssignment:
- 'spec/requests/api/graphql/issue_status_counts_spec.rb'
- 'spec/requests/api/graphql/merge_request/merge_request_spec.rb'
- 'spec/requests/api/graphql/mutations/award_emojis/add_spec.rb'
- 'spec/requests/api/graphql/mutations/ci/project_ci_cd_settings_update_spec.rb'
- 'spec/requests/api/graphql/mutations/commits/create_spec.rb'
- 'spec/requests/api/graphql/mutations/custom_emoji/create_spec.rb'
- 'spec/requests/api/graphql/mutations/custom_emoji/destroy_spec.rb'

View File

@ -87,7 +87,6 @@ RSpec/NamedSubject:
- 'ee/spec/graphql/ee/mutations/boards/issues/issue_move_list_spec.rb'
- 'ee/spec/graphql/ee/mutations/boards/lists/create_spec.rb'
- 'ee/spec/graphql/ee/mutations/ci/job_token_scope/add_project_spec.rb'
- 'ee/spec/graphql/ee/mutations/ci/project_ci_cd_settings_update_spec.rb'
- 'ee/spec/graphql/ee/types/group_type_spec.rb'
- 'ee/spec/graphql/ee/types/namespace_type_spec.rb'
- 'ee/spec/graphql/graphql_triggers_spec.rb'

View File

@ -3,6 +3,13 @@ import { getNormalizedURL, getBaseURL, relativePathToAbsolute } from '~/lib/util
const { sanitize: dompurifySanitize, addHook, isValidAttribute } = DOMPurify;
const isValidCssColor = (color) => {
const s = new Option().style;
s.color = color;
// If the browser accepts the color, s.color is set to a non-empty string.
return s.color !== '';
};
export const defaultConfig = {
// Safely allow SVG <use> tags
ADD_TAGS: ['use', 'gl-emoji', 'copy-code'],
@ -97,13 +104,28 @@ addHook('afterSanitizeAttributes', (node) => {
const TEMPORARY_ATTRIBUTE = 'data-temp-href-target';
addHook('beforeSanitizeAttributes', (node) => {
addHook('beforeSanitizeAttributes', (node, _, config) => {
if (node.tagName === 'A' && node.hasAttribute('target')) {
node.setAttribute(TEMPORARY_ATTRIBUTE, node.getAttribute('target'));
}
// Preserve background-color on GlLabel when style tags are forbidden.
if (
config.FORBID_TAGS.includes('style') &&
node.classList?.contains('gl-label-text') &&
node.style?.backgroundColor
) {
const bgColor = node.style.backgroundColor;
// Only preserve the background color if it's valid.
if (isValidCssColor(bgColor)) {
// eslint-disable-next-line no-param-reassign
node.dataset.tempBg = bgColor;
}
node.removeAttribute('style');
}
});
addHook('afterSanitizeAttributes', (node) => {
addHook('afterSanitizeAttributes', (node, _, config) => {
if (node.tagName === 'A' && node.hasAttribute(TEMPORARY_ATTRIBUTE)) {
node.setAttribute('target', node.getAttribute(TEMPORARY_ATTRIBUTE));
node.removeAttribute(TEMPORARY_ATTRIBUTE);
@ -112,6 +134,19 @@ addHook('afterSanitizeAttributes', (node) => {
node.setAttribute('rel', appendSecureRelValue(rel));
}
}
// Restore background-color on GlLabel when style tags are forbidden.
if (
config.FORBID_TAGS.includes('style') &&
node.classList?.contains('gl-label-text') &&
node.dataset.tempBg &&
isValidCssColor(node.dataset.tempBg)
) {
// eslint-disable-next-line no-param-reassign
node.style.backgroundColor = node.dataset.tempBg;
// eslint-disable-next-line no-param-reassign
delete node.dataset.tempBg;
}
});
export const sanitize = (val, config) => dompurifySanitize(val, { ...defaultConfig, ...config });

View File

@ -6,6 +6,7 @@ import UserCalloutDismisser from '~/vue_shared/components/user_callout_dismisser
import SectionLayout from '~/vue_shared/security_configuration/components/section_layout.vue';
import SafeHtml from '~/vue_shared/directives/safe_html';
import PageHeading from '~/vue_shared/components/page_heading.vue';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { SERVICE_PING_SECURITY_CONFIGURATION_THREAT_MANAGEMENT_VISIT } from '~/tracking/constants';
import { REPORT_TYPE_CONTAINER_SCANNING_FOR_REGISTRY } from '~/vue_shared/security_reports/constants';
import {
@ -43,8 +44,11 @@ export default {
'ee_component/security_configuration/components/container_scanning_for_registry_feature_card.vue'
),
PageHeading,
VulnerabilityArchives: () =>
import('ee_component/security_configuration/components/vulnerability_archives.vue'),
},
directives: { SafeHtml },
mixins: [glFeatureFlagsMixin()],
inject: ['projectFullPath', 'vulnerabilityTrainingDocsPath'],
props: {
augmentedSecurityFeatures: {
@ -103,6 +107,9 @@ export default {
!this.autoDevopsEnabledAlertDismissedProjects.includes(this.projectFullPath)
);
},
shouldShowVulnerabilityArchives() {
return this.glFeatures?.vulnerabilityArchival;
},
},
methods: {
getComponentName(feature) {
@ -244,6 +251,7 @@ export default {
<training-provider-list :security-training-enabled="securityTrainingEnabled" />
</template>
</section-layout>
<vulnerability-archives v-if="shouldShowVulnerabilityArchives" />
</gl-tab>
</gl-tabs>
</article>

View File

@ -7,7 +7,6 @@ module Mutations
include FindsProject
include Gitlab::Utils::StrongMemoize
include ::Ci::JobToken::InternalEventsTracking
authorize :admin_project
@ -38,6 +37,11 @@ module Mutations
description: 'Indicates the ability to push to the original project ' \
'repository using a job token'
argument :pipeline_variables_minimum_override_role,
GraphQL::Types::String,
required: false,
description: 'Minimum role required to set variables when creating a pipeline or running a job.'
field :ci_cd_settings,
Types::Ci::CiCdSettingType,
null: false,
@ -48,23 +52,40 @@ module Mutations
raise Gitlab::Graphql::Errors::ArgumentError, 'job_token_scope_enabled can only be set to false'
end
settings = project(full_path).ci_cd_settings
settings.update(args)
project = authorized_find!(full_path)
track_job_token_scope_setting_changes(settings, current_user) unless settings.errors.any?
response = ::Projects::UpdateService.new(
project,
current_user,
project_update_params(project, **args)
).execute
{
ci_cd_settings: settings,
errors: errors_on_object(settings)
}
settings = project.ci_cd_settings
if response[:status] == :success
{
ci_cd_settings: settings,
errors: errors_on_object(settings)
}
else
{
ci_cd_settings: settings,
errors: [response[:message]]
}
end
end
private
def project(full_path)
strong_memoize_with(:project, full_path) do
authorized_find!(full_path)
end
def project_update_params(_project, **args)
{
keep_latest_artifact: args[:keep_latest_artifact],
ci_outbound_job_token_scope_enabled: args[:job_token_scope_enabled],
ci_inbound_job_token_scope_enabled: args[:inbound_job_token_scope_enabled],
ci_push_repository_for_job_token_allowed: args[:push_repository_for_job_token_allowed],
restrict_user_defined_variables: args[:restrict_user_defined_variables],
ci_pipeline_variables_minimum_override_role: args[:pipeline_variables_minimum_override_role]
}.compact
end
end
end

View File

@ -8,7 +8,7 @@ module Mutations
graphql_name 'BranchRuleSquashOptionUpdate'
description 'Update a squash option for a branch rule'
authorize :update_branch_rule
authorize :update_squash_option
argument :branch_rule_id, ::Types::GlobalIDType[::Projects::BranchRule],
required: true,
description: 'Global ID of the branch rule.'

View File

@ -32,6 +32,11 @@ module Types
null: true,
description: 'Whether merged results pipelines are enabled.',
method: :merge_pipelines_enabled?
field :pipeline_variables_minimum_override_role,
GraphQL::Types::String,
null: false,
description: 'Minimum role required to set variables when creating a pipeline or running a job.',
authorize: :admin_project
field :project,
Types::ProjectType,
null: true,

View File

@ -80,6 +80,19 @@ class ProjectCiCdSetting < ApplicationRecord
role_access_level >= role_project_minimum_access_level
end
def pipeline_variables_minimum_override_role=(value)
return if value.nil?
self.restrict_user_defined_variables = true
self[:pipeline_variables_minimum_override_role] = value
end
def pipeline_variables_minimum_override_role
return 'developer' unless restrict_user_defined_variables
self[:pipeline_variables_minimum_override_role]
end
private
def set_pipeline_variables_secure_defaults

View File

@ -411,10 +411,6 @@ class Todo < ApplicationRecord
self_added? && (assigned? || review_requested?)
end
def keep_around_commit
project.repository.keep_around(self.commit_id, source: self.class.name)
end
private
def build_work_item_target_url

View File

@ -8,6 +8,8 @@ module Projects
enable :update_branch_rule
enable :destroy_branch_rule
end
rule { can?(:update_branch_rule) }.enable :update_squash_option
end
end

View File

@ -52,7 +52,7 @@ module Projects
end
def authorized?
Ability.allowed?(current_user, :update_branch_rule, branch_rule)
Ability.allowed?(current_user, :update_squash_option, branch_rule)
end
end
end

View File

@ -323,9 +323,6 @@ class TodoService
todos = bulk_insert_todos(users, attributes)
users.each { |user| track_todo_creation(user, issue_type, namespace, project) }
# replicate `keep_around_commit` after_save callback
todos.select { |todo| todo.commit_id.present? }.each(&:keep_around_commit)
Users::UpdateTodoCountCacheService.new(users.map(&:id)).execute
todos

View File

@ -1,6 +1,6 @@
---
key_path: redis_hll_counters.count_distinct_namespace_id_from_create_ci_runner
description: Count of unique namespaces where a group runner was created
description: Count of unique namespaces where a runner was created
product_group: runner
product_categories:
- fleet_visibility

View File

@ -1,6 +1,6 @@
---
key_path: redis_hll_counters.count_distinct_project_id_from_create_ci_runner
description: Count of unique projects where a project runner was created
description: Count of unique projects where a runner was created
product_group: runner
product_categories:
- fleet_visibility

View File

@ -0,0 +1,11 @@
---
migration_job_name: ResyncApprovalPolicies
description:
Triggers PersistSecurityPoliciesWorker for each policy configuration with incorrect indexes.
This migration triggers a worker for each row, so we cannot determine when the migration is finished.
Before finalizing this BBM, we need to make sure that the indexes are correct.
feature_category: security_policy_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/181303
milestone: '17.10'
queued_migration_version: 20250212162708
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
class QueueResyncApprovalPolicies < Gitlab::Database::Migration[2.2]
milestone '17.10'
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
MIGRATION = "ResyncApprovalPolicies"
DELAY_INTERVAL = 10.minutes
BATCH_SIZE = 10
SUB_BATCH_SIZE = 10
def up
queue_batched_background_migration(
MIGRATION,
:security_policies,
:id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(MIGRATION, :security_policies, :id, [])
end
end

View File

@ -0,0 +1 @@
2d10aff098d29c95e3aff390411d8cf583f400daef6ee23b49f1f0b9d55fa52e

View File

@ -477,6 +477,21 @@ You should not use the task for routine checks as database inconsistencies might
gitlab-rake gitlab:db:schema_checker:run
```
## Check the database for duplicate CI/CD tags
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/518698) in GitLab 17.10.
This Rake task checks the `ci` database for duplicate tags in the `tags` table.
This issue might affect instances that have undergone multiple major upgrades over an extended period.
Run the following command to search for duplicate tags and rewrite any tag assignments that
reference duplicate tags to use the original tag instead.
```shell
sudo gitlab-rake gitlab:db:deduplicate_tags
```
To run this command in dry-run mode, set the environment variable `DRY_RUN=true`.
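For example, a dry run that only reports what would change might look like this:
```shell
sudo DRY_RUN=true gitlab-rake gitlab:db:deduplicate_tags
```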
## Troubleshooting
### Advisory lock connection information

View File

@ -425,6 +425,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="querycidedicatedhostedrunnerusagebillingmonth"></a>`billingMonth` | [`Date`](#date) | First day of the month to retrieve data for. |
| <a id="querycidedicatedhostedrunnerusagegrouping"></a>`grouping` | [`GroupingEnum`](#groupingenum) | Groups usage data by instance aggregate or root namespace. |
| <a id="querycidedicatedhostedrunnerusagerunnerid"></a>`runnerId` | [`CiRunnerID`](#cirunnerid) | Runner ID to retrieve data for. |
| <a id="querycidedicatedhostedrunnerusageyear"></a>`year` | [`Int`](#int) | Year to retrieve data for. |
### `Query.ciMinutesUsage`
@ -9023,6 +9024,7 @@ Input type: `ProjectCiCdSettingsUpdateInput`
| <a id="mutationprojectcicdsettingsupdatemergepipelinesenabled"></a>`mergePipelinesEnabled` | [`Boolean`](#boolean) | Indicates if merged results pipelines are enabled for the project. |
| <a id="mutationprojectcicdsettingsupdatemergetrainsenabled"></a>`mergeTrainsEnabled` | [`Boolean`](#boolean) | Indicates if merge trains are enabled for the project. |
| <a id="mutationprojectcicdsettingsupdatemergetrainsskiptrainallowed"></a>`mergeTrainsSkipTrainAllowed` | [`Boolean`](#boolean) | Indicates whether an option is allowed to merge without refreshing the merge train. Ignored unless the `merge_trains_skip_train` feature flag is also enabled. |
| <a id="mutationprojectcicdsettingsupdatepipelinevariablesminimumoverriderole"></a>`pipelineVariablesMinimumOverrideRole` | [`String`](#string) | Minimum role required to set variables when creating a pipeline or running a job. |
| <a id="mutationprojectcicdsettingsupdatepushrepositoryforjobtokenallowed"></a>`pushRepositoryForJobTokenAllowed` | [`Boolean`](#boolean) | Indicates the ability to push to the original project repository using a job token. |
#### Fields
@ -35527,6 +35529,7 @@ four standard [pagination arguments](#pagination-arguments):
| <a id="projectcicdsettingmergepipelinesenabled"></a>`mergePipelinesEnabled` | [`Boolean`](#boolean) | Whether merged results pipelines are enabled. |
| <a id="projectcicdsettingmergetrainsenabled"></a>`mergeTrainsEnabled` | [`Boolean`](#boolean) | Whether merge trains are enabled. |
| <a id="projectcicdsettingmergetrainsskiptrainallowed"></a>`mergeTrainsSkipTrainAllowed` | [`Boolean!`](#boolean) | Whether merge immediately is allowed for merge trains. |
| <a id="projectcicdsettingpipelinevariablesminimumoverriderole"></a>`pipelineVariablesMinimumOverrideRole` | [`String!`](#string) | Minimum role required to set variables when creating a pipeline or running a job. |
| <a id="projectcicdsettingproject"></a>`project` | [`Project`](#project) | Project the CI/CD settings belong to. |
| <a id="projectcicdsettingpushrepositoryforjobtokenallowed"></a>`pushRepositoryForJobTokenAllowed` | [`Boolean`](#boolean) | Indicates the ability to push to the original project repository using a job token. |

View File

@ -139,6 +139,32 @@ job-artifact-upload-on-timeout:
when: on_failure # on_failure because script termination after a timeout is treated as a failure
```
### Ensuring `after_script` execution
For `after_script` to run successfully, the total of `RUNNER_SCRIPT_TIMEOUT` +
`RUNNER_AFTER_SCRIPT_TIMEOUT` must not exceed the job's configured timeout.
The following example shows how to configure timeouts to ensure `after_script` runs even when the main script times out:
```yaml
job-with-script-timeouts:
timeout: 5m
variables:
RUNNER_SCRIPT_TIMEOUT: 1m
RUNNER_AFTER_SCRIPT_TIMEOUT: 1m
script:
- echo "Starting build..."
- sleep 120 # Wait 2 minutes to trigger timeout. Script aborts after 1 minute due to RUNNER_SCRIPT_TIMEOUT.
- echo "Build finished."
after_script:
- echo "Starting Clean-up..."
- sleep 15 # Wait just a few seconds. Runs successfully because it's within RUNNER_AFTER_SCRIPT_TIMEOUT.
- echo "Clean-up finished."
```
The `script` is canceled by `RUNNER_SCRIPT_TIMEOUT`, but the `after_script` runs successfully because it takes 15 seconds,
which is less than both `RUNNER_AFTER_SCRIPT_TIMEOUT` and the job's `timeout` value.
## Protecting sensitive information
The security risks are greater when using instance runners as they are available by default to all groups and projects in a GitLab instance.

View File

@ -1195,8 +1195,10 @@ Scripts you specify in `after_script` execute in a new shell, separate from any
immediately becomes invalid if the job is canceled. See [issue](https://gitlab.com/gitlab-org/gitlab/-/issues/473376)
for more details.
If a job times out, the `after_script` commands do not execute.
[An issue exists](https://gitlab.com/gitlab-org/gitlab/-/issues/15603) to add support for executing `after_script` commands for timed-out jobs.
For jobs that time out:
- `after_script` commands do not execute by default.
- You can [configure timeout values](../runners/configure_runners.md#ensuring-after_script-execution) to ensure `after_script` runs by setting appropriate `RUNNER_SCRIPT_TIMEOUT` and `RUNNER_AFTER_SCRIPT_TIMEOUT` values that don't exceed the job's timeout.
**Related topics**:

View File

@ -0,0 +1,91 @@
# frozen_string_literal: true
module ActiveContext
module Databases
module Concerns
module ElasticExecutor
include Executor
private
def raw_client
@raw_client ||= adapter.client.client
end
def do_create_collection(name:, number_of_partitions:, fields:)
strategy = PartitionStrategy.new(
name: name,
number_of_partitions: number_of_partitions
)
# Early return if everything exists
return if collection_exists?(strategy)
# Create missing partitions
strategy.each_partition do |partition_name|
create_partition(partition_name, fields) unless index_exists?(partition_name)
end
# Create alias if needed
create_alias(strategy) unless alias_exists?(strategy.collection_name)
end
def create_partition(name, fields)
body = {
mappings: {
properties: build_field_mappings(fields)
},
settings: settings(fields)
}
raw_client.indices.create(index: name, body: body)
end
def create_alias(strategy)
actions = [{
add: {
indices: strategy.partition_names,
alias: strategy.collection_name
}
}]
raw_client.indices.update_aliases(body: { actions: actions })
end
def build_field_mappings(fields)
fields.each_with_object({}) do |field, mappings|
mappings[field.name] = case field
when Field::Bigint
{ type: 'long' }
when Field::Prefix
{ type: 'keyword' }
when Field::Vector
vector_field_mapping(field)
else
raise ArgumentError, "Unknown field type: #{field.class}"
end
end
end
def collection_exists?(strategy)
return false unless alias_exists?(strategy.collection_name)
strategy.fully_exists? do |partition_name|
index_exists?(partition_name)
end
end
def index_exists?(name)
raw_client.indices.exists?(index: name)
end
def alias_exists?(name)
raw_client.indices.exists_alias?(name: name)
end
def settings(_)
{}
end
end
end
end
end

View File

@ -4,85 +4,15 @@ module ActiveContext
module Databases
module Elasticsearch
class Executor
include ActiveContext::Databases::Concerns::Executor
include ActiveContext::Databases::Concerns::ElasticExecutor
private
def raw_client
@raw_client ||= adapter.client.client
end
def do_create_collection(name:, number_of_partitions:, fields:)
strategy = PartitionStrategy.new(
name: name,
number_of_partitions: number_of_partitions
)
# Early return if everything exists
return if collection_exists?(strategy)
# Create missing partitions
strategy.each_partition do |partition_name|
create_partition(partition_name, fields) unless index_exists?(partition_name)
end
# Create alias if needed
create_alias(strategy) unless alias_exists?(strategy.collection_name)
end
def create_partition(name, fields)
mappings = {
mappings: {
properties: build_field_mappings(fields)
}
def vector_field_mapping(field)
{
type: 'dense_vector',
dims: field.options[:dimensions],
index: true,
similarity: 'cosine'
}
raw_client.indices.create(index: name, body: mappings)
end
def create_alias(strategy)
actions = [{
add: {
indices: strategy.partition_names,
alias: strategy.collection_name
}
}]
raw_client.indices.update_aliases(body: { actions: actions })
end
def build_field_mappings(fields)
fields.each_with_object({}) do |field, mappings|
mappings[field.name] = case field
when Field::Bigint
{ type: 'long' }
when Field::Prefix
{ type: 'keyword' }
when Field::Vector
{
type: 'dense_vector',
dims: field.options[:dimensions],
index: true,
similarity: 'cosine'
}
else
raise ArgumentError, "Unknown field type: #{field.class}"
end
end
end
def collection_exists?(strategy)
return false unless alias_exists?(strategy.collection_name)
strategy.fully_exists? do |partition_name|
index_exists?(partition_name)
end
end
def index_exists?(name)
raw_client.indices.exists?(index: name)
end
def alias_exists?(name)
raw_client.indices.exists_alias?(name: name)
end
end
end

View File

@ -4,7 +4,34 @@ module ActiveContext
module Databases
module Opensearch
class Executor
include ActiveContext::Databases::Concerns::Executor
include ActiveContext::Databases::Concerns::ElasticExecutor
# These constants match the defaults on Elasticsearch
# to ensure we have similar results on OpenSearch and Elasticsearch
EF_CONSTRUCTION = 100
M = 16
def vector_field_mapping(field)
{
type: 'knn_vector',
dimension: field.options[:dimensions],
method: {
name: 'hnsw',
engine: 'nmslib',
space_type: 'cosinesimil',
parameters: {
ef_construction: EF_CONSTRUCTION,
m: M
}
}
}
end
def settings(fields)
return super unless fields.any?(Field::Vector)
super.merge({ index: { knn: true } })
end
end
end
end

View File

@ -18,7 +18,7 @@ module Backup
Be sure to stop Puma, Sidekiq, and any other process that
connects to the database before proceeding. For Omnibus
installs, see the following link for more information:
#{help_page_url('administration/backup_restore/restore_gitlab.md', 'restore-for-linux-package-installations')}
#{::Gitlab::Routing.url_helpers.help_page_url('administration/backup_restore/restore_gitlab.md', anchor: 'restore-for-linux-package-installations')}
Before restoring the database, we will remove all existing
tables to avoid future upgrade problems. Be aware that if you have
@ -44,10 +44,6 @@ module Backup
def target
@target ||= ::Backup::Targets::Database.new(progress, options: options)
end
def help_page_url(path, anchor = nil)
::Gitlab::Routing.url_helpers.help_page_url(path, anchor: anchor)
end
end
end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class ResyncApprovalPolicies < BatchedMigrationJob
feature_category :security_policy_management
def perform; end
end
end
end
Gitlab::BackgroundMigration::ResyncApprovalPolicies.prepend_mod

View File

@ -0,0 +1,107 @@
# frozen_string_literal: true
module Gitlab
module Database
class DeduplicateCiTags
TAGGING_BATCH_SIZE = 10_000
TAG_BATCH_SIZE = 10_000
TAGS_INDEX_NAME = 'index_tags_on_name'
def initialize(logger:, dry_run:)
@logger = logger
@dry_run = dry_run
end
def execute
logger.info "DRY RUN:" if dry_run
good_tag_ids_query = ::Ci::Tag.group(:name).select('MIN(id) AS id')
bad_tag_map = ::Ci::Tag
.id_not_in(good_tag_ids_query)
.pluck(:id, :name)
.to_h
if bad_tag_map.empty?
logger.info "No duplicate tags found in ci database"
return
end
logger.info "Deduplicating #{bad_tag_map.count} #{'tag'.pluralize(bad_tag_map.count)} for ci database"
bad_tag_ids = bad_tag_map.keys
good_tags_name_id_map = ::Ci::Tag.id_in(good_tag_ids_query).pluck(:name, :id).to_h
tag_remap = bad_tag_map.transform_values { |name| good_tags_name_id_map[name] }
deduplicate_ci_tags(bad_tag_ids, tag_remap)
logger.info 'Done'
end
private
attr_reader :logger, :dry_run
class Tagging < ::Ci::ApplicationRecord
include EachBatch
self.table_name = :taggings
end
def deduplicate_ci_tags(bad_tag_ids, tag_remap)
::Ci::PendingBuild.each_batch do |batch|
changes = batch
.filter { |pending_build| pending_build.tag_ids.intersect?(bad_tag_ids) }
.map do |pending_build|
{
**pending_build.slice(:id, :build_id, :partition_id, :project_id),
tag_ids: pending_build.tag_ids.map { |tag_id| tag_remap.fetch(tag_id, tag_id) }
}
end
::Ci::PendingBuild.upsert_all(changes, unique_by: :id, update_only: [:tag_ids]) unless dry_run
logger.info("Updated tag_ids on a batch of #{batch.count} #{::Ci::PendingBuild.table_name} records")
sleep(1)
end
deduplicate_ci_taggings(bad_tag_ids, tag_remap)
::Ci::Tag.include EachBatch
::Ci::Tag.id_in(bad_tag_ids).each_batch(of: TAG_BATCH_SIZE) do |batch|
count = dry_run ? batch.count : batch.delete_all
logger.info "Deleted batch of #{count} #{'tag'.pluralize(count)}"
end
unless dry_run
::Ci::Tag.connection.exec_query("DROP INDEX IF EXISTS #{TAGS_INDEX_NAME};")
::Ci::Tag.connection.exec_query(<<~SQL)
CREATE UNIQUE INDEX #{TAGS_INDEX_NAME} ON #{::Ci::Tag.table_name} USING btree (name);
SQL
end
logger.info "Recreated #{TAGS_INDEX_NAME}"
end
def deduplicate_ci_taggings(bad_tag_ids, tag_remap)
tagging_models = [Tagging, ::Ci::BuildTag, ::Ci::RunnerTagging]
tagging_models.each do |tagging_model|
tagging_model.include EachBatch
bad_tag_ids.each do |bad_tag_id|
tagging_model.where(tag_id: bad_tag_id).each_batch(of: TAGGING_BATCH_SIZE) do |batch|
batch.update_all(tag_id: tag_remap.fetch(bad_tag_id)) unless dry_run
end
logger.info(
"Updated tag_id #{bad_tag_id} on #{tagging_model.table_name} records to #{tag_remap.fetch(bad_tag_id)}"
)
end
end
end
end
end
end
Gitlab::Database::DeduplicateCiTags.prepend_mod

View File

@ -20,16 +20,6 @@ module SystemCheck
end
end
# Construct a help page based on the instance's external_url
#
# @param <String> path of the documentation page
# @param <String> anchor to the specific docs heading
#
# @return <String> URL of the help page
def construct_help_page_url(path, anchor = nil)
Rails.application.routes.url_helpers.help_page_url(path, anchor)
end
def see_installation_guide_section(section)
"doc/install/installation.md in section \"#{section}\""
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
namespace :gitlab do
namespace :db do
desc "GitLab | DB | Deduplicate CI tags"
task deduplicate_tags: %i[environment] do
Gitlab::Database::DeduplicateCiTags.new(
logger: Logger.new($stdout),
dry_run: Gitlab::Utils.to_boolean(ENV['DRY_RUN'], default: false)
).execute
end
end
end

View File

@ -14646,6 +14646,12 @@ msgstr ""
msgid "ComplianceFrameworksReport|Default"
msgstr ""
msgid "ComplianceFrameworksReport|Default framework removed successfully"
msgstr ""
msgid "ComplianceFrameworksReport|Default framework set successfully"
msgstr ""
msgid "ComplianceFrameworksReport|Description"
msgstr ""
@ -14655,12 +14661,24 @@ msgstr ""
msgid "ComplianceFrameworksReport|Failed to export framework"
msgstr ""
msgid "ComplianceFrameworksReport|Failed to remove default framework"
msgstr ""
msgid "ComplianceFrameworksReport|Failed to set default framework"
msgstr ""
msgid "ComplianceFrameworksReport|Framework ID copied to clipboard."
msgstr ""
msgid "ComplianceFrameworksReport|Policies"
msgstr ""
msgid "ComplianceFrameworksReport|Remove as default framework"
msgstr ""
msgid "ComplianceFrameworksReport|Set as default framework"
msgstr ""
msgid "ComplianceFrameworksReport|Use the compliance framework ID in configuration or API requests."
msgstr ""
@ -21342,6 +21360,9 @@ msgstr ""
msgid "Download PDF"
msgstr ""
msgid "Download all"
msgstr ""
msgid "Download artifacts"
msgstr ""
@ -47772,6 +47793,9 @@ msgstr ""
msgid "Remove approvers?"
msgstr ""
msgid "Remove as default"
msgstr ""
msgid "Remove asset link"
msgstr ""
@ -51539,6 +51563,11 @@ msgstr ""
msgid "SecurityConfiguration|%{scanType} configuration code snippet"
msgstr ""
msgid "SecurityConfiguration|1 vulnerability"
msgid_plural "SecurityConfiguration|%d vulnerabilities"
msgstr[0] ""
msgstr[1] ""
msgid "SecurityConfiguration|An error occurred while creating the merge request."
msgstr ""
@ -51590,6 +51619,9 @@ msgstr ""
msgid "SecurityConfiguration|Immediately begin risk analysis and remediation with application security features. Start with SAST and Secret Detection, available to all plans. Upgrade to Ultimate to get all features, including:"
msgstr ""
msgid "SecurityConfiguration|Learn more about our vulnerability data retention policies"
msgstr ""
msgid "SecurityConfiguration|Learn more about vulnerability training"
msgstr ""
@ -51653,9 +51685,15 @@ msgstr ""
msgid "SecurityConfiguration|Using custom settings. You won't receive automatic updates on this variable. %{anchorStart}Restore to default%{anchorEnd}"
msgstr ""
msgid "SecurityConfiguration|Vulnerabilities are retained in the database for one year after the last update. After one year of inactivity, vulnerabilities are archived on the first of each month. Archives are removed after five years."
msgstr ""
msgid "SecurityConfiguration|Vulnerability Management"
msgstr ""
msgid "SecurityConfiguration|Vulnerability archives"
msgstr ""
msgid "SecurityConfiguration|Vulnerability details and statistics in the merge request"
msgstr ""
@ -54171,6 +54209,9 @@ msgstr ""
msgid "Set a password on your account to pull or push via %{protocol}."
msgstr ""
msgid "Set as default"
msgstr ""
msgid "Set due date"
msgstr ""

View File

@ -101,6 +101,21 @@ describe('~/lib/dompurify', () => {
expect(sanitize('<form method="post" action="path"></form>')).toBe('');
});
describe('handles style attributes correctly', () => {
it('does remove all styles when style is forbidden', () => {
const htmlStyle = '<span style="background-color: red; color: green; width: 100%;">hello</span>';
expect(sanitize(htmlStyle, { FORBID_ATTR: ['style'] })).toBe('<span>hello</span>');
});
it("doesn't remove background-color from GlLabel style when style is forbidden", () => {
const htmlStyle =
'<span class="gl-label-text" style="background-color: red; color: green; width: 100%;">hello</a>';
expect(sanitize(htmlStyle, { FORBID_ATTR: ['style'] })).toBe(
'<span class="gl-label-text" style="background-color: red;">hello</span>',
);
});
});
describe.each`
type | gon
${'root'} | ${rootGon}

View File

@ -10,6 +10,7 @@ RSpec.describe Types::Ci::CiCdSettingType, feature_category: :continuous_integra
inbound_job_token_scope_enabled job_token_scope_enabled
keep_latest_artifact merge_pipelines_enabled project
push_repository_for_job_token_allowed
pipeline_variables_minimum_override_role
]
if Gitlab.ee?

View File

@ -0,0 +1,238 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::DeduplicateCiTags, :aggregate_failures, feature_category: :runner do
let(:logger) { instance_double(Logger) }
let(:dry_run) { false }
let(:service) { described_class.new(logger: logger, dry_run: dry_run) }
# Set up the following structure:
# - runner1 tagged with `tag1`, `tag2` (#1)
# - runner2 tagged with `tag3`
# - build1 tagged with `tag2` (#1)
# - pending_build1 tagged with `tag1`
let(:connection) { ::Ci::ApplicationRecord.connection }
let(:partition_id) { 100 }
let(:project_id) { 1 }
let(:build1_id) { create_build }
let!(:pending_build1_id) { create_pending_build(build1_id, [tag_ids.first]) }
let(:runner1_id) { create_runner }
let(:runner2_id) { create_runner }
let(:tag_ids) { connection.select_values("INSERT INTO tags (name) VALUES ('tag1'), ('tag2'), ('tag3') RETURNING id") }
let!(:tagging_ids) do
create_taggings(
{ taggable_type: 'Ci::Runner', taggable_id: runner1_id, tag_id: tag_ids.first },
{ taggable_type: 'Ci::Runner', taggable_id: runner1_id, tag_id: tag_ids.second },
{ taggable_type: 'Ci::Runner', taggable_id: runner2_id, tag_id: tag_ids.third },
{ taggable_type: 'Ci::Build', taggable_id: build1_id, tag_id: tag_ids.second }
)
end
let!(:ci_runner_tagging_ids) do
connection.select_values(<<~SQL)
INSERT INTO ci_runner_taggings (runner_id, tag_id, runner_type)
VALUES (#{runner1_id}, #{tag_ids.first}, 1),
(#{runner1_id}, #{tag_ids.second}, 1),
(#{runner2_id}, #{tag_ids.third}, 1)
RETURNING id;
SQL
end
let!(:ci_build_tagging_ids) do
connection.select_values(<<~SQL)
INSERT INTO p_ci_build_tags (build_id, tag_id, partition_id, project_id)
VALUES (#{build1_id}, #{tag_ids.second}, #{partition_id}, #{project_id})
RETURNING id;
SQL
end
describe '#execute' do
subject(:execute) { service.execute }
before do
allow(logger).to receive(:info)
end
it 'does not change number of tags or taggings tag_ids' do
expect { execute }.to not_change { table_count('tags') }
.and not_change { tagging_relationship_for(tagging_ids.third) }
.and not_change { tagging_relationship_for(tagging_ids.second) }
.and not_change { tagging_relationship_for(tagging_ids.third) }
.and not_change { ci_pending_build_tag_ids_for(pending_build1_id) }
.and not_change { ci_runner_tagging_relationship_for(ci_runner_tagging_ids.second) }
.and not_change { ci_runner_tagging_relationship_for(ci_runner_tagging_ids.third) }
expect(logger).to have_received(:info).with('No duplicate tags found in ci database')
end
context 'when duplicate tags exist' do
# Set up the following structure:
# - runner1 tagged with `tag1`, `tag2` (#1)
# - runner2 tagged with `tag3`, `tag2` (#2)
# - build1 tagged with `tag2` (#1)
# - build2 tagged with `tag2` (#3), `tag3`
# - pending_build2 tagged with `tag2` (#3), `tag3`
let(:duplicate_tag_ids) do
connection.select_values("INSERT INTO tags (name) VALUES ('tag2'), ('tag2') RETURNING id")
end
let(:build2_id) { create_build }
let(:pending_build2_id) { create_pending_build(build2_id, [duplicate_tag_ids.second, tag_ids.third]) }
let(:build2_tagging_ids) do
create_taggings(
{ taggable_type: 'Ci::Build', taggable_id: build2_id, tag_id: duplicate_tag_ids.second },
{ taggable_type: 'Ci::Build', taggable_id: build2_id, tag_id: tag_ids.third }
)
end
let(:duplicate_build_tagging_id) { build2_tagging_ids.first }
let!(:duplicate_ci_build_tagging_id) do
connection.select_value(<<~SQL)
INSERT INTO p_ci_build_tags (build_id, tag_id, partition_id, project_id)
VALUES (#{build2_id}, #{duplicate_tag_ids.second}, #{partition_id}, #{project_id}) RETURNING id;
SQL
end
let(:duplicate_runner_tagging_id) do
create_taggings({ taggable_type: 'Ci::Runner', taggable_id: runner2_id, tag_id: duplicate_tag_ids.first }).sole
end
let(:duplicate_ci_runner_tagging_id) do
connection.select_value(<<~SQL)
INSERT INTO ci_runner_taggings (runner_id, tag_id, runner_type)
VALUES (#{runner2_id}, #{duplicate_tag_ids.first}, 1) RETURNING id
SQL
end
let(:duplicate_tagging_ids) do
[
duplicate_build_tagging_id, duplicate_ci_build_tagging_id, duplicate_runner_tagging_id,
duplicate_ci_runner_tagging_id, pending_build2_id
]
end
around do |example|
connection.transaction do
tagging_ids
# allow a scenario where multiple tags with same name coexist
connection.execute('DROP INDEX index_tags_on_name')
duplicate_tagging_ids
example.run
end
end
it 'deletes duplicate tags and updates taggings' do
expect { execute }
.to change { table_count('tags') }.by(-2)
.and not_change { tagging_relationship_for(tagging_ids.third) }
.and not_change { tagging_relationship_for(tagging_ids.second) }
.and not_change { ci_runner_tagging_relationship_for(ci_runner_tagging_ids.second) }
.and not_change { ci_pending_build_tag_ids_for(pending_build1_id) }
.and change { tagging_relationship_for(duplicate_build_tagging_id) }
.from(build2_id => duplicate_tag_ids.second)
.to(build2_id => tag_ids.second)
.and change { ci_build_tagging_relationship_for(duplicate_ci_build_tagging_id) }
.from(build2_id => duplicate_tag_ids.second)
.to(build2_id => tag_ids.second)
.and change { ci_pending_build_tag_ids_for(pending_build2_id) }
.from([duplicate_tag_ids.second, tag_ids.third])
.to([tag_ids.second, tag_ids.third])
.and change { tagging_relationship_for(duplicate_runner_tagging_id) }
.from(runner2_id => duplicate_tag_ids.first)
.to(runner2_id => tag_ids.second)
.and change { ci_runner_tagging_relationship_for(duplicate_ci_runner_tagging_id) }
.from(runner2_id => duplicate_tag_ids.first)
.to(runner2_id => tag_ids.second)
# Index was recreated
expect(index_exists?('index_tags_on_name')).to be true
expect(logger).to have_received(:info).with('Deduplicating 2 tags for ci database')
expect(logger).to have_received(:info).with('Done')
end
context 'and dry_run is true' do
let(:dry_run) { true }
it 'does not change number of tags or taggings tag_ids' do
expect { execute }.to not_change { table_count('tags') }
.and not_change { tagging_relationship_for(tagging_ids.third) }
.and not_change { tagging_relationship_for(tagging_ids.second) }
.and not_change { ci_runner_tagging_relationship_for(ci_runner_tagging_ids.second) }
.and not_change { tagging_relationship_for(duplicate_build_tagging_id) }
.and not_change { ci_pending_build_tag_ids_for(pending_build2_id) }
.and not_change { ci_build_tagging_relationship_for(duplicate_ci_build_tagging_id) }
.and not_change { tagging_relationship_for(duplicate_runner_tagging_id) }
.and not_change { ci_runner_tagging_relationship_for(duplicate_ci_runner_tagging_id) }
# Index wasn't recreated because we're in dry run mode
expect(index_exists?('index_tags_on_name')).to be false
expect(logger).to have_received(:info).with('DRY RUN:')
expect(logger).to have_received(:info).with('Deduplicating 2 tags for ci database')
expect(logger).to have_received(:info).with('Done')
end
end
end
end
private
def create_runner
connection.select_value("INSERT INTO ci_runners (runner_type) VALUES (#{project_id}) RETURNING id")
end
def create_build
connection.select_value(<<~SQL)
INSERT INTO p_ci_builds (partition_id, project_id) VALUES (#{partition_id}, #{project_id}) RETURNING id
SQL
end
def create_pending_build(build_id, tag_ids)
connection.select_value(<<~SQL)
INSERT INTO ci_pending_builds (build_id, tag_ids, partition_id, project_id)
VALUES (#{build_id}, ARRAY#{tag_ids}, #{partition_id}, #{project_id}) RETURNING id
SQL
end
def create_taggings(*taggings)
values = taggings.map { |t| "(#{t[:tag_id]}, #{t[:taggable_id]}, '#{t[:taggable_type]}', 'tags')" }.join(', ')
connection.select_values(<<~SQL)
INSERT INTO taggings (tag_id, taggable_id, taggable_type, context)
VALUES #{values}
RETURNING id
SQL
end
def table_count(table_name)
connection.select_value("SELECT COUNT(*) FROM #{table_name}")
end
def index_exists?(index_name)
connection.select_value(<<-SQL).present?
SELECT 1 FROM pg_class WHERE relname = '#{index_name}'
SQL
end
def tagging_relationship_for(tagging_id)
connection.execute("SELECT taggable_id, tag_id FROM taggings WHERE id = #{tagging_id}").values.to_h
end
def ci_pending_build_tag_ids_for(pending_build_id)
connection.select_value("SELECT tag_ids FROM ci_pending_builds WHERE id = #{pending_build_id}")
.tr('{}', '').split(',').map(&:to_i)
end
def ci_build_tagging_relationship_for(tagging_id)
connection.execute("SELECT build_id, tag_id FROM p_ci_build_tags WHERE id = #{tagging_id}").values.to_h
end
def ci_runner_tagging_relationship_for(tagging_id)
connection.execute("SELECT runner_id, tag_id FROM ci_runner_taggings WHERE id = #{tagging_id}").values.to_h
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueResyncApprovalPolicies, feature_category: :security_policy_management do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
gitlab_schema: :gitlab_main_cell,
table_name: :security_policies,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE
)
}
end
end
end

View File

@ -60,6 +60,32 @@ RSpec.describe ProjectCiCdSetting, feature_category: :continuous_integration do
end
end
context 'when restrict_user_defined_variables is false' do
let(:project) { build(:project) }
let(:setting) { described_class.new(project: project) }
before do
setting.pipeline_variables_minimum_override_role = :maintainer
setting.restrict_user_defined_variables = false
end
it_behaves_like 'sets the default ci_pipeline_variables_minimum_override_role', 'developer'
end
context 'when restrict_user_defined_variables is true' do
let(:project) { build(:project) }
let(:setting) { described_class.new(project: project) }
before do
setting.pipeline_variables_minimum_override_role = :maintainer
setting.restrict_user_defined_variables = true
end
it 'returns the set role' do
expect(setting.pipeline_variables_minimum_override_role).to eq('maintainer')
end
end
context 'when a namespace is defined' do
let_it_be(:project) { create(:project, :with_namespace_settings) }
@ -99,6 +125,36 @@ RSpec.describe ProjectCiCdSetting, feature_category: :continuous_integration do
end
end
describe '#pipeline_variables_minimum_override_role=' do
let(:project) { build(:project) }
let(:setting) { described_class.new(project: project) }
context 'when setting a value' do
before do
setting.pipeline_variables_minimum_override_role = :developer
end
it 'sets the role' do
expect(setting.pipeline_variables_minimum_override_role_for_database).to eq(described_class::DEVELOPER_ROLE)
end
it 'enables restrict_user_defined_variables' do
expect(setting.restrict_user_defined_variables).to be true
end
end
context 'when setting nil value' do
before do
setting.restrict_user_defined_variables = false
setting.pipeline_variables_minimum_override_role = nil
end
it 'does not change the current settings' do
expect(setting.restrict_user_defined_variables).to be false
end
end
end
describe '#id_token_sub_claim_components' do
it 'is project_path, ref_type, ref by default' do
expect(described_class.new.id_token_sub_claim_components).to eq(%w[project_path ref_type ref])

View File

@ -1055,8 +1055,8 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
before do
ci_cd_settings = project.ci_cd_settings
ci_cd_settings.restrict_user_defined_variables = restrict_variables
ci_cd_settings.pipeline_variables_minimum_override_role = minimum_role
ci_cd_settings.restrict_user_defined_variables = restrict_variables
ci_cd_settings.save!
end

View File

@ -18,6 +18,7 @@ RSpec.describe Projects::BranchRulePolicy, feature_category: :source_code_manage
end
it_behaves_like 'allows branch rule crud'
it { is_expected.to be_allowed(:update_squash_option) }
end
context 'as a developer' do
@ -26,6 +27,7 @@ RSpec.describe Projects::BranchRulePolicy, feature_category: :source_code_manage
end
it_behaves_like 'disallows branch rule crud'
it { is_expected.not_to be_allowed(:update_squash_option) }
end
context 'as a guest' do
@ -34,5 +36,6 @@ RSpec.describe Projects::BranchRulePolicy, feature_category: :source_code_manage
end
it_behaves_like 'disallows branch rule crud'
it { is_expected.not_to be_allowed(:update_squash_option) }
end
end

View File

@ -51,6 +51,8 @@ RSpec.describe 'Getting Ci Cd Setting', feature_category: :continuous_integratio
project.ci_cd_settings.inbound_job_token_scope_enabled?)
expect(settings_data['pushRepositoryForJobTokenAllowed']).to eql(
project.ci_cd_settings.push_repository_for_job_token_allowed?)
expect(settings_data['pipelineVariablesMinimumOverrideRole']).to eql(
project.ci_pipeline_variables_minimum_override_role)
if Gitlab.ee?
expect(settings_data['mergeTrainsEnabled']).to eql project.ci_cd_settings.merge_trains_enabled?
@ -58,5 +60,19 @@ RSpec.describe 'Getting Ci Cd Setting', feature_category: :continuous_integratio
expect(settings_data['mergeTrainsEnabled']).to be_nil
end
end
describe '#pipelineVariablesMinimumOverrideRole' do
before do
project.ci_pipeline_variables_minimum_override_role = :no_one_allowed
project.restrict_user_defined_variables = false
project.save!
end
context 'when restrict_user_defined_variables is disabled' do
it 'fetches according to the translation layer' do
expect(settings_data['pipelineVariablesMinimumOverrideRole']).to eql('developer')
end
end
end
end
end

View File

@ -5,7 +5,11 @@ require 'spec_helper'
RSpec.describe 'ProjectCiCdSettingsUpdate', feature_category: :continuous_integration do
include GraphqlHelpers
let_it_be(:project) do
def response_errors
(graphql_errors || []) + (graphql_data_at('projectCiCdSettingsUpdate', 'errors') || [])
end
let_it_be_with_reload(:project) do
create(:project,
keep_latest_artifact: true,
ci_outbound_job_token_scope_enabled: true,
@ -19,20 +23,21 @@ RSpec.describe 'ProjectCiCdSettingsUpdate', feature_category: :continuous_integr
keep_latest_artifact: false,
job_token_scope_enabled: false,
inbound_job_token_scope_enabled: false,
push_repository_for_job_token_allowed: false
push_repository_for_job_token_allowed: false,
pipeline_variables_minimum_override_role: 'no_one_allowed'
}
end
let(:mutation) { graphql_mutation(:project_ci_cd_settings_update, variables) }
context 'when unauthorized' do
let(:user) { create(:user) }
let_it_be(:user) { create(:user) }
shared_examples 'unauthorized' do
it 'returns an error' do
post_graphql_mutation(mutation, current_user: user)
expect(graphql_errors).not_to be_empty
expect(response_errors).not_to be_empty
end
end
@ -41,7 +46,7 @@ RSpec.describe 'ProjectCiCdSettingsUpdate', feature_category: :continuous_integr
end
context 'when a non-admin project member' do
before do
before_all do
project.add_developer(user)
end
@ -52,13 +57,16 @@ RSpec.describe 'ProjectCiCdSettingsUpdate', feature_category: :continuous_integr
context 'when authorized' do
let_it_be(:user) { project.first_owner }
it 'updates ci cd settings' do
it 'updates ci cd settings', :aggregate_failures do
post_graphql_mutation(mutation, current_user: user)
project.reload
expect(response).to have_gitlab_http_status(:success)
expect(project.keep_latest_artifact).to eq(false)
expect(response_errors).to be_blank
expect(project.keep_latest_artifact).to be(false)
expect(project.restrict_user_defined_variables?).to be(true)
expect(project.ci_pipeline_variables_minimum_override_role).to eq('no_one_allowed')
end
it 'allows setting job_token_scope_enabled to false' do
@ -67,7 +75,8 @@ RSpec.describe 'ProjectCiCdSettingsUpdate', feature_category: :continuous_integr
project.reload
expect(response).to have_gitlab_http_status(:success)
expect(project.ci_outbound_job_token_scope_enabled).to eq(false)
expect(response_errors).to be_blank
expect(project.ci_outbound_job_token_scope_enabled).to be(false)
end
context 'when push_repository_for_job_token_allowed requested to be true' do
@ -78,12 +87,13 @@ RSpec.describe 'ProjectCiCdSettingsUpdate', feature_category: :continuous_integr
}
end
it 'updates push_repository_for_job_token_allowed' do
it 'updates push_repository_for_job_token_allowed', :aggregate_failures do
post_graphql_mutation(mutation, current_user: user)
project.reload
expect(response).to have_gitlab_http_status(:success)
expect(project.ci_cd_settings.push_repository_for_job_token_allowed).to eq(true)
expect(response_errors).to be_blank
expect(project.ci_cd_settings.push_repository_for_job_token_allowed).to be(true)
end
end
@ -104,18 +114,18 @@ RSpec.describe 'ProjectCiCdSettingsUpdate', feature_category: :continuous_integr
project.reload
expect(response).to have_gitlab_http_status(:success)
expect(graphql_errors).to(
expect(response_errors).to(
include(
hash_including(
'message' => 'job_token_scope_enabled can only be set to false'
)
)
)
expect(project.ci_outbound_job_token_scope_enabled).to eq(false)
expect(project.ci_outbound_job_token_scope_enabled).to be(false)
end
end
it 'does not update job_token_scope_enabled if not specified' do
it 'does not update job_token_scope_enabled if not specified', :aggregate_failures do
variables.except!(:job_token_scope_enabled)
post_graphql_mutation(mutation, current_user: user)
@ -123,22 +133,21 @@ RSpec.describe 'ProjectCiCdSettingsUpdate', feature_category: :continuous_integr
project.reload
expect(response).to have_gitlab_http_status(:success)
expect(project.ci_outbound_job_token_scope_enabled).to eq(true)
expect(project.ci_outbound_job_token_scope_enabled).to be(true)
end
describe 'inbound_job_token_scope_enabled' do
let(:category) { Mutations::Ci::ProjectCiCdSettingsUpdate }
it 'updates inbound_job_token_scope_enabled' do
it 'updates inbound_job_token_scope_enabled', :aggregate_failures do
post_graphql_mutation(mutation, current_user: user)
project.reload
expect(response).to have_gitlab_http_status(:success)
expect(project.ci_inbound_job_token_scope_enabled).to eq(false)
expect(response_errors).to be_blank
expect(project.ci_inbound_job_token_scope_enabled).to be(false)
end
it 'does not update inbound_job_token_scope_enabled if not specified' do
it 'does not update inbound_job_token_scope_enabled if not specified', :aggregate_failures do
variables.except!(:inbound_job_token_scope_enabled)
post_graphql_mutation(mutation, current_user: user)
@ -146,7 +155,8 @@ RSpec.describe 'ProjectCiCdSettingsUpdate', feature_category: :continuous_integr
project.reload
expect(response).to have_gitlab_http_status(:success)
expect(project.ci_inbound_job_token_scope_enabled).to eq(true)
expect(response_errors).to be_blank
expect(project.ci_inbound_job_token_scope_enabled).to be(true)
end
context 'when inbound_job_token_scope_enabled is changed from false to true' do
@ -156,6 +166,7 @@ RSpec.describe 'ProjectCiCdSettingsUpdate', feature_category: :continuous_integr
end
it_behaves_like 'internal event tracking' do
let(:category) { Projects::UpdateService }
let(:event) { 'enable_inbound_job_token_scope' }
subject(:service_action) { post_graphql_mutation(mutation, current_user: user) }
end
@ -168,6 +179,7 @@ RSpec.describe 'ProjectCiCdSettingsUpdate', feature_category: :continuous_integr
end
it_behaves_like 'internal event tracking' do
let(:category) { Projects::UpdateService }
let(:event) { 'disable_inbound_job_token_scope' }
subject(:service_action) { post_graphql_mutation(mutation, current_user: user) }
end
@ -200,13 +212,108 @@ RSpec.describe 'ProjectCiCdSettingsUpdate', feature_category: :continuous_integr
end
end
describe 'pipeline_variables_minimum_override_role' do
let_it_be(:owner) { user }
let_it_be(:maintainer) { create(:user, maintainer_of: project) }
let(:initial_restrict_user_defined_variables) { false }
let(:variables) do
{
full_path: project.full_path,
pipeline_variables_minimum_override_role: 'maintainer'
}
end
before do
project.ci_cd_settings.update!(
pipeline_variables_minimum_override_role: 'no_one_allowed',
restrict_user_defined_variables: initial_restrict_user_defined_variables
)
end
specify do
expect { post_graphql_mutation(mutation, current_user: maintainer) }.to(
change { project.reload.restrict_user_defined_variables }
.from(false)
.to(true)
)
end
context 'when pipeline_variables_minimum_override_role is not specified' do
using RSpec::Parameterized::TableSyntax
let(:variables) do
{
full_path: project.full_path,
push_repository_for_job_token_allowed: true
}
end
where(:initial_restrict_user_defined_variables) { [false, true] }
with_them do
specify do
expect { post_graphql_mutation(mutation, current_user: maintainer) }.not_to(
change { project.reload.restrict_user_defined_variables }
.from(initial_restrict_user_defined_variables)
)
end
end
end
it 'maintainers can change minimum override role to a non-owner value', :aggregate_failures do
expect { post_graphql_mutation(mutation, current_user: maintainer) }.to(
change { project.reload.ci_pipeline_variables_minimum_override_role }
.from('developer')
.to('maintainer')
)
expect(response_errors).to be_blank
expect(response).to have_gitlab_http_status(:success)
end
context 'when changing to owner' do
let(:variables) do
{
full_path: project.full_path,
pipeline_variables_minimum_override_role: 'owner'
}
end
it 'is allowed for owners', :aggregate_failures do
expect { post_graphql_mutation(mutation, current_user: owner) }.to(
change { project.reload.ci_pipeline_variables_minimum_override_role }
.from('developer')
.to('owner')
)
expect(response_errors).to be_blank
expect(response).to have_gitlab_http_status(:success)
end
it 'is not allowed for maintainers', :aggregate_failures do
expect { post_graphql_mutation(mutation, current_user: maintainer) }.not_to(
change { project.reload.ci_pipeline_variables_minimum_override_role }
)
expect(response_errors).to(
include(
'Changing the ci_pipeline_variables_minimum_override_role to the owner role is not allowed'
)
)
expect(response).to have_gitlab_http_status(:success)
end
end
end
context 'when bad arguments are provided' do
let(:variables) { { full_path: '', keep_latest_artifact: false } }
it 'returns the errors' do
post_graphql_mutation(mutation, current_user: user)
expect(graphql_errors).not_to be_empty
expect(response_errors).to be_present
end
end
end

View File

@ -4431,51 +4431,58 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
end
context 'when ci_pipeline_variables_minimum_override_role is owner' do
let(:ci_cd_settings) { project3.ci_cd_settings }
before do
project3.add_maintainer(user2)
ci_cd_settings = project3.ci_cd_settings
ci_cd_settings.restrict_user_defined_variables = false
ci_cd_settings.pipeline_variables_minimum_override_role = 'owner'
ci_cd_settings.save!
end
context 'and current user is maintainer' do
let_it_be(:current_user) { user2 }
it 'rejects to change restrict_user_defined_variables' do
project_param = { restrict_user_defined_variables: true }
put api("/projects/#{project3.id}", current_user), params: project_param
expect(response).to have_gitlab_http_status(:bad_request)
context 'and restrict_user_defined_variables is false' do
before do
ci_cd_settings.restrict_user_defined_variables = false
ci_cd_settings.save!
end
it 'rejects to change ci_pipeline_variables_minimum_override_role' do
project_param = { ci_pipeline_variables_minimum_override_role: 'developer' }
context 'and current user is maintainer' do
let_it_be(:current_user) { user2 }
put api("/projects/#{project3.id}", current_user), params: project_param
it 'accepts to change restrict_user_defined_variables' do
project_param = { restrict_user_defined_variables: true }
expect(response).to have_gitlab_http_status(:bad_request)
end
end
put api("/projects/#{project3.id}", current_user), params: project_param
context 'and current user is owner' do
let_it_be(:current_user) { user }
expect(response).to have_gitlab_http_status(:ok)
end
it 'successfully changes restrict_user_defined_variables' do
project_param = { restrict_user_defined_variables: true }
it 'accepts to change ci_pipeline_variables_minimum_override_role' do
project_param = { ci_pipeline_variables_minimum_override_role: 'developer' }
put api("/projects/#{project3.id}", current_user), params: project_param
put api("/projects/#{project3.id}", current_user), params: project_param
expect(response).to have_gitlab_http_status(:ok)
expect(response).to have_gitlab_http_status(:ok)
end
end
it 'successfully changes ci_pipeline_variables_minimum_override_role' do
project_param = { ci_pipeline_variables_minimum_override_role: 'developer' }
context 'and current user is owner' do
let_it_be(:current_user) { user }
put api("/projects/#{project3.id}", current_user), params: project_param
it 'successfully changes restrict_user_defined_variables' do
project_param = { restrict_user_defined_variables: true }
expect(response).to have_gitlab_http_status(:ok)
put api("/projects/#{project3.id}", current_user), params: project_param
expect(response).to have_gitlab_http_status(:ok)
end
it 'successfully changes ci_pipeline_variables_minimum_override_role' do
project_param = { ci_pipeline_variables_minimum_override_role: 'developer' }
put api("/projects/#{project3.id}", current_user), params: project_param
expect(response).to have_gitlab_http_status(:ok)
end
end
end
end
@ -4484,8 +4491,8 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
before do
project3.add_maintainer(user2)
ci_cd_settings = project3.ci_cd_settings
ci_cd_settings.restrict_user_defined_variables = false
ci_cd_settings.pipeline_variables_minimum_override_role = 'maintainer'
ci_cd_settings.restrict_user_defined_variables = false
ci_cd_settings.save!
end

View File

@ -41,11 +41,12 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
:maintainer | :developer | true | false | :success
:maintainer | :maintainer | true | false | :success
:maintainer | :owner | true | false | :api_error
:maintainer | :owner | false | true | :api_error
:maintainer | :owner | false | true | :success
:maintainer | :owner | true | true | :success
:developer | :owner | true | false | :api_error
:developer | :developer | true | false | :api_error
:developer | :maintainer | true | false | :api_error
:developer | :maintainer | false | true | :api_error
end
with_them do
@ -57,8 +58,8 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
project.add_owner(owner)
ci_cd_settings = project.ci_cd_settings
ci_cd_settings.pipeline_variables_minimum_override_role = project_minimum_role
ci_cd_settings.restrict_user_defined_variables = from_value
ci_cd_settings[:pipeline_variables_minimum_override_role] = project_minimum_role
ci_cd_settings[:restrict_user_defined_variables] = from_value
ci_cd_settings.save!
end
@ -82,7 +83,7 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
:maintainer | true | :developer | :maintainer | :success
:maintainer | true | :maintainer | :owner | :api_error
:owner | false | :owner | :maintainer | :success
:maintainer | false | :owner | :developer | :api_error
:maintainer | false | :owner | :developer | :success
:maintainer | false | :maintainer | :owner | :api_error
end
@ -95,8 +96,8 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
project.add_owner(owner)
ci_cd_settings = project.ci_cd_settings
ci_cd_settings.pipeline_variables_minimum_override_role = from_value
ci_cd_settings.restrict_user_defined_variables = restrict_user_defined_variables
ci_cd_settings[:pipeline_variables_minimum_override_role] = from_value
ci_cd_settings[:restrict_user_defined_variables] = restrict_user_defined_variables
ci_cd_settings.save!
end

View File

@ -6630,7 +6630,6 @@
- './spec/requests/api/graphql/mutations/ci/pipeline_cancel_spec.rb'
- './spec/requests/api/graphql/mutations/ci/pipeline_destroy_spec.rb'
- './spec/requests/api/graphql/mutations/ci/pipeline_retry_spec.rb'
- './spec/requests/api/graphql/mutations/ci/project_ci_cd_settings_update_spec.rb'
- './spec/requests/api/graphql/mutations/ci/runners_registration_token/reset_spec.rb'
- './spec/requests/api/graphql/mutations/clusters/agents/create_spec.rb'
- './spec/requests/api/graphql/mutations/clusters/agents/delete_spec.rb'

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'gitlab:db:deduplicate_tags', :silence_stdout, feature_category: :runner do
before(:all) do
Rake.application.rake_require 'tasks/gitlab/db/deduplicate_tags'
end
subject(:run_rake) { run_rake_task('gitlab:db:deduplicate_tags') }
it 'calls execute on DeduplicateCiTags' do
expect_next_instance_of(
Gitlab::Database::DeduplicateCiTags, logger: an_instance_of(Logger), dry_run: false
) do |service|
expect(service).to receive(:execute)
end
run_rake
end
context 'when DRY_RUN is true' do
before do
stub_env('DRY_RUN', true)
end
it 'calls execute on DeduplicateCiTags with dry_run = true' do
expect_next_instance_of(
Gitlab::Database::DeduplicateCiTags, logger: an_instance_of(Logger), dry_run: true
) do |service|
expect(service).to receive(:execute)
end
run_rake
end
end
end