Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-06-05 12:24:46 +00:00
parent 319424852b
commit 0704d5dada
121 changed files with 1000 additions and 468 deletions

View File

@ -265,13 +265,13 @@ include:
- local: .gitlab/ci/includes/as-if-jh.gitlab-ci.yml
rules:
# Only run as-if-jh triggered pipelines for gitlab.com/gitlab-org/gitlab MRs that don't target stable branches
# and that don't have the quarantine or pipeline:expedite labels.
# and that don't have the quarantine or pipeline::expedited labels.
- if: '$CI_PROJECT_URL != "https://gitlab.com/gitlab-org/gitlab"'
when: never
- if: '$CI_MERGE_REQUEST_ID == null'
when: never
- if: '$CI_MERGE_REQUEST_TARGET_BRANCH_NAME =~ /^[\d-]+-stable(-ee|-jh)?$/'
when: never
- if: '$CI_MERGE_REQUEST_LABELS =~ /quarantine/ || $CI_MERGE_REQUEST_LABELS =~ /pipeline:expedite/'
- if: '$CI_MERGE_REQUEST_LABELS =~ /quarantine/ || $CI_MERGE_REQUEST_LABELS =~ /pipeline::expedited/ || $CI_MERGE_REQUEST_LABELS =~ /pipeline:expedite/'
when: never
- when: always

View File

@ -39,26 +39,9 @@ review-docs-cleanup:
script:
- ./scripts/trigger-build.rb docs cleanup
docs-lint links:
extends:
- .docs:rules:docs-lint
image: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-docs/lint-html:alpine-3.19-ruby-3.2.3-acbeb4ef
stage: lint
needs: []
script:
# Prepare docs for build
# The path must be 'ee/' because we have hardcoded links relying on it
# https://gitlab.com/gitlab-org/gitlab-docs/-/blob/887850752fc0e72856da6632db132f005ba77f16/content/index.erb#L44-63
- mv doc/ /tmp/gitlab-docs/content/ee
- cd /tmp/gitlab-docs
# Build HTML from Markdown
- make compile
# Check the internal links and anchors (in parallel)
- "parallel time bundle exec nanoc check ::: internal_links internal_anchors"
.docs-markdown-lint-image:
# When updating the image version here, update it in /scripts/lint-doc.sh too.
image: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-docs/lint-markdown:alpine-3.19-vale-3.0.7-markdownlint-0.39.0-markdownlint2-0.12.1
image: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-docs/lint-markdown:alpine-3.20-vale-3.4.2-markdownlint2-0.13.0-lychee-0.15.1
docs-lint markdown:
extends:
@ -103,6 +86,15 @@ docs code_quality:
expire_in: 2 weeks
when: always
docs-lint links:
extends:
- .docs:rules:docs-lint
- .docs-markdown-lint-image
stage: lint
needs: []
script:
- lychee --offline --include-fragments doc/**/*.md
ui-docs-links lint:
extends:
- .docs:rules:docs-lint

View File

@ -105,7 +105,7 @@
if: '($CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE == "detached") && $CI_MERGE_REQUEST_LABELS =~ /group::global search/'
.if-merge-request-labels-pipeline-expedite: &if-merge-request-labels-pipeline-expedite
if: '($CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE == "detached") && $CI_MERGE_REQUEST_LABELS =~ /pipeline:expedite/'
if: '($CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE == "detached") && ($CI_MERGE_REQUEST_LABELS =~ /pipeline::expedited/ || $CI_MERGE_REQUEST_LABELS =~ /pipeline:expedite/)'
.if-merge-request-labels-frontend-and-feature-flag: &if-merge-request-labels-frontend-and-feature-flag
if: '($CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE == "detached") && $CI_MERGE_REQUEST_LABELS =~ /frontend/ && $CI_MERGE_REQUEST_LABELS =~ /feature flag/'

View File

@ -23,7 +23,7 @@
### Related issues and merge requests
/label ~"pipeline:expedite" ~"master:broken" ~"Pick into auto-deploy" ~"severity::1" ~"priority::1"
/label ~"pipeline::expedited" ~"master:broken" ~"Pick into auto-deploy" ~"severity::1" ~"priority::1"
<!--
Regression label: if applicable, specify the milestone-specific regression label

View File

@ -505,10 +505,8 @@ Gitlab/StrongMemoizeAttr:
- 'lib/gitlab/ci/pipeline/chain/command.rb'
- 'lib/gitlab/ci/pipeline/chain/config/content.rb'
- 'lib/gitlab/ci/pipeline/chain/create.rb'
- 'lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules.rb'
- 'lib/gitlab/ci/pipeline/chain/limit/active_jobs.rb'
- 'lib/gitlab/ci/pipeline/chain/limit/rate_limit.rb'
- 'lib/gitlab/ci/pipeline/chain/seed.rb'
- 'lib/gitlab/ci/pipeline/chain/skip.rb'
- 'lib/gitlab/ci/pipeline/expression/lexer.rb'
- 'lib/gitlab/ci/pipeline/quota/deployments.rb'

View File

@ -1597,7 +1597,6 @@ RSpec/ContextWording:
- 'spec/lib/gitlab/ci/parsers/test/junit_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/command_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/create_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/duration_spec.rb'

View File

@ -2880,7 +2880,6 @@ RSpec/FeatureCategory:
- 'spec/lib/gitlab/ci/pipeline/chain/ensure_environments_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/ensure_resource_groups_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/limit/active_jobs_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/limit/rate_limit_spec.rb'

View File

@ -1876,7 +1876,6 @@ RSpec/NamedSubject:
- 'spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/ensure_environments_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/ensure_resource_groups_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/limit/active_jobs_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/remove_unwanted_chat_jobs_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb'

View File

@ -126,7 +126,6 @@ RSpec/ScatteredLet:
- 'spec/lib/gitlab/checks/matching_merge_request_spec.rb'
- 'spec/lib/gitlab/ci/config/external/file/artifact_spec.rb'
- 'spec/lib/gitlab/ci/config_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb'
- 'spec/lib/gitlab/ci/status/stage/factory_spec.rb'
- 'spec/lib/gitlab/ci/yaml_processor_spec.rb'
- 'spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb'

View File

@ -373,7 +373,6 @@ RSpec/VerifiedDoubles:
- 'spec/lib/gitlab/ci/config/external/rules_spec.rb'
- 'spec/lib/gitlab/ci/parsers/test/junit_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/remove_unwanted_chat_jobs_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb'

View File

@ -620,8 +620,6 @@ Style/IfUnlessModifier:
- 'lib/gitlab/ci/config/normalizer.rb'
- 'lib/gitlab/ci/parsers/coverage/sax_document.rb'
- 'lib/gitlab/ci/parsers/security/common.rb'
- 'lib/gitlab/ci/pipeline/chain/populate.rb'
- 'lib/gitlab/ci/pipeline/chain/seed.rb'
- 'lib/gitlab/ci/pipeline/chain/validate/abilities.rb'
- 'lib/gitlab/ci/pipeline/chain/validate/repository.rb'
- 'lib/gitlab/ci/pipeline/expression/lexeme/base.rb'

View File

@ -1 +1 @@
9fef3b59e0562e140085c950a87e0f6a2566dfc5
a8ff22f80ef2fff0e29bea366cc601f278c8f7db

View File

@ -273,7 +273,7 @@ gem 're2', '2.7.0' # rubocop:todo Gemfile/MissingFeatureCategory
# Misc
gem 'semver_dialects', '~> 2.0', '>= 2.0.2', feature_category: :software_composition_analysis
gem 'semver_dialects', '~> 3.0', feature_category: :software_composition_analysis
gem 'version_sorter', '~> 2.3' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'csv_builder', path: 'gems/csv_builder' # rubocop:todo Gemfile/MissingFeatureCategory

View File

@ -618,7 +618,7 @@
{"name":"sd_notify","version":"0.1.1","platform":"ruby","checksum":"cbc7ac6caa7cedd26b30a72b5eeb6f36050dc0752df263452ea24fb5a4ad3131"},
{"name":"seed-fu","version":"2.3.7","platform":"ruby","checksum":"f19673443e9af799b730e3d4eca6a89b39e5a36825015dffd00d02ea3365cf74"},
{"name":"selenium-webdriver","version":"4.21.1","platform":"ruby","checksum":"c30b64014532fc5156c60797985f839f36adbe60ff4653e7112b008dc1c83263"},
{"name":"semver_dialects","version":"2.0.2","platform":"ruby","checksum":"60059c9f416f931b5212d862fad2879d6b9affb8e0b9afb0d91b793639c116fe"},
{"name":"semver_dialects","version":"3.0.0","platform":"ruby","checksum":"daab2476c2a5d779e1c97ae9b92e59803757e679453692402dfbe364c3cf7b3e"},
{"name":"sentry-rails","version":"5.17.3","platform":"ruby","checksum":"017771c42d739c0ad2213a581ca9d005cf543227bc13662cd1ca9909f2429459"},
{"name":"sentry-ruby","version":"5.17.3","platform":"ruby","checksum":"61791a4b0bb0f95cd87aceeaa1efa6d4ab34d64236c9d5df820478adfe2fbbfc"},
{"name":"sentry-sidekiq","version":"5.17.3","platform":"ruby","checksum":"d0714a218999e41e38127d0c174e0ee62a32b069f92e85b544e0c2125eca2c58"},

View File

@ -1649,7 +1649,7 @@ GEM
rexml (~> 3.2, >= 3.2.5)
rubyzip (>= 1.2.2, < 3.0)
websocket (~> 1.0)
semver_dialects (2.0.2)
semver_dialects (3.0.0)
deb_version (~> 1.0.1)
pastel (~> 0.8.0)
thor (~> 1.3)
@ -2205,7 +2205,7 @@ DEPENDENCIES
sd_notify (~> 0.1.0)
seed-fu (~> 2.3.7)
selenium-webdriver (~> 4.21, >= 4.21.1)
semver_dialects (~> 2.0, >= 2.0.2)
semver_dialects (~> 3.0)
sentry-rails (~> 5.17.3)
sentry-ruby (~> 5.17.3)
sentry-sidekiq (~> 5.17.3)

View File

@ -198,7 +198,7 @@ export default {
</gl-form-group>
<gl-form-group
v-if="showGitlabDsnSetting"
:label="__('Paste this DSN into your Sentry SDK')"
:label="__('Paste this Data Source Name (DSN) into your Sentry SDK.')"
data-testid="gitlab-dsn-setting-form"
>
<gl-form-input-group readonly :value="gitlabDsn">

View File

@ -12,6 +12,7 @@ import {
OPERATOR_AFTER,
OPERATOR_BEFORE,
TOKEN_TYPE_ASSIGNEE,
TOKEN_TYPE_MR_ASSIGNEE,
TOKEN_TYPE_AUTHOR,
TOKEN_TYPE_CONFIDENTIAL,
TOKEN_TYPE_CONTACT,
@ -214,6 +215,26 @@ export const filtersMap = {
},
},
},
[TOKEN_TYPE_MR_ASSIGNEE]: {
[API_PARAM]: {
[NORMAL_FILTER]: 'assigneeUsername',
[SPECIAL_FILTER]: 'assigneeWildcardId',
[ALTERNATIVE_FILTER]: 'assigneeId',
},
[URL_PARAM]: {
[OPERATOR_IS]: {
[NORMAL_FILTER]: 'mr_assignee_username',
[SPECIAL_FILTER]: 'mr_assignee_id',
[ALTERNATIVE_FILTER]: 'mr_assignee_username',
},
[OPERATOR_NOT]: {
[NORMAL_FILTER]: 'not[mr_assignee_username]',
},
[OPERATOR_OR]: {
[NORMAL_FILTER]: 'or[mr_assignee_username]',
},
},
},
[TOKEN_TYPE_ASSIGNEE]: {
[API_PARAM]: {
[NORMAL_FILTER]: 'assigneeUsernames',

View File

@ -167,7 +167,7 @@ export default {
/>
</div>
<span class="gl-display-flex gl-flex-direction-column">
<span class="gl-font-weight-bold gl-white-space-nowrap">{{ item.text }}</span>
<span class="gl-font-bold gl-white-space-nowrap">{{ item.text }}</span>
<span class="gl-text-gray-400"> {{ item.secondaryText }}</span>
</span>
</span>

View File

@ -23,6 +23,8 @@ import {
TOKEN_TYPE_TARGET_BRANCH,
TOKEN_TITLE_SOURCE_BRANCH,
TOKEN_TYPE_SOURCE_BRANCH,
TOKEN_TITLE_ASSIGNEE,
TOKEN_TYPE_MR_ASSIGNEE,
} from '~/vue_shared/components/filtered_search_bar/constants';
import {
convertToApiParams,
@ -171,6 +173,20 @@ export default {
}
return [
{
type: TOKEN_TYPE_MR_ASSIGNEE,
title: TOKEN_TITLE_ASSIGNEE,
icon: 'user',
token: UserToken,
dataType: 'user',
operators: OPERATORS_IS,
fullPath: this.fullPath,
isProject: true,
recentSuggestionsStorageKey: `${this.fullPath}-merge-requests-recent-tokens-assignee`,
preloadedUsers,
multiSelect: false,
unique: true,
},
{
type: TOKEN_TYPE_AUTHOR,
title: TOKEN_TITLE_AUTHOR,

View File

@ -7,6 +7,7 @@ query getMergeRequests(
$fullPath: ID!
$sort: MergeRequestSort
$state: MergeRequestState
$assigneeUsername: String
$authorUsername: String
$draft: Boolean
$sourceBranches: [String!]
@ -21,6 +22,7 @@ query getMergeRequests(
mergeRequests(
sort: $sort
state: $state
assigneeUsername: $assigneeUsername
authorUsername: $authorUsername
draft: $draft
sourceBranches: $sourceBranches

View File

@ -90,6 +90,7 @@ export const TOKEN_TITLE_CLOSED = __('Closed date');
export const TOKEN_TYPE_APPROVED_BY = 'approved-by';
export const TOKEN_TYPE_MERGE_USER = 'merge-user';
export const TOKEN_TYPE_ASSIGNEE = 'assignee';
export const TOKEN_TYPE_MR_ASSIGNEE = 'mr-assignee';
export const TOKEN_TYPE_AUTHOR = 'author';
export const TOKEN_TYPE_CONFIDENTIAL = 'confidential';
export const TOKEN_TYPE_CONTACT = 'contact';

View File

@ -75,6 +75,11 @@ module FinderWithGroupHierarchy
end
def preload_associations(groups)
ActiveRecord::Associations::Preloader.new(
records: groups,
associations: [:organization]
).call
Preloaders::UserMaxAccessLevelInGroupsPreloader.new(groups, current_user).execute
end
end

View File

@ -833,8 +833,8 @@ module Ci
# Like #drop!, but does not persist the pipeline nor trigger any state
# machine callbacks.
def set_failed(drop_reason)
self.failure_reason = drop_reason.to_s
def set_failed(failure_reason)
self.failure_reason = failure_reason.to_s
self.status = 'failed'
end

View File

@ -7,10 +7,10 @@ module Enums
}.with_indifferent_access.freeze
PURL_TYPES = {
composer: 1, # referred to as `packagist` in gemnasium-db
composer: 1, # referred to as `packagist` in gemnasium-db and semver_dialects
conan: 2,
gem: 3,
golang: 4, # referred to as `go` in gemnasium-db
golang: 4, # referred to as `go` in gemnasium-db and semver_dialects
maven: 5,
npm: 6,
nuget: 7,

View File

@ -729,7 +729,7 @@ class Group < Namespace
unless only_concrete_membership
return GroupMember::OWNER if user.can_admin_all_resources?
return GroupMember::OWNER if user.can_admin_organization?(organization_id)
return GroupMember::OWNER if user.can_admin_organization?(organization)
end
max_member_access(user)

View File

@ -8,6 +8,11 @@ module Preloaders
end
def execute
ActiveRecord::Associations::Preloader.new(
records: groups,
associations: [:organization]
).call
Preloaders::UserMaxAccessLevelInGroupsPreloader.new(groups, current_user).execute
end

View File

@ -2148,7 +2148,7 @@ class User < MainClusterwide::ApplicationRecord
end
def can_admin_organization?(organization)
owns_organization?(organization)
can?(:admin_organization, organization)
end
def update_two_factor_requirement

View File

@ -34,13 +34,11 @@ class GroupPolicy < Namespaces::GroupProjectNamespaceSharedPolicy
desc "User owns the group's organization"
condition(:organization_owner) do
if @user.is_a?(User)
@user.owns_organization?(@subject.organization_id)
else
false
end
owns_group_organization?
end
rule { admin | organization_owner }.enable :admin_organization
with_options scope: :subject, score: 0
condition(:request_access_enabled) { @subject.request_access_enabled }
@ -458,6 +456,21 @@ class GroupPolicy < Namespaces::GroupProjectNamespaceSharedPolicy
def valid_dependency_proxy_deploy_token
@user.is_a?(DeployToken) && @user&.valid_for_dependency_proxy? && @user&.has_access_to_group?(@subject)
end
# rubocop:disable Cop/UserAdmin -- specifically check the admin attribute
def owns_group_organization?
return false unless @user
return false unless user_is_user?
return false unless @subject.organization
# Ensure admins can't bypass admin mode.
return false if @user.admin? && !can?(:admin)
# Load the owners with a single query.
@subject.organization
.owner_user_ids
.include?(@user.id)
end
# rubocop:enable Cop/UserAdmin
end
GroupPolicy.prepend_mod_with('GroupPolicy')
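As a rough illustration of how the new organization-owner path surfaces through the policy layer: the sketch below assumes a non-admin user whose ID appears in the group's `organization.owner_user_ids`, and the lookup helpers (`find_by_full_path`, `max_member_access_for_user`) are only meant to mirror existing codebase conventions, not to prescribe an API.

```ruby
# Hypothetical objects: a regular (non-admin) user who owns the group's
# organization but has no direct membership in the group itself.
user  = User.find_by!(username: 'org-owner')        # placeholder username
group = Group.find_by_full_path('some-group')       # placeholder path

# The `admin | organization_owner` rule enables :admin_organization,
# so the generic ability check passes for organization owners.
Ability.allowed?(user, :admin_organization, group)  # => true

# Group#max_member_access_for_user then short-circuits to OWNER
# via the change to `can_admin_organization?(organization)` above.
group.max_member_access_for_user(user)              # => GroupMember::OWNER (50)
```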

View File

@ -1,9 +0,0 @@
---
name: use_ids_for_markdown_upload_urls
feature_issue_url: https://gitlab.com/gitlab-sirt/shared-incidents/incident_5281/-/work_items/6
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/150939
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/461757
milestone: '17.0'
group: group::project management
type: gitlab_com_derisk
default_enabled: false

View File

@ -0,0 +1,50 @@
# frozen_string_literal: true
class RecreateCiUsedMinutesByRunnerDailyMv2 < ClickHouse::Migration
def up
execute <<~SQL
DROP VIEW IF EXISTS ci_used_minutes_by_runner_daily_mv
SQL
execute <<~SQL
CREATE MATERIALIZED VIEW IF NOT EXISTS ci_used_minutes_by_runner_daily_mv
TO ci_used_minutes_by_runner_daily
AS
SELECT
toStartOfInterval(finished_at, INTERVAL 1 day) AS finished_at_bucket,
runner_type,
status,
runner_id,
countState() AS count_builds,
sumSimpleState(duration) AS total_duration,
project_id
FROM ci_finished_builds
GROUP BY finished_at_bucket, runner_type, project_id, status, runner_id
SQL
end
def down
execute <<~SQL
DROP VIEW IF EXISTS ci_used_minutes_by_runner_daily_mv
SQL
execute <<~SQL
CREATE MATERIALIZED VIEW IF NOT EXISTS ci_used_minutes_by_runner_daily_mv
TO ci_used_minutes_by_runner_daily
AS
SELECT
toStartOfInterval(finished_at, INTERVAL 1 day) AS finished_at_bucket,
runner_type,
status,
project_id,
runner_id,
countState() AS count_builds,
sumSimpleState(duration) AS total_duration
FROM ci_finished_builds
GROUP BY finished_at_bucket, runner_type, project_id, status, runner_id
SQL
end
end
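Because the daily table stores `count_builds` as an aggregate-function state (from `countState()`) and `total_duration` as a simple sum state, reads need `countMerge` but only a plain `sum`. Below is a hedged sketch of such a read; the `ClickHouse::Client.select` helper and the hard-coded runner ID are assumptions for illustration only.

```ruby
# Sketch: roll up one runner's daily build counts and durations from the
# table this materialized view writes into.
sql = <<~SQL
  SELECT
    finished_at_bucket,
    countMerge(count_builds) AS count_builds,
    sum(total_duration)      AS total_duration
  FROM ci_used_minutes_by_runner_daily
  WHERE runner_id = 42 -- hypothetical runner ID
  GROUP BY finished_at_bucket
  ORDER BY finished_at_bucket
SQL

ClickHouse::Client.select(sql, :main) # assumed query helper
```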

View File

@ -0,0 +1,9 @@
---
migration_job_name: BackfillMlExperimentMetadataProjectId
description: Backfills sharding key `ml_experiment_metadata.project_id` from `ml_experiments`.
feature_category: mlops
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/155223
milestone: '17.1'
queued_migration_version: 20240604074204
finalize_after: '2024-07-22'
finalized_by: # version of the migration that finalized this BBM

View File

@ -19,3 +19,4 @@ desired_sharding_key:
table: ml_experiments
sharding_key: project_id
belongs_to: experiment
desired_sharding_key_migration_job_name: BackfillMlExperimentMetadataProjectId

View File

@ -4,7 +4,9 @@ classes:
- RequirementsManagement::TestReport
feature_categories:
- requirements_management
description: Information related to Test Reports, which relate historical test outcomes to Requirements
description: Information related to Test Reports, which relate historical test outcomes
to Requirements
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/31643
milestone: '13.0'
gitlab_schema: gitlab_main
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/465553

View File

@ -9,4 +9,4 @@ classes:
- GitlabSubscriptions::AddOnPurchase
gitlab_schema: gitlab_main_cell
sharding_key:
namespace_id: namespaces
organization_id: organizations

View File

@ -60,8 +60,7 @@ class Gitlab::Seeder::TriageOps
master-broken::pipeline-skipped-before-merge
master-broken::test-selection-gap
master-broken::undetermined
pipeline:expedite
pipeline:expedite-master-fixing
pipeline::expedited
pipeline:mr-approved
pipeline:run-all-jest
pipeline:run-all-rspec

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
class AddOrganizationIdToSubscriptionAddOnPurchases < Gitlab::Database::Migration[2.2]
milestone '17.1'
DEFAULT_ORGANIZATION_ID = 1
enable_lock_retries!
def change
add_column :subscription_add_on_purchases, :organization_id, :bigint, default: DEFAULT_ORGANIZATION_ID, null: false
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class AddOrganizationIdIndexOnSubscriptionAddOnPurchases < Gitlab::Database::Migration[2.2]
milestone '17.1'
disable_ddl_transaction!
INDEX = 'index_add_on_purchases_on_organization_id'
def up
add_concurrent_index :subscription_add_on_purchases,
%i[organization_id],
name: INDEX
end
def down
remove_concurrent_index_by_name :subscription_add_on_purchases, name: INDEX
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class AddOrganizationIdFkToSubscriptionAddOnPurchases < Gitlab::Database::Migration[2.2]
milestone '17.1'
disable_ddl_transaction!
def up
add_concurrent_foreign_key(
:subscription_add_on_purchases,
:organizations,
column: :organization_id,
on_delete: :cascade
)
end
def down
with_lock_retries do
remove_foreign_key_if_exists(
:subscription_add_on_purchases,
column: :organization_id
)
end
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddProjectIdToMlExperimentMetadata < Gitlab::Database::Migration[2.2]
milestone '17.1'
def change
add_column :ml_experiment_metadata, :project_id, :bigint
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class IndexMlExperimentMetadataOnProjectId < Gitlab::Database::Migration[2.2]
milestone '17.1'
disable_ddl_transaction!
INDEX_NAME = 'index_ml_experiment_metadata_on_project_id'
def up
add_concurrent_index :ml_experiment_metadata, :project_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :ml_experiment_metadata, INDEX_NAME
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddMlExperimentMetadataProjectIdFk < Gitlab::Database::Migration[2.2]
milestone '17.1'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :ml_experiment_metadata, :projects, column: :project_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :ml_experiment_metadata, column: :project_id
end
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class AddMlExperimentMetadataProjectIdTrigger < Gitlab::Database::Migration[2.2]
milestone '17.1'
def up
install_sharding_key_assignment_trigger(
table: :ml_experiment_metadata,
sharding_key: :project_id,
parent_table: :ml_experiments,
parent_sharding_key: :project_id,
foreign_key: :experiment_id
)
end
def down
remove_sharding_key_assignment_trigger(
table: :ml_experiment_metadata,
sharding_key: :project_id,
parent_table: :ml_experiments,
parent_sharding_key: :project_id,
foreign_key: :experiment_id
)
end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
class QueueBackfillMlExperimentMetadataProjectId < Gitlab::Database::Migration[2.2]
milestone '17.1'
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
MIGRATION = "BackfillMlExperimentMetadataProjectId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:ml_experiment_metadata,
:id,
:project_id,
:ml_experiments,
:project_id,
:experiment_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(
MIGRATION,
:ml_experiment_metadata,
:id,
[
:project_id,
:ml_experiments,
:project_id,
:experiment_id
]
)
end
end

View File

@ -0,0 +1 @@
59760472bb020a0e37f6756be4099704834f71708e0e6f6b562aa51491bce42d

View File

@ -0,0 +1 @@
3646666d1c590d6914fca46f4d710b511943567fc07ba1d2112328902fbec833

View File

@ -0,0 +1 @@
56baaaa8e2269b15f8d8a54706a74b1dbfc1716bfd75423d29e7a5b8c1ae6379

View File

@ -0,0 +1 @@
85ede49137abc93d7b761dfac4a68d8242bc99402d27010a6b175ceb386a7a92

View File

@ -0,0 +1 @@
d18dfeaed00fed629e50c3fbc17ce9dd27f0595a5917c18eabf2e5715d29f383

View File

@ -0,0 +1 @@
966cc073b725930440cf7964b217f74759bf9d1c26a0095a007b5f2ce6b4e338

View File

@ -0,0 +1 @@
b1e26b36e1befd2edd81561fb7c59aaae93ab91f9dba48c4fd884f2e20a6849a

View File

@ -0,0 +1 @@
3272c1bbabb01f874409fffb45d4481ca67073df9292563186a508b5f9902289

View File

@ -815,6 +815,22 @@ BEGIN
END;
$$;
CREATE FUNCTION trigger_2b8fdc9b4a4e() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."project_id" IS NULL THEN
SELECT "project_id"
INTO NEW."project_id"
FROM "ml_experiments"
WHERE "ml_experiments"."id" = NEW."experiment_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_3691f9f6a69f() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -12153,6 +12169,7 @@ CREATE TABLE ml_experiment_metadata (
experiment_id bigint NOT NULL,
name text NOT NULL,
value text NOT NULL,
project_id bigint,
CONSTRAINT check_112fe5002d CHECK ((char_length(name) <= 255)),
CONSTRAINT check_a91c633d68 CHECK ((char_length(value) <= 5000))
);
@ -16986,6 +17003,7 @@ CREATE TABLE subscription_add_on_purchases (
last_assigned_users_refreshed_at timestamp with time zone,
trial boolean DEFAULT false NOT NULL,
started_at date,
organization_id bigint DEFAULT 1 NOT NULL,
CONSTRAINT check_3313c4d200 CHECK ((char_length(purchase_xid) <= 255))
);
@ -24926,6 +24944,8 @@ CREATE UNIQUE INDEX index_activity_pub_releases_sub_on_project_id_inbox_url ON a
CREATE UNIQUE INDEX index_activity_pub_releases_sub_on_project_id_sub_url ON activity_pub_releases_subscriptions USING btree (project_id, lower(subscriber_url));
CREATE INDEX index_add_on_purchases_on_organization_id ON subscription_add_on_purchases USING btree (organization_id);
CREATE INDEX index_agent_activity_events_on_agent_id_and_recorded_at_and_id ON agent_activity_events USING btree (agent_id, recorded_at, id);
CREATE INDEX index_agent_activity_events_on_agent_project_id ON agent_activity_events USING btree (agent_project_id);
@ -26906,6 +26926,8 @@ CREATE INDEX index_ml_candidates_on_user_id ON ml_candidates USING btree (user_i
CREATE UNIQUE INDEX index_ml_experiment_metadata_on_experiment_id_and_name ON ml_experiment_metadata USING btree (experiment_id, name);
CREATE INDEX index_ml_experiment_metadata_on_project_id ON ml_experiment_metadata USING btree (project_id);
CREATE INDEX index_ml_experiments_on_model_id ON ml_experiments USING btree (model_id);
CREATE UNIQUE INDEX index_ml_experiments_on_project_id_and_iid ON ml_experiments USING btree (project_id, iid);
@ -30478,6 +30500,8 @@ CREATE TRIGGER trigger_25c44c30884f BEFORE INSERT OR UPDATE ON work_item_parent_
CREATE TRIGGER trigger_2ac3d66ed1d3 BEFORE INSERT OR UPDATE ON vulnerability_occurrence_pipelines FOR EACH ROW EXECUTE FUNCTION trigger_2ac3d66ed1d3();
CREATE TRIGGER trigger_2b8fdc9b4a4e BEFORE INSERT OR UPDATE ON ml_experiment_metadata FOR EACH ROW EXECUTE FUNCTION trigger_2b8fdc9b4a4e();
CREATE TRIGGER trigger_3691f9f6a69f BEFORE INSERT OR UPDATE ON remote_development_agent_configs FOR EACH ROW EXECUTE FUNCTION trigger_3691f9f6a69f();
CREATE TRIGGER trigger_3857ca5ea4af BEFORE INSERT OR UPDATE ON merge_trains FOR EACH ROW EXECUTE FUNCTION trigger_3857ca5ea4af();
@ -31441,6 +31465,9 @@ ALTER TABLE ONLY bulk_import_entities
ALTER TABLE ONLY compliance_management_frameworks
ADD CONSTRAINT fk_b74c45b71f FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY ml_experiment_metadata
ADD CONSTRAINT fk_b764e76c6c FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY external_status_checks_protected_branches
ADD CONSTRAINT fk_b7d788e813 FOREIGN KEY (protected_branch_id) REFERENCES protected_branches(id) ON DELETE CASCADE;
@ -31558,6 +31585,9 @@ ALTER TABLE ONLY personal_access_tokens
ALTER TABLE ONLY jira_tracker_data
ADD CONSTRAINT fk_c98abcd54c FOREIGN KEY (integration_id) REFERENCES integrations(id) ON DELETE CASCADE;
ALTER TABLE ONLY subscription_add_on_purchases
ADD CONSTRAINT fk_caed789645 FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
ALTER TABLE ONLY boards_epic_board_labels
ADD CONSTRAINT fk_cb8ded70e2 FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;

View File

@ -20,7 +20,7 @@ For a full list of reference architectures, see
> follow a modified [3K reference architecture](3k_users.md#supported-modifications-for-lower-user-counts-ha).
> - **Cost calculator template:** [See cost calculator templates section](index.md#cost-calculator-templates)
> - **Cloud Native Hybrid:** No. For a cloud native hybrid environment, you
> can follow a [modified hybrid reference architecture](#cloud-native-hybrid-reference-architecture-with-helm-charts).
> can follow a [modified hybrid reference architecture](#cloud-native-hybrid-reference-architecture-with-helm-charts).
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](index.md#deciding-which-architecture-to-start-with).
| Users | Configuration | GCP | AWS | Azure |

View File

@ -111,6 +111,20 @@ four standard [pagination arguments](#pagination-arguments):
| <a id="queryaimessagesrequestids"></a>`requestIds` | [`[ID!]`](#id) | Array of request IDs to fetch. |
| <a id="queryaimessagesroles"></a>`roles` | [`[AiMessageRole!]`](#aimessagerole) | Array of roles to fetch. |
### `Query.aiSelfHostedModels`
List of Self-Hosted LLM servers.
DETAILS:
**Introduced** in GitLab 17.1.
**Status**: Experiment.
Returns [`AiSelfHostedModelConnection`](#aiselfhostedmodelconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#pagination-arguments):
`before: String`, `after: String`, `first: Int`, and `last: Int`.
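For a concrete sense of how this field is queried, here is a hedged sketch that posts a GraphQL request to the `/api/graphql` endpoint; the instance URL and token are placeholders, and the selected fields simply mirror the `AiSelfHostedModel` fields documented below.

```ruby
require 'net/http'
require 'json'
require 'uri'

uri   = URI('https://gitlab.example.com/api/graphql') # placeholder instance
token = ENV.fetch('GITLAB_TOKEN')                      # personal access token

query = <<~GRAPHQL
  query {
    aiSelfHostedModels(first: 10) {
      nodes { id name model endpoint hasApiToken createdAt }
      pageInfo { hasNextPage endCursor }
    }
  }
GRAPHQL

response = Net::HTTP.post(
  uri,
  { query: query }.to_json,
  'Content-Type'  => 'application/json',
  'Authorization' => "Bearer #{token}"
)

puts JSON.parse(response.body)
```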
### `Query.auditEventDefinitions`
Definitions for all audit events available on the instance.
@ -10259,6 +10273,29 @@ The edge type for [`AiMessage`](#aimessage).
| <a id="aimessageedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="aimessageedgenode"></a>`node` | [`AiMessage`](#aimessage) | The item at the end of the edge. |
#### `AiSelfHostedModelConnection`
The connection type for [`AiSelfHostedModel`](#aiselfhostedmodel).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="aiselfhostedmodelconnectionedges"></a>`edges` | [`[AiSelfHostedModelEdge]`](#aiselfhostedmodeledge) | A list of edges. |
| <a id="aiselfhostedmodelconnectionnodes"></a>`nodes` | [`[AiSelfHostedModel]`](#aiselfhostedmodel) | A list of nodes. |
| <a id="aiselfhostedmodelconnectionpageinfo"></a>`pageInfo` | [`PageInfo!`](#pageinfo) | Information to aid in pagination. |
#### `AiSelfHostedModelEdge`
The edge type for [`AiSelfHostedModel`](#aiselfhostedmodel).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="aiselfhostedmodeledgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="aiselfhostedmodeledgenode"></a>`node` | [`AiSelfHostedModel`](#aiselfhostedmodel) | The item at the end of the edge. |
#### `AlertManagementAlertConnection`
The connection type for [`AlertManagementAlert`](#alertmanagementalert).
@ -16285,6 +16322,22 @@ Extra metadata for AI message.
| <a id="aimetricscodesuggestionscontributorscount"></a>`codeSuggestionsContributorsCount` | [`Int!`](#int) | Number of code contributors who used GitLab Duo Code Suggestions features. |
| <a id="aimetricscodesuggestionsusagerate"></a>`codeSuggestionsUsageRate` | [`Float!`](#float) | Percentage of contributors who used GitLab Duo Code Suggestions features. |
### `AiSelfHostedModel`
Self-hosted LLM servers.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="aiselfhostedmodelcreatedat"></a>`createdAt` | [`Time!`](#time) | Date of creation. |
| <a id="aiselfhostedmodelendpoint"></a>`endpoint` | [`String!`](#string) | Endpoint of the Self-Hosted model server. |
| <a id="aiselfhostedmodelhasapitoken"></a>`hasApiToken` | [`Boolean!`](#boolean) | Indicates if an API key is set for the Self-Hosted model server. |
| <a id="aiselfhostedmodelid"></a>`id` | [`AiSelfHostedModelID!`](#aiselfhostedmodelid) | ID of the Self-Hosted model server. |
| <a id="aiselfhostedmodelmodel"></a>`model` | [`String!`](#string) | Model running the Self-Hosted model server. |
| <a id="aiselfhostedmodelmodifiedat"></a>`modifiedAt` | [`Time!`](#time) | Date of last modification. |
| <a id="aiselfhostedmodelname"></a>`name` | [`String!`](#string) | Given name of the Self-Hosted model server. |
### `AlertManagementAlert`
Describes an alert from the project's Alert Management.
@ -35836,6 +35889,12 @@ A `AiModelID` is a global ID. It is encoded as a string.
An example `AiModelID` is: `"gid://gitlab/Ai::Model/1"`.
### `AiSelfHostedModelID`
A `AiSelfHostedModelID` is a global ID. It is encoded as a string.
An example `AiSelfHostedModelID` is: `"gid://gitlab/Ai::SelfHostedModel/1"`.
### `AlertManagementAlertID`
A `AlertManagementAlertID` is a global ID. It is encoded as a string.

View File

@ -329,6 +329,10 @@ Example response:
## List all billable members of a group
Prerequisites:
- You must have the Owner role to access the API endpoint for billing permissions, as shown in [billing permissions](../user/free_user_limit.md).
Gets a list of group members that count as billable. The list includes members in subgroups and projects.
This API endpoint works on top-level groups only. It does not work on subgroups.
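A hedged sketch of calling the endpoint from Ruby; the group ID and instance URL are placeholders, and the path follows the documented `GET /groups/:id/billable_members` REST route.

```ruby
require 'net/http'
require 'json'
require 'uri'

group_id = 42                        # placeholder top-level group ID
token    = ENV.fetch('GITLAB_TOKEN') # token belonging to a group Owner

uri = URI("https://gitlab.example.com/api/v4/groups/#{group_id}/billable_members")
req = Net::HTTP::Get.new(uri, 'PRIVATE-TOKEN' => token)

res = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(req) }
JSON.parse(res.body).each { |member| puts member['username'] }
```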

View File

@ -104,8 +104,6 @@ You can use the Docker executor to run jobs in a Docker container.
##### Docker-in-Docker with TLS enabled in the Docker executor
> - Introduced in GitLab Runner 11.11.
The Docker daemon supports connections over TLS. TLS is the default in Docker 19.03.12 and later.
WARNING:
@ -257,8 +255,6 @@ You can use the [Kubernetes executor](https://docs.gitlab.com/runner/executors/k
##### Docker-in-Docker with TLS enabled in Kubernetes
> - [Introduced](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/issues/106) in GitLab Runner Helm Chart 0.23.0.
To use Docker-in-Docker with TLS enabled in Kubernetes:
1. Using the
@ -411,9 +407,7 @@ Docker-in-Docker is the recommended configuration, but you should be aware of th
To use Docker commands in your CI/CD jobs, you can bind-mount `/var/run/docker.sock` into the
container. Docker is then available in the context of the image.
If you bind the Docker socket and you are
[using GitLab Runner 11.11 or later](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/1261),
you can no longer use `docker:24.0.5-dind` as a service. Volume bindings also affect services,
If you bind the Docker socket, you can't use `docker:24.0.5-dind` as a service. Volume bindings also affect services,
making them incompatible.
To make Docker available in the context of the image, you need to mount
@ -473,8 +467,6 @@ services:
##### The service in the GitLab Runner configuration file
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27173) in GitLab Runner 13.6.
If you are a GitLab Runner administrator, you can specify the `command` to configure the registry mirror
for the Docker daemon. The `dind` service must be defined for the
[Docker](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#the-runnersdockerservices-section)
@ -543,8 +535,6 @@ detected by the `dind` service.
##### The Kubernetes executor in the GitLab Runner configuration file
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/3223) in GitLab Runner 13.6.
If you are a GitLab Runner administrator, you can use
the mirror for every `dind` service. Update the
[configuration](https://docs.gitlab.com/runner/configuration/advanced-configuration.html)

View File

@ -250,15 +250,15 @@ There was a proposal from a contributor, but the approach is not without some do
### Broken Master Fixes
When you need to [fix a broken `master`](https://handbook.gitlab.com/handbook/engineering/workflow/#resolution-of-broken-master), you can add the `pipeline:expedite` label to expedite the pipelines that run on the merge request.
When you need to [fix a broken `master`](https://handbook.gitlab.com/handbook/engineering/workflow/#resolution-of-broken-master), you can add the `pipeline::expedited` label to expedite the pipelines that run on the merge request.
Note that the merge request also needs to have the `master:broken` or `master:foss-broken` label set.
### Revert MRs
To make your Revert MRs faster, use the [revert MR template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/merge_request_templates/Revert%20To%20Resolve%20Incident.md) **before** you create your merge request. It will apply the `pipeline:expedite` label and others that will expedite the pipelines that run on the merge request.
To make your Revert MRs faster, use the [revert MR template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/merge_request_templates/Revert%20To%20Resolve%20Incident.md) **before** you create your merge request. It will apply the `pipeline::expedited` label and others that will expedite the pipelines that run on the merge request.
### The `pipeline:expedite` label
### The `pipeline::expedited` label
When this label is assigned, the following steps of the CI/CD pipeline are skipped:

View File

@ -101,6 +101,23 @@ In GitLab 16.11, PostgreSQL will automatically be upgraded to 14.x except for th
Fault-tolerant and Geo installations support manual upgrades to PostgreSQL 14,
see [Packaged PostgreSQL deployed in an HA/Geo Cluster](https://docs.gitlab.com/omnibus/settings/database.html#packaged-postgresql-deployed-in-an-hageo-cluster).
### Geo installations
- Due to a bug introduced in GitLab 16.5 and fixed in 17.0, [GitLab Pages](../../administration/pages/index.md) deployment files are being orphaned on secondary Geo sites. If Pages deployments are stored locally, this can lead to zero remaining storage and subsequently data loss in the event of a failover.
See details of the problem and workaround in issue [#457159](https://gitlab.com/gitlab-org/gitlab/-/issues/457159).
**Affected releases**:
| Affected minor releases | Affected patch releases | Fixed in |
| ----------------------- | ----------------------- | -------- |
| 16.5 | All | None |
| 16.6 | All | None |
| 16.7 | All | None |
| 16.8 | All | None |
| 16.9 | All | None |
| 16.10 | All | None |
| 16.11 | All | None |
## 16.10.0
You might encounter the following error while upgrading to GitLab 16.10 or later:
@ -150,6 +167,23 @@ If this is your case, read [Multi-node upgrades with downtime](../../update/with
For more information on the changes introduced between version 2.1.0 and version 3.0.1, see the [Patroni release notes](https://patroni.readthedocs.io/en/latest/releases.html).
### Geo installations
- Due to a bug introduced in GitLab 16.5 and fixed in 17.0, [GitLab Pages](../../administration/pages/index.md) deployment files are being orphaned on secondary Geo sites. If Pages deployments are stored locally, this can lead to zero remaining storage and subsequently data loss in the event of a failover.
See details of the problem and workaround in issue [#457159](https://gitlab.com/gitlab-org/gitlab/-/issues/457159).
**Affected releases**:
| Affected minor releases | Affected patch releases | Fixed in |
| ----------------------- | ----------------------- | -------- |
| 16.5 | All | None |
| 16.6 | All | None |
| 16.7 | All | None |
| 16.8 | All | None |
| 16.9 | All | None |
| 16.10 | All | None |
| 16.11 | All | None |
## 16.9.0
You might encounter the following error while upgrading to GitLab 16.9.0:
@ -193,6 +227,21 @@ planned for release in 16.9.1.
| 16.8 | 16.8.0 - 16.8.3 | 16.8.4 |
| 16.9 | 16.9.0 - 16.9.1 | 16.9.2 |
- Due to a bug introduced in GitLab 16.5 and fixed in 17.0, [GitLab Pages](../../administration/pages/index.md) deployment files are being orphaned on secondary Geo sites. If Pages deployments are stored locally, this can lead to zero remaining storage and subsequently data loss in the event of a failover.
See details of the problem and workaround in issue [#457159](https://gitlab.com/gitlab-org/gitlab/-/issues/457159).
**Affected releases**:
| Affected minor releases | Affected patch releases | Fixed in |
| ----------------------- | ----------------------- | -------- |
| 16.5 | All | None |
| 16.6 | All | None |
| 16.7 | All | None |
| 16.8 | All | None |
| 16.9 | All | None |
| 16.10 | All | None |
| 16.11 | All | None |
### Linux package installations
- The [Sidekiq `min_concurrency` and `max_concurrency`](../../administration/sidekiq/extra_sidekiq_processes.md#manage-thread-counts-with-min_concurrency-and-max_concurrency-fields-deprecated) options are deprecated in GitLab 16.9.0 and due for removal in GitLab 17.0.0. In GitLab 16.9.0 and later, to avoid breaking changes in GitLab 17.0.0, set the new [`concurrency`](../../administration/sidekiq/extra_sidekiq_processes.md#manage-thread-counts-with-concurrency-field) option and remove the `min_concurrency` and `max_concurrency` options.
@ -256,6 +305,21 @@ you must take one of the following actions based on your configuration:
| 16.8 | 16.8.0 - 16.8.3 | 16.8.4 |
| 16.9 | 16.9.0 - 16.9.1 | 16.9.2 |
- Due to a bug introduced in GitLab 16.5 and fixed in 17.0, [GitLab Pages](../../administration/pages/index.md) deployment files are being orphaned on secondary Geo sites. If Pages deployments are stored locally, this can lead to zero remaining storage and subsequently data loss in the event of a failover.
See details of the problem and workaround in issue [#457159](https://gitlab.com/gitlab-org/gitlab/-/issues/457159).
**Affected releases**:
| Affected minor releases | Affected patch releases | Fixed in |
| ----------------------- | ----------------------- | -------- |
| 16.5 | All | None |
| 16.6 | All | None |
| 16.7 | All | None |
| 16.8 | All | None |
| 16.9 | All | None |
| 16.10 | All | None |
| 16.11 | All | None |
## 16.7.0
- GitLab 16.7 is a required upgrade stop. This ensures that all database changes introduced
@ -331,6 +395,21 @@ take one of the following actions based on your configuration:
| 16.8 | 16.8.0 - 16.8.3 | 16.8.4 |
| 16.9 | 16.9.0 - 16.9.1 | 16.9.2 |
- Due to a bug introduced in GitLab 16.5 and fixed in 17.0, [GitLab Pages](../../administration/pages/index.md) deployment files are being orphaned on secondary Geo sites. If Pages deployments are stored locally, this can lead to zero remaining storage and subsequently data loss in the event of a failover.
See details of the problem and workaround in issue [#457159](https://gitlab.com/gitlab-org/gitlab/-/issues/457159).
**Affected releases**:
| Affected minor releases | Affected patch releases | Fixed in |
| ----------------------- | ----------------------- | -------- |
| 16.5 | All | None |
| 16.6 | All | None |
| 16.7 | All | None |
| 16.8 | All | None |
| 16.9 | All | None |
| 16.10 | All | None |
| 16.11 | All | None |
## 16.6.0
- GitLab 16.6 introduces a background migration that re-writes every row in the
@ -405,6 +484,21 @@ take one of the following actions based on your configuration:
| 16.8 | 16.8.0 - 16.8.3 | 16.8.4 |
| 16.9 | 16.9.0 - 16.9.1 | 16.9.2 |
- Due to a bug introduced in GitLab 16.5 and fixed in 17.0, [GitLab Pages](../../administration/pages/index.md) deployment files are being orphaned on secondary Geo sites. If Pages deployments are stored locally, this can lead to zero remaining storage and subsequently data loss in the event of a failover.
See details of the problem and workaround in issue [#457159](https://gitlab.com/gitlab-org/gitlab/-/issues/457159).
**Affected releases**:
| Affected minor releases | Affected patch releases | Fixed in |
| ----------------------- | ----------------------- | -------- |
| 16.5 | All | None |
| 16.6 | All | None |
| 16.7 | All | None |
| 16.8 | All | None |
| 16.9 | All | None |
| 16.10 | All | None |
| 16.11 | All | None |
## 16.5.0
- Git 2.42.0 and later is required by Gitaly. For self-compiled installations, you should use the [Git version provided by Gitaly](../../install/installation.md#git).
@ -546,6 +640,21 @@ Specific information applies to installations using Geo:
| 16.8 | 16.8.0 - 16.8.3 | 16.8.4 |
| 16.9 | 16.9.0 - 16.9.1 | 16.9.2 |
- Due to a bug introduced in GitLab 16.5 and fixed in 17.0, [GitLab Pages](../../administration/pages/index.md) deployment files are being orphaned on secondary Geo sites. If Pages deployments are stored locally, this can lead to zero remaining storage and subsequently data loss in the event of a failover.
See details of the problem and workaround in issue [#457159](https://gitlab.com/gitlab-org/gitlab/-/issues/457159).
**Affected releases**:
| Affected minor releases | Affected patch releases | Fixed in |
| ----------------------- | ----------------------- | -------- |
| 16.5 | All | None |
| 16.6 | All | None |
| 16.7 | All | None |
| 16.8 | All | None |
| 16.9 | All | None |
| 16.10 | All | None |
| 16.11 | All | None |
## 16.4.0
- Updating a group path [received a bug fix](https://gitlab.com/gitlab-org/gitlab/-/issues/419289) that uses a database index introduced in 16.3.

View File

@ -321,7 +321,7 @@ You can customize which [secrets are reported in the GitLab UI](#pipeline-secret
However, the `secret_detection` job logs always include the number
of secrets detected by the default Pipeline Secret Detection rules.
The following customization options can be used separately, or in combination (except for disabling or overriding rules when using a remote configuration file):
The following customization options can be used separately, or in combination (except for synthesizing a custom configuration with a remote configuration file):
- [Disable predefined rules](#disable-predefined-analyzer-rules).
- [Override predefined rules](#override-predefined-analyzer-rules).
@ -333,8 +333,12 @@ The following customization options can be used separately, or in combination (e
You can use passthroughs to override the default Pipeline Secret Detection ruleset. The
following passthrough types are supported by the `secrets` analyzer:
- `raw`
- `file`
- `raw`: Include custom rules directly in the `secret-detection-ruleset.toml` file.
- `file`: Include custom rules in a separate file in the project's repository.
NOTE:
The `file` option can only be used to synthesize a custom configuration from
a file in the project's repository, not [a remote configuration file](#specify-a-remote-configuration-file).
To define a passthrough, add _one_ of the following to the
`secret-detection-ruleset.toml` file:
@ -357,9 +361,6 @@ To define a passthrough, add _one_ of the following to the
"""
```
NOTE:
The `file` passthrough only works with an external file that is committed to the current repository. It cannot be used to synthesize a custom configuration from [a remote configuration file](#specify-a-remote-configuration-file).
- Using an external `file` committed to the current repository:
```toml

View File

@ -448,6 +448,8 @@ Group items that are migrated to the destination GitLab instance include:
| Subgroups | [GitLab 13.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/18938) |
| Uploads | [GitLab 13.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/18938) |
**Footnotes:**
1. Epic resource state events [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/291983) in GitLab 15.4, label
associations [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/62074) in GitLab 13.12, state and
state ID [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/28203) in GitLab 13.7, and system note

Binary file not shown (removed image, 107 KiB).

View File

@ -206,38 +206,33 @@ DETAILS:
> - Introduced in GitLab 15.11 as an [experiment](../../../policy/experiment-beta-support.md#experiment) on GitLab.com.
GitLab Duo Code explanation is an [experiment](../../../policy/experiment-beta-support.md#experiment).
If you spend a lot of time trying to understand code that others have created, or
you struggle to understand code written in a language you are not familiar with,
you can ask GitLab Duo to explain the code to you.
To use this feature:
Prerequisites:
- The parent group of the project must:
- Enable the [experiment and beta features setting](../../../user/ai_features_enable.md#turn-on-beta-and-experimental-features).
- You must:
- Belong to at least one group with the [experiment and beta features setting](../../../user/ai_features_enable.md#turn-on-beta-and-experimental-features) enabled.
- Have sufficient permissions to view the project.
- You must belong to at least one group with the
[experiment and beta features setting](../../../user/ai_features_enable.md#turn-on-beta-and-experimental-features) enabled.
- You must have access to view the project.
GitLab can help you get up to speed faster if you:
- Spend a lot of time trying to understand pieces of code that others have created, or
- Struggle to understand code written in a language that you are not familiar with.
By using a large language model, GitLab can explain the code in natural language.
To explain your code in a merge request:
To explain the code in a merge request:
1. On the left sidebar, select **Search or go to** and find your project.
1. Select **Code > Merge requests**, then select your merge request.
1. On the secondary menu, select **Changes**.
1. Select **Changes**.
1. On the file you would like explained, select the three dots (**{ellipsis_v}**) and select **View File @ $SHA**.
A separate browser tab opens and shows the full file with the latest changes.
1. On the new tab, select the lines that you want to have explained.
1. On the left side, select the question mark (**{question}**). You might have to scroll to the first line of your selection to view it. This sends the selected code, together with a prompt, to provide an explanation to the large language model.
1. A drawer is displayed on the right side of the page. Wait a moment for the explanation to be generated.
1. Provide feedback about how satisfied you are with the explanation, so we can improve the results.
1. On the new tab, select the lines you want to have explained.
1. On the left side, select the question mark (**{question}**). You might have to scroll to the first line of your selection to view it.
![How to use the Explain Code Experiment](../../../user/img/explain_code_experiment.png)
![explain code in a merge request](../repository/img/explain_code_v17_1.png)
Duo Chat explains the code. It might take a moment for the explanation to be generated.
If you'd like, you can provide feedback about the quality of the explanation.
We cannot guarantee that the large language model produces results that are correct. Use the explanation with caution.

View File

@ -14,33 +14,29 @@ DETAILS:
> - Introduced in GitLab 15.11 as an [experiment](../../../policy/experiment-beta-support.md#experiment) on GitLab.com.
GitLab Duo Code explanation is an [experiment](../../../policy/experiment-beta-support.md#experiment).
If you spend a lot of time trying to understand code that others have created, or
you struggle to understand code written in a language you are not familiar with,
you can ask GitLab Duo to explain the code to you.
To use this feature:
Prerequisites:
- The parent group of the project must:
- Enable the [experiment and beta features setting](../../../user/ai_features_enable.md#turn-on-beta-and-experimental-features).
- You must:
- Belong to at least one group with the [experiment and beta features setting](../../../user/ai_features_enable.md#turn-on-beta-and-experimental-features) enabled.
- Have sufficient permissions to view the project.
- You must belong to at least one group with the
[experiment and beta features setting](../../../user/ai_features_enable.md#turn-on-beta-and-experimental-features) enabled.
- You must have access to view the project.
GitLab can help you get up to speed faster if you:
- Spend a lot of time trying to understand pieces of code that others have created, or
- Struggle to understand code written in a language that you are not familiar with.
By using a large language model, GitLab can explain the code in natural language.
To explain your code in a file:
To explain the code in a file:
1. On the left sidebar, select **Search or go to** and find your project.
1. Select any file in your project that contains code.
1. On the file, select the lines that you want to have explained.
1. On the left side, select the question mark (**{question}**). You might have to scroll to the first line of your selection to view it. This sends the selected code, together with a prompt, to provide an explanation to the large language model.
1. A drawer is displayed on the right side of the page. Wait a moment for the explanation to be generated.
1. Provide feedback about how satisfied you are with the explanation, so we can improve the results.
1. Select a file that contains code.
1. Select the lines you want explained.
1. On the left side, select the question mark (**{question}**).
You might have to scroll to the first line of your selection to view it.
![How to use the Explain Code Experiment](../../../user/img/explain_code_experiment.png)
![explain code in a file](img/explain_code_v17_1.png)
Duo Chat explains the code. It might take a moment for the explanation to be generated.
If you'd like, you can provide feedback about the quality of the explanation.
We cannot guarantee that the large language model produces results that are correct. Use the explanation with caution.

View File

@ -13,6 +13,19 @@ DETAILS:
When working with GitLab Duo Code Suggestions, you might encounter the following issues.
## Verify Code Suggestions status in Admin Area
Prerequisites:
- You must be an administrator of the instance.
You can verify that Code Suggestions is set up correctly on the server. Because this check runs on the server side and not for a specific user, it does not guarantee that Code Suggestions will work for a specific user on the client side.
1. On the left sidebar, at the bottom, select **Admin Area**.
1. Select **GitLab Duo Pro**.
A flash message with the Code Suggestions check status is displayed at the top of the page.
## Code Suggestions are not displayed
If Code Suggestions are not displayed:

Binary file not shown (added image, 18 KiB).

View File

@ -22,6 +22,8 @@ These environments ensure that different projects don't interfere with each othe
Each workspace includes its own set of dependencies, libraries, and tools,
which you can customize to meet the specific needs of each project.
For a click-through demo, see [GitLab workspaces](https://tech-marketing.gitlab.io/static-demos/workspaces/ws_html.html).
## Workspaces and projects
Workspaces are scoped to a project.
@ -221,7 +223,3 @@ see [Create a custom workspace image that supports arbitrary user IDs](../worksp
For more information, see the
[OpenShift documentation](https://docs.openshift.com/container-platform/4.12/openshift_images/create-images.html#use-uid_create-images).
## Related topics
- [GitLab workspaces demo](https://go.gitlab.com/qtu66q)

View File

@ -205,7 +205,7 @@ module Keeps
'maintenance::refactor',
'test',
'failure::flaky-test',
'pipeline:expedite',
'pipeline::expedited',
'quarantine',
'quarantine::flaky',
group_label

View File

@ -8,20 +8,12 @@ module API
expose :markdown_name, as: :alt
expose :secure_url, as: :url
expose :full_path do |uploader|
if ::Feature.enabled?(:use_ids_for_markdown_upload_urls, uploader.model)
banzai_upload_path(
'project',
uploader.model.id,
uploader.secret,
uploader.filename
)
else
show_project_uploads_path(
uploader.model,
uploader.secret,
uploader.filename
)
end
banzai_upload_path(
'project',
uploader.model.id,
uploader.secret,
uploader.filename
)
end
expose :markdown_link, as: :markdown

View File

@ -32,17 +32,9 @@ module Banzai
path_parts = [unescape_and_scrub_uri(html_attr.value)]
if project
if Feature.enabled?(:use_ids_for_markdown_upload_urls, project)
path_parts.unshift(relative_url_root, '-', 'project', project.id.to_s)
else
path_parts.unshift(relative_url_root, project.full_path)
end
path_parts.unshift(relative_url_root, '-', 'project', project.id.to_s)
elsif group
if Feature.enabled?(:use_ids_for_markdown_upload_urls, group)
path_parts.unshift(relative_url_root, '-', 'group', group.id.to_s)
else
path_parts.unshift(relative_url_root, 'groups', group.full_path, '-')
end
path_parts.unshift(relative_url_root, '-', 'group', group.id.to_s)
else
path_parts.unshift(relative_url_root)
end

View File

@ -2,23 +2,28 @@
module ClickHouse
module WriteBuffer
BUFFER_KEY = 'clickhouse_write_buffer'
BUFFER_KEY_PREFIX = 'clickhouse_write_buffer_'
class << self
# Currently scoped to code suggestion events only
def write_event(event_hash)
def add(table_name, event_hash)
Gitlab::Redis::SharedState.with do |redis|
redis.rpush(BUFFER_KEY, event_hash.to_json)
redis.rpush(buffer_key(table_name), event_hash.to_json)
end
end
def pop_events(limit)
def pop(table_name, limit)
Gitlab::Redis::SharedState.with do |redis|
Array.wrap(redis.lpop(BUFFER_KEY, limit)).map do |hash|
Array.wrap(redis.lpop(buffer_key(table_name), limit)).map do |hash|
Gitlab::Json.parse(hash, symbolize_names: true)
end
end
end
private
def buffer_key(table_name)
"#{BUFFER_KEY_PREFIX}#{table_name}"
end
end
end
end
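A minimal usage sketch of the revised per-table buffer interface, assuming Redis shared state is configured; the table name here is hypothetical.

```ruby
# Buffer two events for a (hypothetical) ClickHouse table.
ClickHouse::WriteBuffer.add(:code_suggestion_events, { user_id: 1, event: 'shown' })
ClickHouse::WriteBuffer.add(:code_suggestion_events, { user_id: 2, event: 'accepted' })

# A background worker can later drain up to N buffered events for that table.
ClickHouse::WriteBuffer.pop(:code_suggestion_events, 100)
# => [{ user_id: 1, event: "shown" }, { user_id: 2, event: "accepted" }]
```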

View File

@ -46,15 +46,18 @@ module Gitlab
def perform
each_sub_batch do |sub_batch|
# prevent an epic being updated while we sync its data to issues table. Wrap the locking into a transaction
# so that locks are kept for the duration of transaction.
sub_batch_with_lock = sub_batch.lock!('FOR UPDATE')
# First update any epics with a not null issue_id and only afterwards follow-up with the epics
# without an issue_id, otherwise we end up updating the same issues/epics twice, as first time we'd
# fetch epics without an issue_id then set the issue_id and then we query the same batch for epics
# with an issue_id we just did set.
backfill_epics_with_synced_work_item(sub_batch)
backfill_epics_without_synced_work_item(sub_batch)
backfill_epics_with_synced_work_item(sub_batch_with_lock)
backfill_epics_without_synced_work_item(sub_batch_with_lock)
# force reload the batch as it now should have the issue_id set and we need it
# to create work_item_colors records.
backfill_epics_color(sub_batch.all)
backfill_epics_color(sub_batch_with_lock.all)
end
end
@ -62,11 +65,8 @@ module Gitlab
def backfill_epics_without_synced_work_item(sub_batch)
Issues.transaction do
# prevent an epic being updated while we sync its data to issues table. Wrap the locking into a transaction
# so that locks are kept for the duration of transaction.
# without_sync_work_item = sub_batch.where(issue_id: nil).lock!('FOR UPDATE').load
cte = Gitlab::SQL::CTE.new(:batched_relation, sub_batch)
without_sync_work_item = cte.apply_to(Epics.all).where(issue_id: nil).lock!('FOR UPDATE').load
without_sync_work_item = cte.apply_to(Epics.all).where(issue_id: nil)
work_items = build_work_items(epic_work_item_type_id, without_sync_work_item)
unless work_items.blank?
@ -81,11 +81,8 @@ module Gitlab
def backfill_epics_with_synced_work_item(sub_batch)
Issues.transaction do
# prevent an epic being updated while we sync its data to issues table. Wrap the locking into a transaction
# so that locks are kept for the duration of transaction.
# with_sync_work_item = sub_batch.where.not(issue_id: nil).lock!('FOR UPDATE').load
cte = Gitlab::SQL::CTE.new(:batched_relation, sub_batch)
with_sync_work_item = cte.apply_to(Epics.all).where.not(issue_id: nil).lock!('FOR UPDATE').load
with_sync_work_item = cte.apply_to(Epics.all).where.not(issue_id: nil)
work_items = build_work_items(epic_work_item_type_id, with_sync_work_item, epics_with_synced_work_item: true)
Issues.upsert_all(work_items, unique_by: :id) unless work_items.blank?
@ -94,9 +91,6 @@ module Gitlab
def backfill_epics_color(sub_batch)
Issues.transaction do
# prevent an epic being updated while we sync its data to issues table. Wrap the locking into a transaction
# so that locks are kept for the duration of transaction.
sub_batch.where(issue_id: nil).lock!('FOR UPDATE').load
work_items_color = build_work_items_color(sub_batch)
WorkItemColors.upsert_all(work_items_color, unique_by: :issue_id) unless work_items_color.blank?


@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillMlExperimentMetadataProjectId < BackfillDesiredShardingKeyJob
operation_name :backfill_ml_experiment_metadata_project_id
feature_category :mlops
end
end
end


@ -28,20 +28,18 @@ module Gitlab
Epics.transaction do
# Prevent an epic from being updated while we sync its data to the work_item_parent_links table.
# Wrap the locking in a transaction so that the locks are held for the duration of the transaction.
cte = Gitlab::SQL::CTE.new(:batched_relation, sub_batch)
parents_and_children_batch =
cte.apply_to(Epics.all)
.joins("INNER JOIN epics parent_epics ON epics.parent_id = parent_epics.id")
.joins("INNER JOIN issues ON parent_epics.issue_id = issues.id")
.select(
<<-SQL
epics.issue_id AS child_id,
epics.relative_position,
parent_epics.issue_id AS parent_id,
issues.namespace_id as namespace_id
SQL
).lock!('FOR UPDATE').load
sub_batch
.joins("INNER JOIN epics parent_epics ON epics.parent_id = parent_epics.id")
.joins("INNER JOIN issues ON parent_epics.issue_id = issues.id")
.select(
<<-SQL
epics.issue_id AS child_id,
epics.relative_position,
parent_epics.issue_id AS parent_id,
issues.namespace_id as namespace_id
SQL
).lock!('FOR UPDATE').load
parent_links = build_relationship(parents_and_children_batch)
WorkItemParentLinks.upsert_all(parent_links, unique_by: :work_item_id) unless parent_links.blank?


@ -61,14 +61,14 @@ module Gitlab
def validate_uniqueness(variables)
duplicated_keys = variables
.map { |var| var[:key] }
.map { |var| var[:key] } # rubocop: disable Rails/Pluck -- pluck would raise an error here too
.tally
.filter_map { |key, count| key if count > 1 }
if duplicated_keys.empty?
variables
else
error(duplicate_variables_message(duplicated_keys), config_error: true)
error(duplicate_variables_message(duplicated_keys), failure_reason: :config_error)
[]
end
end
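As a standalone illustration of the duplicate-key detection above (the variables array is made up):

variables = [{ key: 'FOO', value: '1' }, { key: 'BAR', value: '2' }, { key: 'FOO', value: '3' }]

duplicated_keys = variables
  .map { |var| var[:key] }                       # => ["FOO", "BAR", "FOO"]
  .tally                                         # => { "FOO" => 2, "BAR" => 1 }
  .filter_map { |key, count| key if count > 1 }  # => ["FOO"]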


@ -33,7 +33,7 @@ module Gitlab
if result.valid?
@command.yaml_processor_result = result
else
error(result.errors.first, config_error: true)
error(result.errors.first, failure_reason: :config_error)
end
@pipeline.config_metadata = result.config_metadata
@ -45,7 +45,7 @@ module Gitlab
)
error("Undefined error (#{Labkit::Correlation::CorrelationId.current_id})",
config_error: true)
failure_reason: :config_error)
end
def break?


@ -29,7 +29,7 @@ module Gitlab
error(
'Pipeline filtered out by workflow rules.',
drop_reason: :filtered_by_workflow_rules
failure_reason: :filtered_by_workflow_rules
)
end
@ -44,10 +44,9 @@ module Gitlab
end
def workflow_rules_result
strong_memoize(:workflow_rules_result) do
workflow_rules.evaluate(@pipeline, global_context)
end
workflow_rules.evaluate(@pipeline, global_context)
end
strong_memoize_attr :workflow_rules_result
def workflow_rules
Gitlab::Ci::Build::Rules.new(
@ -64,10 +63,9 @@ module Gitlab
end
def workflow_rules_config
strong_memoize(:workflow_rules_config) do
@command.yaml_processor_result.workflow_rules
end
@command.yaml_processor_result.workflow_rules
end
strong_memoize_attr :workflow_rules_config
# rubocop:disable Gitlab/NoCodeCoverageComment -- method is tested in EE
# :nocov:


@ -5,17 +5,14 @@ module Gitlab
module Pipeline
module Chain
module Helpers
def error(message, config_error: false, drop_reason: nil)
def error(message, failure_reason: nil)
sanitized_message = ActionController::Base.helpers.sanitize(message, tags: [])
if config_error
drop_reason = :config_error
pipeline.yaml_errors = sanitized_message
end
pipeline.yaml_errors = sanitized_message if failure_reason == :config_error
pipeline.add_error_message(sanitized_message)
drop_pipeline!(drop_reason)
drop_pipeline!(failure_reason)
# TODO: consider not relying on AR errors directly, as they can be
# polluted with other unrelated errors (e.g. state machine)
@ -32,21 +29,21 @@ module Gitlab
private
def drop_pipeline!(drop_reason)
def drop_pipeline!(failure_reason)
if pipeline.readonly?
# Only set the status and reason without tracking failures
pipeline.set_failed(drop_reason)
elsif Enums::Ci::Pipeline.persistable_failure_reason?(drop_reason) && command.save_incompleted
pipeline.set_failed(failure_reason)
elsif Enums::Ci::Pipeline.persistable_failure_reason?(failure_reason) && command.save_incompleted
# The project iid must be generated outside a transaction, so we ensure it is set here;
# otherwise it may be set within the state transition transaction of the drop! call,
# which would lock the InternalId row for the whole transaction
pipeline.ensure_project_iid!
pipeline.drop!(drop_reason)
pipeline.drop!(failure_reason)
else
command.increment_pipeline_failure_reason_counter(drop_reason)
command.increment_pipeline_failure_reason_counter(failure_reason)
pipeline.set_failed(drop_reason)
pipeline.set_failed(failure_reason)
end
end
end
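For orientation, call sites elsewhere in this commit now pass a single failure_reason keyword instead of the old config_error/drop_reason pair, for example:

error(seed_errors.join("\n"), failure_reason: :config_error)   # also populates pipeline.yaml_errors
error(MESSAGE, failure_reason: :job_activity_limit_exceeded)   # persisted and dropped when allowed
error('Failed to build the pipeline!')                         # no reason: counter bumped, pipeline set to failed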


@ -15,7 +15,7 @@ module Gitlab
def perform!
return unless limits.exceeded?(LIMIT_NAME, count_jobs_in_alive_pipelines)
error(MESSAGE, drop_reason: :job_activity_limit_exceeded)
error(MESSAGE, failure_reason: :job_activity_limit_exceeded)
Gitlab::AppLogger.info(
class: self.class.name,


@ -24,7 +24,7 @@ module Gitlab
return unless limit.exceeded?
limit.log_error!(project_id: project.id, plan: project.actual_plan_name)
error(limit.message, drop_reason: :deployments_limit_exceeded)
error(limit.message, failure_reason: :deployments_limit_exceeded)
end
override :break?


@ -21,13 +21,11 @@ module Gitlab
return error(
'Pipeline will not run for the selected trigger. ' \
'The rules configuration prevented any jobs from being added to the pipeline.',
drop_reason: :filtered_by_rules
failure_reason: :filtered_by_rules
)
end
if pipeline.invalid?
return error('Failed to build the pipeline!')
end
return error('Failed to build the pipeline!') if pipeline.invalid?
raise Populate::PopulateError if pipeline.persisted?
end


@ -29,9 +29,7 @@ module Gitlab
pipeline_seed.errors
end
if seed_errors
return error(seed_errors.join("\n"), config_error: true)
end
return error(seed_errors.join("\n"), failure_reason: :config_error) if seed_errors
@command.pipeline_seed = pipeline_seed
end
@ -43,14 +41,13 @@ module Gitlab
private
def pipeline_seed
strong_memoize(:pipeline_seed) do
logger.instrument(:pipeline_seed_initialization, once: true) do
stages_attributes = @command.yaml_processor_result.stages_attributes
logger.instrument(:pipeline_seed_initialization, once: true) do
stages_attributes = @command.yaml_processor_result.stages_attributes
Gitlab::Ci::Pipeline::Seed::Pipeline.new(context, stages_attributes)
end
Gitlab::Ci::Pipeline::Seed::Pipeline.new(context, stages_attributes)
end
end
strong_memoize_attr :pipeline_seed
def context
Gitlab::Ci::Pipeline::Seed::Context.new(
@ -61,13 +58,11 @@ module Gitlab
end
def root_variables
strong_memoize(:root_variables) do
::Gitlab::Ci::Variables::Helpers.merge_variables(
@command.yaml_processor_result.root_variables,
@command.workflow_rules_result.variables
)
end
::Gitlab::Ci::Variables::Helpers.merge_variables(
@command.yaml_processor_result.root_variables,
@command.workflow_rules_result.variables)
end
strong_memoize_attr :root_variables
end
end
end
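Several hunks in this file and the previous one replace the strong_memoize block form with strong_memoize_attr; a minimal sketch of the equivalence on a made-up class, assuming Gitlab::Utils::StrongMemoize is available:

class Evaluator
  include Gitlab::Utils::StrongMemoize

  # The method body stays plain; memoization is declared once, after the definition.
  def result
    expensive_call
  end
  strong_memoize_attr :result

  private

  def expensive_call
    puts 'computing...'
    42
  end
end

evaluator = Evaluator.new
evaluator.result  # prints 'computing...' and returns 42
evaluator.result  # returns the memoized 42 without recomputing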


@ -20,7 +20,9 @@ module Gitlab
log_message = pipeline_authorized ? 'authorized' : 'not authorized'
Gitlab::AppLogger.info(message: "Pipeline #{log_message}", project_id: project.id, user_id: current_user.id)
error('External validation failed', drop_reason: :external_validation_failure) unless pipeline_authorized
return if pipeline_authorized
error('External validation failed', failure_reason: :external_validation_failure)
end
def break?


@ -37470,7 +37470,7 @@ msgstr ""
msgid "Paste project path (i.e. gitlab-org/gitlab)"
msgstr ""
msgid "Paste this DSN into your Sentry SDK"
msgid "Paste this Data Source Name (DSN) into your Sentry SDK."
msgstr ""
msgid "Patch to apply"


@ -278,8 +278,8 @@
"jest-jasmine2": "^28.1.3",
"jest-junit": "^12.3.0",
"jest-util": "^28.1.3",
"markdownlint-cli2": "^0.12.1",
"markdownlint-rule-helpers": "^0.20.0",
"markdownlint-cli2": "^0.13.0",
"markdownlint-rule-helpers": "^0.25.0",
"miragejs": "^0.1.40",
"mock-apollo-client": "1.2.0",
"nodemon": "^2.0.19",


@ -185,7 +185,7 @@ function run_locally_or_in_container() {
local cmd=$1
local args=$2
local files=$3
local registry_url="registry.gitlab.com/gitlab-org/gitlab-docs/lint-markdown:alpine-3.19-vale-3.0.7-markdownlint-0.39.0-markdownlint2-0.12.1"
local registry_url="registry.gitlab.com/gitlab-org/gitlab-docs/lint-markdown:alpine-3.20-vale-3.4.2-markdownlint2-0.13.0-lychee-0.15.1"
if hash "${cmd}" 2>/dev/null
then


@ -38,15 +38,8 @@ class SetPipelineName
#
# See https://docs.gitlab.com/ee/ci/jobs/ci_job_token.html for more info.
def initialize(api_endpoint:, gitlab_access_token:)
@api_endpoint = api_endpoint
@gitlab_access_token = gitlab_access_token
end
def gitlab
@gitlab ||= Gitlab.client(
endpoint: @api_endpoint,
private_token: @gitlab_access_token
)
@api_endpoint = api_endpoint
@gitlab_access_token = gitlab_access_token
end
def execute
@ -72,17 +65,18 @@ class SetPipelineName
private
attr_reader :api_endpoint, :gitlab_access_token
def api_client
@api_client ||= Gitlab.client(
endpoint: api_endpoint,
private_token: gitlab_access_token
)
end
def pipeline_tier
return unless ENV['CI_MERGE_REQUEST_LABELS']
# The first pipeline of any MR won't have any tier label, unless the label was added in the MR description
# before creating the MR. This is a known limitation.
#
# Fetching the labels from the API instead of relying on ENV['CI_MERGE_REQUEST_LABELS']
# would solve this problem, but it would also mean that we would update the tier information
# based on the merge request labels at the time of retrying the job, which isn't what we want.
merge_request_labels = ENV['CI_MERGE_REQUEST_LABELS'].split(',')
puts "Labels from the MR: #{merge_request_labels}"
return if expedited_pipeline?
tier_label = merge_request_labels.find { |label| label.start_with?('pipeline::tier-') }
return if tier_label.nil?
@ -90,14 +84,34 @@ class SetPipelineName
tier_label[/\d+\z/]
end
def merge_request_labels
# The first pipeline of any MR won't have any tier label, unless the label was added in the MR description
# before creating the MR. This is a known limitation.
#
# Fetching the labels from the API instead of relying on ENV['CI_MERGE_REQUEST_LABELS']
# would solve this problem, but it would also mean that we would update the tier information
# based on the merge request labels at the time of retrying the job, which isn't what we want.
@merge_request_labels ||= ENV.fetch('CI_MERGE_REQUEST_LABELS', '').split(',').tap do |labels|
puts "Labels from the MR: #{labels}"
end
end
def expedited_pipeline?
merge_request_labels.any?('pipeline::expedited') ||
# TODO: Remove once the label is renamed to be scoped
merge_request_labels.any?('pipeline:expedite')
end
def pipeline_types
return ['expedited'] if expedited_pipeline?
types = Set.new
gitlab.pipeline_bridges(ENV['CI_PROJECT_ID'], ENV['CI_PIPELINE_ID']).auto_paginate do |job|
api_client.pipeline_bridges(ENV['CI_PROJECT_ID'], ENV['CI_PIPELINE_ID']).auto_paginate do |job|
types.merge(pipeline_types_for(job))
end
gitlab.pipeline_jobs(ENV['CI_PROJECT_ID'], ENV['CI_PIPELINE_ID']).auto_paginate do |job|
api_client.pipeline_jobs(ENV['CI_PROJECT_ID'], ENV['CI_PIPELINE_ID']).auto_paginate do |job|
types.merge(pipeline_types_for(job))
end
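A rough standalone sketch of the new label handling (the label string is made up; in CI it comes from CI_MERGE_REQUEST_LABELS):

labels = 'backend,pipeline::tier-1,pipeline::expedited'.split(',')

expedited  = labels.any?('pipeline::expedited') || labels.any?('pipeline:expedite')
tier_label = labels.find { |label| label.start_with?('pipeline::tier-') }

expedited            # => true, so pipeline_types short-circuits to ['expedited'] without any API calls
tier_label[/\d+\z/]  # => "1", although pipeline_tier returns early when the pipeline is expedited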


@ -11,7 +11,7 @@ RSpec.describe GroupsController, factory_default: :keep, feature_category: :code
let_it_be_with_refind(:project) { create(:project, namespace: group) }
let_it_be(:user) { create(:user) }
let_it_be(:admin_with_admin_mode) { create(:admin) }
let_it_be(:admin_without_admin_mode) { create(:admin, :without_default_org) }
let_it_be(:admin_without_admin_mode) { create(:admin) }
let_it_be(:group_member) { create(:group_member, group: group, user: user) }
let_it_be(:owner) { group.add_owner(create(:user)).user }
let_it_be(:maintainer) { group.add_maintainer(create(:user)).user }


@ -84,7 +84,7 @@ RSpec.describe 'Admin::Hooks', feature_category: :webhooks do
end
context 'removes existing hook' do
it 'from hooks list page' do
it 'from hooks list page', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/451295' do
visit admin_hooks_path
accept_gl_confirm(button_text: 'Delete webhook') { click_link 'Delete' }


@ -248,7 +248,8 @@ RSpec.describe 'Expand and collapse diffs', :js, feature_category: :source_code_
let(:branch) { 'expand-collapse-lines' }
# safe-files -> 100 | safe-lines -> 5000 | commit_files -> 8 (each 1250 lines)
it 'does collapsing from the safe number of lines to the end' do
it 'does collapsing from the safe number of lines to the end',
quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/436532' do
expect(page).to have_link('Expand all')
expect(page).to have_selector('.diff-content', count: 6)


@ -248,7 +248,7 @@ RSpec.describe 'Group', feature_category: :groups_and_projects do
let_it_be(:group) { create(:group, path: 'foo') }
context 'as admin' do
let(:user) { create(:admin, :without_default_org) }
let(:user) { create(:admin) }
before do
visit new_group_path(parent_id: group.id, anchor: 'create-group-pane')


@ -87,7 +87,7 @@ RSpec.describe 'Multiple view Diffs', :js, feature_category: :source_code_manage
expect(page).to have_selector('td.line_content:not(.nomappinginraw) ~ td.diff-line-num > .add-diff-note')
end
it 'lines numbers without mapping are empty' do
it 'lines numbers without mapping are empty', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/452350' do
expect(page).not_to have_selector('td.nomappinginraw + td.diff-line-num')
expect(page).to have_selector('td.nomappinginraw + td.diff-line-num', visible: false)
end


@ -96,7 +96,7 @@ RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
end
context 'as an admin' do
let(:user) { create(:admin, :without_default_org) }
let(:user) { create(:admin) }
it_behaves_like 'shows correct navigation'


@ -186,7 +186,7 @@ RSpec.describe 'Projects > Settings > For a forked project', :js, feature_catego
click_button('Expand')
end
expect(page).to have_content('Paste this DSN into your Sentry SDK')
expect(page).to have_content('Paste this Data Source Name (DSN) into your Sentry SDK')
end
end
end


@ -12,6 +12,7 @@ import {
TOKEN_TYPE_DRAFT,
TOKEN_TYPE_SOURCE_BRANCH,
TOKEN_TYPE_TARGET_BRANCH,
TOKEN_TYPE_MR_ASSIGNEE,
} from '~/vue_shared/components/filtered_search_bar/constants';
import { mergeRequestListTabs } from '~/vue_shared/issuable/list/constants';
import { getSortOptions } from '~/issues/list/utils';
@ -107,6 +108,7 @@ describe('Merge requests list app', () => {
it('does not have preloaded users when gon.current_user_id does not exist', () => {
expect(findIssuableList().props('searchTokens')).toMatchObject([
{ type: TOKEN_TYPE_MR_ASSIGNEE },
{ type: TOKEN_TYPE_AUTHOR, preloadedUsers: [] },
{ type: TOKEN_TYPE_DRAFT },
{ type: TOKEN_TYPE_TARGET_BRANCH },
@ -116,8 +118,18 @@ describe('Merge requests list app', () => {
});
describe('when all tokens are available', () => {
const urlParams = {
mr_assignee_username: 'bob',
draft: 'yes',
'target_branches[]': 'branch-a',
'source_branches[]': 'branch-b',
};
const paramString = Object.entries(urlParams)
.map(([k, v]) => `${k}=${v}`)
.join('&');
beforeEach(async () => {
setWindowLocation('?draft=yes&target_branches[]=branch-a&source_branches[]=branch-b');
setWindowLocation(`?${paramString}`);
window.gon = {
current_user_id: mockCurrentUser.id,
current_user_fullname: mockCurrentUser.name,
@ -141,6 +153,7 @@ describe('Merge requests list app', () => {
];
expect(findIssuableList().props('searchTokens')).toMatchObject([
{ type: TOKEN_TYPE_MR_ASSIGNEE },
{ type: TOKEN_TYPE_AUTHOR, preloadedUsers },
{ type: TOKEN_TYPE_DRAFT },
{ type: TOKEN_TYPE_TARGET_BRANCH },
@ -150,6 +163,7 @@ describe('Merge requests list app', () => {
it('pre-displays tokens that are in the url search parameters', () => {
expect(findIssuableList().props('initialFilterValue')).toMatchObject([
{ type: TOKEN_TYPE_MR_ASSIGNEE },
{ type: TOKEN_TYPE_DRAFT },
{ type: TOKEN_TYPE_TARGET_BRANCH },
{ type: TOKEN_TYPE_SOURCE_BRANCH },


@ -116,28 +116,6 @@ RSpec.describe Banzai::Filter::UploadLinkFilter, feature_category: :team_plannin
expect(doc.at_css('img').classes).to include('gfm')
expect(doc.at_css('img')['data-link']).not_to eq('true')
end
context 'when use_ids_for_markdown_upload_urls is disabled' do
let(:relative_path) { "/#{project.full_path}#{upload_path}" }
before do
stub_feature_flags(use_ids_for_markdown_upload_urls: false)
end
it 'prepends project path to the URL' do
doc = filter(link(upload_path))
expect(doc.at_css('a')['href']).to eq(relative_path)
expect(doc.at_css('a').classes).to include('gfm')
expect(doc.at_css('a')['data-link']).to eq('true')
doc = filter(nested(link(upload_path)))
expect(doc.at_css('a')['href']).to eq(relative_path)
expect(doc.at_css('a').classes).to include('gfm')
expect(doc.at_css('a')['data-link']).to eq('true')
end
end
end
context 'to a group upload' do
@ -185,22 +163,6 @@ RSpec.describe Banzai::Filter::UploadLinkFilter, feature_category: :team_plannin
expect(doc.at_css('a').classes).not_to include('gfm')
expect(doc.at_css('a')['data-link']).not_to eq('true')
end
context 'when use_ids_for_markdown_upload_urls is disabled' do
let(:relative_path) { "/groups/#{group.full_path}/-/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg" }
before do
stub_feature_flags(use_ids_for_markdown_upload_urls: false)
end
it 'prepends group path to the URL' do
doc = filter(upload_link)
expect(doc.at_css('a')['href']).to eq(relative_path)
expect(doc.at_css('a').classes).to include('gfm')
expect(doc.at_css('a')['data-link']).to eq('true')
end
end
end
context 'to a personal snippet' do


@ -3,17 +3,19 @@
require 'spec_helper'
RSpec.describe ClickHouse::WriteBuffer, :clean_gitlab_redis_shared_state, feature_category: :database do
describe '.write_event' do
subject(:write_event) { described_class.write_event(event_hash) }
let(:table_name) { 'test' }
describe '.add' do
subject(:add_event) { described_class.add(table_name, event_hash) }
let(:event_hash) { { foo: 'bar' } }
it 'saves ClickHouse event to Redis' do
expect do
write_event
add_event
end.to change {
Gitlab::Redis::SharedState.with do |redis|
redis.lrange(described_class::BUFFER_KEY, 0, 10)
redis.lrange(described_class::BUFFER_KEY_PREFIX + table_name, 0, 10)
end
}.from([]).to([event_hash.to_json])
end
@ -27,15 +29,15 @@ RSpec.describe ClickHouse::WriteBuffer, :clean_gitlab_redis_shared_state, featur
let(:event3) { { foo: 'bar3' } }
before do
described_class.write_event(event1)
described_class.write_event(event2)
described_class.write_event(event3)
described_class.add(table_name, event1)
described_class.add(table_name, event2)
described_class.add(table_name, event3)
end
it 'pops events from redis' do
expect(described_class.pop_events(limit)).to eq([event1, event2])
expect(described_class.pop_events(limit)).to eq([event3])
expect(described_class.pop_events(limit)).to eq([])
expect(described_class.pop(table_name, limit)).to eq([event1, event2])
expect(described_class.pop(table_name, limit)).to eq([event3])
expect(described_class.pop(table_name, limit)).to eq([])
end
end
end


@ -0,0 +1,15 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillMlExperimentMetadataProjectId,
feature_category: :mlops,
schema: 20240604074200 do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :ml_experiment_metadata }
let(:backfill_column) { :project_id }
let(:backfill_via_table) { :ml_experiments }
let(:backfill_via_column) { :project_id }
let(:backfill_via_foreign_key) { :experiment_id }
end
end


@ -3,6 +3,10 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers, feature_category: :continuous_integration do
let(:pipeline) { build(:ci_empty_pipeline) }
let(:command) { instance_double(::Gitlab::Ci::Pipeline::Chain::Command, save_incompleted: true) }
let(:message) { 'message' }
let(:helper_class) do
Class.new do
include Gitlab::Ci::Pipeline::Chain::Helpers
@ -18,17 +22,13 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers, feature_category: :continuo
subject(:helper) { helper_class.new(pipeline, command) }
let(:pipeline) { build(:ci_empty_pipeline) }
let(:command) { double(save_incompleted: true, dry_run?: false) }
let(:message) { 'message' }
describe '.warning' do
context 'when the warning includes malicious HTML' do
let(:message) { '<div>gimme your password</div>' }
let(:sanitized_message) { 'gimme your password' }
it 'sanitizes' do
subject.warning(message)
helper.warning(message)
expect(pipeline.warning_messages[0].content).to include(sanitized_message)
end
@ -42,15 +42,15 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers, feature_category: :continuo
if command.save_incompleted
expect(pipeline).to receive(:ensure_project_iid!).twice.and_call_original
expect(pipeline).to receive(:drop!).with(drop_reason).and_call_original
expect(pipeline).to receive(:drop!).with(failure_reason).and_call_original
end
subject.error(message, config_error: config_error, drop_reason: drop_reason)
helper.error(message, failure_reason: failure_reason)
expect(pipeline.yaml_errors).to eq(yaml_error)
expect(pipeline.errors[:base]).to include(message)
expect(pipeline.status).to eq 'failed'
expect(pipeline.failure_reason).to eq drop_reason.to_s
expect(pipeline.failure_reason).to eq failure_reason.to_s
end
end
@ -59,29 +59,28 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers, feature_category: :continuo
let(:sanitized_message) { 'gimme your password' }
it 'sanitizes the error and removes the HTML tags' do
subject.error(message, config_error: true, drop_reason: :config_error)
helper.error(message, failure_reason: :config_error)
expect(pipeline.yaml_errors).to eq(sanitized_message)
expect(pipeline.errors[:base]).to include(sanitized_message)
end
end
context 'when given a drop reason' do
context 'when config error is true' do
context 'sets the yaml error and overrides the drop reason' do
let(:drop_reason) { :config_error }
let(:config_error) { true }
let(:yaml_error) { message }
context 'when failure_reason is present' do
context 'when failure_reason is `config_error`' do
let(:failure_reason) { :config_error }
let(:yaml_error) { message }
it_behaves_like "error function"
end
it_behaves_like "error function"
end
context 'when drop_reason is nil' do
let(:command) { double(project: nil, dry_run?: false) }
context 'when failure_reason is nil' do
let(:command) do
instance_double(::Gitlab::Ci::Pipeline::Chain::Command, project: nil, dry_run?: false)
end
shared_examples "error function with no drop reason" do
it 'drops with out failure reason' do
shared_examples "error function with no failure_reason" do
it 'drops the pipeline without setting any failure_reason' do
expect(command).to receive(:increment_pipeline_failure_reason_counter)
call_error
@ -94,77 +93,82 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers, feature_category: :continuo
end
end
context 'when no drop_reason argument is passed' do
let(:call_error) { subject.error(message) }
context 'when no failure_reason argument is passed' do
let(:call_error) { helper.error(message) }
it_behaves_like "error function with no drop reason"
it_behaves_like "error function with no failure_reason"
end
context 'when drop_reason argument is passed as nil' do
let(:drop_reason) { nil }
let(:call_error) { subject.error(message, drop_reason: drop_reason) }
context 'when failure_reason argument is passed as nil' do
let(:failure_reason) { nil }
let(:call_error) { subject.error(message, failure_reason: failure_reason) }
it_behaves_like "error function with no drop reason"
it_behaves_like "error function with no failure_reason"
end
end
context 'when config error is false' do
context 'does not set the yaml error or override the drop reason' do
let(:drop_reason) { :size_limit_exceeded }
let(:config_error) { false }
let(:yaml_error) { nil }
context 'when failure_reason is present but is not `config_error`' do
let(:failure_reason) { :size_limit_exceeded }
let(:yaml_error) { nil }
it_behaves_like "error function"
specify do
helper.error(message, failure_reason: failure_reason)
expect(pipeline).to be_persisted
end
context 'when the failure_reason is not persistable' do
let(:failure_reason) { :filtered_by_rules }
let(:command) { instance_double(::Gitlab::Ci::Pipeline::Chain::Command, project: nil, dry_run?: false) }
specify do
expect(command).to receive(:increment_pipeline_failure_reason_counter)
helper.error(message, failure_reason: failure_reason)
expect(pipeline).to be_failed
expect(pipeline.failure_reason).to eq failure_reason.to_s
expect(pipeline).not_to be_persisted
end
end
context 'when save_incompleted is false' do
let(:command) do
instance_double(
::Gitlab::Ci::Pipeline::Chain::Command,
save_incompleted: false, project: nil, dry_run?: false)
end
before do
allow(command).to receive(:increment_pipeline_failure_reason_counter)
end
it_behaves_like "error function"
specify do
subject.error(message, config_error: config_error, drop_reason: drop_reason)
helper.error(message, failure_reason: failure_reason)
expect(pipeline).to be_persisted
expect(pipeline).not_to be_persisted
end
context 'with readonly pipeline and dry run enabled' do
let(:command) { double(project: nil, save_incompleted: true, dry_run?: true) }
let(:command) do
instance_double(
::Gitlab::Ci::Pipeline::Chain::Command,
save_incompleted: true, project: nil, dry_run?: true)
end
before do
pipeline.readonly!
end
specify do
subject.error(message, config_error: config_error, drop_reason: drop_reason)
helper.error(message, failure_reason: failure_reason)
expect(pipeline).to be_failed
expect(pipeline.failure_reason).to eq drop_reason.to_s
expect(pipeline).not_to be_persisted
end
end
context 'when the drop reason is not persistable' do
let(:drop_reason) { :filtered_by_rules }
let(:command) { double(project: nil, dry_run?: false) }
specify do
expect(command).to receive(:increment_pipeline_failure_reason_counter)
subject.error(message, config_error: config_error, drop_reason: drop_reason)
expect(pipeline).to be_failed
expect(pipeline.failure_reason).to eq drop_reason.to_s
expect(pipeline).not_to be_persisted
end
end
context 'when save_incompleted is false' do
let(:command) { double(save_incompleted: false, project: nil, dry_run?: false) }
before do
allow(command).to receive(:increment_pipeline_failure_reason_counter)
end
it_behaves_like "error function"
specify do
subject.error(message, config_error: config_error, drop_reason: drop_reason)
expect(pipeline.failure_reason).to eq failure_reason.to_s
expect(pipeline).not_to be_persisted
end
end


@ -40,7 +40,6 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
'pages_domains.project_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/442178,
'remote_mirrors.project_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/444643
'sprints.group_id',
'subscription_add_on_purchases.namespace_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/444338
*['todos.project_id', 'todos.group_id']
]
end


@ -6,7 +6,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_
let(:group) { create(:group, maintainers: importer_user) }
let(:project) { create(:project, :repository, group: group) }
let(:members_mapper) { double('members_mapper').as_null_object }
let(:admin) { create(:admin, :without_default_org) }
let(:admin) { create(:admin) }
let(:importer_user) { admin }
let(:excluded_keys) { [] }
let(:additional_relation_attributes) { {} }
