Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-06-13 09:17:17 +00:00
parent 7f4f5e9a14
commit de5a3e7e69
79 changed files with 939 additions and 443 deletions

View File

@ -2,6 +2,7 @@ include:
- local: .gitlab/ci/qa-common/main.gitlab-ci.yml
- local: .gitlab/ci/qa-common/rules.gitlab-ci.yml
- local: .gitlab/ci/qa-common/variables.gitlab-ci.yml
- local: .gitlab/ci/qa-common/omnibus.gitlab-ci.yml
workflow:
rules:

View File

@ -4,6 +4,7 @@ include:
- local: .gitlab/ci/qa-common/main.gitlab-ci.yml
- local: .gitlab/ci/qa-common/rules.gitlab-ci.yml
- local: .gitlab/ci/qa-common/variables.gitlab-ci.yml
- local: .gitlab/ci/qa-common/omnibus.gitlab-ci.yml
# ==========================================
# Prepare stage

View File

@ -0,0 +1,86 @@
trigger-omnibus-env:
stage: .pre
extends:
- .ruby-image
- .rules:trigger-omnibus
needs:
# We need this job because we need its `cached-assets-hash.txt` artifact, so that we can pass the assets image tag to the downstream omnibus-gitlab pipeline.
- pipeline: $PARENT_PIPELINE_ID
job: compile-production-assets
variables:
BUILD_ENV: build.env
before_script:
- source scripts/utils.sh
script:
# Note that OMNIBUS_GITLAB_CACHE_UPDATE is not used in the code, but it is actually used in the 2-hourly maintenance pipeline schedule.
- |
SECURITY_SOURCES=$([[ ! "$CI_PROJECT_NAMESPACE" =~ ^gitlab-org\/security ]] || echo "true")
echo "SECURITY_SOURCES=${SECURITY_SOURCES:-false}" > $BUILD_ENV
echo "OMNIBUS_GITLAB_CACHE_UPDATE=${OMNIBUS_GITLAB_CACHE_UPDATE:-false}" >> $BUILD_ENV
echo "OMNIBUS_GITLAB_CACHE_EDITION=${OMNIBUS_GITLAB_CACHE_EDITION}" >> $BUILD_ENV
for version_file in *_VERSION; do echo "$version_file=$(cat $version_file)" >> $BUILD_ENV; done
echo "OMNIBUS_GITLAB_BUILD_ON_ALL_OS=${OMNIBUS_GITLAB_BUILD_ON_ALL_OS:-false}" >> $BUILD_ENV
ruby -e 'puts "FULL_RUBY_VERSION=#{RUBY_VERSION}"' >> $BUILD_ENV
echo "SHORT_RUBY_VERSION=${RUBY_VERSION}" >> $BUILD_ENV
echo "GITLAB_ASSETS_TAG=$(assets_image_tag)" >> $BUILD_ENV
echo "EE=$([[ $FOSS_ONLY == '1' ]] && echo 'false' || echo 'true')" >> $BUILD_ENV
define_trigger_branch_in_build_env
- |
echo "Built environment file for omnibus build:"
cat $BUILD_ENV
artifacts:
expire_in: 3 days
reports:
dotenv: $BUILD_ENV
paths:
- $BUILD_ENV
trigger-omnibus-env as-if-foss:
extends:
- trigger-omnibus-env
- .rules:trigger-omnibus as-if-foss
needs:
- pipeline: $PARENT_PIPELINE_ID
job: compile-production-assets as-if-foss
trigger-omnibus:
stage: .pre
extends:
- .rules:trigger-omnibus
needs:
- trigger-omnibus-env
inherit:
variables: false
variables:
GITALY_SERVER_VERSION: $GITALY_SERVER_VERSION
GITLAB_ELASTICSEARCH_INDEXER_VERSION: $GITLAB_ELASTICSEARCH_INDEXER_VERSION
GITLAB_KAS_VERSION: $GITLAB_KAS_VERSION
GITLAB_PAGES_VERSION: $GITLAB_PAGES_VERSION
GITLAB_SHELL_VERSION: $GITLAB_SHELL_VERSION
GITLAB_WORKHORSE_VERSION: $GITLAB_WORKHORSE_VERSION
GITLAB_VERSION: $CI_COMMIT_SHA
GITLAB_ASSETS_TAG: $GITLAB_ASSETS_TAG
IMAGE_TAG: "${CI_COMMIT_SHA}-ruby${SHORT_RUBY_VERSION}"
TOP_UPSTREAM_SOURCE_PROJECT: $CI_PROJECT_PATH
SECURITY_SOURCES: $SECURITY_SOURCES
CACHE_UPDATE: $OMNIBUS_GITLAB_CACHE_UPDATE
CACHE_EDITION: $OMNIBUS_GITLAB_CACHE_EDITION
USE_SPECIFIED_RUBY_VERSION: "true"
RUBY_VERSION: $FULL_RUBY_VERSION
BUILD_ON_ALL_OS: $OMNIBUS_GITLAB_BUILD_ON_ALL_OS
SKIP_QA_TEST: "true"
ee: $EE
trigger:
project: gitlab-org/build/omnibus-gitlab-mirror
branch: $TRIGGER_BRANCH
strategy: depend
trigger-omnibus as-if-foss:
extends:
- trigger-omnibus
- .rules:trigger-omnibus as-if-foss
needs:
- trigger-omnibus-env as-if-foss
variables:
# Override gitlab repository so that omnibus doesn't use foss repository for CE build
GITLAB_ALTERNATIVE_REPO: $CI_PROJECT_URL

View File

@ -50,6 +50,10 @@ include:
.if-schedule-pipeline: &if-schedule-pipeline
if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $SCHEDULE_TYPE == "maintenance"'
.never-if-skip-omnibus-trigger: &never-if-skip-omnibus-trigger
if: '$SKIP_OMNIBUS_TRIGGER == "true"'
when: never
# Selective test execution against the omnibus instance has the following execution scenarios:
# * only e2e spec files changed - runs only changed specs
# * qa framework changes - runs full test suite
@ -67,6 +71,16 @@ include:
rules:
- when: always
.rules:trigger-omnibus:
rules:
- *never-if-skip-omnibus-trigger
- if: '$FOSS_ONLY != "1"'
.rules:trigger-omnibus as-if-foss:
rules:
- *never-if-skip-omnibus-trigger
- if: '$FOSS_ONLY == "1"'
# ------------------------------------------
# Test
# ------------------------------------------

View File

@ -36,6 +36,10 @@
- OS_VERSION
- REGISTRY_GROUP
- REGISTRY_HOST
- OMNIBUS_GITLAB_CACHE_UPDATE
- OMNIBUS_GITLAB_CACHE_EDITION
- OMNIBUS_GITLAB_BUILD_ON_ALL_OS
- FOSS_ONLY
trigger:
strategy: depend
forward:
@ -69,66 +73,14 @@ cache-qa-gems-e2e-runners:
tags:
- e2e
trigger-omnibus:
stage: qa
extends:
- .qa:rules:trigger-omnibus
needs:
- trigger-omnibus-env
- build-assets-image
inherit:
variables: false
variables:
GITALY_SERVER_VERSION: $GITALY_SERVER_VERSION
GITLAB_ELASTICSEARCH_INDEXER_VERSION: $GITLAB_ELASTICSEARCH_INDEXER_VERSION
GITLAB_KAS_VERSION: $GITLAB_KAS_VERSION
GITLAB_PAGES_VERSION: $GITLAB_PAGES_VERSION
GITLAB_SHELL_VERSION: $GITLAB_SHELL_VERSION
GITLAB_WORKHORSE_VERSION: $GITLAB_WORKHORSE_VERSION
GITLAB_VERSION: $CI_COMMIT_SHA
GITLAB_ASSETS_TAG: $GITLAB_ASSETS_TAG
IMAGE_TAG: "${CI_COMMIT_SHA}-ruby${SHORT_RUBY_VERSION}"
TOP_UPSTREAM_SOURCE_PROJECT: $CI_PROJECT_PATH
SECURITY_SOURCES: $SECURITY_SOURCES
CACHE_UPDATE: $OMNIBUS_GITLAB_CACHE_UPDATE
CACHE_EDITION: $OMNIBUS_GITLAB_CACHE_EDITION
USE_SPECIFIED_RUBY_VERSION: "true"
RUBY_VERSION: $FULL_RUBY_VERSION
BUILD_ON_ALL_OS: $OMNIBUS_GITLAB_BUILD_ON_ALL_OS
SKIP_QA_TEST: "true"
ee: $EE
trigger:
project: gitlab-org/build/omnibus-gitlab-mirror
branch: $TRIGGER_BRANCH
strategy: depend
follow-up:trigger-omnibus:
extends:
- trigger-omnibus
- .qa:rules:follow-up-e2e
needs:
- follow-up:trigger-omnibus-env
- follow-up:build-assets-image
trigger-omnibus as-if-foss:
extends:
- trigger-omnibus
- .qa:rules:trigger-omnibus-ce
needs:
- trigger-omnibus-env as-if-foss
- build-assets-image as-if-foss
variables:
# Override gitlab repository so that omnibus doesn't use foss repository for CE build
GITLAB_ALTERNATIVE_REPO: $CI_PROJECT_URL
# If a rename is required for this job, please notify the Delivery team (`@gitlab-org/delivery`)
e2e:package-and-test-ee:
extends:
- .e2e-trigger-base
- .qa:rules:package-and-test-ee
needs:
- build-assets-image
- build-qa-image
- trigger-omnibus
- e2e-test-pipeline-generate
variables:
RELEASE: "${REGISTRY_HOST}/${REGISTRY_GROUP}/build/omnibus-gitlab-mirror/gitlab-ee:${RELEASE_IMAGE_TAG}"
@ -142,7 +94,7 @@ follow-up:e2e:package-and-test-ee:
- .qa:rules:follow-up-e2e
needs:
- follow-up:build-qa-image
- follow-up:trigger-omnibus
- follow-up:build-assets-image
- manual:e2e-test-pipeline-generate
e2e:post-run-e2e-message:
@ -160,7 +112,6 @@ e2e:post-run-e2e-message:
- scripts/generate-message-to-run-e2e-pipeline.rb
needs:
- e2e-test-pipeline-generate
- trigger-omnibus-env
- build-assets-image
artifacts:
expire_in: 1 day
@ -172,8 +123,8 @@ e2e:package-and-test-ce:
- e2e:package-and-test-ee
- .qa:rules:package-and-test-ce
needs:
- build-assets-image as-if-foss
- build-qa-image as-if-foss
- trigger-omnibus as-if-foss
- e2e-test-pipeline-generate
variables:
FOSS_ONLY: "1"
@ -188,8 +139,8 @@ e2e:package-and-test-nightly:
- .e2e-trigger-base
- .qa:rules:package-and-test-nightly
needs:
- build-assets-image
- build-qa-image
- trigger-omnibus
- e2e-test-pipeline-generate
variables:
GITLAB_SEMVER_VERSION: $GITLAB_SEMVER_VERSION

View File

@ -679,6 +679,9 @@
- "tooling/custom_roles/docs/templates/custom_abilities.md.erb"
- "ee/{lib/,spec/}tasks/gitlab/custom_roles/*"
.cng-orchestrator-patterns: &cng-orchestrator-patterns
- gems/gitlab-cng/**/*.rb
##################
# Conditions set #
##################
@ -1247,6 +1250,8 @@
changes: *dependency-patterns
- <<: *if-default-refs
changes: *code-qa-patterns
- <<: *if-merge-request
changes: *cng-orchestrator-patterns
.frontend:rules:compile-production-assets:
rules:
@ -1586,40 +1591,6 @@
- <<: *if-force-ci
allow_failure: true
# All rules copied from qa:rules:package-and-test-ee but jobs are automatic and not allowed to fail
.qa:rules:trigger-omnibus-env:
rules:
# From .qa:rules:package-and-test-common
- !reference [".qa:rules:package-and-test-never-run", rules]
- <<: *if-ruby-branch
- <<: *if-merge-request-labels-run-all-e2e
- <<: *if-merge-request
changes: *qa-patterns
- <<: *if-merge-request-targeting-stable-branch
changes: *setup-test-env-patterns
- <<: *if-merge-request-not-approved
when: never
- <<: *if-merge-request
changes: *dependency-patterns
variables:
OMNIBUS_GITLAB_BUILD_ON_ALL_OS: 'true'
- <<: *if-merge-request
changes: *feature-flag-development-config-patterns
- <<: *if-merge-request
changes: *initializers-patterns
- <<: *if-merge-request
changes: *nodejs-patterns
- <<: *if-merge-request
changes: *ci-qa-patterns
- <<: *if-security-merge-request
changes: *code-patterns
- <<: *if-merge-request-and-specific-devops-stage
changes: *code-patterns
- <<: *if-force-ci
# From .qa:rules:package-and-test-schedule
- <<: *if-dot-com-gitlab-org-schedule
- !reference [".qa:rules:code-merge-request", rules]
.qa:rules:package-and-test-never-run:
rules:
- <<: *if-not-canonical-namespace
@ -1633,16 +1604,12 @@
rules:
- !reference [".qa:rules:package-and-test-never-run", rules]
- <<: *if-ruby-branch
allow_failure: true
- <<: *if-merge-request-labels-run-all-e2e
allow_failure: true
# Run tests automatically for MRs that touch QA files
- <<: *if-merge-request
changes: *qa-patterns
allow_failure: true
- <<: *if-merge-request-targeting-stable-branch
changes: *setup-test-env-patterns
allow_failure: true
# Otherwise, only run tests after the MR is approved
- <<: *if-merge-request-not-approved
when: never
@ -1652,22 +1619,16 @@
OMNIBUS_GITLAB_BUILD_ON_ALL_OS: 'true'
- <<: *if-merge-request
changes: *feature-flag-development-config-patterns
allow_failure: true
- <<: *if-merge-request
changes: *initializers-patterns
allow_failure: true
- <<: *if-merge-request
changes: *nodejs-patterns
allow_failure: true
- <<: *if-merge-request
changes: *ci-qa-patterns
allow_failure: true
- <<: *if-security-merge-request
changes: *code-patterns
allow_failure: true
- <<: *if-merge-request-and-specific-devops-stage
changes: *code-patterns
allow_failure: true
- <<: *if-force-ci
when: manual
allow_failure: true
@ -1698,9 +1659,6 @@
- <<: *if-merge-request-targeting-stable-branch
changes: *setup-test-env-patterns
- <<: *if-ruby-branch
# Certain components trigger a rebuild of the e2e GDK image so we want to test it too
- <<: *if-merge-request
changes: *gdk-component-patterns
- <<: *if-merge-request
changes: *code-patterns
variables:
@ -1714,11 +1672,6 @@
QA_SAVE_TEST_METRICS: "true"
QA_EXPORT_TEST_METRICS: "false" # on main runs, metrics are exported to separate bucket via rake task for better consistency
.qa:rules:package-and-test-schedule:
rules:
- <<: [*if-dot-com-gitlab-org-schedule, *qa-e2e-test-schedule-variables]
allow_failure: true
.qa:rules:e2e-schedule-blocking:
rules:
- <<: [*if-dot-com-gitlab-org-schedule, *qa-e2e-test-schedule-variables]
@ -1747,13 +1700,11 @@
- <<: *if-merge-request
changes: *code-patterns
# Note: If any changes are made to this rule, the following should also be updated:
# 1) .qa:rules:trigger-omnibus-env
# 2) .qa:rules:post-run-e2e-message
# Note: If any changes are made to this rule, the following should also be updated - .qa:rules:post-run-e2e-message
.qa:rules:package-and-test-ee:
rules:
- !reference [".qa:rules:package-and-test-common", rules]
- !reference [".qa:rules:package-and-test-schedule", rules]
- !reference [".qa:rules:e2e-schedule-blocking", rules]
- !reference [".qa:rules:code-merge-request-manual", rules]
.qa:rules:package-and-test-ce:
@ -1784,17 +1735,17 @@
- <<: *if-default-branch-schedule-nightly # already executed in the 2-hourly schedule
when: never
- !reference [".qa:rules:e2e-blocking", rules]
# Certain components trigger a rebuild of the e2e GDK image so we want to test it too
- <<: *if-merge-request
changes: *gdk-component-patterns
- !reference [".qa:rules:e2e-schedule-blocking", rules]
.qa:rules:e2e:test-on-cng:
rules:
- !reference [".qa:rules:package-and-test-never-run", rules]
- <<: [*if-dot-com-gitlab-org-schedule, *qa-e2e-test-schedule-variables]
- !reference [".qa:rules:e2e-blocking", rules]
- !reference [".qa:rules:e2e-schedule-blocking", rules]
- <<: *if-merge-request
changes: *qa-patterns
- <<: *if-merge-request
changes:
- gems/gitlab-cng/**/*.rb
changes: *cng-orchestrator-patterns
.qa:rules:package-and-test-nightly:
rules:
@ -1808,16 +1759,6 @@
QA_SAVE_TEST_METRICS: "true"
QA_EXPORT_TEST_METRICS: "false"
.qa:rules:trigger-omnibus:
rules:
- !reference [".qa:rules:package-and-test-nightly", rules]
- !reference [".qa:rules:package-and-test-ee", rules]
.qa:rules:trigger-omnibus-ce:
rules:
- !reference [".qa:rules:package-and-test-nightly", rules]
- !reference [".qa:rules:package-and-test-ce", rules]
# These provide a manual way to trigger follow-up:e2e:package-and-test-ee
# It is fine if they're overlapping with the automatic ones.
# It'll not hurt and it can simplify the rules, decoupling them.

View File

@ -214,59 +214,9 @@ e2e-test-pipeline-generate:
# This is the manual start of the chain of E2E jobs
# After playing this, it'll follow up with:
# follow-up:compile-production-assets -> follow-up:build-assets-image --\
# \-> follow-up:trigger-omnibus-env -> follow-up:trigger-omnibus -> follow-up:e2e:package-and-test-ee
# \-> follow-up:e2e:package-and-test-ee
# follow-up:build-qa-image --------------------------------------------------------------------------/
manual:e2e-test-pipeline-generate:
extends:
- e2e-test-pipeline-generate
- .qa:rules:manual-e2e
trigger-omnibus-env:
stage: prepare
extends:
- .qa:rules:trigger-omnibus-env
needs:
# We need this job because we need its `cached-assets-hash.txt` artifact, so that we can pass the assets image tag to the downstream omnibus-gitlab pipeline.
- compile-production-assets
variables:
BUILD_ENV: build.env
before_script:
- source scripts/utils.sh
script:
# Note that OMNIBUS_GITLAB_CACHE_UPDATE is not used in the code, but it is actually used in the 2-hourly maintenance pipeline schedule.
- |
SECURITY_SOURCES=$([[ ! "$CI_PROJECT_NAMESPACE" =~ ^gitlab-org\/security ]] || echo "true")
echo "SECURITY_SOURCES=${SECURITY_SOURCES:-false}" > $BUILD_ENV
echo "OMNIBUS_GITLAB_CACHE_UPDATE=${OMNIBUS_GITLAB_CACHE_UPDATE:-false}" >> $BUILD_ENV
echo "OMNIBUS_GITLAB_CACHE_EDITION=${OMNIBUS_GITLAB_CACHE_EDITION}" >> $BUILD_ENV
for version_file in *_VERSION; do echo "$version_file=$(cat $version_file)" >> $BUILD_ENV; done
echo "OMNIBUS_GITLAB_BUILD_ON_ALL_OS=${OMNIBUS_GITLAB_BUILD_ON_ALL_OS:-false}" >> $BUILD_ENV
ruby -e 'puts "FULL_RUBY_VERSION=#{RUBY_VERSION}"' >> $BUILD_ENV
echo "SHORT_RUBY_VERSION=${RUBY_VERSION}" >> $BUILD_ENV
echo "GITLAB_ASSETS_TAG=$(assets_image_tag)" >> $BUILD_ENV
echo "EE=$([[ $FOSS_ONLY == '1' ]] && echo 'false' || echo 'true')" >> $BUILD_ENV
define_trigger_branch_in_build_env
- |
echo "Built environment file for omnibus build:"
cat $BUILD_ENV
artifacts:
expire_in: 3 days
reports:
dotenv: $BUILD_ENV
paths:
- $BUILD_ENV
trigger-omnibus-env as-if-foss:
extends:
- trigger-omnibus-env
- .qa:rules:package-and-test-ce
needs:
- compile-production-assets as-if-foss
variables:
FOSS_ONLY: "1"
follow-up:trigger-omnibus-env:
extends:
- trigger-omnibus-env
- .qa:rules:follow-up-e2e
needs: ["follow-up:compile-production-assets"]

View File

@ -91,23 +91,8 @@ Style/EmptyMethod:
- 'lib/gitlab/background_migration/migrate_job_artifact_registry_to_ssf.rb'
- 'lib/gitlab/background_migration/migrate_requirements_to_work_items.rb'
- 'lib/gitlab/background_migration/migrate_shared_vulnerability_scanners.rb'
- 'lib/gitlab/ci/config/entry/need.rb'
- 'lib/gitlab/ci/config/entry/rules/rule.rb'
- 'lib/gitlab/ci/limit.rb'
- 'lib/gitlab/ci/pipeline/chain/validate/after_config.rb'
- 'lib/gitlab/config/entry/node.rb'
- 'lib/gitlab/config/entry/simplifiable.rb'
- 'lib/gitlab/empty_search_results.rb'
- 'lib/gitlab/git_access.rb'
- 'lib/gitlab/import_export/json/ndjson_writer.rb'
- 'lib/gitlab/mailgun/webhook_processors/base.rb'
- 'lib/gitlab/usage_data_non_sql_metrics.rb'
- 'lib/mattermost/session.rb'
- 'qa/qa/resource/clusters/agent.rb'
- 'qa/qa/resource/clusters/agent_token.rb'
- 'qa/qa/resource/job.rb'
- 'qa/qa/resource/package.rb'
- 'qa/qa/resource/registry_repository.rb'
- 'qa/qa/service/cluster_provider/k3d.rb'
- 'qa/qa/service/cluster_provider/k3s.rb'
- 'qa/qa/service/cluster_provider/minikube.rb'

View File

@ -167,6 +167,7 @@ module Ci
validates :status, presence: { unless: :importing? }
validate :valid_commit_sha, unless: :importing?
validates :source, exclusion: { in: %w[unknown], unless: :importing? }, on: :create
validates :project, presence: true, on: :create
after_create :keep_around_commits, unless: :importing?
after_commit :track_ci_pipeline_created_event, on: :create, if: :internal_pipeline?

View File

@ -12,7 +12,7 @@
help_page_path('administration/settings/jira_cloud_app',
aria: { label: _('GitLab for Jira Cloud') },
class: 'has-tooltip',
anchor: 'connect-the-gitlab-for-jira-cloud-app'),
anchor: 'install-the-gitlab-for-jira-cloud-app-manually'),
title: _('More information')
.settings-content

View File

@ -1,9 +0,0 @@
---
name: project_ref_name_in_pipeline
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/441771
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/148305
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/454773
milestone: '17.0'
group: group::pipeline authoring
type: gitlab_com_derisk
default_enabled: false

View File

@ -0,0 +1,9 @@
---
migration_job_name: BackfillOrDropCiPipelineOnProjectId
description: Backfill ci_pipelines's project_id if possible, otherwise drop records that don't have a project_id
feature_category: continuous_integration
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/155137
milestone: '17.1'
queued_migration_version: 20240531202753
finalize_after: '2024-06-30'
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,9 @@
---
migration_job_name: BackfillSbomOccurrencesVulnerabilitiesProjectId
description: Backfills sharding key `sbom_occurrences_vulnerabilities.project_id` from `sbom_occurrences`.
feature_category: dependency_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156129
milestone: '17.1'
queued_migration_version: 20240612075305
finalize_after: '2024-07-22'
finalized_by: # version of the migration that finalized this BBM

View File

@ -1,8 +1,9 @@
---
table_name: sbom_occurrences_vulnerabilities
classes:
- Sbom::OccurrencesVulnerability
- Sbom::OccurrencesVulnerability
feature_categories:
- dependency_management
- dependency_management
milestone: '16.7'
gitlab_schema: gitlab_main_cell
desired_sharding_key:
@ -14,3 +15,4 @@ desired_sharding_key:
table: sbom_occurrences
sharding_key: project_id
belongs_to: occurrence
desired_sharding_key_migration_job_name: BackfillSbomOccurrencesVulnerabilitiesProjectId

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddProjectIdToSbomOccurrencesVulnerabilities < Gitlab::Database::Migration[2.2]
milestone '17.1'
def change
add_column :sbom_occurrences_vulnerabilities, :project_id, :bigint
end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
class QueueBackfillOrDropCiPipelineOnProjectId < Gitlab::Database::Migration[2.2]
milestone '17.1'
restrict_gitlab_migration gitlab_schema: :gitlab_ci
MIGRATION = "BackfillOrDropCiPipelineOnProjectId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:ci_pipelines,
:id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(MIGRATION, :ci_pipelines, :id, [])
end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
class EnsureBackfillMergeRequestsHeadPipelineIdIsCompleted < Gitlab::Database::Migration[2.2]
restrict_gitlab_migration gitlab_schema: :gitlab_main
disable_ddl_transaction!
milestone '17.1'
TABLE_NAME = :merge_requests
def up
ensure_batched_background_migration_is_finished(
job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
table_name: TABLE_NAME,
column_name: 'id',
job_arguments: [['head_pipeline_id'], ['head_pipeline_id_convert_to_bigint']]
)
end
def down
# no-op
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class PrepareAsyncIndexesForMergeRequestsHeadPipelines < Gitlab::Database::Migration[2.2]
milestone '17.1'
TABLE_NAME = :merge_requests
INDEX_NAME = :index_merge_requests_on_head_pipeline_id_bigint
COLUMN_NAME = :head_pipeline_id_convert_to_bigint
def up
prepare_async_index TABLE_NAME, COLUMN_NAME, name: INDEX_NAME
end
def down
unprepare_async_index TABLE_NAME, COLUMN_NAME, name: INDEX_NAME
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class IndexSbomOccurrencesVulnerabilitiesOnProjectId < Gitlab::Database::Migration[2.2]
milestone '17.1'
disable_ddl_transaction!
INDEX_NAME = 'index_sbom_occurrences_vulnerabilities_on_project_id'
def up
add_concurrent_index :sbom_occurrences_vulnerabilities, :project_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :sbom_occurrences_vulnerabilities, INDEX_NAME
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddSbomOccurrencesVulnerabilitiesProjectIdFk < Gitlab::Database::Migration[2.2]
milestone '17.1'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :sbom_occurrences_vulnerabilities, :projects, column: :project_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :sbom_occurrences_vulnerabilities, column: :project_id
end
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class AddSbomOccurrencesVulnerabilitiesProjectIdTrigger < Gitlab::Database::Migration[2.2]
milestone '17.1'
def up
install_sharding_key_assignment_trigger(
table: :sbom_occurrences_vulnerabilities,
sharding_key: :project_id,
parent_table: :sbom_occurrences,
parent_sharding_key: :project_id,
foreign_key: :sbom_occurrence_id
)
end
def down
remove_sharding_key_assignment_trigger(
table: :sbom_occurrences_vulnerabilities,
sharding_key: :project_id,
parent_table: :sbom_occurrences,
parent_sharding_key: :project_id,
foreign_key: :sbom_occurrence_id
)
end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
class QueueBackfillSbomOccurrencesVulnerabilitiesProjectId < Gitlab::Database::Migration[2.2]
milestone '17.1'
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
MIGRATION = "BackfillSbomOccurrencesVulnerabilitiesProjectId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:sbom_occurrences_vulnerabilities,
:id,
:project_id,
:sbom_occurrences,
:project_id,
:sbom_occurrence_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(
MIGRATION,
:sbom_occurrences_vulnerabilities,
:id,
[
:project_id,
:sbom_occurrences,
:project_id,
:sbom_occurrence_id
]
)
end
end

View File

@ -0,0 +1 @@
f60c14d3feb4b141ce039729cd6ee3177f46e5257725a2b4659354871534edf0

View File

@ -0,0 +1 @@
89e746f56cf1c4563675b80d0116c4984b44fd82fd08d2796c0fc7b0789e67e1

View File

@ -0,0 +1 @@
46b031a01c2135d9e8a502e9cf0ea63392e204f22751c27f70c766364e60b9ac

View File

@ -0,0 +1 @@
d105433c4f4bd51b9547ed12361e37c4c4a4ab465c6bf28d2d4d7c4e08053d1a

View File

@ -0,0 +1 @@
852f9a9585a219d2506949b14c716eb09b5842d3fd4bb10d3e19c8f597a757d6

View File

@ -0,0 +1 @@
2f659f2ce23c9328916af67faefbae461034da9b1958c00fb227ca14d1a4c539

View File

@ -0,0 +1 @@
ae98d890608b4395a89098eaacabeb8634d3b3a0f0dacbf0b1a9cc68799e266a

View File

@ -0,0 +1 @@
da5407921c8d524c975c517519212f8529e45cfee8fff2520355bc03071e7e62

View File

@ -1025,6 +1025,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_4ad9a52a6614() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."project_id" IS NULL THEN
SELECT "project_id"
INTO NEW."project_id"
FROM "sbom_occurrences"
WHERE "sbom_occurrences"."id" = NEW."sbom_occurrence_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_56d49f4ed623() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -16532,7 +16548,8 @@ CREATE TABLE sbom_occurrences_vulnerabilities (
sbom_occurrence_id bigint NOT NULL,
vulnerability_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL
updated_at timestamp with time zone NOT NULL,
project_id bigint
);
CREATE SEQUENCE sbom_occurrences_vulnerabilities_id_seq
@ -28296,6 +28313,8 @@ CREATE INDEX index_sbom_occurrences_on_source_id ON sbom_occurrences USING btree
CREATE UNIQUE INDEX index_sbom_occurrences_on_uuid ON sbom_occurrences USING btree (uuid);
CREATE INDEX index_sbom_occurrences_vulnerabilities_on_project_id ON sbom_occurrences_vulnerabilities USING btree (project_id);
CREATE INDEX index_sbom_occurrences_vulnerabilities_on_vulnerability_id ON sbom_occurrences_vulnerabilities USING btree (vulnerability_id);
CREATE INDEX index_sbom_source_packages_on_source_package_id_and_id ON sbom_occurrences USING btree (source_package_id, id);
@ -30970,6 +30989,8 @@ CREATE TRIGGER trigger_43484cb41aca BEFORE INSERT OR UPDATE ON wiki_repository_s
CREATE TRIGGER trigger_44558add1625 BEFORE INSERT OR UPDATE ON merge_request_assignees FOR EACH ROW EXECUTE FUNCTION trigger_44558add1625();
CREATE TRIGGER trigger_4ad9a52a6614 BEFORE INSERT OR UPDATE ON sbom_occurrences_vulnerabilities FOR EACH ROW EXECUTE FUNCTION trigger_4ad9a52a6614();
CREATE TRIGGER trigger_56d49f4ed623 BEFORE INSERT OR UPDATE ON workspace_variables FOR EACH ROW EXECUTE FUNCTION trigger_56d49f4ed623();
CREATE TRIGGER trigger_57ad2742ac16 BEFORE INSERT OR UPDATE ON user_achievements FOR EACH ROW EXECUTE FUNCTION trigger_57ad2742ac16();
@ -31105,6 +31126,9 @@ ALTER TABLE ONLY service_desk_settings
ALTER TABLE ONLY design_management_designs_versions
ADD CONSTRAINT fk_03c671965c FOREIGN KEY (design_id) REFERENCES design_management_designs(id) ON DELETE CASCADE;
ALTER TABLE ONLY sbom_occurrences_vulnerabilities
ADD CONSTRAINT fk_058f258503 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY analytics_dashboards_pointers
ADD CONSTRAINT fk_05d96922bd FOREIGN KEY (target_project_id) REFERENCES projects(id) ON DELETE CASCADE;

View File

@ -422,3 +422,123 @@ PROJECT_ID
The artifact usage value can fluctuate to `0` while the script is running. After
recalculation, usage should display as expected again.
## Artifact download flow diagrams
The following flow diagrams illustrate how job artifacts work. These
diagrams assume object storage is configured for job artifacts.
### Proxy download disabled
With [`proxy_download` set to `false`](object_storage.md), GitLab
redirects the runner to download artifacts from object storage with a
pre-signed URL. It is usually faster for runners to fetch from the
source directly so this configuration is generally recommended. It
should also reduce bandwidth usage because the data does not have to be
fetched by GitLab and sent to the runner. However, it does require
giving runners direct access to object storage.
The request flow looks like:
```mermaid
sequenceDiagram
autonumber
participant C as Runner
participant O as Object Storage
participant W as Workhorse
participant R as Rails
participant P as PostgreSQL
C->>+W: GET /api/v4/jobs/:id/artifacts?direct_download=true
Note over C,W: gitlab-ci-token@<CI_JOB_TOKEN>
W-->+R: GET /api/v4/jobs/:id/artifacts?direct_download=true
Note over W,R: gitlab-ci-token@<CI_JOB_TOKEN>
R->>P: Look up job for CI_JOB_TOKEN
R->>P: Find user who triggered job
R->>R: Does user have :read_build access?
alt Yes
R->>W: Send 302 redirect to object storage presigned URL
R->>C: 302 redirect
C->>O: GET <presigned URL>
else No
R->>W: 401 Unauthorized
W->>C: 401 Unauthorized
end
```
In this diagram:
1. First, the runner attempts to fetch a job artifact by using the
`GET /api/v4/jobs/:id/artifacts` endpoint. The runner attaches the
`direct_download=true` query parameter on the first attempt to indicate
that it is capable of downloading from object storage directly. Direct
downloads can be disabled in the runner configuration via the
[`FF_USE_DIRECT_DOWNLOAD` feature flag](https://docs.gitlab.com/runner/configuration/feature-flags.html).
This flag is set to `true` by default.
1. The runner sends the GET request using HTTP Basic Authentication
with the `gitlab-ci-token` username and an auto-generated
CI/CD job token as the password. This token is generated by GitLab and
given to the runner at the start of a job.
1. The GET request gets passed to the GitLab API, which looks
up the token in the database and finds the user who triggered the job.
1. In steps 5-8:
- If the user has access to the build, then GitLab generates
a presigned URL and sends a 302 Redirect with the `Location` set to that
URL. The runner follows the 302 Redirect and downloads the artifacts.
- If the job cannot be found or the user does not have access to the job,
then the API returns 401 Unauthorized.
The runner does not retry if it receives the following HTTP status codes:
- 200 OK
- 401 Unauthorized
- 403 Forbidden
- 404 Not Found
However, if the runner receives any other status code, such as a 500 error,
it re-attempts to download the artifacts two more times, sleeping 1 second
between each attempt. The subsequent attempts omit `direct_download=true`.
### Proxy download enabled
If `proxy_download` is `true`, GitLab always fetches the
artifacts from object storage and sends the data to the runner, even if
the runner sends the `direct_download=true` query parameter. Proxy
downloads might be desirable if runners have restricted network access.
The following diagram is similar to the disabled proxy download example,
except at steps 6-9, GitLab does not send a 302 Redirect to the
runner. Instead, GitLab instructs Workhorse to fetch the data and stream
it back to the runner. From the runner's perspective, the original GET
request to `/api/v4/jobs/:id/artifacts` returns the binary data
directly.
```mermaid
sequenceDiagram
autonumber
participant C as Runner
participant O as Object Storage
participant W as Workhorse
participant R as Rails
participant P as PostgreSQL
C->>+W: GET /api/v4/jobs/:id/artifacts?direct_download=true
Note over C,W: gitlab-ci-token@<CI_JOB_TOKEN>
W-->+R: GET /api/v4/jobs/:id/artifacts?direct_download=true
Note over W,R: gitlab-ci-token@<CI_JOB_TOKEN>
R->>P: Look up job for CI_JOB_TOKEN
R->>P: Find user who triggered job
R->>R: Does user have :read_build access?
alt Yes
R->>W: SendURL with object storage presigned URL
W->>O: GET <presigned URL>
O->>W: <artifacts data>
W->>C: <artifacts data>
else No
R->>W: 401 Unauthorized
W->>C: 401 Unauthorized
end
```

View File

@ -1887,3 +1887,22 @@ if prj.has_container_registry_tags?
prj.container_repositories.each { |p| p.destroy }
end
```
### Registry service listens on IPv6 address instead of IPv4
You might see the following error if the `localhost` hostname resolves to an IPv6
loopback address (`::1`) on your GitLab server and GitLab expects the registry service
to be available on the IPv4 loopback address (`127.0.0.1`):
```plaintext
request: "GET /v2/ HTTP/1.1", upstream: "http://[::1]:5000/v2/", host: "registry.example.com:5005"
[error] 1201#0: *13442797 connect() failed (111: Connection refused) while connecting to upstream, client: x.x.x.x, server: registry.example.com, request: "GET /v2/<path> HTTP/1.1", upstream: "http://[::1]:5000/v2/<path>", host: "registry.example.com:5005"
```
To fix the error, change `registry['registry_http_addr']` to an IPv4 address in `/etc/gitlab/gitlab.rb`. For example:
```ruby
registry['registry_http_addr'] = "127.0.0.1:5000"
```
See [issue 5449](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5449) for more details.

View File

@ -15,11 +15,9 @@ This page contains administrator documentation for the GitLab for Jira Cloud app
With the [GitLab for Jira Cloud](https://marketplace.atlassian.com/apps/1221011/gitlab-com-for-jira-cloud?tab=overview&hosting=cloud) app, you can connect GitLab and Jira Cloud to sync development information in real time. You can view this information in the [Jira development panel](../../integration/jira/development_panel.md).
You can use the GitLab for Jira Cloud app to link top-level groups or subgroups. It's not possible to directly link projects or personal namespaces.
To set up the GitLab for Jira Cloud app on your self-managed instance, do one of the following:
- [Connect the GitLab for Jira Cloud app](#connect-the-gitlab-for-jira-cloud-app) (GitLab 15.7 and later).
- [Install the GitLab for Jira Cloud app from the Atlassian Marketplace](#install-the-gitlab-for-jira-cloud-app-from-the-atlassian-marketplace) (GitLab 15.7 and later).
- [Install the GitLab for Jira Cloud app manually](#install-the-gitlab-for-jira-cloud-app-manually).
After you set up the app, you can use the [project toolchain](https://support.atlassian.com/jira-software-cloud/docs/what-is-the-connections-feature/)
@ -30,10 +28,7 @@ For Jira Data Center or Jira Server, use the [Jira DVCS connector](../../integra
## Set up OAuth authentication
You must set up OAuth authentication to:
- [Connect the GitLab for Jira Cloud app](#connect-the-gitlab-for-jira-cloud-app).
- [Install the GitLab for Jira Cloud app manually](#install-the-gitlab-for-jira-cloud-app-manually).
Whether you want to install the GitLab for Jira Cloud app [from the Atlassian Marketplace](#install-the-gitlab-for-jira-cloud-app-from-the-atlassian-marketplace) or [manually](#install-the-gitlab-for-jira-cloud-app-manually), you must create an OAuth application.
To create an OAuth application on your self-managed instance:
@ -41,7 +36,7 @@ To create an OAuth application on your self-managed instance:
1. Select **Applications**.
1. Select **New application**.
1. In **Redirect URI**:
- If you're installing the app from the official marketplace listing, enter `https://gitlab.com/-/jira_connect/oauth_callbacks`.
- If you're installing the app from the Atlassian Marketplace listing, enter `https://gitlab.com/-/jira_connect/oauth_callbacks`.
- If you're installing the app manually, enter `<instance_url>/-/jira_connect/oauth_callbacks` and replace `<instance_url>` with the URL of your instance.
1. Clear the **Trusted** and **Confidential** checkboxes.
@ -77,23 +72,23 @@ If necessary:
1. If you customized your global permissions in Jira, you might also need to grant the
[`Browse users and groups` permission](https://confluence.atlassian.com/jirakb/unable-to-browse-for-users-and-groups-120521888.html) to the Jira user.
## Connect the GitLab for Jira Cloud app
## Install the GitLab for Jira Cloud app from the Atlassian Marketplace
> - Introduced in GitLab 15.7.
You can link your self-managed instance after you install the GitLab for Jira Cloud app from the marketplace.
Jira apps can only link to one URL per marketplace listing. The official listing links to GitLab.com.
You can use the official GitLab for Jira Cloud app from the Atlassian Marketplace with your self-managed instance.
With this method:
- GitLab.com serves as a proxy for Jira traffic from your instance.
- GitLab.com [handles the install and uninstall lifecycle events](#gitlabcom-handling-of-app-lifecycle-events) sent from Jira Cloud and forwards them to your GitLab instance. All data from your self-managed instance is still sent directly to Jira Cloud.
- It's not possible to create branches from Jira Cloud.
For more information, see [issue 391432](https://gitlab.com/gitlab-org/gitlab/-/issues/391432).
[Install the GitLab for Jira Cloud app manually](#install-the-gitlab-for-jira-cloud-app-manually) if:
Alternatively, you might want to [install the GitLab for Jira Cloud app manually](#install-the-gitlab-for-jira-cloud-app-manually) if:
- Your instance does not meet the [prerequisites](#prerequisites).
- You do not want to use the official marketplace listing.
- You do not want to use the official Atlassian Marketplace listing.
- You do not want GitLab.com to [handle the app lifecycle events](#gitlabcom-handling-of-app-lifecycle-events) or to know that your instance has installed the app.
- You want to create branches from Jira Cloud.
### Prerequisites
@ -157,7 +152,7 @@ to check if Jira Cloud is linked to:
## Install the GitLab for Jira Cloud app manually
If you do not want to [use the official marketplace listing](#connect-the-gitlab-for-jira-cloud-app),
If you do not want to [use the official Atlassian Marketplace listing](#install-the-gitlab-for-jira-cloud-app-from-the-atlassian-marketplace),
install the GitLab for Jira Cloud app manually.
You must install each Jira Cloud app from a single location. Jira fetches a
@ -167,7 +162,7 @@ from the location you provide. The manifest file describes the app to the system
To support your self-managed instance with Jira Cloud, do one of the following:
- [Install the app in development mode](#install-the-app-in-development-mode).
- [Create a marketplace listing](#create-a-marketplace-listing).
- [Create an Atlassian Marketplace listing](#create-an-atlassian-marketplace-listing).
### Prerequisites
@ -184,7 +179,7 @@ To support your self-managed instance with Jira Cloud, do one of the following:
[Prerequisites](#prerequisites-1)
To configure your Jira instance so you can install apps from outside the marketplace:
To configure your Jira instance so you can install apps from outside the Atlassian Marketplace:
1. Sign in to your Jira instance as an administrator.
1. [Enable development mode](https://developer.atlassian.com/cloud/jira/platform/getting-started-with-connect/#step-3--enable-development-mode-in-your-site)
@ -209,14 +204,14 @@ You can also select **Get started** to [configure the GitLab for Jira Cloud app]
If a GitLab upgrade makes changes to the app descriptor, you must reinstall the app.
### Create a marketplace listing
### Create an Atlassian Marketplace listing
[Prerequisites](#prerequisites-1)
If you do not want to [use development mode](#install-the-app-in-development-mode), you can create your own marketplace listing.
If you do not want to [use development mode](#install-the-app-in-development-mode), you can create your own Atlassian Marketplace listing.
This way, you can install the GitLab for Jira Cloud app from the Atlassian Marketplace.
To create a marketplace listing:
To create an Atlassian Marketplace listing:
1. Register as an Atlassian Marketplace vendor.
1. List your application with the application descriptor URL.
@ -225,10 +220,10 @@ To create a marketplace listing:
applications can be viewed and installed by any user.
1. Generate test license tokens for your application.
Like the GitLab.com marketplace listing, this method uses
Like the GitLab.com Marketplace listing, this method uses
[automatic updates](../../integration/jira/connect-app.md#update-the-gitlab-for-jira-cloud-app).
For more information about creating a marketplace listing, see the
For more information about creating an Atlassian Marketplace listing, see the
[Atlassian documentation](https://developer.atlassian.com/platform/marketplace/listing-connect-apps/#create-your-marketplace-listing).
## Configure your GitLab instance to serve as a proxy
@ -253,17 +248,42 @@ Other GitLab instances that use the proxy must configure the following settings
## Security considerations
The GitLab for Jira Cloud app connects GitLab and Jira. Data must be shared between the two applications, and access must be granted in both directions.
The following security considerations are specific to administering the app.
For considerations related to using the app, see
[security considerations](../../integration/jira/connect-app.md#security-considerations).
### Using GitLab.com as a proxy
### GitLab.com handling of app lifecycle events
When you use [GitLab.com as a proxy](#configure-your-gitlab-instance-to-serve-as-a-proxy),
the Jira access token is shared with GitLab.com.
When you [Install the GitLab for Jira Cloud app from the Atlassian Marketplace](#install-the-gitlab-for-jira-cloud-app-from-the-atlassian-marketplace),
GitLab.com receives [lifecycle events](https://developer.atlassian.com/cloud/jira/platform/connect-app-descriptor/#lifecycle) from Jira.
These events are limited to when the app is installed in or uninstalled from your Jira Project.
The Jira access token is stored on GitLab.com because the token must be used to verify
incoming requests from Jira before the requests are sent to your self-managed instance.
The token is encrypted and is not used to access data in Jira.
Any data from your self-managed instance is sent directly to Jira.
In the install event, GitLab.com receives a **secret token** from Jira.
GitLab.com stores this token encrypted with `AES256-GCM` to later verify incoming lifecycle events from Jira.
GitLab.com then forwards the token to your self-managed instance so your instance can authenticate its [requests to Jira](../../integration/jira/connect-app.md#data-sent-from-gitlab-to-jira) with the same token.
Your self-managed instance is also notified that the GitLab for Jira Cloud app has been installed or uninstalled.
When [data is sent](../../integration/jira/connect-app.md#data-sent-from-gitlab-to-jira) from your self-managed instance to the Jira development panel,
it is sent from your self-managed instance directly to Jira and not to GitLab.com.
GitLab.com does not use the token to access data in your Jira project.
Your self-managed instance uses the token to [access the data](../../integration/jira/connect-app.md#gitlab-access-to-jira).
For more information about the lifecycle events and payloads that GitLab.com receives,
see the [Atlassian documentation](https://developer.atlassian.com/cloud/jira/platform/connect-app-descriptor/#lifecycle).
```mermaid
sequenceDiagram
accTitle: Dataflow of the GitLab for Jira Cloud app installed from the Atlassian Marketplace
accDescr: How GitLab.com handles lifecycle events when the GitLab for Jira Cloud app was installed from the Atlassian Marketplace
participant Jira
participant Your instance
participant GitLab.com
Jira->>+GitLab.com: App install/uninstall event
GitLab.com->>-Your instance: App install/uninstall event
Your instance->>Jira: Your development data
```
### Access to GitLab through OAuth
@ -287,7 +307,7 @@ To use the GitLab for Jira Cloud app on a self-managed instance that cannot be a
from the internet, the self-managed instance must be accessible from Jira Cloud.
You can use a reverse proxy, but keep the following in mind:
- When you [connect the GitLab for Jira Cloud app](#connect-the-gitlab-for-jira-cloud-app),
- When you [install the GitLab for Jira Cloud app from the Atlassian Marketplace](#install-the-gitlab-for-jira-cloud-app-from-the-atlassian-marketplace),
use a client with access to both the internal GitLab FQDN and the reverse proxy FQDN.
- When you [install the GitLab for Jira Cloud app manually](#install-the-gitlab-for-jira-cloud-app-manually),
use the reverse proxy FQDN for **Redirect URI** to [set up OAuth authentication](#set-up-oauth-authentication).

View File

@ -74,7 +74,7 @@ If GitLab fails to process or store these tokens, an `Invalid JWT` error occurs.
To resolve this issue on your self-managed GitLab instance:
- Confirm your self-managed GitLab instance is publicly available to:
- GitLab.com (if you [installed the app from the official Atlassian Marketplace listing](jira_cloud_app.md#connect-the-gitlab-for-jira-cloud-app)).
- GitLab.com (if you [installed the app from the official Atlassian Marketplace listing](jira_cloud_app.md#install-the-gitlab-for-jira-cloud-app-from-the-atlassian-marketplace)).
- Jira Cloud (if you [installed the app manually](jira_cloud_app.md#install-the-gitlab-for-jira-cloud-app-manually)).
- Ensure the token request sent to the `/-/jira_connect/events/installed` endpoint when you install the app is accessible from Jira.
The following command should return a `401 Unauthorized`:
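(A sketch, with `gitlab.example.com` as a placeholder for your instance URL.)
```shell
# Sketch: the lifecycle endpoint should be reachable, but it should reject
# requests that are not signed by Jira.
curl --include --request POST "https://gitlab.example.com/-/jira_connect/events/installed"
```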
@ -88,7 +88,7 @@ To resolve this issue on your self-managed GitLab instance:
Depending on how you installed the app, you might want to check the following:
- If you [installed the app from the official Atlassian Marketplace listing](jira_cloud_app.md#connect-the-gitlab-for-jira-cloud-app),
- If you [installed the app from the official Atlassian Marketplace listing](jira_cloud_app.md#install-the-gitlab-for-jira-cloud-app-from-the-atlassian-marketplace),
switch between GitLab versions in the GitLab for Jira Cloud app:
<!-- markdownlint-disable MD044 -->
@ -272,5 +272,5 @@ To resolve this issue:
curl --include "https://gitlab.example.com/-/jira_connect/oauth_application_id"
```
1. If you [installed the app from the official Atlassian Marketplace listing](jira_cloud_app.md#connect-the-gitlab-for-jira-cloud-app),
1. If you [installed the app from the official Atlassian Marketplace listing](jira_cloud_app.md#install-the-gitlab-for-jira-cloud-app-from-the-atlassian-marketplace),
ensure [**Jira Connect Proxy URL**](jira_cloud_app.md#set-up-your-instance) is set to `https://gitlab.com` without leading slashes.

View File

@ -66,8 +66,10 @@ To avoid a broken workflow, you must:
1. Replace the registration token in your runner registration workflow with the
authentication token, as shown in the example below.
**Critical note** - Previously stored `registration tokens` cannot be used to register new runners after 17.0, unless the
`CI/CD Settings >> Runners >> Allow members of projects and groups to create runners with runner registration tokens` setting is re-enabled.
WARNING:
In GitLab 17.0 and later, runner registration tokens are disabled.
To use stored runner registration tokens to register new runners,
you must [enable the tokens](../../administration/settings/continuous_integration.md#enable-runner-registrations-tokens).
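As a sketch of the replacement workflow, a runner can be registered non-interactively with a runner authentication token. The URL, token value, executor, and description below are placeholders.
```shell
# Sketch: register a runner with a runner authentication token (glrt-...) created
# in the GitLab UI, instead of a legacy registration token. Placeholder values only.
gitlab-runner register \
  --non-interactive \
  --url "https://gitlab.example.com" \
  --token "glrt-EXAMPLE_TOKEN" \
  --executor "shell" \
  --description "example-runner"
```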
## Using registration tokens after GitLab 17.0

View File

@ -18,10 +18,9 @@ that will create:
- The image tag is the commit that triggered the pipeline.
When you push a commit to either the GitLab CE or GitLab EE project, the
pipeline for that commit will have a `trigger-omnibus` job in the `qa` stage you
can trigger manually (if it didn't trigger already).
pipeline for that commit will have a `trigger-omnibus` job inside the `e2e:package-and-test` child pipeline in the `.pre` stage.
![Trigger omnibus QA job](img/trigger_omnibus_v16_3.png)
![Trigger omnibus job](img/trigger_omnibus_v16_3.png)
After the child pipeline has started, you can select `trigger-omnibus` to go to
the child pipeline named `TRIGGERED_EE_PIPELINE`.

View File

@ -694,17 +694,6 @@ When reviewing merge requests added by wider community contributors:
the current milestone. This is to avoid confusion around when it'll be
merged and avoid moving milestone too often when it's not yet ready.
If the MR source branch is more than 1,000 commits behind the target branch:
- Ask the author to rebase it, or consider taking a bias-for-action and rebasing it yourself
if the MR has "Allows commits from members who can merge to the target branch" enabled.
- Reviewing MRs in the context of recent changes can help prevent hidden runtime conflicts and
promote consistency. Depending on the nature of the change, you might also want to rebase if the
MR is less than 1,000 commits behind.
- A forced push could throw off the contributor, so it's a good idea to communicate that you've performed a rebase,
or check with the contributor first when they're actively working on the MR.
- The rebase can usually be done inside GitLab with the `/rebase` [quick action](../user/project/quick_actions.md).
#### Taking over a community merge request
When an MR needs further changes but the author is not responding for a long period of time,

View File

@ -30,6 +30,21 @@ For an [overview of the feature flag lifecycle](https://handbook.gitlab.com/hand
Moved to the ["When to use feature flags"](https://handbook.gitlab.com/handbook/product-development-flow/feature-flag-lifecycle/#when-to-use-feature-flags) section in the handbook.
### Do not use feature flags for long lived settings
Feature flags are meant to be short-lived. If you intend to add a
feature flag so that something can be enabled per user, group, or project for a long
period of time, consider introducing
[Cascading Settings](../cascading_settings.md) or [Application Settings](../application_settings.md)
instead. Settings
offer a way for customers to enable or disable features for themselves on
GitLab.com or self-managed instances, and they can remain in the codebase as long as needed. In
contrast, users have no way to enable or disable feature flags for themselves on
GitLab.com, and only self-managed administrators can change feature flags.
Also note that
[feature flags are not supported in GitLab Dedicated](../enabling_features_on_dedicated.md#feature-flags),
which is another reason you should not use them as a replacement for settings.
## Feature flags in GitLab development
The following highlights should be considered when deciding if feature flags

View File

@ -647,6 +647,7 @@ After triggering a successful [e2e:package-and-test-ee](testing_guide/end_to_end
1. In the [GitLab project](https://gitlab.com/gitlab-org/gitlab), select the **Pipelines** tab of a merge request.
1. Select the `Stage: qa` stage on the latest pipeline to expand and list all the related jobs.
1. Select the trigger job `e2e:package-and-test` to navigate to the child pipeline.
1. Select `trigger-omnibus` to view the [Omnibus GitLab Mirror](https://gitlab.com/gitlab-org/build/omnibus-gitlab-mirror) pipeline corresponding to the merge request.
1. The `GET:Geo` job can be found and triggered under the `trigger-qa` stage.

Binary file not shown (image changed: 34 KiB before, 78 KiB after)

View File

@ -34,9 +34,11 @@ After you link a group, the following GitLab data is synced to Jira for all proj
- The last 400 branches and the last commit to each of those branches (GitLab 15.11 and later)
- New project data (after you linked the group):
- Merge requests
- Merge request author
- Branches
- Commits
- Builds
- Commit author
- Pipelines
- Deployments
- Feature flags
@ -112,12 +114,37 @@ If the app requires additional permissions, [you must manually approve the updat
The GitLab for Jira Cloud app connects GitLab and Jira. Data must be shared between the two applications, and access must be granted in both directions.
### Access to Jira through access token
### GitLab access to Jira
Jira shares an access token with GitLab to authenticate and authorize data pushes to Jira.
As part of the app installation process, Jira sends a handshake request to GitLab containing the access token.
The handshake is signed with an [asymmetric JWT](https://developer.atlassian.com/cloud/jira/platform/understanding-jwt-for-connect-apps/),
and the access token is stored encrypted with `AES256-GCM` on GitLab.
When you [configure the GitLab for Jira Cloud app](#configure-the-gitlab-for-jira-cloud-app), GitLab receives a **shared secret token** from Jira.
The token grants GitLab `READ`, `WRITE`, and `DELETE` [app scopes](https://developer.atlassian.com/cloud/jira/software/scopes-for-connect-apps/#scopes-for-atlassian-connect-apps) for the Jira project.
These scopes are required to update information in the Jira project's development panel.
The token does not grant GitLab access to any other Atlassian product besides the Jira project the app was installed in.
The token is encrypted with `AES256-GCM` and stored on GitLab.
When the GitLab for Jira Cloud app is uninstalled from your Jira project, GitLab deletes the token.
### Jira access to GitLab
Jira does not gain any access to GitLab.
### Data sent from GitLab to Jira
For all the data sent to Jira, see [GitLab data synced to Jira](#gitlab-data-synced-to-jira).
For more information about the specific data properties sent to Jira, see the [serializer classes](https://gitlab.com/gitlab-org/gitlab/-/tree/master/lib/atlassian/jira_connect/serializers) involved in data synchronization.
### Data sent from Jira to GitLab
GitLab receives a [lifecycle event](https://developer.atlassian.com/cloud/jira/platform/connect-app-descriptor/#lifecycle) from Jira when the GitLab for Jira Cloud app is installed or uninstalled.
The event includes a [token](#gitlab-access-to-jira) to verify subsequent lifecycle events and to authenticate when [sending data to Jira](#data-sent-from-gitlab-to-jira).
Lifecycle event requests from Jira are [verified](https://developer.atlassian.com/cloud/jira/platform/security-for-connect-apps/#validating-installation-lifecycle-requests).
For self-managed instances that use the GitLab for Jira Cloud app from the Atlassian Marketplace, GitLab.com handles lifecycle events and forwards them to the self-managed instance. For more information, see [GitLab.com handling of app lifecycle events](../../administration/settings/jira_cloud_app.md#gitlabcom-handling-of-app-lifecycle-events).
### Privacy and security details in the Atlassian Marketplace
For more information, see the [privacy and security details of the Atlassian Marketplace listing](https://marketplace.atlassian.com/apps/1221011/gitlab-for-jira-cloud?tab=privacy-and-security&hosting=cloud).
## Troubleshooting

View File

@ -192,6 +192,9 @@ To create a cleanup policy in the UI:
| **Remove tags older than** | Remove only tags older than X days. |
| **Remove tags matching** | A regex pattern that determines which tags to remove. This value cannot be blank. For all tags, use `.*`. See other [regex pattern examples](#regex-pattern-examples). |
NOTE:
Both keep and remove regex patterns are automatically surrounded with `\A` and `\Z` anchors, so you do not need to include them. However, make sure to take this into account when choosing and testing your regex patterns.
1. Select **Save**.
The policy runs on the scheduled interval you selected.
@ -205,9 +208,6 @@ Cleanup policies use regex patterns to determine which tags should be preserved
GitLab uses [RE2 syntax](https://github.com/google/re2/wiki/Syntax) for regular expressions in the cleanup policy.
Regex patterns are automatically surrounded with `\A` and `\Z` anchors. Therefore, you do not need to include any
`\A`, `\Z`, `^` or `$` tokens in the regex patterns.
Here are some examples of regex patterns you can use:
- Match all tags:

View File

@ -36,26 +36,7 @@ With remote development, you can use:
- A separate machine as a backend runtime environment
For a complete IDE experience, connect the Web IDE to a development environment configured to run as a remote host.
You can create this environment [inside](../../workspace/configuration.md) or [outside](connect_machine.md) of GitLab.
## Workspaces
DETAILS:
**Tier:** Premium, Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
A [workspace](../../workspace/index.md) is a virtual sandbox environment for your code in GitLab that includes:
- A runtime environment
- Dependencies
- Configuration files
You can create a workspace from scratch or from a template that you can also customize.
When you configure and connect a workspace to the [Web IDE](../web_ide/index.md), you can:
- Edit files directly from the Web IDE and commit and push changes to GitLab.
- Use the Web IDE to run tests, debug code, and view real-time feedback.
You can create this environment [outside](connect_machine.md) of GitLab.
## Manage a development environment

View File

@ -19,44 +19,46 @@ You can use [workspaces](index.md) to create and manage isolated development env
Each workspace includes its own set of dependencies, libraries, and tools,
which you can customize to meet the specific needs of each project.
## Set up a workspace
## Set up workspace infrastructure
Before you [create a workspace](#create-a-workspace), you must set up your infrastructure. You only need to do this once. The example after the following steps shows how you might verify the cluster setup.
To set up infrastructure for workspaces:
1. Set up a Kubernetes cluster that the GitLab agent supports.
See the [supported Kubernetes versions](../clusters/agent/index.md#supported-kubernetes-versions-for-gitlab-features).
1. Ensure autoscaling for the Kubernetes cluster is enabled.
1. In the Kubernetes cluster:
1. Verify that a [default storage class](https://kubernetes.io/docs/concepts/storage/storage-classes/)
is defined so that volumes can be dynamically provisioned for each workspace.
1. Install an Ingress controller of your choice (for example, `ingress-nginx`).
1. [Install](../clusters/agent/install/index.md) and [configure](gitlab_agent_configuration.md) the GitLab agent.
1. Point [`dns_zone`](gitlab_agent_configuration.md#dns_zone) and `*.<dns_zone>`
to the load balancer exposed by the Ingress controller.
1. [Set up the GitLab workspaces proxy](set_up_workspaces_proxy.md).
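A minimal sketch of commands to verify some of these cluster prerequisites. It assumes `kubectl` access to the cluster and uses `ingress-nginx` only as an example controller and namespace.
```shell
# Sketch: check workspace cluster prerequisites (assumes kubectl is configured).
kubectl version                             # cluster is reachable; compare with supported versions
kubectl get storageclass                    # one class should be marked "(default)"
kubectl get pods --namespace ingress-nginx  # example: ingress-nginx controller pods are running
# Note the external IP of the controller service to point <dns_zone> and *.<dns_zone> at:
kubectl get service --namespace ingress-nginx ingress-nginx-controller
```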
## Create a workspace
> - Support for private projects [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/124273) in GitLab 16.4.
### Prerequisites
- Set up a Kubernetes cluster that the GitLab agent supports.
See the [supported Kubernetes versions](../clusters/agent/index.md#supported-kubernetes-versions-for-gitlab-features).
- Ensure autoscaling for the Kubernetes cluster is enabled.
- In the Kubernetes cluster:
- Verify that a [default storage class](https://kubernetes.io/docs/concepts/storage/storage-classes/)
is defined so that volumes can be dynamically provisioned for each workspace.
- Install an Ingress controller of your choice (for example, `ingress-nginx`) and make
that controller accessible over a domain.
- In development environments, add an entry to the `/etc/hosts` file or update your DNS records.
- In production environments, point `*.<workspaces.example.dev>` and `<workspaces.example.dev>`
to the load balancer exposed by the Ingress controller.
- [Set up the GitLab workspaces proxy](set_up_workspaces_proxy.md).
- [Install](../clusters/agent/install/index.md) and [configure](gitlab_agent_configuration.md) the GitLab agent.
- You must have at least the Developer role in the root group.
- In each project you want to use this feature for, create a [devfile](index.md#devfile):
1. On the left sidebar, select **Search or go to** and find your project.
1. In the root directory of your project, create a file named `.devfile.yaml`.
You can use one of the [example configurations](index.md#example-configurations).
- Ensure the container images used in the devfile support [arbitrary user IDs](index.md#arbitrary-user-ids).
### Create a workspace
> - **Git reference** and **Devfile location** [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/392382) in GitLab 16.10.
> - **Time before automatic termination** [renamed](https://gitlab.com/gitlab-org/gitlab/-/issues/392382) to **Workspace automatically terminates after** in GitLab 16.10.
Prerequisites:
- Ensure your [workspace infrastructure](#set-up-workspace-infrastructure) is already set up.
- You must have at least the Developer role in the root group.
- In each project where you want to create a workspace, create a [devfile](index.md#devfile):
1. On the left sidebar, select **Search or go to** and find your project.
1. In the root directory of your project, create a file named `devfile`.
You can use one of the [example configurations](index.md#example-configurations), or start from the minimal sketch after this list.
- Ensure the container images used in the devfile support [arbitrary user IDs](index.md#arbitrary-user-ids).
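For reference, here is a minimal devfile sketch based on the [example configurations](index.md#example-configurations). The container image is a placeholder; use any image that supports arbitrary user IDs.

```yaml
schemaVersion: 2.2.0
components:
  - name: tooling
    attributes:
      gl/inject-editor: true
    container:
      # Placeholder image: replace with an image that supports arbitrary user IDs.
      image: registry.example.com/my-group/workspace-tooling:latest
```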
To create a workspace:
1. On the left sidebar, select **Search or go to**.
1. Select **Your work**.
1. Select **Workspaces**.
1. Select **New workspace**.
1. From the **Project** dropdown list, [select a project with a `.devfile.yaml` file](#prerequisites).
1. From the **Project** dropdown list, select a project with a [devfile](index.md#devfile).
1. From the **Cluster agent** dropdown list, select a cluster agent owned by the group the project belongs to.
1. From the **Git reference** dropdown list, select the branch, tag, or commit hash
GitLab uses to create the workspace.
@ -95,46 +97,7 @@ When you connect to `gitlab-workspaces-proxy` through the TCP load balancer,
- The personal access token
- User access to the workspace
### Set up the GitLab workspaces proxy for SSH connections
Prerequisites:
- You must have an SSH host key for client verification.
SSH is now enabled by default in the [GitLab workspaces proxy](set_up_workspaces_proxy.md).
To set up `gitlab-workspaces-proxy` with the GitLab Helm chart:
1. Run this command:
```shell
ssh-keygen -f ssh-host-key -N '' -t rsa
export SSH_HOST_KEY=$(pwd)/ssh-host-key
```
1. Install `gitlab-workspaces-proxy` with the generated SSH host key:
```shell
helm upgrade --install gitlab-workspaces-proxy \
gitlab-workspaces-proxy/gitlab-workspaces-proxy \
--version 0.1.8 \
--namespace=gitlab-workspaces \
--create-namespace \
--set="auth.client_id=${CLIENT_ID}" \
--set="auth.client_secret=${CLIENT_SECRET}" \
--set="auth.host=${GITLAB_URL}" \
--set="auth.redirect_uri=${REDIRECT_URI}" \
--set="auth.signing_key=${SIGNING_KEY}" \
--set="ingress.host.workspaceDomain=${GITLAB_WORKSPACES_PROXY_DOMAIN}" \
--set="ingress.host.wildcardDomain=${GITLAB_WORKSPACES_WILDCARD_DOMAIN}" \
--set="ingress.tls.workspaceDomainCert=$(cat ${WORKSPACES_DOMAIN_CERT})" \
--set="ingress.tls.workspaceDomainKey=$(cat ${WORKSPACES_DOMAIN_KEY})" \
--set="ingress.tls.wildcardDomainCert=$(cat ${WILDCARD_DOMAIN_CERT})" \
--set="ingress.tls.wildcardDomainKey=$(cat ${WILDCARD_DOMAIN_KEY})" \
--set="ssh.host_key=$(cat ${SSH_HOST_KEY})" \
--set="ingress.className=nginx"
```
### Update your runtime images
### Update your workspace container image
To update your runtime images for SSH connections:

View File

@ -15,9 +15,9 @@ DETAILS:
> - [Enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/391543) in GitLab 16.0.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/136744) in GitLab 16.7. Feature flag `remote_development_feature_flag` removed.
When you [set up a workspace](configuration.md#set-up-a-workspace),
you must configure the GitLab agent for remote development.
The remote development settings are available in the agent
When you [set up workspace infrastructure](configuration.md#set-up-workspace-infrastructure),
you must configure the GitLab agent.
The workspace settings are available in the agent
configuration file under `remote_development`.
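For example, a minimal configuration sketch might look like this (the DNS zone is a placeholder):

```yaml
remote_development:
  enabled: true
  # Placeholder: must match the wildcard domain that points to your Ingress load balancer.
  dns_zone: workspaces.example.dev
```

`enabled` defaults to `false`, so you must set it to `true` before you can create workspaces with the agent.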
You can use any agent in the top-level group of your workspace project
@ -26,7 +26,7 @@ is properly configured for remote development.
For example, if the path to your workspace project is `top-group/subgroup-1/subgroup-2/workspace-project`,
you can use any configured agent in `top-group` and in any of its subgroups.
## Remote development settings
## Workspace settings
| Setting | Required | Default value | Description |
|-------------------------------------------------------------------------------------------|----------|-----------------------------------------|-------------|
@ -47,7 +47,7 @@ If a setting has an invalid value, it's not possible to update any setting until
Use this setting to define whether:
- The GitLab agent can communicate with the GitLab instance.
- You can [create a workspace](configuration.md#set-up-a-workspace) with the GitLab agent.
- You can [create a workspace](configuration.md#create-a-workspace) with the GitLab agent.
The default value is `false`.

View File

@ -27,13 +27,13 @@ For a click-through demo, see [GitLab workspaces](https://tech-marketing.gitlab.
## Workspaces and projects
Workspaces are scoped to a project.
When you [create a workspace](configuration.md#set-up-a-workspace), you must:
When you [create a workspace](configuration.md#create-a-workspace), you must:
- Assign the workspace to a specific project.
- Select a project with a [`.devfile.yaml`](#devfile) file.
- Select a project with a [devfile](#devfile).
The workspace can interact with the GitLab API, with the access level defined by current user permissions.
A running workspace remains accessible even if user permissions are later revoked.
A running workspace remains accessible to the user even if user permissions are later revoked.
### Manage workspaces from a project
@ -51,9 +51,10 @@ WARNING:
When you terminate a workspace, any unsaved or uncommitted data
in that workspace is deleted and cannot be recovered.
### Deleting data associated with a workspace
### Deleting resources associated with a workspace
When you delete a project, agent, user, or token associated with a workspace:
When you terminate a workspace, all resources associated with the workspace are deleted.
When you delete a project, agent, user, or token associated with a running workspace:
- The workspace is deleted from the user interface.
- In the Kubernetes cluster, the running workspace resources become orphaned and are not automatically deleted.
@ -95,7 +96,7 @@ A devfile is a file that defines a development environment by specifying the nec
tools, languages, runtimes, and other components for a GitLab project.
Workspaces have built-in support for devfiles.
You can specify a devfile for your project in the GitLab configuration file.
The default location is `.devfile.yaml`, but you can also use a custom location.
The devfile is used to automatically configure the development environment with the defined specifications.
This way, you can create consistent and reproducible development environments
@ -180,7 +181,7 @@ For more information, see the [VS Code documentation](https://code.visualstudio.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/129715) in GitLab 16.4.
When you [create a workspace](configuration.md#set-up-a-workspace), you get a personal access token with `write_repository` permission.
When you [create a workspace](configuration.md#create-a-workspace), you get a personal access token with `write_repository` permission.
This token is used to initially clone the project while starting the workspace.
Any Git operation you perform in the workspace uses this token for authentication and authorization.

View File

@ -152,6 +152,12 @@ module Gitlab
kubeclient.execute("toolbox", ["gitlab-rails", "runner", admin_pat_seed], container: "toolbox"),
[admin_token]
).strip
rescue Kubectl::Client::Error => e
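# Re-running the seed script violates the unique index on the PAT token digest; treat an existing admin token as success.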
token_exists_error = "duplicate key value violates unique constraint " \
"\"index_personal_access_tokens_on_token_digest\""
return log("Token already exists, skipping!", :warn) if e.message.include?(token_exists_error)
raise e
end
end
end

View File

@ -64,7 +64,8 @@ module Gitlab
chart_reference = run_pre_deploy_setup
run_deploy(chart_reference)
run_post_deploy_setup
rescue Helpers::Shell::CommandFailure
# Exit on error to avoid duplicating error messages and to exit cleanly when kubectl- or helm-related errors are raised
rescue Kubectl::Client::Error, Helm::Client::Error
exit(1)
end
@ -181,7 +182,7 @@ module Gitlab
def create_namespace
log("Creating namespace '#{namespace}'", :info)
puts kubeclient.create_namespace
rescue StandardError => e
rescue Kubectl::Client::Error => e
return log("namespace already exists, skipping", :warn) if e.message.include?("already exists")
raise(e)

View File

@ -13,6 +13,9 @@ module Gitlab
HELM_CHART = "https://charts.gitlab.io"
HELM_CHART_REPO = "https://gitlab.com/gitlab-org/charts/gitlab"
# Error raised by helm client class
Error = Class.new(StandardError)
# Add helm chart and return reference
#
# @param [String] sha fetch and package helm chart using specific repo sha
@ -23,14 +26,14 @@ module Gitlab
log("Adding gitlab helm chart '#{HELM_CHART}'", :info)
puts run_helm(%W[repo add #{HELM_CHART_PREFIX} #{HELM_CHART}])
"#{HELM_CHART_PREFIX}/gitlab"
rescue Helpers::Shell::CommandFailure => e
rescue Error => e
if e.message.include?("already exists")
log("helm chart repo already exists, updating", :warn)
puts(run_helm(%w[repo update gitlab]))
return "#{HELM_CHART_PREFIX}/gitlab"
end
raise(e)
raise(Error, e.message)
end
# Run helm upgrade command with --install argument
@ -72,7 +75,7 @@ module Gitlab
# @return [<String, nil>] status of helm release or nil if release is not found
def status(name, namespace:)
run_helm(%W[status #{name} --namespace #{namespace}])
rescue Helpers::Shell::CommandFailure => e
rescue Error => e
e.message.include?("release: not found") ? nil : raise(e)
end
@ -120,6 +123,8 @@ module Gitlab
# @return [String]
def run_helm(cmd, stdin = nil)
execute_shell(["helm", *cmd], stdin_data: stdin)
rescue Helpers::Shell::CommandFailure => e
raise(Error, e.message)
end
end
end

View File

@ -21,6 +21,8 @@ module Gitlab
# @return [String] command output
def create_namespace
execute_shell(["kubectl", "create", "namespace", namespace])
rescue Helpers::Shell::CommandFailure => e
raise(Error, e.message)
end
# Create kubernetes resource
@ -128,6 +130,8 @@ module Gitlab
# @return [String]
def run_in_namespace(*action, args:, stdin_data: nil)
execute_shell(["kubectl", *action, "-n", namespace, *args], stdin_data: stdin_data)
rescue Helpers::Shell::CommandFailure => e
raise(Error, e.message)
end
end
end

View File

@ -108,4 +108,14 @@ RSpec.describe Gitlab::Cng::Deployment::Configurations::Kind do
it "returns correct gitlab url" do
expect(configuration.gitlab_url).to eq("http://gitlab.127.0.0.1.nip.io")
end
it "handles already existing admin PAT" do
allow(kubeclient).to receive(:execute)
.with("toolbox", kind_of(Array), container: "toolbox")
.and_raise(Gitlab::Cng::Kubectl::Client::Error, <<~MSG)
/srv/gitlab/vendor/bundle/ruby/3.1.0/gems/activerecord-7.0.8.1/lib/active_record/connection_adapters/postgresql_adapter.rb:768:in `exec_params': PG::UniqueViolation: ERROR: duplicate key value violates unique constraint "index_personal_access_tokens_on_token_digest" (ActiveRecord::RecordNotUnique)
MSG
expect { configuration.run_post_deployment_setup }.to output(/Token already exists, skipping!/).to_stdout
end
end

View File

@ -0,0 +1,50 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillOrDropCiPipelineOnProjectId < BatchedMigrationJob
operation_name :backfill_or_drop_ci_pipelines_on_project_id
scope_to ->(relation) { relation.where(project_id: nil) }
feature_category :continuous_integration
def perform
each_sub_batch do |sub_batch|
sub_batch.each do |pipeline|
next if backfill_with_build_or_merge_request(pipeline)
CiTriggerRequest.where(commit_id: pipeline.id).delete_all
pipeline.delete
end
end
end
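# Table-backed ActiveRecord classes defined inside the migration so it does not depend on application models.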
class CiBuild < ::Ci::ApplicationRecord
self.table_name = :p_ci_builds
self.inheritance_column = :_type_disabled
self.primary_key = :id
end
class CiTriggerRequest < ::Ci::ApplicationRecord
self.table_name = :ci_trigger_requests
end
class MergeRequest < ApplicationRecord
self.table_name = :merge_requests
end
private
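# Backfill project_id from an attached build, or from a merge request whose source and target projects match.
# Returns false when no project_id can be determined, so the caller deletes the orphaned pipeline.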
def backfill_with_build_or_merge_request(pipeline)
project_id =
CiBuild.where(commit_id: pipeline.id).where.not(project_id: nil).select(:project_id).first&.project_id ||
MergeRequest.where(["target_project_id = source_project_id AND id = ?", pipeline.merge_request_id])
.select(:target_project_id).first&.target_project_id
return false unless project_id
pipeline.update_column(:project_id, project_id)
end
end
end
end

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillSbomOccurrencesVulnerabilitiesProjectId < BackfillDesiredShardingKeyJob
operation_name :backfill_sbom_occurrences_vulnerabilities_project_id
feature_category :dependency_management
end
end
end

View File

@ -29,11 +29,7 @@ module Gitlab
@inject_edge_stages = inject_edge_stages
@context = self.logger.instrument(:config_build_context, once: true) do
pipeline ||= if ::Feature.enabled?(:project_ref_name_in_pipeline, project)
::Ci::Pipeline.new(project: project, sha: sha, ref: ref, user: user, source: source)
else
::Ci::Pipeline.new(project: project, sha: sha, user: user, source: source)
end
pipeline ||= ::Ci::Pipeline.new(project: project, sha: sha, ref: ref, user: user, source: source)
build_context(project: project, pipeline: pipeline, sha: sha, user: user, parent_pipeline: parent_pipeline, pipeline_config: pipeline_config)
end

View File

@ -102,11 +102,9 @@ module Gitlab
end
class UnknownStrategy < ::Gitlab::Config::Entry::Node
def type
end
def type; end
def value
end
def value; end
def errors
["#{location} has an unsupported type"]

View File

@ -76,8 +76,7 @@ module Gitlab
self.when == 'delayed'
end
def default
end
def default; end
end
end
end

View File

@ -8,8 +8,7 @@ module Gitlab
class Limit
LimitExceededError = Class.new(StandardError)
def initialize(_context, _resource)
end
def initialize(_context, _resource); end
def enabled?
raise NotImplementedError

View File

@ -146,8 +146,6 @@ module Gitlab
end
def project_ref_name
return unless ::Feature.enabled?(:project_ref_name_in_pipeline, project)
return unless project
Rails.cache.fetch(['project', project.id, 'ref/containing/sha', sha], expires_in: 5.minutes) do

View File

@ -8,8 +8,7 @@ module Gitlab
class AfterConfig < Chain::Base
include Chain::Helpers
def perform!
end
def perform!; end
def break?
@pipeline.errors.any?

View File

@ -118,8 +118,7 @@ module Gitlab
@config.is_a?(Integer)
end
def self.default(**)
end
def self.default(**); end
def self.aspects
@aspects ||= []

View File

@ -42,8 +42,7 @@ module Gitlab
end
end
def self.default
end
def self.default; end
end
end
end

View File

@ -28,8 +28,7 @@ module Mattermost
LEASE_TIMEOUT = 60
Request = Struct.new(:parameters, keyword_init: true) do
def method_missing(method_name, *args, &block)
end
def method_missing(method_name, *args, &block); end
end
attr_accessor :current_resource_owner, :token, :base_uri

View File

@ -16,8 +16,7 @@ module QA
@name = "my-agent"
end
def fabricate!
end
def fabricate!; end
def resource_web_url(resource)
super

View File

@ -10,8 +10,7 @@ module QA
QA::Resource::Clusters::Agent.fabricate_via_api!
end
def fabricate!
end
def fabricate!; end
def resource_web_url(resource)
super

View File

@ -25,8 +25,7 @@ module QA
"#{api_get_path}/trace"
end
def api_post_path
end
def api_post_path; end
def api_post_body
{

View File

@ -19,8 +19,7 @@ module QA
this_package.try(:fetch, :id)
end
def fabricate!
end
def fabricate!; end
def fabricate_via_api!
resource_web_url(api_get)

View File

@ -25,8 +25,7 @@ module QA
@tag_name = 'master'
end
def fabricate!
end
def fabricate!; end
def fabricate_via_api!
resource_web_url(api_get)

View File

@ -10,8 +10,7 @@ module QA
Runtime::ApplicationSettings.set_application_settings(allow_local_requests_from_web_hooks_and_services: true)
end
def set_credentials(admin_user)
end
def set_credentials(admin_user); end
def setup
shell "k3d create --workers 1 --name #{cluster_name} --wait 0"

View File

@ -38,8 +38,7 @@ module QA
@k3s&.remove!
end
def set_credentials(admin_user)
end
def set_credentials(admin_user); end
# Fetch "real" certificate
# See https://github.com/rancher/k3s/issues/27

View File

@ -8,8 +8,7 @@ module QA
find_executable('minikube') || raise("You must first install `minikube` executable to run these tests.")
end
def set_credentials(admin_user)
end
def set_credentials(admin_user); end
def setup
shell 'minikube stop'

View File

@ -54,7 +54,9 @@ RSpec.describe 'Merge request > User sees pipelines from forked project', :js,
visit project_merge_request_path(target_project, merge_request)
end
it 'user visits a pipelines page', :sidekiq_might_not_need_inline do
it 'user visits a pipelines page',
:sidekiq_might_not_need_inline,
quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/467299' do
page.within('.merge-request-tabs') { click_link 'Pipelines' }
expect(page).to have_content(pipeline.id)

View File

@ -0,0 +1,102 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillOrDropCiPipelineOnProjectId, feature_category: :continuous_integration do
let(:project_id_with_build) { 137 }
let(:project_id_for_merge_request) { 140 }
let(:project_id_for_unaffected_pipeline) { 1 }
let!(:pipeline_with_nothing) { table(:ci_pipelines, database: :ci).create!(id: 1, partition_id: 100) }
let!(:pipeline_with_builds) { table(:ci_pipelines, database: :ci).create!(id: 2, partition_id: 100) }
let!(:pipeline_with_merge_request) do
table(:ci_pipelines, database: :ci).create!(id: 3, partition_id: 100, merge_request_id: 1)
end
let!(:untouched_pipeline) do
table(:ci_pipelines, database: :ci)
.create!(id: 4, partition_id: 100, project_id: project_id_for_unaffected_pipeline)
end
let!(:ci_trigger) { table(:ci_triggers, database: :ci).create!(owner_id: 1) }
let!(:trigger_request) { table(:ci_trigger_requests, database: :ci).create!(trigger_id: ci_trigger.id, commit_id: 1) }
let!(:build) do
table(:p_ci_builds, database: :ci).create!(partition_id: 100, project_id: project_id_with_build, commit_id: 2)
end
let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let(:project) do
table(:projects)
.create!(id: project_id_for_merge_request, namespace_id: namespace.id, project_namespace_id: namespace.id)
end
let!(:merge_request) do
table(:merge_requests).create!(
id: 1,
target_branch: 'main',
source_branch: 'feature',
target_project_id: project.id,
source_project_id: project.id
)
end
subject(:migration) do
described_class.new(
start_id: 1,
end_id: 5,
batch_table: :ci_pipelines,
batch_column: :id,
sub_batch_size: 100,
pause_ms: 0,
connection: ::Ci::ApplicationRecord.connection
)
end
describe '#perform' do
it 'backfills if applicable otherwise deletes' do
migration.perform
expect { pipeline_with_nothing.reload }.to raise_error(ActiveRecord::RecordNotFound)
expect { trigger_request.reload }.to raise_error(ActiveRecord::RecordNotFound)
expect(pipeline_with_builds.reload.project_id).to eq(project_id_with_build)
expect(pipeline_with_merge_request.reload.project_id).to eq(project_id_for_merge_request)
expect(untouched_pipeline.reload.project_id).to eq(project_id_for_unaffected_pipeline)
end
context 'when associations are invalid as well' do
let!(:pipeline_with_bad_build) { table(:ci_pipelines, database: :ci).create!(id: 5, partition_id: 100) }
let!(:bad_build) { table(:p_ci_builds, database: :ci).create!(partition_id: 100, commit_id: 5) }
it 'deletes pipeline if associations do not have project_id' do
migration.perform
expect { pipeline_with_bad_build.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
context 'when the merge request is from a fork project' do
let(:another_namespace) { table(:namespaces).create!(name: 'user2', path: 'user2') }
let(:another_project) do
table(:projects)
.create!(id: 141, namespace_id: another_namespace.id, project_namespace_id: another_namespace.id)
end
let!(:merge_request) do
table(:merge_requests).create!(
id: 1,
target_branch: 'main',
source_branch: 'feature',
target_project_id: project.id,
source_project_id: another_project.id
)
end
it 'deletes the pipeline as association is not definite' do
migration.perform
expect { pipeline_with_merge_request.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillSbomOccurrencesVulnerabilitiesProjectId,
feature_category: :dependency_management,
schema: 20240612075301 do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :sbom_occurrences_vulnerabilities }
let(:backfill_column) { :project_id }
let(:backfill_via_table) { :sbom_occurrences }
let(:backfill_via_column) { :project_id }
let(:backfill_via_foreign_key) { :sbom_occurrence_id }
end
end

View File

@ -121,24 +121,10 @@ RSpec.describe Gitlab::Ci::Config, feature_category: :pipeline_composition do
EOS
end
context 'when feature :project_ref_name_in_pipeline is enabled' do
it 'sets the ref in the pipeline' do
expect(Ci::Pipeline).to receive(:new).with(hash_including(ref: ref)).and_call_original
it 'sets the ref in the pipeline' do
expect(Ci::Pipeline).to receive(:new).with(hash_including(ref: ref)).and_call_original
described_class.new(yml, project: project, ref: ref, user: user)
end
end
context 'when feature is disabled' do
before do
stub_feature_flags(project_ref_name_in_pipeline: false)
end
it 'does not set the ref in the pipeline' do
expect(Ci::Pipeline).to receive(:new).with(hash_not_including(ref: ref)).and_call_original
described_class.new(yml, project: project, ref: ref, user: user)
end
described_class.new(yml, project: project, ref: ref, user: user)
end
end
end

View File

@ -333,21 +333,6 @@ RSpec.describe Gitlab::Ci::Lint, feature_category: :pipeline_composition do
2.times { lint.validate(content, dry_run: dry_run) }
end
end
context 'when project_ref_name_in_pipeline feature flag is disabled' do
before do
stub_feature_flags(project_ref_name_in_pipeline: false)
end
it 'passes nil as the ref name to YamlProcessor' do
expect(Gitlab::Ci::YamlProcessor)
.to receive(:new)
.with(content, a_hash_including(ref: nil))
.and_call_original
expect(subject.errors).to include("Project `#{project.full_path}` reference `` does not exist!")
end
end
end
context 'when a pipeline ref variable is used in an include and project_sha_exists? returns false' do

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillOrDropCiPipelineOnProjectId, migration: :gitlab_ci, feature_category: :continuous_integration do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :ci_pipelines,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE,
gitlab_schema: :gitlab_ci
)
}
end
end
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillSbomOccurrencesVulnerabilitiesProjectId, feature_category: :dependency_management do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :sbom_occurrences_vulnerabilities,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE,
gitlab_schema: :gitlab_main_cell,
job_arguments: [
:project_id,
:sbom_occurrences,
:project_id,
:sbom_occurrence_id
]
)
}
end
end
end

View File

@ -94,6 +94,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
describe 'validations' do
it { is_expected.to validate_presence_of(:sha) }
it { is_expected.to validate_presence_of(:status) }
it { is_expected.to validate_presence_of(:project) }
end
describe 'associations' do