Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2025-03-21 18:12:38 +00:00
parent 72d86e022e
commit 16a3b4cc09
91 changed files with 773 additions and 1679 deletions

View File

@ -84,6 +84,7 @@ include:
# skip short lived dev image signing and verification to save time
SKIP_IMAGE_SIGNING: "true"
SKIP_IMAGE_VERIFICATION: "true"
SKIP_IMAGE_TAGGING: "${SKIP_IMAGE_TAGGING}"
# set specific arch list
ARCH_LIST: "${ARCH_LIST}"
# use larger runner for complex rails build jobs

View File

@ -17,6 +17,8 @@ variables:
QA_RETRY_FAILED_SPECS: "true"
# Helm chart ref used by test-on-cng pipeline
GITLAB_HELM_CHART_REF: "3f89420cf319778195403711af12c57b4aab6511"
# Specific ref for cng-mirror project to trigger builds for
GITLAB_CNG_MIRROR_REF: "ea1b126a34dc702f4e7c84ebcc1504e4ab5008a3"
# Makes sure some of the common scripts from pipeline-common use bundler to execute commands
RUN_WITH_BUNDLE: "true"
# Makes sure reporting script defined in .gitlab-qa-report from pipeline-common is executed from correct folder

View File

@ -88,6 +88,10 @@ workflow:
build-cng-env:
stage: .pre
extends: .build-cng-env
variables:
CNG_COMMIT_SHA: $GITLAB_CNG_MIRROR_REF
CNG_ACCESS_TOKEN: $CNG_MIRROR_ACCESS_TOKEN
CNG_SKIP_REDUNDANT_JOBS: "false"
build-cng:
stage: .pre

View File

@ -38,7 +38,6 @@ Gitlab/Rails/AttrEncrypted:
- 'app/models/slack_integration.rb'
- 'app/models/snippet.rb'
- 'app/models/user.rb'
- 'app/models/virtual_registries/packages/maven/upstream.rb'
- 'db/migrate/20241017160504_generate_ci_job_token_signing_key.rb'
- 'db/migrate/20241017160505_regenerate_ci_job_token_signing_key.rb'
- 'ee/app/models/ai/self_hosted_model.rb'

View File

@ -163,7 +163,6 @@ RSpec/ContainExactly:
- 'spec/models/packages/package_file_spec.rb'
- 'spec/models/packages/package_spec.rb'
- 'spec/models/slack_integration_spec.rb'
- 'spec/models/virtual_registries/packages/maven/upstream_spec.rb'
- 'spec/presenters/packages/nuget/search_results_presenter_spec.rb'
- 'spec/requests/api/ci/pipelines_spec.rb'
- 'spec/requests/api/graphql/packages/package_spec.rb'

View File

@ -454,9 +454,6 @@ RSpec/ExampleWithoutDescription:
- 'spec/models/users/banned_user_spec.rb'
- 'spec/models/users/phone_number_validation_spec.rb'
- 'spec/models/users/project_callout_spec.rb'
- 'spec/models/virtual_registries/packages/maven/registry_spec.rb'
- 'spec/models/virtual_registries/packages/maven/registry_upstream_spec.rb'
- 'spec/models/virtual_registries/packages/maven/upstream_spec.rb'
- 'spec/models/webauthn_registration_spec.rb'
- 'spec/models/wiki_page/meta_spec.rb'
- 'spec/models/work_items/parent_link_spec.rb'

View File

@ -22,7 +22,7 @@ export default {
<template>
<div>
<gl-card
class="gl-rounded-lg gl-bg-section"
class="ci-card gl-rounded-lg gl-bg-section"
header-class="gl-rounded-lg gl-px-0 gl-py-0 gl-bg-section gl-border-b-0"
body-class="gl-pt-2 gl-pb-0 gl-px-2"
>

View File

@ -1,4 +1,5 @@
<script>
import { clamp } from 'lodash';
import { QUERIES } from '../constants';
import eventHub from '../event_hub';
@ -125,7 +126,11 @@ export default {
}
this.$apollo.queries.mergeRequests.refetch({
perPage: Math.min(100, Math.ceil(this.mergeRequests.nodes.length / PER_PAGE) * PER_PAGE),
perPage: clamp(
Math.ceil(this.mergeRequests.nodes.length / PER_PAGE) * PER_PAGE,
PER_PAGE,
100,
),
});
if (!this.hideCount) {

View File

@ -219,6 +219,10 @@
background-color: var(--blue-500, $blue-500) !important;
}
.ci-card:has(.ci-job-item-failed) {
border-color: var(--gl-control-border-color-error);
}
.ci-job-item-failed {
@apply gl-bg-red-50;

View File

@ -178,6 +178,7 @@ module Ci
validates :project, presence: true
after_create :keep_around_commits, unless: :importing?
after_commit :trigger_pipeline_status_change_subscription, if: :saved_change_to_status?
after_commit :track_ci_pipeline_created_event, on: :create, if: :internal_pipeline?
after_find :observe_age_in_minutes, unless: :importing?
@ -304,8 +305,6 @@ module Ci
end
after_transition do |pipeline, transition|
GraphqlTriggers.ci_pipeline_status_updated(pipeline)
next if transition.loopback?
pipeline.run_after_commit do
@ -609,6 +608,10 @@ module Ci
:ci_pipelines
end
def trigger_pipeline_status_change_subscription
GraphqlTriggers.ci_pipeline_status_updated(self)
end
def uses_needs?
processables.where(scheduling_type: :dag).any?
end

View File

@ -41,8 +41,7 @@ class NamespaceStatistics < ApplicationRecord # rubocop:disable Gitlab/Namespace
self.dependency_proxy_size = [
namespace.dependency_proxy_manifests,
namespace.dependency_proxy_blobs,
::VirtualRegistries::Packages::Maven::Cache::Entry.for_group(namespace)
namespace.dependency_proxy_blobs
].sum { |rel| rel.sum(:size) }
end

View File

@ -1,11 +0,0 @@
# frozen_string_literal: true
module VirtualRegistries
module Packages
module Maven
def self.table_name_prefix
'virtual_registries_packages_maven_'
end
end
end
end

View File

@ -1,13 +0,0 @@
# frozen_string_literal: true
module VirtualRegistries
module Packages
module Maven
module Cache
def self.table_name_prefix
'virtual_registries_packages_maven_cache_'
end
end
end
end
end

View File

@ -1,116 +0,0 @@
# frozen_string_literal: true
module VirtualRegistries
module Packages
module Maven
module Cache
class Entry < ApplicationRecord
include FileStoreMounter
include Gitlab::SQL::Pattern
include ::UpdateNamespaceStatistics
include ShaAttribute
# we're using a composite primary key: upstream_id, relative_path and status
self.primary_key = :upstream_id
query_constraints :upstream_id, :relative_path, :status
belongs_to :group
belongs_to :upstream,
class_name: 'VirtualRegistries::Packages::Maven::Upstream',
inverse_of: :cache_entries,
optional: false
alias_attribute :namespace, :group
update_namespace_statistics namespace_statistics_name: :dependency_proxy_size
# Used in destroying stale cached responses in DestroyOrphanCachedEntriesWorker
enum :status, default: 0, processing: 1, pending_destruction: 2, error: 3
ignore_column :file_final_path, remove_with: '17.11', remove_after: '2025-03-23'
sha_attribute :file_sha1
sha_attribute :file_md5
validates :group, top_level_group: true, presence: true
validates :relative_path,
:object_storage_key,
:size,
:file_sha1,
presence: true
validates :upstream_etag, :content_type, length: { maximum: 255 }
validates :relative_path, :object_storage_key, length: { maximum: 1024 }
validates :file_md5, length: { is: 32 }, allow_nil: true
validates :file_sha1, length: { is: 40 }
validates :relative_path,
uniqueness: { scope: [:upstream_id, :status] },
if: :default?
validates :object_storage_key, uniqueness: { scope: :relative_path }
validates :file, presence: true
mount_file_store_uploader ::VirtualRegistries::Cache::EntryUploader
before_validation :set_object_storage_key,
if: -> { object_storage_key.blank? && upstream && upstream.registry }
attr_readonly :object_storage_key
scope :search_by_relative_path, ->(query) do
fuzzy_search(query, [:relative_path], use_minimum_char_limit: false)
end
scope :for_group, ->(group) { where(group: group) }
scope :order_created_desc, -> { reorder(arel_table['created_at'].desc) }
def self.next_pending_destruction
pending_destruction.lock('FOR UPDATE SKIP LOCKED').take
end
# create or update a cached response identified by the upstream, group_id and relative_path
# Because this function might not be executed in isolation, we can't use
# safe_find_or_create_by.
# We use the check-existence-and-rescue alternative instead.
def self.create_or_update_by!(upstream:, group_id:, relative_path:, updates: {})
default.find_or_initialize_by(
upstream: upstream,
group_id: group_id,
relative_path: relative_path
).tap do |record|
record.update!(**updates)
end
rescue ActiveRecord::RecordInvalid => invalid
# in case of a race condition, retry the block
retry if invalid.record&.errors&.of_kind?(:relative_path, :taken)
# otherwise, bubble up the error
raise
end
def filename
return unless relative_path
File.basename(relative_path)
end
def stale?
return true unless upstream
return false if upstream.cache_validity_hours == 0
(upstream_checked_at + upstream.cache_validity_hours.hours).past?
end
def mark_as_pending_destruction
update_columns(
status: :pending_destruction,
relative_path: "#{relative_path}/deleted/#{SecureRandom.uuid}"
)
end
private
def set_object_storage_key
self.object_storage_key = upstream.object_storage_key_for(registry_id: upstream.registry.id)
end
end
end
end
end
end

View File

@ -1,31 +0,0 @@
# frozen_string_literal: true
module VirtualRegistries
module Packages
module Maven
class Registry < ApplicationRecord
ignore_column :cache_validity_hours, remove_with: '17.8', remove_after: '2024-12-23'
belongs_to :group
has_one :registry_upstream,
class_name: 'VirtualRegistries::Packages::Maven::RegistryUpstream',
inverse_of: :registry
has_one :upstream, class_name: 'VirtualRegistries::Packages::Maven::Upstream', through: :registry_upstream
validates :group, top_level_group: true, presence: true, uniqueness: true
scope :for_group, ->(group) { where(group: group) }
before_destroy :destroy_upstream
private
# TODO: revisit this when we support multiple upstreams.
# https://gitlab.com/gitlab-org/gitlab/-/issues/480461
def destroy_upstream
upstream&.destroy!
end
end
end
end
end

View File

@ -1,16 +0,0 @@
# frozen_string_literal: true
module VirtualRegistries
module Packages
module Maven
class RegistryUpstream < ApplicationRecord
belongs_to :group
belongs_to :registry, class_name: 'VirtualRegistries::Packages::Maven::Registry', inverse_of: :registry_upstream
belongs_to :upstream, class_name: 'VirtualRegistries::Packages::Maven::Upstream', inverse_of: :registry_upstream
validates :registry_id, :upstream_id, uniqueness: true
validates :group, top_level_group: true, presence: true
end
end
end
end

View File

@ -1,94 +0,0 @@
# frozen_string_literal: true
module VirtualRegistries
module Packages
module Maven
class Upstream < ApplicationRecord
belongs_to :group
has_one :registry_upstream,
class_name: 'VirtualRegistries::Packages::Maven::RegistryUpstream',
inverse_of: :upstream
has_one :registry, class_name: 'VirtualRegistries::Packages::Maven::Registry', through: :registry_upstream
has_many :cache_entries,
class_name: 'VirtualRegistries::Packages::Maven::Cache::Entry',
inverse_of: :upstream
attr_encrypted_options.merge!(
mode: :per_attribute_iv,
key: Settings.attr_encrypted_db_key_base_32,
algorithm: 'aes-256-gcm',
marshal: true,
marshaler: ::Gitlab::Json,
encode: false,
encode_iv: false
)
attr_encrypted :username
attr_encrypted :password
validates :group, top_level_group: true, presence: true
validates :url, addressable_url: { allow_localhost: false, allow_local_network: false }, presence: true
validates :username, presence: true, if: :password?
validates :password, presence: true, if: :username?
validates :url, :username, :password, length: { maximum: 255 }
validates :cache_validity_hours, numericality: { greater_than_or_equal_to: 0, only_integer: true }
validates :encrypted_username_iv, :encrypted_password_iv, uniqueness: true, allow_nil: true
before_validation :set_cache_validity_hours_for_maven_central, if: :url?, on: :create
after_validation :reset_credentials, if: -> { persisted? && url_changed? }
prevent_from_serialization(:username, :password) if respond_to?(:prevent_from_serialization)
def url_for(path)
full_url = File.join(url, path)
Addressable::URI.parse(full_url).to_s
end
def headers
return {} unless username.present? && password.present?
authorization = ActionController::HttpAuthentication::Basic.encode_credentials(username, password)
{ Authorization: authorization }
end
def default_cache_entries
cache_entries.default
end
def object_storage_key_for(registry_id:)
hash = Digest::SHA2.hexdigest(SecureRandom.uuid)
Gitlab::HashedPath.new(
'virtual_registries',
'packages',
'maven',
registry_id.to_s,
'upstream',
id.to_s,
'cache',
'entry',
hash[0..1],
hash[2..3],
hash[4..],
root_hash: registry_id
).to_s
end
private
def reset_credentials
return if username_changed? && password_changed?
self.username = nil
self.password = nil
end
def set_cache_validity_hours_for_maven_central
return unless url.start_with?('https://repo1.maven.org/maven2')
self.cache_validity_hours = 0
end
end
end
end
end

View File

@ -1,17 +0,0 @@
# frozen_string_literal: true
module VirtualRegistries
module Packages
module Policies
class Group
attr_reader :group
delegate_missing_to :group
def initialize(group)
@group = group.root_ancestor
end
end
end
end
end

View File

@ -1,11 +0,0 @@
# frozen_string_literal: true
module VirtualRegistries
module Packages
module Maven
class RegistryPolicy < ::BasePolicy
delegate { ::VirtualRegistries::Packages::Policies::Group.new(@subject.group) }
end
end
end
end

View File

@ -1,11 +0,0 @@
# frozen_string_literal: true
module VirtualRegistries
module Packages
module Maven
class UpstreamPolicy < ::BasePolicy
delegate { @subject.registry }
end
end
end
end

View File

@ -1,41 +0,0 @@
# frozen_string_literal: true
module VirtualRegistries
module Packages
module Policies
class GroupPolicy < ::BasePolicy
include CrudPolicyHelpers
delegate(:group) { @subject.group }
condition(:deploy_token_user, scope: :user, score: 0) { @user.is_a?(DeployToken) }
condition(:deploy_token_can_read_virtual_registry, score: 10) do
@user.read_virtual_registry && @user.has_access_to_group?(@subject.group)
end
rule { anonymous }.policy do
prevent(*create_read_update_admin_destroy(:virtual_registry))
end
rule { group.guest | admin | group.has_projects }.policy do
enable :read_virtual_registry
end
rule { group.maintainer }.policy do
enable :create_virtual_registry
enable :update_virtual_registry
enable :destroy_virtual_registry
end
rule { deploy_token_user & deploy_token_can_read_virtual_registry }.policy do
enable :read_virtual_registry
end
rule { deploy_token_user & ~deploy_token_can_read_virtual_registry }.policy do
prevent :read_virtual_registry
end
end
end
end
end

View File

@ -1,47 +0,0 @@
# frozen_string_literal: true
module VirtualRegistries
module Cache
class EntryUploader < GitlabUploader
include ObjectStorage::Concern
extend ::Gitlab::Utils::Override
storage_location :dependency_proxy
alias_method :upload, :model
before :cache, :set_content_type
delegate :filename, to: :model
def store_dir
dynamic_segment
end
override :check_remote_file_existence_on_upload?
def check_remote_file_existence_on_upload?
false
end
override :sync_model_object_store?
def sync_model_object_store?
true
end
override :direct_upload_final_path_attribute_name
def direct_upload_final_path_attribute_name
:object_storage_key
end
private
def set_content_type(file)
file.content_type = model.content_type
end
def dynamic_segment
model.object_storage_key
end
end
end
end

View File

@ -330,6 +330,10 @@ fork_networks:
- table: organizations
column: organization_id
on_delete: async_delete
geo_node_namespace_links:
- table: namespaces
column: namespace_id
on_delete: async_delete
group_security_exclusions:
- table: namespaces
column: group_id

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
class PrepareIndexForPCiPipelinesTriggerIdAndId < Gitlab::Database::Migration[2.2]
include Gitlab::Database::PartitioningMigrationHelpers
milestone '17.11'
TABLE_NAME = :p_ci_pipelines
COLUMN_NAMES = [:trigger_id, :id]
INDEX_NAME = :p_ci_pipelines_trigger_id_id_desc_idx
INDEX_ORDER = { id: :desc }
def up
prepare_partitioned_async_index(TABLE_NAME, COLUMN_NAMES, name: INDEX_NAME, order: INDEX_ORDER)
end
def down
unprepare_partitioned_async_index(TABLE_NAME, COLUMN_NAMES, name: INDEX_NAME)
end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
class RemoveNamespacesGeoNodeNamespaceLinksNamespaceIdFk < Gitlab::Database::Migration[2.2]
milestone '17.11'
disable_ddl_transaction!
FOREIGN_KEY_NAME = "fk_rails_41ff5fb854"
def up
with_lock_retries do
remove_foreign_key_if_exists(:geo_node_namespace_links, :namespaces,
name: FOREIGN_KEY_NAME, reverse_lock_order: true)
end
end
def down
add_concurrent_foreign_key(:geo_node_namespace_links, :namespaces,
name: FOREIGN_KEY_NAME, column: :namespace_id,
target_column: :id, on_delete: :cascade)
end
end

View File

@ -0,0 +1 @@
7c0790a6b4ab51fdfdd5ff38002238efeeb4cefb84ca1cbed9210d991ef6f25b

View File

@ -0,0 +1 @@
3608cd4edc28d65f28e023c3ec111738ca4774bb77c0a49a40b6971d0d64a10f

View File

@ -42236,9 +42236,6 @@ ALTER TABLE ONLY clusters_kubernetes_namespaces
ALTER TABLE ONLY lfs_object_states
ADD CONSTRAINT fk_rails_4188448cd5 FOREIGN KEY (lfs_object_id) REFERENCES lfs_objects(id) ON DELETE CASCADE;
ALTER TABLE ONLY geo_node_namespace_links
ADD CONSTRAINT fk_rails_41ff5fb854 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY epic_issues
ADD CONSTRAINT fk_rails_4209981af6 FOREIGN KEY (issue_id) REFERENCES issues(id) ON DELETE CASCADE;

View File

@ -1351,10 +1351,10 @@ link outside it.
- In GitLab Runner 12.10 and earlier, [`filepath.Match`](https://pkg.go.dev/path/filepath#Match).
- For [GitLab Pages job](#pages):
- In [GitLab 17.10 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/428018),
the [`pages:pages.publish`](#pagespagespublish) path is automatically appended to `artifacts:paths`,
the [`pages.publish`](#pagespublish) path is automatically appended to `artifacts:paths`,
so you don't need to specify it again.
- In [GitLab 17.10 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/428018),
when the [`pages:pages.publish`](#pagespagespublish) path is not specified,
when the [`pages.publish`](#pagespublish) path is not specified,
the `public` directory is automatically appended to `artifacts:paths`.
CI/CD variables [are supported](../variables/where_variables_can_be_used.md#gitlab-ciyml-file).
@ -1465,7 +1465,7 @@ job:
them fail with a [`could not retrieve the needed artifacts` error](../jobs/job_artifacts_troubleshooting.md#error-message-this-job-could-not-start-because-it-could-not-retrieve-the-needed-artifacts).
Set the expiry time to be longer, or use [`dependencies`](#dependencies) in later jobs
to ensure they don't try to fetch expired artifacts.
- `artifacts:expire_in` doesn't affect GitLab Pages deployments. To configure Pages deployments' expiry, use [`pages:pages.expire_in`](#pagespagesexpire_in).
- `artifacts:expire_in` doesn't affect GitLab Pages deployments. To configure Pages deployments' expiry, use [`pages.expire_in`](#pagesexpire_in).
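A minimal sketch (the job name and durations are illustrative) showing how the two settings interact: `artifacts:expire_in` only controls the uploaded artifact, while `pages.expire_in` controls the Pages deployment itself:
```yaml
create-pages:
  stage: deploy
  script:
    - mv my-html-content public
  pages:
    expire_in: never  # the Pages deployment is kept indefinitely
  artifacts:
    paths:
      - public
    expire_in: 1 day  # the job artifact itself still expires
```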
#### `artifacts:expose_as`
@ -3592,14 +3592,58 @@ uploads static content to GitLab. The content is then published as a website.
You must:
- Define [`artifacts`](#artifacts) with a path to the content directory, which is
`public` by default.
- Use [`pages.publish`](#pagespagespublish) if want to use a different content directory.
- Define `pages: true` to publish a directory named `public`.
- Alternatively, define [`pages.publish`](#pagespublish) if you want to use a different content directory.
**Keyword type**: Job name.
**Keyword type**: Job keyword or Job name (deprecated). You can use it only as part of a job.
**Supported values**:
- A boolean. Uses the default configuration when set to `true`.
- A hash of configuration options. See the following sections for details.
**Example of `pages`**:
```yaml
create-pages:
stage: deploy
script:
- mv my-html-content public
pages: true # specifies that this is a Pages job and publishes the default public directory
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
environment: production
```
This example renames the `my-html-content/` directory to `public/`.
This directory is exported as an artifact and published with GitLab Pages.
**Example using a configuration hash**:
```yaml
create-pages:
stage: deploy
script:
- echo "nothing to do here"
pages: # specifies that this is a Pages job and configures the deployment
publish: my-html-content
expire_in: "1 week"
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
environment: production
```
This example does not move the directory, but uses the `publish` property directly.
It also configures the pages deployment to be unpublished after a week.
**Deprecated: Use `pages` as a job name**
Using `pages` as a job name results in the same behavior as specifying
the Pages property `pages: true`. This method is available for backwards compatibility,
but might not receive all future improvements to the Pages job configuration.
**Example using `pages` as a job name**:
```yaml
pages: # specifies that this is a Pages job and publishes the default public directory
stage: deploy
@ -3610,17 +3654,28 @@ pages: # specifies that this is a Pages job and publishes the default public di
environment: production
```
This example renames the `my-html-content/` directory to `public/`.
This directory is exported as an artifact and published with GitLab Pages.
To use `pages` as a job name without triggering a Pages deployment, set the `pages`
property to `false`:
#### `pages:pages.publish`
```yaml
pages:
stage: deploy
script:
- mv my-html-content public
pages: false # this job will not trigger a Pages deployment
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
environment: production
```
#### `pages.publish`
{{< history >}}
- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/415821) in GitLab 16.1.
- [Changed](https://gitlab.com/gitlab-org/gitlab/-/issues/500000) to allow variables when passed to `publish` property in GitLab 17.9.
- [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/428018) the `publish` property under the `pages` keyword in GitLab 17.9.
- [Appended](https://gitlab.com/gitlab-org/gitlab/-/issues/428018) the `pages:pages.publish` path automatically to `artifacts:paths` in GitLab 17.10.
- [Appended](https://gitlab.com/gitlab-org/gitlab/-/issues/428018) the `pages.publish` path automatically to `artifacts:paths` in GitLab 17.10.
{{< /history >}}
@ -3637,7 +3692,7 @@ this path is automatically appended to [`artifacts:paths`](#artifactspaths).
**Example of `pages.publish`**:
```yaml
pages:
create-pages:
stage: deploy
script:
- npx @11ty/eleventy --input=path/to/eleventy/root --output=dist
@ -3655,7 +3710,7 @@ as an artifact and published with GitLab Pages.
It is also possible to use variables in the `pages.publish` field. For example:
```yaml
pages:
create-pages:
stage: deploy
script:
- mkdir -p $CUSTOM_FOLDER/$CUSTOM_PATH
@ -3669,7 +3724,9 @@ pages:
CUSTOM_SUBFOLDER: "custom_subfolder"
```
#### `pages:pages.path_prefix`
The publish path specified must be relative to the build root.
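For example, a minimal sketch (the site generator and the `dist/site` directory are illustrative) with a publish path given relative to the build root:
```yaml
create-pages:
  stage: deploy
  script:
    - hugo --destination dist/site
  pages:
    publish: dist/site  # resolved relative to the build root
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
  environment: production
```
In GitLab 17.10 and later, this `publish` path is appended to `artifacts:paths` automatically, so it doesn't need to be listed there again.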
#### `pages.path_prefix`
{{< details >}}
@ -3705,7 +3762,7 @@ Leading and trailing hyphens or periods are not permitted.
**Example of `pages.path_prefix`**:
```yaml
pages:
create-pages:
stage: deploy
script:
- echo "Pages accessible through ${CI_PAGES_URL}/${CI_COMMIT_BRANCH}"
@ -3715,7 +3772,7 @@ pages:
In this example, a different pages deployment is created for each branch.
#### `pages:pages.expire_in`
#### `pages.expire_in`
{{< details >}}
@ -3753,10 +3810,10 @@ Valid values include:
- `3 weeks and 2 days`
- `never`
**Example of `pages:pages.expire_in`**:
**Example of `pages.expire_in`**:
```yaml
pages:
create-pages:
stage: deploy
script:
- echo "Pages accessible through ${CI_PAGES_URL}"

View File

@ -1112,8 +1112,8 @@ This guidance follows the [Use of Third-party Trademarks](https://handbook.gitla
## GitLab AI vendor model
Use **GitLab AI vendor model** to refer to a [language model](#language-model-large-language-model)
that is hosted by GitLab, and that customers access through the GitLab-hosted
[AI gateway](#ai-gateway).
that is hosted by a third-party provider, and that customers access using the GitLab
[AI gateway](#ai-gateway) through the [Cloud Connector](../../cloud_connector/architecture.md).
Do not use this term when the [language model is hosted by a customer](#self-hosted-model),
or when the customer is using the [GitLab Duo Self-Hosted](#gitlab-duo-self-hosted)

View File

@ -24,12 +24,28 @@ Before writing tests, understand the different testing levels and determine the
[Learn more about the different testing levels](../testing_guide/testing_levels.md), and how to decide at what level your changes should be tested.
### Recommendation when mocking
### Recommendations
#### Name test files the same as the files they are testing
For unit tests, naming the test file `test_{file_being_tested}.py` and placing it in the same directory structure
helps with later discoverability of tests. It also avoids confusion between files that have the same
name but belong to different modules.
```shell
File: /foo/bar/cool_feature.py
# Bad
Test file: /tests/my_cool_feature.py
# Good
Test file: /tests/foo/bar/test_cool_feature.py
```
#### Mocking
- Use `unittest.mock` library.
- Mock at the right level, for example, at method call boundaries.
- Mock external services and APIs.
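As a minimal sketch of these recommendations (the `fetch_user` helper and its API client are hypothetical), mock at the method call boundary with `unittest.mock` instead of patching internals:
```python
from unittest import mock


def fetch_user(client, user_id):
    # Hypothetical function under test: wraps a call to an external API.
    return client.get(f"/users/{user_id}").json()


# Would live in tests/foo/bar/test_cool_feature.py per the naming guidance above.
def test_fetch_user_returns_parsed_json():
    # Mock the external client at the call boundary, not its internals.
    client = mock.Mock()
    client.get.return_value.json.return_value = {"id": 1, "name": "dev"}

    assert fetch_user(client, 1) == {"id": 1, "name": "dev"}
    client.get.assert_called_once_with("/users/1")
```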
### Testing in Python vs. Ruby on Rails
- [Work item](https://gitlab.com/gitlab-org/gitlab/-/issues/516193)

View File

@ -108,7 +108,7 @@ test: # builds and tests your site
rules:
- if: $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
deploy-pages: # a user-defined job that builds your pages and saves them to the specified path.
create-pages: # a user-defined job that builds your pages and saves them to the specified path.
script:
- hugo
pages: true # specifies that this is a Pages job

View File

@ -48,7 +48,7 @@ Use these features to gain insights into developer productivity and code coverag
| Feature | Description | Project-level | Group-level | Instance-level |
| ------- | ----------- | ------------- | ----------- | -------------- |
| [Contribution analytics](../group/contribution_analytics/_index.md) | Overview of [contribution events](../profile/contributions_calendar.md) made by group members, with bar chart of push events, merge requests, and issues. | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No |
| [Contributor analytics](../analytics/contributor_analytics.md) | Overview of commits made by project members, with line chart of number of commits. | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No |
| [Contributor analytics](contributor_analytics.md) | Overview of commits made by project members, with line chart of number of commits. | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No |
| [Repository analytics](../group/repositories_analytics/_index.md) | Programming languages used in the repository and code coverage statistics. | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No |
### CI/CD analytics

View File

@ -44,8 +44,8 @@ You cannot edit the built-in dashboards, but you can create custom dashboards wi
The following built-in dashboards are available:
- [**Value Streams Dashboard**](../analytics/value_streams_dashboard.md) displays metrics related to DevOps performance, security exposure, and workstream optimization.
- [**AI Impact Dashboard**](../analytics/ai_impact_analytics.md) displays the impact of AI tools on software development lifecycle (SDLC) metrics for a project or group.
- [**Value Streams Dashboard**](value_streams_dashboard.md) displays metrics related to DevOps performance, security exposure, and workstream optimization.
- [**AI Impact Dashboard**](ai_impact_analytics.md) displays the impact of AI tools on software development lifecycle (SDLC) metrics for a project or group.
## Custom dashboards
@ -427,4 +427,4 @@ defined in `ee/app/validators/json_schemas/analytics_visualization.json`.
If a dashboard panel displays an error message:
- Make sure your [visualization](../analytics/analytics_dashboards.md#define-a-chart-visualization-template) configuration is set up correctly.
- Make sure your [visualization](analytics_dashboards.md#define-a-chart-visualization-template) configuration is set up correctly.

View File

@ -20,7 +20,7 @@ Use productivity analytics to identify:
- Potential causes of merge requests that take a long time to merge.
- Authors, labels, or milestones that take the longest time to merge or contain most changes.
To view merge request data for projects, use [merge request analytics](../analytics/merge_request_analytics.md).
To view merge request data for projects, use [merge request analytics](merge_request_analytics.md).
## Charts

View File

@ -411,7 +411,7 @@ To unlink a security policy project, follow the same steps but instead select th
the dialog.
You can link to a security policy project from a different subgroup in the same top-level group, or from an entirely different top-level group.
However, when you enforce a
[pipeline execution policy](../policies/pipeline_execution_policies.md#pipeline-execution-policy-schema), users must have at least read-only access to the project that contains the CI/CD configuration referenced in the policy to trigger the pipeline.
[pipeline execution policy](pipeline_execution_policies.md#pipeline-execution-policy-schema), users must have at least read-only access to the project that contains the CI/CD configuration referenced in the policy to trigger the pipeline.
### Viewing the linked security policy project
@ -503,7 +503,7 @@ When working with security policies, consider these troubleshooting tips:
subgroup the development project belongs to. Linking this way results in approval
rules from the merge request approval policies not being applied to merge requests in the development project.
- When creating a merge request approval policy, neither the array `severity_levels` nor the array
`vulnerability_states` in the [`scan_finding` rule](../policies/merge_request_approval_policies.md#scan_finding-rule-type)
`vulnerability_states` in the [`scan_finding` rule](merge_request_approval_policies.md#scan_finding-rule-type)
can be left empty. For a working rule, at least one entry must exist for each array.
- The owner of a project can enforce policies for that project, provided they also have permissions to create projects in the group.
Project owners who are not group members may face limitations in adding or editing policies. If you're unable to manage policies for your project,

View File

@ -177,9 +177,9 @@ You can also use a ruleset configuration file stored remotely (that is, a remote
You can disable rules that you don't want active. To disable rules from the analyzer default ruleset:
1. [Create a ruleset configuration file](#create-a-ruleset-configuration-file), if one doesn't exist already.
1. Set the `disabled` flag to `true` in the context of a [`ruleset` section](../pipeline/custom_rulesets_schema.md#the-secretsruleset-section).
1. Set the `disabled` flag to `true` in the context of a [`ruleset` section](custom_rulesets_schema.md#the-secretsruleset-section).
1. In one or more `ruleset.identifier` subsections, list the rules to disable. Every
[`ruleset.identifier` section](../pipeline/custom_rulesets_schema.md#the-secretsrulesetidentifier-section) has:
[`ruleset.identifier` section](custom_rulesets_schema.md#the-secretsrulesetidentifier-section) has:
- A `type` field for the predefined rule identifier.
- A `value` field for the rule name.
@ -208,10 +208,10 @@ To override rules from the analyzer default ruleset:
1. [Create a ruleset configuration file](#create-a-ruleset-configuration-file), if one doesn't exist already.
1. In one or more `ruleset.identifier` subsections, list the rules to override. Every
[`ruleset.identifier` section](../pipeline/custom_rulesets_schema.md#the-secretsrulesetidentifier-section) has:
[`ruleset.identifier` section](custom_rulesets_schema.md#the-secretsrulesetidentifier-section) has:
- A `type` field for the predefined rule identifier.
- A `value` field for the rule name.
1. In the [`ruleset.override` context](../pipeline/custom_rulesets_schema.md#the-secretsrulesetoverride-section) of a [`ruleset` section](../pipeline/custom_rulesets_schema.md#the-secretsruleset-section), provide the keys to override. Any combination of keys can be overridden. Valid keys are:
1. In the [`ruleset.override` context](custom_rulesets_schema.md#the-secretsrulesetoverride-section) of a [`ruleset` section](custom_rulesets_schema.md#the-secretsruleset-section), provide the keys to override. Any combination of keys can be overridden. Valid keys are:
- `description`
- `message`
- `name`
@ -277,18 +277,18 @@ See [bot users for groups](../../../group/settings/group_access_tokens.md#bot-us
### Replace the default ruleset
You can replace the default ruleset configuration using a number of [customizations](../pipeline/custom_rulesets_schema.md). Those can be combined using [passthroughs](../pipeline/custom_rulesets_schema.md#passthrough-types) into a single configuration.
You can replace the default ruleset configuration using a number of [customizations](custom_rulesets_schema.md). Those can be combined using [passthroughs](custom_rulesets_schema.md#passthrough-types) into a single configuration.
Using passthroughs, you can:
- Chain up to [20 passthroughs](../pipeline/custom_rulesets_schema.md#the-secretspassthrough-section) into a single configuration to replace or extend predefined rules.
- Include [environment variables in passthroughs](../pipeline/custom_rulesets_schema.md#interpolate).
- Set a [timeout](../pipeline/custom_rulesets_schema.md#the-secrets-configuration-section) for evaluating passthroughs.
- [Validate](../pipeline/custom_rulesets_schema.md#the-secrets-configuration-section) TOML syntax used in each defined passthrough.
- Chain up to [20 passthroughs](custom_rulesets_schema.md#the-secretspassthrough-section) into a single configuration to replace or extend predefined rules.
- Include [environment variables in passthroughs](custom_rulesets_schema.md#interpolate).
- Set a [timeout](custom_rulesets_schema.md#the-secrets-configuration-section) for evaluating passthroughs.
- [Validate](custom_rulesets_schema.md#the-secrets-configuration-section) TOML syntax used in each defined passthrough.
#### With an inline ruleset
You can use [`raw` passthrough](../pipeline/custom_rulesets_schema.md#passthrough-types) to replace default ruleset with configuration provided inline.
You can use [`raw` passthrough](custom_rulesets_schema.md#passthrough-types) to replace default ruleset with configuration provided inline.
To do so, add the following in the `.gitlab/secret-detection-ruleset.toml` configuration file stored in the same repository, and adjust the rule defined under `[[rules]]` as appropriate:
@ -308,11 +308,11 @@ regex = '''Custom Raw Ruleset T[est]{3}'''
The above example replaces the default ruleset with a rule that checks for the defined regex: `Custom Raw Ruleset T` followed by three characters, each one of the letters `e`, `s`, or `t`.
For more information on the passthrough syntax to use, see [Schema](../pipeline/custom_rulesets_schema.md#schema).
For more information on the passthrough syntax to use, see [Schema](custom_rulesets_schema.md#schema).
#### With a local ruleset
You can use [`file` passthrough](../pipeline/custom_rulesets_schema.md#passthrough-types) to replace the default ruleset with another file committed to the current repository.
You can use [`file` passthrough](custom_rulesets_schema.md#passthrough-types) to replace the default ruleset with another file committed to the current repository.
To do so, add the following in the `.gitlab/secret-detection-ruleset.toml` configuration file stored in the same repository and adjust the `value` as appropriate to point to the path of the file with the local ruleset configuration:
@ -326,7 +326,7 @@ To do so, add the following in the `.gitlab/secret-detection-ruleset.toml` confi
This would replace the default ruleset with the configuration defined in the `config/gitleaks.toml` file.
For more information on the passthrough syntax to use, see [Schema](../pipeline/custom_rulesets_schema.md#schema).
For more information on the passthrough syntax to use, see [Schema](custom_rulesets_schema.md#schema).
#### With a remote ruleset
@ -366,11 +366,11 @@ To use the `url` passthrough, add the following to the `.gitlab/secret-detection
In this configuration, the analyzer loads the ruleset configuration from the `gitleaks.toml` file stored at the address provided.
For more information on the passthrough syntax to use, see [Schema](../pipeline/custom_rulesets_schema.md#schema).
For more information on the passthrough syntax to use, see [Schema](custom_rulesets_schema.md#schema).
#### With a private remote ruleset
If a ruleset configuration is stored in a private repository you must provide the credentials to access the repository by using the passthrough's [`auth` setting](../pipeline/custom_rulesets_schema.md#the-secretspassthrough-section).
If a ruleset configuration is stored in a private repository, you must provide the credentials to access the repository by using the passthrough's [`auth` setting](custom_rulesets_schema.md#the-secretspassthrough-section).
{{< alert type="note" >}}
@ -392,11 +392,11 @@ To use a remote ruleset stored in a private repository, add the following to the
{{< alert type="warning" >}}
Beware of leaking credentials when using this feature. Check [this section](../pipeline/custom_rulesets_schema.md#interpolate) for an example on how to use environment variables to minimize the risk.
Beware of leaking credentials when using this feature. Check [this section](custom_rulesets_schema.md#interpolate) for an example on how to use environment variables to minimize the risk.
{{< /alert >}}
For more information on the passthrough syntax to use, see [Schema](../pipeline/custom_rulesets_schema.md#schema).
For more information on the passthrough syntax to use, see [Schema](custom_rulesets_schema.md#schema).
### Extend the default ruleset
@ -441,7 +441,7 @@ path = "/gitleaks.toml"
With this ruleset configuration, the analyzer detects any strings matching those two defined regex patterns.
For more information on the passthrough syntax to use, see [Schema](../pipeline/custom_rulesets_schema.md#schema).
For more information on the passthrough syntax to use, see [Schema](custom_rulesets_schema.md#schema).
#### With a remote ruleset
@ -493,7 +493,7 @@ To use a `url` passthrough, add the following to `.gitlab/secret-detection-rules
value = "https://example.com/gitleaks.toml"
```
For more information on the passthrough syntax to use, see [Schema](../pipeline/custom_rulesets_schema.md#schema).
For more information on the passthrough syntax to use, see [Schema](custom_rulesets_schema.md#schema).
### Ignore patterns and paths
@ -556,7 +556,7 @@ path = "/gitleaks.toml"
This ignores any secrets detected in the `/gitleaks.toml` file or in any file ending with one of the specified extensions.
For more information on the passthrough syntax to use, see [Schema](../pipeline/custom_rulesets_schema.md#schema).
For more information on the passthrough syntax to use, see [Schema](custom_rulesets_schema.md#schema).
### Ignore secrets inline

View File

@ -12,7 +12,7 @@ title: Custom rulesets schema
{{< /details >}}
You can use [different kinds of ruleset customizations](../pipeline/configure.md#customize-analyzer-rulesets)
You can use [different kinds of ruleset customizations](configure.md#customize-analyzer-rulesets)
to customize the behavior of pipeline secret detection.
## Schema
@ -103,7 +103,7 @@ rule that you wish to modify.
| `value` | The value of the identifier used by the predefined rule. |
To determine the correct values for `type` and `value`, view the
[`gl-secret-detection-report.json`](../pipeline/_index.md#output) produced by the analyzer.
[`gl-secret-detection-report.json`](_index.md#output) produced by the analyzer.
You can download this file as a job artifact from the analyzer's CI job.
For example, the snippet below shows a finding from a `gitlab_personal_access_token` rule with one
@ -244,7 +244,7 @@ The size of the configuration generated by a single passthrough is limited to 10
| `mode` | All | If `overwrite`, the `target` file is overwritten. If `append`, new content is appended to the `target` file. The `git` type only supports `overwrite`. (Default: `overwrite`) |
| `ref` | `type = "git"` | Contains the name of the branch, tag, or the SHA to pull. |
| `subdir` | `type = "git"` | Used to select a subdirectory of the Git repository as the configuration source. |
| `auth` | `type = "git"` | Used to provide credentials to use when using a [configuration stored in a private Git repository](../pipeline/configure.md#with-a-private-remote-ruleset). |
| `auth` | `type = "git"` | Used to provide credentials to use when using a [configuration stored in a private Git repository](configure.md#with-a-private-remote-ruleset). |
| `value` | All | For the `file`, `url`, and `git` types, defines the location of the file or Git repository. For the `raw` type, contains the inline configuration. |
| `validator` | All | Used to explicitly invoke validators (`xml`, `yaml`, `json`, `toml`) on the target file after the evaluation of a passthrough. |
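As an illustration only (the repository URL is hypothetical; section and key names follow the schema described above), a `git` passthrough that pulls a ruleset from another repository might look like:
```toml
[secrets]
  [[secrets.passthrough]]
    type   = "git"
    value  = "https://gitlab.example.com/security/custom-rulesets.git"
    ref    = "main"
    subdir = "secrets"
    target = "gitleaks.toml"
```
Because the `git` type only supports `overwrite` mode, the pulled configuration replaces the `target` file.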

View File

@ -137,6 +137,6 @@ Vulnerabilities are listed in the following order:
1. Higher EPSS scores (closer to 1) are prioritized.
1. Severities are ordered from `Critical` to `Low`.
Only vulnerabilities detected by [dependency scanning](../dependency_scanning/_index.md) and [container scanning](../container_scanning/_index.md) are included because the Vulnerability Prioritizer CI/CD component requires data only available in Common Vulnerabilities and Exposures (CVE) records. Moreover, only [detected (**Needs triage**) and confirmed](../vulnerabilities/_index.md#vulnerability-status-values) vulnerabilities are shown.
Only vulnerabilities detected by [dependency scanning](../dependency_scanning/_index.md) and [container scanning](../container_scanning/_index.md) are included because the Vulnerability Prioritizer CI/CD component requires data only available in Common Vulnerabilities and Exposures (CVE) records. Moreover, only [detected (**Needs triage**) and confirmed](_index.md#vulnerability-status-values) vulnerabilities are shown.
To add the Vulnerability Prioritizer CI/CD component to your project's CI/CD pipeline, see the [Vulnerability Prioritizer documentation](https://gitlab.com/components/vulnerability-prioritizer).

View File

@ -32,8 +32,8 @@ compliance:
| Feature | Instances | Groups | Projects | Description |
|:-----------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------|:-------------------------------------|:-------------------------------------|:------------|
| [Compliance frameworks](../compliance/compliance_frameworks.md) | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | Describe the type of compliance requirements projects must follow. |
| [Compliance pipelines](../compliance/compliance_pipelines.md) | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | Define a pipeline configuration to run for any projects with a given compliance framework. |
| [Compliance frameworks](compliance_frameworks.md) | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | Describe the type of compliance requirements projects must follow. |
| [Compliance pipelines](compliance_pipelines.md) | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | Define a pipeline configuration to run for any projects with a given compliance framework. |
| [Merge request approval policy approval settings](../application_security/policies/merge_request_approval_policies.md#approval_settings) | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | Enforce a merge request approval policy enforcing multiple approvers and override various project settings in all enforced groups or projects across your GitLab instance or group. |
## Audit management

View File

@ -40,15 +40,15 @@ For a click-through demo, see [Compliance frameworks](https://gitlab.navattic.co
You can create, edit, or delete a compliance framework from a compliance framework report. For more information, see:
- [Create a new compliance framework](../compliance/compliance_center/compliance_frameworks_report.md#create-a-new-compliance-framework).
- [Edit a compliance framework](../compliance/compliance_center/compliance_frameworks_report.md#edit-a-compliance-framework).
- [Delete a compliance framework](../compliance/compliance_center/compliance_frameworks_report.md#delete-a-compliance-framework).
- [Create a new compliance framework](compliance_center/compliance_frameworks_report.md#create-a-new-compliance-framework).
- [Edit a compliance framework](compliance_center/compliance_frameworks_report.md#edit-a-compliance-framework).
- [Delete a compliance framework](compliance_center/compliance_frameworks_report.md#delete-a-compliance-framework).
You can create, edit, or delete a compliance framework from a compliance projects report. For more information, see:
- [Create a new compliance framework](../compliance/compliance_center/compliance_projects_report.md#create-a-new-compliance-framework).
- [Edit a compliance framework](../compliance/compliance_center/compliance_projects_report.md#edit-a-compliance-framework).
- [Delete a compliance framework](../compliance/compliance_center/compliance_projects_report.md#delete-a-compliance-framework).
- [Create a new compliance framework](compliance_center/compliance_projects_report.md#create-a-new-compliance-framework).
- [Edit a compliance framework](compliance_center/compliance_projects_report.md#edit-a-compliance-framework).
- [Delete a compliance framework](compliance_center/compliance_projects_report.md#delete-a-compliance-framework).
Subgroups and projects have access to all compliance frameworks created on their top-level group. However, compliance frameworks cannot be created, edited,
or deleted at the subgroup or project level. Project owners can choose a framework to apply to their projects.
@ -64,7 +64,7 @@ or deleted at the subgroup or project level. Project owners can choose a framewo
You can apply multiple compliance frameworks to a project but cannot apply compliance frameworks to projects in personal namespaces.
To apply a compliance framework to a project, apply the compliance framework through the
[Compliance projects report](../compliance/compliance_center/compliance_projects_report.md#apply-a-compliance-framework-to-projects-in-a-group).
[Compliance projects report](compliance_center/compliance_projects_report.md#apply-a-compliance-framework-to-projects-in-a-group).
You can use the [GraphQL API](../../api/graphql/reference/_index.md#mutationprojectupdatecomplianceframeworks) to apply one or many
compliance frameworks to a project.
@ -88,7 +88,7 @@ A compliance framework that is set to default has a **default** label.
### Set and remove a default by using the compliance center
To set as default (or remove the default) from [compliance projects report](../compliance/compliance_center/compliance_projects_report.md):
To set as default (or remove the default) from [compliance projects report](compliance_center/compliance_projects_report.md):
1. On the left sidebar, select **Search or go to** and find your group.
1. Select **Secure > Compliance center**.
@ -97,7 +97,7 @@ To set as default (or remove the default) from [compliance projects report](../c
1. Select **Set as default**.
1. Select **Save changes**.
To set as default (or remove the default) from [compliance framework report](../compliance/compliance_center/compliance_frameworks_report.md):
To set as default (or remove the default) from [compliance framework report](compliance_center/compliance_frameworks_report.md):
1. On the left sidebar, select **Search or go to** and find your group.
1. Select **Secure > Compliance center**.
@ -109,4 +109,4 @@ To set as default (or remove the default) from [compliance framework report](../
## Remove a compliance framework from a project
To remove a compliance framework from one or multiple projects in a group, remove the compliance framework through the
[Compliance projects report](../compliance/compliance_center/compliance_projects_report.md#remove-a-compliance-framework-from-projects-in-a-group).
[Compliance projects report](compliance_center/compliance_projects_report.md#remove-a-compliance-framework-from-projects-in-a-group).

View File

@ -43,7 +43,7 @@ License approval policies rely on the output of a dependency scanning job to ver
To ensure enforcement of your policies, you should enable dependency scanning on your target development projects. You can achieve this in a few different ways:
- Create a global [scan execution policy](../application_security/policies/scan_execution_policies.md) that enforces Dependency Scanning to run in all target development projects.
- Use a [Compliance Pipeline](../compliance/compliance_frameworks.md) to define a Dependency Scanning job that is enforced on projects enforced by a given Compliance Framework.
- Use a [Compliance Pipeline](compliance_frameworks.md) to define a Dependency Scanning job that is enforced on projects covered by a given Compliance Framework.
- Work with development teams to configure [Dependency Scanning](../application_security/dependency_scanning/_index.md) in each of their project's `.gitlab-ci.yml` files or enable by using the [Security Configuration panel](../application_security/configuration/_index.md).
License approval policies require license information from [GitLab-supported packages](license_scanning_of_cyclonedx_files/_index.md#supported-languages-and-package-managers).

View File

@ -81,9 +81,9 @@ To use Workflow in VS Code, ensure your repository is properly connected.
1. In VS Code, on the top menu, select **Terminal > New Terminal**.
1. Clone your repository: `git clone <repository>`.
1. Change to the directory where your repository was cloned and check out your branch: `git checkout <branch_name>`.
1. Ensure your repository is selected:
1. Ensure your project is selected:
1. On the left sidebar, select **GitLab Workflow** ({{< icon name="tanuki" >}}).
1. Select the repository name. If you have multiple repositories, select the one you want to work with.
1. Select the project name. If you have multiple projects, select the one you want to work with.
1. In the terminal, ensure your repository is configured with a remote: `git remote -v`. The results should look similar to:
```plaintext

View File

@ -21,30 +21,47 @@ This feature is [a private beta](../../policy/development_stages_support.md) and
## General guidance
If you encounter issues:
If you encounter issues, ensure that you have:
1. Ensure that you have the latest version of the GitLab Workflow extension.
1. Ensure that the project you want to use it with meets the [prerequisites](_index.md#prerequisites).
1. Ensure that the folder you opened in VS Code has a Git repository for your GitLab project.
1. Ensure that you've checked out the branch for the code you'd like to change.
1. Ensure that you can connect to the Workflow service:
1. In Google Chrome or Firefox, open Developer Tools and the **Network** tab.
1. Right-click the column headers to trigger protocol column visibility.
1. In the address bar, enter `https://duo-workflow.runway.gitlab.net/DuoWorkflow/ExecuteWorkflow`.
1. Ensure the request was successful and the **Protocol** column includes `h2` in Chrome or `HTTP/2` in Firefox.
1. If the request fails, your network might be blocking the connection, for example with a firewall. The network must let HTTP/2 traffic through to the service.
1. Check local debugging logs:
1. For more output in the logs, open the settings:
1. On macOS: <kbd>Cmd</kbd> + <kbd>,</kbd>
1. On Windows and Linux: <kbd>Ctrl</kbd> + <kbd>,</kbd>
1. Search for the setting **GitLab: Debug** and enable it.
1. Check the language server logs:
1. To open the logs in VS Code, select **View** > **Output**. In the output panel at the bottom, in the top-right corner, select **GitLab Workflow** or **GitLab Language Server** from the list.
1. Review for errors, warnings, connection issues, or authentication problems.
1. The latest version of the GitLab Workflow extension for VS Code.
1. A project that meets the [prerequisites](_index.md#prerequisites).
1. The repository open in VS Code.
1. The branch checked out.
## Docker guidance
For details on these steps, see [the prerequisites](_index.md#prerequisites) and
[how to connect to your repository](_index.md#connect-to-your-repository).
If you encounter issues with your Docker setup for Duo Workflow, try the following steps.
## View debugging logs
You can troubleshoot some issues by viewing debugging logs.
1. Open the settings:
- On macOS: <kbd>Cmd</kbd> + <kbd>,</kbd>
- On Windows and Linux: <kbd>Ctrl</kbd> + <kbd>,</kbd>
1. Search for the setting **GitLab: Debug** and enable it.
1. Open the language server logs:
1. In VS Code, select **View** > **Output**.
1. In the output panel at the bottom, in the upper-right corner,
select **GitLab Workflow** or **GitLab Language Server** from the list.
1. Review for errors, warnings, connection issues, or authentication problems.
## Network issues
Your network might block the connection to the Workflow service,
for example, by using a firewall. The network must let HTTP/2 traffic through to the service.
To confirm that you can connect to the Workflow service:
1. In Google Chrome or Firefox, open Developer Tools and select the **Network** tab.
1. Right-click the column headers to show the **Protocol** column.
1. In the address bar, enter `https://duo-workflow.runway.gitlab.net/DuoWorkflow/ExecuteWorkflow`.
1. Ensure the request was successful and the **Protocol** column includes `h2` in Chrome or `HTTP/2` in Firefox.
If the request fails, your network might be blocking the connection.
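Alternatively, as a quick terminal check (a sketch; the HTTP status returned doesn't matter, only the negotiated protocol version):
```shell
curl -sS -o /dev/null -w '%{http_version}\n' \
  https://duo-workflow.runway.gitlab.net/DuoWorkflow/ExecuteWorkflow
```
If this prints `2`, HTTP/2 traffic is reaching the service.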
## Docker setup
If you encounter issues with your Docker setup for Workflow, try the following steps.
1. [Install Docker and set the socket file path](docker_set_up.md#install-docker-and-set-the-socket-file-path).
1. Restart your container manager. For example, if you use Colima, `colima restart`.
@ -55,9 +72,77 @@ If you encounter issues with your Docker setup for Duo Workflow, try the followi
```
1. For permission issues, ensure your operating system user has the necessary Docker permissions.
1. Verify Docker's internet connectivity by executing the command `docker image pull redhat/ubi8`.
1. Verify that Docker has internet connectivity by executing the command `docker image pull redhat/ubi8`.
If this does not work, the DNS configuration of Colima might be at fault.
Edit the DNS setting in `~/.colima/default/colima.yaml` to `dns: [1.1.1.1]` and then restart Colima with `colima restart`.
1. Check the executor logs:
1. Use `docker ps -a | grep duo-workflow` to get the list of Workflow containers and their ids.
1. Use `docker logs <container_id>` to view the logs for the specific container.
- Use `docker ps -a | grep duo-workflow` to get the list of Workflow containers and their ids.
- Use `docker logs <container_id>` to view the logs for the specific container.
## IDE configuration
You can try several things to ensure your repository is properly configured and connected.
### View the project in the GitLab Workflow extension
Start by ensuring the correct project is selected in the GitLab Workflow extension for VS Code.
1. In VS Code, on the left sidebar, select **GitLab Workflow** ({{< icon name="tanuki" >}}).
1. Ensure the project is listed and selected.
If an error message appears next to the project name, select it to reveal what needs to be updated.
For example, you might have multiple repositories and need to select one, or there might be no repositories at all.
#### No Git repository
If your workspace doesn't have a Git repository initialized, you must create a new one:
1. On the left sidebar, select **Source Control** ({{< icon name="branch" >}}).
1. Select **Initialize Repository**.
When the repository is initialized, you should see the name in the **Source Control** view.
#### Git repository with no GitLab remote
You might have a Git repository but it's not properly connected to GitLab.
1. On the left sidebar, select **Source Control** ({{< icon name="branch" >}}).
1. On the **Source Control** label, right-click and select **Repositories**.
1. Next to your repository, select the ellipsis ({{< icon name="ellipsis_h" >}}), then **Remote > Add Remote**.
1. Enter your GitLab project URL.
1. Select the newly added remote as your upstream.
#### Multiple GitLab remotes
Your repository might have multiple GitLab remotes configured.
To select the correct one:
1. On the left sidebar, select **Source Control** ({{< icon name="branch" >}}).
1. On the status bar, select the current remote name.
1. From the list, select the appropriate GitLab remote.
1. Ensure the selected remote belongs to a group namespace in GitLab.
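To see which remotes are configured and where they point, you can run (sketch):

```shell
# List all configured remotes with their fetch and push URLs
git remote -v
```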
#### Multiple GitLab projects
If your VS Code workspace contains multiple GitLab projects, you might want
to close all the projects you're not using.
To close projects:
1. On the left sidebar, select **Source Control** ({{< icon name="branch" >}}).
1. Ensure repositories are shown: on the **Source Control** label, right-click and select **Repositories**.
1. Right-click the repository you want to close and select **Close Repository**.
### Project not in a group namespace
GitLab Duo Workflow requires that projects belong to a group namespace.
To determine the namespace your project is in, [look at the URL](../../user/namespace/_index.md#determine-which-type-of-namespace-youre-in).
If necessary, you can
[transfer your project to a group namespace](../../tutorials/move_personal_project_to_group/_index.md#move-your-project-to-a-group).
## Still having issues?
Contact your GitLab administrator for assistance.

View File

@ -6,7 +6,7 @@ remove_date: "2025-01-09"
<!-- markdownlint-disable -->
<!-- vale off -->
This document was moved to [another location](../gitlab_duo/_index.md).
This document was moved to [another location](_index.md).
<!-- This redirect file can be deleted after <2025-01-09>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->

View File

@ -383,7 +383,7 @@ For tips and tricks about integrating GitLab Duo Chat into your AI-powered DevSe
read the blog post:
[10 best practices for using AI-powered GitLab Duo Chat](https://about.gitlab.com/blog/2024/04/02/10-best-practices-for-using-ai-powered-gitlab-duo-chat/).
[View examples of how to use GitLab Duo Chat](../gitlab_duo_chat/examples.md).
[View examples of how to use GitLab Duo Chat](examples.md).
## Give feedback

View File

@ -97,10 +97,10 @@ This error occurs when an unexpected issue arises during the processing of a sla
For more information about slash commands, refer to the documentation:
- [/tests](../gitlab_duo_chat/examples.md#write-tests-in-the-ide)
- [/refactor](../gitlab_duo_chat/examples.md#refactor-code-in-the-ide)
- [/fix](../gitlab_duo_chat/examples.md#fix-code-in-the-ide)
- [/explain](../gitlab_duo_chat/examples.md#explain-selected-code)
- [/tests](examples.md#write-tests-in-the-ide)
- [/refactor](examples.md#refactor-code-in-the-ide)
- [/fix](examples.md#fix-code-in-the-ide)
- [/explain](examples.md#explain-selected-code)
## `Error M4001`
@ -114,7 +114,7 @@ This error occurs when there is a problem finding the information needed to comp
You might get an error that states
`I'm sorry, I can't generate a response. Please try again. Error code: M4002`.
This error occurs when there is a problem answering [questions related to CI/CD](../gitlab_duo_chat/examples.md#ask-about-cicd). Try your request again.
This error occurs when there is a problem answering [questions related to CI/CD](examples.md#ask-about-cicd). Try your request again.
## `Error M4003`
@ -122,7 +122,7 @@ You might get an error that states
`This command is used for explaining vulnerabilities and can only be invoked from a vulnerability detail page.` or
`Vulnerability Explanation currently only supports vulnerabilities reported by SAST. Error code: M4003`.
This error occurs when there is a problem using the [`Explain Vulnerability`](../gitlab_duo_chat/examples.md#explain-a-vulnerability) feature.
This error occurs when there is a problem using the [`Explain Vulnerability`](examples.md#explain-a-vulnerability) feature.
## `Error M4004`
@ -136,7 +136,7 @@ This error occurs when there is a problem when using `Summarize Discussion` feat
You might get an error that states
`There is no job log to troubleshoot.` or `This command is used for troubleshooting jobs and can only be invoked from a failed job log page.`.
This error occurs when there is a problem using the [`Troubleshoot job`](../gitlab_duo_chat/examples.md#troubleshoot-failed-cicd-jobs-with-root-cause-analysis) feature.
This error occurs when there is a problem using the [`Troubleshoot job`](examples.md#troubleshoot-failed-cicd-jobs-with-root-cause-analysis) feature.
## `Error M5000`

View File

@ -232,7 +232,7 @@ The availability of this feature is controlled by a feature flag. For more infor
{{< /alert >}}
GitLab sends multiple [expiry emails](../group/settings/group_access_tokens.md#group-access-token-expiry-emails) and triggers a related [webhook](../project/integrations/webhook_events.md#project-and-group-access-token-events) before a group token expires. By default, GitLab only triggers these webhooks 7 days before the token expires. When this feature is enabled, GitLab can also trigger these webhooks 60 days and 30 days before the token expires.
GitLab sends multiple [expiry emails](settings/group_access_tokens.md#group-access-token-expiry-emails) and triggers a related [webhook](../project/integrations/webhook_events.md#project-and-group-access-token-events) before a group token expires. By default, GitLab only triggers these webhooks 7 days before the token expires. When this feature is enabled, GitLab can also trigger these webhooks 60 days and 30 days before the token expires.
To enable additional triggers for these webhooks:

View File

@ -110,7 +110,7 @@ An image layer is only counted once if:
- You share the image layer across different repositories.
Only layers that are referenced by tagged images are accounted for. Untagged images and any layers
referenced exclusively by them are subject to [online garbage collection](../container_registry/delete_container_registry_images.md#garbage-collection).
referenced exclusively by them are subject to [online garbage collection](delete_container_registry_images.md#garbage-collection).
Untagged image layers are automatically deleted after 24 hours if they remain unreferenced during that period.
Image layers are stored on the storage backend in the original (usually compressed) format. This
@ -132,7 +132,7 @@ in the namespace. Untagged image layers are not ignored. As a result,
the displayed usage size might not change significantly after deleting tags. Instead,
the size value only changes when:
- An automated [garbage collection process](../container_registry/delete_container_registry_images.md#garbage-collection)
- An automated [garbage collection process](delete_container_registry_images.md#garbage-collection)
runs and deletes untagged image layers. After a user deletes a tag, a garbage collection run
is scheduled to start 24 hours later. During that run, images that were previously tagged
are analyzed and their layers deleted if not referenced by any other tagged image.

View File

@ -112,12 +112,12 @@ The following items are changed when they are imported:
- User mapping by email address or username [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/36885) in GitLab 13.4 [with a flag](../../../administration/feature_flags.md) named `bitbucket_server_user_mapping_by_username`. Disabled by default.
- Mapping user mentions to GitLab users [added](https://gitlab.com/gitlab-org/gitlab/-/issues/433008) in GitLab 16.8.
- [Changed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/153041) to map users only by email address in GitLab 17.1.
- [Changed on GitLab.com](https://gitlab.com/groups/gitlab-org/-/epics/14667) to [user contribution and membership mapping](../import/_index.md#user-contribution-and-membership-mapping) in GitLab 17.8.
- [Changed on GitLab.com](https://gitlab.com/groups/gitlab-org/-/epics/14667) to [user contribution and membership mapping](_index.md#user-contribution-and-membership-mapping) in GitLab 17.8.
- [Enabled on GitLab.com and GitLab Self-Managed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/176675) in GitLab 17.8.
{{< /history >}}
The Bitbucket Server importer uses an [improved method](../import/_index.md#user-contribution-and-membership-mapping)
The Bitbucket Server importer uses an [improved method](_index.md#user-contribution-and-membership-mapping)
of mapping user contributions for GitLab.com and GitLab Self-Managed.
### Old method of user contribution mapping
@ -125,7 +125,7 @@ of mapping user contributions for GitLab.com and GitLab Self-Managed.
You can use the old user contribution mapping method for imports to GitLab Self-Managed and GitLab Dedicated instances.
To use this method, `importer_user_mapping` and `bitbucket_server_user_mapping` must be disabled.
For imports to GitLab.com, you must
use the [improved method](../import/_index.md#user-contribution-and-membership-mapping) instead.
use the [improved method](_index.md#user-contribution-and-membership-mapping) instead.
Using the old method, the importer tries to match a Bitbucket Server user's email address with a confirmed email address in the GitLab user database. If no
such user is found:

View File

@ -103,12 +103,12 @@ You also can:
{{< history >}}
- [Changed on GitLab.com](https://gitlab.com/groups/gitlab-org/-/epics/14667) to [user contribution and membership mapping](../import/_index.md#user-contribution-and-membership-mapping) in GitLab 17.8.
- [Changed on GitLab.com](https://gitlab.com/groups/gitlab-org/-/epics/14667) to [user contribution and membership mapping](_index.md#user-contribution-and-membership-mapping) in GitLab 17.8.
- [Enabled on GitLab.com and GitLab Self-Managed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/176675) in GitLab 17.8.
{{< /history >}}
The Gitea importer uses an [improved method](../import/_index.md#user-contribution-and-membership-mapping)
The Gitea importer uses an [improved method](_index.md#user-contribution-and-membership-mapping)
of mapping user contributions for GitLab.com and GitLab Self-Managed.
### Old method of user contribution mapping
@ -116,6 +116,6 @@ of mapping user contributions for GitLab.com and GitLab Self-Managed.
You can use the old user contribution mapping method for imports to GitLab Self-Managed and GitLab Dedicated instances.
To use this method, `importer_user_mapping` and `gitea_user_mapping` must be disabled.
For imports to GitLab.com, you must
use the [improved method](../import/_index.md#user-contribution-and-membership-mapping) instead.
use the [improved method](_index.md#user-contribution-and-membership-mapping) instead.
Using the old method, user contributions are assigned to the project creator (usually the user who started the import process) by default.

View File

@ -79,7 +79,7 @@ on the GitLab instance you import to.
{{< /history >}}
Before using [the old method of user contribution mapping](#old-method-of-user-contribution-mapping) for imports to GitLab Self-Managed and GitLab
Dedicated, you must meet certain requirements. Imports to GitLab.com use an [improved method](../import/_index.md#user-contribution-and-membership-mapping)
Dedicated, you must meet certain requirements. Imports to GitLab.com use an [improved method](_index.md#user-contribution-and-membership-mapping)
that doesn't require preparation.
These requirements are:
@ -283,12 +283,12 @@ These backticks prevent linking to an incorrect user with the same username on t
{{< history >}}
- [Changed on GitLab.com](https://gitlab.com/groups/gitlab-org/-/epics/14667) to [user contribution and membership mapping](../import/_index.md#user-contribution-and-membership-mapping) in GitLab 17.8.
- [Changed on GitLab.com](https://gitlab.com/groups/gitlab-org/-/epics/14667) to [user contribution and membership mapping](_index.md#user-contribution-and-membership-mapping) in GitLab 17.8.
- [Enabled on GitLab.com and GitLab Self-Managed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/176675) in GitLab 17.8.
{{< /history >}}
The GitHub importer uses an [improved method](../import/_index.md#user-contribution-and-membership-mapping)
The GitHub importer uses an [improved method](_index.md#user-contribution-and-membership-mapping)
of mapping user contributions for GitLab.com and GitLab Self-Managed.
### Old method of user contribution mapping
@ -296,7 +296,7 @@ of mapping user contributions for GitLab.com and GitLab Self-Managed.
You can use the old user contribution mapping method for imports to GitLab Self-Managed and GitLab Dedicated instances.
To use this method, `importer_user_mapping` and `github_user_mapping` must be disabled.
For imports to GitLab.com, you must
use the [improved method](../import/_index.md#user-contribution-and-membership-mapping) instead.
use the [improved method](_index.md#user-contribution-and-membership-mapping) instead.
Using the old method, when [user accounts are provisioned correctly](#accounts-for-user-contribution-mapping), users are mapped during the import.

View File

@ -31,7 +31,7 @@ If you opt to migrate your Jira issues, you can choose from several migration op
GitLab has a built-in tool to import your Jira issue data. To use the GitLab Jira importer:
1. [Configure the GitLab Jira issues integration in your target project](../../../integration/jira/configure.md#configure-the-integration)
1. [Import your Jira project issues to GitLab](../import/jira.md)
1. [Import your Jira project issues to GitLab](jira.md)
Alternatively, you can watch a complete demo of the process: <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [Import Jira project issues to GitLab](https://www.youtube.com/watch?v=OTJdJWmODFA)
<!-- Video published on 2023-07-27 -->

View File

@ -37,7 +37,7 @@ Indirect membership can be inherited, shared, or inherited shared.
| Direct | The user is added directly to the current group or project. |
| Inherited | The user is a member of a parent group that contains the current group or project. |
| [Shared](sharing_projects_groups.md) | The user is a member of a group invited to the current group or project. |
| [Inherited shared](../members/sharing_projects_groups.md#invite-a-group-to-a-group) | The user is a member of a group invited to an ancestor of the current group or project. |
| [Inherited shared](sharing_projects_groups.md#invite-a-group-to-a-group) | The user is a member of a group invited to an ancestor of the current group or project. |
| Indirect | An umbrella term for inherited, shared, or inherited shared members. |
```mermaid

View File

@ -212,10 +212,10 @@ Prerequisites:
{{< /history >}}
You can configure your Pages deployments to be automatically deleted after
a period of time has passed by specifying a duration at [`pages.expire_in`](../../../ci/yaml/_index.md#pagespagesexpire_in):
a period of time has passed by specifying a duration at [`pages.expire_in`](../../../ci/yaml/_index.md#pagesexpire_in):
```yaml
deploy-pages:
create-pages:
stage: deploy
script:
- ...

View File

@ -77,7 +77,7 @@ There are some certificate authorities that
offer free certificates, aiming to make the internet more secure
for everyone. The most popular is [Let's Encrypt](https://letsencrypt.org/),
which issues certificates trusted by most browsers; it's open
source, and free to use. See [GitLab Pages integration with Let's Encrypt](../custom_domains_ssl_tls_certification/lets_encrypt_integration.md) to enable HTTPS on your custom domain.
source, and free to use. See [GitLab Pages integration with Let's Encrypt](lets_encrypt_integration.md) to enable HTTPS on your custom domain.
Similarly popular are [certificates issued by Cloudflare](https://www.cloudflare.com/application-services/products/ssl/),
which also offers a [free CDN service](https://blog.cloudflare.com/cloudflares-free-cdn-and-you/).

View File

@ -120,7 +120,7 @@ This setting tells the runner you want the job to deploy your website
with GitLab Pages:
```yaml
deploy-pages:
create-pages:
script:
- gem install bundler
- bundle install
@ -139,7 +139,7 @@ Jekyll uses a destination flag (`-d`) to specify an output directory for the bui
Add the destination to your `.gitlab-ci.yml` file:
```yaml
deploy-pages:
create-pages:
script:
- gem install bundler
- bundle install
@ -151,18 +151,18 @@ deploy-pages:
{{< history >}}
- Automatically appending `pages:pages.publish` path to `artifacts:paths` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/428018) in GitLab 17.10 for Pages jobs only.
- Automatically appending `pages.publish` path to `artifacts:paths` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/428018) in GitLab 17.10 for Pages jobs only.
{{< /history >}}
Now that Jekyll has output the files to the `public` directory, the runner needs to know where
to get them. In GitLab 17.10 and later, for Pages jobs only, the `public` directory is
appended automatically to [`artifacts:paths`](../../../../ci/yaml/_index.md#artifactspaths)
when the [`pages:pages.publish`](../../../../ci/yaml/_index.md#pagespagespublish) path
when the [`pages.publish`](../../../../ci/yaml/_index.md#pagespublish) path
is not explicitly specified:
```yaml
deploy-pages:
create-pages:
script:
- gem install bundler
- bundle install
@ -176,7 +176,7 @@ Your `.gitlab-ci.yml` file should now look like this:
default:
image: ruby:3.2
deploy-pages:
create-pages:
script:
- gem install bundler
- bundle install
@ -221,7 +221,7 @@ workflow:
rules:
- if: $CI_COMMIT_BRANCH
deploy-pages:
create-pages:
script:
- gem install bundler
- bundle install
@ -240,7 +240,7 @@ workflow:
rules:
- if: $CI_COMMIT_BRANCH
deploy-pages:
create-pages:
script:
- gem install bundler
- bundle install
@ -270,7 +270,7 @@ workflow:
rules:
- if: $CI_COMMIT_BRANCH
deploy-pages:
create-pages:
stage: deploy
script:
- gem install bundler
@ -293,7 +293,7 @@ workflow:
rules:
- if: $CI_COMMIT_BRANCH
deploy-pages:
create-pages:
stage: deploy
script:
- gem install bundler
@ -347,7 +347,7 @@ workflow:
rules:
- if: $CI_COMMIT_BRANCH
deploy-pages:
create-pages:
stage: deploy
script:
- bundle exec jekyll build -d public
@ -390,7 +390,7 @@ workflow:
- if: $CI_COMMIT_BRANCH
deploy-pages:
create-pages:
stage: deploy
script:
- bundle exec jekyll build -d public

View File

@ -92,10 +92,10 @@ You must host your GitLab Pages website in a project. This project can be
[private, internal, or public](../../public_access.md) and belong
to a [group](../../group/_index.md) or [subgroup](../../group/subgroups/_index.md).
For [group websites](../pages/getting_started_part_one.md#user-and-group-website-examples),
For [group websites](getting_started_part_one.md#user-and-group-website-examples),
the group must be at the top level and not a subgroup.
For [project websites](../pages/getting_started_part_one.md#project-website-examples),
For [project websites](getting_started_part_one.md#project-website-examples),
you can create your project first and access it under `http(s)://namespace.example.io/project-path`.
## Specific configuration options for Pages
@ -119,7 +119,7 @@ directory of the project to the `public/` directory. The `.public` workaround
is so `cp` doesn't also copy `public/` to itself in an infinite loop:
```yaml
deploy-pages:
create-pages:
script:
- mkdir .public
- cp -r * .public
@ -160,7 +160,7 @@ Below is a copy of `.gitlab-ci.yml` where the most significant line is the last
one, specifying to execute everything in the `pages` branch:
```yaml
deploy-pages:
create-pages:
image: ruby:2.6
script:
- gem install jekyll
@ -209,7 +209,7 @@ This can be achieved by including a `script:` command like this in your
`.gitlab-ci.yml` pages job:
```yaml
deploy-pages:
create-pages:
# Other directives
script:
# Build the public/ directory first
@ -274,37 +274,49 @@ for both the `/data` and `/data/` URL paths.
- [Enabled on GitLab Self-Managed](https://gitlab.com/gitlab-org/gitlab-pages/-/merge_requests/890) in GitLab 16.2.
- [Changed](https://gitlab.com/gitlab-org/gitlab/-/issues/500000) to allow variables when passed to `publish` property in GitLab 17.9.
- [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/428018) the `publish` property under the `pages` keyword in GitLab 17.9.
- [Appended](https://gitlab.com/gitlab-org/gitlab/-/issues/428018) the `pages.publish` path automatically to `artifacts:paths` in GitLab 17.10.
{{< /history >}}
By default, the [artifact](../../../ci/jobs/job_artifacts.md) folder
that contains the static files of your site needs to have the name `public`.
By default, Pages looks for a folder named `public` in your build files and publishes it.
To change that folder name to any other value, add a `pages.publish` property to your
`deploy-pages` job configuration in `.gitlab-ci.yml`. The top-level `publish` keyword
is deprecated as of GitLab 17.9 and must now be nested under the `pages` keyword.
`deploy-pages` job configuration in `.gitlab-ci.yml`.
The following example publishes a folder named `dist` instead:
```yaml
deploy-pages:
create-pages:
script:
- npm run build
pages: # specifies that this is a Pages job
publish: dist
```
The previous YAML example uses [user-defined job names](_index.md#user-defined-job-names).
To use variables in the `pages.publish` field, see [`pages.publish`](../../../ci/yaml/_index.md#pagespublish).
Pages uses artifacts to store the files of your site, so the value from
`pages.publish` is automatically appended to [`artifacts:paths`](../../../ci/yaml/_index.md#artifactspaths).
The previous example is equivalent to:
```yaml
create-pages:
script:
- npm run build
pages:
publish: dist
artifacts:
paths:
- dist
```
If you're using a folder name other than `public`, you must specify
the directory to be deployed with Pages both as an artifact, and under the
`pages.publish` property. The reason you need both is that you can define multiple paths
as artifacts, and GitLab doesn't know which one you want to deploy.
{{< alert type="warning" >}}
The previous YAML example uses [user-defined job names](_index.md#user-defined-job-names).
The top-level `publish` keyword was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/519499) in GitLab 17.9 and must now be nested under the `pages` keyword.
To use variables in the `pages.publish` field, see [`pages:pages.publish`](../../../ci/yaml/_index.md#pagespagespublish).
{{< /alert >}}
## Regenerate unique domain for GitLab Pages
@ -367,7 +379,7 @@ Safari requires the web server to support the [Range request header](https://dev
HTTP Range requests, you should use the following two variables in your `.gitlab-ci.yml` file:
```yaml
deploy-pages:
create-pages:
stage: deploy
variables:
FF_USE_FASTZIP: "true"

View File

@ -21,7 +21,7 @@ title: GitLab Pages parallel deployments
- [Changed](https://gitlab.com/gitlab-org/gitlab/-/issues/507423) to allow periods in `path_prefix` in GitLab 17.8.
- [Changed](https://gitlab.com/gitlab-org/gitlab/-/issues/500000) to allow variables when passed to `publish` property in GitLab 17.9.
- [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/487161) in GitLab 17.9. Feature flag `pages_multiple_versions_setting` removed.
- Automatically appending `pages:pages.publish` path to `artifacts:paths` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/428018) in GitLab 17.10 for Pages jobs only.
- Automatically appending `pages.publish` path to `artifacts:paths` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/428018) in GitLab 17.10 for Pages jobs only.
{{< /history >}}
@ -164,7 +164,7 @@ Mixing [CI/CD variables](../../../ci/variables/_index.md) with other strings can
possibility. For example:
```yaml
deploy-pages:
create-pages:
stage: deploy
script:
- echo "Pages accessible through ${CI_PAGES_URL}"
@ -197,7 +197,7 @@ You can use parallel GitLab Pages deployments to create a new [environment](../.
For example:
```yaml
deploy-pages:
create-pages:
stage: deploy
script:
- echo "Pages accessible through ${CI_PAGES_URL}"

View File

@ -125,7 +125,7 @@ module.exports = nextConfig
An example `.gitlab-ci.yml` can be as minimal as:
```yaml
deploy-pages:
create-pages:
before_script:
- npm install
script:

View File

@ -62,7 +62,7 @@ To auto-format this table, use the VS Code Markdown Table formatter: `https://do
| Command | Issue | Merge request | Epic | Action |
|:------------------------------------------------------------------------------------------------|:-----------------------|:-----------------------|:-----------------------|:-------|
| `/add_child <item>` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | Add `<item>` as a child item. The `<item>` value should be in the format of `#item`, `group/project#item`, or a URL to the item. For issues, you can add tasks and OKRs. [The new look for issues](../project/issues/issue_work_items.md) must be enabled. For epics, you can add issues, tasks, and OKRs. Multiple work items can be added as child items at the same time. [The new look for epics](../group/epics/epic_work_items.md) must be enabled. |
| `/add_child <item>` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | Add `<item>` as a child item. The `<item>` value should be in the format of `#item`, `group/project#item`, or a URL to the item. For issues, you can add tasks and OKRs. [The new look for issues](issues/issue_work_items.md) must be enabled. For epics, you can add issues, tasks, and OKRs. Multiple work items can be added as child items at the same time. [The new look for epics](../group/epics/epic_work_items.md) must be enabled. |
| `/add_contacts [contact:email1@example.com] [contact:email2@example.com]` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | Add one or more active [CRM contacts](../crm/_index.md). |
| `/add_email email1 email2` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | Add up to six [email participants](service_desk/external_participants.md). This action is behind the feature flag `issue_email_participants`. Not supported in [issue templates](description_templates.md). |
| `/approve` | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | Approve the merge request. |
@ -101,7 +101,7 @@ To auto-format this table, use the VS Code Markdown Table formatter: `https://do
| `/page <policy name>` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | Start escalations for the incident. |
| `/parent_epic <epic>` | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | Set parent epic to `<epic>`. The `<epic>` value should be in the format of `&epic`, `group&epic`, or a URL to an epic. If [the new look for epics](../group/epics/epic_work_items.md) is enabled, use `/set_parent` instead. |
| `/promote_to_incident` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | Promote issue to incident. In [GitLab 15.8 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/376760), you can also use the quick action when creating a new issue. |
| `/promote` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | Promote issue to epic. If [the new look for issues](../project/issues/issue_work_items.md) is enabled, use `/promote_to epic` instead. |
| `/promote` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | Promote issue to epic. If [the new look for issues](issues/issue_work_items.md) is enabled, use `/promote_to epic` instead. |
| `/publish` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | Publish issue to an associated [Status Page](../../operations/incident_management/status_page.md). |
| `/react :emoji:` | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | Toggle an emoji reaction. [Renamed](https://gitlab.com/gitlab-org/gitlab/-/issues/409884) from `/award` in GitLab 16.7. `/award` is still available as an aliased command. |
| `/ready` | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | Set the [ready status](merge_requests/drafts.md#mark-merge-requests-as-ready) ([Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/90361) in GitLab 15.1). |
@ -110,7 +110,7 @@ To auto-format this table, use the VS Code Markdown Table formatter: `https://do
| `/rebase` | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | Rebase source branch on the latest commit of the target branch. For help, see [troubleshooting information](../../topics/git/troubleshooting_git.md). |
| `/relabel ~label1 ~label2` | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | Replace current labels with those specified. |
| `/relate <item1> <item2>` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | Mark items as related. The `<item>` value should be in the format of `#item`, `group/project#item`, or the full URL. For epics, [the new look for epics](../group/epics/epic_work_items.md) must be enabled. |
| `/remove_child <item>` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | Remove `<item>` as child. The `<item>` value should be in the format of `#item`, `group/project#item`, or a URL to the item. For issues, [the new look for issues](../project/issues/issue_work_items.md) must be enabled. For epics, [the new look for epics](../group/epics/epic_work_items.md) must be enabled. |
| `/remove_child <item>` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | Remove `<item>` as child. The `<item>` value should be in the format of `#item`, `group/project#item`, or a URL to the item. For issues, [the new look for issues](issues/issue_work_items.md) must be enabled. For epics, [the new look for epics](../group/epics/epic_work_items.md) must be enabled. |
| `/remove_child_epic <epic>` | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | Remove child epic from `<epic>`. The `<epic>` value should be in the format of `&epic`, `group&epic`, or a URL to an epic. If [the new look for epics](../group/epics/epic_work_items.md) is enabled, use `/remove_child` instead. |
| `/remove_contacts [contact:email1@example.com] [contact:email2@example.com]` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | Remove one or more [CRM contacts](../crm/_index.md) |
| `/remove_due_date` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | Remove due date. |
@ -119,14 +119,14 @@ To auto-format this table, use the VS Code Markdown Table formatter: `https://do
| `/remove_estimate` or `/remove_time_estimate` | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | Remove time estimate. Alias `/remove_time_estimate` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/16501) in GitLab 15.6. For epics, [the new look for epics](../group/epics/epic_work_items.md) must be enabled. |
| `/remove_iteration` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | Remove iteration. |
| `/remove_milestone` | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | Remove milestone. |
| `/remove_parent` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | Remove the parent from item. For issues, [the new look for issues](../project/issues/issue_work_items.md) must be enabled. For epics, [the new look for epics](../group/epics/epic_work_items.md) must be enabled. |
| `/remove_parent` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | Remove the parent from item. For issues, [the new look for issues](issues/issue_work_items.md) must be enabled. For epics, [the new look for epics](../group/epics/epic_work_items.md) must be enabled. |
| `/remove_parent_epic` | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | Remove parent epic from epic. If [the new look for epics](../group/epics/epic_work_items.md) is enabled, use `/remove_parent` instead. |
| `/remove_time_spent` | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes |{{< icon name="check-circle" >}} Yes | Remove time spent. For epics, [the new look for epics](../group/epics/epic_work_items.md) must be enabled. |
| `/remove_zoom` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | Remove Zoom meeting from this issue. |
| `/reopen` | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | Reopen. |
| `/request_review @user1 @user2` | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | Assigns or requests a new review from one or more users. |
| `/request_review me` | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | Assigns or requests a new review from one or more users. |
| `/set_parent <item>` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | Set parent item. The `<item>` value should be in the format of `#IID`, reference, or a URL to an item. For issues, [the new look for issues](../project/issues/issue_work_items.md) must be enabled. For epics, [the new look for epics](../group/epics/epic_work_items.md) must be enabled. |
| `/set_parent <item>` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="check-circle" >}} Yes | Set parent item. The `<item>` value should be in the format of `#IID`, reference, or a URL to an item. For issues, [the new look for issues](issues/issue_work_items.md) must be enabled. For epics, [the new look for epics](../group/epics/epic_work_items.md) must be enabled. |
| `/severity <severity>` | {{< icon name="check-circle" >}} Yes | {{< icon name="dotted-circle" >}} No | {{< icon name="dotted-circle" >}} No | Set the severity. Issue type must be `Incident`. Options for `<severity>` are `S1` ... `S4`, `critical`, `high`, `medium`, `low`, `unknown`. |
| `/shrug` | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | Add `¯\_(ツ)_/¯`. |
| `/spend <time> [<date>]` or `/spend_time <time> [<date>]` | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes | {{< icon name="check-circle" >}} Yes| Add or subtract spent time. Optionally, specify the date that time was spent on. For example, `/spend 1mo 2w 3d 4h 5m 2018-08-26` or `/spend -1h 30m`. For more information, see [Time tracking](time_tracking.md). Alias `/spend_time` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/16501) in GitLab 15.6. For epics, [the new look for epics](../group/epics/epic_work_items.md) must be enabled. |

View File

@ -82,7 +82,7 @@ You can access the latest release URL with the following permanent link:
https://gitlab.com/gitlab-org/gitlab-runner/-/releases/permalink/latest#release
```
To learn about adding permanent links to release assets, see [Permanent links to latest release assets](../releases/release_fields.md#permanent-links-to-latest-release-assets).
To learn about adding permanent links to release assets, see [Permanent links to latest release assets](release_fields.md#permanent-links-to-latest-release-assets).
#### Sorting preferences

View File

@ -178,7 +178,7 @@ you can sign individual commits manually, or configure Git to default to signed
```
1. Enter the passphrase of your GPG key when asked.
1. Push to GitLab and check that your commits [are verified](../signed_commits/_index.md#verify-commits).
1. Push to GitLab and check that your commits [are verified](_index.md#verify-commits).
- Sign all Git commits by default by running this command:
```shell

View File

@ -101,7 +101,7 @@ To sign a commit:
## Verify commits
You can verify all types of signed commits
[in the GitLab UI](../signed_commits/_index.md#verify-commits). Commits signed
[in the GitLab UI](_index.md#verify-commits). Commits signed
with an SSH key can also be verified locally.
### Verify commits locally
@ -168,6 +168,6 @@ Removing your SSH key can impact any commits signed with the key:
## Related topics
- [Sign commits and tags with X.509 certificates](../signed_commits/x509.md)
- [Sign commits and tags with X.509 certificates](x509.md)
- [Sign commits with GPG](gpg.md)
- [Commits API](../../../../api/commits.md)

View File

@ -186,7 +186,7 @@ can start signing your tags:
## Troubleshooting
For committers without administrator access, review the list of
[verification problems with signed commits](../signed_commits/_index.md#fix-verification-problems-with-signed-commits)
[verification problems with signed commits](_index.md#fix-verification-problems-with-signed-commits)
for possible fixes. The other troubleshooting suggestions on this page require
administrator access.

View File

@ -90,7 +90,7 @@ to move any project to any namespace.
When you transfer a project from a namespace licensed for GitLab.com Premium or Ultimate to GitLab Free:
- [Project access tokens](../settings/project_access_tokens.md) are revoked.
- [Project access tokens](project_access_tokens.md) are revoked.
- [Pipeline subscriptions](../../../ci/pipelines/_index.md#trigger-a-pipeline-when-an-upstream-project-is-rebuilt-deprecated)
and [test cases](../../../ci/test_cases/_index.md) are deleted.

View File

@ -342,7 +342,7 @@ The container runtime used by the Kubernetes cluster must ensure all containers
If you have a container image that does not support arbitrary user IDs,
you cannot create, update, or delete files in a workspace.
To create a container image that supports arbitrary user IDs,
see [Create a custom workspace image that supports arbitrary user IDs](../workspace/create_image.md).
see [Create a custom workspace image that supports arbitrary user IDs](create_image.md).
For more information, see the
[OpenShift documentation](https://docs.openshift.com/container-platform/4.12/openshift_images/create-images.html#use-uid_create-images).

View File

@ -11,7 +11,7 @@ module API
feature_category :source_code_management
allow_access_with_scope :read_repository, if: ->(request) { request.get? || request.head? }
allow_access_with_scope [:read_repository, :ai_workflows], if: ->(request) { request.get? || request.head? }
helpers ::API::Helpers::HeadersHelpers

View File

@ -73,7 +73,7 @@ module API
def revoke_token(token)
service = ::PersonalAccessTokens::RevokeService.new(current_user, token: token).execute
service.success? ? no_content! : bad_request!(nil)
service.success? ? no_content! : bad_request!(service.message)
end
def rotate_token(token, params)

View File

@ -54004,6 +54004,9 @@ msgstr ""
msgid "SecurityOrchestration|more than %{allowed}"
msgstr ""
msgid "SecurityOrchestration|on every"
msgstr ""
msgid "SecurityOrchestration|projects with compliance frameworks"
msgstr ""

View File

@ -50,7 +50,6 @@ ee/spec/frontend/ml/ai_agents/views/edit_agent_spec.js
ee/spec/frontend/oncall_schedule/schedule/components/preset_days/days_header_sub_item_spec.js
ee/spec/frontend/pages/admin/application_settings/general/components/allowed_integrations_spec.js
ee/spec/frontend/password/components/password_requirement_list_spec.js
ee/spec/frontend/product_analytics/onboarding/components/onboarding_state_spec.js
ee/spec/frontend/product_analytics/onboarding/onboarding_view_spec.js
ee/spec/frontend/projects/merge_requests/blocking_mr_input_root_spec.js
ee/spec/frontend/projects/settings/branch_rules/components/view/index_spec.js
@ -66,7 +65,6 @@ ee/spec/frontend/roles_and_permissions/components/role_selector_spec.js
ee/spec/frontend/security_configuration/components/app_spec.js
ee/spec/frontend/security_configuration/components/dynamic_fields_spec.js
ee/spec/frontend/security_configuration/dast_profiles/components/dast_profiles_list_spec.js
ee/spec/frontend/security_dashboard/components/shared/filters/querystring_sync_spec.js
ee/spec/frontend/security_dashboard/components/shared/vulnerability_details_graphql/details_section_spec.js
ee/spec/frontend/security_dashboard/components/shared/vulnerability_report/vulnerability_list_graphql_spec.js
ee/spec/frontend/security_dashboard/components/shared/vulnerability_report/vulnerability_report_spec.js

View File

@ -204,62 +204,82 @@ module Trigger
# * registry is checked for image existence and appropriate jobs are added to skip regex pattern
#
class CNG < Base
TriggerRefBranchCreationFailed = Class.new(StandardError)
ASSETS_HASH = "cached-assets-hash.txt"
DEFAULT_DEBIAN_IMAGE = "debian:bookworm-slim"
DEFAULT_ALPINE_IMAGE = "alpine:3.20"
DEFAULT_SKIPPED_JOBS = %w[final-images-listing].freeze
DEFAULT_SKIPPED_JOB_REGEX = "/#{DEFAULT_SKIPPED_JOBS.join('|')}/".freeze
STABLE_BASE_JOBS = %w[alpine-stable debian-stable].freeze
def variables
hash = super.dup
# Delete variables that aren't useful when using native triggers.
hash.delete('TRIGGER_SOURCE')
hash.delete('TRIGGERED_USER')
hash = without_trigger_vars(super.dup)
unless skip_redundant_jobs?
logger.info("Skipping redundant jobs is disabled, skipping existing container image check")
return hash
end
begin
hash.merge({
**deploy_component_tag_variables,
'SKIP_JOB_REGEX' => skip_job_regex,
'DEBIAN_IMAGE' => debian_image,
'DEBIAN_DIGEST' => debian_image.split('@').last,
'DEBIAN_BUILD_ARGS' => "--build-arg DEBIAN_IMAGE=#{ENV['GITLAB_DEPENDENCY_PROXY']}#{debian_image}",
'ALPINE_IMAGE' => alpine_image,
'ALPINE_DIGEST' => alpine_image.split('@').last,
'ALPINE_BUILD_ARGS' => "--build-arg ALPINE_IMAGE=#{ENV['GITLAB_DEPENDENCY_PROXY']}#{alpine_image}"
})
rescue StandardError => e
logger.error("Error while calculating variables, err: #{e.message}")
logger.error(e.backtrace.join("\n"))
logger.error("Falling back to default variables")
hash
end
hash.merge({
**deploy_component_tag_variables,
'SKIP_IMAGE_TAGGING' => "true",
'SKIP_JOB_REGEX' => skip_job_regex,
'DEBIAN_IMAGE' => debian_image,
'DEBIAN_DIGEST' => debian_image.split('@').last,
'DEBIAN_BUILD_ARGS' => "--build-arg DEBIAN_IMAGE=#{ENV['GITLAB_DEPENDENCY_PROXY']}#{debian_image}",
'ALPINE_IMAGE' => alpine_image,
'ALPINE_DIGEST' => alpine_image.split('@').last,
'ALPINE_BUILD_ARGS' => "--build-arg ALPINE_IMAGE=#{ENV['GITLAB_DEPENDENCY_PROXY']}#{alpine_image}"
})
rescue TriggerRefBranchCreationFailed => e
# raise if pipeline runs in MR that updates ref to make sure branch for trigger is created
raise(e) if ref_update_mr?
logger.error("Error while creating trigger ref branch, err: #{e.message}")
logger.error(e.backtrace.join("\n"))
logger.error("Falling back to default variables")
without_trigger_vars(super.dup)
rescue StandardError => e
# if skipping redundant jobs is enabled and fetching jobs to skip failed, attempt fallback to default variables
raise(e) unless skip_redundant_jobs?
logger.error("Error while calculating variables, err: #{e.message}")
logger.error(e.backtrace.join("\n"))
logger.error("Falling back to default variables")
hash
end
def simple_forwarded_variables
super.merge({
'TOP_UPSTREAM_SOURCE_REF_SLUG' => ENV['CI_COMMIT_REF_SLUG']
})
end
private
def logger
@logger ||= Logger.new(ENV["CNG_VAR_SETUP_LOG_FILE"] || "tmp/cng-var-setup.log")
end
# overridden base class methods
def downstream_project_path
ENV.fetch('CNG_PROJECT_PATH', 'gitlab-org/build/CNG-mirror')
end
def skip_redundant_jobs?
ENV["CNG_SKIP_REDUNDANT_JOBS"] == "true"
end
def ref
return @ref if @ref
return @ref = super if cng_commit_sha.to_s.empty?
def default_skip_job_regex
"/#{DEFAULT_SKIPPED_JOBS.join('|')}/"
end
# TODO: remove this hack once https://gitlab.com/gitlab-org/gitlab/-/issues/369583 is resolved
trigger_branch_name = "trigger-refs/#{cng_commit_sha}"
return @ref = trigger_branch_name if branch_exists?(trigger_branch_name)
def skip_job_regex
"/#{[*DEFAULT_SKIPPED_JOBS, *STABLE_BASE_JOBS, *skippable_jobs].join('|')}/"
downstream_client.create_branch(downstream_project_path, trigger_branch_name, cng_commit_sha)
logger.info("Created temp trigger branch '#{trigger_branch_name}' for commit '#{cng_commit_sha}'")
@ref = trigger_branch_name
rescue StandardError => e
# redundancy in case explicit branch existence api request failed
return trigger_branch_name if e.message.include?("already exists")
@ref = super
raise TriggerRefBranchCreationFailed, e.message
end
def ref_param_name
@ -292,14 +312,6 @@ module Trigger
super.merge('GITLAB_REF_SLUG' => gitlab_ref_slug)
end
def default_build_vars
@default_build_vars ||= {
"CONTAINER_VERSION_SUFFIX" => ENV["CI_PROJECT_PATH_SLUG"] || "upstream-trigger",
"CACHE_BUSTER" => "false",
"ARCH_LIST" => ENV["ARCH_LIST"] || "amd64"
}
end
def extra_variables
{
"TRIGGER_BRANCH" => ref,
@ -309,19 +321,13 @@ module Trigger
"CE_PIPELINE" => Trigger.ee? ? nil : "true", # Always set a value, even an empty string, so that the downstream pipeline can correctly check it.
"EE_PIPELINE" => Trigger.ee? ? "true" : nil, # Always set a value, even an empty string, so that the downstream pipeline can correctly check it.
"FULL_RUBY_VERSION" => RUBY_VERSION,
"SKIP_JOB_REGEX" => default_skip_job_regex,
"SKIP_JOB_REGEX" => DEFAULT_SKIPPED_JOB_REGEX,
"DEBIAN_IMAGE" => DEFAULT_DEBIAN_IMAGE, # Make sure default values are always set to not end up as empty string
"ALPINE_IMAGE" => DEFAULT_ALPINE_IMAGE, # Make sure default values are always set to not end up as empty string
**default_build_vars
}
end
def simple_forwarded_variables
super.merge({
'TOP_UPSTREAM_SOURCE_REF_SLUG' => ENV['CI_COMMIT_REF_SLUG']
})
end
def version_param_value(_version_file)
raw_version = super
@ -333,6 +339,71 @@ module Trigger
end
end
def access_token
ENV["CNG_ACCESS_TOKEN"].then { |token| token.to_s.empty? ? super : token }
end
# overridden base class methods
# Logger with file output
#
# @return [Logger]
def logger
@logger ||= Logger.new(ENV.fetch("CNG_VAR_SETUP_LOG_FILE", "tmp/cng-var-setup.log"))
end
# Specific commit sha to be used instead of branch if defined
#
# @return [String]
def cng_commit_sha
@cng_commit_sha ||= ENV['CNG_COMMIT_SHA']
end
# Default variables used in CNG builds that affect container version values
#
# @return [Hash]
def default_build_vars
@default_build_vars ||= {
"CONTAINER_VERSION_SUFFIX" => ENV.fetch("CI_PROJECT_PATH_SLUG", "upstream-trigger"),
"CACHE_BUSTER" => "false",
"ARCH_LIST" => ENV.fetch("ARCH_LIST", "amd64")
}
end
# Skip redundant build jobs by calculating if container images are already present in the registry
#
# @return [Boolean]
def skip_redundant_jobs?
ENV["CNG_SKIP_REDUNDANT_JOBS"] == "true"
end
# Pipeline is part of MR that updates cng-mirror ref
#
# @return [Boolean]
def ref_update_mr?
ENV["CI_MERGE_REQUEST_TARGET_BRANCH_NAME"]&.match?(%r{renovate-e2e/cng\S+digest})
end
# Skipped job regex based on existing container tags in the registry
#
# @return [String]
def skip_job_regex
"/#{[*DEFAULT_SKIPPED_JOBS, *STABLE_BASE_JOBS, *skippable_jobs].join('|')}/"
end
# Branch existence check
#
# @param branch_name [String]
# @return [Boolean]
def branch_exists?(branch_name)
!!downstream_client.branch(downstream_project_path, branch_name)
rescue Gitlab::Error::ResponseError
false
end
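# Remove upstream trigger specific variables that aren't useful for native triggers
#
# @param hash [Hash]
# @return [Hash]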
def without_trigger_vars(hash)
hash.except('TRIGGER_SOURCE', 'TRIGGERED_USER')
end
# Repository file tree in form of the output of `git ls-tree` command
#
# @return [String]

View File

@ -1,30 +0,0 @@
# frozen_string_literal: true
FactoryBot.define do
factory :virtual_registries_packages_maven_cache_entry,
class: 'VirtualRegistries::Packages::Maven::Cache::Entry' do
upstream { association :virtual_registries_packages_maven_upstream }
group { upstream.group }
sequence(:relative_path) { |n| "/a/relative/path/test-#{n}.txt" }
size { 1.kilobyte }
upstream_etag { OpenSSL::Digest.hexdigest('SHA256', 'test') }
content_type { 'text/plain' }
file_md5 { 'd8e8fca2dc0f896fd7cb4cb0031ba249' }
file_sha1 { '4e1243bd22c66e76c2ba9eddc1f91394e57f9f83' }
status { :default }
transient do
file_fixture { 'spec/fixtures/bfg_object_map.txt' }
end
after(:build) do |entry, evaluator|
entry.upstream.registry_upstream.group = entry.group
entry.file = fixture_file_upload(evaluator.file_fixture)
end
trait :upstream_checked do
upstream_checked_at { 30.minutes.ago }
upstream_etag { 'test' }
end
end
end

View File

@ -1,11 +0,0 @@
# frozen_string_literal: true
FactoryBot.define do
factory :virtual_registries_packages_maven_registry, class: 'VirtualRegistries::Packages::Maven::Registry' do
group
trait :with_upstream do
upstream { association(:virtual_registries_packages_maven_upstream, group: group) }
end
end
end

View File

@ -1,17 +0,0 @@
# frozen_string_literal: true
FactoryBot.define do
factory :virtual_registries_packages_maven_registry_upstream,
class: 'VirtualRegistries::Packages::Maven::RegistryUpstream' do
group { registry.group }
registry { association(:virtual_registries_packages_maven_registry) }
upstream do
association(
:virtual_registries_packages_maven_upstream,
group: registry.group,
registry: registry,
registry_upstream: nil
)
end
end
end

View File

@ -1,16 +0,0 @@
# frozen_string_literal: true
FactoryBot.define do
factory :virtual_registries_packages_maven_upstream, class: 'VirtualRegistries::Packages::Maven::Upstream' do
sequence(:url) { |n| "http://local.test/maven/#{n}" }
username { 'user' }
password { 'password' }
registry { association(:virtual_registries_packages_maven_registry) }
group { registry.group }
cache_validity_hours { 24 }
after(:build) do |entry, _|
entry.registry_upstream.group = entry.group if entry.registry_upstream
end
end
end

View File

@ -21,6 +21,7 @@ describe('Merge requests query component', () => {
function createComponent(
props = { query: 'reviewRequestedMergeRequests', variables: { state: 'opened' } },
mergeRequests = [createMockMergeRequest({ title: 'reviewer' })],
) {
reviewerQueryMock = jest.fn().mockResolvedValue({
data: {
@ -34,7 +35,7 @@ describe('Merge requests query component', () => {
startCursor: null,
endCursor: null,
},
nodes: [createMockMergeRequest({ title: 'reviewer' })],
nodes: mergeRequests,
},
},
},
@ -147,7 +148,10 @@ describe('Merge requests query component', () => {
describe('when refetching', () => {
it('refetches merge requests with eventHub emit event and query type matches', async () => {
createComponent();
createComponent(
{ query: 'reviewRequestedMergeRequests', variables: { state: 'opened' } },
[],
);
await waitForPromises();
@ -156,6 +160,7 @@ describe('Merge requests query component', () => {
await waitForPromises();
expect(reviewerQueryMock.mock.calls).toHaveLength(2);
expect(reviewerQueryMock.mock.calls[1][0]).toEqual(expect.objectContaining({ perPage: 20 }));
});
it('does not refetch merge requests with eventHub emit event and query type does not matches', async () => {

View File

@ -11,7 +11,6 @@ RSpec.describe 'cross-database foreign keys' do
# should be added as a comment along with the name of the column.
let!(:allowed_cross_database_foreign_keys) do
[
'geo_node_namespace_links.namespace_id',
'zoekt_indices.zoekt_enabled_namespace_id',
'zoekt_repositories.project_id',
'zoekt_replicas.zoekt_enabled_namespace_id',

View File

@ -257,6 +257,20 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
describe '.trigger_pipeline_status_change_subscription' do
let(:pipeline) { build(:ci_pipeline, user: user) }
%w[run! succeed! drop! skip! cancel! block! delay!].each do |action|
context "when pipeline receives #{action} event" do
it 'triggers GraphQL subscription ciPipelineStatusUpdated' do
expect(GraphqlTriggers).to receive(:ci_pipeline_status_updated).with(pipeline)
pipeline.public_send(action)
end
end
end
end
describe 'unlocking pipelines based on state transition' do
let(:ci_ref) { create(:ci_ref) }
let(:unlock_previous_pipelines_worker_spy) { class_spy(::Ci::Refs::UnlockPreviousPipelinesWorker) }
@ -2250,18 +2264,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
describe 'pipeline status update subscription trigger' do
%w[run! succeed! drop! skip! cancel! block! delay!].each do |action|
context "when pipeline receives #{action} event" do
it 'triggers GraphQL subscription ciPipelineStatusUpdated' do
expect(GraphqlTriggers).to receive(:ci_pipeline_status_updated).with(pipeline)
pipeline.public_send(action)
end
end
end
end
def create_build(name, *traits, queued_at: current, started_from: 0, **opts)
create(
:ci_build, *traits,

View File

@ -97,14 +97,13 @@ RSpec.describe NamespaceStatistics, type: :model, feature_category: :consumables
let_it_be(:statistics, reload: true) { create(:namespace_statistics, namespace: group) }
let_it_be(:dependency_proxy_manifest) { create(:dependency_proxy_manifest, group: group, size: 50) }
let_it_be(:dependency_proxy_blob) { create(:dependency_proxy_blob, group: group, size: 50) }
let_it_be(:vreg_maven_cache_entry) { create(:virtual_registries_packages_maven_cache_entry, group: group, size: 50) }
subject(:update_dependency_proxy_size) { statistics.update_dependency_proxy_size }
it 'updates the dependency proxy size' do
update_dependency_proxy_size
expect(statistics.dependency_proxy_size).to eq 150
expect(statistics.dependency_proxy_size).to eq 100
end
context 'when namespace does not belong to a group' do

View File

@ -1,332 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe VirtualRegistries::Packages::Maven::Cache::Entry, type: :model, feature_category: :virtual_registry do
subject(:cache_entry) { build(:virtual_registries_packages_maven_cache_entry) }
it { is_expected.to include_module(FileStoreMounter) }
it { is_expected.to include_module(::UpdateNamespaceStatistics) }
it_behaves_like 'updates namespace statistics' do
let(:statistic_source) { cache_entry }
let(:non_statistic_attribute) { :relative_path }
end
describe 'validations' do
%i[group file file_sha1 relative_path size].each do |attr|
it { is_expected.to validate_presence_of(attr) }
end
%i[upstream_etag content_type].each do |attr|
it { is_expected.to validate_length_of(attr).is_at_most(255) }
end
%i[relative_path object_storage_key].each do |attr|
it { is_expected.to validate_length_of(attr).is_at_most(1024) }
end
it { is_expected.to validate_length_of(:file_md5).is_equal_to(32).allow_nil }
it { is_expected.to validate_length_of(:file_sha1).is_equal_to(40) }
context 'with persisted cached response' do
before do
cache_entry.save!
end
it { is_expected.to validate_uniqueness_of(:relative_path).scoped_to(:upstream_id, :status) }
it { is_expected.to validate_uniqueness_of(:object_storage_key).scoped_to(:relative_path) }
context 'with a similar cached response in a different status' do
let!(:cache_entry_in_error) do
create(
:virtual_registries_packages_maven_cache_entry,
:error,
group_id: cache_entry.group_id,
upstream_id: cache_entry.upstream_id,
relative_path: cache_entry.relative_path
)
end
let(:new_cache_entry) do
build(
:virtual_registries_packages_maven_cache_entry,
:error,
group_id: cache_entry.group_id,
upstream_id: cache_entry.upstream_id,
relative_path: cache_entry.relative_path
)
end
it 'does not validate uniqueness of relative_path' do
new_cache_entry.validate
expect(new_cache_entry.errors.messages_for(:relative_path)).not_to include 'has already been taken'
end
end
end
end
describe 'associations' do
it 'belongs to an upstream' do
is_expected.to belong_to(:upstream)
.class_name('VirtualRegistries::Packages::Maven::Upstream')
.required
.inverse_of(:cache_entries)
end
end
describe 'scopes' do
describe '.for_group' do
let_it_be(:cache_entry1) { create(:virtual_registries_packages_maven_cache_entry) }
let_it_be(:cache_entry2) { create(:virtual_registries_packages_maven_cache_entry) }
let_it_be(:cache_entry3) { create(:virtual_registries_packages_maven_cache_entry) }
let(:groups) { [cache_entry1.group, cache_entry2.group] }
subject { described_class.for_group(groups) }
it { is_expected.to match_array([cache_entry1, cache_entry2]) }
end
end
describe '.next_pending_destruction' do
subject { described_class.next_pending_destruction }
let_it_be(:cache_entry) { create(:virtual_registries_packages_maven_cache_entry) }
let_it_be(:pending_destruction_cache_entry) do
create(:virtual_registries_packages_maven_cache_entry, :pending_destruction)
end
it { is_expected.to eq(pending_destruction_cache_entry) }
end
describe 'object storage key' do
it 'cannot be null' do
cache_entry.object_storage_key = nil
cache_entry.relative_path = nil
cache_entry.upstream = nil
expect(cache_entry).to be_invalid
expect(cache_entry.errors.full_messages).to include("Object storage key can't be blank")
end
it 'cannot be too large' do
cache_entry.object_storage_key = 'a' * 1025
cache_entry.relative_path = nil
expect(cache_entry).to be_invalid
expect(cache_entry.errors.full_messages)
.to include('Object storage key is too long (maximum is 1024 characters)')
end
it 'is set before saving' do
expect { cache_entry.save! }
.to change { cache_entry.object_storage_key }.from(nil).to(an_instance_of(String))
end
context 'with a persisted cache entry' do
let(:key) { cache_entry.object_storage_key }
before do
cache_entry.save!
end
it 'does not change after an update' do
expect(key).to be_present
cache_entry.update!(
file: CarrierWaveStringFile.new('test'),
size: 2.kilobytes
)
expect(cache_entry.object_storage_key).to eq(key)
end
it 'is read only' do
expect(key).to be_present
cache_entry.object_storage_key = 'new-key'
cache_entry.save!
expect(cache_entry.reload.object_storage_key).to eq(key)
end
end
end
describe '.search_by_relative_path' do
let_it_be(:cache_entry) { create(:virtual_registries_packages_maven_cache_entry) }
let_it_be(:other_cache_entry) do
create(:virtual_registries_packages_maven_cache_entry, relative_path: 'other/path')
end
subject { described_class.search_by_relative_path(relative_path) }
context 'with a matching relative path' do
let(:relative_path) { cache_entry.relative_path.slice(3, 8) }
it { is_expected.to contain_exactly(cache_entry) }
end
end
describe '.create_or_update_by!' do
let_it_be(:upstream) { create(:virtual_registries_packages_maven_upstream) }
let(:size) { 10.bytes }
subject(:create_or_update) do
with_threads do
Tempfile.create('test.txt') do |file|
file.write('test')
described_class.create_or_update_by!(
upstream: upstream,
group_id: upstream.group_id,
relative_path: '/test',
updates: { file: file, size: size, file_sha1: '4e1243bd22c66e76c2ba9eddc1f91394e57f9f95' }
)
end
end
end
it 'creates or updates the existing record' do
expect { create_or_update }.to change { described_class.count }.by(1)
end
context 'with invalid updates' do
let(:size) { nil }
it 'bubbles up the error' do
expect { create_or_update }.to not_change { described_class.count }
.and raise_error(ActiveRecord::RecordInvalid)
end
end
end
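The `with_threads` helper further down only proves something if `create_or_update_by!` tolerates concurrent inserts. A minimal sketch of such a method, assuming a retry-on-conflict strategy (the spec only pins down that concurrent calls net exactly one record; everything else here is an assumption):

def self.create_or_update_by!(upstream:, group_id:, relative_path:, updates: {})
  record = find_or_initialize_by(upstream: upstream, group_id: group_id, relative_path: relative_path)
  record.update!(updates)
  record
rescue ActiveRecord::RecordNotUnique
  # Another thread won the insert race; retry re-runs the method body,
  # which now finds the existing row and updates it instead.
  retry
end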
describe '#filename' do
let(:cache_entry) { build(:virtual_registries_packages_maven_cache_entry) }
subject { cache_entry.filename }
it { is_expected.to eq(File.basename(cache_entry.relative_path)) }
context 'when relative_path is nil' do
before do
cache_entry.relative_path = nil
end
it { is_expected.to be_nil }
end
end
describe '#stale?' do
let(:cache_entry) do
build(:virtual_registries_packages_maven_cache_entry, upstream_checked_at: 10.hours.ago)
end
let(:threshold) do
cache_entry.upstream_checked_at + cache_entry.upstream.cache_validity_hours.hours
end
subject { cache_entry.stale? }
context 'when before the threshold' do
before do
allow(Time.zone).to receive(:now).and_return(threshold - 1.hour)
end
it { is_expected.to be(false) }
end
context 'when on the threshold' do
before do
allow(Time.zone).to receive(:now).and_return(threshold)
end
it { is_expected.to be(false) }
end
context 'when after the threshold' do
before do
allow(Time.zone).to receive(:now).and_return(threshold + 1.hour)
end
it { is_expected.to be(true) }
end
context 'with no upstream' do
before do
cache_entry.upstream = nil
end
it { is_expected.to be(true) }
end
context 'with 0 cache validity hours' do
before do
cache_entry.upstream.cache_validity_hours = 0
end
it { is_expected.to be(false) }
end
end
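Taken together, the five contexts above fully determine the predicate. A hedged reconstruction (attribute names are taken from the spec; the exact implementation is an assumption):

def stale?
  return true if upstream.nil?
  return false if upstream.cache_validity_hours.zero?

  # Strictly past the threshold: an entry checked exactly
  # cache_validity_hours ago is still considered fresh.
  (upstream_checked_at + upstream.cache_validity_hours.hours).past?
end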
describe '#mark_as_pending_destruction' do
let_it_be_with_refind(:cache_entry) { create(:virtual_registries_packages_maven_cache_entry, :default) }
subject(:execute) { cache_entry.mark_as_pending_destruction }
shared_examples 'updating the status and relative_path properly' do
it 'updates the status and relative_path' do
previous_path = cache_entry.relative_path
expect { execute }.to change { cache_entry.status }.from('default').to('pending_destruction')
.and not_change { cache_entry.object_storage_key }
expect(cache_entry.relative_path).to start_with(previous_path)
expect(cache_entry.relative_path).to include('/deleted/')
end
end
it_behaves_like 'updating the status and relative_path properly'
context 'with an existing pending destruction record with same relative_path and upstream_id' do
let_it_be(:already_pending_destruction) do
create(
:virtual_registries_packages_maven_cache_entry,
:pending_destruction,
upstream: cache_entry.upstream,
relative_path: cache_entry.relative_path
)
end
it_behaves_like 'updating the status and relative_path properly'
end
end
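A sketch of what `mark_as_pending_destruction` appears to do, per the expectations above: keep the object storage key, flip the status, and rename `relative_path` under a `/deleted/` suffix so the original path frees up even when another pending-destruction row already used it (the random suffix is an assumption):

def mark_as_pending_destruction
  update!(
    status: :pending_destruction,
    relative_path: "#{relative_path}/deleted/#{SecureRandom.uuid}"
  )
end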
context 'with loose foreign key on virtual_registries_packages_maven_cache_entries.upstream_id' do
it_behaves_like 'update by a loose foreign key' do
let_it_be(:parent) { create(:virtual_registries_packages_maven_upstream) }
let_it_be(:model) { create(:virtual_registries_packages_maven_cache_entry, upstream: parent) }
let(:find_model) { described_class.last }
end
end
def with_threads(count: 5, &block)
return unless block
# Create a race condition; structure from https://blog.arkency.com/2015/09/testing-race-conditions/
wait_for_it = true
threads = Array.new(count) do
Thread.new do
# A loop to make threads busy until we `join` them
true while wait_for_it
yield
end
end
wait_for_it = false
threads.each(&:join)
end
end

View File

@ -1,55 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe VirtualRegistries::Packages::Maven::Registry, type: :model, feature_category: :virtual_registry do
subject(:registry) { build(:virtual_registries_packages_maven_registry) }
describe 'associations' do
it { is_expected.to belong_to(:group) }
it do
is_expected.to have_one(:registry_upstream)
.class_name('VirtualRegistries::Packages::Maven::RegistryUpstream')
.inverse_of(:registry)
end
it do
is_expected.to have_one(:upstream)
.through(:registry_upstream)
.class_name('VirtualRegistries::Packages::Maven::Upstream')
end
end
describe 'validations' do
it { is_expected.to validate_uniqueness_of(:group) }
it { is_expected.to validate_presence_of(:group) }
end
describe '.for_group' do
let_it_be(:group) { create(:group) }
let_it_be(:registry) { create(:virtual_registries_packages_maven_registry, group: group) }
let_it_be(:other_registry) { create(:virtual_registries_packages_maven_registry) }
subject { described_class.for_group(group) }
it { is_expected.to eq([registry]) }
end
describe 'callbacks' do
describe '.destroy_upstream' do
let(:upstream) { build(:virtual_registries_packages_maven_upstream) }
before do
allow(registry).to receive(:upstream).and_return(upstream)
allow(upstream).to receive(:destroy!)
end
it 'destroys the upstream' do
registry.destroy!
expect(upstream).to have_received(:destroy!)
end
end
end
end
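The `.destroy_upstream` examples read like a standard destroy callback. A minimal sketch, assuming the callback name matches the describe block:

class Registry < ApplicationRecord
  before_destroy :destroy_upstream

  private

  def destroy_upstream
    upstream&.destroy!
  end
end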

View File

@ -1,29 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe VirtualRegistries::Packages::Maven::RegistryUpstream, type: :model, feature_category: :virtual_registry do
subject(:registry_upstream) { build(:virtual_registries_packages_maven_registry_upstream) }
describe 'associations' do
it { is_expected.to belong_to(:group) }
it do
is_expected.to belong_to(:registry)
.class_name('VirtualRegistries::Packages::Maven::Registry')
.inverse_of(:registry_upstream)
end
it do
is_expected.to belong_to(:upstream)
.class_name('VirtualRegistries::Packages::Maven::Upstream')
.inverse_of(:registry_upstream)
end
end
describe 'validations' do
it { is_expected.to validate_presence_of(:group) }
it { is_expected.to validate_uniqueness_of(:registry_id) }
it { is_expected.to validate_uniqueness_of(:upstream_id) }
end
end

View File

@ -1,269 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe VirtualRegistries::Packages::Maven::Upstream, type: :model, feature_category: :virtual_registry do
using RSpec::Parameterized::TableSyntax
subject(:upstream) { build(:virtual_registries_packages_maven_upstream) }
it_behaves_like 'it has loose foreign keys' do
let(:factory_name) { :virtual_registries_packages_maven_upstream }
end
describe 'associations' do
it do
is_expected.to have_many(:cache_entries)
.class_name('VirtualRegistries::Packages::Maven::Cache::Entry')
.inverse_of(:upstream)
end
it do
is_expected.to have_one(:registry_upstream)
.class_name('VirtualRegistries::Packages::Maven::RegistryUpstream')
.inverse_of(:upstream)
end
it do
is_expected.to have_one(:registry)
.through(:registry_upstream)
.class_name('VirtualRegistries::Packages::Maven::Registry')
end
end
describe 'validations' do
it { is_expected.to validate_presence_of(:group) }
it { is_expected.to validate_presence_of(:url) }
it { is_expected.to validate_presence_of(:username) }
it { is_expected.to validate_presence_of(:password) }
it { is_expected.to validate_uniqueness_of(:encrypted_username_iv).ignoring_case_sensitivity.allow_nil }
it { is_expected.to validate_uniqueness_of(:encrypted_password_iv).ignoring_case_sensitivity.allow_nil }
it { is_expected.to validate_length_of(:url).is_at_most(255) }
it { is_expected.to validate_length_of(:username).is_at_most(255) }
it { is_expected.to validate_length_of(:password).is_at_most(255) }
it { is_expected.to validate_numericality_of(:cache_validity_hours).only_integer.is_greater_than_or_equal_to(0) }
context 'for url' do
where(:url, :valid, :error_messages) do
'http://test.maven' | true | nil
'https://test.maven' | true | nil
'git://test.maven' | false | ['Url is blocked: Only allowed schemes are http, https']
nil | false | ["Url can't be blank", 'Url must be a valid URL']
'' | false | ["Url can't be blank", 'Url must be a valid URL']
"http://#{'a' * 255}" | false | 'Url is too long (maximum is 255 characters)'
'http://127.0.0.1' | false | 'Url is blocked: Requests to localhost are not allowed'
'maven.local' | false | 'Url is blocked: Only allowed schemes are http, https'
'http://192.168.1.2' | false | 'Url is blocked: Requests to the local network are not allowed'
end
with_them do
before do
upstream.url = url
end
if params[:valid]
it { expect(upstream).to be_valid }
else
it do
expect(upstream).not_to be_valid
expect(upstream.errors).to contain_exactly(*error_messages)
end
end
end
end
context 'for credentials' do
where(:username, :password, :valid, :error_message) do
'user' | 'password' | true | nil
'' | '' | true | nil
'' | nil | true | nil
nil | '' | true | nil
nil | 'password' | false | "Username can't be blank"
'user' | nil | false | "Password can't be blank"
'' | 'password' | false | "Username can't be blank"
'user' | '' | false | "Password can't be blank"
('a' * 256) | 'password' | false | 'Username is too long (maximum is 255 characters)'
'user' | ('a' * 256) | false | 'Password is too long (maximum is 255 characters)'
end
with_them do
before do
upstream.username = username
upstream.password = password
end
if params[:valid]
it { expect(upstream).to be_valid }
else
it do
expect(upstream).not_to be_valid
expect(upstream.errors).to contain_exactly(error_message)
end
end
end
context 'when url is updated' do
where(:new_url, :new_user, :new_pwd, :expected_user, :expected_pwd) do
'http://original_url.test' | 'test' | 'test' | 'test' | 'test'
'http://update_url.test' | 'test' | 'test' | 'test' | 'test'
'http://update_url.test' | :none | :none | nil | nil
'http://update_url.test' | 'test' | :none | nil | nil
'http://update_url.test' | :none | 'test' | nil | nil
end
with_them do
before do
upstream.update!(url: 'http://original_url.test', username: 'original_user', password: 'original_pwd')
end
it 'resets the username and the password when necessary' do
new_attributes = { url: new_url, username: new_user, password: new_pwd }.select { |_, v| v != :none }
upstream.update!(new_attributes)
expect(upstream.reload.url).to eq(new_url)
expect(upstream.username).to eq(expected_user)
expect(upstream.password).to eq(expected_pwd)
end
end
end
end
end
describe 'callbacks' do
context 'for set_cache_validity_hours_for_maven_central' do
%w[
https://repo1.maven.org/maven2
https://repo1.maven.org/maven2/
].each do |maven_central_url|
context "with url set to #{maven_central_url}" do
before do
upstream.url = maven_central_url
end
it 'sets the cache validity hours to 0' do
upstream.save!
expect(upstream.cache_validity_hours).to eq(0)
end
end
end
context 'with url other than maven central' do
before do
upstream.url = 'https://test.org/maven2'
end
it 'sets the cache validity hours to the database default value' do
upstream.save!
expect(upstream.cache_validity_hours).not_to eq(0)
end
end
context 'with no url' do
before do
upstream.url = nil
end
it 'does not set the cache validity hours' do
expect(upstream).not_to receive(:set_cache_validity_hours_for_maven_central)
expect { upstream.save! }.to raise_error(ActiveRecord::RecordInvalid)
end
end
end
end
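A plausible shape for the Maven Central callback, assuming trailing-slash normalisation is all that is needed to cover both URL variants above:

before_save :set_cache_validity_hours_for_maven_central, if: :url?

def set_cache_validity_hours_for_maven_central
  self.cache_validity_hours = 0 if url.delete_suffix('/') == 'https://repo1.maven.org/maven2'
end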
context 'for credentials persistence' do
it 'persists and reads back credentials properly' do
upstream.username = 'test'
upstream.password = 'test'
upstream.save!
upstream_read = upstream.reload
expect(upstream_read.username).to eq('test')
expect(upstream_read.password).to eq('test')
end
end
describe '#url_for' do
subject { upstream.url_for(path) }
where(:path, :expected_url) do
'path' | 'http://test.maven/path'
'' | 'http://test.maven/'
'/path' | 'http://test.maven/path'
'/sub/path' | 'http://test.maven/sub/path'
end
with_them do
before do
upstream.url = 'http://test.maven/'
end
it { is_expected.to eq(expected_url) }
end
end
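All four rows of the table collapse to simple path joining. A sketch that reproduces them exactly (whether the real implementation uses File.join is an assumption):

def url_for(path)
  File.join(url, path)
end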
describe '#headers' do
subject { upstream.headers }
where(:username, :password, :expected_headers) do
'user' | 'pass' | { Authorization: 'Basic dXNlcjpwYXNz' }
'user' | '' | {}
'' | 'pass' | {}
'' | '' | {}
end
with_them do
before do
upstream.username = username
upstream.password = password
end
it { is_expected.to eq(expected_headers) }
end
end
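The expected header value is standard HTTP Basic auth: Base64 of "username:password" (Base64.strict_encode64('user:pass') is indeed 'dXNlcjpwYXNz'). A sketch consistent with the table above; the guard clause is inferred from the rows that expect an empty hash:

require 'base64'

def headers
  return {} unless username.present? && password.present?

  { Authorization: "Basic #{Base64.strict_encode64("#{username}:#{password}")}" }
end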
describe '#as_json' do
subject { upstream.as_json }
it { is_expected.not_to include('username', 'password') }
end
describe '#default_cache_entries' do
let_it_be(:upstream) { create(:virtual_registries_packages_maven_upstream) }
let_it_be(:default_cache_entry) do
create(:virtual_registries_packages_maven_cache_entry, upstream: upstream)
end
let_it_be(:pending_destruction_cache_entry) do
create(:virtual_registries_packages_maven_cache_entry, :pending_destruction, upstream: upstream)
end
subject { upstream.default_cache_entries }
it { is_expected.to contain_exactly(default_cache_entry) }
end
describe '#object_storage_key_for' do
let_it_be(:upstream) { build_stubbed(:virtual_registries_packages_maven_upstream) }
let(:registry_id) { '555' }
subject { upstream.object_storage_key_for(registry_id: registry_id) }
it 'contains the expected terms' do
is_expected.to include("virtual_registries/packages/maven/#{registry_id}/upstream/#{upstream.id}/cache/entry")
end
it 'does not return the same value when called twice' do
first_value = upstream.object_storage_key_for(registry_id: registry_id)
second_value = upstream.object_storage_key_for(registry_id: registry_id)
expect(first_value).not_to eq(second_value)
end
end
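The two expectations above imply a stable prefix plus a per-call random component. A hedged sketch (the random-suffix scheme is an assumption):

def object_storage_key_for(registry_id:)
  File.join(
    'virtual_registries', 'packages', 'maven', registry_id.to_s,
    'upstream', id.to_s, 'cache', 'entry',
    SecureRandom.hex
  )
end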
end

View File

@ -1,23 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe VirtualRegistries::Packages::Maven::UpstreamPolicy, feature_category: :virtual_registry do
let_it_be(:upstream) { create(:virtual_registries_packages_maven_upstream) }
let(:user) { upstream.group.first_owner }
subject(:policy) { described_class.new(user, upstream) }
describe 'delegation' do
let(:delegations) { policy.delegated_policies }
it 'delegates to the registry policy' do
expect(delegations.size).to eq(1)
delegations.each_value do |delegated_policy|
expect(delegated_policy).to be_instance_of(::VirtualRegistries::Packages::Maven::RegistryPolicy)
end
end
end
end

View File

@ -1,168 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe VirtualRegistries::Packages::Policies::GroupPolicy, feature_category: :virtual_registry do
include_context 'GroupPolicy context'
using RSpec::Parameterized::TableSyntax
let_it_be(:subgroup) { create(:group, parent: group, visibility_level: group.visibility_level) }
let(:policy_subject) { ::VirtualRegistries::Packages::Policies::Group.new(group) }
subject { described_class.new(current_user, policy_subject) }
describe 'read_virtual_registry' do
where(:group_visibility, :current_user, :allowed?) do
'PUBLIC' | nil | false
'PUBLIC' | ref(:non_group_member) | false
'PUBLIC' | ref(:guest) | true
'PUBLIC' | ref(:reporter) | true
'PUBLIC' | ref(:developer) | true
'PUBLIC' | ref(:maintainer) | true
'PUBLIC' | ref(:owner) | true
'PUBLIC' | ref(:organization_owner) | true
'INTERNAL' | nil | false
'INTERNAL' | ref(:non_group_member) | false
'INTERNAL' | ref(:guest) | true
'INTERNAL' | ref(:reporter) | true
'INTERNAL' | ref(:developer) | true
'INTERNAL' | ref(:maintainer) | true
'INTERNAL' | ref(:owner) | true
'INTERNAL' | ref(:organization_owner) | true
'PRIVATE' | nil | false
'PRIVATE' | ref(:non_group_member) | false
'PRIVATE' | ref(:guest) | true
'PRIVATE' | ref(:reporter) | true
'PRIVATE' | ref(:developer) | true
'PRIVATE' | ref(:maintainer) | true
'PRIVATE' | ref(:owner) | true
'PRIVATE' | ref(:organization_owner) | true
end
with_them do
before do
group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
end
it { is_expected.to public_send(allowed? ? :be_allowed : :be_disallowed, :read_virtual_registry) }
end
context 'with project membership' do
let_it_be(:project) { create(:project, group: group) }
let(:current_user) { non_group_member }
%i[
guest
reporter
developer
maintainer
owner
].each do |role|
context "for #{role}" do
before do
project.send(:"add_#{role}", current_user)
end
it { expect_allowed(:read_virtual_registry) }
end
end
end
context 'for admin' do
let(:current_user) { admin }
context 'when admin mode is enabled', :enable_admin_mode do
it { expect_allowed(:read_virtual_registry) }
end
context 'when admin mode is disabled' do
it { expect_disallowed(:read_virtual_registry) }
end
end
context 'for deploy token' do
let(:deploy_token) { create(:deploy_token, :group, groups: [target]) }
let(:current_user) { deploy_token }
where(:target, :group_visibility, :read_virtual_registry, :allowed?) do
ref(:group) | 'PUBLIC' | true | true
ref(:group) | 'PUBLIC' | false | false
ref(:group) | 'INTERNAL' | true | true
ref(:group) | 'INTERNAL' | false | false
ref(:group) | 'PRIVATE' | true | true
ref(:group) | 'PRIVATE' | false | false
ref(:subgroup) | 'PUBLIC' | true | false
ref(:subgroup) | 'PUBLIC' | false | false
ref(:subgroup) | 'INTERNAL' | true | false
ref(:subgroup) | 'INTERNAL' | false | false
ref(:subgroup) | 'PRIVATE' | true | false
ref(:subgroup) | 'PRIVATE' | false | false
end
with_them do
before do
deploy_token.read_virtual_registry = read_virtual_registry
group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
end
it { is_expected.to public_send(allowed? ? :be_allowed : :be_disallowed, :read_virtual_registry) }
end
end
end
%i[create update destroy].each do |action|
describe "#{action}_virtual_registry" do
where(:group_visibility, :current_user, :allowed?) do
'PUBLIC' | nil | false
'PUBLIC' | ref(:non_group_member) | false
'PUBLIC' | ref(:guest) | false
'PUBLIC' | ref(:reporter) | false
'PUBLIC' | ref(:developer) | false
'PUBLIC' | ref(:maintainer) | true
'PUBLIC' | ref(:owner) | true
'PUBLIC' | ref(:organization_owner) | true
'INTERNAL' | nil | false
'INTERNAL' | ref(:non_group_member) | false
'INTERNAL' | ref(:guest) | false
'INTERNAL' | ref(:reporter) | false
'INTERNAL' | ref(:developer) | false
'INTERNAL' | ref(:maintainer) | true
'INTERNAL' | ref(:owner) | true
'INTERNAL' | ref(:organization_owner) | true
'PRIVATE' | nil | false
'PRIVATE' | ref(:non_group_member) | false
'PRIVATE' | ref(:guest) | false
'PRIVATE' | ref(:reporter) | false
'PRIVATE' | ref(:developer) | false
'PRIVATE' | ref(:maintainer) | true
'PRIVATE' | ref(:owner) | true
'PRIVATE' | ref(:organization_owner) | true
end
with_them do
before do
group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
end
it { is_expected.to public_send(allowed? ? :be_allowed : :be_disallowed, :"#{action}_virtual_registry") }
end
end
context 'for admin' do
let(:current_user) { admin }
context 'when admin mode is enabled', :enable_admin_mode do
it { expect_allowed(:"#{action}_virtual_registry") }
end
context 'when admin mode is disabled' do
it { expect_disallowed(:"#{action}_virtual_registry") }
end
end
end
end
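Read as a whole, the permission matrix suggests DeclarativePolicy rules roughly like the following (condition names are assumptions, and the deploy-token rule is omitted; only the role thresholds are pinned down by the tables above):

rule { guest }.enable :read_virtual_registry

rule { maintainer }.policy do
  enable :create_virtual_registry
  enable :update_virtual_registry
  enable :destroy_virtual_registry
end

rule { admin & admin_mode }.policy do
  enable :read_virtual_registry
  enable :create_virtual_registry
  enable :update_virtual_registry
  enable :destroy_virtual_registry
end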

View File

@ -104,6 +104,22 @@ RSpec.describe API::Files, feature_category: :source_code_management do
end
end
shared_examples 'ai_workflows scope' do
subject(:file_action) { nil }
let(:expected_status) { nil }
context 'when authenticated with a token that has the ai_workflows scope' do
let(:oauth_token) { create(:oauth_access_token, user: user, scopes: [:ai_workflows]) }
it 'is successful' do
file_action
expect(response).to have_gitlab_http_status(expected_status)
end
end
end
describe 'HEAD /projects/:id/repository/files/:file_path' do
shared_examples_for 'repository files' do
let(:options) { {} }
@ -226,6 +242,12 @@ RSpec.describe API::Files, feature_category: :source_code_management do
let(:request) { head api(route(file_path), guest), params: params }
end
end
it_behaves_like 'ai_workflows scope' do
subject(:file_action) { head api(route(file_path), oauth_access_token: oauth_token), params: params }
let(:expected_status) { :ok }
end
end
end
@ -428,9 +450,7 @@ RSpec.describe API::Files, feature_category: :source_code_management do
end
end
end
end
context 'when authenticated' do
context 'and user is an inherited member from the group' do
context 'when project is public with private repository' do
let(:project) { public_project_private_repo }
@ -476,6 +496,12 @@ RSpec.describe API::Files, feature_category: :source_code_management do
end
end
end
it_behaves_like 'ai_workflows scope' do
subject(:file_action) { get api(route(file_path), oauth_access_token: oauth_token), params: params }
let(:expected_status) { :ok }
end
end
end
@ -711,6 +737,12 @@ RSpec.describe API::Files, feature_category: :source_code_management do
let(:request) { get api(route(file_path) + '/blame', guest), params: params }
end
end
it_behaves_like 'ai_workflows scope' do
subject(:file_action) { get api(route(file_path) + '/blame', oauth_access_token: oauth_token), params: params }
let(:expected_status) { :ok }
end
end
context 'when PATs are used' do
@ -969,6 +1001,15 @@ RSpec.describe API::Files, feature_category: :source_code_management do
let(:request) { get api(route(file_path), guest), params: params }
end
end
it_behaves_like 'ai_workflows scope' do
subject(:file_action) do
url = route(file_path) + '/raw'
get api(url, oauth_access_token: oauth_token), params: params
end
let(:expected_status) { :ok }
end
end
context 'when PATs are used' do

View File

@ -5,6 +5,9 @@
require 'fast_spec_helper'
require 'rspec-parameterized'
require 'gitlab/error'
require 'gitlab/objectified_hash'
require_relative '../../scripts/trigger-build'
RSpec.describe Trigger, feature_category: :tooling do
@ -479,6 +482,172 @@ RSpec.describe Trigger, feature_category: :tooling do
})
end
end
describe 'with skipping redundant jobs' do
let(:downstream_project_path) { 'gitlab-org/build/cng' }
let(:ref) { 'main' }
let(:image_digest) { 'sha256:digest' }
let(:debian_image) { 'debian:bookworm-slim' }
let(:alpine_image) { 'alpine:3.20' }
let(:tree_node) do
{
'mode' => '040000',
'type' => 'tree',
'id' => 'df239f023af22fc672d31dc50fdd5f593d4481b1',
'path' => '.gitlab'
}
end
before do
stub_env('CNG_SKIP_REDUNDANT_JOBS', 'true')
stub_env('CNG_BRANCH', ref)
stub_env('CNG_PROJECT_PATH', downstream_project_path)
stub_env('CI_PROJECT_PATH_SLUG', 'project-path')
stub_env('GITLAB_DEPENDENCY_PROXY', '')
# mock repo tree and file fetching
allow(downstream_gitlab_client).to receive(:repo_tree).with(
downstream_project_path,
ref: ref,
per_page: 100
).and_return(double(auto_paginate: [tree_node]))
allow(downstream_gitlab_client).to receive(:file_contents).with(
downstream_project_path,
"build-scripts/container_versions.sh",
ref
).and_return("script")
allow(downstream_gitlab_client).to receive(:file_contents).with(
downstream_project_path,
"ci_files/variables.yml",
ref
).and_return("---\nvariables:\n DEBIAN_IMAGE: '#{debian_image}'\n ALPINE_IMAGE: '#{alpine_image}'")
# mock fetching image digest
allow(HTTParty).to receive(:get).with(
%r{https://auth\.docker\.io/token\?service=registry\.docker\.io&scope=repository:library/(debian|alpine):pull}
).and_return(double(body: '{"token": "token"}', success?: true))
allow(HTTParty).to receive(:head).with(
%r{https://registry\.hub\.docker\.com/v2/library/(debian|alpine)/manifests/(bookworm-slim|3\.20)},
{
headers: {
'Authorization' => 'Bearer token',
'Accept' => 'application/vnd.docker.distribution.manifest.v2+json'
}
}
).and_return(double(headers: { 'docker-content-digest' => image_digest }, success?: true))
# mock version calculation script execution
allow(Open3).to receive(:capture2e).with(
hash_including({
"REPOSITORY_TREE" => "#{tree_node['mode']} #{tree_node['type']} #{tree_node['id']} #{tree_node['path']}",
"DEBIAN_DIGEST" => image_digest,
"ALPINE_DIGEST" => image_digest
}),
/bash -c 'source (\S+) && get_all_versions'/
).and_return(["gitlab-base=32a931c622f7ef7728bf8255cca9e8a46d472e85\n", double(success?: true)])
# mock existing tag check
allow(downstream_gitlab_client).to receive(:registry_repositories).with(
downstream_project_path,
per_page: 100
).and_return(double(auto_paginate: [double(name: 'registry/gitlab-base', id: 1)]))
allow(downstream_gitlab_client).to receive(:registry_repository_tag).with(
downstream_project_path,
1,
"32a931c622f7ef7728bf8255cca9e8a46d472e85"
).and_return({})
end
it 'includes additional variables for skipping redundant jobs' do
expect(subject.variables).to include({
"SKIP_IMAGE_TAGGING" => "true",
"SKIP_JOB_REGEX" => "/final-images-listing|alpine-stable|debian-stable|gitlab-base/",
"DEBIAN_IMAGE" => "#{debian_image}@#{image_digest}",
"DEBIAN_DIGEST" => image_digest,
"DEBIAN_BUILD_ARGS" => "--build-arg DEBIAN_IMAGE=#{debian_image}@#{image_digest}",
"ALPINE_IMAGE" => "#{alpine_image}@#{image_digest}",
"ALPINE_DIGEST" => image_digest,
"ALPINE_BUILD_ARGS" => "--build-arg ALPINE_IMAGE=#{alpine_image}@#{image_digest}"
})
end
context 'when tag does not exist in repository' do
let(:response) do
Gitlab::ObjectifiedHash.new(
code: 404,
parsed_response: "Failure",
request: { base_uri: "gitlab.com", path: "/repository_tag" }
)
end
before do
allow(downstream_gitlab_client).to receive(:registry_repository_tag).and_raise(
Gitlab::Error::NotFound.new(response)
)
end
it 'does not skip jobs with nonexistent tags' do
expect(subject.variables).to include({
"SKIP_JOB_REGEX" => "/final-images-listing|alpine-stable|debian-stable/"
})
end
end
end
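The HTTParty stubs above mirror the standard two-step Docker Registry v2 exchange: fetch a pull token, then HEAD the manifest and read the docker-content-digest header. A standalone sketch of that flow (URLs exactly as stubbed; the helper name is made up):

require 'httparty'
require 'json'

# Fetch the manifest digest for library/<image>:<tag> from Docker Hub.
def docker_hub_digest(image, tag)
  token = JSON.parse(
    HTTParty.get(
      "https://auth.docker.io/token?service=registry.docker.io&scope=repository:library/#{image}:pull"
    ).body
  ).fetch('token')

  HTTParty.head(
    "https://registry.hub.docker.com/v2/library/#{image}/manifests/#{tag}",
    headers: {
      'Authorization' => "Bearer #{token}",
      'Accept' => 'application/vnd.docker.distribution.manifest.v2+json'
    }
  ).headers['docker-content-digest']
end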
describe 'with specific commit sha' do
let(:downstream_project_path) { 'gitlab-org/build/cng' }
let(:sha) { '3f1b1cdc5209' }
let(:trigger_ref) { "trigger-refs/#{sha}" }
let(:response) do
Gitlab::ObjectifiedHash.new(
code: 404,
parsed_response: "Failure",
request: { base_uri: "gitlab.com", path: "/branch" }
)
end
before do
stub_env('CNG_PROJECT_PATH', downstream_project_path)
stub_env('CNG_COMMIT_SHA', sha)
allow(downstream_gitlab_client).to receive(:branch).with(downstream_project_path, trigger_ref).and_raise(
Gitlab::Error::ResponseError.new(response)
)
allow(downstream_gitlab_client).to receive(:create_branch).with(downstream_project_path, trigger_ref, sha)
end
it "uses trigger ref branch with specific commit sha" do
expect(subject.variables).to include({
"TRIGGER_BRANCH" => trigger_ref
})
end
context 'when trigger ref branch creation fails' do
before do
allow(downstream_gitlab_client).to receive(:create_branch).and_raise("failed to create branch")
end
it "falls back to default ref" do
expect(subject.variables).to include({
"TRIGGER_BRANCH" => "master"
})
end
end
context 'when trigger ref branch creation fails in SHA update MR' do
before do
stub_env('CI_MERGE_REQUEST_TARGET_BRANCH_NAME', 'renovate-e2e/cng-mirror-digest')
allow(downstream_gitlab_client).to receive(:create_branch).and_raise("failed to create branch")
end
it "raises error" do
expect { subject.variables }.to raise_error("failed to create branch")
end
end
end
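A hedged reconstruction of the ref-resolution logic these examples pin down (the method name and the renovate-branch check are assumptions):

def trigger_branch
  sha = ENV['CNG_COMMIT_SHA']
  return 'master' if sha.to_s.empty?

  ref = "trigger-refs/#{sha}"
  begin
    downstream_client.branch(downstream_project_path, ref)
  rescue Gitlab::Error::ResponseError
    downstream_client.create_branch(downstream_project_path, ref, sha)
  end
  ref
rescue StandardError
  # A SHA-update MR must fail loudly; anything else falls back to the default ref.
  raise if ENV['CI_MERGE_REQUEST_TARGET_BRANCH_NAME'].to_s.start_with?('renovate-e2e/')

  'master'
end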
end
end

View File

@ -1,40 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::VirtualRegistries::Cache::EntryUploader, feature_category: :virtual_registry do
let(:object_storage_key) { 'object/storage/key' }
let(:cache_entry) do
build_stubbed(
:virtual_registries_packages_maven_cache_entry,
object_storage_key: object_storage_key,
relative_path: 'relative/path/test.txt'
)
end
let(:uploader) { described_class.new(cache_entry, :file) }
describe 'inclusions' do
subject { uploader }
it { is_expected.to include_module(::ObjectStorage::Concern) }
end
describe '#store_dir' do
subject { uploader.store_dir }
it { is_expected.to eq(object_storage_key) }
end
describe '#check_remote_file_existence_on_upload?' do
subject { uploader.check_remote_file_existence_on_upload? }
it { is_expected.to be(false) }
end
describe '#sync_model_object_store?' do
subject { uploader.sync_model_object_store? }
it { is_expected.to be(true) }
end
end
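Consistent with the four expectations above, the uploader presumably just delegates its store directory to the model's precomputed key. A sketch (the base class follows GitLab uploader conventions; treat it as illustrative, not the actual implementation):

class EntryUploader < GitlabUploader
  include ::ObjectStorage::Concern

  def store_dir
    model.object_storage_key
  end

  def check_remote_file_existence_on_upload?
    false
  end

  def sync_model_object_store?
    true
  end
end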

View File

@ -4,22 +4,19 @@ require 'spec_helper'
RSpec.describe VirtualRegistries::Packages::Cache::DestroyOrphanEntriesWorker, type: :worker, feature_category: :virtual_registry do
let(:worker) { described_class.new }
let(:model) { ::VirtualRegistries::Packages::Maven::Cache::Entry }
describe '#perform_work', unless: Gitlab.ee? do
subject(:perform_work) { worker.perform_work(model.name) }
subject(:perform_work) { worker.perform_work('') }
let_it_be(:cache_entry) { create(:virtual_registries_packages_maven_cache_entry) }
let_it_be(:orphan_cache_entry) do
create(:virtual_registries_packages_maven_cache_entry, :pending_destruction)
it 'does not trigger any sql query' do
control = ActiveRecord::QueryRecorder.new { perform_work }
expect(control.count).to be_zero
end
it { expect { perform_work }.to not_change { model.count } }
end
describe '#remaining_work_count', unless: Gitlab.ee? do
subject { worker.remaining_work_count(model.name) }
subject { worker.remaining_work_count('') }
it { is_expected.to eq(0) }
it { is_expected.to be_zero }
end
end

View File

@ -1,19 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe VirtualRegistries::Packages::DestroyOrphanCachedResponsesWorker, type: :worker, feature_category: :virtual_registry do
let(:worker) { described_class.new }
let(:model) { ::VirtualRegistries::Packages::Maven::Cache::Entry }
describe '#perform_work', unless: Gitlab.ee? do
subject(:perform_work) { worker.perform_work(model.name) }
let_it_be(:cache_entry) { create(:virtual_registries_packages_maven_cache_entry) }
let_it_be(:orphan_cache_entry) do
create(:virtual_registries_packages_maven_cache_entry, :pending_destruction)
end
it { expect { perform_work }.to not_change { model.count } }
end
end