Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-05-20 21:12:18 +00:00
parent 1642f39792
commit 10c8bfaa82
84 changed files with 759 additions and 351 deletions

View File

@ -52,5 +52,4 @@ TODO: Fill out or delete (optional)
/label ~"section::dev"
/label ~"devops::create"
/label ~"group::remote development"
/label ~"webide-workflow::unprioritized"
/label ~"workflow::refinement"

View File

@ -52,5 +52,4 @@ TODO: Fill out or delete (optional)
/label ~"section::dev"
/label ~"devops::create"
/label ~"group::remote development"
/label ~"workspaces-workflow::unprioritized"
/label ~"workflow::refinement"

View File

@ -196,9 +196,12 @@ export default {
<aside-item v-if="hasJob" class="gl-mb-3">
<template #header>{{ $options.i18n.job }}</template>
<gl-link :href="deployment.job.webPath">
{{ deployment.job.name }}
</gl-link>
<component
:is="deployment.job.webPath ? 'gl-link' : 'span'"
:href="deployment.job.webPath"
data-testid="deployment-job"
>{{ deployment.job.name }}</component
>
</aside-item>
<aside-item class="gl-mb-3" data-testid="deployment-ref">

View File

@ -1,11 +1,12 @@
<script>
import { GlTruncate, GlLink, GlBadge } from '@gitlab/ui';
import { GlTruncate, GlLink, GlBadge, GlIcon } from '@gitlab/ui';
export default {
components: {
GlBadge,
GlTruncate,
GlLink,
GlIcon,
},
props: {
job: {
@ -14,11 +15,26 @@ export default {
default: null,
},
},
computed: {
jobPath() {
return this.job?.webPath;
},
pipelinePath() {
return this.job?.pipeline?.path;
},
},
};
</script>
<template>
<gl-link v-if="job" :href="job.webPath">
<gl-truncate :text="job.label" />
</gl-link>
<div v-if="job" class="gl-mb-2 gl-flex gl-flex-wrap gl-justify-end gl-gap-2 lg:gl-justify-start">
<component :is="jobPath ? 'gl-link' : 'span'" :href="jobPath">
<gl-truncate :text="job.label" />
</component>
<gl-link v-if="pipelinePath" :href="pipelinePath">
<gl-icon name="pipeline" />
{{ job.pipeline.label }}
</gl-link>
</div>
<gl-badge v-else variant="info">{{ __('API') }}</gl-badge>
</template>

View File

@ -52,6 +52,7 @@ query getEnvironmentDetails(
...DeploymentJob
deploymentPipeline: pipeline {
id
path
... on Pipeline {
jobs(whenExecuted: ["manual"], retried: false) {
nodes {
@ -61,6 +62,10 @@ query getEnvironmentDetails(
}
}
}
downstreamPipeline {
id
path
}
}
commit {
id

View File

@ -122,14 +122,21 @@ const getDeploymentApprovalFromDeploymentNode = (deploymentNode, environment) =>
export const convertToDeploymentTableRow = (deploymentNode, environment) => {
const { lastDeployment } = environment;
const commit = getCommitFromDeploymentNode(deploymentNode);
const { job } = deploymentNode;
const pipeline = job?.downstreamPipeline ? job?.downstreamPipeline : job?.deploymentPipeline;
return {
status: deploymentNode.status.toLowerCase(),
id: deploymentNode.iid,
triggerer: deploymentNode.triggerer,
commit,
job: deploymentNode.job && {
webPath: deploymentNode.job.webPath,
label: `${deploymentNode.job.name} (#${getIdFromGraphQLId(deploymentNode.job.id)})`,
job: job && {
webPath: job.webPath,
label: `${job.name} (#${getIdFromGraphQLId(job.id)})`,
pipeline: pipeline?.path && {
path: pipeline?.path,
label: `#${getIdFromGraphQLId(pipeline.id)}`,
},
},
created: deploymentNode.createdAt || '',
finished: deploymentNode.finishedAt || '',

View File

@ -17,7 +17,7 @@ module CreatesCommit
else
@project_to_commit_into = current_user.fork_of(target_project)
@different_project = true
@branch_name ||= @project_to_commit_into.repository.next_branch('patch')
@branch_name ||= generated_branch_name(@project_to_commit_into)
end
@start_branch ||= @ref || @branch_name
@ -93,6 +93,12 @@ module CreatesCommit
end
end
# Derives the next available auto-generated branch name (for example
# "patch", "patch-1", ...) from the given project's repository.
#
# Returns nil when no project is supplied, so callers using `||=` fall
# through gracefully when there is no fork to commit into.
def generated_branch_name(project)
  project.repository.next_branch('patch') if project
end
def update_flash_notice(success_notice, success_path)
changes_link = ActionController::Base.helpers.link_to _('changes'), success_path, class: 'gl-link'

View File

@ -33,7 +33,11 @@ class Projects::JobsController < Projects::ApplicationController
def show
if @build.instance_of?(::Ci::Bridge)
redirect_to project_pipeline_path(@build.downstream_pipeline.project, @build.downstream_pipeline.id)
if @build.downstream_pipeline&.project
redirect_to project_pipeline_path(@build.downstream_pipeline&.project, @build.downstream_pipeline&.id)
else
redirect_to project_pipeline_path(@build.project, @build.pipeline.id)
end
end
respond_to do |format|

View File

@ -242,7 +242,7 @@ module Types
end
def web_path
::Gitlab::Routing.url_helpers.project_job_path(object.project, object)
::Gitlab::Routing.url_helpers.project_job_path(object.project, object) unless object.is_a?(::Ci::Bridge)
end
def play_path

View File

@ -6,7 +6,7 @@
stage: secure
issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/387561
body: |
**Update:** We previously announced we would remove the existing License Compliance CI template in GitLab 16.0. However, due to performance issues with the [license scanning of CycloneDX files](https://docs.gitlab.com/user/compliance/license_scanning_of_cyclonedx_files/) we will do this in 16.3 instead.
**Update**: We previously announced we would remove the existing License Compliance CI template in GitLab 16.0. However, due to performance issues with the [license scanning of CycloneDX files](https://docs.gitlab.com/user/compliance/license_scanning_of_cyclonedx_files/) we will do this in 16.3 instead.
The GitLab [**License Compliance**](https://docs.gitlab.com/user/compliance/license_approval_policies/) CI/CD template is now deprecated and is scheduled for removal in the GitLab 16.3 release.

View File

@ -27,7 +27,7 @@
If you applied customizations to the affected analyzer, or if you currently disable the Semgrep-based analyzer in your pipelines, you must take action as detailed in the [deprecation issue for this change](https://gitlab.com/gitlab-org/gitlab/-/issues/390416#breaking-change).
**Update:** We've reduced the scope of this change. We will no longer make the following changes in GitLab 16.0:
**Update**: We've reduced the scope of this change. We will no longer make the following changes in GitLab 16.0:
1. Remove support for the analyzer based on [PHPCS Security Audit](https://gitlab.com/gitlab-org/security-products/analyzers/phpcs-security-audit) and replace it with GitLab-managed detection rules in the [Semgrep-based analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/semgrep).
1. Remove Scala from the scope of the [SpotBugs-based analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/spotbugs) and replace it with GitLab-managed detection rules in the [Semgrep-based analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/semgrep).

View File

@ -26,5 +26,5 @@
We recommend that you test your pipelines before the 16.0 release if you use one of the templates listed above and you use the `_DISABLED` variables but set a value other than `"true"`.
**Update:** We previously announced that we would update the `rules` on the affected templates to run in [merge request pipelines](https://docs.gitlab.com/ci/pipelines/merge_request_pipelines/) by default.
**Update**: We previously announced that we would update the `rules` on the affected templates to run in [merge request pipelines](https://docs.gitlab.com/ci/pipelines/merge_request_pipelines/) by default.
However, due to compatibility issues [discussed in the deprecation issue](https://gitlab.com/gitlab-org/gitlab/-/issues/388988#note_1372629948), we will no longer make this change in GitLab 16.0. We will still release the changes to the `_DISABLED` variables as described above.

View File

@ -6,7 +6,7 @@
stage: Secure
issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/408396
body: |
**Update:** We previously announced a change to how the MobSF-based GitLab SAST analyzer would scan multi-module Android projects.
**Update**: We previously announced a change to how the MobSF-based GitLab SAST analyzer would scan multi-module Android projects.
We've canceled that change, and no action is required.
Instead of changing which single module would be scanned, we [improved multi-module support](https://gitlab.com/gitlab-org/security-products/analyzers/mobsf/-/merge_requests/73).

View File

@ -390,11 +390,11 @@ These are only examples and may not work on all setups. Further adjustments may
{{< /alert >}}
- **% CPU utilization:** `1 - avg without (mode,cpu) (rate(node_cpu_seconds_total{mode="idle"}[5m]))`
- **% Memory available:** `((node_memory_MemAvailable_bytes / node_memory_MemTotal_bytes) or ((node_memory_MemFree_bytes + node_memory_Buffers_bytes + node_memory_Cached_bytes) / node_memory_MemTotal_bytes)) * 100`
- **Data transmitted:** `rate(node_network_transmit_bytes_total{device!="lo"}[5m])`
- **Data received:** `rate(node_network_receive_bytes_total{device!="lo"}[5m])`
- **Disk read IOPS:** `sum by (instance) (rate(node_disk_reads_completed_total[1m]))`
- **% CPU utilization**: `1 - avg without (mode,cpu) (rate(node_cpu_seconds_total{mode="idle"}[5m]))`
- **% Memory available**: `((node_memory_MemAvailable_bytes / node_memory_MemTotal_bytes) or ((node_memory_MemFree_bytes + node_memory_Buffers_bytes + node_memory_Cached_bytes) / node_memory_MemTotal_bytes)) * 100`
- **Data transmitted**: `rate(node_network_transmit_bytes_total{device!="lo"}[5m])`
- **Data received**: `rate(node_network_receive_bytes_total{device!="lo"}[5m])`
- **Disk read IOPS**: `sum by (instance) (rate(node_disk_reads_completed_total[1m]))`
- **Disk write IOPS**: `sum by (instance) (rate(node_disk_writes_completed_total[1m]))`
- **RPS via GitLab transaction count**: `sum(irate(gitlab_transaction_duration_seconds_count{controller!~'HealthController|MetricsController|'}[1m])) by (controller, action)`

View File

@ -24,10 +24,10 @@ specifically the [Before you start](_index.md#before-you-start) and [Deciding wh
{{< /alert >}}
> - **Target load:** API: 200 RPS, Web: 20 RPS, Git (Pull): 20 RPS, Git (Push): 4 RPS
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Cost calculator template:** [See cost calculator templates section](_index.md#cost-calculator-templates)
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Target load**: API: 200 RPS, Web: 20 RPS, Git (Pull): 20 RPS, Git (Push): 4 RPS
> - **High Availability**: Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Cost calculator template**: [See cost calculator templates section](_index.md#cost-calculator-templates)
> - **Cloud Native Hybrid Alternative**: [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](_index.md#deciding-which-architecture-to-start-with)
| Service | Nodes | Configuration | GCP example<sup>1</sup> | AWS example<sup>1</sup> | Azure example<sup>1</sup> |

View File

@ -17,11 +17,11 @@ This reference architecture targets a peak load of 20 requests per second (RPS).
For a full list of reference architectures, see
[available reference architectures](_index.md#available-reference-architectures).
> - **Target Load:** API: 20 RPS, Web: 2 RPS, Git (Pull): 2 RPS, Git (Push): 1 RPS
> - **High Availability:** No. For a high availability environment,
> - **Target Load**: API: 20 RPS, Web: 2 RPS, Git (Pull): 2 RPS, Git (Push): 1 RPS
> - **High Availability**: No. For a high availability environment,
> follow a modified [3K reference architecture](3k_users.md#supported-modifications-for-lower-user-counts-ha).
> - **Cost calculator template:** For more information, see [cost calculator templates](_index.md#cost-calculator-templates).
> - **Cloud Native Hybrid:** No. For a cloud native hybrid environment, you
> - **Cost calculator template**: For more information, see [cost calculator templates](_index.md#cost-calculator-templates).
> - **Cloud Native Hybrid**: No. For a cloud native hybrid environment, you
> can follow a [modified hybrid reference architecture](#cloud-native-hybrid-reference-architecture-with-helm-charts).
> - **Unsure which Reference Architecture to use?** For more information, see [deciding which architecture to start with](_index.md#deciding-which-architecture-to-start-with).

View File

@ -24,10 +24,10 @@ specifically the [Before you start](_index.md#before-you-start) and [Deciding wh
{{< /alert >}}
> - **Target load:** API: 500 RPS, Web: 50 RPS, Git (Pull): 50 RPS, Git (Push): 10 RPS
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Cost calculator template:** [See cost calculator templates section](_index.md#cost-calculator-templates)
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Target load**: API: 500 RPS, Web: 50 RPS, Git (Pull): 50 RPS, Git (Push): 10 RPS
> - **High Availability**: Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Cost calculator template**: [See cost calculator templates section](_index.md#cost-calculator-templates)
> - **Cloud Native Hybrid Alternative**: [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](_index.md#deciding-which-architecture-to-start-with)
| Service | Nodes | Configuration | GCP example<sup>1</sup> | AWS example<sup>1</sup> | Azure example<sup>1</sup> |

View File

@ -17,11 +17,11 @@ This page describes the GitLab reference architecture designed to target a peak
For a full list of reference architectures, see
[Available reference architectures](_index.md#available-reference-architectures).
> - **Target Load:** API: 40 RPS, Web: 4 RPS, Git (Pull): 4 RPS, Git (Push): 1 RPS
> - **High Availability:** No. For a highly-available environment, you can
> - **Target Load**: API: 40 RPS, Web: 4 RPS, Git (Pull): 4 RPS, Git (Push): 1 RPS
> - **High Availability**: No. For a highly-available environment, you can
> follow a modified [3K or 60 RPS reference architecture](3k_users.md#supported-modifications-for-lower-user-counts-ha).
> - **Cost calculator template:** [See cost calculator templates section](_index.md#cost-calculator-templates)
> - **Cloud Native Hybrid:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Cost calculator template**: [See cost calculator templates section](_index.md#cost-calculator-templates)
> - **Cloud Native Hybrid**: [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](_index.md#deciding-which-architecture-to-start-with).
| Service | Nodes | Configuration | GCP example<sup>1</sup> | AWS example<sup>1</sup> | Azure example<sup>1</sup> |

View File

@ -21,10 +21,10 @@ section details how to reduce this architecture's size while maintaining HA.
For a full list of reference architectures, see
[Available reference architectures](_index.md#available-reference-architectures).
> - **Target Load:** API: 60 RPS, Web: 6 RPS, Git (Pull): 6 RPS, Git (Push): 1 RPS
> - **High Availability:** Yes, although [Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution
> - **Cost calculator template:** [See cost calculator templates section](_index.md#cost-calculator-templates)
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Target Load**: API: 60 RPS, Web: 6 RPS, Git (Pull): 6 RPS, Git (Push): 1 RPS
> - **High Availability**: Yes, although [Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution
> - **Cost calculator template**: [See cost calculator templates section](_index.md#cost-calculator-templates)
> - **Cloud Native Hybrid Alternative**: [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](_index.md#deciding-which-architecture-to-start-with).
| Service | Nodes | Configuration | GCP example<sup>1</sup> | AWS example<sup>1</sup> | Azure example<sup>1</sup> |

View File

@ -24,10 +24,10 @@ specifically the [Before you start](_index.md#before-you-start) and [Deciding wh
{{< /alert >}}
> - **Target load:** API: 1000 RPS, Web: 100 RPS, Git (Pull): 100 RPS, Git (Push): 20 RPS
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Cost calculator template:** [See cost calculator templates section](_index.md#cost-calculator-templates)
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Target load**: API: 1000 RPS, Web: 100 RPS, Git (Pull): 100 RPS, Git (Push): 20 RPS
> - **High Availability**: Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Cost calculator template**: [See cost calculator templates section](_index.md#cost-calculator-templates)
> - **Cloud Native Hybrid Alternative**: [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](_index.md#deciding-which-architecture-to-start-with)
| Service | Nodes | Configuration | GCP example<sup>1</sup> | AWS example<sup>1</sup> | Azure example<sup>1</sup> |

View File

@ -24,10 +24,10 @@ specifically the [Before you start](_index.md#before-you-start) and [Deciding wh
{{< /alert >}}
> - **Target load:** API: 100 RPS, Web: 10 RPS, Git (Pull): 10 RPS, Git (Push): 2 RPS
> - **High Availability:** Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Cost calculator template:** [See cost calculator templates section](_index.md#cost-calculator-templates)
> - **Cloud Native Hybrid Alternative:** [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Target load**: API: 100 RPS, Web: 10 RPS, Git (Pull): 10 RPS, Git (Push): 2 RPS
> - **High Availability**: Yes ([Praefect](#configure-praefect-postgresql) needs a third-party PostgreSQL solution for HA)
> - **Cost calculator template**: [See cost calculator templates section](_index.md#cost-calculator-templates)
> - **Cloud Native Hybrid Alternative**: [Yes](#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative)
> - **Unsure which Reference Architecture to use?** [Go to this guide for more info](_index.md#deciding-which-architecture-to-start-with)
| Service | Nodes | Configuration | GCP example<sup>1</sup> | AWS example<sup>1</sup> | Azure example<sup>1</sup> |

View File

@ -44,7 +44,7 @@ prerequisites for [migrating groups by direct transfer](../user/group/import/dir
Use this endpoint to start a new group or project migration. Specify:
- `entities[group_entity]` to migrate a group.
- `entities[project_entity]` to migrate a project. (**Status:** Beta)
- `entities[project_entity]` to migrate a project. (**Status**: Beta)
```plaintext
POST /bulk_imports

View File

@ -354,7 +354,7 @@ DELETE /projects/:id/registry/repositories/:repository_id/tags
|---------------------|----------------|----------|-------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](rest/_index.md#namespaced-paths). |
| `repository_id` | integer | yes | The ID of registry repository. |
| `name_regex` | string | no | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to delete. To delete all tags specify `.*`. **Note:** `name_regex` is deprecated in favor of `name_regex_delete`. This field is validated. |
| `name_regex` | string | no | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to delete. To delete all tags specify `.*`. **Note**: `name_regex` is deprecated in favor of `name_regex_delete`. This field is validated. |
| `name_regex_delete` | string | yes | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to delete. To delete all tags specify `.*`. This field is validated. |
| `name_regex_keep` | string | no | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to keep. This value overrides any matches from `name_regex_delete`. This field is validated. Note: setting to `.*` results in a no-op. |
| `keep_n` | integer | no | The amount of latest tags of given name to keep. |

View File

@ -18,10 +18,10 @@ GitLab has several major integrations that can help streamline your development
These integrations cover a variety of areas, including:
- **Authentication:** OAuth, SAML, LDAP
- **Planning:** Jira, Bugzilla, Redmine, Pivotal Tracker
- **Communication:** Slack, Microsoft Teams, Mattermost
- **Security:** Checkmarx, Veracode, Fortify
- **Authentication**: OAuth, SAML, LDAP
- **Planning**: Jira, Bugzilla, Redmine, Pivotal Tracker
- **Communication**: Slack, Microsoft Teams, Mattermost
- **Security**: Checkmarx, Veracode, Fortify
For more information, see:

View File

@ -28385,6 +28385,7 @@ four standard [pagination arguments](#pagination-arguments):
| <a id="groupdependenciescomponentids"></a>`componentIds` | [`[SbomComponentID!]`](#sbomcomponentid) | Filter dependencies by component IDs. |
| <a id="groupdependenciescomponentnames"></a>`componentNames` | [`[String!]`](#string) | Filter dependencies by component names. |
| <a id="groupdependenciescomponentversions"></a>`componentVersions` | [`[String!]`](#string) | Filter dependencies by component versions. |
| <a id="groupdependenciesnotcomponentversions"></a>`notComponentVersions` {{< icon name="warning-solid" >}} | [`[String!]`](#string) | **Introduced** in GitLab 18.1. **Status**: Experiment. Filter dependencies to exclude the specified component versions. |
| <a id="groupdependenciespackagemanagers"></a>`packageManagers` | [`[PackageManager!]`](#packagemanager) | Filter dependencies by package managers. |
| <a id="groupdependenciessort"></a>`sort` | [`DependencySort`](#dependencysort) | Sort dependencies by given criteria. |
| <a id="groupdependenciessourcetypes"></a>`sourceTypes` | [`[SbomSourceType!]`](#sbomsourcetype) | Filter dependencies by source type. |
@ -28411,6 +28412,7 @@ four standard [pagination arguments](#pagination-arguments):
| <a id="groupdependencyaggregationscomponentids"></a>`componentIds` | [`[SbomComponentID!]`](#sbomcomponentid) | Filter dependencies by component IDs. |
| <a id="groupdependencyaggregationscomponentnames"></a>`componentNames` | [`[String!]`](#string) | Filter dependencies by component names. |
| <a id="groupdependencyaggregationscomponentversions"></a>`componentVersions` | [`[String!]`](#string) | Filter dependencies by component versions. |
| <a id="groupdependencyaggregationsnotcomponentversions"></a>`notComponentVersions` {{< icon name="warning-solid" >}} | [`[String!]`](#string) | **Introduced** in GitLab 18.1. **Status**: Experiment. Filter dependencies to exclude the specified component versions. |
| <a id="groupdependencyaggregationspackagemanagers"></a>`packageManagers` | [`[PackageManager!]`](#packagemanager) | Filter dependencies by package managers. |
| <a id="groupdependencyaggregationsprojectcountmax"></a>`projectCountMax` | [`Int`](#int) | Filter dependencies by maximum project count. |
| <a id="groupdependencyaggregationsprojectcountmin"></a>`projectCountMin` | [`Int`](#int) | Filter dependencies by minimum project count. |
@ -36106,6 +36108,7 @@ four standard [pagination arguments](#pagination-arguments):
| <a id="projectdependenciescomponentids"></a>`componentIds` | [`[SbomComponentID!]`](#sbomcomponentid) | Filter dependencies by component IDs. |
| <a id="projectdependenciescomponentnames"></a>`componentNames` | [`[String!]`](#string) | Filter dependencies by component names. |
| <a id="projectdependenciescomponentversions"></a>`componentVersions` | [`[String!]`](#string) | Filter dependencies by component versions. |
| <a id="projectdependenciesnotcomponentversions"></a>`notComponentVersions` {{< icon name="warning-solid" >}} | [`[String!]`](#string) | **Introduced** in GitLab 18.1. **Status**: Experiment. Filter dependencies to exclude the specified component versions. |
| <a id="projectdependenciespackagemanagers"></a>`packageManagers` | [`[PackageManager!]`](#packagemanager) | Filter dependencies by package managers. |
| <a id="projectdependenciessort"></a>`sort` | [`DependencySort`](#dependencysort) | Sort dependencies by given criteria. |
| <a id="projectdependenciessourcetypes"></a>`sourceTypes` | [`[SbomSourceType!]`](#sbomsourcetype) | Filter dependencies by source type. |

View File

@ -243,7 +243,7 @@ Parameters:
| --------- | ---- | -------- | ----------- |
| `token` | string | yes | Buildkite project GitLab token. |
| `project_url` | string | yes | Pipeline URL (for example, `https://buildkite.com/example/pipeline`). |
| `enable_ssl_verification` | boolean | no | **Deprecated:** This parameter has no effect because SSL verification is always enabled. |
| `enable_ssl_verification` | boolean | no | **Deprecated**: This parameter has no effect because SSL verification is always enabled. |
| `push_events` | boolean | no | Enable notifications for push events. |
| `merge_requests_events` | boolean | no | Enable notifications for merge request events. |
| `tag_push_events` | boolean | no | Enable notifications for tag push events. |
@ -837,7 +837,7 @@ Parameters:
| --------- | ---- | -------- | ----------- |
| `channel` | string | no | Default channel to use if no other channel is configured. |
| `notify_only_broken_pipelines` | boolean | no | Send notifications for broken pipelines. |
| `notify_only_default_branch` | boolean | no | **Deprecated:** This parameter has been replaced with `branches_to_be_notified`. |
| `notify_only_default_branch` | boolean | no | **Deprecated**: This parameter has been replaced with `branches_to_be_notified`. |
| `branches_to_be_notified` | string | no | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `alert_events` | boolean | no | Enable notifications for alert events. |
| `issues_events` | boolean | no | Enable notifications for issue events. |
@ -901,7 +901,7 @@ Parameters:
| --------- | ---- | -------- | ----------- |
| `webhook` | string | yes | The Hangouts Chat webhook (for example, `https://chat.googleapis.com/v1/spaces...`). |
| `notify_only_broken_pipelines` | boolean | no | Send notifications for broken pipelines. |
| `notify_only_default_branch` | boolean | no | **Deprecated:** This parameter has been replaced with `branches_to_be_notified`. |
| `notify_only_default_branch` | boolean | no | **Deprecated**: This parameter has been replaced with `branches_to_be_notified`. |
| `branches_to_be_notified` | string | no | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `push_events` | boolean | no | Enable notifications for push events. |
| `issues_events` | boolean | no | Enable notifications for issue events. |
@ -1247,7 +1247,7 @@ Parameters:
| `username` | string | no | Mattermost notifications username. |
| `channel` | string | no | Default channel to use if no other channel is configured. |
| `notify_only_broken_pipelines` | boolean | no | Send notifications for broken pipelines. |
| `notify_only_default_branch` | boolean | no | **Deprecated:** This parameter has been replaced with `branches_to_be_notified`. |
| `notify_only_default_branch` | boolean | no | **Deprecated**: This parameter has been replaced with `branches_to_be_notified`. |
| `branches_to_be_notified` | string | no | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `labels_to_be_notified` | string | no | Labels to send notifications for. Leave blank to receive notifications for all events. |
| `labels_to_be_notified_behavior` | string | no | Labels to be notified for. Valid options are `match_any` and `match_all`. The default value is `match_any`. |
@ -1336,7 +1336,7 @@ Parameters:
| --------- | ---- | -------- | ----------- |
| `webhook` | string | yes | The Microsoft Teams webhook (for example, `https://outlook.office.com/webhook/...`). |
| `notify_only_broken_pipelines` | boolean | no | Send notifications for broken pipelines. |
| `notify_only_default_branch` | boolean | no | **Deprecated:** This parameter has been replaced with `branches_to_be_notified`. |
| `notify_only_default_branch` | boolean | no | **Deprecated**: This parameter has been replaced with `branches_to_be_notified`. |
| `branches_to_be_notified` | string | no | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `push_events` | boolean | no | Enable notifications for push events. |
| `issues_events` | boolean | no | Enable notifications for issue events. |
@ -1679,7 +1679,7 @@ Parameters:
| `username` | string | no | Slack notifications username. |
| `channel` | string | no | Default channel to use if no other channel is configured. |
| `notify_only_broken_pipelines` | boolean | no | Send notifications for broken pipelines. |
| `notify_only_default_branch` | boolean | no | **Deprecated:** This parameter has been replaced with `branches_to_be_notified`. |
| `notify_only_default_branch` | boolean | no | **Deprecated**: This parameter has been replaced with `branches_to_be_notified`. |
| `branches_to_be_notified` | string | no | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `labels_to_be_notified` | string | no | Labels to send notifications for. Leave blank to receive notifications for all events. |
| `labels_to_be_notified_behavior` | string | no | Labels to be notified for. Valid options are `match_any` and `match_all`. The default value is `match_any`. |

View File

@ -166,7 +166,7 @@ Parameters:
| `id` | integer or string | yes | The ID or [URL-encoded path of the project](rest/_index.md#namespaced-paths). |
| `issue_iid` | integer | yes | The IID of an issue. |
| `body` | string | yes | The content of a note. Limited to 1,000,000 characters. |
| `confidential` | boolean | no | **Deprecated:** Scheduled to be removed in GitLab 16.0 and renamed to `internal`. The confidential flag of a note. Default is false. |
| `confidential` | boolean | no | **Deprecated**: Scheduled to be removed in GitLab 16.0 and renamed to `internal`. The confidential flag of a note. Default is false. |
| `internal` | boolean | no | The internal flag of a note. Overrides `confidential` when both parameters are submitted. Default is false. |
| `created_at` | string | no | Date time string, ISO 8601 formatted. It must be after 1970-01-01. Example: `2016-03-11T03:45:40Z` (requires administrator or project/group owner rights) |
@ -191,7 +191,7 @@ Parameters:
| `issue_iid` | integer | yes | The IID of an issue. |
| `note_id` | integer | yes | The ID of a note. |
| `body` | string | no | The content of a note. Limited to 1,000,000 characters. |
| `confidential` | boolean | no | **Deprecated:** Scheduled to be removed in GitLab 16.0. The confidential flag of a note. Default is false. |
| `confidential` | boolean | no | **Deprecated**: Scheduled to be removed in GitLab 16.0. The confidential flag of a note. Default is false. |
```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
@ -473,7 +473,7 @@ Parameters:
| `merge_request_iid` | integer | yes | The IID of a project merge request |
| `note_id` | integer | no | The ID of a note |
| `body` | string | yes | The content of a note. Limited to 1,000,000 characters. |
| `confidential` | boolean | no | **Deprecated:** Scheduled to be removed in GitLab 16.0. The confidential flag of a note. Default is false. |
| `confidential` | boolean | no | **Deprecated**: Scheduled to be removed in GitLab 16.0. The confidential flag of a note. Default is false. |
```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
@ -613,7 +613,7 @@ Parameters:
| `body` | string | yes | The content of a note. Limited to 1,000,000 characters. |
| `epic_id` | integer | yes | The ID of an epic |
| `id` | integer or string | yes | The ID or [URL-encoded path of the group](rest/_index.md#namespaced-paths) |
| `confidential` | boolean | no | **Deprecated:** Scheduled to be removed in GitLab 16.0 and is renamed to `internal`. The confidential flag of a note. Default is `false`. |
| `confidential` | boolean | no | **Deprecated**: Scheduled to be removed in GitLab 16.0 and is renamed to `internal`. The confidential flag of a note. Default is `false`. |
| `internal` | boolean | no | The internal flag of a note. Overrides `confidential` when both parameters are submitted. Default is `false`. |
```shell
@ -637,7 +637,7 @@ Parameters:
| `epic_id` | integer | yes | The ID of an epic |
| `note_id` | integer | yes | The ID of a note |
| `body` | string | yes | The content of a note. Limited to 1,000,000 characters. |
| `confidential` | boolean | no | **Deprecated:** Scheduled to be removed in GitLab 16.0. The confidential flag of a note. Default is false. |
| `confidential` | boolean | no | **Deprecated**: Scheduled to be removed in GitLab 16.0. The confidential flag of a note. Default is false. |
```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \

View File

@ -48,9 +48,9 @@ GitLab supports the following authorization flows:
- **Authorization code with [Proof Key for Code Exchange (PKCE)](https://www.rfc-editor.org/rfc/rfc7636):**
Most secure. Without PKCE, you'd have to include client secrets on mobile clients,
and is recommended for both client and server apps.
- **Authorization code:** Secure and common flow. Recommended option for secure
- **Authorization code**: Secure and common flow. Recommended option for secure
server-side apps.
- **Resource owner password credentials:** To be used **only** for securely
- **Resource owner password credentials**: To be used **only** for securely
hosted, first-party services. GitLab recommends against use of this flow.
- **Device Authorization Grant** (GitLab 17.1 and later): Secure flow oriented toward devices without browser access. Requires a secondary device to complete the authorization flow.

View File

@ -324,7 +324,7 @@ Parameters:
| --------- | ---- | -------- | ----------- |
| `token` | string | yes | Token you get after you create a Buildkite pipeline with a GitLab repository. |
| `project_url` | string | yes | Pipeline URL (for example, `https://buildkite.com/example/pipeline`). |
| `enable_ssl_verification` | boolean | no | **Deprecated:** This parameter has no effect because SSL verification is always enabled. |
| `enable_ssl_verification` | boolean | no | **Deprecated**: This parameter has no effect because SSL verification is always enabled. |
| `push_events` | boolean | no | Enable notifications for push events. |
| `merge_requests_events` | boolean | no | Enable notifications for merge request events. |
| `tag_push_events` | boolean | no | Enable notifications for tag push events. |
@ -1017,7 +1017,7 @@ Parameters:
| --------- | ---- | -------- | ----------- |
| `channel` | string | no | Default channel to use if no other channel is configured. |
| `notify_only_broken_pipelines` | boolean | no | Send notifications for broken pipelines. |
| `notify_only_default_branch` | boolean | no | **Deprecated:** This parameter has been replaced with `branches_to_be_notified`. |
| `notify_only_default_branch` | boolean | no | **Deprecated**: This parameter has been replaced with `branches_to_be_notified`. |
| `branches_to_be_notified` | string | no | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `alert_events` | boolean | no | Enable notifications for alert events. |
| `issues_events` | boolean | no | Enable notifications for issue events. |
@ -1088,7 +1088,7 @@ Parameters:
| --------- | ---- | -------- | ----------- |
| `webhook` | string | yes | The Hangouts Chat webhook (for example, `https://chat.googleapis.com/v1/spaces...`). |
| `notify_only_broken_pipelines` | boolean | no | Send notifications for broken pipelines. |
| `notify_only_default_branch` | boolean | no | **Deprecated:** This parameter has been replaced with `branches_to_be_notified`. |
| `notify_only_default_branch` | boolean | no | **Deprecated**: This parameter has been replaced with `branches_to_be_notified`. |
| `branches_to_be_notified` | string | no | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `push_events` | boolean | no | Enable notifications for push events. |
| `issues_events` | boolean | no | Enable notifications for issue events. |
@ -1588,7 +1588,7 @@ Parameters:
| `username` | string | no | Mattermost notifications username. |
| `channel` | string | no | Default channel to use if no other channel is configured. |
| `notify_only_broken_pipelines` | boolean | no | Send notifications for broken pipelines. |
| `notify_only_default_branch` | boolean | no | **Deprecated:** This parameter has been replaced with `branches_to_be_notified`. |
| `notify_only_default_branch` | boolean | no | **Deprecated**: This parameter has been replaced with `branches_to_be_notified`. |
| `branches_to_be_notified` | string | no | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `labels_to_be_notified` | string | no | Labels to send notifications for. Leave blank to receive notifications for all events. |
| `labels_to_be_notified_behavior` | string | no | Labels to be notified for. Valid options are `match_any` and `match_all`. The default value is `match_any`. |
@ -1691,7 +1691,7 @@ Parameters:
| --------- | ---- | -------- | ----------- |
| `webhook` | string | yes | The Microsoft Teams webhook (for example, `https://outlook.office.com/webhook/...`). |
| `notify_only_broken_pipelines` | boolean | no | Send notifications for broken pipelines. |
| `notify_only_default_branch` | boolean | no | **Deprecated:** This parameter has been replaced with `branches_to_be_notified`. |
| `notify_only_default_branch` | boolean | no | **Deprecated**: This parameter has been replaced with `branches_to_be_notified`. |
| `branches_to_be_notified` | string | no | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `push_events` | boolean | no | Enable notifications for push events. |
| `issues_events` | boolean | no | Enable notifications for issue events. |
@ -2098,7 +2098,7 @@ Parameters:
| `username` | string | no | Slack notifications username. |
| `channel` | string | no | Default channel to use if no other channel is configured. |
| `notify_only_broken_pipelines` | boolean | no | Send notifications for broken pipelines. |
| `notify_only_default_branch` | boolean | no | **Deprecated:** This parameter has been replaced with `branches_to_be_notified`. |
| `notify_only_default_branch` | boolean | no | **Deprecated**: This parameter has been replaced with `branches_to_be_notified`. |
| `branches_to_be_notified` | string | no | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `labels_to_be_notified` | string | no | Labels to send notifications for. Leave blank to receive notifications for all events. |
| `labels_to_be_notified_behavior` | string | no | Labels to be notified for. Valid options are `match_any` and `match_all`. The default value is `match_any`. |

View File

@ -439,7 +439,7 @@ to configure other related settings. These requirements are
| `email_restrictions` | string | required by: `email_restrictions_enabled` | Regular expression that is checked against the email used during registration. |
| `after_sign_up_text` | string | no | Text shown to the user after signing up. |
| `akismet_api_key` | string | required by: `akismet_enabled` | API key for Akismet spam protection. |
| `akismet_enabled` | boolean | no | (**If enabled, requires:** `akismet_api_key`) Enable or disable Akismet spam protection. |
| `akismet_enabled` | boolean | no | (**If enabled, requires**: `akismet_api_key`) Enable or disable Akismet spam protection. |
| `allow_all_integrations` | boolean | no | When `false`, only integrations in `allowed_integrations` are allowed on the instance. Premium and Ultimate only. |
| `allowed_integrations` | array of strings | no | When `allow_all_integrations` is `false`, only integrations in this list are allowed on the instance. Premium and Ultimate only. |
| `allow_account_deletion` | boolean | no | Set to `true` to allow users to delete their accounts. Premium and Ultimate only. |
@ -450,7 +450,7 @@ to configure other related settings. These requirements are
| `allow_project_creation_for_guest_and_below` | boolean | no | Indicates whether users assigned up to the Guest role can create groups and personal projects. Defaults to `true`. |
| `allow_runner_registration_token` | boolean | no | Allow using a registration token to create a runner. Defaults to `true`. |
| `archive_builds_in_human_readable` | string | no | Set the duration for which the jobs are considered as old and expired. After that time passes, the jobs are archived and no longer able to be retried. Make it empty to never expire jobs. It has to be no less than 1 day, for example: `15 days`, `1 month`, `2 years`. |
| `asset_proxy_enabled` | boolean | no | (**If enabled, requires:** `asset_proxy_url`) Enable proxying of assets. GitLab restart is required to apply changes. |
| `asset_proxy_enabled` | boolean | no | (**If enabled, requires**: `asset_proxy_url`) Enable proxying of assets. GitLab restart is required to apply changes. |
| `asset_proxy_secret_key` | string | no | Shared secret with the asset proxy server. GitLab restart is required to apply changes. |
| `asset_proxy_url` | string | no | URL of the asset proxy server. GitLab restart is required to apply changes. |
| `asset_proxy_whitelist` | string or array of strings | no | (Deprecated: Use `asset_proxy_allowlist` instead) Assets that match these domains are **not** proxied. Wildcards allowed. Your GitLab installation URL is automatically allowlisted. GitLab restart is required to apply changes. |
@ -513,7 +513,7 @@ to configure other related settings. These requirements are
| `disable_personal_access_tokens` | boolean | no | Disable personal access tokens. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/384201) in GitLab 15.7. GitLab Self-Managed, Premium and Ultimate only. There is no method available to enable a personal access token that's been disabled through the API. This is a [known issue](https://gitlab.com/gitlab-org/gitlab/-/issues/399233). For more information about available workarounds, see [Workaround](https://gitlab.com/gitlab-org/gitlab/-/issues/399233#workaround). |
| `disabled_oauth_sign_in_sources` | array of strings | no | Disabled OAuth sign-in sources. |
| `dns_rebinding_protection_enabled` | boolean | no | Enforce DNS-rebinding attack protection. |
| `domain_denylist_enabled` | boolean | no | (**If enabled, requires:** `domain_denylist`) Allows blocking sign-ups from emails from specific domains. |
| `domain_denylist_enabled` | boolean | no | (**If enabled, requires**: `domain_denylist`) Allows blocking sign-ups from emails from specific domains. |
| `domain_denylist` | array of strings | no | Users with email addresses that match these domains **cannot** sign up. Wildcards allowed. Enter multiple entries on separate lines. For example: `domain.com`, `*.domain.com`. |
| `domain_allowlist` | array of strings | no | Force people to use only corporate emails for sign-up. Default is `null`, meaning there is no restriction. |
| `downstream_pipeline_trigger_limit_per_project_user_sha` | integer | no | [Maximum downstream pipeline trigger rate](../administration/settings/continuous_integration.md#limit-downstream-pipeline-trigger-rate). Default: `0` (no restriction). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/144077) in GitLab 16.10. |
@ -552,12 +552,12 @@ to configure other related settings. These requirements are
| `custom_http_clone_url_root` | string | no | Set a custom Git clone URL for HTTP(S). |
| `enabled_git_access_protocol` | string | no | Enabled protocols for Git access. Allowed values are: `ssh`, `http`, and `all` to allow both protocols. `all` value [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/12944) in GitLab 16.9. |
| `enforce_namespace_storage_limit` | boolean | no | Enabling this permits enforcement of namespace storage limits. |
| `enforce_terms` | boolean | no | (**If enabled, requires:** `terms`) Enforce application ToS to all users. |
| `external_auth_client_cert` | string | no | (**If enabled, requires:** `external_auth_client_key`) The certificate to use to authenticate with the external authorization service. |
| `enforce_terms` | boolean | no | (**If enabled, requires**: `terms`) Enforce application ToS to all users. |
| `external_auth_client_cert` | string | no | (**If enabled, requires**: `external_auth_client_key`) The certificate to use to authenticate with the external authorization service. |
| `external_auth_client_key_pass` | string | no | Passphrase to use for the private key when authenticating with the external service; this is encrypted when stored. |
| `external_auth_client_key` | string | required by: `external_auth_client_cert` | Private key for the certificate when authentication is required for the external authorization service, this is encrypted when stored. |
| `external_authorization_service_default_label` | string | required by:<br>`external_authorization_service_enabled` | The default classification label to use when requesting authorization and no classification label has been specified on the project. |
| `external_authorization_service_enabled` | boolean | no | (**If enabled, requires:** `external_authorization_service_default_label`, `external_authorization_service_timeout` and `external_authorization_service_url`) Enable using an external authorization service for accessing projects. |
| `external_authorization_service_enabled` | boolean | no | (**If enabled, requires**: `external_authorization_service_default_label`, `external_authorization_service_timeout` and `external_authorization_service_url`) Enable using an external authorization service for accessing projects. |
| `external_authorization_service_timeout` | float | required by:<br>`external_authorization_service_enabled` | The timeout after which an authorization request is aborted, in seconds. When a request times out, access is denied to the user. (min: 0.001, max: 10, step: 0.001). |
| `external_authorization_service_url` | string | required by:<br>`external_authorization_service_enabled` | URL to which authorization requests are directed. |
| `external_pipeline_validation_service_url` | string | no | URL to use for pipeline validation requests. |
@ -652,12 +652,12 @@ to configure other related settings. These requirements are
| `performance_bar_enabled` | boolean | no | (Deprecated: Pass `performance_bar_allowed_group_path: nil` instead) Allow enabling the performance bar. |
| `personal_access_token_prefix` | string | no | Prefix for all generated personal access tokens. |
| `pipeline_limit_per_project_user_sha` | integer | no | Maximum number of pipeline creation requests per minute per user and commit. Disabled by default. |
| `gitpod_enabled` | boolean | no | (**If enabled, requires:** `gitpod_url`) Enable [Gitpod integration](../integration/gitpod.md). Default is `false`. |
| `gitpod_enabled` | boolean | no | (**If enabled, requires**: `gitpod_url`) Enable [Gitpod integration](../integration/gitpod.md). Default is `false`. |
| `gitpod_url` | string | required by: `gitpod_enabled` | The Gitpod instance URL for integration. |
| `kroki_enabled` | boolean | no | (**If enabled, requires:** `kroki_url`) Enable [Kroki integration](../administration/integration/kroki.md). Default is `false`. |
| `kroki_enabled` | boolean | no | (**If enabled, requires**: `kroki_url`) Enable [Kroki integration](../administration/integration/kroki.md). Default is `false`. |
| `kroki_url` | string | required by: `kroki_enabled` | The Kroki instance URL for integration. |
| `kroki_formats` | object | no | Additional formats supported by the Kroki instance. Possible values are `true` or `false` for formats `bpmn`, `blockdiag`, and `excalidraw` in the format `<format>: true` or `<format>: false`. |
| `plantuml_enabled` | boolean | no | (**If enabled, requires:** `plantuml_url`) Enable [PlantUML integration](../administration/integration/plantuml.md). Default is `false`. |
| `plantuml_enabled` | boolean | no | (**If enabled, requires**: `plantuml_url`) Enable [PlantUML integration](../administration/integration/plantuml.md). Default is `false`. |
| `plantuml_url` | string | required by: `plantuml_enabled` | The PlantUML instance URL for integration. |
| `polling_interval_multiplier` | float | no | Interval multiplier used by endpoints that perform polling. Set to `0` to disable polling. |
| `project_export_enabled` | boolean | no | Enable project export. |
@ -682,7 +682,7 @@ to configure other related settings. These requirements are
| `raw_blob_request_limit` | integer | no | Maximum number of requests per minute for each raw path (default is `300`). Set to `0` to disable throttling.|
| `search_rate_limit` | integer | no | Max number of requests per minute for performing a search while authenticated. Default: 30. To disable throttling set to 0.|
| `search_rate_limit_unauthenticated` | integer | no | Max number of requests per minute for performing a search while unauthenticated. Default: 10. To disable throttling set to 0.|
| `recaptcha_enabled` | boolean | no | (**If enabled, requires:** `recaptcha_private_key` and `recaptcha_site_key`) Enable reCAPTCHA. |
| `recaptcha_enabled` | boolean | no | (**If enabled, requires**: `recaptcha_private_key` and `recaptcha_site_key`) Enable reCAPTCHA. |
| `login_recaptcha_protection_enabled` | boolean | no | Enable reCAPTCHA for login. |
| `recaptcha_private_key` | string | required by: `recaptcha_enabled` | Private key for reCAPTCHA. |
| `recaptcha_site_key` | string | required by: `recaptcha_enabled` | Site key for reCAPTCHA. |
@ -694,7 +694,7 @@ to configure other related settings. These requirements are
| `repository_storages_weighted` | hash of strings to integers | no | Hash of names taken from `gitlab.yml` to [weights](../administration/repository_storage_paths.md#configure-where-new-repositories-are-stored). New projects are created in one of these stores, chosen by a weighted random selection. |
| `require_admin_approval_after_user_signup` | boolean | no | When enabled, any user that signs up for an account using the registration form is placed under a **Pending approval** state and has to be explicitly [approved](../administration/moderate_users.md) by an administrator. |
| `require_personal_access_token_expiry` | boolean | no | When enabled, users must set an expiration date when creating a group or project access token, or a personal access token owned by a non-service account. |
| `require_two_factor_authentication` | boolean | no | (**If enabled, requires:** `two_factor_grace_period`) Require all users to set up two-factor authentication. |
| `require_two_factor_authentication` | boolean | no | (**If enabled, requires**: `two_factor_grace_period`) Require all users to set up two-factor authentication. |
| `resource_usage_limits` | hash | no | Definition for resource usage limits enforced in Sidekiq workers. This setting is available for GitLab.com only. |
| `restricted_visibility_levels` | array of strings | no | Selected levels cannot be used by non-Administrator users for groups, projects or snippets. Can take `private`, `internal` and `public` as a parameter. Default is `null`, which means there is no restriction. [Changed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/131203) in GitLab 16.4: cannot select levels that are set as `default_project_visibility` and `default_group_visibility`. |
| `rsa_key_restriction` | integer | no | The minimum allowed bit length of an uploaded RSA key. Default is `0` (no restriction). `-1` disables RSA keys. |
@ -706,7 +706,7 @@ to configure other related settings. These requirements are
| `scan_execution_policies_schedule_limit` | integer | no | Maximum number of `type: schedule` rules per scan execution policy. Default: 0. Maximum: 20 |
| `security_txt_content` | string | no | [Public security contact information](../administration/settings/security_contact_information.md). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/433210) in GitLab 16.7. |
| `service_access_tokens_expiration_enforced` | boolean | no | Flag to indicate if token expiry date can be optional for service account users |
| `shared_runners_enabled` | boolean | no | (**If enabled, requires:** `shared_runners_text` and `shared_runners_minutes`) Enable instance runners for new projects. |
| `shared_runners_enabled` | boolean | no | (**If enabled, requires**: `shared_runners_text` and `shared_runners_minutes`) Enable instance runners for new projects. |
| `shared_runners_minutes` | integer | required by: `shared_runners_enabled` | Set the maximum number of compute minutes that a group can use on instance runners per month. Premium and Ultimate only. |
| `shared_runners_text` | string | required by: `shared_runners_enabled` | Instance runners text. |
| `runner_token_expiration_interval` | integer | no | Set the expiration time (in seconds) of authentication tokens of newly registered instance runners. Minimum value is 7200 seconds. For more information, see [Automatically rotate authentication tokens](../ci/runners/configure_runners.md#automatically-rotate-runner-authentication-tokens). |
@ -720,7 +720,7 @@ to configure other related settings. These requirements are
| `signup_enabled` | boolean | no | Enable registration. Default is `true`. |
| `silent_admin_exports_enabled` | boolean | no | Enable [Silent admin exports](../administration/settings/import_and_export_settings.md#enable-silent-admin-exports). Default is `false`. |
| `silent_mode_enabled` | boolean | no | Enable [Silent mode](../administration/silent_mode/_index.md). Default is `false`. |
| `slack_app_enabled` | boolean | no | (**If enabled, requires:** `slack_app_id`, `slack_app_secret`, `slack_app_signing_secret`, and `slack_app_verification_token`) Enable the GitLab for Slack app. |
| `slack_app_enabled` | boolean | no | (**If enabled, requires**: `slack_app_id`, `slack_app_secret`, `slack_app_signing_secret`, and `slack_app_verification_token`) Enable the GitLab for Slack app. |
| `slack_app_id` | string | required by: `slack_app_enabled` | The client ID of the GitLab for Slack app. |
| `slack_app_secret` | string | required by: `slack_app_enabled` | The client secret of the GitLab for Slack app. Used for authenticating OAuth requests from the app. |
| `slack_app_signing_secret` | string | required by: `slack_app_enabled` | The signing secret of the GitLab for Slack app. Used for authenticating API requests from the app. |
@ -740,33 +740,33 @@ to configure other related settings. These requirements are
| `suggest_pipeline_enabled` | boolean | no | Enable pipeline suggestion banner. |
| `enable_artifact_external_redirect_warning_page` | boolean | no | Show the external redirect page that warns you about user-generated content in GitLab Pages. |
| `terminal_max_session_time` | integer | no | Maximum time for web terminal websocket connection (in seconds). Set to `0` for unlimited time. |
| `terms` | text | required by: `enforce_terms` | (**Required by:** `enforce_terms`) Markdown content for the ToS. |
| `throttle_authenticated_api_enabled` | boolean | no | (**If enabled, requires:** `throttle_authenticated_api_period_in_seconds` and `throttle_authenticated_api_requests_per_period`) Enable authenticated API request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |
| `terms` | text | required by: `enforce_terms` | (**Required by**: `enforce_terms`) Markdown content for the ToS. |
| `throttle_authenticated_api_enabled` | boolean | no | (**If enabled, requires**: `throttle_authenticated_api_period_in_seconds` and `throttle_authenticated_api_requests_per_period`) Enable authenticated API request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |
| `throttle_authenticated_api_period_in_seconds` | integer | required by:<br>`throttle_authenticated_api_enabled` | Rate limit period (in seconds). |
| `throttle_authenticated_api_requests_per_period` | integer | required by:<br>`throttle_authenticated_api_enabled` | Maximum requests per period per user. |
| `throttle_authenticated_packages_api_enabled` | boolean | no | (**If enabled, requires:** `throttle_authenticated_packages_api_period_in_seconds` and `throttle_authenticated_packages_api_requests_per_period`) Enable authenticated API request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). View [package registry rate limits](../administration/settings/package_registry_rate_limits.md) for more details. |
| `throttle_authenticated_packages_api_enabled` | boolean | no | (**If enabled, requires**: `throttle_authenticated_packages_api_period_in_seconds` and `throttle_authenticated_packages_api_requests_per_period`) Enable authenticated API request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). View [package registry rate limits](../administration/settings/package_registry_rate_limits.md) for more details. |
| `throttle_authenticated_packages_api_period_in_seconds` | integer | required by:<br>`throttle_authenticated_packages_api_enabled` | Rate limit period (in seconds). View [package registry rate limits](../administration/settings/package_registry_rate_limits.md) for more details. |
| `throttle_authenticated_packages_api_requests_per_period` | integer | required by:<br>`throttle_authenticated_packages_api_enabled` | Maximum requests per period per user. View [package registry rate limits](../administration/settings/package_registry_rate_limits.md) for more details. |
| `throttle_authenticated_web_enabled` | boolean | no | (**If enabled, requires:** `throttle_authenticated_web_period_in_seconds` and `throttle_authenticated_web_requests_per_period`) Enable authenticated web request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |
| `throttle_authenticated_web_enabled` | boolean | no | (**If enabled, requires**: `throttle_authenticated_web_period_in_seconds` and `throttle_authenticated_web_requests_per_period`) Enable authenticated web request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |
| `throttle_authenticated_web_period_in_seconds` | integer | required by:<br>`throttle_authenticated_web_enabled` | Rate limit period (in seconds). |
| `throttle_authenticated_web_requests_per_period` | integer | required by:<br>`throttle_authenticated_web_enabled` | Maximum requests per period per user. |
| `throttle_unauthenticated_enabled` | boolean | no | ([Deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/335300) in GitLab 14.3. Use `throttle_unauthenticated_web_enabled` or `throttle_unauthenticated_api_enabled` instead.) (**If enabled, requires:** `throttle_unauthenticated_period_in_seconds` and `throttle_unauthenticated_requests_per_period`) Enable unauthenticated web request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |
| `throttle_unauthenticated_enabled` | boolean | no | ([Deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/335300) in GitLab 14.3. Use `throttle_unauthenticated_web_enabled` or `throttle_unauthenticated_api_enabled` instead.) (**If enabled, requires**: `throttle_unauthenticated_period_in_seconds` and `throttle_unauthenticated_requests_per_period`) Enable unauthenticated web request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |
| `throttle_unauthenticated_period_in_seconds` | integer | required by:<br>`throttle_unauthenticated_enabled` | ([Deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/335300) in GitLab 14.3. Use `throttle_unauthenticated_web_period_in_seconds` or `throttle_unauthenticated_api_period_in_seconds` instead.) Rate limit period in seconds. |
| `throttle_unauthenticated_requests_per_period` | integer | required by:<br>`throttle_unauthenticated_enabled` | ([Deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/335300) in GitLab 14.3. Use `throttle_unauthenticated_web_requests_per_period` or `throttle_unauthenticated_api_requests_per_period` instead.) Max requests per period per IP. |
| `throttle_unauthenticated_api_enabled` | boolean | no | (**If enabled, requires:** `throttle_unauthenticated_api_period_in_seconds` and `throttle_unauthenticated_api_requests_per_period`) Enable unauthenticated API request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |
| `throttle_unauthenticated_api_enabled` | boolean | no | (**If enabled, requires**: `throttle_unauthenticated_api_period_in_seconds` and `throttle_unauthenticated_api_requests_per_period`) Enable unauthenticated API request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |
| `throttle_unauthenticated_api_period_in_seconds` | integer | required by:<br>`throttle_unauthenticated_api_enabled` | Rate limit period in seconds. |
| `throttle_unauthenticated_api_requests_per_period` | integer | required by:<br>`throttle_unauthenticated_api_enabled` | Max requests per period per IP. |
| `throttle_unauthenticated_packages_api_enabled` | boolean | no | (**If enabled, requires:** `throttle_unauthenticated_packages_api_period_in_seconds` and `throttle_unauthenticated_packages_api_requests_per_period`) Enable authenticated API request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). View [package registry rate limits](../administration/settings/package_registry_rate_limits.md) for more details. |
| `throttle_unauthenticated_packages_api_enabled` | boolean | no | (**If enabled, requires**: `throttle_unauthenticated_packages_api_period_in_seconds` and `throttle_unauthenticated_packages_api_requests_per_period`) Enable authenticated API request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). View [package registry rate limits](../administration/settings/package_registry_rate_limits.md) for more details. |
| `throttle_unauthenticated_packages_api_period_in_seconds` | integer | required by:<br>`throttle_unauthenticated_packages_api_enabled` | Rate limit period (in seconds). View [package registry rate limits](../administration/settings/package_registry_rate_limits.md) for more details. |
| `throttle_unauthenticated_packages_api_requests_per_period` | integer | required by:<br>`throttle_unauthenticated_packages_api_enabled` | Maximum requests per period per user. View [package registry rate limits](../administration/settings/package_registry_rate_limits.md) for more details. |
| `throttle_unauthenticated_web_enabled` | boolean | no | (**If enabled, requires:** `throttle_unauthenticated_web_period_in_seconds` and `throttle_unauthenticated_web_requests_per_period`) Enable unauthenticated web request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |
| `throttle_unauthenticated_web_enabled` | boolean | no | (**If enabled, requires**: `throttle_unauthenticated_web_period_in_seconds` and `throttle_unauthenticated_web_requests_per_period`) Enable unauthenticated web request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |
| `throttle_unauthenticated_web_period_in_seconds` | integer | required by:<br>`throttle_unauthenticated_web_enabled` | Rate limit period in seconds. |
| `throttle_unauthenticated_web_requests_per_period` | integer | required by:<br>`throttle_unauthenticated_web_enabled` | Max requests per period per IP. |
| `time_tracking_limit_to_hours` | boolean | no | Limit display of time tracking units to hours. Default is `false`. |
| `top_level_group_creation_enabled` | boolean | no | Allows a user to create top-level-groups. Default is `true`. |
| `two_factor_grace_period` | integer | required by: `require_two_factor_authentication` | Amount of time (in hours) that users are allowed to skip forced configuration of two-factor authentication. |
| `unconfirmed_users_delete_after_days` | integer | no | Specifies how many days after sign-up to delete users who have not confirmed their email. Only applicable if `delete_unconfirmed_users` is set to `true`. Must be `1` or greater. Default is `7`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/352514) in GitLab 16.1. GitLab Self-Managed, Premium and Ultimate only. |
| `unique_ips_limit_enabled` | boolean | no | (**If enabled, requires:** `unique_ips_limit_per_user` and `unique_ips_limit_time_window`) Limit sign in from multiple IPs. |
| `unique_ips_limit_enabled` | boolean | no | (**If enabled, requires**: `unique_ips_limit_per_user` and `unique_ips_limit_time_window`) Limit sign in from multiple IPs. |
| `unique_ips_limit_per_user` | integer | required by: `unique_ips_limit_enabled` | Maximum number of IPs per user. |
| `unique_ips_limit_time_window` | integer | required by: `unique_ips_limit_enabled` | How many seconds an IP is counted towards the limit. |
| `update_runner_versions_enabled` | boolean | no | Fetch GitLab Runner release version data from GitLab.com. For more information, see how to [determine which runners need to be upgraded](../ci/runners/runners_scope.md#determine-which-runners-need-to-be-upgraded). |

View File

@ -55,7 +55,7 @@ staging:
environment: staging
```
In the above example we use Dpl to deploy `my-app-staging` to Heroku server with API key stored in `HEROKU_STAGING_API_KEY` secure variable.
In the previous example we use Dpl to deploy `my-app-staging` to Heroku server with API key stored in `HEROKU_STAGING_API_KEY` secure variable.
To use different provider take a look at long list of [Supported Providers](https://github.com/travis-ci/dpl#supported-providers).
@ -81,7 +81,7 @@ staging:
The first line `apt-get update -yq` updates the list of available packages,
where second `apt-get install -y ruby-dev` installs the Ruby runtime on system.
The above example is valid for all Debian-compatible systems.
The previous example is valid for all Debian-compatible systems.
## Usage in staging and production

View File

@ -83,7 +83,7 @@ And this is basically all you need in the `before_script` section.
## How to deploy
As we stated above, we need to deploy the `build` folder from the Docker image to our server. To do so, we create a new job:
As we stated previously, we need to deploy the `build` folder from the Docker image to our server. To do so, we create a new job:
```yaml
stage_deploy:

View File

@ -182,7 +182,7 @@ Using phpenv also allows you to configure the PHP environment with:
phpenv config-add my_config.ini
```
**Important note:** It seems `phpenv/phpenv`
**Important note**: It seems `phpenv/phpenv`
[is abandoned](https://github.com/phpenv/phpenv/issues/57). There is a fork
at [`madumlao/phpenv`](https://github.com/madumlao/phpenv) that tries to bring
the project back to life. [`CHH/phpenv`](https://github.com/CHH/phpenv) also

View File

@ -112,9 +112,9 @@ GitLab-hosted runners have different cost factors depending on the runner type
| Linux Arm64 | `small` | `1` |
| Linux Arm64 | `medium` | `2` |
| Linux Arm64 | `large` | `3` |
| macOS M1 | `medium` | `6` (**Status:** Beta) |
| macOS M2 Pro | `large` | `12` (**Status:** Beta) |
| Windows | `medium` | `1` (**Status:** Beta) |
| macOS M1 | `medium` | `6` (**Status**: Beta) |
| macOS M2 Pro | `large` | `12` (**Status**: Beta) |
| Windows | `medium` | `1` (**Status**: Beta) |
These cost factors apply to GitLab-hosted runners on GitLab.com and GitLab Dedicated.

View File

@ -207,8 +207,8 @@ Additionally, sometimes the behavior of a pipeline needs to be more dynamic. The
to choose to start sub-pipelines (or not) is a powerful ability, especially if the
YAML is dynamically generated.
In the [basic pipeline](#basic-pipelines) and [`needs` pipeline](#pipelines-with-the-needs-keyword)
examples above, there are two packages that could be built independently.
In the previous [basic pipeline](#basic-pipelines) and [`needs` pipeline](#pipelines-with-the-needs-keyword)
examples, there are two packages that could be built independently.
These cases are ideal for using [parent-child pipelines](downstream_pipelines.md#parent-child-pipelines).
It separates out the configuration into multiple files, keeping things simpler.
You can combine parent-child pipelines with:

View File

@ -61,7 +61,7 @@ The owner of a pipeline schedule can edit it:
1. Select **Build > Pipeline schedules**.
1. Next to the schedule, select **Edit** ({{< icon name="pencil" >}}) and fill in the form.
The user must have the Developer role or above for the project. If the user is
The user must have at least the Developer role for the project. If the user is
not the owner of the schedule, they must first [take ownership](#take-ownership)
of the schedule.

View File

@ -38,7 +38,7 @@ display reports or link to important information directly from [merge requests](
{{< /details >}}
In addition to the reports listed above, GitLab can do many types of [Security reports](../../user/application_security/_index.md),
In addition to the reports listed previously, GitLab can do many types of [Security reports](../../user/application_security/_index.md),
generated by scanning and reporting any vulnerabilities found in your project:
| Feature | Description |

View File

@ -89,7 +89,7 @@ using Docker-in-Docker.
URL: https://example.com
```
The above example:
The previous example:
- Creates a `browser_performance` job in your CI/CD pipeline and runs sitespeed.io against the webpage you
defined in `URL` to gather key metrics.
@ -144,7 +144,7 @@ The `Total Score` metric is based on sitespeed.io's [coach performance score](ht
### Performance testing on review apps
The above CI YAML configuration is great for testing against static environments, and it can
The previous CI YAML configuration is great for testing against static environments, and it can
be extended for dynamic environments, but a few extra steps are required:
1. The `browser_performance` job should run after the dynamic environment has started.

View File

@ -190,7 +190,7 @@ Each object in that array must have at least the following properties:
The format is different from the [CodeClimate report format](https://github.com/codeclimate/platform/blob/master/spec/analyzers/SPEC.md#data-types) in the following ways:
- Although the [CodeClimate report format](https://github.com/codeclimate/platform/blob/master/spec/analyzers/SPEC.md#data-types) supports more properties, Code Quality only processes the fields listed above.
- Although the [CodeClimate report format](https://github.com/codeclimate/platform/blob/master/spec/analyzers/SPEC.md#data-types) supports more properties, Code Quality only processes the fields listed previously.
- The GitLab parser does not allow a [byte order mark](https://en.wikipedia.org/wiki/Byte_order_mark) at the beginning of the file.
For example, this is a compliant report:

View File

@ -365,7 +365,7 @@ To use private runners:
--non-interactive
```
1. **Optional, but recommended:** Set the builds directory to `/tmp/builds`,
1. **Optional, but recommended**: Set the builds directory to `/tmp/builds`,
so job artifacts are periodically purged from the runner host. If you skip
this step, you must clean up the default builds directory (`/builds`) yourself.
You can do this by adding the following two flags to `gitlab-runner register`
@ -529,9 +529,9 @@ name = "docker:20.10.12-dind"
{{< alert type="note" >}}
If you use the [GitLab Runner Helm Chart](https://docs.gitlab.com/runner/install/kubernetes.html), you can use
the above Kubernetes configuration in the [`config` field](https://docs.gitlab.com/runner/install/kubernetes_helm_chart_configuration.html)
in the `values.yaml` file.
the previous Kubernetes configuration in the [`config` field](https://docs.gitlab.com/runner/install/kubernetes_helm_chart_configuration.html)
of the `values.yaml` file.
{{< /alert >}}
To ensure that you use the `overlay2` [storage driver](https://docs.docker.com/storage/storagedriver/select-storage-driver/), which offers the best overall performance:

View File

@ -69,8 +69,7 @@ Configuring your Load Performance Testing job can be broken down into several di
The first thing you need to do is determine the [type of load test](https://grafana.com/load-testing/types-of-load-testing/)
you want to run, and how you want it to run (for example, the number of users, throughput, and so on).
Refer to the [k6 docs](https://k6.io/docs/), especially the [k6 testing guides](https://k6.io/docs/testing-guides),
for guidance on the above and more.
Refer to the [k6 docs](https://k6.io/docs/), especially the [k6 testing guides](https://k6.io/docs/testing-guides) for guidance.
### Test Environment setup
@ -125,7 +124,7 @@ An example configuration workflow:
K6_TEST_FILE: <PATH TO K6 TEST FILE IN PROJECT>
```
The above example creates a `load_performance` job in your CI/CD pipeline that runs
The previous example creates a `load_performance` job in your CI/CD pipeline that runs
the k6 test.
{{< alert type="note" >}}
@ -160,7 +159,7 @@ If [GitLab Pages](../../user/project/pages/_index.md) is enabled, you can view t
### Load Performance testing in review apps
The CI/CD YAML configuration example above works for testing against static environments,
The previous CI/CD YAML configuration example works for testing against static environments,
but it can be extended to work with [review apps](../review_apps/_index.md) or
[dynamic environments](../environments/_index.md) with a few extra steps.

View File

@ -47,11 +47,11 @@ is not available, the panel shows only the list of failed tests for the source b
The types of results are:
- **Newly failed tests:** Test cases which passed on the base branch and failed on the head branch.
- **Newly encountered errors:** Test cases which passed on the base branch and failed due to a
- **Newly failed tests**: Test cases which passed on the base branch and failed on the head branch.
- **Newly encountered errors**: Test cases which passed on the base branch and failed due to a
test error on the head branch.
- **Existing failures:** Test cases which failed on the base branch and failed on the head branch.
- **Resolved failures:** Test cases which failed on the base branch and passed on the head branch.
- **Existing failures**: Test cases which failed on the base branch and failed on the head branch.
- **Resolved failures**: Test cases which failed on the base branch and passed on the head branch.
### View failed tests

View File

@ -7,10 +7,10 @@ title: Backport documentation changes
There are two types of backports:
- **Current stable release:** Any maintainer can backport
- **Current stable release**: Any maintainer can backport
changes, usually bug fixes but also important documentation changes, into the
current stable release.
- **Older stable releases:** To guarantee the
- **Older stable releases**: To guarantee the
[maintenance policy](../../policy/maintenance.md) is respected, merging to
older stable releases is restricted to release managers.

View File

@ -50,10 +50,10 @@ title: 'Cloud Connector: Configuration'
A GitLab Rails instance accesses...
```
**Why:** Hugo can generate automated listings of pages. For these to work, Hugo needs the page title to be handled more like data than regular content.
**Why**: Hugo can generate automated listings of pages. For these to work, Hugo needs the page title to be handled more like data than regular content.
We are not using these initially, but may do so in the future.
**Testing:** Error-level Vale rule ([`FrontMatter.yml`](https://gitlab.com/gitlab-org/cloud-native/gitlab-operator/-/blob/master/doc/.vale/gitlab_docs/FrontMatter.yml?ref_type=heads)).
**Testing**: Error-level Vale rule ([`FrontMatter.yml`](https://gitlab.com/gitlab-org/cloud-native/gitlab-operator/-/blob/master/doc/.vale/gitlab_docs/FrontMatter.yml?ref_type=heads)).
### Shortcodes
@ -79,10 +79,10 @@ Don't delete your docs!
See the [Shortcodes reference](https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/-/blob/main/doc/shortcodes.md) for syntax and examples.
**Why:** Shortcodes are the standard Hugo method for creating custom templated
**Why**: Shortcodes are the standard Hugo method for creating custom templated
bits of content.
**Testing:** Shortcodes are validated on docs pipelines (see [implementation issue](https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/-/issues/161)).
**Testing**: Shortcodes are validated on docs pipelines (see [implementation issue](https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/-/issues/161)).
#### Shortcodes in `/help`
@ -93,7 +93,7 @@ Shortcodes, like our existing custom Markdown elements, will not render in `/hel
Shortcodes have more verbose syntax, so we've modified `/help` to hide these
tags and show simplified plain text fallbacks for elements like tabs and alert boxes.
**Why:** `/help` only renders plain Markdown. It is not a static site generator with
**Why**: `/help` only renders plain Markdown. It is not a static site generator with
functionality to transform content or render templated frontend code.
### Kramdown
@ -110,9 +110,9 @@ A few example Kramdown tags that exist on the site right now:
With Hugo, these will no longer have any effect. They will render as plain text.
**Why:** Hugo uses the Goldmark Markdown rendering engine, not Kramdown.
**Why**: Hugo uses the Goldmark Markdown rendering engine, not Kramdown.
**Testing:** We are running an audit job on the CI pipeline for Kramdown tags ([example](https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/-/jobs/8885163533)).
**Testing**: We are running an audit job on the CI pipeline for Kramdown tags ([example](https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/-/jobs/8885163533)).
These tags will be manually removed as part of launch.
### Menu entries in `navigation.yaml`
@ -148,7 +148,7 @@ names at each level of the hierarchy.
url: 'tutorials/left_sidebar/'
```
**Why:** Using the same property names at each level of the hierarchy significantly
**Why**: Using the same property names at each level of the hierarchy significantly
simplifies everything we do programmatically with the menu. It also simplifies
menu edits for contributors.
@ -167,7 +167,7 @@ include a `.html` extension. End each URL with a trailing `/`.
url: 'tutorials/gitlab_navigation/'
```
**Testing:** We run various checks on `navigation.yaml` in [this script](https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/main/scripts/check-navigation.sh?ref_type=heads),
**Testing**: We run various checks on `navigation.yaml` in [this script](https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/main/scripts/check-navigation.sh?ref_type=heads),
which runs as a pipeline job when the YAML file is updated.
## File naming
@ -196,10 +196,10 @@ doc/
└── _index.md # Renamed
```
**Why:** Hugo requires this specific naming convention for section index pages (pages that serve as the main page for a directory).
**Why**: Hugo requires this specific naming convention for section index pages (pages that serve as the main page for a directory).
See Hugo's documentation on [Page bundles](https://gohugo.io/content-management/page-bundles/) for more information.
**Testing:** We will test for this on the pipeline and prevent merges that include an `index.md` file (see [this issue](https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/-/issues/161) for details).
**Testing**: We will test for this on the pipeline and prevent merges that include an `index.md` file (see [this issue](https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/-/issues/161) for details).
### Clashing file names
@ -225,11 +225,11 @@ inaccessible.
# Resulting URL for both: /administration/dedicated/configure_instance/
```
**Why:** Hugo's options for URL paths are `prettyURLs` and `uglyURLs`. Both of these produce
**Why**: Hugo's options for URL paths are `prettyURLs` and `uglyURLs`. Both of these produce
somewhat different paths than the Nanoc website does. We've opted for `prettyURLs` because it's
Hugo's default, and Hugo's pattern for `uglyURLs` is different from most other static site generators.
**Testing:** After launch, Hugo will throw an error on docs pipelines if it detects a new path clash.
**Testing**: After launch, Hugo will throw an error on docs pipelines if it detects a new path clash.
## Processes
@ -238,7 +238,7 @@ Hugo's default, and Hugo's pattern for `uglyURLs` is different from most other s
Cutting a release no longer requires updating `latest.Dockerfile`. This file no longer exists in
the project, and the release template has been updated accordingly.
**Why:** We've refactored versioning to use the [Parallel Deployments](../../user/project/pages/_index.md#parallel-deployments) feature.
**Why**: We've refactored versioning to use the [Parallel Deployments](../../user/project/pages/_index.md#parallel-deployments) feature.
You can review the new release process [here](https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/-/blob/main/.gitlab/issue_templates/release.md).
### Monthly technical writing tasks
@ -253,11 +253,11 @@ This does not impact the release post [structural check](https://handbook.gitlab
{{< /alert >}}
**Why:** Some Ruby scripts need to be rewritten in Go, and the maintenance tasks are
**Why**: Some Ruby scripts need to be rewritten in Go, and the maintenance tasks are
low-priority enough that we can launch without them. There may be more opportunity
post-launch to share more of these scripts with the Handbook project.
**Testing:** Because we will pause on removing old redirects temporarily,
**Testing**: Because we will pause on removing old redirects temporarily,
we've added a [test script](https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/-/blob/main/scripts/redirect-threshold-check.sh?ref_type=heads) to warn if we get near the Pages redirect limit.
## User-facing changes

View File

@ -380,9 +380,9 @@ These constructions are more casual than the alternatives:
If you use an acronym, spell it out on first use on a page. You do not need to spell it out more than once on a page.
- **Titles:** Try to avoid acronyms in topic titles, especially if the acronym is not widely used.
- **Plurals:** Try not to make acronyms plural. For example, use `YAML files`, not `YAMLs`. If you must make an acronym plural, do not use an apostrophe. For example, use `APIs`, not `API's`.
- **Possessives:** Use caution when making an acronym possessive. If possible,
- **Titles**: Try to avoid acronyms in topic titles, especially if the acronym is not widely used.
- **Plurals**: Try not to make acronyms plural. For example, use `YAML files`, not `YAMLs`. If you must make an acronym plural, do not use an apostrophe. For example, use `APIs`, not `API's`.
- **Possessives**: Use caution when making an acronym possessive. If possible,
write the sentence to avoid making the acronym possessive. If you must make the
acronym possessive, consider spelling out the words.

View File

@ -285,8 +285,8 @@ On the EC2 dashboard, look for **Load Balancers** in the left navigation bar:
| TLS | 443 | `gitlab-loadbalancer-http-target` |
1. For the TLS listener on port `443`, under **Security Policy** settings:
1. **Policy name:** Pick a predefined security policy from the dropdown list. You can see a breakdown of [Predefined SSL Security Policies for Network Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/create-tls-listener.html#describe-ssl-policies) in the AWS documentation. Check the GitLab codebase for a list of [supported SSL ciphers and protocols](https://gitlab.com/gitlab-org/gitlab/-/blob/9ee7ad433269b37251e0dd5b5e00a0f00d8126b4/lib/support/nginx/gitlab-ssl#L97-99).
1. **Default SSL/TLS server certificate:** Select an SSL/TLS certificate from ACM or upload a certificate to IAM.
1. **Policy name**: Pick a predefined security policy from the dropdown list. You can see a breakdown of [Predefined SSL Security Policies for Network Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/create-tls-listener.html#describe-ssl-policies) in the AWS documentation. Check the GitLab codebase for a list of [supported SSL ciphers and protocols](https://gitlab.com/gitlab-org/gitlab/-/blob/9ee7ad433269b37251e0dd5b5e00a0f00d8126b4/lib/support/nginx/gitlab-ssl#L97-99).
1. **Default SSL/TLS server certificate**: Select an SSL/TLS certificate from ACM or upload a certificate to IAM.
1. For each listener we created, we need to create a target group and assign them based on the table earlier. We haven't created any EC2 instances yet so you don't need to register targets. The EC2 instances are created and assigned as part of the [auto scaling group setup](#create-an-auto-scaling-group) later on.
1. Select **Create target group**. Select **Instances** as the target type.
@ -316,14 +316,14 @@ On the Route 53 dashboard, select **Hosted zones** in the left navigation bar:
1. Select an existing hosted zone or, if you do not already have one for your domain, select **Create Hosted Zone**, enter your domain name, and select **Create**.
1. Select **Create record** and provide the following values:
1. **Name:** Use the domain name (the default value) or enter a subdomain.
1. **Type:** Select **A - IPv4 address**.
1. **Alias:** Defaults to **disabled**. Enable this option.
1. **Route traffic to:** Select **Alias to Network Load Balancer**.
1. **Region:** Select the region where the Network Load Balancer resides.
1. **Choose network load balancer:** Select the Network Load Balancer we created earlier.
1. **Routing Policy:** We use **Simple** but you can choose a different policy based on your use case.
1. **Evaluate Target Health:** We set this to **No** but you can choose to have the load balancer route traffic based on target health.
1. **Name**: Use the domain name (the default value) or enter a subdomain.
1. **Type**: Select **A - IPv4 address**.
1. **Alias**: Defaults to **disabled**. Enable this option.
1. **Route traffic to**: Select **Alias to Network Load Balancer**.
1. **Region**: Select the region where the Network Load Balancer resides.
1. **Choose network load balancer**: Select the Network Load Balancer we created earlier.
1. **Routing Policy**: We use **Simple** but you can choose a different policy based on your use case.
1. **Evaluate Target Health**: We set this to **No** but you can choose to have the load balancer route traffic based on target health.
1. Select **Create**.
1. If you registered your domain through Route 53, you're done. If you used a different domain registrar, you must update your DNS records with your domain registrar. You must:
1. Select **Hosted zones** and select the domain you added previously.
@ -345,9 +345,9 @@ We need a security group for our database that allows inbound traffic from the i
1. Select **Create security group**.
1. Give it a name (we use `gitlab-rds-sec-group`), a description, and select the `gitlab-vpc` from the **VPC** dropdown list.
1. In the **Inbound rules** section, select **Add rule** and set the following:
1. **Type:** search for and select the **PostgreSQL** rule.
1. **Source type:** set as "Custom".
1. **Source:** select the `gitlab-loadbalancer-sec-group` we created earlier.
1. **Type**: search for and select the **PostgreSQL** rule.
1. **Source type**: set as "Custom".
1. **Source**: select the `gitlab-loadbalancer-sec-group` we created earlier.
1. When done, select **Create security group**.
### RDS Subnet Group
@ -441,8 +441,8 @@ persistence and is used to store session data, temporary cache information, and
1. Leave the port as `6379` because this is what we previously used in our Redis security group.
1. Select the node type (at least `cache.t3.medium`, but adjust to your needs) and the number of replicas.
1. In the Connectivity settings section:
1. **Network type:** IPv4
1. **Subnet groups:** Select **Choose existing subnet group** and choose the `gitlab-redis-group` we had previously created.
1. **Network type**: IPv4
1. **Subnet groups**: Select **Choose existing subnet group** and choose the `gitlab-redis-group` we had previously created.
1. In the Availability Zone placements section:
1. Manually select the preferred availability zones, and under "Replica 2"
choose a different zone than the other two.
@ -537,10 +537,10 @@ From the EC2 dashboard:
1. In the **Key pair** section, select **Create new key pair**.
1. Give the key pair a name (we use `gitlab`) and save the `gitlab.pem` file for later use.
1. In the **Network settings** section:
1. **VPC:** Select `gitlab-vpc`, the VPC we created earlier.
1. **Subnet:** Select `gitlab-private-10.0.1.0` from the list of subnets we created earlier.
1. **Auto-assign Public IP:** Select `Disable`.
1. **Firewall:** Chose **Select existing security group** and select the `gitlab-loadbalancer-sec-group` we created earlier.
1. **VPC**: Select `gitlab-vpc`, the VPC we created earlier.
1. **Subnet**: Select `gitlab-private-10.0.1.0` from the list of subnets we created earlier.
1. **Auto-assign Public IP**: Select `Disable`.
1. **Firewall**: Choose **Select existing security group** and select the `gitlab-loadbalancer-sec-group` we created earlier.
1. For storage, the root volume is 8 GiB by default and should be enough given that we do not store any data there.
1. Review all your settings and, if you're happy, select **Launch Instance**.
@ -836,9 +836,9 @@ From the EC2 dashboard:
1. The root volume is 8 GiB by default and should be enough given that we do not store any data there. Select **Configure Security Group**.
1. Check **Select an existing security group** and select the `gitlab-loadbalancer-sec-group` we created earlier.
1. In the **Network settings** section:
1. **Firewall:** Choose **Select existing security group** and select the `gitlab-loadbalancer-sec-group` we created earlier.
1. **Firewall**: Choose **Select existing security group** and select the `gitlab-loadbalancer-sec-group` we created earlier.
1. In the **Advanced details** section:
1. **IAM instance profile:** Select the `GitLabS3Access` role we [created earlier](#create-an-iam-role).
1. **IAM instance profile**: Select the `GitLabS3Access` role we [created earlier](#create-an-iam-role).
1. Review all your settings and, if you're happy, select **Create launch template**.
### Create an auto scaling group
@ -860,8 +860,8 @@ From the EC2 dashboard:
1. For **Group size**, set **Desired capacity** to `2`.
1. In the Scaling settings section:
1. Select **No scaling policies**. The policies are configured later on.
1. **Min desired capacity:** Set to `2`.
1. **Max desired capacity:** Set to `4`.
1. **Min desired capacity**: Set to `2`.
1. **Max desired capacity**: Set to `4`.
1. Select **Next**.
1. Finally, configure notifications and tags as you see fit, review your changes, and create the
auto scaling group.

View File

@ -37,10 +37,10 @@ you to use.
1. Select **Add consumer**.
1. Provide the required details:
- **Name:** This can be anything. Consider something like `<Organization>'s GitLab`
- **Name**: This can be anything. Consider something like `<Organization>'s GitLab`
or `<Your Name>'s GitLab` or something else descriptive.
- **Application description:** Optional. Fill this in if you wish.
- **Callback URL:** (Required in GitLab versions 8.15 and greater)
- **Application description**: Optional. Fill this in if you wish.
- **Callback URL**: (Required in GitLab versions 8.15 and greater)
The URL to your GitLab installation, such as
`https://gitlab.example.com/users/auth`.
Leaving this field empty
@ -54,7 +54,7 @@ you to use.
{{< /alert >}}
- **URL:** The URL to your GitLab installation, such as `https://gitlab.example.com`.
- **URL**: The URL to your GitLab installation, such as `https://gitlab.example.com`.
1. Grant at least the following permissions:

View File

@ -16,10 +16,10 @@ error tracking increases efficiency and awareness. Users can choose between
For error tracking to work, you need:
- **Your application configured with the Sentry SDK:** When the error happens, Sentry SDK captures information
- **Your application configured with the Sentry SDK**: When the error happens, Sentry SDK captures information
about it and sends it over the network to the backend. The backend stores information about all
errors.
- **Error tracking backend:** The backend can be either GitLab itself or Sentry.
- **Error tracking backend**: The backend can be either GitLab itself or Sentry.
- To use the GitLab backend, see [GitLab integrated error tracking](integrated_error_tracking.md).
Integrated error tracking is available only on GitLab.com.
- To use Sentry as the backend, see [Sentry error tracking](sentry_error_tracking.md).

View File

@ -39,9 +39,9 @@ To create an escalation policy:
When configuring an escalation rule, you can designate who to page:
- **Email on-call user in schedule:** notifies the users who are on-call when the rule is triggered,
- **Email on-call user in schedule**: notifies the users who are on-call when the rule is triggered,
covering all rotations on the specified [on-call schedule](oncall_schedules.md).
- **Email user:** notifies the specified user directly.
- **Email user**: notifies the specified user directly.
When a notification is sent to a user through an on-call schedule or directly, a system note listing
the paged users is created on the alert.

View File

@ -76,13 +76,13 @@ To create a rotation:
1. Select the **Add a rotation** link.
1. Enter the following information:
- **Name:** Your rotation's name.
- **Participants:** The people you want in the rotation.
- **Rotation length:** The rotation's duration.
- **Starts on:** The date and time the rotation begins.
- **Enable end date:** With the toggle on, you can select the date and time your rotation
- **Name**: Your rotation's name.
- **Participants**: The people you want in the rotation.
- **Rotation length**: The rotation's duration.
- **Starts on**: The date and time the rotation begins.
- **Enable end date**: With the toggle on, you can select the date and time your rotation
ends.
- **Restrict to time intervals:** With the toggle on, you can restrict your rotation to the
- **Restrict to time intervals**: With the toggle on, you can restrict your rotation to the
time period you select.
### Edit a rotation

View File

@ -147,12 +147,12 @@ Ollama is a streamlined, open-source framework for running Large Language Models
Key Highlights:
1. **Simplified Deployment:** A user-friendly command-line interface ensures quick setup and hassle-free installation.
1. **Wide Model Support:** Compatible with popular open-source models like Llama 2, Mistral, and Code Llama.
1. **Optimized Performance:** Operates seamlessly across both GPU and CPU environments for resource efficiency.
1. **Integration-Ready:** Features an OpenAI-compatible API for easy integration with existing tools and workflows.
1. **No Containers Needed:** Runs directly on host systems, eliminating the need for Docker or containerized environments.
1. **Versatile Hosting Options:** Deployable on local machines, on-premises servers, or cloud GPU instances.
1. **Simplified Deployment**: A user-friendly command-line interface ensures quick setup and hassle-free installation.
1. **Wide Model Support**: Compatible with popular open-source models like Llama 2, Mistral, and Code Llama.
1. **Optimized Performance**: Operates seamlessly across both GPU and CPU environments for resource efficiency.
1. **Integration-Ready**: Features an OpenAI-compatible API for easy integration with existing tools and workflows.
1. **No Containers Needed**: Runs directly on host systems, eliminating the need for Docker or containerized environments.
1. **Versatile Hosting Options**: Deployable on local machines, on-premises servers, or cloud GPU instances.
Designed for simplicity and performance, Ollama empowers users to harness the power of LLMs without the complexity of traditional AI infrastructure. Further details on setup and supported models will be covered later in the documentation.

View File

@ -104,9 +104,9 @@ your application. The default installation method changed in GitLab 13.0, and
upgrading existing databases requires user involvement. The two installation
methods are:
- **channel 1 (deprecated):** Pulls in the database as a dependency of the associated
- **channel 1 (deprecated)**: Pulls in the database as a dependency of the associated
Helm chart. Only supports Kubernetes versions up to version 1.15.
- **channel 2 (current):** Installs the database as an independent Helm chart. Required
- **channel 2 (current)**: Installs the database as an independent Helm chart. Required
for using the in-cluster database feature with Kubernetes versions 1.16 and greater.
If you receive this error, you can do one of the following actions:

View File

@ -105,7 +105,7 @@ If your Auto DevOps project has an active environment that was deployed with the
- `MIGRATE_HELM_2TO3` to `true`. If this variable is not present, migration jobs do not run.
- `AUTO_DEVOPS_FORCE_DEPLOY_V2` to `1`.
- **Optional:** `BACKUP_HELM2_RELEASES` to `1`. If you set this variable, the migration
- **Optional**: `BACKUP_HELM2_RELEASES` to `1`. If you set this variable, the migration
job saves a backup for 1 week in a job artifact called `helm-2-release-backups`.
If you accidentally delete the Helm v2 releases before you are ready, you can restore
this backup from a Kubernetes manifest file by using `kubectl apply -f $backup`.

View File

@ -161,11 +161,11 @@ To create each label:
Repeat these steps to create all the labels you'll need:
- **Priority:** You will use these on an epic board to facilitate feature-level release prioritization.
- **Priority**: You will use these on an epic board to facilitate feature-level release prioritization.
- `priority::now`
- `priority::next`
- `priority::later`
- **Status:** You will use these labels on an issue board to understand a story's current step in the
- **Status**: You will use these labels on an issue board to understand a story's current step in the
overall development lifecycle.
- `status::triage`
- `status::refine`
@ -174,7 +174,7 @@ Repeat these steps to create all the labels you'll need:
- `status::in review`
- `status::acceptance`
- `status::done`
- **Type:** You will use these labels to represent the different types of work typically pulled into a single iteration:
- **Type**: You will use these labels to represent the different types of work typically pulled into a single iteration:
- `type::story`
- `type::bug`
- `type::maintenance`
@ -327,11 +327,11 @@ You'll use these lists to facilitate moving features through your board from lef
Use each list in your release planning board to represent the following time horizons:
- **Open:** Features that are not yet ready for prioritization.
- **Later:** Features that will be prioritized into a later release.
- **Next:** Features tentatively planned for the next release.
- **Now:** Features prioritized for your current release.
- **Closed:** Features that have been completed or canceled.
- **Open**: Features that are not yet ready for prioritization.
- **Later**: Features that will be prioritized into a later release.
- **Next**: Features tentatively planned for the next release.
- **Now**: Features prioritized for your current release.
- **Closed**: Features that have been completed or canceled.
### Create your first epic

View File

@ -4242,7 +4242,7 @@ However, enabling the bundled Grafana will no longer work from GitLab 16.3.
</div>
**Update:** We previously announced we would remove the existing License Compliance CI template in GitLab 16.0. However, due to performance issues with the [license scanning of CycloneDX files](https://docs.gitlab.com/user/compliance/license_scanning_of_cyclonedx_files/) we will do this in 16.3 instead.
**Update**: We previously announced we would remove the existing License Compliance CI template in GitLab 16.0. However, due to performance issues with the [license scanning of CycloneDX files](https://docs.gitlab.com/user/compliance/license_scanning_of_cyclonedx_files/) we will do this in 16.3 instead.
The GitLab [**License Compliance**](https://docs.gitlab.com/user/compliance/license_approval_policies/) CI/CD template is now deprecated and is scheduled for removal in the GitLab 16.3 release.
@ -4441,7 +4441,7 @@ When using the native HashiCorp Vault integration, CI/CD jobs will fail when no
</div>
**Update:** We previously announced a change to how the MobSF-based GitLab SAST analyzer would scan multi-module Android projects.
**Update**: We previously announced a change to how the MobSF-based GitLab SAST analyzer would scan multi-module Android projects.
We've canceled that change, and no action is required.
Instead of changing which single module would be scanned, we [improved multi-module support](https://gitlab.com/gitlab-org/security-products/analyzers/mobsf/-/merge_requests/73).
@ -5389,7 +5389,7 @@ See [Vulnerability translation documentation](https://docs.gitlab.com/user/appli
If you applied customizations to the affected analyzer, or if you currently disable the Semgrep-based analyzer in your pipelines, you must take action as detailed in the [deprecation issue for this change](https://gitlab.com/gitlab-org/gitlab/-/issues/390416#breaking-change).
**Update:** We've reduced the scope of this change. We will no longer make the following changes in GitLab 16.0:
**Update**: We've reduced the scope of this change. We will no longer make the following changes in GitLab 16.0:
1. Remove support for the analyzer based on [PHPCS Security Audit](https://gitlab.com/gitlab-org/security-products/analyzers/phpcs-security-audit) and replace it with GitLab-managed detection rules in the [Semgrep-based analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/semgrep).
1. Remove Scala from the scope of the [SpotBugs-based analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/spotbugs) and replace it with GitLab-managed detection rules in the [Semgrep-based analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/semgrep).
@ -5475,7 +5475,7 @@ The following templates will be updated:
We recommend that you test your pipelines before the 16.0 release if you use one of the templates listed above and you use the `_DISABLED` variables but set a value other than `"true"`.
**Update:** We previously announced that we would update the `rules` on the affected templates to run in [merge request pipelines](https://docs.gitlab.com/ci/pipelines/merge_request_pipelines/) by default.
**Update**: We previously announced that we would update the `rules` on the affected templates to run in [merge request pipelines](https://docs.gitlab.com/ci/pipelines/merge_request_pipelines/) by default.
However, due to compatibility issues [discussed in the deprecation issue](https://gitlab.com/gitlab-org/gitlab/-/issues/388988#note_1372629948), we will no longer make this change in GitLab 16.0. We will still release the changes to the `_DISABLED` variables as described above.
</div>

View File

@ -22,7 +22,7 @@ The scope of the analysis phase is all those vulnerabilities that have been thro
and confirmed as needing further action. To list these vulnerabilities, use the following filter
criteria in the vulnerability report:
- **Status:** Confirmed
- **Status**: Confirmed
## Risk analysis
@ -61,9 +61,9 @@ To help identify vulnerabilities of highest severity:
to help prioritize vulnerabilities for analysis.
- For each group, use the following filter criteria in the vulnerability report to prioritize
analysis of vulnerabilities by severity:
- **Status:** Confirmed
- **Activity:** Still detected
- **Group by:** Severity
- **Status**: Confirmed
- **Activity**: Still detected
- **Group by**: Severity
- Prioritize vulnerability triage on your highest-priority projects - for example, applications
deployed to customers.
@ -77,11 +77,11 @@ Use the following filter criteria in the vulnerability report to identify vulner
a solution available.
- For vulnerabilities detected by SBOM scanning, use the criteria:
- **Status:** Confirmed
- **Activity:** Has a solution
- **Status**: Confirmed
- **Activity**: Has a solution
- For vulnerabilities detected by SAST, use the criteria:
- **Status:** Confirmed
- **Activity:** Vulnerability Resolution available
- **Status**: Confirmed
- **Activity**: Vulnerability Resolution available
## Vulnerability details and action

View File

@ -75,7 +75,7 @@ These variables tell the scanner where to look for certain elements, which actio
| `DAST_TARGET_PATHS_FILE` | string | `/builds/project/urls.txt` | Ensures that the provided paths are always scanned. Set to a file path containing a list of URL paths relative to `DAST_TARGET_URL`. The file must be plain text with one path per line. |
| `DAST_TARGET_PATHS` | string | `/page1.html,/category1/page3.html` | Ensures that the provided paths are always scanned. Set to a comma-separated list of URL paths relative to `DAST_TARGET_URL`. |
| `DAST_TARGET_URL` | URL | `https://site.com` | The URL of the website to scan. |
| `DAST_USE_CACHE` | boolean | `true` | Set to `false` to disable caching. Default: `true`. **Note:** Disabling cache can cause OOM events or DAST job timeouts. |
| `DAST_USE_CACHE` | boolean | `true` | Set to `false` to disable caching. Default: `true`. **Note**: Disabling cache can cause OOM events or DAST job timeouts. |
### Authentication

View File

@ -413,13 +413,13 @@ A scanner profile defines the configuration details of a security scanner.
A scanner profile contains:
- **Profile name:** A name you give the scanner profile. For example, "Spider_15". While a scanner
- **Profile name**: A name you give the scanner profile. For example, "Spider_15". While a scanner
profile is referenced in either `.gitlab-ci.yml` or an on-demand scan, it **cannot** be renamed.
- **Scan mode:** A passive scan monitors all HTTP messages (requests and responses) sent to the target. An active scan attacks the target to find potential vulnerabilities.
- **Crawl timeout:** The maximum number of minutes allowed for the crawler to traverse the site.
- **Target timeout:** The maximum number of seconds DAST waits for the site to be available before
- **Scan mode**: A passive scan monitors all HTTP messages (requests and responses) sent to the target. An active scan attacks the target to find potential vulnerabilities.
- **Crawl timeout**: The maximum number of minutes allowed for the crawler to traverse the site.
- **Target timeout**: The maximum number of seconds DAST waits for the site to be available before
starting the scan.
- **Debug messages:** Include debug messages in the DAST console output.
- **Debug messages**: Include debug messages in the DAST console output.
You can reference a scanner profile in `.gitlab-ci.yml` and
on-demand scans.

View File

@ -910,7 +910,7 @@ The following variables configure the behavior of specific dependency scanning a
| `DS_GRADLE_RESOLUTION_POLICY` | `gemnasium-maven` | `"failed"` | Controls Gradle dependency resolution strictness. Accepts `"none"` to allow partial results, or `"failed"` to fail the scan when any dependencies fail to resolve. |
| `SBT_CLI_OPTS` | `gemnasium-maven` | | List of command-line arguments that the analyzer passes to `sbt`. |
| `PIP_INDEX_URL` | `gemnasium-python` | `https://pypi.org/simple` | Base URL of Python Package Index. |
| `PIP_EXTRA_INDEX_URL` | `gemnasium-python` | | Array of [extra URLs](https://pip.pypa.io/en/stable/reference/pip_install/#cmdoption-extra-index-url) of package indexes to use in addition to `PIP_INDEX_URL`. Comma-separated. **Warning:** Read [the following security consideration](#python-projects) when using this environment variable. |
| `PIP_EXTRA_INDEX_URL` | `gemnasium-python` | | Array of [extra URLs](https://pip.pypa.io/en/stable/reference/pip_install/#cmdoption-extra-index-url) of package indexes to use in addition to `PIP_INDEX_URL`. Comma-separated. **Warning**: Read [the following security consideration](#python-projects) when using this environment variable. |
| `PIP_REQUIREMENTS_FILE` | `gemnasium-python` | | Pip requirements file to be scanned. This is a filename and not a path. When this environment variable is set only the specified file is scanned. |
| `PIPENV_PYPI_MIRROR` | `gemnasium-python` | | If set, overrides the PyPi index used by Pipenv with a [mirror](https://github.com/pypa/pipenv/blob/v2022.1.8/pipenv/environments.py#L263). |
| `DS_PIP_VERSION` | `gemnasium-python` | | Force the install of a specific pip version (example: `"19.3"`), otherwise the pip installed in the Docker image is used. |

View File

@ -167,9 +167,9 @@ Share any feedback on the new Dependency Scanning analyzer in this [feedback iss
### Bundler
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports Bundler projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `Gemfile.lock` file (`gems.locked` alternate filename is also supported). The combination of supported versions of Bundler and the `Gemfile.lock` file are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports Bundler projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `Gemfile.lock` file (`gems.locked` alternate filename is also supported). The combination of supported versions of Bundler and the `Gemfile.lock` file are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**New behavior:** The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `Gemfile.lock` file (`gems.locked` alternate filename is also supported) and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
**New behavior**: The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `Gemfile.lock` file (`gems.locked` alternate filename is also supported) and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
#### Migrate a Bundler project
@ -183,9 +183,9 @@ There are no additional steps needed to migrate a Bundler project to use the Dep
### CocoaPods
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer does not support CocoaPods projects when using the CI/CD templates or the Scan Execution Policies. Support for CocoaPods is only available on the experimental Cocoapods CI/CD component.
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer does not support CocoaPods projects when using the CI/CD templates or the Scan Execution Policies. Support for CocoaPods is only available on the experimental Cocoapods CI/CD component.
**New behavior:** The new Dependency Scanning analyzer extracts the project dependencies by parsing the `Podfile.lock` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
**New behavior**: The new Dependency Scanning analyzer extracts the project dependencies by parsing the `Podfile.lock` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
#### Migrate a CocoaPods project
@ -199,9 +199,9 @@ There are no additional steps to migrate a CocoaPods project to use the Dependen
### Composer
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports Composer projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `composer.lock` file. The combination of supported versions of Composer and the `composer.lock` file are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports Composer projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `composer.lock` file. The combination of supported versions of Composer and the `composer.lock` file are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**New behavior:** The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `composer.lock` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
**New behavior**: The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `composer.lock` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
#### Migrate a Composer project
@ -215,9 +215,9 @@ There are no additional steps to migrate a Composer project to use the Dependenc
### Conan
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports Conan projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `conan.lock` file. The combination of supported versions of Conan and the `conan.lock` file are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports Conan projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `conan.lock` file. The combination of supported versions of Conan and the `conan.lock` file are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**New behavior:** The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `conan.lock` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
**New behavior**: The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `conan.lock` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
#### Migrate a Conan project
@ -231,9 +231,9 @@ There are no additional steps to migrate a Conan project to use the Dependency S
### Go
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports Go projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by using the `go.mod` and `go.sum` file. This analyzer attempts to execute the `go list` command to increase the accuracy of the detected dependencies, which requires a functional Go environment. In case of failure, it falls back to parsing the `go.sum` file. The combination of supported versions of Go, the `go.mod`, and the `go.sum` files are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports Go projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by using the `go.mod` and `go.sum` file. This analyzer attempts to execute the `go list` command to increase the accuracy of the detected dependencies, which requires a functional Go environment. In case of failure, it falls back to parsing the `go.sum` file. The combination of supported versions of Go, the `go.mod`, and the `go.sum` files are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**New behavior:** The new Dependency Scanning analyzer does not attempt to execute the `go list` command in the project to extract the dependencies and it no longer falls back to parsing the `go.sum` file. Instead, the project must provide at least a `go.mod` file and ideally a `go.graph` file generated with the [`go mod graph` command](https://go.dev/ref/mod#go-mod-graph) from the Go Toolchains. The `go.graph` file is required to increase the accuracy of the detected components and to generate the dependency graph to enable features like the [dependency path](../dependency_list/_index.md#dependency-paths). These files are processed by the `dependency-scanning` CI/CD job to generate a CycloneDX SBOM report artifact. This approach does not require GitLab to support specific versions of Go.
**New behavior**: The new Dependency Scanning analyzer does not attempt to execute the `go list` command in the project to extract the dependencies and it no longer falls back to parsing the `go.sum` file. Instead, the project must provide at least a `go.mod` file and ideally a `go.graph` file generated with the [`go mod graph` command](https://go.dev/ref/mod#go-mod-graph) from the Go Toolchains. The `go.graph` file is required to increase the accuracy of the detected components and to generate the dependency graph to enable features like the [dependency path](../dependency_list/_index.md#dependency-paths). These files are processed by the `dependency-scanning` CI/CD job to generate a CycloneDX SBOM report artifact. This approach does not require GitLab to support specific versions of Go.
#### Migrate a Go project
@ -251,9 +251,9 @@ See the [enablement instructions for Go](dependency_scanning_sbom/_index.md#go)
### Gradle
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports Gradle projects using the `gemnasium-maven-dependency_scanning` CI/CD job to extract the project dependencies by building the application from the `build.gradle` and `build.gradle.kts` files. The combinations of supported versions for Java, Kotlin, and Gradle are complex, as detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-running-a-package-manager-to-generate-a-parsable-file).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports Gradle projects using the `gemnasium-maven-dependency_scanning` CI/CD job to extract the project dependencies by building the application from the `build.gradle` and `build.gradle.kts` files. The combinations of supported versions for Java, Kotlin, and Gradle are complex, as detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-running-a-package-manager-to-generate-a-parsable-file).
**New behavior:** The new Dependency Scanning analyzer does not build the project to extract the dependencies. Instead, the project must provide a `dependencies.lock` file generated with the [Gradle Dependency Lock Plugin](https://github.com/nebula-plugins/gradle-dependency-lock-plugin). This file is processed by the `dependency-scanning` CI/CD job to generate a CycloneDX SBOM report artifact. This approach does not require GitLab to support specific versions of Java, Kotlin, and Gradle.
**New behavior**: The new Dependency Scanning analyzer does not build the project to extract the dependencies. Instead, the project must provide a `dependencies.lock` file generated with the [Gradle Dependency Lock Plugin](https://github.com/nebula-plugins/gradle-dependency-lock-plugin). This file is processed by the `dependency-scanning` CI/CD job to generate a CycloneDX SBOM report artifact. This approach does not require GitLab to support specific versions of Java, Kotlin, and Gradle.
#### Migrate a Gradle project
@ -273,9 +273,9 @@ See the [enablement instructions for Gradle](dependency_scanning_sbom/_index.md#
### Maven
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports Maven projects using the `gemnasium-maven-dependency_scanning` CI/CD job to extract the project dependencies by building the application from the `pom.xml` file. The combinations of supported versions for Java, Kotlin, and Maven are complex, as detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-running-a-package-manager-to-generate-a-parsable-file).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports Maven projects using the `gemnasium-maven-dependency_scanning` CI/CD job to extract the project dependencies by building the application from the `pom.xml` file. The combinations of supported versions for Java, Kotlin, and Maven are complex, as detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-running-a-package-manager-to-generate-a-parsable-file).
**New behavior:** The new Dependency Scanning analyzer does not build the project to extract the dependencies. Instead, the project must provide a `maven.graph.json` file generated with the [maven dependency plugin](https://maven.apache.org/plugins/maven-dependency-plugin/index.html). This file is processed by the `dependency-scanning` CI/CD job to generate a CycloneDX SBOM report artifact. This approach does not require GitLab to support specific versions of Java, Kotlin, and Maven.
**New behavior**: The new Dependency Scanning analyzer does not build the project to extract the dependencies. Instead, the project must provide a `maven.graph.json` file generated with the [maven dependency plugin](https://maven.apache.org/plugins/maven-dependency-plugin/index.html). This file is processed by the `dependency-scanning` CI/CD job to generate a CycloneDX SBOM report artifact. This approach does not require GitLab to support specific versions of Java, Kotlin, and Maven.
#### Migrate a Maven project
@ -293,10 +293,10 @@ See the [enablement instructions for Maven](dependency_scanning_sbom/_index.md#m
### npm
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports npm projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `package-lock.json` or `npm-shrinkwrap.json.lock` files. The combination of supported versions of npm and the `package-lock.json` or `npm-shrinkwrap.json.lock` files are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports npm projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `package-lock.json` or `npm-shrinkwrap.json.lock` files. The combination of supported versions of npm and the `package-lock.json` or `npm-shrinkwrap.json.lock` files are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
This analyzer may scan JavaScript files vendored in an npm project using the `Retire.JS` scanner.
**New behavior:** The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `package-lock.json` or `npm-shrinkwrap.json.lock` files and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
**New behavior**: The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `package-lock.json` or `npm-shrinkwrap.json.lock` files and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
This analyzer does not scan vendored JavaScript files. Support for a replacement feature is proposed in [epic 7186](https://gitlab.com/groups/gitlab-org/-/epics/7186).
#### Migrate an npm project
@ -311,9 +311,9 @@ There are no additional steps to migrate an npm project to use the Dependency Sc
### NuGet
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports NuGet projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `packages.lock.json` file. The combination of supported versions of NuGet and the `packages.lock.json` file are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports NuGet projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `packages.lock.json` file. The combination of supported versions of NuGet and the `packages.lock.json` file are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**New behavior:** The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `packages.lock.json` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
**New behavior**: The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `packages.lock.json` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
#### Migrate a NuGet project
@ -327,9 +327,9 @@ There are no additional steps to migrate a NuGet project to use the Dependency S
### pip
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports pip projects using the `gemnasium-python-dependency_scanning` CI/CD job to extract the project dependencies by building the application from the `requirements.txt` file (`requirements.pip` and `requires.txt` alternate filenames are also supported). The `PIP_REQUIREMENTS_FILE` environment variable can also be used to specify a custom filename. The combinations of supported versions for Python and pip are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-running-a-package-manager-to-generate-a-parsable-file).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports pip projects using the `gemnasium-python-dependency_scanning` CI/CD job to extract the project dependencies by building the application from the `requirements.txt` file (`requirements.pip` and `requires.txt` alternate filenames are also supported). The `PIP_REQUIREMENTS_FILE` environment variable can also be used to specify a custom filename. The combinations of supported versions for Python and pip are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-running-a-package-manager-to-generate-a-parsable-file).
**New behavior:** The new Dependency Scanning analyzer does not build the project to extract the dependencies. Instead, the project must provide a `requirements.txt` lockfile generated by the [pip-compile command line tool](https://pip-tools.readthedocs.io/en/latest/cli/pip-compile/). This file is processed by the `dependency-scanning` CI/CD job to generate a CycloneDX SBOM report artifact. This approach does not require GitLab to support specific versions of Python and pip. The `DS_PIPCOMPILE_REQUIREMENTS_FILE_NAME_PATTERN` environment variable can also be used to specify custom filenames for pip-compile lockfiles.
**New behavior**: The new Dependency Scanning analyzer does not build the project to extract the dependencies. Instead, the project must provide a `requirements.txt` lockfile generated by the [pip-compile command line tool](https://pip-tools.readthedocs.io/en/latest/cli/pip-compile/). This file is processed by the `dependency-scanning` CI/CD job to generate a CycloneDX SBOM report artifact. This approach does not require GitLab to support specific versions of Python and pip. The `DS_PIPCOMPILE_REQUIREMENTS_FILE_NAME_PATTERN` environment variable can also be used to specify custom filenames for pip-compile lockfiles.
Alternatively, the project can provide a `pipdeptree.json` file generated with the [pipdeptree command line utility](https://pypi.org/project/pipdeptree/).
@ -355,9 +355,9 @@ See the [enablement instructions for pip](dependency_scanning_sbom/_index.md#pip
### Pipenv
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports Pipenv projects using the `gemnasium-python-dependency_scanning` CI/CD job to extract the project dependencies by building the application from the `Pipfile` file or from a `Pipfile.lock` file if present. The combinations of supported versions for Python and Pipenv are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-running-a-package-manager-to-generate-a-parsable-file).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports Pipenv projects using the `gemnasium-python-dependency_scanning` CI/CD job to extract the project dependencies by building the application from the `Pipfile` file or from a `Pipfile.lock` file if present. The combinations of supported versions for Python and Pipenv are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-running-a-package-manager-to-generate-a-parsable-file).
**New behavior:** The new Dependency Scanning analyzer does not build the Pipenv project to extract the dependencies. Instead, the project must provide at least a `Pipfile.lock` file and ideally a `pipenv.graph.json` file generated by the [`pipenv graph` command](https://pipenv.pypa.io/en/latest/cli.html#graph). The `pipenv.graph.json` file is required to generate the dependency graph and enable features like the [dependency path](../dependency_list/_index.md#dependency-paths). These files are processed by the `dependency-scanning` CI/CD job to generate a CycloneDX SBOM report artifact. This approach does not require GitLab to support specific versions of Python and Pipenv.
**New behavior**: The new Dependency Scanning analyzer does not build the Pipenv project to extract the dependencies. Instead, the project must provide at least a `Pipfile.lock` file and ideally a `pipenv.graph.json` file generated by the [`pipenv graph` command](https://pipenv.pypa.io/en/latest/cli.html#graph). The `pipenv.graph.json` file is required to generate the dependency graph and enable features like the [dependency path](../dependency_list/_index.md#dependency-paths). These files are processed by the `dependency-scanning` CI/CD job to generate a CycloneDX SBOM report artifact. This approach does not require GitLab to support specific versions of Python and Pipenv.
#### Migrate a Pipenv project
@ -381,9 +381,9 @@ See the [enablement instructions for Pipenv](dependency_scanning_sbom/_index.md#
### Poetry
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports Poetry projects using the `gemnasium-python-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `poetry.lock` file. The combination of supported versions of Poetry and the `poetry.lock` file are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports Poetry projects using the `gemnasium-python-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `poetry.lock` file. The combination of supported versions of Poetry and the `poetry.lock` file are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**New behavior:** The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `poetry.lock` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
**New behavior**: The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `poetry.lock` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
#### Migrate a Poetry project
@ -397,10 +397,10 @@ There are no additional steps to migrate a Poetry project to use the Dependency
### pnpm
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports pnpm projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `pnpm-lock.yaml` file. The combination of supported versions of pnpm and the `pnpm-lock.yaml` file are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports pnpm projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `pnpm-lock.yaml` file. The combination of supported versions of pnpm and the `pnpm-lock.yaml` file are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
This analyzer may scan JavaScript files vendored in a npm project using the `Retire.JS` scanner.
**New behavior:** The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `pnpm-lock.yaml` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
**New behavior**: The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `pnpm-lock.yaml` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
This analyzer does not scan vendored JavaScript files. Support for a replacement feature is proposed in [epic 7186](https://gitlab.com/groups/gitlab-org/-/epics/7186).
#### Migrate a pnpm project
@ -415,9 +415,9 @@ There are no additional steps to migrate a pnpm project to use the Dependency Sca
### sbt
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports sbt projects using the `gemnasium-maven-dependency_scanning` CI/CD job to extract the project dependencies by building the application from the `build.sbt` file. The combinations of supported versions for Java, Scala, and sbt are complex, as detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-running-a-package-manager-to-generate-a-parsable-file).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports sbt projects using the `gemnasium-maven-dependency_scanning` CI/CD job to extract the project dependencies by building the application from the `build.sbt` file. The combinations of supported versions for Java, Scala, and sbt are complex, as detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-running-a-package-manager-to-generate-a-parsable-file).
**New behavior:** The new Dependency Scanning analyzer does not build the project to extract the dependencies. Instead, the project must provide a `dependencies-compile.dot` file generated with the [sbt-dependency-graph plugin](https://github.com/sbt/sbt-dependency-graph) ([included in sbt >= 1.4.0](https://www.scala-sbt.org/1.x/docs/sbt-1.4-Release-Notes.html#sbt-dependency-graph+is+in-sourced)). This file is processed by the `dependency-scanning` CI/CD job to generate a CycloneDX SBOM report artifact. This approach does not require GitLab to support specific versions of Java, Scala, and sbt.
**New behavior**: The new Dependency Scanning analyzer does not build the project to extract the dependencies. Instead, the project must provide a `dependencies-compile.dot` file generated with the [sbt-dependency-graph plugin](https://github.com/sbt/sbt-dependency-graph) ([included in sbt >= 1.4.0](https://www.scala-sbt.org/1.x/docs/sbt-1.4-Release-Notes.html#sbt-dependency-graph+is+in-sourced)). This file is processed by the `dependency-scanning` CI/CD job to generate a CycloneDX SBOM report artifact. This approach does not require GitLab to support specific versions of Java, Scala, and sbt.
#### Migrate an sbt project
@ -435,9 +435,9 @@ See the [enablement instructions for sbt](dependency_scanning_sbom/_index.md#sbt
### setuptools
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports setuptools projects using the `gemnasium-python-dependency_scanning` CI/CD job to extract the project dependencies by building the application from the `setup.py` file. The combinations of supported versions for Python and setuptools are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-running-a-package-manager-to-generate-a-parsable-file).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports setuptools projects using the `gemnasium-python-dependency_scanning` CI/CD job to extract the project dependencies by building the application from the `setup.py` file. The combinations of supported versions for Python and setuptools are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-running-a-package-manager-to-generate-a-parsable-file).
**New behavior:** The new Dependency Scanning analyzer does not support building a setuptools project to extract the dependencies. We recommend configuring the [pip-compile command line tool](https://pip-tools.readthedocs.io/en/latest/cli/pip-compile/) to generate a compatible `requirements.txt` lockfile. Alternatively you can provide your own CycloneDX SBOM document.
**New behavior**: The new Dependency Scanning analyzer does not support building a setuptools project to extract the dependencies. We recommend configuring the [pip-compile command line tool](https://pip-tools.readthedocs.io/en/latest/cli/pip-compile/) to generate a compatible `requirements.txt` lockfile. Alternatively you can provide your own CycloneDX SBOM document.
#### Migrate a setuptools project
@ -457,9 +457,9 @@ See the [enablement instructions for pip](dependency_scanning_sbom/_index.md#pip
### Swift
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer does not support Swift projects when using the CI/CD templates or the Scan Execution Policies. Support for Swift is only available on the experimental Swift CI/CD component.
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer does not support Swift projects when using the CI/CD templates or the Scan Execution Policies. Support for Swift is only available on the experimental Swift CI/CD component.
**New behavior:** The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `Package.resolved` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
**New behavior**: The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `Package.resolved` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
#### Migrate a Swift project
@ -473,9 +473,9 @@ There are no additional steps to migrate a Swift project to use the Dependency S
### uv
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports uv projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `uv.lock` file. The combination of supported versions of uv and the `uv.lock` file are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports uv projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `uv.lock` file. The combination of supported versions of uv and the `uv.lock` file are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**New behavior:** The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `uv.lock` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
**New behavior**: The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `uv.lock` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
#### Migrate a uv project
@ -489,11 +489,11 @@ There are no additional steps to migrate a uv project to use the Dependency Scan
### Yarn
**Previous behavior:** Dependency Scanning based on the Gemnasium analyzer supports Yarn projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `yarn.lock` file. The combination of supported versions of Yarn and the `yarn.lock` files are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
**Previous behavior**: Dependency Scanning based on the Gemnasium analyzer supports Yarn projects using the `gemnasium-dependency_scanning` CI/CD job and its ability to extract the project dependencies by parsing the `yarn.lock` file. The combination of supported versions of Yarn and the `yarn.lock` files are detailed in the [Dependency Scanning (Gemnasium-based) documentation](_index.md#obtaining-dependency-information-by-parsing-lockfiles).
This analyzer may provide remediation data to [resolve a vulnerability via merge request](../vulnerabilities/_index.md#resolve-a-vulnerability) for Yarn dependencies.
This analyzer may scan JavaScript files vendored in a Yarn project using the `Retire.JS` scanner.
**New behavior:** The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `yarn.lock` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
**New behavior**: The new Dependency Scanning analyzer also extracts the project dependencies by parsing the `yarn.lock` file and generates a CycloneDX SBOM report artifact with the `dependency-scanning` CI/CD job.
This analyzer does not provide remediations data for Yarn dependencies. Support for a replacement feature is proposed in [epic 759](https://gitlab.com/groups/gitlab-org/-/epics/759).
This analyzer does not scan vendored JavaScript files. Support for a replacement feature is proposed in [epic 7186](https://gitlab.com/groups/gitlab-org/-/epics/7186).

View File

@ -56,10 +56,10 @@ number of false positives.
Most of the GitLab application security tools have two template editions:
- **Stable:** The stable template is the default. It offers a reliable and consistent application
- **Stable**: The stable template is the default. It offers a reliable and consistent application
security experience. You should use the stable template for most users and projects that require
stability and predictable behavior in their CI/CD pipelines.
- **Latest:** The latest template is for those who want to access and test cutting-edge features. It
- **Latest**: The latest template is for those who want to access and test cutting-edge features. It
is identified by the word `latest` in the template's name. It is not considered stable and may
include breaking changes that are planned for the next major release. This template allows you to
try new features and updates before they become part of the stable release.

View File

@ -89,9 +89,9 @@ When designing your policies, your goals should be to:
To enforce policies to meet your requirements, consider the following factors:
- **Inheritance:** By default, a policy is enforced on the organizational units it's linked to, and
- **Inheritance**: By default, a policy is enforced on the organizational units it's linked to, and
all their descendent subgroups and their projects.
- **Scope:** To customize policy enforcement, you can define a policy's scope to match your needs.
- **Scope**: To customize policy enforcement, you can define a policy's scope to match your needs.
#### Inheritance

View File

@ -29,8 +29,8 @@ The scope of the remediation phase is all those vulnerabilities that have been t
phase and confirmed as needing further action. To list these vulnerabilities, use the following
filter criteria in the vulnerability report:
- **Status:** Confirmed
- **Activity:** Has issue
- **Status**: Confirmed
- **Activity**: Has issue
## Document the vulnerability

View File

@ -28,7 +28,7 @@ and effectively.
The scope of the triage phase is all those vulnerabilities that have not been triaged. To list these
vulnerabilities, use the following filter criteria in the vulnerability report:
- **Status:** Needs triage
- **Status**: Needs triage
## Risk analysis
@ -46,8 +46,8 @@ threshold for vulnerabilities.
After you triage a vulnerability you should change its status to either:
- **Confirmed:** You have triaged this vulnerability and decided it requires analysis.
- **Dismissed:** You have triaged this vulnerability and decided against analysis.
- **Confirmed**: You have triaged this vulnerability and decided it requires analysis.
- **Dismissed**: You have triaged this vulnerability and decided against analysis.
When you dismiss a vulnerability you must provide a brief comment that states why it has been
dismissed. Dismissed vulnerabilities are ignored if detected in subsequent scans. Vulnerability

View File

@ -235,11 +235,11 @@ We are actively working to expand coverage to more types of vulnerabilities.
Vulnerability Resolution sometimes cannot generate a suggested fix. Common causes include:
- **False positive detected:** Before proposing a fix, the AI model assesses whether the vulnerability is valid. It may judge that the vulnerability is not a true vulnerability, or isn't worth fixing.
- **False positive detected**: Before proposing a fix, the AI model assesses whether the vulnerability is valid. It may judge that the vulnerability is not a true vulnerability, or isn't worth fixing.
- This can happen if the vulnerability occurs in test code. Your organization might still choose to fix vulnerabilities even if they happen in test code, but models sometimes assess these to be false positives.
- If you agree that the vulnerability is a false-positive or is not worth fixing, you should [dismiss the vulnerability](#vulnerability-status-values) and [select a matching reason](#vulnerability-dismissal-reasons).
- To customize your SAST configuration or report a problem with a GitLab SAST rule, see [SAST rules](../sast/rules.md).
- **Temporary or unexpected error:** The error message may state that "an unexpected error has occurred", "the upstream AI provider request timed out", "something went wrong", or a similar cause.
- **Temporary or unexpected error**: The error message may state that "an unexpected error has occurred", "the upstream AI provider request timed out", "something went wrong", or a similar cause.
- These errors may be caused by temporary problems with the AI provider or with GitLab Duo.
- A new request may succeed, so you can try to resolve the vulnerability again.
- If you continue to see these errors, contact GitLab for assistance.
@ -292,11 +292,11 @@ Provide feedback on this feature in [issue 476553](https://gitlab.com/gitlab-org
Vulnerability Resolution in a merge request sometimes cannot generate a suggested fix. Common causes include:
- **False positive detected:** Before proposing a fix, the AI model assesses whether the vulnerability is valid. It may judge that the vulnerability is not a true vulnerability, or isn't worth fixing.
- **False positive detected**: Before proposing a fix, the AI model assesses whether the vulnerability is valid. It may judge that the vulnerability is not a true vulnerability, or isn't worth fixing.
- This can happen if the vulnerability occurs in test code. Your organization might still choose to fix vulnerabilities even if they happen in test code, but models sometimes assess these to be false positives.
- If you agree that the vulnerability is a false-positive or is not worth fixing, you should [dismiss the vulnerability](#vulnerability-status-values) and [select a matching reason](#vulnerability-dismissal-reasons).
- To customize your SAST configuration or report a problem with a GitLab SAST rule, see [SAST rules](../sast/rules.md).
- **Temporary or unexpected error:** The error message may state that "an unexpected error has occurred", "the upstream AI provider request timed out", "something went wrong", or a similar cause.
- **Temporary or unexpected error**: The error message may state that "an unexpected error has occurred", "the upstream AI provider request timed out", "something went wrong", or a similar cause.
- These errors may be caused by temporary problems with the AI provider or with GitLab Duo.
- A new request may succeed, so you can try to resolve the vulnerability again.
- If you continue to see these errors, contact GitLab for assistance.

View File

@ -50,9 +50,9 @@ It's important to acknowledge the current limitations regarding strict data sove
The following factors influence where data is routed.
- **Network latency:** The primary routing mechanism focuses on minimizing latency, meaning data might be processed in a region other than the nearest one if network conditions dictate.
- **Service availability:** In case of regional outages or service disruptions, requests might be automatically rerouted to ensure uninterrupted service.
- **Third-Party dependencies:** The GitLab AI infrastructure relies on third-party model providers, like Google Vertex AI, which have their own data handling practices.
- **Network latency**: The primary routing mechanism focuses on minimizing latency, meaning data might be processed in a region other than the nearest one if network conditions dictate.
- **Service availability**: In case of regional outages or service disruptions, requests might be automatically rerouted to ensure uninterrupted service.
- **Third-Party dependencies**: The GitLab AI infrastructure relies on third-party model providers, like Google Vertex AI, which have their own data handling practices.
### AI gateway deployment regions

View File

@ -555,8 +555,8 @@ Prerequisites:
To make an epic confidential:
- **When creating an epic:** select the checkbox under **Confidentiality**.
- **In an existing epic:** on the right sidebar, select **Edit** next to **Confidentiality**, and then
- **When creating an epic**: select the checkbox under **Confidentiality**.
- **In an existing epic**: on the right sidebar, select **Edit** next to **Confidentiality**, and then
select **Turn on**.
In GitLab 15.6 and later, you can also use the `/confidential` [quick action](../../project/quick_actions.md).

View File

@ -18,7 +18,8 @@ The key elements are:
- File paths: Specific files, directories, or wildcards.
- Code Owners: Use `@mentions` for users, groups, or roles.
- Comments: Lines starting with `#` are ignored.
- Comments: Lines starting with `#` are ignored. Inline comments are unsupported.
Any Code Owners listed in a comment are parsed.
- Sections: Optional groupings of rules, defined using `[Section name]`.
{{< alert type="note" >}}

View File

@ -30,8 +30,8 @@ Deploy keys can't be used for Git operations if [external authorization](../../.
A deploy key has a defined scope when it is created:
- **Project deploy key:** Access is limited to the selected project.
- **Public deploy key:** Access can be granted to _any_ project in a GitLab instance. Access to each
- **Project deploy key**: Access is limited to the selected project.
- **Public deploy key**: Access can be granted to _any_ project in a GitLab instance. Access to each
project must be [granted](#grant-project-access-to-a-public-deploy-key) by a user with at least
the Maintainer role.
@ -41,8 +41,8 @@ You cannot change a deploy key's scope after creating it.
A deploy key is given a permission level when it is created:
- **Read-only:** A read-only deploy key can only read from the repository.
- **Read-write:** A read-write deploy key can read from, and write to, the repository.
- **Read-only**: A read-only deploy key can only read from the repository.
- **Read-write**: A read-write deploy key can read from, and write to, the repository.
You can change a deploy key's permission level after creating it. Changing a project deploy key's
permissions only applies for the current project.
@ -89,9 +89,9 @@ To view the deploy keys available to a project:
The deploy keys available are listed:
- **Enabled deploy keys:** Deploy keys that have access to the project.
- **Privately accessible deploy keys:** Project deploy keys that don't have access to the project.
- **Public accessible deploy keys:** Public deploy keys that don't have access to the project.
- **Enabled deploy keys**: Deploy keys that have access to the project.
- **Privately accessible deploy keys**: Project deploy keys that don't have access to the project.
- **Public accessible deploy keys**: Public deploy keys that don't have access to the project.
## Create a project deploy key

View File

@ -23,11 +23,11 @@ In this document, we focus on the TFVC to Git migration.
The main differences between TFVC and Git are:
- **Git is distributed:** While TFVC is centralized using a client-server architecture,
- **Git is distributed**: While TFVC is centralized using a client-server architecture,
Git is distributed. This translates to Git having a more flexible workflow because
you work with a copy of the entire repository. This allows you to quickly
switch branches or merge, for example, without needing to communicate with a remote server.
- **Storage:** Changes in a centralized version control system are per file (changeset),
- **Storage**: Changes in a centralized version control system are per file (changeset),
while in Git a committed file is stored in its entirety (snapshot). That means that it is
very easy to revert or undo a whole change in Git.
@ -40,10 +40,10 @@ For more information, see:
Advantages of migrating to Git/GitLab:
- **No licensing costs:** Git is open source, while TFVC is proprietary.
- **Shorter learning curve:** Git has a big community and a vast number of
- **No licensing costs**: Git is open source, while TFVC is proprietary.
- **Shorter learning curve**: Git has a big community and a vast number of
tutorials to get you started (see our [Git topic](../../../topics/git/_index.md)).
- **Integration with modern tools:** After migrating to Git and GitLab, you have
- **Integration with modern tools**: After migrating to Git and GitLab, you have
an open source, end-to-end software development platform with built-in version
control, issue tracking, code review, CI/CD, and more.

View File

@ -18,7 +18,7 @@ GitLab Duo is designed to provide contextually relevant information during the l
- Add-on: GitLab Duo Enterprise
- Offering: GitLab.com, GitLab Self-Managed
- Status: Beta
- LLM: Anthropic [Claude 3.5 Sonnet](https://console.cloud.google.com/vertex-ai/publishers/anthropic/model-garden/claude-3-5-sonnet)
- LLM: Anthropic [Claude 3.7 Sonnet](https://console.cloud.google.com/vertex-ai/publishers/anthropic/model-garden/claude-3-7-sonnet)
{{< /details >}}
@ -27,6 +27,7 @@ GitLab Duo is designed to provide contextually relevant information during the l
- [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/10401) in GitLab 16.2 as an [experiment](../../../policy/development_stages_support.md#experiment).
- [Changed](https://gitlab.com/gitlab-org/gitlab/-/issues/429882) to beta in GitLab 16.10.
- Changed to require GitLab Duo add-on in GitLab 17.6 and later.
- LLM [updated](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186862) to Claude 3.7 Sonnet in GitLab 17.10.
- Feature flag `add_ai_summary_for_new_mr` [enabled by default](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186108) in GitLab 17.11.
- Changed to include Premium in GitLab 18.0.
@ -137,7 +138,7 @@ To enable `@GitLabDuo` to automatically review merge requests:
- Add-on: GitLab Duo Enterprise
- Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated
- Status: Experiment
- LLM: Anthropic [Claude 3.5 Sonnet](https://console.cloud.google.com/vertex-ai/publishers/anthropic/model-garden/claude-3-5-sonnet)
- LLM: Anthropic [Claude 3.7 Sonnet](https://console.cloud.google.com/vertex-ai/publishers/anthropic/model-garden/claude-3-7-sonnet)
{{< /details >}}
@ -145,6 +146,7 @@ To enable `@GitLabDuo` to automatically review merge requests:
- [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/10466) in GitLab 16.0 as an [experiment](../../../policy/development_stages_support.md#experiment).
- Feature flag `summarize_my_code_review` [enabled by default](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/182448) in GitLab 17.10.
- LLM [updated](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/183873) to Claude 3.7 Sonnet in GitLab 17.11.
- Changed to include Premium in GitLab 18.0.
{{< /history >}}

View File

@ -177,23 +177,23 @@ The version of a model version in GitLab must follow [Semantic Version specifica
Using semantic versioning facilitates model deployment, by communicating which
if a new version can be deployed without changes to the application:
- **Major (integer):** A change in the major component signifies a breaking change in the model, and that the application
- **Major (integer)**: A change in the major component signifies a breaking change in the model, and that the application
that consumes the model must be updated to properly use this new version.
A new algorithm or the addition of a mandatory feature column are examples of breaking
changes that would require a bump at the major component.
- **Minor (integer):** A change in the minor component signifies a non-breaking change, and that the
- **Minor (integer)**: A change in the minor component signifies a non-breaking change, and that the
consumer can safely use the new version without breaking, although the consumer might
need to be updated to use its new functionality. For example, adding a non-mandatory
feature column with a default value to the model is a minor bump, because when a value for
the added column is not passed, inference still works.
- **Patch (integer):** A change in the patch component means that a new version is out that does not
- **Patch (integer)**: A change in the patch component means that a new version is out that does not
require any action by the application. For example, a daily retrain of the
model does not change the feature set or how the application consumes the
model version. Auto updating to a new patch is a safe update.
- **Prerelease (text):** Represents a version that is not yet ready for production use.
- **Prerelease (text)**: Represents a version that is not yet ready for production use.
Used to identify alpha, beta, or release candidate versions of the model.
### Model version examples

View File

@ -59,10 +59,10 @@ level of protection for the branch. For example, consider these rules, which inc
A branch named `v1.x` is a case-sensitive match for all three branch name patterns: `v1.x`, `v1.*`, and `v*`.
As the most permissive option determines the behavior, the resulting permissions for branch `v1.x` are:
- **Allowed to merge:** Of the three settings, `Maintainer + Developer` is most permissive,
- **Allowed to merge**: Of the three settings, `Maintainer + Developer` is most permissive,
and controls branch behavior as a result. Even though the branch also matched `v1.x` and `v*`
(which each have stricter permissions), users with the Developer role can merge into the branch.
- **Allowed to push and merge:** Of the three settings, `Maintainer` is the most permissive, and controls
- **Allowed to push and merge**: Of the three settings, `Maintainer` is the most permissive, and controls
branch behavior as a result. Even though branches matching `v*` are set to `No one`, branches
that _also_ match `v1.x` or `v1.*` receive the more permissive `Maintainer` permission.
@ -93,7 +93,7 @@ force push is allowed. For example, consider these rules, which include
A branch named `v1.x` matches all three branch name patterns: `v1.x`, `v1.*`, and `v*`.
As the most permissive option determines the behavior, the resulting permissions for branch `v1.x` are:
- **Allow force push:** Of the three settings, `Yes` is most permissive,
- **Allow force push**: Of the three settings, `Yes` is most permissive,
and controls branch behavior as a result. Even though the branch also matched `v1.x` and `v*`
(which each have stricter permissions), any user that can push to this branch can also force push.

View File

@ -279,17 +279,17 @@ to you after the import is complete.
When importing requirements from a CSV file, it must be formatted in a certain way:
- **Header row:** CSV files must include the following headers:
- **Header row**: CSV files must include the following headers:
`title` and `description`. The headers are case-insensitive.
- **Columns:** data from columns other than `title` and `description` is not imported.
- **Separators:** the column separator is automatically detected from the header row.
- **Columns**: data from columns other than `title` and `description` is not imported.
- **Separators**: the column separator is automatically detected from the header row.
Supported separator characters are: commas (`,`), semicolons (`;`), and tabs (`\t`).
The row separator can be either `CRLF` or `LF`.
- **Double-quote character:** the double-quote (`"`) character is used to quote fields,
- **Double-quote character**: the double-quote (`"`) character is used to quote fields,
enabling the use of the column separator in a field (see the third line in the
sample CSV data below). To insert a double-quote (`"`) in a quoted
field, use two double-quote characters in succession (`""`).
- **Data rows:** below the header row, succeeding rows must follow the same column
- **Data rows**: below the header row, succeeding rows must follow the same column
order. The title text is required, while the description is optional and can be left empty.
Sample CSV data:

View File

@ -33,7 +33,7 @@ If you try to execute a restricted action, you might get a `404` error.
| Merge requests | Create and update a merge request. |
| Package registry | Publish a package. |
| CI/CD | Create, edit, administer, and run pipelines. <br> Create, edit, administer, and run builds. <br> Create and edit admin environments. <br> Create and edit admin deployments. <br> Create and edit admin clusters. <br> Create and edit admin releases. |
| Namespaces | **For exceeded free user limits:** Invite new users. |
| Namespaces | **For exceeded free user limits**: Invite new users. |
## Read-only projects

View File

@ -20,9 +20,9 @@ Choose from three types of search to match your needs: **basic search**,
For code search, GitLab uses these types in this order:
- **Exact code search:** where you can use exact match and regular expression modes.
- **Advanced search:** when exact code search is not available.
- **Basic search:** when exact code search and advanced search are not available
- **Exact code search**: where you can use exact match and regular expression modes.
- **Advanced search**: when exact code search is not available.
- **Basic search**: when exact code search and advanced search are not available
or when you search against a non-default branch.
This type does not support group or global search.

View File

@ -113,8 +113,8 @@ When this feature is enabled for instances with more than 20,000 projects, your
GitLab has two search modes:
- **Exact match mode:** returns results that exactly match the query.
- **Regular expression mode:** supports regular and boolean expressions.
- **Exact match mode**: returns results that exactly match the query.
- **Regular expression mode**: supports regular and boolean expressions.
The exact match mode is used by default.
To switch to the regular expression mode, to the right of the search box,

View File

@ -366,6 +366,31 @@ RSpec.describe Projects::BlobController, feature_category: :source_code_manageme
expect(response).to redirect_to(blob_after_edit_path)
end
# Regression coverage: updating a file without an explicit branch_name while
# every branch is protected must fail cleanly, not crash on the missing param.
context "when user doesn't have permission to push" do
# Protect all branches ('*' wildcard) so no one can push to any of them.
before do
create(:protected_branch, :no_one_can_push, project: mutable_project, name: '*')
end
context 'when branch_name is not provided' do
# Note: params deliberately omit :branch_name — that omission is the case
# under test.
let(:params) do
{
project_id: mutable_project,
namespace_id: mutable_project.namespace,
id: 'master/CHANGELOG',
content: 'Added changes',
commit_message: 'Update CHANGELOG'
}
end
# Expects a 422 with the standard push-permission error message rather
# than a 500 from the absent branch_name.
it 'handles missing branch_name gracefully and returns permission error' do
put :update, params: params, format: :json
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['error']).to eq('You are not allowed to push into this branch')
end
end
end
context 'when file is renamed' do
let(:default_params) do
{

View File

@ -155,14 +155,27 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, featu
end
context 'when the job is a bridge' do
let!(:downstream_pipeline) { create(:ci_pipeline, child_of: pipeline) }
let(:job) { downstream_pipeline.source_job }
context 'with a downstream pipeline' do
let!(:downstream_pipeline) { create(:ci_pipeline, child_of: pipeline) }
let(:job) { downstream_pipeline.source_job }
it 'redirects to the downstream pipeline page' do
get_show(id: job.id)
it 'redirects to the downstream pipeline page' do
get_show(id: job.id)
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(namespace_project_pipeline_path(id: downstream_pipeline.id))
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(namespace_project_pipeline_path(id: downstream_pipeline.id))
end
end
context 'without a downstream pipeline' do
let(:job) { create(:ci_bridge, pipeline: pipeline) }
it 'redirects to the job pipeline path' do
get_show(id: job.id)
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(project_pipeline_path(project, pipeline.id))
end
end
end
end

View File

@ -31,6 +31,7 @@ describe('~/deployments/components/deployment_aside.vue', () => {
const findTriggererItem = () => wrapper.findByTestId('deployment-triggerer-item');
const findPipelineSection = () => wrapper.findByTestId('deployment-pipeline');
const findPipelineLink = () => wrapper.findByTestId('deployment-pipeline-link');
const findJobLink = () => wrapper.findByTestId('deployment-job');
const createComponent = ({ propsData = {} } = {}) => {
wrapper = mountExtended(DeploymentAside, {
@ -112,6 +113,30 @@ describe('~/deployments/components/deployment_aside.vue', () => {
});
});
// Covers the deployment aside's job entry, which renders either a link
// (when the job exposes a webPath) or plain text (when it does not).
describe('link to the job page', () => {
it('displays a link when webPath is available', () => {
createComponent();
expect(findJobLink().attributes('href')).toBe(deployment.job.webPath);
expect(findJobLink().text()).toBe(deployment.job.name);
});
it('displays job name without a link when webPath is not available', () => {
// Clone the default deployment fixture but null out the job's webPath,
// e.g. the bridge-job case where no job page exists.
const deploymentWithNoJobPath = {
...deployment,
job: {
...deployment.job,
webPath: null,
},
};
createComponent({ propsData: { deployment: deploymentWithNoJobPath } });
// No href means the component fell back to a non-link element.
expect(findJobLink().attributes('href')).toBeUndefined();
expect(findJobLink().text()).toBe(deploymentWithNoJobPath.job.name);
});
});
describe('without optional properties', () => {
beforeEach(() => {
createComponent({

View File

@ -1,48 +1,101 @@
import { GlTruncate, GlLink, GlBadge } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { shallowMount } from '@vue/test-utils';
import { GlTruncate, GlLink, GlBadge, GlIcon } from '@gitlab/ui';
import DeploymentJob from '~/environments/environment_details/components/deployment_job.vue';
describe('app/assets/javascripts/environments/environment_details/components/deployment_job.vue', () => {
const jobData = {
webPath: 'http://example.com',
label: 'example job',
};
describe('DeploymentJob', () => {
let wrapper;
const createWrapper = ({ job }) => {
return mountExtended(DeploymentJob, {
propsData: {
job,
const createWrapper = (props = {}) => {
return shallowMount(DeploymentJob, {
propsData: props,
stubs: {
GlTruncate,
GlIcon,
},
});
};
describe('when the job data exists', () => {
beforeEach(() => {
wrapper = createWrapper({ job: jobData });
const findGlLink = () => wrapper.findComponent(GlLink);
const findTruncatedLabel = () => wrapper.findComponent(GlTruncate);
const findPipelineIcon = () => wrapper.findComponent(GlIcon);
const findBadge = () => wrapper.findComponent(GlBadge);
describe('with job data', () => {
const defaultJob = {
label: 'example job',
};
const webPath = 'path/to/job';
describe('with job path', () => {
beforeEach(() => {
wrapper = createWrapper({
job: {
...defaultJob,
webPath,
},
});
});
it('renders a link with correct href', () => {
expect(findGlLink().attributes('href')).toBe(webPath);
});
it('passes job label to truncate component', () => {
expect(findTruncatedLabel().props('text')).toBe(defaultJob.label);
});
});
it('should render a link with a correct href', () => {
const jobLink = wrapper.findComponent(GlLink);
expect(jobLink.exists()).toBe(true);
expect(jobLink.attributes().href).toBe(jobData.webPath);
describe('without job path', () => {
beforeEach(() => {
wrapper = createWrapper({ job: defaultJob });
});
it('renders job label without link', () => {
expect(findGlLink().exists()).toBe(false);
expect(findTruncatedLabel().props('text')).toBe(defaultJob.label);
});
});
it('should render a truncated label', () => {
const truncatedLabel = wrapper.findComponent(GlTruncate);
expect(truncatedLabel.exists()).toBe(true);
expect(truncatedLabel.props().text).toBe(jobData.label);
// When the job carries pipeline data (and no webPath of its own), the
// component renders a pipeline link with an icon and the pipeline label.
describe('with pipeline information', () => {
const pipeline = {
path: 'path/to/pipeline',
label: '#123',
};
beforeEach(() => {
wrapper = createWrapper({
job: {
...defaultJob,
pipeline,
},
});
});
// NOTE(review): findGlLink() returns the first GlLink; here that is the
// pipeline link because defaultJob has no webPath — confirm if defaultJob
// ever gains one.
it('renders pipeline link with correct attributes', () => {
expect(findGlLink().attributes('href')).toBe(pipeline.path);
});
it('includes pipeline icon', () => {
expect(findPipelineIcon().exists()).toBe(true);
expect(findPipelineIcon().props('name')).toBe('pipeline');
});
it('includes pipeline label', () => {
expect(findGlLink().text()).toBe(pipeline.label);
});
});
});
describe('when the job data does not exist', () => {
describe('without job data', () => {
beforeEach(() => {
wrapper = createWrapper({ job: null });
});
it('should render a badge with the text "API"', () => {
const badge = wrapper.findComponent(GlBadge);
it('renders an API badge', () => {
const badge = findBadge();
expect(badge.exists()).toBe(true);
expect(badge.props().variant).toBe('info');
expect(badge.props('variant')).toBe('info');
expect(badge.text()).toBe('API');
});
});

View File

@ -32,6 +32,7 @@ exports[`deployment_data_transformation_helper convertToDeploymentTableRow shoul
"id": "31",
"job": {
"label": "deploy-prod (#860)",
"pipeline": undefined,
"webPath": "/gitlab-org/pipelinestest/-/jobs/860",
},
"rollback": {
@ -69,6 +70,150 @@ exports[`deployment_data_transformation_helper convertToDeploymentTableRow shoul
`;
exports[`deployment_data_transformation_helper convertToDeploymentTableRow should be converted to proper table row data 2`] = `
{
"actions": [
{
"name": "deploy-staging",
"playPath": "https://gdk.test:3000/redeploy/play",
"playable": true,
"scheduledAt": "2023-01-17T11:02:41.369Z",
},
],
"commit": {
"author": {
"avatar_url": "/uploads/-/system/user/avatar/1/avatar.png",
"path": "http://gdk.test:3000/root",
"username": "Administrator",
},
"commitRef": {
"name": "main",
},
"commitUrl": "http://gdk.test:3000/gitlab-org/pipelinestest/-/commit/0cb48dd5deddb7632fd7c3defb16075fc6c3ca74",
"shortSha": "0cb48dd5",
"tag": false,
"title": "Update .gitlab-ci.yml file",
},
"created": "2022-10-17T07:44:17Z",
"deploymentApproval": {
"isApprovalActionAvailable": false,
},
"finished": "2022-10-17T07:44:43Z",
"id": "31",
"job": {
"label": "deploy-prod (#860)",
"pipeline": {
"label": "#101",
"path": "/gitlab-org/pipelinestest/-/pipelines/101",
},
"webPath": null,
},
"rollback": {
"id": "gid://gitlab/Deployment/76",
"lastDeployment": {
"commit": {
"author": {
"avatarUrl": "/uploads/-/system/user/avatar/1/avatar.png",
"id": "gid://gitlab/User/1",
"name": "Administrator",
"webUrl": "http://gdk.test:3000/root",
},
"authorEmail": "admin@example.com",
"authorGravatar": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"authorName": "Administrator",
"id": "gid://gitlab/CommitPresenter/0cb48dd5deddb7632fd7c3defb16075fc6c3ca74",
"message": "Update .gitlab-ci.yml file",
"shortId": "0cb48dd5",
"webUrl": "http://gdk.test:3000/gitlab-org/pipelinestest/-/commit/0cb48dd5deddb7632fd7c3defb16075fc6c3ca74",
},
"isLast": false,
},
"name": undefined,
"retryUrl": "null/retry",
},
"status": "success",
"triggerer": {
"avatarUrl": "/uploads/-/system/user/avatar/1/avatar.png",
"id": "gid://gitlab/User/1",
"name": "Administrator",
"webUrl": "http://gdk.test:3000/root",
},
"webPath": "",
}
`;
exports[`deployment_data_transformation_helper convertToDeploymentTableRow should be converted to proper table row data 3`] = `
{
"actions": [
{
"name": "deploy-staging",
"playPath": "https://gdk.test:3000/redeploy/play",
"playable": true,
"scheduledAt": "2023-01-17T11:02:41.369Z",
},
],
"commit": {
"author": {
"avatar_url": "/uploads/-/system/user/avatar/1/avatar.png",
"path": "http://gdk.test:3000/root",
"username": "Administrator",
},
"commitRef": {
"name": "main",
},
"commitUrl": "http://gdk.test:3000/gitlab-org/pipelinestest/-/commit/0cb48dd5deddb7632fd7c3defb16075fc6c3ca74",
"shortSha": "0cb48dd5",
"tag": false,
"title": "Update .gitlab-ci.yml file",
},
"created": "2022-10-17T07:44:17Z",
"deploymentApproval": {
"isApprovalActionAvailable": false,
},
"finished": "2022-10-17T07:44:43Z",
"id": "31",
"job": {
"label": "deploy-prod (#860)",
"pipeline": {
"label": "#101",
"path": "/gitlab-org/pipelinestest/-/pipelines/101",
},
"webPath": null,
},
"rollback": {
"id": "gid://gitlab/Deployment/76",
"lastDeployment": {
"commit": {
"author": {
"avatarUrl": "/uploads/-/system/user/avatar/1/avatar.png",
"id": "gid://gitlab/User/1",
"name": "Administrator",
"webUrl": "http://gdk.test:3000/root",
},
"authorEmail": "admin@example.com",
"authorGravatar": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"authorName": "Administrator",
"id": "gid://gitlab/CommitPresenter/0cb48dd5deddb7632fd7c3defb16075fc6c3ca74",
"message": "Update .gitlab-ci.yml file",
"shortId": "0cb48dd5",
"webUrl": "http://gdk.test:3000/gitlab-org/pipelinestest/-/commit/0cb48dd5deddb7632fd7c3defb16075fc6c3ca74",
},
"isLast": false,
},
"name": undefined,
"retryUrl": "null/retry",
},
"status": "success",
"triggerer": {
"avatarUrl": "/uploads/-/system/user/avatar/1/avatar.png",
"id": "gid://gitlab/User/1",
"name": "Administrator",
"webUrl": "http://gdk.test:3000/root",
},
"webPath": "",
}
`;
exports[`deployment_data_transformation_helper convertToDeploymentTableRow should be converted to proper table row data 4`] = `
{
"actions": [],
"commit": {
@ -104,7 +249,7 @@ exports[`deployment_data_transformation_helper convertToDeploymentTableRow shoul
}
`;
exports[`deployment_data_transformation_helper convertToDeploymentTableRow should be converted to proper table row data 3`] = `
exports[`deployment_data_transformation_helper convertToDeploymentTableRow should be converted to proper table row data 5`] = `
{
"actions": [],
"commit": {

View File

@ -77,6 +77,94 @@ describe('deployment_data_transformation_helper', () => {
finishedAt: '2022-10-17T07:44:43Z',
};
// Fixture: a deployment whose job has no webPath but does expose a
// deploymentPipeline — exercises the pipeline-link branch of the
// table-row transformation.
const deploymentNodeWithPipeline = {
id: 'gid://gitlab/Deployment/76',
iid: '31',
status: 'SUCCESS',
createdAt: '2022-10-17T07:44:17Z',
ref: 'main',
tag: false,
job: {
name: 'deploy-prod',
refName: 'main',
id: 'gid://gitlab/Ci::Bridge/860',
// Bridge jobs have no job page of their own.
webPath: null,
deploymentPipeline: {
id: 'gid://gitlab/Ci::Pipeline/101',
path: '/gitlab-org/pipelinestest/-/pipelines/101',
jobs: {
nodes: [
{
name: 'deploy-staging',
playable: true,
scheduledAt: '2023-01-17T11:02:41.369Z',
webPath: 'https://gdk.test:3000/redeploy',
},
],
},
},
},
commit: commitWithAuthor,
triggerer: {
id: 'gid://gitlab/User/1',
webUrl: 'http://gdk.test:3000/root',
name: 'Administrator',
avatarUrl: '/uploads/-/system/user/avatar/1/avatar.png',
},
finishedAt: '2022-10-17T07:44:43Z',
};
// Fixture: like deploymentNodeWithPipeline, but the bridge job also has a
// downstream pipeline — exercises the downstream-pipeline branch of the
// table-row transformation.
const deploymentNodeWithDownstreamPipeline = {
id: 'gid://gitlab/Deployment/76',
iid: '31',
status: 'SUCCESS',
createdAt: '2022-10-17T07:44:17Z',
ref: 'main',
tag: false,
job: {
name: 'deploy-prod',
refName: 'main',
id: 'gid://gitlab/Ci::Bridge/860',
// Bridge jobs have no job page of their own.
webPath: null,
deploymentPipeline: {
id: 'gid://gitlab/Ci::Pipeline/101',
path: '/gitlab-org/pipelinestest/-/pipelines/101',
jobs: {
nodes: [
{
name: 'deploy-staging',
playable: true,
scheduledAt: '2023-01-17T11:02:41.369Z',
webPath: 'https://gdk.test:3000/redeploy',
},
],
},
},
// NOTE(review): field is spelled `downStreamPipeline` here — verify this
// matches the consumed GraphQL field casing (commonly `downstreamPipeline`).
downStreamPipeline: {
id: 'gid://gitlab/Ci::Pipeline/102',
path: '/gitlab-org/pipelinestest/-/pipelines/102',
jobs: {
nodes: [
{
name: 'deploy-staging',
playable: true,
scheduledAt: '2023-01-17T11:02:41.369Z',
webPath: 'https://gdk.test:3000/redeploy',
},
],
},
},
},
commit: commitWithAuthor,
triggerer: {
id: 'gid://gitlab/User/1',
webUrl: 'http://gdk.test:3000/root',
name: 'Administrator',
avatarUrl: '/uploads/-/system/user/avatar/1/avatar.png',
},
finishedAt: '2022-10-17T07:44:43Z',
};
const deploymentNodeWithNoJob = {
...deploymentNode,
job: null,
@ -111,11 +199,14 @@ describe('deployment_data_transformation_helper', () => {
describe('convertToDeploymentTableRow', () => {
const deploymentNodeWithEmptyJob = { ...deploymentNode, job: undefined };
it.each([deploymentNode, deploymentNodeWithEmptyJob, deploymentNodeWithNoJob])(
'should be converted to proper table row data',
(node) => {
expect(convertToDeploymentTableRow(node, environment)).toMatchSnapshot();
},
);
it.each([
deploymentNode,
deploymentNodeWithPipeline,
deploymentNodeWithDownstreamPipeline,
deploymentNodeWithEmptyJob,
deploymentNodeWithNoJob,
])('should be converted to proper table row data', (node) => {
expect(convertToDeploymentTableRow(node, environment)).toMatchSnapshot();
});
});
});

View File

@ -67,14 +67,26 @@ RSpec.describe Types::Ci::JobType, feature_category: :continuous_integration do
end
describe '#web_path' do
subject { resolve_field(:web_path, build, current_user: user, object_type: described_class) }
subject { resolve_field(:web_path, job, current_user: user, object_type: described_class) }
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:build) { create(:ci_build, project: project, user: user) }
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
it 'returns the web path of the job' do
is_expected.to eq("/#{project.full_path}/-/jobs/#{build.id}")
context 'when the job is a regular build' do
let(:job) { create(:ci_build, project: project, user: user) }
it 'returns the project job path' do
expected_path = "/#{project.full_path}/-/jobs/#{job.id}"
is_expected.to eq(expected_path)
end
end
context 'when the job is a bridge' do
let(:job) { create(:ci_bridge, project: project, user: user) }
it 'returns nil' do
is_expected.to be_nil
end
end
end