Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-04-09 18:11:05 +00:00
parent 1666be87db
commit 42be04412b
59 changed files with 530 additions and 238 deletions

View File

@ -87,7 +87,7 @@ docs-lint blueprint:
docs code_quality:
extends:
- .reports:rules:code_quality
- .reports:rules:docs_code_quality
- .docs-markdown-lint-image
stage: lint
needs: []

View File

@ -1,6 +1,6 @@
include:
- project: gitlab-org/quality/pipeline-common
ref: 8.4.4
ref: 8.5.0
file:
- /ci/danger-review.yml

View File

@ -2464,6 +2464,14 @@
- <<: *if-default-refs
changes: *code-backstage-qa-patterns
.reports:rules:docs_code_quality:
# Similar to above, run docs code quality job in every pipeline so there is always
# a report artifact to compare against.
rules:
- !reference [".reports:rules:code_quality", rules]
- <<: *if-default-refs
changes: *docs-patterns
.reports:rules:code_quality_cache:
rules:
- <<: *if-default-branch-refs

View File

@ -230,7 +230,7 @@ gem 'asciidoctor-kroki', '~> 0.8.0', require: false # rubocop:todo Gemfile/Missi
gem 'rouge', '~> 4.2.0' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'truncato', '~> 0.7.12' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'nokogiri', '~> 1.16' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'gitlab-glfm-markdown', '~> 0.0.13', feature_category: :team_planning
gem 'gitlab-glfm-markdown', '~> 0.0.14', feature_category: :team_planning
# Calendar rendering
gem 'icalendar' # rubocop:todo Gemfile/MissingFeatureCategory
@ -402,7 +402,7 @@ gem 'prometheus-client-mmap', '~> 1.1', '>= 1.1.1', require: 'prometheus/client'
gem 'warning', '~> 1.3.0' # rubocop:todo Gemfile/MissingFeatureCategory
group :development do
gem 'lefthook', '~> 1.5.6', require: false, feature_category: :tooling
gem 'lefthook', '~> 1.6.8', require: false, feature_category: :tooling
gem 'rubocop', feature_category: :tooling
gem 'solargraph', '~> 0.47.2', require: false # rubocop:todo Gemfile/MissingFeatureCategory

View File

@ -212,11 +212,11 @@
{"name":"gitlab-dangerfiles","version":"4.7.0","platform":"ruby","checksum":"2576876a8dcb7290853fc3aef8048001cfe593b87318dd0016959d42e0e145ca"},
{"name":"gitlab-experiment","version":"0.9.1","platform":"ruby","checksum":"f230ee742154805a755d5f2539dc44d93cdff08c5bbbb7656018d61f93d01f48"},
{"name":"gitlab-fog-azure-rm","version":"1.9.1","platform":"ruby","checksum":"026b8e188ac4183c1bf1b1909b0489da0ffad453996a6e744e0eba67dc284f37"},
{"name":"gitlab-glfm-markdown","version":"0.0.13","platform":"aarch64-linux","checksum":"997c750df1229a3330f889f10a107779e73da9f47ee1e11555fedec9df98386f"},
{"name":"gitlab-glfm-markdown","version":"0.0.13","platform":"arm64-darwin","checksum":"7e2e8f3b2246a84e53d0f0c314895205745e3c737603cdc5898dc80f5c6d9aa9"},
{"name":"gitlab-glfm-markdown","version":"0.0.13","platform":"ruby","checksum":"abed5d49dd7801b1646771e5fb23c158bb7a3116fde01cf185a18bf5c79d695a"},
{"name":"gitlab-glfm-markdown","version":"0.0.13","platform":"x86_64-darwin","checksum":"2d03075da96854f4a66ef1da3e8bd49f8124a23991fc0d9497fc9567820179a5"},
{"name":"gitlab-glfm-markdown","version":"0.0.13","platform":"x86_64-linux","checksum":"ef703ea4eb1a222a67df1a88d8731909960bde7c9375d947518b21859d9f5975"},
{"name":"gitlab-glfm-markdown","version":"0.0.14","platform":"aarch64-linux","checksum":"4c5e720771eee72a77b03327ed7b32882a3bf23342822f57e9ac2aedd439905f"},
{"name":"gitlab-glfm-markdown","version":"0.0.14","platform":"arm64-darwin","checksum":"53dd2ad3dd82a88d402c31db06ab178b3e2e3d30e0e5358fef2f7a192d7dcce9"},
{"name":"gitlab-glfm-markdown","version":"0.0.14","platform":"ruby","checksum":"96394f039923b569f3cc53ea852436bfa1582fb9e37b8b0be2fd5dd115ed363a"},
{"name":"gitlab-glfm-markdown","version":"0.0.14","platform":"x86_64-darwin","checksum":"df7ac87711d0be2b0073ad19e00ea72e1216a6461df6eb78e81a576ec9cb57c2"},
{"name":"gitlab-glfm-markdown","version":"0.0.14","platform":"x86_64-linux","checksum":"09dffdfa3fe4f0af2a90a5b94475cfe82b31b96b2a207ae475091434a15df54c"},
{"name":"gitlab-labkit","version":"0.35.1","platform":"ruby","checksum":"e9501b33633a2a1bb3e5a7de640d713efb50565eb77cbd13f6a0780a835f81d4"},
{"name":"gitlab-license","version":"2.4.0","platform":"ruby","checksum":"fd238fb1e605a6b9250d4eb1744434ffd131f18d50a3be32f613c883f7635e20"},
{"name":"gitlab-mail_room","version":"0.0.24","platform":"ruby","checksum":"c7bf3df73dbcc024bc98dbf72514520ac2ff2b6d0124de496279fe56c13c3cb3"},
@ -347,7 +347,7 @@
{"name":"kubeclient","version":"4.11.0","platform":"ruby","checksum":"4985fcd749fb8c364a668a8350a49821647f03aa52d9ee6cbc582beb8e883fcc"},
{"name":"language_server-protocol","version":"3.17.0.3","platform":"ruby","checksum":"3d5c58c02f44a20d972957a9febe386d7e7468ab3900ce6bd2b563dd910c6b3f"},
{"name":"launchy","version":"2.5.0","platform":"ruby","checksum":"954243c4255920982ce682f89a42e76372dba94770bf09c23a523e204bdebef5"},
{"name":"lefthook","version":"1.5.6","platform":"ruby","checksum":"cc7e191eec20ca2f8284e8e8224c99c89607cbf8749b96cff690c94cde7787e5"},
{"name":"lefthook","version":"1.6.8","platform":"ruby","checksum":"248181e8374ce9ef306a45fdbd3f64c076aaa4845cad6c126be4414b9c71c623"},
{"name":"letter_opener","version":"1.7.0","platform":"ruby","checksum":"095bc0d58e006e5b43ea7d219e64ecf2de8d1f7d9dafc432040a845cf59b4725"},
{"name":"letter_opener_web","version":"2.0.0","platform":"ruby","checksum":"33860ad41e1785d75456500e8ca8bba8ed71ee6eaf08a98d06bbab67c5577b6f"},
{"name":"libyajl2","version":"2.1.0","platform":"ruby","checksum":"aa5df6c725776fc050c8418450de0f7c129cb7200b811907c4c0b3b5c0aea0ef"},

View File

@ -705,7 +705,7 @@ GEM
fog-core (~> 2.1)
fog-json (~> 1.2)
mime-types
gitlab-glfm-markdown (0.0.13)
gitlab-glfm-markdown (0.0.14)
rb_sys (~> 0.9.86)
gitlab-labkit (0.35.1)
actionpack (>= 5.0.0, < 8.0.0)
@ -1025,7 +1025,7 @@ GEM
language_server-protocol (3.17.0.3)
launchy (2.5.0)
addressable (~> 2.7)
lefthook (1.5.6)
lefthook (1.6.8)
letter_opener (1.7.0)
launchy (~> 2.2)
letter_opener_web (2.0.0)
@ -1910,7 +1910,7 @@ DEPENDENCIES
gitlab-dangerfiles (~> 4.7.0)
gitlab-experiment (~> 0.9.1)
gitlab-fog-azure-rm (~> 1.9.1)
gitlab-glfm-markdown (~> 0.0.13)
gitlab-glfm-markdown (~> 0.0.14)
gitlab-housekeeper!
gitlab-http!
gitlab-labkit (~> 0.35.1)
@ -1986,7 +1986,7 @@ DEPENDENCIES
knapsack (~> 1.22.0)
kramdown (~> 2.3.1)
kubeclient (~> 4.11.0)
lefthook (~> 1.5.6)
lefthook (~> 1.6.8)
letter_opener_web (~> 2.0.0)
license_finder (~> 7.0)
licensee (~> 9.16)

View File

@ -1,4 +1,5 @@
<script>
import { GlDisclosureDropdown, GlDisclosureDropdownItem } from '@gitlab/ui';
import { reportToSentry } from '~/ci/utils';
import { JOB_DROPDOWN, SINGLE_JOB } from '../constants';
import JobItem from './job_item.vue';
@ -12,6 +13,8 @@ import JobItem from './job_item.vue';
export default {
components: {
JobItem,
GlDisclosureDropdown,
GlDisclosureDropdownItem,
},
props: {
group: {
@ -57,54 +60,50 @@ export default {
pipelineActionRequestComplete() {
this.$emit('pipelineActionRequestComplete');
},
jobItem(job) {
return {
text: job.name,
href: job.status?.detailsPath,
};
},
},
};
</script>
<template>
<!-- eslint-disable @gitlab/vue-no-data-toggle -->
<div
<gl-disclosure-dropdown
:id="computedJobId"
class="ci-job-dropdown-container dropdown dropright"
class="ci-job-group-dropdown"
block
placement="right-start"
data-testid="job-dropdown-container"
>
<button
type="button"
data-toggle="dropdown"
data-display="static"
:class="jobGroupClasses"
class="dropdown-menu-toggle gl-pipeline-job-width! gl-pr-4!"
>
<div class="gl-display-flex gl-align-items-stretch gl-justify-content-space-between">
<job-item
:type="$options.jobItemTypes.jobDropdown"
:group-tooltip="tooltipText"
:job="group"
:stage-name="stageName"
/>
<template #toggle>
<button type="button" :class="jobGroupClasses" class="gl-w-full gl-pr-4">
<div class="gl-display-flex gl-align-items-stretch gl-justify-content-space-between">
<job-item
:type="$options.jobItemTypes.jobDropdown"
:group-tooltip="tooltipText"
:job="group"
:stage-name="stageName"
/>
<div class="gl-font-weight-100 gl-font-size-lg gl-ml-n4 gl-align-self-center">
{{ group.size }}
<div class="gl-font-weight-100 gl-font-size-lg gl-ml-n4 gl-align-self-center">
{{ group.size }}
</div>
</div>
</div>
</button>
</button>
</template>
<ul
class="dropdown-menu big-pipeline-graph-dropdown-menu js-grouped-pipeline-dropdown"
data-testid="jobs-dropdown-menu"
>
<li class="scrollable-menu">
<ul>
<li v-for="job in group.jobs" :key="job.id">
<job-item
:dropdown-length="group.size"
:job="job"
:type="$options.jobItemTypes.singleJob"
css-class-job-name="pipeline-job-item"
@pipelineActionRequestComplete="pipelineActionRequestComplete"
/>
</li>
</ul>
</li>
</ul>
</div>
<gl-disclosure-dropdown-item v-for="job in group.jobs" :key="job.id" :item="jobItem(job)">
<template #list-item>
<job-item
:is-link="false"
:job="job"
:type="$options.jobItemTypes.singleJob"
css-class-job-name="gl-p-3"
@pipelineActionRequestComplete="pipelineActionRequestComplete"
/>
</template>
</gl-disclosure-dropdown-item>
</gl-disclosure-dropdown>
</template>

View File

@ -80,11 +80,6 @@ export default {
required: false,
default: '',
},
dropdownLength: {
type: Number,
required: false,
default: Infinity,
},
groupTooltip: {
type: String,
required: false,
@ -125,6 +120,11 @@ export default {
required: false,
default: SINGLE_JOB,
},
isLink: {
type: Boolean,
required: false,
default: true,
},
},
data() {
return {
@ -134,14 +134,14 @@ export default {
};
},
computed: {
boundary() {
return this.dropdownLength === 1 ? 'viewport' : 'scrollParent';
},
computedJobId() {
return this.pipelineId > -1 ? `${this.job.name}-${this.pipelineId}` : '';
},
detailsPath() {
return this.status.detailsPath;
if (this.isLink) {
return this.status.detailsPath;
}
return null;
},
hasDetails() {
return this.status.hasDetails;
@ -161,8 +161,11 @@ export default {
kind() {
return this.job?.kind || '';
},
shouldRenderLink() {
return this.isLink && this.hasDetails;
},
nameComponent() {
return this.hasDetails ? 'gl-link' : 'div';
return this.shouldRenderLink ? 'gl-link' : 'div';
},
retryTriggerJobWarningText() {
return sprintf(this.$options.i18n.confirmationModal.title, {
@ -323,7 +326,7 @@ export default {
:title="tooltipText"
:class="jobClasses"
:href="detailsPath"
class="js-pipeline-graph-job-link menu-item gl-text-gray-900 gl-active-text-decoration-none gl-focus-text-decoration-none gl-hover-text-decoration-none gl-w-full"
class="js-pipeline-graph-job-link menu-item gl-text-gray-900 gl-active-text-decoration-none gl-focus-text-decoration-none gl-hover-text-decoration-none gl-hover-bg-gray-50 gl-focus-bg-gray-50 gl-w-full"
:data-testid="testId"
@click="jobItemClick"
@mouseout="hideTooltips"
@ -331,7 +334,9 @@ export default {
<div class="gl-display-flex gl-align-items-center gl-flex-grow-1">
<ci-icon :status="job.status" :use-link="false" />
<div class="gl-pl-3 gl-pr-3 gl-display-flex gl-flex-direction-column gl-pipeline-job-width">
<div class="gl-text-truncate gl-pr-9 gl-line-height-normal">{{ job.name }}</div>
<div class="gl-text-truncate gl-pr-9 gl-line-height-normal gl-text-left gl-text-gray-700">
{{ job.name }}
</div>
<div
v-if="showStageName"
data-testid="stage-name-in-job"

View File

@ -58,34 +58,34 @@
padding: 0;
max-height: 245px;
overflow: auto;
}
li {
position: relative;
li {
position: relative;
// link to the build
.pipeline-job-item {
align-items: center;
clear: both;
display: flex;
font-weight: normal;
line-height: $line-height-base;
white-space: nowrap;
// link to the build
.pipeline-job-item {
align-items: center;
clear: both;
display: flex;
font-weight: normal;
line-height: $line-height-base;
white-space: nowrap;
// Match dropdown.scss for all `a` tags
&.non-details-job-component {
padding: $gl-padding-8 $gl-btn-horz-padding;
// Match dropdown.scss for all `a` tags
&.non-details-job-component {
padding: $gl-padding-8 $gl-btn-horz-padding;
}
}
}
// ensure .pipeline-job-item has hover style when action-icon is hovered
&:hover > .pipeline-job-item,
&:hover > .ci-job-component > .pipeline-job-item,
.pipeline-job-item:hover,
.pipeline-job-item:focus {
outline: none;
text-decoration: none;
background-color: var(--gray-100, $gray-50);
// ensure .pipeline-job-item has hover style when action-icon is hovered
&:hover > .pipeline-job-item,
&:hover > .ci-job-component > .pipeline-job-item,
.pipeline-job-item:hover,
.pipeline-job-item:focus {
outline: none;
text-decoration: none;
background-color: var(--gray-100, $gray-50);
}
}
}
}

View File

@ -137,10 +137,6 @@
}
}
.gl-pipeline-job-width\! {
width: 100% !important;
}
.gl-downstream-pipeline-job-width {
width: 8rem;
@ -324,6 +320,29 @@
}
}
.stage-column .ci-job-group-dropdown {
&,
.gl-new-dropdown-custom-toggle {
width: 100%;
}
// Reset padding, as inner element will
// define padding
.gl-new-dropdown-item-content,
.gl-new-dropdown-item-text-wrapper {
padding: 0;
}
// Set artificial focus on the menu-item to keep
// it consistent with the original dropdown items
.gl-new-dropdown-item:focus .menu-item,
.gl-new-dropdown-item-content:focus .menu-item {
background-color: $gray-50;
border-radius: $border-radius-default;
@include gl-focus($inset: true);
}
}
.scan-reports-summary-grid {
grid-template-columns: 1fr 1fr max-content;
}

View File

@ -21,3 +21,5 @@ module Clusters
end
end
end
Clusters::AgentPolicy.prepend_mod_with('Clusters::AgentPolicy')

View File

@ -61,8 +61,7 @@ module Deployments
# deployment we may end up running a handful of queries to get and insert
# the data.
commits.each_slice(COMMITS_PER_QUERY) do |slice|
deployment.link_merge_requests(merge_requests_by_merge_commit_sha(slice))
link_merge_requests_by_merge_commits(slice)
link_fast_forward_merge_requests(slice)
# The cherry picked commits are tracked via `notes.commit_id`
@ -88,9 +87,12 @@ module Deployments
private
def link_merge_requests_by_merge_commits(commits)
deployment.link_merge_requests(merge_requests_by_merge_commit_sha(commits))
end
def link_fast_forward_merge_requests(commits)
return if Feature.disabled?(:link_fast_forward_merge_requests_to_deployment, project, type: :gitlab_com_derisk)
return unless project.merge_requests_ff_only_enabled
deployment.link_merge_requests(merge_requests_by_head_commit_sha(commits))
end

View File

@ -295,8 +295,8 @@ The default backup strategy is to essentially stream data from the respective
data locations to the backup using the Linux command `tar` and `gzip`. This works
fine in most cases, but can cause problems when data is rapidly changing.
When data changes while `tar` is reading it, the error `file changed as we read
it` may occur, and causes the backup process to fail. In that case, you can use
When data changes while `tar` is reading it, the error `file changed as we read it`
may occur, and causes the backup process to fail. In that case, you can use
the backup strategy called `copy`. The strategy copies data files
to a temporary location before calling `tar` and `gzip`, avoiding the error.
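As a hedged sketch (assuming a Linux package installation), the `copy` strategy is selected through the `STRATEGY` variable when creating a backup:

```shell
# Copy data files to a temporary location before archiving, which avoids the
# "file changed as we read it" failure. Assumes a Linux package installation.
sudo gitlab-backup create STRATEGY=copy
```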

View File

@ -826,8 +826,8 @@ Prerequisites:
```
1. After the first sync completes, use the web UI or command-line interface of your new object storage provider to
verify that there are objects in the new bucket. If there are none, or if you encounter an error while running `rclone
sync`, check your Rclone configuration and try again.
verify that there are objects in the new bucket. If there are none, or if you encounter an error while running
`rclone sync`, check your Rclone configuration and try again.
After you have done at least one successful Rclone copy from the old location to the new location, schedule maintenance and take your GitLab server offline. During your maintenance window you must do two things:
@ -961,8 +961,8 @@ GitLab Workhorse uploads files to S3 using pre-signed URLs that do
not have a `Content-MD5` HTTP header computed for them. To ensure data
is not corrupted, Workhorse checks that the MD5 hash of the data sent
equals the ETag header returned from the S3 server. When encryption is
enabled, this is not the case, which causes Workhorse to report an `ETag
mismatch` error during an upload.
enabled, this is not the case, which causes Workhorse to report an `ETag mismatch`
error during an upload.
When the consolidated form is:

View File

@ -367,7 +367,7 @@ cat /home/git/transfer-logs/* | sort | uniq -u |\
/home/git/transfer-logs/success-$(date +%s).log \
/home/git/repositories \
/mnt/gitlab/repositories
`
'
```
#### Parallel `rsync` only for repositories with recent activity
@ -383,8 +383,8 @@ Using `rsync` to migrate Git data can cause data loss and repository corruption.
Suppose you have already done one sync that started after 2015-10-1 12:00 UTC.
Then you might only want to sync repositories that were changed by using GitLab
after that time. You can use the `SINCE` variable to tell `rake
gitlab:list_repos` to only print repositories with recent activity.
after that time. You can use the `SINCE` variable to tell `rake gitlab:list_repos`
to only print repositories with recent activity.
```shell
# Omnibus
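# Illustrative sketch only: use SINCE to list repositories with recent activity.
# The timestamp is a placeholder; adjust it to the start of your previous sync.
sudo gitlab-rake gitlab:list_repos SINCE='2015-10-1 12:00 UTC'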

View File

@ -51,8 +51,8 @@ added the `TrustedUserCAKeys` of your CA to your `sshd_config`, for example:
TrustedUserCAKeys /etc/security/mycompany_user_ca.pub
```
Usually `TrustedUserCAKeys` would not be scoped under a `Match User
git` in such a setup, since it would also be used for system logins to
Usually `TrustedUserCAKeys` would not be scoped under a `Match User git`
in such a setup, since it would also be used for system logins to
the GitLab server itself, but your setup may vary. If the CA is only
used for GitLab consider putting this in the `Match User git` section
(described below).

View File

@ -182,7 +182,7 @@ Prerequisites:
To remove server hooks, pass an empty tarball to `hook set` to indicate that the repository should contain no hooks. For example:
```shell
cat empty_hooks.tar | sudo /opt/gitlab/embedded/bin/gitaly hooks set --storage <storage> --repository <relative path> --config <config path>`.
cat empty_hooks.tar | sudo /opt/gitlab/embedded/bin/gitaly hooks set --storage <storage> --repository <relative path> --config <config path>
```
:::TabTitle GitLab 15.10 and earlier

View File

@ -6237,6 +6237,44 @@ Input type: `NamespaceCiCdSettingsUpdateInput`
| <a id="mutationnamespacecicdsettingsupdateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationnamespacecicdsettingsupdateerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
### `Mutation.namespaceCreateRemoteDevelopmentClusterAgentMapping`
Input type: `NamespaceCreateRemoteDevelopmentClusterAgentMappingInput`
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationnamespacecreateremotedevelopmentclusteragentmappingclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationnamespacecreateremotedevelopmentclusteragentmappingclusteragentid"></a>`clusterAgentId` | [`ClustersAgentID!`](#clustersagentid) | GlobalID of the cluster agent to be associated with the namespace. |
| <a id="mutationnamespacecreateremotedevelopmentclusteragentmappingnamespaceid"></a>`namespaceId` | [`NamespaceID!`](#namespaceid) | GlobalID of the namespace to be associated with the cluster agent. |
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationnamespacecreateremotedevelopmentclusteragentmappingclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationnamespacecreateremotedevelopmentclusteragentmappingerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
### `Mutation.namespaceDeleteRemoteDevelopmentClusterAgentMapping`
Input type: `NamespaceDeleteRemoteDevelopmentClusterAgentMappingInput`
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationnamespacedeleteremotedevelopmentclusteragentmappingclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationnamespacedeleteremotedevelopmentclusteragentmappingclusteragentid"></a>`clusterAgentId` | [`ClustersAgentID!`](#clustersagentid) | GlobalID of the cluster agent to be un-associated from the namespace. |
| <a id="mutationnamespacedeleteremotedevelopmentclusteragentmappingnamespaceid"></a>`namespaceId` | [`NamespaceID!`](#namespaceid) | GlobalID of the namespace to be un-associated from the cluster agent. |
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationnamespacedeleteremotedevelopmentclusteragentmappingclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationnamespacedeleteremotedevelopmentclusteragentmappingerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
### `Mutation.namespaceIncreaseStorageTemporarily`
Input type: `NamespaceIncreaseStorageTemporarilyInput`

View File

@ -49,7 +49,7 @@ for Conan recipes.
### Project-level
```plaintext
/projects/:id/packages/conan/v1`
/projects/:id/packages/conan/v1
```
| Attribute | Type | Required | Description |

View File

@ -127,7 +127,7 @@ The examples in this document all use the project-level prefix.
### Project-level
```plaintext
/projects/:id/packages/debian`
/projects/:id/packages/debian
```
| Attribute | Type | Required | Description |
@ -137,7 +137,7 @@ The examples in this document all use the project-level prefix.
### Group-level
```plaintext
/groups/:id/-/packages/debian`
/groups/:id/-/packages/debian
```
| Attribute | Type | Required | Description |

View File

@ -135,7 +135,7 @@ The examples in this document all use the project-level prefix.
### Instance-level
```plaintext
/packages/npm`
/packages/npm
```
| Attribute | Type | Required | Description |
@ -145,7 +145,7 @@ The examples in this document all use the project-level prefix.
### Project-level
```plaintext
/projects/:id/packages/npm`
/projects/:id/packages/npm
```
| Attribute | Type | Required | Description |
@ -158,7 +158,7 @@ The examples in this document all use the project-level prefix.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/121837) in GitLab 16.1. Feature flag `npm_group_level_endpoints` removed.
```plaintext
/groups/:id/-/packages/npm`
/groups/:id/-/packages/npm
```
| Attribute | Type | Required | Description |

View File

@ -163,7 +163,7 @@ The examples in this document all use the project-level prefix.
### Group-level
```plaintext
/groups/:id/-/packages/nuget`
/groups/:id/-/packages/nuget
```
| Attribute | Type | Required | Description |
@ -173,7 +173,7 @@ The examples in this document all use the project-level prefix.
### Project-level
```plaintext
/projects/:id/packages/nuget`
/projects/:id/packages/nuget
```
| Attribute | Type | Required | Description |

View File

@ -27,8 +27,8 @@ of `state` are:
- `cleanup failed`: The project has been moved but the repositories on the source storage could not be deleted.
To ensure data integrity, projects are put in a temporary read-only state for the
duration of the move. During this time, users receive a `The repository is temporarily
read-only. Please try again later.` message if they try to push new commits.
duration of the move. During this time, users receive a `The repository is temporarily read-only. Please try again later.`
message if they try to push new commits.
This API requires you to [authenticate yourself](rest/index.md#authentication) as an administrator.
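For instance (a hedged sketch; the token, project ID, and storage name are placeholders), a single project move can be scheduled with:

```shell
# Schedule a repository storage move for project 1 to the "storage2" shard.
# <admin_access_token> and the values shown are placeholders.
curl --request POST --header "PRIVATE-TOKEN: <admin_access_token>" \
  --header "Content-Type: application/json" \
  --data '{"destination_storage_name": "storage2"}' \
  "https://gitlab.example.com/api/v4/projects/1/repository_storage_moves"
```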

View File

@ -28,8 +28,8 @@ of `state` are:
- `cleanup failed`: The snippet has been moved but the repository on the source storage could not be deleted.
To ensure data integrity, snippets are put in a temporary read-only state for the
duration of the move. During this time, users receive a `The repository is temporarily
read-only. Please try again later.` message if they try to push new commits.
duration of the move. During this time, users receive a `The repository is temporarily read-only. Please try again later.`
message if they try to push new commits.
This API requires you to [authenticate yourself](rest/index.md#authentication) as an administrator.

View File

@ -0,0 +1,113 @@
---
stage: core platform
group: Tenant Scale
description: 'Cells: Disaster Recovery'
status: proposed
---
# Cells 1.0 Disaster Recovery
## Terms used
1. Primary Cell: GitLab.com SaaS which is the current GitLab.com deployment. A special purpose Cell that serves as a cluster-wide service in this architecture.
1. Secondary Cells: A Cell that connects to the Primary Cell to ensure cluster-wide uniqueness.
1. Global Service: A service to keep global uniqueness, manage database sequences across the cluster, and help classify which resources belong to which Cell.
1. Routing Service: The Routing Service depends on the Global Service and is for managing routing rules to different cells.
1. RTO: [Recovery Time Objective]
1. RPO: [Recovery Point Objective]
1. WAL: [Write-ahead logging]
## Goals
Cells 1.0 is the first iteration of cells where multiple Secondary Cells can be operated independently of the Primary Cell.
Though it can be operated independently, it has a dependency on the Global Service and Routing Service.
For Disaster Recovery, the Global Service might still have dependencies on the Primary Cell in Cells 1.0. [^cells-1.0]
A decision on whether or not we use Geo for Cells DR is pending in the [Using Geo for Cells 1.0 tracking issue](https://gitlab.com/gitlab-com/gl-infra/production-engineering/-/issues/25246).
This document focuses only on defining the strategy for recovering secondary Cells.
It does not cover recovering the Global Service, Routing Service, Primary Cell, or any other external service.
Disaster Recovery for Cells creates a fork in our existing recovery process because cells are provisioned with different tooling.
For example:
1. Different processes, runbooks, and tooling to recover a Cell.
1. Different RPO/RTO for primary Cell and the other cells.
Due to this, there are different goals for RPO/RTO for the Primary and Secondary Cells.
- Meet or exceed the RTO and RPO FY24 targets that have been validated for zonal outages, which are covered in the [GitLab.com Disaster Recovery Blueprint](../../disaster_recovery/index.md).
- Take into account the FY25 plans for the Primary Cell, including regional recovery and alternate region selection.
- Leverage the same DR procedure for Cells that we use for Dedicated.
### RTO/RPO Targets
NOTE:
FY25 targets have not yet been validated on the Primary Cell.
**Zonal Outages**:
| | RTO | RPO |
|--------------------------------------|-----------|-----|
| Primary Cell (current) | 2 hours | 1 hour |
| Primary Cell (FY25 Target) | <1 minute | <1 minute |
| Cells 1.0 (without the primary cell) | _unknown_ | _unknown_ |
**Regional Outages**:
| | RTO | RPO |
|--------------------------------------|-----------|-----|
| Primary Cell (current) | 96 hours | 2 hours |
| Primary Cell (FY25 Target) | 48 hours | <1 minute [^object-storage] |
| Cells 1.0 (without the primary cell) | _unknown_ | _unknown_ |
## Disaster Recovery Overview
NOTE:
The services below are taken from the [Cells 1.0 Architecture Overview].
Zonal recovery refers to a disaster, outage, or deletion that is limited in scope to a single availability zone.
The outage might affect the entire zone, or a subset of infrastructure in a zone.
Regional recovery refers to a disaster, outage, or deletion that is limited in scope to an entire region.
The outage might affect the entire region, or a subset of infrastructure that affects more than one zone.
| Service | Zonal Disaster Recovery | Estimated RTO | Estimated RPO |
| --- | --- | --- | --- |
| GitLab Rails | All services running in a cell are redundant across zones. There is no data stored for this service. | <=1 minute | not applicable |
| Gitaly Cluster | Gitaly Cluster consists of a single SPOF (single point of failure) node and remains so for Cells 1.0. It requires a restore from backup in the case of a zonal failure. | <=30 min | <=1 hr for snapshot restore until WAL is available for restore. [^blueprint-dr] |
| Redis Cluster | Redis is deployed in multiple availability zones and is capable of recovering automatically from a service interruption in a single zone. | <=1 minute | <=1 minute |
| PostgreSQL Cluster | The PostgreSQL cluster is deployed in multiple availability zones and is capable of recovering automatically from a service interruption in a single zone. A small amount of data loss might occur on failover. | <=1 minute | <=1 minute |
| Service | Regional Disaster Recovery | Estimated RTO | Estimated RPO |
| --- | --- | --- | --- |
| GitLab Rails | All services running in a cell are local to a region and require a rebuild on a regional failure. There is no data stored for this service. | <=12 hours | not applicable |
| Gitaly Cluster | Initially, Gitaly Cluster consists of a single SPOF node and remains so for Cells 1.0. It requires a rebuild in the case of a regional failure. | _Unknown_ | <=1 hr for snapshot restore until WAL is available for restore. [^blueprint-dr] |
| Redis Cluster | Redis is deployed in a single region and requires a rebuild in the case of a regional failure. In-flight jobs, session data, and cache cannot be recovered. | _Unknown_ | not applicable |
| PostgreSQL Cluster | The PostgreSQL cluster is deployed in a single region and requires a rebuild in the case of a regional failure. Recovery is from backups and WAL files. A small amount of data loss might occur on failover. | _Unknown_ | <=5 minutes |
NOTE:
For data stored in object storage in Cells, multi-region buckets are used. To restore data after accidental deletion, we rely on object versioning.
## Disaster Recovery Validation
Disaster Recovery for Cells needs to be validated through periodic restore testing.
This recovery should be done on a Cell in the Production environment.
This testing is done once a quarter and is completed by running game-days using the disaster recovery runbook.
## Risks
1. The Primary Cell is not using Dedicated for deployment and operation, whereas the Secondary Cells are. This might split our processes and runbooks and add to our RTO.
1. The current plan is to run Secondary Cells using Dedicated. The process for Disaster Recovery on Dedicated has a large number of manual steps and is not yet automated.[^dedicated-dr]
1. The Dedicated DR runbook has guidance, but is not structured in a way that can be followed by an SRE in the event of a Disaster. [^dedicated-dr-final-update]
---
[Cells 1.0 Architecture Overview]: https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/architecture/blueprints/cells/iterations/cells-1.0.md#architecture-overview
[Recovery Time Objective]: https://en.wikipedia.org/wiki/Disaster_recovery#Recovery_Time_Objective
[Recovery Point Objective]: https://en.wikipedia.org/wiki/Disaster_recovery#Recovery_Point_Objective
[Write-ahead logging]: https://en.wikipedia.org/wiki/Write-ahead_logging
[^cells-1.0]: See the [Cells 1.0 blueprint](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/architecture/blueprints/cells/iterations/cells-1.0.md)
[^blueprint-dr]: See the [DR Blueprint](https://gitlab.com/gitlab-org/gitlab/-/tree/master/doc/architecture/blueprints/disaster_recovery?ref_type=heads#current-recovery-time-objective-rto-and-recovery-point-objective-rpo-for-zonal-recovery)
[^object-storage]: On the Primary Cell and Cells 1.0, backups and data are stored on Google Object Storage, which makes no RPO guarantees for regional failure. At this time, there are no plans to use dual-region buckets, which have a 15 minute RPO guarantee.
[^dedicated-dr]: See [this tracking epic](https://gitlab.com/groups/gitlab-com/gl-infra/gitlab-dedicated/-/epics/292) for the work that was done to validate DR on Dedicated and [this issue](https://gitlab.com/gitlab-com/gl-infra/gitlab-dedicated/team/-/issues/3948) for future plans to improve the Dedicated runbooks.
[^dedicated-dr-final-update]: See [this note](https://gitlab.com/groups/gitlab-com/gl-infra/gitlab-dedicated/-/epics/292#note_1751653953) on why this is the case and why Geo is the preferred method for Disaster Recovery on Dedicated.

View File

@ -308,6 +308,7 @@ When we have a blueprint merged ideally the confidence should move to 👍 becau
| Cell Architecture and Tooling | team::Foundations | [To-Do](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/1209) | 👎 |
| Provisioning | team::Foundations | To-Do | 👎 |
| Configuration Management/Rollout | team::Foundations | To-Do | 👎 |
| Disaster Recovery | team::Production Engineering | [Blueprint](disaster_recovery.md) | 👍 |
```plantuml
@startuml

View File

@ -608,7 +608,7 @@ You might receive an error message similar to the following when using the `~lat
version qualifier to reference a component hosted by a [catalog project](#set-a-component-project-as-a-catalog-project):
```plaintext
This GitLab CI configuration is invalid: component 'gitlab.com/my-namespace/my-project/my-component@~latest' - content not found`
This GitLab CI configuration is invalid: component 'gitlab.com/my-namespace/my-project/my-component@~latest' - content not found
```
The `~latest` behavior [was updated](https://gitlab.com/gitlab-org/gitlab/-/issues/442238)

View File

@ -12,8 +12,7 @@ do not work, because a fresh Docker daemon is started with the service.
## Option 1: Run `docker login`
In [`before_script`](../yaml/index.md#before_script), run `docker
login`:
In [`before_script`](../yaml/index.md#before_script), run `docker login`:
```yaml
default:

View File

@ -571,3 +571,21 @@ which configuration file is the source of the loop or excessive included files.
In [GitLab 16.0 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/207270) self-managed users can
change the [maximum includes](../../administration/settings/continuous_integration.md#maximum-includes) value.
### `SSL_connect SYSCALL returned=5 errno=0 state=SSLv3/TLS write client hello` and other network failures
When using [`include:remote`](index.md#includeremote), GitLab tries to fetch the remote file
through HTTP(S). This process can fail because of a variety of connectivity issues.
The `SSL_connect SYSCALL returned=5 errno=0 state=SSLv3/TLS write client hello` error
happens when GitLab can't establish an HTTPS connection to the remote host. This issue
can be caused if the remote host has rate limits to prevent overloading the server
with requests.
For example, the [GitLab Pages](../../user/project/pages/index.md) server for GitLab.com
is rate limited. Repeated attempts to fetch CI/CD configuration files hosted on GitLab Pages
can cause the rate limit to be reached and cause the error. You should avoid hosting
CI/CD configuration files on a GitLab Pages site.
When possible, use [`include:project`](index.md#includeproject) to fetch configuration
files from other projects within the GitLab instance without making external HTTP(S) requests.

View File

@ -10,12 +10,15 @@ This is a list of terms that may have a general meaning but also may have a
specific meaning at GitLab. If you encounter a piece of technical jargon related
to AI that you think could benefit from being in this list, add it!
- **Adapters**: A variation on Fine Tuning. Instead of opening the model and adjusting the layer weights, new trained layers are added onto the model or hosted in an upstream standalone model. Also known as Adapter-based Models. By selectively fine-tuning these specific modules rather than the entire model, Adapters facilitate the customisation of pre-trained models for distinct tasks, requiring only a minimal increase in parameters. This method enables precise, task-specific adjustments of the model without altering its foundational structure.
- **AI Gateway**: standalone service used to give access to AI features to
non-SaaS GitLab users. This logic will be moved to Cloud Connector when that
service is ready. Eventually, the AI Gateway will be used to host endpoints that
proxy requests to AI providers, removing the need for the GitLab Rails monolith
to integrate and communicate directly with third-party LLMs.
to integrate and communicate directly with third-party Large Language Models (LLMs).
[Blueprint](../../architecture/blueprints/ai_gateway/index.md).
- **Air-Gapped Model**: A hosted model that is internal to an organisation's intranet only. In the context of GitLab AI features, this could be connected to an air-gapped GitLab instance.
- **Bring Your Own Model (BYOM)**: A third-party model to be connected to one or more GitLab Duo features. Could be an off-the-shelf Open Source (OS) model, a fine-tuned model, or a closed source model. GitLab is planning to support specific, validated BYOMs for GitLab Duo features, but does not currently support or plan to support general BYOM use for GitLab Duo features.
- **Chat Evaluation**: automated mechanism for determining the helpfulness and
accuracy of GitLab Duo Chat to various user questions. The MVC is an RSpec test
run via GitLab CI that asks a set of questions to Chat and then has a
@ -26,15 +29,15 @@ to AI that you think could benefit from being in this list, add it!
multiple GitLab deployments, instances, and cells. We use it as an umbrella term to refer to the
set of technical solutions and APIs used to make such services available to all GitLab customers.
For more information, see the [Cloud Connector architecture](../cloud_connector/architecture.md).
- **Consensus Filtering**: method for LLM evaluation where you instruct an LLM
to evaluate the output of another LLM based on the question and context that
resulted in the output. This is the method of evaluation being used for the Chat
- **Closed Source Model**: A private model fine-tuned or built from scratch by an organisation. These may be hosted as cloud services, for example ChatGPT.
- **Consensus Filtering**: Consensus filtering is a method of LLM evaluation. An LLM judge is asked to rate and compare the output of multiple LLMs to sets of prompts. This is the method of evaluation being used for the Chat
Evaluation MVC.
[Issue from Model Validation team](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/prompt-library/-/issues/91#metric-2-consensus-filtering-with-llm-based-evaluation).
- **Context**: relevant information that surrounds a data point, an event, or a
piece of information, which helps to clarify its meaning and implications.
For GitLab Duo Chat, context is the attributes of the Issue or Epic being
referenced in a user question.
- **Custom Model**: Any implementation of a GitLab Duo feature using a self-hosted model, BYOM, fine-tuned model, RAG-enhanced model, or adapter-based model.
- **Embeddings**: In the context of machine learning and large language models,
embeddings refer to a technique used to represent words, phrases, or even
entire documents as dense numerical vectors in a continuous vector space.
@ -44,29 +47,39 @@ multiple GitLab deployments, instances, and cells. We use it as an umbrella term
`embeddings` database. The embeddings search is done in Postgres using the
`vector` extension. The vertex embeddings database is updated based on the
latest version of GitLab documentation on a daily basis by running `Llm::Embedding::GitlabDocumentation::CreateEmbeddingsRecordsWorker` as a cronjob.
- **Fine Tuning**: Altering an existing model using a supervised learning process that utilizes a dataset of labeled examples to update the weights of the LLM, improving its output for specific tasks such as code completion or chat.
- **Frozen Model**: An LLM that cannot be fine-tuned (also Frozen LLM).
- **GitLab Duo**: AI-assisted features across the GitLab DevSecOps platform. These features aim to help increase velocity and solve key pain points across the software development lifecycle. See also the [GitLab Duo](../../user/ai_features.md) features page.
- **GitLab Managed Model**: An LLM that is managed by GitLab. Currently, all [GitLab Managed Models](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/2864#note_1787040242) are hosted externally and accessed through the AI Gateway. GitLab-owned API keys are used to access the models.
- **Golden Questions**: a small subset of the types of questions we think a user
should be able to ask GitLab Duo Chat. Used to generate data for Chat evaluation.
[Questions for Chat Beta](https://gitlab.com/groups/gitlab-org/-/epics/10550#what-the-user-can-ask).
- **Ground Truth**: data that is determined to be the true
output for a given input, representing the reality that the AI model aims to
learn and predict. Ground truth data is usually human-annotated.
learn and predict. Ground truth data are often human-annotated, but may also be produced from a trusted source such as an LLM that has known good output for a given use case.
- **Local Model**: An LLM running on a user's workstation. [More information](https://gitlab.com/groups/gitlab-org/-/epics/12907).
- **LLM**: A Large Language Model, or LLM, is a very large-scale neural network trained to understand and generate human-like text. For [GitLab Duo features](../../user/ai_features.md), GitLab is currently working with frozen models hosted at [Google and Anthropic](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/2864#note_1787040242).
- **Model Validation**: group within the AI-powered Stage working on the Prompt
Library and researching AI/ML models to support other use-cases for AI at GitLab.
[Team handbook section](https://handbook.gitlab.com/handbook/product/categories/features/#ai-powered-ai-model-validation-group).
Library, supporting AI Validation of GitLab Duo features, and researching AI/ML models to support other use-cases for AI at GitLab.
[Team handbook section](https://handbook.gitlab.com/handbook/product/categories/features/index.html#ai-powered-ai-model-validation-group)
- **Offline Model**: A model that runs without internet or intranet connection (for example, you are running a model on your laptop on a plane).
- **Open Source Model**: Models that are published with their source code and weights and are available for modifications and re-distribution. Examples: Llama / Llama 2, BLOOM, Falcon, Mistral, Gemma.
- **Prompt library**: The ["Prompt Library"](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/prompt-library) is a Python library that provides a CLI for testing different prompting techniques with LLMs. It enables data-driven improvements to LLM applications by facilitating hypothesis testing. Key features include the ability to manage and run dataflow pipelines using Apache Beam, and the execution of multiple evaluation experiments in a single pipeline run.
on prompts with various third-party AI Services.
[Code](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/prompt-library).
- **Prompt Registry**: stored, versioned prompts used to interact with third-party
AI Services. [Blueprint](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135872).
- **Prompt**: instructions sent to an LLM to perform certain tasks. [Prompt guidelines](prompts.md).
- **RAG Pipeline**: (Retrieval-Augmented Generation) is a mechanism used to take
- **Prompt**: Natural language instructions sent to an LLM to perform certain tasks. [Prompt guidelines](prompts.md).
- **RAG (Retrieval Augmented Generation)**: RAG provides contextual data to an LLM as part of a query to personalise results. RAG is used to inject additional context into a prompt to decrease hallucinations and improve the quality of outputs.
- **RAG Pipeline**: A mechanism used to take
an input (such as a user question) into a system, retrieve any relevant data
for that input, augment the input with additional context, and then
synthesize the information to generate a coherent, contextually relevant answer.
This design pattern is helpful in open-domain question answering with LLMs,
which is why we use this design pattern for answering questions to GitLab Duo Chat.
- **Similarity Score**: method to determine the likeness between answers produced by an LLM and the reference ground truth answers.
[Issue from Model Validation team](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/prompt-library/-/issues/91#metric-1-similarity-score-as-comparisons-for-llms).
- **Self-Hosted Model**: An LLM hosted externally to GitLab by an organisation and interacting with GitLab AI features.
- **Similarity Score**: A mathematical method to determine the likeness between answers produced by an LLM and the reference ground truth answers.
See also the [Model Validation direction page](https://about.gitlab.com/direction/ai-powered/ai_model_validation/ai_evaluation/metrics/#similarity-scores)
- **Tool**: logic that performs a specific LLM-related task; each tool has a
description and its own prompt. [How to add a new tool](duo_chat.md#adding-a-new-tool).
- **Word-Level Metrics**: method for LLM evaluation that compares aspects of

View File

@ -549,8 +549,8 @@ it does less joins and needs less filtering.
##### Use `disable_joins` for `has_one` or `has_many` `through:` relations
Sometimes a join query is caused by using `has_one ... through:` or `has_many
... through:` across tables that span the different databases. These joins
Sometimes a join query is caused by using `has_one ... through:` or `has_many ... through:`
across tables that span the different databases. These joins
sometimes can be solved by adding
[`disable_joins:true`](https://edgeguides.rubyonrails.org/active_record_multiple_databases.html#handling-associations-with-joins-across-databases).
This is a Rails feature which we

View File

@ -427,8 +427,8 @@ For example:
CREATE INDEX CONCURRENTLY some_index ON users (email) WHERE id < 100
```
This index would only index the `email` value of rows that match `WHERE id <
100`. We can use partial indexes to change our Twitter index to the following:
This index would only index the `email` value of rows that match `WHERE id < 100`.
We can use partial indexes to change our Twitter index to the following:
```sql
CREATE INDEX CONCURRENTLY twitter_test ON users (twitter) WHERE twitter != '';

View File

@ -417,10 +417,10 @@ Therefore, you do not need to add `aria-hidden="true"` to `GlIcon`, as this is r
```html
<!-- unnecessary — gl-icon hides icons from screen readers by default -->
<gl-icon name="rocket" aria-hidden="true" />`
<gl-icon name="rocket" aria-hidden="true" />
<!-- good -->
<gl-icon name="rocket" />`
<gl-icon name="rocket" />
```
### Icons that convey information
@ -433,10 +433,10 @@ Icons that convey information must have an accessible name so that the informati
```html
<!-- bad -->
<gl-icon name="eye-slash" />`
<gl-icon name="eye-slash" />
<!-- good -->
<gl-icon name="eye-slash" :aria-label="__('Confidential issue')" />`
<gl-icon name="eye-slash" :aria-label="__('Confidential issue')" />
```
### Icons that are clickable

View File

@ -85,7 +85,7 @@ All the `GitlabUploader` derived classes should comply with this path segment sc
| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
| `CarrierWave.root` | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment` | `CarrierWave::Uploader#filename` |
| | `CarrierWave::Uploader#store_dir` | |
| | | `FileUploader#upload_path |
| | | `FileUploader#upload_path` |
| ObjectStore::Concern (store = remote)
| ----------------------- + ------------------------- + ----------------------------------- + -------------------------------- |
@ -93,7 +93,7 @@ All the `GitlabUploader` derived classes should comply with this path segment sc
| ----------------------- + ------------------------- + ----------------------------------- + -------------------------------- |
| `#fog_dir` | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment` | `CarrierWave::Uploader#filename` |
| | | `ObjectStorage::Concern#store_dir` | |
| | | `ObjectStorage::Concern#upload_path |
| | | `ObjectStorage::Concern#upload_path` |
```
The `RecordsUploads::Concern` concern creates an `Upload` entry for every file stored by a `GitlabUploader` persisting the dynamic parts of the path using

View File

@ -77,10 +77,9 @@ Prior to Go 1.12, the process for fetching a package was as follows:
1. Scan the response for the `go-import` meta tag.
1. Fetch the repository indicated by the meta tag using the indicated VCS.
The meta tag should have the form `<meta name="go-import" content="{prefix}
{vcs} {url}">`. For example, `gitlab.com/my/project git
https://gitlab.com/my/project.git` indicates that packages beginning with
`gitlab.com/my/project` should be fetched from
The meta tag should have the form `<meta name="go-import" content="{prefix} {vcs} {url}">`.
For example, `gitlab.com/my/project git https://gitlab.com/my/project.git` indicates
that packages beginning with `gitlab.com/my/project` should be fetched from
`https://gitlab.com/my/project.git` using Git.
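As an aside, you can observe this discovery response yourself; the `go` tool appends `?go-get=1` when requesting the package path (illustrative sketch):

```shell
# Request the import-path discovery page and show the go-import meta tag.
curl --silent "https://gitlab.com/my/project?go-get=1" | grep "go-import"
```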
## Fetching Modules

View File

@ -59,9 +59,9 @@ On rows where `json.event` is `Failed Attempt`, you can find valuable debugging
To view Telesign status updates logs for SMS sent to a user, query the GitLab production logs with:
```plaintext
json.message: "IdentityVerification::Phone" AND json.event: "Telesign transaction status update" AND json.username:<username>`
```
```plaintext
json.message: "IdentityVerification::Phone" AND json.event: "Telesign transaction status update" AND json.username:<username>
```
Status update logs include the following fields:

View File

@ -189,8 +189,8 @@ sequenceDiagram
appropriate Gitaly server.
1. The Gitaly server calls `git archive <ref>` to begin generating
the Git archive on-the-fly. If the `include_lfs_blobs` flag is enabled,
Gitaly enables a custom LFS smudge filter via the `-c
filter.lfs.smudge=/path/to/gitaly-lfs-smudge` Git option.
Gitaly enables a custom LFS smudge filter with the `-c filter.lfs.smudge=/path/to/gitaly-lfs-smudge`
Git option.
1. When `git` identifies a possible LFS pointer using the
`.gitattributes` file, `git` calls `gitaly-lfs-smudge` and provides the
LFS pointer via the standard input. Gitaly provides `GL_PROJECT_PATH`

View File

@ -266,8 +266,8 @@ This can be done via `pkill -USR2 puma:`. The `:` distinguishes between `puma
4.3.3.gitlab.2 ...` (the master process) from `puma: cluster worker 0: ...` (the
worker processes), selecting the latter.
For Sidekiq, the signal can be sent to the `sidekiq-cluster` process via `pkill
-USR2 bin/sidekiq-cluster`, which forwards the signal to all Sidekiq
For Sidekiq, the signal can be sent to the `sidekiq-cluster` process with
`pkill -USR2 bin/sidekiq-cluster` which forwards the signal to all Sidekiq
children. Alternatively, you can also select a specific PID of interest.
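Put together as a quick reference (a sketch of the two commands described above):

```shell
# Send SIGUSR2 to the Puma worker processes (the trailing ':' excludes the master).
pkill -USR2 puma:

# Send SIGUSR2 to sidekiq-cluster, which forwards it to all Sidekiq children.
pkill -USR2 bin/sidekiq-cluster
```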
### Reading a Stackprof profile

View File

@ -174,9 +174,8 @@ If you're not sure what queue a worker uses,
you can find it using `SomeWorker.queue`. There is almost never a reason to
manually override the queue name using `sidekiq_options queue: :some_queue`.
After adding a new worker, run `bin/rake
gitlab:sidekiq:all_queues_yml:generate` to regenerate
`app/workers/all_queues.yml` or `ee/app/workers/all_queues.yml` so that
After adding a new worker, run `bin/rake gitlab:sidekiq:all_queues_yml:generate`
to regenerate `app/workers/all_queues.yml` or `ee/app/workers/all_queues.yml` so that
it can be picked up by
[`sidekiq-cluster`](../../administration/sidekiq/extra_sidekiq_processes.md)
in installations that don't use routing rules. For more information about potential changes,

View File

@ -90,7 +90,7 @@ If the query is valid, the result contains debug information about the user's se
To check if a user failed to sign in because the ArkoseLabs challenge was not solved, [query the GitLab production logs](https://log.gprd.gitlab.net/goto/b97c8a80-935a-11ed-85ed-e7557b0a598c) with the following KQL:
```plaintext
KQL: json.message:"Challenge was not solved" AND json.username:replace_username_here`
KQL: json.message:"Challenge was not solved" AND json.username:replace_username_here
```
## Allowlists

View File

@ -19,7 +19,7 @@ When you use Google Chrome to sign in to GitLab with Kerberos, you must enter yo
If you do not enter your full username, the sign-in fails. Check the logs to see the following event message as evidence of this sign-in failure:
```plain
"message":"OmniauthKerberosController: failed to process Negotiate/Kerberos authentication: gss_accept_sec_context did not return GSS_S_COMPLETE: An unsupported mechanism was requested\nUnknown error"`.
"message":"OmniauthKerberosController: failed to process Negotiate/Kerberos authentication: gss_accept_sec_context did not return GSS_S_COMPLETE: An unsupported mechanism was requested\nUnknown error".
```
## Test connectivity between the GitLab and Kerberos servers

View File

@ -557,7 +557,7 @@ There are two methods to update the `extern_uid`:
- Using the [Rails console](../administration/operations/rails_console.md):
```ruby
Identity.where(extern_uid: 'old-id').update!(extern_uid: 'new-id')`
Identity.where(extern_uid: 'old-id').update!(extern_uid: 'new-id')
```
## Known issues

View File

@ -298,7 +298,7 @@ Given that non-Ruby parsers parse IDs as Ruby Objects, the [naming conventions](
Examples of invalid IDs:
- IDs that start with a number
- IDs that have special characters (-, !, $, @, `, =, <, >, ;, :)
- IDs that have special characters (`-`, `!`, `$`, `@`, `` ` ``, `=`, `<`, `>`, `;`, `:`)
#### ActiveRecord::AssociationTypeMismatch: Model expected, got ... which is an instance of String

View File

@ -459,8 +459,7 @@ DETAILS:
`MigrateMergeRequestDiffCommitUsers` background migration jobs are
foregrounded in GitLab 14.5, and may take a long time to complete.
You can check the count of pending jobs for
`MigrateMergeRequestDiffCommitUsers` by using the PostgreSQL console (or `sudo
gitlab-psql`):
`MigrateMergeRequestDiffCommitUsers` by using the PostgreSQL console (or `sudo gitlab-psql`):
```sql
select status, count(*) from background_migration_jobs

View File

@ -249,8 +249,7 @@ impersonation credentials in the following way:
- Project `group1/group1-1/project1` has ID 150.
- Job running in the `prod` environment, which has the `production` environment tier.
Group list would be `[gitlab:ci_job, gitlab:group:23, gitlab:group_env_tier:23:production, gitlab:group:25,
gitlab:group_env_tier:25:production, gitlab:project:150, gitlab:project_env:150:prod, gitlab:project_env_tier:150:production]`.
Group list would be `[gitlab:ci_job, gitlab:group:23, gitlab:group_env_tier:23:production, gitlab:group:25, gitlab:group_env_tier:25:production, gitlab:project:150, gitlab:project_env:150:prod, gitlab:project_env_tier:150:production]`.
- `Extra` carries extra information about the request. The following properties are set on the impersonated identity:

View File

@ -108,8 +108,7 @@ in case the customer has [configured SAML Group Sync](group_sync.md):
In the relevant log entry, the:
- `json.args` are in the form `<userID>, <group ID>,
[group link ID 1, group link ID 2, ..., group link ID N]`.
- `json.args` are in the form `<userID>, <group ID>, [group link ID 1, group link ID 2, ..., group link ID N]`.
- `json.extra.group_saml_group_sync_worker.stats.*` fields show how many times
this run of group sync `added`, `removed` or `changed` the user's membership.

View File

@ -236,8 +236,8 @@ invalid JSON primitives (such as `.`). Removing or URL encoding these characters
### `(Field) can't be blank` sync error
When checking the Audit Events for the provisioning, you sometimes see a `Namespace can't be blank, Name can't be blank,
and User can't be blank.` error.
When checking the Audit Events for the provisioning, you sometimes see a
`Namespace can't be blank, Name can't be blank, and User can't be blank.` error.
This error can occur because not all required fields (such as first name and last name) are present for all users
being mapped.

View File

@ -174,7 +174,7 @@ If you don't set a default user or remote, you can still include the user and
remote in your commands:
```shell
`CONAN_LOGIN_USERNAME=<gitlab_username or deploy_token_username> CONAN_PASSWORD=<personal_access_token or deploy_token> <conan command> --remote=gitlab
CONAN_LOGIN_USERNAME=<gitlab_username or deploy_token_username> CONAN_PASSWORD=<personal_access_token or deploy_token> <conan command> --remote=gitlab
```
## Publish a Conan package

View File

@ -32,8 +32,8 @@ Prerequisites:
- The `dpkg-deb` binary must be installed on the GitLab instance.
This binary is usually provided by the [`dpkg` package](https://wiki.debian.org/Teams/Dpkg/Downstream),
installed by default on Debian and derivatives.
- Support for compression algorithm ZStandard requires version `dpkg >=
1.21.18` from Debian 12 Bookworm or `dpkg >= 1.19.0.5ubuntu2` from Ubuntu
- Support for compression algorithm ZStandard requires version `dpkg >= 1.21.18`
from Debian 12 Bookworm or `dpkg >= 1.19.0.5ubuntu2` from Ubuntu
18.04 Bionic Beaver.
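A quick way to confirm that the installed `dpkg` meets the Debian floor quoted above is a sketch like the following, using standard `dpkg` tooling; on Ubuntu, compare against `1.19.0.5ubuntu2` instead:

```shell
# Compare the installed dpkg version against the minimum Debian version
# that can handle ZStandard-compressed packages.
installed="$(dpkg-query -W -f='${Version}' dpkg)"
if dpkg --compare-versions "$installed" ge 1.21.18; then
  echo "dpkg $installed supports ZStandard"
else
  echo "dpkg $installed is too old for ZStandard"
fi
```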
## Enable the Debian API

View File

@ -166,9 +166,9 @@ job. By default, GitLab ensures that:
- Your repository is clean.
[`GIT_CLEAN_FLAGS`](../../../../ci/runners/configure_runners.md#git-clean-flags) is disabled when set
to `none`. On very big repositories, this might be desired because `git
clean` is disk I/O intensive. Controlling that with `GIT_CLEAN_FLAGS: -ffdx
-e .build/` (for example) allows you to control and disable removal of some
to `none`. On very big repositories, this might be desired because `git clean`
is disk I/O intensive. Controlling that with `GIT_CLEAN_FLAGS: -ffdx -e .build/`
(for example) allows you to control and disable removal of some
directories in the worktree between subsequent runs, which can speed-up
the incremental builds. This has the biggest effect if you re-use existing
machines and have an existing worktree that you can re-use for builds.
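In other words, with `GIT_CLEAN_FLAGS: -ffdx -e .build/` the clean step the runner performs in the existing worktree is roughly equivalent to running:

```shell
# Force-remove untracked files and directories (including ignored ones
# and nested repositories), but keep the .build/ directory between jobs.
git clean -ffdx -e .build/
```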

View File

@ -8411,7 +8411,7 @@
<p data-sourcepos="1:1-1:36" dir="auto">This math is inline <code data-sourcepos="1:23-1:33" data-math-style="inline" class="code math js-render-math">a^2+b^2=c^2</code>.</p>
<p data-sourcepos="3:1-3:27" dir="auto">This is on a separate line:</p>
<div class="gl-relative markdown-code-block js-markdown-code">
<pre data-sourcepos="5:1-7:3" data-canonical-lang="math" data-math-style="display" class="js-render-math code highlight js-syntax-highlight language-math" v-pre="true"><code><span id="LC1" class="line" lang="math">a^2+b^2=c^2</span></code></pre>
<pre data-math-style="display" data-sourcepos="5:1-7:3" data-canonical-lang="math" class="js-render-math code highlight js-syntax-highlight language-math" v-pre="true"><code><span id="LC1" class="line" lang="math">a^2+b^2=c^2</span></code></pre>
<copy-code></copy-code>
</div>
wysiwyg: |-

View File

@ -27,7 +27,6 @@ module QA
view 'app/assets/javascripts/ci/pipeline_details/graph/components/job_group_dropdown.vue' do
element 'job-dropdown-container'
element 'jobs-dropdown-menu'
end
view 'app/assets/javascripts/ci/pipeline_details/graph/components/stage_column_component.vue' do
@ -121,7 +120,7 @@ module QA
end
def has_skipped_job_in_group?
within_element('jobs-dropdown-menu') do
within_element('disclosure-content') do
all_elements('job-with-link', minimum: 1).all? do
has_selector?('.ci-status-icon-skipped')
end
@ -129,7 +128,7 @@ module QA
end
def has_no_skipped_job_in_group?
within_element('jobs-dropdown-menu') do
within_element('disclosure-content') do
all_elements('job-with-link', minimum: 1).all? do
has_no_selector?('.ci-status-icon-skipped')
end

View File

@ -6,5 +6,9 @@ FactoryBot.define do
association :created_by_user, factory: :user
sequence(:name) { |n| "agent-#{n}" }
trait :in_group do
project factory: [:project, :in_group]
end
end
end

View File

@ -97,8 +97,8 @@ RSpec.describe 'Merge request < User sees mini pipeline graph', :js, feature_cat
describe 'build list build item' do
let(:build_item) do
find('.pipeline-job-item')
first('.pipeline-job-item')
find('.ci-job-component')
first('.ci-job-component')
end
it 'visits the build page when clicked' do

View File

@ -620,7 +620,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :continuous_integration do
wait_for_requests
within_testid('mini-pipeline-graph-dropdown') do
build_element = page.find('.pipeline-job-item [data-testid="job-name"]')
build_element = page.find('.ci-job-component [data-testid="job-name"]')
expect(build_element['title']).to eq('build - failed - (unknown failure)')
end
end

View File

@ -1,20 +1,41 @@
import { shallowMount, mount } from '@vue/test-utils';
import { GlDisclosureDropdown, GlDisclosureDropdownItem } from '@gitlab/ui';
import JobGroupDropdown from '~/ci/pipeline_details/graph/components/job_group_dropdown.vue';
import JobItem from '~/ci/pipeline_details/graph/components/job_item.vue';
import { SINGLE_JOB } from '~/ci/pipeline_details/graph/constants';
describe('job group dropdown component', () => {
const group = {
name: 'rspec:linux',
size: 2,
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
tooltip: 'passed',
group: 'success',
detailsPath: '/root/ci-mock/builds/4256',
hasDetails: true,
action: {
icon: 'retry',
title: 'Retry',
path: '/root/ci-mock/builds/4256/retry',
method: 'post',
},
},
jobs: [
{
id: 4256,
name: '<img src=x onerror=alert(document.domain)>',
name: 'rspec:linux 1/2',
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
tooltip: 'passed',
group: 'success',
details_path: '/root/ci-mock/builds/4256',
has_details: true,
detailsPath: '/root/ci-mock/builds/4256',
hasDetails: true,
action: {
icon: 'retry',
title: 'Retry',
@ -25,7 +46,7 @@ describe('job group dropdown component', () => {
},
{
id: 4299,
name: 'test',
name: 'rspec:linux 2/2',
status: {
icon: 'status_success',
text: 'passed',
@ -43,42 +64,92 @@ describe('job group dropdown component', () => {
},
},
],
name: 'rspec:linux',
size: 2,
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
tooltip: 'passed',
group: 'success',
details_path: '/root/ci-mock/builds/4256',
has_details: true,
action: {
icon: 'retry',
title: 'Retry',
path: '/root/ci-mock/builds/4256/retry',
method: 'post',
},
},
};
let wrapper;
const findButton = () => wrapper.find('button');
const findJobItem = () => wrapper.findComponent(JobItem);
const findTriggerButton = () => wrapper.find('button');
const findDisclosureDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
const findDisclosureDropdownItems = () => wrapper.findAllComponents(GlDisclosureDropdownItem);
const createComponent = ({ mountFn = shallowMount }) => {
wrapper = mountFn(JobGroupDropdown, { propsData: { group } });
const createComponent = ({ props, mountFn = shallowMount } = {}) => {
wrapper = mountFn(JobGroupDropdown, {
propsData: {
group,
...props,
},
});
};
beforeEach(() => {
createComponent({ mountFn: mount });
});
it('renders button with group name and size', () => {
expect(findButton().text()).toContain(group.name);
expect(findButton().text()).toContain(group.size.toString());
});
it('renders dropdown with jobs', () => {
expect(wrapper.findAll('.scrollable-menu>ul>li').length).toBe(group.jobs.length);
createComponent({ mountFn: mount });
expect(wrapper.findAll('[data-testid="disclosure-content"] > li').length).toBe(
group.jobs.length,
);
});
it('renders dropdown', () => {
createComponent();
expect(findDisclosureDropdown().props()).toMatchObject({
block: true,
placement: 'right-start',
});
});
it('renders trigger button with group name and size', () => {
createComponent({ mountFn: mount });
expect(findJobItem().text().trim()).toBe(group.name);
expect(findJobItem().props()).toMatchObject({
type: 'job_dropdown',
groupTooltip: 'rspec:linux - passed',
job: group,
});
expect(findTriggerButton().text()).toContain(group.size.toString());
});
it('renders stage name when provided', () => {
createComponent({
props: {
stageName: 'my-stage-name',
},
mountFn: mount,
});
expect(findJobItem().props()).toMatchObject({
stageName: 'my-stage-name',
});
});
it('renders parallel jobs in group', () => {
createComponent({ mountFn: mount });
const [item1, item2] = findDisclosureDropdownItems().wrappers;
expect(findDisclosureDropdownItems()).toHaveLength(2);
expect(item1.props('item')).toEqual({
text: group.jobs[0].name,
href: group.jobs[0].status.detailsPath,
});
expect(item1.findComponent(JobItem).props()).toMatchObject({
isLink: false,
job: group.jobs[0],
type: SINGLE_JOB,
cssClassJobName: 'gl-p-3',
});
expect(item2.props('item')).toEqual({
text: group.jobs[1].name,
href: group.jobs[1].status.detailsPath,
});
expect(item2.findComponent(JobItem).props()).toMatchObject({
isLink: false,
job: group.jobs[1],
type: SINGLE_JOB,
cssClassJobName: 'gl-p-3',
});
});
});

View File

@ -41,27 +41,16 @@ describe('pipeline graph job item', () => {
const myCustomClass1 = 'my-class-1';
const myCustomClass2 = 'my-class-2';
const defaultProps = {
job: mockJob,
};
const createWrapper = ({ props, data, mountFn = mountExtended, mocks = {} } = {}) => {
const createWrapper = ({ mountFn = mountExtended, props, ...options } = {}) => {
wrapper = mountFn(JobItem, {
data() {
return {
...data,
};
},
propsData: {
...defaultProps,
job: mockJob,
...props,
},
mocks: {
...mocks,
},
stubs: {
CiIcon,
},
...options,
});
};
@ -121,6 +110,25 @@ describe('pipeline graph job item', () => {
});
});
describe('name when is-link is false', () => {
beforeEach(() => {
createWrapper({
props: {
isLink: false,
},
});
});
it('should render status and name', () => {
expect(findJobCiIcon().exists()).toBe(true);
expect(findJobCiIcon().find('[data-testid="status_success_borderless-icon"]').exists()).toBe(
true,
);
expect(wrapper.text()).toBe(mockJob.name);
});
});
describe('CiIcon', () => {
it('should not render a link', () => {
createWrapper();
@ -492,8 +500,10 @@ describe('pipeline graph job item', () => {
// or emit an event directly. We therefore set the data property
// as it would be if the box was checked.
createWrapper({
data: {
currentSkipModalValue: true,
data() {
return {
currentSkipModalValue: true,
};
},
props: {
skipRetryModal: false,

View File

@ -18,7 +18,7 @@ RSpec.describe Banzai::Filter::MathFilter, feature_category: :team_planning do
end
shared_examples 'display math' do
let_it_be(:template_prefix_with_pre) { '<pre data-canonical-lang="math" data-math-style="display" class="js-render-math"><code>' }
let_it_be(:template_prefix_with_pre) { '<pre data-math-style="display" data-canonical-lang="math" class="js-render-math"><code>' }
let_it_be(:template_prefix_with_code) { '<code data-math-style="display" class="code math js-render-math">' }
let(:use_pre_tags) { false }

View File

@ -238,10 +238,6 @@ RSpec.describe Deployments::LinkMergeRequestsService, feature_category: :continu
)
end
before do
project.update!(merge_requests_ff_only_enabled: true)
end
it "links merge requests by the HEAD commit sha of the MR's diff" do
link_merge_requests_for_range