Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-04-30 09:08:38 +00:00
parent efe8d31390
commit ce9d6d8a63
69 changed files with 793 additions and 376 deletions

View File

@ -18,7 +18,7 @@ variables:
# Helm chart ref used by test-on-cng pipeline
GITLAB_HELM_CHART_REF: "074bb942c9c65613c2576ce418f59b8577fff37c"
# Specific ref for cng-mirror project to trigger builds for
GITLAB_CNG_MIRROR_REF: "951f0f683ca1e1bde564619619809660de68feb3"
GITLAB_CNG_MIRROR_REF: "f58c446dbb99b0e447190d22a61f5cf0cad69c97"
# Makes sure some of the common scripts from pipeline-common use bundler to execute commands
RUN_WITH_BUNDLE: "true"
# Makes sure reporting script defined in .gitlab-qa-report from pipeline-common is executed from correct folder

View File

@ -11,6 +11,16 @@ export const FEED_TOKEN = 'feedToken';
export const INCOMING_EMAIL_TOKEN = 'incomingEmailToken';
export const STATIC_OBJECT_TOKEN = 'staticObjectToken';
export const DEFAULT_FILTER = [
{
type: 'state',
value: {
data: 'active',
operator: '=',
},
},
];
export const FILTER_OPTIONS = [
{
icon: 'status',

View File

@ -0,0 +1,106 @@
import { queryToObject } from '~/lib/utils/url_utility';
import {
OPERATORS_BEFORE,
OPERATORS_AFTER,
} from '~/vue_shared/components/filtered_search_bar/constants';
import {
DEFAULT_FILTER,
DEFAULT_SORT,
FILTER_OPTIONS,
FILTER_OPTIONS_CREDENTIALS_INVENTORY,
SORT_OPTIONS,
} from './constants';
/**
* Parses filters as provided in the URL and returns a set of tokens.
* For example, filters like `{ created_before: '2022-12-31' }` (as parsed from the URL) will return an array of
* tokens like `[{ type: 'created', value: { data: '2022-12-31', operator: '<' }}]`.
* @param {Object<string, string>} filters
* @param {string} [search]
* @param {boolean} isCredentialsInventory
*/
export function initializeFilters(filters, search, isCredentialsInventory) {
const tokens = [];
const filterOptions = isCredentialsInventory
? FILTER_OPTIONS_CREDENTIALS_INVENTORY
: FILTER_OPTIONS;
for (const [key, value] of Object.entries(filters)) {
const isBefore = key.endsWith('_before');
const isAfter = key.endsWith('_after');
if (isBefore || isAfter) {
tokens.push({
type: key.replace(/_(before|after)$/, ''),
value: {
data: value,
operator: isBefore ? OPERATORS_BEFORE[0].value : OPERATORS_AFTER[0].value,
},
});
} else {
try {
const { operators } = filterOptions.find(({ options }) =>
options.some((option) => option.value === value),
);
tokens.push({
type: key,
value: {
data: value,
operator: operators[0].value,
},
});
} catch {
// Unknown token
}
}
}
if (search) {
tokens.push(search);
}
if (!isCredentialsInventory && tokens.length === 0) {
return DEFAULT_FILTER;
}
return tokens;
}
/**
* Parses the sort option as provided in the URL and returns a proper structure.
* For example, a sort value like `created_asc` will return `{ value: 'created', isAsc: true}`.
* @param {string} [sort]
*/
export function initializeSort(sort) {
let sorting = DEFAULT_SORT;
const sortOption = SORT_OPTIONS.find((item) => [item.sort.desc, item.sort.asc].includes(sort));
if (sortOption) {
sorting = {
value: sortOption.value,
isAsc: sortOption.sort.asc === sort,
};
}
return sorting;
}
/**
* Parses the query params from the URL and returns an object with filters/tokens, page, and sorting.
* @param {boolean} [isCredentialsInventory]
* @param {string} [query] - document.location.search
*/
export function initializeValuesFromQuery(
isCredentialsInventory = false,
query = document.location.search,
) {
const { page, search, sort, ...filters } = queryToObject(query);
const tokens = initializeFilters(filters, search, isCredentialsInventory);
const sorting = initializeSort(sort);
return {
...(isCredentialsInventory ? { tokens } : { filters: tokens }),
page: parseInt(page, 10) || 1,
sorting,
};
}
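A minimal usage sketch of the new helper, using a query string consistent with the spec added later in this commit; the values shown are illustrative only.

```javascript
// Sketch only — the query string and expected values mirror the spec cases below.
import { initializeValuesFromQuery } from '~/access_tokens/utils';

const { filters, page, sorting } = initializeValuesFromQuery(
  false,
  '?page=2&revoked=true&sort=expires_asc',
);
// filters => [{ type: 'revoked', value: { data: 'true', operator: '=' } }]
// page    => 2
// sorting => { value: 'expires', isAsc: true }
```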

View File

@ -1,9 +1,9 @@
import Vue from 'vue';
export const lineState = Vue.observable({
currentLineNumber: null,
export const hashState = Vue.observable({
currentHash: window.location.hash,
});
export const updateLineNumber = (lineNumber) => {
lineState.currentLineNumber = lineNumber;
hashState.currentHash = lineNumber;
};

View File

@ -1,7 +1,8 @@
<script>
import { GlFilteredSearch, GlSorting } from '@gitlab/ui';
import { SORT_OPTIONS, FILTER_OPTIONS_CREDENTIALS_INVENTORY } from '~/access_tokens/constants';
import { initializeValuesFromQuery, goTo } from '../utils';
import { initializeValuesFromQuery } from '~/access_tokens/utils';
import { goTo } from '../utils';
export default {
components: {
@ -9,7 +10,7 @@ export default {
GlSorting,
},
data() {
const { sorting, tokens } = initializeValuesFromQuery();
const { sorting, tokens } = initializeValuesFromQuery(true);
return {
sorting,
tokens,

View File

@ -1,86 +1,5 @@
import { queryToObject, setUrlParams, visitUrl } from '~/lib/utils/url_utility';
import {
OPERATORS_BEFORE,
OPERATORS_AFTER,
} from '~/vue_shared/components/filtered_search_bar/constants';
import {
DEFAULT_SORT,
SORT_OPTIONS,
FILTER_OPTIONS_CREDENTIALS_INVENTORY,
} from '~/access_tokens/constants';
/**
* @param {Object<string, string>} filters
* @param {string} [search]
*/
function initializeFilters(filters, search) {
const tokens = [];
for (const [key, value] of Object.entries(filters)) {
const isBefore = key.endsWith('_before');
const isAfter = key.endsWith('_after');
if (isBefore || isAfter) {
tokens.push({
type: key.replace(/_(before|after)$/, ''),
value: {
data: value,
operator: isBefore ? OPERATORS_BEFORE[0].value : OPERATORS_AFTER[0].value,
},
});
} else {
try {
const { operators } = FILTER_OPTIONS_CREDENTIALS_INVENTORY.find(({ options }) =>
options.some((option) => option.value === value),
);
tokens.push({
type: key,
value: {
data: value,
operator: operators[0].value,
},
});
} catch {
// Unknown token
}
}
}
if (search) {
tokens.push(search);
}
return tokens;
}
/**
* @param {string} [sort]
*/
function initializeSort(sort) {
let sorting = DEFAULT_SORT;
const sortOption = SORT_OPTIONS.find((item) => [item.sort.desc, item.sort.asc].includes(sort));
if (sortOption) {
sorting = {
value: sortOption.value,
isAsc: sortOption.sort.asc === sort,
};
}
return sorting;
}
/**
* Initialize tokens and sort based on the URL parameters
* @param {string} query - document.location.search
*/
export function initializeValuesFromQuery(query = document.location.search) {
const { sort, search, ...filters } = queryToObject(query);
const sorting = initializeSort(sort);
const tokens = initializeFilters(filters, search);
return { sorting, tokens };
}
import { setUrlParams, visitUrl } from '~/lib/utils/url_utility';
import { SORT_OPTIONS } from '~/access_tokens/constants';
/**
* @param {string} sortValue

View File

@ -369,7 +369,11 @@ export default {
:show-web-ide-button="showWebIdeButton"
:show-gitpod-button="isGitpodEnabledForInstance"
/>
<repository-overflow-menu v-if="comparePath" />
<repository-overflow-menu
:full-path="projectPath"
:path="currentPath"
:current-ref="currentRef"
/>
</div>
<template v-else-if="!isReadmeView">
<code-dropdown
@ -381,25 +385,27 @@ export default {
:current-path="currentPath"
:directory-download-links="downloadLinks"
/>
<div class="gl-flex gl-items-stretch gl-gap-3 sm:gl-hidden">
<source-code-download-dropdown
:download-links="downloadLinks"
:download-artifacts="downloadArtifacts"
<div class="gl-flex gl-w-full gl-gap-3 sm:gl-inline-block sm:gl-w-auto">
<div class="gl-flex gl-w-full gl-items-stretch gl-gap-3 sm:gl-hidden">
<source-code-download-dropdown
:download-links="downloadLinks"
:download-artifacts="downloadArtifacts"
/>
<clone-code-dropdown
class="mobile-git-clone js-git-clone-holder !gl-w-full"
:ssh-url="sshUrl"
:http-url="httpUrl"
:kerberos-url="kerberosUrl"
/>
</div>
<repository-overflow-menu
:full-path="projectPath"
:path="currentPath"
:current-ref="currentRef"
/>
<clone-code-dropdown
class="mobile-git-clone js-git-clone-holder !gl-w-full"
:ssh-url="sshUrl"
:http-url="httpUrl"
:kerberos-url="kerberosUrl"
/>
<repository-overflow-menu v-if="comparePath" />
</div>
</template>
</div>
<repository-overflow-menu
v-if="comparePath && !showCompactCodeDropdown"
class="gl-hidden sm:gl-inline-flex"
/>
</div>
<!-- Blob controls -->

View File

@ -6,7 +6,7 @@ import { keysFor, PROJECT_FILES_GO_TO_PERMALINK } from '~/behaviors/shortcuts/ke
import { Mousetrap } from '~/lib/mousetrap';
import { shouldDisableShortcuts } from '~/behaviors/shortcuts/shortcuts_toggle';
import { getBaseURL, relativePathToAbsolute } from '~/lib/utils/url_utility';
import { lineState } from '~/blob/state';
import { hashState } from '~/blob/state';
import { getPageParamValue, getPageSearchString } from '~/blob/utils';
Vue.use(GlToast);
@ -21,6 +21,11 @@ export default {
required: true,
},
},
data() {
return {
mousetrap: null,
};
},
computed: {
permalinkShortcutKey() {
return keysFor(PROJECT_FILES_GO_TO_PERMALINK)[0];
@ -30,19 +35,23 @@ export default {
},
absolutePermalinkPath() {
const baseAbsolutePath = relativePathToAbsolute(this.permalinkPath, getBaseURL());
if (lineState.currentLineNumber) {
const page = getPageParamValue(lineState.currentLineNumber);
if (hashState.currentHash) {
const page = getPageParamValue(hashState.currentHash);
const searchString = getPageSearchString(baseAbsolutePath, page);
return `${baseAbsolutePath}${searchString}#L${lineState.currentLineNumber}`;
if (Number.isNaN(Number(hashState.currentHash))) {
return `${baseAbsolutePath}${searchString}${hashState.currentHash}`;
}
return `${baseAbsolutePath}${searchString}#L${hashState.currentHash}`;
}
return baseAbsolutePath;
},
},
mounted() {
Mousetrap.bind(keysFor(PROJECT_FILES_GO_TO_PERMALINK), this.triggerCopyPermalink);
this.mousetrap = new Mousetrap();
this.mousetrap.bind(keysFor(PROJECT_FILES_GO_TO_PERMALINK), this.triggerCopyPermalink);
},
beforeDestroy() {
Mousetrap.unbind(keysFor(PROJECT_FILES_GO_TO_PERMALINK));
this.mousetrap.unbind(keysFor(PROJECT_FILES_GO_TO_PERMALINK));
},
methods: {
triggerCopyPermalink() {

View File

@ -1,6 +1,10 @@
<script>
import { GlDisclosureDropdown, GlDisclosureDropdownItem, GlTooltipDirective } from '@gitlab/ui';
import { __ } from '~/locale';
import permalinkPathQuery from '~/repository/queries/permalink_path.query.graphql';
import { logError } from '~/lib/logger';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import PermalinkDropdownItem from './permalink_dropdown_item.vue';
export const i18n = {
dropdownLabel: __('Actions'),
@ -12,11 +16,48 @@ export default {
components: {
GlDisclosureDropdown,
GlDisclosureDropdownItem,
PermalinkDropdownItem,
},
directives: {
GlTooltipDirective,
},
inject: ['comparePath'],
props: {
fullPath: {
type: String,
required: true,
},
path: {
type: String,
required: false,
default: '',
},
currentRef: {
type: String,
required: true,
},
},
apollo: {
permalinkPath: {
query: permalinkPathQuery,
variables() {
return this.queryVariables;
},
update(data) {
const result = data?.project?.repository?.paginatedTree?.nodes[0]?.permalinkPath;
return result;
},
error(error) {
logError(`Failed to fetch permalink. See exception details for more information.`, error);
Sentry.captureException(error);
},
},
},
data() {
return {
permalinkPath: '',
};
},
computed: {
compareItem() {
return {
@ -28,6 +69,21 @@ export default {
},
};
},
queryVariables() {
return {
fullPath: this.fullPath,
path: this.path,
ref: this.currentRef,
};
},
},
watch: {
queryVariables: {
handler() {
this.$apollo.queries.permalinkPath.refetch();
},
deep: true,
},
},
};
</script>
@ -42,6 +98,7 @@ export default {
:toggle-text="$options.i18n.dropdownLabel"
text-sr-only
>
<permalink-dropdown-item v-if="permalinkPath" :permalink-path="permalinkPath" />
<gl-disclosure-dropdown-item v-if="comparePath" :item="compareItem" class="shortcuts-compare" />
</gl-disclosure-dropdown>
</template>

View File

@ -0,0 +1,13 @@
query getPermalinkPath($fullPath: ID!, $path: String!, $ref: String!) {
project(fullPath: $fullPath) {
id
repository {
paginatedTree(path: $path, ref: $ref) {
nodes {
__typename
permalinkPath
}
}
}
}
}
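For context, a sketch of the response shape the overflow-menu component above expects from this query; the concrete values and the `Tree` typename are assumptions for illustration.

```javascript
// Hypothetical getPermalinkPath response; values are examples only.
const exampleResponse = {
  data: {
    project: {
      id: 'gid://gitlab/Project/1',
      repository: {
        paginatedTree: {
          nodes: [{ __typename: 'Tree', permalinkPath: '/group/project/-/tree/abc123/docs' }],
        },
      },
    },
  },
};
// The component's Apollo `update(data)` hook reads the first node:
// data?.project?.repository?.paginatedTree?.nodes[0]?.permalinkPath
```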

View File

@ -3,6 +3,7 @@ import { GlButton, GlFilteredSearch, GlPagination, GlSorting } from '@gitlab/ui'
import { mapActions, mapState } from 'pinia';
import PageHeading from '~/vue_shared/components/page_heading.vue';
import { FILTER_OPTIONS, SORT_OPTIONS } from '~/access_tokens/constants';
import { initializeValuesFromQuery } from '~/access_tokens/utils';
import { useAccessTokens } from '../stores/access_tokens';
import AccessToken from './access_token.vue';
@ -44,15 +45,7 @@ export default {
},
created() {
this.setup({
filters: [
{
type: 'state',
value: {
data: 'active',
operator: '=',
},
},
],
...initializeValuesFromQuery(),
id: this.id,
urlCreate: this.accessTokenCreate,
urlRevoke: this.accessTokenRevoke,

View File

@ -11,7 +11,7 @@ import {
import { joinPaths } from '~/lib/utils/url_utility';
import { s__ } from '~/locale';
import { SORT_OPTIONS, DEFAULT_SORT } from '~/access_tokens/constants';
import { serializeParams, update2WeekFromNow } from '../utils';
import { serializeParams, update2WeekFromNow, updateUrlWithQueryParams } from '../utils';
/**
* @typedef {{type: string, value: {data: string, operator: string}}} Filter
@ -133,6 +133,7 @@ export const useAccessTokens = defineStore('accessTokens', {
this.busy = true;
try {
const url = Api.buildUrl(this.urlShow.replace(':id', this.id));
updateUrlWithQueryParams({ params: this.params, sort: this.sort });
const { data, perPage, total } = await fetchTokens({
url,
id: this.id,
@ -249,9 +250,11 @@ export const useAccessTokens = defineStore('accessTokens', {
* @param {string} options.urlRotate
* @param {string} options.urlShow
*/
setup({ filters, id, urlCreate, urlRevoke, urlRotate, urlShow }) {
setup({ filters, id, page, sorting, urlCreate, urlRevoke, urlRotate, urlShow }) {
this.filters = filters;
this.id = id;
this.page = page;
this.sorting = sorting;
this.urlCreate = urlCreate;
this.urlRevoke = urlRevoke;
this.urlRotate = urlRotate;

View File

@ -1,4 +1,5 @@
import { getDateInFuture, nWeeksAfter, toISODateFormat } from '~/lib/utils/datetime_utility';
import { setUrlParams, updateHistory } from '~/lib/utils/url_utility';
import { STATISTICS_CONFIG } from '~/access_tokens/constants';
/**
@ -54,3 +55,17 @@ export function update2WeekFromNow(stats = STATISTICS_CONFIG) {
return clonedStats;
}
/**
* Sets the URL parameters based on the provided query parameters.
* @param {Object} options
* @param {Object<string, string|number>} options.params
* @param {string} options.sort
*/
export function updateUrlWithQueryParams({ params, sort }) {
const queryParams = { ...params, sort };
updateHistory({
url: setUrlParams(queryParams, window.location.href, true),
replace: true,
});
}
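A brief sketch of calling this helper the way the access-token store does before fetching tokens; the parameter values are illustrative.

```javascript
// Illustrative call — `params` mirrors the filter params sent to the API and
// `sort` is an example sort string.
updateUrlWithQueryParams({
  params: { page: 2, revoked: 'false' },
  sort: 'expires_asc',
});
// Rewrites the current URL in place (the history entry is replaced), for example:
// `?page=2&revoked=false&sort=expires_asc`
```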

View File

@ -5,5 +5,4 @@
@import './pages/issues';
@import './pages/note_form';
@import './pages/pipelines';
@import './pages/registry';
@import './pages/settings';

View File

@ -1,17 +1,3 @@
.breadcrumbs {
flex: 1;
min-width: 0;
align-self: center;
@apply gl-text-subtle;
.avatar-tile {
margin-right: 4px;
@apply gl-border;
border-radius: 50%;
vertical-align: sub;
}
}
#js-vue-page-breadcrumbs-wrapper {
display: flex;
flex-grow: 1;

View File

@ -180,7 +180,7 @@
cursor: help;
}
.issue-boards-content:not(.breadcrumbs) {
.issue-boards-content {
isolation: isolate;
}

View File

@ -1,9 +0,0 @@
// Workaround for gl-breadcrumb at the last child of the handwritten breadcrumb
// until this gitlab-ui issue is resolved: https://gitlab.com/gitlab-org/gitlab-ui/-/issues/1079
//
// See app/assets/javascripts/registry/explorer/components/registry_breadcrumb.vue when this is changed.
// stylelint-disable-next-line gitlab/no-gl-class
.breadcrumbs .gl-breadcrumbs {
padding: 0;
box-shadow: none;
}

View File

@ -7,12 +7,13 @@ module Resolvers
argument :template_content_input, ::Types::WorkItems::DescriptionTemplateContentInputType,
required: true,
description: "Input for fetching a specific Descriptiontemplate."
description: "Input for fetching a specific description template."
def resolve(args)
project = Project.find(args[:template_content_input].project_id)
::TemplateFinder.new(:issues, project, { name: args[:template_content_input].name }).execute
::TemplateFinder.new(:issues, project,
{ name: args[:template_content_input].name, source_template_project_id: project.id }).execute
rescue Gitlab::Template::Finders::RepoTemplateFinder::FileNotFoundError, ActiveRecord::RecordNotFound
nil

View File

@ -149,6 +149,7 @@ module MergeRequests
if branch_and_project_match?(merge_request) || @push.force_push?
merge_request.reload_diff(current_user)
schedule_duo_code_review(merge_request)
# Clear existing merge error if the push were directed at the
# source branch. Clearing the error when the target branch
# changes will hide the error from the user.
@ -332,6 +333,10 @@ module MergeRequests
.from_source_branches(@push.branch_name)
.from_fork
end
def schedule_duo_code_review(merge_request)
# Overridden in EE
end
end
end

View File

@ -0,0 +1,9 @@
---
name: duo_code_review_response_logging
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/537623
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/189329
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/538176
milestone: '18.0'
group: group::code review
type: ops
default_enabled: false

View File

@ -39,7 +39,7 @@ on your instance, configure these maximum values for diffs:
When a diff reaches 10% of any of these values, the files are shown in a
collapsed view, with a link to expand the diff. Diffs that exceed any of the
set values are presented as **Too large** are cannot be expanded in the UI.
set values are presented as **Too large** and cannot be expanded in the UI.
To configure these values:

View File

@ -23,7 +23,7 @@ blocks to an HTML image tag, with the source pointing to the PlantUML instance.
diagram delimiters `@startuml`/`@enduml` aren't required, as these are replaced
by the `plantuml` block:
- **Markdown** files with the extension `.md`:
- Markdown files with the extension `.md`:
````markdown
```plantuml
@ -35,7 +35,7 @@ by the `plantuml` block:
For additional acceptable extensions, review the
[`languages.yaml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/vendor/languages.yml#L3174) file.
- **AsciiDoc** files with the extension `.asciidoc`, `.adoc`, or `.asc`:
- AsciiDoc files with the extension `.asciidoc`, `.adoc`, or `.asc`:
```plaintext
[plantuml, format="png", id="myDiagram", width="200px"]
@ -45,7 +45,7 @@ by the `plantuml` block:
----
```
- **reStructuredText**
- reStructuredText:
```plaintext
.. plantuml::
@ -91,7 +91,7 @@ You can include or embed a PlantUML diagram from separate files in the repositor
the `include` directive. Use this to maintain complex diagrams in dedicated files, or to
reuse diagrams. For example:
- **Markdown**:
- Markdown:
````markdown
```plantuml
@ -99,7 +99,7 @@ reuse diagrams. For example:
```
````
- **AsciiDoc**:
- AsciiDoc:
```plaintext
[plantuml, format="png", id="myDiagram", width="200px"]

View File

@ -186,7 +186,7 @@ You should use the
### Migrating to object storage
You can migrate the LFS objects from local storage to object storage. The
processing is done in the background and requires **no downtime**.
processing is done in the background and requires no downtime.
1. [Configure the object storage](../object_storage.md#configure-a-single-storage-connection-for-all-object-types-consolidated-form).
1. Migrate the LFS objects:

View File

@ -36,11 +36,11 @@ For more details about the implementation, see [the blog post](https://about.git
If you are considering switching from OpenSSH to `gitlab-sshd`, consider the following:
- **PROXY protocol**: `gitlab-sshd` supports the PROXY protocol, allowing it to run behind proxy
- PROXY protocol: `gitlab-sshd` supports the PROXY protocol, allowing it to run behind proxy
servers like HAProxy. This feature is not enabled by default but [can be enabled](#proxy-protocol-support).
- **SSH certificates**: `gitlab-sshd` does not support SSH certificates. For more information, see
- SSH certificates: `gitlab-sshd` does not support SSH certificates. For more information, see
[issue 655](https://gitlab.com/gitlab-org/gitlab-shell/-/issues/655).
- **2FA recovery codes**: `gitlab-sshd` does not support 2FA recovery code regeneration.
- 2FA recovery codes: `gitlab-sshd` does not support 2FA recovery code regeneration.
Attempting to run `2fa_recovery_codes` results in the error:
`remote: ERROR: Unknown command: 2fa_recovery_codes`. See
[the discussion](https://gitlab.com/gitlab-org/gitlab-shell/-/issues/766#note_1906707753) for details.

View File

@ -23,7 +23,7 @@ switch to the alternatives.
Not all deprecated API endpoints are included in this rate limit - just those
that might have a performance impact:
- [`GET /groups/:id`](../../api/groups.md#get-a-single-group) **without** the `with_projects=0` query parameter.
- [`GET /groups/:id`](../../api/groups.md#get-a-single-group) without the `with_projects=0` query parameter.
## Define Deprecated API rate limits

View File

@ -24,7 +24,7 @@ content changes.
To configure the snippet size limit, you can use the Rails console
or the [Application settings API](../../api/settings.md).
The limit **must** be in bytes.
The limit must be in bytes.
This setting is not available in the [**Admin** area settings](../settings/_index.md).

View File

@ -30,11 +30,11 @@ Label notes are not part of this API, but recorded as separate events in
Not all discussion types are equally available in the API:
- **Note**: A comment left on the _root_ of an issue, merge request, commit,
- Note: A comment left on the _root_ of an issue, merge request, commit,
or snippet.
- **Discussion**: A collection, often called a _thread_, of `DiscussionNotes` in
- Discussion: A collection, often called a _thread_, of `DiscussionNotes` in
an issue, merge request, commit, or snippet.
- **DiscussionNote**: An individual item in a discussion on an issue, merge request,
- DiscussionNote: An individual item in a discussion on an issue, merge request,
commit, or snippet. Items of type `DiscussionNote` are not returned as part of the Note API.
Not available in the [Events API](events.md).

View File

@ -1701,7 +1701,7 @@ Returns [`WorkItemDescriptionTemplate`](#workitemdescriptiontemplate).
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="queryworkitemdescriptiontemplatecontenttemplatecontentinput"></a>`templateContentInput` | [`WorkItemDescriptionTemplateContentInput!`](#workitemdescriptiontemplatecontentinput) | Input for fetching a specific Descriptiontemplate. |
| <a id="queryworkitemdescriptiontemplatecontenttemplatecontentinput"></a>`templateContentInput` | [`WorkItemDescriptionTemplateContentInput!`](#workitemdescriptiontemplatecontentinput) | Input for fetching a specific description template. |
### `Query.workItemsByReference`

View File

@ -14,23 +14,19 @@ title: Project badges API
## Placeholder tokens
[Badges](../user/project/badges.md) support placeholders that are replaced in real-time in both the link and image URL. The allowed placeholders are:
[Badges](../user/project/badges.md) support placeholders that are replaced in real-time in both the link and image URL. The allowed placeholders are:
<!-- vale gitlab_base.Spelling = NO -->
- **%{project_path}**: Replaced by the project path.
- **%{project_title}**: Replaced by the project title.
- **%{project_name}**: Replaced by the project name.
- **%{project_id}**: Replaced by the project ID.
- **%{project_namespace}**: Replaced by the project's namespace full path.
- **%{group_name}**: Replaced by the project's top-level group name.
- **%{gitlab_server}**: Replaced by the project's server name.
- **%{gitlab_pages_domain}**: Replaced by the domain name hosting GitLab Pages.
- **%{default_branch}**: Replaced by the project default branch.
- **%{commit_sha}**: Replaced by the project's last commit SHA.
- **%{latest_tag}**: Replaced by the project's last tag.
<!-- vale gitlab_base.Spelling = YES -->
- `%{project_path}`: Replaced by the project path.
- `%{project_title}`: Replaced by the project title.
- `%{project_name}`: Replaced by the project name.
- `%{project_id}`: Replaced by the project ID.
- `%{project_namespace}`: Replaced by the project's namespace full path.
- `%{group_name}`: Replaced by the project's top-level group name.
- `%{gitlab_server}`: Replaced by the project's server name.
- `%{gitlab_pages_domain}`: Replaced by the domain name hosting GitLab Pages.
- `%{default_branch}`: Replaced by the project default branch.
- `%{commit_sha}`: Replaced by the project's last commit SHA.
- `%{latest_tag}`: Replaced by the project's last tag.
## List all badges of a project

View File

@ -69,7 +69,7 @@ Supported attributes:
| `commit_message_negative_regex` | string | No | No commit message is allowed to match this regular expression. |
| `commit_message_regex` | string | No | All commit messages must match this regular expression. |
| `deny_delete_tag` | boolean | No | Deny deleting a tag. |
| `file_name_regex` | string | No | All committed filenames must **not** match this regular expression. |
| `file_name_regex` | string | No | All committed filenames must not match this regular expression. |
| `max_file_size` | integer | No | Maximum file size (MB). |
| `member_check` | boolean | No | Restrict commits by author (email) to existing GitLab users. |
| `prevent_secrets` | boolean | No | GitLab rejects any files that are likely to contain secrets. |
@ -96,7 +96,7 @@ Supported attributes:
| `commit_message_negative_regex` | string | No | No commit message is allowed to match this regular expression. |
| `commit_message_regex` | string | No | All commit messages must match this regular expression. |
| `deny_delete_tag` | boolean | No | Deny deleting a tag. |
| `file_name_regex` | string | No | All committed filenames must **not** match this regular expression. |
| `file_name_regex` | string | No | All committed filenames must not match this regular expression. |
| `max_file_size` | integer | No | Maximum file size (MB). |
| `member_check` | boolean | No | Restrict commits by author (email) to existing GitLab users. |
| `prevent_secrets` | boolean | No | GitLab rejects any files that are likely to contain secrets. |

View File

@ -11,17 +11,17 @@ Example configuration for Solargraph can be found in [`.solargraph.yml.example`]
Refer to particular IDE plugin documentation on how to integrate it with Solargraph language server:
- **Visual Studio Code**
- Visual Studio Code
- GitHub: [`vscode-solargraph`](https://github.com/castwide/vscode-solargraph)
- **Atom**
- Atom
- GitHub: [`atom-solargraph`](https://github.com/castwide/atom-solargraph)
- **Vim**
- Vim
- GitHub: [`LanguageClient-neovim`](https://github.com/autozimu/LanguageClient-neovim)
- **Emacs**
- Emacs
- GitHub: [`emacs-solargraph`](https://github.com/guskovd/emacs-solargraph)
- **Eclipse**
- Eclipse
- GitHub: [`eclipse-solargraph`](https://github.com/PyvesB/eclipse-solargraph)

View File

@ -53,7 +53,7 @@ The method you choose depends on whether the language already has a Highlight.js
We can add third-party dependencies to our [`package.json`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/package.json) and import the dependency in [`highlight_js_language_loader`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/assets/javascripts/content_editor/services/highlight_js_language_loader.js#L260).
**Example:**
Example:
- Add the dependency to `package.json`:

View File

@ -5,7 +5,7 @@ info: Any user with at least the Maintainer role can merge updates to this conte
title: Source Editor
---
**Source Editor** provides the editing experience at GitLab. This thin wrapper around
Source Editor provides the editing experience at GitLab. This thin wrapper around
[the Monaco editor](https://microsoft.github.io/monaco-editor/) provides necessary
helpers and abstractions, and extends Monaco [using extensions](#extensions). Multiple
GitLab features use it, including:
@ -62,9 +62,16 @@ An instance of Source Editor accepts the following configuration options:
| `blobPath` | `false` | `String`: The name of a file to render in the editor, used to identify the correct syntax highlighter to use with that file, or another file type. Can accept wildcards like `*.js` when the actual filename isn't known or doesn't play any role. |
| `blobContent` | `false` | `String`: The initial content to render in the editor. |
| `extensions` | `false` | `Array`: Extensions to use in this instance. |
| `blobGlobalId` | `false` | `String`: An auto-generated property.<br>**Note:** This property may go away in the future. Do not pass `blobGlobalId` unless you know what you're doing.|
| `blobGlobalId` | `false` | `String`: An auto-generated property.|
| Editor Options | `false` | `Object(s)`: Any property outside of the list above is treated as an Editor Option for this particular instance. Use this field to override global Editor Options on the instance level. A full [index of Editor Options](https://microsoft.github.io/monaco-editor/docs.html#enums/editor.EditorOption.html) is available. |
{{< alert type="note" >}}
The `blobGlobalId` property may be removed in a future release. Use the standard blob properties
instead unless you have a specific use case that requires `blobGlobalId`.
{{< /alert >}}
## API
The editor uses the same public API as
@ -74,7 +81,7 @@ with additional functions on the instance level:
| Function | Arguments | Description |
| --------------------- | ----- | ----- |
| `updateModelLanguage` | `path`: String | Updates the instance's syntax highlighting to follow the extension of the passed `path`. Available only on the instance level. |
| `use` | Array of objects | Array of extensions to apply to the instance. Accepts only an array of **objects**. The extensions' ES6 modules must be fetched and resolved in your views or components before they're passed to `use`. Available on the instance and global editor (all instances) levels. |
| `use` | Array of objects | Array of extensions to apply to the instance. Accepts only an array of objects. The extensions' ES6 modules must be fetched and resolved in your views or components before they're passed to `use`. Available on the instance and global editor (all instances) levels. |
| Monaco Editor options | See [documentation](https://microsoft.github.io/monaco-editor/docs.html#interfaces/editor.IStandaloneCodeEditor.html) | Default Monaco editor options. |
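A minimal sketch of creating an instance with the options above and applying an extension through `use`; the `el` option, import paths, and the extension module are assumptions for illustration, not part of this change.

```javascript
// Sketch only — import paths and the extension module are assumed.
import SourceEditor from '~/editor/source_editor';

const editor = new SourceEditor();
const instance = editor.createInstance({
  el: document.getElementById('editor'), // target element (assumed option)
  blobPath: 'README.md',                 // drives syntax highlighting
  blobContent: '# Hello',                // initial content
});

// Extensions must be resolved ES6 modules before being passed to `use`
// as an array of objects.
import('~/editor/extensions/example_extension').then(({ ExampleExtension }) => {
  instance.use([{ definition: ExampleExtension }]);
});
```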
## Tips

View File

@ -452,16 +452,19 @@ deleting feature flags.
To migrate an `ops` feature flag to an application setting:
1. In application settings, create or identify an existing `JSONB` column to store the setting.
1. Write a migration to backfill the column.
For an example, see [merge request 148014](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/148014).
1. Write a migration to backfill the column. Avoid using `Feature.enabled?` in the migration. Use the `feature_flag_enabled?` migration helper method.
1. Optional. In application settings, update the documentation for the setting.
1. In the **Admin** area, create a setting to enable or disable the feature.
1. Replace the feature flag everywhere with the application setting.
1. Update all the relevant documentation pages.
1. Mark the backfill migration as a `NOOP` and remove the feature flag after the mandatory upgrade path is crossed.
For an example, see [merge request 151080](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/151080).
1. To remove a feature flag from an existing migration, replace `Feature.enabled?` with migration helper method `feature_flag_enabled?`.
{{< alert type="warning" >}}
The changes to backfill application settings and use the settings in the code must be merged in the same milestone.
{{< /alert >}}
If frontend changes are merged in a later milestone, you should add documentation about how to update the settings
by using the [application settings API](../../api/settings.md) or the Rails console.

View File

@ -58,11 +58,11 @@ thereby minimizing the direct load on Rails.
### Expanding functionality
- **Maintaining Simplicity:** While expanding Workhorse's functionalities to include direct handling
- Maintaining Simplicity: While expanding Workhorse's functionalities to include direct handling
of specific services (for example, container registry), it's crucial to maintain its simplicity and efficiency.
Workhorse should not encompass complex control logic but rather focus on executing tasks as directed
by Rails.
- **Service Implementation and Data Migration:** Implementing new functionalities in Workhorse
- Service Implementation and Data Migration: Implementing new functionalities in Workhorse
requires careful consideration of data migration strategies and service continuity.
### Data management and operational integrity

View File

@ -18,7 +18,7 @@ the architecture of these connections.
## Introduction to WebSockets
Websockets are an "upgraded" `HTTP/1.1` request. They permit bidirectional
communication between a client and a server. **Websockets are not HTTP**.
communication between a client and a server. Websockets are not HTTP.
Clients can send messages (known as frames) to the server at any time, and
vice versa. Client messages are not necessarily requests, and server messages are
not necessarily responses. WebSocket URLs have schemes like `ws://` (unencrypted) or
@ -121,7 +121,7 @@ This returns a JSON response containing details of where the
terminal can be found, and how to connect it. In particular,
the following details are returned in case of success:
- WebSocket URL to connect** to, such as `wss://example.com/terminals/1.ws?tty=1`.
- WebSocket URL to connect to, such as `wss://example.com/terminals/1.ws?tty=1`.
- WebSocket sub-protocols to support, such as `["channel.k8s.io"]`.
- Headers to send, such as `Authorization: Token xxyyz`.
- Optional. Certificate authority to verify `wss` connections with.
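For illustration, the fields listed above could arrive in a payload shaped roughly like this; the field names are assumptions, not taken from the actual API.

```javascript
// Hypothetical shape of a successful terminal-details response.
const terminalDetails = {
  url: 'wss://example.com/terminals/1.ws?tty=1', // WebSocket URL to connect to
  subprotocols: ['channel.k8s.io'],              // sub-protocols to support
  headers: { Authorization: 'Token xxyyz' },     // headers to send
  ca_pem: null,                                  // optional certificate authority for wss
};
```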

View File

@ -5,7 +5,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
title: Create a Git branch for your changes
---
A **branch** is a copy of the files in the repository at the time you create the branch.
A branch is a copy of the files in the repository at the time you create the branch.
You can work in your branch without affecting other branches. When
you're ready to add your changes to the main codebase, you can merge your branch into
the default branch, for example, `main`.

View File

@ -38,9 +38,9 @@ When you clone a Git repository, you create a local copy of the repository in yo
You can edit files, add new ones, and test your code.
To collaborate, you can:
- **Commit**: After you make changes in your working directory, commit those changes to your local repository.
- **Push**: Push your changes to a remote Git repository hosted on GitLab. This makes your changes available to other team members.
- **Pull**: Pull changes made by others from the remote repository, and ensure that your local repository is updated with the latest changes.
- Commit: After you make changes in your working directory, commit those changes to your local repository.
- Push: Push your changes to a remote Git repository hosted on GitLab. This makes your changes available to other team members.
- Pull: Pull changes made by others from the remote repository, and ensure that your local repository is updated with the latest changes.
For more information, see [Common Git commands](commands.md).
@ -83,20 +83,20 @@ You can manage your code, collaborate with others, and keep your project organiz
with a Git workflow.
A standard Git workflow includes the following steps:
1. **Clone a repository**: Create a local copy of the repository by cloning it to your machine.
1. Clone a repository: Create a local copy of the repository by cloning it to your machine.
You can work on the project without affecting the original repository.
1. **Create a new branch**: Before you make any changes, it's recommended to create a new branch.
1. Create a new branch: Before you make any changes, it's recommended to create a new branch.
This ensures that your changes are isolated and don't interfere with the work of others on the
default branch.
1. **Make changes**: Make changes to files in your working directory.
1. Make changes: Make changes to files in your working directory.
You can add new features, fix bugs, or make other modifications.
1. **Stage changes**: After you make changes to your files, stage the changes you want to commit.
1. Stage changes: After you make changes to your files, stage the changes you want to commit.
Staging tells Git which changes should be included in the next commit.
1. **Commit changes**: Commit your staged changes to your local repository.
1. Commit changes: Commit your staged changes to your local repository.
A commit saves a snapshot of your work and creates a history of the changes to your files.
1. **Push changes**: To share your changes with others, push them to the remote repository.
1. Push changes: To share your changes with others, push them to the remote repository.
This makes your changes available to other collaborators.
1. **Merge your branch**: After your changes are reviewed and approved, merge your branch into the
1. Merge your branch: After your changes are reviewed and approved, merge your branch into the
default branch. For example, `main`. This step incorporates your changes into the project.
## Forks

View File

@ -28,9 +28,9 @@ using Git LFS, GitLab:
size 804
```
- **Version** - the version of the Git LFS specification in use
- **OID** - The hashing method used, and a unique object ID, in the form `{hash-method}:{hash}`.
- **Size** - The file size, in bytes.
- Version - the version of the Git LFS specification in use
- OID - The hashing method used, and a unique object ID, in the form `{hash-method}:{hash}`.
- Size - The file size, in bytes.
1. Queues a job to recalculate your project's statistics, including storage size and
LFS object storage. Your LFS object storage is the sum of the size of all LFS
@ -136,9 +136,9 @@ on how to migrate an existing Git repository with Git LFS.
It's important to understand the differences between untracking a file in Git LFS and deleting a file:
- **Untrack**: The file remains on disk and in your repository history.
- Untrack: The file remains on disk and in your repository history.
If users check out historical branches or tags, they still need the LFS version of the file.
- **Delete**: The file is removed but remains in your repository history.
- Delete: The file is removed but remains in your repository history.
To delete a tracked file with Git LFS, see [Remove a file](../undo.md#remove-a-file-from-a-repository).

View File

@ -16,12 +16,12 @@ When working with Git LFS, you might encounter the following issues.
This error can occur for a few reasons, including:
- **You don't have permissions to access certain LFS object.** Confirm you have
- You don't have permissions to access certain LFS object. Confirm you have
permission to push to the project, or fetch from the project.
- **The project isn't allowed to access the LFS object.** The LFS object you want
- The project isn't allowed to access the LFS object. The LFS object you want
to push (or fetch) is no longer available to the project. In most cases, the object
has been removed from the server.
- **The local Git repository is using deprecated version of the Git LFS API.** Update
- The local Git repository is using deprecated version of the Git LFS API. Update
your local copy of Git LFS and try again.
## Invalid status for `<url>` : 501

View File

@ -123,7 +123,7 @@ If neither approach fixes the error, you may need a different internet service p
### Check your SSH configuration
**If pushing over SSH**, first check your SSH configuration as 'Broken pipe'
If pushing over SSH, first check your SSH configuration as 'Broken pipe'
errors can sometimes be caused by underlying issues with SSH (such as
authentication). Make sure that SSH is correctly configured by following the
instructions in the [SSH troubleshooting](../../user/ssh_troubleshooting.md#password-prompt-with-git-clone) documentation.
@ -137,7 +137,7 @@ Configuring both the client and the server is unnecessary.
{{< /alert >}}
**To configure SSH on the client side**:
To configure SSH on the client side:
- On UNIX, edit `~/.ssh/config` (create the file if it doesn't exist) and
add or edit:
@ -152,7 +152,7 @@ Configuring both the client and the server is unnecessary.
go to "Connection" and under "Sending of null packets to keep
session active", set `Seconds between keepalives (0 to turn off)` to `60`.
**To configure SSH on the server side**, edit `/etc/ssh/sshd_config` and add:
To configure SSH on the server side, edit `/etc/ssh/sshd_config` and add:
```plaintext
ClientAliveInterval 60
@ -161,7 +161,7 @@ ClientAliveCountMax 5
### Running a `git repack`
**If 'pack-objects' type errors are also being displayed**, you can try to
If 'pack-objects' type errors are also being displayed, you can try to
run a `git repack` before attempting to push to the remote repository again:
```shell

View File

@ -16,10 +16,10 @@ title: Code Owners
Use the Code Owners feature to define who has expertise for specific parts of your project's codebase.
Define the owners of files and directories in a repository to:
- **Require owners to approve changes.** Combine protected branches with Code Owners to require
- Require owners to approve changes. Combine protected branches with Code Owners to require
experts to approve merge requests before they merge into a protected branch. For more information,
see [Code Owners and protected branches](#code-owners-and-protected-branches).
- **Identify owners.** Code Owner names are displayed on the files and directories they own:
- Identify owners. Code Owner names are displayed on the files and directories they own:
![Code Owners displayed in UI](../img/codeowners_in_UI_v15_10.png)
@ -29,9 +29,9 @@ Combine Code Owners with merge request
[approval rules](../merge_requests/approvals/rules.md) (either optional or required)
to build a flexible approval workflow:
- Use **Code Owners** to ensure quality. Define the users who have domain expertise
- Use Code Owners to ensure quality. Define the users who have domain expertise
for specific paths in your repository.
- Use **Approval rules** to define areas of expertise that don't correspond to specific
- Use approval rules to define areas of expertise that don't correspond to specific
file paths in your repository. Approval rules help guide merge request creators to
the correct set of reviewers, such as frontend developers or a security team.
@ -133,7 +133,7 @@ This permission is often granted to accounts associated with
automation ([internal users](../../../administration/internal_users.md))
and release tooling.
All changes from users _without_ the **Allowed to push** permission must be routed through a merge request.
All changes from users without the **Allowed to push** permission must be routed through a merge request.
## Related topics

View File

@ -38,7 +38,7 @@ config/db/database-setup.md @docs-team
## Regular entries and sections
If you set a default Code Owner for a path **outside a section**, their approval is always required.
If you set a default Code Owner for a path outside a section, their approval is always required.
Such entries aren't overridden by sections.
Entries without sections are treated as if they were another, unnamed section:
@ -86,9 +86,9 @@ model/db/ @database
README.md @docs
```
This code results in three entries under the **Documentation** section header, and two
entries under **Database**. The entries defined under the sections **Documentation** and
**DOCUMENTATION** are combined, using the case of the first section.
This code results in three entries under the `Documentation` section header, and two
entries under `Database`. The entries defined under the sections `Documentation` and
`DOCUMENTATION` are combined, using the case of the first section.
## Define Code Owners for specific files or directories
@ -167,19 +167,19 @@ graph TD
In this example:
- **Parent group X** (`group-x`) owns **Project A**.
- **Parent group X** also contains a subgroup, **Subgroup Y**. (`group-x/subgroup-y`)
- **Subgroup Y** owns **Project B**.
- Parent group X (`group-x`) owns Project A.
- Parent group X also contains a subgroup, Subgroup Y. (`group-x/subgroup-y`)
- Subgroup Y owns Project B.
The eligible Code Owners are:
- **Project A**: the members of **Group X** only, because **Project A** doesn't belong to **Subgroup Y**.
- **Project B**: the members of both **Group X** and **Subgroup Y**.
- Project A: the members of Group X only, because Project A doesn't belong to Subgroup Y.
- Project B: the members of both Group X and Subgroup Y.
### Invite subgroups to parent groups
Inviting **Subgroup Y** to a parent group of **Project A**
[is not supported](https://gitlab.com/gitlab-org/gitlab/-/issues/288851). To set **Subgroup Y** as
Inviting Subgroup Y to a parent group of Project A
[is not supported](https://gitlab.com/gitlab-org/gitlab/-/issues/288851). To set Subgroup Y as
Code Owners, [invite this group directly to the project](#invite-subgroups-to-projects-in-parent-groups) itself.
{{< alert type="note" >}}
@ -193,7 +193,7 @@ and not inherit membership from any parent groups.
### Invite subgroups to projects in parent groups
You can [invite](../members/sharing_projects_groups.md) **Subgroup Y** to **Project A**
You can [invite](../members/sharing_projects_groups.md) Subgroup Y to Project A
so that their members also become eligible Code Owners.
```mermaid
@ -209,7 +209,7 @@ graph LR
F -.-> |Add Subgroup Y<br/> as Code Owners to Project A| J[Approvals can only<br/>be optional] -.-> B
```
If you do not invite **Subgroup Y** to **Project A**, but make them Code Owners, their approval
If you do not invite Subgroup Y to Project A, but make them Code Owners, their approval
of the merge request becomes optional.
## Error handling

View File

@ -16,10 +16,10 @@ The `CODEOWNERS` file uses a syntax to define ownership rules.
Each line in the file represents a rule, and specifies a file path pattern and one or more owners.
The key elements are:
- **File paths**: Specific files, directories, or wildcards.
- **Code Owners**: Use `@mentions` for users, groups, or roles.
- **Comments**: Lines starting with `#` are ignored.
- **Sections**: Optional groupings of rules, defined using `[Section name]`.
- File paths: Specific files, directories, or wildcards.
- Code Owners: Use `@mentions` for users, groups, or roles.
- Comments: Lines starting with `#` are ignored.
- Sections: Optional groupings of rules, defined using `[Section name]`.
{{< alert type="note" >}}
@ -105,7 +105,7 @@ internal/README.md @user2
```
Each Code Owner in the merge request widget is listed under a label.
The following image shows **Default**, **Frontend**, and **Technical Writing** sections:
The following image shows `Default`, `Frontend`, and `Technical Writing` sections:
![MR widget - Sectional Code Owners](../img/sectional_code_owners_v17_4.png)
@ -251,7 +251,7 @@ role as Code Owners for `file.md`:
## Add a group as a Code Owner
You can set **direct members** of a group or subgroup as a Code Owner.
You can set direct members of a group or subgroup as a Code Owner.
For more information about group membership, see [Membership types](../members/_index.md#membership-types).
Prerequisites:

View File

@ -44,7 +44,7 @@ if any of these conditions are true:
- A rule prevents the specific user from approving the merge request.
Check the project [merge request approval](../merge_requests/approvals/settings.md#edit-merge-request-approval-settings) settings.
- A Code Owner group has a visibility of **private**, and the current user is not a
- A Code Owner group has a visibility of private, and the current user is not a
member of the Code Owner group.
- The specific username is spelled incorrectly or
[malformed in the `CODEOWNERS` file](advanced.md#malformed-owners).

View File

@ -46,7 +46,7 @@ collaborator workflows in other branches.
When a file or directory is locked by a user:
- Only the user who created the lock can modify the file or directory on the default branch.
- For other users, the locked file or directory is **read-only** on the default branch.
- For other users, the locked file or directory is read-only on the default branch.
- Direct changes to locked files or directories on the default branch are blocked.
- Merge requests that modify locked files or directories cannot be merged to the default branch.

View File

@ -167,8 +167,8 @@ and select this event.
### Group approvers
You can add a group of users as approvers. All **direct members** of this group
can approve the rule. **Inherited members** cannot approve the rule.
You can add a group of users as approvers. All direct members of this group
can approve the rule. Inherited members cannot approve the rule.
Typically the group is a subgroup in your top-level namespace, unless you are
collaborating with an external group. If you are collaborating with another group,
@ -212,7 +212,7 @@ oversight on proposed work.
Prerequisites:
- You must select a specific branch, as this method does **not** work with `All Branches` or `All protected branches` settings.
- You must select a specific branch, as this method does not work with `All Branches` or `All protected branches` settings.
- The shared group must be added to an approval rule and not individual users, even when the added user is part of the group.
To enable approval permissions for these users without granting them push access:

View File

@ -123,7 +123,7 @@ When you change this field, it can affect all open merge requests depending on t
- If users could edit approval rules previously, and you disable this behavior,
GitLab updates all open merge requests to enforce the approval rules.
- If users could **not** edit approval rules previously, and you enable approval rule
- If users could not edit approval rules previously, and you enable approval rule
editing, open merge requests remain unchanged. This preserves any changes already
made to approval rules in those merge requests.

View File

@ -77,7 +77,7 @@ looks like this:
merge feature
```
- In comparison, a **squash merge** constructs a squash commit, a virtual copy of all commits
- In comparison, a squash merge constructs a squash commit, a virtual copy of all commits
from the `feature` branch. The original commits (B and D) remain unchanged
on the `feature` branch, and then a merge commit is made on the `main` branch to merge in the squashed branch:
@ -150,7 +150,7 @@ gitGraph
```
When you visit the merge request page with `Merge commit with semi-linear history`
method selected, you can accept it **only if a fast-forward merge is possible**.
method selected, you can accept it only if a fast-forward merge is possible.
When a fast-forward merge is not possible, the user is given the option to rebase, see
[Rebasing in (semi-)linear merge methods](#rebasing-in-semi-linear-merge-methods).
@ -193,7 +193,7 @@ When a fast-forward merge is not possible, the user is given the option to rebas
[Rebasing in (semi-)linear merge methods](#rebasing-in-semi-linear-merge-methods).
When you visit the merge request page with `Fast-forward merge`
method selected, you can accept it **only if a fast-forward merge is possible**.
method selected, you can accept it only if a fast-forward merge is possible.
## Rebasing in (semi-)linear merge methods

View File

@ -88,13 +88,13 @@ graph TD
In this example:
- **Parent group X** (`group-x`) owns **Project A**.
- **Parent group X** also contains a subgroup, **Subgroup Y**. (`group-x/subgroup-y`)
- **Project A** is shared with **Subgroup Y**.
- Parent group X (`group-x`) owns Project A.
- Parent group X also contains a subgroup, Subgroup Y. (`group-x/subgroup-y`)
- Project A is shared with Subgroup Y.
The eligible groups for protected tag permissions are:
- **Project A**: Both **Group X** and **Subgroup Y**, because **Project A** is shared with **Subgroup Y**.
- Project A: Both Group X and Subgroup Y, because Project A is shared with Subgroup Y.
#### Share projects with groups for protected tag permissions
@ -114,7 +114,7 @@ graph LR
F -.-> |Add Subgroup Y<br/> to protected tag settings| J[Settings will not<br/>take effect] -.-> B
```
To grant access to **Subgroup Y** members for **Project A**, you must share the project with the subgroup.
To grant access to Subgroup Y members for Project A, you must share the project with the subgroup.
Adding the subgroup directly to the protected tag settings is not effective and isn't applicable to subgroup members.
{{< alert type="note" >}}

View File

@ -6,7 +6,7 @@ description: Expand your Git branching strategy when you outgrow the basic appro
title: Branching strategies
---
The way you organize and merge Git branches is called a **branching strategy**.
The way you organize and merge Git branches is called a branching strategy.
For many teams, the simplest approach is sensible and effective:
1. Make changes in a feature branch.

View File

@ -134,7 +134,7 @@ To update your fork from the command line, follow the instruction in
A fork can be configured as a mirror of the upstream if all these conditions are met:
1. Your subscription is **Premium** or **Ultimate**.
1. Your subscription is GitLab Premium or GitLab Ultimate.
1. You create all changes in branches (not `main`).
1. You do not work on [merge requests for confidential issues](../merge_requests/confidential.md),
which requires changes to `main`.

View File

@ -60,8 +60,8 @@ After you configure a GitLab repository as a pull mirror:
- How many mirrors are already in the queue and due for updates. Being due depends
on when the repository mirror was last updated, and how many times updates have been retried.
1. Sidekiq becomes available to process updates, mirrors are updated. If the update process:
- **Succeeds**: An update is enqueued again with at least a 30 minute wait.
- **Fails**: The update is attempted again later. After 14 failures, a mirror is marked as a
- Succeeds: An update is enqueued again with at least a 30 minute wait.
- Fails: The update is attempted again later. After 14 failures, a mirror is marked as a
[hard failure](#fix-hard-failures-when-mirroring) and is no longer enqueued for updates. A branch diverging
from its upstream counterpart can cause failures. To prevent branches from
diverging, configure [Overwrite diverged branches](#overwrite-diverged-branches) when

View File

@ -25,7 +25,8 @@ push mirrors only receive changes when:
- Commits are pushed to the upstream GitLab repository.
- An administrator [force-updates the mirror](_index.md#force-an-update).
When you push a change to the upstream repository, the push mirror receives it in five minutes, or one minute if the setting **Only mirror protected branches** is on.
When you push a change to the upstream repository, the push mirror receives it in five minutes, or
one minute if the setting **Only mirror protected branches** is on.
When a branch is merged into the default branch and deleted in the source project,
it is deleted from the remote mirror on the next push. Branches with unmerged

View File

@ -155,7 +155,7 @@ fail nor succeed. They also do not leave a clear log. To check for this problem:
If you receive this error while setting up mirroring over [SSH](_index.md#ssh-authentication), make sure the URL is in a valid format.
Mirroring **does not** support SCP-like clone URLs in the form of
Mirroring does not support SCP-like clone URLs in the form of
`git@gitlab.com:gitlab-org/gitlab.git`, with host and project path separated using `:`.
It requires a [standard URL](https://git-scm.com/docs/git-clone#_git_urls)
that includes the `ssh://` protocol, like `ssh://git@gitlab.com/gitlab-org/gitlab.git`.

View File

@ -177,6 +177,7 @@ To configure CI/CD variables:
- `AWS_ACCESS_KEY_ID`: Your AWS access key ID
- `AWS_SECRET_ACCESS_KEY`: Your AWS secret access key
- `TF_VAR_agent_token`: GitLab Agent for Kubernetes token
- `TF_VAR_kas_address`: GitLab Kubernetes Agent Server address. Required if on a GitLab Self-Managed instance.
- `TF_VAR_workspaces_proxy_auth_client_id`: OAuth application client ID
- `TF_VAR_workspaces_proxy_auth_client_secret`: OAuth application secret
- `TF_VAR_workspaces_proxy_auth_redirect_uri`: OAuth callback URL
@ -195,10 +196,12 @@ To configure CI/CD variables:
- `TF_VAR_zones`: AWS availability zones.
- `TF_VAR_name`: Name prefix for resources.
- `TF_VAR_cluster_endpoint_public_access`: Enable public access to cluster endpoint.
- `TF_VAR_instance_type`: EC2 instance type for Kubernetes nodes.
- `TF_VAR_instance_count`: Number of worker nodes.
- `TF_VAR_cluster_node_instance_type`: EC2 instance type for Kubernetes nodes.
- `TF_VAR_cluster_node_count_min`: Minimum number of worker nodes.
- `TF_VAR_cluster_node_count_max`: Maximum number of worker nodes.
- `TF_VAR_cluster_node_count`: Number of worker nodes.
- `TF_VAR_cluster_node_labels`: Map of labels to apply to the cluster nodes.
- `TF_VAR_agent_namespace`: Kubernetes namespace for the agent.
- `TF_VAR_kas_address`: GitLab Kubernetes Agent Server address. Required if on a GitLab Self-Managed instance.
- `TF_VAR_workspaces_proxy_namespace`: Kubernetes namespace for workspaces proxy.
- `TF_VAR_workspaces_proxy_ingress_class_name`: Ingress class name.
- `TF_VAR_ingress_nginx_namespace`: Kubernetes namespace for Ingress-NGINX.
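
Instead of entering each value by hand, the same variables can be created through the project variables API (`POST /projects/:id/variables`). A minimal sketch; the hostname, project ID, and values are placeholders:

```ruby
require 'net/http'
require 'uri'

# Create the Terraform inputs as project-level CI/CD variables.
# All values below are placeholders.
project_id = 42
variables = {
  'TF_VAR_agent_token' => 'glagent-example-token',
  'TF_VAR_kas_address' => 'wss://kas.gitlab.example.com',
  'TF_VAR_cluster_node_count' => '3'
}

variables.each do |key, value|
  uri = URI("https://gitlab.example.com/api/v4/projects/#{project_id}/variables")
  request = Net::HTTP::Post.new(uri)
  request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN')
  # Secrets such as the agent token can also be sent with 'masked' => 'true'.
  request.set_form_data('key' => key, 'value' => value)

  Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
end
```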

View File

@ -1139,6 +1139,20 @@ into similar problems in the future (e.g. when new tables are created).
YAML.safe_load_file(File.join(INTEGER_IDS_YET_TO_INITIALIZED_TO_BIGINT_FILE_PATH))
end
def feature_flag_enabled?(feature_flag_name)
quoted_name = connection.quote(feature_flag_name)
result = execute <<~SQL.squish
SELECT 1
FROM feature_gates
WHERE feature_key = #{quoted_name}
AND value = 'true'
LIMIT 1;
SQL
result.ntuples > 0
end
private
def multiple_columns(columns, separator: ', ')
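
As a usage sketch, a post-deployment migration could use the new helper to branch on a flag without loading the full `Feature` framework. The migration below is hypothetical and not part of this commit:

```ruby
# Hypothetical migration that only queues work when the flag is enabled.
# feature_flag_enabled? comes from Gitlab::Database::MigrationHelpers above.
class BackfillWidgetCounters < Gitlab::Database::Migration[2.2]
  milestone '18.0'
  restrict_gitlab_migration gitlab_schema: :gitlab_main

  def up
    return unless feature_flag_enabled?('backfill_widget_counters')

    # ...queue the backfill here...
  end

  def down
    # no-op
  end
end
```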

View File

@ -35989,6 +35989,9 @@ msgid_plural "Licenses|This package also includes %{count} licenses which were n
msgstr[0] ""
msgstr[1] ""
msgid "Lifecycle can only have a maximum of %{limit} statuses."
msgstr ""
msgid "Limit display of time tracking units to hours."
msgstr ""
@ -71261,6 +71264,12 @@ msgstr ""
msgid "can only have a maximum of %{limit} custom fields."
msgstr ""
msgid "can only have a maximum of %{limit} lifecycles."
msgstr ""
msgid "can only have a maximum of %{limit} statuses."
msgstr ""
msgid "can only have one escalation policy"
msgstr ""

View File

@ -0,0 +1,68 @@
import { DEFAULT_FILTER, DEFAULT_SORT } from '~/access_tokens/constants';
import {
initializeFilters,
initializeSort,
initializeValuesFromQuery,
} from '~/access_tokens/utils';
describe('initializeFilters', () => {
it('returns correct value of search', () => {
expect(initializeFilters({}, 'dummy')).toEqual(['dummy']);
});
it('returns correct value of filter', () => {
expect(initializeFilters({ revoked: 'false' })).toEqual([
{ type: 'revoked', value: { data: 'false', operator: '=' } },
]);
});
it('returns correct value for filters ending with `before`', () => {
expect(initializeFilters({ created_before: '2025-01-01' })).toEqual([
{ type: 'created', value: { data: '2025-01-01', operator: '<' } },
]);
});
it('returns correct value for filters ending with `after`', () => {
expect(initializeFilters({ last_used_after: '2024-01-01' })).toEqual([
{ type: 'last_used', value: { data: '2024-01-01', operator: '≥' } },
]);
});
it('when `isCredentialsInventory` is false and no filters or search term are provided, it returns a default filter', () => {
expect(initializeFilters({})).toEqual(DEFAULT_FILTER);
});
it('when `isCredentialsInventory` is true and no filters or search term are provided, it returns an empty array', () => {
expect(initializeFilters({}, '', true)).toEqual([]);
});
});
describe('initializeSort', () => {
it('returns default sort when no sort is provided', () => {
expect(initializeSort()).toEqual(DEFAULT_SORT);
});
it('returns correct value of sort', () => {
expect(initializeSort('name_desc')).toEqual({ value: 'name', isAsc: false });
});
});
describe('initializeValuesFromQuery', () => {
it('returns correct object when `isCredentialsInventory` is false', () => {
expect(initializeValuesFromQuery(false, '?page=1&revoked=true&sort=expires_asc')).toMatchObject(
{
filters: [{ type: 'revoked', value: { data: 'true', operator: '=' } }],
page: 1,
sorting: { value: 'expires', isAsc: true },
},
);
});
it('returns correct object when `isCredentialsInventory` is true', () => {
expect(initializeValuesFromQuery(true, '?page=1&revoked=true&sort=expires_asc')).toMatchObject({
tokens: [{ type: 'revoked', value: { data: 'true', operator: '=' } }],
page: 1,
sorting: { value: 'expires', isAsc: true },
});
});
});

View File

@ -1,4 +1,4 @@
import { initializeValuesFromQuery, goTo } from '~/credentials/utils';
import { goTo } from '~/credentials/utils';
import { visitUrl, getBaseURL } from '~/lib/utils/url_utility';
jest.mock('~/lib/utils/url_utility', () => ({
@ -6,71 +6,6 @@ jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn(),
}));
describe('initializeValuesFromQuery', () => {
describe('when no query parameters', () => {
it('returns default sorting and tokens', () => {
const { sorting, tokens } = initializeValuesFromQuery('');
expect(sorting).toMatchObject({ value: 'expires', isAsc: true });
expect(tokens).toMatchObject([]);
});
});
describe('when query parameters present', () => {
describe('sorting', () => {
it('returns correct value', () => {
const { sorting } = initializeValuesFromQuery('sort=created_asc');
expect(sorting).toMatchObject({ value: 'created', isAsc: true });
});
it('returns correct sorting direction', () => {
const { sorting } = initializeValuesFromQuery('sort=name_desc');
expect(sorting).toMatchObject({ value: 'name', isAsc: false });
});
});
describe('tokens', () => {
it('returns correct value for filters ending on "before"', () => {
const { tokens } = initializeValuesFromQuery('created_before=2025-01-01');
expect(tokens).toMatchObject([
{ type: 'created', value: { data: '2025-01-01', operator: '<' } },
]);
});
it('returns correct value for filters ending on "after"', () => {
const { tokens } = initializeValuesFromQuery('last_used_after=2024-01-01');
expect(tokens).toMatchObject([
{ type: 'last_used', value: { data: '2024-01-01', operator: '≥' } },
]);
});
it('returns correct value for known filters', () => {
const { tokens } = initializeValuesFromQuery('filter=ssh_keys');
expect(tokens).toMatchObject([
{ type: 'filter', value: { data: 'ssh_keys', operator: '=' } },
]);
});
it('ignores unknown filters', () => {
const { tokens } = initializeValuesFromQuery('unknown=dummy');
expect(tokens).toMatchObject([]);
});
it('returns correct search term', () => {
const { tokens } = initializeValuesFromQuery('search=my search term');
expect(tokens).toMatchObject(['my search term']);
});
});
});
});
describe('goTo', () => {
it('reset pagination and contains sorting', () => {
goTo('name', true, []);

View File

@ -64,3 +64,47 @@ export const openMRsDetailResult = jest.fn().mockResolvedValue({
},
},
});
export const mockPermalinkResult = jest.fn().mockResolvedValue({
data: {
project: {
id: '1',
repository: {
paginatedTree: {
nodes: [
{
__typename: 'Tree',
permalinkPath:
'/gitlab-org/gitlab-shell/-/tree/5059017dea6e834f2f86fc670703ca36cbae98d6/cmd',
},
],
__typename: 'TreeConnection',
},
__typename: 'Repository',
},
__typename: 'Project',
},
},
});
export const mockRootPermalinkResult = jest.fn().mockResolvedValue({
data: {
project: {
id: '2',
repository: {
paginatedTree: {
nodes: [
{
__typename: 'Tree',
permalinkPath:
'/gitlab-org/gitlab-shell/-/tree/5059017dea6e834f2f86fc670703ca36cbae98d6/',
},
],
__typename: 'TreeConnection',
},
__typename: 'Repository',
},
__typename: 'Project',
},
},
});

View File

@ -4,7 +4,7 @@ import PermalinkDropdownItem from '~/repository/components/header_area/permalink
import { keysFor, PROJECT_FILES_GO_TO_PERMALINK } from '~/behaviors/shortcuts/keybindings';
import { shouldDisableShortcuts } from '~/behaviors/shortcuts/shortcuts_toggle';
import { Mousetrap } from '~/lib/mousetrap';
import { lineState } from '~/blob/state';
import { hashState } from '~/blob/state';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
jest.mock('~/behaviors/shortcuts/shortcuts_toggle');
@ -35,7 +35,7 @@ describe('PermalinkDropdownItem', () => {
const findPermalinkLinkDropdown = () => wrapper.findComponent(GlDisclosureDropdownItem);
beforeEach(() => {
lineState.currentLineNumber = null;
hashState.currentHash = null;
createComponent();
});
@ -51,13 +51,31 @@ describe('PermalinkDropdownItem', () => {
});
it('returns updated path with line number when set', () => {
lineState.currentLineNumber = '10';
hashState.currentHash = 10;
createComponent();
expect(findPermalinkLinkDropdown().attributes('data-clipboard-text')).toBe(
`http://test.host/flightjs/Flight/-/blob/46ca9ebd5a43ec240ee8d64e2bb829169dff744e/bower.json#L10`,
);
});
it('returns updated path with line number range when set', () => {
hashState.currentHash = '#L5-10';
createComponent();
expect(findPermalinkLinkDropdown().attributes('data-clipboard-text')).toBe(
`http://test.host/flightjs/Flight/-/blob/46ca9ebd5a43ec240ee8d64e2bb829169dff744e/bower.json#L5-10`,
);
});
it('returns updated path with anchors when set', () => {
hashState.currentHash = '#something-wonderful';
createComponent();
expect(findPermalinkLinkDropdown().attributes('data-clipboard-text')).toBe(
`http://test.host/flightjs/Flight/-/blob/46ca9ebd5a43ec240ee8d64e2bb829169dff744e/bower.json#something-wonderful`,
);
});
});
describe('handles onCopyPermalink correctly', () => {
@ -71,9 +89,14 @@ describe('PermalinkDropdownItem', () => {
it('triggers copy permalink when shortcut is used', async () => {
const clickSpy = jest.spyOn(findPermalinkLinkDropdown().element, 'click');
Mousetrap.trigger('y');
const mousetrapInstance = wrapper.vm.mousetrap;
const triggerSpy = jest.spyOn(mousetrapInstance, 'trigger');
mousetrapInstance.trigger('y');
await nextTick();
expect(triggerSpy).toHaveBeenCalledWith('y');
expect(clickSpy).toHaveBeenCalled();
expect(mockToastShow).toHaveBeenCalledWith('Permalink copied to clipboard.');
});
@ -81,8 +104,8 @@ describe('PermalinkDropdownItem', () => {
describe('lifecycle hooks', () => {
it('binds and unbinds Mousetrap shortcuts', () => {
const bindSpy = jest.spyOn(Mousetrap, 'bind');
const unbindSpy = jest.spyOn(Mousetrap, 'unbind');
const bindSpy = jest.spyOn(Mousetrap.prototype, 'bind');
const unbindSpy = jest.spyOn(Mousetrap.prototype, 'unbind');
createComponent();
expect(bindSpy).toHaveBeenCalledWith(

View File

@ -1,7 +1,27 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { RouterLinkStub } from '@vue/test-utils';
import { GlDisclosureDropdownItem } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import RepositoryOverflowMenu from '~/repository/components/header_area/repository_overflow_menu.vue';
import PermalinkDropdownItem from '~/repository/components/header_area/permalink_dropdown_item.vue';
import permalinkPathQuery from '~/repository/queries/permalink_path.query.graphql';
import { logError } from '~/lib/logger';
import {
mockPermalinkResult,
mockRootPermalinkResult,
} from 'jest/repository/components/header_area/mock_data';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
Vue.use(VueApollo);
jest.mock('~/lib/logger');
jest.mock('~/sentry/sentry_browser_wrapper');
const path = 'cmd';
const projectPath = 'gitlab-org/gitlab-shell';
const ref = '5059017dea6e834f2f86fc670703ca36cbae98d6';
const defaultMockRoute = {
params: {
@ -18,18 +38,34 @@ const defaultMockRoute = {
describe('RepositoryOverflowMenu', () => {
let wrapper;
let permalinkQueryHandler;
const findDropdownItems = () => wrapper.findAllComponents(GlDisclosureDropdownItem);
const findDropdownItemWithText = (text) =>
findDropdownItems().wrappers.find((x) => x.props('item').text === text);
const findCompareItem = () => findDropdownItemWithText('Compare');
const createComponent = (route = {}, provide = {}) => {
const findPermalinkItem = () => wrapper.findComponent(PermalinkDropdownItem);
const createComponent = ({
route = {},
provide = {},
props = {},
mockResolver = mockPermalinkResult,
} = {}) => {
permalinkQueryHandler = mockResolver;
const mockApollo = createMockApollo([[permalinkPathQuery, mockResolver]]);
return shallowMountExtended(RepositoryOverflowMenu, {
provide: {
comparePath: null,
...provide,
},
propsData: {
fullPath: projectPath,
path,
currentRef: ref,
...props,
},
stubs: {
RouterLink: RouterLinkStub,
},
@ -39,6 +75,7 @@ describe('RepositoryOverflowMenu', () => {
...route,
},
},
apolloProvider: mockApollo,
});
};
@ -50,26 +87,72 @@ describe('RepositoryOverflowMenu', () => {
expect(wrapper.exists()).toBe(true);
});
describe('Compare item', () => {
it('does not render Compare button for root ref', () => {
wrapper = createComponent({ params: { path: '/-/tree/new-branch-3' } });
expect(findCompareItem()).toBeUndefined();
});
it('renders Compare button for non-root ref', () => {
wrapper = createComponent(
{ params: { path: '/-/tree/new-branch-3' } },
{ comparePath: 'test/project/-/compare?from=master&to=new-branch-3' },
);
expect(findCompareItem().exists()).toBe(true);
expect(findCompareItem().props('item')).toMatchObject({
href: 'test/project/-/compare?from=master&to=new-branch-3',
describe('computed properties', () => {
it('computes queryVariables correctly', () => {
expect(permalinkQueryHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab-shell',
path: 'cmd',
ref: '5059017dea6e834f2f86fc670703ca36cbae98d6',
});
});
it('does not render compare button when comparePath is not provided', () => {
wrapper = createComponent();
expect(findCompareItem()).toBeUndefined();
describe('Compare item', () => {
it('does not render Compare button for root ref', () => {
wrapper = createComponent({ route: { params: { path: '/-/tree/new-branch-3' } } });
expect(findCompareItem()).toBeUndefined();
});
it('renders Compare button for non-root ref', () => {
wrapper = createComponent({
route: {
params: { path: '/-/tree/new-branch-3' },
},
provide: { comparePath: 'test/project/-/compare?from=master&to=new-branch-3' },
});
expect(findCompareItem().exists()).toBe(true);
expect(findCompareItem().props('item')).toMatchObject({
href: 'test/project/-/compare?from=master&to=new-branch-3',
});
});
it('does not render compare button when comparePath is not provided', () => {
wrapper = createComponent();
expect(findCompareItem()).toBeUndefined();
});
});
describe('Permalink item', () => {
it('renders Permalink button for non-root route', async () => {
wrapper = createComponent();
await waitForPromises();
expect(findPermalinkItem().props('permalinkPath')).toBe(
'/gitlab-org/gitlab-shell/-/tree/5059017dea6e834f2f86fc670703ca36cbae98d6/cmd',
);
});
it('renders Permalink button with projectPath for root route', async () => {
wrapper = createComponent({
props: { path: undefined },
mockResolver: mockRootPermalinkResult,
});
await waitForPromises();
expect(findPermalinkItem().props('permalinkPath')).toBe(
'/gitlab-org/gitlab-shell/-/tree/5059017dea6e834f2f86fc670703ca36cbae98d6/',
);
});
it('handles errors when fetching permalinkPath', async () => {
const mockError = new Error();
wrapper = createComponent({ mockResolver: jest.fn().mockRejectedValueOnce(mockError) });
await waitForPromises();
expect(findPermalinkItem().exists()).toBe(false);
expect(logError).toHaveBeenCalledWith(
'Failed to fetch permalink. See exception details for more information.',
mockError,
);
expect(Sentry.captureException).toHaveBeenCalledWith(mockError);
});
});
});
});

View File

@ -209,19 +209,27 @@ describe('HeaderArea', () => {
});
describe('RepositoryOverflowMenu', () => {
it('does not render RepositoryOverflowMenu component on default ref', () => {
expect(findRepositoryOverflowMenu().exists()).toBe(false);
it('renders RepositoryOverflowMenu component with correct props when on default branch', () => {
wrapper = createComponent({
route: { name: 'treePathDecoded' },
});
expect(findRepositoryOverflowMenu().props()).toStrictEqual({
currentRef: 'main',
fullPath: 'test/project',
path: 'index.js',
});
});
it('renders RepositoryOverflowMenu component with correct props when on ref different than default branch', () => {
it('renders RepositoryOverflowMenu component with correct props when on non-default branch', () => {
wrapper = createComponent({
route: { name: 'treePathDecoded' },
provided: { comparePath: 'test/project/compare' },
});
expect(findRepositoryOverflowMenu().exists()).toBe(true);
expect(findRepositoryOverflowMenu().props('comparePath')).toBe(
headerAppInjected.comparePath,
);
expect(findRepositoryOverflowMenu().props()).toStrictEqual({
currentRef: 'main',
fullPath: 'test/project',
path: 'index.js',
});
});
});
});

View File

@ -7,6 +7,7 @@ import AccessTokenForm from '~/vue_shared/access_tokens/components/access_token_
import { useAccessTokens } from '~/vue_shared/access_tokens/stores/access_tokens';
import waitForPromises from 'helpers/wait_for_promises';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { DEFAULT_FILTER, DEFAULT_SORT } from '~/access_tokens/constants';
Vue.use(PiniaVuePlugin);
@ -48,8 +49,10 @@ describe('AccessTokens', () => {
waitForPromises();
expect(store.setup).toHaveBeenCalledWith({
filters: [{ type: 'state', value: { data: 'active', operator: '=' } }],
filters: DEFAULT_FILTER,
id: 235,
page: 1,
sorting: DEFAULT_SORT,
urlCreate: '/api/v4/groups/1/service_accounts/:id/personal_access_tokens/',
urlRevoke: '/api/v4/groups/2/service_accounts/:id/personal_access_tokens/',
urlRotate: '/api/v4/groups/3/service_accounts/:id/personal_access_tokens/',

View File

@ -59,6 +59,8 @@ describe('useAccessTokens store', () => {
const mockAxios = new MockAdapter(axios);
const filters = ['dummy'];
const id = 235;
const page = 1;
const sorting = DEFAULT_SORT;
const urlCreate = '/api/v4/groups/1/service_accounts/:id/personal_access_tokens';
const urlRevoke = '/api/v4/groups/2/service_accounts/:id/personal_access_tokens';
const urlRotate = '/api/v4/groups/3/service_accounts/:id/personal_access_tokens';
@ -81,7 +83,7 @@ describe('useAccessTokens store', () => {
const scopes = ['dummy-scope'];
beforeEach(() => {
store.setup({ id, filters, urlCreate, urlShow });
store.setup({ filters, id, page, sorting, urlCreate, urlShow });
});
it('dismisses any existing alert', () => {
@ -182,7 +184,7 @@ describe('useAccessTokens store', () => {
const title = 'Active tokens';
const tooltipTitle = 'Filter for active tokens';
beforeEach(() => {
store.setup({ id, filters, urlShow });
store.setup({ filters, id, page, sorting, urlShow });
update2WeekFromNow.mockReturnValueOnce([{ title, tooltipTitle, filters }]);
});
@ -230,7 +232,7 @@ describe('useAccessTokens store', () => {
describe('fetchTokens', () => {
beforeEach(() => {
store.setup({ id, filters, urlShow });
store.setup({ filters, id, page, sorting, urlShow });
});
it('sets busy to true when fetching', () => {
@ -299,7 +301,7 @@ describe('useAccessTokens store', () => {
describe('revokeToken', () => {
beforeEach(() => {
store.setup({ id, filters, urlRevoke, urlShow });
store.setup({ filters, id, page, sorting, urlRevoke, urlShow });
});
it('sets busy to true when revoking', () => {
@ -412,7 +414,7 @@ describe('useAccessTokens store', () => {
describe('rotateToken', () => {
beforeEach(() => {
store.setup({ id, filters, urlRotate, urlShow });
store.setup({ filters, id, page, sorting, urlRotate, urlShow });
});
it('sets busy to true when rotating', () => {
@ -555,10 +557,12 @@ describe('useAccessTokens store', () => {
describe('setup', () => {
it('sets up the store', () => {
store.setup({ filters, id, urlCreate, urlRevoke, urlRotate, urlShow });
store.setup({ filters, id, page, sorting, urlCreate, urlRevoke, urlRotate, urlShow });
expect(store.filters).toEqual(filters);
expect(store.id).toBe(id);
expect(store.page).toBe(page);
expect(store.sorting).toEqual(sorting);
expect(store.urlCreate).toBe(urlCreate);
expect(store.urlRevoke).toBe(urlRevoke);
expect(store.urlRotate).toBe(urlRotate);

View File

@ -1,4 +1,15 @@
import { defaultDate, serializeParams, update2WeekFromNow } from '~/vue_shared/access_tokens/utils';
import {
defaultDate,
serializeParams,
update2WeekFromNow,
updateUrlWithQueryParams,
} from '~/vue_shared/access_tokens/utils';
import { getBaseURL, updateHistory } from '~/lib/utils/url_utility';
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
updateHistory: jest.fn(),
}));
// Current date, `new Date()`, for these tests is 2020-07-06
describe('defaultDate', () => {
@ -86,3 +97,14 @@ describe('update2WeekFromNow', () => {
expect(result[0].filters).not.toBe(param[0].filters);
});
});
describe('updateUrlWithQueryParams', () => {
it('calls updateHistory with correct parameters', () => {
updateUrlWithQueryParams({ params: { page: 1, revoked: true }, sort: 'name_asc' });
expect(updateHistory).toHaveBeenCalledWith({
url: `${getBaseURL()}/?page=1&revoked=true&sort=name_asc`,
replace: true,
});
});
});

View File

@ -2588,6 +2588,44 @@ RSpec.describe Gitlab::Database::MigrationHelpers, feature_category: :database d
end
end
describe '#feature_flag_enabled?' do
let(:feature_flag_name) { 'test_feature_flag' }
context 'when feature flag is enabled' do
let(:result) { instance_double(PG::Result, ntuples: 1) }
it 'returns true' do
expected_sql = <<~SQL.squish
SELECT 1
FROM feature_gates
WHERE feature_key = '#{feature_flag_name}'
AND value = 'true'
LIMIT 1;
SQL
expect(model).to receive(:execute).with(expected_sql.strip).and_return(result)
expect(model.feature_flag_enabled?(feature_flag_name)).to be true
end
end
context 'when feature flag is disabled' do
let(:result) { instance_double(PG::Result, ntuples: 0) }
it 'returns false' do
expected_sql = <<~SQL.squish
SELECT 1
FROM feature_gates
WHERE feature_key = '#{feature_flag_name}'
AND value = 'true'
LIMIT 1;
SQL
expect(model).to receive(:execute).with(expected_sql.strip).and_return(result)
expect(model.feature_flag_enabled?(feature_flag_name)).to be false
end
end
end
describe 'bigint conversion helpers' do
include MigrationsHelpers
include Database::TriggerHelpers

View File

@ -67,4 +67,37 @@ RSpec.describe 'getting a WorkItem description template and content', feature_ca
expect(expected_graphql_data).to be_nil
end
end
context 'when two templates from different projects have identical names' do
let(:query) do
graphql_query_for(:workItemDescriptionTemplateContent,
{ templateContentInput: { projectId: project.id, name: "project_issues_template_a" } })
end
let_it_be(:group_default_template_files) do
{
".gitlab/issue_templates/project_issues_template_a.md" => "group default content"
}
end
let_it_be(:group_default_template_project) do
create(:project, :custom_repo, files: group_default_template_files, group: group)
end
before do
group.file_template_project_id = group_default_template_project.id
end
it 'returns the template from the specified project' do
post_graphql(query, current_user: current_user)
expect(expected_graphql_data["projectId"]).to eq(project.id)
expect(expected_graphql_data["name"]).to eq("project_issues_template_a")
expect(expected_graphql_data["category"]).to be_nil
expect(expected_graphql_data["content"]).to eq("project_issues_template_a content")
expect(response).to have_gitlab_http_status(:ok)
expect(graphql_errors).to be_nil
end
end
end