Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-09-04 12:11:58 +00:00
parent 96afc7c114
commit c01e12a62e
59 changed files with 1401 additions and 422 deletions

View File

@ -44,8 +44,8 @@ export default {
:data-testid="`list-item-${itemIndex}`"
>
<gl-intersperse separator=" - ">
<span v-for="(field, fieldIndex) in fields" :key="fieldIndex">
<component :is="presenter.forField(item, field)" />
<span v-for="field in fields" :key="field.key">
<component :is="presenter.forField(item, field.key)" />
</span>
</gl-intersperse>
</li>

View File

@ -1,6 +1,5 @@
<script>
import { GlIcon } from '@gitlab/ui';
import { toSentenceCase } from '../../utils/common';
import Sorter from '../../core/sorter';
export default {
@ -26,10 +25,7 @@ export default {
return {
items,
fields: this.config.fields.map((field) => ({
key: field,
label: toSentenceCase(field),
})),
fields: this.config.fields,
sorter: new Sorter(items),
};
},

View File

@ -1,91 +1,26 @@
import { uniq } from 'lodash';
import { GitLabQueryLanguage as GlqlCompiler } from '@gitlab/query-language';
import { gql } from '@apollo/client/core';
import createDefaultClient from '~/lib/graphql';
import { extractGroupOrProject, parseQueryText, parseFrontmatter } from '../utils/common';
/**
* @import ApolloClient from '@apollo/client/core';
*/
const REQUIRED_QUERY_FIELDS = ['id', 'iid', 'title', 'webUrl', 'reference', 'state', 'type'];
const DEFAULT_DISPLAY_FIELDS = ['title'];
export default class Executor {
#compiler;
#client;
#compiled;
async #initCompiler() {
const compiler = GlqlCompiler();
const { group, project } = extractGroupOrProject();
compiler.group = group;
compiler.project = project;
compiler.username = gon.current_username;
await compiler.initialize();
this.#compiler = compiler;
}
/**
* Set the ApolloClient instance or use the default one
*
* @param {ApolloClient} client
*/
#initClient(client) {
this.#client = client || createDefaultClient();
}
/**
* Initialize the Executor with the given ApolloClient instance
*
* @param {ApolloClient?} client
* @returns {Promise<Executor>} this
*/
async init(client) {
await this.#initCompiler();
this.#initClient(client);
init(client = createDefaultClient()) {
this.#client = client;
return this;
}
/**
* Compile the given GLQL query with metadata
*
* @param {*} glqlQueryWithMetadata
* @returns {Executor} this
*/
compile(glqlQueryWithMetadata) {
const { frontmatter, query } = parseQueryText(glqlQueryWithMetadata);
const config = parseFrontmatter(frontmatter, { fields: DEFAULT_DISPLAY_FIELDS });
this.#compiler.fields = uniq([...REQUIRED_QUERY_FIELDS, ...config.fields]);
const limit = Math.min(100, parseInt(config.limit, 10) || 100);
const { output } = this.#compiler.compile('graphql', query, limit);
if (output.toLowerCase().startsWith('error')) {
throw new Error(output.replace(/^error: /i, ''));
}
this.#compiled = { query: output, config };
return this;
}
/**
* Execute the compiled query and return the result
*
* @returns {Promise<{ data: any, config: any }>}
*/
async execute() {
const { query, config } = this.#compiled;
async execute(query) {
const { data } = await this.#client.query({
query: gql`
${query}
`,
});
return { data, config };
return data;
}
}
/**
 * Convenience wrapper: build an Executor with the default client and run the
 * given compiled query against it.
 *
 * @param {string} query - compiled GraphQL query
 * @returns {Promise<any>} raw query response data
 */
export const execute = async (query) => new Executor().init().execute(query);

View File

@ -1,12 +1,11 @@
import Executor from './executor';
import Presenter from './presenter';
import { execute } from './executor';
import { parse } from './parser';
import { present } from './presenter';
import { transform } from './transformer/data';
export const executeAndPresentQuery = async (query) => {
const executor = await new Executor().init();
const { data, config } = await executor.compile(query).execute();
const { component } = new Presenter().init({
data: (data.project || data.group).issues,
config,
});
return component;
/**
 * Full GLQL pipeline: parse the raw block, execute the compiled query,
 * transform the response for display, and present it.
 *
 * @param {string} glqlQuery - raw GLQL text (optional frontmatter + query)
 * @returns {Promise<object>} presentable component for the results
 */
export const executeAndPresentQuery = async (glqlQuery) => {
  const parsed = await parse(glqlQuery);
  const data = await execute(parsed.query);
  return present(transform(data, parsed.config), parsed.config);
};

View File

@ -0,0 +1,11 @@
/** Node type tags used by the GLQL fields parser AST. */
export const Types = {
  FIELD_NAME: 'field_name',
  STRING: 'string',
  COLLECTION: 'collection',
  FUNCTION_CALL: 'function_call',
};

// Shared node constructor: a `type` tag plus node-specific properties.
const node = (type, props) => ({ type, ...props });

/** Bare identifier node, e.g. `title`. */
export const fieldName = (name) => node(Types.FIELD_NAME, { value: name });

/** Quoted string literal node (quotes already stripped by the parser). */
export const string = (value) => node(Types.STRING, { value });

/** Function invocation node, e.g. `labels("bug")`. */
export const functionCall = (name, args) => node(Types.FUNCTION_CALL, { name, args });

/** Ordered list of sibling nodes. */
export const collection = (...values) => node(Types.COLLECTION, { value: values });

View File

@ -0,0 +1,82 @@
import { __ } from '~/locale';
import { truncate } from '~/lib/utils/text_utility';
/**
 * Minimal parser-combinator toolkit used by the GLQL fields parser.
 *
 * A parser wraps a function `input -> result`, where a result is either
 * `{ success: true, value, rest }` (parsed value plus unconsumed input) or
 * `{ success: false, expected, got }` (what was expected vs. what was found).
 */
const ok = (value, rest) => ({ success: true, value, rest });
const fail = (expected, got) => ({ success: false, expected, got });

export const Parser = (parserFn) => ({
  run: parserFn,

  // Transform a successful value; failures pass through untouched.
  map(fn) {
    return Parser((input) => {
      const outcome = this.run(input);
      if (!outcome.success) return outcome;
      return { ...outcome, value: fn(outcome.value) };
    });
  },

  // Feed a successful value into a parser-producing continuation, which then
  // consumes the remaining input.
  chain(fn) {
    return Parser((input) => {
      const outcome = this.run(input);
      if (!outcome.success) return outcome;
      return fn(outcome.value).run(outcome.rest);
    });
  },
});

// Match the literal string `s` at the start of the input.
export const str = (s) =>
  Parser((input) => {
    if (!input.startsWith(s)) return fail(s, input.slice(0, s.length));
    return ok(s, input.slice(s.length));
  });

// Match a regular expression anchored at the start of the input.
export const regex = (re, description) =>
  Parser((input) => {
    const match = input.match(re);
    if (!match || match.index !== 0) return fail(description, truncate(input, 10));
    const [matched] = match;
    return ok(matched, input.slice(matched.length));
  });

// Run `parsers` one after another, collecting every value in order.
// Fails with the first sub-parser's failure.
export const seq = (...parsers) =>
  Parser((input) => {
    let remaining = input;
    const values = [];
    for (let i = 0; i < parsers.length; i += 1) {
      const outcome = parsers[i].run(remaining);
      if (!outcome.success) return outcome;
      values.push(outcome.value);
      remaining = outcome.rest;
    }
    return ok(values, remaining);
  });

// Try each parser on the same input; first success wins.
export const alt = (...parsers) =>
  Parser((input) => {
    for (let i = 0; i < parsers.length; i += 1) {
      const outcome = parsers[i].run(input);
      if (outcome.success) return outcome;
    }
    return fail(__('something to parse'), truncate(input, 10));
  });

// Apply `parser` zero or more times until it stops matching; never fails.
export const many = (parser) =>
  Parser((input) => {
    const values = [];
    let remaining = input;
    for (;;) {
      const outcome = parser.run(remaining);
      if (!outcome.success) break;
      values.push(outcome.value);
      remaining = outcome.rest;
    }
    return ok(values, remaining);
  });

// Succeed with `null` (consuming nothing) when `parser` fails.
export const optional = (parser) =>
  alt(
    parser,
    Parser((input) => ok(null, input)),
  );

// One or more whitespace characters.
export const whitespace = regex(/^\s+/, 'whitespace');

// Skip optional leading whitespace before `parser`.
export const token = (parser) => seq(optional(whitespace), parser).map(([, parsed]) => parsed);

View File

@ -0,0 +1,16 @@
import jsYaml from 'js-yaml';
import { uniq } from 'lodash';
import { transformAstToDisplayFields } from '../transformer/ast';
import { parseFields } from './fields';
/**
 * Parse YAML frontmatter into a display config object.
 *
 * `fields` may be given as a comma-separated expression in the frontmatter;
 * otherwise the caller-provided default field list is joined and used. The
 * expression is parsed and converted into display-field descriptors.
 *
 * @param {string} frontmatter - raw YAML between the `---` markers
 * @param {object} [defaults] - fallback values, e.g. `{ fields: ['title'] }`
 * @returns {object} config with parsed `fields` and a `display` mode
 */
export const parseConfig = (frontmatter, defaults = {}) => {
  const config = jsYaml.safeLoad(frontmatter) || {};

  // The `?.` belongs on `fields`, not `defaults`: the previous
  // `defaults?.fields.join(',')` threw a TypeError whenever `defaults`
  // existed but had no `fields` key.
  const rawFields = config.fields || defaults.fields?.join(',') || '';

  config.fields = uniq(transformAstToDisplayFields(parseFields(rawFields)));
  config.display = config.display || 'list';
  return config;
};

View File

@ -0,0 +1,46 @@
import { sprintf, __ } from '~/locale';
import { truncate } from '~/lib/utils/text_utility';
import { alt, many, optional, regex, seq, str, token } from './combinators';
import * as ast from './ast';
// Grammar for the `fields:` frontmatter expression, built from the combinator
// primitives. Each rule produces an AST node (see ./ast).

// Bare identifier, e.g. `title` or `health_status` (case-insensitive).
const fieldName = token(regex(/^[a-z_][a-z0-9_]*/i, __('field name'))).map((name) =>
  ast.fieldName(name),
);

// Single- or double-quoted string; surrounding quotes are stripped, escape
// sequences are kept verbatim.
const string = token(regex(/^"([^"\\]|\\.)*"|'([^'\\]|\\.)*'/, __('string'))).map((s) =>
  ast.string(s.slice(1, -1)),
);

// `parser (separator parser)*`, folded into a single collection node.
const sepBy = (parser, separator) =>
  seq(parser, many(seq(separator, parser))).map(([first, rest]) =>
    ast.collection(first, ...rest.map(([, item]) => item)),
  );

const leftParen = token(str('('));
const rightParen = token(str(')'));
const comma = token(str(','));

// Function call, e.g. `labels("bug", "feature")`; the argument list may be
// empty (functionArgs then yields null).
const functionName = token(regex(/^[a-z_][a-z0-9_]*/i, __('function name')));
const functionArgs = optional(sepBy(string, comma));
const functionCall = seq(functionName, leftParen, functionArgs, rightParen).map(([name, , args]) =>
  ast.functionCall(name, args),
);

// A field entry is either a function call or a plain field name; function
// calls are tried first so `labels(` is not parsed as the field `labels`.
const value = alt(functionCall, fieldName);

// Top level: comma-separated list of field entries.
const parser = sepBy(value, comma);
/**
 * Parse a `fields` expression (e.g. `title, labels("bug"), health`) into an
 * AST collection node.
 *
 * @param {string} input - raw fields expression from the frontmatter
 * @returns {object} collection AST node of field names / function calls
 * @throws {Error} on unparsable input or trailing unconsumed text
 */
export const parseFields = (input) => {
  const result = parser.run(input);

  // A failed parse result has no `rest` property, so check `success` before
  // inspecting leftovers (previously this crashed with a TypeError on any
  // input the grammar rejected).
  if (!result.success)
    throw new Error(sprintf(__('Parse error: Expected `%{expected}`, but got `%{got}`.'), result));

  const rest = result.rest.trim();
  if (rest)
    throw new Error(
      sprintf(__('Parse error: Unexpected input near `%{input}`.'), {
        input: truncate(rest, 10),
      }),
    );

  return result.value;
};

View File

@ -0,0 +1,21 @@
import { parseConfig } from './config';
import { parseQuery } from './query';
const DEFAULT_DISPLAY_FIELDS = ['title'];
/**
 * Split a GLQL block into its YAML frontmatter and the query proper.
 * Frontmatter is delimited by `---` lines; both parts are trimmed.
 *
 * @param {string} text - raw GLQL block text
 * @returns {{ frontmatter: string, query: string }}
 */
export const parseQueryText = (text) => {
  const match = text.match(/---\n([\s\S]*?)\n---/);
  if (!match) return { frontmatter: '', query: text.trim() };
  return {
    frontmatter: match[1].trim(),
    query: text.replace(match[0], '').trim(),
  };
};
/**
 * Parse a GLQL block into a compiled query plus its display config.
 *
 * @param {string} glqlQuery - raw text: optional YAML frontmatter + query
 * @param {string} [target='graphql'] - compile target passed to the parser
 * @returns {Promise<{ query: string, config: object }>}
 */
export const parse = async (glqlQuery, target = 'graphql') => {
  const { frontmatter, query } = parseQueryText(glqlQuery);
  const config = parseConfig(frontmatter, { fields: DEFAULT_DISPLAY_FIELDS });
  // `|| undefined` deliberately turns NaN (and 0) into "no limit".
  const limit = Number.parseInt(config.limit, 10) || undefined;
  const compiled = await parseQuery(query, { ...config, target, limit });
  return { query: compiled, config };
};

View File

@ -0,0 +1,28 @@
import { uniq, once } from 'lodash';
import { GitLabQueryLanguage as QueryParser } from '@gitlab/query-language';
import { extractGroupOrProject } from '../../utils/common';
// Fields every compiled query must select regardless of the display fields.
const REQUIRED_QUERY_FIELDS = ['id', 'iid', 'title', 'webUrl', 'reference', 'state', 'type'];

// Lazily create and initialize the GLQL query parser exactly once (memoized
// with lodash `once`; concurrent callers share the same promise). The parser
// is scoped to the group/project derived from the current URL and to the
// signed-in user taken from the global `gon` object.
const initParser = once(async () => {
  const parser = QueryParser();
  const { group, project } = extractGroupOrProject();
  parser.group = group;
  parser.project = project;
  parser.username = gon.current_username;
  await parser.initialize();
  return parser;
});
/**
 * Compile a GLQL query string into the target language (default GraphQL).
 *
 * @param {string} query - GLQL query text
 * @param {object} config - `{ fields, target?, limit? }` display configuration
 * @returns {Promise<string>} compiled query
 * @throws {Error} when the underlying compiler reports an error
 */
export const parseQuery = async (query, config) => {
  const parser = await initParser();
  const displayFieldNames = config.fields.map(({ name }) => name);
  parser.fields = uniq([...REQUIRED_QUERY_FIELDS, ...displayFieldNames]);

  const { output } = parser.compile(config.target || 'graphql', query, config.limit);
  if (/^error/i.test(output)) throw new Error(output.replace(/^error: /i, ''));
  return output;
};

View File

@ -103,3 +103,8 @@ export default class Presenter {
return this.#component;
}
}
/**
 * Build a presenter for the transformed data and return its component.
 *
 * @param {object} data - transformed query result
 * @param {object} config - display configuration
 * @param {...object} props - extra options forwarded to Presenter#init
 * @returns {object} the presenter's component
 */
export const present = (data, config, ...props) =>
  new Presenter().init({ data, config, ...props }).component;

View File

@ -0,0 +1,39 @@
import { uniqueId } from 'lodash';
import { __, sprintf } from '~/locale';
import { toSentenceCase } from '../../utils/common';
import * as ast from '../parser/ast';
import { getFieldAlias } from './field_aliases';
import { getFunction } from './functions';
// Resolve an AST node to the GraphQL field name(s) it refers to.
// - Collections map each member recursively.
// - Field names are routed through known aliases (e.g. `assignee` -> `assignees`).
// - Function calls delegate to the registered function's own field-name
//   resolver, passing the resolved argument values.
// - Any other node (e.g. STRING) resolves to its raw `value`.
const getValue = (astNode) => {
  if (astNode.type === ast.Types.COLLECTION) return astNode.value.map(getValue);
  if (astNode.type === ast.Types.FIELD_NAME) return getFieldAlias(astNode.value) || astNode.value;
  if (astNode.type === ast.Types.FUNCTION_CALL) {
    const fn = getFunction(astNode.name);
    if (!fn) throw new Error(sprintf(__('Unknown function: %{name}'), { name: astNode.name }));
    return fn.getFieldName(...getValue(astNode.args));
  }
  return astNode.value;
};
/**
 * Convert a parsed fields AST into display-field descriptors consumed by the
 * presenter: `{ key, label, name, transform? }`.
 *
 * - Field names use their (possibly aliased) GraphQL name as both key and name.
 * - Function calls get a unique key, a label from the function definition, and
 *   a data transformer bound to that key.
 *
 * @throws {Error} for unknown functions or unsupported node types
 */
export const transformAstToDisplayFields = (astNode) => {
  if (astNode.type === ast.Types.COLLECTION) return astNode.value.map(transformAstToDisplayFields);
  if (astNode.type === ast.Types.FIELD_NAME) {
    const fieldName = getValue(astNode);
    return { key: fieldName, label: toSentenceCase(astNode.value), name: fieldName };
  }
  if (astNode.type === ast.Types.FUNCTION_CALL) {
    const fn = getFunction(astNode.name);
    // Guard unknown functions with the same friendly error `getValue` raises;
    // previously this crashed with a TypeError on `fn.getFieldLabel`.
    if (!fn) throw new Error(sprintf(__('Unknown function: %{name}'), { name: astNode.name }));

    const args = getValue(astNode.args);
    // uniqueId guards against key collisions when the same call appears twice.
    const key = uniqueId(`${astNode.name}_${args.join('_')}_`);
    return {
      key,
      label: fn.getFieldLabel(...args),
      name: getValue(astNode),
      transform: fn.getTransformer(key, ...args),
    };
  }
  throw new Error(sprintf(__('Unknown value type: %{type}'), { type: astNode.type }));
};

View File

@ -0,0 +1,28 @@
import { __, sprintf } from '~/locale';
// Extractors that pull the relevant collection out of a raw GraphQL response,
// keyed by data source name.
const dataSourceTransformers = {
  issues: (data) => (data.project || data.group).issues,
};

// Pick and run the extractor for `source` (defaults to issues).
const transformForDataSource = (data, source = 'issues') => {
  const extractor = dataSourceTransformers[source];
  if (!extractor) throw new Error(sprintf(__('Unknown data source: %{source}'), { source }));
  return extractor(data);
};

// Apply a single field's transformer, if it defines one.
const transformField = (data, field) => (field.transform ? field.transform(data) : data);

// Thread the data through every field transformer in order.
const transformFields = (data, fields) =>
  fields.reduce((acc, field) => transformField(acc, field), data);

/**
 * Prepare raw query response data for display: extract the data-source
 * collection, then apply each display field's transformer.
 *
 * @param {object} data - raw GraphQL response data
 * @param {object} config - `{ source?, fields }` display configuration
 * @returns {object} transformed collection
 */
export const transform = (data, config) =>
  transformFields(transformForDataSource(data, config.source), config.fields);

View File

@ -0,0 +1,11 @@
// Display-name -> GraphQL-field aliases. Stored in a Map so lookups can never
// collide with Object.prototype members: the previous plain-object lookup
// returned inherited functions for names like `toString` or `constructor`.
const fieldAliases = new Map([
  ['assignee', 'assignees'],
  ['closed', 'closedAt'],
  ['created', 'createdAt'],
  ['due', 'dueDate'],
  ['health', 'healthStatus'],
  ['label', 'labels'],
  ['updated', 'updatedAt'],
]);

/**
 * Resolve a user-facing field name to its GraphQL field name.
 * Returns the input unchanged when no alias exists.
 *
 * @param {string} fieldName
 * @returns {string}
 */
export const getFieldAlias = (fieldName) => fieldAliases.get(fieldName) ?? fieldName;

View File

@ -0,0 +1,33 @@
import { n__ } from '~/locale';
import { toSentenceCase } from '../../utils/common';
// Registry of GLQL display functions, keyed by the name used in the query.
const functions = {
  labels: {
    // GraphQL field this function reads from.
    getFieldName: () => 'labels',

    // Column header, e.g. `Labels: Bug, Feature`.
    getFieldLabel: (...values) => {
      const labels = values.map(toSentenceCase).join(', ');
      return `${n__('Label', 'Labels', values.length)}: ${labels}`;
    },

    // Returns a transformer that moves labels matching any of `values`
    // (case-insensitive substring match) out of `node.labels` into a new
    // `node[key]` collection. The previous implementation filtered each
    // node's label list twice with a negated predicate and re-lowercased
    // every value per label; this hoists the lowercased needles and
    // partitions in a single pass.
    getTransformer:
      (key, ...values) =>
      (data) => {
        const needles = values.map((value) => value.toLowerCase());
        const isMatch = (label) =>
          needles.some((needle) => label.title.toLowerCase().includes(needle));

        return {
          ...data,
          nodes: data.nodes.map((node) => {
            const matched = [];
            const unmatched = [];
            for (const label of node.labels.nodes) {
              (isMatch(label) ? matched : unmatched).push(label);
            }

            return {
              ...node,
              [key]: { ...node.labels, nodes: matched },
              labels: { ...node.labels, nodes: unmatched },
            };
          }),
        };
      },
  },
};

/**
 * Look up a GLQL display function (e.g. `labels(...)`) by name.
 * Returns `undefined` for unknown names.
 */
export const getFunction = (name) => functions[name];

View File

@ -1,5 +1,4 @@
import jsYaml from 'js-yaml';
import { uniq, upperFirst, lowerCase } from 'lodash';
import { upperFirst, lowerCase } from 'lodash';
export const extractGroupOrProject = (url = window.location.href) => {
let fullPath = url
@ -15,22 +14,6 @@ export const extractGroupOrProject = (url = window.location.href) => {
};
};
export const parseQueryText = (text) => {
const frontmatter = text.match(/---\n([\s\S]*?)\n---/);
const remaining = text.replace(frontmatter ? frontmatter[0] : '', '');
return {
frontmatter: frontmatter ? frontmatter[1].trim() : '',
query: remaining.trim(),
};
};
export const parseFrontmatter = (frontmatter, defaults = {}) => {
const config = jsYaml.safeLoad(frontmatter) || {};
config.fields = uniq(config.fields?.split(',').map((f) => f.trim()) || defaults?.fields);
config.display = config.display || 'list';
return config;
};
export const toSentenceCase = (str) => {
if (str === 'id' || str === 'iid') return str.toUpperCase();
return upperFirst(lowerCase(str));

View File

@ -24,6 +24,15 @@ import TitleComponent from './title.vue';
const STICKY_HEADER_VISIBLE_CLASS = 'issuable-sticky-header-visible';
// Normalize description HTML for comparison by dropping every attribute from
// opening <details> tags — the browser records client-only open/closed state
// there, which would otherwise make identical descriptions look different.
function stripClientState(html) {
  return html.replace(/<details[^>]*>/g, '<details>');
}

// Whether two description HTML payloads differ once client-only <details>
// state is ignored.
function hasDescriptionChanged(oldDesc, newDesc) {
  const before = stripClientState(oldDesc);
  const after = stripClientState(newDesc);
  return before !== after;
}
export default {
components: {
HeaderActions,
@ -356,7 +365,12 @@ export default {
const details =
descriptionSection != null && descriptionSection.getElementsByTagName('details');
this.state.descriptionHtml = updateDescription(sanitize(data.description), details);
const newDescriptionHtml = updateDescription(sanitize(data.description), details);
if (hasDescriptionChanged(this.state.descriptionHtml, newDescriptionHtml)) {
this.state.descriptionHtml = newDescriptionHtml;
}
this.state.titleHtml = sanitize(data.title);
this.state.lock_version = data.lock_version;
},

View File

@ -89,7 +89,8 @@ export default {
}
},
},
TIME_INPUT_CLASS: 'gl-flex gl-items-center gl-justify-center !gl-py-2 gl-min-w-fit gl-w-15',
TIME_INPUT_CLASS:
'gl-flex gl-items-center gl-justify-center !gl-py-2 gl-min-w-fit gl-w-15 !gl-h-7',
};
</script>

View File

@ -3,16 +3,14 @@
*
*/
.file-holder {
border: 1px solid $border-color;
border-radius: $gl-border-radius-base;
@apply gl-border gl-rounded-base;
&.file-holder-top-border {
border-top: 1px solid $border-color;
@apply gl-border;
.file-title {
// Prevents the top border getting clipped by the background
border-top-left-radius: $gl-border-radius-base;
border-top-right-radius: $gl-border-radius-base;
@apply gl-rounded-t-base;
}
}
@ -22,8 +20,7 @@
.file-title {
position: relative;
background-color: var(--gray-10, $gray-10);
border-bottom: 1px solid var(--gl-border-color-default);
@apply gl-bg-subtle gl-border-b;
margin: 0;
text-align: left;
padding: 10px $gl-padding;
@ -38,15 +35,14 @@
}
a:not(.btn) {
color: $gl-text-color;
@apply gl-text-primary;
}
}
.file-blame-legend {
background-color: $gray-10;
text-align: right;
padding: 8px $gl-padding;
border-bottom: 1px solid $border-color;
@apply gl-bg-subtle gl-border-b;
@include media-breakpoint-down(xs) {
text-align: left;
@ -84,7 +80,7 @@
&.image_file,
&.audio,
&.video {
background: $gray-50;
@apply gl-bg-strong;
text-align: center;
padding: 30px;
@ -104,22 +100,21 @@
}
&.blob-no-preview {
background: $gray-50;
@apply gl-bg-strong;
text-shadow: 0 1px 2px $white;
padding: 100px 0;
}
&.logs {
background: $gray-50;
@apply gl-bg-strong;
max-height: 700px;
overflow-y: auto;
ol {
margin-left: 40px;
padding: 10px 0;
border-left: 1px solid $border-color;
margin-bottom: 0;
background: $white;
@apply gl-bg-default gl-border-l;
li {
color: $logs-li-color;
@ -208,8 +203,7 @@ span.idiff {
flex-wrap: wrap;
align-items: center;
justify-content: space-between;
background-color: var(--gl-background-color-subtle);
border-bottom: 1px solid $border-color;
@apply gl-bg-subtle gl-border-b;
padding: $gl-padding-8 $gl-padding;
margin: 0;
min-height: px-to-rem($file-header-height);
@ -225,7 +219,7 @@ span.idiff {
}
a {
color: $gl-text-color;
@apply gl-text-primary;
}
}
@ -247,8 +241,7 @@ span.idiff {
display: flex;
align-items: center;
justify-content: flex-end;
background-color: $gray-10;
border-bottom: 1px solid $border-color;
@apply gl-bg-subtle gl-border-b;
padding: 5px $gl-padding;
}
@ -260,7 +253,7 @@ span.idiff {
overflow: auto;
.file-container {
background-color: $gray-50;
@apply gl-bg-strong;
display: flex;
height: 100%;
align-items: center;
@ -295,7 +288,7 @@ span.idiff {
padding-bottom: $gl-padding;
.discussion-reply-holder {
border-bottom: 1px solid $gray-50;
@apply gl-border-b gl-border-b-subtle;
border-radius: 0;
}
}
@ -458,7 +451,7 @@ span.idiff {
.tr {
display: flex;
border-bottom: 1px solid $gray-50;
@apply gl-border-b gl-border-b-subtle;
&.last-row {
border-bottom: 0;
@ -512,7 +505,7 @@ span.idiff {
.blame.file-content .td.line-numbers {
float: none;
border-left: 1px solid $gray-100;
@apply gl-border-l;
border-radius: 0;
.file-line-num {
@ -522,11 +515,11 @@ span.idiff {
.code {
padding: 0;
border-radius: 0 0 $gl-border-radius-base $gl-border-radius-base;
@apply gl-rounded-t-none gl-rounded-b-base;
}
.blame-stream-container {
border-top: 1px solid $border-color;
@apply gl-border-t;
}
.blame-stream-loading {

View File

@ -185,3 +185,5 @@ module Resolvers
end
end
end
Resolvers::MergeRequestsResolver.prepend_mod

View File

@ -15,6 +15,8 @@ module Ml
length: { maximum: 255 }
validate :valid_default_experiment?
validates :description,
length: { maximum: 10_000 }
has_one :default_experiment, class_name: 'Ml::Experiment'
belongs_to :project

View File

@ -15,7 +15,7 @@ module Ml
length: { maximum: 255 }
validates :description,
length: { maximum: 500 }
length: { maximum: 10_000 }
validate :valid_model?, :valid_package?

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true

# Raises the text limit on `ml_models.description` and
# `ml_model_versions.description` to 10,000 characters: adds the new `10K`
# constraints first, then drops the previous, stricter ones (named by their
# historical hashes).
class AddTextLimitModelDescription < Gitlab::Database::Migration[2.2]
# NOTE(review): presumably required because add_text_limit adds the constraint
# as NOT VALID and validates it outside a transaction — confirm against the
# migration style guide.
disable_ddl_transaction!
milestone '17.4'
def up
model_constraint_name = check_constraint_name(:ml_models, :description, '10K')
version_constraint_name = check_constraint_name(:ml_model_versions, :description, '10K')
add_text_limit :ml_models, :description, 10_000, constraint_name: model_constraint_name
add_text_limit :ml_model_versions, :description, 10_000, constraint_name: version_constraint_name
# Drop the old limits only after the new, looser ones are in place.
remove_text_limit :ml_models, :description, constraint_name: 'check_d0c47d63b5'
remove_text_limit :ml_model_versions, :description, constraint_name: 'check_caff7d000b'
end
def down
# no-op: Danger of failing if there are records with smaller length
end
end

View File

@ -0,0 +1 @@
ec759636ce8e6a2cea093886761b642f6c91414dd25622fdfa89938066cb8313

View File

@ -13584,7 +13584,7 @@ CREATE TABLE ml_model_versions (
CONSTRAINT check_246f5048b5 CHECK ((char_length(semver_prerelease) <= 255)),
CONSTRAINT check_28b2d892c8 CHECK ((char_length(version) <= 255)),
CONSTRAINT check_4d50116294 CHECK ((char_length(description_html) <= 50000)),
CONSTRAINT check_caff7d000b CHECK ((char_length(description) <= 500))
CONSTRAINT check_f1545d8a9e CHECK ((char_length(description) <= 10000))
);
CREATE SEQUENCE ml_model_versions_id_seq
@ -13608,7 +13608,7 @@ CREATE TABLE ml_models (
description_html text,
CONSTRAINT check_1fd2cc7d93 CHECK ((char_length(name) <= 255)),
CONSTRAINT check_51a38acdaa CHECK ((char_length(description_html) <= 50000)),
CONSTRAINT check_d0c47d63b5 CHECK ((char_length(description) <= 5000))
CONSTRAINT check_f8df2fefc5 CHECK ((char_length(description) <= 10000))
);
CREATE SEQUENCE ml_models_id_seq

View File

@ -0,0 +1,31 @@
---
# Error: gitlab_base.CodeBlockNesting
#
# Ensures content nested in lists are spaced correctly.
#
extends: existence
message: "Use three spaces for lines under ordered lists, and two spaces under unordered lists"
link: https://docs.gitlab.com/ee/development/documentation/styleguide/#nesting-inside-a-list-item
level: error
nonword: true
ignorecase: true
scope: raw
tokens:
- '^1. .*\n\n? ( )?[`\w-]'
- '^- .*\n\n? ( )?[`\w-]'
# Regex guide:
#
# "^1. .*" - Lines that start with an ordered list.
# "^- .*" - Lines that start with an unordered list.
#
# "\n\n?" - Then one or two newlines
#
# Ordered lists: " ( )?" - Two or four spaces (three = correct)
# Unordered lists: " ( )?" - One or three spaces (two = correct)
#
# "[`\w-]" - Any one of:
#
# - A backtick - For code blocks after a list.
# - A letter/number - For alert boxes, sentences, and nested ordered lists (after a list).
# - A hyphen - For nested unordered lists (after a list).

View File

@ -126,7 +126,7 @@ Set the number of `workers` to `0` to reduce memory usage by hundreds of MB:
```
Unlike in a clustered mode, which is set up by default, only a single Puma process would serve the application.
For details on Puma worker and thread settings, see the [Puma requirements](../../install/requirements.md#puma-settings).
For details on Puma worker and thread settings, see the [Puma requirements](../../install/requirements.md#puma).
The downside of running Puma in this configuration is the reduced throughput, which can be
considered a fair tradeoff in a memory-constrained environment.
@ -268,7 +268,7 @@ automatically, due to differences between the two application servers.
To switch from Unicorn to Puma:
1. Determine suitable Puma [worker and thread settings](../../install/requirements.md#puma-settings).
1. Determine suitable Puma [worker and thread settings](../../install/requirements.md#puma).
1. Convert any custom Unicorn settings to Puma in `/etc/gitlab/gitlab.rb`.
The table below summarizes which Unicorn configuration keys correspond to those

View File

@ -28,7 +28,7 @@ Find out [which versions of PostgreSQL (and other components) ship](https://gitl
with each Linux package release.
The lowest supported PostgreSQL versions are listed in the
[installation requirements](../../install/requirements.md#postgresql-requirements).
[installation requirements](../../install/requirements.md#postgresql).
Read more about update policies and warnings in the PostgreSQL
[upgrade docs](https://docs.gitlab.com/omnibus/settings/database.html#upgrade-packaged-postgresql-server).

View File

@ -72,7 +72,7 @@ pg_dump: error: Error message from server: SSL SYSCALL error: EOF detected
```
To resolve this error, ensure that you are meeting the
[minimum PostgreSQL requirements](../../install/requirements.md#postgresql-requirements). After
[minimum PostgreSQL requirements](../../install/requirements.md#postgresql). After
upgrading your RDS instance to a [supported version](../../install/requirements.md#database),
you should be able to perform a backup without this error.
See [issue 364763](https://gitlab.com/gitlab-org/gitlab/-/issues/364763) for more information.

View File

@ -34,10 +34,10 @@ Backup and restore recreates the entire database, including the indexes.
1. Take a scheduled downtime window. In all nodes, stop unnecessary GitLab services:
```shell
gitlab-ctl stop
gitlab-ctl start postgresql
```
```shell
gitlab-ctl stop
gitlab-ctl start postgresql
```
1. Backup the PostgreSQL database with `pg_dump` or the
[GitLab backup tool, with all data types except `db` excluded](../backup_restore/backup_gitlab.md#excluding-specific-data-from-the-backup)
@ -63,10 +63,10 @@ Backup and restore recreates the entire database, including the indexes.
1. Take a scheduled downtime window. In all nodes of all sites, stop unnecessary GitLab services:
```shell
gitlab-ctl stop
gitlab-ctl start postgresql
```
```shell
gitlab-ctl stop
gitlab-ctl start postgresql
```
1. In the primary site, backup the PostgreSQL database with `pg_dump` or the
[GitLab backup tool, with all data types except `db` excluded](../backup_restore/backup_gitlab.md#excluding-specific-data-from-the-backup)
@ -89,10 +89,10 @@ Backup and restore recreates the entire database, including the indexes.
1. Take a scheduled downtime window. In all nodes, stop unnecessary GitLab services:
```shell
gitlab-ctl stop
gitlab-ctl start postgresql
```
```shell
gitlab-ctl stop
gitlab-ctl start postgresql
```
1. In all PostgreSQL nodes, upgrade the OS.
1. In all PostgreSQL nodes,
@ -120,10 +120,10 @@ Backup and restore recreates the entire database, including the indexes.
1. Take a scheduled downtime window. In all nodes of all sites, stop unnecessary GitLab services:
```shell
gitlab-ctl stop
gitlab-ctl start postgresql
```
```shell
gitlab-ctl stop
gitlab-ctl start postgresql
```
1. In all PostgreSQL nodes, upgrade the OS.
1. In all PostgreSQL nodes,
@ -148,10 +148,10 @@ different types of indexes were handled, see the blog post about
1. Take a scheduled downtime window. In all nodes, stop unnecessary GitLab services:
```shell
gitlab-ctl stop
gitlab-ctl start postgresql
```
```shell
gitlab-ctl stop
gitlab-ctl start postgresql
```
1. In all PostgreSQL nodes, upgrade the OS.
1. In all PostgreSQL nodes,
@ -187,10 +187,10 @@ different types of indexes were handled, see the blog post about
1. Take a scheduled downtime window. In all nodes of all sites, stop unnecessary GitLab services:
```shell
gitlab-ctl stop
gitlab-ctl start postgresql
```
```shell
gitlab-ctl stop
gitlab-ctl start postgresql
```
1. In all PostgreSQL nodes, upgrade the OS.
1. In all PostgreSQL nodes,
@ -223,15 +223,15 @@ You can compare the behavior of `glibc` on your servers [using shell commands](.
The following table shows the `glibc` versions shipped for different operating systems:
|Operating system |`glibc` version|
|--------------------|-------------|
|CentOS 7 | 2.17 |
|RedHat Enterprise 8 | 2.28 |
|RedHat Enterprise 9 | 2.34 |
|Ubuntu 18.04 | 2.27 |
|Ubuntu 20.04 | 2.31 |
|Ubuntu 22.04 | 2.35 |
|Ubuntu 24.04 | 2.39 |
| Operating system | `glibc` version |
|---------------------|-----------------|
| CentOS 7 | 2.17 |
| RedHat Enterprise 8 | 2.28 |
| RedHat Enterprise 9 | 2.34 |
| Ubuntu 18.04 | 2.27 |
| Ubuntu 20.04 | 2.31 |
| Ubuntu 22.04 | 2.35 |
| Ubuntu 24.04 | 2.39 |
For example, suppose you are upgrading from CentOS 7 to RedHat
Enterprise 8. In this case, using PostgreSQL on this upgraded operating

View File

@ -418,7 +418,7 @@ Additionally, the following cloud provider services are recommended for use as p
### Recommendation notes for the database services
[When selecting to use an external database service](../postgresql/external.md), it should run a standard, performant, and [supported version](../../install/requirements.md#postgresql-requirements).
[When selecting to use an external database service](../postgresql/external.md), it should run a standard, performant, and [supported version](../../install/requirements.md#postgresql).
If you choose to use a third party external service:

View File

@ -31,7 +31,7 @@ Different call timeouts are available for different Gitaly operations.
| Timeout | Default | Description |
|:--------|:-----------|:------------|
| Default | 55 seconds | Timeout for most Gitaly calls (not enforced for `git` `fetch` and `push` operations, or Sidekiq jobs). For example, checking if a repository exists on disk. Makes sure that Gitaly calls made in a web request cannot exceed the entire request timeout. It should be shorter than the [worker timeout](../operations/puma.md#change-the-worker-timeout) that can be configured for [Puma](../../install/requirements.md#puma-settings). If a Gitaly call timeout exceeds the worker timeout, the remaining time from the worker timeout is used to avoid having to terminate the worker. |
| Default | 55 seconds | Timeout for most Gitaly calls (not enforced for `git` `fetch` and `push` operations, or Sidekiq jobs). For example, checking if a repository exists on disk. Makes sure that Gitaly calls made in a web request cannot exceed the entire request timeout. It should be shorter than the [worker timeout](../operations/puma.md#change-the-worker-timeout) that can be configured for [Puma](../../install/requirements.md#puma). If a Gitaly call timeout exceeds the worker timeout, the remaining time from the worker timeout is used to avoid having to terminate the worker. |
| Fast | 10 seconds | Timeout for fast Gitaly operations used in requests, sometimes multiple times. For example, checking if a repository exists on disk. If fast operations exceed this threshold, there may be a problem with a storage shard. Failing fast can help maintain the stability of the GitLab instance. |
| Medium | 30 seconds | Timeout for Gitaly operations that should be fast (possibly in requests) but preferably not used multiple times in a request. For example, loading blobs. Timeout that should be set between Default and Fast. |

View File

@ -16955,6 +16955,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="addonuserassignedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="addonuserassignedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="addonuserassignedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="addonuserassignedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
| <a id="addonuserassignedmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
| <a id="addonuserassignedmergerequestscreatedbefore"></a>`createdBefore` | [`Time`](#time) | Merge requests created before the timestamp. |
@ -17003,6 +17004,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="addonuserauthoredmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="addonuserauthoredmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="addonuserauthoredmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="addonuserauthoredmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="addonuserauthoredmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="addonuserauthoredmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
@ -17105,6 +17107,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="addonuserreviewrequestedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="addonuserreviewrequestedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="addonuserreviewrequestedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="addonuserreviewrequestedmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="addonuserreviewrequestedmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="addonuserreviewrequestedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
@ -17807,6 +17810,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="autocompleteduserassignedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="autocompleteduserassignedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="autocompleteduserassignedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="autocompleteduserassignedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
| <a id="autocompleteduserassignedmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
| <a id="autocompleteduserassignedmergerequestscreatedbefore"></a>`createdBefore` | [`Time`](#time) | Merge requests created before the timestamp. |
@ -17855,6 +17859,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="autocompleteduserauthoredmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="autocompleteduserauthoredmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="autocompleteduserauthoredmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="autocompleteduserauthoredmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="autocompleteduserauthoredmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="autocompleteduserauthoredmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
@ -17969,6 +17974,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="autocompleteduserreviewrequestedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="autocompleteduserreviewrequestedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="autocompleteduserreviewrequestedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="autocompleteduserreviewrequestedmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="autocompleteduserreviewrequestedmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="autocompleteduserreviewrequestedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
@ -20135,6 +20141,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="currentuserassignedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="currentuserassignedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="currentuserassignedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="currentuserassignedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
| <a id="currentuserassignedmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
| <a id="currentuserassignedmergerequestscreatedbefore"></a>`createdBefore` | [`Time`](#time) | Merge requests created before the timestamp. |
@ -20187,6 +20194,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="currentuserassigneeorreviewermergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="currentuserassigneeorreviewermergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="currentuserassigneeorreviewermergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="currentuserassigneeorreviewermergerequestsassignedreviewstates"></a>`assignedReviewStates` | [`[MergeRequestReviewState!]`](#mergerequestreviewstate) | Reviewer states for merge requests the current user is assigned to. |
| <a id="currentuserassigneeorreviewermergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
| <a id="currentuserassigneeorreviewermergerequestscreatedbefore"></a>`createdBefore` | [`Time`](#time) | Merge requests created before the timestamp. |
@ -20234,6 +20242,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="currentuserauthoredmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="currentuserauthoredmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="currentuserauthoredmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="currentuserauthoredmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="currentuserauthoredmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="currentuserauthoredmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
@ -20336,6 +20345,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="currentuserreviewrequestedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="currentuserreviewrequestedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="currentuserreviewrequestedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="currentuserreviewrequestedmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="currentuserreviewrequestedmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="currentuserreviewrequestedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
@ -23648,6 +23658,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="groupmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="groupmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="groupmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="groupmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="groupmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="groupmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
@ -25706,6 +25717,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="mergerequestassigneeassignedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="mergerequestassigneeassignedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="mergerequestassigneeassignedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="mergerequestassigneeassignedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
| <a id="mergerequestassigneeassignedmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
| <a id="mergerequestassigneeassignedmergerequestscreatedbefore"></a>`createdBefore` | [`Time`](#time) | Merge requests created before the timestamp. |
@ -25754,6 +25766,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="mergerequestassigneeauthoredmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="mergerequestassigneeauthoredmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="mergerequestassigneeauthoredmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="mergerequestassigneeauthoredmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="mergerequestassigneeauthoredmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="mergerequestassigneeauthoredmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
@ -25856,6 +25869,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="mergerequestassigneereviewrequestedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="mergerequestassigneereviewrequestedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="mergerequestassigneereviewrequestedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="mergerequestassigneereviewrequestedmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="mergerequestassigneereviewrequestedmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="mergerequestassigneereviewrequestedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
@ -26086,6 +26100,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="mergerequestauthorassignedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="mergerequestauthorassignedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="mergerequestauthorassignedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="mergerequestauthorassignedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
| <a id="mergerequestauthorassignedmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
| <a id="mergerequestauthorassignedmergerequestscreatedbefore"></a>`createdBefore` | [`Time`](#time) | Merge requests created before the timestamp. |
@ -26134,6 +26149,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="mergerequestauthorauthoredmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="mergerequestauthorauthoredmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="mergerequestauthorauthoredmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="mergerequestauthorauthoredmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="mergerequestauthorauthoredmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="mergerequestauthorauthoredmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
@ -26236,6 +26252,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="mergerequestauthorreviewrequestedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="mergerequestauthorreviewrequestedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="mergerequestauthorreviewrequestedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="mergerequestauthorreviewrequestedmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="mergerequestauthorreviewrequestedmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="mergerequestauthorreviewrequestedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
@ -26512,6 +26529,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="mergerequestparticipantassignedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="mergerequestparticipantassignedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="mergerequestparticipantassignedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="mergerequestparticipantassignedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
| <a id="mergerequestparticipantassignedmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
| <a id="mergerequestparticipantassignedmergerequestscreatedbefore"></a>`createdBefore` | [`Time`](#time) | Merge requests created before the timestamp. |
@ -26560,6 +26578,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="mergerequestparticipantauthoredmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="mergerequestparticipantauthoredmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="mergerequestparticipantauthoredmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="mergerequestparticipantauthoredmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="mergerequestparticipantauthoredmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="mergerequestparticipantauthoredmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
@ -26662,6 +26681,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="mergerequestparticipantreviewrequestedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="mergerequestparticipantreviewrequestedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="mergerequestparticipantreviewrequestedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="mergerequestparticipantreviewrequestedmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="mergerequestparticipantreviewrequestedmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="mergerequestparticipantreviewrequestedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
@ -26911,6 +26931,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="mergerequestreviewerassignedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="mergerequestreviewerassignedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="mergerequestreviewerassignedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="mergerequestreviewerassignedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
| <a id="mergerequestreviewerassignedmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
| <a id="mergerequestreviewerassignedmergerequestscreatedbefore"></a>`createdBefore` | [`Time`](#time) | Merge requests created before the timestamp. |
@ -26959,6 +26980,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="mergerequestreviewerauthoredmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="mergerequestreviewerauthoredmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="mergerequestreviewerauthoredmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="mergerequestreviewerauthoredmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="mergerequestreviewerauthoredmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="mergerequestreviewerauthoredmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
@ -27061,6 +27083,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="mergerequestreviewerreviewrequestedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="mergerequestreviewerreviewrequestedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="mergerequestreviewerreviewrequestedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="mergerequestreviewerreviewrequestedmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="mergerequestreviewerreviewrequestedmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="mergerequestreviewerreviewrequestedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
@ -30187,6 +30210,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="projectmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="projectmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="projectmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="projectmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="projectmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="projectmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
@ -33292,6 +33316,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="usercoreassignedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="usercoreassignedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="usercoreassignedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="usercoreassignedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
| <a id="usercoreassignedmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
| <a id="usercoreassignedmergerequestscreatedbefore"></a>`createdBefore` | [`Time`](#time) | Merge requests created before the timestamp. |
@ -33340,6 +33365,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="usercoreauthoredmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="usercoreauthoredmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="usercoreauthoredmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="usercoreauthoredmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="usercoreauthoredmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="usercoreauthoredmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
@ -33442,6 +33468,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="usercorereviewrequestedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="usercorereviewrequestedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="usercorereviewrequestedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="usercorereviewrequestedmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="usercorereviewrequestedmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="usercorereviewrequestedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
@ -40267,6 +40294,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="userassignedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="userassignedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="userassignedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="userassignedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |
| <a id="userassignedmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
| <a id="userassignedmergerequestscreatedbefore"></a>`createdBefore` | [`Time`](#time) | Merge requests created before the timestamp. |
@ -40315,6 +40343,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="userauthoredmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="userauthoredmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="userauthoredmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="userauthoredmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="userauthoredmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="userauthoredmergerequestscreatedafter"></a>`createdAfter` | [`Time`](#time) | Merge requests created after the timestamp. |
@ -40417,6 +40446,7 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| <a id="userreviewrequestedmergerequestsapproved"></a>`approved` | [`Boolean`](#boolean) | Limit results to approved merge requests. Available only when the feature flag `mr_approved_filter` is enabled. |
| <a id="userreviewrequestedmergerequestsapprovedby"></a>`approvedBy` | [`[String!]`](#string) | Usernames of the approvers. |
| <a id="userreviewrequestedmergerequestsapprover"></a>`approver` | [`[String!]`](#string) | Usernames of possible approvers. |
| <a id="userreviewrequestedmergerequestsassigneeusername"></a>`assigneeUsername` | [`String`](#string) | Username of the assignee. |
| <a id="userreviewrequestedmergerequestsassigneewildcardid"></a>`assigneeWildcardId` | [`AssigneeWildcardId`](#assigneewildcardid) | Filter by assignee presence. Incompatible with assigneeUsernames and assigneeUsername. |
| <a id="userreviewrequestedmergerequestsauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author. |

View File

@ -359,7 +359,7 @@ Now, it's time to create the database:
1. Go to the RDS dashboard, select **Databases** from the left menu, and select **Create database**.
1. Select **Standard Create** for the database creation method.
1. Select **PostgreSQL** as the database engine and select the minimum PostgreSQL version as defined for your GitLab version in our [database requirements](../../install/requirements.md#postgresql-requirements).
1. Select **PostgreSQL** as the database engine and select the minimum PostgreSQL version as defined for your GitLab version in our [database requirements](../../install/requirements.md#postgresql).
1. Because this is a production server, let's choose **Production** from the **Templates** section.
1. Under **Availability & durability**, select **Multi-AZ DB instance** to have a standby RDS instance provisioned in a different [Availability Zone](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.MultiAZ.html).
1. Under **Settings**, use:
@ -517,7 +517,7 @@ From the EC2 dashboard:
1. Use the section below titled "[Find official GitLab-created AMI IDs on AWS](#find-official-gitlab-created-ami-ids-on-aws)" to find the correct AMI and select **Launch**.
1. In the **Name and tags** section, set the **Name** to `GitLab`.
1. In the **Instance type** dropdown list, select an instance type based on your workload. Consult the [hardware requirements](../../install/requirements.md#hardware-requirements) to choose one that fits your needs (at least `c5.2xlarge`, which is sufficient to accommodate 100 users).
1. In the **Instance type** dropdown list, select an instance type based on your workload. Consult the [hardware requirements](../../install/requirements.md#hardware) to choose one that fits your needs (at least `c5.2xlarge`, which is sufficient to accommodate 100 users).
1. In the **Key pair** section, select **Create new key pair**.
1. Give the key pair a name (we use `gitlab`) and save the `gitlab.pem` file for later use.
1. In the **Network settings** section:

View File

@ -66,7 +66,7 @@ The first items you need to configure are the basic settings of the underlying v
1. In **Availability options**, select **Availability zone** and set it to `1`.
Read more about the [availability zones](https://learn.microsoft.com/en-us/azure/virtual-machines/availability).
1. Ensure the selected image is set to **GitLab - Gen1**.
1. Select the VM size based on the [hardware requirements](../requirements.md#hardware-requirements).
1. Select the VM size based on the [hardware requirements](../requirements.md#hardware).
Because the minimum system requirements to run a GitLab environment for up to 500 users
is covered by the `D4s_v3` size, select that option.
1. Set the authentication type to **SSH public key**.

View File

@ -42,7 +42,7 @@ To deploy GitLab on GCP you must create a virtual machine:
1. On the next page, you can select the type of VM as well as the
estimated costs. Provide the name of the instance, desired data center, and machine type.
Note our [hardware requirements for different user base sizes](../requirements.md#hardware-requirements).
Note our [hardware requirements for different user base sizes](../requirements.md#hardware).
![Launch on Compute Engine](img/vm_details.png)

View File

@ -297,7 +297,7 @@ sudo adduser --disabled-login --gecos 'GitLab' git
NOTE:
Only PostgreSQL is supported.
In GitLab 17.0 and later, we [require PostgreSQL 14+](requirements.md#postgresql-requirements).
In GitLab 17.0 and later, we [require PostgreSQL 14+](requirements.md#postgresql).
1. Install the database packages.

View File

@ -13,7 +13,7 @@ DETAILS:
This page includes information about the minimum requirements you need to install and use GitLab.
## Hardware requirements
## Hardware
### Storage
@ -57,10 +57,10 @@ While not recommended, in certain circumstances GitLab may run in a [memory cons
## Database
PostgreSQL is the only supported database, which is bundled with the Linux package.
You can also use an [external PostgreSQL database](https://docs.gitlab.com/omnibus/settings/database.html#using-a-non-packaged-postgresql-database-management-server).
### PostgreSQL
### PostgreSQL requirements
PostgreSQL is the only supported database and is bundled with the Linux package.
You can also use an [external PostgreSQL database](https://docs.gitlab.com/omnibus/settings/database.html#using-a-non-packaged-postgresql-database-management-server).
The server running PostgreSQL should have a certain amount of storage available, though the exact amount
[depends on the number of users](../administration/reference_architectures/index.md). For:
@ -89,8 +89,8 @@ used for development and testing:
1. PostgreSQL 14.x [tested against GitLab 15.11 only](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/114624).
1. [Tested against GitLab 16.1 and later](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/119344).
You must also ensure the following extensions are loaded into every
GitLab database. [Read more about this requirement, and troubleshooting](postgresql_extensions.md).
You must also ensure the following extensions are loaded into every GitLab database.
For more information, see [managing PostgreSQL extensions](postgresql_extensions.md).
| Extension | Minimum GitLab version |
| ------------ | ---------------------- |
@ -104,16 +104,16 @@ The following managed PostgreSQL services are known to be incompatible and shoul
|----------------|-------------------------------------------------------|
| 14.4+ | Amazon Aurora (see [14.4.0](../update/versions/gitlab_14_changes.md#1440)) |
#### Additional requirements for GitLab Geo
#### GitLab Geo
If you're using [GitLab Geo](../administration/geo/index.md), we strongly recommend running instances installed by using the Linux package or using
[validated cloud-managed instances](../administration/reference_architectures/index.md#recommended-cloud-providers-and-services),
as we actively develop and test based on those.
We cannot guarantee compatibility with other external databases.
It is recommended to review the [full requirements for running Geo](../administration/geo/index.md#requirements-for-running-geo).
For more information, see [requirements for running Geo](../administration/geo/index.md#requirements-for-running-geo).
#### Operating system locale compatibility and silent index corruption
#### Locale compatibility
Changes to locale data in `glibc` means that PostgreSQL database files are not fully compatible
between different OS releases.
@ -127,13 +127,9 @@ when:
For more information, see how to [upgrade operating systems for PostgreSQL](../administration/postgresql/upgrading_os.md).
#### Gitaly Cluster database requirements
#### GitLab schemas
[Read more in the Gitaly Cluster documentation](../administration/gitaly/praefect.md).
#### Exclusive use of GitLab databases
Databases created or used for GitLab, Geo, Gitaly Cluster, or other components should be for the
Databases created or used for GitLab, Geo, [Gitaly Cluster](../administration/gitaly/praefect.md), or other components should be for the
exclusive use of GitLab. Do not make direct changes to the database, schemas, users, or other
properties except when following procedures in the GitLab documentation or following the directions
of GitLab Support or other GitLab engineers.
@ -156,7 +152,7 @@ of GitLab Support or other GitLab engineers.
Database migrations are tested against the schema definition in the GitLab codebase. GitLab
version upgrades may fail if the schema is modified.
## Puma settings
## Puma
The recommended settings for Puma are determined by the infrastructure on which it's running.
The Linux package defaults to the recommended Puma settings. Regardless of installation method, you can
@ -167,13 +163,17 @@ tune the Puma settings:
- If you're using the GitLab Helm chart, see the
[`webservice` chart](https://docs.gitlab.com/charts/charts/gitlab/webservice/index.html).
### Puma workers
### Workers
The recommended number of workers is calculated as the highest of the following:
- `2`
- A combination of CPU and memory resource availability (see how this is configured automatically for the [Linux package](https://gitlab.com/gitlab-org/omnibus-gitlab/-/blob/ef9facdc927e7389db6a5e0655414ba8318c7b8a/files/gitlab-cookbooks/gitlab/libraries/puma.rb#L31-46)).
By default, each Puma worker is limited to 1.2 GB of memory.
If you must increase the number of Puma workers, you can raise this per-worker memory limit by setting
[`puma['per_worker_max_memory_mb']`](../administration/operations/puma.md#reducing-memory-use) to a higher value.
Take for example the following scenarios:
- A node with 2 cores / 8 GB memory should be configured with **2 Puma workers**.
@ -228,7 +228,7 @@ A higher number of Puma workers usually helps to reduce the response time of the
and increase the ability to handle parallel requests. You must perform testing to verify the
optimal settings for your infrastructure.
### Puma threads
### Threads
The recommended number of threads is dependent on several factors, including total memory.
@ -238,12 +238,6 @@ The recommended number of threads is dependent on several factors, including tot
higher, due to how [Ruby MRI multi-threading](https://en.wikipedia.org/wiki/Global_interpreter_lock)
works.
### Puma per worker maximum memory
By default, each Puma worker is limited to 1.2 GB of memory.
You can [adjust this memory setting](../administration/operations/puma.md#reducing-memory-use) and should do so
if you must increase the number of Puma workers.
## Redis
Redis stores all user sessions and the background task queue.
@ -262,14 +256,13 @@ Sidekiq processes the background jobs with a multi-threaded process.
This process starts with the entire Rails stack (200 MB+) but it can grow over time due to memory leaks.
On a very active server (10,000 billable users) the Sidekiq process can use 1 GB+ of memory.
## Prometheus and its exporters
## Prometheus
[Prometheus](https://prometheus.io) and its related exporters are enabled by
default to enable in depth monitoring of GitLab. With default settings, these
processes consume approximately 200 MB of memory.
By default, [Prometheus](https://prometheus.io) and its related exporters are enabled to monitor GitLab.
These processes consume approximately 200 MB of memory.
If you would like to disable Prometheus and it's exporters or read more information
about it, check the [Prometheus documentation](../administration/monitoring/prometheus/index.md).
For more information, see
[monitoring GitLab with Prometheus](../administration/monitoring/prometheus/index.md).
## GitLab Runner
@ -286,8 +279,7 @@ It's also not safe to install everything on a single machine, because of the
[security reasons](https://docs.gitlab.com/runner/security/), especially when you plan to use shell executor with GitLab
Runner.
We recommend using a separate machine for each GitLab Runner, if you plan to
use the CI features.
To use CI/CD features, you should use a separate machine for each GitLab Runner.
The GitLab Runner server requirements depend on:
- The type of [executor](https://docs.gitlab.com/runner/executors/) you configured on GitLab Runner.
@ -321,18 +313,6 @@ NOTE:
We don't support running GitLab with JavaScript disabled in the browser and have no plans of supporting that
in the future because we have features such as issue boards which require JavaScript extensively.
## Security
## Related topics
After installation, be sure to read and follow guidance on [maintaining a secure GitLab installation](../security/index.md).
<!-- ## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
one might have when setting this up, or when something is changed, or on upgrading, it's
important to describe those, too. Think of things that may go wrong and include them here.
This is important to minimize requests for support, and to avoid doc comments with
questions that you know someone might ask.
Each scenario can be a third-level heading, for example `### Getting error message X`.
If you have none to add when creating a doc, leave this section in place
but commented out to help encourage others to add to it in the future. -->
- [Secure your installation](../security/index.md)

View File

@ -141,7 +141,7 @@ GitLab 17.0 requires at least PostgreSQL 14.
The latest version of GitLab might depend on a more recent PostgreSQL version
than what you are running. You may also have to enable some
extensions. For more information, see the
[PostgreSQL requirements](../install/requirements.md#postgresql-requirements)
[PostgreSQL requirements](../install/requirements.md#postgresql)
To upgrade PostgreSQL, refer to its [documentation](https://www.postgresql.org/docs/11/upgrading.html).
@ -283,7 +283,7 @@ sudo systemctl daemon-reload
### 10. Install libraries, migrations, etc
Make sure you have the required
[PostgreSQL extensions](../install/requirements.md#postgresql-requirements),
[PostgreSQL extensions](../install/requirements.md#postgresql),
then proceed to install the needed libraries:
```shell

View File

@ -31351,7 +31351,9 @@ msgid "LICENSE"
msgstr ""
msgid "Label"
msgstr ""
msgid_plural "Labels"
msgstr[0] ""
msgstr[1] ""
msgid "Label %{labelName} was not found"
msgstr ""
@ -39013,6 +39015,12 @@ msgstr ""
msgid "Parent set successfully"
msgstr ""
msgid "Parse error: Expected `%{expected}`, but got `%{got}`."
msgstr ""
msgid "Parse error: Unexpected input near `%{input}`."
msgstr ""
msgid "Part of merge request changes"
msgstr ""
@ -57661,15 +57669,24 @@ msgstr ""
msgid "Unknown Error"
msgstr ""
msgid "Unknown data source: %{source}"
msgstr ""
msgid "Unknown encryption strategy: %{encrypted_strategy}!"
msgstr ""
msgid "Unknown format"
msgstr ""
msgid "Unknown function: %{name}"
msgstr ""
msgid "Unknown user"
msgstr ""
msgid "Unknown value type: %{type}"
msgstr ""
msgid "Unless otherwise agreed to in writing with GitLab, by selecting \"Add License\" you agree that your use of GitLab Software is subject to the %{eula_link_start}Terms of Service%{eula_link_end}."
msgstr ""
@ -64116,6 +64133,9 @@ msgstr ""
msgid "features adopted"
msgstr ""
msgid "field name"
msgstr ""
msgid "file"
msgid_plural "files"
msgstr[0] ""
@ -64150,6 +64170,9 @@ msgstr ""
msgid "frontmatter"
msgstr ""
msgid "function name"
msgstr ""
msgid "group"
msgid_plural "groups"
msgstr[0] ""
@ -65334,6 +65357,9 @@ msgstr ""
msgid "snippet"
msgstr ""
msgid "something to parse"
msgstr ""
msgid "source"
msgstr ""
@ -65361,6 +65387,9 @@ msgstr ""
msgid "starts on %{timebox_start_date}"
msgstr ""
msgid "string"
msgstr ""
msgid "structure is too large. Maximum size is %{max_size} characters"
msgstr ""

View File

@ -37,6 +37,7 @@ module RuboCop
reset_trigger_function
cleanup_conversion_of_integer_to_bigint
revert_initialize_conversion_of_integer_to_bigint
validate_foreign_key
].sort.freeze
MSG = "The method is not allowed to be called within the `with_lock_retries` block, the only allowed methods are: #{ALLOWED_MIGRATION_METHODS.join(', ')}".freeze

View File

@ -1,4 +1,3 @@
import { print } from 'graphql/language/printer';
import Executor from '~/glql/core/executor';
import createDefaultClient from '~/lib/graphql';
import { MOCK_ISSUES } from '../mock_data';
@ -28,125 +27,16 @@ describe('Executor', () => {
delete gon.current_username;
});
it('executes a query using GLQL compiler', async () => {
const { data, config } = await executor.compile('assignee = currentUser()').execute();
expect(print(queryFn.mock.calls[0][0].query)).toMatchInlineSnapshot(`
"{
issues(assigneeUsernames: "foobar", first: 100) {
nodes {
id
iid
title
webUrl
reference
state
type
}
pageInfo {
endCursor
hasNextPage
}
}
}
"
`);
expect(data).toEqual(MOCK_QUERY_RESPONSE);
// default config options
expect(config).toEqual({ display: 'list', fields: ['title'] });
});
it('includes fields provided in config, each field included just once', async () => {
const { data, config } = await executor
.compile(
`
---
fields: title, id, title, iid, author, title
---
assignee = currentUser()
`,
)
.execute();
expect(print(queryFn.mock.calls[0][0].query)).toMatchInlineSnapshot(`
"{
issues(assigneeUsernames: "foobar", first: 100) {
nodes {
id
iid
title
webUrl
reference
state
type
author {
id
avatarUrl
username
name
webUrl
it('executes a query using a graphql client', async () => {
const data = await executor.execute(`
{
issues(assigneeUsernames: "foobar", first: 100) {
nodes { id iid title webUrl reference state type }
pageInfo { endCursor hasNextPage }
}
}
}
pageInfo {
endCursor
hasNextPage
}
}
}
"
`);
`);
expect(data).toEqual(MOCK_QUERY_RESPONSE);
expect(config).toEqual({ display: 'list', fields: ['title', 'id', 'iid', 'author'] });
});
it('correctly reads limit and display options from config', async () => {
const { data, config } = await executor
.compile(
`
---
limit: 5
display: list
---
assignee = currentUser()
`,
)
.execute();
expect(print(queryFn.mock.calls[0][0].query)).toMatchInlineSnapshot(`
"{
issues(assigneeUsernames: "foobar", first: 5) {
nodes {
id
iid
title
webUrl
reference
state
type
}
pageInfo {
endCursor
hasNextPage
}
}
}
"
`);
expect(data).toEqual(MOCK_QUERY_RESPONSE);
expect(config).toEqual({
display: 'list',
fields: ['title'],
limit: 5,
});
});
it('throws an error if the query compilation returns an error', () => {
expect(() => {
executor.compile('invalid query');
}).toThrow('Unexpected `q`, expected operator (one of IN, =, !=, >, or <)');
});
});

View File

@ -0,0 +1,329 @@
import {
str,
regex,
seq,
alt,
many,
optional,
whitespace,
token,
} from '~/glql/core/parser/combinators';
// Specs for the GLQL parser combinator primitives. A parser's `run` returns
// `{ success: true, value, rest }` on a match and
// `{ success: false, expected, got }` on failure.
describe('Parser combinators', () => {
  // Asserts that running `parser` on `input` yields exactly `expected`.
  const expectRun = (parser, input, expected) => {
    expect(parser.run(input)).toEqual(expected);
  };
  // Shorthand builders for the two possible parse-result shapes.
  const ok = (value, rest) => ({ success: true, value, rest });
  const err = (expected, got) => ({ success: false, expected, got });

  describe('str', () => {
    it('should parse a string successfully', () => {
      expectRun(str('hello'), 'hello world', ok('hello', ' world'));
    });

    it('should fail when string does not match', () => {
      expectRun(str('hello'), 'world', err('hello', 'world'));
    });
  });

  describe('regex', () => {
    it('should parse a regex successfully', () => {
      expectRun(regex(/^\d+/, 'number'), '123abc', ok('123', 'abc'));
    });

    it('should fail when regex does not match', () => {
      expectRun(regex(/^\d+/, 'number'), 'abc123', err('number', 'abc123'));
    });
  });

  describe('seq', () => {
    it('should parse a simple sequence successfully', () => {
      expectRun(
        seq(str('hello'), str(' '), str('world')),
        'hello world!',
        ok(['hello', ' ', 'world'], '!'),
      );
    });

    it('should parse a complex sequence with different parser types', () => {
      const parser = seq(
        str('start'),
        whitespace,
        regex(/^\d+/, 'number'),
        optional(str('!')),
        many(str('a')),
      );

      expectRun(parser, 'start 123!aaa end', ok(['start', ' ', '123', '!', ['a', 'a', 'a']], ' end'));
    });

    it('should fail when any parser in the sequence fails', () => {
      expectRun(seq(str('hello'), str(' '), str('world'), str('!')), 'hello world', err('!', ''));
    });

    it('should handle empty input correctly', () => {
      expectRun(seq(str('hello'), str(' '), str('world')), '', err('hello', ''));
    });
  });

  describe('alt', () => {
    it('should parse alternatives successfully', () => {
      expectRun(alt(str('hello'), str('hi'), str('hey')), 'hi there', ok('hi', ' there'));
    });

    it('should try all alternatives and succeed with the first match', () => {
      const parser = alt(
        seq(str('hello'), str(' '), str('world')),
        seq(str('hi'), str(' '), str('there')),
        seq(str('hey'), str(' '), str('you')),
      );

      expectRun(parser, 'hi there friend', ok(['hi', ' ', 'there'], ' friend'));
    });

    it('should fail when no alternative matches', () => {
      expectRun(
        alt(str('hello'), str('hi'), str('hey')),
        'greetings',
        err('something to parse', 'greetings'),
      );
    });

    it('should handle complex alternatives with different parser types', () => {
      const parser = alt(
        seq(str('start'), whitespace, regex(/^\d+/, 'number')),
        seq(str('begin'), whitespace, many(str('a'))),
        token(str('end')),
      );

      expectRun(parser, 'begin aaa', ok(['begin', ' ', ['a', 'a', 'a']], ''));
    });
  });

  describe('many', () => {
    it('should parse multiple occurrences successfully', () => {
      expectRun(many(str('a')), 'aaab', ok(['a', 'a', 'a'], 'b'));
    });

    it('should return an empty array when no matches', () => {
      expectRun(many(str('a')), 'bbb', ok([], 'bbb'));
    });

    it('should parse complex repeated patterns', () => {
      const parser = many(seq(str('('), regex(/^[^)]+/, 'content'), str(')')));
      const groups = [
        ['(', 'hello', ')'],
        ['(', 'world', ')'],
        ['(', '!', ')'],
      ];

      expectRun(parser, '(hello)(world)(!)extra', ok(groups, 'extra'));
    });

    it('should handle nested many parsers', () => {
      const parser = many(seq(str('['), many(regex(/^[^\]]+/, 'item')), str(']')));
      const groups = [
        ['[', ['a'], ']'],
        ['[', ['b c'], ']'],
        ['[', ['d e f'], ']'],
      ];

      expectRun(parser, '[a][b c][d e f]rest', ok(groups, 'rest'));
    });

    it('should work with whitespace and tokens', () => {
      expectRun(many(token(regex(/^[a-z]+/, 'word'))), ' hello world !', ok(['hello', 'world'], ' !'));
    });
  });

  describe('optional', () => {
    it('should parse optional element when present', () => {
      expectRun(optional(str('a')), 'ab', ok('a', 'b'));
    });

    it('should return null when optional element is not present', () => {
      expectRun(optional(str('a')), 'b', ok(null, 'b'));
    });
  });

  describe('Parser', () => {
    it('should map parser results', () => {
      expectRun(
        str('hello').map((value) => value.toUpperCase()),
        'hello world',
        ok('HELLO', ' world'),
      );
    });

    it('should chain parsers', () => {
      expectRun(
        str('hello').chain((value) => str(` ${value}`)),
        'hello hello',
        ok(' hello', ''),
      );
    });
  });

  describe('whitespace', () => {
    it('should parse whitespace successfully', () => {
      expectRun(whitespace, ' hello', ok(' ', 'hello'));
    });

    it('should fail when no whitespace is present', () => {
      expectRun(whitespace, 'hello', err('whitespace', 'hello'));
    });

    it('should parse different types of whitespace', () => {
      expectRun(whitespace, ' \t\n\rhello', ok(' \t\n\r', 'hello'));
    });
  });

  describe('token', () => {
    it('should parse a token with leading whitespace', () => {
      expectRun(token(str('hello')), ' hello world', ok('hello', ' world'));
    });

    it('should parse a token without leading whitespace', () => {
      expectRun(token(str('hello')), 'hello world', ok('hello', ' world'));
    });

    it('should fail when the token is not present', () => {
      expectRun(token(str('hello')), ' world', err('hello', 'world'));
    });

    it('should work with different types of parsers', () => {
      expectRun(token(regex(/^\d+/, 'number')), ' \t\n123abc', ok('123', 'abc'));
    });
  });
});

View File

@ -0,0 +1,29 @@
import { parseConfig } from '~/glql/core/parser/config';
// Specs for parseConfig, which turns a GLQL frontmatter string into a
// normalized config object with expanded field definitions.
describe('parseConfig', () => {
  // The expanded field list both cases expect after normalization.
  const expandedFields = [
    { name: 'title', label: 'Title', key: 'title' },
    { name: 'assignees', label: 'Assignees', key: 'assignees' },
    { name: 'dueDate', label: 'Due date', key: 'dueDate' },
  ];

  it('parses the frontmatter and returns an object', () => {
    const result = parseConfig('fields: title, assignees, dueDate\ndisplay: list');

    expect(result).toEqual({ fields: expandedFields, display: 'list' });
  });

  it('returns default fields if none are provided', () => {
    const result = parseConfig('display: list', { fields: ['title', 'assignees', 'dueDate'] });

    expect(result).toEqual({ fields: expandedFields, display: 'list' });
  });
});

View File

@ -0,0 +1,68 @@
import { parseFields } from '~/glql/core/parser/fields';
import * as ast from '~/glql/core/parser/ast';
// Specs for the GLQL fields parser, which turns a comma-separated fields
// expression into an AST collection of field names and function calls.
describe('GLQL Fields Parser', () => {
  describe('parseFields', () => {
    // Parses `input` and asserts the resulting AST collection node-for-node.
    const expectAst = (input, ...nodes) => {
      expect(parseFields(input)).toEqual(ast.collection(...nodes));
    };

    it('parses a single field name', () => {
      expectAst('title', ast.fieldName('title'));
    });

    it('parses multiple field names', () => {
      expectAst(
        'title,description,createdAt',
        ast.fieldName('title'),
        ast.fieldName('description'),
        ast.fieldName('createdAt'),
      );
    });

    it('parses a function call', () => {
      expectAst('labels("bug")', ast.functionCall('labels', ast.collection(ast.string('bug'))));
    });

    it('parses a function call with multiple arguments', () => {
      expectAst(
        'labels("bug", "feature")',
        ast.functionCall('labels', ast.collection(ast.string('bug'), ast.string('feature'))),
      );
    });

    it('parses a mix of field names and function calls', () => {
      expectAst(
        'title,labels("bug"),description',
        ast.fieldName('title'),
        ast.functionCall('labels', ast.collection(ast.string('bug'))),
        ast.fieldName('description'),
      );
    });

    it('handles whitespace', () => {
      expectAst(
        ' title , labels( "bug" ) , description ',
        ast.fieldName('title'),
        ast.functionCall('labels', ast.collection(ast.string('bug'))),
        ast.fieldName('description'),
      );
    });

    it('throws an error for invalid input', () => {
      expect(() => parseFields('title,')).toThrow('Parse error');
    });

    it('throws an error for unclosed function call', () => {
      expect(() => parseFields('labels("bug"')).toThrow('Parse error');
    });
  });
});

View File

@ -0,0 +1,25 @@
import { parseQueryText } from '~/glql/core/parser';
// Specs for parseQueryText, which splits a GLQL block into its frontmatter
// (presentation config) and the query proper.
describe('parseQueryText', () => {
  it('separates the presentation layer from the query and returns an object', () => {
    const text = [
      '---',
      'fields: title, assignees, dueDate',
      'display: list',
      '---',
      'assignee = currentUser()',
    ].join('\n');

    expect(parseQueryText(text)).toEqual({
      frontmatter: 'fields: title, assignees, dueDate\ndisplay: list',
      query: 'assignee = currentUser()',
    });
  });

  it('returns empty frontmatter if no frontmatter is present', () => {
    expect(parseQueryText('assignee = currentUser()')).toEqual({
      frontmatter: '',
      query: 'assignee = currentUser()',
    });
  });
});

View File

@ -0,0 +1,100 @@
import { parse, print } from 'graphql';
import { parseQuery } from '~/glql/core/parser/query';
import { MOCK_FIELDS } from '../../mock_data';
// Normalizes a GraphQL document string via a parse/print round-trip so
// generated queries can be compared against inline snapshots regardless of
// formatting differences.
const prettify = (query) => print(parse(query));

// Specs for parseQuery, which compiles a GLQL query string plus a display
// config ({ fields, limit }) into a GraphQL document.
describe('GLQL Query Parser', () => {
  describe('parseQuery', () => {
    beforeEach(() => {
      // currentUser() in a GLQL query resolves from gon.current_username.
      gon.current_username = 'foobar';
    });

    afterEach(() => {
      delete gon.current_username;
    });

    it('parses a simple query by converting it to GraphQL', async () => {
      const query = 'assignee = currentUser()';
      const config = { fields: MOCK_FIELDS, limit: 50 };
      const result = await parseQuery(query, config);

      // The configured limit is forwarded as `first`; the configured fields
      // are expanded into the node selection set.
      expect(prettify(result)).toMatchInlineSnapshot(`
        "{
          issues(assigneeUsernames: "foobar", first: 50) {
            nodes {
              id
              iid
              title
              webUrl
              reference
              state
              type
              author {
                id
                avatarUrl
                username
                name
                webUrl
              }
              description
            }
            pageInfo {
              endCursor
              hasNextPage
            }
          }
        }
        "
      `);
    });

    it('handles complex queries with multiple conditions', async () => {
      const query = 'assignee = currentUser() AND label IN ("bug", "feature")';
      const config = { fields: MOCK_FIELDS, limit: 50 };
      const result = await parseQuery(query, config);

      // `label IN (...)` becomes an `or: { labelNames: [...] }` argument.
      expect(prettify(result)).toMatchInlineSnapshot(`
        "{
          issues(
            assigneeUsernames: "foobar"
            or: {labelNames: ["bug", "feature"]}
            first: 50
          ) {
            nodes {
              id
              iid
              title
              webUrl
              reference
              state
              type
              author {
                id
                avatarUrl
                username
                name
                webUrl
              }
              description
            }
            pageInfo {
              endCursor
              hasNextPage
            }
          }
        }
        "
      `);
    });

    it('throws an error for invalid queries', async () => {
      const query = 'invalid query syntax';
      const config = { fields: MOCK_FIELDS, limit: 100 };

      // The compiler surfaces its parse error through a rejected promise.
      await expect(parseQuery(query, config)).rejects.toThrow(
        'Unexpected `q`, expected operator (one of IN, =, !=, >, or <)',
      );
    });
  });
});

View File

@ -0,0 +1,82 @@
import { transformAstToDisplayFields } from '~/glql/core/transformer/ast';
import * as ast from '~/glql/core/parser/ast';
// Specs for transformAstToDisplayFields, which maps a GLQL fields AST onto
// display-field descriptors ({ key, label, name }, plus a transform function
// for function-call nodes such as labels(...)).
describe('transformAstToDisplayFields', () => {
  it('transforms a single field name', () => {
    expect(transformAstToDisplayFields(ast.fieldName('title'))).toEqual({
      key: 'title',
      label: 'Title',
      name: 'title',
    });
  });

  it('transforms multiple field names', () => {
    const fields = ast.collection(
      ast.fieldName('title'),
      ast.fieldName('description'),
      ast.fieldName('createdAt'),
    );

    expect(transformAstToDisplayFields(fields)).toEqual([
      { key: 'title', label: 'Title', name: 'title' },
      { key: 'description', label: 'Description', name: 'description' },
      { key: 'createdAt', label: 'Created at', name: 'createdAt' },
    ]);
  });

  it('transforms multiple field names with aliases', () => {
    const fields = ast.collection(
      ast.fieldName('assignee'),
      ast.fieldName('due'),
      ast.fieldName('closed'),
      ast.fieldName('health'),
    );

    // Aliased names resolve to their canonical keys, while labels keep the
    // name the author wrote.
    expect(transformAstToDisplayFields(fields)).toEqual([
      { key: 'assignees', label: 'Assignee', name: 'assignees' },
      { key: 'dueDate', label: 'Due', name: 'dueDate' },
      { key: 'closedAt', label: 'Closed', name: 'closedAt' },
      { key: 'healthStatus', label: 'Health', name: 'healthStatus' },
    ]);
  });

  it('transforms a function call with multiple arguments', () => {
    const call = ast.functionCall(
      'labels',
      ast.collection(ast.string('bug'), ast.string('feature'), ast.string('test')),
    );

    expect(transformAstToDisplayFields(call)).toMatchObject({
      key: expect.stringMatching(/^labels_bug_feature_test_/),
      label: 'Labels: Bug, Feature, Test',
      name: expect.any(String),
      transform: expect.any(Function),
    });
  });

  it('transforms a mix of field names and function calls', () => {
    const fields = ast.collection(
      ast.fieldName('title'),
      ast.functionCall('labels', ast.collection(ast.string('bug'))),
      ast.fieldName('description'),
    );

    expect(transformAstToDisplayFields(fields)).toEqual([
      { key: 'title', label: 'Title', name: 'title' },
      {
        key: expect.stringMatching(/^labels_bug_/),
        label: 'Label: Bug',
        name: expect.any(String),
        transform: expect.any(Function),
      },
      { key: 'description', label: 'Description', name: 'description' },
    ]);
  });

  it('throws an error for unknown AST node types', () => {
    const badNode = { type: 'unknown', value: 'test' };

    expect(() => transformAstToDisplayFields(badNode)).toThrow('Unknown value type: unknown');
  });
});

View File

@ -0,0 +1,49 @@
import { transform } from '~/glql/core/transformer/data';
import * as functions from '~/glql/core/transformer/functions';
// Specs for the GLQL data transformer, which flattens a GraphQL response for
// the configured source and applies each field's transform function.
describe('GLQL Data Transformer', () => {
  describe('transform', () => {
    it('transforms data for issues source', () => {
      const issueOne = { id: '1', title: 'Issue 1', labels: { nodes: [{ title: 'bug' }] } };
      const issueTwo = { id: '2', title: 'Issue 2', labels: { nodes: [{ title: 'feature' }] } };
      const data = { project: { issues: { nodes: [issueOne, issueTwo] } } };
      const config = {
        source: 'issues',
        fields: [
          { key: 'title', name: 'title' },
          {
            key: 'labels_bug',
            name: 'labels',
            // Moves labels matching "bug" into the labels_bug key, leaving
            // the remaining labels behind.
            transform: functions.getFunction('labels').getTransformer('labels_bug', 'bug'),
          },
        ],
      };

      expect(transform(data, config)).toEqual({
        nodes: [
          {
            id: '1',
            title: 'Issue 1',
            labels_bug: { nodes: [{ title: 'bug' }] },
            labels: { nodes: [] },
          },
          {
            id: '2',
            title: 'Issue 2',
            labels_bug: { nodes: [] },
            labels: { nodes: [{ title: 'feature' }] },
          },
        ],
      });
    });
  });
});

View File

@ -0,0 +1,44 @@
import * as functions from '~/glql/core/transformer/functions';
// Specs for the GLQL transformer functions registry (currently `labels`),
// which derives field names/labels and builds data transformers for
// function-call fields.
describe('GLQL Transformer Functions', () => {
  describe('labels', () => {
    it('returns correct field name', () => {
      expect(functions.getFunction('labels').getFieldName('bug', 'feature')).toBe('labels');
    });

    it('returns correct field label', () => {
      expect(functions.getFunction('labels').getFieldLabel('bug', 'feature')).toBe(
        'Labels: Bug, Feature',
      );
    });

    it('transforms data correctly', () => {
      const transformer = functions.getFunction('labels').getTransformer(
        'custom_key',
        'bug',
        'feature',
      );
      const input = {
        nodes: [
          { id: '1', labels: { nodes: [{ title: 'bug' }, { title: 'critical' }] } },
          { id: '2', labels: { nodes: [{ title: 'feature' }, { title: 'enhancement' }] } },
        ],
      };

      // Matching labels are moved under custom_key; the rest stay in labels.
      expect(transformer(input)).toEqual({
        nodes: [
          {
            id: '1',
            custom_key: { nodes: [{ title: 'bug' }] },
            labels: { nodes: [{ title: 'critical' }] },
          },
          {
            id: '2',
            custom_key: { nodes: [{ title: 'feature' }] },
            labels: { nodes: [{ title: 'enhancement' }] },
          },
        ],
      });
    });
  });
});

View File

@ -1,7 +1,7 @@
import renderGlqlNodes from '~/glql';
jest.mock('~/lib/graphql');
jest.mock('~/glql/core/executor');
jest.mock('~/glql/core/parser/query');
describe('renderGlqlNodes', () => {
it('loops over all glql code blocks and renders them', async () => {

View File

@ -86,4 +86,9 @@ export const MOCK_ASSIGNEES = {
],
};
export const MOCK_FIELDS = ['title', 'author', 'state', 'description'];
export const MOCK_FIELDS = [
{ key: 'title', label: 'Title', name: 'title' },
{ key: 'author', label: 'Author', name: 'author' },
{ key: 'state', label: 'State', name: 'state' },
{ key: 'description', label: 'Description', name: 'description' },
];

View File

@ -1,9 +1,4 @@
import {
extractGroupOrProject,
parseQueryText,
parseFrontmatter,
toSentenceCase,
} from '~/glql/utils/common';
import { extractGroupOrProject, toSentenceCase } from '~/glql/utils/common';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
describe('extractGroupOrProject', () => {
@ -34,50 +29,6 @@ describe('extractGroupOrProject', () => {
});
});
describe('parseQueryText', () => {
it('separates the presentation layer from the query and returns an object', () => {
const text = `---
fields: title, assignees, dueDate
display: list
---
assignee = currentUser()`;
expect(parseQueryText(text)).toEqual({
frontmatter: 'fields: title, assignees, dueDate\ndisplay: list',
query: 'assignee = currentUser()',
});
});
it('returns empty frontmatter if no frontmatter is present', () => {
const text = 'assignee = currentUser()';
expect(parseQueryText(text)).toEqual({
frontmatter: '',
query: 'assignee = currentUser()',
});
});
});
describe('parseFrontmatter', () => {
it('parses the frontmatter and returns an object', () => {
const frontmatter = 'fields: title, assignees, dueDate\ndisplay: list';
expect(parseFrontmatter(frontmatter)).toEqual({
fields: ['title', 'assignees', 'dueDate'],
display: 'list',
});
});
it('returns default fields if none are provided', () => {
const frontmatter = 'display: list';
expect(parseFrontmatter(frontmatter, { fields: ['title', 'assignees', 'dueDate'] })).toEqual({
fields: ['title', 'assignees', 'dueDate'],
display: 'list',
});
});
});
describe('toSentenceCase', () => {
it.each`
str | expected

View File

@ -37,6 +37,7 @@ const REALTIME_REQUEST_STACK = [initialRequest, secondRequest];
describe('Issuable output', () => {
let axiosMock;
let wrapper;
const endpoint = '/gitlab-org/gitlab-shell/-/issues/9/realtime_changes/realtime_changes';
const findStickyHeader = () => wrapper.findComponent(StickyHeader);
const findTitle = () => wrapper.findComponent(TitleComponent);
@ -86,7 +87,6 @@ describe('Issuable output', () => {
jest.spyOn(eventHub, '$emit');
axiosMock = new MockAdapter(axios);
const endpoint = '/gitlab-org/gitlab-shell/-/issues/9/realtime_changes/realtime_changes';
axiosMock.onGet(endpoint).replyOnce(HTTP_STATUS_OK, REALTIME_REQUEST_STACK[0], {
'POLL-INTERVAL': '1',
@ -102,11 +102,9 @@ describe('Issuable output', () => {
});
describe('update', () => {
beforeEach(async () => {
await createComponent();
});
it('should render a title/description/edited and update title/description/edited on update', async () => {
await createComponent();
expect(findTitle().props('titleText')).toContain(initialRequest.title_text);
expect(findDescription().props('descriptionText')).toContain('this is a description');
@ -125,6 +123,36 @@ describe('Issuable output', () => {
expect(findEdited().props('updatedByPath')).toMatch(/\/other_user$/);
expect(findEdited().props('updatedAt')).toBe(secondRequest.updated_at);
});
it('does not update description if only a details tag is opened/closed', async () => {
axiosMock.reset();
axiosMock.onGet(endpoint).replyOnce(
HTTP_STATUS_OK,
{
...initialRequest,
description: '<details><summary>Details</summary>Some details</details>',
description_text: 'Some details',
},
{ 'POLL-INTERVAL': '1' },
);
axiosMock.onGet(endpoint).replyOnce(
HTTP_STATUS_OK,
{
...secondRequest,
description: '<details open><summary>Details</summary>Some details</details>',
description_text: 'Some details',
},
{ 'POLL-INTERVAL': '-1' },
);
await createComponent();
await advanceToNextPoll();
expect(findDescription().props('descriptionHtml')).toBe(
'<details><summary>Details</summary>Some details</details>',
);
});
});
describe('with permissions', () => {

View File

@ -335,7 +335,7 @@ RSpec.describe GitlabSchema.types['Project'], feature_category: :groups_and_proj
it { is_expected.to have_graphql_resolver(Resolvers::ProjectMergeRequestsResolver) }
it do
is_expected.to have_graphql_arguments(
is_expected.to include_graphql_arguments(
:iids,
:source_branches,
:target_branches,

View File

@ -8,6 +8,7 @@ RSpec.describe Ml::Model, feature_category: :mlops do
let_it_be(:existing_model) { create(:ml_models, name: 'an_existing_model', project: project1) }
let_it_be(:another_existing_model) { create(:ml_models, name: 'an_existing_model', project: project2) }
let_it_be(:valid_name) { 'a_valid_name' }
let_it_be(:valid_description) { 'Valid description' }
let_it_be(:default_experiment) { create(:ml_experiments, name: "[model]#{valid_name}", project: project1) }
describe 'associations' do
@ -23,8 +24,11 @@ RSpec.describe Ml::Model, feature_category: :mlops do
let(:name) { valid_name }
let(:description) { valid_description }
subject(:errors) do
m = described_class.new(name: name, project: project1, default_experiment: default_experiment)
m = described_class.new(name: name, project: project1, default_experiment: default_experiment,
description: description)
m.validate
m.errors
end
@ -33,6 +37,20 @@ RSpec.describe Ml::Model, feature_category: :mlops do
expect(errors).to be_empty
end
describe 'description' do
context 'when description is too large' do
let(:description) { 'a' * 10_001 }
it { expect(errors).to include(:description) }
end
context 'when description is below threshold' do
let(:description) { 'a' * 100 }
it { expect(errors).not_to include(:description) }
end
end
describe 'name' do
where(:ctx, :name) do
'name is blank' | ''

View File

@ -69,10 +69,16 @@ RSpec.describe Ml::ModelVersion, feature_category: :mlops do
describe 'description' do
context 'when description is too large' do
let(:description) { 'a' * 501 }
let(:description) { 'a' * 10_001 }
it { expect(errors).to include(:description) }
end
context 'when description is below threshold' do
let(:description) { 'a' * 100 }
it { expect(errors).not_to include(:description) }
end
end
describe 'model' do

View File

@ -24,7 +24,7 @@ RSpec.describe Ml::ModelVersions::UpdateModelVersionService, feature_category: :
end
context 'when description is invalid' do
let(:description) { 'a' * 501 }
let(:description) { 'a' * 10001 }
it { is_expected.to be_error }
end

View File

@ -106,16 +106,6 @@ end
RSpec::Matchers.define :have_graphql_arguments do |*expected|
include GraphqlHelpers
def expected_names(field)
@names ||= Array.wrap(expected).map { |name| GraphqlHelpers.fieldnamerize(name) }
if field.try(:type).try(:ancestors)&.include?(GraphQL::Types::Relay::BaseConnection)
@names | %w[after before first last]
else
@names
end
end
match do |field|
names = expected_names(field)
@ -130,6 +120,24 @@ RSpec::Matchers.define :have_graphql_arguments do |*expected|
end
end
RSpec::Matchers.define :include_graphql_arguments do |*expected|
include GraphqlHelpers
match do |field|
names = expected_names(field)
expect(field.arguments.keys).to include(*names)
end
failure_message do |field|
names = expected_names(field).inspect
args = field.arguments.keys.inspect
missing = names - args
"is missing fields: <#{missing.inspect}>" if missing.any?
end
end
module GraphQLTypeHelpers
def message(object, expected, **opts)
non_null = expected.non_null? || (opts.key?(:null) && !opts[:null])
@ -292,3 +300,13 @@ RSpec::Matchers.define :have_graphql_description do |expected|
end
end
end
def expected_names(field)
@names ||= Array.wrap(expected).map { |name| GraphqlHelpers.fieldnamerize(name) }
if field.try(:type).try(:ancestors)&.include?(GraphQL::Types::Relay::BaseConnection)
@names | %w[after before first last]
else
@names
end
end