Add latest changes from gitlab-org/gitlab@master

parent 4863b73ae6
commit 407107485a
@@ -3280,7 +3280,6 @@ Gitlab/BoundedContexts:
     - 'ee/app/services/llm/explain_code_service.rb'
     - 'ee/app/services/llm/generate_commit_message_service.rb'
     - 'ee/app/services/llm/generate_description_service.rb'
-    - 'ee/app/services/llm/generate_summary_service.rb'
     - 'ee/app/services/llm/git_command_service.rb'
     - 'ee/app/services/llm/internal/categorize_chat_question_service.rb'
     - 'ee/app/services/llm/internal/completion_service.rb'
@@ -274,7 +274,6 @@ Gitlab/RSpec/MisplacedEeSpecFile:
     - 'ee/spec/services/groups/destroy_service_spec.rb'
     - 'ee/spec/services/groups/ssh_certificates/create_service_spec.rb'
     - 'ee/spec/services/groups/ssh_certificates/destroy_service_spec.rb'
-    - 'ee/spec/services/groups/transfer_service_spec.rb'
     - 'ee/spec/services/groups/update_service_spec.rb'
     - 'ee/spec/services/ide/schemas_config_service_spec.rb'
     - 'ee/spec/services/issues/build_service_spec.rb'
@@ -1637,7 +1637,6 @@ Layout/LineLength:
     - 'ee/spec/services/geo/container_repository_sync_spec.rb'
     - 'ee/spec/services/geo/framework_repository_sync_service_spec.rb'
     - 'ee/spec/services/gitlab_subscriptions/check_future_renewal_service_spec.rb'
-    - 'ee/spec/services/groups/transfer_service_spec.rb'
     - 'ee/spec/services/groups/update_repository_storage_service_spec.rb'
     - 'ee/spec/services/incident_management/escalation_policies/create_service_spec.rb'
     - 'ee/spec/services/incident_management/escalation_policies/destroy_service_spec.rb'
@@ -555,7 +555,6 @@ RSpec/BeforeAllRoleAssignment:
     - 'ee/spec/services/groups/destroy_service_spec.rb'
     - 'ee/spec/services/groups/enterprise_users/associate_service_spec.rb'
     - 'ee/spec/services/groups/epics_count_service_spec.rb'
-    - 'ee/spec/services/groups/transfer_service_spec.rb'
     - 'ee/spec/services/ide/schemas_config_service_spec.rb'
     - 'ee/spec/services/incident_management/oncall_rotations/destroy_service_spec.rb'
     - 'ee/spec/services/incident_management/oncall_rotations/edit_service_spec.rb'
@@ -571,7 +570,6 @@ RSpec/BeforeAllRoleAssignment:
     - 'ee/spec/services/llm/chat_service_spec.rb'
     - 'ee/spec/services/llm/generate_commit_message_service_spec.rb'
     - 'ee/spec/services/llm/generate_description_service_spec.rb'
-    - 'ee/spec/services/llm/generate_summary_service_spec.rb'
     - 'ee/spec/services/llm/merge_requests/summarize_review_service_spec.rb'
     - 'ee/spec/services/members/activate_service_spec.rb'
     - 'ee/spec/services/merge_trains/create_pipeline_service_spec.rb'
@@ -892,7 +892,6 @@ RSpec/NamedSubject:
     - 'ee/spec/services/groups/compliance_report_csv_service_spec.rb'
     - 'ee/spec/services/groups/destroy_service_spec.rb'
     - 'ee/spec/services/groups/epics_count_service_spec.rb'
-    - 'ee/spec/services/groups/transfer_service_spec.rb'
     - 'ee/spec/services/groups/update_repository_storage_service_spec.rb'
     - 'ee/spec/services/groups/update_service_spec.rb'
     - 'ee/spec/services/ide/schemas_config_service_spec.rb'
Gemfile
@@ -165,7 +165,7 @@ gem 'rack-cors', '~> 2.0.1', require: 'rack/cors', feature_category: :shared

 # GraphQL API
 gem 'graphql', '2.5.11', feature_category: :api
-gem 'graphql-docs', '~> 5.0.0', group: [:development, :test], feature_category: :api
+gem 'graphql-docs', '~> 5.2.0', group: [:development, :test], feature_category: :api
 gem 'apollo_upload_server', '~> 2.1.6', feature_category: :api

 # Cells
@@ -293,7 +293,7 @@
 {"name":"grape_logging","version":"1.8.4","platform":"ruby","checksum":"efcc3e322dbd5d620a68f078733b7db043cf12680144cd03c982f14115c792d1"},
 {"name":"graphlyte","version":"1.0.0","platform":"ruby","checksum":"b5af4ab67dde6e961f00ea1c18f159f73b52ed11395bb4ece297fe628fa1804d"},
 {"name":"graphql","version":"2.5.11","platform":"ruby","checksum":"1169ffc6e215fd4d60056455b672c40a0cafa0607262049c2cca343b0f6bdb5c"},
-{"name":"graphql-docs","version":"5.0.0","platform":"ruby","checksum":"76baca6e5a803a4b6a9fbbbfdbf16742b7c4c546c8592b6e1a7aa4e79e562d04"},
+{"name":"graphql-docs","version":"5.2.0","platform":"ruby","checksum":"44d41724529f531adf9265ded7478b74b0c4b927cddc8b9f114337a73f32de08"},
 {"name":"grpc","version":"1.63.0","platform":"aarch64-linux","checksum":"dc75c5fd570b819470781d9512105dddfdd11d984f38b8e60bb946f92d1f79ee"},
 {"name":"grpc","version":"1.63.0","platform":"arm64-darwin","checksum":"91b93a354508a9d1772f095554f2e4c04358c2b32d7a670e3705b7fc4695c996"},
 {"name":"grpc","version":"1.63.0","platform":"ruby","checksum":"5f4383c4ee2886e92c31b90422261b7527f26e3baa585d877e9804e715983686"},
@@ -952,13 +952,15 @@ GEM
       base64
       fiber-storage
       logger
-    graphql-docs (5.0.0)
+    graphql-docs (5.2.0)
       commonmarker (~> 0.23, >= 0.23.6)
       escape_utils (~> 1.2)
       extended-markdown-filter (~> 0.4)
       gemoji (~> 3.0)
       graphql (~> 2.0)
       html-pipeline (~> 2.14, >= 2.14.3)
+      logger (~> 1.6)
+      ostruct (~> 0.6)
       sass-embedded (~> 1.58)
     grpc (1.63.0)
       google-protobuf (~> 3.25)
@@ -2206,7 +2208,7 @@ DEPENDENCIES
   grape_logging (~> 1.8, >= 1.8.4)
   graphlyte (~> 1.0.0)
   graphql (= 2.5.11)
-  graphql-docs (~> 5.0.0)
+  graphql-docs (~> 5.2.0)
   grpc (= 1.63.0)
   gssapi (~> 1.3.1)
   guard-rspec
@@ -293,7 +293,7 @@
 {"name":"grape_logging","version":"1.8.4","platform":"ruby","checksum":"efcc3e322dbd5d620a68f078733b7db043cf12680144cd03c982f14115c792d1"},
 {"name":"graphlyte","version":"1.0.0","platform":"ruby","checksum":"b5af4ab67dde6e961f00ea1c18f159f73b52ed11395bb4ece297fe628fa1804d"},
 {"name":"graphql","version":"2.5.11","platform":"ruby","checksum":"1169ffc6e215fd4d60056455b672c40a0cafa0607262049c2cca343b0f6bdb5c"},
-{"name":"graphql-docs","version":"5.0.0","platform":"ruby","checksum":"76baca6e5a803a4b6a9fbbbfdbf16742b7c4c546c8592b6e1a7aa4e79e562d04"},
+{"name":"graphql-docs","version":"5.2.0","platform":"ruby","checksum":"44d41724529f531adf9265ded7478b74b0c4b927cddc8b9f114337a73f32de08"},
 {"name":"grpc","version":"1.63.0","platform":"aarch64-linux","checksum":"dc75c5fd570b819470781d9512105dddfdd11d984f38b8e60bb946f92d1f79ee"},
 {"name":"grpc","version":"1.63.0","platform":"arm64-darwin","checksum":"91b93a354508a9d1772f095554f2e4c04358c2b32d7a670e3705b7fc4695c996"},
 {"name":"grpc","version":"1.63.0","platform":"ruby","checksum":"5f4383c4ee2886e92c31b90422261b7527f26e3baa585d877e9804e715983686"},
@@ -946,13 +946,15 @@ GEM
       base64
       fiber-storage
       logger
-    graphql-docs (5.0.0)
+    graphql-docs (5.2.0)
       commonmarker (~> 0.23, >= 0.23.6)
       escape_utils (~> 1.2)
       extended-markdown-filter (~> 0.4)
       gemoji (~> 3.0)
       graphql (~> 2.0)
       html-pipeline (~> 2.14, >= 2.14.3)
+      logger (~> 1.6)
+      ostruct (~> 0.6)
       sass-embedded (~> 1.58)
     grpc (1.63.0)
       google-protobuf (~> 3.25)
@@ -2201,7 +2203,7 @@ DEPENDENCIES
   grape_logging (~> 1.8, >= 1.8.4)
   graphlyte (~> 1.0.0)
   graphql (= 2.5.11)
-  graphql-docs (~> 5.0.0)
+  graphql-docs (~> 5.2.0)
   grpc (= 1.63.0)
   gssapi (~> 1.3.1)
   guard-rspec
@@ -127,7 +127,7 @@ export default {
       :placeholder="s__('Jobs|Search or filter jobs…')"
       :available-tokens="tokens"
       :value="filteredSearchValue"
-      :search-text-option-label="__('Search for this text')"
+      :search-text-option-label="__('Search for this text (experiment)')"
       terms-as-tokens
       @submit="onSubmit"
     />
@@ -2,6 +2,7 @@ import { identity, memoize, isEmpty } from 'lodash';
 import { initEmojiMap, getAllEmoji, searchEmoji } from '~/emoji';
 import { newDate } from '~/lib/utils/datetime_utility';
 import axios from '~/lib/utils/axios_utils';
+import { currentAssignees } from '~/graphql_shared/issuable_client';
 import { COMMANDS } from '../constants';

 export function defaultSorter(searchFields) {
@@ -116,6 +117,8 @@ export function createDataSource({
 }

 export default class AutocompleteHelper {
+  tiptapEditor;
+
   constructor({ dataSourceUrls, sidebarMediator }) {
     this.updateDataSources(dataSourceUrls);
@@ -175,13 +178,21 @@ export default class AutocompleteHelper {
       }),
     user: (items) =>
       items.filter((item) => {
-        const assigned = this.sidebarMediator?.store?.assignees.some(
+        let assigned = this.sidebarMediator?.store?.assignees.some(
           (assignee) => assignee.username === item.username,
         );
         const assignedReviewer = this.sidebarMediator?.store?.reviewers.some(
           (reviewer) => reviewer.username === item.username,
         );

+        const { workItemId } =
+          this.tiptapEditor?.view.dom.closest('.js-gfm-wrapper')?.dataset || {};
+
+        if (workItemId) {
+          const assignees = currentAssignees()[workItemId] || [];
+          assigned = assignees.some((assignee) => assignee.username === item.username);
+        }
+
         if (command === COMMANDS.ASSIGN) return !assigned;
         if (command === COMMANDS.ASSIGN_REVIEWER) return !assignedReviewer;
         if (command === COMMANDS.UNASSIGN) return assigned;
@@ -66,6 +66,8 @@ export const createContentEditor = ({
   const trackedExtensions = allExtensions.map(trackInputRulesAndShortcuts);
   const tiptapEditor = createTiptapEditor({ extensions: trackedExtensions, ...tiptapOptions });

+  autocompleteHelper.tiptapEditor = tiptapEditor;
+
   return new ContentEditor({
     tiptapEditor,
     serializer,
@@ -7,6 +7,7 @@ import initPipelines from '~/commit/pipelines/pipelines_bundle';
 import MergeRequest from '~/merge_request';
 import CompareApp from '~/merge_requests/components/compare_app.vue';
 import { __ } from '~/locale';
+import { createRapidDiffsApp } from '~/rapid_diffs';

 const mrNewCompareNode = document.querySelector('.js-merge-request-new-compare');
 if (mrNewCompareNode) {
@@ -119,6 +120,7 @@ if (mrNewCompareNode) {
   // eslint-disable-next-line no-new
   new MergeRequest({
     action: mrNewSubmitNode.dataset.mrSubmitAction,
+    createRapidDiffsApp,
   });
   initPipelines();
   initMarkdownEditor();
@@ -1,127 +0,0 @@
-import Vue from 'vue';
-
-import { initMarkdownEditor } from 'ee_else_ce/pages/projects/merge_requests/init_markdown_editor';
-import { findTargetBranch } from 'ee_else_ce/pages/projects/merge_requests/creations/new/branch_finder';
-
-import initPipelines from '~/commit/pipelines/pipelines_bundle';
-import MergeRequest from '~/merge_request';
-import CompareApp from '~/merge_requests/components/compare_app.vue';
-import { __ } from '~/locale';
-import { createRapidDiffsApp } from '~/rapid_diffs';
-
-const mrNewCompareNode = document.querySelector('.js-merge-request-new-compare');
-if (mrNewCompareNode) {
-  const targetCompareEl = document.getElementById('js-target-project-dropdown');
-  const sourceCompareEl = document.getElementById('js-source-project-dropdown');
-  const compareEl = document.querySelector('.js-merge-request-new-compare');
-  const targetBranch = Vue.observable({ name: '' });
-  const currentSourceBranch = JSON.parse(sourceCompareEl.dataset.currentBranch);
-  const sourceBranch = Vue.observable(currentSourceBranch);
-
-  // eslint-disable-next-line no-new
-  new Vue({
-    el: sourceCompareEl,
-    name: 'SourceCompareApp',
-    provide: {
-      currentProject: JSON.parse(sourceCompareEl.dataset.currentProject),
-      branchCommitPath: compareEl.dataset.sourceBranchUrl,
-      inputs: {
-        project: {
-          id: 'merge_request_source_project_id',
-          name: 'merge_request[source_project_id]',
-        },
-        branch: {
-          id: 'merge_request_source_branch',
-          name: 'merge_request[source_branch]',
-        },
-      },
-      i18n: {
-        projectHeaderText: __('Select source project'),
-        branchHeaderText: __('Select source branch'),
-      },
-      toggleClass: {
-        project: 'js-source-project',
-        branch: 'js-source-branch gl-font-monospace',
-      },
-      compareSide: 'source',
-    },
-    methods: {
-      async selectedBranch(branchName) {
-        const targetBranchName = await findTargetBranch(branchName);
-
-        if (targetBranchName) {
-          targetBranch.name = targetBranchName;
-        }
-
-        sourceBranch.value = branchName;
-        sourceBranch.text = branchName;
-      },
-    },
-    render(h) {
-      return h(CompareApp, {
-        props: {
-          currentBranch: currentSourceBranch,
-        },
-        on: {
-          'select-branch': this.selectedBranch,
-        },
-      });
-    },
-  });
-
-  const currentTargetBranch = JSON.parse(targetCompareEl.dataset.currentBranch);
-  // eslint-disable-next-line no-new
-  new Vue({
-    el: targetCompareEl,
-    name: 'TargetCompareApp',
-    provide: {
-      currentProject: JSON.parse(targetCompareEl.dataset.currentProject),
-      projectsPath: targetCompareEl.dataset.targetProjectsPath,
-      branchCommitPath: compareEl.dataset.targetBranchUrl,
-      inputs: {
-        project: {
-          id: 'merge_request_target_project_id',
-          name: 'merge_request[target_project_id]',
-        },
-        branch: {
-          id: 'merge_request_target_branch',
-          name: 'merge_request[target_branch]',
-        },
-      },
-      i18n: {
-        projectHeaderText: __('Select target project'),
-        branchHeaderText: __('Select target branch'),
-      },
-      toggleClass: {
-        project: 'js-target-project',
-        branch: 'js-target-branch gl-font-monospace',
-      },
-    },
-    computed: {
-      currentBranch() {
-        if (targetBranch.name) {
-          return { text: targetBranch.name, value: targetBranch.name };
-        }
-
-        return currentTargetBranch;
-      },
-      isDisabled() {
-        return !sourceBranch.value;
-      },
-    },
-    render(h) {
-      return h(CompareApp, {
-        props: { currentBranch: this.currentBranch, disabled: this.isDisabled },
-      });
-    },
-  });
-} else {
-  const mrNewSubmitNode = document.querySelector('.js-merge-request-new-submit');
-  // eslint-disable-next-line no-new
-  new MergeRequest({
-    action: mrNewSubmitNode.dataset.mrSubmitAction,
-    createRapidDiffsApp,
-  });
-  initPipelines();
-  initMarkdownEditor();
-}
@@ -63,6 +63,7 @@ export default {
     v-show="fileBrowserDrawerVisible"
     :open="openedOnce"
     :z-index="$options.DRAWER_Z_INDEX"
+    header-sticky
     @close="close"
   >
     <template #title>
@@ -76,7 +76,7 @@ export default {
     <gl-button
       category="tertiary"
       size="small"
-      class="settings-toggle gl-shrink-0 !gl-px-0"
+      class="settings-toggle gl-shrink-0 !gl-px-0 !gl-pl-2"
       :aria-label="toggleButtonAriaLabel"
       :aria-expanded="ariaExpanded"
       :aria-controls="collapseId"
@@ -257,7 +257,6 @@ export default {
       if (!res.data) {
         return;
       }
-      this.activeChildItem = null;
       this.$emit('work-item-updated', this.workItem);
       if (isEmpty(this.workItem)) {
         this.setEmptyState();
@@ -30,7 +30,7 @@ class Projects::BranchesController < Projects::ApplicationController
        fetch_merge_requests_for_branches

        @refs_pipelines = @project.ci_pipelines.latest_successful_for_refs(@branches.map(&:name))
-       @merged_branch_names = repository.merged_branch_names(@branches.map(&:name))
+       @merged_branch_names = repository.merged_branch_names(@branches.map(&:name), include_identical: true)
        @branch_pipeline_statuses = Ci::CommitStatusesFinder.new(@project, repository, current_user, @branches).execute

        # https://gitlab.com/gitlab-org/gitlab/-/issues/22851
@@ -31,10 +31,7 @@ class Projects::MergeRequests::CreationsController < Projects::MergeRequests::Ap

   def new
     define_new_vars
-    return unless rapid_diffs?
-
-    # load 'rapid_diffs' javascript entrypoint instead of 'new'
     @js_action_name = 'rapid_diffs'
     @rapid_diffs_presenter = ::RapidDiffs::MergeRequestCreationPresenter.new(
       @merge_request,
       project,
@@ -42,7 +39,6 @@ class Projects::MergeRequests::CreationsController < Projects::MergeRequests::Ap
       diff_options,
       { merge_request: merge_request_params }
     )
-    render action: :rapid_diffs
   end

   def create
@@ -157,15 +153,6 @@ class Projects::MergeRequests::CreationsController < Projects::MergeRequests::Ap
     set_pipeline_variables
   end

-  def rapid_diffs?
-    ::Feature.enabled?(:rapid_diffs_on_mr_creation, current_user, type: :beta) &&
-      !rapid_diffs_disabled?
-  end
-
-  def rapid_diffs_disabled?
-    ::Feature.enabled?(:rapid_diffs_debug, current_user, type: :ops) && params[:rapid_diffs_disabled] == 'true'
-  end
-
   # rubocop: disable CodeReuse/ActiveRecord
   def selected_target_project
     return @project unless @project.forked?
@@ -23,6 +23,7 @@ module Types
       scopes: [:api, :read_api, :ai_workflows],
       description: 'Full path of the namespace.'
     field :name, GraphQL::Types::String, null: false,
+      scopes: [:api, :read_api, :ai_workflows],
       description: 'Name of the namespace.'
     field :path, GraphQL::Types::String, null: false,
       description: 'Path of the namespace.'
@@ -34,6 +35,7 @@ module Types
       description: 'Indicates if the cross_project_pipeline feature is available for the namespace.'

     field :description, GraphQL::Types::String, null: true,
+      scopes: [:api, :read_api, :ai_workflows],
       description: 'Description of the namespace.'

     field :lfs_enabled,
@@ -181,6 +183,12 @@ module Types
       method: :itself,
       experiment: { milestone: '18.1' }

+    field :web_url,
+      GraphQL::Types::String,
+      null: true,
+      scopes: [:api, :read_api, :ai_workflows],
+      description: 'URL of the namespace.'
+
     markdown_field :description_html, null: true, &:namespace_details

     def achievements_path
@@ -84,7 +84,15 @@ module Ci
       inverse_of: :job
     # rubocop:enable Cop/ActiveRecordDependent

+    has_many :inputs,
+      ->(build) { in_partition(build) },
+      class_name: 'Ci::JobInput',
+      foreign_key: :job_id,
+      partition_foreign_key: :partition_id,
+      inverse_of: :job
+
     has_many :job_variables, class_name: 'Ci::JobVariable', foreign_key: :job_id, inverse_of: :job

     has_many :job_annotations,
       ->(build) { in_partition(build) },
       class_name: 'Ci::JobAnnotation',
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Ci
+  class JobInput < Ci::ApplicationRecord
+    include Ci::Partitionable
+
+    MAX_VALUE_SIZE = ::Gitlab::Ci::Config::Interpolation::Access::MAX_ACCESS_BYTESIZE
+
+    self.table_name = :p_ci_job_inputs
+    self.primary_key = :id
+
+    partitionable scope: :job, partitioned: true
+
+    belongs_to :job, ->(build_name) { in_partition(build_name) },
+      class_name: 'Ci::Build', partition_foreign_key: :partition_id,
+      inverse_of: :inputs
+
+    belongs_to :project
+
+    validates :name, presence: true, length: { maximum: 255 }, uniqueness: { scope: [:job_id, :partition_id] }
+    validates :project, presence: true
+    validates :value, json_schema: { filename: 'ci_job_input_value', size_limit: 64.kilobytes }
+
+    # The maximum permitted size is equivalent to the maximum size permitted for an interpolated input value.
+    validate :value_does_not_exceed_max_size
+
+    encrypts :value
+
+    private
+
+    def value_does_not_exceed_max_size
+      return if Gitlab::Json.encode(value).size <= MAX_VALUE_SIZE
+
+      errors.add(:value, "exceeds max serialized size: #{MAX_VALUE_SIZE} characters")
+    end
+  end
+end
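A quick sketch of how the validations in this new model behave (a hypothetical Rails console session; the `build` object is assumed to be an existing `Ci::Build`, and only API visible in the hunk above is used):

```ruby
# Hypothetical console sketch for Ci::JobInput.
input = Ci::JobInput.new(job: build, project: build.project, name: 'environment', value: 'staging')
input.valid? # => true

# The JSON-serialized value may not exceed MAX_VALUE_SIZE bytes:
input.value = 'x' * (Ci::JobInput::MAX_VALUE_SIZE + 1)
input.valid? # => false
input.errors[:value]
# => ["exceeds max serialized size: ... characters"]

# `name` is also unique per (job_id, partition_id), so a second
# 'environment' input for the same build fails validation.
```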
@@ -22,6 +22,7 @@ module Ci
       Ci::JobAnnotation
       Ci::JobArtifact
       Ci::JobArtifactReport
+      Ci::JobInput
       Ci::JobVariable
       Ci::Pipeline
       Ci::PendingBuild
@@ -1018,16 +1018,16 @@ class Repository

   # If this method is not provided a set of branch names to check merge status,
   # it fetches all branches.
-  def merged_branch_names(branch_names = [])
+  def merged_branch_names(branch_names = [], include_identical: false)
     # Currently we should skip caching if requesting all branch names
     # This is only used in a few places, notably app/services/branches/delete_merged_service.rb,
     # and it could potentially result in a very large cache.
-    return raw_repository.merged_branch_names(branch_names) if branch_names.empty?
+    return raw_repository.merged_branch_names(branch_names, include_identical: include_identical) if branch_names.empty? || include_identical

     cache = redis_hash_cache

     merged_branch_names_hash = cache.fetch_and_add_missing(:merged_branch_names, branch_names) do |missing_branch_names, hash|
-      merged = raw_repository.merged_branch_names(missing_branch_names)
+      merged = raw_repository.merged_branch_names(missing_branch_names, include_identical: include_identical)

       missing_branch_names.each do |bn|
         # Redis only stores strings in hset keys, use a fancy encoder
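A sketch of the new keyword from the caller's side (hypothetical console session; `project` is assumed to exist):

```ruby
repository = project.repository

# Default behavior: cached per-branch lookup via the Redis hash cache.
repository.merged_branch_names(%w[feature-a feature-b])

# With include_identical: true the early return above is taken, so the call
# bypasses the cache and goes straight to Gitaly, also counting branches
# whose tip is identical to the root ref (as the branches page now does).
repository.merged_branch_names(%w[feature-a feature-b], include_identical: true)
```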
@@ -0,0 +1,4 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "description": "This schema validates the structure of a CI job input."
+}
@@ -1 +1,6 @@
-= render "page"
+-# required for file browser styles, remove when .diff-tree-list is refactored into Rapid Diffs
+- add_page_specific_style 'page_bundles/merge_requests'
+- add_page_specific_style 'page_bundles/merge_request_creation_rapid_diffs'
+
+= render "page" do
+  = render ::RapidDiffs::AppComponent.new(@rapid_diffs_presenter)
@@ -1,6 +0,0 @@
--# required for file browser styles, remove when .diff-tree-list is refactored into Rapid Diffs
-- add_page_specific_style 'page_bundles/merge_requests'
-- add_page_specific_style 'page_bundles/merge_request_creation_rapid_diffs'
-
-= render "page" do
-  = render ::RapidDiffs::AppComponent.new(@rapid_diffs_presenter)
@@ -1,9 +0,0 @@
----
-name: rapid_diffs_on_mr_creation
-feature_issue_url: https://gitlab.com/groups/gitlab-org/-/epics/11559
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/187955
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/539576
-milestone: '18.0'
-group: group::code review
-type: beta
-default_enabled: true
@@ -509,6 +509,10 @@ p_ci_job_artifacts:
   - table: projects
     column: project_id
     on_delete: async_delete
+p_ci_job_inputs:
+  - table: projects
+    column: project_id
+    on_delete: async_delete
 p_ci_pipelines:
   - table: merge_requests
     column: merge_request_id
@@ -25,6 +25,7 @@ Gitlab::Database::Partitioning.register_models(
   Ci::JobAnnotation,
   Ci::JobArtifact,
   Ci::JobArtifactReport,
+  Ci::JobInput,
   Ci::Pipeline,
   Ci::PipelineVariable,
   Ci::RunnerManagerBuild,
@@ -255,7 +255,7 @@ if (EXPLICIT_VUE_VERSION) {
   if (USE_VUE3) {
     Object.assign(alias, {
       // ensure we always use the same type of module for Vue
-      vue: '@vue/compat/dist/vue.runtime.esm-bundler.js',
+      vue$: '@vue/compat/dist/vue.runtime.esm-bundler.js',
       vuex: path.join(ROOT_PATH, 'app/assets/javascripts/lib/utils/vue3compat/vuex.js'),
       'vue-apollo': path.join(ROOT_PATH, 'app/assets/javascripts/lib/utils/vue3compat/vue_apollo.js'),
       'vue-router': path.join(ROOT_PATH, 'app/assets/javascripts/lib/utils/vue3compat/vue_router.js'),
@@ -0,0 +1,8 @@
+---
+migration_job_name: BackfillProjectIdOnCiBuildNeeds
+description: Backfills sharding key `ci_build_needs.project_id` from `p_ci_builds`.
+feature_category: continuous_integration
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/195899
+milestone: '18.3'
+queued_migration_version: 20250627183000
+finalized_by: # version of the migration that finalized this BBM
@@ -5,4 +5,4 @@ feature_category: package_registry
 introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/173869
 milestone: '17.10'
 queued_migration_version: 20250205125828
-finalized_by: # version of the migration that finalized this BBM
+finalized_by: '20250714232416'
@@ -8,6 +8,15 @@ description: Dependencies for a specific CI/CD job.
 introduced_by_url: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/31328
 milestone: '12.2'
 gitlab_schema: gitlab_ci
 sharding_key:
   project_id: projects
+desired_sharding_key:
+  project_id:
+    references: projects
+    backfill_via:
+      parent:
+        foreign_key: build_id
+        table: p_ci_builds
+        sharding_key: project_id
+        belongs_to: build
+        foreign_key_name: fk_rails_3cf221d4ed_p
+desired_sharding_key_migration_job_name: BackfillProjectIdOnCiBuildNeeds
 table_size: over_limit
@@ -0,0 +1,13 @@
+---
+table_name: p_ci_job_inputs
+classes:
+- Ci::JobInput
+feature_categories:
+- pipeline_composition
+description: Table that contains the job-level inputs configured for CI jobs
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/194719
+milestone: '18.3'
+gitlab_schema: gitlab_ci
+table_size: small
+sharding_key:
+  project_id: projects
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+class CreateCiJobInputs < Gitlab::Database::Migration[2.3]
+  include Gitlab::Database::PartitioningMigrationHelpers
+
+  JOB_ID_AND_NAME_INDEX_NAME = 'index_p_ci_job_inputs_on_job_id_and_name'
+  PROJECT_INDEX_NAME = 'index_p_ci_job_inputs_on_project_id'
+
+  disable_ddl_transaction!
+
+  milestone '18.3'
+
+  def up
+    creation_opts = {
+      primary_key: [:id, :partition_id],
+      options: 'PARTITION BY LIST (partition_id)',
+      if_not_exists: true
+    }
+
+    create_table :p_ci_job_inputs, **creation_opts do |t|
+      t.bigserial :id, null: false
+      t.bigint :job_id, null: false
+      t.bigint :partition_id, null: false
+      t.bigint :project_id, null: false
+      t.integer :input_type, null: false, limit: 2, default: 0
+      t.boolean :sensitive, default: false, null: false
+      t.text :name, null: false, limit: 255
+      t.jsonb :value
+    end
+
+    add_concurrent_partitioned_index :p_ci_job_inputs, :project_id, name: PROJECT_INDEX_NAME
+    add_concurrent_partitioned_index :p_ci_job_inputs, [:job_id, :name, :partition_id],
+      name: JOB_ID_AND_NAME_INDEX_NAME, unique: true
+  end
+
+  def down
+    drop_table :p_ci_job_inputs
+  end
+end
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+class QueueBackfillProjectIdOnCiBuildNeeds < Gitlab::Database::Migration[2.3]
+  milestone '18.3'
+
+  restrict_gitlab_migration gitlab_schema: :gitlab_ci
+
+  MIGRATION = "BackfillProjectIdOnCiBuildNeeds"
+  BATCH_SIZE = 1000
+  SUB_BATCH_SIZE = 100
+  DELAY_INTERVAL = 2.minutes
+
+  def up
+    return if Gitlab.com_except_jh?
+
+    queue_batched_background_migration(
+      MIGRATION,
+      :ci_build_needs,
+      :id,
+      batch_size: BATCH_SIZE,
+      sub_batch_size: SUB_BATCH_SIZE
+    )
+  end
+
+  def down
+    delete_batched_background_migration(MIGRATION, :ci_build_needs, :id, [])
+  end
+end
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+class AddBuildsForeignKeyToCiJobInputs < Gitlab::Database::Migration[2.3]
+  include Gitlab::Database::PartitioningMigrationHelpers
+
+  disable_ddl_transaction!
+
+  milestone '18.3'
+
+  SOURCE_TABLE_NAME = :p_ci_job_inputs
+  TARGET_TABLE_NAME = :p_ci_builds
+  FK_NAME = :fk_rails_30a46abefe_p
+
+  def up
+    add_concurrent_partitioned_foreign_key(
+      SOURCE_TABLE_NAME, TARGET_TABLE_NAME,
+      column: [:partition_id, :job_id],
+      target_column: [:partition_id, :id],
+      on_update: :cascade,
+      on_delete: :cascade,
+      reverse_lock_order: true,
+      name: FK_NAME
+    )
+  end
+
+  def down
+    with_lock_retries do
+      remove_foreign_key_if_exists(
+        SOURCE_TABLE_NAME, TARGET_TABLE_NAME,
+        name: FK_NAME,
+        reverse_lock_order: true
+      )
+    end
+  end
+end
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+class FinalizeHkCreateMissingNugetSymbolFiles < Gitlab::Database::Migration[2.3]
+  milestone '18.3'
+
+  disable_ddl_transaction!
+
+  restrict_gitlab_migration gitlab_schema: :gitlab_main
+
+  def up
+    ensure_batched_background_migration_is_finished(
+      job_class_name: 'CreateMissingNugetSymbolFiles',
+      table_name: :packages_packages,
+      column_name: :id,
+      job_arguments: [],
+      finalize: true
+    )
+  end
+
+  def down; end
+end
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+# See https://docs.gitlab.com/ee/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddNamespaceTimestampIdIndexForAiUsageEvents < Gitlab::Database::Migration[2.3]
+  include Gitlab::Database::PartitioningMigrationHelpers
+
+  disable_ddl_transaction!
+  milestone '18.3'
+
+  TABLE_NAME = :ai_usage_events
+  COLUMN_NAMES = [:namespace_id, :timestamp, :id]
+
+  INDEX_NAME = :index_ai_usage_events_on_namespace_id_timestamp_and_id
+
+  def up
+    add_concurrent_partitioned_index(TABLE_NAME, COLUMN_NAMES, name: INDEX_NAME)
+  end
+
+  def down
+    remove_concurrent_partitioned_index_by_name(TABLE_NAME, INDEX_NAME)
+  end
+end
@@ -0,0 +1 @@
+2bb7b3cacc37b3b508ee1a23dd492c5b7110fe9b007eb25c0266c57933fe87cf
@@ -0,0 +1 @@
+0baefc0585f4efa54b84651fe14e45217e6baa880a0e6fb3ea910b725e1c62e0
@@ -0,0 +1 @@
+8a1e87c52622a65603833e097a49a97287115cd5694e756b322a49ef6f83cbcc
@@ -0,0 +1 @@
+4d371e71fd9a63a6153930553fa26a06deff1a7725fef9ad37f8286e5842ae73
@@ -0,0 +1 @@
+f324f4fa777fc41e05a5f9dd3f37dc1e3a531338c125fd653ef7646547d948b0
@@ -4839,6 +4839,19 @@ CREATE TABLE p_ci_job_artifacts (
 )
 PARTITION BY LIST (partition_id);

+CREATE TABLE p_ci_job_inputs (
+    id bigint NOT NULL,
+    job_id bigint NOT NULL,
+    partition_id bigint NOT NULL,
+    project_id bigint NOT NULL,
+    input_type smallint DEFAULT 0 NOT NULL,
+    sensitive boolean DEFAULT false NOT NULL,
+    name text NOT NULL,
+    value jsonb,
+    CONSTRAINT check_007134e1cd CHECK ((char_length(name) <= 255))
+)
+PARTITION BY LIST (partition_id);
+
 CREATE TABLE p_ci_pipeline_variables (
     key character varying NOT NULL,
     value text,
@@ -19371,6 +19384,15 @@ CREATE SEQUENCE p_ci_job_annotations_id_seq

 ALTER SEQUENCE p_ci_job_annotations_id_seq OWNED BY p_ci_job_annotations.id;

+CREATE SEQUENCE p_ci_job_inputs_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1;
+
+ALTER SEQUENCE p_ci_job_inputs_id_seq OWNED BY p_ci_job_inputs.id;
+
 CREATE SEQUENCE p_ci_workloads_id_seq
     START WITH 1
     INCREMENT BY 1
@@ -28354,6 +28376,8 @@ ALTER TABLE ONLY p_catalog_resource_sync_events ALTER COLUMN id SET DEFAULT next

 ALTER TABLE ONLY p_ci_builds_metadata ALTER COLUMN id SET DEFAULT nextval('ci_builds_metadata_id_seq'::regclass);

+ALTER TABLE ONLY p_ci_job_inputs ALTER COLUMN id SET DEFAULT nextval('p_ci_job_inputs_id_seq'::regclass);
+
 ALTER TABLE ONLY p_ci_workloads ALTER COLUMN id SET DEFAULT nextval('p_ci_workloads_id_seq'::regclass);

 ALTER TABLE ONLY p_knowledge_graph_enabled_namespaces ALTER COLUMN id SET DEFAULT nextval('p_knowledge_graph_enabled_namespaces_id_seq'::regclass);
@@ -31137,6 +31161,9 @@ ALTER TABLE ONLY p_ci_job_artifact_reports
 ALTER TABLE ONLY p_ci_job_artifacts
     ADD CONSTRAINT p_ci_job_artifacts_pkey PRIMARY KEY (id, partition_id);

+ALTER TABLE ONLY p_ci_job_inputs
+    ADD CONSTRAINT p_ci_job_inputs_pkey PRIMARY KEY (id, partition_id);
+
 ALTER TABLE ONLY p_ci_pipeline_variables
     ADD CONSTRAINT p_ci_pipeline_variables_pkey PRIMARY KEY (id, partition_id);
@@ -34668,6 +34695,8 @@ CREATE INDEX index_ai_troubleshoot_job_events_on_project_id ON ONLY ai_troublesh

 CREATE INDEX index_ai_troubleshoot_job_events_on_user_id ON ONLY ai_troubleshoot_job_events USING btree (user_id);

+CREATE INDEX index_ai_usage_events_on_namespace_id_timestamp_and_id ON ONLY ai_usage_events USING btree (namespace_id, "timestamp", id);
+
 CREATE INDEX index_ai_usage_events_on_organization_id ON ONLY ai_usage_events USING btree (organization_id);

 CREATE INDEX index_ai_usage_events_on_user_id ON ONLY ai_usage_events USING btree (user_id);
@@ -37134,6 +37163,10 @@ CREATE INDEX index_p_ci_job_annotations_on_project_id ON ONLY p_ci_job_annotatio

 CREATE INDEX index_p_ci_job_artifact_reports_on_project_id ON ONLY p_ci_job_artifact_reports USING btree (project_id);

+CREATE UNIQUE INDEX index_p_ci_job_inputs_on_job_id_and_name ON ONLY p_ci_job_inputs USING btree (job_id, name, partition_id);
+
+CREATE INDEX index_p_ci_job_inputs_on_project_id ON ONLY p_ci_job_inputs USING btree (project_id);
+
 CREATE INDEX index_p_ci_pipeline_variables_on_project_id ON ONLY p_ci_pipeline_variables USING btree (project_id);

 CREATE INDEX index_p_ci_runner_machine_builds_on_project_id ON ONLY p_ci_runner_machine_builds USING btree (project_id);
@@ -45329,6 +45362,9 @@ ALTER TABLE ONLY saml_providers
 ALTER TABLE ONLY bulk_import_batch_trackers
     ADD CONSTRAINT fk_rails_307efb9f32 FOREIGN KEY (tracker_id) REFERENCES bulk_import_trackers(id) ON DELETE CASCADE;

+ALTER TABLE p_ci_job_inputs
+    ADD CONSTRAINT fk_rails_30a46abefe_p FOREIGN KEY (partition_id, job_id) REFERENCES p_ci_builds(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
+
 ALTER TABLE ONLY pm_package_version_licenses
     ADD CONSTRAINT fk_rails_30ddb7f837 FOREIGN KEY (pm_package_version_id) REFERENCES pm_package_versions(id) ON DELETE CASCADE;
@@ -796,7 +796,7 @@ network and LDAP server response time affects these metrics.

 ### Adjust LDAP user sync schedule

-By default, GitLab runs a worker once per day at 01:30 a.m. server time to
+By default, GitLab runs a worker once per day at 01:30 AM server time to
 check and update GitLab users against LDAP.

 {{< alert type="warning" >}}
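For Linux package installations, the schedule can be moved with a cron-style setting in `/etc/gitlab/gitlab.rb`; `ldap_sync_worker_cron` is the assumed Omnibus setting name, so verify it for your GitLab version:

```ruby
# Hypothetical /etc/gitlab/gitlab.rb change: run the LDAP user sync worker
# at 03:00 AM server time instead of the 01:30 AM default, then apply it
# with `sudo gitlab-ctl reconfigure`.
gitlab_rails['ldap_sync_worker_cron'] = "0 3 * * *"
```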
@@ -213,22 +213,22 @@ gitlab_kas['env'] = {

 ##### Agent server node settings

-| Setting | Description |
-|---------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| `gitlab_kas['private_api_listen_network']` | The network family KAS listens on. Defaults to `tcp` for both IPv4 and IPv6 networks. Set to `tcp4` for IPv4 or `tcp6` for IPv6. |
-| `gitlab_kas['private_api_listen_address']` | The address the KAS listens on. Set to `0.0.0.0:8155` or to an IP:PORT reachable by other nodes in the cluster. |
-| `gitlab_kas['api_secret_key']` | The shared secret used for authentication between KAS and GitLab. The value must be Base64-encoded and exactly 32 bytes long. |
-| `gitlab_kas['private_api_secret_key']` | The shared secret used for authentication between different KAS instances. The value must be Base64-encoded and exactly 32 bytes long. |
-| `OWN_PRIVATE_API_SCHEME` | Optional value used to specify what scheme to use when constructing `OWN_PRIVATE_API_URL`. Can be `grpc` or `grpcs`. |
-| `OWN_PRIVATE_API_URL` | The environment variable used by KAS for service discovery. Set to the hostname or IP address of the node you're configuring. The node must be reachable by other nodes in the cluster. |
-| `OWN_PRIVATE_API_HOST` | Optional value used to verify the TLS certificate hostname. <sup>1</sup> A client compares this value to the hostname in the server's TLS certificate file. |
-| `OWN_PRIVATE_API_PORT` | Optional value used to specify what port to use when constructing `OWN_PRIVATE_API_URL`. |
-| `OWN_PRIVATE_API_CIDR` | Optional value used to specify which IP addresses from the available networks to use when constructing `OWN_PRIVATE_API_URL`. |
-| `gitlab_kas['client_timeout_seconds']` | The timeout for the client to connect to the KAS. |
-| `gitlab_kas_external_url` | The user-facing URL for the in-cluster `agentk`. Can be a fully qualified domain or subdomain, <sup>2</sup> or a GitLab external URL. <sup>3</sup> If blank, defaults to a GitLab external URL. |
-| `gitlab_rails['gitlab_kas_external_url']` | The user-facing URL for the in-cluster `agentk`. If blank, defaults to the `gitlab_kas_external_url`. |
-| `gitlab_rails['gitlab_kas_external_k8s_proxy_url']` | The user-facing URL for Kubernetes API proxying. If blank, defaults to a URL based on `gitlab_kas_external_url`. |
-| `gitlab_rails['gitlab_kas_internal_url']` | The internal URL the GitLab backend uses to communicate with KAS. |
+| Setting | Description |
+|-----------------------------------------------------|-------------|
+| `gitlab_kas['private_api_listen_network']` | The network family KAS listens on. Defaults to `tcp` for both IPv4 and IPv6 networks. Set to `tcp4` for IPv4 or `tcp6` for IPv6. |
+| `gitlab_kas['private_api_listen_address']` | The address the KAS listens on. Set to `0.0.0.0:8155` or to an IP and port reachable by other nodes in the cluster. |
+| `gitlab_kas['api_secret_key']` | The shared secret used for authentication between KAS and GitLab. The value must be Base64-encoded and exactly 32 bytes long. |
+| `gitlab_kas['private_api_secret_key']` | The shared secret used for authentication between different KAS instances. The value must be Base64-encoded and exactly 32 bytes long. |
+| `OWN_PRIVATE_API_SCHEME` | Optional value used to specify what scheme to use when constructing `OWN_PRIVATE_API_URL`. Can be `grpc` or `grpcs`. |
+| `OWN_PRIVATE_API_URL` | The environment variable used by KAS for service discovery. Set to the hostname or IP address of the node you're configuring. The node must be reachable by other nodes in the cluster. |
+| `OWN_PRIVATE_API_HOST` | Optional value used to verify the TLS certificate hostname. <sup>1</sup> A client compares this value to the hostname in the server's TLS certificate file. |
+| `OWN_PRIVATE_API_PORT` | Optional value used to specify what port to use when constructing `OWN_PRIVATE_API_URL`. |
+| `OWN_PRIVATE_API_CIDR` | Optional value used to specify which IP addresses from the available networks to use when constructing `OWN_PRIVATE_API_URL`. |
+| `gitlab_kas['client_timeout_seconds']` | The timeout for the client to connect to the KAS. |
+| `gitlab_kas_external_url` | The user-facing URL for the in-cluster `agentk`. Can be a fully qualified domain or subdomain, <sup>2</sup> or a GitLab external URL. <sup>3</sup> If blank, defaults to a GitLab external URL. |
+| `gitlab_rails['gitlab_kas_external_url']` | The user-facing URL for the in-cluster `agentk`. If blank, defaults to the `gitlab_kas_external_url`. |
+| `gitlab_rails['gitlab_kas_external_k8s_proxy_url']` | The user-facing URL for Kubernetes API proxying. If blank, defaults to a URL based on `gitlab_kas_external_url`. |
+| `gitlab_rails['gitlab_kas_internal_url']` | The internal URL the GitLab backend uses to communicate with KAS. |

 **Footnotes**:
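Taken together, these settings map to a `gitlab.rb` fragment like the following sketch. The addresses and the Base64 secret are placeholders, not values from this page; only setting names from the table above are used:

```ruby
# Hypothetical /etc/gitlab/gitlab.rb fragment for one agent server node.
gitlab_kas['private_api_listen_address'] = '0.0.0.0:8155'
gitlab_kas['api_secret_key'] = '<base64-encoded 32-byte secret>'
gitlab_kas['private_api_secret_key'] = '<base64-encoded 32-byte secret>'
gitlab_kas['env'] = {
  'OWN_PRIVATE_API_SCHEME' => 'grpc',
  # Hostname of this node, reachable by the other KAS nodes in the cluster.
  'OWN_PRIVATE_API_URL' => 'grpc://kas-1.internal:8155',
}
```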
@@ -314,7 +314,7 @@ The path is incorrect for either:

 To fix this issue, ensure that the paths are correct.

-### `dial tcp <GITLAB_INTERNAL_IP>:443: connect: connection refused`
+### Error: `dial tcp <GITLAB_INTERNAL_IP>:443: connect: connection refused`

 If you are running GitLab Self-Managed and:
|
|||
gitlab-ctl restart gitlab-kas
|
||||
```
|
||||
|
||||
### GRPC::DeadlineExceeded in Clusters::Agents::NotifyGitPushWorker
|
||||
### Error: `GRPC::DeadlineExceeded in Clusters::Agents::NotifyGitPushWorker`
|
||||
|
||||
This error likely occurs when the client does not receive a response within the default timeout period (5 seconds). To resolve the issue, you can increase the client timeout by modifying the `/etc/gitlab/gitlab.rb` configuration file.
|
||||
|
||||
|
|
|
|||
|
|
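A sketch of the corresponding change, using the `gitlab_kas['client_timeout_seconds']` setting from the table above (the value `10` is an arbitrary example):

```ruby
# Hypothetical /etc/gitlab/gitlab.rb change: raise the KAS client timeout
# from the 5-second default, then run `sudo gitlab-ctl reconfigure`.
gitlab_kas['client_timeout_seconds'] = 10
```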
@@ -27,12 +27,12 @@ During scheduled maintenance windows, the following tasks might be performed:

 Maintenance is performed outside standard working hours:

-| Region | Day | Time (UTC) |
-|--------|-----|------------|
-| Asia Pacific | Wednesday | 13:00 - 17:00 |
-| Europe, Middle East, and Africa | Tuesday | 01:00 - 05:00 |
-| Americas (Option 1) | Tuesday | 07:00 - 11:00 |
-| Americas (Option 2) | Sunday-Monday | 21:00 - 01:00 |
+| Region | Day | Time (UTC) |
+|---------------------------------|---------------|------------|
+| Asia Pacific | Wednesday | 1:00 PM-5:00 PM |
+| Europe, Middle East, and Africa | Tuesday | 1:00 AM-5:00 AM |
+| Americas (Option 1) | Tuesday | 7:00 AM-11:00 AM |
+| Americas (Option 2) | Sunday-Monday | 9:00 PM-1:00 AM |

 View your maintenance window in [Switchboard](tenant_overview.md#maintenance-windows), including upcoming and recent maintenance.
 You can postpone scheduled maintenance to another window in the same week by contacting your Customer Success Manager at least one week in advance.
@@ -51,7 +51,7 @@ The following are GitLab upgrade validation tests we performed.
 - Outcome: Partial success because we observed downtime during the upgrade of the primary and secondary sites.
 - Follow up issues/actions:
   - [Fix zero-downtime upgrade process/instructions for multi-node Geo deployments](https://gitlab.com/gitlab-org/gitlab/-/issues/225684)
-  - [Geo:check Rake task: Exclude AuthorizedKeysCommand check if node not running Puma](https://gitlab.com/gitlab-org/gitlab/-/issues/225454)
+  - [`Geo:check` Rake task: Exclude AuthorizedKeysCommand check if node not running Puma](https://gitlab.com/gitlab-org/gitlab/-/issues/225454)
   - [Update instructions in the next upgrade issue to include monitoring HAProxy dashboards](https://gitlab.com/gitlab-org/gitlab/-/issues/225359)

 [Upgrade Geo multi-node installation](https://gitlab.com/gitlab-org/gitlab/-/issues/208104):
@@ -22,7 +22,7 @@ requests redirected from the secondary to the primary site do not properly send
 Authorization header. This may result in either an infinite `Authorization <-> Redirect`
 loop, or Authorization error messages.

-### Error: Net::ReadTimeout when pushing through SSH on a Geo secondary
+### Error: `Net::ReadTimeout` when pushing through SSH on a Geo secondary

 When you push large repositories through SSH on a Geo secondary site, you may encounter a timeout.
 This is because Rails proxies the push to the primary and has a 60 second default timeout,
@@ -461,7 +461,7 @@ The following metrics are available:
 | Metric | Type | Since | Description | Labels |
 |:-------------------------------------------------------- |:--------- |:------------------------------------------------------------- |:---------------------------------------------------------------------------------- |:---------------------------------------------------------------------------------------------------------------------------------------- |
 | `db_load_balancing_hosts` | Gauge | [12.3](https://gitlab.com/gitlab-org/gitlab/-/issues/13630) | Current number of load balancing hosts | |
-| `sidekiq_load_balancing_count` | Counter | 13.11 | Sidekiq jobs using load balancing with data consistency set to :sticky or :delayed | `queue`, `boundary`, `external_dependencies`, `feature_category`, `job_status`, `urgency`, `data_consistency`, `load_balancing_strategy` |
+| `sidekiq_load_balancing_count` | Counter | 13.11 | Sidekiq jobs using load balancing with data consistency set to `:sticky` or `:delayed` | `queue`, `boundary`, `external_dependencies`, `feature_category`, `job_status`, `urgency`, `data_consistency`, `load_balancing_strategy` |
 | `gitlab_transaction_caught_up_replica_pick_count_total` | Counter | 14.1 | Number of search attempts for caught up replica | `result` |

 ## Database partitioning metrics
@@ -389,7 +389,7 @@ that cannot be accessed directly from the internet, keep the following in mind:

 This server block is an example of how to configure a reverse proxy for GitLab that works with Jira Cloud:

-```json
+```nginx
 server {
   listen *:80;
   server_name gitlab.mycompany.com;
@@ -21,11 +21,11 @@ title: Rate limit on Groups API

 You can configure the per minute rate limit per IP address and per user for requests to the following [groups API](../../api/groups.md).

-| Limit | Default |
-|---------------------------------------------------------------|---------|
-| [GET /groups](../../api/groups.md#list-groups) | 200 |
-| [GET /groups/:id](../../api/groups.md#get-a-single-group) | 400 |
-| [GET /groups/:id/projects](../../api/groups.md#list-projects) | 600 |
+| Limit | Default |
+|-----------------------------------------------------------------|---------|
+| [`GET /groups`](../../api/groups.md#list-groups) | 200 |
+| [`GET /groups/:id`](../../api/groups.md#get-a-single-group) | 400 |
+| [`GET /groups/:id/projects`](../../api/groups.md#list-projects) | 600 |

 To change the rate limit:
@@ -14,7 +14,7 @@ title: Rate limits on issue and epic creation

 Rate limits control the pace at which new epics and issues can be created.
 For example, if you set the limit to `300`, the
-[Projects::IssuesController#create](https://gitlab.com/gitlab-org/gitlab/blob/master/app/controllers/projects/issues_controller.rb)
+[`Projects::IssuesController#create`](https://gitlab.com/gitlab-org/gitlab/blob/master/app/controllers/projects/issues_controller.rb)
 action blocks requests that exceed a rate of 300 per minute. Access to the endpoint is available after one minute.

 ## Set the rate limit
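Administrators can also read or adjust this limit from the Rails console; `issues_create_limit` is the assumed application-setting name, so verify it for your GitLab version:

```ruby
# Hypothetical Rails console session (sudo gitlab-rails console).
Gitlab::CurrentSettings.issues_create_limit
# => 300
ApplicationSetting.current.update!(issues_create_limit: 500)
```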
|
@ -33,5 +33,5 @@ The default value is `300`.
|
|||
Requests over the rate limit are logged into the `auth.log` file.
|
||||
|
||||
For example, if you set a limit of 300, requests using the
|
||||
[Projects::NotesController#create](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/controllers/projects/notes_controller.rb)
|
||||
[`Projects::NotesController#create`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/controllers/projects/notes_controller.rb)
|
||||
action exceeding a rate of 300 per minute are blocked. Access to the endpoint is allowed after one minute.
|
||||
|
|
|
|||
|
|
@@ -25,14 +25,14 @@ title: Rate limit on Projects API

 You can configure the rate limit per IP address and per user for requests to the following [projects API](../../api/projects.md#list-all-projects).

-| Limit | Default | Interval |
-|-----------------------------------------------------------------------------------------------------------|---------|------------|
-| [GET /projects](../../api/projects.md#list-all-projects) (unauthenticated requests) | 400 | 10 minutes |
-| [GET /projects](../../api/projects.md#list-all-projects) (authenticated requests) | 2000 | 10 minutes |
-| [GET /projects/:id](../../api/projects.md#get-a-single-project) | 400 | 1 minute |
-| [GET /users/:user_id/projects](../../api/projects.md#list-a-users-projects) | 300 | 1 minute |
-| [GET /users/:user_id/contributed_projects](../../api/projects.md#list-projects-a-user-has-contributed-to) | 100 | 1 minute |
-| [GET /users/:user_id/starred_projects](../../api/project_starring.md#list-projects-starred-by-a-user) | 100 | 1 minute |
+| Limit | Default | Interval |
+|-------------------------------------------------------------------------------------------------------------|---------|----------|
+| [`GET /projects`](../../api/projects.md#list-all-projects) (unauthenticated requests) | 400 | 10 minutes |
+| [`GET /projects`](../../api/projects.md#list-all-projects) (authenticated requests) | 2000 | 10 minutes |
+| [`GET /projects/:id`](../../api/projects.md#get-a-single-project) | 400 | 1 minute |
+| [`GET /users/:user_id/projects`](../../api/projects.md#list-a-users-projects) | 300 | 1 minute |
+| [`GET /users/:user_id/contributed_projects`](../../api/projects.md#list-projects-a-user-has-contributed-to) | 100 | 1 minute |
+| [`GET /users/:user_id/starred_projects`](../../api/project_starring.md#list-projects-starred-by-a-user) | 100 | 1 minute |

 To change the rate limit:
@@ -24,13 +24,13 @@ You can configure the per minute rate limit per IP address and per user for requ

 | Limit | Default |
 |-----------------------------------------------------------------|---------|
-| [GET /users/:id/followers](../../api/user_follow_unfollow.md#list-all-accounts-that-follow-a-user) | 100 each minute |
-| [GET /users/:id/following](../../api/user_follow_unfollow.md#list-all-accounts-followed-by-a-user) | 100 each minute |
-| [GET /users/:id/status](../../api/users.md#get-the-status-of-a-user) | 240 each minute |
-| [GET /users/:id/keys](../../api/user_keys.md#list-all-ssh-keys-for-a-user) | 120 each minute |
-| [GET /users/:id/keys/:key_id](../../api/user_keys.md#get-an-ssh-key) | 120 each minute |
-| [GET /users/:id/gpg_keys](../../api/user_keys.md#list-all-gpg-keys-for-a-user) | 120 each minute |
-| [GET /users/:id/gpg_keys/:key_id](../../api/user_keys.md#get-a-gpg-key-for-a-user) | 120 each minute |
+| [`GET /users/:id/followers`](../../api/user_follow_unfollow.md#list-all-accounts-that-follow-a-user) | 100 each minute |
+| [`GET /users/:id/following`](../../api/user_follow_unfollow.md#list-all-accounts-followed-by-a-user) | 100 each minute |
+| [`GET /users/:id/status`](../../api/users.md#get-the-status-of-a-user) | 240 each minute |
+| [`GET /users/:id/keys`](../../api/user_keys.md#list-all-ssh-keys-for-a-user) | 120 each minute |
+| [`GET /users/:id/keys/:key_id`](../../api/user_keys.md#get-an-ssh-key) | 120 each minute |
+| [`GET /users/:id/gpg_keys`](../../api/user_keys.md#list-all-gpg-keys-for-a-user) | 120 each minute |
+| [`GET /users/:id/gpg_keys/:key_id`](../../api/user_keys.md#get-a-gpg-key-for-a-user) | 120 each minute |

 To change the rate limit:
@@ -236,7 +236,7 @@ It requires at least the `SYS_PTRACE` capability, otherwise it terminates with a

 {{< tabs >}}

-::: TabTitle Kubernetes
+{{< tab title="Kubernetes" >}}

 ```yaml
 securityContext:
@@ -244,6 +244,8 @@ securityContext:
     add:
       - SYS_PTRACE
 ```

+{{< /tab >}}
+
 {{< tab title="Docker" >}}

@@ -545,13 +547,13 @@ has number of drawbacks, as mentioned in [Why Ruby's Timeout is dangerous (and T

 > This is where the implications get interesting, and terrifying. This means that an exception can get raised:
 >
-> - during a network request (ok, as long as the surrounding code is prepared to catch Timeout::Error)
+> - during a network request (ok, as long as the surrounding code is prepared to catch `Timeout::Error`)
 > - during the cleanup for the network request
 > - during a rescue block
 > - while creating an object to save to the database afterwards
 > - in any of your code, regardless of whether it could have possibly raised an exception before
 >
-> Nobody writes code to defend against an exception being raised on literally any line. That's not even possible. So Thread.raise is basically like a sneak attack on your code that could result in almost anything. It would probably be okay if it were pure-functional code that did not modify any state. But this is Ruby, so that's unlikely :)
+> Nobody writes code to defend against an exception being raised on literally any line. That's not even possible. So Thread.raise is basically like a sneak attack on your code that could result in almost anything. It would probably be okay if it were pure-functional code that did not modify any state. But this is Ruby, so that's unlikely `:)`

 ## Manually trigger a cron job
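The hazard the quote describes is easy to reproduce; a minimal sketch (the resource is a hypothetical stand-in):

```ruby
require 'timeout'

# Timeout.timeout interrupts the block via Thread#raise, so Timeout::Error
# lands on whatever line happens to be executing -- including, in unlucky
# schedules, cleanup code in an ensure block.
def with_fake_resource
  resource = :acquired # stand-in for a socket, lock, or DB handle
  sleep 2              # simulates the slow network request
ensure
  # If another, outer timeout fired while this block ran, the resource
  # would never be released.
  resource = :released
end

begin
  Timeout.timeout(1) { with_fake_resource }
rescue Timeout::Error
  puts 'interrupted mid-block by Thread#raise'
end
```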
@@ -2159,7 +2159,6 @@ Input type: `AiActionInput`
 | <a id="mutationaiactionprojectid"></a>`projectId` | [`ProjectID`](#projectid) | Global ID of the project the user is acting on. |
 | <a id="mutationaiactionresolvevulnerability"></a>`resolveVulnerability` | [`AiResolveVulnerabilityInput`](#airesolvevulnerabilityinput) | Input for resolve_vulnerability AI action. |
 | <a id="mutationaiactionrootnamespaceid"></a>`rootNamespaceId` | [`NamespaceID`](#namespaceid) | Global ID of the top-level namespace the user is acting on. |
-| <a id="mutationaiactionsummarizecomments"></a>`summarizeComments` | [`AiSummarizeCommentsInput`](#aisummarizecommentsinput) | Input for summarize_comments AI action. |
 | <a id="mutationaiactionsummarizenewmergerequest"></a>`summarizeNewMergeRequest` | [`AiSummarizeNewMergeRequestInput`](#aisummarizenewmergerequestinput) | Input for summarize_new_merge_request AI action. |
 | <a id="mutationaiactionsummarizereview"></a>`summarizeReview` | [`AiSummarizeReviewInput`](#aisummarizereviewinput) | Input for summarize_review AI action. |
 | <a id="mutationaiactionthreadid"></a>`threadId` | [`AiConversationThreadID`](#aiconversationthreadid) | Global Id of the existing thread to continue the conversation. If it is not specified, a new thread will be created for the specified conversation_type. |
|
|||
| <a id="aiselfhostedmodeledgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
|
||||
| <a id="aiselfhostedmodeledgenode"></a>`node` | [`AiSelfHostedModel`](#aiselfhostedmodel) | The item at the end of the edge. |
|
||||
|
||||
#### `AiUsageEventConnection`
|
||||
|
||||
The connection type for [`AiUsageEvent`](#aiusageevent).
|
||||
|
||||
##### Fields
|
||||
|
||||
| Name | Type | Description |
|
||||
| ---- | ---- | ----------- |
|
||||
| <a id="aiusageeventconnectionedges"></a>`edges` | [`[AiUsageEventEdge]`](#aiusageeventedge) | A list of edges. |
|
||||
| <a id="aiusageeventconnectionnodes"></a>`nodes` | [`[AiUsageEvent]`](#aiusageevent) | A list of nodes. |
|
||||
| <a id="aiusageeventconnectionpageinfo"></a>`pageInfo` | [`PageInfo!`](#pageinfo) | Information to aid in pagination. |
|
||||
|
||||
#### `AiUsageEventEdge`
|
||||
|
||||
The edge type for [`AiUsageEvent`](#aiusageevent).
|
||||
|
||||
##### Fields
|
||||
|
||||
| Name | Type | Description |
|
||||
| ---- | ---- | ----------- |
|
||||
| <a id="aiusageeventedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
|
||||
| <a id="aiusageeventedgenode"></a>`node` | [`AiUsageEvent`](#aiusageevent) | The item at the end of the edge. |
|
||||
|
||||
#### `AiUserMetricsConnection`
|
||||
|
||||
The connection type for [`AiUserMetrics`](#aiusermetrics).
|
||||
|
|
@@ -22347,8 +22369,20 @@ Usage data for events stored in the default PostgreSQL database. Data retained f

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="aiusagedataall"></a>`all` | [`AiUsageEventConnection`](#aiusageeventconnection) | All Duo usage events. (see [Connections](#connections)) |
| <a id="aiusagedatacodesuggestionevents"></a>`codeSuggestionEvents` | [`CodeSuggestionEventConnection`](#codesuggestioneventconnection) | Events related to code suggestions. (see [Connections](#connections)) |

### `AiUsageEvent`

#### Fields

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="aiusageeventevent"></a>`event` | [`AiUsageEventType!`](#aiusageeventtype) | Type of the event. |
| <a id="aiusageeventid"></a>`id` | [`ID!`](#id) | ID of the event. |
| <a id="aiusageeventtimestamp"></a>`timestamp` | [`Time!`](#time) | When the event happened. |
| <a id="aiusageeventuser"></a>`user` | [`UserCore!`](#usercore) | User associated with the event. |

### `AiUserMetrics`

Pre-aggregated per-user metrics for GitLab Code Suggestions and GitLab Duo Chat. Requires ClickHouse to be enabled and GitLab Ultimate with the Duo Enterprise add-on.

@@ -34860,7 +34894,7 @@ Product analytics events for a specific month and year.

| <a id="namespacetotalrepositorysizeexcess"></a>`totalRepositorySizeExcess` | [`Float`](#float) | Total excess repository size of all projects in the root namespace in bytes. This only applies to namespaces under Project limit enforcement. |
| <a id="namespaceuserpermissions"></a>`userPermissions` | [`NamespacePermissions!`](#namespacepermissions) | Permissions for the current user on the resource. |
| <a id="namespacevisibility"></a>`visibility` | [`String`](#string) | Visibility of the namespace. |
| <a id="namespaceweburl"></a>`webUrl` | [`String`](#string) | URL of the object. |
| <a id="namespaceweburl"></a>`webUrl` | [`String`](#string) | URL of the namespace. |

#### Fields with arguments

@@ -44919,6 +44953,20 @@ Type of code suggestion event.

| <a id="aiusagecodesuggestioneventcode_suggestion_rejected_in_ide"></a>`CODE_SUGGESTION_REJECTED_IN_IDE` | Code suggestion rejected. |
| <a id="aiusagecodesuggestioneventcode_suggestion_shown_in_ide"></a>`CODE_SUGGESTION_SHOWN_IN_IDE` | Code suggestion shown. |

### `AiUsageEventType`

Type of AI usage event.

| Value | Description |
| ----- | ----------- |
| <a id="aiusageeventtypecode_suggestion_accepted_in_ide"></a>`CODE_SUGGESTION_ACCEPTED_IN_IDE` | Code Suggestion was accepted in IDE. |
| <a id="aiusageeventtypecode_suggestion_direct_access_token_refresh"></a>`CODE_SUGGESTION_DIRECT_ACCESS_TOKEN_REFRESH` | Code Suggestion token was refreshed (old data only). |
| <a id="aiusageeventtypecode_suggestion_rejected_in_ide"></a>`CODE_SUGGESTION_REJECTED_IN_IDE` | Code Suggestion was rejected in IDE. |
| <a id="aiusageeventtypecode_suggestion_requested"></a>`CODE_SUGGESTION_REQUESTED` | Code Suggestion was requested (old data only). |
| <a id="aiusageeventtypecode_suggestion_shown_in_ide"></a>`CODE_SUGGESTION_SHOWN_IN_IDE` | Code Suggestion was shown in IDE. |
| <a id="aiusageeventtyperequest_duo_chat_response"></a>`REQUEST_DUO_CHAT_RESPONSE` | Duo Chat response was requested. |
| <a id="aiusageeventtypetroubleshoot_job"></a>`TROUBLESHOOT_JOB` | Troubleshoot job feature was used. |

### `AlertManagementAlertSort`

Values for sorting alerts.

@@ -51896,14 +51944,6 @@ see the associated mutation type above.

| <a id="airesolvevulnerabilityinputresourceid"></a>`resourceId` | [`AiModelID!`](#aimodelid) | Global ID of the resource to mutate. |
| <a id="airesolvevulnerabilityinputvulnerablemergerequestid"></a>`vulnerableMergeRequestId` | [`MergeRequestID`](#mergerequestid) | Global ID of the merge request which the merge request containing the vulnerability resolution will target. |

### `AiSummarizeCommentsInput`

#### Arguments

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="aisummarizecommentsinputresourceid"></a>`resourceId` | [`AiModelID!`](#aimodelid) | Global ID of the resource to mutate. |

### `AiSummarizeNewMergeRequestInput`

Summarize a new merge request based on two branches. Returns `null` if the `add_ai_summary_for_new_mr` feature flag is disabled.

@@ -214,3 +214,7 @@ Example response:

  }
}
```

## Comments on wiki pages

Wiki comments are called `notes`. You can interact with them using the [Notes API](notes.md#group-wikis).

@@ -262,7 +262,55 @@ If successful, returns a [`204 No Content`](rest/troubleshooting.md#status-codes

Use the following endpoints to configure and manage upstream Maven registries.

### List all upstream registries
### List all upstream registries for a top-level group

{{< history >}}

- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/550728) in GitLab 18.3 [with a flag](../administration/feature_flags/_index.md) named `maven_virtual_registry`. Enabled by default.

{{< /history >}}

Lists all upstream registries for a top-level group.

```plaintext
GET /groups/:id/-/virtual_registries/packages/maven/upstreams
```

Supported attributes:

| Attribute | Type | Required | Description |
|:----------|:-----|:---------|:------------|
| `id` | string/integer | yes | The group ID or full group path. Must be a top-level group. |
| `page` | integer | no | The page number. Defaults to 1. |
| `per_page` | integer | no | The number of items per page. Defaults to 20. |

Example request:

```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" \
  --header "Accept: application/json" \
  --url "https://gitlab.example.com/api/v4/groups/5/-/virtual_registries/packages/maven/upstreams"
```

Example response:

```json
[
  {
    "id": 1,
    "group_id": 5,
    "url": "https://repo.maven.apache.org/maven2",
    "name": "Maven Central",
    "description": "Maven Central repository",
    "cache_validity_hours": 24,
    "username": "user",
    "created_at": "2024-05-30T12:28:27.855Z",
    "updated_at": "2024-05-30T12:28:27.855Z"
  }
]
```

### List all upstream registries for a virtual registry

{{< history >}}

@@ -221,3 +221,7 @@ Example response:

  }
}
```

## Comments on wiki pages

Wiki comments are called `notes`. You can interact with them using the [Notes API](notes.md#project-wikis).

@@ -18,53 +18,22 @@ title: Dashboard layout framework

{{< /history >}}

The [`dashboard_layout.vue`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/assets/javascripts/vue_shared/components/customizable_dashboard/dashboard_layout.vue)
component provides an easy way to render dashboards using a configuration. This is
part of our broader effort to standardize dashboards across the platform
The dashboard layout framework is part of a broader effort to standardize dashboards across the platform
as described in [Epic #13801](https://gitlab.com/groups/gitlab-org/-/epics/13801).

For more in-depth details on the dashboard layout framework, see the [architecture design document](https://handbook.gitlab.com/handbook/engineering/architecture/design-documents/dashboard_layout_framework/).

## Interactive examples
## Rendering dashboards

Try it in your browser using our interactive examples:
To render dashboard layouts, it's recommended to use the [GlDashboardLayout](https://design.gitlab.com/storybook/?path=/docs/dashboards-dashboards-layout--docs)
component. It provides an easy way to render dashboards using
a configuration which aligns with our [Pajamas guidelines](https://design.gitlab.com/patterns/dashboards/).

- [dashboard_layout](https://gitlab-org.gitlab.io/gitlab/storybook/?path=/docs/vue-shared-components-customizable-dashboard-dashboard-layout--docs)
- [extended_dashboard_panel](https://gitlab-org.gitlab.io/gitlab/storybook/?path=/docs/vue-shared-components-customizable-dashboard-extended-dashboard-panel--docs)
Note that GlDashboardLayout supplants the deprecated `dashboard_layout.vue` component in the vue shared directory.

## When to use this component
### Panel guidelines

This component should be used when:

- You want an easy way to create a dashboard interface.
- You want your dashboard to align with our [Pajamas guidelines](https://design.gitlab.com/patterns/dashboards).
- You want to benefit from future add-on features such as customizable layouts with resizable, draggable elements.

For existing dashboards, follow the [migration guide](#migration-guide) below.

## Current limitations

The component is limited to rendering dashboards. As defined in our architecture design document
it does not provide:

- Data exploration outside defined panel visualizations
- User-driven customization and management of dashboards
- Navigation placement for dashboards

While user customization is not supported yet, the foundation has been developed
and we plan to release an upgrade path from a static dashboard layout to a
customizable dashboard layout as part of GitLab issue [#546201](https://gitlab.com/gitlab-org/gitlab/-/issues/546201).

## The component

The `dashboard_layout.vue` component takes a dashboard configuration object as input
and renders a dashboard layout with title, description, actions, and panels in a
cross-browser 12-column grid system. The grid is responsive and collapses down
to a single column at the [medium breakpoint](https://design.gitlab.com/product-foundations/layout/#breakpoints).

### Dashboard panels

The component is not opinionated about the panel component used. You are free to
You are free to
choose whichever panel component best suits your needs. However, to ensure consistency
with our design patterns, it's strongly recommended that you use one of the
following components:

@@ -72,127 +41,25 @@ following components:

- [GlDashboardPanel](https://gitlab-org.gitlab.io/gitlab-ui/?path=/docs/dashboards-dashboards-panel--docs): The official Pajamas dashboard panel
- [`extended_dashboard_panel.vue`](https://gitlab-org.gitlab.io/gitlab/storybook/?path=/docs/vue-shared-components-extended-dashboard-panel--docs): Extends `GlDashboardPanel` with easy alert styling and i18n strings

### Filters
## Migration guide

The component provides a `#filters` slot to render your filters in the dashboard
layout. The component does not manage or sync filters and leaves it up to the
consumer to manage this state.

We expect dashboards using the framework to implement two types of filters:

- Global filters: Applied to every visualization in the dashboard
- Per-panel filters: Applied to individual panels (future support planned)

For URL synchronization, you can use the shared [`UrlSync`](https://gitlab.com/gitlab-org/gitlab/blob/master/app/assets/javascripts/vue_shared/components/url_sync.vue) component.

### Additional slots

For a full list of supported slots see the [interactive examples](#interactive-examples).

### Basic implementation

```vue
<script>
// app/assets/javascripts/feature/components/dashboard.vue
import { __ } from '~/locale'; // needed for the __() calls below
import DashboardLayout from '~/vue_shared/components/customizable_dashboard/dashboard_layout.vue';
import ExtendedDashboardPanel from '~/vue_shared/components/customizable_dashboard/extended_dashboard_panel.vue';

import UsersVisualization from './my_users_visualization.vue';
import EventsVisualization from './my_events_visualization.vue';

export default {
  components: {
    DashboardLayout,
    ExtendedDashboardPanel,
    UsersVisualization,
    EventsVisualization,
  },
  data() {
    return {
      dashboard: {
        title: __('My dashboard title'),
        description: __('The dashboard description to render'),
        panels: [
          {
            id: '1',
            extendedDashboardPanelProps: {
              title: __('Active users over time'),
              // Any additional ExtendedDashboardPanel props go here
            },
            component: UsersVisualization,
            componentProps: {
              apiPath: '/example-users-api',
              // Any props you want to pass to your component
            },
            gridAttributes: {
              width: 6,
              height: 4,
              yPos: 0,
              xPos: 0,
            },
          },
          {
            id: '2',
            extendedDashboardPanelProps: {
              title: __('Events over time'),
              // Any additional ExtendedDashboardPanel props go here
            },
            component: EventsVisualization,
            componentProps: {
              apiPath: '/example-events-api',
              // Any props you want to pass to your component
            },
            gridAttributes: {
              width: 6,
              height: 4,
              yPos: 0,
              xPos: 6,
            },
          },
        ],
      },
    }
  },
}
</script>

<template>
  <dashboard-layout :config="dashboard">
    <template #panel="{ panel }">
      <extended-dashboard-panel v-bind="panel.extendedDashboardPanelProps">
        <template #body>
          <component
            :is="panel.component"
            class="gl-h-full gl-overflow-hidden"
            v-bind="panel.componentProps"
          />
        </template>
      </extended-dashboard-panel>
    </template>
  </dashboard-layout>
</template>
```

### Migration guide

Migrating an existing dashboard to the `dashboard_layout.vue` should be relatively
Migrating an existing dashboard to the GlDashboardLayout should be relatively
straightforward, because in most cases you only need to replace the dashboard shell
and can keep existing visualizations. A typical migration path could look like this:

1. Create a feature flag to conditionally render your new dashboard.
1. Create a new dashboard using `dashboard_layout.vue` and `extended_dashboard_panel.vue`.
1. Create a new dashboard using GlDashboardLayout and `extended_dashboard_panel.vue`.
1. Create a dashboard config object that mimics your old dashboard layout.
1. Optionally, use `dashboard_layout.vue`'s slots to render your dashboard's
1. Optionally, use GlDashboardLayout's slots to render your dashboard's
   filters, actions, or custom title or description.
1. Ensure your new dashboard, panels, and visualizations render correctly.
1. Remove the feature flag and your old dashboard.

See the [basic implementation](#basic-implementation) example above on how to render
existing visualization components using the dashboard layout component.
See the basic implementation on [GitLab UI](https://design.gitlab.com/storybook/?path=/docs/dashboards-dashboards-layout--docs)
for an example on how to render existing visualization components using the dashboard layout component.

### Example implementations

Real world implementations and migrations using the `dashboard_layout.vue`
component:
Real world implementations and migrations using the GlDashboardLayout component:

- New security dashboard added in MR [!191974](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/191974)

@@ -286,9 +286,10 @@ end

Removes specified fields from an index.

Requires the `index_name` method and `DOCUMENT_TYPE` constant. If there is one field to remove, add the `field_to_remove` method, otherwise add `fields_to_remove` with an array of fields.
Checks in batches if any documents that match `DOCUMENT_TYPE` have the fields specified in Elasticsearch. If documents exist, uses a Painless script to perform `update_by_query`.

Checks in batches if any documents that match `document_type` have the fields specified in Elasticsearch. If documents exist, uses a Painless script to perform `update_by_query`.
- For single fields, define `field_to_remove` method and `DOCUMENT_TYPE` constant
- For multiple fields, define `fields_to_remove` method and `DOCUMENT_TYPE` constant

```ruby
class MigrationName < Elastic::Migration
@@ -297,16 +298,10 @@ class MigrationName < Elastic::Migration
  batched!
  throttle_delay 1.minute

  DOCUMENT_TYPE = User

  private

  def index_name
    User.__elasticsearch__.index_name
  end

  def document_type
    'user'
  end

  def fields_to_remove
    %w[two_factor_enabled has_projects]
  end

@@ -573,6 +573,7 @@ The `bypass_settings` field allows you to specify exceptions to the policy for c

{{< history >}}

- [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/18113) in GitLab 18.2 [with a flag](../../../administration/feature_flags/_index.md) named `approval_policy_branch_exceptions`. Enabled by default.
- [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/543778) in GitLab 18.3. Feature flag `approval_policy_branch_exceptions` removed.

{{< /history >}}

@@ -211,6 +211,8 @@ A scheduled pipeline:

- Runs under a `security_policy_bot` user account in the project, with the Guest role and
  permissions to create pipelines and read the repository's content from a CI/CD job. This account
  is created when the policy is linked to a group or project.
- On GitLab.com, only the first 10 `schedule` rules in a scan execution policy are enforced. Rules
  that exceed the limit have no effect.

| Field | Type | Required | Possible values | Description |
|------------|------|----------|-----------------|-------------|

@@ -84,11 +84,17 @@ module Banzai
      end

      def placeholders_disabled?
        return true unless context[:project]&.markdown_placeholders_feature_flag_enabled? ||
        return true unless resolve_project&.markdown_placeholders_feature_flag_enabled? ||
          context[:group]&.markdown_placeholders_feature_flag_enabled?

        context[:disable_placeholders] || context[:broadcast_message_placeholders]
      end

      def resolve_project
        return context[:project] unless context[:project].is_a?(Namespaces::ProjectNamespace)

        context[:project].project
      end
    end
  end
end

@@ -0,0 +1,29 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    class BackfillProjectIdOnCiBuildNeeds < BatchedMigrationJob
      feature_category :continuous_integration

      operation_name :backfill_project_id_on_ci_build_needs
      scope_to ->(relation) { relation.where(project_id: nil) } # rubocop: disable Database/AvoidScopeTo -- `project_id` is an indexed column

      def construct_query(sub_batch:)
        <<~SQL
          UPDATE ci_build_needs
          SET project_id = p_ci_builds.project_id
          FROM p_ci_builds
          WHERE ci_build_needs.build_id = p_ci_builds.id
          AND ci_build_needs.id IN (#{sub_batch.select(:id).to_sql})
          AND p_ci_builds.partition_id = ci_build_needs.partition_id
        SQL
      end

      def perform
        each_sub_batch do |sub_batch|
          connection.execute(construct_query(sub_batch: sub_batch))
        end
      end
    end
  end
end

@@ -488,18 +488,18 @@ module Gitlab
      gitaly_commit_client.ancestor?(from, to)
    end

    def merged_branch_names(branch_names = [])
    def merged_branch_names(branch_names = [], include_identical: false)
      return [] unless root_ref

      root_sha = find_branch(root_ref)&.target
      root_branch = find_branch(root_ref)
      return [] unless root_branch

      return [] unless root_sha
      root_sha = root_branch.target
      root_branch_name = root_branch.name

      branches = wrapped_gitaly_errors do
        gitaly_merged_branch_names(branch_names, root_sha)
      wrapped_gitaly_errors do
        Set.new(gitaly_merged_branch_names(branch_names, root_sha, root_branch_name, include_identical: include_identical))
      end

      Set.new(branches)
    end

    # Returns an array of DiffBlob objects that represent a diff between

@@ -1280,12 +1280,13 @@ module Gitlab
      end
    end

    def gitaly_merged_branch_names(branch_names, root_sha)
    def gitaly_merged_branch_names(branch_names, root_sha, root_branch_name, include_identical: false)
      qualified_branch_names = branch_names.map { |b| "refs/heads/#{b}" }
      merged_branches = gitaly_ref_client.merged_branches(qualified_branch_names)

      gitaly_ref_client.merged_branches(qualified_branch_names)
        .reject { |b| b.target == root_sha }
        .map(&:name)
      return merged_branches.reject { |b| b.name == root_branch_name }.map(&:name) if include_identical

      merged_branches.reject { |b| b.target == root_sha }.map(&:name)
    end

    def process_count_commits_options(options)

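To make the new `include_identical` semantics concrete, here is a minimal sketch with made-up branch data (the real method talks to Gitaly; this only mirrors the reject logic above):

```ruby
Branch = Struct.new(:name, :target)

root_sha = 'abc123'
merged_branches = [
  Branch.new('main', 'abc123'),      # the root branch itself
  Branch.new('identical', 'abc123'), # merged branch whose tip equals root
  Branch.new('merged', 'def456')     # merged branch behind the root
]

# Default: drop every branch whose tip SHA equals the root SHA, which
# also drops branches that are merely identical to the root branch.
merged_branches.reject { |b| b.target == root_sha }.map(&:name)
# => ["merged"]

# include_identical: true keeps identical branches and instead drops
# only the root branch itself, by name.
merged_branches.reject { |b| b.name == 'main' }.map(&:name)
# => ["identical", "merged"]
```
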
@@ -25,19 +25,29 @@ module Gitlab
      ["#{result.stdout}#{result.stderr}", status]
    end

    # Returns Result
    def popen_with_detail(cmd, path = nil, vars = {})
      raise "System commands must be given as an array of strings" unless cmd.is_a?(Array)
    def popen_with_streaming(cmd, path = nil, vars = {}, &block)
      vars, options = prepare_popen_command(cmd, path, vars)

      if cmd.one? && cmd.first.match?(/\s/)
        raise "System commands must be split into an array of space-separated values"
      cmd_status = nil
      block_mutex = Mutex.new if block

      Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|
        stdin.close # Close stdin immediately since we're not using it for streaming

        stdout_thread = read_stream_in_thread(stdout, :stdout, block_mutex, &block)
        stderr_thread = read_stream_in_thread(stderr, :stderr, block_mutex, &block)

        stdout_thread.join
        stderr_thread.join

        cmd_status = wait_thr.value&.exitstatus || wait_thr.value.to_i
      end

      path ||= Dir.pwd
      vars['PWD'] = path
      options = { chdir: path }
      cmd_status
    end

      FileUtils.mkdir_p(path) unless File.directory?(path)
    def popen_with_detail(cmd, path = nil, vars = {})
      vars, options = prepare_popen_command(cmd, path, vars)

      cmd_stdout = ''
      cmd_stderr = ''
@@ -60,5 +70,30 @@ module Gitlab

      Result.new(cmd, cmd_stdout, cmd_stderr, cmd_status, Time.now.to_f - start)
    end

    private

    def prepare_popen_command(cmd, path, vars)
      raise "Commands must be given as an array of strings" unless cmd.is_a?(Array)
      raise "Commands must be split into an array of space-separated values" if cmd.one? && cmd.first.match?(/\s/)

      path ||= Dir.pwd
      vars['PWD'] = path
      options = { chdir: path }

      FileUtils.mkdir_p(path) unless File.directory?(path)

      [vars, options]
    end

    def read_stream_in_thread(stream, stream_type, mutex, &block)
      Thread.new do
        stream.each_line do |line|
          mutex.synchronize { yield(stream_type, line) } if block
        end
      rescue IOError
        # This is expected when the process exits and closes its streams. No action needed.
      end
    end
  end
end

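For context, a minimal usage sketch of the new streaming API (assumes a class that includes `Gitlab::Popen`; the command is arbitrary):

```ruby
class CommandRunner
  include Gitlab::Popen
end

# The block receives (:stdout | :stderr, line) as output is produced.
# Calls are serialized by an internal mutex, so appending to shared
# state from inside the block is safe.
status = CommandRunner.new.popen_with_streaming(%w[git status]) do |stream, line|
  puts "[#{stream}] #{line}"
end

puts "exit status: #{status}"
```
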
@@ -0,0 +1,126 @@
# frozen_string_literal: true

require 'net/http'

module Observability
  class O11yToken
    AuthenticationError = Class.new(StandardError)
    ConfigurationError = Class.new(StandardError)
    NetworkError = Class.new(StandardError)

    class TokenResponse
      attr_reader :user_id, :access_jwt, :refresh_jwt

      def self.from_json(data)
        data ||= {}
        new(
          user_id: data.dig('data', 'userId'),
          access_jwt: data.dig('data', 'accessJwt'),
          refresh_jwt: data.dig('data', 'refreshJwt')
        )
      end

      def initialize(user_id:, access_jwt:, refresh_jwt:)
        @user_id = user_id
        @access_jwt = access_jwt
        @refresh_jwt = refresh_jwt
      end

      def to_h
        {
          userId: user_id,
          accessJwt: access_jwt,
          refreshJwt: refresh_jwt
        }
      end
    end

    def self.generate_tokens(o11y_settings)
      new(o11y_settings).generate_tokens
    end

    def initialize(o11y_settings)
      @o11y_settings = o11y_settings
      @http_client = HttpClient.new
    end

    def generate_tokens
      validate_settings!

      response = authenticate_user
      parse_response(response)
    rescue ConfigurationError, AuthenticationError, NetworkError => e
      Gitlab::ErrorTracking.log_exception(e)
      {}
    end

    private

    attr_reader :o11y_settings, :http_client

    def validate_settings!
      raise ConfigurationError, "O11y settings are not set" if o11y_settings.blank?

      raise ConfigurationError, "o11y_service_url is not configured" if o11y_settings.o11y_service_url.blank?

      if o11y_settings.o11y_service_user_email.blank?
        raise ConfigurationError,
          "o11y_service_user_email is not configured"
      end

      raise ConfigurationError, "o11y_service_password is not configured" if o11y_settings.o11y_service_password.blank?
    end

    def authenticate_user
      payload = build_payload
      http_client.post(login_url, payload)
    rescue *Gitlab::HTTP::HTTP_ERRORS => e
      raise NetworkError, "Failed to connect to O11y service (#{e.class.name}): #{e.message}"
    end

    def build_payload
      {
        email: o11y_settings.o11y_service_user_email,
        password: o11y_settings.o11y_service_password
      }
    end

    def login_url
      "#{o11y_settings.o11y_service_url}/api/v1/login"
    end

    def parse_response(response)
      unless response.code.to_i == 200
        Gitlab::AppLogger.warn("O11y authentication failed with status #{response.code}")
        return {}
      end

      response_body = response.body.to_s.strip
      raise AuthenticationError, "Empty response from O11y service" if response_body.blank?

      data = Gitlab::Json.parse(response.body)
      TokenResponse.from_json(data).to_h
    rescue JSON::ParserError => e
      raise AuthenticationError, "Invalid response format from O11y service: #{e.message}"
    end

    class HttpClient
      def post(url, payload)
        ::Gitlab::HTTP.post(
          url,
          headers: { 'Content-Type' => 'application/json' },
          body: Gitlab::Json.dump(payload),
          allow_local_requests: allow_local_requests?
        )
      end

      private

      def allow_local_requests?
        Rails.env.development? ||
          Rails.env.test? ||
          ::Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
      end
    end
  end
end

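For context, a brief caller-side sketch (illustrative; `settings` stands in for a persisted `Observability::GroupO11ySetting` record):

```ruby
# Returns { userId:, accessJwt:, refreshJwt: } on success. On any
# configuration, network, or authentication failure the error is logged
# and an empty hash is returned instead of being raised to the caller.
tokens = Observability::O11yToken.generate_tokens(settings)

if tokens.empty?
  # surface an error or fall back
else
  access_jwt = tokens[:accessJwt]
end
```
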
@@ -55371,6 +55371,9 @@ msgstr ""
msgid "Search for this text"
msgstr ""

msgid "Search for this text (experiment)"
msgstr ""

msgid "Search forks"
msgstr ""

@@ -104,71 +104,17 @@ RSpec.describe Projects::MergeRequests::CreationsController, feature_category: :
      end
    end

    shared_examples 'renders rapid diffs' do
      it 'renders the rapid_diffs template' do
        get :new, params: params

        expect(response).to be_successful
        expect(response).to render_template(:rapid_diffs)
      end

      it 'sets @js_action_name to "rapid_diffs"' do
        get :new, params: params

        expect(assigns(:js_action_name)).to eq('rapid_diffs')
      end
    end

    shared_examples 'renders default new template' do
      it 'renders default template' do
        get :new, params: params

        expect(response).to be_successful
        expect(response).to render_template(:new)
        expect(response).not_to render_template(:rapid_diffs)
      end

      it 'does NOT set @js_action_name to "rapid_diffs"' do
        get :new, params: params

        expect(assigns(:js_action_name)).not_to eq('rapid_diffs')
      end
    end

    context 'when rapid diffs are enabled' do
    context 'for a new new request' do
      render_views

      let(:params) { get_diff_params }

      before do
        stub_feature_flags(rapid_diffs_on_mr_creation: true, rapid_diffs_debug: true)
      it 'renders the default template' do
        get :new, params: params

        expect(response).to be_successful
        expect(response).to render_template(:new)
      end

      include_examples 'renders rapid diffs'

      context 'when rapid_diffs_disabled parameter is "true"' do
        let(:params) { get_diff_params.merge(rapid_diffs_disabled: true) }

        include_examples 'renders default new template'

        context 'and rapid_diffs_debug feature flag is disabled' do
          before do
            stub_feature_flags(rapid_diffs_debug: false)
          end

          include_examples 'renders rapid diffs'
        end
      end
    end

    context 'when rapid diffs are disabled' do
      let(:params) { get_diff_params }

      before do
        stub_feature_flags(rapid_diffs_on_mr_creation: false)
      end

      include_examples 'renders default new template'
    end
  end

@@ -0,0 +1,13 @@
# frozen_string_literal: true

FactoryBot.define do
  factory :ci_job_input, class: 'Ci::JobInput' do
    sequence(:name) { |n| "input_#{n}" }

    sensitive { false }
    input_type { 'string' }
    value { 'value' }

    job factory: :ci_build
  end
end

@@ -55,7 +55,9 @@ describe('Jobs filtered search', () => {
  it('displays filtered search text label', () => {
    createComponent();

    expect(findFilteredSearch().props('searchTextOptionLabel')).toBe('Search for this text');
    expect(findFilteredSearch().props('searchTextOptionLabel')).toBe(
      'Search for this text (experiment)',
    );
    expect(findFilteredSearch().props('termsAsTokens')).toBe(true);
  });

@@ -270,6 +270,33 @@ exports[`AutocompleteHelper for reference type "wiki", searches for "ho" correct
]
`;

exports[`AutocompleteHelper for work items filters users using apollo cache for command "/assign" 1`] = `
[
  "florida.schoen",
  "root",
  "all",
  "lakeesha.batz",
  "laurene_blick",
  "myrtis",
  "patty",
  "Commit451",
  "flightjs",
  "gitlab-instance-ade037f9",
  "gitlab-org",
  "gnuwget",
  "h5bp",
  "jashkenas",
  "twitter",
]
`;

exports[`AutocompleteHelper for work items filters users using apollo cache for command "/unassign" 1`] = `
[
  "errol",
  "evelynn_olson",
]
`;

exports[`AutocompleteHelper returns expected results before and after updating data sources 1`] = `
[
  "florida.schoen",

@@ -29,6 +29,29 @@ jest.mock('~/emoji', () => ({
  getAllEmoji: () => [{ name: 'thumbsup' }],
}));

jest.mock('~/graphql_shared/issuable_client', () => ({
  currentAssignees: jest.fn().mockReturnValue({
    1: [
      {
        type: 'User',
        username: 'errol',
        name: "Linnie O'Connell",
        avatar_url:
          'https://www.gravatar.com/avatar/d3d9a468a9884eb217fad5ca5b2b9bd7?s=80\u0026d=identicon',
        availability: null,
      },
      {
        type: 'User',
        username: 'evelynn_olson',
        name: 'Dimple Dare',
        avatar_url:
          'https://www.gravatar.com/avatar/bc1e51ee3512c2b4442f51732d655107?s=80\u0026d=identicon',
        availability: null,
      },
    ],
  }),
}));

describe('defaultSorter', () => {
  it('returns items as is if query is empty', () => {
    const items = [{ name: 'abc' }, { name: 'bcd' }, { name: 'cde' }];
@@ -120,10 +143,11 @@ describe('AutocompleteHelper', () => {
  let mock;
  let autocompleteHelper;
  let dateNowOld;
  let dataSourceUrls;

  beforeEach(() => {
    mock = new MockAdapter(axios);
    const dataSourceUrls = {
    dataSourceUrls = {
      members: '/members',
      issues: '/issues',
      snippets: '/snippets',
@@ -224,6 +248,32 @@ },
    },
  );

  describe('for work items', () => {
    beforeEach(() => {
      autocompleteHelper = new AutocompleteHelper({
        dataSourceUrls,
        sidebarMediator: {
          store: { assignees: [], reviewers: [] },
        },
      });

      autocompleteHelper.tiptapEditor = {
        view: { dom: { closest: () => ({ dataset: { workItemId: 1 } }) } },
      };
    });

    it.each`
      command
      ${'/assign'}
      ${'/unassign'}
    `('filters users using apollo cache for command "$command"', async ({ command }) => {
      const dataSource = autocompleteHelper.getDataSource('user', { command });
      const results = await dataSource.search();

      expect(results.map(({ username }) => username)).toMatchSnapshot();
    });
  });

  it('filters items correctly for the second time, when the first command was different', async () => {
    let dataSource = autocompleteHelper.getDataSource('label', { command: '/label' });
    let results = await dataSource.search();

@@ -5,6 +5,9 @@ import { createTestContentEditorExtension } from '../test_utils';

jest.mock('~/emoji');
jest.mock('~/content_editor/services/gl_api_markdown_deserializer');
jest.mock('~/graphql_shared/issuable_client', () => ({
  currentAssignees: jest.fn().mockReturnValue({}),
}));

describe('content_editor/services/create_content_editor', () => {
  let renderMarkdown;

@@ -45,6 +45,11 @@ describe('FileBrowserDrawer', () => {
    expect(wrapper.findComponent(GlDrawer).props('open')).toBe(false);
  });

  it('has sticky header for the drawer', () => {
    createComponent();
    expect(wrapper.findComponent(GlDrawer).props('headerSticky')).toBe(true);
  });

  it('shows file browser', async () => {
    createComponent();
    useFileBrowser().fileBrowserDrawerVisible = true;

@@ -28,7 +28,7 @@ RSpec.describe GitlabSchema.types['Namespace'], feature_category: :shared do
  end

  describe 'fields with :ai_workflows scope' do
    %w[id fullPath workItem workItems].each do |field_name|
    %w[id name description fullPath workItem workItems webUrl].each do |field_name|
      it "includes :ai_workflows scope for the #{field_name} field" do
        field = described_class.fields[field_name]
        expect(field.instance_variable_get(:@scopes)).to include(:ai_workflows)

@@ -59,25 +59,27 @@ RSpec.describe Banzai::Filter::MarkdownEngines::GlfmMarkdown, feature_category:

  describe 'placeholder detection' do
    let_it_be(:project) { create(:project) }
    let_it_be(:group_project) { create(:project, :in_group) }

    it 'turns off placeholder detection when :markdown_placeholders disabled' do
      stub_feature_flags(markdown_placeholders: false)
    let(:project_reference) { project }

      engine = described_class.new({ project: project, no_sourcepos: true })
      expected = <<~TEXT
        <p>%{test}</p>
      TEXT
    shared_examples 'enables placeholder rendering by default' do
      it 'processes %{} syntax as placeholders' do
        engine = described_class.new({ project: project_reference, no_sourcepos: true })
        expected = <<~TEXT
          <p><span data-placeholder>%{test}</span></p>
        TEXT

      expect(engine.render('%{test}')).to eq expected
        expect(engine.render('%{test}')).to eq expected
      end
    end

    it 'defaults to on' do
      engine = described_class.new({ project: project, no_sourcepos: true })
      expected = <<~TEXT
        <p><span data-placeholder>%{test}</span></p>
      TEXT
    it_behaves_like 'enables placeholder rendering by default'

      expect(engine.render('%{test}')).to eq expected
    context 'when project is project namespace' do
      let(:project_reference) { group_project.project_namespace }

      it_behaves_like 'enables placeholder rendering by default'
    end

    it 'turns off placeholder detection when :disable_placeholders' do
@@ -97,5 +99,16 @@ RSpec.describe Banzai::Filter::MarkdownEngines::GlfmMarkdown, feature_category:

      expect(engine.render('%{test}')).to eq expected
    end

    it 'turns off placeholder detection when :markdown_placeholders disabled' do
      stub_feature_flags(markdown_placeholders: false)

      engine = described_class.new({ project: project, no_sourcepos: true })
      expected = <<~TEXT
        <p>%{test}</p>
      TEXT

      expect(engine.render('%{test}')).to eq expected
    end
  end
end

@@ -0,0 +1,14 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BackgroundMigration::BackfillProjectIdOnCiBuildNeeds, feature_category: :continuous_integration, migration: :gitlab_ci do
  include_examples 'desired sharding key backfill job' do
    let(:batch_table) { :ci_build_needs }
    let(:backfill_column) { :project_id }
    let(:backfill_via_table) { :p_ci_builds }
    let(:backfill_via_column) { :project_id }
    let(:backfill_via_foreign_key) { :build_id }
    let(:partition_column) { :partition_id }
  end
end

@@ -429,6 +429,7 @@ builds:
- trace_chunks
- report_results
- namespace
- inputs
- job_artifacts
- job_variables
- sourced_pipelines

@ -25,6 +25,162 @@ RSpec.describe Gitlab::Popen, feature_category: :shared do
|
|||
it { expect(popen_result.duration).to be_kind_of(Numeric) }
|
||||
end
|
||||
|
||||
describe '.popen_with_streaming' do
|
||||
context 'with basic command' do
|
||||
let(:cmd) { %W[#{Gem.ruby} -e $stdout.puts(1);$stderr.puts(2);exit(3)] }
|
||||
let(:status) { klass.new.popen_with_streaming(cmd) }
|
||||
|
||||
it { expect(status).to eq(3) }
|
||||
end
|
||||
|
||||
context 'with zero status' do
|
||||
let(:status) { klass.new.popen_with_streaming(%w[ls], path) }
|
||||
|
||||
it { expect(status).to eq(0) }
|
||||
end
|
||||
|
||||
context 'with non-zero status' do
|
||||
let(:status) { klass.new.popen_with_streaming(%w[cat NOTHING], path) }
|
||||
|
||||
it { expect(status).to eq(1) }
|
||||
end
|
||||
|
||||
context 'with non-zero status with a kill' do
|
||||
let(:cmd) { [Gem.ruby, "-e", "thr = Thread.new { sleep 5 }; Process.kill(9, Process.pid); thr.join"] }
|
||||
let(:status) { klass.new.popen_with_streaming(cmd) }
|
||||
|
||||
it { expect(status).to eq(9) }
|
||||
end
|
||||
|
||||
context 'with unsafe string command' do
|
||||
it 'raises an error when it gets called with a string argument' do
|
||||
expect { klass.new.popen_with_streaming('ls', path) }.to raise_error(RuntimeError)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with unsafe array command' do
|
||||
it 'raises an error when it gets called with an unsafe array' do
|
||||
expect { klass.new.popen_with_streaming(['ls -l'], path) }.to raise_error(RuntimeError)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with custom options' do
|
||||
let(:vars) { { 'foobar' => 123, 'PWD' => path } }
|
||||
let(:options) { { chdir: path } }
|
||||
|
||||
it 'calls popen3 with the provided environment variables' do
|
||||
expect(Open3).to receive(:popen3).with(vars, 'ls', options)
|
||||
|
||||
klass.new.popen_with_streaming(%w[ls], path, { 'foobar' => 123 })
|
||||
end
|
||||
end
|
||||
|
||||
context 'with a process that writes a lot of data to stderr' do
|
||||
let(:test_string) { 'The quick brown fox jumped over the lazy dog' }
|
||||
# The pipe buffer is typically 64K. This string is about 440K.
|
||||
let(:spew_command) { ['bash', '-c', "for i in {1..10000}; do echo '#{test_string}' 1>&2; done"] }
|
||||
let(:captured_stderr) { [] }
|
||||
let(:status) do
|
||||
klass.new.popen_with_streaming(spew_command, path) do |stream_type, line|
|
||||
captured_stderr << line if stream_type == :stderr
|
||||
end
|
||||
end
|
||||
|
||||
it 'handles large stderr output without blocking' do
|
||||
expect(status).to eq(0)
|
||||
expect(captured_stderr.join).to include(test_string)
|
||||
end
|
||||
end
|
||||
|
||||
context 'without a directory argument' do
|
||||
let(:status) { klass.new.popen_with_streaming(%w[ls]) }
|
||||
|
||||
it { expect(status).to eq(0) }
|
||||
end
|
||||
|
||||
context 'when binary is absent' do
|
||||
it 'raises error' do
|
||||
expect do
|
||||
klass.new.popen_with_streaming(%w[foobar])
|
||||
end.to raise_error(Errno::ENOENT)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with streaming block' do
|
||||
let(:cmd) { %W[#{Gem.ruby} -e $stdout.puts('line1');$stdout.puts('line2');$stderr.puts('error1')] }
|
||||
let(:streamed_output) { [] }
|
||||
let(:status) do
|
||||
klass.new.popen_with_streaming(cmd) do |stream_type, line|
|
||||
streamed_output << [stream_type, line]
|
||||
end
|
||||
end
|
||||
|
||||
it 'yields stdout and stderr lines as they are produced' do
|
||||
expect(status).to eq(0)
|
||||
expect(streamed_output).to include([:stdout, "line1\n"])
|
||||
expect(streamed_output).to include([:stdout, "line2\n"])
|
||||
expect(streamed_output).to include([:stderr, "error1\n"])
|
||||
end
|
||||
end
|
||||
|
||||
context 'with custom environment variables' do
|
||||
let(:cmd) { [Gem.ruby, '-e', 'puts ENV["TEST_VAR"]'] }
|
||||
let(:vars) { { 'TEST_VAR' => 'test_value' } }
|
||||
let(:captured_stdout) { [] }
|
||||
let(:status) do
|
||||
klass.new.popen_with_streaming(cmd, nil, vars) do |stream_type, line|
|
||||
captured_stdout << line if stream_type == :stdout
|
||||
end
|
||||
end
|
||||
|
||||
it 'passes environment variables to the command' do
|
||||
expect(status).to eq(0)
|
||||
expect(captured_stdout.join).to include('test_value')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with concurrent stdout and stderr output' do
|
||||
# Output to both streams simultaneously to force concurrency
|
||||
let(:cmd) do
|
||||
['bash', '-c', 'for i in {1..100}; do echo "out$i" & echo "err$i" >&2 & done; wait']
|
||||
end
|
||||
|
||||
it 'handles concurrent stream processing safely' do
|
||||
counter = 0
|
||||
|
||||
status = klass.new.popen_with_streaming(cmd) do |_stream_type, _line|
|
||||
# This block should be executed atomically
|
||||
# Simulate some processing time to increase chance of race condition
|
||||
current = counter
|
||||
sleep(0.0001)
|
||||
counter = current + 1
|
||||
end
|
||||
|
||||
expect(status).to eq(0)
|
||||
# Without mutex, we lose some increments due to race conditions
|
||||
expect(counter).to eq(200) # 100 stdout + 100 stderr lines
|
||||
end
|
||||
|
||||
it 'prevents data corruption in shared data structures' do
|
||||
shared_array = []
|
||||
|
||||
status = klass.new.popen_with_streaming(cmd) do |stream_type, line|
|
||||
# Without mutex, concurrent Array#<< could corrupt the array
|
||||
shared_array << [stream_type, line.strip]
|
||||
end
|
||||
|
||||
expect(status).to eq(0)
|
||||
expect(shared_array.size).to eq(200)
|
||||
|
||||
# Verify all expected lines are present
|
||||
(1..100).each do |i|
|
||||
expect(shared_array).to include([:stdout, "out#{i}"])
|
||||
expect(shared_array).to include([:stderr, "err#{i}"])
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'with zero status' do
|
||||
let(:popen_result) { klass.new.popen(%w[ls], path) }
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,357 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Observability::O11yToken, feature_category: :observability do
|
||||
let(:o11y_settings) do
|
||||
instance_double(
|
||||
Observability::GroupO11ySetting,
|
||||
o11y_service_url: 'https://o11y.example.com',
|
||||
o11y_service_user_email: 'test@example.com',
|
||||
o11y_service_password: 'password123'
|
||||
)
|
||||
end
|
||||
|
||||
let(:success_response) do
|
||||
{
|
||||
'data' => {
|
||||
'userId' => '123',
|
||||
'accessJwt' => 'access_token_123',
|
||||
'refreshJwt' => 'refresh_token_456'
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
let(:http_response) do
|
||||
instance_double(
|
||||
HTTParty::Response,
|
||||
code: 200,
|
||||
body: Gitlab::Json.dump(success_response)
|
||||
)
|
||||
end
|
||||
|
||||
describe '.generate_tokens' do
|
||||
subject(:generate_tokens) { described_class.generate_tokens(o11y_settings) }
|
||||
|
||||
context 'when authentication is successful' do
|
||||
before do
|
||||
allow(Gitlab::HTTP).to receive(:post).and_return(http_response)
|
||||
end
|
||||
|
||||
it 'returns tokens and user ID' do
|
||||
expect(generate_tokens).to eq(
|
||||
userId: '123',
|
||||
accessJwt: 'access_token_123',
|
||||
refreshJwt: 'refresh_token_456'
|
||||
)
|
||||
end
|
||||
|
||||
it 'makes HTTP request with correct parameters' do
|
||||
expect(Gitlab::HTTP).to receive(:post).with(
|
||||
'https://o11y.example.com/api/v1/login',
|
||||
headers: { 'Content-Type' => 'application/json' },
|
||||
body: Gitlab::Json.dump({
|
||||
email: 'test@example.com',
|
||||
password: 'password123'
|
||||
}),
|
||||
allow_local_requests: anything
|
||||
).and_return(http_response)
|
||||
|
||||
generate_tokens
|
||||
end
|
||||
end
|
||||
|
||||
context 'when o11y_settings is nil' do
|
||||
let(:o11y_settings) { nil }
|
||||
|
||||
it 'returns empty hash and logs error' do
|
||||
expect(Gitlab::ErrorTracking).to receive(:log_exception)
|
||||
.with(instance_of(Observability::O11yToken::ConfigurationError))
|
||||
|
||||
expect(generate_tokens).to eq({})
|
||||
end
|
||||
end
|
||||
|
||||
context 'when o11y_settings values are blank' do
|
||||
shared_examples 'returns empty hash and logs error' do |field_name|
|
||||
it "returns empty hash and logs error when #{field_name} is blank" do
|
||||
expect(Gitlab::ErrorTracking).to receive(:log_exception)
|
||||
.with(instance_of(Observability::O11yToken::ConfigurationError))
|
||||
|
||||
expect(generate_tokens).to eq({})
|
||||
end
|
||||
end
|
||||
|
||||
context 'when o11y_service_url is blank' do
|
||||
let(:o11y_settings) do
|
||||
instance_double(
|
||||
Observability::GroupO11ySetting,
|
||||
o11y_service_url: nil,
|
||||
o11y_service_user_email: 'test@example.com',
|
||||
o11y_service_password: 'password123'
|
||||
)
|
||||
end
|
||||
|
||||
include_examples 'returns empty hash and logs error', 'o11y_service_url'
|
||||
end
|
||||
|
||||
context 'when o11y_service_user_email is blank' do
|
||||
let(:o11y_settings) do
|
||||
instance_double(
|
||||
Observability::GroupO11ySetting,
|
||||
o11y_service_url: 'https://o11y.example.com',
|
||||
o11y_service_user_email: nil,
|
||||
o11y_service_password: 'password123'
|
||||
)
|
||||
end
|
||||
|
||||
include_examples 'returns empty hash and logs error', 'o11y_service_user_email'
|
||||
end
|
||||
|
||||
context 'when o11y_service_password is blank' do
|
||||
let(:o11y_settings) do
|
||||
instance_double(
|
||||
Observability::GroupO11ySetting,
|
||||
o11y_service_url: 'https://o11y.example.com',
|
||||
o11y_service_user_email: 'test@example.com',
|
||||
o11y_service_password: nil
|
||||
)
|
||||
end
|
||||
|
||||
include_examples 'returns empty hash and logs error', 'o11y_service_password'
|
||||
end
|
||||
|
||||
context 'when all o11y_settings values are blank' do
|
||||
let(:o11y_settings) do
|
||||
instance_double(
|
||||
Observability::GroupO11ySetting,
|
||||
o11y_service_url: nil,
|
||||
o11y_service_user_email: nil,
|
||||
o11y_service_password: nil
|
||||
)
|
||||
end
|
||||
|
||||
include_examples 'returns empty hash and logs error', 'all fields'
|
||||
end
|
||||
end
|
||||
|
||||
context 'when HTTP request fails' do
|
||||
before do
|
||||
allow(Gitlab::HTTP).to receive(:post)
|
||||
.and_raise(SocketError.new('Connection failed'))
|
||||
end
|
||||
|
||||
it 'returns empty hash and logs error' do
|
||||
expect(Gitlab::ErrorTracking).to receive(:log_exception)
|
||||
.with(instance_of(Observability::O11yToken::NetworkError))
|
||||
|
||||
expect(generate_tokens).to eq({})
|
||||
end
|
||||
end
|
||||
|
||||
context 'when response is not successful' do
|
||||
let(:http_response) do
|
||||
instance_double(HTTParty::Response, code: '401', body: 'Unauthorized')
|
||||
end
|
||||
|
||||
before do
|
||||
allow(Gitlab::HTTP).to receive(:post).and_return(http_response)
|
||||
end
|
||||
|
||||
it 'returns empty hash and logs warning' do
|
||||
expect(Gitlab::AppLogger).to receive(:warn)
|
||||
.with("O11y authentication failed with status 401")
|
||||
|
||||
expect(generate_tokens).to eq({})
|
||||
end
|
||||
end
|
||||
|
||||
context 'when response body is invalid JSON' do
|
||||
let(:http_response) do
|
||||
instance_double(HTTParty::Response, code: 200, body: 'invalid json')
|
||||
end
|
||||
|
||||
before do
|
||||
allow(Gitlab::HTTP).to receive(:post).and_return(http_response)
|
||||
end
|
||||
|
||||
it 'returns empty hash and logs error' do
|
||||
expect(Gitlab::ErrorTracking).to receive(:log_exception)
|
||||
.with(instance_of(Observability::O11yToken::AuthenticationError))
|
||||
|
||||
expect(generate_tokens).to eq({})
|
||||
end
|
||||
end
|
||||
|
||||
context 'when response body is nil' do
|
||||
let(:http_response) do
|
||||
instance_double(HTTParty::Response, code: 200, body: nil)
|
||||
end
|
||||
|
||||
before do
|
||||
allow(Gitlab::HTTP).to receive(:post).and_return(http_response)
|
||||
end
|
||||
|
||||
it 'returns empty hash and logs error' do
|
||||
expect(Gitlab::ErrorTracking).to receive(:log_exception)
|
||||
.with(instance_of(Observability::O11yToken::AuthenticationError))
|
||||
|
||||
expect(generate_tokens).to eq({})
|
||||
end
|
||||
end
|
||||
|
||||
context 'when response body is blank' do
|
||||
[
|
||||
{ description: 'when body is empty string', body: '' },
|
||||
{ description: 'when body is whitespace only', body: ' ' },
|
||||
{ description: 'when body is newline only', body: "\n" },
|
||||
{ description: 'when body is tab only', body: "\t" }
|
||||
].each do |test_case|
|
||||
context test_case[:description] do
|
||||
let(:http_response) do
|
||||
instance_double(HTTParty::Response, code: 200, body: test_case[:body])
|
||||
end
|
||||
|
||||
before do
|
||||
allow(Gitlab::HTTP).to receive(:post).and_return(http_response)
|
||||
end
|
||||
|
||||
it 'returns empty hash and logs error' do
|
||||
expect(Gitlab::ErrorTracking).to receive(:log_exception)
|
||||
.with(instance_of(Observability::O11yToken::AuthenticationError))
|
||||
|
||||
expect(generate_tokens).to eq({})
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#initialize' do
|
||||
subject(:o11y_token) { described_class.new(o11y_settings) }
|
||||
|
||||
it 'creates instance with o11y_settings' do
|
||||
allow(Gitlab::HTTP).to receive(:post).and_return(http_response)
|
||||
expect { o11y_token.generate_tokens }.not_to raise_error
|
||||
end
|
||||
end
|
||||
|
||||
describe Observability::O11yToken::TokenResponse do
|
||||
let(:token_response) do
|
||||
described_class.new(
|
||||
user_id: '123',
|
||||
access_jwt: 'access_token',
|
||||
refresh_jwt: 'refresh_token'
|
||||
)
|
||||
end
|
||||
|
||||
describe '#to_h' do
|
||||
it 'returns hash with correct keys' do
|
||||
expect(token_response.to_h).to eq(
|
||||
userId: '123',
|
||||
accessJwt: 'access_token',
|
||||
refreshJwt: 'refresh_token'
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
describe '.from_json' do
|
||||
let(:json_data) do
|
||||
{
|
||||
'data' => {
|
||||
'userId' => '456',
|
||||
'accessJwt' => 'new_access_token',
|
||||
'refreshJwt' => 'new_refresh_token'
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
it 'creates TokenResponse from JSON data' do
|
||||
result = described_class.from_json(json_data)
|
||||
|
||||
expect(result.user_id).to eq('456')
|
||||
expect(result.access_jwt).to eq('new_access_token')
|
||||
expect(result.refresh_jwt).to eq('new_refresh_token')
|
||||
end
|
||||
|
||||
context 'when data is missing' do
|
||||
[
|
||||
{ description: 'when json_data is empty hash', data: {} },
|
||||
{ description: 'when json_data is nil', data: nil }
|
||||
].each do |test_case|
|
||||
context test_case[:description] do
|
||||
let(:json_data) { test_case[:data] }
|
||||
|
||||
it 'creates TokenResponse with nil values' do
|
||||
result = described_class.from_json(json_data)
|
||||
|
||||
expect(result.user_id).to be_nil
|
||||
expect(result.access_jwt).to be_nil
|
||||
expect(result.refresh_jwt).to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe Observability::O11yToken::HttpClient do
|
||||
let(:http_client) { described_class.new }
|
||||
|
||||
describe '#post' do
|
||||
let(:url) { 'https://example.com/api/login' }
|
||||
let(:payload) { { email: 'test@example.com', password: 'password' } }
|
||||
|
||||
before do
|
||||
allow(Gitlab::HTTP).to receive(:post).and_return(http_response)
|
||||
end
|
||||
|
||||
      it 'makes HTTP POST request' do
        expect(Gitlab::HTTP).to receive(:post).with(
          url,
          headers: { 'Content-Type' => 'application/json' },
          body: Gitlab::Json.dump(payload),
          allow_local_requests: anything
        ).and_return(http_response)

        http_client.post(url, payload)
      end
    end

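    # Local requests are only allowed in the development and test
    # environments; any other environment, production included, gets false.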
    describe '#allow_local_requests?' do
      context 'in development environment' do
        before do
          allow(Rails.env).to receive_messages(development?: true, test?: false)
        end

        it 'returns true' do
          expect(http_client.send(:allow_local_requests?)).to be true
        end
      end

      context 'in test environment' do
        before do
          allow(Rails.env).to receive_messages(development?: false, test?: true)
        end

        it 'returns true' do
          expect(Gitlab::HTTP).to receive(:post).with(
            anything,
            hash_including(allow_local_requests: true)
          )
          http_client.post('http://example.com', {})
        end
      end

      context 'in production environment' do
        before do
          allow(Rails.env).to receive_messages(development?: false, test?: false)
        end

        it 'returns false' do
          expect(http_client.send(:allow_local_requests?)).to be false
        end
      end
    end
  end
end

@@ -0,0 +1,27 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe QueueBackfillProjectIdOnCiBuildNeeds, migration: :gitlab_ci, feature_category: :continuous_integration do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
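    # reversible_migration exercises both directions: the `before` block runs
    # prior to the up migration (and again after rollback), the `after` block
    # runs once the migration has been applied.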
    reversible_migration do |migration|
      migration.before -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      migration.after -> {
        expect(batched_migration).to have_scheduled_batched_migration(
          gitlab_schema: :gitlab_ci,
          table_name: :ci_build_needs,
          column_name: :id,
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE
        )
      }
    end
  end
end

@@ -42,6 +42,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
        .inverse_of(:build)
    end

+   it { is_expected.to have_many(:inputs).with_foreign_key(:job_id) }
    it { is_expected.to have_many(:job_variables).with_foreign_key(:job_id) }
    it { is_expected.to have_many(:report_results).with_foreign_key(:build_id) }
    it { is_expected.to have_many(:pages_deployments).with_foreign_key(:ci_build_id) }

@@ -78,11 +79,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
  describe 'partition query' do
    subject { build.reload }

+   it_behaves_like 'including partition key for relation', :inputs
    it_behaves_like 'including partition key for relation', :trace_chunks
    it_behaves_like 'including partition key for relation', :build_source
    it_behaves_like 'including partition key for relation', :job_artifacts
    it_behaves_like 'including partition key for relation', :job_annotations
    it_behaves_like 'including partition key for relation', :runner_manager_build

    Ci::JobArtifact.file_types.each_key do |key|
      it_behaves_like 'including partition key for relation', :"job_artifacts_#{key}"
    end
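For orientation, a minimal sketch of the association these new examples appear to exercise; everything beyond the `inputs` name, the `Ci::JobInput` class, and the `job_id` foreign key shown above is an assumption, not the actual model code:

    # Assumed shape only; the real definition lives in app/models/ci/build.rb
    has_many :inputs, class_name: 'Ci::JobInput', foreign_key: :job_id, inverse_of: :job
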
@@ -0,0 +1,54 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Ci::JobInput, feature_category: :pipeline_composition do
  let_it_be(:project) { create(:project) }
  let_it_be(:job) { create(:ci_build, project: project) }
  let_it_be_with_reload(:job_input) { create(:ci_job_input, job: job, project: project) }

  subject(:input) { job_input }

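  # The loose foreign key means rows are cleaned up asynchronously after the
  # parent project is deleted, rather than through a database-level cascade.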
  it_behaves_like 'cleanup by a loose foreign key' do
    let!(:model) { create(:ci_job_input, project: project) }
    let!(:parent) { model.project }
  end

  describe 'associations' do
    it { is_expected.to belong_to(:job) }
    it { is_expected.to belong_to(:project) }
  end

  describe 'validations' do
    describe 'name' do
      it { is_expected.to validate_presence_of(:name) }
      it { is_expected.to validate_length_of(:name).is_at_most(255) }
      it { is_expected.to validate_uniqueness_of(:name).scoped_to([:job_id, :partition_id]) }
    end

    describe 'project' do
      it { is_expected.to validate_presence_of(:project) }
    end

    describe 'value' do
      context 'when the serialized length of the value is less than the maximum permitted size' do
        it 'is valid' do
          input.value = [1, 2]

          expect(input).to be_valid
        end
      end

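      # Stubbing MAX_VALUE_SIZE down to 4 characters lets a tiny payload
      # exceed the limit without constructing a huge value.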
      context 'when the serialized length of the value is greater than the maximum permitted size' do
        it 'is invalid' do
          stub_const("#{described_class}::MAX_VALUE_SIZE", 4)

          input.value = [1, 2]

          expect(input).not_to be_valid
          expect(input.errors.full_messages).to contain_exactly('Value exceeds max serialized size: 4 characters')
        end
      end
    end
  end
end

@@ -99,7 +99,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
      queuing_entry runtime_metadata trace_metadata
      dast_site_profile dast_scanner_profile stage_id dast_site_profiles_build
      dast_scanner_profiles_build auto_canceled_by_partition_id execution_config_id execution_config
-     build_source id_value].freeze
+     build_source id_value inputs].freeze
    end

    before_all do

@@ -210,14 +210,8 @@ RSpec.describe NamespaceSetting, feature_category: :groups_and_projects, type: :
      build(:namespace_settings, allow_enterprise_bypass_placeholder_confirmation: true)
    end

-   around do |example|
-     travel_to(Time.current.change(hour: 10)) do
-       example.run
-     end
-   end
-
-   let(:valid_times) { [1.day.from_now + 1.second, 30.days.from_now, 1.year.from_now - 1.day] }
-   let(:invalid_times) { [nil, 1.minute.ago, Time.current, 1.hour.from_now, 1.year.from_now] }
+   let(:valid_times) { [1.day.from_now, 30.days.from_now, 1.year.from_now - 1.day] }
+   let(:invalid_times) { [nil, 1.day.ago, Time.zone.today, 1.year.from_now] }

    it 'does not allow invalid expiration times' do
      invalid_times.each do |time|

@@ -878,7 +878,7 @@ RSpec.describe Repository, feature_category: :source_code_management do
    let(:cache_key) { cache.cache_key(:merged_branch_names) }

    before do
-     allow(repository.raw_repository).to receive(:merged_branch_names).with(branch_names).and_return(already_merged)
+     allow(repository.raw_repository).to receive(:merged_branch_names).with(branch_names, include_identical: false).and_return(already_merged)
    end

    it { is_expected.to eq(already_merged) }

@@ -944,7 +944,7 @@ RSpec.describe Repository, feature_category: :source_code_management do

    context "cache is partially complete" do
      before do
-       allow(repository.raw_repository).to receive(:merged_branch_names).with(["boop"]).and_return([])
+       allow(repository.raw_repository).to receive(:merged_branch_names).with(["boop"], hash_including(include_identical: anything)).and_return([])
        hash = write_hash.except("boop")
        cache.write(:merged_branch_names, hash)
      end

@@ -952,12 +952,43 @@ RSpec.describe Repository, feature_category: :source_code_management do
      it { is_expected.to eq(already_merged) }

      it "does fetch from the disk" do
-       expect(repository.raw_repository).to receive(:merged_branch_names).with(["boop"])
+       expect(repository.raw_repository).to receive(:merged_branch_names).with(["boop"], include_identical: false)

        subject
      end
    end

context "with include_identical" do
|
||||
let(:root_ref) { repository.root_ref }
|
||||
let(:identical_branch) { 'identical-branch' }
|
||||
let(:stale_branch) { 'stale-branch' }
|
||||
|
||||
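      # identical-branch points at the same commit as the root ref;
      # stale-branch sits one commit behind it.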
      before do
        allow(repository.raw_repository)
          .to receive(:merged_branch_names)
          .and_call_original

        repository.create_branch(identical_branch, root_ref)
        repository.create_branch(stale_branch, 'HEAD~1')
      end

      context 'when include_identical: true' do
        it 'includes branches identical to root ref' do
          merged = repository.merged_branch_names([identical_branch, stale_branch], include_identical: true)

          expect(merged).to match_array([identical_branch, stale_branch])
        end
      end

      context 'when include_identical: false' do
        it 'excludes branches identical to root ref' do
          merged = repository.merged_branch_names([identical_branch, stale_branch], include_identical: false)

          expect(merged).to match_array([stale_branch])
        end
      end
    end

    context "requested branches array is empty" do
      let(:branch_names) { [] }

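Taken together, these examples suggest `Repository#merged_branch_names` now forwards an `include_identical:` keyword (defaulting to false, per the stubs above) to the raw repository. A minimal sketch of the assumed shape, ignoring the caching layer the surrounding specs exercise:

    # Assumed signature only; the real method also consults the branch-name cache
    def merged_branch_names(branch_names = [], include_identical: false)
      raw_repository.merged_branch_names(branch_names, include_identical: include_identical)
    end
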
@@ -2070,7 +2070,6 @@
- './ee/spec/services/groups/schedule_bulk_repository_shard_moves_service_spec.rb'
- './ee/spec/services/groups/seat_usage_export_service_spec.rb'
- './ee/spec/services/groups/sync_service_spec.rb'
- './ee/spec/services/groups/transfer_service_spec.rb'
- './ee/spec/services/groups/update_repository_storage_service_spec.rb'
- './ee/spec/services/groups/update_service_spec.rb'
- './ee/spec/services/historical_user_data/csv_service_spec.rb'