Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-03-03 21:12:07 +00:00
parent dc1018cb5d
commit 110fa7ec9a
45 changed files with 877 additions and 341 deletions

View File

@ -474,7 +474,7 @@ end
gem 'warning', '~> 1.3.0', feature_category: :shared
group :development do
gem 'lefthook', '~> 1.10.0', require: false, feature_category: :tooling
gem 'lefthook', '~> 1.11.0', require: false, feature_category: :tooling
gem 'rubocop', feature_category: :tooling
gem 'solargraph', '~> 0.47.2', require: false, feature_category: :shared

View File

@ -238,7 +238,7 @@
{"name":"gitlab-markup","version":"1.9.0","platform":"ruby","checksum":"7eda045a08ec2d110084252fa13a8c9eac8bdac0e302035ca7db4b82bcbd7ed4"},
{"name":"gitlab-net-dns","version":"0.10.0","platform":"ruby","checksum":"73b4613d8c851480b7b4e631f117bce4bbb4b6b8073ecf4eb167407e46097c6e"},
{"name":"gitlab-sdk","version":"0.3.1","platform":"ruby","checksum":"48ba49084f4ab92df7c7ef9f347020d9dfdf6ed9c1e782b67264e98ffe6ea710"},
{"name":"gitlab-secret_detection","version":"0.18.0","platform":"ruby","checksum":"c143c22c8a70898b11141dd530fc740725835c54a5cd01117c1ecfefab798ae6"},
{"name":"gitlab-secret_detection","version":"0.19.0","platform":"ruby","checksum":"995d87ef652dec742de8af5015018f975a01c8961a7c71892e9be19417215613"},
{"name":"gitlab-security_report_schemas","version":"0.1.2.min15.0.0.max15.2.1","platform":"ruby","checksum":"300037487ec9d51a814f648514ff521cb82b94fc51d9fe53389175b36ac680ae"},
{"name":"gitlab-styles","version":"13.1.0","platform":"ruby","checksum":"46c7c5729616355868b7b40a4ffcd052b36346076042abe8cafaee1688cbf2c1"},
{"name":"gitlab_chronic_duration","version":"0.12.0","platform":"ruby","checksum":"0d766944d415b5c831f176871ee8625783fc0c5bfbef2d79a3a616f207ffc16d"},
@ -366,7 +366,7 @@
{"name":"kubeclient","version":"4.11.0","platform":"ruby","checksum":"4985fcd749fb8c364a668a8350a49821647f03aa52d9ee6cbc582beb8e883fcc"},
{"name":"language_server-protocol","version":"3.17.0.3","platform":"ruby","checksum":"3d5c58c02f44a20d972957a9febe386d7e7468ab3900ce6bd2b563dd910c6b3f"},
{"name":"launchy","version":"2.5.2","platform":"ruby","checksum":"8aa0441655aec5514008e1d04892c2de3ba57bd337afb984568da091121a241b"},
{"name":"lefthook","version":"1.10.10","platform":"ruby","checksum":"344ddb720cdcc5ffaefabd47784766d931f4294babb9961f75db19d7765feea7"},
{"name":"lefthook","version":"1.11.2","platform":"ruby","checksum":"4bcd3c5bba4996727d96fffcf02e090f50bf672ca76677f46ba0f98a862eddc2"},
{"name":"letter_opener","version":"1.10.0","platform":"ruby","checksum":"2ff33f2e3b5c3c26d1959be54b395c086ca6d44826e8bf41a14ff96fdf1bdbb2"},
{"name":"letter_opener_web","version":"3.0.0","platform":"ruby","checksum":"3f391efe0e8b9b24becfab5537dfb17a5cf5eb532038f947daab58cb4b749860"},
{"name":"libyajl2","version":"2.1.0","platform":"ruby","checksum":"aa5df6c725776fc050c8418450de0f7c129cb7200b811907c4c0b3b5c0aea0ef"},

View File

@ -775,7 +775,7 @@ GEM
activesupport (>= 5.2.0)
rake (~> 13.0)
snowplow-tracker (~> 0.8.0)
gitlab-secret_detection (0.18.0)
gitlab-secret_detection (0.19.0)
grpc (~> 1.63)
grpc-tools (~> 1.63)
grpc_reflection (~> 0.1)
@ -1097,7 +1097,7 @@ GEM
language_server-protocol (3.17.0.3)
launchy (2.5.2)
addressable (~> 2.8)
lefthook (1.10.10)
lefthook (1.11.2)
letter_opener (1.10.0)
launchy (>= 2.2, < 4)
letter_opener_web (3.0.0)
@ -2181,7 +2181,7 @@ DEPENDENCIES
knapsack (~> 4.0.0)
kramdown (~> 2.5.0)
kubeclient (~> 4.11.0)
lefthook (~> 1.10.0)
lefthook (~> 1.11.0)
letter_opener_web (~> 3.0.0)
license_finder (~> 7.0)
licensee (~> 9.16)
@ -2366,4 +2366,4 @@ DEPENDENCIES
yajl-ruby (~> 1.4.3)
BUNDLED WITH
2.5.11
2.6.5

View File

@ -238,7 +238,7 @@
{"name":"gitlab-markup","version":"1.9.0","platform":"ruby","checksum":"7eda045a08ec2d110084252fa13a8c9eac8bdac0e302035ca7db4b82bcbd7ed4"},
{"name":"gitlab-net-dns","version":"0.10.0","platform":"ruby","checksum":"73b4613d8c851480b7b4e631f117bce4bbb4b6b8073ecf4eb167407e46097c6e"},
{"name":"gitlab-sdk","version":"0.3.1","platform":"ruby","checksum":"48ba49084f4ab92df7c7ef9f347020d9dfdf6ed9c1e782b67264e98ffe6ea710"},
{"name":"gitlab-secret_detection","version":"0.18.0","platform":"ruby","checksum":"c143c22c8a70898b11141dd530fc740725835c54a5cd01117c1ecfefab798ae6"},
{"name":"gitlab-secret_detection","version":"0.19.0","platform":"ruby","checksum":"995d87ef652dec742de8af5015018f975a01c8961a7c71892e9be19417215613"},
{"name":"gitlab-security_report_schemas","version":"0.1.2.min15.0.0.max15.2.1","platform":"ruby","checksum":"300037487ec9d51a814f648514ff521cb82b94fc51d9fe53389175b36ac680ae"},
{"name":"gitlab-styles","version":"13.1.0","platform":"ruby","checksum":"46c7c5729616355868b7b40a4ffcd052b36346076042abe8cafaee1688cbf2c1"},
{"name":"gitlab_chronic_duration","version":"0.12.0","platform":"ruby","checksum":"0d766944d415b5c831f176871ee8625783fc0c5bfbef2d79a3a616f207ffc16d"},
@ -369,7 +369,7 @@
{"name":"kubeclient","version":"4.11.0","platform":"ruby","checksum":"4985fcd749fb8c364a668a8350a49821647f03aa52d9ee6cbc582beb8e883fcc"},
{"name":"language_server-protocol","version":"3.17.0.3","platform":"ruby","checksum":"3d5c58c02f44a20d972957a9febe386d7e7468ab3900ce6bd2b563dd910c6b3f"},
{"name":"launchy","version":"2.5.2","platform":"ruby","checksum":"8aa0441655aec5514008e1d04892c2de3ba57bd337afb984568da091121a241b"},
{"name":"lefthook","version":"1.10.10","platform":"ruby","checksum":"344ddb720cdcc5ffaefabd47784766d931f4294babb9961f75db19d7765feea7"},
{"name":"lefthook","version":"1.11.2","platform":"ruby","checksum":"4bcd3c5bba4996727d96fffcf02e090f50bf672ca76677f46ba0f98a862eddc2"},
{"name":"letter_opener","version":"1.10.0","platform":"ruby","checksum":"2ff33f2e3b5c3c26d1959be54b395c086ca6d44826e8bf41a14ff96fdf1bdbb2"},
{"name":"letter_opener_web","version":"3.0.0","platform":"ruby","checksum":"3f391efe0e8b9b24becfab5537dfb17a5cf5eb532038f947daab58cb4b749860"},
{"name":"libyajl2","version":"2.1.0","platform":"ruby","checksum":"aa5df6c725776fc050c8418450de0f7c129cb7200b811907c4c0b3b5c0aea0ef"},
@ -802,8 +802,8 @@
{"name":"webmock","version":"3.25.0","platform":"ruby","checksum":"573c23fc4887008c830f22da588db339ca38b6d59856fd57f5a068959474198e"},
{"name":"webrick","version":"1.8.2","platform":"ruby","checksum":"431746a349199546ff9dd272cae10849c865f938216e41c402a6489248f12f21"},
{"name":"websocket","version":"1.2.10","platform":"ruby","checksum":"2cc1a4a79b6e63637b326b4273e46adcddf7871caa5dc5711f2ca4061a629fa8"},
{"name":"websocket-driver","version":"0.7.7","platform":"java","checksum":"e2520a6049feb88691e042d631063fa96d50620fb7f53b30180ae6fb2cf75eb1"},
{"name":"websocket-driver","version":"0.7.7","platform":"ruby","checksum":"056d99f2cd545712cfb1291650fde7478e4f2661dc1db6a0fa3b966231a146b4"},
{"name":"websocket-driver","version":"0.7.6","platform":"java","checksum":"bc894b9e9d5aee55ac04b61003e1957c4ef411a5a048199587d0499785b505c3"},
{"name":"websocket-driver","version":"0.7.6","platform":"ruby","checksum":"f69400be7bc197879726ad8e6f5869a61823147372fd8928836a53c2c741d0db"},
{"name":"websocket-extensions","version":"0.1.5","platform":"ruby","checksum":"1c6ba63092cda343eb53fc657110c71c754c56484aad42578495227d717a8241"},
{"name":"wikicloth","version":"0.8.1","platform":"ruby","checksum":"7ac8a9ca0a948cf472851e521afc6c2a6b04a8f91ef1d824ba6a61ffbd60e6ca"},
{"name":"wisper","version":"2.0.1","platform":"ruby","checksum":"ce17bc5c3a166f241a2e6613848b025c8146fce2defba505920c1d1f3f88fae6"},

View File

@ -787,7 +787,7 @@ GEM
activesupport (>= 5.2.0)
rake (~> 13.0)
snowplow-tracker (~> 0.8.0)
gitlab-secret_detection (0.18.0)
gitlab-secret_detection (0.19.0)
grpc (~> 1.63)
grpc-tools (~> 1.63)
grpc_reflection (~> 0.1)
@ -1114,7 +1114,7 @@ GEM
language_server-protocol (3.17.0.3)
launchy (2.5.2)
addressable (~> 2.8)
lefthook (1.10.10)
lefthook (1.11.2)
letter_opener (1.10.0)
launchy (>= 2.2, < 4)
letter_opener_web (3.0.0)
@ -2010,8 +2010,7 @@ GEM
hashdiff (>= 0.4.0, < 2.0.0)
webrick (1.8.2)
websocket (1.2.10)
websocket-driver (0.7.7)
base64
websocket-driver (0.7.6)
websocket-extensions (>= 0.1.0)
websocket-extensions (0.1.5)
wikicloth (0.8.1)
@ -2216,7 +2215,7 @@ DEPENDENCIES
knapsack (~> 4.0.0)
kramdown (~> 2.5.0)
kubeclient (~> 4.11.0)
lefthook (~> 1.10.0)
lefthook (~> 1.11.0)
letter_opener_web (~> 3.0.0)
license_finder (~> 7.0)
licensee (~> 9.16)
@ -2401,4 +2400,4 @@ DEPENDENCIES
yajl-ruby (~> 1.4.3)
BUNDLED WITH
2.5.11
2.6.5

View File

@ -256,12 +256,39 @@ export function removeParams(params, url = window.location.href, skipEncoding =
return `${root}${writableQuery}${writableFragment}`;
}
export function updateHistory({ state = {}, title = '', url, replace = false, win = window } = {}) {
if (win.history) {
if (replace) {
win.history.replaceState(state, title, url);
} else {
win.history.pushState(state, title, url);
}
}
}
/**
* Returns value after the '#' in the location hash
* @returns Current value of the hash, undefined if not set
*/
export const getLocationHash = () => window.location.hash?.split('#')[1];
/**
* Sets location hash to the given value.
* When value is undefined, the hash is removed.
* @param {string} hash - use undefined to remove location hash
*/
export const setLocationHash = (hash) => {
if (hash === undefined) {
updateHistory({
title: document.title,
url: window.location.pathname + window.location.search,
replace: true,
});
} else {
window.location.hash = hash;
}
};
/**
* Returns a boolean indicating whether the URL hash contains the given string value
* @param {string} hashName
@ -299,16 +326,6 @@ export const setUrlFragment = (url, fragment) => {
return `${rootUrl}#${encodedFragment}`;
};
export function updateHistory({ state = {}, title = '', url, replace = false, win = window } = {}) {
if (win.history) {
if (replace) {
win.history.replaceState(state, title, url);
} else {
win.history.pushState(state, title, url);
}
}
}
export const escapeFileUrl = (fileUrl) => encodeURIComponent(fileUrl).replace(/%2F/g, '/');
export function webIDEUrl(route = undefined) {

View File

@ -1,6 +1,7 @@
<script>
import { GlButton, GlButtonGroup, GlFormGroup, GlIcon, GlAlert } from '@gitlab/ui';
import { s__, sprintf } from '~/locale';
import { getLocationHash, setLocationHash } from '~/lib/utils/url_utility';
import SafeHtml from '~/vue_shared/directives/safe_html';
import MultiStepFormTemplate from '~/vue_shared/components/multi_step_form_template.vue';
import SingleChoiceSelector from '~/vue_shared/components/single_choice_selector.vue';
@ -149,6 +150,13 @@ export default {
step2Component() {
return this.selectedProjectOption.component;
},
additionalBreadcrumb() {
return this.currentStep === 2 ? this.selectedProjectOption : null;
},
},
created() {
this.setStepFromLocationHash();
},
methods: {
@ -163,9 +171,20 @@ export default {
},
onBack() {
this.currentStep -= 1;
setLocationHash();
},
onNext() {
this.currentStep += 1;
setLocationHash(this.selectedProjectType);
},
setStepFromLocationHash() {
const hash = getLocationHash();
if (this.availableProjectTypes.some((type) => type.value === hash)) {
this.selectedProjectType = hash;
this.currentStep = 2;
} else {
this.currentStep = 1;
}
},
},
};
@ -173,7 +192,7 @@ export default {
<template>
<div>
<breadcrumb />
<breadcrumb :selected-project-type="additionalBreadcrumb" />
<multi-step-form-template
v-if="currentStep === 1"

View File

@ -10,6 +10,13 @@ export default {
SuperSidebarToggle,
},
inject: ['rootPath', 'projectsUrl', 'parentGroupUrl', 'parentGroupName'],
props: {
selectedProjectType: {
type: Object,
required: false,
default: null,
},
},
computed: {
breadcrumbs() {
const breadcrumbs = this.parentGroupUrl
@ -19,6 +26,14 @@ export default {
{ text: s__('ProjectsNew|Projects'), href: this.projectsUrl },
];
breadcrumbs.push({ text: s__('ProjectsNew|New project'), href: '#' });
if (this.selectedProjectType) {
breadcrumbs.push({
text: this.selectedProjectType.title,
href: `#${this.selectedProjectType.value}`,
});
}
return breadcrumbs;
},
},

View File

@ -7,6 +7,7 @@ module Mutations
graphql_name 'CiJobTokenScopeAutopopulateAllowlist'
include FindsProject
include Gitlab::InternalEventsTracking
authorize :admin_project
@ -22,6 +23,15 @@ module Mutations
def resolve(project_path:)
project = authorized_find!(project_path)
track_internal_event(
'ci_job_token_autopopulate_allowlist',
user: current_user,
project: project,
additional_properties: {
label: 'ui'
}
)
result = ::Ci::JobToken::ClearAutopopulatedAllowlistService.new(project, current_user).execute
result = ::Ci::JobToken::AutopopulateAllowlistService.new(project, current_user).execute if result.success?

View File

@ -4,6 +4,7 @@ module Ci
module JobToken
class AllowlistMigrationTask
include Gitlab::Utils::StrongMemoize
include Gitlab::InternalEventsTracking
attr_reader :only_ids, :exclude_ids
@ -77,6 +78,14 @@ module Ci
end
def perform_migration!(project)
track_internal_event(
'ci_job_token_autopopulate_allowlist',
user: @user,
project: project,
additional_properties: {
label: 'rake'
}
)
::Ci::JobToken::AutopopulateAllowlistService # rubocop:disable CodeReuse/ServiceClass -- This class is not an ActiveRecord model
.new(project, @user)
.unsafe_execute!

View File

@ -0,0 +1,20 @@
---
description: Tracks when the CI JobToken Allowlist Autopopulation action is performed for a project
internal_events: true
action: ci_job_token_autopopulate_allowlist
identifiers:
- project
- namespace
- user
additional_properties:
label:
description: The method of execution, e.g. UI or rake task
product_group: pipeline_security
product_categories:
- secrets_management
milestone: '17.10'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/182635
tiers:
- free
- premium
- ultimate

View File

@ -0,0 +1,23 @@
---
key_path: counts.count_total_allowlist_autopopulation
description: Count of times the CI JobToken Allowlist Autopopulation action has been performed
product_group: pipeline_security
product_categories:
- secrets_management
performance_indicator_type: []
value_type: number
status: active
milestone: '17.10'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/182635
time_frame:
- 28d
- 7d
- all
data_source: internal_events
data_category: optional
tiers:
- free
- premium
- ultimate
events:
- name: ci_job_token_autopopulate_allowlist

View File

@ -1,8 +1,9 @@
---
migration_job_name: BackfillProtectedEnvironmentDeployAccessLevelsProtectedEnvironmentGroupId
description: Backfills sharding key `protected_environment_deploy_access_levels.protected_environment_group_id` from `protected_environments`.
description: Backfills sharding key `protected_environment_deploy_access_levels.protected_environment_group_id`
from `protected_environments`.
feature_category: continuous_delivery
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/162834
milestone: '17.4'
queued_migration_version: 20240815083843
finalized_by: # version of the migration that finalized this BBM
finalized_by: '20250301231529'

View File

@ -8,14 +8,6 @@ description: Join table relating packages_packages with ci_pipelines
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/19796
milestone: '12.6'
gitlab_schema: gitlab_main_cell
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: package_id
table: packages_packages
sharding_key: project_id
belongs_to: package
desired_sharding_key_migration_job_name: BackfillPackagesBuildInfosProjectId
table_size: small
sharding_key:
project_id: projects

View File

@ -0,0 +1,49 @@
# frozen_string_literal: true
# See https://docs.gitlab.com/ee/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class MigrateVSCodeExtensionMarketplaceFeatureFlagToData < Gitlab::Database::Migration[2.2]
restrict_gitlab_migration gitlab_schema: :gitlab_main
milestone '17.10'
# NOTE: This approach is lovingly borrowed from this migration:
# https://gitlab.com/gitlab-org/gitlab/-/blob/eae8739ac9d5e4c8316fefb03507cdaeac452a0a/db/migrate/20250109055316_migrate_global_search_settings_in_application_settings.rb#L12
class ApplicationSetting < MigrationRecord
self.table_name = 'application_settings'
end
def up
# TODO: This migration should be noop'd when the feature flag is default enabled or removed
# why: This is not the desired default behavior, only the behavior we want to carry over for
# customers that have chosen to opt-in early by explicitly enabling the flag.
return unless extension_marketplace_flag_enabled?
ApplicationSetting.reset_column_information
application_setting = ApplicationSetting.last
return unless application_setting
application_setting.update_columns(
vscode_extension_marketplace: { enabled: true, preset: "open_vsx" },
updated_at: Time.current
)
end
def down
return unless extension_marketplace_flag_enabled?
application_setting = ApplicationSetting.last
return unless application_setting
application_setting.update_column(:vscode_extension_marketplace, {})
end
private
def extension_marketplace_flag_enabled?
# NOTE: It's possible the flag is only enabled for a specific user, but in that case we'll assume
# the instance admin didn't want the feature globally available and we won't initialize the data.
Feature.enabled?(:web_ide_extensions_marketplace, nil) && Feature.enabled?(:vscode_web_ide, nil)
end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
class ValidatePackagesBuildInfosProjectIdNotNull < Gitlab::Database::Migration[2.2]
milestone '17.10'
def up
validate_not_null_constraint :packages_build_infos, :project_id, constraint_name: 'check_d979c653e1'
end
def down
# no-op
end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
class FinalizeHkBackfillProtectedEnvironmentDeployAccessLevelsProtected8203 < Gitlab::Database::Migration[2.2]
milestone '17.10'
disable_ddl_transaction!
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
def up
ensure_batched_background_migration_is_finished(
job_class_name: 'BackfillProtectedEnvironmentDeployAccessLevelsProtectedEnvironmentGroupId',
table_name: :protected_environment_deploy_access_levels,
column_name: :id,
job_arguments: [:protected_environment_group_id, :protected_environments, :group_id, :protected_environment_id],
finalize: true
)
end
def down; end
end

View File

@ -0,0 +1 @@
3e3f921cc622a45637f3855bf30aac412e2517dba3cffd15603db91b3574dc76

View File

@ -0,0 +1 @@
29a25fcc2d11173ff1b1b056c962f5ecff287383abe27c5760910d34d2d91d72

View File

@ -0,0 +1 @@
6f9a848970dda7dfe012062750baf6e32e0a7c6eaff41c43d8e40c7c71fdb48b

View File

@ -17736,7 +17736,8 @@ CREATE TABLE packages_build_infos (
id bigint NOT NULL,
package_id bigint NOT NULL,
pipeline_id bigint,
project_id bigint
project_id bigint,
CONSTRAINT check_d979c653e1 CHECK ((project_id IS NOT NULL))
);
CREATE SEQUENCE packages_build_infos_id_seq
@ -27405,9 +27406,6 @@ ALTER TABLE group_import_states
ALTER TABLE packages_packages
ADD CONSTRAINT check_d6301aedeb CHECK ((char_length(status_message) <= 255)) NOT VALID;
ALTER TABLE packages_build_infos
ADD CONSTRAINT check_d979c653e1 CHECK ((project_id IS NOT NULL)) NOT VALID;
ALTER TABLE sprints
ADD CONSTRAINT check_df3816aed7 CHECK ((due_date IS NOT NULL)) NOT VALID;

View File

@ -1442,122 +1442,120 @@ Returns:
Example response:
```json
[
{
"id": 1,
"blocking_merge_request": {
"id": 145,
"iid": 12,
"project_id": 7,
"title": "Interesting MR",
"description": "Does interesting things.",
"state": "opened",
"created_at": "2024-07-05T21:29:11.172Z",
"updated_at": "2024-07-05T21:29:11.172Z",
"merged_by": null,
"merge_user": null,
"merged_at": null,
"merge_after": "2018-09-07T11:16:00.000Z",
"closed_by": null,
"closed_at": null,
"target_branch": "master",
"source_branch": "v2.x",
"user_notes_count": 0,
"upvotes": 0,
"downvotes": 0,
"author": {
"id": 2,
"username": "aiguy123",
"name": "AI GUY",
"state": "active",
"locked": false,
"avatar_url": "https://www.gravatar.com/avatar/0?s=80&d=identicon",
"web_url": "https://localhost/aiguy123"
},
"assignees": [
{
"id": 2,
"username": "aiguy123",
"name": "AI GUY",
"state": "active",
"locked": false,
"avatar_url": "https://www.gravatar.com/avatar/0?s=80&d=identicon",
"web_url": "https://localhost/aiguy123"
}
],
"assignee": {
"id": 2,
"username": "aiguy123",
"name": "AI GUY",
"state": "active",
"locked": false,
"avatar_url": "https://www.gravatar.com/avatar/0?s=80&d=identicon",
"web_url": "https://localhost/aiguy123"
},
"reviewers": [
{
"id": 2,
"username": "aiguy123",
"name": "AI GUY",
"state": "active",
"locked": false,
"avatar_url": "https://www.gravatar.com/avatar/0?s=80&d=identicon",
"web_url": "https://localhost/aiguy123"
},
{
"id": 1,
"username": "root",
"name": "Administrator",
"state": "active",
"locked": false,
"avatar_url": "https://www.gravatar.com/avatar/0?s=80&d=identicon",
"web_url": "https://localhost/root"
}
],
"source_project_id": 7,
"target_project_id": 7,
"labels": [],
"draft": false,
"imported": false,
"imported_from": "none",
"work_in_progress": false,
"milestone": null,
"merge_when_pipeline_succeeds": false,
"merge_status": "unchecked",
"detailed_merge_status": "unchecked",
"sha": "ce7e4f2d0ce13cb07479bb39dc10ee3b861c08a6",
"merge_commit_sha": null,
"squash_commit_sha": null,
"discussion_locked": null,
"should_remove_source_branch": null,
"force_remove_source_branch": true,
"prepared_at": null,
"reference": "!12",
"references": {
"short": "!12",
"relative": "!12",
"full": "my-group/my-project!12"
},
"web_url": "https://localhost/my-group/my-project/-/merge_requests/12",
"time_stats": {
"time_estimate": 0,
"total_time_spent": 0,
"human_time_estimate": null,
"human_total_time_spent": null
},
"squash": false,
"squash_on_merge": false,
"task_completion_status": {
"count": 0,
"completed_count": 0
},
"has_conflicts": false,
"blocking_discussions_resolved": true,
"approvals_before_merge": null
{
"id": 1,
"blocking_merge_request": {
"id": 145,
"iid": 12,
"project_id": 7,
"title": "Interesting MR",
"description": "Does interesting things.",
"state": "opened",
"created_at": "2024-07-05T21:29:11.172Z",
"updated_at": "2024-07-05T21:29:11.172Z",
"merged_by": null,
"merge_user": null,
"merged_at": null,
"merge_after": "2018-09-07T11:16:00.000Z",
"closed_by": null,
"closed_at": null,
"target_branch": "master",
"source_branch": "v2.x",
"user_notes_count": 0,
"upvotes": 0,
"downvotes": 0,
"author": {
"id": 2,
"username": "aiguy123",
"name": "AI GUY",
"state": "active",
"locked": false,
"avatar_url": "https://www.gravatar.com/avatar/0?s=80&d=identicon",
"web_url": "https://localhost/aiguy123"
},
"project_id": 7
}
]
"assignees": [
{
"id": 2,
"username": "aiguy123",
"name": "AI GUY",
"state": "active",
"locked": false,
"avatar_url": "https://www.gravatar.com/avatar/0?s=80&d=identicon",
"web_url": "https://localhost/aiguy123"
}
],
"assignee": {
"id": 2,
"username": "aiguy123",
"name": "AI GUY",
"state": "active",
"locked": false,
"avatar_url": "https://www.gravatar.com/avatar/0?s=80&d=identicon",
"web_url": "https://localhost/aiguy123"
},
"reviewers": [
{
"id": 2,
"username": "aiguy123",
"name": "AI GUY",
"state": "active",
"locked": false,
"avatar_url": "https://www.gravatar.com/avatar/0?s=80&d=identicon",
"web_url": "https://localhost/aiguy123"
},
{
"id": 1,
"username": "root",
"name": "Administrator",
"state": "active",
"locked": false,
"avatar_url": "https://www.gravatar.com/avatar/0?s=80&d=identicon",
"web_url": "https://localhost/root"
}
],
"source_project_id": 7,
"target_project_id": 7,
"labels": [],
"draft": false,
"imported": false,
"imported_from": "none",
"work_in_progress": false,
"milestone": null,
"merge_when_pipeline_succeeds": false,
"merge_status": "unchecked",
"detailed_merge_status": "unchecked",
"sha": "ce7e4f2d0ce13cb07479bb39dc10ee3b861c08a6",
"merge_commit_sha": null,
"squash_commit_sha": null,
"discussion_locked": null,
"should_remove_source_branch": null,
"force_remove_source_branch": true,
"prepared_at": null,
"reference": "!12",
"references": {
"short": "!12",
"relative": "!12",
"full": "my-group/my-project!12"
},
"web_url": "https://localhost/my-group/my-project/-/merge_requests/12",
"time_stats": {
"time_estimate": 0,
"total_time_spent": 0,
"human_time_estimate": null,
"human_total_time_spent": null
},
"squash": false,
"squash_on_merge": false,
"task_completion_status": {
"count": 0,
"completed_count": 0
},
"has_conflicts": false,
"blocking_discussions_resolved": true,
"approvals_before_merge": null
},
"project_id": 7
}
```
## Get merge request blocked MRs

View File

@ -160,7 +160,7 @@ The cost factors on GitLab Self-Managed [are different](../../administration/cic
Community contributors can use up to 300,000 minutes on instance runners when contributing to open source projects
maintained by GitLab. The maximum of 300,000 minutes would only be possible if contributing exclusively to projects
[part of the GitLab product](https://handbook.gitlab.com/handbook/engineering/metrics/#projects-that-are-part-of-the-product).
[part of the GitLab product](https://handbook.gitlab.com/handbook/product/groups/product-analysis/engineering/metrics/#projects-that-are-part-of-the-product).
The total number of minutes available on instance runners is reduced by the compute minutes used by pipelines from
other projects. The 300,000 minutes applies to all GitLab.com tiers.

View File

@ -1349,6 +1349,13 @@ link outside it.
- In [GitLab Runner 13.0 and later](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2620),
[`doublestar.Glob`](https://pkg.go.dev/github.com/bmatcuk/doublestar@v1.2.2?tab=doc#Match) (a short sketch follows this list).
- In GitLab Runner 12.10 and earlier, [`filepath.Match`](https://pkg.go.dev/path/filepath#Match).
- For [GitLab Pages job](#pages):
- In [GitLab 17.10 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/428018),
the [`pages:pages.publish`](#pagespagespublish) path is automatically appended to `artifacts:paths`,
so you don't need to specify it again.
- In [GitLab 17.10 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/428018),
when the [`pages:pages.publish`](#pagespagespublish) path is not specified,
the `public` directory is automatically appended to `artifacts:paths`.
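As a minimal, hypothetical sketch of the glob support noted in the first bullets (the job name, script, and paths are placeholders, not part of this commit):
```yaml
collect-reports:
  stage: test
  script:
    - bundle exec rspec
  artifacts:
    paths:
      # doublestar.Glob patterns are supported in GitLab Runner 13.0 and later
      - "coverage/**/*.html"
      - "reports/junit-*.xml"
```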
CI/CD variables [are supported](../variables/where_variables_can_be_used.md#gitlab-ciyml-file).
@ -3594,13 +3601,10 @@ You must:
**Example of `pages`**:
```yaml
pages:
pages: # specifies that this is a Pages job and publishes the default public directory
stage: deploy
script:
- mv my-html-content public
artifacts:
paths:
- public
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
environment: production
@ -3616,6 +3620,7 @@ This directory is exported as an artifact and published with GitLab Pages.
- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/415821) in GitLab 16.1.
- [Changed](https://gitlab.com/gitlab-org/gitlab/-/issues/500000) to allow variables when passed to `publish` property in GitLab 17.9.
- [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/428018) the `publish` property under the `pages` keyword in GitLab 17.9.
- [Appended](https://gitlab.com/gitlab-org/gitlab/-/issues/428018) the `pages:pages.publish` path automatically to `artifacts:paths` in GitLab 17.10.
{{< /history >}}
@ -3625,6 +3630,9 @@ The top-level `publish` keyword is deprecated as of GitLab 17.9 and must now be
**Keyword type**: Job keyword. You can use it only as part of a `pages` job.
**Supported values**: A path to a directory containing the Pages content.
In [GitLab 17.10 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/428018),
if the path is not specified, the default `public` directory is used; if it is specified,
the path is automatically appended to [`artifacts:paths`](#artifactspaths).
**Example of `pages.publish`**:
@ -3633,11 +3641,8 @@ pages:
stage: deploy
script:
- npx @11ty/eleventy --input=path/to/eleventy/root --output=dist
artifacts:
paths:
- dist
pages:
publish: dist
publish: dist # this path is automatically appended to artifacts:paths
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
environment: production
@ -3655,11 +3660,8 @@ pages:
script:
- mkdir -p $CUSTOM_FOLDER/$CUSTOM_PATH
- cp -r public $CUSTOM_FOLDER/$CUSTOM_SUBFOLDER
artifacts:
paths:
- $CUSTOM_FOLDER/$CUSTOM_SUBFOLDER
pages:
publish: $CUSTOM_FOLDER/$CUSTOM_SUBFOLDER
publish: $CUSTOM_FOLDER/$CUSTOM_SUBFOLDER # this path is automatically appended to artifacts:paths
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
variables:
@ -3707,11 +3709,8 @@ pages:
stage: deploy
script:
- echo "Pages accessible through ${CI_PAGES_URL}/${CI_COMMIT_BRANCH}"
pages:
pages: # specifies that this is a Pages job and publishes the default public directory
path_prefix: "$CI_COMMIT_BRANCH"
artifacts:
paths:
- public
```
In this example, a different pages deployment is created for each branch.
@ -3761,11 +3760,8 @@ pages:
stage: deploy
script:
- echo "Pages accessible through ${CI_PAGES_URL}"
pages:
pages: # specifies that this is a Pages job and publishes the default public directory
expire_in: 1 week
artifacts:
paths:
- public
```
### `parallel`

View File

@ -39,7 +39,7 @@ At a high level, we could map each category with an equivalent non-VueX code pat
- Static properties: Provide/Inject from Vue API.
- Reactive mutable properties: Vue events and props, Apollo Client.
- Getters: Utils functions, Apollo `update` hook, computed properties.
- Getters: utility functions, Apollo `update` hook, computed properties.
- API data: Apollo Client.
Let's go through an example. In each section we refer to this state and slowly go through migrating it fully:

View File

@ -184,6 +184,7 @@ Prerequisites:
specified in a security policy project using the `content` type. To do so, enable the setting **Pipeline execution policies** in the general settings of the security policy project.
Enabling this setting grants the user who triggered the pipeline access to
read the CI/CD configuration file enforced by the pipeline execution policy. This setting does not grant the user access to any other parts of the project where the configuration file is stored.
For more details, see [Grant access automatically](#grant-access-automatically).
### `skip_ci` type
@ -210,6 +211,56 @@ To customize policy enforcement, you can define a policy's scope to either inclu
specified projects, groups, or compliance framework labels. For more details, see
[Scope](_index.md#scope).
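As a hedged sketch of how a scoped policy might look (the group path, file name, and project ID are placeholders; see the linked Scope page for the full schema):
```yaml
pipeline_execution_policy:
  - name: Scoped pipeline execution policy
    description: Enforced on all linked projects except one
    enabled: true
    pipeline_config_strategy: inject_policy
    content:
      include:
        - project: my-group/my-security-policy-project
          file: policy-ci.yml
    policy_scope:
      projects:
        excluding:
          - id: 42  # placeholder ID of a project exempt from enforcement
```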
## Manage access to the CI/CD configuration
When you enforce pipeline execution policies on a project, users that trigger pipelines must have at least read-only access to the project that contains the policy CI/CD configuration. You can grant access to the project manually or automatically.
### Grant access manually
To allow users or groups to run pipelines with enforced pipeline execution policies, you can invite them to the project that contains the policy CI/CD configuration.
### Grant access automatically
You can automatically grant access to the policy CI/CD configuration for all users who run pipelines in projects with enforced pipeline execution policies.
Prerequisites:
- Make sure the pipeline execution policy CI/CD configuration is stored in a security policy project.
- In the general settings of the security policy project, enable the **Pipeline execution policies** setting.
If you don't yet have a security policy project and you are creating the first pipeline execution policy, create an empty project and link it as a security policy project. To link the project:
- In the group or project where you want to enforce the policy, select **Secure** > **Policies** > **Edit policy project**, and select the security policy project.
The project becomes a security policy project, and the setting becomes available.
#### Configuration
1. In the policy project, select **Settings** > **General** > **Visibility, project features, permissions**.
1. Enable the setting **Pipeline execution policies: Grant access to the CI/CD configurations for projects linked to this security policy project as the source for security policies.**
1. In the policy project, create a file for the policy CI/CD configuration.
```yaml
# policy-ci.yml
policy-job:
script: ...
```
1. In the group or project where you want to enforce the policy, create a pipeline execution policy and specify the CI/CD configuration file for the security policy project.
```yaml
pipeline_execution_policy:
- name: My pipeline execution policy
description: Enforces CI/CD jobs
enabled: true
pipeline_config_strategy: inject_policy
content:
include:
- project: my-group/my-security-policy-project
file: policy-ci.yml
```
## Pipeline configuration strategies
Pipeline configuration strategy defines the method for merging the policy configuration with the project pipeline. Pipeline execution policies execute the jobs defined in the `.gitlab-ci.yml` file in isolated pipelines, which are merged into the pipelines of the target projects.
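As a rough sketch under the assumptions above (file contents are illustrative only): with the `inject_policy` strategy, the policy jobs run alongside the jobs from the project's own `.gitlab-ci.yml`, while `override_project_ci` runs only the policy configuration:
```yaml
# policy-ci.yml, stored in the security policy project
policy-secret-scan:
  stage: test
  script:
    - echo "Run the enforced scanner here"

# .gitlab-ci.yml in the target project
build:
  stage: build
  script:
    - make build

# With pipeline_config_strategy: inject_policy, the resulting pipeline runs
# both `build` and `policy-secret-scan`; with override_project_ci, only the
# policy jobs run.
```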
@ -401,7 +452,7 @@ the only jobs that run are the pipeline execution policy jobs.
{{< history >}}
- Updated handling of workflow rules [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/175088) in GitLab 17.8 [with a flag](../../../administration/feature_flags.md) named `policies_always_override_project_ci`. Enabled by default.
- Updated handling of workflow rules [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/175088) in GitLab 17.8 [with a flag](../../../administration/feature_flags.md) named `policies_always_override_project_ci`. Enabled by default.
- Updated handling of workflow rules [generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/512877) in GitLab 17.10. Feature flag `policies_always_override_project_ci` removed.
{{< /history >}}
@ -619,7 +670,7 @@ pipeline_execution_policy:
pipeline_config_strategy: override_project_ci
content:
include:
- project: verify-issue-469027/policy-ci
- project: my-group/pipeline-execution-ci-project
file: policy-ci.yml
ref: main # optional
policy_scope:
@ -734,7 +785,7 @@ pipeline_execution_policy:
- name: Pipeline execution policy
description: ''
enabled: true
pipeline_config_strategy: inject_ci
pipeline_config_strategy: inject_policy
content:
include:
- project: my-group/pipeline-execution-ci-project

View File

@ -13,9 +13,12 @@ title: Contribution analytics
{{< /details >}}
Contribution analytics provide an overview of the
[contribution events](../../profile/contributions_calendar.md#user-contribution-events) your group's members made in the last week, month, or three months.
[contribution events](../../profile/contributions_calendar.md#user-contribution-events)
your group's members made in the last week, month, or three months.
Interactive bar charts and a detailed table show contribution events
(such as push events, issues, and merge requests) by group member.
(push events, issues, and merge requests) by group member.
![Contribution analytics bar graphs](img/contribution_analytics_push_v17_7.png)
Use contribution analytics to get insights into team activity and individual performance, and use this information for:
@ -40,53 +43,23 @@ To view contribution analytics:
1. On the left sidebar, select **Search or go to** and find your group.
1. Select **Analyze > Contribution analytics**.
1. Optional. Filter the results:
Three bar charts and a table illustrate the number of contributions made by each group member:
- To view contribution analytics for last week, month, or three months, select one of the three tabs.
The selected time period applies to all charts and the table.
- To zoom in on a bar chart to display only a subset of group members,
select the sliders ({{< icon name="status-paused" >}}) below the chart and slide them along the axis.
- To sort the contributions table by a column, select the column header or the chevron
({{< icon name="chevron-lg-down" >}} for descending order, {{< icon name="chevron-lg-up" >}} for ascending order).
- Push events
- Created, merged, and closed merge requests
- Created and closed issues
1. Optional. To view a group member's contributions, either:
![Contribution analytics bar graphs](img/contribution_analytics_push_v17_7.png)
- On the **Contribution analytics** bar charts, hover over the bar with the member's name.
- In the **Contributions per group member** table, select the member's name.
The member's GitLab profile is displayed, and you can explore their [contributions calendar](../../../user/profile/contributions_calendar.md).
To retrieve metrics for user contributions, you can also use the [GraphQL API](../../../api/graphql/reference/_index.md#groupcontributions).
### View a member's contributions
You can view the number of events associated with a specific group member.
To view a member's contributions:
1. On the **Contribution analytics** bar charts, hover over the bar with the member's name.
1. To view individual contributions, in the **Contributions per group member** table, select the member's name. The member's GitLab profile is displayed, and you can explore their [contributions calendar](../../../user/profile/contributions_calendar.md).
### Zoom in on a chart
You can zoom in on a bar chart to display only a subset of group members.
To do this, select the sliders ({{< icon name="status-paused" >}}) below the chart and slide them along the axis.
### Sort contributions
Contributions per group member are also displayed in tabular format.
The table columns include the members' names and the number of contributions for different events.
To sort the table by a column, select the column header or the chevron ({{< icon name="chevron-lg-down" >}}
for descending order, {{< icon name="chevron-lg-up" >}} for ascending order).
## Change the time period
You can display contribution analytics over different time periods:
- Last week (default)
- Last month
- Last three months
To change the time period of the contribution analytics, select one of the three tabs
under **Contribution Analytics**.
The selected time period applies to all charts and the table.
## Contribution analytics with ClickHouse
On GitLab.com, contribution analytics run through the ClickHouse Cloud cluster.

View File

@ -219,11 +219,8 @@ deploy-pages:
stage: deploy
script:
- ...
pages: # specifies that this is a Pages job
pages: # specifies that this is a Pages job and publishes the default public directory
expire_in: 1 week
artifacts:
paths:
- public
```
Expired deployments are stopped by a cron job that runs every 10 minutes.
@ -287,10 +284,7 @@ deploy-my-pages-site:
stage: deploy
script:
- npm run build
pages: true # specifies that this is a Pages job
artifacts:
paths:
- public
pages: true # specifies that this is a Pages job and publishes the default public directory
```
For example, using a hash:
@ -300,11 +294,8 @@ deploy-pages-review-app:
stage: deploy
script:
- npm run build
pages: # specifies that this is a Pages job
pages: # specifies that this is a Pages job and publishes the default public directory
path_prefix: '_staging'
artifacts:
paths:
- public
```
If the `pages` property of a job named `pages` is set to `false`, no

View File

@ -149,9 +149,17 @@ deploy-pages:
## Specify the `public` directory for artifacts
Now that Jekyll has output the files to the `public` directory,
the runner needs to know where to get them. The artifacts are stored
in the `public` directory:
{{< history >}}
- Automatically appending `pages:pages.publish` path to `artifacts:paths` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/428018) in GitLab 17.10 for Pages jobs only.
{{< /history >}}
Now that Jekyll has output the files to the `public` directory, the runner needs to know where
to get them. In GitLab 17.10 and later, for Pages jobs only, the `public` directory is
appended automatically to [`artifacts:paths`](../../../../ci/yaml/_index.md#artifactspaths)
when the [`pages:pages.publish`](../../../../ci/yaml/_index.md#pagespagespublish) path
is not explicitly specified:
```yaml
deploy-pages:
@ -159,10 +167,7 @@ deploy-pages:
- gem install bundler
- bundle install
- bundle exec jekyll build -d public
pages: true # specifies that this is a Pages job
artifacts:
paths:
- public
pages: true # specifies that this is a Pages job and publishes the default public directory
```
Your `.gitlab-ci.yml` file should now look like this:
@ -176,10 +181,7 @@ deploy-pages:
- gem install bundler
- bundle install
- bundle exec jekyll build -d public
pages: true # specifies that this is a Pages job
artifacts:
paths:
- public
pages: true # specifies that this is a Pages job and publishes the default public directory
```
## Deploy and view your website
@ -224,10 +226,7 @@ deploy-pages:
- gem install bundler
- bundle install
- bundle exec jekyll build -d public
pages: true # specifies that this is a Pages job
artifacts:
paths:
- public
pages: true # specifies that this is a Pages job and publishes the default public directory
```
Then configure the pipeline to run the job for the
@ -246,10 +245,7 @@ deploy-pages:
- gem install bundler
- bundle install
- bundle exec jekyll build -d public
pages: true # specifies that this is a Pages job
artifacts:
paths:
- public
pages: true # specifies that this is a Pages job and publishes the default public directory
rules:
- if: $CI_COMMIT_BRANCH == "main"
```
@ -280,10 +276,7 @@ deploy-pages:
- gem install bundler
- bundle install
- bundle exec jekyll build -d public
pages: true # specifies that this is a Pages job
artifacts:
paths:
- public
pages: true # specifies that this is a Pages job and publishes the default public directory
rules:
- if: $CI_COMMIT_BRANCH == "main"
environment: production
@ -306,10 +299,7 @@ deploy-pages:
- gem install bundler
- bundle install
- bundle exec jekyll build -d public
pages: true # specifies that this is a Pages job
artifacts:
paths:
- public
pages: true # specifies that this is a Pages job and publishes the default public directory
rules:
- if: $CI_COMMIT_BRANCH == "main"
environment: production
@ -361,10 +351,7 @@ deploy-pages:
stage: deploy
script:
- bundle exec jekyll build -d public
pages: true # specifies that this is a Pages job
artifacts:
paths:
- public
pages: true # specifies that this is a Pages job and publishes the default public directory
rules:
- if: $CI_COMMIT_BRANCH == "main"
environment: production
@ -407,10 +394,7 @@ deploy-pages:
stage: deploy
script:
- bundle exec jekyll build -d public
pages: true # specifies that this is a Pages job
artifacts:
paths:
- public
pages: true # specifies that this is a Pages job and publishes the default public directory
rules:
- if: $CI_COMMIT_BRANCH == "main"
environment: production

View File

@ -124,10 +124,7 @@ deploy-pages:
- mkdir .public
- cp -r * .public
- mv .public public
pages: true # specifies that this is a Pages job
artifacts:
paths:
- public
pages: true # specifies that this is a Pages job and publishes the default public directory
rules:
- if: $CI_COMMIT_BRANCH == "main"
```
@ -168,10 +165,7 @@ deploy-pages:
script:
- gem install jekyll
- jekyll build -d public/
pages: true # specifies that this is a Pages job
artifacts:
paths:
- public
pages: true # specifies that this is a Pages job and publishes the default public directory
rules:
- if: '$CI_COMMIT_REF_NAME == "pages"'
```
@ -380,10 +374,7 @@ deploy-pages:
ARTIFACT_COMPRESSION_LEVEL: "fastest"
script:
- echo "Deploying pages"
pages: true # specifies that this is a Pages job
artifacts:
paths:
- public
pages: true # specifies that this is a Pages job and publishes the default public directory
environment: production
```

View File

@ -21,6 +21,7 @@ title: GitLab Pages parallel deployments
- [Changed](https://gitlab.com/gitlab-org/gitlab/-/issues/507423) to allow periods in `path_prefix` in GitLab 17.8.
- [Changed](https://gitlab.com/gitlab-org/gitlab/-/issues/500000) to allow variables when passed to `publish` property in GitLab 17.9.
- [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/487161) in GitLab 17.9. Feature flag `pages_multiple_versions_setting` removed.
- Automatically appending `pages:pages.publish` path to `artifacts:paths` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/428018) in GitLab 17.10 for Pages jobs only.
{{< /history >}}
@ -55,11 +56,8 @@ To create a parallel deployment:
stage: deploy
script:
- echo "Pages accessible through ${CI_PAGES_URL}/${CI_COMMIT_BRANCH}"
pages:
pages: # specifies that this is a Pages job and publishes the default public directory
path_prefix: "$CI_COMMIT_BRANCH"
artifacts:
paths:
- public
```
The `path_prefix` value:
@ -172,11 +170,8 @@ deploy-pages:
- echo "Pages accessible through ${CI_PAGES_URL}"
variables:
PAGES_PREFIX: "" # No prefix by default (main)
pages: # specifies that this is a Pages job
pages: # specifies that this is a Pages job and publishes the default public directory
path_prefix: "$PAGES_PREFIX"
artifacts:
paths:
- public
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH # Run on default branch (with default PAGES_PREFIX)
- if: $CI_COMMIT_BRANCH == "staging" # Run on main (with default PAGES_PREFIX)
@ -208,14 +203,11 @@ deploy-pages:
- echo "Pages accessible through ${CI_PAGES_URL}"
variables:
PAGES_PREFIX: "" # no prefix by default (master)
pages: # specifies that this is a Pages job
pages: # specifies that this is a Pages job and publishes the default public directory
path_prefix: "$PAGES_PREFIX"
environment:
name: "Pages ${PAGES_PREFIX}"
url: $CI_PAGES_URL
artifacts:
paths:
- public
rules:
- if: $CI_COMMIT_BRANCH == "staging" # ensure to run on master (with default PAGES_PREFIX)
variables:

View File

@ -131,10 +131,7 @@ deploy-pages:
script:
- npm run build
- mv out/* public
pages: true # specifies that this is a Pages job
artifacts:
paths:
- public
pages: true # specifies that this is a Pages job and publishes the default public directory
```
The previous YAML example uses [user-defined job names](_index.md#user-defined-job-names).

View File

@ -4,15 +4,35 @@
module Gitlab
module Database
module Sos
TASKS = [
DURATION = 5.minutes
SAMPLING_INTERVAL = 3.seconds
SINGLE_TASKS = [
Sos::ArSchemaDump,
Sos::DbStatsActivity
].freeze
LONG_RUNNING_TASKS = [
Sos::DbLoopStatsActivity
].freeze
def self.run(output_file)
Output.writing(output_file, mode: :directory) do |output|
Gitlab::Database::EachDatabase.each_connection(include_shared: false) do |conn, name|
TASKS.each { |t| t.new(conn, name, output).run }
SINGLE_TASKS.each do |t|
t.new(conn, name, output).run
end
end
duration = DURATION.from_now
while duration.future?
Gitlab::Database::EachDatabase.each_connection(include_shared: false) do |conn, name|
LONG_RUNNING_TASKS.each do |t|
t.new(conn, name, output).run
end
end
sleep(SAMPLING_INTERVAL)
end
end
end

View File

@ -19,17 +19,23 @@ module Gitlab
[]
end
def write_to_csv(query_name, result)
file_path = File.join(name, "#{query_name}.csv")
def write_to_csv(query_name, result, include_timestamp: false)
timestamp = Time.zone.now.strftime("%Y%m%d_%H%M%S")
file_path = if include_timestamp
File.join(name, query_name.to_s, "#{timestamp}.csv")
else
File.join(name, "#{query_name}.csv")
end
output.write_file(file_path) do |f|
CSV.open(f, 'w+') do |csv|
CSV.open(f, "w+") do |csv|
csv << result.fields
result.each { |row| csv << row.values }
end
end
rescue StandardError => e
Gitlab::AppLogger.error("Error writing CSV for DB:#{name} query:#{query_name} error message:#{e.message}")
Gitlab::AppLogger.error("Error writing CSV for DB:#{name} query:#{query_name} error_message:#{e.message}")
end
end
end

View File

@ -0,0 +1,82 @@
# frozen_string_literal: true
require 'csv'
module Gitlab
module Database
module Sos
class DbLoopStatsActivity < BaseDbStatsHandler
QUERIES = {
pg_stat_user_tables: <<~SQL,
SELECT now() AS timestamp, *
FROM pg_stat_user_tables;
SQL
pg_stat_user_indexes: <<~SQL,
SELECT now() AS timestamp, *
FROM pg_stat_user_indexes;
SQL
pg_statio_user_tables: <<~SQL,
SELECT now() AS timestamp, *
FROM pg_statio_user_tables;
SQL
pg_statio_user_indexes: <<~SQL,
SELECT now() AS timestamp, *
FROM pg_statio_user_indexes;
SQL
table_relation_size: <<~SQL.squish,
SELECT
now() AS timestamp,
n.nspname || '.' || c.relname AS "relation",
pg_total_relation_size(c.oid) AS "total_size_bytes"
FROM
pg_class c
JOIN
pg_namespace n ON n.oid = c.relnamespace
WHERE
n.nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
ORDER BY
pg_total_relation_size(c.oid) DESC;
SQL
pg_lock_stat_activity: <<~SQL.squish
SELECT
now() AS timestamp,
a.pid,
a.usename,
a.application_name,
a.client_addr,
a.backend_start,
a.query_start,
a.state,
a.wait_event_type,
a.wait_event,
a.query,
l.locktype,
l.mode,
l.granted,
l.relation::regclass AS locked_relation
FROM
pg_stat_activity a
LEFT JOIN
pg_locks l ON l.pid = a.pid
WHERE
a.state != 'idle'
ORDER BY
a.query_start DESC;
SQL
}.freeze
def run
QUERIES.each do |query_name, query|
result = execute_query(query)
write_to_csv(query_name, result, include_timestamp: true)
end
end
end
end
end
end

View File

@ -1,4 +1,4 @@
ARG GDK_SHA=62e5ecefa52de2b263f7bf1dd5bf6d3e189eef89
ARG GDK_SHA=931439442a9f64f20b9a173a81f8306c7c3a4a66
# Use tag prefix when running on 'stable' branch to make sure 'protected' image is used which is not deleted by registry cleanup
ARG GDK_BASE_TAG_PREFIX

View File

@ -122,13 +122,18 @@ end
RSpec.shared_examples 'merge train pipeline' do
let(:ci_merge_request_event_type) { 'merge_train' }
let(:expected_job_names) do
%w[
dont-interrupt-me
pre-merge-checks
]
end
it "succeeds with expected job" do
expect(pipeline.yaml_errors).to be_nil
expect(pipeline.errors).to be_empty
expect(pipeline.status).to eq('created')
expect(jobs).to include('pre-merge-checks')
expect(jobs).not_to include('upload-frontend-fixtures')
expect(jobs).to eq(expected_job_names)
end
end

View File

@ -2,6 +2,7 @@ import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import * as urlUtils from '~/lib/utils/url_utility';
import { setGlobalAlerts } from '~/lib/utils/global_alerts';
import { setLocationHash } from '~/lib/utils/url_utility';
import { validURLs, invalidURLs } from './mock_data';
jest.mock('~/lib/utils/global_alerts', () => ({
@ -361,6 +362,50 @@ describe('URL utility', () => {
});
});
describe('setLocationHash', () => {
let originalLocation;
let originalHistory;
const mockPathname = '/some/path';
const mockSearch = '?some=query';
beforeEach(() => {
originalLocation = window.location;
originalHistory = window.history;
Object.defineProperty(window, 'location', {
writable: true,
value: {
hash: jest.fn(),
pathname: mockPathname,
search: mockSearch,
},
});
Object.defineProperty(window, 'history', {
writable: true,
value: {
replaceState: jest.fn(),
},
});
});
afterEach(() => {
window.location = originalLocation;
window.history = originalHistory;
});
it('when hash is undefined', () => {
setLocationHash();
expect(window.history.replaceState).toHaveBeenCalledWith({}, '', mockPathname + mockSearch);
});
it('when hash is a string', () => {
setLocationHash('hash-value');
expect(window.location.hash).toBe('hash-value');
});
});
describe('doesHashExistInUrl', () => {
beforeEach(() => {
setWindowLocation('#note_1');

View File

@ -1,10 +1,13 @@
import { GlAlert } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { getLocationHash, setLocationHash } from '~/lib/utils/url_utility';
import App from '~/projects/new_v2/components/app.vue';
import FormBreadcrumb from '~/projects/new_v2/components/form_breadcrumb.vue';
import CommandLine from '~/projects/new_v2/components/command_line.vue';
import SingleChoiceSelector from '~/vue_shared/components/single_choice_selector.vue';
jest.mock('~/lib/utils/url_utility');
describe('New project creation app', () => {
let wrapper;
@ -61,6 +64,37 @@ describe('New project creation app', () => {
expect(findSingleChoiceSelector().props('checked')).toBe('blank_project');
});
describe('when location hash is present', () => {
afterEach(() => {
getLocationHash.mockReset();
});
describe('and is valid projectType', () => {
beforeEach(() => {
getLocationHash.mockReturnValue('cicd_for_external_repo');
createComponent({ isCiCdAvailable: true });
});
it('renders step 2 component from hash', () => {
expect(findStep2().exists()).toBe(true);
expect(findStep2().props('option').value).toBe('cicd_for_external_repo');
});
});
describe('and is invalid projectType', () => {
beforeEach(() => {
getLocationHash.mockReturnValue('nonexistent');
createComponent();
});
it('renders step 1', () => {
expect(findStep1().exists()).toBe(true);
});
});
});
describe('personal namespace project', () => {
it('starts with personal namespace when no namespaceId provided', () => {
createComponent();
@ -133,6 +167,10 @@ describe('New project creation app', () => {
expect(findStep2().props('option').value).toBe('create_from_template');
});
it('updates location hash', () => {
expect(setLocationHash).toHaveBeenLastCalledWith('create_from_template');
});
describe('and "Back" event is emitted from step 2', () => {
beforeEach(() => {
findStep2().vm.$emit('back');
@ -145,6 +183,10 @@ describe('New project creation app', () => {
it('hides step 2 component', () => {
expect(findStep2().exists()).toBe(false);
});
it('removes location hash', () => {
expect(setLocationHash).toHaveBeenLastCalledWith();
});
});
});
});

View File

@ -5,12 +5,15 @@ import FormBreadcrumb from '~/projects/new_v2/components/form_breadcrumb.vue';
describe('New project form breadcrumbs', () => {
let wrapper;
const createComponent = (props = {}) => {
const createComponent = ({ props = {}, provide = {} } = {}) => {
wrapper = shallowMountExtended(FormBreadcrumb, {
propsData: {
...props,
},
provide: {
rootPath: '/',
projectsUrl: '/dashboard/projects',
...props,
...provide,
},
});
};
@ -18,7 +21,7 @@ describe('New project form breadcrumbs', () => {
const findBreadcrumb = () => wrapper.findComponent(GlBreadcrumb);
it('renders personal namespace breadcrumbs', () => {
createComponent({ parentGroupUrl: null, parentGroupName: null });
createComponent({ provide: { parentGroupUrl: null, parentGroupName: null } });
expect(findBreadcrumb().props('items')).toStrictEqual([
{ text: 'Your work', href: '/' },
@ -28,11 +31,36 @@ describe('New project form breadcrumbs', () => {
});
it('renders group namespace breadcrumbs', () => {
createComponent({ parentGroupUrl: '/group/projects', parentGroupName: 'test group' });
createComponent({
provide: { parentGroupUrl: '/group/projects', parentGroupName: 'test group' },
});
expect(findBreadcrumb().props('items')).toStrictEqual([
{ text: 'test group', href: '/group/projects' },
{ text: 'New project', href: '#' },
]);
});
it('renders breadcrumbs with additional hash', () => {
createComponent({
props: {
selectedProjectType: {
key: 'blank',
value: 'blank_project',
selector: '#blank-project-pane',
title: 'Create blank project',
description:
'Create a blank project to store your files, plan your work, and collaborate on code, among other things.',
},
},
provide: { parentGroupUrl: null, parentGroupName: null },
});
expect(findBreadcrumb().props('items')).toStrictEqual([
{ text: 'Your work', href: '/' },
{ text: 'Projects', href: '/dashboard/projects' },
{ text: 'New project', href: '#' },
{ text: 'Create blank project', href: '#blank_project' },
]);
});
});

View File

@ -49,6 +49,23 @@ RSpec.describe Mutations::Ci::JobTokenScope::AutopopulateAllowlist, feature_cate
end.to change { Ci::JobToken::ProjectScopeLink.count }.by(1)
end
it 'triggers the tracking events' do
expect do
resolver
end
.to trigger_internal_events('ci_job_token_autopopulate_allowlist')
.with(
user: current_user,
project: project,
additional_properties: {
label: 'ui'
}
).exactly(:once)
.and increment_usage_metrics(
'counts.count_total_allowlist_autopopulation'
).by(1)
end
context 'when the clear service returns an error' do
let(:service) { instance_double(::Ci::JobToken::ClearAutopopulatedAllowlistService) }

View File

@ -5,20 +5,26 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::Sos::BaseDbStatsHandler, feature_category: :database do
let(:temp_directory) { Dir.mktmpdir }
let(:output_file_path) { temp_directory }
let(:expected_file_path) { File.join(output_file_path, db_name, "#{query.each_key.first}.csv") }
let(:output) { Gitlab::Database::Sos::Output.new(output_file_path, mode: :directory) }
let(:db_name) { 'test_db' }
let(:connection) { ApplicationRecord.connection }
let(:handler) { described_class.new(connection, db_name, output) }
let(:query) { { pg_show_all_settings: "SHOW ALL;" } }
let(:result) { ApplicationRecord.connection.execute(query[:pg_show_all_settings]) }
before do
allow(Gitlab::Database::Sos::DbStatsActivity).to receive(:queries).and_return({
pg_show_all_settings: 'SHOW ALL;'
})
let(:queries) do
{
pg_show_all_settings: 'SHOW ALL;',
pg_statio_user_tables: 'SELECT now() AS timestamp, * FROM pg_statio_user_tables;'
}
end
let(:result) { ApplicationRecord.connection.execute(queries[:pg_show_all_settings]) }
let(:result_with_timestamp) { ApplicationRecord.connection.execute(queries[:pg_statio_user_tables]) }
let(:timestamp) { Time.zone.now.strftime("%Y%m%d_%H%M%S") }
let(:file_path_with_timestamp) do
File.join(output_file_path, db_name, queries.keys.last.to_s, "#{timestamp}.csv")
end
let(:file_path_without_timestamp) { File.join(output_file_path, db_name, "#{queries.each_key.first}.csv") }
after do
FileUtils.remove_entry(temp_directory)
end
@ -34,7 +40,7 @@ RSpec.describe Gitlab::Database::Sos::BaseDbStatsHandler, feature_category: :dat
describe '#execute_query' do
context "when a query is sucessfully executed" do
it 'executes the query and returns the result' do
result = handler.execute_query(query[:pg_show_all_settings])
result = handler.execute_query(queries[:pg_show_all_settings])
expect(result).to be_a(PG::Result)
expect(result.ntuples).to be > 0
end
@ -55,14 +61,26 @@ RSpec.describe Gitlab::Database::Sos::BaseDbStatsHandler, feature_category: :dat
end
describe '#write_to_csv' do
before do
allow(Time.zone).to receive(:now).and_return(Time.zone.parse('2023-01-01 12:00:00 UTC'))
allow(Gitlab::Database::Sos::DbStatsActivity).to receive(:queries).and_return({
pg_show_all_settings: 'SHOW ALL;'
})
allow(Gitlab::Database::Sos::DbLoopStatsActivity).to receive(:queries).and_return({
pg_statio_user_tables: 'SELECT now() AS timestamp, * FROM pg_statio_user_tables;'
})
end
context 'when result exists' do
it 'creates a CSV file with the correct headers and data (if applicable)' do
handler.write_to_csv(query.each_key.first, result)
it 'creates a CSV file with the correct headers and data (if applicable) without timestamps' do
handler.write_to_csv(queries.each_key.first, result)
output.finish
expect(File.exist?(expected_file_path)).to be true
expect(File.exist?(file_path_without_timestamp)).to be true
csv_content = CSV.read(expected_file_path)
csv_content = CSV.read(file_path_without_timestamp)
expect(csv_content.first).to eq(%w[name setting description])
@ -73,17 +91,36 @@ RSpec.describe Gitlab::Database::Sos::BaseDbStatsHandler, feature_category: :dat
# it's safe to say this value will not change for us.
expect(block_size_row[1]).to eq('8192')
end
it 'creates a CSV file with the correct headers and data (if applicable) with timestamps' do
handler.write_to_csv(queries.keys.last, result_with_timestamp, include_timestamp: true)
output.finish
expect(File.exist?(file_path_with_timestamp)).to be true
csv_content = CSV.read(file_path_with_timestamp)
expect(csv_content.first).to include("timestamp", "relid", "schemaname")
end
end
context 'when result is empty' do
let(:empty_result) { [] }
it 'creates an empty CSV file' do
handler.write_to_csv(query.each_key.first, empty_result)
it 'creates an empty CSV file without timestamp' do
handler.write_to_csv(queries.each_key.first, empty_result)
output.finish
expect(File.exist?(expected_file_path)).to be true
expect(File.zero?(expected_file_path)).to be true
expect(File.exist?(file_path_without_timestamp)).to be true
expect(File.zero?(file_path_without_timestamp)).to be true
end
it 'creates an empty CSV file with timestamp' do
handler.write_to_csv(queries.keys.last, empty_result, include_timestamp: true)
output.finish
expect(File.exist?(file_path_with_timestamp)).to be true
expect(File.zero?(file_path_with_timestamp)).to be true
end
end
@ -94,9 +131,9 @@ RSpec.describe Gitlab::Database::Sos::BaseDbStatsHandler, feature_category: :dat
it 'logs the error' do
expect(Gitlab::AppLogger).to receive(:error) do |message|
expect(message).to include("Error writing CSV for DB:#{db_name} query:#{query.each_key.first} error message")
expect(message).to include("Error writing CSV for DB:#{db_name} query:#{queries.each_key.first} ")
end
handler.write_to_csv(query.each_key.first, result)
handler.write_to_csv(queries.each_key.first, result)
end
end
end
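The path expectations in this spec encode the on-disk layout: results written without a timestamp go to <db_name>/<query_name>.csv, results written with include_timestamp: true go to <db_name>/<query_name>/<YYYYmmdd_HHMMSS>.csv, and an empty result produces an empty file. A simplified, stdlib-only sketch of a writer matching that layout (the real handler writes through Gitlab::Database::Sos::Output, whose interface is not reproduced here, so this is an approximation):

# Simplified, hypothetical sketch of the CSV layout asserted by the spec; not the real handler.
require 'csv'
require 'fileutils'

def write_query_csv(root, db_name, query_name, result, include_timestamp: false)
  relative_path =
    if include_timestamp
      File.join(db_name, query_name.to_s, "#{Time.now.strftime('%Y%m%d_%H%M%S')}.csv")
    else
      File.join(db_name, "#{query_name}.csv")
    end

  path = File.join(root, relative_path)
  FileUtils.mkdir_p(File.dirname(path))

  CSV.open(path, 'w') do |csv|
    # An empty result (e.g. []) leaves the file empty, matching the spec.
    next unless result.respond_to?(:fields) && result.ntuples > 0

    csv << result.fields                 # header row, e.g. name/setting/description
    result.each_row { |row| csv << row } # one CSV row per PG::Result row
  end
end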

View File

@ -0,0 +1,54 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::Sos::DbLoopStatsActivity, feature_category: :database do
let(:temp_directory) { Dir.mktmpdir }
let(:output_file_path) { temp_directory }
let(:output) { Gitlab::Database::Sos::Output.new(temp_directory, mode: :directory) }
let(:db_name) { 'test_db' }
let(:connection) { ApplicationRecord.connection }
let(:handler) { described_class.new(connection, db_name, output) }
let(:query) { { pg_stat_user_indexes: "SELECT * FROM pg_stat_user_indexes;" } }
let(:result) { ApplicationRecord.connection.execute(query[:pg_stat_user_indexes]) }
let(:timestamp) { Time.zone.now.strftime("%Y%m%d_%H%M%S") }
after do
FileUtils.remove_entry(temp_directory)
end
describe '#run' do
it 'successfully writes each query result to csv' do
expect_next_instance_of(Gitlab::Database::Sos::Output) do |instance|
expect(instance).to receive(:write_file).exactly(described_class::QUERIES.count).times
end
handler.run
end
end
describe 'individual queries' do
described_class::QUERIES.each do |name, query|
it "successfully executes and returns results for #{name}" do
result = handler.execute_query(query)
expect(result).to be_a(PG::Result)
expect(result.nfields).to be > 0
case name
when :pg_stat_user_tables
expect(result.fields).to include("timestamp", "relid", "schemaname", "relname", "seq_scan")
when :pg_stat_user_indexes
expect(result.fields).to include("timestamp", "relid", "indexrelid", "schemaname", "relname")
when :pg_statio_user_tables
expect(result.fields).to include("timestamp", "relid", "schemaname", "relname", "heap_blks_read")
when :pg_statio_user_indexes
expect(result.fields).to include("timestamp", "relid", "indexrelid", "schemaname", "relname", "idx_blks_read")
when :table_relation_size
expect(result.fields).to eq %w[timestamp relation total_size_bytes]
when :pg_lock_stat_activity
expect(result.fields).to include("timestamp", "pid", "usename", "application_name", "client_addr")
end
end
end
end
end
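Read together with the handler spec above, #run is expected to execute each entry in QUERIES and hand every result to the output exactly once, and the queries select a timestamp column so repeated iterations can be written side by side. A rough usage sketch of that loop, assuming the execute_query/write_to_csv interface exercised earlier (the real class inherits it from BaseDbStatsHandler):

# Hypothetical sketch of the per-iteration loop the spec implies; not the real class.
TIMESTAMPED_QUERIES = {
  pg_stat_user_indexes: "SELECT now() AS timestamp, * FROM pg_stat_user_indexes;",
  pg_statio_user_tables: "SELECT now() AS timestamp, * FROM pg_statio_user_tables;"
  # ... remaining QUERIES entries ...
}.freeze

def run_loop_iteration(handler)
  TIMESTAMPED_QUERIES.each do |name, sql|
    result = handler.execute_query(sql)                          # same call exercised by the spec
    handler.write_to_csv(name, result, include_timestamp: true)  # one timestamped CSV per query
  end
end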

View File

@ -2,22 +2,26 @@
require 'spec_helper'
# WIP
RSpec.describe Gitlab::Database::Sos, feature_category: :database do
describe '#run' do
let(:temp_directory) { Dir.mktmpdir }
let(:output_file_path) { temp_directory }
let(:task) { Gitlab::Database::Sos::DbStatsActivity }
let(:connection) { ApplicationRecord.connection }
let(:db_name) { 'test_db' }
before do
stub_const("#{described_class}::DURATION", 3.seconds)
stub_const("#{described_class}::TIME", 0)
allow(Gitlab::Database::EachDatabase).to receive(:each_connection).and_yield(connection, db_name)
end
after do
FileUtils.remove_entry(temp_directory)
end
it "creates temp directory of pg data" do
stub_const("#{described_class}::TASKS", [task])
result = described_class.run(output_file_path)
expect(result.size).to be >= 1
expect(Dir.glob(File.join(temp_directory, '**', '*.csv'))).not_to be_empty
it "creates a temp directory of pg data" do
described_class.run(output_file_path)
expect(Dir.glob(File.join(output_file_path, '**', '*.csv'))).not_to be_empty
end
end
end
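The top-level spec only checks that CSV files end up under the output path; the DURATION and TIME constants it stubs suggest a polling loop over TASKS. A hedged sketch of that orchestration, using only collaborators the specs themselves reference (Output, EachDatabase.each_connection, task handlers); the loop shape and the reading of TIME as a sleep interval are assumptions:

# Hypothetical sketch of the orchestration; the real Gitlab::Database::Sos.run may differ.
def sos_run(output_file_path, tasks:, duration:, interval:)
  output = Gitlab::Database::Sos::Output.new(output_file_path, mode: :directory)
  started_at = Time.now

  loop do
    Gitlab::Database::EachDatabase.each_connection do |connection, db_name|
      tasks.each { |task| task.new(connection, db_name, output).run } # e.g. DbStatsActivity
    end

    break if Time.now - started_at >= duration # DURATION is stubbed to 3.seconds in the spec
    sleep(interval)                            # TIME is stubbed to 0 in the spec
  end
ensure
  output&.finish # flush output, mirroring output.finish in the handler spec
end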

View File

@ -79,6 +79,7 @@ RSpec.describe Ci::JobToken::AllowlistMigrationTask, :silence_stdout, feature_ca
messages << "Migration complete."
task.execute
messages.each do |message|
expect(output_stream.string).to include(message)
end
@ -86,6 +87,39 @@ RSpec.describe Ci::JobToken::AllowlistMigrationTask, :silence_stdout, feature_ca
expect(output_stream.string).not_to include("project id(s) failed to migrate:")
end
it 'triggers the tracking events' do
expect do
task.execute
end
.to trigger_internal_events('ci_job_token_autopopulate_allowlist')
.with(
user: user,
project: accessed_projects[0],
additional_properties: {
label: 'rake'
}
).exactly(:once)
.and trigger_internal_events('ci_job_token_autopopulate_allowlist')
.with(
user: user,
project: accessed_projects[1],
additional_properties: {
label: 'rake'
}
).exactly(:once)
.and trigger_internal_events('ci_job_token_autopopulate_allowlist')
.with(
user: user,
project: accessed_projects[2],
additional_properties: {
label: 'rake'
}
).exactly(:once)
.and increment_usage_metrics(
'counts.count_total_allowlist_autopopulation'
).by(3)
end
context "when a handled exception is raised" do
let(:project) { create(:project) }
let(:only_ids) { project.id.to_s }