Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-06-24 12:19:40 +00:00
parent eb1079ac27
commit cd63ffdd51
86 changed files with 1268 additions and 179 deletions

View File

@ -2235,10 +2235,6 @@
when: never
- if: '$ANTHROPIC_API_KEY == null'
when: never
- if: '$VERTEX_AI_PROJECT == null'
when: never
- if: '$VERTEX_AI_CREDENTIALS == null'
when: never
- <<: *if-fork-merge-request
when: never

View File

@ -1142,7 +1142,7 @@ RSpec/FeatureCategory:
- 'ee/spec/views/admin/push_rules/_merge_request_approvals.html.haml_spec.rb'
- 'ee/spec/views/admin/users/index.html.haml_spec.rb'
- 'ee/spec/views/clusters/clusters/show.html.haml_spec.rb'
- 'ee/spec/views/compliance_management/compliance_framework/_compliance_framework_badge.html.haml_spec.rb'
- 'ee/spec/views/compliance_management/compliance_framework/_compliance_frameworks_info.html.haml_spec.rb'
- 'ee/spec/views/devise/sessions/new.html.haml_spec.rb'
- 'ee/spec/views/groups/hook_logs/show.html.haml_spec.rb'
- 'ee/spec/views/groups/hooks/edit.html.haml_spec.rb'

View File

@ -6,7 +6,7 @@ import environmentClusterAgentQuery from '~/environments/graphql/queries/environ
import { createK8sAccessConfiguration } from '~/environments/helpers/k8s_integration_helper';
import LogsViewer from '~/vue_shared/components/logs_viewer/logs_viewer.vue';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { s__ } from '~/locale';
import { s__, __ } from '~/locale';
export default {
components: {
@ -112,6 +112,11 @@ export default {
},
headerData() {
const data = [
{
icon: 'kubernetes-agent',
label: this.$options.i18n.agent,
value: this.gitlabAgentId,
},
{ icon: 'namespace', label: this.$options.i18n.namespace, value: this.namespace },
{ icon: 'pod', label: this.$options.i18n.pod, value: this.podName },
];
@ -129,16 +134,20 @@ export default {
emptyStateTitleForContainer: s__(
'KubernetesLogs|No logs available for container %{containerName} of pod %{podName}',
),
agent: s__('KubernetesLogs|Agent ID'),
pod: s__('KubernetesLogs|Pod'),
container: s__('KubernetesLogs|Container'),
namespace: s__('KubernetesLogs|Namespace'),
error: __('Error'),
},
EmptyStateSvg,
};
</script>
<template>
<div>
<gl-alert v-if="error" variant="danger" :dismissible="false">{{ error }}</gl-alert>
<gl-alert v-if="error" variant="danger" :dismissible="false"
>{{ $options.i18n.error }}: {{ error }}</gl-alert
>
<gl-loading-icon v-if="isLoading" />
<logs-viewer v-else-if="logLines" :log-lines="logLines" :highlighted-line="highlightedLineHash"

View File

@ -5,6 +5,7 @@ import {
EVENT_PLAIN_TEXT,
EVENT_ERROR,
} from '@gitlab/cluster-client';
import { throttle } from 'lodash';
import k8sLogsQuery from '~/environments/graphql/queries/k8s_logs.query.graphql';
export const buildWatchPath = ({ resource, api = 'api/v1', namespace = '' }) => {
@ -61,12 +62,19 @@ export const k8sLogs = (_, { configuration, namespace, podName, containerName },
watchApi
.subscribeToStream(watchPath, watchQuery)
.then((watcher) => {
let logsData = [];
const writeLogsThrottled = throttle(() => {
const currentLogsData = cacheWrapper.readLogsData();
if (currentLogsData.length !== logsData.length) {
cacheWrapper.writeLogsData(logsData);
}
}, 100);
watcher.on(EVENT_PLAIN_TEXT, (data) => {
const logsData = cacheWrapper.readLogsData();
logsData = [...logsData, { id: logsData.length + 1, content: data }];
const updatedLogsData = [...logsData, { id: logsData.length + 1, content: data }];
cacheWrapper.writeLogsData(updatedLogsData);
writeLogsThrottled();
});
watcher.on(EVENT_TIMEOUT, (err) => {

View File

@ -40,8 +40,6 @@ export default {
ItemStats,
ProjectAvatar,
VisibilityIcon,
FrameworkBadge: () =>
import('ee_component/compliance_dashboard/components/shared/framework_badge.vue'),
},
inject: {
currentGroupVisibility: {
@ -244,11 +242,6 @@ export default {
<user-access-role-badge v-if="group.permission" size="sm" class="gl-mr-2">
{{ group.permission }}
</user-access-role-badge>
<framework-badge
v-if="hasComplianceFramework"
:framework="complianceFramework"
:show-edit="false"
/>
</div>
<div v-if="group.description" class="description gl-font-sm gl-mt-1">
<span

View File

@ -70,6 +70,7 @@ export default {
projectPath: this.projectPath,
canWriteModelRegistry: this.canWriteModelRegistry,
maxAllowedFileSize: this.maxAllowedFileSize,
latestVersion: this.latestVersion,
};
},
props: {
@ -101,6 +102,11 @@ export default {
type: Number,
required: true,
},
latestVersion: {
type: String,
required: false,
default: null,
},
},
apollo: {
model: {

View File

@ -9,7 +9,7 @@ import {
GlModal,
GlModalDirective,
} from '@gitlab/ui';
import { __, s__ } from '~/locale';
import { __, s__, sprintf } from '~/locale';
import { visitUrl } from '~/lib/utils/url_utility';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import { uploadModel } from '../services/upload_model';
@ -31,7 +31,7 @@ export default {
directives: {
GlModal: GlModalDirective,
},
inject: ['projectPath', 'maxAllowedFileSize'],
inject: ['projectPath', 'maxAllowedFileSize', 'latestVersion'],
props: {
modelGid: {
type: String,
@ -47,6 +47,16 @@ export default {
versionData: null,
};
},
computed: {
versionDescription() {
if (this.latestVersion) {
return sprintf(s__('MlModelRegistry|Latest version is %{latestVersion}'), {
latestVersion: this.latestVersion,
});
}
return s__('MlModelRegistry|Enter a semver version.');
},
},
methods: {
async createModelVersion() {
const { data } = await this.$apollo.mutate({
@ -114,8 +124,7 @@ export default {
actionSecondary: {
text: __('Cancel'),
},
versionDescription: s__('MlModelRegistry|Enter a semver version.'),
versionPlaceholder: s__('MlModelRegistry|For example 1.0.0'),
versionPlaceholder: s__('MlModelRegistry|A semver version like 1.0.0'),
descriptionPlaceholder: s__('MlModelRegistry|Enter some description'),
buttonTitle: s__('MlModelRegistry|Create model version'),
title: s__('MlModelRegistry|Create model version & import artifacts'),
@ -137,9 +146,10 @@ export default {
>
<gl-form>
<gl-form-group
data-testid="versionDescriptionId"
label="Version:"
label-for="versionId"
:description="$options.modal.versionDescription"
:description="versionDescription"
>
<gl-form-input
id="versionId"

View File

@ -20,7 +20,7 @@ export default {
<template>
<a
:id="lineId"
class="gl-inline-block gl-w-8 gl-pr-2 -gl-ml-8 gl-select-none gl-text-right"
class="gl-inline-block gl-w-9 gl-pr-2 -gl-ml-9 gl-select-none gl-text-right"
:href="lineHref"
>
{{ lineNumber }}

View File

@ -50,7 +50,7 @@ export default {
return h(
'div',
{
class: ['gl-text-white', 'gl-pl-8', { 'gl-bg-gray-700': isHighlighted }],
class: ['gl-text-white', 'gl-pl-9', { 'gl-bg-gray-700': isHighlighted }],
},
[
h(LineNumber, {

View File

@ -101,5 +101,3 @@ class DashboardController < Dashboard::ApplicationController
end
end
end
DashboardController.prepend_mod

View File

@ -8,6 +8,11 @@ module Resolvers
required: false,
description: 'Search query.'
argument :sort, Types::Projects::ProjectSortEnum,
required: false,
description: "List starred projects by sort order.",
default_value: :name_asc
alias_method :user, :object
def resolve(**args)
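
The new `sort` argument sits alongside the existing `search` argument and falls back to `:name_asc`. A minimal sketch of exercising it from a Rails console, assuming the field is exposed as `starredProjects` on the user types (as documented further down) and that the enum exposes a `NAME_ASC` value; the username is a placeholder:

```ruby
# Illustrative only: query a user's starred projects with the new sort argument.
query = <<~GRAPHQL
  query($username: String!) {
    user(username: $username) {
      starredProjects(sort: NAME_ASC, first: 10) {
        nodes { fullPath }
      }
    }
  }
GRAPHQL

result = GitlabSchema.execute(
  query,
  variables: { username: 'root' },
  context: { current_user: User.find_by(username: 'root') }
)
pp result.to_h.dig('data', 'user', 'starredProjects', 'nodes')
```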

View File

@ -27,7 +27,8 @@ module Projects
mlflow_tracking_url: mlflow_tracking_url(project),
model_id: model.id,
model_name: model.name,
max_allowed_file_size: max_allowed_file_size(project)
max_allowed_file_size: max_allowed_file_size(project),
latest_version: model.latest_version&.version
}
to_json(data)

View File

@ -54,10 +54,7 @@ class BulkImport < ApplicationRecord
# rubocop:disable Style/SymbolProc
after_transition any => [:finished, :failed, :timeout] do |bulk_import|
bulk_import.update_has_failures
if Feature.enabled?(:notify_owners_of_finished_direct_transfer, bulk_import.user)
bulk_import.notify_owners_of_completion
end
bulk_import.notify_owners_of_completion
end
# rubocop:enable Style/SymbolProc

View File

@ -4,7 +4,7 @@
= render ::Layouts::PageHeadingComponent.new(_('Gitaly servers'))
= render Pajamas::CardComponent.new(card_options: { class: 'gl-new-card' }, header_options: { class: 'gl-new-card-header' }, body_options: { class: 'gl-new-card-body gl-px-0' }) do |c|
- c.with_header do
.gl-new-card-title-wrapper
.gl-new-card-title-wrapper{ data: { event_tracking_load: 'true', event_tracking: 'view_admin_gitaly_servers_pageload' } }
%h3.gl-new-card-title
= _('Gitaly servers')
.gl-new-card-count

View File

@ -7,7 +7,6 @@
%h1.home-panel-title.gl-heading-1.gl-mt-3.gl-display-flex.gl-align-items-center.gl-flex-wrap.gl-gap-3.gl-break-anywhere{ class: 'gl-mb-0!', data: { testid: 'project-name-content' }, itemprop: 'name' }
= @project.name
= visibility_level_content(@project, css_class: 'visibility-icon gl-display-inline-flex gl-text-secondary', icon_css_class: 'icon')
= render_if_exists 'compliance_management/compliance_framework/compliance_framework_badge', project: @project, additional_classes: 'gl-align-self-center'
- if @project.catalog_resource
= render partial: 'shared/ci_catalog_badge', locals: { href: explore_catalog_path(@project.catalog_resource), css_class: 'gl-mx-0' }
- if @project.group

View File

@ -61,6 +61,9 @@
- else
= render 'stat_anchor_list', anchors: statistics_buttons, project_buttons: true
-# Compliance frameworks
= render_if_exists 'compliance_management/compliance_framework/compliance_frameworks_info', project: @project
-# Project created
.project-page-sidebar-block.gl-py-4
%p.gl-font-bold.gl-text-gray-900.gl-m-0.gl-mb-1= s_('ProjectPage|Created on')

View File

@ -55,8 +55,6 @@
= sprite_icon('scale', size: 14, css_class: 'gl-mr-2')
= project_license_name(project)
- if !explore_projects_tab?
= render_if_exists 'compliance_management/compliance_framework/compliance_framework_badge', project: project, additional_classes: 'gl-ml-3!'
- c.with_body do
.project-controls{ data: { testid: 'project_controls'} }
.gl-display-flex.gl-align-items-center.gl-gap-2.gl-mb-2.gl-justify-content-space-between.gl-flex-wrap

View File

@ -0,0 +1,13 @@
---
description: Audit event triggered
internal_events: true
action: trigger_audit_event
label_description: Name of the triggered audit event
identifiers:
product_group: compliance
milestone: '17.2'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/151878
distributions:
- ee
tiers:
- ultimate
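
The `label_description` above signals that the name of the triggered audit event travels as the event label, which is what the label-filtered usage counters added later in this commit (for example `trigger_audit_event-filter:[label:delete_epic]`) aggregate on. A rough sketch of how such an event could be emitted, assuming the internal events API accepts `additional_properties` and using a made-up call site:

```ruby
# Hypothetical call site: record that the 'delete_epic' audit event was triggered.
# The label carries the audit event name so label-filtered metrics can count it.
Gitlab::InternalEvents.track_event(
  'trigger_audit_event',
  additional_properties: { label: 'delete_epic' },
  user: current_user,   # whatever user is in scope at the call site
  namespace: group      # the namespace the audit event belongs to
)
```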

View File

@ -0,0 +1,16 @@
---
description: Tracks pageviews for the admin gitaly servers page
internal_events: true
action: view_admin_gitaly_servers_pageload
identifiers:
- user
product_group: personal_productivity
milestone: '17.2'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156811
distributions:
- ce
- ee
tiers:
- free
- premium
- ultimate

View File

@ -1,9 +0,0 @@
---
name: notify_owners_of_finished_direct_transfer
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/458115
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/155262
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/465487
milestone: '17.1'
group: group::import and integrate
type: gitlab_com_derisk
default_enabled: false

View File

@ -0,0 +1,22 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_view_admin_gitaly_servers_pageload_monthly
description: Monthly count of unique users who visit the gitaly servers page
product_group: personal_productivity
performance_indicator_type: []
value_type: number
status: active
milestone: '17.2'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156811
time_frame: 28d
data_source: internal_events
data_category: optional
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
events:
- name: view_admin_gitaly_servers_pageload
unique: user.id

View File

@ -0,0 +1,21 @@
---
key_path: counts.count_total_view_admin_gitaly_servers_pageload_monthly
description: Monthly count of total users who visit the gitaly servers page
product_group: personal_productivity
performance_indicator_type: []
value_type: number
status: active
milestone: '17.2'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156811
time_frame: 28d
data_source: internal_events
data_category: optional
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
events:
- name: view_admin_gitaly_servers_pageload

View File

@ -0,0 +1,22 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_view_admin_gitaly_servers_pageload_weekly
description: Weekly count of unique users who visit the gitaly servers page
product_group: personal_productivity
performance_indicator_type: []
value_type: number
status: active
milestone: '17.2'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156811
time_frame: 7d
data_source: internal_events
data_category: optional
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
events:
- name: view_admin_gitaly_servers_pageload
unique: user.id

View File

@ -0,0 +1,21 @@
---
key_path: counts.count_total_view_admin_gitaly_servers_pageload_weekly
description: Weekly count of total users who visit the gitaly servers page
product_group: personal_productivity
performance_indicator_type: []
value_type: number
status: active
milestone: '17.2'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156811
time_frame: 7d
data_source: internal_events
data_category: optional
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
events:
- name: view_admin_gitaly_servers_pageload

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
class CreateDuoChatEventsTable < ClickHouse::Migration
def up
execute <<~SQL
CREATE TABLE IF NOT EXISTS duo_chat_events
(
user_id UInt64 DEFAULT 0,
event UInt8 DEFAULT 0,
namespace_path String DEFAULT '0/',
timestamp DateTime64(6, 'UTC') DEFAULT now64()
) ENGINE = ReplacingMergeTree
PARTITION BY toYear(timestamp)
ORDER BY (user_id, event, timestamp)
SQL
end
def down
execute <<~SQL
DROP TABLE IF EXISTS duo_chat_events
SQL
end
end
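
`ReplacingMergeTree` ordered by `(user_id, event, timestamp)` collapses duplicate rows with an identical sorting key during merges, and partitioning by year keeps old partitions cheap to drop. A minimal read-side sketch, assuming the `ClickHouse::Client.select` helper and the `:main` database; the filter values are placeholders:

```ruby
# Illustrative query against the new duo_chat_events table.
rows = ClickHouse::Client.select(<<~SQL, :main)
  SELECT user_id, event, timestamp
  FROM duo_chat_events
  WHERE user_id = 1
  ORDER BY timestamp DESC
  LIMIT 10
SQL

rows.each { |row| puts "#{row['timestamp']} event=#{row['event']}" }
```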

View File

@ -0,0 +1,9 @@
---
migration_job_name: BackfillPackagesMavenMetadataProjectId
description: Backfills sharding key `packages_maven_metadata.project_id` from `packages_packages`.
feature_category: package_registry
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/157034
milestone: '17.2'
queued_migration_version: 20240621120705
finalize_after: '2024-07-22'
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,9 @@
---
migration_job_name: BackfillVulnerabilityFindingEvidencesProjectId
description: Backfills sharding key `vulnerability_finding_evidences.project_id` from `vulnerability_occurrences`.
feature_category: vulnerability_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/157032
milestone: '17.2'
queued_migration_version: 20240621115731
finalize_after: '2024-07-22'
finalized_by: # version of the migration that finalized this BBM

View File

@ -19,3 +19,4 @@ desired_sharding_key:
table: packages_packages
sharding_key: project_id
belongs_to: package
desired_sharding_key_migration_job_name: BackfillPackagesMavenMetadataProjectId

View File

@ -19,3 +19,4 @@ desired_sharding_key:
table: vulnerability_occurrences
sharding_key: project_id
belongs_to: finding
desired_sharding_key_migration_job_name: BackfillVulnerabilityFindingEvidencesProjectId

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddProjectIdToVulnerabilityFindingEvidences < Gitlab::Database::Migration[2.2]
milestone '17.2'
def change
add_column :vulnerability_finding_evidences, :project_id, :bigint
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddProjectIdToPackagesMavenMetadata < Gitlab::Database::Migration[2.2]
milestone '17.2'
def change
add_column :packages_maven_metadata, :project_id, :bigint
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class IndexVulnerabilityFindingEvidencesOnProjectId < Gitlab::Database::Migration[2.2]
milestone '17.2'
disable_ddl_transaction!
INDEX_NAME = 'index_vulnerability_finding_evidences_on_project_id'
def up
add_concurrent_index :vulnerability_finding_evidences, :project_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :vulnerability_finding_evidences, INDEX_NAME
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddVulnerabilityFindingEvidencesProjectIdFk < Gitlab::Database::Migration[2.2]
milestone '17.2'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :vulnerability_finding_evidences, :projects, column: :project_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :vulnerability_finding_evidences, column: :project_id
end
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class AddVulnerabilityFindingEvidencesProjectIdTrigger < Gitlab::Database::Migration[2.2]
milestone '17.2'
def up
install_sharding_key_assignment_trigger(
table: :vulnerability_finding_evidences,
sharding_key: :project_id,
parent_table: :vulnerability_occurrences,
parent_sharding_key: :project_id,
foreign_key: :vulnerability_occurrence_id
)
end
def down
remove_sharding_key_assignment_trigger(
table: :vulnerability_finding_evidences,
sharding_key: :project_id,
parent_table: :vulnerability_occurrences,
parent_sharding_key: :project_id,
foreign_key: :vulnerability_occurrence_id
)
end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
class QueueBackfillVulnerabilityFindingEvidencesProjectId < Gitlab::Database::Migration[2.2]
milestone '17.2'
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
MIGRATION = "BackfillVulnerabilityFindingEvidencesProjectId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:vulnerability_finding_evidences,
:id,
:project_id,
:vulnerability_occurrences,
:project_id,
:vulnerability_occurrence_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(
MIGRATION,
:vulnerability_finding_evidences,
:id,
[
:project_id,
:vulnerability_occurrences,
:project_id,
:vulnerability_occurrence_id
]
)
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class IndexPackagesMavenMetadataOnProjectId < Gitlab::Database::Migration[2.2]
milestone '17.2'
disable_ddl_transaction!
INDEX_NAME = 'index_packages_maven_metadata_on_project_id'
def up
add_concurrent_index :packages_maven_metadata, :project_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :packages_maven_metadata, INDEX_NAME
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddPackagesMavenMetadataProjectIdFk < Gitlab::Database::Migration[2.2]
milestone '17.2'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :packages_maven_metadata, :projects, column: :project_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :packages_maven_metadata, column: :project_id
end
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class AddPackagesMavenMetadataProjectIdTrigger < Gitlab::Database::Migration[2.2]
milestone '17.2'
def up
install_sharding_key_assignment_trigger(
table: :packages_maven_metadata,
sharding_key: :project_id,
parent_table: :packages_packages,
parent_sharding_key: :project_id,
foreign_key: :package_id
)
end
def down
remove_sharding_key_assignment_trigger(
table: :packages_maven_metadata,
sharding_key: :project_id,
parent_table: :packages_packages,
parent_sharding_key: :project_id,
foreign_key: :package_id
)
end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
class QueueBackfillPackagesMavenMetadataProjectId < Gitlab::Database::Migration[2.2]
milestone '17.2'
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
MIGRATION = "BackfillPackagesMavenMetadataProjectId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:packages_maven_metadata,
:id,
:project_id,
:packages_packages,
:project_id,
:package_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(
MIGRATION,
:packages_maven_metadata,
:id,
[
:project_id,
:packages_packages,
:project_id,
:package_id
]
)
end
end

View File

@ -0,0 +1 @@
87e10a60c27d45dc9ab5e31d0290aad07070e39c25aa80bb29d1e52f5612d96c

View File

@ -0,0 +1 @@
f53a3e8e6812bdd14ff9890e6e95178c660f4cc7ab88eb5be4ae20bc1225086f

View File

@ -0,0 +1 @@
1cc562767546e5cfb017dc51a1bf0946893ac4b6540cd33b5984e2b274fae7a5

View File

@ -0,0 +1 @@
3740b89f47ed72101f15f1b936d99f9a43ebf5d9625c574d86a75c51a47ac72c

View File

@ -0,0 +1 @@
11873f1781c7bc9ae4ffa733c91c7cdcf10d6f660eb1bd653da762c6d67bd55d

View File

@ -0,0 +1 @@
e9a4760d67ccf23f6eb3287a656446934a23695cb14db5238dbb847a98433c9e

View File

@ -0,0 +1 @@
4500beb38bc5230576eea4259a80e61c5f216aa496dfdac7665262fc9b95c751

View File

@ -0,0 +1 @@
a482821dcac73463bf84447dff312f36994000ea0da5f9fc1a704a9c16851d04

View File

@ -0,0 +1 @@
37e6290b16625161fdf167cc91ed34afc246c0e67ee98118af743a662e1db4a6

View File

@ -0,0 +1 @@
bd353f9a09663687ede6dcfd4e60a336d2d61343117be7a1a5cbcc13116a2f19

View File

@ -829,6 +829,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_1ed40f4d5f4e() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."project_id" IS NULL THEN
SELECT "project_id"
INTO NEW."project_id"
FROM "packages_packages"
WHERE "packages_packages"."id" = NEW."package_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_207005e8e995() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -1441,6 +1457,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_a7e0fb195210() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."project_id" IS NULL THEN
SELECT "project_id"
INTO NEW."project_id"
FROM "vulnerability_occurrences"
WHERE "vulnerability_occurrences"."id" = NEW."vulnerability_occurrence_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_af3f17817e4d() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -14049,7 +14081,8 @@ CREATE TABLE packages_maven_metadata (
app_group character varying NOT NULL,
app_name character varying NOT NULL,
app_version character varying,
path character varying(512) NOT NULL
path character varying(512) NOT NULL,
project_id bigint
);
CREATE SEQUENCE packages_maven_metadata_id_seq
@ -18719,7 +18752,8 @@ CREATE TABLE vulnerability_finding_evidences (
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
vulnerability_occurrence_id bigint NOT NULL,
data jsonb DEFAULT '{}'::jsonb NOT NULL
data jsonb DEFAULT '{}'::jsonb NOT NULL,
project_id bigint
);
CREATE SEQUENCE vulnerability_finding_evidences_id_seq
@ -27947,6 +27981,8 @@ CREATE INDEX index_packages_maven_metadata_on_package_id_and_path ON packages_ma
CREATE INDEX index_packages_maven_metadata_on_path ON packages_maven_metadata USING btree (path);
CREATE INDEX index_packages_maven_metadata_on_project_id ON packages_maven_metadata USING btree (project_id);
CREATE UNIQUE INDEX index_packages_npm_metadata_caches_on_object_storage_key ON packages_npm_metadata_caches USING btree (object_storage_key);
CREATE INDEX index_packages_npm_metadata_caches_on_project_id ON packages_npm_metadata_caches USING btree (project_id);
@ -29181,6 +29217,8 @@ CREATE INDEX index_vulnerability_feedback_on_merge_request_id ON vulnerability_f
CREATE INDEX index_vulnerability_feedback_on_pipeline_id ON vulnerability_feedback USING btree (pipeline_id);
CREATE INDEX index_vulnerability_finding_evidences_on_project_id ON vulnerability_finding_evidences USING btree (project_id);
CREATE INDEX index_vulnerability_finding_signatures_on_finding_id ON vulnerability_finding_signatures USING btree (finding_id);
CREATE INDEX index_vulnerability_finding_signatures_on_project_id ON vulnerability_finding_signatures USING btree (project_id);
@ -31257,6 +31295,8 @@ CREATE TRIGGER trigger_13d4aa8fe3dd BEFORE INSERT OR UPDATE ON draft_notes FOR E
CREATE TRIGGER trigger_174b23fa3dfb BEFORE INSERT OR UPDATE ON approval_project_rules_users FOR EACH ROW EXECUTE FUNCTION trigger_174b23fa3dfb();
CREATE TRIGGER trigger_1ed40f4d5f4e BEFORE INSERT OR UPDATE ON packages_maven_metadata FOR EACH ROW EXECUTE FUNCTION trigger_1ed40f4d5f4e();
CREATE TRIGGER trigger_207005e8e995 BEFORE INSERT OR UPDATE ON operations_strategies FOR EACH ROW EXECUTE FUNCTION trigger_207005e8e995();
CREATE TRIGGER trigger_219952df8fc4 BEFORE INSERT OR UPDATE ON merge_request_blocks FOR EACH ROW EXECUTE FUNCTION trigger_219952df8fc4();
@ -31337,6 +31377,8 @@ CREATE TRIGGER trigger_a253cb3cacdf BEFORE INSERT OR UPDATE ON dora_daily_metric
CREATE TRIGGER trigger_a4e4fb2451d9 BEFORE INSERT OR UPDATE ON epic_user_mentions FOR EACH ROW EXECUTE FUNCTION trigger_a4e4fb2451d9();
CREATE TRIGGER trigger_a7e0fb195210 BEFORE INSERT OR UPDATE ON vulnerability_finding_evidences FOR EACH ROW EXECUTE FUNCTION trigger_a7e0fb195210();
CREATE TRIGGER trigger_af3f17817e4d BEFORE INSERT OR UPDATE ON protected_tag_create_access_levels FOR EACH ROW EXECUTE FUNCTION trigger_af3f17817e4d();
CREATE TRIGGER trigger_b2612138515d BEFORE INSERT OR UPDATE ON project_relation_exports FOR EACH ROW EXECUTE FUNCTION trigger_b2612138515d();
@ -32068,6 +32110,9 @@ ALTER TABLE ONLY users
ALTER TABLE ONLY analytics_devops_adoption_snapshots
ADD CONSTRAINT fk_78c9eac821 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY packages_maven_metadata
ADD CONSTRAINT fk_7a170ee0a3 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY project_relation_exports
ADD CONSTRAINT fk_7a4d3d5c0f FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -32809,6 +32854,9 @@ ALTER TABLE ONLY protected_tag_create_access_levels
ALTER TABLE ONLY application_settings
ADD CONSTRAINT fk_f9867b3540 FOREIGN KEY (web_ide_oauth_application_id) REFERENCES oauth_applications(id) ON DELETE SET NULL;
ALTER TABLE ONLY vulnerability_finding_evidences
ADD CONSTRAINT fk_fa3efd4e94 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE p_ci_stages
ADD CONSTRAINT fk_fb57e6cc56 FOREIGN KEY (pipeline_id) REFERENCES ci_pipelines(id) ON DELETE CASCADE;

View File

@ -16316,6 +16316,7 @@ four standard [pagination arguments](#pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="addonuserstarredprojectssearch"></a>`search` | [`String`](#string) | Search query. |
| <a id="addonuserstarredprojectssort"></a>`sort` | [`ProjectSort`](#projectsort) | List starred projects by sort order. |
##### `AddOnUser.timelogs`
@ -17140,6 +17141,7 @@ four standard [pagination arguments](#pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="autocompleteduserstarredprojectssearch"></a>`search` | [`String`](#string) | Search query. |
| <a id="autocompleteduserstarredprojectssort"></a>`sort` | [`ProjectSort`](#projectsort) | List starred projects by sort order. |
##### `AutocompletedUser.timelogs`
@ -19329,6 +19331,7 @@ four standard [pagination arguments](#pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="currentuserstarredprojectssearch"></a>`search` | [`String`](#string) | Search query. |
| <a id="currentuserstarredprojectssort"></a>`sort` | [`ProjectSort`](#projectsort) | List starred projects by sort order. |
##### `CurrentUser.timelogs`
@ -24663,6 +24666,7 @@ four standard [pagination arguments](#pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mergerequestassigneestarredprojectssearch"></a>`search` | [`String`](#string) | Search query. |
| <a id="mergerequestassigneestarredprojectssort"></a>`sort` | [`ProjectSort`](#projectsort) | List starred projects by sort order. |
##### `MergeRequestAssignee.timelogs`
@ -25008,6 +25012,7 @@ four standard [pagination arguments](#pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mergerequestauthorstarredprojectssearch"></a>`search` | [`String`](#string) | Search query. |
| <a id="mergerequestauthorstarredprojectssort"></a>`sort` | [`ProjectSort`](#projectsort) | List starred projects by sort order. |
##### `MergeRequestAuthor.timelogs`
@ -25400,6 +25405,7 @@ four standard [pagination arguments](#pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mergerequestparticipantstarredprojectssearch"></a>`search` | [`String`](#string) | Search query. |
| <a id="mergerequestparticipantstarredprojectssort"></a>`sort` | [`ProjectSort`](#projectsort) | List starred projects by sort order. |
##### `MergeRequestParticipant.timelogs`
@ -25781,6 +25787,7 @@ four standard [pagination arguments](#pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mergerequestreviewerstarredprojectssearch"></a>`search` | [`String`](#string) | Search query. |
| <a id="mergerequestreviewerstarredprojectssort"></a>`sort` | [`ProjectSort`](#projectsort) | List starred projects by sort order. |
##### `MergeRequestReviewer.timelogs`
@ -31637,6 +31644,7 @@ four standard [pagination arguments](#pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="usercorestarredprojectssearch"></a>`search` | [`String`](#string) | Search query. |
| <a id="usercorestarredprojectssort"></a>`sort` | [`ProjectSort`](#projectsort) | List starred projects by sort order. |
##### `UserCore.timelogs`
@ -38119,6 +38127,7 @@ four standard [pagination arguments](#pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="userstarredprojectssearch"></a>`search` | [`String`](#string) | Search query. |
| <a id="userstarredprojectssort"></a>`sort` | [`ProjectSort`](#projectsort) | List starred projects by sort order. |
###### `User.timelogs`

View File

@ -167,7 +167,7 @@ In order of highest priority first, the component version can be:
- A commit SHA, for example `e3262fdd0914fa823210cdb79a8c421e2cef79d8`.
- A tag, for example: `1.0.0`. If a tag and commit SHA exist with the same name,
the commit SHA takes precedence over the tag. Components released to the CI/CD
the commit SHA takes precedence over the tag. Components released to the CI/CD Catalog
should be tagged with a [semantic version](#semantic-versioning).
- A branch name, for example `main`. If a branch and tag exist with the same name,
the tag takes precedence over the branch.

View File

@ -1154,6 +1154,20 @@ Instead of:
- In GitLab 14.1 and above...
- In GitLab 14.1 and newer...
## level
If you can, avoid using `level` in the context of an instance or group.
Use:
- This setting is turned on for the instance.
- This setting is turned on for the group and its subgroups.
Instead of:
- This setting is turned on at the instance level.
- This setting is turned on at the group level.
## list
Do not use **list** when referring to a [**dropdown list**](#dropdown-list).

View File

@ -14,7 +14,7 @@ Check for sensitive file disclosure. This check looks for files that may contain
Information leakage is an application weakness where an application reveals sensitive data, such as technical details of the web application, environment, or user-specific data. Sensitive data may be used by an attacker to exploit the target web application, its hosting network, or its users. Therefore, leakage of sensitive data should be limited or prevented whenever possible. Information Leakage, in its most common form,is the result of one or more of the following conditions: A failure to scrub out HTML/Script comments containing sensitive information, improper application or server configurations, or differences in page responses for valid versus invalid data.
In the case of this failure, one or more files and/or folders are accessable that should not be. This can include files common in home folders like such as command histories or files that contain secrets such as passwords.
In the case of this failure, one or more files and/or folders are accessible that should not be. This can include files common in home folders like such as command histories or files that contain secrets such as passwords.
## Links

View File

@ -19,7 +19,7 @@ injection attack, in which SQL commands are injected into data-plane input in
order to effect the execution of predefined SQL commands. This check modifies
parameters in the request (path, query string, headers, JSON, XML, etc.) to try
and create a syntax error in the SQL or NoSQL query. Logs and responses are then
analyzed to try and detect if an error occured. If an error is detected there is
analyzed to try and detect if an error occurred. If an error is detected there is
a high likelihood that a vulnerability exists.
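
To make the mechanism concrete, a sketch of why an injected quote character becomes detectable, using naive string interpolation and made-up table and parameter names:

```ruby
# Vulnerable pattern: a request parameter is interpolated straight into SQL.
param = "alice'" # the check appends a stray quote to the original value
query = "SELECT * FROM users WHERE username = '#{param}'"
# => "SELECT * FROM users WHERE username = 'alice''"
# The trailing unbalanced quote leaves the statement syntactically invalid, so the
# database raises an error that surfaces in the response or logs the check analyzes.
```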
## Remediation

View File

@ -799,7 +799,7 @@ The exclude parameters uses `body-json` when the request uses a content type `ap
##### Excluding multiple JSON nodes using JSON Path
To exclude the property `password` on each entry of an array of `users` at the root level, set the `body-json` property's value to an array with the JSON Path expression `[ "$.users[*].paswword" ]`.
To exclude the property `password` on each entry of an array of `users` at the root level, set the `body-json` property's value to an array with the JSON Path expression `[ "$.users[*].password" ]`.
The JSON Path expression starts with `$` to refer to the root node and uses `.` to refer to the current node. Then, it uses `users` to refer to a property and the characters `[` and `]` to enclose the index in the array you want to use, instead of providing a number as an index you use `*` to specify any index. After the index reference, we find `.` which now refers to any given selected index in the array, preceded by a property name `password`.
@ -807,7 +807,7 @@ For instance, the JSON document looks like this:
```json
{
"body-json": [ "$.users[*].paswword" ]
"body-json": [ "$.users[*].password" ]
}
```

View File

@ -71,7 +71,7 @@ Prerequisites:
- A custom domain name `example.com` or subdomain `subdomain.example.com`.
- Access to your domain's server control panel to set up a DNS `TXT` record to verify your domain's ownership.
- A project in the group.
- A project in the group. This project will be linked to the verified domains, and should not be deleted.
- You must have the Owner role in the top-level group.
Domain verification applies at the top-level group and to all subgroups and projects
@ -87,7 +87,7 @@ Setting up a verified domain is similar to [setting up a custom domain on GitLab
- Must link the domain to a single project, despite domain verification applying
at the top-level group and to all nested subgroups and projects, because domain
verification:
- Is tied to the project you choose.
- Is tied to the project you choose. If the project is deleted, the domain verification is removed.
- Reuses the GitLab Pages custom domain verification feature, which requires a project.
- Must configure the `TXT` only in the DNS record to verify the domain's ownership.

View File

@ -93,6 +93,80 @@ the Admin Area.
WARNING:
User approvals are associated with a user ID. Other user contributions do not have an associated user ID. When you delete a user and their contributions are moved to a "Ghost User", the approval contributions refer to a missing or invalid user ID. Instead of deleting users, consider [blocking](../../../administration/moderate_users.md#block-a-user), [banning](../../../administration/moderate_users.md#ban-a-user), or [deactivating](../../../administration/moderate_users.md#deactivate-a-user) them.
## Delete the root account on a self-managed instance
DETAILS:
**Offering:** Self-managed
WARNING:
The root account is the most privileged account on the system. Deleting the root account might result in losing access to the instance [Admin Area](../../../administration/admin_area.md) if there is no other administrator available on the instance.
You can delete the root account using either the UI or the [GitLab Rails console](../../../administration/operations/rails_console.md).
Before you delete the root account:
1. If you have created any [project](../../project/settings/project_access_tokens.md) or [personal access tokens](../../profile/personal_access_tokens.md) for the root account and use them in your workflow, transfer any necessary permissions or ownership from the root account to the new administrator.
1. [Back up your self-managed instance](../../../administration/backup_restore/backup_gitlab.md).
1. Consider [deactivating](../../../administration/moderate_users.md#deactivate-a-user) or [blocking](../../../administration/moderate_users.md#block-and-unblock-users) the root account instead.
### Use the UI
Prerequisites:
- You must be an administrator for the self-managed instance.
To delete the root account:
1. In the Admin Area, [create a new user with administrator access](../../profile/account/create_accounts.md#create-users-in-admin-area). This ensures that you maintain administrator access to the instance whilst mitigating the risks associated with deleting the root account.
1. [Delete the root account](#delete-users-and-user-contributions).
### Use the GitLab Rails console
WARNING:
Commands that change data can cause damage if not run correctly or under the right conditions. Always run commands in a test environment first and have a backup instance ready to restore.
Prerequisites:
- You must have access to the GitLab Rails console.
To delete the root account, in the Rails console:
1. Give another existing user administrator access:
```ruby
user = User.find_by(username: 'Username') # or use User.find_by(email: 'email@example.com') to find by email
user.admin = true
user.save!
```
This ensures that you maintain administrator access to the instance whilst mitigating the risks associated with deleting the root account.
1. To delete the root account, do either of the following:
- Block the root account:
```ruby
# This needs to be a current admin user
current_user = User.find_by(username: 'Username')
# This is the root user we want to block
user = User.find_by(username: 'Username')
::Users::BlockService.new(current_user).execute(user)
```
- Deactivate the root user:
```ruby
# This needs to be a current admin user
current_user = User.find_by(username: 'Username')
# This is the root user we want to deactivate
user = User.find_by(username: 'Username')
::Users::DeactivateService.new(current_user, skip_authorization: true).execute(user)
```
## Troubleshooting
### Deleting a user results in a PostgreSQL null value error

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillPackagesMavenMetadataProjectId < BackfillDesiredShardingKeyJob
operation_name :backfill_packages_maven_metadata_project_id
feature_category :package_registry
end
end
end

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillVulnerabilityFindingEvidencesProjectId < BackfillDesiredShardingKeyJob
operation_name :backfill_vulnerability_finding_evidences_project_id
feature_category :vulnerability_management
end
end
end

View File

@ -2,63 +2,53 @@
module Gitlab
module Ci
class Jwt
class Jwt < JwtBase
NOT_BEFORE_TIME = 5
DEFAULT_EXPIRE_TIME = 60 * 5
NoSigningKeyError = Class.new(StandardError)
def self.for_build(build)
self.new(build, ttl: build.metadata_timeout).encoded
end
def initialize(build, ttl:)
super()
@build = build
@ttl = ttl
end
def payload
custom_claims.merge(reserved_claims)
end
def encoded
headers = { kid: kid, typ: 'JWT' }
JWT.encode(payload, key, 'RS256', headers)
end
private
attr_reader :build, :ttl
delegate :project, :user, :pipeline, :runner, to: :build
delegate :source_ref, :source_ref_path, to: :pipeline
delegate :public_key, to: :key
delegate :namespace, to: :project
def reserved_claims
def default_payload
now = Time.now.to_i
{
super.merge(
jti: SecureRandom.uuid,
iss: Settings.gitlab.host,
iat: now,
nbf: now - NOT_BEFORE_TIME,
exp: now + (ttl || DEFAULT_EXPIRE_TIME),
sub: "job_#{build.id}"
}
)
end
def custom_claims
def predefined_claims
project_claims.merge(ci_claims)
end
def project_claims
::JSONWebToken::ProjectTokenClaims
.new(project: project, user: user)
.generate
end
def ci_claims
fields = {
namespace_id: namespace.id.to_s,
namespace_path: namespace.full_path,
project_id: project.id.to_s,
project_path: project.full_path,
user_id: user&.id.to_s,
user_login: user&.username,
user_email: user&.email,
user_access_level: user_access_level,
pipeline_id: pipeline.id.to_s,
pipeline_source: pipeline.source.to_s,
job_id: build.id.to_s,
@ -83,20 +73,6 @@ module Gitlab
fields
end
def key
@key ||= begin
key_data = Gitlab::CurrentSettings.ci_jwt_signing_key
raise NoSigningKeyError unless key_data
OpenSSL::PKey::RSA.new(key_data)
end
end
def kid
public_key.to_jwk[:kid]
end
def ref_type
::Ci::BuildRunnerPresenter.new(build).ref_type
end
@ -108,12 +84,6 @@ module Gitlab
def environment_protected?
false # Overridden in EE
end
def user_access_level
return unless user
project.team.human_max_access(user.id)&.downcase
end
end
end
end

lib/gitlab/ci/jwt_base.rb (new file, 33 lines)
View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
module Gitlab
module Ci
class JwtBase < ::JSONWebToken::Token
NoSigningKeyError = Class.new(StandardError)
def self.decode(token, key)
::JSONWebToken::RSAToken.decode(token, key)
end
def encoded
::JSONWebToken::RSAToken.encode(payload, key, kid)
end
private
def key
@key ||= begin
key_data = Gitlab::CurrentSettings.ci_jwt_signing_key
raise NoSigningKeyError unless key_data
OpenSSL::PKey::RSA.new(key_data)
end
end
def kid
key.public_key.to_jwk[:kid]
end
end
end
end
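
`JwtBase` wires the instance signing key and `kid` lookup into the generic `JSONWebToken::Token` plumbing, so concrete tokens only need to describe their claims. A rough sketch of a subclass, assuming the `predefined_claims` hook added to `token.rb` below; the class and claim names are made up:

```ruby
# Hypothetical subclass for illustration only.
module Gitlab
  module Ci
    class ExampleJwt < JwtBase
      def initialize(build)
        super()
        @build = build
      end

      private

      # Lowest-precedence claims: the custom payload and then the default payload
      # (jti, iat, nbf, exp) are merged on top by JSONWebToken::Token#payload.
      def predefined_claims
        { job_id: @build.id.to_s }
      end
    end
  end
end

# Encoding signs with ci_jwt_signing_key and raises NoSigningKeyError when it is unset:
#   Gitlab::Ci::ExampleJwt.new(build).encoded
```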

View File

@ -23,7 +23,7 @@ module Gitlab
attr_reader :aud, :target_audience
def reserved_claims
def default_payload
super.merge({
iss: Gitlab.config.gitlab.url,
sub: "project_path:#{project.full_path}:ref_type:#{ref_type}:ref:#{source_ref}",
@ -32,13 +32,14 @@ module Gitlab
}.compact)
end
def custom_claims
def predefined_claims
additional_custom_claims = {
runner_id: runner&.id,
runner_environment: runner_environment,
sha: pipeline.sha,
project_visibility: Gitlab::VisibilityLevel.string_level(project.visibility_level),
user_identities: user_identities
user_identities: user_identities,
target_audience: target_audience
}.compact
mapper = ClaimMapper.new(project_config, pipeline)

View File

@ -36,3 +36,7 @@
'{event_counters}_create_merge_request_note': USAGE_NOTE_CREATE_MERGEREQUEST
'{event_counters}_create_commit_from_web_ide': WEB_IDE_COMMITS_COUNT
'{event_counters}_create_merge_request_from_web_ide': WEB_IDE_MERGE_REQUESTS_COUNT
'{event_counters}_trigger_audit_event-filter:[label:delete_epic]': USAGE_AUDIT_EVENTS_DELETE_EPIC
'{event_counters}_trigger_audit_event-filter:[label:delete_issue]': USAGE_AUDIT_EVENTS_DELETE_ISSUE
'{event_counters}_trigger_audit_event-filter:[label:delete_merge_request]': USAGE_AUDIT_EVENTS_DELETE_MERGE_REQUEST
'{event_counters}_trigger_audit_event-filter:[label:delete_work_item]': USAGE_AUDIT_EVENTS_DELETE_WORK_ITEM

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
# rubocop:disable Gitlab/BoundedContexts -- Shared ownership
module JSONWebToken
class ProjectTokenClaims
def initialize(project:, user:)
@project = project
@user = user
end
def generate
{
namespace_id: namespace.id.to_s,
namespace_path: namespace.full_path,
project_id: project.id.to_s,
project_path: project.full_path,
user_id: user&.id.to_s,
user_login: user&.username,
user_email: user&.email,
user_access_level: user_access_level
}
end
private
attr_reader :project, :user
delegate :namespace, to: :project
def user_access_level
return unless user
project.team.human_max_access(user.id)&.downcase
end
end
end
# rubocop:enable Gitlab/BoundedContexts
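
`ProjectTokenClaims` extracts the project, namespace, and user claim block that `Gitlab::Ci::Jwt` previously built inline, so other token classes can reuse it. As in the `jwt.rb` change above, usage reduces to merging the generated hash into a token's claims:

```ruby
# project and user are whatever records are in scope for the token being built.
claims = ::JSONWebToken::ProjectTokenClaims
  .new(project: project, user: user)
  .generate
# => { namespace_id: "...", namespace_path: "...", project_id: "...", project_path: "...",
#      user_id: "...", user_login: "...", user_email: "...", user_access_level: "developer" }
```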

View File

@ -12,10 +12,11 @@ module JSONWebToken
end
def encoded
headers = {
kid: kid,
typ: 'JWT'
}
self.class.encode(payload, key, kid)
end
def self.encode(payload, key, kid)
headers = { kid: kid, typ: 'JWT' }
JWT.encode(payload, key, ALGORITHM, headers)
end

View File

@ -33,11 +33,17 @@ module JSONWebToken
end
def payload
@custom_payload.merge(default_payload)
predefined_claims
.merge(@custom_payload)
.merge(default_payload)
end
private
def predefined_claims
{}
end
def default_payload
{
jti: id,

View File

@ -13433,6 +13433,9 @@ msgstr ""
msgid "ComplianceFrameworks|Compliance framework created"
msgstr ""
msgid "ComplianceFrameworks|Compliance frameworks applied"
msgstr ""
msgid "ComplianceFrameworks|Compliance frameworks applied to %{projectName}"
msgstr ""
@ -13532,6 +13535,9 @@ msgstr ""
msgid "ComplianceFrameworks|There can be only one default framework."
msgstr ""
msgid "ComplianceFrameworks|This project is covered by the rules defined in the compliance framework. Only group owners and maintainers can view details of the compliance framework."
msgstr ""
msgid "ComplianceFrameworks|To link this policy and framework, edit the policy's scope."
msgstr ""
@ -30316,6 +30322,9 @@ msgstr ""
msgid "KubernetesDashboard|You can select an agent from a project's environment page."
msgstr ""
msgid "KubernetesLogs|Agent ID"
msgstr ""
msgid "KubernetesLogs|Container"
msgstr ""
@ -33455,6 +33464,9 @@ msgid_plural "MlModelRegistry|%d versions"
msgstr[0] ""
msgstr[1] ""
msgid "MlModelRegistry|A semver version like 1.0.0"
msgstr ""
msgid "MlModelRegistry|Add a model"
msgstr ""
@ -33569,9 +33581,6 @@ msgstr ""
msgid "MlModelRegistry|File \"%{name}\" is %{size}. It is larger than max allowed size of %{maxAllowedFileSize}"
msgstr ""
msgid "MlModelRegistry|For example 1.0.0"
msgstr ""
msgid "MlModelRegistry|For example 1.0.0. Must be a semantic version."
msgstr ""
@ -33590,6 +33599,9 @@ msgstr ""
msgid "MlModelRegistry|Latest version"
msgstr ""
msgid "MlModelRegistry|Latest version is %{latestVersion}"
msgstr ""
msgid "MlModelRegistry|Leave empty to skip version creation."
msgstr ""
@ -58843,6 +58855,12 @@ msgstr ""
msgid "Vulnerability|Warning: possible secrets detected"
msgstr ""
msgid "Vulnerability|You can also %{message}."
msgstr ""
msgid "Vulnerability|use AI by asking GitLab Duo Chat to explain this vulnerability and suggest a solution"
msgstr ""
msgid "WARNING:"
msgstr ""
@ -62254,9 +62272,6 @@ msgstr ""
msgid "ciReport|RPS"
msgstr ""
msgid "ciReport|Resolve with AI"
msgstr ""
msgid "ciReport|Resolve with merge request"
msgstr ""

View File

@ -58,6 +58,14 @@ module QA
has_element?('feature-status', feature: 'dependency_scanning_false_status')
end
def has_true_secret_detection_status?
has_element?('feature-status', feature: 'pre_receive_secret_detection_true_status')
end
def has_false_secret_detection_status?
has_element?('feature-status', feature: 'pre_receive_secret_detection_false_status')
end
def has_auto_devops_container?
has_element?('autodevops-container')
end
@ -76,6 +84,14 @@ module QA
go_to_tab('Compliance')
end
def enable_secret_detection
card = find_security_testing_card('Secret push protection')
within(card) do
# The GitLabUI toggle uses a Close Icon button
click_element('close-icon')
end
end
private
def go_to_tab(name)
@ -83,6 +99,14 @@ module QA
find('.nav-item', text: name).click
end
end
def find_security_testing_card(header_text)
cards = all_elements('security-testing-card', minimum: 1)
cards.each do |card|
title = card.find('h3').text
return card if title.eql? header_text
end
end
end
end
end

View File

@ -25,10 +25,11 @@ describe('kubernetes_logs', () => {
const kasTunnelUrl = mockKasTunnelUrl;
const projectPath = 'gitlab-org/test-project';
const gitlabAgentId = '1';
const configuration = createK8sAccessConfiguration({
kasTunnelUrl,
gitlabAgentId: '1',
gitlabAgentId,
});
let k8sLogsQueryMock;
let environmentDataMock;
@ -127,7 +128,7 @@ describe('kubernetes_logs', () => {
expect(findLoadingIcon().exists()).toBe(false);
});
it('should render error state', () => {
expect(findAlert().text()).toBe(error.message);
expect(findAlert().text()).toBe(`Error: ${error.message}`);
});
it('should render empty state', () => {
expect(findEmptyState().exists()).toBe(true);
@ -206,7 +207,7 @@ describe('kubernetes_logs', () => {
});
it('should provide correct header details to the logs viewer', () => {
expect(findLogsViewer().text()).toBe(
`Namespace: ${kubernetesNamespace}Pod: ${defaultProps.podName}`,
`Agent ID: ${gitlabAgentId}Namespace: ${kubernetesNamespace}Pod: ${defaultProps.podName}`,
);
});
});
@ -226,7 +227,7 @@ describe('kubernetes_logs', () => {
expect(findLoadingIcon().exists()).toBe(false);
});
it('should render error state', () => {
expect(findAlert().text()).toBe(errorMessage);
expect(findAlert().text()).toBe(`Error: ${errorMessage}`);
});
it('should render empty state', () => {
expect(findEmptyState().exists()).toBe(true);
@ -269,7 +270,7 @@ describe('kubernetes_logs', () => {
await waitForPromises();
expect(findLogsViewer().text()).toBe(
`Namespace: ${kubernetesNamespace}Pod: ${defaultProps.podName}Container: my-container`,
`Agent ID: ${gitlabAgentId}Namespace: ${kubernetesNamespace}Pod: ${defaultProps.podName}Container: my-container`,
);
});
});

View File

@ -1,8 +1,11 @@
import { EVENT_TIMEOUT, EVENT_PLAIN_TEXT, EVENT_ERROR } from '@gitlab/cluster-client';
import throttle from 'lodash/throttle';
import k8sLogsQuery from '~/environments/graphql/queries/k8s_logs.query.graphql';
import { buildWatchPath, k8sLogs } from '~/environments/graphql/resolvers/kubernetes/k8s_logs';
import { bootstrapWatcherMock } from '../watcher_mock_helper';
jest.mock('lodash/throttle', () => jest.fn());
describe('buildWatchPath', () => {
it('should return the correct path with namespace', () => {
const resource = 'my-pod';
@ -51,6 +54,7 @@ describe('k8sLogs', () => {
);
});
throttle.mockImplementation(jest.requireActual('lodash/throttle'));
const errorMessage = 'event error message';
const logContent = 'Plain text log data';
it.each([
@ -64,6 +68,13 @@ describe('k8sLogs', () => {
watchStream.triggerEvent(eventName, eventData);
if (eventName === EVENT_PLAIN_TEXT) {
expect(throttle).toHaveBeenCalledTimes(1);
expect(throttle).toHaveBeenCalledWith(expect.any(Function), 100);
}
jest.runOnlyPendingTimers();
expect(client.writeQuery).toHaveBeenCalledWith({
query: k8sLogsQuery,
variables: {

View File

@ -90,6 +90,7 @@ describe('ml/model_registry/apps/show_ml_model', () => {
mlflowTrackingUrl: 'path/to/tracking',
canWriteModelRegistry,
maxAllowedFileSize: 99999,
latestVersion: '',
},
stubs: { GlTab, DeleteModel, LoadOrErrorOrShow },
});

View File

@ -43,6 +43,7 @@ describe('ModelVersionCreate', () => {
const createWrapper = (
createResolver = jest.fn().mockResolvedValue(createModelVersionResponses.success),
provide = {},
) => {
const requestHandlers = [[createModelVersionMutation, createResolver]];
apolloProvider = createMockApollo(requestHandlers);
@ -51,6 +52,8 @@ describe('ModelVersionCreate', () => {
provide: {
projectPath: 'some/project',
maxAllowedFileSize: 99999,
latestVersion: null,
...provide,
},
directives: {
GlModal: createMockDirective('gl-modal'),
@ -96,7 +99,9 @@ describe('ModelVersionCreate', () => {
});
it('renders the version input label', () => {
expect(wrapper.find('[description="Enter a semver version."]').exists()).toBe(true);
expect(wrapper.findByTestId('versionDescriptionId').attributes().description).toBe(
'Enter a semver version.',
);
});
it('renders the description input', () => {
@ -142,6 +147,18 @@ describe('ModelVersionCreate', () => {
});
});
describe('Latest version available', () => {
beforeEach(() => {
createWrapper(undefined, { latestVersion: '1.2.3' });
});
it('renders the version input label', () => {
expect(wrapper.findByTestId('versionDescriptionId').attributes().description).toBe(
'Latest version is 1.2.3',
);
});
});
describe('Successful flow', () => {
beforeEach(async () => {
createWrapper();

View File

@ -38,7 +38,7 @@ RSpec.describe Projects::Ml::ModelRegistryHelper, feature_category: :mlops do
describe '#show_ml_model_data' do
let_it_be(:model) do
build_stubbed(:ml_models, project: project, name: 'cool_model')
build_stubbed(:ml_models, :with_latest_version_and_package, project: project, name: 'cool_model')
end
subject(:parsed) { Gitlab::Json.parse(helper.show_ml_model_data(model, user)) }
@ -51,7 +51,8 @@ RSpec.describe Projects::Ml::ModelRegistryHelper, feature_category: :mlops do
'maxAllowedFileSize' => 10737418240,
'mlflowTrackingUrl' => "http://localhost/api/v4/projects/#{project.id}/ml/mlflow/",
'modelId' => model.id,
'modelName' => 'cool_model'
'modelName' => 'cool_model',
'latestVersion' => '1.0.1-alpha+test'
})
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillPackagesMavenMetadataProjectId,
feature_category: :package_registry,
schema: 20240621120701 do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :packages_maven_metadata }
let(:backfill_column) { :project_id }
let(:backfill_via_table) { :packages_packages }
let(:backfill_via_column) { :project_id }
let(:backfill_via_foreign_key) { :package_id }
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillVulnerabilityFindingEvidencesProjectId,
feature_category: :vulnerability_management,
schema: 20240621115727 do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :vulnerability_finding_evidences }
let(:backfill_column) { :project_id }
let(:backfill_via_table) { :vulnerability_occurrences }
let(:backfill_via_column) { :project_id }
let(:backfill_via_foreign_key) { :vulnerability_occurrence_id }
end
end

View File

@ -0,0 +1,84 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Ci::JwtBase, :freeze_time, feature_category: :secrets_management do
let(:key) { OpenSSL::PKey::RSA.generate(3072) }
let(:key_data) { key.to_s }
let(:kid) { key.public_key.to_jwk[:kid] }
let(:headers) { { kid: kid, typ: 'JWT' } }
let(:now) { Time.zone.now.to_i }
let(:uuid) { SecureRandom.uuid }
let(:default_payload) do
{
jti: uuid,
iat: now,
nbf: now - described_class::DEFAULT_NOT_BEFORE_TIME,
exp: now + described_class::DEFAULT_EXPIRE_TIME
}
end
before do
stub_application_setting(ci_jwt_signing_key: key_data)
allow(SecureRandom).to receive(:uuid).and_return(uuid)
end
shared_examples 'raises NoSigningKeyError' do
it do
expect { subject }.to raise_error(described_class::NoSigningKeyError)
end
end
describe '.decode' do
let(:token) { described_class.new.encoded }
subject(:decoded) { described_class.decode(token, key) }
it 'decodes the JWT' do
expect(decoded[0]).to include(default_payload.stringify_keys)
expect(decoded[1]).to include({ 'alg' => ::JSONWebToken::RSAToken::ALGORITHM }.merge(headers.stringify_keys))
end
context 'when signing key is missing' do
let(:key_data) { nil }
it_behaves_like 'raises NoSigningKeyError'
end
end
describe '#encoded' do
subject(:encoded) { described_class.new.encoded }
it 'generates the JWT' do
expect(OpenSSL::PKey::RSA).to receive(:new).and_return(key)
expect(::JSONWebToken::RSAToken).to receive(:encode).with(default_payload, key, kid).and_call_original
expect(encoded).to be_a(String)
end
context 'when signing key is missing' do
let(:key_data) { nil }
it_behaves_like 'raises NoSigningKeyError'
end
end
describe '#payload' do
let(:jwt_token) { described_class.new }
subject(:payload) { jwt_token.payload }
before do
jwt_token['key'] = 'value'
end
it 'includes custom payload' do
expect(payload).to include('key' => 'value')
end
it 'includes default payload' do
expect(payload).to include(default_payload)
end
end
end

View File

@ -3,15 +3,13 @@
require 'spec_helper'
RSpec.describe Gitlab::ClickHouse, feature_category: :database do
subject { described_class }
context 'when ClickHouse is not configured' do
it 'returns false' do
expect(described_class).not_to be_configured
end
it { is_expected.not_to be_configured }
end
context 'when ClickHouse is configured', :click_house do
it 'returns false' do
expect(described_class).to be_configured
end
it { is_expected.to be_configured }
end
end

View File

@ -0,0 +1,50 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe JSONWebToken::ProjectTokenClaims, feature_category: :shared do
describe '#generate' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let(:namespace) { project.namespace }
subject(:project_claims) { described_class.new(project: project, user: user).generate }
it 'generates JWT project claims' do
expect(project_claims).to include(
namespace_id: namespace.id.to_s,
namespace_path: namespace.full_path,
project_id: project.id.to_s,
project_path: project.full_path,
user_id: user.id.to_s,
user_login: user.username,
user_email: user.email,
user_access_level: nil
)
end
context 'without user' do
let_it_be(:user) { nil }
it 'generates JWT project claims' do
expect(project_claims).to include(
user_id: '',
user_login: nil,
user_email: nil,
user_access_level: nil
)
end
end
context 'with a developer role' do
before_all do
project.add_developer(user)
end
it 'has correct access level' do
expect(project_claims[:user_access_level]).to eq('developer')
end
end
end
end

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true
RSpec.describe JSONWebToken::RSAToken do
require 'spec_helper'
RSpec.describe JSONWebToken::RSAToken, feature_category: :shared do
let_it_be(:rsa_key) do
OpenSSL::PKey::RSA.new <<-EOS.strip_heredoc
-----BEGIN RSA PRIVATE KEY-----
@ -50,6 +52,18 @@ RSpec.describe JSONWebToken::RSAToken do
end
end
describe '.encode' do
let(:payload) { { key: 'value' } }
let(:kid) { rsa_key.public_key.to_jwk[:kid] }
let(:headers) { { kid: kid, typ: 'JWT' } }
it 'generates the JWT' do
expect(JWT).to receive(:encode).with(payload, rsa_key, described_class::ALGORITHM, headers).and_call_original
expect(described_class.encode(payload, rsa_key, kid)).to be_a(String)
end
end
describe '.decode' do
let(:decoded_token) { described_class.decode(rsa_encoded, rsa_key) }
@ -74,8 +88,14 @@ RSpec.describe JSONWebToken::RSAToken do
let_it_be(:rsa_key_2) { OpenSSL::PKey::RSA.new 2048 }
before do
# rsa_key is returned for encoding, and rsa_key_2 for decoding
allow_any_instance_of(described_class).to receive(:key).and_return(rsa_key, rsa_key_2)
# rsa_key is used for encoding, and rsa_key_2 for decoding
allow(JWT)
.to receive(:decode)
.with(rsa_encoded, rsa_key, true, { algorithm: described_class::ALGORITHM })
.and_wrap_original do |original_method, *args|
args[1] = rsa_key_2
original_method.call(*args)
end
end
it "raises exception saying 'Signature verification failed" do

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe QueueBackfillVulnerabilityFindingEvidencesProjectId, feature_category: :vulnerability_management do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      migration.before -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      migration.after -> {
        expect(batched_migration).to have_scheduled_batched_migration(
          table_name: :vulnerability_finding_evidences,
          column_name: :id,
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE,
          gitlab_schema: :gitlab_main_cell,
          job_arguments: [
            :project_id,
            :vulnerability_occurrences,
            :project_id,
            :vulnerability_occurrence_id
          ]
        )
      }
    end
  end
end
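
For reference, a sketch of roughly what the migration under test looks like; the constant values, milestone, and `down` step are assumptions inferred from the expectations above, not the actual migration:

# Illustrative sketch only; interval and batch sizes are guesses, the job arguments mirror the spec.
class QueueBackfillVulnerabilityFindingEvidencesProjectId < Gitlab::Database::Migration[2.2]
  milestone '17.2' # assumed milestone

  restrict_gitlab_migration gitlab_schema: :gitlab_main_cell

  MIGRATION = 'BackfillVulnerabilityFindingEvidencesProjectId'
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 1000
  SUB_BATCH_SIZE = 100

  def up
    queue_batched_background_migration(
      MIGRATION,
      :vulnerability_finding_evidences,
      :id,
      :project_id,
      :vulnerability_occurrences,
      :project_id,
      :vulnerability_occurrence_id,
      job_interval: DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    delete_batched_background_migration(MIGRATION, :vulnerability_finding_evidences, :id, [
      :project_id,
      :vulnerability_occurrences,
      :project_id,
      :vulnerability_occurrence_id
    ])
  end
end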

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe QueueBackfillPackagesMavenMetadataProjectId, feature_category: :package_registry do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      migration.before -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      migration.after -> {
        expect(batched_migration).to have_scheduled_batched_migration(
          table_name: :packages_maven_metadata,
          column_name: :id,
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE,
          gitlab_schema: :gitlab_main_cell,
          job_arguments: [
            :project_id,
            :packages_packages,
            :project_id,
            :package_id
          ]
        )
      }
    end
  end
end

View File

@ -189,16 +189,6 @@ RSpec.describe BulkImport, type: :model, feature_category: :importers do
expect { import.send(:"#{event}!") }.not_to notify_owner_of_completion
end
end
context 'when notify_owners_of_finished_direct_transfer flag is disabled' do
before do
stub_feature_flags(notify_owners_of_finished_direct_transfer: false)
end
it { expect { import.finish! }.not_to notify_owner_of_completion }
it { expect { import.fail_op! }.not_to notify_owner_of_completion }
it { expect { import.cleanup_stale! }.not_to notify_owner_of_completion }
end
end
describe '#parent_group_entity' do

View File

@ -11,9 +11,9 @@ RSpec.describe 'Getting starredProjects of the user', feature_category: :groups_
let(:user_params) { { username: user.username } }
let_it_be(:project_a) { create(:project, :public) }
let_it_be(:project_b) { create(:project, :private) }
let_it_be(:project_c) { create(:project, :private) }
let_it_be(:project_a) { create(:project, :public, name: 'ProjectA', path: 'Project-A', star_count: 30) }
let_it_be(:project_b) { create(:project, :private, name: 'ProjectB', path: 'Project-B', star_count: 20) }
let_it_be(:project_c) { create(:project, :private, name: 'ProjectC', path: 'Project-C', star_count: 10) }
let_it_be(:user, reload: true) { create(:user) }
let(:user_fields) { 'starredProjects { nodes { id } }' }
@ -100,5 +100,183 @@ RSpec.describe 'Getting starredProjects of the user', feature_category: :groups_
)
end
end
context 'when sort parameter is provided' do
let(:user_fields_with_sort) { "starredProjects(sort: #{sort_parameter}) { nodes { id name } }" }
let(:query_with_sort) { graphql_query_for(:user, user_params, user_fields_with_sort) }
let(:current_user) { user }
let(:path) { %i[user starred_projects nodes] }
context 'when sort parameter provided is invalid' do
let(:sort_parameter) { 'does_not_exist' }
it 'raises an exception' do
post_graphql(query_with_sort, current_user: current_user)
expect(graphql_errors).to include(
a_hash_including(
'message' => "Argument 'sort' on Field 'starredProjects' has an invalid value (#{sort_parameter}). " \
"Expected type 'ProjectSort'."
)
)
end
end
context 'when sort parameter for id is provided' do
context 'when ID_ASC is provided' do
let(:sort_parameter) { 'ID_ASC' }
it 'sorts starred projects by id in ascending order' do
post_graphql(query_with_sort, current_user: current_user)
expect(graphql_data_at(*path).pluck('id')).to eq([
project_a.to_global_id.to_s,
project_b.to_global_id.to_s,
project_c.to_global_id.to_s
])
end
end
context 'when ID_DESC is provided' do
let(:sort_parameter) { 'ID_DESC' }
it 'sorts starred projects by id in descending order' do
post_graphql(query_with_sort, current_user: current_user)
expect(graphql_data_at(*path).pluck('id')).to eq([
project_c.to_global_id.to_s,
project_b.to_global_id.to_s,
project_a.to_global_id.to_s
])
end
end
end
context 'when sort parameter for latest activity is provided' do
before do
project_a.update!(last_activity_at: 2.hours.from_now)
project_b.update!(last_activity_at: 3.hours.from_now)
project_c.update!(last_activity_at: 4.hours.from_now)
end
context 'when LATEST_ACTIVITY_ASC is provided' do
let(:sort_parameter) { 'LATEST_ACTIVITY_ASC' }
it 'sorts starred projects by latest activity in ascending order' do
post_graphql(query_with_sort, current_user: current_user)
expect(graphql_data_at(*path).pluck('id')).to eq([
project_a.to_global_id.to_s,
project_b.to_global_id.to_s,
project_c.to_global_id.to_s
])
end
end
context 'when LATEST_ACTIVITY_DESC is provided' do
let(:sort_parameter) { 'LATEST_ACTIVITY_DESC' }
it 'sorts starred projects by latest activity in descending order' do
post_graphql(query_with_sort, current_user: current_user)
expect(graphql_data_at(*path).pluck('id')).to eq([
project_c.to_global_id.to_s,
project_b.to_global_id.to_s,
project_a.to_global_id.to_s
])
end
end
end
context 'when sort parameter for name is provided' do
context 'when NAME_ASC is provided' do
let(:sort_parameter) { 'NAME_ASC' }
it 'sorts starred projects by name in ascending order' do
post_graphql(query_with_sort, current_user: current_user)
expect(graphql_data_at(*path).pluck('id')).to eq([
project_a.to_global_id.to_s,
project_b.to_global_id.to_s,
project_c.to_global_id.to_s
])
end
end
context 'when NAME_DESC is provided' do
let(:sort_parameter) { 'NAME_DESC' }
it 'sorts starred projects by name in descending order' do
post_graphql(query_with_sort, current_user: current_user)
expect(graphql_data_at(*path).pluck('id')).to eq([
project_c.to_global_id.to_s,
project_b.to_global_id.to_s,
project_a.to_global_id.to_s
])
end
end
end
context 'when sort parameter for path is provided' do
context 'when PATH_ASC is provided' do
let(:sort_parameter) { 'PATH_ASC' }
it 'sorts starred projects by path in ascending order' do
post_graphql(query_with_sort, current_user: current_user)
expect(graphql_data_at(*path).pluck('id')).to eq([
project_a.to_global_id.to_s,
project_b.to_global_id.to_s,
project_c.to_global_id.to_s
])
end
end
context 'when PATH_DESC is provided' do
let(:sort_parameter) { 'PATH_DESC' }
it 'sorts starred projects by path in descending order' do
post_graphql(query_with_sort, current_user: current_user)
expect(graphql_data_at(*path).pluck('id')).to eq([
project_c.to_global_id.to_s,
project_b.to_global_id.to_s,
project_a.to_global_id.to_s
])
end
end
end
context 'when sort parameter for stars is provided' do
context 'when STARS_ASC is provided' do
let(:sort_parameter) { 'STARS_ASC' }
it 'sorts starred projects by stars in ascending order' do
post_graphql(query_with_sort, current_user: current_user)
expect(graphql_data_at(*path).pluck('id')).to eq([
project_c.to_global_id.to_s,
project_b.to_global_id.to_s,
project_a.to_global_id.to_s
])
end
end
context 'when STARS_DESC is provided' do
let(:sort_parameter) { 'STARS_DESC' }
it 'sorts starred projects by stars in descending order' do
post_graphql(query_with_sort, current_user: current_user)
expect(graphql_data_at(*path).pluck('id')).to eq([
project_a.to_global_id.to_s,
project_b.to_global_id.to_s,
project_c.to_global_id.to_s
])
end
end
end
end
end
end
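
The sort contexts above boil down to requests of this shape, built with the same helpers the spec already uses (the sort value here is just one example):

# Illustrative only: the kind of query a sort context posts.
query = graphql_query_for(
  :user,
  { username: user.username },
  'starredProjects(sort: STARS_DESC) { nodes { id name } }'
)

post_graphql(query, current_user: user)
graphql_data_at(:user, :starred_projects, :nodes).pluck('name')
# => ["ProjectA", "ProjectB", "ProjectC"] (highest star_count first)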

View File

@ -4,10 +4,6 @@
# See https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#test-order.
#
---
- './ee/spec/components/billing/plan_component_spec.rb'
- './ee/spec/components/namespaces/free_user_cap/usage_quota_alert_component_spec.rb'
- './ee/spec/components/namespaces/free_user_cap/usage_quota_trial_alert_component_spec.rb'
- './ee/spec/components/namespaces/storage/limit_alert_component_spec.rb'
- './ee/spec/config/metrics/every_metric_definition_spec.rb'
- './ee/spec/controllers/admin/applications_controller_spec.rb'
- './ee/spec/controllers/admin/application_settings_controller_spec.rb'
@ -2781,7 +2777,7 @@
- './ee/spec/views/admin/users/index.html.haml_spec.rb'
- './ee/spec/views/admin/users/show.html.haml_spec.rb'
- './ee/spec/views/clusters/clusters/show.html.haml_spec.rb'
- './ee/spec/views/compliance_management/compliance_framework/_compliance_framework_badge.html.haml_spec.rb'
- './ee/spec/views/compliance_management/compliance_framework/_compliance_frameworks_info.html.haml_spec.rb'
- './ee/spec/views/devise/sessions/new.html.haml_spec.rb'
- './ee/spec/views/groups/billings/index.html.haml_spec.rb'
- './ee/spec/views/groups/edit.html.haml_spec.rb'
@ -2965,22 +2961,6 @@
- './spec/channels/application_cable/connection_spec.rb'
- './spec/commands/metrics_server/metrics_server_spec.rb'
- './spec/commands/sidekiq_cluster/cli_spec.rb'
- './spec/components/diffs/overflow_warning_component_spec.rb'
- './spec/components/diffs/stats_component_spec.rb'
- './spec/components/layouts/horizontal_section_component_spec.rb'
- './spec/components/pajamas/alert_component_spec.rb'
- './spec/components/pajamas/avatar_component_spec.rb'
- './spec/components/pajamas/banner_component_spec.rb'
- './spec/components/pajamas/button_component_spec.rb'
- './spec/components/pajamas/card_component_spec.rb'
- './spec/components/pajamas/checkbox_component_spec.rb'
- './spec/components/pajamas/checkbox_tag_component_spec.rb'
- './spec/components/pajamas/component_spec.rb'
- './spec/components/pajamas/concerns/checkbox_radio_label_with_help_text_spec.rb'
- './spec/components/pajamas/concerns/checkbox_radio_options_spec.rb'
- './spec/components/pajamas/radio_component_spec.rb'
- './spec/components/pajamas/spinner_component_spec.rb'
- './spec/components/pajamas/toggle_component_spec.rb'
- './spec/config/application_spec.rb'
- './spec/config/inject_enterprise_edition_module_spec.rb'
- './spec/config/mail_room_spec.rb'

View File

@ -11,29 +11,41 @@
# - label
# - property
# - value
# - event_attribute_overrides
RSpec.shared_examples 'internal event tracking' do
let(:all_metrics) do
Gitlab::Usage::MetricDefinition.all.filter_map do |definition|
definition.key if definition.events.include?(event)
matching_rules = definition.event_selection_rules.map do |event_selection_rule|
next unless event_selection_rule.name == event
event_selection_rule.filter.all? do |property_name, value|
try(property_name) == value
end
end
definition.key if matching_rules.flatten.any?
end
end
it 'logs to Snowplow, Redis, and product analytics tooling', :clean_gitlab_redis_shared_state, :aggregate_failures do
expected_attributes = {
project: try(:project),
user: try(:user),
namespace: try(:namespace) || try(:project)&.namespace,
category: try(:category) || 'InternalEventTracking',
feature_enabled_by_namespace_ids: try(:feature_enabled_by_namespace_ids),
**{
label: try(:label),
property: try(:property),
value: try(:value)
}.compact
}.merge(try(:event_attribute_overrides) || {})
expect { subject }
.to trigger_internal_events(event)
.with(
project: try(:project),
user: try(:user),
namespace: try(:namespace) || try(:project)&.namespace,
category: try(:category) || 'InternalEventTracking',
feature_enabled_by_namespace_ids: try(:feature_enabled_by_namespace_ids),
**{
label: try(:label),
property: try(:property),
value: try(:value)
}.compact
).and increment_usage_metrics(*all_metrics)
.with(expected_attributes)
.and increment_usage_metrics(*all_metrics)
end
end
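
A consuming spec might opt into the new `event_attribute_overrides` hook roughly like this; the event name, service call, and override value are placeholders:

# Illustrative usage of the updated shared example.
it_behaves_like 'internal event tracking' do
  let(:event)   { 'perform_search' }  # placeholder event name
  let(:project) { create(:project) }
  let(:user)    { create(:user) }

  # Override one expected attribute without redefining the rest.
  let(:event_attribute_overrides) { { label: 'custom_label' } }

  subject(:track_event) { described_class.new(project, user).execute } # placeholder call
end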