Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-07-15 18:18:58 +00:00
parent 1267f69d16
commit 4a50c598f3
79 changed files with 1315 additions and 381 deletions

View File

@ -1,7 +1,7 @@
include:
- template: Jobs/SAST.gitlab-ci.yml
- template: Jobs/Secret-Detection.gitlab-ci.yml
- template: Jobs/Dependency-Scanning.gitlab-ci.yml
- template: Jobs/Dependency-Scanning.latest.gitlab-ci.yml
.sast-analyzer:
# We need to re-`extends` from `sast` as the `extends` here overrides the one from the template.
@ -45,7 +45,8 @@ secret_detection:
needs: []
variables:
DS_EXCLUDED_PATHS: "qa/qa/ee/fixtures/secure_premade_reports, spec, ee/spec, tmp, storybook" # GitLab-specific
DS_EXCLUDED_ANALYZERS: "gemnasium-maven"
DS_ENFORCE_NEW_ANALYZER: "true"
DS_STATIC_REACHABILITY_ENABLED: "false"
artifacts:
paths:
- gl-dependency-scanning-report.json # GitLab-specific
@ -55,13 +56,10 @@ secret_detection:
# Remove sboms in test fixtures so that they are not ingested and scanned.
- find spec ee/spec qa -path '**/fixtures/**/gl-sbom*.cdx.json' -delete
gemnasium-dependency_scanning:
variables:
DS_REMEDIATE: "false"
rules: !reference [".reports:rules:gemnasium-dependency_scanning", rules]
gemnasium-python-dependency_scanning:
rules: !reference [".reports:rules:gemnasium-python-dependency_scanning", rules]
dependency-scanning:
extends:
- .ds-analyzer
- .reports:rules:dependency_scanning
# Analyze dependencies for malicious behavior
# See https://gitlab.com/gitlab-com/gl-security/security-research/package-hunter

View File

@ -2840,7 +2840,7 @@
- <<: *if-default-refs
changes: *code-backstage-qa-patterns
.reports:rules:gemnasium-dependency_scanning:
.reports:rules:dependency_scanning:
rules:
- <<: *if-merge-request-labels-pipeline-expedite
when: never
@ -2851,17 +2851,6 @@
- <<: *if-default-refs
changes: *dependency-patterns
.reports:rules:gemnasium-python-dependency_scanning:
rules:
- <<: *if-merge-request-labels-pipeline-expedite
when: never
- if: '$DEPENDENCY_SCANNING_DISABLED || $GITLAB_FEATURES !~ /\bdependency_scanning\b/ || $DS_EXCLUDED_ANALYZERS =~ /gemnasium-python/'
when: never
# Run Dependency Scanning on master until https://gitlab.com/gitlab-org/gitlab/-/issues/504908#note_2218591981 is resolved
- <<: *if-default-branch-refs
- <<: *if-default-refs
changes: *python-patterns
.reports:rules:test-dast:
rules:
- <<: *if-merge-request-labels-pipeline-expedite

View File

@ -97,9 +97,30 @@ export-predictive-test-metrics:
mkdir -p "$GLCI_CRYSTALBALL_MAPPING_DIR/coverage"
retrieve_tests_mapping "$RSPEC_PACKED_TESTS_MAPPING_ALT_PATH" "$GLCI_CRYSTALBALL_MAPPING_DIR/coverage/mapping.json"
script:
- tooling/bin/predictive_tests --export-selected-test-metrics
- tooling/bin/predictive_tests --export-predictive-backend-metrics
artifacts:
expire_in: 7d
paths:
- $GLCI_CRYSTALBALL_MAPPING_DIR
- $GLCI_PREDICTIVE_TEST_METRICS_OUTPUT_DIR
export-predictive-test-metrics-frontend:
extends:
- export-predictive-test-metrics
- .yarn-cache
- .with-ci-node-image
dependencies:
- detect-tests
- graphql-schema-dump
variables:
GLCI_JEST_FAILED_TESTS_FILE: "${GLCI_PREDICTIVE_TEST_METRICS_OUTPUT_DIR}/jest_failed_tests.txt"
before_script:
- !reference [export-predictive-test-metrics, before_script]
- yarn_install_script
script:
- tooling/bin/predictive_tests --export-predictive-frontend-metrics
artifacts:
expire_in: 7d
paths:
- $GLCI_PREDICTIVE_TEST_METRICS_OUTPUT_DIR
- $JEST_MATCHING_TEST_FILES_PATH

View File

@ -159,7 +159,7 @@
{"name":"extended-markdown-filter","version":"0.7.0","platform":"ruby","checksum":"c8eeef7409fbae18c6b407cd3e4eeb5d25c35cb08fe1ac06f375df3db2d4f138"},
{"name":"factory_bot","version":"6.5.0","platform":"ruby","checksum":"6374b3a3593b8077ee9856d553d2e84d75b47b912cc24eafea4062f9363d2261"},
{"name":"factory_bot_rails","version":"6.5.0","platform":"ruby","checksum":"4a7b61635424a57cc60412a18b72b9dcfb02fabfce2c930447a01dce8b37c0a2"},
{"name":"faraday","version":"2.13.1","platform":"ruby","checksum":"cc531eb5467e7d74d4517630fa96f1a7003647cbf20a9a3e067d098941217b75"},
{"name":"faraday","version":"2.13.2","platform":"ruby","checksum":"5c19762e3bbe78e61d8007c5119f2968373c5296d6c6d6aa05b6f9cec34f2a1a"},
{"name":"faraday-follow_redirects","version":"0.3.0","platform":"ruby","checksum":"d92d975635e2c7fe525dd494fcd4b9bb7f0a4a0ec0d5f4c15c729530fdb807f9"},
{"name":"faraday-http-cache","version":"2.5.0","platform":"ruby","checksum":"64b7366d66e508e1c3dd855ebb20ce9da429330e412a23d9ebbc0a7a7b227463"},
{"name":"faraday-multipart","version":"1.1.1","platform":"ruby","checksum":"77a18ff40149030fd1aef55bb4fc7a67ce46419a8a3fcd010e28c2526e8d8903"},
@ -457,7 +457,7 @@
{"name":"omniauth-google-oauth2","version":"1.1.1","platform":"ruby","checksum":"4496f126e84eaf760f9c6a5c69e5e7511f98092d7f25ad79fd2c0ae5e09b5039"},
{"name":"omniauth-oauth2","version":"1.8.0","platform":"ruby","checksum":"b2f8e9559cc7e2d4efba57607691d6d2b634b879fc5b5b6ccfefa3da85089e78"},
{"name":"omniauth-oauth2-generic","version":"0.2.8","platform":"ruby","checksum":"ce6e8539019d5ebf2f48867072b9f248f148bb4cbe7166dee655865abfae7613"},
{"name":"omniauth-saml","version":"2.2.3","platform":"ruby","checksum":"d4e0dbdcb304e4bb74410eb75deaa1873b08a42afa7634c9c3171be0b34751b0"},
{"name":"omniauth-saml","version":"2.2.4","platform":"ruby","checksum":"5acc45aee728ecc5c8f9f02b56c424cb359b62d9a25fb8039f15432c1a61f6eb"},
{"name":"omniauth-shibboleth-redux","version":"2.0.0","platform":"ruby","checksum":"e9b353fd103405fcc8549e8510b9cad857acf0b286d764fac5dba8a93ab8ffe1"},
{"name":"omniauth_openid_connect","version":"0.8.0","platform":"ruby","checksum":"1f2f3890386e2a742221cee0d2e903b78d874e6fab9ea3bfa31c1462f4793d25"},
{"name":"open4","version":"1.3.4","platform":"ruby","checksum":"a1df037310624ecc1ea1d81264b11c83e96d0c3c1c6043108d37d396dcd0f4b1"},

View File

@ -612,7 +612,7 @@ GEM
factory_bot_rails (6.5.0)
factory_bot (~> 6.5)
railties (>= 6.1.0)
faraday (2.13.1)
faraday (2.13.2)
faraday-net_http (>= 2.0, < 3.5)
json
logger
@ -1288,7 +1288,7 @@ GEM
omniauth-oauth2-generic (0.2.8)
omniauth-oauth2 (~> 1.0)
rake
omniauth-saml (2.2.3)
omniauth-saml (2.2.4)
omniauth (~> 2.1)
ruby-saml (~> 1.18)
omniauth-shibboleth-redux (2.0.0)

View File

@ -159,7 +159,7 @@
{"name":"extended-markdown-filter","version":"0.7.0","platform":"ruby","checksum":"c8eeef7409fbae18c6b407cd3e4eeb5d25c35cb08fe1ac06f375df3db2d4f138"},
{"name":"factory_bot","version":"6.5.0","platform":"ruby","checksum":"6374b3a3593b8077ee9856d553d2e84d75b47b912cc24eafea4062f9363d2261"},
{"name":"factory_bot_rails","version":"6.5.0","platform":"ruby","checksum":"4a7b61635424a57cc60412a18b72b9dcfb02fabfce2c930447a01dce8b37c0a2"},
{"name":"faraday","version":"2.13.1","platform":"ruby","checksum":"cc531eb5467e7d74d4517630fa96f1a7003647cbf20a9a3e067d098941217b75"},
{"name":"faraday","version":"2.13.2","platform":"ruby","checksum":"5c19762e3bbe78e61d8007c5119f2968373c5296d6c6d6aa05b6f9cec34f2a1a"},
{"name":"faraday-follow_redirects","version":"0.3.0","platform":"ruby","checksum":"d92d975635e2c7fe525dd494fcd4b9bb7f0a4a0ec0d5f4c15c729530fdb807f9"},
{"name":"faraday-http-cache","version":"2.5.0","platform":"ruby","checksum":"64b7366d66e508e1c3dd855ebb20ce9da429330e412a23d9ebbc0a7a7b227463"},
{"name":"faraday-multipart","version":"1.1.1","platform":"ruby","checksum":"77a18ff40149030fd1aef55bb4fc7a67ce46419a8a3fcd010e28c2526e8d8903"},
@ -457,7 +457,7 @@
{"name":"omniauth-google-oauth2","version":"1.1.1","platform":"ruby","checksum":"4496f126e84eaf760f9c6a5c69e5e7511f98092d7f25ad79fd2c0ae5e09b5039"},
{"name":"omniauth-oauth2","version":"1.8.0","platform":"ruby","checksum":"b2f8e9559cc7e2d4efba57607691d6d2b634b879fc5b5b6ccfefa3da85089e78"},
{"name":"omniauth-oauth2-generic","version":"0.2.8","platform":"ruby","checksum":"ce6e8539019d5ebf2f48867072b9f248f148bb4cbe7166dee655865abfae7613"},
{"name":"omniauth-saml","version":"2.2.3","platform":"ruby","checksum":"d4e0dbdcb304e4bb74410eb75deaa1873b08a42afa7634c9c3171be0b34751b0"},
{"name":"omniauth-saml","version":"2.2.4","platform":"ruby","checksum":"5acc45aee728ecc5c8f9f02b56c424cb359b62d9a25fb8039f15432c1a61f6eb"},
{"name":"omniauth-shibboleth-redux","version":"2.0.0","platform":"ruby","checksum":"e9b353fd103405fcc8549e8510b9cad857acf0b286d764fac5dba8a93ab8ffe1"},
{"name":"omniauth_openid_connect","version":"0.8.0","platform":"ruby","checksum":"1f2f3890386e2a742221cee0d2e903b78d874e6fab9ea3bfa31c1462f4793d25"},
{"name":"open4","version":"1.3.4","platform":"ruby","checksum":"a1df037310624ecc1ea1d81264b11c83e96d0c3c1c6043108d37d396dcd0f4b1"},

View File

@ -606,7 +606,7 @@ GEM
factory_bot_rails (6.5.0)
factory_bot (~> 6.5)
railties (>= 6.1.0)
faraday (2.13.1)
faraday (2.13.2)
faraday-net_http (>= 2.0, < 3.5)
json
logger
@ -1282,7 +1282,7 @@ GEM
omniauth-oauth2-generic (0.2.8)
omniauth-oauth2 (~> 1.0)
rake
omniauth-saml (2.2.3)
omniauth-saml (2.2.4)
omniauth (~> 2.1)
ruby-saml (~> 1.18)
omniauth-shibboleth-redux (2.0.0)

View File

@ -56,7 +56,7 @@ export default {
<template>
<visibility-change-detector class="gl-px-4" @visible="reload">
<h4>{{ __('Activity') }}</h4>
<h4 class="gl-heading-4 gl-my-4">{{ __('Activity') }}</h4>
<gl-skeleton-loader v-if="isLoading" :width="200">
<rect width="5" height="3" rx="1" y="2" />
<rect width="160" height="3" rx="1" x="8" y="2" />

View File

@ -0,0 +1,24 @@
<script>
import { __, sprintf } from '~/locale';
export default {
i18n: {
welcome: __('Welcome %{name},'),
},
computed: {
userFirstName() {
return gon.current_user_fullname?.trim().split(' ')[0] || null;
},
welcomeMessage() {
return sprintf(this.$options.i18n.welcome, { name: this.userFirstName });
},
},
};
</script>
<template>
<header class="gl-my-6">
<p v-if="userFirstName" class="gl-heading-4 gl-mb-2">{{ welcomeMessage }}</p>
<h1 class="gl-heading-1 gl-m-0">{{ __("Today's highlights") }}</h1>
</header>
</template>

View File

@ -1,5 +1,6 @@
<script>
import { GlAlert } from '@gitlab/ui';
import GreetingHeader from './greeting_header.vue';
import MergeRequestsWidget from './merge_requests_widget.vue';
import WorkItemsWidget from './work_items_widget.vue';
import ActivityWidget from './activity_widget.vue';
@ -9,6 +10,7 @@ import TodosWidget from './todos_widget.vue';
export default {
components: {
GlAlert,
GreetingHeader,
MergeRequestsWidget,
WorkItemsWidget,
ActivityWidget,
@ -44,7 +46,7 @@ export default {
<template>
<div>
<h1 class="gl-mb-6">{{ __("Today's highlights") }}</h1>
<greeting-header />
<div class="gl-grid gl-grid-cols-1 gl-gap-6 md:gl-grid-cols-3">
<div class="gl-flex gl-flex-col gl-gap-6 md:gl-col-span-2">
<gl-alert

View File

@ -71,7 +71,7 @@ export default {
<template>
<visibility-change-detector class="gl-border gl-rounded-lg gl-px-4 gl-py-1" @visible="reload">
<h4 class="gl-flex gl-items-center gl-gap-2">
<h4 class="gl-heading-4 gl-my-4 gl-flex gl-items-center gl-gap-2">
<gl-icon name="merge-request" :size="16" />{{ __('Merge requests') }}
</h4>
<ul class="gl-list-none gl-p-0">

View File

@ -53,7 +53,7 @@ export default {
<template>
<visibility-change-detector @visible="reload">
<h4 class="gl-mt-0">{{ __('Recently viewed') }}</h4>
<h4 class="gl-heading-4 gl-mb-4 gl-mt-0">{{ __('Recently viewed') }}</h4>
<div v-if="error">
<span>{{ __('Something went wrong.') }}</span>

View File

@ -68,7 +68,7 @@ export default {
<template>
<visibility-change-detector class="gl-border gl-rounded-lg gl-px-4 gl-py-1" @visible="reload">
<h4 class="gl-flex gl-items-center gl-gap-2">
<h4 class="gl-heading-4 gl-my-4 gl-flex gl-items-center gl-gap-2">
<gl-icon name="issues" :size="16" />{{ __('Issues') }}
</h4>
<ul class="gl-list-none gl-p-0">

View File

@ -1,40 +0,0 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import { s__ } from '~/locale';
import OrganizationSelect from '~/vue_shared/components/entity_select/organization_select.vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
export const initHomeOrganizationSetting = () => {
const el = document.getElementById('js-home-organization-setting');
if (!el) return false;
const {
dataset: { appData },
} = el;
const { initialSelection } = convertObjectPropsToCamelCase(JSON.parse(appData));
const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(),
});
return new Vue({
el,
name: 'HomeOrganizationSetting',
apolloProvider,
render(createElement) {
return createElement(OrganizationSelect, {
props: {
block: true,
label: s__('Organization|Home organization'),
description: s__('Organization|Choose what organization you want to see by default.'),
inputName: 'user[home_organization_id]',
inputId: 'user_home_organization_id',
initialSelection,
toggleClass: 'gl-form-input-xl',
},
});
},
});
};

View File

@ -1,7 +1,5 @@
import initProfilePreferences from '~/profile/preferences/profile_preferences_bundle';
import initProfilePreferencesDiffsColors from '~/profile/preferences/profile_preferences_diffs_colors';
import { initHomeOrganizationSetting } from '~/organizations/profile/preferences';
initProfilePreferences();
initProfilePreferencesDiffsColors();
initHomeOrganizationSetting();

View File

@ -44,7 +44,6 @@ class Profiles::PreferencesController < Profiles::ApplicationController
:dark_color_scheme_id,
:diffs_deletion_color,
:diffs_addition_color,
:home_organization_id,
:layout,
:dashboard,
:project_view,

View File

@ -231,7 +231,7 @@ class ProjectsController < Projects::ApplicationController
end
def archive
::Projects::UpdateService.new(@project, current_user, archived: true).execute
::Projects::ArchiveService.new(project: @project, current_user: current_user).execute
respond_to do |format|
format.html { redirect_to project_path(@project) }
@ -239,7 +239,7 @@ class ProjectsController < Projects::ApplicationController
end
def unarchive
::Projects::UpdateService.new(@project, current_user, archived: false).execute
::Projects::UnarchiveService.new(project: @project, current_user: current_user).execute
respond_to do |format|
format.html { redirect_to project_path(@project) }

View File

@ -47,12 +47,6 @@ module Organizations
}.to_json
end
def home_organization_setting_app_data
{
initial_selection: current_user.home_organization_id
}.to_json
end
def organization_groups_new_app_data(organization)
{
default_visibility_level: default_group_visibility

View File

@ -19,7 +19,6 @@ module Ci
accepts_nested_attributes_for :metadata
delegate :timeout, to: :metadata, prefix: true, allow_nil: true
delegate :interruptible, to: :metadata, prefix: false, allow_nil: true
delegate :id_tokens, to: :metadata, allow_nil: true
delegate :exit_code, to: :metadata, allow_nil: true
@ -129,6 +128,12 @@ module Ci
metadata&.debug_trace_enabled?
end
def timeout_value
# TODO: need to add the timeout to p_ci_builds later
# See https://gitlab.com/gitlab-org/gitlab/-/work_items/538183#note_2542611159
try(:timeout) || metadata&.timeout
end
private
def read_metadata_attribute(legacy_key, metadata_key, default_value = nil)

View File

@ -3,6 +3,10 @@
# Add capabilities to increment a numeric model attribute efficiently by
# using Redis and flushing the increments asynchronously to the database
# after a period of time (10 minutes).
#
# The ActiveRecord model is required to either have a project_id or a
# group_id foreign key.
#
# When an attribute is incremented by a value, the increment is added
# to a Redis key. Then, FlushCounterIncrementsWorker will execute
# `commit_increment!` which removes increments from Redis for a
@ -48,6 +52,18 @@
# To increment the counter we can use the method:
# increment_amount(:commit_count, 3)
#
# Bumping counters relies on the Rails .update_counters class method. As such, we can pass a :touch option
# that can accept true, timestamp columns are updated, or attribute names, which will be updated along with
# updated_at/on
#
# @example:
#
# class ProjectStatistics
# include CounterAttribute
#
# counter_attribute :my_counter, touch: :my_counter_updated_at
# end
#
# This method would determine whether it would increment the counter using Redis,
# or fallback to legacy increment on ActiveRecord counters.
#
@ -66,12 +82,12 @@ module CounterAttribute
include Gitlab::Utils::StrongMemoize
class_methods do
def counter_attribute(attribute, if: nil, returns_current: false)
counter_attributes << {
attribute: attribute,
def counter_attribute(attribute, if: nil, returns_current: false, touch: nil)
counter_attributes[attribute] = {
if_proc: binding.local_variable_get(:if), # can't read `if` directly
returns_current: returns_current
}
returns_current: returns_current,
touch: touch
}.compact
if returns_current
define_method(attribute) do
@ -85,7 +101,7 @@ module CounterAttribute
end
def counter_attributes
@counter_attributes ||= []
@counter_attributes ||= {}.with_indifferent_access
end
def after_commit_callbacks
@ -99,7 +115,7 @@ module CounterAttribute
end
def counter_attribute_enabled?(attribute)
counter_attribute = self.class.counter_attributes.find { |registered| registered[:attribute] == attribute }
counter_attribute = self.class.counter_attributes[attribute]
return false unless counter_attribute
return true unless counter_attribute[:if_proc]
@ -142,6 +158,11 @@ module CounterAttribute
end
def update_counters(increments)
touch = increments.each_key.flat_map do |attribute|
self.class.counter_attributes.dig(attribute, :touch)
end
increments[:touch] = touch if touch.any?
self.class.update_counters(id, increments)
end
@ -170,6 +191,10 @@ module CounterAttribute
end
end
def counters_key_prefix
with_parent { |type, id| "#{type}:{#{id}}" }
end
private
def build_counter_for(attribute)
@ -189,7 +214,7 @@ module CounterAttribute
end
def database_lock_key
"project:{#{project_id}}:#{self.class}:#{id}"
"#{counters_key_prefix}:#{self.class}:#{id}"
end
# This method uses a lease to monitor access to the model row.
@ -208,7 +233,7 @@ module CounterAttribute
message: 'Acquiring lease for project statistics update',
model: self.class.name,
model_id: id,
project_id: project.id,
**parent_log_fields,
**log_fields,
**Gitlab::ApplicationContext.current
)
@ -221,7 +246,7 @@ module CounterAttribute
message: 'Concurrent project statistics update detected',
model: self.class.name,
model_id: id,
project_id: project.id,
**parent_log_fields,
**log_fields,
**Gitlab::ApplicationContext.current
)
@ -233,11 +258,11 @@ module CounterAttribute
payload = Gitlab::ApplicationContext.current.merge(
message: 'Increment counter attribute',
attribute: attribute,
project_id: project_id,
increment: increment.amount,
ref: increment.ref,
new_counter_value: new_value,
current_db_value: read_attribute(attribute)
current_db_value: read_attribute(attribute),
**parent_log_fields
)
Gitlab::AppLogger.info(payload)
@ -257,9 +282,20 @@ module CounterAttribute
payload = Gitlab::ApplicationContext.current.merge(
message: 'Clear counter attribute',
attribute: attribute,
project_id: project_id
**parent_log_fields
)
Gitlab::AppLogger.info(payload)
end
def parent_log_fields
with_parent { |type, id| { "#{type}_id": id } }
end
def with_parent
return yield(:project, project_id) if self.respond_to?(:project_id)
return yield(:group, group_id) if self.respond_to?(:group_id)
raise ArgumentError, 'counter record must have either a project_id or a group_id column'
end
end

View File

@ -2954,6 +2954,10 @@ class Project < ApplicationRecord
archived? || project_namespace.self_or_ancestors_archived?
end
def ancestors_archived?
ancestors.archived.exists?
end
def renamed?
persisted? && path_changed?
end

View File

@ -453,7 +453,6 @@ class User < ApplicationRecord
:pinned_nav_items, :pinned_nav_items=,
:achievements_enabled, :achievements_enabled=,
:enabled_following, :enabled_following=,
:home_organization, :home_organization_id, :home_organization_id=,
:dpop_enabled, :dpop_enabled=,
:use_work_items_view, :use_work_items_view=,
:text_editor, :text_editor=,

View File

@ -1,6 +1,9 @@
# frozen_string_literal: true
class UserPreference < ApplicationRecord
include IgnorableColumns
ignore_columns(:home_organization_id, remove_with: '18.5', remove_after: '2025-09-20')
# We could use enums, but Rails 4 doesn't support multiple
# enum options with same name for multiple fields, also it creates
# extra methods that aren't really needed here.
@ -8,7 +11,6 @@ class UserPreference < ApplicationRecord
TIME_DISPLAY_FORMATS = { system: 0, non_iso_format: 1, iso_format: 2 }.freeze
belongs_to :user
belongs_to :home_organization, class_name: "Organizations::Organization", optional: true
scope :with_user, -> { joins(:user) }
scope :gitpod_enabled, -> { where(gitpod_enabled: true) }
@ -38,8 +40,6 @@ class UserPreference < ApplicationRecord
validates :work_items_display_settings, json_schema: { filename: 'user_preference_work_items_display_settings' }
validate :user_belongs_to_home_organization, if: :home_organization_changed?
attribute :dark_color_scheme_id, default: -> { Gitlab::CurrentSettings.default_dark_syntax_highlighting_theme }
attribute :tab_width, default: -> { Gitlab::TabWidth::DEFAULT }
attribute :time_display_relative, default: true
@ -137,16 +137,6 @@ class UserPreference < ApplicationRecord
private
def user_belongs_to_home_organization
# If we don't ignore the default organization id below then all users need to have their corresponding entry
# with default organization id as organization id in the `organization_users` table.
# Otherwise, the user won't be able to set the default organization as the home organization.
return if home_organization.default?
return if home_organization.user?(user)
errors.add(:user, _("is not part of the given organization"))
end
def notes_filter_field_for(resource)
field_key =
if resource.is_a?(Issuable)

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
module Projects
module ArchiveEvents
def publish_events
publish_project_archived_event
publish_project_attributed_changed_event
end
def publish_project_archived_event
event = Projects::ProjectArchivedEvent.new(data: {
project_id: project.id,
namespace_id: project.namespace_id,
root_namespace_id: project.root_namespace.id
})
Gitlab::EventStore.publish(event)
end
def publish_project_attributed_changed_event
event = Projects::ProjectAttributesChangedEvent.new(data: {
project_id: project.id,
namespace_id: project.namespace_id,
root_namespace_id: project.root_namespace.id,
attributes: project.previous_changes.keys
})
Gitlab::EventStore.publish(event)
end
end
end

View File

@ -0,0 +1,41 @@
# frozen_string_literal: true
module Projects
class ArchiveService < ::BaseProjectService
include ::Projects::ArchiveEvents
NotAuthorizedError = ServiceResponse.error(
message: "You don't have permissions to archive this project."
)
AncestorAlreadyArchivedError = ServiceResponse.error(
message: 'Cannot archive project since one of the ancestors is already archived.'
)
ArchivingFailedError = ServiceResponse.error(
message: 'Failed to archive project.'
)
def execute
return NotAuthorizedError unless can?(current_user, :archive_project, project)
return AncestorAlreadyArchivedError if project.ancestors_archived?
if project.update(archived: true)
after_archive
ServiceResponse.success
else
errors = project.errors.full_messages.to_sentence
return ServiceResponse.error(message: errors) if errors.presence
ArchivingFailedError
end
end
private
def after_archive
system_hook_service.execute_hooks_for(project, :update)
publish_events
UnlinkForkService.new(project, current_user).execute
end
end
end

View File

@ -0,0 +1,39 @@
# frozen_string_literal: true
module Projects
class UnarchiveService < ::BaseProjectService
include ::Projects::ArchiveEvents
NotAuthorizedError = ServiceResponse.error(
message: "You don't have permissions to unarchive this project."
)
AncestorArchivedError = ServiceResponse.error(
message: 'Cannot unarchive project since one of the ancestors is archived.'
)
UnarchivingFailedError = ServiceResponse.error(
message: 'Failed to unarchive project.'
)
def execute
return NotAuthorizedError unless can?(current_user, :archive_project, project)
return AncestorArchivedError if project.ancestors_archived?
if project.update(archived: false)
after_unarchive
ServiceResponse.success
else
errors = project.errors.full_messages.to_sentence
return ServiceResponse.error(message: errors) if errors.presence
UnarchivingFailedError
end
end
private
def after_unarchive
system_hook_service.execute_hooks_for(project, :update)
publish_events
end
end
end

View File

@ -37,8 +37,6 @@ module Projects
update_project!
after_update
UnlinkForkService.new(project, current_user).execute if archiving_project?
success
rescue ActiveRecord::ActiveRecordError
update_failed!
@ -56,10 +54,6 @@ module Projects
private
def archiving_project?
project.previous_changes[:archived] == [false, true]
end
def update_project!
if Feature.disabled?(:replicate_deletion_schedule_operations, project)
return project.update!(params.except(*non_assignable_project_params))
@ -334,23 +328,10 @@ module Projects
end
def publish_events
publish_project_archived_event
publish_project_attributed_changed_event
publish_project_features_changed_event
end
def publish_project_archived_event
return unless project.archived_previously_changed?
event = Projects::ProjectArchivedEvent.new(data: {
project_id: @project.id,
namespace_id: @project.namespace_id,
root_namespace_id: @project.root_namespace.id
})
Gitlab::EventStore.publish(event)
end
def publish_project_attributed_changed_event
changes = @project.previous_changes

View File

@ -112,9 +112,6 @@
= f.gitlab_ui_radio_component :text_editor, :rich_text_editor, s_('TextEditor|Rich text editor'), help_text: rich_text_editor_help_text
= f.gitlab_ui_radio_component :text_editor, :plain_text_editor, s_('TextEditor|Plain text editor'), help_text: plain_text_editor_help_text
- if Feature.enabled?(:ui_for_organizations, current_user)
#js-home-organization-setting{ data: { app_data: home_organization_setting_app_data } }
.js-listbox-input{ data: { label: s_('Preferences|Homepage'), description: s_('Preferences|Choose what content you want to see by default on your homepage.'), name: 'user[dashboard]', items: dashboard_choices.to_json, value: dashboard_value, block: true.to_s, toggle_class: 'gl-form-input-xl' } }
= render_if_exists 'profiles/preferences/group_overview_selector', f: f # EE-specific

View File

@ -11,7 +11,8 @@
- if repository_file_tree_browser_enabled
.gl-flex.navigation-root
.gl-w-full.gl-min-w-0
#js-file-browser
.gl-w-full.gl-min-w-0.gl-pl-4
#tree-holder.tree-holder.clearfix.js-per-page.gl-mt-5{ data: { blame_per_page: Gitlab::Git::BlamePagination::PAGINATION_PER_PAGE } }
= render 'projects/tree_content', project: project, ref: ref, pipeline: pipeline, tree: @tree, ref_type: @ref_type
- else

View File

@ -14,13 +14,7 @@
= render 'projects/last_push'
- if repository_file_tree_browser_enabled
.gl-flex.navigation-root
#js-file-browser
.gl-w-full.gl-min-w-0.gl-pl-4
= render 'projects/files', commit: @last_commit, project: @project, ref: @ref, content_url: project_tree_path(@project, @id)
- else
= render 'projects/files', commit: @last_commit, project: @project, ref: @ref, content_url: project_tree_path(@project, @id)
= render 'projects/files', commit: @last_commit, project: @project, ref: @ref, content_url: project_tree_path(@project, @id)
= render 'shared/web_ide_path'

View File

@ -26,7 +26,7 @@ class FlushCounterIncrementsWorker
return unless self.class.const_defined?(model_name)
model_class = model_name.constantize
model = model_class.find_by_id(model_id)
model = model_class.primary_key_in([model_id]).take # rubocop: disable CodeReuse/ActiveRecord -- we work on a dynamic model name
return unless model
Gitlab::Counters::BufferedCounter.new(model, attribute).commit_increment!

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true
class AddDownloadsCountersToVirtualRegistriesPackagesMavenCacheEntries < Gitlab::Database::Migration[2.3]
milestone '18.3'
TABLE_NAME = :virtual_registries_packages_maven_cache_entries
def change
add_column TABLE_NAME, :downloads_count, :bigint, default: 0, null: false
add_column TABLE_NAME, :downloaded_at, :datetime_with_timezone
end
end

View File

@ -0,0 +1 @@
5446d2063527a8fea8388977eb74daceb4cfc2f30a801e9b269b8b04da863c25

View File

@ -7261,6 +7261,8 @@ CREATE TABLE virtual_registries_packages_maven_cache_entries (
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7287,6 +7289,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7312,6 +7316,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7337,6 +7343,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7362,6 +7370,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7387,6 +7397,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7412,6 +7424,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7437,6 +7451,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7462,6 +7478,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7487,6 +7505,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7512,6 +7532,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7537,6 +7559,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7562,6 +7586,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7587,6 +7613,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7612,6 +7640,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7637,6 +7667,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),
@ -7662,6 +7694,8 @@ CREATE TABLE gitlab_partitions_static.virtual_registries_packages_maven_cache_en
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
file_md5 bytea,
file_sha1 bytea NOT NULL,
downloads_count bigint DEFAULT 0 NOT NULL,
downloaded_at timestamp with time zone,
CONSTRAINT check_215f531366 CHECK ((char_length(content_type) <= 255)),
CONSTRAINT check_2a52b4e0fc CHECK ((char_length(file) <= 1024)),
CONSTRAINT check_36391449ea CHECK ((char_length(object_storage_key) <= 1024)),

View File

@ -1337,6 +1337,7 @@ four standard [pagination arguments](#pagination-arguments):
| <a id="queryprojectstopics"></a>`topics` | [`[String!]`](#string) | Filter projects by topics. |
| <a id="queryprojectstrending"></a>`trending` | [`Boolean`](#boolean) | Return only projects that are trending. |
| <a id="queryprojectsvisibilitylevel"></a>`visibilityLevel` | [`VisibilityLevelsEnum`](#visibilitylevelsenum) | Filter projects by visibility level. |
| <a id="queryprojectswithcodeembeddingsindexed"></a>`withCodeEmbeddingsIndexed` {{< icon name="warning-solid" >}} | [`Boolean`](#boolean) | **Introduced** in GitLab 18.2. **Status**: Experiment. Include projects with indexed code embeddings. Requires `ids` to be sent. Applies only if the feature flag `allow_with_code_embeddings_indexed_projects_filter` is enabled. |
| <a id="queryprojectswithissuesenabled"></a>`withIssuesEnabled` | [`Boolean`](#boolean) | Return only projects with issues enabled. |
| <a id="queryprojectswithmergerequestsenabled"></a>`withMergeRequestsEnabled` | [`Boolean`](#boolean) | Return only projects with merge requests enabled. |
@ -35644,6 +35645,7 @@ four standard [pagination arguments](#pagination-arguments):
| <a id="organizationprojectstopics"></a>`topics` | [`[String!]`](#string) | Filter projects by topics. |
| <a id="organizationprojectstrending"></a>`trending` | [`Boolean`](#boolean) | Return only projects that are trending. |
| <a id="organizationprojectsvisibilitylevel"></a>`visibilityLevel` | [`VisibilityLevelsEnum`](#visibilitylevelsenum) | Filter projects by visibility level. |
| <a id="organizationprojectswithcodeembeddingsindexed"></a>`withCodeEmbeddingsIndexed` {{< icon name="warning-solid" >}} | [`Boolean`](#boolean) | **Introduced** in GitLab 18.2. **Status**: Experiment. Include projects with indexed code embeddings. Requires `ids` to be sent. Applies only if the feature flag `allow_with_code_embeddings_indexed_projects_filter` is enabled. |
| <a id="organizationprojectswithissuesenabled"></a>`withIssuesEnabled` | [`Boolean`](#boolean) | Return only projects with issues enabled. |
| <a id="organizationprojectswithmergerequestsenabled"></a>`withMergeRequestsEnabled` | [`Boolean`](#boolean) | Return only projects with merge requests enabled. |

View File

@ -252,7 +252,9 @@ describe('MyComponent', () => {
## Migrating from Vuex
Decide what your primary [state manager](state_management.md) should be first.
GitLab is actively migrating from Vuex, you can contribute and follow this progress [here](https://gitlab.com/groups/gitlab-org/-/epics/18476).
Before migrating decide what your primary [state manager](state_management.md) should be first.
Proceed with this guide if Pinia was your choice.
Migration to Pinia could be completed in two ways: a single step migration and a multi-step one.
@ -282,6 +284,11 @@ If your diff starts to exceed reviewable size prefer the multi-step migration.
[Learn about the official Vuex migration guide](https://pinia.vuejs.org/cookbook/migration-vuex.html).
A walkthrough is available in the two part video series:
1. [Migrating the store (part 1)](https://youtu.be/aWVYvhktYfM)
1. [Migrating the components (part 2)](https://youtu.be/9G7h4YmoHRw)
Follow these steps to iterate over the migration process and split the work onto smaller merge requests:
1. Identify the store you are going to migrate.
@ -311,6 +318,42 @@ Follow these steps to iterate over the migration process and split the work onto
1. Remove CODEOWNERS rule.
1. Close the migration issue.
#### Example migration breakdown
You can use the [merge requests migration](https://gitlab.com/groups/gitlab-org/-/epics/16505) breakdown as a reference:
1. Diffs store
1. [Copy store to a new location and introduce CODEOWNERS rules](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/163826)
1. [Automated store migration](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/163827)
1. Also creates MrNotes store
1. Specs migration ([actions](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165733), [getters](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167176), [mutations](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167434))
1. Notes store
1. [Copy store to a new location](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167450)
1. [Automated store migration](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167946)
1. Specs migration ([actions](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/169681), [getters](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/170547), [mutations](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/170549))
1. Batch comments store
1. [Copy store to a new location](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/176485)
1. [Automated store migration](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/176486)
1. Specs migration ([actions](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/176487), [getters](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/176490), [mutations](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/176488))
1. [Sync Vuex stores with Pinia stores](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/178302)
1. Diffs store components migration
1. [Diffs app](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186121)
1. [Non diffs components](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186365)
1. [File browser](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186370)
1. [Diffs components](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186381)
1. [Diff file components](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186382)
1. [Rest of diffs components](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186962)
1. [Batch comments components migration](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180129)
1. [MrNotes components migration](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/178291)
1. Notes store components migration
1. [Diffs components](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/188273)
1. [Simple notes components](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/193248)
1. [More notes components](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/195975)
1. [Rest of notes components](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/196142)
1. [Notes app](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/197331)
1. [Remove Vuex from merge requests](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/196307)
1. Also removes the CODEOWNERS rules
### Post migration steps
Once your store is migrated consider refactoring it to follow our best practices. Split big stores into smaller ones.

View File

@ -6,14 +6,6 @@ description: Get help from a suite of AI-native features while you work in GitLa
title: GitLab Duo
---
{{< history >}}
- [First GitLab Duo features introduced](https://about.gitlab.com/blog/2023/05/03/gitlab-ai-assisted-features/) in GitLab 16.0.
- [Removed third-party AI setting](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/136144) in GitLab 16.6.
- [Removed support for OpenAI from all GitLab Duo features](https://gitlab.com/groups/gitlab-org/-/epics/10964) in GitLab 16.6.
{{< /history >}}
GitLab Duo is a suite of AI-native features that assist you while you work in GitLab.
These features aim to help increase velocity and solve key pain points across the software development lifecycle.

View File

@ -6,6 +6,14 @@ description: AI-native features and functionality.
title: Summary of GitLab Duo features
---
{{< history >}}
- [First GitLab Duo features introduced](https://about.gitlab.com/blog/2023/05/03/gitlab-ai-assisted-features/) in GitLab 16.0.
- [Removed third-party AI setting](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/136144) in GitLab 16.6.
- [Removed support for OpenAI from all GitLab Duo features](https://gitlab.com/groups/gitlab-org/-/epics/10964) in GitLab 16.6.
{{< /history >}}
The following features are generally available on GitLab.com, GitLab Self-Managed, and GitLab Dedicated.
They require a Premium or Ultimate subscription and one of the available add-ons.

View File

@ -56,7 +56,7 @@ For an overview, see [GitLab Duo Agentic Chat](https://youtu.be/uG9-QLAJrrg?si=c
## Use Agentic Chat
You can use Agentic Chat in the GitLab UI or in VS Code.
You can use Agentic Chat in the GitLab UI, VS Code, or a JetBrains IDE.
Prerequisites:
@ -118,7 +118,7 @@ To use Agentic Chat:
<!-- markdownlint-enable MD044 -->
Conversations in Agentic Chat do not expire and are stored permanently. You cannot delete these conversations.
#### Create custom rules
### Create custom rules
{{< history >}}
@ -126,11 +126,12 @@ Conversations in Agentic Chat do not expire and are stored permanently. You cann
{{< /history >}}
If you have specific instructions that you want Agentic Chat to follow in every conversation, you can create custom rules.
In VS Code or a JetBrains IDE, if you have specific instructions that you want
Agentic Chat to follow in every conversation, you can create custom rules.
Prerequisites:
- You have [installed and configured the GitLab Workflow extension for VS Code](../../editor_extensions/visual_studio_code/setup.md) version 6.32.2 or later.
- For VS Code, [install and configure the GitLab Workflow extension for VS Code](../../editor_extensions/visual_studio_code/setup.md) version 6.32.2 or later.
{{< alert type="note" >}}
@ -138,7 +139,9 @@ Conversations that existed before you created any custom rules do not follow tho
{{< /alert >}}
1. Create a `.gitlab/duo/chat-rules.md` file.
1. Create a custom rules file:
- For VS Code, create a `.gitlab/duo/chat-rules.md` file.
- For JetBrains IDEs, create a `.gitlab/rules/chat.md` file.
1. Enter the custom rules into the file. For example:
```markdown

View File

@ -124,33 +124,6 @@ If you do not choose a default text editor, your last used choice is preserved.
1. Choose either **Rich text editor** or **Plain text editor** as your default.
1. Select **Save changes**.
### Choose your home organization
{{< history >}}
- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/419079) in GitLab 16.6 [with a flag](../../administration/feature_flags/_index.md) named `ui_for_organizations`. Disabled by default.
{{< /history >}}
{{< alert type="flag" >}}
The availability of this feature is controlled by a feature flag. For more information, see the history.
On GitLab.com and GitLab Dedicated, this feature is not available.
This feature is not ready for production use.
{{< /alert >}}
If you are a member of two or more [organizations](../organization/_index.md), you can choose a home organization.
This is the organization you are in by default when you first sign in to GitLab.
To choose your home organization:
1. On the left sidebar, select your avatar.
1. Select **Preferences**.
1. Scroll to the **Behavior** section.
1. From the **Home organization** dropdown list, select an option.
1. Select **Save changes**.
### Choose your homepage
{{< history >}}

View File

@ -5,7 +5,7 @@ module API
module Ci
module JobRequest
class RunnerInfo < Grape::Entity
expose :metadata_timeout, as: :timeout
expose :timeout_value, as: :timeout
expose :runner_session_url
end
end

View File

@ -633,9 +633,13 @@ module API
post ':id/archive', feature_category: :groups_and_projects do
authorize!(:archive_project, user_project)
::Projects::UpdateService.new(user_project, current_user, archived: true).execute
result = ::Projects::ArchiveService.new(project: user_project, current_user: current_user).execute
present_project user_project, with: Entities::Project, current_user: current_user
if result.success?
present_project user_project, with: Entities::Project, current_user: current_user
else
render_api_error!(result.message, 400)
end
end
desc 'Unarchive a project' do
@ -648,9 +652,13 @@ module API
post ':id/unarchive', feature_category: :groups_and_projects, urgency: :default do
authorize!(:archive_project, user_project)
::Projects::UpdateService.new(user_project, current_user, archived: false).execute
result = ::Projects::UnarchiveService.new(project: user_project, current_user: current_user).execute
present_project user_project, with: Entities::Project, current_user: current_user
if result.success?
present_project user_project, with: Entities::Project, current_user: current_user
else
render_api_error!(result.message, 400)
end
end
desc 'Star a project' do

View File

@ -73,7 +73,7 @@ module Ci
end
def expire_time(job)
ttl = [::JSONWebToken::Token::DEFAULT_EXPIRE_TIME, job.metadata_timeout.to_i].max
ttl = [::JSONWebToken::Token::DEFAULT_EXPIRE_TIME, job.timeout_value.to_i].max
Time.current + ttl + LEEWAY
end

View File

@ -15,7 +15,7 @@ module Gitlab
def from_commands(job)
self.new(:script).tap do |step|
step.script = job.options[:before_script].to_a + job.options[:script].to_a
step.timeout = job.metadata_timeout
step.timeout = job.timeout_value
step.when = WHEN_ON_SUCCESS
end
end
@ -25,7 +25,7 @@ module Gitlab
self.new(:release).tap do |step|
step.script = Gitlab::Ci::Build::Releaser.new(job: job).script
step.timeout = job.metadata_timeout
step.timeout = job.timeout_value
step.when = WHEN_ON_SUCCESS
end
end
@ -36,7 +36,7 @@ module Gitlab
self.new(:after_script).tap do |step|
step.script = after_script
step.timeout = job.metadata_timeout
step.timeout = job.timeout_value
step.when = WHEN_ALWAYS
step.allow_failure = true
end

View File

@ -7,7 +7,7 @@ module Gitlab
DEFAULT_EXPIRE_TIME = 60 * 5
def self.for_build(build)
self.new(build, ttl: build.metadata_timeout).encoded
self.new(build, ttl: build.timeout_value).encoded
end
def initialize(build, ttl:)

View File

@ -12,7 +12,7 @@ module Gitlab
def self.for_build(
build, aud:, sub_components: [:project_path, :ref_type,
:ref], target_audience: nil)
new(build, ttl: build.metadata_timeout, aud: aud, sub_components: sub_components,
new(build, ttl: build.timeout_value, aud: aud, sub_components: sub_components,
target_audience: target_audience).encoded
end

View File

@ -173,11 +173,10 @@ module Gitlab
end
def key
project_id = counter_record.project_id
record_name = counter_record.class
record_id = counter_record.id
"project:{#{project_id}}:counters:#{record_name}:#{record_id}:#{attribute}"
"#{counter_record.counters_key_prefix}:counters:#{record_name}:#{record_id}:#{attribute}"
end
def flushed_key

View File

@ -6,7 +6,7 @@ module Gitlab
def initialize(collection)
@collection = collection
@logger = Gitlab::AppLogger
@counter_attributes = collection.counter_attributes.pluck(:attribute)
@counter_attributes = collection.counter_attributes.keys.map(&:to_sym)
end
def execute

View File

@ -1374,6 +1374,7 @@ ee:
excluded_attributes:
project:
- :vulnerability_hooks_integrations
- :ready_active_context_code_repository
approval_rules:
- :created_at
- :updated_at

View File

@ -43782,9 +43782,6 @@ msgstr ""
msgid "Organization|Choose organization visibility level."
msgstr ""
msgid "Organization|Choose what organization you want to see by default."
msgstr ""
msgid "Organization|Copy organization ID"
msgstr ""
@ -43800,9 +43797,6 @@ msgstr ""
msgid "Organization|Get started with organizations"
msgstr ""
msgid "Organization|Home organization"
msgstr ""
msgid "Organization|If you proceed with this change you will lose your owner permissions for this organization, including access to this page."
msgstr ""
@ -58256,6 +58250,9 @@ msgstr ""
msgid "SecurityReports|Undo dismiss"
msgstr ""
msgid "SecurityReports|Vulnerabilities over time"
msgstr ""
msgid "SecurityReports|Vulnerability Resolution available"
msgstr ""
@ -70254,6 +70251,9 @@ msgstr ""
msgid "Weight - %{maxIssueWeight}"
msgstr ""
msgid "Welcome %{name},"
msgstr ""
msgid "Welcome back! Your account had been deactivated due to inactivity but is now reactivated."
msgstr ""
@ -74722,9 +74722,6 @@ msgstr ""
msgid "is not one of"
msgstr ""
msgid "is not part of the given organization"
msgstr ""
msgid "is not valid."
msgstr ""

View File

@ -65,7 +65,7 @@
"@gitlab/fonts": "^1.3.0",
"@gitlab/query-language-rust": "0.13.1",
"@gitlab/svgs": "3.138.0",
"@gitlab/ui": "115.4.0",
"@gitlab/ui": "115.5.0",
"@gitlab/vue-router-vue3": "npm:vue-router@4.5.1",
"@gitlab/vuex-vue3": "npm:vuex@4.1.0",
"@gitlab/web-ide": "^0.0.1-dev-20250704091020",

View File

@ -2,13 +2,11 @@
require 'spec_helper'
RSpec.describe Profiles::PreferencesController do
let_it_be(:home_organization) { create(:organization) }
RSpec.describe Profiles::PreferencesController, feature_category: :user_profile do
let(:user) { create(:user) }
before do
sign_in(user)
create(:organization_user, organization: home_organization, user: user)
allow(subject).to receive(:current_user).and_return(user)
end
@ -31,7 +29,6 @@ RSpec.describe Profiles::PreferencesController do
color_mode_id: '1',
color_scheme_id: '1',
dashboard: 'stars',
home_organization_id: home_organization.id,
theme_id: '1'
)
@ -54,7 +51,6 @@ RSpec.describe Profiles::PreferencesController do
diffs_deletion_color: '#123456',
diffs_addition_color: '#abcdef',
dashboard: 'stars',
home_organization_id: home_organization.id.to_s,
theme_id: '2',
first_day_of_week: '1',
preferred_language: 'jp',

View File

@ -0,0 +1,53 @@
import { shallowMount } from '@vue/test-utils';
import GreetingHeader from '~/homepage/components/greeting_header.vue';
describe('GreetingHeader', () => {
let wrapper;
const createComponent = (gonData = {}) => {
window.gon = { current_user_fullname: 'John Doe', ...gonData };
wrapper = shallowMount(GreetingHeader);
};
it('renders welcome message with first name', () => {
createComponent();
expect(wrapper.find('p').text()).toBe('Welcome John,');
});
it('does not render welcome message when user has no name', () => {
createComponent({ current_user_fullname: null });
expect(wrapper.find('p').exists()).toBe(false);
});
it('handles single name correctly', () => {
createComponent({ current_user_fullname: 'Madonna' });
expect(wrapper.find('p').text()).toBe('Welcome Madonna,');
});
it('uses only first name for multi-word names', () => {
createComponent({ current_user_fullname: 'John Doe Smith Jr' });
expect(wrapper.find('p').text()).toBe('Welcome John,');
});
it('handles empty string name', () => {
createComponent({ current_user_fullname: '' });
expect(wrapper.find('p').exists()).toBe(false);
});
it('handles whitespace-only name', () => {
createComponent({ current_user_fullname: ' ' });
expect(wrapper.find('p').exists()).toBe(false);
});
it('handles name with extra whitespace', () => {
createComponent({ current_user_fullname: ' John Doe ' });
expect(wrapper.find('p').text()).toBe('Welcome John,');
});
});

View File

@ -229,19 +229,6 @@ RSpec.describe Organizations::OrganizationHelper, feature_category: :organizatio
end
end
describe '#home_organization_setting_app_data' do
it 'returns expected json' do
current_user = build_stubbed(:user)
allow(helper).to receive(:current_user).and_return(current_user)
expect(Gitlab::Json.parse(helper.home_organization_setting_app_data)).to eq(
{
'initial_selection' => current_user.user_preference.home_organization_id
}
)
end
end
describe '#organization_settings_general_app_data' do
it 'returns expected json' do
expect(organization).to receive(:avatar_url).with(size: 192).and_return('avatar.jpg')

View File

@ -128,16 +128,16 @@ RSpec.describe Ci::JobToken::Jwt, feature_category: :secrets_management do
it 'returns expiration time with leeway' do
freeze_time do
allow(job).to receive(:metadata_timeout).and_return(2.hours)
allow(job).to receive(:timeout_value).and_return(2.hours)
expected_time = Time.current + 2.hours + described_class::LEEWAY
expect(expire_time).to eq(expected_time)
end
end
it 'uses default expire time when metadata_timeout is smaller' do
it 'uses default expire time when timeout_value is smaller' do
freeze_time do
allow(job).to receive(:metadata_timeout).and_return(1.minute)
allow(job).to receive(:timeout_value).and_return(1.minute)
expected_time = Time.current +
::JSONWebToken::Token::DEFAULT_EXPIRE_TIME +

View File

@ -244,7 +244,7 @@ RSpec.describe Gitlab::Ci::Jwt, feature_category: :secrets_management do
end
it 'generates JWT for the given job with ttl equal to build timeout' do
expect(build).to receive(:metadata_timeout).and_return(3_600)
expect(build).to receive(:timeout_value).and_return(3_600)
payload, _headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
ttl = payload["exp"] - payload["iat"]
@ -253,7 +253,7 @@ RSpec.describe Gitlab::Ci::Jwt, feature_category: :secrets_management do
end
it 'generates JWT for the given job with default ttl if build timeout is not set' do
expect(build).to receive(:metadata_timeout).and_return(nil)
expect(build).to receive(:timeout_value).and_return(nil)
payload, _headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
ttl = payload["exp"] - payload["iat"]

View File

@ -12,51 +12,14 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
let(:attribute) { :build_artifacts_size }
describe '#get' do
it 'returns the value when there is an existing value stored in the counter' do
Gitlab::Redis::SharedState.with do |redis|
redis.set(counter.key, 456)
end
expect(counter.get).to eq(456)
end
it 'returns 0 when there is no existing value' do
expect(counter.get).to eq(0)
end
it_behaves_like 'handling a buffered counter in redis'
end
describe '#increment' do
let(:increment) { Gitlab::Counters::Increment.new(amount: 123, ref: 1) }
let(:other_increment) { Gitlab::Counters::Increment.new(amount: 100, ref: 2) }
context 'when the counter is not undergoing refresh' do
it 'sets a new key by the given value' do
counter.increment(increment)
expect(counter.get).to eq(increment.amount)
end
it 'increments an existing key by the given value' do
counter.increment(other_increment)
counter.increment(increment)
expect(counter.get).to eq(other_increment.amount + increment.amount)
end
it 'returns the value of the key after the increment' do
counter.increment(increment)
result = counter.increment(other_increment)
expect(result).to eq(increment.amount + other_increment.amount)
end
it 'schedules a worker to commit the counter key into database' do
expect(FlushCounterIncrementsWorker).to receive(:perform_in)
.with(described_class::WORKER_DELAY, counter_record.class.to_s, counter_record.id, attribute.to_s)
counter.increment(increment)
end
end
it_behaves_like 'incrementing a buffered counter when not undergoing a refresh'
context 'when the counter is undergoing refresh' do
let(:increment_1) { Gitlab::Counters::Increment.new(amount: 123, ref: 1) }

View File

@ -618,6 +618,7 @@ project:
- alert_hooks_integrations
- incident_hooks_integrations
- vulnerability_hooks_integrations
- ready_active_context_code_repository
- apple_app_store_integration
- google_cloud_platform_artifact_registry_integration
- google_cloud_platform_workload_identity_federation_integration

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::Metadatable, feature_category: :continuous_integration do
describe '#timeout_value' do
using RSpec::Parameterized::TableSyntax
let(:ci_processable) { build(:ci_processable, metadata: ci_build_metadata) }
let(:ci_build_metadata) { build(:ci_build_metadata, timeout: metadata_timeout) }
subject(:timeout_value) { ci_processable.timeout_value }
before do
allow(ci_processable).to receive_messages(timeout: build_timeout)
end
where(:build_timeout, :metadata_timeout, :expected_timeout) do
nil | nil | nil
nil | 100 | 100
200 | nil | 200
200 | 100 | 200
end
with_them do
it { is_expected.to eq(expected_timeout) }
end
end
end

View File

@ -2,13 +2,13 @@
require 'spec_helper'
RSpec.describe CounterAttribute, :counter_attribute, :clean_gitlab_redis_shared_state do
RSpec.describe CounterAttribute, :counter_attribute, :clean_gitlab_redis_shared_state, feature_category: :shared do
using RSpec::Parameterized::TableSyntax
let(:project_statistics) { create(:project_statistics) }
let(:model) { CounterAttributeModel.find(project_statistics.id) }
it_behaves_like described_class, [:build_artifacts_size, :commit_count, :packages_size] do
it_behaves_like described_class, %w[build_artifacts_size commit_count packages_size uploads_size] do
let(:model) { CounterAttributeModel.find(project_statistics.id) }
end

View File

@ -1356,6 +1356,51 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
describe '#ancestors_archived?' do
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:user_namespace_project) { create(:project) }
let_it_be_with_reload(:group_project) { create(:project, group: group) }
let_it_be_with_reload(:subgroup_project) { create(:project, group: subgroup) }
context 'when project itself is archived' do
it 'returns false' do
group_project.update!(archived: true)
expect(group_project.ancestors_archived?).to eq(false)
end
end
context 'when project is not archived but parent group is archived' do
it 'returns true' do
group.archive
expect(group_project.ancestors_archived?).to eq(true)
end
end
context 'when project is not archived but parent subgroup is archived' do
it 'returns true' do
subgroup.archive
expect(subgroup_project.ancestors_archived?).to eq(true)
end
end
context 'when neither project nor any ancestor group is archived' do
it 'returns false' do
expect(subgroup_project.ancestors_archived?).to eq(false)
end
end
context 'when project and any its ancestor are not archived' do
it 'returns false' do
expect(user_namespace_project.ancestors_archived?).to eq(false)
end
end
end
describe '#ci_pipelines' do
let_it_be(:project) { create(:project) }

View File

@ -9,7 +9,7 @@ RSpec.describe Projects::DataTransfer, feature_category: :source_code_management
# tests DataTransferCounterAttribute with the appropiate attributes
it_behaves_like CounterAttribute,
%i[repository_egress artifacts_egress packages_egress registry_egress] do
%w[repository_egress artifacts_egress packages_egress registry_egress] do
let(:model) { create(:project_data_transfer, project: project) }
end

View File

@ -102,36 +102,6 @@ RSpec.describe UserPreference, feature_category: :user_profile do
it { is_expected.to define_enum_for(:organization_groups_projects_display).with_values(projects: 0, groups: 1) }
end
describe 'user belongs to the home organization' do
let_it_be(:organization) { create(:organization) }
before do
user_preference.home_organization = organization
end
context 'when user is an organization user' do
before do
create(:organization_user, organization: organization, user: user)
end
it 'does not add any validation errors' do
user_preference.home_organization = organization
expect(user_preference).to be_valid
expect(user_preference.errors).to be_empty
end
end
context 'when user is not an organization user' do
it 'adds a validation error' do
user_preference.home_organization = organization
expect(user_preference).to be_invalid
expect(user_preference.errors.messages[:user].first).to eq(_("is not part of the given organization"))
end
end
end
describe 'timezone_valid' do
context 'when timezone is nil' do
it { is_expected.to be_valid }
@ -168,10 +138,6 @@ RSpec.describe UserPreference, feature_category: :user_profile do
end
end
describe 'associations' do
it { is_expected.to belong_to(:home_organization).class_name('Organizations::Organization').optional }
end
describe 'notes filters global keys' do
it 'contains expected values' do
expect(UserPreference::NOTES_FILTERS.keys).to match_array([:all_notes, :only_comments, :only_activity])

View File

@ -147,10 +147,6 @@ RSpec.describe User, feature_category: :user_profile do
it { is_expected.to delegate_method(:organization_groups_projects_display).to(:user_preference) }
it { is_expected.to delegate_method(:organization_groups_projects_display=).to(:user_preference).with_arguments(:args) }
it { is_expected.to delegate_method(:home_organization).to(:user_preference) }
it { is_expected.to delegate_method(:home_organization_id).to(:user_preference) }
it { is_expected.to delegate_method(:home_organization_id=).to(:user_preference).with_arguments(:args) }
it { is_expected.to delegate_method(:dpop_enabled).to(:user_preference) }
it { is_expected.to delegate_method(:dpop_enabled=).to(:user_preference).with_arguments(:args) }

View File

@ -217,12 +217,12 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
let(:expected_steps) do
[{ 'name' => 'script',
'script' => %w[echo],
'timeout' => job.metadata_timeout,
'timeout' => job.timeout_value,
'when' => 'on_success',
'allow_failure' => false },
{ 'name' => 'after_script',
'script' => %w[ls date],
'timeout' => job.metadata_timeout,
'timeout' => job.timeout_value,
'when' => 'always',
'allow_failure' => true }]
end

View File

@ -5204,7 +5204,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
context 'on an archived project' do
before do
::Projects::UpdateService.new(project, user, archived: true).execute
project.update!(archived: true)
end
it 'remains archived' do
@ -5249,6 +5249,19 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
end
end
end
context 'when archive service returns an error' do
it 'returns the error message' do
allow_next_instance_of(::Projects::ArchiveService) do |instance|
allow(instance).to receive(:execute).and_return(Projects::ArchiveService::ArchivingFailedError)
end
post api(path, user)
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Failed to archive project.')
end
end
end
describe 'POST /projects/:id/unarchive' do
@ -5265,7 +5278,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
context 'on an archived project' do
before do
::Projects::UpdateService.new(project, user, archived: true).execute
project.update!(archived: true)
end
it 'unarchives the project' do
@ -5287,6 +5300,19 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when unarchive service returns an error' do
it 'returns the error message' do
allow_next_instance_of(::Projects::UnarchiveService) do |instance|
allow(instance).to receive(:execute).and_return(Projects::UnarchiveService::UnarchivingFailedError)
end
post api(path, user)
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Failed to unarchive project.')
end
end
end
describe 'POST /projects/:id/star' do

View File

@ -0,0 +1,128 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for Projects::ArchiveService: archiving a project requires owner-level
# permission and fails when any ancestor group is already archived.
RSpec.describe Projects::ArchiveService, feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
# Reloaded between examples so archived-state changes do not leak across specs.
let_it_be_with_reload(:project) { create(:project, namespace: group) }
subject(:service) { described_class.new(project: project, current_user: user) }
describe '#execute' do
context 'when user is not authorized to archive project' do
before_all do
# Maintainer access is expected to be insufficient; owner is granted below.
project.add_maintainer(user)
end
it 'returns not authorized error' do
result = service.execute
expect(result).to be_error
expect(result.message).to eq("You don't have permissions to archive this project.")
end
end
context 'when user is authorized to archive project' do
before_all do
project.add_owner(user)
end
context 'when project ancestors are already archived' do
before do
# Archiving the parent group makes the project's ancestor chain archived.
group.update!(archived: true)
end
it 'returns ancestor already archived error' do
result = service.execute
expect(result).to be_error
expect(result.message).to eq('Cannot archive project since one of the ancestors is already archived.')
end
end
context 'when project ancestors are not archived' do
context 'when archiving project fails' do
before do
# Force the persistence step to fail and stub the model's error sentence.
allow(project).to receive(:update).with(archived: true).and_return(false)
allow(project).to receive_message_chain(:errors, :full_messages, :to_sentence)
.and_return('Validation failed')
end
it 'returns error with validation messages' do
result = service.execute
expect(result).to be_error
expect(result.message).to eq('Validation failed')
end
end
context 'when archiving project fails without specific error messages' do
before do
# Update fails but the model reports no error text; the service is
# expected to fall back to a generic message.
allow(project).to receive(:update).with(archived: true).and_return(false)
allow(project).to receive_message_chain(:errors, :full_messages, :to_sentence).and_return('')
end
it 'returns generic archiving failed error' do
result = service.execute
expect(result).to be_error
expect(result.message).to eq('Failed to archive project.')
end
end
context 'when archiving project succeeds' do
let(:unlink_fork_service) { instance_double(Projects::UnlinkForkService, execute: true) }
let(:system_hook_service) { instance_double(SystemHooksService) }
it 'returns success' do
result = service.execute
expect(result).to be_success
end
it 'updates the project archived status to true' do
expect { service.execute }.to change { project.reload.archived }.from(false).to(true)
end
it 'executes system hooks' do
allow(service).to receive(:system_hook_service).and_return(system_hook_service)
expect(system_hook_service).to receive(:execute_hooks_for).with(project, :update)
service.execute
end
it 'unlinks fork' do
allow(Projects::UnlinkForkService).to receive(:new).and_return(unlink_fork_service)
expect(unlink_fork_service).to receive(:execute)
service.execute
end
it 'publishes a ProjectArchivedEvent' do
expect { service.execute }.to publish_event(Projects::ProjectArchivedEvent)
.with(
project_id: project.id,
namespace_id: project.namespace_id,
root_namespace_id: project.root_namespace.id
)
end
it 'publishes ProjectAttributesChangedEvent' do
# Suppress the archived-event publication so only the attributes-changed
# event is under observation here.
allow(service).to receive(:publish_project_archived_event)
expect { service.execute }.to publish_event(Projects::ProjectAttributesChangedEvent)
.with(
project_id: project.id,
namespace_id: project.namespace_id,
root_namespace_id: project.root_namespace.id,
attributes: %w[updated_at archived]
)
end
end
end
end
end
end

View File

@ -0,0 +1,119 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for Projects::UnarchiveService: unarchiving requires owner-level
# permission and fails when an ancestor group is archived.
RSpec.describe Projects::UnarchiveService, feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
# Project starts archived; reloaded between examples to isolate state changes.
let_it_be_with_reload(:project) { create(:project, namespace: group, archived: true) }
subject(:service) { described_class.new(project: project, current_user: user) }
describe '#execute' do
context 'when user is not authorized to unarchive project' do
before_all do
# Maintainer access is expected to be insufficient; owner is granted below.
project.add_maintainer(user)
end
it 'returns not authorized error' do
result = service.execute
expect(result).to be_error
expect(result.message).to eq("You don't have permissions to unarchive this project.")
end
end
context 'when user is authorized to unarchive project' do
before_all do
project.add_owner(user)
end
context 'when project ancestors are already archived' do
before do
group.update!(archived: true)
end
it 'returns ancestor already archived error' do
result = service.execute
expect(result).to be_error
expect(result.message).to eq('Cannot unarchive project since one of the ancestors is archived.')
end
end
context 'when project ancestors are not archived' do
context 'when unarchiving project fails' do
before do
# Force the persistence step to fail and stub the model's error sentence.
allow(project).to receive(:update).with(archived: false).and_return(false)
allow(project).to receive_message_chain(:errors, :full_messages, :to_sentence)
.and_return('Validation failed')
end
it 'returns error with validation messages' do
result = service.execute
expect(result).to be_error
expect(result.message).to eq('Validation failed')
end
end
context 'when unarchiving project fails without specific error messages' do
before do
allow(project).to receive(:update).with(archived: false).and_return(false)
allow(project).to receive_message_chain(:errors, :full_messages, :to_sentence).and_return('')
end
it 'returns generic unarchiving failed error' do
result = service.execute
expect(result).to be_error
expect(result.message).to eq('Failed to unarchive project.')
end
end
context 'when unarchiving project succeeds' do
let(:system_hook_service) { instance_double(SystemHooksService) }
it 'returns success' do
result = service.execute
expect(result).to be_success
end
it 'updates the project archived status to false' do
expect { service.execute }.to change { project.reload.archived }.from(true).to(false)
end
it 'executes system hooks' do
allow(service).to receive(:system_hook_service).and_return(system_hook_service)
expect(system_hook_service).to receive(:execute_hooks_for).with(project, :update)
service.execute
end
# NOTE(review): unarchiving asserts a ProjectArchivedEvent (not an
# "unarchived" event) — confirm against the service that this event type
# is intentionally shared by both archive and unarchive flows.
it 'publishes a ProjectArchivedEvent' do
expect { service.execute }.to publish_event(Projects::ProjectArchivedEvent)
.with(
project_id: project.id,
namespace_id: project.namespace_id,
root_namespace_id: project.root_namespace.id
)
end
it 'publishes ProjectAttributesChangedEvent' do
# Suppress the archived-event publication so only the attributes-changed
# event is under observation here.
allow(service).to receive(:publish_project_archived_event)
expect { service.execute }.to publish_event(Projects::ProjectAttributesChangedEvent)
.with(
project_id: project.id,
namespace_id: project.namespace_id,
root_namespace_id: project.root_namespace.id,
attributes: %w[updated_at archived]
)
end
end
end
end
end
end

View File

@ -1,4 +1,5 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects do
@ -508,46 +509,6 @@ RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects d
end
end
context 'when archiving a project' do
before do
allow(Projects::UnlinkForkService).to receive(:new).and_return(unlink_fork_service)
end
let(:unlink_fork_service) { instance_double(Projects::UnlinkForkService, execute: true) }
it_behaves_like 'publishing Projects::ProjectAttributesChangedEvent',
params: { archived: true },
attributes: %w[updated_at archived]
it 'publishes a ProjectTransferedEvent' do
expect { update_project(project, user, archived: true) }
.to publish_event(Projects::ProjectArchivedEvent)
.with(
project_id: project.id,
namespace_id: project.namespace_id,
root_namespace_id: project.root_namespace.id
)
end
context 'when project is being archived' do
it 'calls UnlinkForkService' do
project.update!(archived: false)
expect(Projects::UnlinkForkService).to receive(:new).with(project, user).and_return(unlink_fork_service)
update_project(project, user, archived: true)
end
end
context 'when project is not being archived' do
it 'does not call UnlinkForkService' do
expect(Projects::UnlinkForkService).not_to receive(:new)
update_project(project, user, archived: false)
end
end
end
context 'when changing operations feature visibility' do
let(:feature_params) { { operations_access_level: ProjectFeature::DISABLED } }

View File

@ -12,6 +12,7 @@ RSpec.configure do |config|
counter_attribute :build_artifacts_size
counter_attribute :commit_count
counter_attribute :packages_size, if: ->(instance) { instance.allow_package_size_counter }
counter_attribute :uploads_size, touch: :updated_at
attr_accessor :flushed, :allow_package_size_counter

View File

@ -0,0 +1,46 @@
# frozen_string_literal: true
# Shared examples for a Redis-buffered counter's read path.
# The including context must define `counter` (responds to #key and #get).
RSpec.shared_examples 'handling a buffered counter in redis' do
it 'returns the value when there is an existing value stored in the counter' do
Gitlab::Redis::SharedState.with do |redis|
redis.set(counter.key, 456)
end
expect(counter.get).to eq(456)
end
it 'returns 0 when there is no existing value' do
expect(counter.get).to eq(0)
end
end
# Shared examples for the increment path when no refresh is in progress.
# The including context must define `counter`, `increment`, `other_increment`
# (each with an #amount), plus `counter_record` and `attribute` for the
# flush-worker expectation.
RSpec.shared_examples 'incrementing a buffered counter when not undergoing a refresh' do
context 'when the counter is not undergoing refresh' do
it 'sets a new key by the given value' do
counter.increment(increment)
expect(counter.get).to eq(increment.amount)
end
it 'increments an existing key by the given value' do
counter.increment(other_increment)
counter.increment(increment)
expect(counter.get).to eq(other_increment.amount + increment.amount)
end
it 'returns the value of the key after the increment' do
counter.increment(increment)
result = counter.increment(other_increment)
expect(result).to eq(increment.amount + other_increment.amount)
end
it 'schedules a worker to commit the counter key into database' do
# The async flush is delayed by WORKER_DELAY and keyed on the record
# class/id and attribute name.
expect(FlushCounterIncrementsWorker).to receive(:perform_in)
.with(described_class::WORKER_DELAY, counter_record.class.to_s, counter_record.id, attribute.to_s)
counter.increment(increment)
end
end
end

View File

@ -8,7 +8,7 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
end
it 'defines a method to store counters' do
registered_attributes = model.class.counter_attributes.map { |e| e[:attribute] } # rubocop:disable Rails/Pluck
registered_attributes = model.class.counter_attributes.keys
expect(registered_attributes).to contain_exactly(*counter_attributes)
end
@ -19,9 +19,7 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
let(:increment) { Gitlab::Counters::Increment.new(amount: amount, ref: 3) }
let(:counter_key) { model.counter(attribute).key }
let(:returns_current) do
model.class.counter_attributes
.find { |a| a[:attribute] == attribute }
.fetch(:returns_current, false)
model.class.counter_attributes[attribute].fetch(:returns_current, false)
end
subject { model.increment_counter(attribute, increment) }

View File

@ -0,0 +1,256 @@
# frozen_string_literal: true
# rubocop:disable Gitlab/Json -- no Rails environment
require "tempfile"
require "fileutils"
require "fast_spec_helper"
require_relative "../../../../../tooling/lib/tooling/predictive_tests/frontend_metrics_exporter"
# Specs for the frontend (Jest) predictive-test metrics exporter. The Jest
# selection script itself is stubbed via `system`; these specs exercise metric
# computation, JSON output, and event emission around it.
RSpec.describe Tooling::PredictiveTests::FrontendMetricsExporter, feature_category: :tooling do
include StubENV
subject(:exporter) do
described_class.new(
rspec_changed_files_path: changed_files_path,
rspec_matching_js_files_path: rspec_matching_js_files_path,
jest_failed_test_files_path: jest_failed_tests_path,
output_dir: output_dir
)
end
let(:event_tracker) { instance_double(Tooling::Events::TrackPipelineEvents, send_event: nil) }
let(:logger) { instance_double(Logger, info: nil, error: nil, warn: nil) }
let(:output_dir) { Dir.mktmpdir("predictive-tests-output") }
# Input files are real temp files so the exporter's file reads are exercised.
let(:changed_files_path) { create_temp_file("changed_files.txt", changed_files_content) }
let(:rspec_matching_js_files_path) { create_temp_file("matching_js.txt", matching_js_files_content) }
let(:jest_failed_tests_path) { create_temp_file("failed_tests.txt", failed_tests_content) }
# 4 changed files total: 3 JS-like (.js/.vue/_spec.js) and one .rb.
let(:changed_files_content) do
"app/assets/javascripts/issues/show.js app/models/user.rb " \
"ee/app/assets/javascripts/security/scanner.vue spec/frontend/issues/show_spec.js"
end
let(:matching_js_files_content) do
"app/assets/javascripts/pages/projects/merge_requests/show/index.js " \
"ee/app/assets/javascripts/security_dashboard/coponents/app.vue"
end
# Three failing specs; only two appear in the predicted list below, so one
# is a "missed" failing test file.
let(:failed_tests_content) do
"spec/frontend/issues/show_spec.js spec/frontend/security/scanner_spec.js " \
"spec/frontend/not_predicted_spec.js"
end
let(:predicted_tests_content) do
"spec/frontend/issues/show_spec.js " \
"spec/frontend/security/scanner_spec.js"
end
let(:jest_script_path) { "scripts/frontend/find_jest_predictive_tests.js" }
let(:event_name) { "glci_predictive_tests_metrics" }
let(:extra_properties) { { ci_job_id: "123456", test_type: "frontend" } }
before do
stub_env({ "CI_JOB_ID" => extra_properties[:ci_job_id] })
allow(Tooling::Events::TrackPipelineEvents).to receive(:new).and_return(event_tracker)
allow(Logger).to receive(:new).and_return(logger)
end
after do
# Clean up the temp output dir and input files created per example.
FileUtils.rm_rf(output_dir)
[changed_files_path, rspec_matching_js_files_path, jest_failed_tests_path].each do |path|
FileUtils.rm_f(path) if path && File.exist?(path)
end
end
describe "#execute" do
context "with successful Jest script execution" do
let(:expected_env) do
{
'RSPEC_CHANGED_FILES_PATH' => changed_files_path,
'RSPEC_MATCHING_JS_FILES_PATH' => rspec_matching_js_files_path,
'JEST_MATCHING_TEST_FILES_PATH' => match(%r{frontend/jest_matching_test_files\.txt$})
}
end
before do
mock_jest_script_execution(success: true, create_output: true)
end
it "calls Jest script with correct environment variables" do
exporter.execute
expect(exporter).to have_received(:system).with(
hash_including(expected_env),
anything
)
end
it "generates Jest predictive test list", :aggregate_failures do
exporter.execute
jest_output_path = File.join(output_dir, "frontend", "jest_matching_test_files.txt")
expect(File.exist?(jest_output_path)).to be true
expect(File.read(jest_output_path).split(" ")).to contain_exactly(
"spec/frontend/issues/show_spec.js",
"spec/frontend/security/scanner_spec.js"
)
end
it "creates metrics JSON file with correct content", :aggregate_failures do
exporter.execute
metrics_path = File.join(output_dir, "frontend", "metrics_frontend.json")
expect(File.exist?(metrics_path)).to be true
metrics = JSON.parse(File.read(metrics_path))
expect(metrics).to include(
"test_framework" => "jest",
"timestamp" => be_a(String),
"core_metrics" => {
"changed_files_count" => 4,
"predicted_test_files_count" => 2,
"missed_failing_test_files" => 1, # not_predicted_spec.js
"changed_js_files_count" => 3, # .js, .vue, but not .rb
"backend_triggered_js_files_count" => 2
}
)
end
it "sends correct events to tracker", :aggregate_failures do
exporter.execute
# One event per core metric that is reported: changed, predicted, missed.
expect(event_tracker).to have_received(:send_event).exactly(3).times
expect(event_tracker).to have_received(:send_event).with(
event_name,
label: "changed_files_count",
value: 4,
property: "jest_built_in",
extra_properties: extra_properties
)
expect(event_tracker).to have_received(:send_event).with(
event_name,
label: "predicted_test_files_count",
value: 2,
property: "jest_built_in",
extra_properties: extra_properties
)
expect(event_tracker).to have_received(:send_event).with(
event_name,
label: "missed_failing_test_files",
value: 1,
property: "jest_built_in",
extra_properties: extra_properties
)
end
end
context "with failed Jest script execution" do
before do
mock_jest_script_execution(success: false)
end
it "rescues the error and logs failure" do
expect { exporter.execute }.not_to raise_error
expect(logger).to have_received(:error).with("Failed to generate Jest predictive tests")
expect(logger).to have_received(:info).with("Skipping metrics export due to Jest script issues")
end
it "does not send any events" do
exporter.execute
expect(event_tracker).not_to have_received(:send_event)
end
end
context "when Jest script is missing" do
before do
# Only the exporter's own existence check for the script is stubbed;
# other File.exist? calls pass through.
script_path_pattern = /find_jest_predictive_tests\.js$/
allow(File).to receive(:exist?).and_call_original
allow(File).to receive(:exist?).with(script_path_pattern).and_return(false)
end
it "logs warning and skips metrics without raising error" do
expect { exporter.execute }.not_to raise_error
expect(logger).to have_received(:warn).with(/Jest predictive test script not found/)
expect(logger).to have_received(:info).with("Skipping metrics export due to Jest script issues")
end
it "does not send any events" do
exporter.execute
expect(event_tracker).not_to have_received(:send_event)
end
end
context "with empty input files" do
let(:changed_files_content) { "" }
let(:matching_js_files_content) { "" }
let(:failed_tests_content) { "" }
let(:predicted_tests_content) { "" }
before do
mock_jest_script_execution(success: true)
end
it "handles empty files gracefully", :aggregate_failures do
exporter.execute
metrics_path = File.join(output_dir, "frontend", "metrics_frontend.json")
metrics = JSON.parse(File.read(metrics_path))
expect(metrics["core_metrics"]).to eq({
"changed_files_count" => 0,
"predicted_test_files_count" => 0,
"missed_failing_test_files" => 0,
"changed_js_files_count" => 0,
"backend_triggered_js_files_count" => 0
})
end
end
context "when metrics save fails" do
before do
mock_jest_script_execution(success: true)
# Only the metrics-file write raises; other writes pass through.
allow(File).to receive(:write).and_call_original
allow(File).to receive(:write).with(/metrics_frontend\.json/, anything).and_raise(Errno::EACCES)
end
it "logs the error but does not raise" do
expect { exporter.execute }.not_to raise_error
expect(logger).to have_received(:error).with(/Failed to export frontend metrics.*Permission denied/)
expect(logger).to have_received(:error).with(array_including(/tooling/))
end
end
end
private
# Writes `content` to a new Tempfile and returns its path (file is closed
# but not unlinked; cleanup happens in the `after` hook).
def create_temp_file(name, content)
file = Tempfile.new(name)
file.write(content)
file.close
file.path
end
# Stubs the exporter's `system` call to the Jest script. When successful and
# `create_output` is set, writes the predicted test list to the path given in
# the JEST_MATCHING_TEST_FILES_PATH env var, mimicking the real script.
def mock_jest_script_execution(success: true, create_output: true)
allow(exporter).to receive(:system) do |env, _|
if success && create_output
jest_output_path = env['JEST_MATCHING_TEST_FILES_PATH']
FileUtils.mkdir_p(File.dirname(jest_output_path))
File.write(jest_output_path, predicted_tests_content)
end
success
end
end
end
# rubocop:enable Gitlab/Json

View File

@ -25,7 +25,7 @@ RSpec.describe Tooling::PredictiveTests::MetricsExporter, feature_category: :too
let(:logger) { instance_double(Logger, info: nil, error: nil) }
let(:event_name) { "glci_predictive_tests_metrics" }
let(:extra_properties) { { ci_job_id: "123" } }
let(:extra_properties) { { ci_job_id: "123", test_type: "backend" } }
# temporary folder for inputs and outputs
let(:input_dir) { Dir.mktmpdir("predictive-tests-input") }
@ -111,7 +111,7 @@ RSpec.describe Tooling::PredictiveTests::MetricsExporter, feature_category: :too
.and_return(changed_files)
allow(Tooling::PredictiveTests::TestSelector).to receive(:new).and_return(test_selector)
allow(Tooling::Events::TrackPipelineEvents).to receive(:new).and_return(event_tracker)
allow(Logger).to receive(:new).with($stdout, progname: "predictive testing").and_return(logger)
allow(Logger).to receive(:new).with($stdout, progname: "rspec predictive testing").and_return(logger)
end
describe "#execute" do

View File

@ -15,7 +15,8 @@ RSpec.describe FlushCounterIncrementsWorker, :counter_attribute, feature_categor
subject(:service) { worker.perform(model.class.name, model.id, attribute) }
it 'commits increments to database' do
expect(model.class).to receive(:find_by_id).and_return(model)
relation_double = instance_double(ActiveRecord::Relation, take: model)
expect(model.class).to receive(:primary_key_in).with([model.id]).and_return(relation_double)
expect_next_instance_of(Gitlab::Counters::BufferedCounter, model, attribute) do |service|
expect(service).to receive(:commit_increment!)
end

View File

@ -23,8 +23,12 @@ OptionParser.new do |opts|
options[:select_tests] = true
end
opts.on('--export-selected-test-metrics', 'Export selected test metrics') do
options[:export_metrics] = true
opts.on('--export-predictive-backend-metrics', 'Export predictive test metrics for rspec') do
options[:export_rspec_metrics] = true
end
opts.on('--export-predictive-frontend-metrics', 'Export predictive test metrics for frontend') do
options[:export_jest_metrics] = true
end
opts.on('-h', '--help', 'Show this help message') do
@ -35,7 +39,10 @@ end.parse!
# Validate that exactly one option is provided
if options.empty?
puts "Error: Please specify either --select-tests or --export-selected-test-metrics"
puts "Error: Please specify one of these flags:
1. --select-tests
2. --export-predictive-backend-metrics
3. --export-predictive-frontend-metrics"
puts "Use --help for more information"
exit 1
elsif options.size > 1
@ -76,7 +83,7 @@ if options[:select_tests]
# File with a list of mr changes is also used by frontend related pipelines/jobs
File.write(ENV['RSPEC_CHANGED_FILES_PATH'], changed_files.join("\n"))
elsif options[:export_metrics]
elsif options[:export_rspec_metrics]
require_relative '../lib/tooling/predictive_tests/metrics_exporter'
validate_required_env_variables!(%w[
@ -91,4 +98,19 @@ elsif options[:export_metrics]
frontend_fixtures_mapping_file: ENV['FRONTEND_FIXTURES_MAPPING_PATH'],
output_dir: ENV['GLCI_PREDICTIVE_TEST_METRICS_OUTPUT_DIR']
).execute
elsif options[:export_jest_metrics]
require_relative '../lib/tooling/predictive_tests/frontend_metrics_exporter'
validate_required_env_variables!(%w[
JEST_MATCHING_TEST_FILES_PATH
GLCI_JEST_FAILED_TESTS_FILE
GLCI_PREDICTIVE_TEST_METRICS_OUTPUT_DIR
])
Tooling::PredictiveTests::FrontendMetricsExporter.new(
rspec_changed_files_path: ENV['RSPEC_CHANGED_FILES_PATH'],
rspec_matching_js_files_path: ENV['RSPEC_MATCHING_JS_FILES_PATH'],
jest_failed_test_files_path: ENV['GLCI_JEST_FAILED_TESTS_FILE'],
output_dir: ENV['GLCI_PREDICTIVE_TEST_METRICS_OUTPUT_DIR']
).execute
end

View File

@ -0,0 +1,163 @@
# frozen_string_literal: true

require_relative "test_selector"
require_relative "../helpers/file_handler"
require_relative "../events/track_pipeline_events"

require "logger"
# Explicit stdlib requires for everything this file calls directly; previously
# only "logger" was required and FileUtils/JSON/Time#iso8601 relied on
# transitive requires from other files, which is fragile for a standalone
# tooling script.
require "fileutils"
require "json"
require "time"

module Tooling
  module PredictiveTests
    # Exports metrics about Jest (frontend) predictive test selection.
    #
    # Runs the Jest predictive-test Node script, then computes and persists
    # accuracy metrics (as JSON under `output_dir/frontend/`) and reports them
    # as internal pipeline events. All failures are logged, never raised, so
    # metrics export cannot fail a CI job.
    class FrontendMetricsExporter
      include Helpers::FileHandler

      TEST_TYPE = 'frontend'
      DEFAULT_STRATEGY = "jest_built_in"
      # File extensions considered "frontend-related" among changed files.
      JS_FILE_FILTER_REGEX = /\.(js|json|vue|ts|tsx)$/
      PREDICTIVE_TEST_METRICS_EVENT = "glci_predictive_tests_metrics"
      # Node script (relative to the project root) that computes the Jest
      # predictive test list.
      JEST_PREDICTIVE_TESTS_SCRIPT_PATH = "scripts/frontend/find_jest_predictive_tests.js"

      # @param rspec_changed_files_path [String] path to a file listing all changed files
      # @param rspec_matching_js_files_path [String] path to a file listing JS files
      #   selected from backend (RSpec) changes
      # @param jest_failed_test_files_path [String] path to a file listing Jest test
      #   files that actually failed
      # @param output_dir [String, nil] directory under which metrics and the
      #   predicted test list are written
      def initialize(
        rspec_changed_files_path:,
        rspec_matching_js_files_path:,
        jest_failed_test_files_path:,
        output_dir: nil
      )
        @rspec_changed_files_path = rspec_changed_files_path
        @rspec_matching_js_files_path = rspec_matching_js_files_path
        @jest_failed_test_files_path = jest_failed_test_files_path
        @output_dir = output_dir
        @logger = Logger.new($stdout, progname: "jest predictive testing")
      end

      # Runs the full export: generate the Jest predictive test list, compute
      # metrics, save them to disk, and emit tracking events.
      # Rescues StandardError so a metrics failure never fails the caller.
      #
      # @return [void]
      def execute
        logger.info("Running frontend metrics export...")
        # If Jest script generation fails, just return early
        unless generate_jest_predictive_tests
          logger.info("Skipping metrics export due to Jest script issues")
          return
        end
        metrics = generate_metrics_data
        save_metrics(metrics)
        send_metrics_events(metrics)
        logger.info("Frontend metrics export completed")
      rescue StandardError => e
        logger.error("Failed to export frontend metrics: #{e.message}")
        # Only log backtrace frames from this repository to keep output short.
        logger.error(e.backtrace.select { |entry| entry.include?(project_root) }) if e.backtrace
      end

      private

      attr_reader :rspec_changed_files_path, :rspec_matching_js_files_path, :jest_failed_test_files_path, :output_dir,
        :logger

      # Absolute path of the repository root (four levels above this file).
      def project_root
        @project_root ||= File.expand_path("../../../..", __dir__)
      end

      # Frontend-specific output directory, created on first access.
      def output_path
        @output_path ||= File.join(output_dir, "frontend").tap { |path| FileUtils.mkdir_p(path) }
      end

      # Lazily-built event tracker used to emit pipeline metrics events.
      def tracker
        @tracker ||= Tooling::Events::TrackPipelineEvents.new(logger: logger)
      end

      # Path where the Jest script writes the predicted test list.
      def jest_matching_test_files_path
        @jest_matching_test_files_path ||= File.join(output_path, "jest_matching_test_files.txt")
      end

      # Invokes the Node script that produces the Jest predictive test list.
      #
      # @return [Boolean] true on success; false when the script is missing or
      #   exits non-zero (both cases are logged, not raised)
      def generate_jest_predictive_tests
        logger.info("Generating Jest predictive test list...")
        script_path = File.join(project_root, JEST_PREDICTIVE_TESTS_SCRIPT_PATH)
        unless File.exist?(script_path)
          logger.warn("Jest predictive test script not found at #{script_path}")
          return false # Return false instead of continuing
        end
        # Set environment variables for the Jest script
        env = {
          # get these artifacts from previous job
          'RSPEC_CHANGED_FILES_PATH' => rspec_changed_files_path,
          'RSPEC_MATCHING_JS_FILES_PATH' => rspec_matching_js_files_path,
          # path to save predictive jest files
          'JEST_MATCHING_TEST_FILES_PATH' => jest_matching_test_files_path
        }
        success = system(env, script_path)
        unless success
          logger.error("Failed to generate Jest predictive tests")
          return false
        end
        true
      end

      # Builds the metrics hash from the input files and the Jest script output.
      # `read_array_from_file` comes from Helpers::FileHandler.
      #
      # @return [Hash] with :timestamp, :test_framework and :core_metrics keys
      def generate_metrics_data
        logger.info("Generating frontend metrics data...")
        changed_files = read_array_from_file(rspec_changed_files_path)
        changed_js_files = changed_files.select { |f| f.match?(JS_FILE_FILTER_REGEX) }
        backend_triggered_js_files = read_array_from_file(rspec_matching_js_files_path)
        predicted_frontend_test_files = read_array_from_file(jest_matching_test_files_path)
        failed_frontend_test_files = read_array_from_file(jest_failed_test_files_path)
        # Failing test files that the predictive selection did not pick up.
        missed_failing_test_files = (failed_frontend_test_files - predicted_frontend_test_files).size
        {
          timestamp: Time.now.iso8601,
          test_framework: 'jest',
          core_metrics: {
            changed_files_count: changed_files.size,
            predicted_test_files_count: predicted_frontend_test_files.size,
            missed_failing_test_files: missed_failing_test_files,
            changed_js_files_count: changed_js_files.size,
            backend_triggered_js_files_count: backend_triggered_js_files.size
          }
        }
      end

      # Writes the metrics hash as pretty-printed JSON into the output dir.
      #
      # @param metrics [Hash]
      # @return [void]
      def save_metrics(metrics)
        metrics_file = File.join(output_path, "metrics_frontend.json")
        File.write(metrics_file, JSON.pretty_generate(metrics)) # rubocop:disable Gitlab/Json -- not in Rails environment
        logger.info("Frontend metrics saved to #{metrics_file}")
      end

      # Emits one tracking event per reported core metric.
      #
      # @param metrics [Hash] as returned by #generate_metrics_data
      # @return [void]
      def send_metrics_events(metrics)
        core = metrics[:core_metrics]
        extra_properties = {
          ci_job_id: ENV["CI_JOB_ID"],
          test_type: TEST_TYPE
        }
        tracker.send_event(
          PREDICTIVE_TEST_METRICS_EVENT,
          label: "changed_files_count",
          value: core[:changed_files_count],
          property: DEFAULT_STRATEGY,
          extra_properties: extra_properties
        )
        tracker.send_event(
          PREDICTIVE_TEST_METRICS_EVENT,
          label: "predicted_test_files_count",
          value: core[:predicted_test_files_count],
          property: DEFAULT_STRATEGY,
          extra_properties: extra_properties
        )
        tracker.send_event(
          PREDICTIVE_TEST_METRICS_EVENT,
          label: "missed_failing_test_files",
          value: core[:missed_failing_test_files],
          property: DEFAULT_STRATEGY,
          extra_properties: extra_properties
        )
      end
    end
  end
end

View File

@ -19,6 +19,7 @@ module Tooling
PREDICTIVE_TEST_METRICS_EVENT = "glci_predictive_tests_metrics"
STRATEGIES = [:coverage, :described_class].freeze
TEST_TYPE = "backend"
def initialize(
rspec_all_failed_tests_file:,
@ -30,7 +31,7 @@ module Tooling
@crystalball_mapping_dir = crystalball_mapping_dir
@frontend_fixtures_mapping_file = frontend_fixtures_mapping_file
@output_dir = output_dir
@logger = Logger.new($stdout, progname: "predictive testing")
@logger = Logger.new($stdout, progname: "rspec predictive testing")
end
# Execute metrics export
@ -123,7 +124,7 @@ module Tooling
).execute
end
# Create, save and export metrics for selected tests for specific strategy
# Create, save and export metrics for selected RSpec tests for specific strategy
#
# @param strategy [Symbol]
# @return [void]
@ -148,7 +149,7 @@ module Tooling
save_metrics(metrics, strategy)
send_metrics_events(metrics, strategy)
logger.info("Metrics generation completed")
logger.info("Metrics generation completed for strategy '#{strategy}'")
end
# Create metrics hash with all calculated metrics based on crystalball mapping and selected test strategy
@ -199,7 +200,7 @@ module Tooling
# @return [void]
def send_metrics_events(metrics, strategy)
core = metrics[:core_metrics]
extra_properties = { ci_job_id: ENV["CI_JOB_ID"] }
extra_properties = { ci_job_id: ENV["CI_JOB_ID"], test_type: TEST_TYPE }
tracker.send_event(
PREDICTIVE_TEST_METRICS_EVENT,

View File

@ -1449,14 +1449,14 @@
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-3.138.0.tgz#5db6d76ceedcf3716e9ce624b272a58052d8d121"
integrity sha512-Jzd7GhmKxsQdCTttOe6C4AjqGvq8L91N6uUYnAmwnLGeY3aRD12BKBSgId5FrTH6rvk2w36o1+AwIqP+YuHV4g==
"@gitlab/ui@115.4.0":
version "115.4.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-115.4.0.tgz#f5fb265560c0d7bc98e5537c82ca701318be72f0"
integrity sha512-IjeeXHFZdnXIUoGqACdwwAPXTDT7OZdJ2kVPDOTfvc66Yow4OIXRNsZr300Fd3dvItyyDy5f/hCqhbyLMWb9hQ==
"@gitlab/ui@115.5.0":
version "115.5.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-115.5.0.tgz#88100a1dc152bf4bb045293c5faf12f1ae7c8c15"
integrity sha512-CGDQogwLATAvtBqUcECSnJiNUBvipAMKl1UhSF2JBDVYJ1etYjFaLSb6lho0MM7iaTL95LKGhoQf+lhwprywZw==
dependencies:
"@floating-ui/dom" "1.7.2"
echarts "^5.6.0"
gridstack "^12.2.1"
gridstack "^12.2.2"
iframe-resizer "^4.4.5"
lodash "^4.17.21"
popper.js "^1.16.1"
@ -8475,10 +8475,10 @@ graphql@16.11.0:
resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.11.0.tgz#96d17f66370678027fdf59b2d4c20b4efaa8a633"
integrity sha512-mS1lbMsxgQj6hge1XZ6p7GPhbrtFwUFYi3wRzXAC/FmYnyXMTvvI3td3rjmQ2u8ewXueaSvRPWaEcgVVOT9Jnw==
gridstack@^12.2.1:
version "12.2.1"
resolved "https://registry.yarnpkg.com/gridstack/-/gridstack-12.2.1.tgz#0e82e3d9d11e5229388d73bd57f8ef1a0e7059c4"
integrity sha512-xU69tThmmVxgMHTuM/z3rIKiiGm0zW4tcB6yRcuwiOUUBiwb3tslzFOrUjWz+PwaxoAW+JChT4fqOLl+oKAxZA==
gridstack@^12.2.1, gridstack@^12.2.2:
version "12.2.2"
resolved "https://registry.yarnpkg.com/gridstack/-/gridstack-12.2.2.tgz#a9ec300cefc93516bcb8dd966510a728027be358"
integrity sha512-eK9XAbBWQp+QniqL6ipvofWSrCelm46j5USag73LNq8tOWSL2DeeGBWU9mTibLI6i66n0r7xYS+1/g2qqTqKcw==
gzip-size@^6.0.0:
version "6.0.0"