Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-06-13 03:14:15 +00:00
parent b3bd59a59d
commit 7acd0bde92
73 changed files with 1036 additions and 333 deletions

View File

@ -2696,7 +2696,6 @@ Layout/LineLength:
- 'spec/finders/security/security_jobs_finder_spec.rb'
- 'spec/finders/snippets_finder_spec.rb'
- 'spec/finders/tags_finder_spec.rb'
- 'spec/finders/todos_finder_spec.rb'
- 'spec/finders/user_group_notification_settings_finder_spec.rb'
- 'spec/finders/user_groups_counter_spec.rb'
- 'spec/finders/user_recent_events_finder_spec.rb'

View File

@ -562,7 +562,6 @@ RSpec/BeEq:
- 'spec/finders/concerns/finder_with_cross_project_access_spec.rb'
- 'spec/finders/packages/go/package_finder_spec.rb'
- 'spec/finders/repositories/changelog_tag_finder_spec.rb'
- 'spec/finders/todos_finder_spec.rb'
- 'spec/graphql/mutations/issues/move_spec.rb'
- 'spec/graphql/mutations/merge_requests/update_spec.rb'
- 'spec/graphql/mutations/releases/update_spec.rb'

View File

@ -86,7 +86,6 @@ RSpec/ContainExactly:
- 'spec/finders/personal_access_tokens_finder_spec.rb'
- 'spec/finders/projects_finder_spec.rb'
- 'spec/finders/repositories/branch_names_finder_spec.rb'
- 'spec/finders/todos_finder_spec.rb'
- 'spec/graphql/mutations/ci/runner/update_spec.rb'
- 'spec/graphql/resolvers/ci/all_jobs_resolver_spec.rb'
- 'spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb'
@ -114,7 +113,6 @@ RSpec/ContainExactly:
- 'spec/graphql/types/packages/package_type_enum_spec.rb'
- 'spec/graphql/types/project_member_relation_enum_spec.rb'
- 'spec/graphql/types/security/report_types_enum_spec.rb'
- 'spec/graphql/types/todo_target_enum_spec.rb'
- 'spec/graphql/types/visibility_pipeline_id_type_enum_spec.rb'
- 'spec/helpers/integrations_helper_spec.rb'
- 'spec/helpers/nav_helper_spec.rb'

View File

@ -1183,7 +1183,6 @@ RSpec/ContextWording:
- 'spec/finders/tags_finder_spec.rb'
- 'spec/finders/template_finder_spec.rb'
- 'spec/finders/terraform/states_finder_spec.rb'
- 'spec/finders/todos_finder_spec.rb'
- 'spec/finders/user_group_notification_settings_finder_spec.rb'
- 'spec/finders/user_recent_events_finder_spec.rb'
- 'spec/frontend/fixtures/merge_requests.rb'

View File

@ -35,7 +35,6 @@ RSpec/SubjectDeclaration:
- 'spec/finders/labels_finder_spec.rb'
- 'spec/finders/merge_requests_finder_spec.rb'
- 'spec/finders/snippets_finder_spec.rb'
- 'spec/finders/todos_finder_spec.rb'
- 'spec/helpers/labels_helper_spec.rb'
- 'spec/helpers/routing/pseudonymization_helper_spec.rb'
- 'spec/lib/banzai/filter/references/design_reference_filter_spec.rb'

View File

@ -1 +1 @@
dd0a9c91bee7b2b9babc04d4e4abe99d41d8055f
7722e3619144b4e551afe08b52782278efd18920

View File

@ -0,0 +1,3 @@
<!-- Root component for the AI Catalog SPA: renders whichever page
     component the Vue Router matched (see ./router). -->
<template>
<router-view />
</template>

View File

@ -0,0 +1,12 @@
# Client-side (@client) query for the current user's AI workflows.
# Resolved entirely from the Apollo cache, which is seeded with
# placeholder data in index.js — presumably until a real backend
# field exists (TODO: confirm once the API lands).
query userWorkflows {
currentUser @client {
id
workflows {
nodes {
id
type
name
}
}
}
}

View File

@ -0,0 +1,61 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import AiCatalogApp from './ai_catalog_app.vue';
import { createRouter } from './router';
import userWorkflowsQuery from './graphql/user_workflows.query.graphql';

/**
 * Mounts the AI Catalog single-page app.
 *
 * @param {string} selector - CSS selector of the mount element; the element
 *   must carry a `data-ai-catalog-index-path` attribute used as the router base.
 * @returns {Vue|null} the root Vue instance, or null when the element is absent.
 */
export const initAiCatalog = (selector = '#js-ai-catalog') => {
  const el = document.querySelector(selector);

  if (!el) {
    return null;
  }

  const { aiCatalogIndexPath } = el.dataset;

  Vue.use(VueApollo);

  const apolloProvider = new VueApollo({
    defaultClient: createDefaultClient(),
  });

  // Seed the cache with placeholder data so the `@client` userWorkflows
  // query resolves before a real backend field exists.
  // `__typename` values are required here: `createDefaultClient` adds
  // `__typename` to every selection set, so data written without them
  // would never satisfy a cache read and the query would return nothing.
  /* eslint-disable @gitlab/require-i18n-strings */
  apolloProvider.clients.defaultClient.cache.writeQuery({
    query: userWorkflowsQuery,
    data: {
      currentUser: {
        __typename: 'CurrentUser',
        id: 1,
        workflows: {
          __typename: 'WorkflowConnection',
          nodes: [
            {
              __typename: 'Workflow',
              id: 1,
              name: 'Workflow 1',
              type: 'Type 1',
            },
            {
              __typename: 'Workflow',
              id: 2,
              name: 'Workflow 2',
              type: 'Type 2',
            },
          ],
        },
      },
    },
  });
  /* eslint-enable @gitlab/require-i18n-strings */

  return new Vue({
    el,
    name: 'AiCatalogRoot',
    router: createRouter(aiCatalogIndexPath),
    apolloProvider,
    render(h) {
      return h(AiCatalogApp);
    },
  });
};

View File

@ -0,0 +1,41 @@
<script>
import { GlSkeletonLoader } from '@gitlab/ui';
import { s__ } from '~/locale';
import userWorkflowsQuery from '../graphql/user_workflows.query.graphql';

/**
 * Index page of the AI Catalog: lists the current user's workflows,
 * read via the client-side `userWorkflows` query.
 */
export default {
  name: 'AiCatalogIndex',
  components: {
    GlSkeletonLoader,
  },
  // Vue templates can only reference members of the component instance;
  // `s__` itself is not available in the template, so translated strings
  // must be exposed here and read as `$options.i18n.*`.
  i18n: {
    pageTitle: s__('AI|AI Catalog'),
  },
  apollo: {
    userWorkflows: {
      query: userWorkflowsQuery,
      // Guard against a missing currentUser (e.g. cache not yet seeded)
      // so the update callback cannot throw during render.
      update: (data) => data.currentUser?.workflows?.nodes ?? [],
    },
  },
  data() {
    return {
      userWorkflows: [],
    };
  },
  computed: {
    isLoading() {
      return this.$apollo.queries.userWorkflows.loading;
    },
  },
};
</script>

<template>
  <div>
    <h1>{{ $options.i18n.pageTitle }}</h1>
    <div v-if="isLoading">
      <gl-skeleton-loader />
    </div>
    <div v-else>
      <div v-for="workflow in userWorkflows" :key="workflow.id">
        <p>{{ workflow.name }}</p>
        <p>{{ workflow.type }}</p>
      </div>
    </div>
  </div>
</template>

View File

@ -0,0 +1 @@
export const AI_CATALOG_INDEX_ROUTE = 'ai-catalog';

View File

@ -0,0 +1,20 @@
import Vue from 'vue';
import VueRouter from 'vue-router';
import AiCatalogIndex from '../pages/ai_catalog_index.vue';
import { AI_CATALOG_INDEX_ROUTE } from './constants';

Vue.use(VueRouter);

/**
 * Builds the AI Catalog router.
 *
 * @param {string} base - path the app is mounted under (supplied by the
 *   Rails route via the mount element's dataset).
 * @returns {VueRouter} history-mode router for the catalog pages.
 */
export const createRouter = (base) => {
  const routes = [
    {
      name: AI_CATALOG_INDEX_ROUTE,
      path: '',
      component: AiCatalogIndex,
    },
  ];

  return new VueRouter({ base, mode: 'history', routes });
};

View File

@ -61,9 +61,12 @@ export default {
},
},
data() {
const currentTab = this.$route.params.filter || '';
return {
currentTab: this.$route.params.filter || '',
currentTab,
isVisible: !Visibility.hidden(),
visitedTabs: new Set([currentTab]),
};
},
computed: {
@ -82,6 +85,13 @@ export default {
if (this.currentTab === key) return;
this.currentTab = key;
// For tabs that we have already visited we cache that its been visited
// and with this value we then stop the lazy rendering of the tabs
// which causes GitLab UI tabs to not destroy and then re-create
// the components inside.
this.visitedTabs.add(key);
this.$router.push({ path: key || '/' });
},
queriesForTab(tab) {
@ -124,7 +134,7 @@ export default {
v-for="tab in tabs"
:key="tab.title"
:active="tab.key === currentTab"
lazy
:lazy="!visitedTabs.has(tab.key)"
data-testid="merge-request-dashboard-tab"
@click="clickTab(tab)"
>

View File

@ -0,0 +1,3 @@
// Page entry point: mounts the AI Catalog Vue app on the default
// `#js-ai-catalog` element rendered by the explore/ai_catalog view.
import { initAiCatalog } from '~/ai/catalog/';

initAiCatalog();

View File

@ -7,7 +7,7 @@ module TodosActions
todo = TodoService.new.mark_todo(issuable, current_user)
render json: {
count: TodosFinder.new(current_user, state: :pending).execute.count,
count: TodosFinder.new(users: current_user, state: :pending).execute.count,
delete_path: dashboard_todo_path(todo)
}
end

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true

module Explore
  # Serves the AI Catalog single-page app under /explore/ai-catalog.
  # The controller only renders the host page; all sub-routes are
  # handled client-side by the Vue router.
  class AiCatalogController < Explore::ApplicationController
    feature_category :duo_workflow

    before_action :check_feature_flag

    private

    # Renders 404 (rather than 403) while the :global_ai_catalog flag is
    # disabled, so the route stays undiscoverable.
    def check_feature_flag
      render_404 unless Feature.enabled?(:global_ai_catalog, current_user)
    end
  end
end

View File

@ -46,7 +46,7 @@ module FinderWithCrossProjectAccess
end
end
# We can skip the cross project check for finding indivitual records.
# We can skip the cross project check for finding individual records.
# this would be handled by the `can?(:read_*, result)` call in `FinderMethods`
# itself.
override :find_by!

View File

@ -5,15 +5,14 @@
# Used to filter Todos by set of params
#
# Arguments:
# current_user - which user use
# params:
# action_id: integer
# author_id: integer
# project_id; integer
# target_id; integer
# state: 'pending' (default) or 'done'
# is_snoozed: boolean
# type: 'Issue' or 'MergeRequest' or ['Issue', 'MergeRequest']
# users: which user or users, provided as a list, to use.
# action_id: integer
# author_id: integer
# project_id; integer
# target_id; integer
# state: 'pending' (default) or 'done'
# is_snoozed: boolean
# type: 'Issue' or 'MergeRequest' or ['Issue', 'MergeRequest']
#
class TodosFinder
@ -31,7 +30,7 @@ class TodosFinder
WikiPage::Meta]
).freeze
attr_accessor :current_user, :params
attr_accessor :params
class << self
def todo_types
@ -39,16 +38,17 @@ class TodosFinder
end
end
def initialize(current_user, params = {})
@current_user = current_user
def initialize(users:, **params)
@users = users
@params = params
self.should_skip_cross_project_check = true if skip_cross_project_check?
end
def execute
return Todo.none if current_user.nil?
return Todo.none if users.blank?
raise ArgumentError, invalid_type_message unless valid_types?
items = current_user.todos
items = Todo.for_user(users)
items = without_hidden(items)
items = by_action_id(items)
items = by_action(items)
@ -65,15 +65,27 @@ class TodosFinder
sort(items)
end
# Returns `true` if the current user has any todos for the given target with the optional given state.
#
# target - The value of the `target_type` column, such as `Issue`.
# state - The value of the `state` column, such as `pending` or `done`.
def any_for_target?(target, state = nil)
current_user.todos.any_for_target?(target, state)
private
attr_reader :users
def skip_cross_project_check?
users.blank? || users_list.size > 1
end
private
def current_user
# This is needed by the FinderMethods module and by the FinderWithCrossProjectAccess module
# when they do permission checks for a user.
# When there are multiple users, we should find another way to check permissions if needed
# outside this layer.
raise NoMethodError, 'This method is not available when executing with multiple users' if skip_cross_project_check?
users_list.first
end
def users_list
Array.wrap(users)
end
def action_id?
action_id.present? && Todo.action_names.key?(action_id.to_i)
@ -249,11 +261,15 @@ class TodosFinder
def without_hidden(items)
return items.pending_without_hidden if filter_pending_only?
return items if filter_done_only?
return items if filter_done_only? || filter_all?
items.all_without_hidden
end
def filter_all?
Array.wrap(params[:state]).map(&:to_sym) == [:all]
end
def filter_pending_only?
params[:state].blank? || Array.wrap(params[:state]).map(&:to_sym) == [:pending]
end

View File

@ -68,7 +68,7 @@ module Mutations
finder_params[:target_id] = target.id
end
todos = TodosFinder.new(current_user, finder_params).execute
todos = TodosFinder.new(users: current_user, **finder_params).execute
TodoService.new.resolve_todos(todos, current_user, resolved_by_action: :api_all_done)
end

View File

@ -52,7 +52,7 @@ module Resolvers
track_bot_user if current_user.bot?
TodosFinder.new(current_user, todo_finder_params(args)).execute.with_entity_associations
TodosFinder.new(users: current_user, **todo_finder_params(args)).execute.with_entity_associations
end
private

View File

@ -26,7 +26,7 @@ module Types
targets_by_id = targets.index_by(&:id)
ids = targets_by_id.keys
results = TodosFinder.new(current_user, state: state, type: klass_name, target_id: ids).execute
results = TodosFinder.new(users: current_user, state: state, type: klass_name, target_id: ids).execute
by_target_id = results.group_by(&:target_id)

View File

@ -2,7 +2,6 @@
module Types
class TodoTargetEnum < BaseEnum
value 'USER', value: 'User', description: 'User.'
value 'COMMIT', value: 'Commit', description: 'Commit.'
value 'ISSUE', value: 'Issue', description: 'Issue.'
value 'WORKITEM', value: 'WorkItem', description: 'Work item.'

View File

@ -267,7 +267,8 @@ class Todo < ApplicationRecord
end
def distinct_user_ids
distinct.pluck(:user_id)
# When used from the todos finder that applies a default order, we need to reset it.
reorder(nil).distinct.pluck(:user_id)
end
# Count pending todos grouped by user_id and state

View File

@ -2213,7 +2213,7 @@ class User < ApplicationRecord
def todos_pending_count(force: false)
Rails.cache.fetch(['users', id, 'todos_pending_count'], force: force, expires_in: COUNT_CACHE_VALIDITY_PERIOD) do
TodosFinder.new(self, state: :pending).execute.count
TodosFinder.new(users: self, state: :pending).execute.count
end
end

View File

@ -490,7 +490,7 @@ class IssuableBaseService < ::BaseContainerService
when 'add'
todo_service.mark_todo(issuable, current_user)
when 'done'
todo = TodosFinder.new(current_user).find_by(target: issuable)
todo = TodosFinder.new(users: current_user).find_by(target: issuable)
todo_service.resolve_todo(todo, current_user) if todo
end
end

View File

@ -262,9 +262,13 @@ module Projects
# Until we compare the inconsistency rates of the new specialized worker and
# the old approach, we still run AuthorizedProjectsWorker
# but with some delay and lower urgency as a safety net.
UserProjectAccessChangedService.new(user_ids).execute(
priority: UserProjectAccessChangedService::LOW_PRIORITY
)
if Feature.enabled?(:project_authorizations_update_in_background_in_transfer_service, project)
AuthorizedProjectUpdate::EnqueueUsersRefreshAuthorizedProjectsWorker.perform_async(user_ids)
else
UserProjectAccessChangedService.new(user_ids).execute(
priority: UserProjectAccessChangedService::LOW_PRIORITY
)
end
end
def rollback_side_effects

View File

@ -212,7 +212,7 @@ class TodoService
end
def todo_exist?(issuable, current_user)
TodosFinder.new(current_user).any_for_target?(issuable, :pending)
current_user.todos.any_for_target?(issuable, :pending)
end
# Resolves all todos related to target for the current_user

View File

@ -23,7 +23,7 @@ module WorkItems
end
def mark_as_done(todo_id)
todos = TodosFinder.new(current_user, state: :pending, target_id: work_item.id).execute
todos = TodosFinder.new(users: current_user, state: :pending, target_id: work_item.id).execute
todos = todo_id ? todos.id_in(todo_id) : todos
return if todos.empty?

View File

@ -0,0 +1,3 @@
- page_title s_('AI|AI Catalog')
#js-ai-catalog{ data: { ai_catalog_index_path: explore_ai_catalog_path } }

View File

@ -23,6 +23,16 @@
:idempotent: true
:tags: []
:queue_namespace: :authorized_project_update
- :name: authorized_project_update:authorized_project_update_enqueue_users_refresh_authorized_projects
:worker_name: AuthorizedProjectUpdate::EnqueueUsersRefreshAuthorizedProjectsWorker
:feature_category: :permissions
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
:queue_namespace: :authorized_project_update
- :name: authorized_project_update:authorized_project_update_project_recalculate
:worker_name: AuthorizedProjectUpdate::ProjectRecalculateWorker
:feature_category: :permissions

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true

module AuthorizedProjectUpdate # rubocop:disable Gitlab/BoundedContexts -- keeping related workers in the same module
  # Refreshes project authorizations for a batch of users asynchronously,
  # moving the potentially expensive UserProjectAccessChangedService call
  # out of the caller's request/transaction (e.g. project transfer).
  class EnqueueUsersRefreshAuthorizedProjectsWorker
    include ApplicationWorker

    feature_category :permissions
    urgency :low
    data_consistency :delayed
    queue_namespace :authorized_project_update

    # Back off when project_authorizations shows database pressure; the
    # refresh is a safety-net and is not time-critical.
    defer_on_database_health_signal :gitlab_main, [:project_authorizations], 1.minute

    idempotent!
    deduplicate :until_executing, including_scheduled: true

    # user_ids - Array of user IDs whose authorizations need refreshing;
    # no-ops on a nil/empty batch.
    def perform(user_ids)
      return unless user_ids.present?

      UserProjectAccessChangedService.new(user_ids).execute(
        priority: UserProjectAccessChangedService::LOW_PRIORITY
      )
    end
  end
end

View File

@ -0,0 +1,8 @@
---
name: global_ai_catalog
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/193705
rollout_issue_url:
milestone: '18.2'
type: development
group: group::workflow catalog
default_enabled: false

View File

@ -0,0 +1,10 @@
---
name: project_authorizations_update_in_background_in_transfer_service
description: Enqueues the worker that calls the UserProjectAccessChangedService in Projects::TransferService
feature_issue_url: https://gitlab.com/gitlab-com/request-for-help/-/issues/2980
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/194252
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/548979
milestone: '18.1'
group: group::authorization
type: gitlab_com_derisk
default_enabled: false

View File

@ -15,6 +15,7 @@ namespace :explore do
get '/' => 'catalog#index', as: :catalog_index
get '/*full_path' => 'catalog#show', as: :catalog, constraints: { full_path: /.*/ }
end
get '/ai-catalog/(*vueroute)' => 'ai_catalog#index', as: :ai_catalog, format: false
resources :snippets, only: [:index]
root to: 'projects#index'
end

View File

@ -87,6 +87,8 @@
- 2
- - authorized_project_update:authorized_project_update_enqueue_group_members_refresh_authorized_projects
- 1
- - authorized_project_update:authorized_project_update_enqueue_users_refresh_authorized_projects
- 1
- - authorized_project_update:authorized_project_update_project_recalculate
- 1
- - authorized_project_update:authorized_project_update_project_recalculate_per_user

View File

@ -0,0 +1,14 @@
---
name: gitlab_docs.ShortCodeFormat
description: |
Makes sure SVGs use the correct shortcodes.
extends: existence
message: "SVGs are defined with Hugo shortcodes. View the style guide for details."
link: https://docs.gitlab.com/development/documentation/styleguide/#gitlab-svg-icons
vocab: false
ignorecase: true
level: error
nonword: true
scope: raw
tokens:
- '\*\*\{[^\}]*\}\*\*'

View File

@ -220,22 +220,22 @@ It does not cover all data types.
In this context, accelerated reads refer to read requests served from the secondary site, provided that the data is up to date for the component on the secondary site. If the data on the secondary site is determined to be out of date, the request is forwarded to the primary site. Read requests for components not listed in the table below are always automatically forwarded to the primary site.
| Feature / component | Accelerated reads? | Notes |
| :-------------------------------------------------- | :--------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| Project, wiki, design repository (using the web UI) | {{< icon name="dotted-circle" >}} No | |
| Project, wiki repository (using Git) | {{< icon name="check-circle" >}} Yes | Git reads are served from the local secondary while pushes get proxied to the primary. Selective sync or cases where repositories don't exist locally on the Geo secondary throw a "not found" error. |
| Project, Personal Snippet (using the web UI) | {{< icon name="dotted-circle" >}} No | |
| Project, Personal Snippet (using Git) | {{< icon name="check-circle" >}} Yes | Git reads are served from the local secondary while pushes get proxied to the primary. Selective sync or cases where repositories don't exist locally on the Geo secondary throw a "not found" error. |
| Group wiki repository (using the web UI) | {{< icon name="dotted-circle" >}} No | |
| Group wiki repository (using Git) | {{< icon name="check-circle" >}} Yes | Git reads are served from the local secondary while pushes get proxied to the primary. Selective sync or cases where repositories don't exist locally on the Geo secondary throw a "not found" error. |
| User uploads | {{< icon name="dotted-circle" >}} No | |
| LFS objects (using the web UI) | {{< icon name="dotted-circle" >}} No | |
| LFS objects (using Git) | {{< icon name="check-circle" >}} Yes | |
| Pages | {{< icon name="dotted-circle" >}} No | Pages can use the same URL (without access control), but must be configured separately and are not proxied. |
| Advanced search (using the web UI) | {{< icon name="dotted-circle" >}} No | |
| Feature / component | Accelerated reads? | Notes |
|:----------------------------------------------------|:-------------------------------------|-------|
| Project, wiki, design repository (using the web UI) | {{< icon name="dotted-circle" >}} No | |
| Project, wiki repository (using Git) | {{< icon name="check-circle" >}} Yes | Git reads are served from the local secondary while pushes get proxied to the primary. Selective sync or cases where repositories don't exist locally on the Geo secondary throw a "not found" error. |
| Project, Personal Snippet (using the web UI) | {{< icon name="dotted-circle" >}} No | |
| Project, Personal Snippet (using Git) | {{< icon name="check-circle" >}} Yes | Git reads are served from the local secondary while pushes get proxied to the primary. Selective sync or cases where repositories don't exist locally on the Geo secondary throw a "not found" error. |
| Group wiki repository (using the web UI) | {{< icon name="dotted-circle" >}} No | |
| Group wiki repository (using Git) | {{< icon name="check-circle" >}} Yes | Git reads are served from the local secondary while pushes get proxied to the primary. Selective sync or cases where repositories don't exist locally on the Geo secondary throw a "not found" error. |
| User uploads | {{< icon name="dotted-circle" >}} No | |
| LFS objects (using the web UI) | {{< icon name="dotted-circle" >}} No | |
| LFS objects (using Git) | {{< icon name="check-circle" >}} Yes | |
| Pages | {{< icon name="dotted-circle" >}} No | Pages can use the same URL (without access control), but must be configured separately and are not proxied. |
| Advanced search (using the web UI) | {{< icon name="dotted-circle" >}} No | |
| Container registry | {{< icon name="dotted-circle" >}} No | The container registry is only recommended for Disaster Recovery scenarios. If the secondary site's container registry is not up to date, the read request is served with old data as the request is not forwarded to the primary site. Accelerating the container registry is planned, upvote or comment in the [issue](https://gitlab.com/gitlab-org/gitlab/-/issues/365864) to indicate your interest or ask your GitLab representative to do so on your behalf. |
| Dependency Proxy | {{< icon name="dotted-circle" >}} No | Read requests to a Geo secondary site's Dependency Proxy are always proxied to the primary site. |
| All other data | {{< icon name="dotted-circle" >}} No | Read requests for components not listed in this table are always automatically forwarded to the primary site. |
| Dependency Proxy | {{< icon name="dotted-circle" >}} No | Read requests to a Geo secondary site's Dependency Proxy are always proxied to the primary site. |
| All other data | {{< icon name="dotted-circle" >}} No | Read requests for components not listed in this table are always automatically forwarded to the primary site. |
To request acceleration of a feature, check if an issue already exists in [epic 8239](https://gitlab.com/groups/gitlab-org/-/epics/8239) and upvote or comment on it to indicate your interest or ask your GitLab representative to do so on your behalf. If an applicable issue doesn't exist, open one and mention it in the epic.
@ -252,22 +252,22 @@ Disabling the proxying feature flag has the following general effects:
- Other than Git requests, any HTTP request which may write data fails. Read requests generally succeed.
- The secondary site UI shows a banner: ![Secondary Site UI Banner for Read-Only](img/secondary_proxy_read_only_v17_8.png)
| Feature / component | Succeed | Notes |
| :-------------------------------------------------- | :------------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| Project, wiki, design repository (using the web UI) | **{dotted-circle}** Maybe | Reads are served from the locally stored data. Writes cause an error. |
| Project, wiki repository (using Git) | **{check-circle}** Yes | Git reads are served from the locally stored data, while pushes get proxied to the primary. If a repository doesn't exist locally on the Geo secondary, for example due to exclusion by selective sync, it causes a "not found" error. |
| Project, Personal Snippet (using the web UI) | **{dotted-circle}** Maybe | Reads are served from the locally stored data. Writes cause an error. |
| Project, Personal Snippet (using Git) | **{check-circle}** Yes | Git reads are served from the locally stored data, while pushes get proxied to the primary. If a repository doesn't exist locally on the Geo secondary, for example due to exclusion by selective sync, it causes a "not found" error. |
| Group wiki repository (using the web UI) | **{dotted-circle}** Maybe | Reads are served from the locally stored data. Writes cause an error. |
| Group wiki repository (using Git) | **{check-circle}** Yes | Git reads are served from the locally stored data, while pushes get proxied to the primary. If a repository doesn't exist locally on the Geo secondary, for example due to exclusion by selective sync, it causes a "not found" error. |
| User uploads | **{dotted-circle}** Maybe | Upload files are served from the locally stored data. Attempting to upload a file on a secondary causes an error. |
| LFS objects (using the web UI) | **{dotted-circle}** Maybe | Reads are served from the locally stored data. Writes cause an error. |
| LFS objects (using Git) | **{check-circle}** Yes | LFS objects are served from the locally stored data, while pushes get proxied to the primary. If an LFS object doesn't exist locally on the Geo secondary, for example due to exclusion by selective sync, it causes a "not found" error. |
| Pages | **{dotted-circle}** Maybe | Pages can use the same URL (without access control), but must be configured separately and are not proxied. |
| Advanced search (using the web UI) | **{dotted-circle}** No | |
| Container registry | **{dotted-circle}** No | The container registry is only recommended for Disaster Recovery scenarios. If the secondary site's container registry is not up to date, the read request is served with old data as the request is not forwarded to the primary site. Accelerating the container registry is planned, upvote or comment in the [issue](https://gitlab.com/gitlab-org/gitlab/-/issues/365864) to indicate your interest or ask your GitLab representative to do so on your behalf. |
| Dependency Proxy | **{dotted-circle}** No | |
| All other data | **{dotted-circle}** Maybe | Reads are served from the locally stored data. Writes cause an error. |
| Feature / component | Succeed | Notes |
|:----------------------------------------------------|:----------------------------------------|-------|
| Project, wiki, design repository (using the web UI) | {{< icon name="dotted-circle" >}} Maybe | Reads are served from the locally stored data. Writes cause an error. |
| Project, wiki repository (using Git) | {{< icon name="check-circle" >}} Yes | Git reads are served from the locally stored data, while pushes get proxied to the primary. If a repository doesn't exist locally on the Geo secondary, for example due to exclusion by selective sync, it causes a "not found" error. |
| Project, Personal Snippet (using the web UI) | {{< icon name="dotted-circle" >}} Maybe | Reads are served from the locally stored data. Writes cause an error. |
| Project, Personal Snippet (using Git) | {{< icon name="check-circle" >}} Yes | Git reads are served from the locally stored data, while pushes get proxied to the primary. If a repository doesn't exist locally on the Geo secondary, for example due to exclusion by selective sync, it causes a "not found" error. |
| Group wiki repository (using the web UI) | {{< icon name="dotted-circle" >}} Maybe | Reads are served from the locally stored data. Writes cause an error. |
| Group wiki repository (using Git) | {{< icon name="check-circle" >}} Yes | Git reads are served from the locally stored data, while pushes get proxied to the primary. If a repository doesn't exist locally on the Geo secondary, for example due to exclusion by selective sync, it causes a "not found" error. |
| User uploads | {{< icon name="dotted-circle" >}} Maybe | Upload files are served from the locally stored data. Attempting to upload a file on a secondary causes an error. |
| LFS objects (using the web UI) | {{< icon name="dotted-circle" >}} Maybe | Reads are served from the locally stored data. Writes cause an error. |
| LFS objects (using Git) | {{< icon name="check-circle" >}} Yes | LFS objects are served from the locally stored data, while pushes get proxied to the primary. If an LFS object doesn't exist locally on the Geo secondary, for example due to exclusion by selective sync, it causes a "not found" error. |
| Pages | {{< icon name="dotted-circle" >}} Maybe | Pages can use the same URL (without access control), but must be configured separately and are not proxied. |
| Advanced search (using the web UI) | {{< icon name="dotted-circle" >}} No | |
| Container registry | {{< icon name="dotted-circle" >}} No | The container registry is only recommended for Disaster Recovery scenarios. If the secondary site's container registry is not up to date, the read request is served with old data as the request is not forwarded to the primary site. Accelerating the container registry is planned, upvote or comment in the [issue](https://gitlab.com/gitlab-org/gitlab/-/issues/365864) to indicate your interest or ask your GitLab representative to do so on your behalf. |
| Dependency Proxy | {{< icon name="dotted-circle" >}} No | |
| All other data | {{< icon name="dotted-circle" >}} Maybe | Reads are served from the locally stored data. Writes cause an error. |
You should use the feature flag over using the `GEO_SECONDARY_PROXY` environment variable.

View File

@ -147,7 +147,7 @@ To skip confirmation when administrators reassign placeholder users:
1. Under **Skip confirmation when administrators reassign placeholder users**, select the **Enabled** checkbox.
When this setting is enabled, administrators can reassign contributions and memberships
to users with any of the following states:
to non-bot users with any of the following states:
- `active`
- `banned`

View File

@ -482,8 +482,8 @@ job:
Bamboo has a number of options for [triggering builds](https://confluence.atlassian.com/bamboo/triggering-builds-289276897.html),
which can be based on code changes, a schedule, the outcomes of other plans, or on demand.
A plan can be configured to periodically poll a project for new changes,
as shown below.
A plan can be configured to periodically poll a project for new changes.
For example, in a Bamboo build plan:

View File

@ -456,7 +456,7 @@ In addition to the `build` and `helper` containers, six more containers are crea
> Instead, it represents the service's position when no available alias is found.
>
> - When an invalid alias is provided (doesn't meet Kubernetes constraint), the job fails with the
> error below (example with the alias `alpine_edge`). This failure occurs because aliases are
> following error (example with the alias `alpine_edge`). This failure occurs because aliases are
> also used to create local DNS entries on the job Pod.
>
> ```plaintext
@ -500,7 +500,7 @@ For this solution to work, you must:
## How Docker integration works
Below is a high level overview of the steps performed by Docker during job
The following is a high level overview of the steps performed by Docker during job
time.
1. Create any service container: `mysql`, `postgresql`, `mongodb`, `redis`.

View File

@ -13,8 +13,7 @@ title: Using PostgreSQL
{{< /details >}}
As many applications depend on PostgreSQL as their database, you
eventually need it in order for your tests to run. Below you are guided how to
do this with the Docker and Shell executors of GitLab Runner.
have to use it to run your tests.
## Use PostgreSQL with the Docker executor
@ -96,7 +95,7 @@ sudo -u postgres psql -d template1
```
Then create a user (in our case `runner`) which is used by your
application. Change `$password` in the command below to a real strong password.
application. Change `$password` in the following command to a strong password.
{{< alert type="note" >}}

View File

@ -13,8 +13,7 @@ title: Using Redis
{{< /details >}}
As many applications depend on Redis as their key-value store, you
eventually need it in order for your tests to run. Below you are guided how to
do this with the Docker and Shell executors of GitLab Runner.
have to use it to run your tests.
## Use Redis with the Docker executor

View File

@ -196,49 +196,7 @@ Notice that only updates to total counters are batched. If `n` unique metrics an
### Backend testing
When testing code that simply triggers an internal event and make sure it increments all the related metrics,
you can use the `internal_event_tracking` shared example.
```ruby
it_behaves_like 'internal event tracking' do
let(:event) { 'update_issue_severity' }
let(:category) { described_class.name }
let(:project) { issue.project }
let(:user) { issue.author }
let(:additional_properties) { { label: issue.issueable_severity } }
subject(:service_action) { described_class.new(issue).execute }
end
```
It requires a context containing:
- `subject` - the action that triggers the event
- `event` - the name of the event
Optionally, the context can contain:
- `user`
- `project`
- `namespace`. If not provided, `project.namespace` will be used (if `project` is available).
- `category`
- `additional_properties`
If present in the context, the following legacy options will be respected by the shared example but are discouraged:
- `label`
- `property`
- `value`
Prefer including these attributes via `additional_properties` instead.
```ruby
let(:additional_properties) { { label: "value" } }
```
#### Composable matchers
When a singe action triggers an event multiple times, triggers multiple different events, or increments some metrics but not others for the event,
you can use the `trigger_internal_events` and `increment_usage_metrics` matchers on a block argument.
When testing code that triggers internal events or increments metrics, you can use the `trigger_internal_events` and `increment_usage_metrics` matchers on a block argument.
```ruby
expect { subject }
@ -586,7 +544,7 @@ describe('DeleteApplication', () => {
#### Haml with data attributes
If you are using [data attributes](#data-event-attribute) to track internal events at the Haml layer,
you can use the [`trigger_internal_events` matcher](#composable-matchers) to assert that the expected properties are present.
you can use the [`trigger_internal_events` matcher](#backend-testing) to assert that the expected properties are present.
For example, if you need to test the below Haml,

View File

@ -1016,7 +1016,7 @@ Indexing vulnerability records on GitLab Self-Managed is proposed in
### Guidance on choosing optimal cluster configuration
For basic guidance on choosing a cluster configuration you may refer to [Elastic Cloud Calculator](https://cloud.elastic.co/pricing). You can find more information below.
For basic guidance on choosing a cluster configuration, see also [Elastic Cloud Calculator](https://cloud.elastic.co/pricing).
- Generally, you want to use at least a 2-node cluster configuration with one replica, which allows you to have resilience. If your storage usage is growing quickly, you may want to plan horizontal scaling (adding more nodes) beforehand.
- It's not recommended to use HDD storage with the search cluster, because it takes a hit on performance. It's better to use SSD storage (NVMe or SATA SSD drives for example).
@ -1290,7 +1290,9 @@ However, some larger installations may wish to tune the merge policy settings:
{{< alert type="warning" >}}
Most instances should not need to configure this. The steps below use an advanced setting of Sidekiq called [routing rules](../../administration/sidekiq/processing_specific_job_classes.md#routing-rules).
For most instances, you do not have to configure dedicated Sidekiq nodes or processes.
The following steps use an advanced setting of Sidekiq
called [routing rules](../../administration/sidekiq/processing_specific_job_classes.md#routing-rules).
Be sure to fully understand about the implication of using routing rules to avoid losing jobs entirely.
{{< /alert >}}
@ -1312,7 +1314,7 @@ To handle this, we generally recommend one of the following two options. You can
- [Use two queue groups on one single node](#single-node-two-processes).
- [Use two queue groups, one on each node](#two-nodes-one-process-for-each).
For the steps below, consider the entry of `sidekiq['routing_rules']`:
For the following steps, consider the entry of `sidekiq['routing_rules']`:
- `["feature_category=global_search", "global_search"]` as all indexing jobs are routed to the `global_search` queue.
- `["*", "default"]` as all other non-indexing jobs are routed to the `default` queue.

View File

@ -73,8 +73,7 @@ mattermost_nginx['redirect_http_to_https'] = true
```
If you haven't named your certificate and key `mattermost.gitlab.example.crt`
and `mattermost.gitlab.example.key` then you need to also add the full paths
as shown below.
and `mattermost.gitlab.example.key`, then you must also add the following full paths:
```ruby
mattermost_nginx['ssl_certificate'] = "/etc/gitlab/ssl/mattermost-nginx.crt"
@ -142,7 +141,8 @@ By default GitLab Mattermost requires all users to sign up with GitLab and disab
### Reauthorize GitLab Mattermost
To reauthorize GitLab Mattermost, you first need to revoke the existing
authorization. This can be done in the **Settings > Applications** area of GitLab. Then follow the steps below to complete authorization.
authorization. This can be done in the **Settings > Applications** area of GitLab. Then follow the steps
in the following section to complete authorization.
### Authorize GitLab Mattermost
@ -370,7 +370,7 @@ For a complete list of upgrade notices and special considerations for older vers
### GitLab Mattermost versions and edition shipped with the Linux package
Below is a list of Mattermost version changes for GitLab 15.0 and later:
The following table outlines Mattermost version changes for GitLab 15.0 and later:
| GitLab version | Mattermost version | Notes |
| :------------- | :----------------- | ---------------------------------------------------------------------------------------- |

View File

@ -37,7 +37,7 @@ The OSS Library License Check provides:
- GitLab Ultimate tier
- Administrator access to your GitLab instance or group
- [Dependency scanning](../../user/application_security/dependency_scanning/_index.md) enabled for your projects (this can optionally be enabled and enforced for all projects of a specified scope by following the [Dependency Scanning Setup](#setting-up-dependency-scanning-from-scratch) instructions below)
- [Dependency scanning](../../user/application_security/dependency_scanning/_index.md) enabled for your projects (this can optionally be enabled and enforced for all projects of a specified scope by following the [Dependency Scanning Setup](#setting-up-dependency-scanning-from-scratch) instructions)
## Implementation Guide
@ -105,7 +105,7 @@ If you already have a security policy project but don't have dependency and/or l
1. Navigate to your group's Security policy project.
1. Navigate to the `policy.yml` file in `.gitlab/security-policies/`.
1. Click on **Edit** > **Edit single file**.
1. Add the `scan_execution_policy` and `approval_policy` sections from the configuration below.
1. Add the `scan_execution_policy` and `approval_policy` sections from [Complete Policy Configuration](#complete-policy-configuration).
1. Make sure to:
- Maintain the existing YAML structure
- Place these sections at the same level as other top-level sections

View File

@ -93,10 +93,12 @@ This guide covers the steps to configure the policy to run secret detection for
### Configure Secret Detection Policy
To run secret detection automatically in the pipeline as the enforced global policy, set up the policy at the highest level, in this case the top group level. Follow the steps outlined below to create the new secret detection policy.
To run secret detection automatically in the pipeline as the enforced global policy,
set up the policy at the highest level (in this case, for the top-level group).
To create the new secret detection policy:
1. Create the policy: In the same group `Secret Detection`, navigate to that group's **Secure > Policies** page.
1. Click on **New policy**.
1. Select **New policy**.
1. Select **Scan execution policy**.
1. Configure the policy: Give the policy name `Secret Detection Policy` and enter a description and select `Secret Detection` scan
1. Set the **Policy scope** by selecting either "All projects in this group" (and optionally set exceptions) or "Specific projects" (and select the projects from the dropdown).
@ -154,7 +156,7 @@ Once the policy is running. all the projects associated with the global policy w
Secrets will be detected and surfaced. If there is a merge request, the net new secrets will be displayed in the MR widget. If it is the default branch merged, they will be shown in the security vulnerability report as following:
![Security Dashboard](img/secret_detection_pwd_vuln_v17_9.png)
An example password in clear is shown below:
The following is an example password in clear text:
![Security Dashboard](img/secret_detection_pwd_v17_9.png)
## Troubleshooting

View File

@ -88,7 +88,7 @@ Why Use Internal IPs?
### GitLab
The rest of this guide assumes you already have a instance of GitLab up and running that meets the requirements below:
The rest of this guide assumes you already have an instance of GitLab up and running that meets the following requirements:
#### Licensing
@ -162,7 +162,8 @@ Designed for simplicity and performance, Ollama empowers users to harness the po
### AI Gateway
While the official installation guide is available [here](../../install/install_ai_gateway.md), here's a streamlined approach for setting up the AI Gateway. As of January 2025, the image `gitlab/model-gateway:self-hosted-v17.6.0-ee` has been verified to work with GitLab 17.7.
While the official installation guide is available in [Install the GitLab AI gateway](../../install/install_ai_gateway.md), here's a streamlined approach for setting up the AI Gateway. As of January
2025, the image `gitlab/model-gateway:self-hosted-v17.6.0-ee` has been verified to work with GitLab 17.7.
1. Ensure that ...
@ -180,7 +181,7 @@ While the official installation guide is available [here](../../install/install_
gitlab/model-gateway:self-hosted-v17.6.0-ee
```
Below is a table explaining key environment variables and their roles in setting up your instance:
The following table explains key environment variables and their roles in setting up your instance:
| **Variable** | **Description** |
|------------------------------|-----------------|

View File

@ -41,7 +41,7 @@ You might need the user ID if you want to interact with it using the [GitLab API
To find the user ID:
1. Go to the users' profile page.
1. On the profile page, in the upper-right corner, select **Actions** (**{ellipsis_v}**).
1. On the profile page, in the upper-right corner, select **Actions** ({{< icon name="ellipsis_v" >}}).
1. Select **Copy user ID**.
## Access your user settings

View File

@ -370,7 +370,7 @@ Every change creates a system note, which is not affected by the placeholder use
### Reassign contributions and memberships
Users with the Owner role for a top-level group can reassign contributions and memberships
from placeholder users to existing active (non-bot) users.
from placeholder users to existing active non-bot users.
On the destination instance, users with the Owner role for a top-level group can:
- Request users to review reassignment of contributions and memberships [in the UI](#request-reassignment-in-ui)
@ -382,7 +382,7 @@ On the destination instance, users with the Owner role for a top-level group can
- Choose not to reassign contributions and memberships and [keep them assigned to placeholder users](#keep-as-placeholder).
On GitLab Self-Managed and GitLab Dedicated, administrators can reassign
contributions and memberships immediately without user confirmation.
contributions and memberships to active and inactive non-bot users immediately without their confirmation.
For more information, see [skip confirmation when administrators reassign placeholder users](../../../administration/settings/import_and_export_settings.md#skip-confirmation-when-administrators-reassign-placeholder-users).
#### Reassigning contributions from multiple placeholder users
@ -413,6 +413,10 @@ Users that receive a reassignment request can:
In subsequent imports to the same top-level group, contributions and memberships that belong to the same source user
are mapped automatically to the user who previously accepted reassignments for that source user.
On GitLab Self-Managed and GitLab Dedicated, administrators can reassign
contributions and memberships to active and inactive non-bot users immediately without their confirmation.
For more information, see [skip confirmation when administrators reassign placeholder users](../../../administration/settings/import_and_export_settings.md#skip-confirmation-when-administrators-reassign-placeholder-users).
#### Completing the reassignment
The reassignment process must be fully completed before you:
@ -472,6 +476,10 @@ Contributions of only one placeholder user can be reassigned to an active non-bo
Before a user accepts the reassignment, you can [cancel the request](#cancel-reassignment-request).
On GitLab Self-Managed and GitLab Dedicated, administrators can reassign
contributions and memberships to active and inactive non-bot users immediately without their confirmation.
For more information, see [skip confirmation when administrators reassign placeholder users](../../../administration/settings/import_and_export_settings.md#skip-confirmation-when-administrators-reassign-placeholder-users).
#### Request reassignment by using a CSV file
{{< history >}}
@ -522,6 +530,10 @@ to each active non-bot user on the destination instance.
Users receive an email to review and [accept any contributions](#accept-contribution-reassignment) you've reassigned to them.
You can [cancel the reassignment request](#cancel-reassignment-request) before the user reviews it.
On GitLab Self-Managed and GitLab Dedicated, administrators can reassign
contributions and memberships to active and inactive non-bot users immediately without their confirmation.
For more information, see [skip confirmation when administrators reassign placeholder users](../../../administration/settings/import_and_export_settings.md#skip-confirmation-when-administrators-reassign-placeholder-users).
After you reassign contributions, GitLab sends you an email with the number of:
- Successfully processed rows
@ -630,6 +642,7 @@ You can also sort the table by reassignment status.
When [**Skip confirmation when administrators reassign placeholder users**](../../../administration/settings/import_and_export_settings.md#skip-confirmation-when-administrators-reassign-placeholder-users) is enabled:
- Administrators can reassign contributions immediately without user confirmation.
- Administrators can reassign contributions to active and inactive non-bot users.
- You receive an email informing you that you've been reassigned contributions.
If this setting is not enabled, you can [accept](#accept-contribution-reassignment)

View File

@ -13,6 +13,10 @@ module API
end
resource :projects, requirements: ::API::API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
before do
@alert = find_project_alert(params[:alert_iid])
not_found! if Feature.enabled?(:hide_incident_management_features, @alert&.project)
end
namespace ':id/alert_management_alerts/:alert_iid/metric_images' do
desc 'Workhorse authorize metric image file upload' do
success code: 200
@ -56,12 +60,10 @@ module API
require_gitlab_workhorse!
bad_request!('File is too large') if max_file_size_exceeded?
alert = find_project_alert(params[:alert_iid])
authorize!(:upload_alert_management_metric_image, alert)
authorize!(:upload_alert_management_metric_image, @alert)
upload = ::AlertManagement::MetricImages::UploadService.new(
alert,
@alert,
current_user,
params.slice(:file, :url, :url_text)
).execute
@ -85,10 +87,8 @@ module API
tags %w[alert_management]
end
get do
alert = find_project_alert(params[:alert_iid])
if can?(current_user, :read_alert_management_metric_image, alert)
present alert.metric_images.order_created_at_asc, with: Entities::MetricImage
if can?(current_user, :read_alert_management_metric_image, @alert)
present @alert.metric_images.order_created_at_asc, with: Entities::MetricImage
else
render_api_error!('Alert not found', 404)
end
@ -112,11 +112,9 @@ module API
documentation: { example: 'An example metric' }
end
put ':metric_image_id' do
alert = find_project_alert(params[:alert_iid])
authorize!(:update_alert_management_metric_image, @alert)
authorize!(:update_alert_management_metric_image, alert)
metric_image = alert.metric_images.find_by_id(params[:metric_image_id])
metric_image = @alert.metric_images.find_by_id(params[:metric_image_id])
render_api_error!('Metric image not found', 404) unless metric_image
@ -140,11 +138,9 @@ module API
documentation: { example: 42 }
end
delete ':metric_image_id' do
alert = find_project_alert(params[:alert_iid])
authorize!(:destroy_alert_management_metric_image, @alert)
authorize!(:destroy_alert_management_metric_image, alert)
metric_image = alert.metric_images.find_by_id(params[:metric_image_id])
metric_image = @alert.metric_images.find_by_id(params[:metric_image_id])
render_api_error!('Metric image not found', 404) unless metric_image

View File

@ -53,7 +53,7 @@ module API
end
def find_todos
TodosFinder.new(current_user, declared_params(include_missing: false)).execute
TodosFinder.new(users: current_user, **declared_params(include_missing: false)).execute
end
def issuable_and_awardable?(type)

View File

@ -0,0 +1,34 @@
# frozen_string_literal: true
module Sidebars # rubocop: disable Gitlab/BoundedContexts -- unknown
module Explore
module Menus
class AiCatalogMenu < ::Sidebars::Menu
override :link
def link
explore_ai_catalog_path
end
override :title
def title
s_('AI|AI Catalog')
end
override :sprite_icon
def sprite_icon
'tanuki-ai'
end
override :render?
def render?
Feature.enabled?(:global_ai_catalog, current_user)
end
override :active_routes
def active_routes
{ controller: ['explore/ai_catalog'] }
end
end
end
end
end

View File

@ -24,6 +24,7 @@ module Sidebars
add_menu(Sidebars::Explore::Menus::ProjectsMenu.new(context))
add_menu(Sidebars::Explore::Menus::GroupsMenu.new(context))
add_menu(Sidebars::Explore::Menus::CatalogMenu.new(context))
add_menu(Sidebars::Explore::Menus::AiCatalogMenu.new(context))
add_menu(Sidebars::Explore::Menus::TopicsMenu.new(context))
add_menu(Sidebars::Explore::Menus::SnippetsMenu.new(context))
end

View File

@ -2395,6 +2395,9 @@ msgstr ""
msgid "AISummary|View summary"
msgstr ""
msgid "AI|AI Catalog"
msgstr ""
msgid "AI|Accept & Insert"
msgstr ""
@ -15960,6 +15963,9 @@ msgstr ""
msgid "ComplianceReport|Full target branch name"
msgstr ""
msgid "ComplianceReport|Have questions or thoughts on the new improvements? %{linkStart}Please provide feedback on your experience%{linkEnd}."
msgstr ""
msgid "ComplianceReport|However, there is still a configured compliance pipeline that must be removed. Otherwise, the compliance pipeline will continue to take precedence over the new pipeline execution policy."
msgstr ""
@ -16005,6 +16011,9 @@ msgstr ""
msgid "ComplianceReport|Select frameworks"
msgstr ""
msgid "ComplianceReport|Show old report"
msgstr ""
msgid "ComplianceReport|This compliance framework's compliance pipeline has been migrated to a pipeline execution policy."
msgstr ""
@ -16026,6 +16035,9 @@ msgstr ""
msgid "ComplianceReport|View the framework details"
msgstr ""
msgid "ComplianceReport|We are replacing the violations report with a new version that includes enhanced features for your compliance workflow."
msgstr ""
msgid "ComplianceReport|You are viewing the compliance centre for %{project}. To see information for all projects, go to %{linkStart}group%{linkEnd}."
msgstr ""

View File

@ -56,7 +56,7 @@
"@babel/preset-env": "^7.23.7",
"@csstools/postcss-global-data": "^2.1.1",
"@cubejs-client/core": "^1.0.0",
"@floating-ui/dom": "^1.7.0",
"@floating-ui/dom": "^1.7.1",
"@gitlab/application-sdk-browser": "^0.3.4",
"@gitlab/at.js": "1.5.7",
"@gitlab/cluster-client": "^3.0.0",

View File

@ -365,7 +365,8 @@ RSpec.describe LabelsFinder, feature_category: :team_planning do
context 'external authorization' do
it_behaves_like 'a finder with external authorization service' do
let!(:subject) { create(:label, project: project) }
let(:project_params) { { project_id: project.id } }
let(:execute) { described_class.new(user).execute }
let(:project_execute) { described_class.new(user, project_id: project.id).execute }
end
end
end

View File

@ -1206,7 +1206,8 @@ RSpec.describe MergeRequestsFinder, feature_category: :code_review_workflow do
context 'external authorization' do
it_behaves_like 'a finder with external authorization service' do
let!(:subject) { create(:merge_request, source_project: project) }
let(:project_params) { { project_id: project.id } }
let(:execute) { described_class.new(user).execute }
let(:project_execute) { described_class.new(user, project_id: project.id).execute }
end
end
end

View File

@ -409,7 +409,8 @@ RSpec.describe SnippetsFinder do
it_behaves_like 'a finder with external authorization service' do
let!(:subject) { create(:project_snippet, project: project) }
let(:project_params) { { project: project } }
let(:execute) { described_class.new(user).execute }
let(:project_execute) { described_class.new(user, project: project).execute }
end
it 'includes the result if the external service allows access' do

View File

@ -11,128 +11,109 @@ RSpec.describe TodosFinder, feature_category: :notifications do
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
let_it_be(:banned_user) { create(:user, :banned) }
let(:finder) { described_class }
before_all do
group.add_developer(user)
end
describe '#execute' do
it 'returns no todos if user is nil' do
expect(described_class.new(nil, {}).execute).to be_empty
expect(execute(users: nil)).to be_empty
end
context 'filtering' do
context 'when users is not passed' do
it 'raises an argument error' do
expect { described_class.new.execute }.to raise_error(ArgumentError)
end
end
context 'with filtering' do
let!(:todo1) { create(:todo, user: user, project: project, target: issue) }
let!(:todo2) { create(:todo, user: user, group: group, target: merge_request) }
let!(:banned_pending_todo) { create(:todo, :pending, user: user, project: project, target: issue, author: banned_user) }
let!(:banned_pending_todo) do
create(:todo, :pending, user: user, project: project, target: issue, author: banned_user)
end
it 'returns excluding pending todos authored by banned users' do
todos = finder.new(user, {}).execute
expect(todos).to match_array([todo1, todo2])
expect(execute).to match_array([todo1, todo2])
end
it 'returns correct todos when filtered by a project' do
todos = finder.new(user, { project_id: project.id }).execute
expect(todos).to match_array([todo1])
expect(execute(project_id: project.id)).to match_array([todo1])
end
it 'returns correct todos when filtered by a group' do
todos = finder.new(user, { group_id: group.id }).execute
expect(execute(group_id: group.id)).to match_array([todo1, todo2])
end
expect(todos).to match_array([todo1, todo2])
context 'with multiple users sent to the finder' do
it 'returns correct todos for the users passed' do
todo3 = create(:todo)
user2 = todo3.user
create(:todo)
expect(execute(users: [user, user2])).to match_array([todo1, todo2, todo3])
end
end
context 'when filtering by type' do
it 'returns todos by type when filtered by a single type' do
todos = finder.new(user, { type: 'Issue' }).execute
expect(todos).to match_array([todo1])
expect(execute(type: 'Issue')).to match_array([todo1])
end
it 'returns todos by type when filtered by multiple types' do
design_todo = create(:todo, user: user, group: group, target: create(:design))
create(:todo, user: user, group: group, target: create(:design))
todos = finder.new(user, { type: %w[Issue MergeRequest] }).execute
expect(todos).to contain_exactly(todo1, todo2)
expect(todos).not_to include(design_todo)
expect(execute(type: %w[Issue MergeRequest])).to contain_exactly(todo1, todo2)
end
it 'returns all todos when type is nil' do
todos = finder.new(user, { type: nil }).execute
expect(todos).to contain_exactly(todo1, todo2)
expect(execute(type: nil)).to contain_exactly(todo1, todo2)
end
it 'returns all todos when type is an empty collection' do
todos = finder.new(user, { type: [] }).execute
expect(todos).to contain_exactly(todo1, todo2)
expect(execute(type: [])).to contain_exactly(todo1, todo2)
end
it 'returns all todos when type is blank' do
todos = finder.new(user, { type: '' }).execute
expect(todos).to contain_exactly(todo1, todo2)
expect(execute(type: '')).to contain_exactly(todo1, todo2)
end
it 'returns todos by type when blank type is in type collection' do
todos = finder.new(user, { type: ['', 'MergeRequest'] }).execute
expect(todos).to contain_exactly(todo2)
expect(execute(type: ['', 'MergeRequest'])).to contain_exactly(todo2)
end
it 'returns todos of all types when only blanks are in a collection' do
todos = finder.new(user, { type: ['', ''] }).execute
expect(todos).to contain_exactly(todo1, todo2)
end
it 'returns all todos when no type param' do
todos = finder.new(user).execute
expect(todos).to contain_exactly(todo1, todo2)
expect(execute(type: ['', ''])).to contain_exactly(todo1, todo2)
end
it 'raises an argument error when invalid type is passed' do
todos_finder = finder.new(user, { type: %w[Issue MergeRequest NotAValidType] })
expect { todos_finder.execute }.to raise_error(ArgumentError)
expect { execute(type: %w[Issue MergeRequest NotAValidType]) }.to raise_error(ArgumentError)
end
end
context 'when filtering for actions' do
let!(:todo1) { create(:todo, user: user, project: project, target: issue, action: Todo::ASSIGNED) }
let!(:todo2) { create(:todo, user: user, group: group, target: merge_request, action: Todo::DIRECTLY_ADDRESSED) }
let!(:todo2) do
create(:todo, user: user, group: group, target: merge_request, action: Todo::DIRECTLY_ADDRESSED)
end
context 'by action ids' do
context 'with by action ids' do
it 'returns the expected todos' do
todos = finder.new(user, { action_id: Todo::DIRECTLY_ADDRESSED }).execute
expect(todos).to match_array([todo2])
expect(execute(action_id: Todo::DIRECTLY_ADDRESSED)).to match_array([todo2])
end
it 'returns the expected todos when filtering for multiple action ids' do
todos = finder.new(user, { action_id: [Todo::DIRECTLY_ADDRESSED, Todo::ASSIGNED] }).execute
expect(todos).to match_array([todo2, todo1])
expect(execute(action_id: [Todo::DIRECTLY_ADDRESSED, Todo::ASSIGNED])).to match_array([todo2, todo1])
end
end
context 'by action names' do
context 'with by action names' do
it 'returns the expected todos' do
todos = finder.new(user, { action: :directly_addressed }).execute
expect(todos).to match_array([todo2])
expect(execute(action: :directly_addressed)).to match_array([todo2])
end
it 'returns the expected todos when filtering for multiple action names' do
todos = finder.new(user, { action: [:directly_addressed, :assigned] }).execute
expect(todos).to match_array([todo2, todo1])
expect(execute(action: [:directly_addressed, :assigned])).to match_array([todo2, todo1])
end
end
end
@ -145,34 +126,28 @@ RSpec.describe TodosFinder, feature_category: :notifications do
let!(:todo2) { create(:todo, user: user, author: author2) }
it 'returns correct todos when filtering by an author' do
todos = finder.new(user, { author_id: author1.id }).execute
expect(todos).to match_array([todo1])
expect(execute(author_id: author1.id)).to match_array([todo1])
end
context 'querying for multiple authors' do
context 'with querying for multiple authors' do
it 'returns the correct todo items' do
todos = finder.new(user, { author_id: [author2.id, author1.id] }).execute
expect(todos).to match_array([todo2, todo1])
expect(execute(author_id: [author2.id, author1.id])).to match_array([todo2, todo1])
end
end
end
context 'by groups' do
context 'with by groups' do
context 'with subgroups' do
let_it_be(:subgroup) { create(:group, parent: group) }
let!(:todo3) { create(:todo, user: user, group: subgroup, target: issue) }
it 'returns todos from subgroups when filtered by a group' do
todos = finder.new(user, { group_id: group.id }).execute
expect(todos).to match_array([todo1, todo2, todo3])
expect(execute(group_id: group.id)).to match_array([todo1, todo2, todo3])
end
end
context 'filtering for multiple groups' do
context 'with filtering for multiple groups' do
let_it_be(:group2) { create(:group) }
let_it_be(:group3) { create(:group) }
let_it_be(:subgroup1) { create(:group, parent: group) }
@ -186,53 +161,53 @@ RSpec.describe TodosFinder, feature_category: :notifications do
let!(:todo6) { create(:todo, user: user, group: group3, target: issue) }
it 'returns the expected groups' do
todos = finder.new(user, { group_id: [group.id, group2.id] }).execute
expect(todos).to match_array([todo1, todo2, todo3, todo4, todo5])
expect(execute(group_id: [group.id, group2.id])).to match_array([todo1, todo2, todo3, todo4, todo5])
end
end
end
context 'by state' do
context 'with by state' do
let!(:todo1) { create(:todo, user: user, group: group, target: issue, state: :done) }
let!(:todo2) { create(:todo, user: user, group: group, target: issue, state: :done, author: banned_user) }
let!(:todo3) { create(:todo, user: user, group: group, target: issue, state: :pending) }
let!(:todo4) { create(:todo, user: user, group: group, target: issue, state: :pending, author: banned_user) }
let!(:todo5) { create(:todo, user: user, group: group, target: issue, state: :pending, snoozed_until: 1.hour.from_now) }
let!(:todo6) { create(:todo, user: user, group: group, target: issue, state: :pending, snoozed_until: 1.hour.ago) }
let!(:todo5) do
create(:todo, user: user, group: group, target: issue, state: :pending, snoozed_until: 1.hour.from_now)
end
let!(:todo6) do
create(:todo, user: user, group: group, target: issue, state: :pending, snoozed_until: 1.hour.ago)
end
it 'returns the expected items when no state is provided' do
todos = finder.new(user, {}).execute
expect(todos).to match_array([todo3, todo6])
expect(execute).to match_array([todo3, todo6])
end
it 'returns the expected items when a state is provided' do
todos = finder.new(user, { state: :done }).execute
expect(todos).to match_array([todo1, todo2])
expect(execute(state: :done)).to match_array([todo1, todo2])
end
it 'returns the expected items when multiple states are provided' do
todos = finder.new(user, { state: [:pending, :done] }).execute
expect(todos).to match_array([todo1, todo2, todo3, todo5, todo6])
expect(execute(state: [:pending, :done])).to match_array([todo1, todo2, todo3, todo5, todo6])
end
end
context 'by snoozed state' do
context 'with by snoozed state' do
let_it_be(:todo1) { create(:todo, user: user, group: group, target: issue, state: :pending) }
let_it_be(:todo2) { create(:todo, user: user, group: group, target: issue, state: :pending, snoozed_until: 1.hour.from_now) }
let_it_be(:todo3) { create(:todo, user: user, group: group, target: issue, state: :pending, snoozed_until: 1.hour.ago) }
let_it_be(:todo2) do
create(:todo, user: user, group: group, target: issue, state: :pending, snoozed_until: 1.hour.from_now)
end
let_it_be(:todo3) do
create(:todo, user: user, group: group, target: issue, state: :pending, snoozed_until: 1.hour.ago)
end
it 'returns the snoozed todos only' do
todos = finder.new(user, { is_snoozed: true }).execute
expect(todos).to match_array([todo2])
expect(execute(is_snoozed: true)).to match_array([todo2])
end
end
context 'by project' do
context 'with by project' do
let_it_be(:project1) { create(:project) }
let_it_be(:project2) { create(:project) }
let_it_be(:project3) { create(:project) }
@ -242,84 +217,75 @@ RSpec.describe TodosFinder, feature_category: :notifications do
let!(:todo3) { create(:todo, user: user, project: project3, state: :pending) }
it 'returns the expected todos for one project' do
todos = finder.new(user, { project_id: project2.id }).execute
expect(todos).to match_array([todo2])
expect(execute(project_id: project2.id)).to match_array([todo2])
end
it 'returns the expected todos for many projects' do
todos = finder.new(user, { project_id: [project2.id, project1.id] }).execute
expect(todos).to match_array([todo2, todo1])
expect(execute(project_id: [project2.id, project1.id])).to match_array([todo2, todo1])
end
end
context 'when filtering by target id' do
it 'returns the expected todos for the target' do
todos = finder.new(user, { type: 'Issue', target_id: issue.id }).execute
expect(todos).to match_array([todo1])
expect(execute(type: 'Issue', target_id: issue.id)).to match_array([todo1])
end
it 'returns the expected todos for multiple target ids' do
another_issue = create(:issue, project: project)
todo3 = create(:todo, user: user, project: project, target: another_issue)
todos = finder.new(user, { type: 'Issue', target_id: [issue.id, another_issue.id] }).execute
expect(todos).to match_array([todo1, todo3])
expect(execute(type: 'Issue', target_id: [issue.id, another_issue.id])).to match_array([todo1, todo3])
end
it 'returns the expected todos for empty target id collection' do
todos = finder.new(user, { target_id: [] }).execute
expect(todos).to match_array([todo1, todo2])
expect(execute(target_id: [])).to match_array([todo1, todo2])
end
end
end
context 'external authorization' do
context 'with external authorization' do
it_behaves_like 'a finder with external authorization service' do
let!(:subject) { create(:todo, project: project, user: user) }
let(:project_params) { { project_id: project.id } }
let!(:subject) { create(:todo, project: project, user: user) } # rubocop:disable RSpec/SubjectDeclaration -- In context subject is the right word
let(:execute) { described_class.new(users: user).execute }
let(:project_execute) { described_class.new(users: user, project_id: project.id).execute }
end
end
end
describe '#sort' do
context 'by date' do
context 'with by date' do
let!(:todo1) { create(:todo, user: user, project: project) }
let!(:todo2) { create(:todo, user: user, project: project, created_at: 3.hours.ago) }
let!(:todo3) { create(:todo, user: user, project: project, snoozed_until: 1.hour.ago) }
context 'when sorting by ascending date' do
subject { finder.new(user, { sort: :created_asc }).execute }
subject { execute(sort: :created_asc) }
it { is_expected.to eq([todo2, todo3, todo1]) }
end
context 'when sorting by descending date' do
subject { finder.new(user, { sort: :created_desc }).execute }
subject { execute(sort: :created_desc) }
it { is_expected.to eq([todo1, todo3, todo2]) }
end
context 'when not querying pending to-dos only' do
context 'when sorting by ascending date' do
subject { finder.new(user, { sort: :created_asc, state: [:done, :pending] }).execute }
subject { execute(sort: :created_asc, state: [:done, :pending]) }
it { is_expected.to eq([todo1, todo2, todo3]) }
end
context 'when sorting by descending date' do
subject { finder.new(user, { sort: :created_desc, state: [:done, :pending] }).execute }
subject { execute(sort: :created_desc, state: [:done, :pending]) }
it { is_expected.to eq([todo3, todo2, todo1]) }
end
end
end
it "sorts by priority" do
it 'sorts by priority' do
project_2 = create(:project)
label_1 = create(:label, title: 'label_1', project: project, priority: 1)
@ -350,16 +316,20 @@ RSpec.describe TodosFinder, feature_category: :notifications do
project_2.add_developer(user)
todos_asc_1 = finder.new(user, { sort: :priority }).execute
todos_asc_1 = execute(sort: :priority)
expect(todos_asc_1).to eq([todo_3, todo_5, todo_4, todo_2, todo_1])
todos_asc_2 = finder.new(user, { sort: :label_priority_asc }).execute
todos_asc_2 = execute(sort: :label_priority_asc)
expect(todos_asc_2).to eq([todo_3, todo_5, todo_4, todo_2, todo_1])
todos_desc = finder.new(user, { sort: :label_priority_desc }).execute
todos_desc = execute(sort: :label_priority_desc)
expect(todos_desc).to eq([todo_1, todo_2, todo_4, todo_5, todo_3])
end
end
def execute(users: user, **kwargs)
described_class.new(users: users, **kwargs).execute
end
end
describe '.todo_types' do
@ -369,28 +339,12 @@ RSpec.describe TodosFinder, feature_category: :notifications do
expected_result =
if Gitlab.ee?
%w[Epic Vulnerability] + shared_types
%w[Epic Vulnerability User] + shared_types
else
shared_types
end
expect(described_class.todo_types).to contain_exactly(*expected_result)
end
end
describe '#any_for_target?' do
it 'returns true if there are any todos for the given target' do
todo = create(:todo, :pending)
finder = described_class.new(todo.user)
expect(finder.any_for_target?(todo.target)).to eq(true)
end
it 'returns false if there are no todos for the given target' do
issue = create(:issue)
finder = described_class.new(issue.author)
expect(finder.any_for_target?(issue)).to eq(false)
expect(described_class.todo_types).to match_array(expected_result)
end
end
end

View File

@ -0,0 +1,45 @@
import { createWrapper } from '@vue/test-utils';
import { initAiCatalog } from '~/ai/catalog/index';
import AiCatalogApp from '~/ai/catalog/ai_catalog_app.vue';
import * as Router from '~/ai/catalog/router';
describe('AI Catalog Index', () => {
  let mockElement;
  let wrapper;

  const findAiCatalog = () => wrapper.findComponent(AiCatalogApp);

  afterEach(() => {
    // Detach the mount target from the document as well as dropping the
    // reference — the original only nulled the variable, so each `beforeEach`
    // run left a stale `#js-ai-catalog` node behind in the shared jsdom body.
    mockElement?.remove();
    mockElement = null;
  });

  describe('initAiCatalog', () => {
    beforeEach(() => {
      // Build the element `initAiCatalog` looks up, including the dataset
      // attribute it reads for the router base path.
      mockElement = document.createElement('div');
      mockElement.id = 'js-ai-catalog';
      mockElement.dataset.aiCatalogIndexPath = '/ai/catalog';
      document.body.appendChild(mockElement);

      jest.spyOn(Router, 'createRouter');

      wrapper = createWrapper(initAiCatalog(`#${mockElement.id}`));
    });

    it('renders the AiCatalog component', () => {
      expect(findAiCatalog().exists()).toBe(true);
    });

    it('creates router with correct base path', () => {
      // Calling without a selector exercises the default-selector path.
      initAiCatalog();

      expect(Router.createRouter).toHaveBeenCalledWith('/ai/catalog');
    });
  });

  describe('when the element does not exist', () => {
    it('returns `null`', () => {
      expect(initAiCatalog('foo')).toBeNull();
    });
  });
});

View File

@ -0,0 +1,111 @@
import { GlSkeletonLoader } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import AiCatalogIndex from '~/ai/catalog/pages/ai_catalog_index.vue';
describe('AiCatalogIndex', () => {
  let wrapper;

  // Three workflows so ordering of the rendered name/type pairs is observable.
  const mockWorkflowsData = [
    {
      id: 1,
      name: 'Test Workflow 1',
      type: 'Type A',
    },
    {
      id: 2,
      name: 'Test Workflow 2',
      type: 'Type B',
    },
    {
      id: 3,
      name: 'Test Workflow 3',
      type: 'Type C',
    },
  ];

  const emptyWorkflowsData = [];

  // Mounts the component with a stubbed Apollo query state and seeded
  // `userWorkflows` data; resolves once pending promises have flushed.
  const createComponent = ({ loading = false, mockData = mockWorkflowsData } = {}) => {
    wrapper = shallowMountExtended(AiCatalogIndex, {
      data() {
        return { userWorkflows: mockData };
      },
      mocks: {
        $apollo: {
          queries: {
            userWorkflows: {
              loading,
            },
          },
        },
      },
    });
    return waitForPromises();
  };

  const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
  const findTitle = () => wrapper.find('h1');
  const findAllParagraphs = () => wrapper.findAll('p');

  describe('component initialization', () => {
    it('renders the page title', async () => {
      await createComponent();

      expect(findTitle().text()).toBe('AI Catalog');
    });
  });

  describe('loading state', () => {
    it('shows skeleton loader when loading', async () => {
      // Await the returned `waitForPromises()` instead of leaving it floating;
      // `loading` is a static mock, so the skeleton remains visible after the
      // flush and the test no longer leaks an unhandled promise.
      await createComponent({ loading: true });

      expect(findSkeletonLoader().exists()).toBe(true);
      expect(findAllParagraphs()).toHaveLength(0);
    });
  });

  describe('with workflow data', () => {
    beforeEach(async () => {
      await createComponent();
    });

    it('displays workflow names and types correctly', () => {
      const paragraphs = findAllParagraphs();

      // Should have 6 paragraphs total (2 per workflow: name and type)
      expect(paragraphs).toHaveLength(6);

      // Check workflow names (even indices: 0, 2, 4)
      expect(paragraphs.at(0).text()).toBe('Test Workflow 1');
      expect(paragraphs.at(2).text()).toBe('Test Workflow 2');
      expect(paragraphs.at(4).text()).toBe('Test Workflow 3');

      // Check workflow types (odd indices: 1, 3, 5)
      expect(paragraphs.at(1).text()).toBe('Type A');
      expect(paragraphs.at(3).text()).toBe('Type B');
      expect(paragraphs.at(5).text()).toBe('Type C');
    });

    it('does not show skeleton loader', () => {
      expect(findSkeletonLoader().exists()).toBe(false);
    });
  });

  describe('with empty workflow data', () => {
    beforeEach(async () => {
      await createComponent({ mockData: emptyWorkflowsData });
    });

    it('renders no workflow paragraphs', () => {
      expect(findAllParagraphs()).toHaveLength(0);
    });

    it('does not show skeleton loader', () => {
      expect(findSkeletonLoader().exists()).toBe(false);
    });
  });
});

View File

@ -5,10 +5,9 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['TodoTargetEnum'], feature_category: :notifications do
specify { expect(described_class.graphql_name).to eq('TodoTargetEnum') }
it 'exposes all TodosFinder.todo_types as a value except User' do
enums = described_class.values.values.reject { |enum| enum.value == "User" }
expect(enums).to contain_exactly(
*::TodosFinder.todo_types.map do |class_name|
it 'exposes all TodosFinder.todo_types as a value' do
expect(described_class.values.values).to match_array(
::TodosFinder.todo_types.map do |class_name|
have_attributes(value: class_name)
end
)

View File

@ -0,0 +1,60 @@
# frozen_string_literal: true
require 'spec_helper'
# Spec for the "AI Catalog" entry in the Explore sidebar: link, title, icon,
# active routes, and visibility gated on the `global_ai_catalog` feature flag.
RSpec.describe Sidebars::Explore::Menus::AiCatalogMenu, feature_category: :navigation do
let_it_be(:current_user) { build(:user) }
let_it_be(:user) { build(:user) }
# The menu is built from a sidebar context carrying the viewing user.
let(:context) { Sidebars::Context.new(current_user: current_user, container: user) }
subject(:menu_item) { described_class.new(context) }
describe '#link' do
it 'matches the expected path pattern' do
expect(menu_item.link).to match %r{explore/ai-catalog}
end
end
describe '#title' do
it 'returns the correct title' do
expect(menu_item.title).to eq 'AI Catalog'
end
end
describe '#sprite_icon' do
it 'returns the correct icon' do
expect(menu_item.sprite_icon).to eq 'tanuki-ai'
end
end
describe '#active_routes' do
it 'returns the correct active routes' do
expect(menu_item.active_routes).to eq({ controller: ['explore/ai_catalog'] })
end
end
describe '#render?' do
# NOTE(review): this relies on `global_ai_catalog` being enabled by default
# in the spec environment — confirm against the flag's default state.
it 'renders the menu' do
expect(menu_item.render?).to be(true)
end
context 'when global_ai_catalog feature flag is disabled' do
before do
stub_feature_flags(global_ai_catalog: false)
end
it 'does not render the menu' do
expect(menu_item.render?).to be(false)
end
end
end
describe 'feature flag integration' do
# Verifies the flag is checked per-user (actor-based), not globally.
it 'calls Feature.enabled? with correct parameters' do
expect(Feature).to receive(:enabled?).with(:global_ai_catalog, current_user)
menu_item.render?
end
end
end

View File

@ -16,6 +16,7 @@ RSpec.describe API::AlertManagementAlerts, feature_category: :incident_managemen
before do
project.add_developer(user)
stub_feature_flags(hide_incident_management_features: false)
end
subject do
@ -133,6 +134,7 @@ RSpec.describe API::AlertManagementAlerts, feature_category: :incident_managemen
before do
# Local storage
stub_uploads_object_storage(MetricImageUploader, enabled: false)
stub_feature_flags(hide_incident_management_features: false)
allow_next_instance_of(MetricImageUploader) do |uploader|
allow(uploader).to receive(:file_storage?).and_return(true)
end
@ -145,6 +147,7 @@ RSpec.describe API::AlertManagementAlerts, feature_category: :incident_managemen
context 'file size too large' do
before do
stub_feature_flags(hide_incident_management_features: false)
allow_next_instance_of(UploadedFile) do |upload_file|
allow(upload_file).to receive(:size).and_return(AlertManagement::MetricImage::MAX_FILE_SIZE + 1)
end
@ -160,6 +163,7 @@ RSpec.describe API::AlertManagementAlerts, feature_category: :incident_managemen
context 'error when saving' do
before do
stub_feature_flags(hide_incident_management_features: false)
project.add_developer(user)
allow_next_instance_of(::AlertManagement::MetricImages::UploadService) do |service|
@ -184,6 +188,7 @@ RSpec.describe API::AlertManagementAlerts, feature_category: :incident_managemen
allow_next_instance_of(MetricImageUploader) do |uploader|
allow(uploader).to receive(:file_storage?).and_return(true)
end
stub_feature_flags(hide_incident_management_features: false)
project.add_developer(user)
end
@ -241,6 +246,7 @@ RSpec.describe API::AlertManagementAlerts, feature_category: :incident_managemen
with_them do
before do
stub_feature_flags(hide_incident_management_features: false)
project.send("add_#{user_role}", user) unless user_role == :not_member
project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE) unless public_project
end
@ -289,6 +295,7 @@ RSpec.describe API::AlertManagementAlerts, feature_category: :incident_managemen
with_them do
before do
stub_feature_flags(hide_incident_management_features: false)
project.send("add_#{user_role}", user) unless user_role == :not_member
project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE) unless public_project
end
@ -298,6 +305,8 @@ RSpec.describe API::AlertManagementAlerts, feature_category: :incident_managemen
context 'when user has access' do
before do
stub_feature_flags(hide_incident_management_features: false)
project.add_developer(user)
end
@ -366,6 +375,7 @@ RSpec.describe API::AlertManagementAlerts, feature_category: :incident_managemen
with_them do
before do
stub_feature_flags(hide_incident_management_features: false)
project.send("add_#{user_role}", user) unless user_role == :not_member
project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE) unless public_project
end
@ -375,6 +385,7 @@ RSpec.describe API::AlertManagementAlerts, feature_category: :incident_managemen
context 'when user has access' do
before do
stub_feature_flags(hide_incident_management_features: false)
project.add_developer(user)
end
@ -395,6 +406,7 @@ RSpec.describe API::AlertManagementAlerts, feature_category: :incident_managemen
context 'when error when deleting' do
before do
stub_feature_flags(hide_incident_management_features: false)
allow_next_instance_of(AlertManagement::AlertsFinder) do |finder|
allow(finder).to receive(:execute).and_return([alert])
end
@ -412,4 +424,34 @@ RSpec.describe API::AlertManagementAlerts, feature_category: :incident_managemen
end
end
end
# When `hide_incident_management_features` is enabled (it is not stubbed off
# in this block, unlike the contexts above), every metric-image endpoint must
# behave as if it does not exist and return 404.
describe 'with feature flag hide_incident_management_features enabled' do
include WorkhorseHelpers
include_context 'workhorse headers'
let(:path) { "/projects/#{project.id}/alert_management_alerts/#{alert.iid}/metric_images" }
let(:headers) { workhorse_headers }
let!(:image) { create(:alert_metric_image, alert: alert) }
# Shared example: issue the given HTTP verb against the metric-image endpoint
# as a project developer and expect a 404, proving the feature is fully hidden
# even for authorized users.
shared_examples 'feature flag returns 404' do |http_method|
before do
project.add_developer(user)
end
it "returns 404 for #{http_method}" do
# PUT and DELETE act on a single image, so they need the image id in the path.
request_path = %i[put delete].include?(http_method) ? "#{path}/#{image.id}" : path
send(http_method, api(request_path), headers: headers)
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Not Found')
end
end
# NOTE(review): the description says "POST request" but the block covers
# POST, PUT, DELETE and GET — consider renaming or splitting the context.
context 'when making a POST request' do
it_behaves_like 'feature flag returns 404', :post
it_behaves_like 'feature flag returns 404', :put
it_behaves_like 'feature flag returns 404', :delete
it_behaves_like 'feature flag returns 404', :get
end
end
end

View File

@ -0,0 +1,164 @@
# frozen_string_literal: true
require 'spec_helper'
# Request spec for the public Explore AI Catalog page. Covers signed-in and
# anonymous access, restricted public visibility, and the `global_ai_catalog`
# feature flag (globally and per-actor).
RSpec.describe Explore::AiCatalogController, feature_category: :duo_workflow do
let_it_be(:user) { create(:user) }
describe 'GET #index' do
let(:path) { explore_ai_catalog_path }
before do
stub_feature_flags(global_ai_catalog: true)
end
context 'when user is signed in' do
before do
sign_in(user)
end
it 'responds with success' do
get path
expect(response).to have_gitlab_http_status(:ok)
end
it 'renders the index template' do
get path
expect(response).to render_template('index')
end
it 'uses the explore layout' do
get path
expect(response).to render_template(layout: 'explore')
end
end
# The catalog is publicly browsable: anonymous users get the same page.
context 'when user is not signed in' do
it 'responds with success' do
get path
expect(response).to have_gitlab_http_status(:ok)
end
it 'renders the index template' do
get path
expect(response).to render_template('index')
end
it 'uses the explore layout' do
get path
expect(response).to render_template(layout: 'explore')
end
end
# When the instance restricts PUBLIC visibility, anonymous browsing of
# explore pages is disabled and visitors are sent to the login page.
context 'when public visibility is restricted' do
before do
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
end
context 'when user is signed in' do
before do
sign_in(user)
end
it 'responds with success' do
get path
expect(response).to have_gitlab_http_status(:ok)
end
it 'renders the index template' do
get path
expect(response).to render_template('index')
end
end
context 'when user is not signed in' do
it 'redirects to login page' do
get path
expect(response).to redirect_to new_user_session_path
end
end
end
# With the flag off, the route must look nonexistent for everyone.
context 'when global_ai_catalog feature flag is disabled' do
before do
stub_feature_flags(global_ai_catalog: false)
end
context 'when user is signed in' do
before do
sign_in(user)
end
it 'renders 404' do
get path
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when user is not signed in' do
it 'renders 404' do
get path
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
# Actor-scoped rollout: only the user the flag is enabled for sees the page.
context 'when global_ai_catalog feature flag is enabled for specific user' do
let_it_be(:enabled_user) { create(:user) }
let_it_be(:disabled_user) { create(:user) }
before do
# Overrides the outer `global_ai_catalog: true` stub with a per-actor one.
stub_feature_flags(global_ai_catalog: enabled_user)
end
context 'when enabled user is signed in' do
before do
sign_in(enabled_user)
end
it 'responds with success' do
get path
expect(response).to have_gitlab_http_status(:ok)
end
it 'renders the index template' do
get path
expect(response).to render_template('index')
end
end
context 'when disabled user is signed in' do
before do
sign_in(disabled_user)
end
it 'renders 404' do
get path
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when user is not signed in' do
it 'renders 404' do
get path
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
end
end

View File

@ -726,6 +726,8 @@ RSpec.describe Projects::TransferService, feature_category: :groups_and_projects
let(:group) { create(:group) }
let(:member_of_new_group) { create(:user) }
let(:user_ids) { [user.id, member_of_old_group.id, member_of_new_group.id] }
before do
old_group.add_developer(member_of_old_group)
group.add_maintainer(member_of_new_group)
@ -743,20 +745,25 @@ RSpec.describe Projects::TransferService, feature_category: :groups_and_projects
execute_transfer
end
it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
stub_feature_flags(do_not_run_safety_net_auth_refresh_jobs: false)
it 'enqueues a EnqueueUsersRefreshAuthorizedProjectsWorker job' do
expect(AuthorizedProjectUpdate::EnqueueUsersRefreshAuthorizedProjectsWorker)
.to receive(:perform_async).with(user_ids)
user_ids = [user.id, member_of_old_group.id, member_of_new_group.id].map { |id| [id] }
execute_transfer
end
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
receive(:bulk_perform_in).with(
1.hour,
user_ids,
batch_delay: 30.seconds, batch_size: 100
)
)
context 'when project_authorizations_update_in_background_in_transfer_service feature flag is disabled' do
before do
stub_feature_flags(project_authorizations_update_in_background_in_transfer_service: false)
end
subject
it 'calls UserProjectAccessChangedService with user_ids to update project authorizations' do
expect_next_instance_of(UserProjectAccessChangedService, user_ids) do |instance|
expect(instance).to receive(:execute).with(priority: UserProjectAccessChangedService::LOW_PRIORITY)
end
execute_transfer
end
end
it 'refreshes the permissions of the members of the old and new namespace', :sidekiq_inline do

View File

@ -302,6 +302,10 @@ RSpec.shared_context '"Explore" navbar structure' do
nav_item: _("CI/CD Catalog"),
nav_sub_items: []
},
{
nav_item: s_("AI|AI Catalog"),
nav_sub_items: []
},
{
nav_item: _("Topics"),
nav_sub_items: []

View File

@ -1,5 +1,8 @@
# frozen_string_literal: true
# This example group is deprecated in favor of composable matchers, it should not be used in any new code.
# https://gitlab.com/gitlab-org/gitlab/-/issues/536223
# Requires a context containing:
# - subject
# - event

View File

@ -11,7 +11,7 @@ RSpec.shared_examples 'a finder with external authorization service' do
end
it 'finds the subject' do
expect(described_class.new(user).execute).to include(subject)
expect(execute).to include(subject)
end
context 'with an external authorization service' do
@ -20,11 +20,13 @@ RSpec.shared_examples 'a finder with external authorization service' do
end
it 'does not include the subject when no project was given' do
expect(described_class.new(user).execute).not_to include(subject)
expect(execute).not_to include(subject)
end
it 'includes the subject when a project id was given' do
expect(described_class.new(user, project_params).execute).to include(subject)
context 'with a project param' do
it 'includes the subject when a project id was given' do
expect(project_execute).to include(subject)
end
end
end
end

View File

@ -1129,7 +1129,8 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context
context 'external authorization' do
it_behaves_like 'a finder with external authorization service' do
let!(:subject) { create(factory, project: project) }
let(:project_params) { { project_id: project.id } }
let(:execute) { described_class.new(user).execute }
let(:project_execute) { described_class.new(user, project_id: project.id).execute }
end
end

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
require 'spec_helper'
# Spec for the worker that refreshes authorized projects for a batch of users
# by delegating to UserProjectAccessChangedService at low priority.
RSpec.describe AuthorizedProjectUpdate::EnqueueUsersRefreshAuthorizedProjectsWorker, feature_category: :permissions do
describe '#perform' do
let_it_be(:user_ids) { [1, 2, 3] }
subject(:execute) { described_class.new.perform(user_ids) }
it 'calls UserProjectAccessChangedService' do
expect_next_instance_of(UserProjectAccessChangedService, user_ids) do |service|
expect(service).to receive(:execute).with(
priority: UserProjectAccessChangedService::LOW_PRIORITY
)
end
execute
end
context 'with an empty array of user_ids' do
# NOTE(review): this `let` shadows the outer `let_it_be` — works with
# test-prof's let_it_be override support; confirm that stays intended.
let(:user_ids) { [] }
it 'does not call UserProjectAccessChangedService' do
expect(UserProjectAccessChangedService).not_to receive(:new)
execute
end
end
end
end

View File

@ -1331,14 +1331,14 @@
resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-3.1.1.tgz#af3aea7f1e52ec916d8b5c9dcc0f09d4c060a3fc"
integrity sha512-5DGmA8FTdB2XbDeEwc/5ZXBl6UbBAyBOOLlPuBnZ/N1SwdH9Ii+cOX3tBROlDgcTXxjOYnLMVoKk9+FXAw0CJw==
"@floating-ui/core@^1.7.0":
version "1.7.0"
resolved "https://registry.yarnpkg.com/@floating-ui/core/-/core-1.7.0.tgz#1aff27a993ea1b254a586318c29c3b16ea0f4d0a"
integrity sha512-FRdBLykrPPA6P76GGGqlex/e7fbe0F1ykgxHYNXQsH/iTEtjMj/f9bpY5oQqbjt5VgZvgz/uKXbGuROijh3VLA==
"@floating-ui/core@^1.7.0", "@floating-ui/core@^1.7.1":
version "1.7.1"
resolved "https://registry.yarnpkg.com/@floating-ui/core/-/core-1.7.1.tgz#1abc6b157d4a936174f9dbd078278c3a81c8bc6b"
integrity sha512-azI0DrjMMfIug/ExbBaeDVJXcY0a7EPvPjb2xAJPa4HeimBX+Z18HK8QQR3jb6356SnDDdxx+hinMLcJEDdOjw==
dependencies:
"@floating-ui/utils" "^0.2.9"
"@floating-ui/dom@1.7.0", "@floating-ui/dom@^1.0.0", "@floating-ui/dom@^1.7.0":
"@floating-ui/dom@1.7.0":
version "1.7.0"
resolved "https://registry.yarnpkg.com/@floating-ui/dom/-/dom-1.7.0.tgz#f9f83ee4fee78ac23ad9e65b128fc11a27857532"
integrity sha512-lGTor4VlXcesUMh1cupTUTDoCxMb0V6bm3CnxHzQcw8Eaf1jQbgQX4i02fYgT0vJ82tb5MZ4CZk1LRGkktJCzg==
@ -1346,6 +1346,14 @@
"@floating-ui/core" "^1.7.0"
"@floating-ui/utils" "^0.2.9"
"@floating-ui/dom@^1.0.0", "@floating-ui/dom@^1.7.1":
version "1.7.1"
resolved "https://registry.yarnpkg.com/@floating-ui/dom/-/dom-1.7.1.tgz#76a4e3cbf7a08edf40c34711cf64e0cc8053d912"
integrity sha512-cwsmW/zyw5ltYTUeeYJ60CnQuPqmGwuGVhG9w0PRaRKkAyi38BT5CKrpIbb+jtahSwUl04cWzSx9ZOIxeS6RsQ==
dependencies:
"@floating-ui/core" "^1.7.1"
"@floating-ui/utils" "^0.2.9"
"@floating-ui/react-dom@^2.0.0":
version "2.1.2"
resolved "https://registry.yarnpkg.com/@floating-ui/react-dom/-/react-dom-2.1.2.tgz#a1349bbf6a0e5cb5ded55d023766f20a4d439a31"