Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-03-13 15:12:32 +00:00
parent c1553a2569
commit aefada43de
102 changed files with 1198 additions and 267 deletions

View File

@ -609,7 +609,6 @@ export default {
'ee/app/assets/javascripts/work_items/components/work_item_links/work_item_rolled_up_health_status.vue',
'ee/app/assets/javascripts/work_items/components/work_item_progress.vue',
'ee/app/assets/javascripts/work_items/components/work_item_rolledup_dates.vue',
'ee/app/assets/javascripts/work_items/components/work_item_weight.vue',
'ee/app/assets/javascripts/workspaces/common/components/workspaces_list/workspaces_table.vue',
'ee/app/assets/javascripts/workspaces/dropdown_group/components/workspace_dropdown_item.vue',
'ee/app/assets/javascripts/workspaces/user/pages/list.vue',

View File

@ -1 +1 @@
8ee24aca568eff50b309b6555a96545d71b77979
5ba826bb0ae2842ea51fbfb42d6cdb5a0fd12f2f

View File

@ -68,6 +68,17 @@
"Submodule",
"TreeEntry"
],
"EventTargetType": [
"Design",
"Issue",
"MergeRequest",
"Milestone",
"Note",
"Project",
"Snippet",
"UserCore",
"WikiPage"
],
"Eventable": [
"BoardEpic",
"Epic"

View File

@ -21,7 +21,7 @@ export default {
import('ee_component/dora/components/change_failure_rate_charts.vue'),
ProjectQualitySummary: () => import('ee_component/project_quality_summary/app.vue'),
},
piplelinesTabEvent: 'p_analytics_ci_cd_pipelines',
pipelinesTabEvent: 'p_analytics_ci_cd_pipelines',
deploymentFrequencyTabEvent: 'p_analytics_ci_cd_deployment_frequency',
leadTimeTabEvent: 'p_analytics_ci_cd_lead_time',
timeToRestoreServiceTabEvent: 'visit_ci_cd_time_to_restore_service_tab',
@ -94,7 +94,7 @@ export default {
<gl-tab
:title="__('Pipelines')"
data-testid="pipelines-tab"
@click="trackEvent($options.piplelinesTabEvent)"
@click="trackEvent($options.pipelinesTabEvent)"
>
<component :is="pipelineChartsComponent" />
</gl-tab>

View File

@ -0,0 +1,13 @@
<script>
// DashboardHeader: small presentational heading shared by the CI/CD
// analytics dashboard views.
// Slots:
//   default       — the heading text, rendered inside an <h2>.
//   "description" — optional supporting text rendered below the heading.
export default {
name: 'DashboardHeader',
};
</script>
<template>
<div>
<h2 class="gl-heading-2 gl-mt-3">
<slot></slot>
</h2>
<slot name="description"></slot>
</div>
</template>

View File

@ -20,6 +20,7 @@ import {
} from '../constants';
import getPipelineCountByStatus from '../graphql/queries/get_pipeline_count_by_status.query.graphql';
import getProjectPipelineStatistics from '../graphql/queries/get_project_pipeline_statistics.query.graphql';
import DashboardHeader from './dashboard_header.vue';
import StatisticsList from './statistics_list.vue';
const defaultAnalyticsValues = {
@ -51,6 +52,7 @@ export default {
GlColumnChart,
GlChartSeriesLabel,
GlSkeletonLoader,
DashboardHeader,
StatisticsList,
CiCdAnalyticsCharts,
},
@ -316,9 +318,9 @@ export default {
<gl-alert v-if="showFailureAlert" :variant="failure.variant" @dismiss="hideAlert">{{
failure.text
}}</gl-alert>
<div class="gl-mb-3">
<h4>{{ s__('PipelineCharts|CI/CD Analytics') }}</h4>
</div>
<dashboard-header>
{{ s__('PipelineCharts|Pipelines') }}
</dashboard-header>
<gl-skeleton-loader v-if="loading" :lines="5" />
<statistics-list v-else :counts="formattedCounts" />
<h4>{{ __('Pipelines charts') }}</h4>

View File

@ -10,6 +10,7 @@ import {
DATE_RANGE_LAST_180_DAYS,
} from '../constants';
import getPipelineAnalytics from '../graphql/queries/get_pipeline_analytics.query.graphql';
import DashboardHeader from './dashboard_header.vue';
import StatisticsList from './statistics_list.vue';
import PipelineDurationChart from './pipeline_duration_chart.vue';
import PipelineStatusChart from './pipeline_status_chart.vue';
@ -18,6 +19,7 @@ export default {
components: {
GlCollapsibleListbox,
GlFormGroup,
DashboardHeader,
StatisticsList,
PipelineDurationChart,
PipelineStatusChart,
@ -106,7 +108,9 @@ export default {
</script>
<template>
<div>
<h2>{{ s__('PipelineCharts|Pipelines') }}</h2>
<dashboard-header>
{{ s__('PipelineCharts|Pipelines') }}
</dashboard-header>
<div class="gl-mb-4 gl-bg-subtle gl-p-4 gl-pb-2">
<gl-form-group :label="__('Date range')" label-for="date-range">
<gl-collapsible-listbox

View File

@ -131,7 +131,9 @@ export default {
@click="$emit('edit')"
>{{ __('Edit') }}
</gl-button>
<gl-link v-else :href="headerLinkHref">{{ headerLinkTitle }}</gl-link>
<gl-link v-else-if="headerLinkHref && headerLinkTitle" :href="headerLinkHref">{{
headerLinkTitle
}}</gl-link>
</template>
<span
v-if="showEmptyState && !$scopedSlots.content"

View File

@ -1,11 +1,12 @@
<script>
import { GlButton, GlOutsideDirective as Outside } from '@gitlab/ui';
import { GlButton, GlLoadingIcon, GlOutsideDirective as Outside } from '@gitlab/ui';
import { Mousetrap } from '~/lib/mousetrap';
import { keysFor, SIDEBAR_CLOSE_WIDGET } from '~/behaviors/shortcuts/keybindings';
export default {
components: {
GlButton,
GlLoadingIcon,
},
directives: {
Outside,
@ -56,6 +57,7 @@ export default {
<h3 class="gl-heading-5 gl-mb-0">
<slot name="title"></slot>
</h3>
<gl-loading-icon v-if="isUpdating" />
<gl-button
v-if="canUpdate && !isEditing"
key="edit-button"

View File

@ -55,8 +55,6 @@ class ProjectsController < Projects::ApplicationController
push_force_frontend_feature_flag(:work_items, !!@project&.work_items_feature_flag_enabled?)
push_force_frontend_feature_flag(:work_items_beta, !!@project&.work_items_beta_feature_flag_enabled?)
push_force_frontend_feature_flag(:work_items_alpha, !!@project&.work_items_alpha_feature_flag_enabled?)
# FF to enable setting to allow webhook execution on 30D and 60D notification delivery too
push_frontend_feature_flag(:extended_expiry_webhook_execution_setting, @project&.namespace)
end
layout :determine_layout
@ -472,10 +470,7 @@ class ProjectsController < Projects::ApplicationController
emails_enabled
]
if ::Feature.enabled?(:extended_expiry_webhook_execution_setting, @project&.namespace) &&
can?(current_user, :admin_project, project)
attributes << :extended_prat_expiry_webhooks_execute
end
attributes << :extended_prat_expiry_webhooks_execute if can?(current_user, :admin_project, project)
attributes
end

View File

@ -33,6 +33,15 @@ module Types
null: true,
experiment: { milestone: '17.10' },
description: 'User preferences for the given work item type and namespace.'
field :activity,
Users::ActivityStreamType,
description: 'Recent user activity.',
experiment: { milestone: '17.10' }
def activity
object if Feature.enabled?(:activity_stream_graphql, current_user)
end
end
# rubocop:enable Graphql/AuthorizeTypes
end

View File

@ -29,8 +29,34 @@ module Types
description: 'When this event was updated.',
null: false
field :project, Types::ProjectType,
description: 'Project of this event.',
null: true
field :target, Types::Users::EventTargetType,
description: 'The target of the event',
calls_gitaly: true
def author
Gitlab::Graphql::Loaders::BatchModelLoader.new(User, object.author_id).find
end
def project
Gitlab::Graphql::Loaders::BatchModelLoader.new(Project, object.project_id).find
end
def target
# If we don't have target info, bail
return unless object.target_type && object.target_id
Gitlab::Graphql::Loaders::BatchModelLoader.new(target_type_class, object.target_id).find
end
private
def target_type_class
klass = object.target_type&.safe_constantize
klass if klass.is_a?(Class)
end
end
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
module Types
module Users
# GraphQL object exposing a user's activity feed(s).
# Experimental (milestone 17.10); reachable from the current-user type.
class ActivityStreamType < BaseObject
graphql_name 'ActivityStream'
description 'Activity streams associated with a user'
authorize :read_user_profile
# Connection of events produced by the users the current user follows,
# optionally narrowed by event target kind.
field :followed_users_activity,
Types::EventType.connection_type,
description: 'Activity from users followed by the current user.',
experiment: { milestone: '17.10' } do
argument :target, EventTargetEnum, default_value: EventFilter::ALL, description: "Event target."
end
# Resolver for :followed_users_activity.
# target: filter name (see EventTargetEnum). NOTE(review): defaults to nil
#   here, but the GraphQL argument supplies EventFilter::ALL — presumably
#   nil never reaches EventFilter.new; confirm against framework behavior.
# last: maximum number of events requested from the finder.
def followed_users_activity(target: nil, last: 20)
scope = current_user.followees
user_events(scope, target, last)
end
private
# Fetches recent events for the given user scope via UserRecentEventsFinder,
# applying the target filter and the limit.
def user_events(scope, target, last)
UserRecentEventsFinder.new(current_user, scope, EventFilter.new(target), { limit: last }).execute
end
end
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
module Types
module Users
# GraphQL enum of event target kinds, generated from the filter names
# that EventFilter supports (e.g. all, push, issue, comments, ...).
class EventTargetEnum < BaseEnum
graphql_name 'EventTarget'
description 'Event target'
# A filter built from a blank value is used only to enumerate the
# supported filter names; each becomes an UPPERCASE enum value whose
# runtime value is the lowercase filter name itself.
::EventFilter.new('').filters.each do |filter_name|
value filter_name.upcase, value: filter_name, description: "#{filter_name.titleize} events"
end
end
end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
module Types
module Users
# GraphQL union covering every type an Event record's target may resolve to.
class EventTargetType < BaseUnion
graphql_name 'EventTargetType'
description 'Represents an object that can be the subject of an event.'
possible_types Types::IssueType, Types::MilestoneType,
Types::MergeRequestType, Types::ProjectType,
Types::SnippetType, Types::UserType, Types::Wikis::WikiPageType,
Types::DesignManagement::DesignType, Types::Notes::NoteType
# Maps a concrete target object to its GraphQL type.
# The ordered pair list reproduces case/when semantics: the first entry
# whose model class matches via is_a? wins, so subclass precedence is
# identical to the original case statement.
def self.resolve_type(object, _context)
mapping = [
[Issue, Types::IssueType],
[Milestone, Types::MilestoneType],
[MergeRequest, Types::MergeRequestType],
[Note, Types::Notes::NoteType],
[Project, Types::ProjectType],
[Snippet, Types::SnippetType],
[User, Types::UserType],
[WikiPage::Meta, Types::Wikis::WikiPageType],
[::DesignManagement::Design, Types::DesignManagement::DesignType]
]
match = mapping.find { |model, _type| object.is_a?(model) }
raise "Unsupported event target type: #{object.class.name}" unless match
match.last
end
end
end
end

View File

@ -2012,7 +2012,6 @@ class Project < ApplicationRecord
# seven_days interval but we have a setting to allow webhook execution
# for thirty_days and sixty_days interval too.
if hooks_scope == :resource_access_token_hooks &&
::Feature.enabled?(:extended_expiry_webhook_execution_setting, self.namespace) &&
data[:interval] != :seven_days &&
!self.extended_prat_expiry_webhooks_execute?

View File

@ -1,34 +1,40 @@
# frozen_string_literal: true
module Users
class AutoBanService < BaseService
class AutoBanService
Error = Class.new(StandardError)
def initialize(user:, reason:)
@user = user
@reason = reason
end
def execute
if user.ban
record_custom_attribute
ban_duplicate_users
success
else
messages = user.errors.full_messages
error(messages.uniq.join('. '))
end
ban_user
end
def execute!
user.ban!
record_custom_attribute
ban_duplicate_users
success
result = ban_user
raise Error, result[:message] if result[:status] == :error
end
private
attr_reader :user, :reason
def ban_user
result = ::Users::BanService.new(admin_bot).execute(user)
record_custom_attribute if result[:status] == :success
result
end
def admin_bot
Users::Internal.admin_bot
end
def ban_duplicate_users
AntiAbuse::BanDuplicateUsersWorker.perform_async(user.id)
end

View File

@ -1,5 +1,7 @@
- page_title _('CI/CD Analytics')
= render ::Layouts::PageHeadingComponent.new(_("CI/CD Analytics"))
#js-project-pipelines-charts-app{ data: { project_path: @project.full_path,
project_id: @project.id,
should_render_dora_charts: should_render_dora_charts.to_s,

View File

@ -10,14 +10,27 @@ module Pages
feature_category :pages
MAX_NUM_DELETIONS = 10000
BATCH_SIZE = 1000
def perform
scope = PagesDeployment.expired
scope = PagesDeployment.active.expired
iterator = Gitlab::Pagination::Keyset::Iterator.new(scope: scope)
count = 0
start = Time.current
iterator.each_batch do |deployments|
iterator.each_batch(of: BATCH_SIZE) do |deployments|
deployments.each(&:deactivate)
count += deployments.length
break if count >= MAX_NUM_DELETIONS
end
log_extra_metadata_on_done(:deactivate_expired_pages_deployments, {
deactivated_deployments: count,
duration: Time.current - start
})
end
end
end

View File

@ -129,12 +129,7 @@ module PersonalAccessTokens
# project bot does not have more than 1 token
expiring_user_token = project_bot.personal_access_tokens.first
# If feature flag is not enabled webhooks will only execute if interval is seven_days
resource_namespace = bot_resource_namepace(project_bot.resource_bot_resource)
if Feature.enabled?(:extended_expiry_webhook_execution_setting, resource_namespace) ||
interval == :seven_days
execute_web_hooks(project_bot, expiring_user_token, { interval: interval })
end
interval_days = PersonalAccessToken.notification_interval(interval)
deliver_bot_notifications(project_bot, expiring_user_token.name, days_to_expire: interval_days)
@ -221,13 +216,5 @@ module PersonalAccessTokens
NotificationService.new
end
strong_memoize_attr :notification_service
def bot_resource_namepace(resource)
if resource.is_a?(Project)
resource.namespace
else
resource
end
end
end
end

View File

@ -72,9 +72,9 @@ module.exports = {
fileName: './config/webpack.config.js',
},
cache: {
// Use Yarn's cache directory
folder: './tmp/cache/depcruise-cache',
strategy: 'content',
// NOTE: if we want to store cache on CI, set the value to 'content'
strategy: 'metadata',
// With compression the cache is around 2MB
// Without Compression, cache is 20 times larger
compress: true,

View File

@ -1,9 +1,9 @@
---
name: extended_expiry_webhook_execution_setting
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/499732
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/178266
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/513684
milestone: '17.9'
group: group::authentication
type: gitlab_com_derisk
name: activity_stream_graphql
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/514804
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/183872
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/524717
milestone: '17.10'
group: group::personal productivity
type: beta
default_enabled: false

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
# Adds `workspaces.actual_state_updated_at` so the time of the last
# actual-state change of a workspace can be tracked.
class AddActualStateUpdatedAtToWorkspaces < Gitlab::Database::Migration[2.2]
milestone '17.11'
def change
# NOT NULL with an epoch sentinel default ('1970-01-01') so existing rows
# get a value without requiring a separate backfill migration.
add_column :workspaces, :actual_state_updated_at, :datetime_with_timezone, null: false, default: '1970-01-01'
end
end

View File

@ -0,0 +1 @@
9f7a9198505162ceb254c9c9ed141395be5a34306e0619c9f1ef4ae0323fc8c7

View File

@ -24660,6 +24660,7 @@ CREATE TABLE workspaces (
workspaces_agent_config_version integer NOT NULL,
desired_config_generator_version integer,
project_ref text,
actual_state_updated_at timestamp with time zone DEFAULT '1970-01-01 00:00:00+00'::timestamp with time zone NOT NULL,
CONSTRAINT check_15543fb0fa CHECK ((char_length(name) <= 64)),
CONSTRAINT check_157d5f955c CHECK ((char_length(namespace) <= 64)),
CONSTRAINT check_2b401b0034 CHECK ((char_length(deployment_resource_version) <= 64)),

View File

@ -243,8 +243,6 @@ To request acceleration of a feature, check if an issue already exists in [epic
Secondary site HTTP proxying is enabled by default on a secondary site when it uses a unified URL, meaning, it is configured with the same `external_url` as the primary site. Disabling proxying in this case tends not to be helpful due to completely different behavior being served at the same URL, depending on routing.
Secondary site HTTP proxying is enabled by default on a secondary site when it uses a unified URL, meaning, it is configured with the same `external_url` as the primary site. Disabling proxying in this case tends not to be helpful due to completely different behavior being served at the same URL, depending on routing.
### What happens if you disable secondary proxying
Disabling the proxying feature flag has the following general effects:

View File

@ -489,7 +489,7 @@ control over how the Pages daemon runs and serves content in your environment.
| `sentry_dsn` | The address for sending Sentry crash reporting to. |
| `sentry_enabled` | Enable reporting and logging with Sentry, true/false. |
| `sentry_environment` | The environment for Sentry crash reporting. |
| `status_uri` | The URL path for a status page, for example, `/@status`. |
| `status_uri` | The URL path for a status page, for example, `/@status`. Configure to enable health check endpoint on GitLab Pages. |
| `tls_max_version` | Specifies the maximum TLS version ("tls1.2" or "tls1.3"). |
| `tls_min_version` | Specifies the minimum TLS version ("tls1.2" or "tls1.3"). |
| `use_http2` | Enable HTTP2 support. |

View File

@ -19826,6 +19826,33 @@ Representation of a GitLab user.
| <a id="achievementupdatedat"></a>`updatedAt` | [`Time!`](#time) | Timestamp the achievement was last updated. |
| <a id="achievementuserachievements"></a>`userAchievements` {{< icon name="warning-solid" >}} | [`UserAchievementConnection`](#userachievementconnection) | **Introduced** in GitLab 15.10. **Status**: Experiment. Recipients for the achievement. |
### `ActivityStream`
Activity streams associated with a user.
#### Fields with arguments
##### `ActivityStream.followedUsersActivity`
Activity from users followed by the current user.
{{< details >}}
**Introduced** in GitLab 17.10.
**Status**: Experiment.
{{< /details >}}
Returns [`EventConnection`](#eventconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#pagination-arguments):
`before: String`, `after: String`, `first: Int`, and `last: Int`.
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="activitystreamfollowedusersactivitytarget"></a>`target` | [`EventTarget!`](#eventtarget) | Event target. |
### `AddOnPurchase`
Represents AddOn purchase for Namespace.
@ -23621,6 +23648,7 @@ The currently authenticated GitLab user.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="currentuseractive"></a>`active` | [`Boolean`](#boolean) | Indicates if the user is active. |
| <a id="currentuseractivity"></a>`activity` {{< icon name="warning-solid" >}} | [`ActivityStream`](#activitystream) | **Introduced** in GitLab 17.10. **Status**: Experiment. Recent user activity. |
| <a id="currentuseravatarurl"></a>`avatarUrl` | [`String`](#string) | URL of the user's avatar. |
| <a id="currentuserbio"></a>`bio` | [`String`](#string) | Bio of the user. |
| <a id="currentuserbot"></a>`bot` | [`Boolean!`](#boolean) | Indicates if the user is a bot. |
@ -26061,6 +26089,8 @@ Representing an event.
| <a id="eventauthor"></a>`author` | [`UserCore!`](#usercore) | Author of this event. |
| <a id="eventcreatedat"></a>`createdAt` | [`Time!`](#time) | When this event was created. |
| <a id="eventid"></a>`id` | [`ID!`](#id) | ID of the event. |
| <a id="eventproject"></a>`project` | [`Project`](#project) | Project of this event. |
| <a id="eventtarget"></a>`target` | [`EventTargetType`](#eventtargettype) | The target of the event. |
| <a id="eventupdatedat"></a>`updatedAt` | [`Time!`](#time) | When this event was updated. |
### `ExternalAuditEventDestination`
@ -42251,6 +42281,22 @@ Event action.
| <a id="eventactionreopened"></a>`REOPENED` | Reopened action. |
| <a id="eventactionupdated"></a>`UPDATED` | Updated action. |
### `EventTarget`
Event target.
| Value | Description |
| ----- | ----------- |
| <a id="eventtargetall"></a>`ALL` | All events. |
| <a id="eventtargetcomments"></a>`COMMENTS` | Comments events. |
| <a id="eventtargetdesigns"></a>`DESIGNS` | Designs events. |
| <a id="eventtargetepic"></a>`EPIC` | Epic events. |
| <a id="eventtargetissue"></a>`ISSUE` | Issue events. |
| <a id="eventtargetmerged"></a>`MERGED` | Merged events. |
| <a id="eventtargetpush"></a>`PUSH` | Push events. |
| <a id="eventtargetteam"></a>`TEAM` | Team events. |
| <a id="eventtargetwiki"></a>`WIKI` | Wiki events. |
### `ExclusionScannerEnum`
Enum for the security scanners used with exclusions.
@ -45843,6 +45889,22 @@ One of:
- [`NugetDependencyLinkMetadata`](#nugetdependencylinkmetadata)
#### `EventTargetType`
Represents an object that can be the subject of an event.
One of:
- [`Design`](#design)
- [`Issue`](#issue)
- [`MergeRequest`](#mergerequest)
- [`Milestone`](#milestone)
- [`Note`](#note)
- [`Project`](#project)
- [`Snippet`](#snippet)
- [`UserCore`](#usercore)
- [`WikiPage`](#wikipage)
#### `ExpressionValue`
Represents possible value types for an expression.

View File

@ -116,6 +116,52 @@ Example response:
}
```
## Update a service account user
{{< history >}}
- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/182607/) in GitLab 17.10.
{{< /history >}}
Updates a service account user in a given top-level group.
{{< alert type="note" >}}
This endpoint only works on top-level groups.
{{< /alert >}}
```plaintext
PATCH /groups/:id/service_accounts/:user_id
```
Parameters:
| Attribute | Type | Required | Description |
|:-----------|:---------------|:---------|:----------------------------------------------------------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the target group](rest/_index.md#namespaced-paths). |
| `user_id` | integer | yes | The ID of the service account user. |
| `name` | string | no | Name of the user. |
| `username` | string | no | Username of the user. |
Example request:
```shell
curl --request PATCH --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/345/service_accounts/57" --data "name=Updated Service Account"
```
Example response:
```json
{
"id": 57,
"username": "service_account_group_345_6018816a18e515214e0c34c2b33523fc",
"name": "Updated Service Account",
"email": "service_account_group_345_<random_hash>@noreply.gitlab.example.com"
}
```
## Delete a service account user
{{< history >}}

View File

@ -0,0 +1,72 @@
---
stage: AI-powered
group: Custom Models
info: Any user with at least the Maintainer role can merge updates to this content. For details, see https://docs.gitlab.com/ee/development/development_processes.html#development-guidelines-review.
title: Amazon Q integration for testing and evaluation
---
> This guide combines and builds on top of the following guides and sources. It describes Amazon Q setup for testing and evaluation purposes:
>
> - [Set up GitLab Duo with Amazon Q](../../user/duo_amazon_q/setup.md)
> - [code-suggestions development guide](../code_suggestions/_index.md)
This guide describes how to set up Amazon Q in a GitLab Linux package running in a VM, using the staging AI Gateway. The reason we need a GitLab Linux package instance instead of GDK is that the GitLab instance needs an HTTPS URL that can be accessed by Amazon Q.
## Install and configure a GitLab Linux package on a virtual machine
1. Create a VM in either GCP or AWS
- Follow this [guide](../../install/google_cloud_platform/_index.md) on how to create a VM in GCP
- Create a VM in AWS
1. Go to [cloud sandbox](https://gitlabsandbox.cloud/cloud), and login with OKTA
1. Click "Create Individual Account", and choose `aws-***` (not `aws-services-***` or `aws-dedicated-***`). This will create an AWS sandbox and display login credentials
1. Configure an EC2 machine of similar spec as GCP VM
A few things to note:
- Need to enable both http and https traffic under firewall setting.
- Copy the external IP of the VM instance created.
1. Install GitLab
1. Follow this [guide](https://about.gitlab.com/install/#ubuntu) on how to install GitLab Linux package.
We need to set up the external URL and an initial password. Install GitLab using the following command:
```shell
sudo GITLAB_ROOT_PASSWORD="your_password" EXTERNAL_URL="https://<vm-instance-external-ip>.nip.io" apt install gitlab-ee
```
This will use nip.io as the DNS service so the GitLab instance can be accessed over HTTPS
1. Configure the newly installed GitLab instance
1. SSH into the VM, and add the following config into `/etc/gitlab/gitlab.rb`
```ruby
gitlab_rails['env'] = {
"GITLAB_LICENSE_MODE" => "test",
"CUSTOMER_PORTAL_URL" => "https://customers.staging.gitlab.com",
"AI_GATEWAY_URL" => "https://cloud.staging.gitlab.com/ai"
}
```
1. Apply the config changes by `sudo gitlab-ctl reconfigure`
1. Obtain and activate a self-managed ultimate license
1. Go to [staging customers portal](https://customers.staging.gitlab.com/), select "Signin with GitLab.com account".
1. Instead of clicking "Buy new subscription", go to the [product page](https://customers.staging.gitlab.com/subscriptions/new?plan_id=2c92a00c76f0c6c20176f2f9328b33c9) directly. For the reasoning behind this, see [buy_subscription](https://gitlab.com/gitlab-org/customers-gitlab-com/-/blob/8aa922840091ad5c5d96ada43d0065a1b6198841/doc/flows/buy_subscription.md)
1. Purchase the subscription using [a test credit card](https://gitlab.com/gitlab-org/customers-gitlab-com/#testing-credit-card-information). An activation code will be given. Do not purchase a duo-pro add-on, because currently duo-pro and Q are mutually exclusive.
1. Go to the GitLab instance created earlier (`https://<vm-instance-external-ip>.nip.io`), log in with root account. Then on the left sidebar, go to **Admin > Subscription**, and enter the activation code
## Create and configure an AWS sandbox
1. Follow the [same step](#install-and-configure-a-gitlab-linux-package-on-a-virtual-machine) described above on how to create an AWS sandbox if you haven't had one already.
1. Log in to the newly created AWS account and create an **Identity Provider** following these [instructions](../../user/duo_amazon_q/setup.md#create-an-iam-identity-provider) with slight modifications:
- Provider URL: `https://glgo.staging.runway.gitlab.net/cc/oidc/<your_gitlab_instance_id>`
- Audience: `gitlab-cc-<your_gitlab_instance_id>`
The GitLab instance ID can be found at `<gitlab_url>/admin/ai/amazon_q_settings`
1. Create a new role using the identity provider. For this, we can follow [this section](../../user/duo_amazon_q/setup.md#create-an-iam-role) exactly.
## Add Amazon Q to GitLab
1. Follow [Enter the ARN in GitLab and enable Amazon Q](../../user/duo_amazon_q/setup.md#enter-the-arn-in-gitlab-and-enable-amazon-q) exactly
1. [Invite Amazon Q user to your project](../../user/duo_amazon_q/setup.md#add-the-amazon-q-user-to-your-project). For this step, we do not need to configure AI Gateway again, because we've already done it when configuring our GitLab instance.
1. Now Q should be working. We can test it like [this](https://gitlab.com/gitlab-com/ops-sub-department/aws-gitlab-ai-integration/integration-motion-planning/-/wikis/integration-docs#testing-q)

View File

@ -1103,6 +1103,16 @@ unless we've gone through a legal review and have been told to promote the partn
This guidance follows the [Use of Third-party Trademarks](https://handbook.gitlab.com/handbook/legal/policies/product-third-party-trademarks-guidelines/#dos--donts-for-use-of-third-party-trademarks-in-gitlab).
## GitLab AI vendor model
Use **GitLab AI vendor model** to refer to a [language model](#language-model-large-language-model)
that is hosted by GitLab, and that customers access through the GitLab-hosted
[AI gateway](#ai-gateway).
Do not use this term when the [language model is hosted by a customer](#self-hosted-model),
or when the customer is using the [GitLab Duo Self-Hosted](#gitlab-duo-self-hosted)
feature.
## GitLab Dedicated
Use **GitLab Dedicated** to refer to the product offering. It refers to a GitLab instance that's hosted and managed by GitLab for customers.

View File

@ -37,8 +37,19 @@ generate them from a cron-based Sidekiq job:
- For Geo related metrics, check `Geo::MetricsUpdateService`.
- For other "global" / instance-wide metrics, check: `Metrics::GlobalMetricsUpdateService`.
When exporting data from Sidekiq in an installation with more than one Sidekiq instance,
you are not guaranteed that the same exporter will always be queried.
{{< alert type="warning" >}}
You can read more and understand the caveats in [issue 406583](https://gitlab.com/gitlab-org/gitlab/-/issues/406583),
When exporting metrics from Sidekiq in a multi-instance deployment:
- The same exporter is not guaranteed to be queried consistently.
- This is especially problematic for gauge metrics, as each Sidekiq worker will continue reporting the last recorded value
until that specific worker runs the metric collection code again.
- This can lead to inconsistent or stale metrics data across your monitoring system.
For more reliable metrics collection, consider creating the exporter as a custom exporter
in [`gitlab-exporter`](https://gitlab.com/gitlab-org/ruby/gems/gitlab-exporter/)
{{< /alert >}}
For more details, see [issue 406583](https://gitlab.com/gitlab-org/gitlab/-/issues/406583),
where we also discuss a possible solution using a push-gateway.

View File

@ -183,7 +183,7 @@ More threads would lead to excessive swapping and lower performance.
[Redis](https://redis.io/) stores all user sessions and background tasks
and requires about 25 kB per user on average.
In GitLab 16.0 and later, Redis 6.x or 7.x is required.
In GitLab 16.0 and later, Redis 6.2.x or 7.x is required.
For more information about end-of-life dates, see the
[Redis documentation](https://redis.io/docs/latest/operate/rs/installing-upgrading/product-lifecycle/).

View File

@ -38,14 +38,15 @@ or the [Support team](https://about.gitlab.com/support/).
The zero-downtime upgrade process has the following requirements:
- Zero-downtime upgrades are only supported on multi-node GitLab environments built with the Linux package that have Load Balancing and HA mechanisms configured as follows:
- Zero-downtime upgrades are only supported on multi-node GitLab environments built with the Linux package that have Load Balancing and available HA mechanisms configured as follows:
- External Load Balancer configured for Rails nodes with health checks enabled against the [Readiness](../administration/monitoring/health_check.md#readiness) (`/-/readiness`) endpoint.
- Internal Load Balancer configured for any PgBouncer and Praefect components with TCP health checks enabled.
- HA mechanisms configured for the Consul, Postgres and Redis components if present.
- Any of these components that are not deployed in a HA fashion will need to be upgraded separately with downtime.
- HA mechanisms configured for the Consul, Postgres, Redis components if present.
- Any of these components that are not deployed in a HA fashion need to be upgraded separately with downtime.
- For databases, the [Linux package only supports HA for the main GitLab database](https://gitlab.com/groups/gitlab-org/-/epics/7814). For any other databases, such as the [Praefect database](#praefect-gitaly-cluster), a third party database solution is required to achieve HA and subsequently to avoid downtime.
- **You can only upgrade one minor release at a time**. So from `16.1` to `16.2`, not to `16.3`. If you skip releases, database modifications may be run in the wrong sequence [and leave the database schema in a broken state](https://gitlab.com/gitlab-org/gitlab/-/issues/321542).
- You have to use [post-deployment migrations](../development/database/post_deployment_migrations.md).
- [Zero-downtime upgrades are not available with the GitLab Charts](https://docs.gitlab.com/charts/installation/upgrade.html) but are with [GitLab Operator](https://docs.gitlab.com/operator/gitlab_upgrades.html).
- [Zero-downtime upgrades are not available with the GitLab Charts](https://docs.gitlab.com/charts/installation/upgrade.html). Support is available with the [GitLab Operator](https://docs.gitlab.com/operator/gitlab_upgrades.html) but there are [known limitations](https://docs.gitlab.com/operator/#known-issues) with this deployment method and as such it's not covered in this guide at this time.
In addition to the above, please be aware of the following considerations:
@ -59,9 +60,10 @@ In addition to the above, please be aware of the following considerations:
- Certain major or minor releases may require a set of background migrations to be finished. While this doesn't require downtime (if the above conditions are met), it's required that you [wait for background migrations to complete](background_migrations.md) between each major or minor release upgrade.
- The time necessary to complete these migrations can be reduced by increasing the number of Sidekiq workers that can process jobs in the
`background_migration` queue. To see the size of this queue, [check for background migrations before upgrading](background_migrations.md).
- Zero downtime upgrades can be performed for [Gitaly](#gitaly) when it's set up in its Cluster or Sharded setups due to a graceful reload mechanism. The [Praefect (Gitaly Cluster)](#praefect-gitaly-cluster) component can also be directly upgraded without downtime; however, the GitLab Linux package does not offer HA, and subsequently zero-downtime support, for its database. A third-party database solution is required to avoid downtime.
- [PostgreSQL major version upgrades](../administration/postgresql/replication_and_failover.md#near-zero-downtime-upgrade-of-postgresql-in-a-patroni-cluster) are a separate process and not covered by zero-downtime upgrades (smaller upgrades are covered).
- Zero-downtime upgrades are supported for any GitLab components you've deployed with the GitLab Linux package. If you've deployed select components through a supported third party service, such as PostgreSQL in AWS RDS or Redis in GCP Memorystore, upgrades for those services will need to be performed separately as per their standard processes.
- As a general guideline, the larger amount of data you have, the more time it will take for the upgrade to complete. In testing, any database smaller than 10 GB shouldn't generally take longer than an hour, but your mileage may vary.
- Zero-downtime upgrades are supported for the noted GitLab components you've deployed with the GitLab Linux package. If you've deployed select components through a supported third party service, such as PostgreSQL in AWS RDS or Redis in GCP Memorystore, upgrades for those services need to be performed separately as per their standard processes.
- As a general guideline, the larger amount of data you have, the more time is needed for the upgrade to complete. In testing, any database smaller than 10 GB shouldn't generally take longer than an hour, but your mileage may vary.
{{< alert type="note" >}}
@ -153,7 +155,7 @@ This process applies to both Gitaly Sharded and Cluster setups. Run through the
sudo gitlab-ctl restart consul node-exporter logrotate
```
### Praefect
#### Praefect (Gitaly Cluster)
For Gitaly Cluster setups, you must deploy and upgrade Praefect in a similar way by using a graceful reload.
@ -165,6 +167,12 @@ In the future this functionality may be changed, [refer to this Epic](https://gi
{{< /alert >}}
{{< alert type="note" >}}
This section focuses exclusively on the Praefect component, not its [required PostgreSQL database](../administration/gitaly/praefect.md#postgresql). The [GitLab Linux package does not offer HA](https://gitlab.com/groups/gitlab-org/-/epics/7814), and subsequently zero-downtime support, for the Praefect database. A third-party database solution is required to avoid downtime.
{{< /alert >}}
One additional step though for Praefect is that it will also need to run through its database migrations to upgrade its data.
Migrations need to be run on only one Praefect node to avoid clashes. This is best done by selecting one of the
nodes to be a deploy node. This target node will be configured to run migrations while the rest are not. We'll refer to this as the **Praefect deploy node** below:

View File

@ -172,6 +172,7 @@ To set this default:
- [Added](https://gitlab.com/gitlab-org/gitlab/-/issues/463016) 60 day and 30 days triggers to project and group access tokens webhooks in GitLab 17.9 [with a flag](../../../administration/feature_flags.md) named `extended_expiry_webhook_execution_setting`. Disabled by default.
- [Enabled on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues/513684) in GitLab 17.10.
- [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/513684) in GitLab 17.10. Feature flag `extended_expiry_webhook_execution_setting` removed.
{{< /history >}}

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
module ActiveContext
  # Thin wrapper around the Vertex AI text-embeddings client used by the
  # indexing pipeline.
  class Embeddings
    # Generates embeddings for +content+ (a single text or a list of texts).
    #
    # Always returns an array of embedding vectors: when the client hands back
    # a single flat vector (its elements are not Arrays), it is wrapped so
    # callers can treat the result uniformly.
    def self.generate_embeddings(content)
      response = Gitlab::Llm::VertexAi::Embeddings::Text.new(
        content,
        user: nil,
        tracking_context: { action: 'embedding' },
        unit_primitive: 'semantic_search_issue'
      ).execute

      return response if response.all?(Array)

      [response]
    end
  end
end

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
module ActiveContext
  module Preprocessors
    # Concern that adds batched embedding generation to reference classes.
    # Including classes must implement +embedding_content(ref)+ at the class
    # level; each reference gets its +embedding+ attribute populated.
    module Embeddings
      extend ActiveSupport::Concern

      IndexingError = Class.new(StandardError)

      # Vertex bulk limit is 250 so we choose a lower batch size
      # Gitlab::Llm::VertexAi::Embeddings::Text::BULK_LIMIT
      BATCH_SIZE = 100

      class_methods do
        # Generates embeddings for +refs+ in slices of BATCH_SIZE and assigns
        # one vector per reference. Returns +refs+ in all cases: any error
        # (including a missing :embedding_content implementation) is logged
        # and the references are returned without embeddings.
        def bulk_embeddings(refs)
          unless respond_to?(:embedding_content)
            raise IndexingError, "#{self} should implement :embedding_content method"
          end

          refs.each_slice(BATCH_SIZE) do |batch|
            texts = batch.map { |reference| embedding_content(reference) }
            vectors = ActiveContext::Embeddings.generate_embeddings(texts)

            batch.zip(vectors).each do |reference, vector|
              reference.embedding = vector
            end
          end

          refs
        rescue StandardError => e
          ::ActiveContext::Logger.exception(e)
          refs # we will generate each embedding on the fly if bulk fails
        end
      end
    end
  end
end

View File

@ -0,0 +1,104 @@
# frozen_string_literal: true
RSpec.describe ActiveContext::Preprocessors::Embeddings do
  # Anonymous reference class wired through the real preprocessor pipeline so
  # we exercise `add_preprocessor` + `bulk_embeddings` end to end.
  let(:reference_class) do
    Class.new(Test::References::MockWithDatabaseRecord) do
      include ::ActiveContext::Preprocessors::Embeddings

      add_preprocessor :bulk_embeddings do |refs|
        bulk_embeddings(refs)
      end

      attr_accessor :embedding
    end
  end

  let(:reference_1) { reference_class.new(collection_id, partition, object_id) }
  let(:reference_2) { reference_class.new(collection_id, partition, object_id) }

  # Collaborators stubbed out so no adapter/database is needed.
  let(:mock_adapter) { double }
  let(:mock_collection) { double(name: collection_name, partition_for: partition) }
  let(:mock_object) { double(id: object_id) }
  let(:mock_relation) { double(find_by: mock_object) }
  let(:mock_connection) { double(id: connection_id) }

  let(:connection_id) { 3 }
  let(:partition) { 2 }
  let(:collection_id) { 1 }
  let(:object_id) { 5 }
  let(:collection_name) { 'mock_collection' }
  let(:embeddings) { [[1, 2], [3, 4]] }
  let(:embedding_content) { 'some text' }

  subject(:preprocess_refs) { ActiveContext::Reference.preprocess_references([reference_1, reference_2]) }

  before do
    allow(ActiveContext).to receive(:adapter).and_return(mock_adapter)
    allow(ActiveContext::CollectionCache).to receive(:fetch).and_return(mock_collection)
    allow(ActiveContext::Logger).to receive(:exception).and_return(nil)
    allow(reference_class).to receive(:model_klass).and_return(mock_relation)
  end

  context 'when the reference klass implements :embedding_content' do
    before do
      allow(reference_class).to receive(:embedding_content).and_return(embedding_content)
    end

    it 'generates embeddings in bulk and sets the embeddings for each reference' do
      # One bulk call for both references; vectors are assigned positionally.
      expect(ActiveContext::Embeddings).to receive(:generate_embeddings)
        .with([embedding_content, embedding_content])
        .and_return(embeddings)

      preprocess_refs

      expect(reference_1.embedding).to eq(embeddings.first)
      expect(reference_2.embedding).to eq(embeddings.last)
    end

    context 'when generating for a single reference' do
      it 'generates embeddings in bulk and sets the embeddings for the reference' do
        expect(ActiveContext::Embeddings).to receive(:generate_embeddings)
          .with([embedding_content])
          .and_return([embeddings.first])

        ActiveContext::Reference.preprocess_references([reference_1])

        expect(reference_1.embedding).to eq(embeddings.first)
      end
    end

    context 'when generate_embeddings returns an error' do
      let(:error) { StandardError }

      before do
        allow(ActiveContext::Embeddings).to receive(:generate_embeddings).and_raise(error)
      end

      # Errors are swallowed by design: references come back without
      # embeddings so they can be embedded individually later.
      it 'logs and returns all references without embeddings' do
        expect(::ActiveContext::Logger).to receive(:exception).with(error)
        expect(preprocess_refs).to eq([reference_1, reference_2])
        expect(reference_1.embedding).to be_nil
        expect(reference_2.embedding).to be_nil
      end
    end
  end

  context 'when the reference does not implement :embedding_content' do
    # The IndexingError raised internally is logged, not propagated.
    it 'logs and does not raise an error' do
      expect(ActiveContext::Embeddings).not_to receive(:generate_embeddings)
      expect(::ActiveContext::Logger).to receive(:exception)
        .with(ActiveContext::Preprocessors::Embeddings::IndexingError)
      expect { preprocess_refs }.not_to raise_error
    end

    it 'returns references without embeddings' do
      expect(preprocess_refs).to eq([reference_1, reference_2])
      expect(reference_1.embedding).to be_nil
      expect(reference_2.embedding).to be_nil
    end
  end
end

View File

@ -119,6 +119,14 @@ class EventFilter
end
end
def filters
[ALL, PUSH, MERGED, ISSUE, COMMENTS, TEAM, WIKI, DESIGNS]
end
def ==(other)
other.is_a?(self.class) && filter == other.filter
end
private
def in_operator_params(array_data:, scope: nil, in_column: nil, in_values: nil, order_hint_column: nil)
@ -213,10 +221,6 @@ class EventFilter
def design_events(events)
events.for_design
end
def filters
[ALL, PUSH, MERGED, ISSUE, COMMENTS, TEAM, WIKI, DESIGNS]
end
end
# rubocop: enable CodeReuse/ActiveRecord

View File

@ -197,6 +197,15 @@ semgrep-sast:
- '**/*.swift'
- '**/*.m'
- '**/*.kt'
- '**/*.properties'
- '**/application*.yml'
- '**/management*.yml'
- '**/actuator*.yml'
- '**/bootstrap*.yml'
- '**/application*.yaml'
- '**/management*.yaml'
- '**/actuator*.yaml'
- '**/bootstrap*.yaml'
## In case gitlab-advanced-sast already covers all the files that semgrep-sast would have scanned
- if: $CI_COMMIT_BRANCH &&
$GITLAB_FEATURES =~ /\bsast_advanced\b/ &&
@ -230,6 +239,15 @@ semgrep-sast:
- '**/*.m'
- '**/*.rb'
- '**/*.kt'
- '**/*.properties'
- '**/application*.yml'
- '**/management*.yml'
- '**/actuator*.yml'
- '**/bootstrap*.yml'
- '**/application*.yaml'
- '**/management*.yaml'
- '**/actuator*.yaml'
- '**/bootstrap*.yaml'
sobelow-sast:
extends: .sast-analyzer

View File

@ -250,6 +250,15 @@ semgrep-sast:
- '**/*.swift'
- '**/*.m'
- '**/*.kt'
- '**/*.properties'
- '**/application*.yml'
- '**/management*.yml'
- '**/actuator*.yml'
- '**/bootstrap*.yml'
- '**/application*.yaml'
- '**/management*.yaml'
- '**/actuator*.yaml'
- '**/bootstrap*.yaml'
## In case gitlab-advanced-sast already covers all the files that semgrep-sast would have scanned
- if: $CI_PIPELINE_SOURCE == "merge_request_event" &&
$GITLAB_FEATURES =~ /\bsast_advanced\b/ &&
@ -283,6 +292,15 @@ semgrep-sast:
- '**/*.m'
- '**/*.rb'
- '**/*.kt'
- '**/*.properties'
- '**/application*.yml'
- '**/management*.yml'
- '**/actuator*.yml'
- '**/bootstrap*.yml'
- '**/application*.yaml'
- '**/management*.yaml'
- '**/actuator*.yaml'
- '**/bootstrap*.yaml'
- if: $CI_OPEN_MERGE_REQUESTS # Don't add it to a *branch* pipeline if it's already in a merge request pipeline.
when: never
# If there's no open merge request, add it to a *branch* pipeline instead.
@ -308,6 +326,15 @@ semgrep-sast:
- '**/*.swift'
- '**/*.m'
- '**/*.kt'
- '**/*.properties'
- '**/application*.yml'
- '**/management*.yml'
- '**/actuator*.yml'
- '**/bootstrap*.yml'
- '**/application*.yaml'
- '**/management*.yaml'
- '**/actuator*.yaml'
- '**/bootstrap*.yaml'
## In case gitlab-advanced-sast already covers all the files that semgrep-sast would have scanned
- if: $CI_COMMIT_BRANCH &&
$GITLAB_FEATURES =~ /\bsast_advanced\b/ &&
@ -341,6 +368,15 @@ semgrep-sast:
- '**/*.m'
- '**/*.rb'
- '**/*.kt'
- '**/*.properties'
- '**/application*.yml'
- '**/management*.yml'
- '**/actuator*.yml'
- '**/bootstrap*.yml'
- '**/application*.yaml'
- '**/management*.yaml'
- '**/actuator*.yaml'
- '**/bootstrap*.yaml'
sobelow-sast:
extends: .sast-analyzer

View File

@ -45,14 +45,7 @@ module Gitlab
end
command :set_parent, :epic do |parent_param|
if quick_action_target.instance_of?(WorkItem)
parent = extract_work_items(parent_param).first
if parent && current_user.can?(:read_work_item, parent)
@updates[:set_parent] = parent
@execution_message[:set_parent] = success_msg[:set_parent]
else
@execution_message[:set_parent] = _("This parent does not exist or you don't have sufficient permission.")
end
handle_set_parent(parent_param)
elsif quick_action_target.instance_of?(Issue)
handle_set_epic(parent_param)
end
@ -210,6 +203,35 @@ module Gitlab
# overridden in EE
def handle_set_epic(parent_param); end
# rubocop:disable Gitlab/ModuleWithInstanceVariables -- @updates is already defined and part of
# Gitlab::QuickActions::Dsl implementation
# Handles the `/set_parent` quick action for work items: validates the
# requested parent and either records the update in @updates or picks an
# explanatory error message. Exactly one message is written to
# @execution_message[:set_parent] on every path.
def handle_set_parent(parent_param)
  parent = extract_work_items(parent_param).first
  child = quick_action_target
  message =
    if parent && current_user.can?(:read_work_item, parent)
      if child&.work_item_parent == parent
        # Already parented to this exact work item: no-op with feedback.
        format(_('Work item %{work_item_reference} has already been added to parent %{parent_reference}.'),
          work_item_reference: child&.to_reference, parent_reference: parent.to_reference)
      elsif parent.confidential? && !child&.confidential?
        # A confidential parent may not have non-confidential children.
        _("Cannot assign a confidential parent to a non-confidential work item. Make the work item " \
          "confidential and try again")
      elsif ::WorkItems::HierarchyRestriction.find_by_parent_type_id_and_child_type_id(parent.work_item_type_id,
        child&.work_item_type_id).nil?
        # No hierarchy restriction row exists for this (parent, child) type
        # pair, so the combination is not allowed.
        _("Cannot assign this work item type to parent type")
      else
        # Valid: stage the parent assignment for the update service.
        @updates[:set_parent] = parent
        success_msg[:set_parent]
      end
    else
      # Deliberately the same message for "not found" and "no permission"
      # so the action does not leak the existence of hidden work items.
      _("This parent does not exist or you don't have sufficient permission.")
    end

  @execution_message[:set_parent] = message
end
# rubocop:enable Gitlab/ModuleWithInstanceVariables
# overridden in EE
def show_epic_alias?; end
end

View File

@ -11694,6 +11694,12 @@ msgstr ""
msgid "Cannot assign a confidential epic to a non-confidential issue. Make the issue confidential and try again"
msgstr ""
msgid "Cannot assign a confidential parent to a non-confidential work item. Make the work item confidential and try again"
msgstr ""
msgid "Cannot assign this work item type to parent type"
msgstr ""
msgid "Cannot be merged automatically"
msgstr ""
@ -42621,9 +42627,6 @@ msgstr ""
msgid "PipelineCharts|An unknown error occurred while processing CI/CD analytics."
msgstr ""
msgid "PipelineCharts|CI/CD Analytics"
msgstr ""
msgid "PipelineCharts|Failure rate"
msgstr ""
@ -48241,6 +48244,9 @@ msgstr ""
msgid "RegistrationFeatures|use this feature"
msgstr ""
msgid "Registration|There are no seats left on your GitLab instance. Please contact your GitLab administrator."
msgstr ""
msgid "Registries enqueued to be resynced"
msgstr ""
@ -54673,6 +54679,9 @@ msgstr ""
msgid "Service account token expiration"
msgstr ""
msgid "Service account was successfully updated."
msgstr ""
msgid "Service accounts"
msgstr ""
@ -54715,6 +54724,9 @@ msgstr ""
msgid "ServiceAccount|User does not have permission to delete a service account."
msgstr ""
msgid "ServiceAccount|You are not authorized to update service accounts in this namespace."
msgstr ""
msgid "ServiceDesk|%{customEmail} with SMTP host %{smtpAddress} is %{badgeStart}verified%{badgeEnd}"
msgstr ""
@ -63362,6 +63374,9 @@ msgstr[1] ""
msgid "User is blocked"
msgstr ""
msgid "User is not a service account"
msgstr ""
msgid "User is not allowed to resolve thread"
msgstr ""
@ -66167,6 +66182,9 @@ msgstr ""
msgid "Work in progress limit: %{wipLimit}"
msgstr ""
msgid "Work item %{work_item_reference} has already been added to parent %{parent_reference}."
msgstr ""
msgid "Work item not supported"
msgstr ""

View File

@ -65,7 +65,7 @@
"@gitlab/fonts": "^1.3.0",
"@gitlab/query-language-rust": "0.4.2",
"@gitlab/svgs": "3.123.0",
"@gitlab/ui": "110.1.0",
"@gitlab/ui": "111.0.0",
"@gitlab/vue-router-vue3": "npm:vue-router@4.5.0",
"@gitlab/vuex-vue3": "npm:vuex@4.1.0",
"@gitlab/web-ide": "^0.0.1-dev-20250309164831",

View File

@ -7,7 +7,7 @@ To see the full list of circular dependencies, run the command ${chalk.bold.cyan
If you have fixed existing circular dependencies or find false positives, you can add/remove them from the
exclusions list in the 'config/dependency-cruiser.js' file.\n
${chalk.italic('If the above command fails because of memory issues, increase the memory by prepending it with the following')}
${chalk.bold.cyan('NODE_OPTIONS="--max_old_space_size=4096"')}
${chalk.bold.cyan('NODE_OPTIONS="--max-old-space-size=4096"')}
`);
};

View File

@ -51,7 +51,7 @@ class StaticAnalysis
Task.new(%w[scripts/lint-vendored-gems.sh], 10),
Task.new(%w[yarn run check-dependencies], 1),
Task.new(%w[scripts/gemfile_lock_changed.sh], 1),
Task.new(%w[yarn run deps:check:all], 60)
Task.new(%w[yarn run deps:check:all --no-cache], 60)
].compact.freeze
def run_tasks!(options = {})

View File

@ -953,28 +953,6 @@ RSpec.describe ProjectsController, feature_category: :groups_and_projects do
expect(project.emails_disabled?).to eq(!result)
expect(project.extended_prat_expiry_webhooks_execute?).to eq(result)
end
context 'when extended_expiry_webhook_execution_setting feature flag is false' do
before do
stub_feature_flags(extended_expiry_webhook_execution_setting: false)
end
it "does not update extended_expiry_webhook_execution_setting" do
put :update, params: {
namespace_id: project.namespace,
id: project.path,
project: {
project_setting_attributes: {
extended_prat_expiry_webhooks_execute: boolean_value
}
}
}
project.reload
expect(project.extended_prat_expiry_webhooks_execute?).to be false
end
end
end
end

View File

@ -659,8 +659,8 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :grou
context 'when deployment does not have a deployable' do
let!(:second_deployment) { create(:deployment, :success, environment: environment, deployable: nil) }
it 'has an empty href' do
expect(find_by_testid('job-deployment-link')['href']).to be_empty
it 'has a href of #' do
expect(page).to have_selector('[data-testid="job-deployment-link"][href="#"]')
end
end
end

View File

@ -1267,6 +1267,7 @@ export const mockPipelineWithoutMR = {
path: 'pipeline/28029444',
ref: {
name: 'test-branch',
path: 'test-branch',
},
};

View File

@ -9,12 +9,12 @@ exports[`NewCluster renders the cluster component correctly 1`] = `
</h4>
<p>
Enter details about your cluster.
<b-link-stub
<a
class="gl-link"
href="/help/user/project/clusters/add_existing_cluster"
>
How do I use a certificate to connect to my cluster?
</b-link-stub>
</a>
</p>
</div>
`;

View File

@ -27,7 +27,7 @@ describe('BrowserSupportAlert', () => {
createComponent({ mountFn: mount });
expect(findLink().attributes()).toMatchObject({
target: '_blank',
rel: 'noopener',
rel: 'noopener noreferrer',
});
});
});

View File

@ -79,13 +79,11 @@ describe('OrganizationGroupsNewApp', () => {
expect(findAllParagraphs().at(0).text()).toMatchInterpolatedText(
'Groups allow you to manage and collaborate across multiple projects. Members of a group have access to all of its projects.',
);
expect(findAllLinks().at(0).attributes('href')).toBe(helpPagePath('user/group/index'));
expect(findAllLinks().at(0).props('href')).toBe(helpPagePath('user/group/index'));
expect(findAllParagraphs().at(1).text()).toContain(
'Groups can also be nested by creating subgroups.',
);
expect(findAllLinks().at(1).attributes('href')).toBe(
helpPagePath('user/group/subgroups/index'),
);
expect(findAllLinks().at(1).props('href')).toBe(helpPagePath('user/group/subgroups/index'));
});
it('renders form and passes correct props', () => {

View File

@ -34,13 +34,14 @@ exports[`packages_list_app renders 1`] = `
class="gl-mb-0 gl-mt-4 gl-text-subtle"
>
Learn how to
<b-link-stub
<a
class="gl-link"
href="/help/user/packages/terraform_module_registry/_index"
rel="noopener noreferrer"
target="_blank"
>
publish and share your packages
</b-link-stub>
</a>
with GitLab.
</p>
<div

View File

@ -226,7 +226,7 @@ exports[`PypiInstallation renders all the messages 1`] = `
class="gl-link"
data-testid="pypi-docs-link"
href="/help/user/packages/pypi_repository/_index"
rel="noopener"
rel="noopener noreferrer"
target="_blank"
>
see the documentation

View File

@ -0,0 +1,30 @@
import { shallowMount } from '@vue/test-utils';
import DashboardHeader from '~/projects/pipelines/charts/components/dashboard_header.vue';
describe('DashboardHeader', () => {
  let wrapper;

  // Forward mount options to `shallowMount` verbatim. A plain default
  // parameter replaces the original `({ ...options })` destructure-and-respread,
  // which copied the object for no benefit and threw a TypeError when the
  // helper was called with no argument.
  const createComponent = (options = {}) => {
    wrapper = shallowMount(DashboardHeader, options);
  };

  it('shows heading', () => {
    createComponent({
      slots: {
        default: 'My Heading',
      },
    });

    expect(wrapper.find('h2').text()).toBe('My Heading');
  });

  it('shows description', () => {
    createComponent({
      slots: {
        description: '<p>My Description</p>',
      },
    });

    expect(wrapper.find('p').text()).toContain('My Description');
  });
});

View File

@ -108,16 +108,35 @@ describe('Branch rule protection', () => {
});
describe('When `edit_branch_rules` FF is disabled', () => {
beforeEach(() => createComponent({ editBranchRules: false }));
it('does not render `Edit` button', () => {
createComponent({ editBranchRules: false });
expect(findEditButton().exists()).toBe(false);
});
describe('when headerLinkHref and headerLinkTitle are set', () => {
beforeEach(() => {
createComponent({ editBranchRules: false });
});
it('renders link to manage branch protections', () => {
expect(findLink().text()).toBe(protectionPropsMock.headerLinkTitle);
expect(findLink().attributes('href')).toBe(protectionPropsMock.headerLinkHref);
});
});
describe('when headerLinkHref and headerLinkTitle are not set', () => {
beforeEach(() => {
createComponent(
{ editBranchRules: false },
{ headerLinkHref: null, headerLinkTitle: null },
);
});
it('does not render link to manage branch protections', () => {
expect(findLink().exists()).toBe(false);
});
});
it('renders a protection row for status checks', () => {
createComponent({ editBranchRules: false }, { statusChecks: statusChecksRulesMock });

View File

@ -1,3 +1,4 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { Mousetrap } from '~/lib/mousetrap';
@ -111,11 +112,17 @@ describe('WorkItemSidebarWidget component', () => {
});
describe('when updating', () => {
it('renders Edit button as disabled', () => {
beforeEach(() => {
createComponent({ canUpdate: true, isUpdating: true });
});
it('renders Edit button as disabled', () => {
expect(findEditButton().props('disabled')).toBe(true);
});
it('shows loading icon', () => {
expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
});
});

View File

@ -7,5 +7,7 @@ RSpec.describe Types::EventType do
specify { expect(described_class).to require_graphql_authorizations(:read_event) }
specify { expect(described_class).to have_graphql_fields(:id, :author, :action, :created_at, :updated_at) }
specify do
expect(described_class).to have_graphql_fields(:id, :author, :action, :project, :target, :created_at, :updated_at)
end
end

View File

@ -0,0 +1,48 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GitlabSchema.types['ActivityStream'], feature_category: :user_profile do
  include GraphqlHelpers

  let_it_be(:current_user) { create(:user) }
  let_it_be(:project) { create(:project, :public) }

  before_all do
    project.add_developer(current_user)
  end

  specify { expect(described_class.graphql_name).to eq('ActivityStream') }

  specify { expect(described_class).to require_graphql_authorizations(:read_user_profile) }

  it 'exposes the expected fields' do
    expected_fields = %i[followed_users_activity]

    expect(described_class).to have_graphql_fields(*expected_fields)
  end

  describe "#followed_users_activity" do
    let_it_be(:followed_user) { create(:user) }
    let_it_be(:joined_project_event) { create(:event, :joined, project: project, author: followed_user) }
    let_it_be(:issue) { create(:issue, project: project) }
    let_it_be(:closed_issue_event) { create(:event, :closed, author: followed_user, project: project, target: issue) }

    # Arguments the resolver is expected to pass to UserRecentEventsFinder.
    let(:scope) { current_user.followees }
    let(:filter) { EventFilter.new('ALL') }
    let(:params) { { limit: 20 } }
    let(:field) { resolve_field(:followed_users_activity, current_user, ctx: { current_user: current_user }) }

    before do
      current_user.follow(followed_user)
    end

    it 'calls UserRecentEventsFinder' do
      expect_next_instance_of(UserRecentEventsFinder, current_user, scope, filter, params) do |finder|
        expect(finder).to receive(:execute).and_call_original
      end

      # Most recent first: the closed-issue event precedes the joined event.
      expect(field.items.length).to be(2)
      expect(field.items.first.action).to eq "closed"
      expect(field.items.second.action).to eq "joined"
    end
  end
end

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GitlabSchema.types['EventTarget'], feature_category: :user_profile do
  specify { expect(described_class.graphql_name).to eq('EventTarget') }

  it 'exposes all the existing event target types' do
    # The enum must stay in sync with EventFilter's filter list.
    expected = EventFilter.new('').filters.map(&:upcase) # varies between foss/ee
    expect(described_class.values.keys).to match_array(expected)
  end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Types::Users::EventTargetType, feature_category: :user_profile do
  it 'returns possible types' do
    expect(described_class.possible_types).to include(Types::IssueType, Types::MilestoneType,
      Types::MergeRequestType, Types::ProjectType,
      Types::SnippetType, Types::UserType, Types::Wikis::WikiPageType,
      Types::DesignManagement::DesignType, Types::Notes::NoteType)
  end

  describe '.resolve_type' do
    using RSpec::Parameterized::TableSyntax

    # Maps each supported event-target model (by factory) to its GraphQL type.
    where(:factory, :graphql_type) do
      :issue | Types::IssueType
      :milestone | Types::MilestoneType
      :merge_request | Types::MergeRequestType
      :note | Types::Notes::NoteType
      :project | Types::ProjectType
      :project_snippet | Types::SnippetType
      :user | Types::UserType
      :wiki_page_meta | Types::Wikis::WikiPageType
      :design | Types::DesignManagement::DesignType
    end

    with_them do
      it 'correctly maps type in object to GraphQL type' do
        expect(described_class.resolve_type(build(factory), {})).to eq(graphql_type)
      end
    end

    it 'raises an error if the type is not supported' do
      # Groups are not a valid event target, so resolution must fail loudly.
      expect do
        described_class.resolve_type(build(:group), {})
      end.to raise_error(RuntimeError, /Unsupported event target type/)
    end
  end
end

View File

@ -120,11 +120,17 @@ RSpec.describe Keeps::OverdueFinalizeBackgroundMigration, feature_category: :too
end
end
context 'when using multiple databases' do
before do
skip_if_shared_database(:ci)
end
context 'when schema is gitlab_ci' do
let(:gitlab_schema) { 'gitlab_ci' }
it 'returns the database name' do
expect(database_name).to eq(database_exists?(:ci) ? 'ci' : 'main')
expect(database_name).to eq('ci')
end
end
end
end

View File

@ -6453,16 +6453,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it_behaves_like 'webhook is added to execution list'
end
context 'when feature flag is disabled' do
let(:data) { { interval: :thirty_days } }
before do
stub_feature_flags(extended_expiry_webhook_execution_setting: false)
end
it_behaves_like 'webhook is added to execution list'
end
context 'when setting extended_prat_expiry_webhooks_execute is disabled' do
before do
project.update!(extended_prat_expiry_webhooks_execute: false)

View File

@ -0,0 +1,123 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'ActivityStream GraphQL Query', feature_category: :user_profile do
  include GraphqlHelpers

  let_it_be(:user) { create(:user) }
  let_it_be(:followed_user_1) { create(:user) }
  let_it_be(:followed_user_2) { create(:user) }
  let_it_be(:project) { create(:project, :public) }

  let(:graphql_response) { post_graphql(query, current_user: user) }
  let(:activity_stream) { graphql_data_at(:current_user, :activity, :followed_users_activity, :nodes) }

  # Requests one fragment per possible EventTarget member so every target
  # type in the union is exercised.
  let(:query) do
    <<~GRAPHQL
      query UserActivity {
        currentUser {
          activity {
            followedUsersActivity {
              nodes {
                author {
                  name
                }
                action
                project {
                  name
                }
                target {
                  ... on Design {
                    id
                  }
                  ... on Issue {
                    title
                  }
                  ... on Note {
                    id
                  }
                  ... on MergeRequest {
                    title
                  }
                  ... on Milestone {
                    title
                  }
                  ... on Project {
                    fullPath
                  }
                  ... on Snippet {
                    title
                  }
                  ... on UserCore {
                    username
                  }
                  ... on WikiPage {
                    title
                  }
                }
              }
            }
          }
        }
      }
    GRAPHQL
  end

  before do
    user.follow(followed_user_1)
    user.follow(followed_user_2)
  end

  context 'when there are no events in the activity stream' do
    it 'returns empty nodes array' do
      graphql_response

      expect(activity_stream).to eq([])
    end
  end

  context 'when there are events in the activity stream' do
    let_it_be(:joined_project_event) { create(:event, :joined, project: project, author: followed_user_1) }
    let_it_be(:issue) { create(:issue, project: project) }
    let_it_be(:closed_issue_event) { create(:event, :closed, author: followed_user_1, project: project, target: issue) }
    let_it_be(:left_event) { create(:event, :left, author: followed_user_2, target: project) }

    # Events come back newest-first; targets resolve through the EventTarget
    # union (nil when the event has no target, e.g. the joined event).
    it 'returns followed user\'s activity' do
      graphql_response

      expect(activity_stream).to eq(
        [
          {
            "action" => "LEFT",
            "author" => { "name" => followed_user_2.name },
            "project" => nil,
            "target" => { "fullPath" => project.full_path }
          },
          {
            "action" => "CLOSED",
            "author" => { "name" => followed_user_1.name },
            "project" => { "name" => project.name },
            "target" => { "title" => issue.title }
          },
          {
            "action" => "JOINED",
            "author" => { "name" => followed_user_1.name },
            "project" => { "name" => project.name },
            "target" => nil
          }
        ]
      )
    end
  end

  context 'when the activity_stream_graphql feature flag is disabled' do
    before do
      stub_feature_flags(activity_stream_graphql: false)
    end

    it 'returns `nil`' do
      graphql_response

      expect(activity_stream).to be_nil
    end
  end
end

View File

@ -3316,7 +3316,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :text_editors d
let_it_be(:parent_ref) { parent.to_reference(project) }
context 'on a work item' do
context 'when the parent reference is valid' do
context 'with a valid parent reference' do
let(:content) { "/set_parent #{parent_ref}" }
it 'returns success message' do
@ -3342,11 +3342,54 @@ RSpec.describe QuickActions::InterpretService, feature_category: :text_editors d
expect(updates).to be_empty
expect(message).to eq("This parent does not exist or you don't have sufficient permission.")
expect(task_work_item.reload.work_item_parent).to be_nil
end
end
context 'when the parent is already set to the same work item' do
let_it_be(:task_work_item_with_parent) do
create(:work_item, :task, project: project, work_item_parent: parent)
end
it 'does not assign the parent and returns an appropriate error' do
_, updates, message = service.execute(content, task_work_item_with_parent)
expect(updates).to be_empty
expect(message).to eq("Work item #{task_work_item_with_parent.to_reference} has already been added to " \
"parent #{parent.to_reference}.")
expect(task_work_item_with_parent.reload.work_item_parent).to eq parent
end
end
context 'when the child is not confidential but the parent is confidential' do
let_it_be(:confidential_parent) { create(:work_item, :issue, :confidential, project: project) }
let(:content) { "/set_parent #{confidential_parent.to_reference(project)}" }
it 'does not assign the parent and returns an appropriate error' do
_, updates, message = service.execute(content, task_work_item)
expect(updates).to be_empty
expect(message).to eq("Cannot assign a confidential parent to a non-confidential work item. Make the " \
"work item confidential and try again")
expect(task_work_item.reload.work_item_parent).to be_nil
end
end
context 'when the child and parent are incompatible types' do
let(:other_task_work_item) { create(:work_item, :task, project: project) }
let(:content) { "/set_parent #{other_task_work_item.to_reference(project)}" }
it 'does not assign the parent and returns an appropriate error' do
_, updates, message = service.execute(content, task_work_item)
expect(updates).to be_empty
expect(message).to eq("Cannot assign this work item type to parent type")
expect(task_work_item.reload.work_item_parent).to be_nil
end
end
end
context 'when the parent reference is invalid' do
context 'with an invalid parent reference' do
let(:content) { "/set_parent not_a_valid_parent" }
it 'does not assign the parent and returns an appropriate error' do

View File

@ -51,7 +51,7 @@ RSpec.describe Users::AutoBanService, feature_category: :instance_resiliency do
response = execute
expect(response[:status]).to eq(:error)
expect(response[:message]).to match('State cannot transition via "ban"')
expect(response[:message]).to match('You cannot ban blocked users.')
end
it 'does not modify the BannedUser record or user state' do
@ -76,7 +76,7 @@ RSpec.describe Users::AutoBanService, feature_category: :instance_resiliency do
end
it 'raises an error and does not ban the user', :aggregate_failures do
expect { execute! }.to raise_error(StateMachines::InvalidTransition)
expect { execute! }.to raise_error(described_class::Error)
.and not_change { Users::BannedUser.count }
.and not_change { user.state }
end

View File

@ -1,6 +1,10 @@
# frozen_string_literal: true
RSpec.shared_examples 'sets work item parent' do
after do
noteable.reload
end
it 'leaves the note empty' do
expect(execute(note)).to be_empty
end

View File

@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe Pages::DeactivateExpiredDeploymentsCronWorker, feature_category: :pages do
subject(:worker) { described_class.new }
let(:expired_pages_deployment) { create(:pages_deployment, expires_at: 3.minutes.ago) }
let(:not_yet_expired_pages_deployment) { create(:pages_deployment, expires_at: 1.hour.from_now) }
let(:never_expire_pages_deployment) { create(:pages_deployment, expires_at: nil) }
let!(:expired_pages_deployment) { create(:pages_deployment, expires_at: 3.minutes.ago) }
let!(:not_yet_expired_pages_deployment) { create(:pages_deployment, expires_at: 1.hour.from_now) }
let!(:never_expire_pages_deployment) { create(:pages_deployment, expires_at: nil) }
it 'deactivates all expired pages deployments' do
expect { worker.perform }
@ -15,4 +15,34 @@ RSpec.describe Pages::DeactivateExpiredDeploymentsCronWorker, feature_category:
.and not_change { not_yet_expired_pages_deployment.reload.active? }
.and not_change { never_expire_pages_deployment.reload.active? }
end
it 'logs extra metadata on done' do
expect(worker).to receive(:log_extra_metadata_on_done).with(:deactivate_expired_pages_deployments, {
deactivated_deployments: 1,
duration: be > 0
})
worker.perform
end
it 'uses the expected values for batching and limiting' do
expect(Pages::DeactivateExpiredDeploymentsCronWorker::MAX_NUM_DELETIONS).to be(10000)
expect(Pages::DeactivateExpiredDeploymentsCronWorker::BATCH_SIZE).to be(1000)
end
describe 'batching and limiting' do
before do
stub_const('Pages::DeactivateExpiredDeploymentsCronWorker::MAX_NUM_DELETIONS', 9)
stub_const('Pages::DeactivateExpiredDeploymentsCronWorker::BATCH_SIZE', 5)
11.times do # we already have 1 deployment from the outer scope
create(:pages_deployment, expires_at: 3.minutes.ago)
end
end
it 'processes a maximum number of deletions, but will complete the last batch of deletions' do
expect { worker.perform }
.to change { PagesDeployment.active.expired.count }.from(12).to(2)
end
end
end

View File

@ -266,29 +266,6 @@ RSpec.describe PersonalAccessTokens::ExpiringWorker, type: :worker, feature_cate
worker.perform
end
context 'when feature flag extended_expiry_webhook_execution_setting is disabled' do
before do
stub_feature_flags(extended_expiry_webhook_execution_setting: false)
end
it "does not call execute_web_hooks for interval 30 days" do
expiring_token.update!(expires_at: 30.days.from_now)
project_hook = create(:project_hook, project: project, resource_access_token_events: true)
expect(Gitlab::DataBuilder::ResourceAccessTokenPayload).not_to receive(:build)
expect(WebHookService)
.not_to receive(:new)
.with(
project_hook,
{},
'resource_access_token_hooks',
idempotency_key: anything
) { fake_wh_service }
worker.perform
end
end
context 'with multiple batches of tokens' do
let_it_be(:expiring_tokens) { create_list(:resource_access_token, 4, expires_at: 6.days.from_now) }

View File

@ -1417,7 +1417,8 @@
resolved "https://registry.yarnpkg.com/@gitlab/fonts/-/fonts-1.3.0.tgz#df89c1bb6714e4a8a5d3272568aa4de7fb337267"
integrity sha512-DoMUIN3DqjEn7wvcxBg/b7Ite5fTdF5EmuOZoBRo2j0UBGweDXmNBi+9HrTZs4cBU660dOxcf1hATFcG3npbPg==
"@gitlab/noop@^1.0.1":
"@gitlab/noop@^1.0.1", jackspeak@^3.1.2, "jackspeak@npm:@gitlab/noop@1.0.1":
name jackspeak
version "1.0.1"
resolved "https://registry.yarnpkg.com/@gitlab/noop/-/noop-1.0.1.tgz#71a831146ee02732b4a61d2d3c11204564753454"
integrity sha512-s++4wjMYeDvBp9IO59DBrWjy8SE/gFkjTDO5ck2W0S6Vv7OlqgErwL7pHngAnrSmTJAzyUG8wHGqo0ViS4jn5Q==
@ -1441,10 +1442,10 @@
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-3.123.0.tgz#1fa3b1a709755ff7c8ef67e18c0442101655ebf0"
integrity sha512-yjVn+utOTIKk8d9JlvGo6EgJ4TQ+CKpe3RddflAqtsQqQuL/2MlVdtaUePybxYzWIaumFuh5LouQ6BrWyw1niQ==
"@gitlab/ui@110.1.0":
version "110.1.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-110.1.0.tgz#5a38aafb92d67b589318c39f72b5ba622fa89b83"
integrity sha512-tCezdqWgSNKuksfvVfm8TWBSIbkuK0jhCoffFFKl3HzBf9FWnCqS5+XEHLU3nPttZBTi5T761BTNqqHx8SZUAg==
"@gitlab/ui@111.0.0":
version "111.0.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-111.0.0.tgz#b827f61df673074d1cb0d3d074314635bcb74aff"
integrity sha512-AynSxduL6i5xIMSysKZWhXaSXYkJGZlmCpPz2gWCBy0+IX0r8giXwHfhGqNJ/VYaZCpkTVYeY1gTiFguPlCsbg==
dependencies:
"@floating-ui/dom" "1.4.3"
echarts "^5.3.2"
@ -9487,11 +9488,6 @@ iterall@^1.2.1:
resolved "https://registry.yarnpkg.com/iterall/-/iterall-1.3.0.tgz#afcb08492e2915cbd8a0884eb93a8c94d0d72fea"
integrity sha512-QZ9qOMdF+QLHxy1QIpUHUU1D5pS2CG2P69LF6L6CPjPYA/XMOmKV3PZpawHoAjHNyB0swdVTRxdYT4tbBbxqwg==
jackspeak@^3.1.2, "jackspeak@npm:@gitlab/noop@1.0.1":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@gitlab/noop/-/noop-1.0.1.tgz#71a831146ee02732b4a61d2d3c11204564753454"
integrity sha512-s++4wjMYeDvBp9IO59DBrWjy8SE/gFkjTDO5ck2W0S6Vv7OlqgErwL7pHngAnrSmTJAzyUG8wHGqo0ViS4jn5Q==
jed@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/jed/-/jed-1.1.1.tgz#7a549bbd9ffe1585b0cd0a191e203055bee574b4"