Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-01-09 12:11:50 +00:00
parent 50b7d6cec7
commit 3e4f0c1745
44 changed files with 726 additions and 199 deletions

View File

@ -79,9 +79,7 @@ export default {
"
>
<template #link="{ content }">
<gl-link :href="absoluteUserPreferencesPath">
{{ content }}
</gl-link>
<gl-link :href="absoluteUserPreferencesPath">{{ content }}</gl-link>
</template>
</gl-sprintf>
</span>

View File

@ -27,6 +27,7 @@ module Ci
foreign_key: :commit_id,
partition_foreign_key: :partition_id,
inverse_of: :builds
belongs_to :project_mirror, primary_key: :project_id, foreign_key: :project_id, inverse_of: :builds
RUNNER_FEATURES = {
upload_multiple_artifacts: -> (build) { build.publishes_artifacts_reports? },

View File

@ -7,6 +7,7 @@ module Ci
include FromUnion
belongs_to :namespace
has_many :project_mirrors, primary_key: :namespace_id, foreign_key: :namespace_id, inverse_of: :namespace_mirror
scope :by_group_and_descendants, -> (id) do
where('traversal_ids @> ARRAY[?]::int[]', id)

View File

@ -7,6 +7,8 @@ module Ci
include FromUnion
belongs_to :project
belongs_to :namespace_mirror, primary_key: :namespace_id, foreign_key: :namespace_id, inverse_of: :project_mirrors
has_many :builds, primary_key: :project_id, foreign_key: :project_id, inverse_of: :project_mirror
scope :by_namespace_id, -> (namespace_id) { where(namespace_id: namespace_id) }
scope :by_project_id, -> (project_id) { where(project_id: project_id) }

View File

@ -7,26 +7,40 @@ module Integrations
included do
field :webhook,
help: -> { webhook_help },
description: -> do
Kernel.format(_("%{title} webhook (for example, `%{example}`)."), title: title, example: webhook_help)
end,
required: true,
if: -> { requires_webhook? }
field :username,
placeholder: 'GitLab-integration',
description: -> { Kernel.format(_("%{title} username."), title: title) },
if: -> { requires_webhook? }
field :channel,
description: -> { _('Default channel to use if no other channel is configured.') },
api_only: true
field :notify_only_broken_pipelines,
type: :checkbox,
section: Integration::SECTION_TYPE_CONFIGURATION,
description: -> { _('Send notifications for broken pipelines.') },
help: 'Do not send notifications for successful pipelines.'
field :branches_to_be_notified,
type: :select,
section: Integration::SECTION_TYPE_CONFIGURATION,
title: -> { s_('Integration|Branches for which notifications are to be sent') },
description: -> {
_('Branches to send notifications for. Valid options are `all`, `default`, `protected`, ' \
'and `default_and_protected`. The default value is `default`.')
},
choices: -> { branch_choices }
field :labels_to_be_notified,
section: Integration::SECTION_TYPE_CONFIGURATION,
description: -> { _('Labels to send notifications for. Leave blank to receive notifications for all events.') },
placeholder: '~backend,~frontend',
help: 'Send notifications for issue, merge request, and comment events with the listed labels only. ' \
'Leave blank to receive notifications for all events.'
@ -34,6 +48,10 @@ module Integrations
field :labels_to_be_notified_behavior,
type: :select,
section: Integration::SECTION_TYPE_CONFIGURATION,
description: -> {
_('Labels to be notified for. Valid options are `match_any` and `match_all`. ' \
'The default value is `match_any`.')
},
choices: [
['Match any of the labels', Integrations::BaseChatNotification::MATCH_ANY_LABEL],
['Match all of the labels', Integrations::BaseChatNotification::MATCH_ALL_LABELS]

View File

@ -27,7 +27,7 @@ module Integrations
end
def self.webhook_help
'http://mattermost.example.com/hooks/'
'http://mattermost.example.com/hooks/...'
end
override :configurable_channels?

View File

@ -18,7 +18,7 @@ module Integrations
end
def self.webhook_help
'https://hooks.slack.com/services/'
'https://hooks.slack.com/services/...'
end
private

View File

@ -4,6 +4,9 @@ class NamespaceSetting < ApplicationRecord
include CascadingNamespaceSettingAttribute
include Sanitizable
include ChronicDurationAttribute
include IgnorableColumns
ignore_column :project_import_level, remove_with: '16.10', remove_after: '2024-02-22'
cascading_attr :delayed_project_removal
cascading_attr :toggle_security_policy_custom_ci

View File

@ -3,6 +3,7 @@
class Route < MainClusterwide::ApplicationRecord
include CaseSensitivity
include Gitlab::SQL::Pattern
include EachBatch
belongs_to :source, polymorphic: true, inverse_of: :route # rubocop:disable Cop/PolymorphicAssociations
belongs_to :namespace, inverse_of: :namespace_route
@ -26,30 +27,39 @@ class Route < MainClusterwide::ApplicationRecord
def rename_descendants
return unless saved_change_to_path? || saved_change_to_name?
descendant_routes = self.class.inside_path(path_before_last_save)
if Feature.disabled?(:batch_route_updates, Feature.current_request, type: :gitlab_com_derisk)
descendant_routes = self.class.inside_path(path_before_last_save)
descendant_routes.each do |route|
attributes = {}
descendant_routes.each do |route|
attributes = {}
if saved_change_to_path? && route.path.present?
attributes[:path] = route.path.sub(path_before_last_save, path)
if saved_change_to_path? && route.path.present?
attributes[:path] = route.path.sub(path_before_last_save, path)
end
if saved_change_to_name? && name_before_last_save.present? && route.name.present?
attributes[:name] = route.name.sub(name_before_last_save, name)
end
next if attributes.empty?
old_path = route.path
# Callbacks must be run manually
route.update_columns(attributes.merge(updated_at: Time.current))
# We are not calling route.delete_conflicting_redirects here, in hopes
# of avoiding deadlocks. The parent (self, in this method) already
# called it, which deletes conflicts for all descendants.
route.create_redirect(old_path) if attributes[:path]
end
else
changes = {
path: { saved: saved_change_to_path?, old_value: path_before_last_save },
name: { saved: saved_change_to_name?, old_value: name_before_last_save }
}
if saved_change_to_name? && name_before_last_save.present? && route.name.present?
attributes[:name] = route.name.sub(name_before_last_save, name)
end
next if attributes.empty?
old_path = route.path
# Callbacks must be run manually
route.update_columns(attributes.merge(updated_at: Time.current))
# We are not calling route.delete_conflicting_redirects here, in hopes
# of avoiding deadlocks. The parent (self, in this method) already
# called it, which deletes conflicts for all descendants.
route.create_redirect(old_path) if attributes[:path]
Routes::RenameDescendantsService.new(self).execute(changes) # rubocop: disable CodeReuse/ServiceClass -- Need a service class to encapsulate all the logic.
end
end
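
The branch above is gated by the `batch_route_updates` feature flag, defined later in this commit. A hedged sketch of the equivalent positive check, mirroring the flag name, actor, and type used in the `Feature.disabled?` call above:

```ruby
# Sketch only: mirrors the gate in Route#rename_descendants above.
if Feature.enabled?(:batch_route_updates, Feature.current_request, type: :gitlab_com_derisk)
  # Batched path: descendants are renamed via Routes::RenameDescendantsService.
else
  # Legacy path: descendant routes are updated one row at a time, with callbacks run manually.
end
```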

View File

@ -71,7 +71,6 @@ module BulkImports
unless @remote_content_validated
validate_content_type
validate_content_length
@remote_content_validated = true
end

View File

@ -0,0 +1,135 @@
# frozen_string_literal: true
module Routes
class RenameDescendantsService
BATCH_SIZE = 100
class RouteChanges
attr_reader :saved_change_to_parent_path, :saved_change_to_parent_name, :old_path_of_parent, :old_name_of_parent
def initialize(changes)
path_details = changes.fetch(:path)
name_details = changes.fetch(:name)
@saved_change_to_parent_path = path_details.fetch(:saved)
@old_path_of_parent = path_details.fetch(:old_value)
@saved_change_to_parent_name = name_details.fetch(:saved)
@old_name_of_parent = name_details.fetch(:old_value)
end
end
def initialize(parent_route)
@parent_route = parent_route
@routes_to_update = []
@redirect_routes_to_insert = []
end
def execute(changes)
process_changes(changes)
update_routes_for_descendants
create_redirect_routes_for_descendants
end
private
def process_changes(changes)
changes = RouteChanges.new(changes)
saved_change_to_parent_path = changes.saved_change_to_parent_path
saved_change_to_parent_name = changes.saved_change_to_parent_name
return unless saved_change_to_parent_path || saved_change_to_parent_name
old_path_of_parent = changes.old_path_of_parent
old_name_of_parent = changes.old_name_of_parent
descendant_routes_inside(old_path_of_parent).each_batch(of: BATCH_SIZE) do |relation|
relation.each do |descendant_route|
attributes_to_update = {}
if saved_change_to_parent_path && descendant_route.path.present?
attributes_to_update[:path] = descendant_route.path.sub(
old_path_of_parent, current_path_of_parent
)
end
if saved_change_to_parent_name && old_name_of_parent.present? && descendant_route.name.present?
attributes_to_update[:name] = descendant_route.name.sub(
old_name_of_parent, current_name_of_parent
)
end
push_to_routes_data(descendant_route, attributes_to_update)
push_to_redirect_routes_data(descendant_route) if attributes_to_update[:path]
end
end
end
def push_to_routes_data(descendant_route, attributes_to_update)
return if attributes_to_update.empty?
# We merge updated attributes with all existing attributes of the `Route` record.
# This comprehensive attribute set is required for the initial attempt of `upsert_all` to function effectively.
# During the first phase (insertion attempt), `upsert_all` tries to insert new records into the database,
# necessitating the presence of all attributes, including NOT NULL attributes, to create new entries.
# Attributes like `source_id` and `source_type` are crucial, as they are NOT NULL attributes essential
# for record creation.
# In the event of conflicts (e.g., existing Route records with conflicting `id`s),
# `upsert_all` switches to an update operation for those specific conflicted records.
# This is how we update the `path` and/or `name` of multiple existing route records in one go.
@routes_to_update << descendant_route
.attributes.symbolize_keys
.merge(attributes_to_update)
end
def push_to_redirect_routes_data(descendant_route)
@redirect_routes_to_insert << {
source_id: descendant_route.source_id,
source_type: descendant_route.source_type,
path: descendant_route.path
}
end
def update_routes_for_descendants
return if @routes_to_update.blank?
@routes_to_update.each_slice(BATCH_SIZE) do |data|
# Utilizing `upsert_all` with `unique_by: :id` ensures that only updates occur,
# as the provided data contains attributes exclusively for existing `Route` records,
# identified by their unique `id`.
# This upsert operation is therefore guaranteed to execute only updates, never inserts.
Route.upsert_all(
data,
unique_by: :id,
update_only: [:path, :name], # on conflicts, we need to update only path/name.
record_timestamps: true # this makes sure that `updated_at` is updated.
)
end
end
def create_redirect_routes_for_descendants
return if @redirect_routes_to_insert.blank?
@redirect_routes_to_insert.each_slice(BATCH_SIZE) do |data|
RedirectRoute.insert_all(
data,
# We need to make sure no duplicates are inserted.
# We use the value of `lower(path)` to make this check,
# which is already a UNIQUE index on this table.
unique_by: :index_redirect_routes_on_path_unique_text_pattern_ops
)
end
end
def current_name_of_parent
@parent_route.name
end
def current_path_of_parent
@parent_route.path
end
def descendant_routes_inside(path)
Route.inside_path(path)
end
end
end
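
For context, a minimal invocation sketch of the service; the shape of the `changes` hash is taken directly from `Route#rename_descendants` above, and `route` is assumed to be a `Route` whose path or name was just renamed and saved:

```ruby
# Usage sketch (assumes `route` was just renamed and saved).
changes = {
  path: { saved: route.saved_change_to_path?, old_value: route.path_before_last_save },
  name: { saved: route.saved_change_to_name?, old_value: route.name_before_last_save }
}

Routes::RenameDescendantsService.new(route).execute(changes)
```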

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
module WorkItems
module Callbacks
class Description < Base
def before_update
params[:description] = nil if excluded_in_new_type?
return unless params.present? && params.key?(:description)
return unless has_permission?(:update_work_item)
work_item.description = params[:description]
work_item.assign_attributes(last_edited_at: Time.current, last_edited_by: current_user)
end
end
end
end
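
A minimal invocation sketch for the new callback class, mirroring the updated spec later in this commit; `work_item` and `current_user` are assumed to be an existing work item and a user allowed to update it:

```ruby
# Usage sketch: constructor arguments and #before_update mirror the spec below.
callback = WorkItems::Callbacks::Description.new(
  issuable: work_item,
  current_user: current_user,
  params: { description: 'Updated description' }
)
callback.before_update
work_item.description # => 'Updated description' (assigned, not yet persisted)
```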

View File

@ -1,19 +0,0 @@
# frozen_string_literal: true
module WorkItems
module Widgets
module DescriptionService
class UpdateService < WorkItems::Widgets::BaseService
def before_update_callback(params: {})
params[:description] = nil if new_type_excludes_widget?
return unless params.present? && params.key?(:description)
return unless has_permission?(:update_work_item)
work_item.description = params[:description]
work_item.assign_attributes(last_edited_at: Time.current, last_edited_by: current_user)
end
end
end
end
end

View File

@ -0,0 +1,9 @@
---
name: batch_route_updates
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/432065
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/139782
rollout_issue_url: https://gitlab.com/gitlab-com/gl-infra/production/-/issues/17360
milestone: '16.8'
group: group::tenant scale
type: gitlab_com_derisk
default_enabled: false

View File

@ -159,6 +159,13 @@ production: &base
# Number of seconds to wait for HTTP response after sending webhook HTTP POST request (default: 10)
# webhook_timeout: 10
## HTTP client settings
http_client:
# Filename of HTTP client pem
# tls_client_cert_file:
# PEM password (optional)
# tls_client_cert_password:
### GraphQL Settings
# Tells the rails application how long it has to complete a GraphQL request.
# We suggest this value to be higher than the database timeout value

View File

@ -24,3 +24,11 @@ Gitlab::HTTP_V2.configure do |config|
Gitlab::SilentMode.log_info(message: message, outbound_http_request_method: http_method)
end
end
if Gitlab.config.gitlab['http_client']
pem = File.read(Gitlab.config.gitlab['http_client']['tls_client_cert_file'])
password = Gitlab.config.gitlab['http_client']['tls_client_cert_password']
Gitlab::HTTP_V2::Client.pem(pem, password)
Gitlab::LegacyHTTP.pem(pem, password)
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddPreviousWeightToResourceWeightEvents < Gitlab::Database::Migration[2.2]
milestone '16.8'
def change
add_column :resource_weight_events, :previous_weight, :integer
end
end

View File

@ -0,0 +1 @@
b6c62664a45db815b8e2a924255214269b70e6af2bb0c909eee774f1d33c6397

View File

@ -23321,7 +23321,8 @@ CREATE TABLE resource_weight_events (
user_id bigint,
issue_id bigint NOT NULL,
weight integer,
created_at timestamp with time zone NOT NULL
created_at timestamp with time zone NOT NULL,
previous_weight integer
);
CREATE SEQUENCE resource_weight_events_id_seq

View File

@ -6,7 +6,12 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Labels administration **(FREE SELF)**
To manage labels for the GitLab instance, in the Admin Area, on the left sidebar, select **Labels**. For more details on how to manage labels, see [Labels](../user/project/labels.md).
To manage labels for the GitLab instance:
1. On the left sidebar, at the bottom, select **Admin Area**.
1. Select **Labels**.
For more details on how to manage labels, see [Labels](../user/project/labels.md).
Labels created in the Admin Area are automatically added to new projects.
They are not available to new groups.

View File

@ -8666,6 +8666,7 @@ Input type: `WorkItemUpdateInput`
| <a id="mutationworkitemupdateassigneeswidget"></a>`assigneesWidget` | [`WorkItemWidgetAssigneesInput`](#workitemwidgetassigneesinput) | Input for assignees widget. |
| <a id="mutationworkitemupdateawardemojiwidget"></a>`awardEmojiWidget` | [`WorkItemWidgetAwardEmojiUpdateInput`](#workitemwidgetawardemojiupdateinput) | Input for emoji reactions widget. |
| <a id="mutationworkitemupdateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationworkitemupdatecolorwidget"></a>`colorWidget` | [`WorkItemWidgetColorInput`](#workitemwidgetcolorinput) | Input for color widget. |
| <a id="mutationworkitemupdateconfidential"></a>`confidential` | [`Boolean`](#boolean) | Sets the work item confidentiality. |
| <a id="mutationworkitemupdatecurrentusertodoswidget"></a>`currentUserTodosWidget` | [`WorkItemWidgetCurrentUserTodosInput`](#workitemwidgetcurrentusertodosinput) | Input for to-dos widget. |
| <a id="mutationworkitemupdatedescriptionwidget"></a>`descriptionWidget` | [`WorkItemWidgetDescriptionInput`](#workitemwidgetdescriptioninput) | Input for description widget. |
@ -34647,6 +34648,14 @@ Attributes for value stream stage.
| <a id="workitemwidgetawardemojiupdateinputaction"></a>`action` | [`WorkItemAwardEmojiUpdateAction!`](#workitemawardemojiupdateaction) | Action for the update. |
| <a id="workitemwidgetawardemojiupdateinputname"></a>`name` | [`String!`](#string) | Emoji name. |
### `WorkItemWidgetColorInput`
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="workitemwidgetcolorinputcolor"></a>`color` | [`Color!`](#color) | Color of the work item. |
### `WorkItemWidgetCurrentUserTodosInput`
#### Arguments

View File

@ -478,7 +478,7 @@ Parameters:
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `webhook` | string | true | Discord webhook (for example, `https://discord.com/api/webhooks/`). |
| `webhook` | string | true | Discord webhook (for example, `https://discord.com/api/webhooks/...`). |
| `branches_to_be_notified` | string | false | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `confidential_issues_events` | boolean | false | Enable notifications for confidential issue events. |
| `confidential_issue_channel` | string | false | The webhook override to receive notifications for confidential issue events. |
@ -710,12 +710,14 @@ Parameters:
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `webhook` | string | true | `https://hooks.slack.com/services/...`. |
| `username` | string | false | username. |
| `channel` | string | false | Default channel to use if others are not configured. |
| `webhook` | string | true | Slack notifications webhook (for example, `https://hooks.slack.com/services/...`). |
| `username` | string | false | Slack notifications username. |
| `channel` | string | false | Default channel to use if no other channel is configured. |
| `notify_only_broken_pipelines` | boolean | false | Send notifications for broken pipelines. |
| `notify_only_default_branch` | boolean | false | **Deprecated:** This parameter has been replaced with `branches_to_be_notified`. |
| `branches_to_be_notified` | string | false | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `labels_to_be_notified` | string | false | Labels to send notifications for. Leave blank to receive notifications for all events. |
| `labels_to_be_notified_behavior` | string | false | Labels to be notified for. Valid options are `match_any` and `match_all`. The default value is `match_any`. |
| `alert_channel` | string | false | The name of the channel to receive notifications for alert events. |
| `alert_events` | boolean | false | Enable notifications for alert events. |
| `commit_events` | boolean | false | Enable notifications for commit events. |
@ -1047,12 +1049,14 @@ Parameters:
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `webhook` | string | true | The Mattermost webhook (for example, `http://mattermost_host/hooks/...`). |
| `username` | string | false | username. |
| `channel` | string | false | Default channel to use if others are not configured. |
| `webhook` | string | true | Mattermost notifications webhook (for example, `http://mattermost.example.com/hooks/...`). |
| `username` | string | false | Mattermost notifications username. |
| `channel` | string | false | Default channel to use if no other channel is configured. |
| `notify_only_broken_pipelines` | boolean | false | Send notifications for broken pipelines. |
| `notify_only_default_branch` | boolean | false | **Deprecated:** This parameter has been replaced with `branches_to_be_notified`. |
| `branches_to_be_notified` | string | false | Branches to send notifications for. Valid options are `all`, `default`, `protected`, and `default_and_protected`. The default value is `default`. |
| `labels_to_be_notified` | string | false | Labels to send notifications for. Leave blank to receive notifications for all events. |
| `labels_to_be_notified_behavior` | string | false | Labels to be notified for. Valid options are `match_any` and `match_all`. The default value is `match_any`. |
| `push_events` | boolean | false | Enable notifications for push events. |
| `issues_events` | boolean | false | Enable notifications for issue events. |
| `confidential_issues_events` | boolean | false | Enable notifications for confidential issue events. |

View File

@ -322,7 +322,7 @@ Jobs on project runners are not affected by the compute quota.
### GitLab SaaS usage notifications
On GitLab SaaS an email notification is sent to the namespace owners when:
On GitLab SaaS, an in-app banner is displayed and an email notification is sent to the namespace owners when:
- The remaining compute minutes is below 30% of the quota.
- The remaining compute minutes is below 5% of the quota.

View File

@ -2439,7 +2439,10 @@ The value given is added to the current time at which the response is validated.
::EndTabs
### Designate a unique attribute for the `uid`
### Designate a unique attribute for the `uid` (optional)
By default, the user's `uid` is set as the `NameID` attribute in the SAML response. To designate
a different attribute for the `uid`, you can set the `uid_attribute`.
Before setting the `uid` to a unique attribute, make sure that you have configured
the following attributes so your SAML users cannot change them:
@ -2450,10 +2453,7 @@ the following attributes so your SAML users cannot change them:
If users can change these attributes, they can sign in as other authorized users.
See your SAML IdP documentation for information on how to make these attributes
unchangeable.
By default, the `uid` is set as the `name_id` in the SAML response. To designate
a unique attribute for the `uid`, you can set the `uid_attribute`. In the following
example, the value of `uid` attribute in the SAML response is set as the `uid_attribute`.
In the following example, the value of the `uid` attribute in the SAML response is set as the `uid_attribute`.
::Tabs

View File

@ -711,7 +711,10 @@ To enable dependency scanning:
1. On the left sidebar, select **Search or go to** and find your project.
1. Select **Build > Pipeline editor**.
1. Copy and paste the following to the bottom of the `.gitlab-ci.yml` file:
1. If no `.gitlab-ci.yml` file exists, select **Configure pipeline**, then delete the example
content.
1. Copy and paste the following to the bottom of the `.gitlab-ci.yml` file. If an `include` line
already exists, add only the `template` line below it.
```yaml
include:
@ -720,14 +723,14 @@ To enable dependency scanning:
1. Select the **Validate** tab, then select **Validate pipeline**.
Continue if you see the message **Simulation completed successfully**. That indicates the file is
valid.
The message **Simulation completed successfully** confirms the file is valid.
1. Select the **Edit** tab.
1. Complete the fields. Do not use the default branch for the **Branch** field.
1. Select **Commit changes**.
1. Select **Code > Merge requests**.
1. Select the merge request just created.
1. Review the merge request, then select **Merge**.
1. Select the **Start a new merge request with these changes** checkbox, then select **Commit
changes**.
1. Complete the fields according to your standard workflow, then select **Create
merge request**.
1. Review and edit the merge request according to your standard workflow, then select **Merge**.
Pipelines now include a dependency scanning job.

View File

@ -111,6 +111,26 @@ Changing the SAML or SCIM configuration or provider can cause the following prob
the SCIM app.
1. Use the same SCIM API to update the SCIM `extern_uid` for the user on GitLab.com.
## The member's email address is not allowed for this group
SCIM provisioning may fail with HTTP status `412` and the following error message:
```plaintext
The member's email address is not allowed for this group. Check with your administrator.
```
This error occurs when both of the following are true:
- [Restrict group access by domain](../access_and_permissions.md) is configured
for the group.
- The user account being provisioned has an email domain that is not allowed.
To resolve this issue, you can do either of the following:
- Add the user account's email domain to the list of allowed domains.
- Disable the [Restrict group access by domain](../access_and_permissions.md)
feature by removing all domains.
## Search Rails logs for SCIM requests **(PREMIUM SAAS)**
GitLab.com administrators can search for SCIM requests in the `api_json.log` using the `pubsub-rails-inf-gprd-*` index in

View File

@ -20,12 +20,14 @@ provided that the agent is properly configured for remote development.
## Remote development settings
| Setting | Description |
|-------------------------------------------------------|:---------------------------------------------------------------------|
| [`enabled`](#enabled) | Indicates whether remote development is enabled for the GitLab agent |
| [`dns_zone`](#dns_zone) | DNS zone where workspaces are available |
| [`gitlab_workspaces_proxy`](#gitlab_workspaces_proxy) | Namespace where [`gitlab-workspaces-proxy`](https://gitlab.com/gitlab-org/remote-development/gitlab-workspaces-proxy) is installed |
| [`network_policy`](#network_policy) | Firewall rules for workspaces |
| Setting | Description |
|-------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------|
| [`enabled`](#enabled) | Indicates whether remote development is enabled for the GitLab agent. |
| [`dns_zone`](#dns_zone) | DNS zone where workspaces are available. |
| [`gitlab_workspaces_proxy`](#gitlab_workspaces_proxy) | Namespace where [`gitlab-workspaces-proxy`](https://gitlab.com/gitlab-org/remote-development/gitlab-workspaces-proxy) is installed. |
| [`network_policy`](#network_policy) | Firewall rules for workspaces. |
| [`default_resources_per_workspace_container`](#default_resources_per_workspace_container) | Default requests and limits for CPU and memory per workspace container. |
| [`max_resources_per_workspace`](#max_resources_per_workspace) | Maximum requests and limits for CPU and memory per workspace. |
NOTE:
If a setting has an invalid value, it's not possible to update any setting until you fix that value.
@ -142,6 +144,64 @@ In this example, traffic from the workspace is allowed if:
- The destination IP is any range except `10.0.0.0/8`, `172.16.0.0/12`, or `192.168.0.0/16`.
- The destination IP is `172.16.123.1/32`.
### `default_resources_per_workspace_container`
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/11625) in GitLab 16.8.
Use this setting to define the default [requests and limits](https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/#requests-and-limits)
for CPU and memory per workspace container.
Any resources you define in your [devfile](index.md#devfile) override this setting.
For `default_resources_per_workspace_container`, `requests` and `limits` are required.
For more information about possible CPU and memory values, see [Resource units in Kubernetes](https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/#resource-units-in-kubernetes).
When you change any of these values, existing workspaces restart immediately for the changes to take effect.
**Example configuration:**
```yaml
remote_development:
default_resources_per_workspace_container:
requests:
cpu: "0.5"
memory: "512Mi"
limits:
cpu: "1"
memory: "1Gi"
```
### `max_resources_per_workspace`
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/11625) in GitLab 16.8.
Use this setting to define the maximum [requests and limits](https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/#requests-and-limits)
for CPU and memory per workspace.
For `max_resources_per_workspace`, `requests` and `limits` are required.
For more information about possible CPU and memory values, see:
- [Resource units in Kubernetes](https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/#resource-units-in-kubernetes)
- [Resource quotas](https://kubernetes.io/docs/concepts/policy/resource-quotas/)
When you change any of these values, existing workspaces restart immediately for the changes to take effect.
Workspaces fail when they exceed the values you set for `requests` and `limits`.
**Example configuration:**
```yaml
remote_development:
max_resources_per_workspace:
requests:
cpu: "1"
memory: "1Gi"
limits:
cpu: "2"
memory: "2Gi"
```
The maximum resources you define must include any resources required for init containers
to perform bootstrapping operations such as cloning the project repository.
## Configuring user access with remote development
You can configure the `user_access` module to access the connected Kubernetes cluster with your GitLab credentials.

View File

@ -7,35 +7,6 @@ module API
# The data structures inside this model are returned using class methods,
# allowing EE to extend them where necessary.
module IntegrationsHelpers
def self.chat_notification_settings
[
{
required: true,
name: :webhook,
type: String,
desc: 'The chat webhook'
},
{
required: false,
name: :username,
type: String,
desc: 'The chat username'
},
{
required: false,
name: :channel,
type: String,
desc: 'The default chat channel'
},
{
required: false,
name: :branches_to_be_notified,
type: String,
desc: 'Branches for which notifications are to be sent'
}
].freeze
end
def self.chat_notification_flags
[
{
@ -533,8 +504,7 @@ module API
'youtrack' => ::Integrations::Youtrack.api_fields,
'clickup' => ::Integrations::Clickup.api_fields,
'slack' => [
chat_notification_settings,
chat_notification_flags,
::Integrations::Slack.api_fields,
chat_notification_channels
].flatten,
'microsoft-teams' => [
@ -553,8 +523,7 @@ module API
chat_notification_flags
].flatten,
'mattermost' => [
chat_notification_settings,
chat_notification_flags,
::Integrations::Mattermost.api_fields,
chat_notification_channels
].flatten,
'teamcity' => [

View File

@ -38,20 +38,14 @@ module BulkImports
raise_error 'Invalid downloaded file'
end
def validate_content_length
validate_size!(response_headers['content-length'])
end
def validate_size!(size)
if size.blank?
raise_error 'Missing content-length header'
elsif file_size_limit > 0 && size.to_i > file_size_limit
raise_error format(
"File size %{size} exceeds limit of %{limit}",
size: ActiveSupport::NumberHelper.number_to_human_size(size),
limit: ActiveSupport::NumberHelper.number_to_human_size(file_size_limit)
)
end
return unless file_size_limit > 0 && size.to_i > file_size_limit
raise_error format(
"File size %{size} exceeds limit of %{limit}",
size: ActiveSupport::NumberHelper.number_to_human_size(size),
limit: ActiveSupport::NumberHelper.number_to_human_size(file_size_limit)
)
end
end
end

View File

@ -11,7 +11,7 @@ module Gitlab
UnsupportedAttachmentError = Class.new(StandardError)
FILENAME_SIZE_LIMIT = 255 # chars before the extension
DEFAULT_FILE_SIZE_LIMIT = 25.megabytes
DEFAULT_FILE_SIZE_LIMIT = Gitlab::CurrentSettings.max_attachment_size.megabytes
TMP_DIR = File.join(Dir.tmpdir, 'github_attachments').freeze
attr_reader :file_url, :filename, :file_size_limit, :options
@ -26,7 +26,6 @@ module Gitlab
end
def perform
validate_content_length
validate_filepath
download_url = get_assets_download_redirection_url
@ -46,11 +45,6 @@ module Gitlab
raise DownloadError, message
end
def response_headers
@response_headers ||=
Gitlab::HTTP.perform_request(Net::HTTP::Head, file_url, {}).headers
end
# Github /assets redirection link will redirect to aws which has its own authorization.
# Keeping our bearer token will cause request rejection
# eg. Only one auth mechanism allowed; only the X-Amz-Algorithm query parameter,
@ -78,7 +72,19 @@ module Gitlab
def download_from(url)
file = File.open(filepath, 'wb')
Gitlab::HTTP.perform_request(Net::HTTP::Get, url, stream_body: true) { |batch| file.write(batch) }
Gitlab::HTTP.perform_request(Net::HTTP::Get, url, stream_body: true) do |chunk|
next if [301, 302, 303, 307, 308].include?(chunk.code)
raise DownloadError, "Error downloading file from #{url}. Error code: #{chunk.code}" if chunk.code != 200
file.write(chunk)
validate_size!(file.size)
rescue Gitlab::GithubImport::AttachmentsDownloader::DownloadError
delete
raise
end
file
end
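
A minimal usage sketch of the downloader after this change, mirroring the updated spec below; the URL is illustrative and the limit mirrors the old `DEFAULT_FILE_SIZE_LIMIT`:

```ruby
# Usage sketch: constructor arguments mirror the spec below; values are illustrative.
downloader = Gitlab::GithubImport::AttachmentsDownloader.new(
  'https://example.com/avatar.png',
  file_size_limit: 25.megabytes
)
file = downloader.perform # raises DownloadError for non-200, non-redirect chunks
```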

View File

@ -1265,6 +1265,12 @@ msgstr ""
msgid "%{time} UTC"
msgstr ""
msgid "%{title} username."
msgstr ""
msgid "%{title} webhook (for example, `%{example}`)."
msgstr ""
msgid "%{totalCpu} (%{freeSpacePercentage}%{percentSymbol} free)"
msgstr ""
@ -16267,6 +16273,9 @@ msgstr ""
msgid "Default branch and protected branches"
msgstr ""
msgid "Default channel to use if no other channel is configured."
msgstr ""
msgid "Default description template for issues"
msgstr ""
@ -28319,6 +28328,12 @@ msgstr ""
msgid "Labels can be applied to issues, merge requests, and epics. Group labels are available for any project within the group."
msgstr ""
msgid "Labels to be notified for. Valid options are `match_any` and `match_all`. The default value is `match_any`."
msgstr ""
msgid "Labels to send notifications for. Leave blank to receive notifications for all events."
msgstr ""
msgid "Labels with no issues in this iteration:"
msgstr ""

View File

@ -8,27 +8,12 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader, feature_category: :i
let_it_be(:file_url) { 'https://example.com/avatar.png' }
let_it_be(:content_type) { 'application/octet-stream' }
let(:content_length) { 1000 }
let(:chunk_double) { instance_double(HTTParty::ResponseFragment, code: 200) }
let(:headers_double) do
instance_double(
HTTParty::Response,
code: 200,
success?: true,
parsed_response: {},
headers: {
'content-length' => content_length,
'content-type' => content_type
}
)
end
describe '#perform' do
before do
allow(Gitlab::HTTP).to receive(:perform_request)
.with(Net::HTTP::Get, file_url, stream_body: true).and_yield(chunk_double)
allow(Gitlab::HTTP).to receive(:perform_request)
.with(Net::HTTP::Head, file_url, {}).and_return(headers_double)
end
context 'when file valid' do
@ -71,12 +56,12 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader, feature_category: :i
end
context 'when file size exceeds limit' do
let(:content_length) { 26.megabytes }
subject(:downloader) { described_class.new(file_url, file_size_limit: 1.byte) }
it 'raises expected exception' do
expect { downloader.perform }.to raise_exception(
Gitlab::GithubImport::AttachmentsDownloader::DownloadError,
'File size 26 MiB exceeds limit of 25 MiB'
'File size 57 B exceeds limit of 1 B'
)
end
end
@ -94,6 +79,33 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader, feature_category: :i
end
end
context 'when chunk download returns a redirect' do
let(:chunk_double) { instance_double(HTTParty::ResponseFragment, code: 302, http_response: {}) }
it 'skips the redirect and continues' do
allow(Gitlab::HTTP).to receive(:perform_request)
.with(Net::HTTP::Get, file_url, stream_body: true).and_yield(chunk_double)
file = downloader.perform
expect(File.exist?(file.path)).to eq(true)
end
end
context 'when chunk download returns an error' do
let(:chunk_double) { instance_double(HTTParty::ResponseFragment, code: 500, http_response: {}) }
it 'raises expected exception' do
allow(Gitlab::HTTP).to receive(:perform_request)
.with(Net::HTTP::Get, file_url, stream_body: true).and_yield(chunk_double)
expect { downloader.perform }.to raise_exception(
Gitlab::GithubImport::AttachmentsDownloader::DownloadError,
"Error downloading file from #{file_url}. Error code: #{chunk_double.code}"
)
end
end
context 'when attachment is behind a github asset endpoint' do
let(:file_url) { "https://github.com/test/project/assets/142635249/4b9f9c90-f060-4845-97cf-b24c558bcb11" }
let(:redirect_url) { "https://github-production-user-asset-6210df.s3.amazonaws.com/142635249/740edb05293e.jpg" }

View File

@ -439,6 +439,7 @@ builds:
- dast_scanner_profile
- job_annotations
- job_artifacts_annotations
- project_mirror
bridges:
- user
- pipeline

View File

@ -68,10 +68,17 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) }
describe 'associations' do
it { is_expected.to belong_to(:project_mirror) }
it 'has a bidirectional relationship with projects' do
expect(described_class.reflect_on_association(:project).has_inverse?).to eq(:builds)
expect(Project.reflect_on_association(:builds).has_inverse?).to eq(:project)
end
it 'has a bidirectional relationship with project mirror' do
expect(described_class.reflect_on_association(:project_mirror).has_inverse?).to eq(:builds)
expect(Ci::ProjectMirror.reflect_on_association(:builds).has_inverse?).to eq(:project_mirror)
end
end
describe 'callbacks' do

View File

@ -21,6 +21,16 @@ RSpec.describe Ci::NamespaceMirror do
)
end
describe 'associations' do
it { is_expected.to belong_to(:namespace) }
it { is_expected.to have_many(:project_mirrors) }
it 'has a bidirectional relationship with project mirrors' do
expect(described_class.reflect_on_association(:project_mirrors).has_inverse?).to eq(:namespace_mirror)
expect(Ci::ProjectMirror.reflect_on_association(:namespace_mirror).has_inverse?).to eq(:project_mirrors)
end
end
context 'scopes' do
describe '.by_group_and_descendants' do
let_it_be(:another_group) { create(:group) }

View File

@ -83,7 +83,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
let(:ignore_accessors) do
%i[type namespace lock_version target_url base_tags trace_sections
commit_id deployment erased_by_id project_id
commit_id deployment erased_by_id project_id project_mirror
runner_id tag_taggings taggings tags trigger_request_id
user_id auto_canceled_by_id retried failure_reason
sourced_pipelines sourced_pipeline artifacts_file_store artifacts_metadata_store

View File

@ -8,6 +8,22 @@ RSpec.describe Ci::ProjectMirror do
let!(:project) { create(:project, namespace: group2) }
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:namespace_mirror) }
it { is_expected.to have_many(:builds) }
it 'has a bidirectional relationship with namespace mirror' do
expect(described_class.reflect_on_association(:namespace_mirror).has_inverse?).to eq(:project_mirrors)
expect(Ci::NamespaceMirror.reflect_on_association(:project_mirrors).has_inverse?).to eq(:namespace_mirror)
end
it 'has a bidirectional relationship with builds' do
expect(described_class.reflect_on_association(:builds).has_inverse?).to eq(:project_mirror)
expect(Ci::Build.reflect_on_association(:project_mirror).has_inverse?).to eq(:builds)
end
end
context 'scopes' do
let_it_be(:another_project) { create(:project, namespace: group1) }

View File

@ -74,7 +74,7 @@ RSpec.describe Route do
let!(:another_group) { create(:group, path: 'gittlab', name: 'gitllab') }
let!(:another_group_nested) { create(:group, path: 'git_lab', name: 'git_lab', parent: another_group) }
context 'path update' do
shared_examples_for 'path update' do
context 'when route name is set' do
before do
route.update!(path: 'bar')
@ -116,7 +116,7 @@ RSpec.describe Route do
end
end
context 'name update' do
shared_examples_for 'name update' do
it 'updates children routes with new path' do
route.update!(name: 'bar')
@ -134,6 +134,18 @@ RSpec.describe Route do
.to change { route.name }.from(nil).to('bar')
end
end
it_behaves_like 'path update'
it_behaves_like 'name update'
context 'when the feature flag `batch_route_updates` is turned off' do
before do
stub_feature_flags(batch_route_updates: false)
end
it_behaves_like 'path update'
it_behaves_like 'name update'
end
end
describe '#create_redirect_for_old_path' do

View File

@ -123,7 +123,7 @@ RSpec.describe Integrations::FieldEntity, feature_category: :integrations do
name: 'webhook',
title: nil,
placeholder: nil,
help: 'http://mattermost.example.com/hooks/',
help: 'http://mattermost.example.com/hooks/...',
required: true,
choices: nil,
value: '************',

View File

@ -12,11 +12,9 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
let_it_be(:filename) { 'file_download_service_spec' }
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:filepath) { File.join(tmpdir, filename) }
let_it_be(:content_length) { 1000 }
let(:headers) do
{
'content-length' => content_length,
'content-type' => content_type,
'content-disposition' => content_disposition
}
@ -102,51 +100,27 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
end
end
context 'when content-length is not valid' do
context 'when content-length exceeds limit' do
context 'when file size is not valid' do
context 'when size exceeds limit' do
let(:file_size_limit) { 1 }
it 'raises an error' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'File size 1000 B exceeds limit of 1 B'
)
end
end
context 'when content-length is missing' do
let(:content_length) { nil }
it 'raises an error' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'Missing content-length header'
'File size 100 B exceeds limit of 1 B'
)
end
end
end
context 'when content-length is equals the file size limit' do
let(:content_length) { 150 }
let(:file_size_limit) { 150 }
context 'when size equals the file size limit' do
let(:file_size_limit) { 100 }
it 'does not raise an error' do
expect { subject.execute }.not_to raise_error
end
end
context 'when partially downloaded file exceeds limit' do
let(:content_length) { 151 }
let(:file_size_limit) { 150 }
it 'raises an error' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'File size 151 B exceeds limit of 150 B'
)
end
end
context 'when chunk code is not 200' do
let(:chunk_code) { 404 }
@ -203,25 +177,23 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
context 'on redirect chunk' do
let(:chunk_code) { 303 }
it 'does not run content type & length validations' do
it 'does not run content type validation' do
expect(service).not_to receive(:validate_content_type)
expect(service).not_to receive(:validate_content_length)
service.execute
end
end
context 'when there is one data chunk' do
it 'validates content type & length' do
it 'validates content type' do
expect(service).to receive(:validate_content_type)
expect(service).to receive(:validate_content_length)
service.execute
end
end
context 'when there are multiple data chunks' do
it 'validates content type & length only once' do
it 'validates content type only once' do
data_chunk = double(
'data chunk',
size: 1000,
@ -237,7 +209,6 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
end
expect(service).to receive(:validate_content_type).once
expect(service).to receive(:validate_content_length).once
service.execute
end

View File

@ -0,0 +1,208 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Routes::RenameDescendantsService, feature_category: :groups_and_projects do
let_it_be(:parent_group) { create(:group, name: 'old-name', path: 'old-path') }
let_it_be(:parent_route) { parent_group.route }
let_it_be(:subgroups) { create_list(:group, 4, parent: parent_group) }
let_it_be(:subgroup_projects) { subgroups.map { |subgroup| create(:project, group: subgroup) } }
let(:subgroup_routes) { Route.for_routable(subgroups) }
let(:subgroup_projects_routes) { Route.for_routable(subgroup_projects) }
let(:subgroup_routes_with_old_path) { subgroup_routes.where('path LIKE ?', '%old-path%') }
let(:subgroup_projects_routes_with_old_path) { subgroup_projects_routes.where('path LIKE ?', '%old-path%') }
let(:subgroup_routes_with_new_path) { subgroup_routes.where('path LIKE ?', '%new-path%') }
let(:subgroup_projects_routes_with_new_path) { subgroup_projects_routes.where('path LIKE ?', '%new-path%') }
let(:subgroup_routes_with_old_name) { subgroup_routes.where('name LIKE ?', '%old-name%') }
let(:subgroup_projects_routes_with_old_name) { subgroup_projects_routes.where('name LIKE ?', '%old-name%') }
let(:subgroup_routes_with_new_name) { subgroup_routes.where('name LIKE ?', '%new-name%') }
let(:subgroup_projects_routes_with_new_name) { subgroup_projects_routes.where('name LIKE ?', '%new-name%') }
describe '#execute' do
shared_examples_for 'descendant paths are updated' do
it do
expect { execute }.to change {
subgroup_routes_with_old_path.size
}.from(4).to(0).and change {
subgroup_projects_routes_with_old_path.size
}.from(4).to(0).and change {
subgroup_routes_with_new_path.size
}.from(0).to(4).and change {
subgroup_projects_routes_with_new_path.size
}.from(0).to(4)
end
end
shared_examples_for 'descendant paths are not updated' do
it do
expect { execute }.to change {
subgroup_routes_with_old_path.size
}.by(0).and change {
subgroup_projects_routes_with_old_path.size
}.by(0).and change {
subgroup_routes_with_new_path.size
}.by(0).and change {
subgroup_projects_routes_with_new_path.size
}.by(0)
end
end
shared_examples_for 'descendant names are updated' do
it do
expect { execute }.to change {
subgroup_routes_with_old_name.size
}.from(4).to(0).and change {
subgroup_projects_routes_with_old_name.size
}.from(4).to(0).and change {
subgroup_routes_with_new_name.size
}.from(0).to(4).and change {
subgroup_projects_routes_with_new_name.size
}.from(0).to(4)
end
end
shared_examples_for 'descendant names are not updated' do
it do
expect { execute }.to change {
subgroup_routes_with_old_name.size
}.by(0).and change {
subgroup_projects_routes_with_old_name.size
}.by(0).and change {
subgroup_routes_with_new_name.size
}.by(0).and change {
subgroup_projects_routes_with_new_name.size
}.by(0)
end
end
shared_examples_for 'creates redirect_routes for all descendants' do
let(:subgroup_redirect_routes) { RedirectRoute.where(source: subgroups) }
let(:subgroup_projects_redirect_routes) { RedirectRoute.where(source: subgroup_projects) }
it do
expect { execute }.to change {
subgroup_redirect_routes.where('path LIKE ?', '%old-path%').size
}.from(0).to(4).and change {
subgroup_projects_redirect_routes.where('path LIKE ?', '%old-path%').size
}.from(0).to(4)
end
end
shared_examples_for 'does not create any redirect_routes' do
it do
expect { execute }.not_to change { RedirectRoute.count }
end
end
subject(:execute) do
described_class.new(parent_route).execute(changes)
end
before do
parent_route.name = 'new-name'
parent_route.path = 'new-path'
end
context 'on updating both name and path' do
let!(:changes) do
{
path: { saved: true, old_value: 'old-path' },
name: { saved: true, old_value: 'old-name' }
}
end
it_behaves_like 'descendant paths are updated'
it_behaves_like 'descendant names are updated'
it_behaves_like 'creates redirect_routes for all descendants'
end
context 'on updating only path' do
let!(:changes) do
{
path: { saved: true, old_value: 'old-path' },
name: { saved: false, old_value: 'old-name' }
}
end
it_behaves_like 'descendant paths are updated'
it_behaves_like 'descendant names are not updated'
it_behaves_like 'creates redirect_routes for all descendants'
end
context 'on updating only name' do
let!(:changes) do
{
path: { saved: false, old_value: 'old-path' },
name: { saved: true, old_value: 'old-name' }
}
end
it_behaves_like 'descendant paths are not updated'
it_behaves_like 'descendant names are updated'
it_behaves_like 'does not create any redirect_routes'
end
context 'on not updating both path and name' do
let!(:changes) do
{
path: { saved: false, old_value: 'old-path' },
name: { saved: false, old_value: 'old-name' }
}
end
it_behaves_like 'descendant paths are not updated'
it_behaves_like 'descendant names are not updated'
it_behaves_like 'does not create any redirect_routes'
end
context 'when `changes` are not in the expected format' do
let!(:changes) do
{
not_path: { saved: false, old_value: 'old-path' },
name: { saved: true, old_value: 'old-name' }
}
end
it 'errors out' do
expect { execute }.to raise_error(KeyError)
end
end
context 'for batching' do
before do
stub_const("#{described_class.name}::BATCH_SIZE", 2)
end
let!(:changes) do
{
path: { saved: true, old_value: 'old-path' },
name: { saved: true, old_value: 'old-name' }
}
end
it 'bulk updates and bulk inserts records in batches' do
query_recorder = ActiveRecord::QueryRecorder.new do
execute
end
# This group has 8 descendants:
# 4 subgroups, with 1 project in each subgroup == total of 8.
# With a batch size of 2, that is
# 4 queries to update `routes` and 4 queries to insert `redirect_routes`
update_routes_queries = query_recorder.log.grep(
/INSERT INTO "routes" .* ON CONFLICT \("id"\) DO UPDATE SET/
)
insert_redirect_routes_queries = query_recorder.log.grep(
/INSERT INTO "redirect_routes" .* ON CONFLICT \(lower\(\(path\)::text\) varchar_pattern_ops\) DO NOTHING/
)
expect(update_routes_queries.count).to eq(4)
expect(insert_redirect_routes_queries.count).to eq(4)
end
end
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_category: :portfolio_management do
RSpec.describe WorkItems::Callbacks::Description, feature_category: :portfolio_management do
let_it_be(:random_user) { create(:user) }
let_it_be(:author) { create(:user) }
let_it_be(:guest) { create(:user) }
@ -22,12 +22,10 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca
)
end
let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Description) } }
describe '#update' do
let(:service) { described_class.new(widget: widget, current_user: current_user) }
let(:service) { described_class.new(issuable: work_item, current_user: current_user, params: params) }
subject(:before_update_callback) { service.before_update_callback(params: params) }
subject(:before_update_callback) { service.before_update }
shared_examples 'sets work item description' do
it 'correctly sets work item description value' do
@ -59,7 +57,7 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca
context 'when user is a project reporter' do
let(:current_user) { reporter }
before do
before_all do
project.add_reporter(reporter)
end
@ -91,7 +89,7 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca
let(:params) { {} }
before do
allow(service).to receive(:new_type_excludes_widget?).and_return(true)
allow(service).to receive(:excluded_in_new_type?).and_return(true)
work_item.update!(description: 'test')
end
@ -108,7 +106,7 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca
context 'when user is a project guest' do
let(:current_user) { guest }
before do
before_all do
project.add_guest(guest)
end

View File

@ -191,14 +191,14 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
let(:supported_widgets) do
[
{ klass: WorkItems::Widgets::DescriptionService::UpdateService, callback: :before_update_callback, params: { description: 'foo' } },
{ klass: WorkItems::Callbacks::Description, callback: :before_update },
{ klass: WorkItems::Widgets::HierarchyService::UpdateService, callback: :before_update_in_transaction, params: { parent: parent } }
]
end
end
context 'when updating widgets' do
let(:widget_service_class) { WorkItems::Widgets::DescriptionService::UpdateService }
let(:widget_service_class) { WorkItems::Callbacks::Description }
let(:widget_params) { { description_widget: { description: 'changed' } } }
context 'when widget service is not present' do
@ -215,8 +215,8 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
before do
allow_next_instance_of(widget_service_class) do |instance|
allow(instance)
.to receive(:before_update_callback)
.with(params: { description: 'changed' }).and_return(nil)
.to receive(:before_update)
.and_return(nil)
end
end
@ -269,7 +269,10 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
expect(service).to receive(:update).and_call_original
expect(service).not_to receive(:execute_widgets).with(callback: :update, widget_params: widget_params)
expect { update_work_item }.not_to change(work_item, :description)
expect do
update_work_item
work_item.reload
end.not_to change(work_item, :description)
end
end
end

View File

@ -4,7 +4,11 @@ RSpec.shared_examples_for 'work item widgetable service' do
it 'executes callbacks for expected widgets' do
supported_widgets.each do |widget|
expect_next_instance_of(widget[:klass]) do |widget_instance|
expect(widget_instance).to receive(widget[:callback]).with(params: widget[:params])
if widget[:params].present?
expect(widget_instance).to receive(widget[:callback]).with(params: widget[:params])
else
expect(widget_instance).to receive(widget[:callback])
end
end
end