Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-04-19 18:08:48 +00:00
parent b6d63c915a
commit bb915e6375
95 changed files with 748 additions and 352 deletions

View File

@@ -1445,3 +1445,19 @@ ee/lib/ee/api/entities/project.rb @gitlab-org/manage/manage-workspace/backend-ap
[Manage::Foundations]
/lib/sidebars/ @gitlab-org/manage/foundations/engineering
/ee/lib/sidebars/ @gitlab-org/manage/foundations/engineering
# JiHu GitLab rules. See https://gitlab.com/gitlab-jh/gitlab-jh-enablement/-/issues/213#note_1024367528
[JH Frontend] @jihulab/maintainers/frontend
/jh/app/assets/
/jh/*.scss
/jh/*.js
/jh/*.vue
[JH Rails Backend] @jihulab/maintainers/rails-backend
/jh/*.rb
/jh/*.rake
/jh/qa/ @jihulab/maintainers/quality
[JH Technical Writer] @jihulab/maintainers/technical-writer
/jh/doc/

View File

@@ -841,7 +841,7 @@ Layout/EmptyLineAfterMagicComment:
- 'spec/services/wikis/create_attachment_service_spec.rb'
- 'spec/support/fips.rb'
- 'spec/support/generate-seed-repo-rb'
- 'spec/support/graphql/fake_query_type.rb'
- 'spec/support/helpers/graphql/fake_query_type.rb'
- 'spec/support/helpers/fake_webauthn_device.rb'
- 'spec/support/helpers/features/access_token_helpers.rb'
- 'spec/support/helpers/features/iteration_helpers.rb'

View File

@@ -1 +1 @@
24badf7502c1864dc59e4f19bbebe53a1d36b638
5660b156be328c7be18df892ccb7149fea21e491

View File

@@ -8,7 +8,7 @@ import ServiceDeskSetting from './service_desk_setting.vue';
export default {
customEmailHelpPath: helpPagePath('/user/project/service_desk.html', {
anchor: 'using-a-custom-email-address',
anchor: 'use-a-custom-email-address',
}),
components: {
GlAlert,

View File

@@ -102,12 +102,12 @@ export default {
},
emailSuffixHelpUrl() {
return helpPagePath('user/project/service_desk.html', {
anchor: 'configuring-a-custom-email-address-suffix',
anchor: 'configure-a-custom-email-address-suffix',
});
},
customEmailAddressHelpUrl() {
return helpPagePath('user/project/service_desk.html', {
anchor: 'using-a-custom-email-address',
anchor: 'use-a-custom-email-address',
});
},
},

View File

@@ -25,6 +25,9 @@ export default {
showBlobFilter() {
return this.currentScope === SCOPE_BLOB;
},
showOldNavigation() {
return Boolean(this.currentScope);
},
},
};
</script>

View File

@@ -27,7 +27,7 @@ export default {
},
},
computed: {
...mapState(['query']),
...mapState(['query', 'useNewNavigation']),
...mapGetters(['queryLanguageFilters']),
dataFilters() {
return Object.values(this.filtersData?.filters || []);
@@ -69,7 +69,7 @@ export default {
<template>
<div class="gl-mx-5">
<h5 class="gl-mt-0">{{ filtersData.header }}</h5>
<h5 class="gl-mt-0" :class="{ 'gl-font-sm': useNewNavigation }">{{ filtersData.header }}</h5>
<gl-form-checkbox-group v-model="selectedFilter">
<gl-form-checkbox
v-for="f in dataFilters"

View File

@@ -1,5 +1,7 @@
<script>
import { mapState } from 'vuex';
import { confidentialFilterData } from '../constants/confidential_filter_data';
import { HR_DEFAULT_CLASSES } from '../constants';
import RadioFilter from './radio_filter.vue';
export default {
@@ -7,13 +9,17 @@ export default {
components: {
RadioFilter,
},
computed: {
...mapState(['useNewNavigation']),
},
confidentialFilterData,
HR_DEFAULT_CLASSES,
};
</script>
<template>
<div>
<radio-filter class="gl-px-5" :filter-data="$options.confidentialFilterData" />
<hr class="gl-my-5 gl-mx-5 gl-border-gray-100" />
<hr v-if="!useNewNavigation" :class="$options.HR_DEFAULT_CLASSES" />
</div>
</template>

View File

@@ -38,7 +38,7 @@ export default {
reset: s__('GlobalSearch|Reset filters'),
},
computed: {
...mapState(['aggregations', 'sidebarDirty']),
...mapState(['aggregations', 'sidebarDirty', 'useNewNavigation']),
...mapGetters([
'languageAggregationBuckets',
'currentUrlQueryHasLanguageFilters',
@@ -117,7 +117,7 @@ export default {
class="gl-pt-5 gl-md-pt-0 language-filter-checkbox"
@submit.prevent="submitQuery"
>
<hr :class="dividerClasses" />
<hr v-if="!useNewNavigation" :class="dividerClasses" />
<div
v-if="!aggregations.error"
class="gl-overflow-x-hidden gl-overflow-y-auto"
@@ -147,7 +147,7 @@ export default {
</gl-button>
</div>
<div v-if="!aggregations.error">
<hr :class="$options.HR_DEFAULT_CLASSES" />
<hr v-if="!useNewNavigation" :class="$options.HR_DEFAULT_CLASSES" />
<div
class="gl-display-flex gl-align-items-center gl-justify-content-space-between gl-mt-4 gl-mx-5"
>

View File

@@ -16,7 +16,7 @@ export default {
},
},
computed: {
...mapState(['query']),
...mapState(['query', 'useNewNavigation']),
...mapGetters(['currentScope']),
ANY() {
return this.filterData.filters.ANY;
@@ -56,7 +56,7 @@ export default {
<template>
<div>
<h5 class="gl-mt-0">{{ filterData.header }}</h5>
<h5 class="gl-mt-0" :class="{ 'gl-font-sm': useNewNavigation }">{{ filterData.header }}</h5>
<gl-form-radio-group v-model="selectedFilter">
<gl-form-radio v-for="f in filtersArray" :key="f.value" :value="f.value">
{{ radioLabel(f) }}

View File

@@ -1,6 +1,7 @@
<script>
import { GlButton, GlLink } from '@gitlab/ui';
import { mapActions, mapState, mapGetters } from 'vuex';
import { HR_DEFAULT_CLASSES } from '../constants/index';
import { confidentialFilterData } from '../constants/confidential_filter_data';
import { stateFilterData } from '../constants/state_filter_data';
import ConfidentialityFilter from './confidentiality_filter.vue';
@@ -15,7 +16,7 @@ export default {
ConfidentialityFilter,
},
computed: {
...mapState(['urlQuery', 'sidebarDirty']),
...mapState(['urlQuery', 'sidebarDirty', 'useNewNavigation']),
...mapGetters(['currentScope']),
showReset() {
return this.urlQuery.state || this.urlQuery.confidential;
@@ -26,6 +27,9 @@ export default {
showStatusFilter() {
return Object.values(stateFilterData.scopes).includes(this.currentScope);
},
hrClasses() {
return [...HR_DEFAULT_CLASSES, 'gl-display-none', 'gl-md-display-block'];
},
},
methods: {
...mapActions(['applyQuery', 'resetQuery']),
@@ -35,7 +39,7 @@ export default {
<template>
<form class="gl-pt-5 gl-md-pt-0" @submit.prevent="applyQuery">
<hr class="gl-my-5 gl-mx-5 gl-border-gray-100 gl-display-none gl-md-display-block" />
<hr v-if="!useNewNavigation" :class="hrClasses" />
<status-filter v-if="showStatusFilter" />
<confidentiality-filter v-if="showConfidentialityFilter" />
<div class="gl-display-flex gl-align-items-center gl-mt-4 gl-px-5">

View File

@@ -1,5 +1,7 @@
<script>
import { mapState } from 'vuex';
import { stateFilterData } from '../constants/state_filter_data';
import { HR_DEFAULT_CLASSES } from '../constants';
import RadioFilter from './radio_filter.vue';
export default {
@@ -7,13 +9,17 @@ export default {
components: {
RadioFilter,
},
computed: {
...mapState(['useNewNavigation']),
},
stateFilterData,
HR_DEFAULT_CLASSES,
};
</script>
<template>
<div>
<radio-filter class="gl-px-5" :filter-data="$options.stateFilterData" />
<hr class="gl-my-5 gl-mx-5 gl-border-gray-100" />
<hr v-if="!useNewNavigation" :class="$options.HR_DEFAULT_CLASSES" />
</div>
</template>

View File

@@ -9,7 +9,7 @@ $notification-box-shadow-color: rgba(0, 0, 0, 0.25);
&.sticky {
position: sticky;
top: $flash-container-top;
top: $calc-application-header-height;
z-index: 251;
.flash-alert,
@@ -114,17 +114,3 @@ $notification-box-shadow-color: rgba(0, 0, 0, 0.25);
left: -50%;
}
}
.with-system-header .flash-container.sticky {
top: $flash-container-top + $system-header-height;
}
.with-performance-bar {
.flash-container.sticky {
top: $flash-container-top + $performance-bar-height;
}
&.with-system-header .flash-container.sticky {
top: $flash-container-top + $performance-bar-height + $system-header-height;
}
}

View File

@@ -482,7 +482,6 @@ $system-header-height: 16px;
$system-footer-height: $system-header-height;
$mr-review-bar-height: calc(2rem + 13px);
$flash-height: 52px;
$flash-container-top: 48px;
$context-header-height: 60px;
$top-bar-height: 48px;
$home-panel-title-row-height: 64px;

View File

@@ -672,6 +672,7 @@ body.navless {
:root {
--performance-bar-height: 0px;
--system-header-height: 0px;
--top-bar-height: 0px;
--system-footer-height: 0px;
--mr-review-bar-height: 0px;
}
@@ -702,7 +703,11 @@ hr {
}
.flash-container.sticky {
position: sticky;
top: 48px;
top: calc(
var(--header-height, 48px) +
calc(var(--system-header-height) + var(--performance-bar-height)) +
var(--top-bar-height)
);
z-index: 251;
}
.flash-container.flash-container-page {

View File

@@ -34,25 +34,30 @@ module Resolvers
.where(runner_id: runner_ids)
.pluck(:runner_id, :project_id)
project_ids = plucked_runner_and_project_ids.collect { |_runner_id, project_id| project_id }.uniq
unique_project_ids = plucked_runner_and_project_ids.collect { |_runner_id, project_id| project_id }.uniq
projects = ProjectsFinder
.new(current_user: current_user,
params: project_finder_params(args),
project_ids_relation: project_ids)
project_ids_relation: unique_project_ids)
.execute
projects = apply_lookahead(projects)
Preloaders::ProjectPolicyPreloader.new(projects, current_user).execute
sorted_project_ids = projects.map(&:id)
projects_by_id = projects.index_by(&:id)
# In plucked_runner_and_project_ids, first() represents the runner ID, and second() the project ID,
# so let's group the project IDs by runner ID
runner_project_ids_by_runner_id =
project_ids_by_runner_id =
plucked_runner_and_project_ids
.group_by(&:first)
.transform_values { |values| values.map(&:second).filter_map { |project_id| projects_by_id[project_id] } }
.transform_values { |runner_id_and_project_id| runner_id_and_project_id.map(&:second) }
# Reorder the project IDs according to the order in sorted_project_ids
sorted_project_ids_by_runner_id =
project_ids_by_runner_id.transform_values { |project_ids| sorted_project_ids.intersection(project_ids) }
runner_ids.each do |runner_id|
runner_projects = runner_project_ids_by_runner_id[runner_id] || []
runner_project_ids = sorted_project_ids_by_runner_id[runner_id] || []
runner_projects = runner_project_ids.map { |id| projects_by_id[id] }
loader.call(runner_id, runner_projects)
end
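The grouping and reordering above can be pictured with toy data (the IDs below are made up; this is an illustrative sketch, not part of the resolver): project IDs are grouped per runner, then intersected with the finder's sorted ID list so each runner's projects come back in that sort order.

```ruby
# Illustrative sketch only — toy [runner_id, project_id] pairs.
plucked_runner_and_project_ids = [[1, 10], [1, 12], [2, 11]]
sorted_project_ids = [12, 11, 10] # order produced by the sorted ProjectsFinder relation

project_ids_by_runner_id =
  plucked_runner_and_project_ids
    .group_by(&:first)
    .transform_values { |pairs| pairs.map(&:last) }
# => {1 => [10, 12], 2 => [11]}

sorted_project_ids_by_runner_id =
  project_ids_by_runner_id.transform_values { |ids| sorted_project_ids.intersection(ids) }
# => {1 => [12, 10], 2 => [11]}
```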

View File

@@ -40,6 +40,7 @@ module ResolvesMergeRequests
def preloads
{
assignees: [:assignees],
award_emoji: { award_emoji: [:awardable] },
reviewers: [:reviewers],
participants: MergeRequest.participant_includes,
author: [:author],

View File

@@ -219,6 +219,10 @@ module Types
field :timelogs, Types::TimelogType.connection_type, null: false,
description: 'Timelogs on the merge request.'
field :award_emoji, Types::AwardEmojis::AwardEmojiType.connection_type,
null: true,
description: 'List of award emojis associated with the merge request.'
markdown_field :title_html, null: true
markdown_field :description_html, null: true
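A sketch of a GraphQL query that exercises the new `awardEmoji` connection (the project path and merge request IID are placeholders), along the lines of the request spec added later in this commit:

```ruby
# Illustrative query only; fullPath and iid values are placeholders.
query = <<~GRAPHQL
  {
    project(fullPath: "my-group/my-project") {
      mergeRequest(iid: "1") {
        awardEmoji {
          nodes {
            name
            user { username }
          }
        }
      }
    }
  }
GRAPHQL
```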

View File

@@ -5,7 +5,9 @@
- elsif @search_objects.blank?
= render partial: "search/results/empty"
- else
.gl-md-pl-5
- statusBarClass = !show_super_sidebar? ? 'gl-md-pl-5' : ''
.section{ class: statusBarClass }
- if @scope == 'commits'
%ul.content-list.commit-list
= render partial: "search/results/commit", collection: @search_objects

View File

@@ -1,5 +1,7 @@
- return unless @search_service_presenter.show_results_status?
.gl-md-pl-5
- statusBarClass = !show_super_sidebar? ? 'gl-md-pl-5' : ''
.section{ class: statusBarClass }
.search-results-status
.gl-display-flex.gl-flex-direction-column
.gl-p-5.gl-display-flex

View File

@@ -0,0 +1,10 @@
---
table_name: audit_events_instance_external_audit_event_destinations
classes:
- AuditEvents::InstanceExternalAuditEventDestination
feature_categories:
- audit_events
description: Stores external destination urls for instance level audit events.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/116909
milestone: '15.11'
gitlab_schema: gitlab_main

View File

@@ -0,0 +1,14 @@
# frozen_string_literal: true
class CreateInstanceExternalAuditEventDestinations < Gitlab::Database::Migration[2.1]
enable_lock_retries!
def change
create_table :audit_events_instance_external_audit_event_destinations do |t|
t.timestamps_with_timezone null: false
t.text :destination_url, null: false, limit: 255 # rubocop:disable Migration/AddLimitToTextColumns
t.binary :encrypted_verification_token, null: false
t.binary :encrypted_verification_token_iv, null: false
end
end
end

View File

@@ -0,0 +1 @@
95bad3e986fc44b20460c3e88b5195be1aa461e4a1e7d8427e9f731bca090f5c

View File

@@ -12177,6 +12177,25 @@ CREATE SEQUENCE audit_events_id_seq
ALTER SEQUENCE audit_events_id_seq OWNED BY audit_events.id;
CREATE TABLE audit_events_instance_external_audit_event_destinations (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
destination_url text NOT NULL,
encrypted_verification_token bytea NOT NULL,
encrypted_verification_token_iv bytea NOT NULL,
CONSTRAINT check_4dc67167ce CHECK ((char_length(destination_url) <= 255))
);
CREATE SEQUENCE audit_events_instance_external_audit_event_destinations_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE audit_events_instance_external_audit_event_destinations_id_seq OWNED BY audit_events_instance_external_audit_event_destinations.id;
CREATE TABLE audit_events_streaming_event_type_filters (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@@ -24674,6 +24693,8 @@ ALTER TABLE ONLY audit_events ALTER COLUMN id SET DEFAULT nextval('audit_events_
ALTER TABLE ONLY audit_events_external_audit_event_destinations ALTER COLUMN id SET DEFAULT nextval('audit_events_external_audit_event_destinations_id_seq'::regclass);
ALTER TABLE ONLY audit_events_instance_external_audit_event_destinations ALTER COLUMN id SET DEFAULT nextval('audit_events_instance_external_audit_event_destinations_id_seq'::regclass);
ALTER TABLE ONLY audit_events_streaming_event_type_filters ALTER COLUMN id SET DEFAULT nextval('audit_events_streaming_event_type_filters_id_seq'::regclass);
ALTER TABLE ONLY audit_events_streaming_headers ALTER COLUMN id SET DEFAULT nextval('audit_events_streaming_headers_id_seq'::regclass);
@@ -26425,6 +26446,9 @@ ALTER TABLE ONLY atlassian_identities
ALTER TABLE ONLY audit_events_external_audit_event_destinations
ADD CONSTRAINT audit_events_external_audit_event_destinations_pkey PRIMARY KEY (id);
ALTER TABLE ONLY audit_events_instance_external_audit_event_destinations
ADD CONSTRAINT audit_events_instance_external_audit_event_destinations_pkey PRIMARY KEY (id);
ALTER TABLE ONLY audit_events
ADD CONSTRAINT audit_events_pkey PRIMARY KEY (id, created_at);

View File

@@ -12,7 +12,7 @@ The following are Service Desk email-related Rake tasks.
## Secrets
GitLab can use [Service Desk email](../../user/project/service_desk.md#configuring-a-custom-mailbox) secrets read from an encrypted file instead of storing them in plaintext in the file system. The following Rake tasks are provided for updating the contents of the encrypted file.
GitLab can use [Service Desk email](../../user/project/service_desk.md#configure-a-custom-mailbox) secrets read from an encrypted file instead of storing them in plaintext in the file system. The following Rake tasks are provided for updating the contents of the encrypted file.
### Show secret

View File

@@ -16269,6 +16269,7 @@ Defines which user roles, users, or groups can merge into a protected branch.
| <a id="mergerequestautomergeenabled"></a>`autoMergeEnabled` | [`Boolean!`](#boolean) | Indicates if auto merge is enabled for the merge request. |
| <a id="mergerequestautomergestrategy"></a>`autoMergeStrategy` | [`String`](#string) | Selected auto merge strategy. |
| <a id="mergerequestavailableautomergestrategies"></a>`availableAutoMergeStrategies` | [`[String!]`](#string) | Array of available auto merge strategies. |
| <a id="mergerequestawardemoji"></a>`awardEmoji` | [`AwardEmojiConnection`](#awardemojiconnection) | List of award emojis associated with the merge request. (see [Connections](#connections)) |
| <a id="mergerequestcommenters"></a>`commenters` | [`UserCoreConnection!`](#usercoreconnection) | All commenters on this noteable. (see [Connections](#connections)) |
| <a id="mergerequestcommitcount"></a>`commitCount` | [`Int`](#int) | Number of commits in the merge request. |
| <a id="mergerequestcommits"></a>`commits` | [`CommitConnection`](#commitconnection) | Merge request commits. (see [Connections](#connections)) |

View File

@@ -86,12 +86,13 @@ To disable these notifications:
### Custom additional text in deactivation emails **(FREE SELF)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/355964) in GitLab 15.9 [with a flag](../../../administration/feature_flags.md) named `deactivation_email_additional_text`. Disabled by default.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/355964) in GitLab 15.9 [with a flag](../../../administration/feature_flags.md) named `deactivation_email_additional_text`. Disabled by default.
> - [Enabled on self-managed and GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/111882) in GitLab 15.9.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available, ask an
administrator to [enable the feature flag](../../../administration/feature_flags.md) named
`deactivation_email_additional_text`. On GitLab.com, this feature is unavailable.
On self-managed GitLab, by default this feature is available. To hide the feature, ask an
administrator to [disable the feature flag](../../../administration/feature_flags.md) named
`deactivation_email_additional_text`.
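For example, on a self-managed instance an administrator can toggle the flag from the Rails console (a sketch; the flag name comes from this section, and the console approach is described on the feature flags page linked above):

```ruby
# Rails console sketch: enable or hide the additional deactivation email text.
Feature.enable(:deactivation_email_additional_text)
Feature.disable(:deactivation_email_additional_text)
```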
You can add additional text at the bottom of the email that GitLab sends to users when their account
is deactivated. This email text is separate from the [custom additional text](#custom-additional-text)

View File

@@ -158,7 +158,7 @@ see [Email notification for unknown sign-ins](../../profile/notifications.md#not
All users that are not logged in are redirected to the page represented by the configured
**Home page URL** if value is not empty.
All users are redirected to the page represented by the configured **After sign-out path**
All users are redirected to the page represented by the configured **Sign-out page URL**
after sign out if value is not empty.
In the **Sign-in restrictions** section, scroll to the **Sign-in text** field. You can add a

View File

@@ -65,7 +65,7 @@ The IP addresses for `mg.gitlab.com` are subject to change at any time.
On GitLab.com, there's a mailbox configured for Service Desk with the email address:
`contact-project+%{key}@incoming.gitlab.com`. To use this mailbox, configure the
[custom suffix](../project/service_desk.md#configuring-a-custom-email-address-suffix) in project
[custom suffix](../project/service_desk.md#configure-a-custom-email-address-suffix) in project
settings.
## Backups

View File

@@ -62,7 +62,7 @@ To enable Service Desk in your project:
1. Expand **Service Desk**.
1. Turn on the **Activate Service Desk** toggle.
1. Optional. Complete the fields.
- [Add a suffix](#configuring-a-custom-email-address-suffix) to your Service Desk email address.
- [Add a suffix](#configure-a-custom-email-address-suffix) to your Service Desk email address.
- If the list below **Template to append to all Service Desk issues** is empty, create a
[description template](description_templates.md) in your repository.
1. Select **Save changes**.
@@ -178,25 +178,27 @@ To edit the custom email display name:
1. Below **Email display name**, enter a new name.
1. Select **Save changes**.
### Using a custom email address **(FREE SELF)**
### Use a custom email address **(FREE SELF)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/2201) in GitLab 13.0.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/284656) in GitLab 13.8.
It is possible to customize the email address used by Service Desk. To do this, you must configure
a [custom mailbox](#configuring-a-custom-mailbox). If you want you can also configure a
[custom suffix](#configuring-a-custom-email-address-suffix).
You can use a custom email address with Service Desk.
#### Configuring a custom mailbox
To do this, you must configure
a [custom mailbox](#configure-a-custom-mailbox). You can also configure a
[custom suffix](#configure-a-custom-email-address-suffix).
#### Configure a custom mailbox
NOTE:
On GitLab.com a custom mailbox is already configured with `contact-project+%{key}@incoming.gitlab.com` as the email address, you can still configure the
[custom suffix](#configuring-a-custom-email-address-suffix) in project settings.
[custom suffix](#configure-a-custom-email-address-suffix) in project settings.
Service Desk uses the [incoming email](../../administration/incoming_email.md)
configuration by default. However, by using the `service_desk_email` configuration,
you can customize the mailbox used by Service Desk. This allows you to have
a separate email address for Service Desk by also configuring a [custom suffix](#configuring-a-custom-email-address-suffix)
a separate email address for Service Desk by also configuring a [custom suffix](#configure-a-custom-email-address-suffix)
in project settings.
Prerequisites:
@@ -392,7 +394,8 @@ read about [Helm IMAP secrets](https://docs.gitlab.com/charts/installation/secre
> - Alternative Azure deployments [introduced](https://gitlab.com/gitlab-org/omnibus-gitlab/-/merge_requests/5978) in GitLab 14.9.
Service Desk can be configured to read Microsoft Exchange Online mailboxes with the Microsoft
Graph API instead of IMAP. Follow the [documentation in the incoming email section for setting up an OAuth 2.0 application for Microsoft Graph](../../administration/incoming_email.md#microsoft-graph).
Graph API instead of IMAP. Set up an OAuth 2.0 application for Microsoft Graph
[the same way as for incoming email](../../administration/incoming_email.md#microsoft-graph).
- Example for Omnibus GitLab installations:
@@ -411,32 +414,44 @@ Graph API instead of IMAP. Follow the [documentation in the incoming email secti
}
```
For Microsoft Cloud for US Government or [other Azure deployments](https://learn.microsoft.com/en-us/graph/deployments), configure the `azure_ad_endpoint` and `graph_endpoint` settings.
For Microsoft Cloud for US Government or [other Azure deployments](https://learn.microsoft.com/en-us/graph/deployments),
configure the `azure_ad_endpoint` and `graph_endpoint` settings.
- Example for Microsoft Cloud for US Government:
```ruby
gitlab_rails['service_desk_email_inbox_options'] = {
'azure_ad_endpoint': 'https://login.microsoftonline.us',
'graph_endpoint': 'https://graph.microsoft.us',
'tenant_id': '<YOUR-TENANT-ID>',
'client_id': '<YOUR-CLIENT-ID>',
'client_secret': '<YOUR-CLIENT-SECRET>',
'poll_interval': 60 # Optional
}
gitlab_rails['service_desk_email_inbox_options'] = {
'azure_ad_endpoint': 'https://login.microsoftonline.us',
'graph_endpoint': 'https://graph.microsoft.us',
'tenant_id': '<YOUR-TENANT-ID>',
'client_id': '<YOUR-CLIENT-ID>',
'client_secret': '<YOUR-CLIENT-SECRET>',
'poll_interval': 60 # Optional
}
```
The Microsoft Graph API is not yet supported in source installations. See [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/326169) for more details.
The Microsoft Graph API is not yet supported in source installations.
For more information, see [issue 326169](https://gitlab.com/gitlab-org/gitlab/-/issues/326169).
#### Configuring a custom email address suffix
#### Configure a custom email address suffix
You can set a custom suffix in your project's Service Desk settings after you have configured a [custom mailbox](#configuring-a-custom-mailbox).
It can contain only lowercase letters (`a-z`), numbers (`0-9`), or underscores (`_`).
You can set a custom suffix in your project's Service Desk settings.
A suffix can contain only lowercase letters (`a-z`), numbers (`0-9`), or underscores (`_`).
When configured, the custom suffix creates a new Service Desk email address, consisting of the
`service_desk_email_address` setting and a key of the format: `<project_full_path>-<custom_suffix>`
Prerequisites:
- You must have configured a [custom mailbox](#configure-a-custom-mailbox).
1. On the top bar, select **Main menu > Projects** and find your project.
1. On the left sidebar, select **Settings > General**.
1. Expand **Service Desk**.
1. Below **Email address suffix**, enter the suffix to use.
1. Select **Save changes**.
For example, suppose the `mygroup/myproject` project Service Desk settings has the following configured:
- Email address suffix is set to `support`.
@@ -445,13 +460,23 @@ For example, suppose the `mygroup/myproject` project Service Desk settings has t
The Service Desk email address for this project is: `contact+mygroup-myproject-support@example.com`.
The [incoming email](../../administration/incoming_email.md) address still works.
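As an illustrative sketch (not GitLab source code), the example address is assembled from the `service_desk_email_address` value and the project key:

```ruby
# Illustrative only: values mirror the example above.
service_desk_email_address = 'contact+%{key}@example.com'
project_full_path = 'mygroup/myproject'
custom_suffix = 'support'

key = "#{project_full_path.tr('/', '-')}-#{custom_suffix}"
service_desk_email_address % { key: key }
# => "contact+mygroup-myproject-support@example.com"
```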
If you don't configure the custom suffix, the default project identification is used for identifying the project. You can see that email address in the project settings.
If you don't configure a custom suffix, the default project identification is used for identifying
the project.
## Use Service Desk
You can use Service Desk to [create an issue](#as-an-end-user-issue-creator) or [respond to one](#as-a-responder-to-the-issue).
In these issues, you can also see our friendly neighborhood [Support Bot](#support-bot-user).
### View Service Desk email address
To check what the Service Desk email address is for your project:
1. On the top bar, select **Main menu > Projects** and find your project.
1. On the left sidebar, select **Issues > Service Desk**.
The email address is available at the top of the issue list.
### As an end user (issue creator)
> Support for additional email headers [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/346600) in GitLab 14.6. In earlier versions, the Service Desk email address had to be in the "To" field.

View File

@@ -33683,9 +33683,6 @@ msgstr ""
msgid "ProductAnalytics|Instrument your application"
msgstr ""
msgid "ProductAnalytics|Instrumentation details"
msgstr ""
msgid "ProductAnalytics|Loading instance"
msgstr ""

View File

@@ -218,7 +218,7 @@
"yaml": "^2.0.0-10"
},
"devDependencies": {
"@gitlab/eslint-plugin": "18.3.2",
"@gitlab/eslint-plugin": "19.0.0",
"@gitlab/stylelint-config": "4.1.0",
"@graphql-eslint/eslint-plugin": "3.18.0",
"@testing-library/dom": "^7.16.2",

View File

@@ -23,10 +23,17 @@ gitlab:
gitaly:
resources:
requests:
cpu: 1200m
# Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22gitaly%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
#
# Data over the 3 months (2023-02-24 - 2023-04-19)
#
# The average is around 0.100vCPU (setting request accordingly)
#
# The maximum CPU usage was 0.196vCPU (setting limit accordingly)
cpu: 150m
memory: 600Mi
limits:
cpu: 1800m
cpu: 300m
memory: 1000Mi
persistence:
size: 10Gi
@@ -42,10 +49,17 @@ gitlab:
gitlab-shell:
resources:
requests:
cpu: 500m
# Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22gitlab-shell%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
#
# Data over the 3 months (2023-02-24 - 2023-04-19)
#
# The average is around 0.01vCPU (setting request accordingly)
#
# The maximum CPU usage was 0.127vCPU (setting limit accordingly)
cpu: 10m
memory: 100Mi
limits:
cpu: 750m
cpu: 150m
memory: 150Mi
minReplicas: 1
maxReplicas: 1
@@ -87,7 +101,14 @@ gitlab:
toolbox:
resources:
requests:
cpu: 300m
# Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22toolbox%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
#
# Data over the 3 months (2023-02-24 - 2023-04-19)
#
# The average seems to be around 0.100vCPU
#
# The maximum CPU usage was 0.250vCPU (setting limit accordingly)
cpu: 150m
memory: 1927Mi
limits:
cpu: 450m
@@ -124,10 +145,18 @@ gitlab:
gitlab-runner:
resources:
requests:
cpu: 675m
# Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3Dmonitoring.regex.full_match(%5C%22.*gitlab-runner$%5C%22)%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
#
# Data over the 3 months (2023-02-24 - 2023-04-19)
#
# The average seems to be around 0.01vCPU
#
# The maximum CPU usage was 0.015vCPU (setting limit accordingly)
cpu: 10m
memory: 100Mi
limits:
cpu: 1015m
# In case somebody would like to use runners in review-apps, we set the limit higher than the requests
cpu: 400m
memory: 150Mi
nodeSelector:
preemptible: "true"
@@ -153,10 +182,17 @@ nginx-ingress:
ssl-ciphers: ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4
resources:
requests:
cpu: 300m
# Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22controller%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
#
# Data over the 3 months (2023-02-24 - 2023-04-19)
#
# The average seems to be around 0.02vCPU
#
# The maximum CPU usage was 0.07vCPU (setting limit accordingly)
cpu: 10m
memory: 450Mi
limits:
cpu: 600m
cpu: 20m
memory: 675Mi
service:
enableHttp: false
@@ -182,10 +218,17 @@ postgresql:
enabled: false
resources:
requests:
cpu: 600m
# Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3Dmonitoring.regex.full_match(%5C%22.*-postgresql$%5C%22)%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
#
# Data over the 3 months (2023-02-24 - 2023-04-19)
#
# The average seems to be around 0.150vCPU
#
# The maximum CPU usage was 0.420vCPU (setting limit accordingly)
cpu: 150m
memory: 1000Mi
limits:
cpu: 1300m
cpu: 1000m
memory: 1600Mi
master:
nodeSelector:
@@ -201,10 +244,17 @@ redis:
enabled: false
resources:
requests:
cpu: 100m
# Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22redis%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
#
# Data over the 3 months (2023-02-24 - 2023-04-19)
#
# The average seems to be around 0.03vCPU
#
# The maximum CPU usage was 0.500vCPU (setting limit accordingly)
cpu: 10m
memory: 60Mi
limits:
cpu: 200m
cpu: 500m
memory: 130Mi
master:
nodeSelector:
@@ -217,11 +267,18 @@ registry:
minReplicas: 1
maxReplicas: 1
resources:
# Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22registry%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
#
# Data over the 3 months (2023-02-24 - 2023-04-19)
#
# The average seems to be around 0.0005vCPU
#
# The maximum CPU usage was 0.003vCPU (setting limit accordingly)
requests:
cpu: 100m
cpu: 10m
memory: 30Mi
limits:
cpu: 200m
cpu: 50m
memory: 45Mi
nodeSelector:
preemptible: "true"

View File

@@ -1,20 +1,19 @@
import newWithInternalUserRegex from 'test_fixtures/admin/users/new_with_internal_user_regex.html';
import {
setupInternalUserRegexHandler,
ID_USER_EMAIL,
ID_USER_EXTERNAL,
ID_WARNING,
} from '~/admin/users/new';
import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
describe('admin/users/new', () => {
const FIXTURE = 'admin/users/new_with_internal_user_regex.html';
let elExternal;
let elUserEmail;
let elWarningMessage;
beforeEach(() => {
loadHTMLFixture(FIXTURE);
setHTMLFixture(newWithInternalUserRegex);
setupInternalUserRegexHandler();
elExternal = document.getElementById(ID_USER_EXTERNAL);

View File

@@ -78,7 +78,7 @@ describe('ServiceDeskRoot', () => {
const alertBodyLink = alertEl.findComponent(GlLink);
expect(alertBodyLink.exists()).toBe(true);
expect(alertBodyLink.attributes('href')).toBe(
'/help/user/project/service_desk.html#using-a-custom-email-address',
'/help/user/project/service_desk.html#use-a-custom-email-address',
);
expect(alertBodyLink.text()).toBe('How do I create a custom email address?');
});

View File

@@ -1,5 +1,6 @@
import prometheusIntegration from 'test_fixtures/integrations/prometheus/prometheus_integration.html';
import MockAdapter from 'axios-mock-adapter';
import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import PANEL_STATE from '~/prometheus_metrics/constants';
@@ -7,7 +8,6 @@ import CustomMetrics from '~/prometheus_metrics/custom_metrics';
import { metrics1 as metrics } from './mock_data';
describe('PrometheusMetrics', () => {
const FIXTURE = 'integrations/prometheus/prometheus_integration.html';
const customMetricsEndpoint =
'http://test.host/frontend-fixtures/integrations-project/prometheus/metrics';
let mock;
@@ -17,7 +17,7 @@ describe('PrometheusMetrics', () => {
mock.onGet(customMetricsEndpoint).reply(HTTP_STATUS_OK, {
metrics,
});
loadHTMLFixture(FIXTURE);
setHTMLFixture(prometheusIntegration);
});
afterEach(() => {

View File

@@ -1,5 +1,6 @@
import prometheusIntegration from 'test_fixtures/integrations/prometheus/prometheus_integration.html';
import MockAdapter from 'axios-mock-adapter';
import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
@@ -8,10 +9,8 @@ import PrometheusMetrics from '~/prometheus_metrics/prometheus_metrics';
import { metrics2 as metrics, missingVarMetrics } from './mock_data';
describe('PrometheusMetrics', () => {
const FIXTURE = 'integrations/prometheus/prometheus_integration.html';
beforeEach(() => {
loadHTMLFixture(FIXTURE);
setHTMLFixture(prometheusIntegration);
});
describe('constructor', () => {

View File

@@ -1,25 +1,52 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
import ConfidentialityFilter from '~/search/sidebar/components/confidentiality_filter.vue';
import RadioFilter from '~/search/sidebar/components/radio_filter.vue';
Vue.use(Vuex);
describe('ConfidentialityFilter', () => {
let wrapper;
const createComponent = (initProps) => {
const createComponent = (state) => {
const store = new Vuex.Store({
state,
});
wrapper = shallowMount(ConfidentialityFilter, {
...initProps,
store,
});
};
const findRadioFilter = () => wrapper.findComponent(RadioFilter);
const findHR = () => wrapper.findComponent('hr');
describe('template', () => {
describe('old sidebar', () => {
beforeEach(() => {
createComponent();
createComponent({ useNewNavigation: false });
});
it('renders the component', () => {
expect(findRadioFilter().exists()).toBe(true);
});
it('renders the divider', () => {
expect(findHR().exists()).toBe(true);
});
});
describe('new sidebar', () => {
beforeEach(() => {
createComponent({ useNewNavigation: true });
});
it('renders the component', () => {
expect(findRadioFilter().exists()).toBe(true);
});
it("doesn't render the divider", () => {
expect(findHR().exists()).toBe(false);
});
});
});

View File

@@ -1,25 +1,52 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
import RadioFilter from '~/search/sidebar/components/radio_filter.vue';
import StatusFilter from '~/search/sidebar/components/status_filter.vue';
Vue.use(Vuex);
describe('StatusFilter', () => {
let wrapper;
const createComponent = (initProps) => {
const createComponent = (state) => {
const store = new Vuex.Store({
state,
});
wrapper = shallowMount(StatusFilter, {
...initProps,
store,
});
};
const findRadioFilter = () => wrapper.findComponent(RadioFilter);
const findHR = () => wrapper.findComponent('hr');
describe('template', () => {
describe('old sidebar', () => {
beforeEach(() => {
createComponent();
createComponent({ useNewNavigation: false });
});
it('renders the component', () => {
expect(findRadioFilter().exists()).toBe(true);
});
it('renders the divider', () => {
expect(findHR().exists()).toBe(true);
});
});
describe('new sidebar', () => {
beforeEach(() => {
createComponent({ useNewNavigation: true });
});
it('renders the component', () => {
expect(findRadioFilter().exists()).toBe(true);
});
it("doesn't render the divider", () => {
expect(findHR().exists()).toBe(false);
});
});
});

View File

@@ -27,6 +27,28 @@ RSpec.describe Resolvers::Ci::RunnerProjectsResolver, feature_category: :runner_
end
end
context 'with sort argument' do
let(:args) { { sort: sort } }
context 'when :id_asc' do
let(:sort) { :id_asc }
it 'returns a lazy value with projects sorted by :id_asc' do
expect(subject).to be_a(GraphQL::Execution::Lazy)
expect(subject.value.items).to eq([project1, project2, project3])
end
end
context 'when :id_desc' do
let(:sort) { :id_desc }
it 'returns a lazy value with projects sorted by :id_desc' do
expect(subject).to be_a(GraphQL::Execution::Lazy)
expect(subject.value.items).to eq([project3, project2, project1])
end
end
end
context 'with supported arguments' do
let(:args) { { membership: true, search_namespaces: true, topics: %w[xyz] } }
@@ -47,9 +69,9 @@ RSpec.describe Resolvers::Ci::RunnerProjectsResolver, feature_category: :runner_
end
context 'without arguments' do
it 'returns a lazy value with all projects' do
it 'returns a lazy value with all projects sorted by :id_asc' do
expect(subject).to be_a(GraphQL::Execution::Lazy)
expect(subject.value).to contain_exactly(project1, project2, project3)
expect(subject.value.items).to eq([project1, project2, project3])
end
end
end

View File

@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['MergeRequest'] do
RSpec.describe GitlabSchema.types['MergeRequest'], feature_category: :code_review_workflow do
include GraphqlHelpers
specify { expect(described_class).to expose_permissions_using(Types::PermissionTypes::MergeRequest) }
@@ -36,7 +36,7 @@ RSpec.describe GitlabSchema.types['MergeRequest'] do
commit_count current_user_todos conflicts auto_merge_enabled approved_by source_branch_protected
squash_on_merge available_auto_merge_strategies
has_ci mergeable commits committers commits_without_merge_commits squash security_auto_fix default_squash_commit_message
auto_merge_strategy merge_user
auto_merge_strategy merge_user award_emoji
]
expect(described_class).to have_graphql_fields(*expected_fields).at_least

View File

@@ -2,7 +2,6 @@
require 'fast_spec_helper'
require 'rspec-parameterized'
require "support/graphql/fake_query_type"
RSpec.describe Gitlab::Graphql::KnownOperations do
using RSpec::Parameterized::TableSyntax

View File

@@ -2,7 +2,6 @@
require 'spec_helper'
require 'rspec-parameterized'
require "support/graphql/fake_query_type"
RSpec.describe Gitlab::Graphql::Tracers::MetricsTracer do
using RSpec::Parameterized::TableSyntax

View File

@@ -1,7 +1,5 @@
# frozen_string_literal: true
require "fast_spec_helper"
require "support/graphql/fake_tracer"
require "support/graphql/fake_query_type"
RSpec.describe Gitlab::Graphql::Tracers::TimerTracer do
let(:expected_duration) { 5 }

View File

@@ -480,4 +480,31 @@ RSpec.describe 'getting merge request information nested in a project', feature_
merge_request.assignees << user
end
end
context 'when selecting `awardEmoji`' do
let_it_be(:award_emoji) { create(:award_emoji, awardable: merge_request, user: current_user) }
let(:mr_fields) do
<<~QUERY
awardEmoji {
nodes {
user {
username
}
name
}
}
QUERY
end
it 'includes award emojis' do
post_graphql(query, current_user: current_user)
response = merge_request_graphql_data['awardEmoji']['nodes']
expect(response.length).to eq(1)
expect(response.first['user']['username']).to eq(current_user.username)
expect(response.first['name']).to eq(award_emoji.name)
end
end
end

View File

@@ -353,7 +353,7 @@ RSpec.describe 'getting merge request listings nested in a project', feature_cat
end
context 'when award emoji votes' do
let(:requested_fields) { [:upvotes, :downvotes] }
let(:requested_fields) { 'upvotes downvotes awardEmoji { nodes { name } }' }
before do
create_list(:award_emoji, 2, name: 'thumbsup', awardable: merge_request_a)

View File

@@ -50,7 +50,7 @@ RSpec.describe Clusters::CreateService, feature_category: :deployment_management
end
context 'when project has a cluster' do
include_context 'valid cluster create params'
include_context 'with valid cluster create params'
let!(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, projects: [project]) }
it 'creates another cluster' do

View File

@@ -0,0 +1,166 @@
# frozen_string_literal: true
# rubocop:disable Layout/LineLength
# rubocop:disable Metrics/CyclomaticComplexity
# rubocop:disable Metrics/PerceivedComplexity
# rubocop:disable Metrics/AbcSize
# Note: The ABC size is large here because we have a method generating test cases with
# multiple nested contexts. This shouldn't count as a violation.
module CycleAnalyticsHelpers
module TestGeneration
# Generate the most common set of specs that all value stream analytics phases need to have.
#
# Arguments:
#
# phase: Which phase are we testing? Will call `CycleAnalytics.new.send(phase)` for the final assertion
# data_fn: A function that returns a hash, constituting initial data for the test case
# start_time_conditions: An array of `conditions`. Each condition is a tuple of `condition_name` and `condition_fn`. `condition_fn` is called with
# `context` (no lexical scope, so need to do `context.create` for factories, for example) and `data` (from the `data_fn`).
# Each `condition_fn` is expected to implement a case which constitutes the start of the given value stream analytics phase.
# end_time_conditions: An array of `conditions`. Each condition is a tuple of `condition_name` and `condition_fn`. `condition_fn` is called with
# `context` (no lexical scope, so need to do `context.create` for factories, for example) and `data` (from the `data_fn`).
# Each `condition_fn` is expected to implement a case which constitutes the end of the given value stream analytics phase.
# before_end_fn: This function is run before calling the end time conditions. Used for setup that needs to be run between the start and end conditions.
# post_fn: Code that needs to be run after running the end time conditions.
def generate_cycle_analytics_spec(phase:, data_fn:, start_time_conditions:, end_time_conditions:, before_end_fn: nil, post_fn: nil)
combinations_of_start_time_conditions = (1..start_time_conditions.size).flat_map { |size| start_time_conditions.combination(size).to_a }
combinations_of_end_time_conditions = (1..end_time_conditions.size).flat_map { |size| end_time_conditions.combination(size).to_a }
scenarios = combinations_of_start_time_conditions.product(combinations_of_end_time_conditions)
scenarios.each do |start_time_conditions, end_time_conditions|
let_it_be(:other_project) { create(:project, :repository) }
before do
other_project.add_developer(user)
end
context "start condition: #{start_time_conditions.map(&:first).to_sentence}" do
context "end condition: #{end_time_conditions.map(&:first).to_sentence}" do
it "finds the median of available durations between the two conditions", :sidekiq_might_not_need_inline do
time_differences = Array.new(5) do |index|
data = data_fn[self]
start_time = (index * 10).days.from_now
end_time = start_time + rand(1..5).days
start_time_conditions.each_value do |condition_fn|
travel_to(start_time) { condition_fn[self, data] }
end
# Run `before_end_fn` at the midpoint between `start_time` and `end_time`
travel_to(start_time + ((end_time - start_time) / 2)) { before_end_fn[self, data] } if before_end_fn
end_time_conditions.each_value do |condition_fn|
travel_to(end_time) { condition_fn[self, data] }
end
travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn
end_time - start_time
end
median_time_difference = time_differences.sort[2]
expect(subject[phase].project_median).to be_within(5).of(median_time_difference)
end
context "when the data belongs to another project" do
it "returns nil" do
# Use a stub to "trick" the data/condition functions
# into using another project. This saves us from having to
# define separate data/condition functions for this particular
# test case.
allow(self).to receive(:project) { other_project }
data = data_fn[self]
start_time = Time.now
end_time = rand(1..10).days.from_now
start_time_conditions.each_value do |condition_fn|
travel_to(start_time) { condition_fn[self, data] }
end
end_time_conditions.each_value do |condition_fn|
travel_to(end_time) { condition_fn[self, data] }
end
travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn
# Turn off the stub before checking assertions
allow(self).to receive(:project).and_call_original
expect(subject[phase].project_median).to be_nil
end
end
context "when the end condition happens before the start condition" do
it 'returns nil' do
data = data_fn[self]
start_time = Time.now
end_time = start_time + rand(1..5).days
# Run `before_end_fn` at the midpoint between `start_time` and `end_time`
travel_to(start_time + ((end_time - start_time) / 2)) { before_end_fn[self, data] } if before_end_fn
end_time_conditions.each_value do |condition_fn|
travel_to(start_time) { condition_fn[self, data] }
end
start_time_conditions.each_value do |condition_fn|
travel_to(end_time) { condition_fn[self, data] }
end
travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn
expect(subject[phase].project_median).to be_nil
end
end
end
context "end condition NOT PRESENT: #{end_time_conditions.map(&:first).to_sentence}" do
it "returns nil" do
data = data_fn[self]
start_time = Time.now
start_time_conditions.each_value do |condition_fn|
travel_to(start_time) { condition_fn[self, data] }
end
post_fn[self, data] if post_fn
expect(subject[phase].project_median).to be_nil
end
end
end
context "start condition NOT PRESENT: #{start_time_conditions.map(&:first).to_sentence}" do
context "end condition: #{end_time_conditions.map(&:first).to_sentence}" do
it "returns nil" do
data = data_fn[self]
end_time = rand(1..10).days.from_now
end_time_conditions.each_with_index do |(_condition_name, condition_fn), index|
travel_to(end_time + index.days) { condition_fn[self, data] }
end
travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn
expect(subject[phase].project_median).to be_nil
end
end
end
end
context "when none of the start / end conditions are matched" do
it "returns nil" do
expect(subject[phase].project_median).to be_nil
end
end
end
end
end
# rubocop:enable Layout/LineLength
# rubocop:enable Metrics/CyclomaticComplexity
# rubocop:enable Metrics/PerceivedComplexity
# rubocop:enable Metrics/AbcSize

View File

@@ -88,66 +88,68 @@ module GoogleApi
# rubocop:disable Metrics/PerceivedComplexity
def cloud_platform_cluster_body(options)
{
"name": options[:name] || 'string',
"description": options[:description] || 'string',
"initialNodeCount": options[:initialNodeCount] || 'number',
"masterAuth": {
"username": options[:username] || 'string',
"password": options[:password] || 'string',
"clusterCaCertificate": options[:clusterCaCertificate] || load_sample_cert,
"clientCertificate": options[:clientCertificate] || 'string',
"clientKey": options[:clientKey] || 'string'
name: options[:name] || 'string',
description: options[:description] || 'string',
initialNodeCount: options[:initialNodeCount] || 'number',
masterAuth: {
username: options[:username] || 'string',
password: options[:password] || 'string',
clusterCaCertificate: options[:clusterCaCertificate] || load_sample_cert,
clientCertificate: options[:clientCertificate] || 'string',
clientKey: options[:clientKey] || 'string'
},
"loggingService": options[:loggingService] || 'string',
"monitoringService": options[:monitoringService] || 'string',
"network": options[:network] || 'string',
"clusterIpv4Cidr": options[:clusterIpv4Cidr] || 'string',
"subnetwork": options[:subnetwork] || 'string',
"enableKubernetesAlpha": options[:enableKubernetesAlpha] || 'boolean',
"labelFingerprint": options[:labelFingerprint] || 'string',
"selfLink": options[:selfLink] || 'string',
"zone": options[:zone] || 'string',
"endpoint": options[:endpoint] || 'string',
"initialClusterVersion": options[:initialClusterVersion] || 'string',
"currentMasterVersion": options[:currentMasterVersion] || 'string',
"currentNodeVersion": options[:currentNodeVersion] || 'string',
"createTime": options[:createTime] || 'string',
"status": options[:status] || 'RUNNING',
"statusMessage": options[:statusMessage] || 'string',
"nodeIpv4CidrSize": options[:nodeIpv4CidrSize] || 'number',
"servicesIpv4Cidr": options[:servicesIpv4Cidr] || 'string',
"currentNodeCount": options[:currentNodeCount] || 'number',
"expireTime": options[:expireTime] || 'string'
loggingService: options[:loggingService] || 'string',
monitoringService: options[:monitoringService] || 'string',
network: options[:network] || 'string',
clusterIpv4Cidr: options[:clusterIpv4Cidr] || 'string',
subnetwork: options[:subnetwork] || 'string',
enableKubernetesAlpha: options[:enableKubernetesAlpha] || 'boolean',
labelFingerprint: options[:labelFingerprint] || 'string',
selfLink: options[:selfLink] || 'string',
zone: options[:zone] || 'string',
endpoint: options[:endpoint] || 'string',
initialClusterVersion: options[:initialClusterVersion] || 'string',
currentMasterVersion: options[:currentMasterVersion] || 'string',
currentNodeVersion: options[:currentNodeVersion] || 'string',
createTime: options[:createTime] || 'string',
status: options[:status] || 'RUNNING',
statusMessage: options[:statusMessage] || 'string',
nodeIpv4CidrSize: options[:nodeIpv4CidrSize] || 'number',
servicesIpv4Cidr: options[:servicesIpv4Cidr] || 'string',
currentNodeCount: options[:currentNodeCount] || 'number',
expireTime: options[:expireTime] || 'string'
}
end
# rubocop:enable Metrics/CyclomaticComplexity
# rubocop:enable Metrics/PerceivedComplexity
def cloud_platform_operation_body(options)
{
"name": options[:name] || 'operation-1234567891234-1234567',
"zone": options[:zone] || 'us-central1-a',
"operationType": options[:operationType] || 'CREATE_CLUSTER',
"status": options[:status] || 'PENDING',
"detail": options[:detail] || 'detail',
"statusMessage": options[:statusMessage] || '',
"selfLink": options[:selfLink] || 'https://container.googleapis.com/v1/projects/123456789101/zones/us-central1-a/operations/operation-1234567891234-1234567',
"targetLink": options[:targetLink] || 'https://container.googleapis.com/v1/projects/123456789101/zones/us-central1-a/clusters/test-cluster',
"startTime": options[:startTime] || '2017-09-13T16:49:13.055601589Z',
"endTime": options[:endTime] || ''
name: options[:name] || 'operation-1234567891234-1234567',
zone: options[:zone] || 'us-central1-a',
operationType: options[:operationType] || 'CREATE_CLUSTER',
status: options[:status] || 'PENDING',
detail: options[:detail] || 'detail',
statusMessage: options[:statusMessage] || '',
selfLink: options[:selfLink] || 'https://container.googleapis.com/v1/projects/123456789101/zones/us-central1-a/operations/operation-1234567891234-1234567',
targetLink: options[:targetLink] || 'https://container.googleapis.com/v1/projects/123456789101/zones/us-central1-a/clusters/test-cluster',
startTime: options[:startTime] || '2017-09-13T16:49:13.055601589Z',
endTime: options[:endTime] || ''
}
end
def cloud_platform_projects_body(options)
{
"projects": [
projects: [
{
"projectNumber": options[:project_number] || "1234",
"projectId": options[:project_id] || "test-project-1234",
"lifecycleState": "ACTIVE",
"name": options[:name] || "test-project",
"createTime": "2017-12-16T01:48:29.129Z",
"parent": {
"type": "organization",
"id": "12345"
projectNumber: options[:project_number] || "1234",
projectId: options[:project_id] || "test-project-1234",
lifecycleState: "ACTIVE",
name: options[:name] || "test-project",
createTime: "2017-12-16T01:48:29.129Z",
parent: {
type: "organization",
id: "12345"
}
}
]
@ -156,10 +158,10 @@ module GoogleApi
def cloud_platform_projects_billing_info_body(project_id, billing_enabled)
{
"name": "projects/#{project_id}/billingInfo",
"projectId": project_id.to_s,
"billingAccountName": "account-name",
"billingEnabled": billing_enabled
name: "projects/#{project_id}/billingInfo",
projectId: project_id.to_s,
billingAccountName: "account-name",
billingEnabled: billing_enabled
}
end
end
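
A side note on the key-style change above: in Ruby, a quoted label such as "name": still builds a symbol key, so dropping the quotes is purely cosmetic and the resulting hashes are identical. A quick standalone check (illustrative, not part of the diff):

# Demonstrates that quoted and bare symbol-style keys produce the same hash.
quoted = { "name": 'string', "zone": 'us-central1-a' }
bare   = { name: 'string', zone: 'us-central1-a' }

puts quoted == bare      # => true
puts quoted.keys.inspect # => [:name, :zone]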

View File

@ -1,4 +1,5 @@
# frozen_string_literal: true
require 'graphql'
module Graphql

View File

@ -13,7 +13,7 @@ module Graphql
attr_reader :mock_broadcasted_messages
def stream_from(stream_name, coder: nil, &block)
def stream_from(stream_name, coder: nil, &block) # rubocop:disable Lint/UnusedMethodArgument
# Rails uses `coder`, we don't
block ||= ->(msg) { @mock_broadcasted_messages << msg }
MockActionCable.mock_stream_for(stream_name).add_mock_channel(self, block)
@ -30,7 +30,7 @@ module Graphql
end
def mock_broadcast(message)
@mock_channels.each do |channel, handler|
@mock_channels.each_value do |handler|
handler && handler.call(message)
end
end

View File

@ -31,9 +31,7 @@ module HttpIOHelpers
def remote_url_response_headers(response_status, from, to, size)
{ 'Content-Type' => 'text/plain' }.tap do |headers|
if response_status == 206
headers.merge('Content-Range' => "bytes #{from}-#{to}/#{size}")
end
headers.merge('Content-Range' => "bytes #{from}-#{to}/#{size}") if response_status == 206
end
end
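
One observation about the helper above (an editorial note, not part of the upstream change): Hash#merge returns a new hash, so inside tap the 'Content-Range' pair is discarded either way. A variant that actually adds the header would use []= or merge!, for example:

# Sketch of the presumably intended behaviour; an assumption, not the upstream code.
def remote_url_response_headers(response_status, from, to, size)
  { 'Content-Type' => 'text/plain' }.tap do |headers|
    # Hash#[]= mutates the hash yielded by #tap, unlike Hash#merge.
    headers['Content-Range'] = "bytes #{from}-#{to}/#{size}" if response_status == 206
  end
end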

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
module MigrationHelpers
module NamespacesHelpers
def create_namespace(name, visibility, options = {})
table(:namespaces).create!(
{
name: name,
path: name,
type: 'Group',
visibility_level: visibility
}.merge(options))
end
end
end
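
For context, a hypothetical migration spec could mix this helper in and use it as below; the migration class name, include mechanism, and visibility constant are illustrative assumptions, not taken from the diff:

# Hypothetical usage sketch of MigrationHelpers::NamespacesHelpers.
RSpec.describe BackfillSomeNamespaceColumn, :migration do
  include MigrationHelpers::NamespacesHelpers

  it 'creates a private group row' do
    namespace = create_namespace('my-group', Gitlab::VisibilityLevel::PRIVATE)

    expect(namespace.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
  end
end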

View File

@ -7,7 +7,8 @@
# to find and use schema prior to specified one.
#
# @example
# RSpec.describe CleanupThings, :migration, schema: MigrationHelpers::SchemaVersionFinder.migration_prior(AddNotNullConstraint) do ...
# RSpec.describe CleanupThings, :migration,
# schema: MigrationHelpers::SchemaVersionFinder.migration_prior(AddNotNullConstraint) do ...
#
# SchemaVersionFinder returns the schema version prior to the one specified, which then allows adding
# invalid records to the database, which in turn allows properly testing the data migration.
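
To make the mechanism concrete, a finder along these lines could walk the ordered migration files and return the version just before the named migration. This is a minimal sketch under the assumption that migration files live in db/migrate and follow the <version>_<underscored_name>.rb convention; it is not the actual GitLab implementation:

# Hypothetical sketch only; the real MigrationHelpers::SchemaVersionFinder may differ.
module MigrationHelpers
  module SchemaVersionFinderSketch
    def self.migration_prior(migration_klass)
      files = Dir[File.join('db', 'migrate', '*.rb')].sort
      index = files.index { |f| f.end_with?("_#{underscore(migration_klass.name)}.rb") }
      raise ArgumentError, "migration not found: #{migration_klass}" unless index&.positive?

      # The leading digits of the previous file name are its schema version.
      File.basename(files[index - 1]).split('_').first.to_i
    end

    def self.underscore(name)
      name.gsub(/([a-z\d])([A-Z])/, '\1_\2').downcase
    end
  end
end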

View File

@ -7,7 +7,7 @@ module MigrationHelpers
{
project_fingerprint: SecureRandom.hex(20),
location_fingerprint: Digest::SHA1.hexdigest(SecureRandom.hex(10)),
location_fingerprint: Digest::SHA1.hexdigest(SecureRandom.hex(10)), # rubocop:disable Fips/SHA1
uuid: uuid,
name: "Vulnerability Finding #{uuid}",
metadata_version: '1.3',
@ -20,7 +20,7 @@ module MigrationHelpers
"description" => "The cipher does not provide data integrity update 1",
"message" => "The cipher does not provide data integrity",
"cve" => "818bf5dacb291e15d9e6dc3c5ac32178:CIPHER",
"solution" => "GCM mode introduces an HMAC into the resulting encrypted data, providing integrity of the result.",
"solution" => "GCM mode introduces an HMAC into the resulting encrypted data, providing integrity of the result.", # rubocop:disable Layout/LineLength
"location" => {
"file" => "maven/src/main/java/com/gitlab/security_products/tests/App.java",
"start_line" => 29,
@ -49,8 +49,8 @@ module MigrationHelpers
"body" => nil,
"headers" => [
{
"name" => "Accept",
"value" => "*/*"
"name" => "Accept",
"value" => "*/*"
}
]
},
@ -60,8 +60,8 @@ module MigrationHelpers
"body" => nil,
"headers" => [
{
"name" => "Content-Length",
"value" => "0"
"name" => "Content-Length",
"value" => "0"
}
]
},

View File

@ -19,7 +19,7 @@ module PartitioningTesting
class_methods do
# Allowing partition callback to be used with BulkInsertSafe
def _bulk_insert_callback_allowed?(name, args)
super || args.first == :after && args.second == :check_partition_cascade_value
super || (args.first == :after && args.second == :check_partition_cascade_value)
end
end
end
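
The parentheses added above are purely for readability: Ruby's && binds more tightly than ||, so both forms parse identically. A standalone check:

# Demonstrates that the added parentheses do not change evaluation.
a, b, c = true, false, true

same = (a || (b && c)) == (a || b && c)
puts same # => true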

View File

@ -1,6 +1,6 @@
# frozen_string_literal: true
class MergeRequestWithoutMergeRequestDiff < ::MergeRequest
class MergeRequestWithoutMergeRequestDiff < ::MergeRequest # rubocop:disable Gitlab/NamespacedClass
self.inheritance_column = :_type_disabled
def ensure_merge_request_diff; end

View File

@ -16,9 +16,9 @@ module Prometheus
def simple_metrics(added_metric_name: 'metric_a')
[
simple_metric(required_metrics: %W(#{added_metric_name} metric_b), queries: simple_queries),
simple_metric(required_metrics: %W[#{added_metric_name} metric_b], queries: simple_queries),
simple_metric(required_metrics: [added_metric_name], queries: [simple_query('empty')]),
simple_metric(required_metrics: %w{metric_c})
simple_metric(required_metrics: %w[metric_c])
]
end

View File

@ -43,7 +43,7 @@ module TestReportsHelper
end
def sample_rspec_failed_message
<<-EOF.strip_heredoc
<<-TEST_REPORT_MESSAGE.strip_heredoc
Failure/Error: is_expected.to eq(3)
expected: 3
@ -51,7 +51,7 @@ module TestReportsHelper
(compared using ==)
./spec/test_spec.rb:12:in `block (4 levels) in <top (required)>'
EOF
TEST_REPORT_MESSAGE
end
def create_test_case_java_success(name = 'addTest')
@ -92,12 +92,12 @@ module TestReportsHelper
end
def sample_java_failed_message
<<-EOF.strip_heredoc
<<-TEST_REPORT_MESSAGE.strip_heredoc
junit.framework.AssertionFailedError: expected:<1> but was:<3>
at CalculatorTest.subtractExpression(Unknown Source)
at java.base/jdk.internal.database.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.database.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.database.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
EOF
TEST_REPORT_MESSAGE
end
end

View File

@ -1,14 +0,0 @@
# frozen_string_literal: true
module MigrationHelpers
module NamespacesHelpers
def create_namespace(name, visibility, options = {})
table(:namespaces).create!({
name: name,
path: name,
type: 'Group',
visibility_level: visibility
}.merge(options))
end
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
RSpec.shared_context 'with valid cluster create params' do
let(:clusterable) { Clusters::Instance.new }
let(:params) do
{
name: 'test-cluster',
provider_type: :gcp,
provider_gcp_attributes: {
gcp_project_id: 'gcp-project',
zone: 'us-central1-a',
num_nodes: 1,
machine_type: 'machine_type-a',
legacy_abac: 'true'
},
clusterable: clusterable
}
end
end
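
A spec can opt into this context by name via include_context; the consuming service class below is an illustrative assumption:

# Hypothetical consumer of the shared context above.
RSpec.describe Clusters::CreateService do
  include_context 'with valid cluster create params'

  it 'exposes GCP provider params for the instance clusterable' do
    expect(params[:provider_type]).to eq(:gcp)
    expect(params[:clusterable]).to eq(clusterable)
  end
end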

View File

@ -6,7 +6,7 @@
# let(:reference) { '#42' }
RSpec.shared_examples 'a reference containing an element node' do
let(:inner_html) { 'element <code>node</code> inside' }
let(:reference_with_element) { %{<a href="#{reference}">#{inner_html}</a>} }
let(:reference_with_element) { %(<a href="#{reference}">#{inner_html}</a>) }
it 'does not escape inner html' do
doc = reference_filter(reference_with_element)
@ -29,7 +29,7 @@ RSpec.shared_examples 'user reference or project reference' do
end
end
context 'mentioning a resource' do
context 'when mentioning a resource' do
it_behaves_like 'a reference containing an element node'
it_behaves_like 'it contains a data- attribute'
@ -66,12 +66,12 @@ RSpec.shared_examples 'user reference or project reference' do
doc = reference_filter("Hey #{reference}", only_path: true)
link = doc.css('a').first.attr('href')
expect(link).not_to match %r(https?://)
expect(link).not_to match %r{https?://}
expect(link).to eq urls.send "#{subject_name}_path", subject
end
context 'referencing a resource in a link href' do
let(:reference) { %Q{<a href="#{get_reference(subject)}">Some text</a>} }
describe 'referencing a resource in a link href' do
let(:reference) { %(<a href="#{get_reference(subject)}">Some text</a>) }
it_behaves_like 'it contains a data- attribute'

View File

@ -6,13 +6,13 @@ RSpec.shared_examples 'additional metrics query' do
let(:metric_group_class) { Gitlab::Prometheus::MetricGroup }
let(:metric_class) { Gitlab::Prometheus::Metric }
let(:metric_names) { %w{metric_a metric_b} }
let(:metric_names) { %w[metric_a metric_b] }
let(:query_range_result) do
[{ 'metric': {}, 'values': [[1488758662.506, '0.00002996364761904785'], [1488758722.506, '0.00003090239047619091']] }]
[{ metric: {}, values: [[1488758662.506, '0.00002996364761904785'], [1488758722.506, '0.00003090239047619091']] }]
end
let(:client) { double('prometheus_client') }
let(:client) { instance_double('Gitlab::PrometheusClient') }
let(:query_result) { described_class.new(client).query(*query_params) }
let(:project) { create(:project, :repository) }
let(:environment) { create(:environment, slug: 'environment-slug', project: project) }
@ -22,12 +22,13 @@ RSpec.shared_examples 'additional metrics query' do
allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group(metrics: [simple_metric])])
end
context 'metrics query context' do
describe 'metrics query context' do
subject! { described_class.new(client) }
shared_examples 'query context containing environment slug and filter' do
it 'contains ci_environment_slug' do
expect(subject).to receive(:query_metrics).with(project, environment, hash_including(ci_environment_slug: environment.slug))
expect(subject)
.to receive(:query_metrics).with(project, environment, hash_including(ci_environment_slug: environment.slug))
subject.query(*query_params)
end
@ -54,7 +55,8 @@ RSpec.shared_examples 'additional metrics query' do
it_behaves_like 'query context containing environment slug and filter'
it 'query context contains kube_namespace' do
expect(subject).to receive(:query_metrics).with(project, environment, hash_including(kube_namespace: kube_namespace))
expect(subject)
.to receive(:query_metrics).with(project, environment, hash_including(kube_namespace: kube_namespace))
subject.query(*query_params)
end
@ -77,7 +79,7 @@ RSpec.shared_examples 'additional metrics query' do
allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group])
end
context 'some queries return results' do
context 'when some queries return results' do
before do
allow(client).to receive(:query_range).with('query_range_a', any_args).and_return(query_range_result)
allow(client).to receive(:query_range).with('query_range_b', any_args).and_return(query_range_result)
@ -118,7 +120,7 @@ RSpec.shared_examples 'additional metrics query' do
allow(client).to receive(:label_values).and_return(metric_names)
end
context 'both queries return results' do
context 'when both queries return results' do
before do
allow(client).to receive(:query_range).with('query_range_a', any_args).and_return(query_range_result)
allow(client).to receive(:query_range).with('query_range_b', any_args).and_return(query_range_result)
@ -138,7 +140,7 @@ RSpec.shared_examples 'additional metrics query' do
end
end
context 'one query returns result' do
context 'when one query returns result' do
before do
allow(client).to receive(:query_range).with('query_range_a', any_args).and_return(query_range_result)
allow(client).to receive(:query_range).with('query_range_b', any_args).and_return([])

View File

@ -98,7 +98,7 @@ RSpec.shared_examples "redis_new_instance_shared_examples" do |name, fallback_cl
context 'when resque.yml exists' do
before do
File.write(File.join(rails_root, 'config/resque.yml'), {
'test' => { 'foobar' => 123 }
'test' => { 'foobar' => 123 }
}.to_json)
end

View File

@ -87,7 +87,9 @@ RSpec.shared_examples "redis_shared_examples" do
context 'with the namespace' do
let(:namespace) { 'namespace_name' }
let(:redis_store_to_s) { "Redis Client connected to #{host} against DB #{redis_database} with namespace #{namespace}" }
let(:redis_store_to_s) do
"Redis Client connected to #{host} against DB #{redis_database} with namespace #{namespace}"
end
subject { described_class.new(rails_env).store(namespace: namespace) }
@ -188,12 +190,13 @@ RSpec.shared_examples "redis_shared_examples" do
with_them do
it 'returns hash with cluster and password' do
is_expected.to include(password: 'myclusterpassword',
cluster: [
{ host: "#{host}1", port: redis_port },
{ host: "#{host}2", port: redis_port }
]
)
is_expected.to include(
password: 'myclusterpassword',
cluster: [
{ host: "#{host}1", port: redis_port },
{ host: "#{host}2", port: redis_port }
]
)
is_expected.not_to have_key(:url)
end
end
@ -237,6 +240,7 @@ RSpec.shared_examples "redis_shared_examples" do
before do
clear_pool
end
after do
clear_pool
end
@ -408,7 +412,7 @@ RSpec.shared_examples "redis_shared_examples" do
end
it 'has a value for the legacy default URL' do
allow(subject).to receive(:fetch_config) { nil }
allow(subject).to receive(:fetch_config).and_return(nil)
expect(subject.send(:raw_config_hash)).to include(url: a_string_matching(%r{\Aredis://localhost:638[012]\Z}))
end
@ -436,11 +440,11 @@ RSpec.shared_examples "redis_shared_examples" do
expect(subject).to eq(nil)
end
context 'but resque.yml exists' do
context 'when resque.yml exists' do
before do
FileUtils.mkdir_p(File.join(rails_root, 'config'))
File.write(File.join(rails_root, 'config/resque.yml'), {
'test' => { 'foobar' => 123 }
'test' => { 'foobar' => 123 }
}.to_json)
end

View File

@ -1,41 +1,5 @@
# frozen_string_literal: true
RSpec.shared_context 'valid cluster create params' do
let(:clusterable) { Clusters::Instance.new }
let(:params) do
{
name: 'test-cluster',
provider_type: :gcp,
provider_gcp_attributes: {
gcp_project_id: 'gcp-project',
zone: 'us-central1-a',
num_nodes: 1,
machine_type: 'machine_type-a',
legacy_abac: 'true'
},
clusterable: clusterable
}
end
end
RSpec.shared_context 'invalid cluster create params' do
let(:clusterable) { Clusters::Instance.new }
let(:params) do
{
name: 'test-cluster',
provider_type: :gcp,
provider_gcp_attributes: {
gcp_project_id: '!!!!!!!',
zone: 'us-central1-a',
num_nodes: 1,
machine_type: 'machine_type-a'
},
clusterable: clusterable
}
end
end
RSpec.shared_examples 'create cluster service success' do
it 'creates a cluster object' do
expect { subject }

View File

@ -50,7 +50,9 @@ RSpec.shared_examples 'a deploy token creation service' do
end
context 'when the deploy token is invalid' do
let(:deploy_token_params) { attributes_for(:deploy_token, read_repository: false, read_registry: false, write_registry: false) }
let(:deploy_token_params) do
attributes_for(:deploy_token, read_repository: false, read_registry: false, write_registry: false)
end
it 'does not create a new DeployToken' do
expect { subject }.not_to change { DeployToken.count }
@ -75,7 +77,7 @@ RSpec.shared_examples 'a deploy token deletion service' do
.and change { DeployToken.count }.by(-1)
end
context 'invalid token id' do
context 'with invalid token id' do
let(:deploy_token_params) { { token_id: 9999 } }
it 'raises an error' do

View File

@ -47,7 +47,7 @@ RSpec.shared_examples 'issuable import csv service' do |issuable_type|
it_behaves_like 'an issuable importer'
end
context 'comma delimited file' do
context 'with comma delimited file' do
let(:file) { fixture_file_upload('spec/fixtures/csv_comma.csv') }
it 'imports CSV without errors' do
@ -66,7 +66,7 @@ RSpec.shared_examples 'issuable import csv service' do |issuable_type|
it_behaves_like 'an issuable importer'
end
context 'tab delimited file with error row' do
context 'with tab delimited file with error row' do
let(:file) { fixture_file_upload('spec/fixtures/csv_tab.csv') }
it 'imports CSV with some error rows' do
@ -85,7 +85,7 @@ RSpec.shared_examples 'issuable import csv service' do |issuable_type|
it_behaves_like 'an issuable importer'
end
context 'semicolon delimited file with CRLF' do
context 'with semicolon delimited file with CRLF' do
let(:file) { fixture_file_upload('spec/fixtures/csv_semicolon.csv') }
it 'imports CSV with a blank row' do

View File

@ -5,10 +5,10 @@ RSpec.shared_examples 'issuable update service' do
described_class.new(project, user, opts).execute(open_issuable)
end
context 'changing state' do
describe 'changing state' do
let(:hook_event) { :"#{closed_issuable.class.name.underscore.to_sym}_hooks" }
context 'to reopened' do
describe 'to reopened' do
let(:expected_payload) do
include(
changes: include(
@ -27,11 +27,15 @@ RSpec.shared_examples 'issuable update service' do
expect(hooks_container).to receive(:execute_hooks).with(expected_payload, hook_event)
expect(hooks_container).to receive(:execute_integrations).with(expected_payload, hook_event)
described_class.new(**described_class.constructor_container_arg(project), current_user: user, params: { state_event: 'reopen' }).execute(closed_issuable)
described_class.new(
**described_class.constructor_container_arg(project),
current_user: user,
params: { state_event: 'reopen' }
).execute(closed_issuable)
end
end
context 'to closed' do
describe 'to closed' do
let(:expected_payload) do
include(
changes: include(
@ -50,7 +54,11 @@ RSpec.shared_examples 'issuable update service' do
expect(hooks_container).to receive(:execute_hooks).with(expected_payload, hook_event)
expect(hooks_container).to receive(:execute_integrations).with(expected_payload, hook_event)
described_class.new(**described_class.constructor_container_arg(project), current_user: user, params: { state_event: 'close' }).execute(open_issuable)
described_class.new(
**described_class.constructor_container_arg(project),
current_user: user,
params: { state_event: 'close' }
).execute(open_issuable)
end
end
end
@ -99,3 +107,31 @@ RSpec.shared_examples 'broadcasting issuable labels updates' do
end
end
end
RSpec.shared_examples_for 'issuable update service updating last_edited_at values' do
context 'when updating the title of the issuable' do
let(:update_params) { { title: 'updated title' } }
it 'does not update last_edited values' do
expect { update_issuable }.to change { issuable.title }.from(issuable.title).to('updated title').and(
not_change(issuable, :last_edited_at)
).and(
not_change(issuable, :last_edited_by)
)
end
end
context 'when updating the description of the issuable' do
let(:update_params) { { description: 'updated description' } }
it 'updates last_edited values' do
expect do
update_issuable
end.to change { issuable.description }.from(issuable.description).to('updated description').and(
change { issuable.last_edited_at }
).and(
change { issuable.last_edited_by }
)
end
end
end
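
Consumers would define issuable and update_issuable and then invoke the examples with it_behaves_like, mirroring the constructor_container_arg pattern used earlier in this file; the factory and service class below are illustrative assumptions:

# Hypothetical consumer; update_params is supplied by the shared examples' contexts.
RSpec.describe Issues::UpdateService do
  let(:user)     { create(:user) }
  let(:issuable) { create(:issue, author: user) }

  subject(:update_issuable) do
    described_class.new(
      **described_class.constructor_container_arg(issuable.project),
      current_user: user,
      params: update_params
    ).execute(issuable)
  end

  it_behaves_like 'issuable update service updating last_edited_at values'
end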

View File

@ -1,29 +0,0 @@
# frozen_string_literal: true
RSpec.shared_examples_for 'issuable update service updating last_edited_at values' do
context 'when updating the title of the issuable' do
let(:update_params) { { title: 'updated title' } }
it 'does not update last_edited values' do
expect { update_issuable }.to change { issuable.title }.from(issuable.title).to('updated title').and(
not_change(issuable, :last_edited_at)
).and(
not_change(issuable, :last_edited_by)
)
end
end
context 'when updating the description of the issuable' do
let(:update_params) { { description: 'updated description' } }
it 'updates last_edited values' do
expect do
update_issuable
end.to change { issuable.description }.from(issuable.description).to('updated description').and(
change { issuable.last_edited_at }
).and(
change { issuable.last_edited_by }
)
end
end
end

View File

@ -54,10 +54,10 @@ RSpec.shared_examples "migrating a deleted user's associated records to the ghos
end
end
context "race conditions" do
describe "race conditions" do
context "when #{record_class_name} migration fails and is rolled back" do
before do
allow_any_instance_of(ActiveRecord::Associations::CollectionProxy)
allow_next_instance_of(ActiveRecord::Associations::CollectionProxy)
.to receive(:update_all).and_raise(ActiveRecord::StatementTimeout)
end

View File

@ -6,9 +6,7 @@ RSpec.shared_examples 'returning an error service response' do |message: nil|
expect(result).to be_error
if message
expect(result.message).to eq(message)
end
expect(result.message).to eq(message) if message
end
end
@ -18,8 +16,6 @@ RSpec.shared_examples 'returning a success service response' do |message: nil|
expect(result).to be_success
if message
expect(result.message).to eq(message)
end
expect(result.message).to eq(message) if message
end
end

View File

@ -81,7 +81,7 @@
resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.19.0.tgz#2a592fd89bacb1fcde68de31bee4f2f2dacb0e86"
integrity sha512-y5rqgTTPTmaF5e2nVhOxw+Ur9HDJLsWb6U/KpgUzRZEdPfE6VOubXBKLdbcUTijzRptednSBDQbYZBOSqJxpJw==
"@babel/core@^7.11.6", "@babel/core@^7.12.3", "@babel/core@^7.17.0", "@babel/core@^7.18.5":
"@babel/core@^7.11.6", "@babel/core@^7.12.3", "@babel/core@^7.18.5":
version "7.19.0"
resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.19.0.tgz#d2f5f4f2033c00de8096be3c9f45772563e150c3"
integrity sha512-reM4+U7B9ss148rh2n1Qs9ASS+w94irYXga7c2jaQv9RVzpS7Mv1a9rnYYwuDa45G+DkORt9g6An2k/V4d9LbQ==
@ -102,22 +102,6 @@
json5 "^2.2.1"
semver "^6.3.0"
"@babel/eslint-parser@^7.17.0":
version "7.17.0"
resolved "https://registry.yarnpkg.com/@babel/eslint-parser/-/eslint-parser-7.17.0.tgz#eabb24ad9f0afa80e5849f8240d0e5facc2d90d6"
integrity sha512-PUEJ7ZBXbRkbq3qqM/jZ2nIuakUBqCYc7Qf52Lj7dlZ6zERnqisdHioL0l4wwQZnmskMeasqUNzLBFKs3nylXA==
dependencies:
eslint-scope "^5.1.1"
eslint-visitor-keys "^2.1.0"
semver "^6.3.0"
"@babel/eslint-plugin@^7.17.7":
version "7.17.7"
resolved "https://registry.yarnpkg.com/@babel/eslint-plugin/-/eslint-plugin-7.17.7.tgz#4ee1d5b29b79130f3bb5a933358376bcbee172b8"
integrity sha512-JATUoJJXSgwI0T8juxWYtK1JSgoLpIGUsCHIv+NMXcUDA2vIe6nvAHR9vnuJgs/P1hOFw7vPwibixzfqBBLIVw==
dependencies:
eslint-rule-composer "^0.3.0"
"@babel/generator@^7.19.0", "@babel/generator@^7.7.2":
version "7.19.0"
resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.19.0.tgz#785596c06425e59334df2ccee63ab166b738419a"
@ -1093,18 +1077,13 @@
axios "^0.24.0"
core-js "^3.29.1"
"@gitlab/eslint-plugin@18.3.2":
version "18.3.2"
resolved "https://registry.yarnpkg.com/@gitlab/eslint-plugin/-/eslint-plugin-18.3.2.tgz#dc4d5b487e26a1473106c1a3e34ae3ea219d4dd1"
integrity sha512-Lz0RnEW5isZ/jkeHcr2k6NqaHISwgKeWN/vkWUU5J4Ax7oYPR0CgA2KO/dEnOvIPmGfbnUKowsekBmmy5SUQHA==
"@gitlab/eslint-plugin@19.0.0":
version "19.0.0"
resolved "https://registry.yarnpkg.com/@gitlab/eslint-plugin/-/eslint-plugin-19.0.0.tgz#bbf5b535876421f000255f0e9ac8f971b8aa6042"
integrity sha512-clAN/hIs3fVWIJtGLUJy4cL/cXP0jOPCLHOFhlPyYCUpEZXIka73FtPYfaC3NJoRheLA3u2Ue5KKOAj6pi1ZrA==
dependencies:
"@babel/core" "^7.17.0"
"@babel/eslint-parser" "^7.17.0"
"@babel/eslint-plugin" "^7.17.7"
eslint-config-airbnb-base "^15.0.0"
eslint-config-prettier "^6.10.0"
eslint-plugin-babel "^5.3.0"
eslint-plugin-import "^2.26.0"
eslint-plugin-jest "^27.2.1"
eslint-plugin-promise "^6.0.1"
@ -5575,13 +5554,6 @@ eslint-module-utils@^2.7.4:
dependencies:
debug "^3.2.7"
eslint-plugin-babel@^5.3.0:
version "5.3.0"
resolved "https://registry.yarnpkg.com/eslint-plugin-babel/-/eslint-plugin-babel-5.3.0.tgz#2e7f251ccc249326da760c1a4c948a91c32d0023"
integrity sha512-HPuNzSPE75O+SnxHIafbW5QB45r2w78fxqwK3HmjqIUoPfPzVrq6rD+CINU3yzoDSzEhUkX07VUphbF73Lth/w==
dependencies:
eslint-rule-composer "^0.3.0"
eslint-plugin-import@^2.26.0, eslint-plugin-import@^2.27.5:
version "2.27.5"
resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.27.5.tgz#876a6d03f52608a3e5bb439c2550588e51dd6c65"
@ -5658,11 +5630,6 @@ eslint-plugin-vue@^9.3.0:
vue-eslint-parser "^9.0.1"
xml-name-validator "^4.0.0"
eslint-rule-composer@^0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/eslint-rule-composer/-/eslint-rule-composer-0.3.0.tgz#79320c927b0c5c0d3d3d2b76c8b4a488f25bbaf9"
integrity sha512-bt+Sh8CtDmn2OajxvNO+BX7Wn4CIWMpTRm3MaiKPCQcnnlm0CS2mhui6QaoeQugs+3Kj2ESKEEGJUdVafwhiCg==
eslint-scope@^4.0.3:
version "4.0.3"
resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848"
@ -5694,7 +5661,7 @@ eslint-utils@^3.0.0:
dependencies:
eslint-visitor-keys "^2.0.0"
eslint-visitor-keys@^2.0.0, eslint-visitor-keys@^2.1.0:
eslint-visitor-keys@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303"
integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==