Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-09-12 15:12:04 +00:00
parent 0127158127
commit 1ba682300f
31 changed files with 545 additions and 657 deletions

View File

@ -2,6 +2,13 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 16.3.3 (2023-09-12)
### Fixed (2 changes)
- [Prevent pipeline creation while import is running](gitlab-org/gitlab@b4e374ed7f5b264f04a2589a99004e568ef92319) ([merge request](gitlab-org/gitlab!131156))
- [Create iid sequence for ci_pipelines with new projects](gitlab-org/gitlab@a74b9ac352e0d9783ec39adaadbe2b65028f8e0c) ([merge request](gitlab-org/gitlab!130835))
## 16.3.2 (2023-09-05)
### Fixed (2 changes)
@ -818,6 +825,14 @@ entry.
- [Fix test pollution in count_deployments_metric_spec](gitlab-org/gitlab@610e6a033fe9b20aabc237b18837cddf150d4d1b) ([merge request](gitlab-org/gitlab!126808))
- [Update BulkImports::PipelineBatchWorker resource boundary](gitlab-org/gitlab@7d2477d81bcc2d035be26587802706f7098b6e44) ([merge request](gitlab-org/gitlab!126696))
## 16.2.6 (2023-09-12)
### Fixed (3 changes)
- [Prevent pipeline creation while import is running](gitlab-org/gitlab@457561758ed262b3958ff202f31a3f4d1098e983) ([merge request](gitlab-org/gitlab!131155))
- [Create iid sequence for ci_pipelines with new projects](gitlab-org/gitlab@386708854a916b28154535bf76777526ffb78a31) ([merge request](gitlab-org/gitlab!130836))
- [Drop bridge jobs on unknown failures](gitlab-org/gitlab@0cf3c9c5fc59bf6a8ea66d6017b33960c109852f) ([merge request](gitlab-org/gitlab!130834))
## 16.2.5 (2023-08-31)
### Fixed (1 change)

View File

@ -1,25 +1,15 @@
<script>
import {
GlDropdown,
GlDropdownDivider,
GlDropdownItem,
GlIcon,
GlLoadingIcon,
GlSearchBoxByType,
} from '@gitlab/ui';
import { debounce } from 'lodash';
import { GlCollapsibleListbox, GlButton } from '@gitlab/ui';
import { debounce, memoize } from 'lodash';
import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
import { __, sprintf } from '~/locale';
import { __, n__, sprintf } from '~/locale';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
export default {
components: {
GlDropdown,
GlDropdownDivider,
GlDropdownItem,
GlSearchBoxByType,
GlIcon,
GlLoadingIcon,
GlButton,
GlCollapsibleListbox,
},
inject: ['environmentsEndpoint'],
data() {
@ -34,69 +24,96 @@ export default {
noResultsLabel: __('No matching results'),
},
computed: {
srOnlyResultsCount() {
return n__('%d environment found', '%d environments found', this.results.length);
},
createEnvironmentLabel() {
return sprintf(__('Create %{environment}'), { environment: this.environmentSearch });
},
},
methods: {
addEnvironment(newEnvironment) {
this.$emit('add', newEnvironment);
this.environmentSearch = '';
this.results = [];
isCreateEnvironmentShown() {
return !this.isLoading && this.results.length === 0 && Boolean(this.environmentSearch);
},
fetchEnvironments: debounce(function debouncedFetchEnvironments() {
},
mounted() {
this.fetchEnvironments();
},
unmounted() {
// cancel debounce if the component is unmounted to avoid unnecessary fetches
this.fetchEnvironments.cancel();
},
created() {
this.fetch = memoize(async function fetchEnvironmentsFromApi(query) {
this.isLoading = true;
axios
.get(this.environmentsEndpoint, { params: { query: this.environmentSearch } })
.then(({ data }) => {
this.results = data || [];
try {
const { data } = await axios.get(this.environmentsEndpoint, { params: { query } });
return data;
} catch {
createAlert({
message: __('Something went wrong on our end. Please try again.'),
});
return [];
} finally {
this.isLoading = false;
}
});
this.fetchEnvironments = debounce(function debouncedFetchEnvironments(query = '') {
this.fetch(query)
.then((data) => {
this.results = data.map((item) => ({ text: item, value: item }));
})
.catch(() => {
createAlert({
message: __('Something went wrong on our end. Please try again.'),
});
})
.finally(() => {
this.isLoading = false;
this.results = [];
});
}, 250),
setFocus() {
this.$refs.searchBox.focusInput();
}, DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
},
methods: {
onSelect(selected) {
this.$emit('add', selected[0]);
},
addEnvironment(newEnvironment) {
this.$emit('add', newEnvironment);
this.results = [];
},
onSearch(query) {
this.environmentSearch = query;
this.fetchEnvironments(query);
},
},
};
</script>
<template>
<gl-dropdown class="js-new-environments-dropdown" @shown="setFocus">
<template #button-content>
<span class="d-md-none mr-1">
{{ $options.translations.addEnvironmentsLabel }}
</span>
<gl-icon class="d-none d-md-inline-flex gl-mr-1" name="plus" />
<gl-collapsible-listbox
icon="plus"
data-testid="new-environments-dropdown"
:toggle-text="$options.translations.addEnvironmentsLabel"
:items="results"
:searching="isLoading"
:header-text="$options.translations.addEnvironmentsLabel"
searchable
multiple
@search="onSearch"
@select="onSelect"
>
<template #footer>
<div
v-if="isCreateEnvironmentShown"
class="gl-border-t-solid gl-border-t-1 gl-border-t-gray-200 gl-p-2"
>
<gl-button
category="tertiary"
block
class="gl-justify-content-start!"
data-testid="add-environment-button"
@click="addEnvironment(environmentSearch)"
>
{{ createEnvironmentLabel }}
</gl-button>
</div>
</template>
<gl-search-box-by-type
ref="searchBox"
v-model.trim="environmentSearch"
@focus="fetchEnvironments"
@keyup="fetchEnvironments"
/>
<gl-loading-icon v-if="isLoading" size="sm" />
<gl-dropdown-item
v-for="environment in results"
v-else-if="results.length"
:key="environment"
@click="addEnvironment(environment)"
>
{{ environment }}
</gl-dropdown-item>
<template v-else-if="environmentSearch.length">
<span ref="noResults" class="text-secondary gl-p-3">
{{ $options.translations.noMatchingResults }}
</span>
<gl-dropdown-divider />
<gl-dropdown-item @click="addEnvironment(environmentSearch)">
{{ createEnvironmentLabel }}
</gl-dropdown-item>
<template #search-summary-sr-only>
{{ srOnlyResultsCount }}
</template>
</gl-dropdown>
</gl-collapsible-listbox>
</template>

View File

@ -77,6 +77,7 @@ export default {
if (!this.isMarkup || !this.remainingContent.length) {
this.$emit(CONTENT_LOADED_EVENT);
this.isLoading = false;
return;
}
@ -89,6 +90,7 @@ export default {
fileContent.append(...content);
if (nextChunkEnd < this.remainingContent.length) return;
this.$emit(CONTENT_LOADED_EVENT);
this.isLoading = false;
}, i);
}
},
@ -99,5 +101,9 @@ export default {
};
</script>
<template>
<markdown-field-view ref="content" v-safe-html:[$options.safeHtmlConfig]="rawContent" />
<markdown-field-view
ref="content"
v-safe-html:[$options.safeHtmlConfig]="rawContent"
:is-loading="isLoading"
/>
</template>

View File

@ -2,8 +2,26 @@
import { renderGFM } from '~/behaviors/markdown/render_gfm';
export default {
props: {
isLoading: {
type: Boolean,
required: false,
default: false,
},
},
watch: {
isLoading() {
this.handleGFM();
},
},
mounted() {
renderGFM(this.$el);
this.handleGFM();
},
methods: {
handleGFM() {
if (this.isLoading) return;
renderGFM(this.$el);
},
},
};
</script>

View File

@ -88,10 +88,14 @@ module Types
description: 'Play path of the job.'
field :playable, GraphQL::Types::Boolean, null: false, method: :playable?,
description: 'Indicates the job can be played.'
field :previous_stage_jobs, Types::Ci::JobType.connection_type,
null: true,
description: 'Jobs from the previous stage.'
field :previous_stage_jobs_or_needs, Types::Ci::JobNeedUnion.connection_type,
null: true,
description: 'Jobs that must complete before the job runs. Returns `BuildNeed`, ' \
'which is the needed jobs if the job uses the `needs` keyword, or the previous stage jobs otherwise.'
'which is the needed jobs if the job uses the `needs` keyword, or the previous stage jobs otherwise.',
deprecated: { reason: 'Replaced by previousStageJobs and needs fields', milestone: '16.4' }
field :ref_name, GraphQL::Types::String, null: true,
description: 'Ref name of the job.'
field :ref_path, GraphQL::Types::String, null: true,
@ -176,17 +180,17 @@ module Types
end
def previous_stage_jobs
BatchLoader::GraphQL.for([object.pipeline, object.stage_idx - 1]).batch(default_value: []) do |tuples, loader|
tuples.group_by(&:first).each do |pipeline, keys|
positions = keys.map(&:second)
BatchLoader::GraphQL.for([object.pipeline_id, object.stage_idx - 1]).batch(default_value: []) do |tuples, loader|
pipeline_ids = tuples.map(&:first).uniq
stage_idxs = tuples.map(&:second).uniq
stages = pipeline.stages.by_position(positions)
# This query can fetch unneeded jobs when querying for more than one pipeline.
# It was decided that fetching and discarding the jobs is preferable to making a more complex query.
jobs = CommitStatus.in_pipelines(pipeline_ids).for_stage(stage_idxs).latest
grouped_jobs = jobs.group_by { |job| [job.pipeline_id, job.stage_idx] }
stages.each do |stage|
# Without `.to_a`, the memoization will only preserve the activerecord relation object. And when there is
# a call, the SQL query will be executed again.
loader.call([pipeline, stage.position], stage.latest_statuses.to_a)
end
tuples.each do |tuple|
loader.call(tuple, grouped_jobs.fetch(tuple, []))
end
end
end

View File

@ -19,6 +19,7 @@ module Issuable
include Awardable
include Taskable
include Importable
include Transitionable
include Editable
include AfterCommitQueue
include Sortable
@ -239,6 +240,10 @@ module Issuable
super + [:notes]
end
def importing_or_transitioning?
importing? || transitioning?
end
private
def validate_description_length?

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
# Mixin that lets a model temporarily flag itself as being in the middle of a
# state transition, so callers (e.g. validations) can relax checks while the
# transition runs.
module Transitionable
  extend ActiveSupport::Concern

  # Raw flag; toggled via enable_transitioning / disable_transitioning.
  attr_accessor :transitioning

  # True only while the flag is set AND the :skip_validations_during_transitions
  # feature flag is enabled for the record's `project`.
  def transitioning?
    !!(transitioning && Feature.enabled?(:skip_validations_during_transitions, project))
  end

  # Mark the record as currently transitioning.
  def enable_transitioning
    self.transitioning = true
  end

  # Clear the transitioning flag.
  def disable_transitioning
    self.transitioning = false
  end
end

View File

@ -279,6 +279,12 @@ class MergeRequest < ApplicationRecord
def check_state?(merge_status)
[:unchecked, :cannot_be_merged_recheck, :checking, :cannot_be_merged_rechecking].include?(merge_status.to_sym)
end
# rubocop: disable Style/SymbolProc
before_transition { |merge_request| merge_request.enable_transitioning }
after_transition { |merge_request| merge_request.disable_transitioning }
# rubocop: enable Style/SymbolProc
end
# Returns current merge_status except it returns `cannot_be_merged_rechecking` as `checking`
@ -292,10 +298,14 @@ class MergeRequest < ApplicationRecord
validates :target_project, presence: true
validates :target_branch, presence: true
validates :merge_user, presence: true, if: :auto_merge_enabled?, unless: :importing?
validate :validate_branches, unless: [:allow_broken, :importing?, :closed_or_merged_without_fork?]
validate :validate_branches, unless: [
:allow_broken,
:importing_or_transitioning?,
:closed_or_merged_without_fork?
]
validate :validate_fork, unless: :closed_or_merged_without_fork?
validate :validate_target_project, on: :create, unless: :importing?
validate :validate_reviewer_size_length, unless: :importing?
validate :validate_target_project, on: :create, unless: :importing_or_transitioning?
validate :validate_reviewer_size_length, unless: :importing_or_transitioning?
scope :by_source_or_target_branch, ->(branch_name) do
where("source_branch = :branch OR target_branch = :branch", branch: branch_name)

View File

@ -1,33 +0,0 @@
# frozen_string_literal: true
module PerformanceMonitoring
  # Plain-Ruby model representing a single Prometheus metric definition
  # parsed from dashboard JSON.
  class PrometheusMetric
    include ActiveModel::Model

    attr_accessor :id, :unit, :label, :query, :query_range

    # A metric needs a unit and exactly one of query / query_range.
    validates :unit, presence: true
    validates :query, presence: true, unless: :query_range
    validates :query_range, presence: true, unless: :query

    class << self
      # Builds a metric from parsed JSON content and raises (via #validate!,
      # ActiveModel::ValidationError) when the result is invalid.
      def from_json(json_content)
        metric = build_from_hash(json_content)
        metric.validate!
        metric
      end

      private

      # Accepts only Hash input; anything else yields a blank (invalid) metric.
      # Missing keys simply leave the corresponding attribute nil.
      def build_from_hash(attributes)
        return new unless attributes.is_a?(Hash)

        permitted = attributes.slice('id', 'unit', 'label', 'query', 'query_range')
        new(permitted.transform_keys(&:to_sym))
      end
    end
  end
end

View File

@ -1,42 +0,0 @@
# frozen_string_literal: true
module PerformanceMonitoring
  # Plain-Ruby model for a single panel of a Prometheus dashboard, holding its
  # metadata plus its child PrometheusMetric objects.
  class PrometheusPanel
    include ActiveModel::Model

    attr_accessor :type, :title, :y_label, :weight, :metrics, :y_axis, :max_value

    validates :title, presence: true
    validates :metrics, array_members: { member_class: PerformanceMonitoring::PrometheusMetric }

    class << self
      # Builds a panel from parsed JSON content and raises (via #validate!,
      # ActiveModel::ValidationError) when the result is invalid.
      def from_json(json_content)
        panel = build_from_hash(json_content)
        panel.validate!
        panel
      end

      private

      # Non-Hash input yields a blank (invalid) panel.
      def build_from_hash(attributes)
        return new unless attributes.is_a?(Hash)

        attrs = {
          type: attributes['type'],
          title: attributes['title'],
          y_label: attributes['y_label'],
          weight: attributes['weight'],
          metrics: initialize_children_collection(attributes['metrics'])
        }

        new(attrs)
      end

      # Maps each raw metric hash to a PrometheusMetric; nil unless given an Array.
      def initialize_children_collection(children)
        return unless children.is_a?(Array)

        children.map { |raw_metric| PerformanceMonitoring::PrometheusMetric.from_json(raw_metric) }
      end
    end

    # Deterministic panel identifier derived from the enclosing group title and
    # the panel's own type and title.
    def id(group_title)
      Digest::SHA2.hexdigest([group_title, type, title].join)
    end
  end
end

View File

@ -0,0 +1,8 @@
---
name: skip_validations_during_transitions
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/129848
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/423092
milestone: '16.4'
type: development
group: group::code review
default_enabled: false

View File

@ -204,6 +204,24 @@ Returns [`CiStage`](#cistage).
| ---- | ---- | ----------- |
| <a id="querycipipelinestageid"></a>`id` | [`CiStageID!`](#cistageid) | Global ID of the CI stage. |
### `Query.ciQueueingHistory`
Time it took for a CI job to be picked up by a runner, in percentiles.
WARNING:
**Introduced** in 16.4.
This feature is an Experiment. It can be changed or removed at any time.
Returns [`QueueingDelayHistory`](#queueingdelayhistory).
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="queryciqueueinghistoryfromtime"></a>`fromTime` | [`Time`](#time) | Start of the requested time frame. Defaults to 3 hours ago. |
| <a id="queryciqueueinghistoryrunnertype"></a>`runnerType` | [`CiRunnerType`](#cirunnertype) | Filter jobs by the type of runner that executed them. |
| <a id="queryciqueueinghistorytotime"></a>`toTime` | [`Time`](#time) | End of the requested time frame. Defaults to current time. |
### `Query.ciVariables`
List of the instance's CI/CD variables.
@ -13863,7 +13881,8 @@ CI/CD variables for a GitLab instance.
| <a id="cijobpipeline"></a>`pipeline` | [`Pipeline`](#pipeline) | Pipeline the job belongs to. |
| <a id="cijobplaypath"></a>`playPath` | [`String`](#string) | Play path of the job. |
| <a id="cijobplayable"></a>`playable` | [`Boolean!`](#boolean) | Indicates the job can be played. |
| <a id="cijobpreviousstagejobsorneeds"></a>`previousStageJobsOrNeeds` | [`JobNeedUnionConnection`](#jobneedunionconnection) | Jobs that must complete before the job runs. Returns `BuildNeed`, which is the needed jobs if the job uses the `needs` keyword, or the previous stage jobs otherwise. (see [Connections](#connections)) |
| <a id="cijobpreviousstagejobs"></a>`previousStageJobs` | [`CiJobConnection`](#cijobconnection) | Jobs from the previous stage. (see [Connections](#connections)) |
| <a id="cijobpreviousstagejobsorneeds"></a>`previousStageJobsOrNeeds` **{warning-solid}** | [`JobNeedUnionConnection`](#jobneedunionconnection) | **Deprecated** in 16.4. Replaced by previousStageJobs and needs fields. |
| <a id="cijobproject"></a>`project` | [`Project`](#project) | Project that the job belongs to. |
| <a id="cijobqueuedat"></a>`queuedAt` | [`Time`](#time) | When the job was enqueued and marked as pending. |
| <a id="cijobqueuedduration"></a>`queuedDuration` | [`Duration`](#duration) | How long the job was enqueued before starting. |
@ -23200,6 +23219,31 @@ Pypi metadata.
| <a id="querycomplexitylimit"></a>`limit` | [`Int`](#int) | GraphQL query complexity limit. See [GitLab documentation on this limit](https://docs.gitlab.com/ee/api/graphql/index.html#max-query-complexity). |
| <a id="querycomplexityscore"></a>`score` | [`Int`](#int) | GraphQL query complexity score. |
### `QueueingDelayHistory`
Aggregated statistics about queueing times for CI jobs.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="queueingdelayhistorytimeseries"></a>`timeSeries` | [`[QueueingHistoryTimeSeries!]`](#queueinghistorytimeseries) | Time series. |
### `QueueingHistoryTimeSeries`
The amount of time for a job to be picked up by a runner, in percentiles.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="queueinghistorytimeseriesp25"></a>`p25` **{warning-solid}** | [`Duration`](#duration) | **Introduced** in 16.4. This feature is an Experiment. It can be changed or removed at any time. 25th percentile. 25% of the durations are lower than this value. |
| <a id="queueinghistorytimeseriesp50"></a>`p50` **{warning-solid}** | [`Duration`](#duration) | **Introduced** in 16.4. This feature is an Experiment. It can be changed or removed at any time. 50th percentile. 50% of the durations are lower than this value. |
| <a id="queueinghistorytimeseriesp90"></a>`p90` **{warning-solid}** | [`Duration`](#duration) | **Introduced** in 16.4. This feature is an Experiment. It can be changed or removed at any time. 90th percentile. 90% of the durations are lower than this value. |
| <a id="queueinghistorytimeseriesp95"></a>`p95` **{warning-solid}** | [`Duration`](#duration) | **Introduced** in 16.4. This feature is an Experiment. It can be changed or removed at any time. 95th percentile. 95% of the durations are lower than this value. |
| <a id="queueinghistorytimeseriesp99"></a>`p99` **{warning-solid}** | [`Duration`](#duration) | **Introduced** in 16.4. This feature is an Experiment. It can be changed or removed at any time. 99th percentile. 99% of the durations are lower than this value. |
| <a id="queueinghistorytimeseriestime"></a>`time` | [`Time!`](#time) | Start of the time interval. |
### `RecentFailures`
Recent failure history of a test case.

View File

@ -11,7 +11,10 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> `version` value for the `order_by` attribute [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/95150) in GitLab 15.4.
Get a list of repository tags from a project, sorted by update date and time in
descending order. If the repository is publicly accessible, authentication
descending order.
NOTE:
If the repository is publicly accessible, authentication
(`--header "PRIVATE-TOKEN: <your_access_token>"`) is not required.
```plaintext
@ -27,6 +30,13 @@ Parameters:
| `sort` | string | no | Return tags sorted in `asc` or `desc` order. Default is `desc`. |
| `search` | string | no | Return a list of tags matching the search criteria. You can use `^term` and `term$` to find tags that begin and end with `term` respectively. No other regular expressions are supported. |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" \
--url "https://gitlab.example.com/api/v4/projects/5/repository/tags"
```
Example Response:
```json
[
{

View File

@ -116,7 +116,7 @@ For more information on:
omniauth:
enabled: true
allowSingleSignOn: ['saml']
blockAutoCreatedUsers: true
blockAutoCreatedUsers: false
```
1. Optional. You can automatically link SAML users with existing GitLab users if their

View File

@ -1,45 +0,0 @@
# frozen_string_literal: true
module Gitlab
  module Prometheus
    # Loads metric-group definitions from YAML files under config/prometheus and
    # materializes them as validated MetricGroup / Metric model objects.
    module AdditionalMetricsParser
      CONFIG_ROOT = 'config/prometheus'
      MUTEX = Mutex.new
      extend self

      # Returns the groups parsed from the given YAML file name.
      # Raises ParsingError when any group or metric fails validation.
      def load_groups_from_yaml(file_name)
        yaml_metrics_raw(file_name).map(&method(:group_from_entry))
      end

      private

      def validate!(obj)
        # NOTE(review): '\n' in single quotes is a literal backslash-n, not a
        # newline — confirm whether a real newline separator was intended.
        raise ParsingError, obj.errors.full_messages.join('\n') unless obj.valid?
      end

      def group_from_entry(entry)
        entry[:name] = entry.delete(:group)

        # Block parameter renamed from `entry` to `metric_entry`: the old name
        # shadowed the method argument (Lint/ShadowingOuterLocalVariable).
        entry[:metrics]&.map! do |metric_entry|
          Metric.new(metric_entry).tap(&method(:validate!))
        end

        MetricGroup.new(entry).tap(&method(:validate!))
      end

      def yaml_metrics_raw(file_name)
        load_yaml_file(file_name)&.map(&:deep_symbolize_keys).freeze
      end

      # rubocop:disable Gitlab/ModuleWithInstanceVariables
      def load_yaml_file(file_name)
        # In development, re-read the file on every call so edits show up
        # without a restart; otherwise cache parsed YAML per file name,
        # guarded by MUTEX against concurrent population.
        return YAML.load_file(Rails.root.join(CONFIG_ROOT, file_name)) if Rails.env.development?

        MUTEX.synchronize do
          @loaded_yaml_cache ||= {}
          @loaded_yaml_cache[file_name] ||= YAML.load_file(Rails.root.join(CONFIG_ROOT, file_name))
        end
      end
      # rubocop:enable Gitlab/ModuleWithInstanceVariables
    end
  end
end

View File

@ -252,6 +252,11 @@ msgid_plural "%d days"
msgstr[0] ""
msgstr[1] ""
msgid "%d environment found"
msgid_plural "%d environments found"
msgstr[0] ""
msgstr[1] ""
msgid "%d epic"
msgid_plural "%d epics"
msgstr[0] ""

View File

@ -45,7 +45,7 @@ module QA
end
def install_kubernetes_agent(agent_token:, kas_address:, agent_name: "gitlab-agent")
shell <<~CMD.tr("\n", ' ')
cmd_str = <<~CMD.tr("\n", ' ')
helm repo add gitlab https://charts.gitlab.io &&
helm repo update &&
helm upgrade --install gitlab-agent gitlab/gitlab-agent
@ -56,6 +56,7 @@ module QA
--set config.kasAddress=#{kas_address}
--set config.kasHeaders="{Cookie: gitlab_canary=#{target_canary?}}"
CMD
shell(cmd_str, mask_secrets: [agent_token])
end
def uninstall_kubernetes_agent(agent_name: "gitlab-agent")
@ -78,7 +79,7 @@ module QA
end
def install_gitlab_workspaces_proxy
shell <<~CMD.tr("\n", ' ')
cmd_str = <<~CMD.tr("\n", ' ')
helm repo add gitlab-workspaces-proxy \
https://gitlab.com/api/v4/projects/gitlab-org%2fremote-development%2fgitlab-workspaces-proxy/packages/helm/devel &&
helm repo update &&
@ -100,6 +101,8 @@ module QA
--set="ingress.tls.wildcardDomainKey=$(cat #{Runtime::Env.workspaces_wildcard_key})" \
--set="ingress.className=nginx"
CMD
shell(cmd_str, mask_secrets: [Runtime::Env.workspaces_oauth_app_secret, Runtime::Env.workspaces_oauth_signing_key])
end
def update_dns(load_balancer_ip)

View File

@ -7,13 +7,14 @@ RSpec.describe 'User creates feature flag', :js do
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace) }
let!(:environment) { create(:environment, :production, project: project) }
before do
project.add_developer(user)
sign_in(user)
end
it 'user creates a flag enabled for user ids' do
it 'user creates a flag enabled for user ids with existing environment' do
visit(new_project_feature_flag_path(project))
set_feature_flag_info('test_feature', 'Test feature')
within_strategy_row(1) do
@ -29,6 +30,22 @@ RSpec.describe 'User creates feature flag', :js do
expect(page).to have_text('test_feature')
end
it 'user creates a flag enabled for user ids with non-existing environment' do
visit(new_project_feature_flag_path(project))
set_feature_flag_info('test_feature', 'Test feature')
within_strategy_row(1) do
select 'User IDs', from: 'Type'
fill_in 'User IDs', with: 'user1, user2'
environment_plus_button.click
environment_search_input.set('foo-bar')
environment_search_create_button.first.click
end
click_button 'Create feature flag'
expect_user_to_see_feature_flags_index_page
expect(page).to have_text('test_feature')
end
it 'user creates a flag with default environment scopes' do
visit(new_project_feature_flag_path(project))
set_feature_flag_info('test_flag', 'Test flag')
@ -74,14 +91,18 @@ RSpec.describe 'User creates feature flag', :js do
end
def environment_plus_button
find('.js-new-environments-dropdown')
find('[data-testid=new-environments-dropdown]')
end
def environment_search_input
find('.js-new-environments-dropdown input')
find('[data-testid=new-environments-dropdown] input')
end
def environment_search_results
all('.js-new-environments-dropdown button.dropdown-item')
all('[data-testid=new-environments-dropdown] li')
end
def environment_search_create_button
all('[data-testid=new-environments-dropdown] button')
end
end

View File

@ -1,7 +1,6 @@
import { GlLoadingIcon, GlSearchBoxByType, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { GlCollapsibleListbox } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import NewEnvironmentsDropdown from '~/feature_flags/components/new_environments_dropdown.vue';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
@ -13,87 +12,78 @@ describe('New Environments Dropdown', () => {
let wrapper;
let axiosMock;
beforeEach(() => {
const createWrapper = (axiosResult = []) => {
axiosMock = new MockAdapter(axios);
wrapper = shallowMount(NewEnvironmentsDropdown, {
axiosMock.onGet(TEST_HOST).reply(HTTP_STATUS_OK, axiosResult);
wrapper = shallowMountExtended(NewEnvironmentsDropdown, {
provide: { environmentsEndpoint: TEST_HOST },
stubs: {
GlCollapsibleListbox,
},
});
});
};
const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
const findCreateEnvironmentButton = () => wrapper.findByTestId('add-environment-button');
afterEach(() => {
axiosMock.restore();
});
describe('before results', () => {
beforeEach(() => {
createWrapper();
});
it('should show a loading icon', () => {
axiosMock.onGet(TEST_HOST).reply(() => {
expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
wrapper.findComponent(GlSearchBoxByType).vm.$emit('focus');
return axios.waitForAll();
expect(findListbox().props('searching')).toBe(true);
});
it('should not show any dropdown items', () => {
axiosMock.onGet(TEST_HOST).reply(() => {
expect(wrapper.findAllComponents(GlDropdownItem)).toHaveLength(0);
});
wrapper.findComponent(GlSearchBoxByType).vm.$emit('focus');
return axios.waitForAll();
expect(findListbox().props('items')).toEqual([]);
});
});
describe('with empty results', () => {
let item;
beforeEach(async () => {
axiosMock.onGet(TEST_HOST).reply(HTTP_STATUS_OK, []);
wrapper.findComponent(GlSearchBoxByType).vm.$emit('focus');
wrapper.findComponent(GlSearchBoxByType).vm.$emit('input', TEST_SEARCH);
createWrapper();
findListbox().vm.$emit('search', TEST_SEARCH);
await axios.waitForAll();
await nextTick();
item = wrapper.findComponent(GlDropdownItem);
});
it('should display a Create item label', () => {
expect(item.text()).toBe('Create production');
});
it('should display that no matching items are found', () => {
expect(wrapper.findComponent({ ref: 'noResults' }).exists()).toBe(true);
expect(findCreateEnvironmentButton().text()).toBe(`Create ${TEST_SEARCH}`);
});
it('should emit a new scope when selected', () => {
item.vm.$emit('click');
findCreateEnvironmentButton().vm.$emit('click');
expect(wrapper.emitted('add')).toEqual([[TEST_SEARCH]]);
});
});
describe('with results', () => {
let items;
beforeEach(() => {
axiosMock.onGet(TEST_HOST).reply(HTTP_STATUS_OK, ['prod', 'production']);
wrapper.findComponent(GlSearchBoxByType).vm.$emit('focus');
wrapper.findComponent(GlSearchBoxByType).vm.$emit('input', 'prod');
return axios.waitForAll().then(() => {
items = wrapper.findAllComponents(GlDropdownItem);
});
beforeEach(async () => {
createWrapper(['prod', 'production']);
findListbox().vm.$emit('search', TEST_SEARCH);
await axios.waitForAll();
});
it('should display one item per result', () => {
expect(items).toHaveLength(2);
it('should populate results properly', () => {
expect(findListbox().props().items).toHaveLength(2);
});
it('should emit an add if an item is clicked', () => {
items.at(0).vm.$emit('click');
it('should emit an add on selection', () => {
findListbox().vm.$emit('select', ['prod']);
expect(wrapper.emitted('add')).toEqual([['prod']]);
});
it('should not display a create label', () => {
items = items.filter((i) => i.text().startsWith('Create'));
expect(items).toHaveLength(0);
});
it('should not display a message about no results', () => {
expect(wrapper.findComponent({ ref: 'noResults' }).exists()).toBe(false);
});
it('should not display a footer with the create button', () => {
expect(findCreateEnvironmentButton().exists()).toBe(false);
});
});
});

View File

@ -1,11 +1,14 @@
import { GlAlert, GlFormSelect, GlLink, GlToken, GlButton } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
import { last } from 'lodash';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import Api from '~/api';
import axios from '~/lib/utils/axios_utils';
import NewEnvironmentsDropdown from '~/feature_flags/components/new_environments_dropdown.vue';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import Strategy from '~/feature_flags/components/strategy.vue';
import StrategyParameters from '~/feature_flags/components/strategy_parameters.vue';
import {
@ -22,16 +25,18 @@ import { userList } from '../mock_data';
jest.mock('~/api');
const TEST_HOST = '/test';
const provide = {
strategyTypeDocsPagePath: 'link-to-strategy-docs',
environmentsScopeDocsPath: 'link-scope-docs',
environmentsEndpoint: '',
environmentsEndpoint: TEST_HOST,
};
Vue.use(Vuex);
describe('Feature flags strategy', () => {
let wrapper;
let axiosMock;
const findStrategyParameters = () => wrapper.findComponent(StrategyParameters);
const findDocsLinks = () => wrapper.findAllComponents(GlLink);
@ -45,6 +50,8 @@ describe('Feature flags strategy', () => {
provide,
},
) => {
axiosMock = new MockAdapter(axios);
axiosMock.onGet(TEST_HOST).reply(HTTP_STATUS_OK, []);
wrapper = mount(Strategy, { store: createStore({ projectId: '1' }), ...opts });
};
@ -52,6 +59,10 @@ describe('Feature flags strategy', () => {
Api.searchFeatureFlagUserLists.mockResolvedValue({ data: [userList] });
});
afterEach(() => {
axiosMock.restore();
});
describe('helper links', () => {
const propsData = { strategy: {}, index: 0, userLists: [userList] };
factory({ propsData, provide });

View File

@ -29,6 +29,8 @@ describe('Blob Rich Viewer component', () => {
beforeEach(() => createComponent());
const findMarkdownFieldView = () => wrapper.findComponent(MarkdownFieldView);
describe('Markdown content', () => {
const generateDummyContent = (contentLength) => {
let generatedContent = '';
@ -48,14 +50,17 @@ describe('Blob Rich Viewer component', () => {
expect(wrapper.text()).toContain('Line: 10');
expect(wrapper.text()).not.toContain('Line: 50');
expect(wrapper.emitted(CONTENT_LOADED_EVENT)).toBeUndefined();
expect(findMarkdownFieldView().props('isLoading')).toBe(true);
});
it('renders the rest of the file later and emits a content loaded event', () => {
it('renders the rest of the file later and emits a content loaded event', async () => {
jest.runAllTimers();
await nextTick();
expect(wrapper.text()).toContain('Line: 10');
expect(wrapper.text()).toContain('Line: 50');
expect(wrapper.emitted(CONTENT_LOADED_EVENT)).toHaveLength(1);
expect(findMarkdownFieldView().props('isLoading')).toBe(false);
});
it('sanitizes the content', () => {
@ -72,6 +77,7 @@ describe('Blob Rich Viewer component', () => {
it('renders the entire file immediately and emits a content loaded event', () => {
expect(wrapper.text()).toContain('Line: 5');
expect(wrapper.emitted(CONTENT_LOADED_EVENT)).toHaveLength(1);
expect(findMarkdownFieldView().props('isLoading')).toBe(false);
});
it('sanitizes the content', () => {
@ -97,7 +103,7 @@ describe('Blob Rich Viewer component', () => {
});
it('is using Markdown View Field', () => {
expect(wrapper.findComponent(MarkdownFieldView).exists()).toBe(true);
expect(findMarkdownFieldView().exists()).toBe(true);
});
it('scrolls to the hash location', () => {

View File

@ -6,15 +6,27 @@ import { renderGFM } from '~/behaviors/markdown/render_gfm';
jest.mock('~/behaviors/markdown/render_gfm');
describe('Markdown Field View component', () => {
function createComponent() {
shallowMount(MarkdownFieldView);
function createComponent(isLoading = false) {
shallowMount(MarkdownFieldView, { propsData: { isLoading } });
}
beforeEach(() => {
createComponent();
});
it('processes rendering with GFM', () => {
createComponent();
expect(renderGFM).toHaveBeenCalledTimes(1);
});
describe('watchers', () => {
it('does not process rendering with GFM if isLoading is true', () => {
createComponent(true);
expect(renderGFM).not.toHaveBeenCalled();
});
it('processes rendering with GFM when isLoading is updated to `false`', () => {
createComponent(false);
expect(renderGFM).toHaveBeenCalledTimes(1);
});
});
});

View File

@ -32,6 +32,7 @@ RSpec.describe Types::Ci::JobType, feature_category: :continuous_integration do
needs
pipeline
playable
previousStageJobs
previousStageJobsOrNeeds
project
queued_at

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe SidekiqHelper do
RSpec.describe SidekiqHelper, feature_category: :shared do
describe 'parse_sidekiq_ps' do
it 'parses line with time' do
line = '55137 10,0 2,1 S+ 2:30pm sidekiq 4.1.4 gitlab [0 of 25 busy] '

View File

@ -1,248 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

# Specs for the YAML parser that loads "additional metrics" definitions
# (groups -> metrics -> queries) for the Prometheus integration, and for the
# validation errors raised when required fields are blank or absent.
RSpec.describe Gitlab::Prometheus::AdditionalMetricsParser do
  include Prometheus::MetricBuilders

  let(:parser_error_class) { Gitlab::Prometheus::ParsingError }

  describe '#load_groups_from_yaml' do
    subject { described_class.load_groups_from_yaml('dummy.yaml') }

    describe 'parsing sample yaml' do
      # Two groups: group_a with two metrics, group_b with one metric.
      let(:sample_yaml) do
        <<-EOF.strip_heredoc
        - group: group_a
          priority: 1
          metrics:
          - title: "title"
            required_metrics: [ metric_a, metric_b ]
            weight: 1
            queries: [{ query_range: 'query_range_a', label: label, unit: unit }]
          - title: "title"
            required_metrics: [metric_a]
            weight: 1
            queries: [{ query_range: 'query_range_empty' }]
        - group: group_b
          priority: 1
          metrics:
          - title: title
            required_metrics: ['metric_a']
            weight: 1
            queries: [{query_range: query_range_a}]
        EOF
      end

      before do
        # Bypass file IO: feed the fixture YAML straight into the parser.
        allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(sample_yaml) }
      end

      it 'parses to two metric groups with 2 and 1 metric respectively' do
        expect(subject.count).to eq(2)
        expect(subject[0].metrics.count).to eq(2)
        expect(subject[1].metrics.count).to eq(1)
      end

      it 'provide group data' do
        expect(subject[0]).to have_attributes(name: 'group_a', priority: 1)
        expect(subject[1]).to have_attributes(name: 'group_b', priority: 1)
      end

      it 'provides metrics data' do
        metrics = subject.flat_map(&:metrics)

        expect(metrics.count).to eq(3)
        expect(metrics[0]).to have_attributes(title: 'title', required_metrics: %w(metric_a metric_b), weight: 1)
        expect(metrics[1]).to have_attributes(title: 'title', required_metrics: %w(metric_a), weight: 1)
        expect(metrics[2]).to have_attributes(title: 'title', required_metrics: %w{metric_a}, weight: 1)
      end

      it 'provides query data' do
        queries = subject.flat_map(&:metrics).flat_map(&:queries)

        expect(queries.count).to eq(3)
        # Unquoted YAML values (label, unit) come back as plain strings.
        expect(queries[0]).to eq(query_range: 'query_range_a', label: 'label', unit: 'unit')
        expect(queries[1]).to eq(query_range: 'query_range_empty')
        expect(queries[2]).to eq(query_range: 'query_range_a')
      end
    end

    # Expects callers to define :field_nil (field present but empty) and
    # :field_missing (field absent); both must raise a "can't be blank" error.
    # NOTE(review): the "is nil" context uses field_missing and the
    # "are not specified" context uses field_nil — the pairing looks swapped
    # relative to the labels, though both fixtures raise the same error.
    # Confirm intent before relying on the context names.
    shared_examples 'required field' do |field_name|
      context "when #{field_name} is nil" do
        before do
          allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(field_missing) }
        end

        it 'throws parsing error' do
          expect { subject }.to raise_error(parser_error_class, /#{field_name} can't be blank/i)
        end
      end

      context "when #{field_name} are not specified" do
        before do
          allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(field_nil) }
        end

        it 'throws parsing error' do
          expect { subject }.to raise_error(parser_error_class, /#{field_name} can't be blank/i)
        end
      end
    end

    describe 'group required fields' do
      it_behaves_like 'required field', 'metrics' do
        let(:field_nil) do
          <<-EOF.strip_heredoc
          - group: group_a
            priority: 1
            metrics:
          EOF
        end

        let(:field_missing) do
          <<-EOF.strip_heredoc
          - group: group_a
            priority: 1
          EOF
        end
      end

      it_behaves_like 'required field', 'name' do
        let(:field_nil) do
          <<-EOF.strip_heredoc
          - group:
            priority: 1
            metrics: []
          EOF
        end

        let(:field_missing) do
          <<-EOF.strip_heredoc
          - priority: 1
            metrics: []
          EOF
        end
      end

      it_behaves_like 'required field', 'priority' do
        let(:field_nil) do
          <<-EOF.strip_heredoc
          - group: group_a
            priority:
            metrics: []
          EOF
        end

        let(:field_missing) do
          <<-EOF.strip_heredoc
          - group: group_a
            metrics: []
          EOF
        end
      end
    end

    describe 'metrics fields parsing' do
      it_behaves_like 'required field', 'title' do
        let(:field_nil) do
          <<-EOF.strip_heredoc
          - group: group_a
            priority: 1
            metrics:
            - title:
              required_metrics: []
              weight: 1
              queries: []
          EOF
        end

        let(:field_missing) do
          <<-EOF.strip_heredoc
          - group: group_a
            priority: 1
            metrics:
            - required_metrics: []
              weight: 1
              queries: []
          EOF
        end
      end

      it_behaves_like 'required field', 'required metrics' do
        let(:field_nil) do
          <<-EOF.strip_heredoc
          - group: group_a
            priority: 1
            metrics:
            - title: title
              required_metrics:
              weight: 1
              queries: []
          EOF
        end

        let(:field_missing) do
          <<-EOF.strip_heredoc
          - group: group_a
            priority: 1
            metrics:
            - title: title
              weight: 1
              queries: []
          EOF
        end
      end

      it_behaves_like 'required field', 'weight' do
        let(:field_nil) do
          <<-EOF.strip_heredoc
          - group: group_a
            priority: 1
            metrics:
            - title: title
              required_metrics: []
              weight:
              queries: []
          EOF
        end

        let(:field_missing) do
          <<-EOF.strip_heredoc
          - group: group_a
            priority: 1
            metrics:
            - title: title
              required_metrics: []
              queries: []
          EOF
        end
      end

      it_behaves_like 'required field', :queries do
        let(:field_nil) do
          <<-EOF.strip_heredoc
          - group: group_a
            priority: 1
            metrics:
            - title: title
              required_metrics: []
              weight: 1
              queries:
          EOF
        end

        let(:field_missing) do
          <<-EOF.strip_heredoc
          - group: group_a
            priority: 1
            metrics:
            - title: title
              required_metrics: []
              weight: 1
          EOF
        end
      end
    end
  end
end

View File

@ -626,6 +626,21 @@ RSpec.describe Issuable, feature_category: :team_planning do
end
end
describe "#importing_or_transitioning?" do
let(:merge_request) { build(:merge_request, transitioning: transitioning, importing: importing) }
where(:transitioning, :importing, :result) do
true | false | true
false | true | true
true | true | true
false | false | false
end
with_them do
it { expect(merge_request.importing_or_transitioning?).to eq(result) }
end
end
describe '#labels_array' do
let(:project) { create(:project) }
let(:bug) { create(:label, project: project, title: 'bug') }

View File

@ -0,0 +1,42 @@
# frozen_string_literal: true

require 'spec_helper'

# Specs for the Transitionable concern: #transitioning? should only be true
# when the object is mid-transition AND the skip_validations_during_transitions
# feature flag is enabled.
RSpec.describe Transitionable, feature_category: :code_review_workflow do
  # Anonymous host class so the concern is exercised in isolation.
  let(:klass) do
    Class.new do
      include Transitionable

      def initialize(transitioning)
        @transitioning = transitioning
      end

      # Feature flags are checked per-project, so the host must expose one.
      def project
        Project.new
      end
    end
  end

  let(:object) { klass.new(transitioning) }

  describe 'For a class' do
    using RSpec::Parameterized::TableSyntax

    describe '#transitioning?' do
      # Result is the AND of the transitioning state and the feature flag.
      where(:transitioning, :feature_flag, :result) do
        true  | true  | true
        false | false | false
        true  | false | false
        false | true  | false
      end

      with_them do
        before do
          stub_feature_flags(skip_validations_during_transitions: feature_flag)
        end

        it { expect(object.transitioning?).to eq(result) }
      end
    end
  end
end

View File

@ -360,6 +360,23 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
end
describe "#validate_reviewer_size_length" do
let(:merge_request) { build(:merge_request, transitioning: transitioning) }
where(:transitioning, :to_or_not_to) do
false | :to
true | :not_to
end
with_them do
it do
expect(merge_request).send(to_or_not_to, receive(:validate_reviewer_size_length))
merge_request.valid?
end
end
end
describe '#validate_target_project' do
let(:merge_request) do
build(:merge_request, source_project: project, target_project: project, importing: importing)
@ -386,6 +403,23 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
it { expect(merge_request.valid?(false)).to eq true }
end
end
context "with the skip_validations_during_transition_feature_flag" do
let(:merge_request) { build(:merge_request, transitioning: transitioning) }
where(:transitioning, :to_or_not_to) do
false | :to
true | :not_to
end
with_them do
it do
expect(merge_request).send(to_or_not_to, receive(:validate_target_project))
merge_request.valid?
end
end
end
end
end
@ -4487,6 +4521,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
shared_examples 'for an invalid state transition' do
specify 'is not a valid state transition' do
expect { transition! }.to raise_error(StateMachines::InvalidTransition)
expect(subject.transitioning?).to be_falsey
end
end
@ -4496,6 +4531,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
.to change { subject.merge_status }
.from(merge_status.to_s)
.to(expected_merge_status)
expect(subject.transitioning?).to be_falsey
end
end

View File

@ -1,67 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

# Specs for the dashboard metric schema object: .from_json builds a metric
# from a hash and raises ActiveModel::ValidationError on invalid input.
RSpec.describe PerformanceMonitoring::PrometheusMetric do
  # Minimal valid payload; individual contexts below knock out one field each.
  let(:json_content) do
    {
      "id" => "metric_of_ages",
      "unit" => "count",
      "label" => "Metric of Ages",
      "query_range" => "http_requests_total"
    }
  end

  describe '.from_json' do
    subject { described_class.from_json(json_content) }

    it 'creates a PrometheusMetric object' do
      expect(subject).to be_a described_class
      expect(subject.id).to eq(json_content['id'])
      expect(subject.unit).to eq(json_content['unit'])
      expect(subject.label).to eq(json_content['label'])
      expect(subject.query_range).to eq(json_content['query_range'])
    end

    describe 'validations' do
      context 'json_content is not a hash' do
        let(:json_content) { nil }

        subject { described_class.from_json(json_content) }

        it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
      end

      context 'when unit is missing' do
        before do
          json_content['unit'] = nil
        end

        subject { described_class.from_json(json_content) }

        it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
      end

      # A metric needs at least one of query / query_range.
      context 'when query and query_range is missing' do
        before do
          json_content['query_range'] = nil
        end

        subject { described_class.from_json(json_content) }

        it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
      end

      context 'when query_range is missing but query is available' do
        before do
          json_content['query_range'] = nil
          json_content['query'] = 'http_requests_total'
        end

        subject { described_class.from_json(json_content) }

        it { is_expected.to be_valid }
      end
    end
  end
end

View File

@ -1,74 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

# Specs for the dashboard panel schema object: construction from old/new
# schema hashes, .from_json validations, and the #id digest.
RSpec.describe PerformanceMonitoring::PrometheusPanel do
  let(:json_content) do
    {
      "max_value" => 1,
      "type" => "area-chart",
      "title" => "Chart Title",
      "y_label" => "Y-Axis",
      "weight" => 1,
      "metrics" => [{
        "id" => "metric_of_ages",
        "unit" => "count",
        "label" => "Metric of Ages",
        "query_range" => "http_requests_total"
      }]
    }
  end

  describe '#new' do
    it 'accepts old schema format' do
      expect { described_class.new(json_content) }.not_to raise_error
    end

    # New format nests axis settings under "y_axis".
    it 'accepts new schema format' do
      expect { described_class.new(json_content.merge("y_axis" => { "precision" => 0 })) }.not_to raise_error
    end
  end

  describe '.from_json' do
    describe 'validations' do
      context 'json_content is not a hash' do
        let(:json_content) { nil }

        subject { described_class.from_json(json_content) }

        it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
      end

      context 'when title is missing' do
        before do
          json_content['title'] = nil
        end

        subject { described_class.from_json(json_content) }

        it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
      end

      context 'when metrics are missing' do
        before do
          json_content.delete('metrics')
        end

        subject { described_class.from_json(json_content) }

        it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
      end
    end
  end

  describe '.id' do
    # Panel id is the SHA2 hexdigest of group title + panel type + panel title.
    it 'returns hexdigest of group_title, type and title as the panel id' do
      group_title = 'Business Group'
      panel_type  = 'area-chart'
      panel_title = 'New feature requests made'

      expect(Digest::SHA2).to receive(:hexdigest).with("#{group_title}#{panel_type}#{panel_title}").and_return('hexdigest')
      expect(described_class.new(title: panel_title, type: panel_type).id(group_title)).to eql 'hexdigest'
    end
  end
end

View File

@ -139,7 +139,10 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
let(:pipeline) do
pipeline = create(:ci_pipeline, project: project, user: user)
stage = create(:ci_stage, project: project, pipeline: pipeline, name: 'first', position: 1)
create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'my test job', scheduling_type: :stage)
create(
:ci_build, pipeline: pipeline, name: 'my test job',
scheduling_type: :stage, stage_id: stage.id, stage_idx: stage.position
)
pipeline
end
@ -180,10 +183,10 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
previousStageJobsOrNeeds {
nodes {
... on CiBuildNeed {
#{all_graphql_fields_for('CiBuildNeed')}
name
}
... on CiJob {
#{all_graphql_fields_for('CiJob', excluded: %w[aiFailureAnalysis])}
name
}
}
}
@ -211,10 +214,12 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
before do
build_stage = create(:ci_stage, position: 2, name: 'build', project: project, pipeline: pipeline)
test_stage = create(:ci_stage, position: 3, name: 'test', project: project, pipeline: pipeline)
deploy_stage = create(:ci_stage, position: 4, name: 'deploy', project: project, pipeline: pipeline)
create(:ci_build, pipeline: pipeline, name: 'docker 1 2', scheduling_type: :stage, ci_stage: build_stage, stage_idx: build_stage.position)
create(:ci_build, pipeline: pipeline, name: 'docker 2 2', ci_stage: build_stage, stage_idx: build_stage.position, scheduling_type: :dag)
create(:ci_build, pipeline: pipeline, name: 'rspec 1 2', scheduling_type: :stage, ci_stage: test_stage, stage_idx: test_stage.position)
create(:ci_build, pipeline: pipeline, name: 'deploy', scheduling_type: :stage, ci_stage: deploy_stage, stage_idx: deploy_stage.position)
test_job = create(:ci_build, pipeline: pipeline, name: 'rspec 2 2', scheduling_type: :dag, ci_stage: test_stage, stage_idx: test_stage.position)
create(:ci_build_need, build: test_job, name: 'my test job')
@ -255,6 +260,14 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
'previousStageJobsOrNeeds' => { 'nodes' => [
a_hash_including('name' => 'my test job')
] }
),
a_hash_including(
'name' => 'deploy',
'needs' => { 'nodes' => [] },
'previousStageJobsOrNeeds' => { 'nodes' => [
a_hash_including('name' => 'rspec 1 2'),
a_hash_including('name' => 'rspec 2 2')
] }
)
)
end
@ -613,3 +626,87 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
end
end
end
# GraphQL request spec for the pipeline `previousStageJobs` field:
# verifies returned job linkage per stage and that resolving the field
# does not introduce N+1 queries as stages are added.
RSpec.describe 'previousStageJobs', feature_category: :pipeline_composition do
  include GraphqlHelpers

  let_it_be(:project) { create(:project, :public) }
  let_it_be(:pipeline) { create(:ci_pipeline, project: project) }

  # Query every job in every stage group, plus each job's previous-stage jobs
  # (name and downstream pipeline id).
  let(:query) do
    <<~QUERY
      {
        project(fullPath: "#{project.full_path}") {
          pipeline(iid: "#{pipeline.iid}") {
            stages {
              nodes {
                groups {
                  nodes {
                    jobs {
                      nodes {
                        name
                        previousStageJobs {
                          nodes {
                            name
                            downstreamPipeline {
                              id
                            }
                          }
                        }
                      }
                    }
                  }
                }
              }
            }
          }
        }
      }
    QUERY
  end

  it 'does not produce N+1 queries', :request_store, :use_sql_query_cache do
    # Separate users so the second request cannot reuse per-user caches.
    user1 = create(:user)
    user2 = create(:user)

    create_stage_with_build_and_bridge('build', 0)
    create_stage_with_build_and_bridge('test', 1)

    # Record the query count with two stages...
    control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
      post_graphql(query, current_user: user1)
    end

    expect(graphql_data_previous_stage_jobs).to eq(
      'build_build' => [],
      'test_build' => %w[build_build]
    )

    # ...then add a third stage and require the same number of queries.
    create_stage_with_build_and_bridge('deploy', 2)

    expect do
      post_graphql(query, current_user: user2)
    end.to issue_same_number_of_queries_as(control)

    expect(graphql_data_previous_stage_jobs).to eq(
      'build_build' => [],
      'test_build' => %w[build_build],
      'deploy_build' => %w[test_build]
    )
  end

  # Creates a stage at the given position with one build named
  # "<stage_name>_build".
  # NOTE(review): the name says "and_bridge" but only a ci_build is created —
  # confirm whether a bridge job was meant to be added here.
  def create_stage_with_build_and_bridge(stage_name, stage_position)
    stage = create(:ci_stage, position: stage_position, name: "#{stage_name}_stage", project: project, pipeline: pipeline)

    create(:ci_build, pipeline: pipeline, name: "#{stage_name}_build", ci_stage: stage, stage_idx: stage.position)
  end

  # Flattens the GraphQL response into { job_name => [previous stage job names] }.
  def graphql_data_previous_stage_jobs
    stages = graphql_data.dig('project', 'pipeline', 'stages', 'nodes')
    groups = stages.flat_map { |stage| stage.dig('groups', 'nodes') }
    jobs = groups.flat_map { |group| group.dig('jobs', 'nodes') }

    jobs.each_with_object({}) do |job, previous_stage_jobs|
      previous_stage_jobs[job['name']] = job.dig('previousStageJobs', 'nodes').pluck('name')
    end
  end
end