Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-10-12 15:12:08 +00:00
parent 57a3a42c88
commit 13f15365a3
67 changed files with 703 additions and 306 deletions

View File

@ -101,9 +101,7 @@ export default {
return !loading && !availableProjects.length;
},
selectedItems() {
return sortByProjectName(
this.availableProjects.filter(({ id }) => this.selectedProjectIds.includes(id)),
);
return sortByProjectName(this.selectedProjects);
},
unselectedItems() {
return this.availableProjects.filter(({ id }) => !this.selectedProjectIds.includes(id));

View File

@ -51,6 +51,7 @@ export default {
'features',
'createdBefore',
'createdAfter',
'pagination',
]),
...mapGetters(['pathNavigationData', 'filterParams']),
displayStageEvents() {
@ -99,7 +100,12 @@ export default {
},
},
methods: {
...mapActions(['fetchStageData', 'setSelectedStage', 'setDateRange']),
...mapActions([
'fetchStageData',
'setSelectedStage',
'setDateRange',
'updateStageTablePagination',
]),
onSetDateRange({ startDate, endDate }) {
this.setDateRange({
createdAfter: new Date(startDate),
@ -108,6 +114,7 @@ export default {
},
onSelectStage(stage) {
this.setSelectedStage(stage);
this.updateStageTablePagination({ ...this.pagination, page: 1 });
},
dismissOverviewDialog() {
this.isOverviewDialogDismissed = true;
@ -117,6 +124,9 @@ export default {
const { permissions } = this;
return Boolean(permissions?.[id]);
},
onHandleUpdatePagination(data) {
this.updateStageTablePagination(data);
},
},
dayRangeOptions: [7, 30, 90],
i18n: {
@ -163,8 +173,8 @@ export default {
:empty-state-title="emptyStageTitle"
:empty-state-message="emptyStageText"
:no-data-svg-path="noDataSvgPath"
:pagination="null"
:sortable="false"
:pagination="pagination"
@handleUpdatePagination="onHandleUpdatePagination"
/>
</div>
</template>

View File

@ -194,6 +194,9 @@ export default {
><formatted-stage-count :stage-count="stageCount"
/></gl-badge>
</template>
<template #head(duration)="data">
<span data-testid="vsa-stage-header-duration">{{ data.label }}</span>
</template>
<template #cell(end_event)="{ item }">
<div data-testid="vsa-stage-event">
<div v-if="item.id" data-testid="vsa-stage-content">

View File

@ -6,6 +6,7 @@ import {
getValueStreamStageRecords,
getValueStreamStageCounts,
} from '~/api/analytics_api';
import { normalizeHeaders, parseIntPagination } from '~/lib/utils/common_utils';
import createFlash from '~/flash';
import { __ } from '~/locale';
import { DEFAULT_VALUE_STREAM, I18N_VSA_ERROR_STAGE_MEDIAN } from '../constants';
@ -72,16 +73,21 @@ export const fetchCycleAnalyticsData = ({
});
};
export const fetchStageData = ({ getters: { requestParams, filterParams }, commit }) => {
export const fetchStageData = ({
getters: { requestParams, filterParams, paginationParams },
commit,
}) => {
commit(types.REQUEST_STAGE_DATA);
return getValueStreamStageRecords(requestParams, filterParams)
.then(({ data }) => {
return getValueStreamStageRecords(requestParams, { ...filterParams, ...paginationParams })
.then(({ data, headers }) => {
// when there's a query timeout, the request succeeds but the error is encoded in the response data
if (data?.error) {
commit(types.RECEIVE_STAGE_DATA_ERROR, data.error);
} else {
commit(types.RECEIVE_STAGE_DATA_SUCCESS, data);
const { page = null, nextPage = null } = parseIntPagination(normalizeHeaders(headers));
commit(types.SET_PAGINATION, { ...paginationParams, page, hasNextPage: Boolean(nextPage) });
}
})
.catch(() => commit(types.RECEIVE_STAGE_DATA_ERROR));
@ -176,6 +182,14 @@ export const setDateRange = ({ dispatch, commit }, { createdAfter, createdBefore
return refetchStageData(dispatch);
};
export const updateStageTablePagination = (
{ commit, dispatch, state: { selectedStage } },
paginationParams,
) => {
commit(types.SET_PAGINATION, paginationParams);
return dispatch('fetchStageData', selectedStage.id);
};
export const initializeVsa = ({ commit, dispatch }, initialData = {}) => {
commit(types.INITIALIZE_VSA, initialData);

View File

@ -1,6 +1,7 @@
import dateFormat from 'dateformat';
import { dateFormats } from '~/analytics/shared/constants';
import { filterToQueryObject } from '~/vue_shared/components/filtered_search_bar/filtered_search_utils';
import { PAGINATION_TYPE } from '../constants';
import { transformStagesForPathNavigation, filterStagesByHiddenStatus } from '../utils';
export const pathNavigationData = ({ stages, medians, stageCounts, selectedStage }) => {
@ -21,6 +22,13 @@ export const requestParams = (state) => {
return { requestPath: fullPath, valueStreamId, stageId };
};
export const paginationParams = ({ pagination: { page, sort, direction } }) => ({
pagination: PAGINATION_TYPE,
sort,
direction,
page,
});
const filterBarParams = ({ filters }) => {
const {
authors: { selected: selectedAuthor },

View File

@ -4,6 +4,7 @@ export const SET_LOADING = 'SET_LOADING';
export const SET_SELECTED_VALUE_STREAM = 'SET_SELECTED_VALUE_STREAM';
export const SET_SELECTED_STAGE = 'SET_SELECTED_STAGE';
export const SET_DATE_RANGE = 'SET_DATE_RANGE';
export const SET_PAGINATION = 'SET_PAGINATION';
export const REQUEST_VALUE_STREAMS = 'REQUEST_VALUE_STREAMS';
export const RECEIVE_VALUE_STREAMS_SUCCESS = 'RECEIVE_VALUE_STREAMS_SUCCESS';

View File

@ -1,13 +1,24 @@
import Vue from 'vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { PAGINATION_SORT_FIELD_END_EVENT, PAGINATION_SORT_DIRECTION_DESC } from '../constants';
import { formatMedianValues } from '../utils';
import * as types from './mutation_types';
export default {
[types.INITIALIZE_VSA](state, { endpoints, features, createdBefore, createdAfter }) {
[types.INITIALIZE_VSA](
state,
{ endpoints, features, createdBefore, createdAfter, pagination = {} },
) {
state.endpoints = endpoints;
state.createdBefore = createdBefore;
state.createdAfter = createdAfter;
state.features = features;
Vue.set(state, 'pagination', {
page: pagination.page ?? state.pagination.page,
sort: pagination.sort ?? state.pagination.sort,
direction: pagination.direction ?? state.pagination.direction,
});
},
[types.SET_LOADING](state, loadingState) {
state.isLoading = loadingState;
@ -22,6 +33,14 @@ export default {
state.createdBefore = createdBefore;
state.createdAfter = createdAfter;
},
[types.SET_PAGINATION](state, { page, hasNextPage, sort, direction }) {
Vue.set(state, 'pagination', {
page,
hasNextPage,
sort: sort || PAGINATION_SORT_FIELD_END_EVENT,
direction: direction || PAGINATION_SORT_DIRECTION_DESC,
});
},
[types.REQUEST_VALUE_STREAMS](state) {
state.valueStreams = [];
},

View File

@ -1,3 +1,8 @@
import {
PAGINATION_SORT_FIELD_END_EVENT,
PAGINATION_SORT_DIRECTION_DESC,
} from '~/cycle_analytics/constants';
export default () => ({
id: null,
features: {},
@ -20,4 +25,10 @@ export default () => ({
isLoadingStage: false,
isEmptyStage: false,
permissions: {},
pagination: {
page: null,
hasNextPage: false,
sort: PAGINATION_SORT_FIELD_END_EVENT,
direction: PAGINATION_SORT_DIRECTION_DESC,
},
});

View File

@ -6,6 +6,7 @@ import {
GlBadge,
GlSafeHtmlDirective,
GlTooltipDirective,
GlIntersectionObserver,
} from '@gitlab/ui';
import { sprintf, s__, __ } from '~/locale';
import SmartVirtualList from '~/vue_shared/components/smart_virtual_list.vue';
@ -25,6 +26,7 @@ export default {
GlLoadingIcon,
GlLink,
GlBadge,
GlIntersectionObserver,
SmartVirtualList,
StatusIcon,
Actions,
@ -39,6 +41,7 @@ export default {
collapsedData: null,
fullData: null,
isCollapsed: true,
showFade: false,
};
},
computed: {
@ -117,6 +120,16 @@ export default {
throw e;
});
},
appear(index) {
if (index === this.fullData.length - 1) {
this.showFade = false;
}
},
disappear(index) {
if (index === this.fullData.length - 1) {
this.showFade = true;
}
},
},
EXTENSION_ICON_CLASS,
};
@ -154,7 +167,7 @@ export default {
</div>
<div
v-if="!isCollapsed"
class="mr-widget-grouped-section"
class="mr-widget-grouped-section gl-relative"
data-testid="widget-extension-collapsed-section"
>
<div v-if="isLoadingExpanded" class="report-block-container">
@ -167,16 +180,24 @@ export default {
:size="32"
wtag="ul"
wclass="report-block-list"
class="report-block-container"
class="report-block-container gl-px-5 gl-py-0"
>
<li
v-for="data in fullData"
v-for="(data, index) in fullData"
:key="data.id"
class="gl-display-flex gl-align-items-center"
:class="{
'gl-border-b-solid gl-border-b-1 gl-border-gray-100': index !== fullData.length - 1,
}"
class="gl-display-flex gl-align-items-center gl-py-3 gl-pl-7"
data-testid="extension-list-item"
>
<status-icon v-if="data.icon" :icon-name="data.icon.name" :size="12" />
<div class="gl-mt-2 gl-mb-2 gl-flex-wrap gl-align-self-center gl-display-flex">
<gl-intersection-observer
:options="{ rootMargin: '100px', thresholds: 0.1 }"
class="gl-flex-wrap gl-align-self-center gl-display-flex"
@appear="appear(index)"
@disappear="disappear(index)"
>
<div v-safe-html="data.text" class="gl-mr-4"></div>
<div v-if="data.link">
<gl-link :href="data.link.href">{{ data.link.text }}</gl-link>
@ -184,9 +205,13 @@ export default {
<gl-badge v-if="data.badge" :variant="data.badge.variant || 'info'">
{{ data.badge.text }}
</gl-badge>
</div>
</gl-intersection-observer>
</li>
</smart-virtual-list>
<div
:class="{ show: showFade }"
class="fade mr-extenson-scrim gl-absolute gl-left-0 gl-bottom-0 gl-w-full gl-h-7"
></div>
</div>
</section>
</template>

View File

@ -45,8 +45,9 @@ export default {
:class="[
$options.EXTENSION_ICON_CLASS[iconName],
{ 'mr-widget-extension-icon': !isLoading && size === 16 },
{ 'gl-p-2': isLoading || size === 16 },
]"
class="align-self-center gl-rounded-full gl-mr-3 gl-relative gl-p-2"
class="align-self-center gl-rounded-full gl-mr-3 gl-relative"
>
<gl-loading-icon v-if="isLoading" size="md" inline class="gl-display-block" />
<gl-icon

View File

@ -103,8 +103,8 @@ export default {
GlDropdownItem,
GlFormCheckbox,
GlSkeletonLoader,
MergeTrainHelperText: () =>
import('ee_component/vue_merge_request_widget/components/merge_train_helper_text.vue'),
MergeTrainHelperIcon: () =>
import('ee_component/vue_merge_request_widget/components/merge_train_helper_icon.vue'),
MergeImmediatelyConfirmationDialog: () =>
import(
'ee_component/vue_merge_request_widget/components/merge_immediately_confirmation_dialog.vue'
@ -238,7 +238,7 @@ export default {
return CONFIRM;
},
iconClass() {
if (this.shouldRenderMergeTrainHelperText && !this.mr.preventMerge) {
if (this.shouldRenderMergeTrainHelperIcon && !this.mr.preventMerge) {
return PIPELINE_RUNNING_STATE;
}
@ -504,7 +504,7 @@ export default {
</div>
</div>
<template v-else>
<div class="mr-widget-body media" :class="{ 'gl-pb-3': shouldRenderMergeTrainHelperText }">
<div class="mr-widget-body media">
<status-icon :status="iconClass" />
<div class="media-body">
<div class="mr-widget-body-controls gl-display-flex gl-align-items-center">
@ -575,6 +575,13 @@ export default {
:is-disabled="isSquashReadOnly"
class="gl-mx-3"
/>
<merge-train-helper-icon
v-if="shouldRenderMergeTrainHelperIcon"
:merge-train-when-pipeline-succeeds-docs-path="
mr.mergeTrainWhenPipelineSucceedsDocsPath
"
/>
</div>
<template v-else>
<div class="bold js-resolve-mr-widget-items-message gl-ml-3">
@ -605,13 +612,6 @@ export default {
</div>
</div>
</div>
<merge-train-helper-text
v-if="shouldRenderMergeTrainHelperText"
:pipeline-id="pipelineId"
:pipeline-link="pipeline.path"
:merge-train-length="stateData.mergeTrainsCount"
:merge-train-when-pipeline-succeeds-docs-path="mr.mergeTrainWhenPipelineSucceedsDocsPath"
/>
<template v-if="shouldShowMergeControls">
<div
v-if="!shouldShowMergeEdit"

View File

@ -57,10 +57,10 @@ export default {
name: issue.state === 'closed' ? EXTENSION_ICONS.error : EXTENSION_ICONS.success,
},
// Badges get rendered next to the text on each row
badge: issue.state === 'closed' && {
text: 'Closed', // Required: Text to be used inside of the badge
// variant: 'info', // Optional: The variant of the badge, maps to GitLab UI variants
},
// badge: issue.state === 'closed' && {
// text: 'Closed', // Required: Text to be used inside of the badge
// // variant: 'info', // Optional: The variant of the badge, maps to GitLab UI variants
// },
// Each row can have its own link that will take the user elsewhere
// link: {
// href: 'https://google.com', // Required: href for the link

View File

@ -32,7 +32,7 @@ export default {
isMergeImmediatelyDangerous() {
return false;
},
shouldRenderMergeTrainHelperText() {
shouldRenderMergeTrainHelperIcon() {
return false;
},
pipelineId() {

View File

@ -109,3 +109,12 @@
}
}
}
// TODO: Move to GitLab UI
.mr-extenson-scrim {
background: linear-gradient(to bottom, rgba($gray-light, 0), rgba($gray-light, 1));
.gl-dark & {
background: linear-gradient(to bottom, rgba(#333, 0), rgba(#333, 1));
}
}

View File

@ -3,6 +3,8 @@
module Projects
module Alerting
class NotificationsController < Projects::ApplicationController
include ActionController::HttpAuthentication::Basic
respond_to :json
skip_before_action :verify_authenticity_token
@ -27,9 +29,19 @@ module Projects
end
def extract_alert_manager_token(request)
extract_bearer_token(request) || extract_basic_auth_token(request)
end
def extract_bearer_token(request)
Doorkeeper::OAuth::Token.from_bearer_authorization(request)
end
def extract_basic_auth_token(request)
_username, token = user_name_and_password(request)
token
end
def notify_service
notify_service_class.new(project, notification_payload)
end

View File

@ -47,7 +47,8 @@ module Resolvers
alert_management_alert: [:alert_management_alert],
labels: [:labels],
assignees: [:assignees],
timelogs: [:timelogs]
timelogs: [:timelogs],
customer_relations_contacts: { customer_relations_contacts: [:group] }
}
end

View File

@ -136,6 +136,9 @@ module Types
field :project_id, GraphQL::Types::Int, null: false, method: :project_id,
description: 'ID of the issue project.'
field :customer_relations_contacts, Types::CustomerRelations::ContactType.connection_type, null: true,
description: 'Customer relations contacts of the issue.'
def author
Gitlab::Graphql::Loaders::BatchModelLoader.new(User, object.author_id).find
end

View File

@ -46,7 +46,6 @@ class Deployment < ApplicationRecord
scope :stoppable, -> { where.not(on_stop: nil).where.not(deployable_id: nil).success }
scope :active, -> { where(status: %i[created running]) }
scope :older_than, -> (deployment) { where('deployments.id < ?', deployment.id) }
scope :with_deployable, -> { joins('INNER JOIN ci_builds ON ci_builds.id = deployments.deployable_id').preload(:deployable) }
scope :with_api_entity_associations, -> { preload({ deployable: { runner: [], tags: [], user: [], job_artifacts_archive: [] } }) }
scope :finished_after, ->(date) { where('finished_at >= ?', date) }
@ -148,6 +147,16 @@ class Deployment < ApplicationRecord
success.find_by!(iid: iid)
end
# It should be used with caution especially on chaining.
# Fetching any unbounded or large intermediate dataset could lead to loading too many IDs into memory.
# See: https://docs.gitlab.com/ee/development/database/multiple_databases.html#use-disable_joins-for-has_one-or-has_many-through-relations
# For safety we default limit to fetch not more than 1000 records.
def self.builds(limit = 1000)
deployable_ids = where.not(deployable_id: nil).limit(limit).pluck(:deployable_id)
Ci::Build.where(id: deployable_ids)
end
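# Illustrative usage only (not part of this change): the helper can be chained
# from a narrower relation, optionally with a tighter cap, e.g.
#   environment.active_deployments.builds(100).find_each(&:cancel)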
class << self
##
# FastDestroyAll concerns

View File

@ -260,10 +260,9 @@ class Environment < ApplicationRecord
end
def cancel_deployment_jobs!
jobs = active_deployments.with_deployable
jobs.each do |deployment|
Gitlab::OptimisticLocking.retry_lock(deployment.deployable, name: 'environment_cancel_deployment_jobs') do |deployable|
deployable.cancel! if deployable&.cancelable?
active_deployments.builds.each do |build|
Gitlab::OptimisticLocking.retry_lock(build, name: 'environment_cancel_deployment_jobs') do |build|
build.cancel! if build&.cancelable?
end
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e, environment_id: id, deployment_id: deployment.id)

View File

@ -20,8 +20,6 @@ class ProductAnalyticsEvent < ApplicationRecord
where('collector_tstamp BETWEEN ? AND ? ', today - duration + 1, today + 1)
}
scope :by_category_and_action, ->(category, action) { where(se_category: category, se_action: action) }
def self.count_by_graph(graph, days)
group(graph).timerange(days).count
end

View File

@ -11,23 +11,23 @@ module Deployments
def execute
return unless @deployment&.running?
older_deployments.find_each do |older_deployment|
Gitlab::OptimisticLocking.retry_lock(older_deployment.deployable, name: 'older_deployments_drop') do |deployable|
deployable.drop(:forward_deployment_failure)
older_deployments_builds.each do |build|
Gitlab::OptimisticLocking.retry_lock(build, name: 'older_deployments_drop') do |build|
build.drop(:forward_deployment_failure)
end
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e, subject_id: @deployment.id, deployment_id: older_deployment.id)
Gitlab::ErrorTracking.track_exception(e, subject_id: @deployment.id, build_id: build.id)
end
end
private
def older_deployments
def older_deployments_builds
@deployment
.environment
.active_deployments
.older_than(@deployment)
.with_deployable
.builds
end
end
end

View File

@ -0,0 +1,8 @@
---
name: new_customersdot_staging_url
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/71827
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/342513
milestone: '14.4'
type: development
group: group::fulfillment
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: redirect_to_latest_template_jobs_browser_performance_testing
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63144
rollout_issue_url:
milestone: '14.0'
type: development
group: group::pipeline authoring
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: redirect_to_latest_template_security_api_fuzzing
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63144
rollout_issue_url:
milestone: '14.0'
type: development
group: group::pipeline authoring
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: redirect_to_latest_template_security_dast
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63144
rollout_issue_url:
milestone: '14.0'
type: development
group: group::pipeline authoring
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: redirect_to_latest_template_terraform
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63144
rollout_issue_url:
milestone: '14.0'
type: development
group: group::pipeline authoring
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: redirect_to_latest_template_verify_browser_performance
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63144
rollout_issue_url:
milestone: '14.0'
type: development
group: group::pipeline authoring
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: search_blobs_language_aggregation
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/71937
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/342621
milestone: '14.4'
type: development
group: group::global search
default_enabled: false

View File

@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/342024
milestone: '14.4'
type: development
group: group::optimize
default_enabled: false
default_enabled: true

View File

@ -7,7 +7,7 @@ product_stage: growth
product_group: group::product intelligence
product_category: collection
value_type: number
status: active
status: deprecated
milestone: "14.3"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70485
time_frame: 28d

View File

@ -4715,13 +4715,13 @@ Input type: `VulnerabilityCreateInput`
| <a id="mutationvulnerabilitycreatedismissedat"></a>`dismissedAt` | [`Time`](#time) | Timestamp of when the vulnerability state changed to dismissed (defaults to creation time if status is `dismissed`). |
| <a id="mutationvulnerabilitycreateidentifiers"></a>`identifiers` | [`[VulnerabilityIdentifierInput!]!`](#vulnerabilityidentifierinput) | Array of CVE or CWE identifiers for the vulnerability. |
| <a id="mutationvulnerabilitycreatemessage"></a>`message` | [`String`](#string) | Additional information about the vulnerability. |
| <a id="mutationvulnerabilitycreatename"></a>`name` | [`String!`](#string) | Name of the vulnerability. |
| <a id="mutationvulnerabilitycreateproject"></a>`project` | [`ProjectID!`](#projectid) | ID of the project to attach the vulnerability to. |
| <a id="mutationvulnerabilitycreateresolvedat"></a>`resolvedAt` | [`Time`](#time) | Timestamp of when the vulnerability state changed to resolved (defaults to creation time if status is `resolved`). |
| <a id="mutationvulnerabilitycreatescannername"></a>`scannerName` | [`String!`](#string) | Name of the security scanner used to discover the vulnerability. |
| <a id="mutationvulnerabilitycreatescanner"></a>`scanner` | [`VulnerabilityScannerInput!`](#vulnerabilityscannerinput) | Information about the scanner used to discover the vulnerability. |
| <a id="mutationvulnerabilitycreateseverity"></a>`severity` | [`VulnerabilitySeverity`](#vulnerabilityseverity) | Severity of the vulnerability (defaults to `unknown`). |
| <a id="mutationvulnerabilitycreatesolution"></a>`solution` | [`String`](#string) | How to fix this vulnerability. |
| <a id="mutationvulnerabilitycreatestate"></a>`state` | [`VulnerabilityState`](#vulnerabilitystate) | State of the vulnerability (defaults to `detected`). |
| <a id="mutationvulnerabilitycreatetitle"></a>`title` | [`String!`](#string) | Title of the vulnerability. |
#### Fields
@ -9816,6 +9816,7 @@ Relationship between an epic and an issue.
| <a id="epicissueconfidential"></a>`confidential` | [`Boolean!`](#boolean) | Indicates the issue is confidential. |
| <a id="epicissuecreatenoteemail"></a>`createNoteEmail` | [`String`](#string) | User specific email address for the issue. |
| <a id="epicissuecreatedat"></a>`createdAt` | [`Time!`](#time) | Timestamp of when the issue was created. |
| <a id="epicissuecustomerrelationscontacts"></a>`customerRelationsContacts` | [`CustomerRelationsContactConnection`](#customerrelationscontactconnection) | Customer relations contacts of the issue. (see [Connections](#connections)) |
| <a id="epicissuedescription"></a>`description` | [`String`](#string) | Description of the issue. |
| <a id="epicissuedescriptionhtml"></a>`descriptionHtml` | [`String`](#string) | The GitLab Flavored Markdown rendering of `description`. |
| <a id="epicissuedesigncollection"></a>`designCollection` | [`DesignCollection`](#designcollection) | Collection of design images associated with this issue. |
@ -10979,6 +10980,7 @@ Returns [`VulnerabilitySeveritiesCount`](#vulnerabilityseveritiescount).
| <a id="issueconfidential"></a>`confidential` | [`Boolean!`](#boolean) | Indicates the issue is confidential. |
| <a id="issuecreatenoteemail"></a>`createNoteEmail` | [`String`](#string) | User specific email address for the issue. |
| <a id="issuecreatedat"></a>`createdAt` | [`Time!`](#time) | Timestamp of when the issue was created. |
| <a id="issuecustomerrelationscontacts"></a>`customerRelationsContacts` | [`CustomerRelationsContactConnection`](#customerrelationscontactconnection) | Customer relations contacts of the issue. (see [Connections](#connections)) |
| <a id="issuedescription"></a>`description` | [`String`](#string) | Description of the issue. |
| <a id="issuedescriptionhtml"></a>`descriptionHtml` | [`String`](#string) | The GitLab Flavored Markdown rendering of `description`. |
| <a id="issuedesigncollection"></a>`designCollection` | [`DesignCollection`](#designcollection) | Collection of design images associated with this issue. |
@ -18311,3 +18313,23 @@ A time-frame defined as a closed inclusive range of two dates.
| <a id="vulnerabilityidentifierinputexternaltype"></a>`externalType` | [`String`](#string) | External type of the vulnerability identifier. |
| <a id="vulnerabilityidentifierinputname"></a>`name` | [`String!`](#string) | Name of the vulnerability identifier. |
| <a id="vulnerabilityidentifierinputurl"></a>`url` | [`String!`](#string) | URL of the vulnerability identifier. |
### `VulnerabilityScannerInput`
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="vulnerabilityscannerinputid"></a>`id` | [`String!`](#string) | Unique ID that identifies the scanner. |
| <a id="vulnerabilityscannerinputname"></a>`name` | [`String!`](#string) | Human readable value that identifies the analyzer, not required to be unique. |
| <a id="vulnerabilityscannerinputurl"></a>`url` | [`String!`](#string) | Link to more information about the analyzer. |
| <a id="vulnerabilityscannerinputvendor"></a>`vendor` | [`VulnerabilityScannerVendorInput`](#vulnerabilityscannervendorinput) | Information about vendor/maintainer of the scanner. |
| <a id="vulnerabilityscannerinputversion"></a>`version` | [`String!`](#string) | Version of the scanner. |
### `VulnerabilityScannerVendorInput`
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="vulnerabilityscannervendorinputname"></a>`name` | [`String!`](#string) | Name of the vendor/maintainer. |

View File

@ -794,7 +794,7 @@ job:
Scripts you specify in `after_script` execute in a new shell, separate from any
`before_script` or `script` commands. As a result, they:
- Have a current working directory set back to the default.
- Have the current working directory set back to the default (according to the [variables which define how the runner processes Git requests](#configure-runner-behavior-with-variables)).
- Don't have access to changes done by commands defined in the `before_script` or `script`,
including:
- Command aliases and variables exported in `script` scripts.
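For illustration, a minimal sketch of a job showing both effects (the `BUILD_DIR` variable and the job name are hypothetical): the value exported in `script` is not visible in `after_script`, and the working directory is back to the default:

```yaml
job:
  script:
    - export BUILD_DIR="$CI_PROJECT_DIR/build"   # exported only for this shell
    - mkdir -p "$BUILD_DIR"
  after_script:
    - echo "BUILD_DIR is '$BUILD_DIR'"           # prints an empty value: exports from `script` are not inherited
    - pwd                                        # back at the default working directory
```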

View File

@ -325,8 +325,14 @@ projects on `gitlab.com`:
After you're confident the latest template can be moved to stable:
1. Update the stable template with the content of the latest version.
1. Remove the migration template from `Gitlab::Template::GitlabCiYmlTemplate::TEMPLATES_WITH_LATEST_VERSION` const.
1. Remove the corresponding feature flag.
NOTE:
Feature flags are enabled by default in RSpec, so all tests are performed
against the latest templates. You should also test the stable templates
with `stub_feature_flags(redirect_to_latest_template_<name>: false)`.
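For example, a spec along these lines (the template and flag names are illustrative) exercises the stable template with the redirect disabled:

```ruby
require 'spec_helper'

RSpec.describe 'stable CI template' do
  before do
    # Flags default to enabled in RSpec, so disable the redirect to hit the stable template.
    stub_feature_flags(redirect_to_latest_template_jobs_build: false)
  end

  it 'keeps the default stages' do
    template = Gitlab::Template::GitlabCiYmlTemplate.find('Jobs/Build')
    config = Gitlab::Ci::Config.new(template.content)

    expect(config.stages).to include(*Gitlab::Ci::Config::Entry::Stages.default)
  end
end
```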
### Further reading
There is an [open issue](https://gitlab.com/gitlab-org/gitlab/-/issues/17716) about

View File

@ -255,14 +255,14 @@ requirements.
1. The change is tested in a review app where possible and if appropriate.
1. The new feature does not degrade the user experience of the product.
1. The change is evaluated to [limit the impact of far-reaching work](https://about.gitlab.com/handbook/engineering/development/#reducing-the-impact-of-far-reaching-work).
1. An agreed-upon rollout plan.
1. An agreed-upon [rollout plan](https://about.gitlab.com/handbook/engineering/development/processes/rollout-plans).
1. Merged by a project maintainer.
### Production use
1. Confirmed to be working in staging before implementing the change in production, where possible.
1. Confirmed to be working in the production with no new [Sentry](https://about.gitlab.com/handbook/engineering/#sentry) errors after the contribution is deployed.
1. Confirmed that the rollout plan has been completed.
1. Confirmed that the [rollout plan](https://about.gitlab.com/handbook/engineering/development/processes/rollout-plans) has been completed.
1. If there is a performance risk in the change, I have analyzed the performance of the system before and after the change.
1. *If the merge request uses feature flags, per-project or per-group enablement, and a staged rollout:*
- Confirmed to be working on GitLab projects.

View File

@ -942,7 +942,6 @@ Aggregated metrics collected in `7d` and `28d` time frames are added into Servic
:packages => 155,
:personal_snippets => 2106,
:project_snippets => 407,
:promoted_issues => 719,
:aggregated_metrics => {
:example_metrics_union => 7,
:example_metrics_intersection => 2

View File

@ -125,17 +125,7 @@ NOTE:
Ensure your requests are smaller than the
[payload application limits](../../administration/instance_limits.md#generic-alert-json-payloads).
Example request:
```shell
curl --request POST \
--data '{"title": "Incident title"}' \
--header "Authorization: Bearer <authorization_key>" \
--header "Content-Type: application/json" \
<url>
```
The `<authorization_key>` and `<url>` values can be found when configuring an alert integration.
### Example request body
Example payload:
@ -157,6 +147,55 @@ Example payload:
}
```
## Authorization
The following authorization methods are accepted:
- Bearer authorization header
- Basic authentication
The `<authorization_key>` and `<url>` values can be found when configuring an alert integration.
### Bearer authorization header
The authorization key can be used as the Bearer token:
```shell
curl --request POST \
--data '{"title": "Incident title"}' \
--header "Authorization: Bearer <authorization_key>" \
--header "Content-Type: application/json" \
<url>
```
### Basic authentication
The authorization key can be used as the `password`. The `username` is left blank:
- username: <blank>
- password: <authorization_key>
```shell
curl --request POST \
--data '{"title": "Incident title"}' \
--header "Authorization: Basic <base_64_encoded_credentials>" \
--header "Content-Type: application/json" \
<url>
```
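As a quick sketch (using the same placeholder key as above), the Base64 value is simply the `username:password` pair with a blank username:

```shell
# Basic credentials are "<username>:<password>" encoded with Base64; the username is blank here.
echo -n ":<authorization_key>" | base64
```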
Basic authentication can also be used with credentials directly in the URL:
```shell
curl --request POST \
--data '{"title": "Incident title"}' \
--header "Content-Type: application/json" \
<username:password@url>
```
WARNING:
Using your authorization key in the URL is insecure, as it's visible in server logs. We recommend
using one of the above header options if your tooling supports it.
## Triggering test alerts
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/3066) in GitLab 13.2.

Binary file not shown (new image added, 108 KiB)

View File

@ -68,6 +68,34 @@ To filter analytics results based on a date range,
select different **From** and **To** days
from the date picker (default: last 30 days).
### Stage table
> Sorting the stage table [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/335974) in GitLab 14.4.
![Value Stream Analytics Stage table](img/project_vsa_stage_table_v14_4.png "Project VSA stage table")
The stage table shows a list of related workflow items for the selected stage. This can include:
- CI/CD jobs
- Issues
- Merge requests
- Pipelines
A little badge next to the workflow items table header shows the number of workflow items that
completed the selected stage.
The stage table also includes the **Time** column, which shows how long it takes each item to pass
through the selected value stream stage.
To sort the stage table by a table column, select the table header.
You can sort in ascending or descending order. To find items that spent the most time in a stage,
potentially causing bottlenecks in your value stream, sort the table by the **Time** column.
From there, select individual items to drill in and investigate how delays are happening.
To see which items most recently exited the stage, sort by the work item column on the left.
The table displays 20 items per page. If there are more than 20 items, you can use the
**Prev** and **Next** buttons to navigate through the pages.
## How Time metrics are measured
The **Time** metrics near the top of the page are measured as follows:

View File

@ -284,9 +284,9 @@ To sort the stage table by a table column, select the table header.
You can sort in ascending or descending order. To find items that spent the most time in a stage,
potentially causing bottlenecks in your value stream, sort the table by the **Time** column.
From there, select individual items to drill in and investigate how delays are happening.
To see which items the stage most recently, sort by the work item column on the left.
To see which items most recently exited the stage, sort by the work item column on the left.
The table displays up to 20 items at a time. If there are more than 20 items, you can use the
The table displays 20 items per page. If there are more than 20 items, you can use the
**Prev** and **Next** buttons to navigate through the pages.
### Creating a value stream

View File

@ -225,6 +225,53 @@ included_attributes:
- :updated_at
- :start_date
- :state
protected_branches:
- :project_id
- :name
- :created_at
- :updated_at
- :code_owner_approval_required
- :allow_force_push
protected_tags:
- :project_id
- :name
- :created_at
- :updated_at
create_access_levels:
- :access_level
- :created_at
- :updated_at
- :user_id
- :group_id
merge_access_levels:
- :access_level
- :created_at
- :updated_at
- :user_id
- :group_id
push_access_levels:
- :access_level
- :created_at
- :updated_at
- :user_id
- :group_id
releases:
- :tag
- :description
- :project_id
- :author_id
- :created_at
- :updated_at
- :name
- :sha
- :released_at
links:
- :url
- :name
- :created_at
- :updated_at
- :filepath
- :link_type
# Do not include the following attributes for the models specified.
excluded_attributes:
@ -593,3 +640,20 @@ ee:
- :reject_unsigned_commits
- :commit_committer_check
- :regexp_uses_re2
unprotect_access_levels:
- :access_level
- :user_id
- :group_id
deploy_access_levels:
- :created_at
- :updated_at
- :access_level
- :user_id
- :group_id
protected_environments:
- :project_id
- :group_id
- :name
- :created_at
- :updated_at

View File

@ -3,7 +3,15 @@
module Gitlab
module SubscriptionPortal
def self.default_subscriptions_url
::Gitlab.dev_or_test_env? ? 'https://customers.stg.gitlab.com' : 'https://customers.gitlab.com'
if ::Gitlab.dev_or_test_env?
if Feature.enabled?(:new_customersdot_staging_url, default_enabled: :yaml)
'https://customers.staging.gitlab.com'
else
'https://customers.stg.gitlab.com'
end
else
'https://customers.gitlab.com'
end
end
def self.subscriptions_url

View File

@ -6,11 +6,7 @@ module Gitlab
BASE_EXCLUDED_PATTERNS = [%r{\.latest\.}].freeze
TEMPLATES_WITH_LATEST_VERSION = {
'Jobs/Browser-Performance-Testing' => true,
'Jobs/Build' => true,
'Security/API-Fuzzing' => true,
'Security/DAST' => true,
'Terraform' => true
'Jobs/Build' => true
}.freeze
def description

View File

@ -203,19 +203,6 @@ module Gitlab
}
end
def snowplow_event_counts(time_period)
return {} unless report_snowplow_events?
{
promoted_issues: count(
self_monitoring_project
.product_analytics_events
.by_category_and_action('epics', 'promote')
.where(time_period)
)
}
end
def system_usage_data_monthly
{
counts_monthly: {
@ -228,10 +215,9 @@ module Gitlab
packages: count(::Packages::Package.where(monthly_time_range_db_params)),
personal_snippets: count(PersonalSnippet.where(monthly_time_range_db_params)),
project_snippets: count(ProjectSnippet.where(monthly_time_range_db_params)),
projects_with_alerts_created: distinct_count(::AlertManagement::Alert.where(monthly_time_range_db_params), :project_id)
}.merge(
snowplow_event_counts(monthly_time_range_db_params(column: :collector_tstamp))
).tap do |data|
projects_with_alerts_created: distinct_count(::AlertManagement::Alert.where(monthly_time_range_db_params), :project_id),
promoted_issues: DEPRECATED_VALUE
}.tap do |data|
data[:snippets] = add(data[:personal_snippets], data[:project_snippets])
end
}
@ -786,10 +772,6 @@ module Gitlab
}
end
def report_snowplow_events?
self_monitoring_project && Feature.enabled?(:product_analytics_tracking, type: :ops)
end
def distinct_count_service_desk_enabled_projects(time_period)
project_creator_id_start = minimum_id(User)
project_creator_id_finish = maximum_id(User)
@ -850,10 +832,6 @@ module Gitlab
count(::Issue.with_prometheus_alert_events, start: minimum_id(Issue), finish: maximum_id(Issue))
end
def self_monitoring_project
Gitlab::CurrentSettings.self_monitoring_project
end
def clear_memoized
CE_MEMOIZED_VALUES.each { |v| clear_memoization(v) }
end

View File

@ -40607,6 +40607,9 @@ msgstr ""
msgid "mrWidget|%{prefixToLinkStart}No pipeline%{prefixToLinkEnd} %{addPipelineLinkStart}Add the .gitlab-ci.yml file%{addPipelineLinkEnd} to create one."
msgstr ""
msgid "mrWidget|A merge train is a queued list of merge requests waiting to be merged into the target branch. The changes in each merge request are combined with the changes in earlier merge requests and tested before merge."
msgstr ""
msgid "mrWidget|A new merge train has started and this merge request is the first of the queue."
msgstr ""
@ -40702,6 +40705,9 @@ msgstr ""
msgid "mrWidget|Jump to first unresolved thread"
msgstr ""
msgid "mrWidget|Learn more"
msgstr ""
msgid "mrWidget|Loading deployment statistics"
msgstr ""
@ -40851,12 +40857,6 @@ msgstr ""
msgid "mrWidget|There are merge conflicts"
msgstr ""
msgid "mrWidget|This action will add the merge request to the merge train when pipeline %{pipelineLink} succeeds."
msgstr ""
msgid "mrWidget|This action will start a merge train when pipeline %{pipelineLink} succeeds."
msgstr ""
msgid "mrWidget|This merge request failed to be merged automatically"
msgstr ""
@ -40872,6 +40872,9 @@ msgstr ""
msgid "mrWidget|Use %{linkStart}CI pipelines to test your code%{linkEnd} by simply adding a GitLab CI configuration file to your project. It only takes a minute to make your code more secure and robust."
msgstr ""
msgid "mrWidget|What is a merge train?"
msgstr ""
msgid "mrWidget|You can merge after removing denied licenses"
msgstr ""

View File

@ -143,7 +143,10 @@ module QA
member.remove_via_api!
end
it 'adds members for imported group' do
it(
'adds members for imported group',
testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/quality/test_cases/2310'
) do
expect { imported_group.import_status }.to eventually_eq('finished').within(import_wait_duration)
imported_member = imported_group.reload!.members.find { |usr| usr.username == member.username }

View File

@ -32,7 +32,6 @@ module QA
influxdb_token,
bucket: 'e2e-test-stats',
org: 'gitlab-qa',
use_ssl: false,
precision: InfluxDB2::WritePrecision::NANOSECOND
)
end

View File

@ -25,7 +25,6 @@ describe QA::Support::Formatters::TestStatsFormatter do
{
bucket: 'e2e-test-stats',
org: 'gitlab-qa',
use_ssl: false,
precision: InfluxDB2::WritePrecision::NANOSECOND
}
end

View File

@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Projects::Alerting::NotificationsController do
include HttpBasicAuthHelpers
let_it_be(:project) { create(:project) }
let_it_be(:environment) { create(:environment, project: project) }
@ -53,86 +55,96 @@ RSpec.describe Projects::Alerting::NotificationsController do
end
end
context 'bearer token' do
context 'when set' do
context 'when extractable' do
before do
request.headers['HTTP_AUTHORIZATION'] = 'Bearer some token'
shared_examples 'a working token' do
it 'extracts token' do
expect(notify_service).to receive(:execute).with('some token', nil)
make_request
end
context 'with a corresponding integration' do
context 'with integration parameters specified' do
let_it_be_with_reload(:integration) { create(:alert_management_http_integration, project: project) }
let(:params) { project_params(endpoint_identifier: integration.endpoint_identifier, name: integration.name) }
context 'the integration is active' do
it 'extracts and finds the integration' do
expect(notify_service).to receive(:execute).with('some token', integration)
make_request
end
end
it 'extracts bearer token' do
expect(notify_service).to receive(:execute).with('some token', nil)
make_request
end
context 'with a corresponding integration' do
context 'with integration parameters specified' do
let_it_be_with_reload(:integration) { create(:alert_management_http_integration, project: project) }
let(:params) { project_params(endpoint_identifier: integration.endpoint_identifier, name: integration.name) }
context 'the integration is active' do
it 'extracts and finds the integration' do
expect(notify_service).to receive(:execute).with('some token', integration)
make_request
end
end
context 'when the integration is inactive' do
before do
integration.update!(active: false)
end
it 'does not find an integration' do
expect(notify_service).to receive(:execute).with('some token', nil)
make_request
end
end
context 'when the integration is inactive' do
before do
integration.update!(active: false)
end
context 'without integration parameters specified' do
let_it_be(:integration) { create(:alert_management_http_integration, :legacy, project: project) }
it 'does not find an integration' do
expect(notify_service).to receive(:execute).with('some token', nil)
it 'extracts and finds the legacy integration' do
expect(notify_service).to receive(:execute).with('some token', integration)
make_request
end
make_request
end
end
end
context 'when inextractable' do
it 'passes nil for a non-bearer token' do
request.headers['HTTP_AUTHORIZATION'] = 'some token'
context 'without integration parameters specified' do
let_it_be(:integration) { create(:alert_management_http_integration, :legacy, project: project) }
expect(notify_service).to receive(:execute).with(nil, nil)
it 'extracts and finds the legacy integration' do
expect(notify_service).to receive(:execute).with('some token', integration)
make_request
end
end
end
end
context 'when missing' do
it 'passes nil' do
expect(notify_service).to receive(:execute).with(nil, nil)
make_request
context 'with bearer token' do
context 'when set' do
before do
request.headers.merge(build_token_auth_header('some token'))
end
it_behaves_like 'a working token'
end
end
context 'with basic auth token' do
before do
request.headers.merge basic_auth_header(nil, 'some token')
end
it_behaves_like 'a working token'
end
context 'when inextractable token' do
it 'passes nil for a non-bearer token' do
request.headers['HTTP_AUTHORIZATION'] = 'some token'
expect(notify_service).to receive(:execute).with(nil, nil)
make_request
end
end
context 'when missing token' do
it 'passes nil' do
expect(notify_service).to receive(:execute).with(nil, nil)
make_request
end
end
end
context 'generic alert payload' do
context 'with generic alert payload' do
it_behaves_like 'process alert payload', Projects::Alerting::NotifyService do
let(:payload) { { title: 'Alert title' } }
end
end
context 'Prometheus alert payload' do
context 'with Prometheus alert payload' do
include PrometheusHelpers
it_behaves_like 'process alert payload', Projects::Prometheus::Alerts::NotifyService do

View File

@ -7,10 +7,13 @@ RSpec.describe 'Value Stream Analytics', :js do
let_it_be(:guest) { create(:user) }
let_it_be(:stage_table_selector) { '[data-testid="vsa-stage-table"]' }
let_it_be(:stage_table_event_selector) { '[data-testid="vsa-stage-event"]' }
let_it_be(:stage_table_event_title_selector) { '[data-testid="vsa-stage-event-title"]' }
let_it_be(:stage_table_pagination_selector) { '[data-testid="vsa-stage-pagination"]' }
let_it_be(:stage_table_duration_column_header_selector) { '[data-testid="vsa-stage-header-duration"]' }
let_it_be(:metrics_selector) { "[data-testid='vsa-time-metrics']" }
let_it_be(:metric_value_selector) { "[data-testid='displayValue']" }
let(:stage_table) { page.find(stage_table_selector) }
let(:stage_table) { find(stage_table_selector) }
let(:project) { create(:project, :repository) }
let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
let(:milestone) { create(:milestone, project: project) }
@ -53,6 +56,7 @@ RSpec.describe 'Value Stream Analytics', :js do
# So setting the date range to be the last 2 days should skip past the existing data
from = 2.days.ago.strftime("%Y-%m-%d")
to = 1.day.ago.strftime("%Y-%m-%d")
max_items_per_page = 20
around do |example|
travel_to(5.days.ago) { example.run }
@ -60,9 +64,8 @@ RSpec.describe 'Value Stream Analytics', :js do
before do
project.add_maintainer(user)
create_list(:issue, 2, project: project, created_at: 2.weeks.ago, milestone: milestone)
create_cycle(user, project, issue, mr, milestone, pipeline)
create_list(:issue, max_items_per_page, project: project, created_at: 2.weeks.ago, milestone: milestone)
deploy_master(user, project)
issue.metrics.update!(first_mentioned_in_commit_at: issue.metrics.first_associated_with_milestone_at + 1.hour)
@ -81,6 +84,8 @@ RSpec.describe 'Value Stream Analytics', :js do
wait_for_requests
end
let(:stage_table_events) { stage_table.all(stage_table_event_selector) }
it 'displays metrics' do
metrics_tiles = page.find(metrics_selector)
@ -112,20 +117,62 @@ RSpec.describe 'Value Stream Analytics', :js do
end
it 'can filter the issues by date' do
expect(stage_table.all(stage_table_event_selector).length).to eq(3)
expect(page).to have_selector(stage_table_event_selector)
set_daterange(from, to)
expect(stage_table.all(stage_table_event_selector).length).to eq(0)
expect(page).not_to have_selector(stage_table_event_selector)
expect(page).not_to have_selector(stage_table_pagination_selector)
end
it 'can filter the metrics by date' do
expect(metrics_values).to eq(["3.0", "2.0", "1.0", "0.0"])
expect(metrics_values).to match_array(["21.0", "2.0", "1.0", "0.0"])
set_daterange(from, to)
expect(metrics_values).to eq(['-'] * 4)
end
it 'can sort records' do
# NOTE: checking that the string changes should suffice
# depending on the order the tests are run we might run into problems with hard coded strings
original_first_title = first_stage_title
stage_time_column.click
expect_to_be_sorted "descending"
expect(first_stage_title).not_to have_text(original_first_title, exact: true)
stage_time_column.click
expect_to_be_sorted "ascending"
expect(first_stage_title).to have_text(original_first_title, exact: true)
end
it 'paginates the results' do
original_first_title = first_stage_title
expect(page).to have_selector(stage_table_pagination_selector)
go_to_next_page
expect(page).not_to have_text(original_first_title, exact: true)
end
def stage_time_column
stage_table.find(stage_table_duration_column_header_selector).ancestor("th")
end
def first_stage_title
stage_table.all(stage_table_event_title_selector).first.text
end
def expect_to_be_sorted(direction)
expect(stage_time_column['aria-sort']).to eq(direction)
end
def go_to_next_page
page.find(stage_table_pagination_selector).find_link("Next").click
end
end
end

View File

@ -2,6 +2,7 @@ import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { stubComponent } from 'helpers/stub_component';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import ProjectsDropdownFilter from '~/analytics/shared/components/projects_dropdown_filter.vue';
import getProjects from '~/analytics/shared/graphql/projects.query.graphql';
@ -65,7 +66,7 @@ describe('ProjectsDropdownFilter component', () => {
const createWithMockDropdown = (props) => {
createComponent(props, { GlDropdown: MockGlDropdown });
return wrapper.vm.$nextTick();
return waitForPromises();
};
afterEach(() => {
@ -73,6 +74,7 @@ describe('ProjectsDropdownFilter component', () => {
});
const findHighlightedItems = () => wrapper.findByTestId('vsa-highlighted-items');
const findUnhighlightedItems = () => wrapper.findByTestId('vsa-default-items');
const findHighlightedItemsTitle = () => wrapper.findByText('Selected');
const findClearAllButton = () => wrapper.findByText('Clear all');
@ -197,6 +199,24 @@ describe('ProjectsDropdownFilter component', () => {
});
});
describe('with a selected project and search term', () => {
beforeEach(async () => {
await createWithMockDropdown({ multiSelect: true });
selectDropdownItemAtIndex(0);
wrapper.setData({ searchTerm: 'this is a very long search string' });
});
it('renders the highlighted items', async () => {
expect(findUnhighlightedItems().findAll('li').length).toBe(1);
});
it('hides the unhighlighted items that do not match the string', async () => {
expect(findUnhighlightedItems().findAll('li').length).toBe(1);
expect(findUnhighlightedItems().text()).toContain('No matching results');
});
});
describe('when passed an array of defaultProject as prop', () => {
beforeEach(() => {
createComponent({

View File

@ -19,6 +19,7 @@ import {
createdAfter,
currentGroup,
stageCounts,
initialPaginationState as pagination,
} from './mock_data';
const selectedStageEvents = issueEvents.events;
@ -81,6 +82,7 @@ const findOverviewMetrics = () => wrapper.findComponent(ValueStreamMetrics);
const findStageTable = () => wrapper.findComponent(StageTable);
const findStageEvents = () => findStageTable().props('stageEvents');
const findEmptyStageTitle = () => wrapper.findComponent(GlEmptyState).props('title');
const findPagination = () => wrapper.findByTestId('vsa-stage-pagination');
const hasMetricsRequests = (reqs) => {
const foundReqs = findOverviewMetrics().props('requests');
@ -90,7 +92,7 @@ const hasMetricsRequests = (reqs) => {
describe('Value stream analytics component', () => {
beforeEach(() => {
wrapper = createComponent({ initialState: { selectedStage, selectedStageEvents } });
wrapper = createComponent({ initialState: { selectedStage, selectedStageEvents, pagination } });
});
afterEach(() => {
@ -153,6 +155,10 @@ describe('Value stream analytics component', () => {
expect(findLoadingIcon().exists()).toBe(false);
});
it('renders pagination', () => {
expect(findPagination().exists()).toBe(true);
});
describe('with `cycleAnalyticsForGroups=true` license', () => {
beforeEach(() => {
wrapper = createComponent({ initialState: { features: { cycleAnalyticsForGroups: true } } });

View File

@ -1,6 +1,12 @@
import { getJSONFixture } from 'helpers/fixtures';
import { TEST_HOST } from 'helpers/test_constants';
import { DEFAULT_VALUE_STREAM, DEFAULT_DAYS_IN_PAST } from '~/cycle_analytics/constants';
import {
DEFAULT_VALUE_STREAM,
DEFAULT_DAYS_IN_PAST,
PAGINATION_TYPE,
PAGINATION_SORT_DIRECTION_DESC,
PAGINATION_SORT_FIELD_END_EVENT,
} from '~/cycle_analytics/constants';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { getDateInPast } from '~/lib/utils/datetime_utility';
@ -256,3 +262,22 @@ export const rawValueStreamStages = customizableStagesAndEvents.stages;
export const valueStreamStages = rawValueStreamStages.map((s) =>
convertObjectPropsToCamelCase(s, { deep: true }),
);
export const initialPaginationQuery = {
page: 15,
sort: PAGINATION_SORT_FIELD_END_EVENT,
direction: PAGINATION_SORT_DIRECTION_DESC,
};
export const initialPaginationState = {
...initialPaginationQuery,
page: null,
hasNextPage: false,
};
export const basePaginationResult = {
pagination: PAGINATION_TYPE,
sort: PAGINATION_SORT_FIELD_END_EVENT,
direction: PAGINATION_SORT_DIRECTION_DESC,
page: null,
};

View File

@ -11,6 +11,8 @@ import {
currentGroup,
createdAfter,
createdBefore,
initialPaginationState,
reviewEvents,
} from '../mock_data';
const { id: groupId, path: groupPath } = currentGroup;
@ -31,7 +33,13 @@ const mockSetDateActionCommit = {
type: 'SET_DATE_RANGE',
};
const defaultState = { ...getters, selectedValueStream, createdAfter, createdBefore };
const defaultState = {
...getters,
selectedValueStream,
createdAfter,
createdBefore,
pagination: initialPaginationState,
};
describe('Project Value Stream Analytics actions', () => {
let state;
@ -112,6 +120,21 @@ describe('Project Value Stream Analytics actions', () => {
});
});
describe('updateStageTablePagination', () => {
beforeEach(() => {
state = { ...state, selectedStage };
});
it(`will dispatch the "fetchStageData" action and commit the 'SET_PAGINATION' mutation`, () => {
return testAction({
action: actions.updateStageTablePagination,
state,
expectedMutations: [{ type: 'SET_PAGINATION' }],
expectedActions: [{ type: 'fetchStageData', payload: selectedStage.id }],
});
});
});
describe('fetchCycleAnalyticsData', () => {
beforeEach(() => {
state = { ...defaultState, endpoints: mockEndpoints };
@ -154,6 +177,10 @@ describe('Project Value Stream Analytics actions', () => {
describe('fetchStageData', () => {
const mockStagePath = /value_streams\/\w+\/stages\/\w+\/records/;
const headers = {
'X-Next-Page': 2,
'X-Page': 1,
};
beforeEach(() => {
state = {
@ -162,7 +189,7 @@ describe('Project Value Stream Analytics actions', () => {
selectedStage,
};
mock = new MockAdapter(axios);
mock.onGet(mockStagePath).reply(httpStatusCodes.OK);
mock.onGet(mockStagePath).reply(httpStatusCodes.OK, reviewEvents, headers);
});
it(`commits the 'RECEIVE_STAGE_DATA_SUCCESS' mutation`, () =>
@ -170,7 +197,11 @@ describe('Project Value Stream Analytics actions', () => {
action: actions.fetchStageData,
state,
payload: {},
expectedMutations: [{ type: 'REQUEST_STAGE_DATA' }, { type: 'RECEIVE_STAGE_DATA_SUCCESS' }],
expectedMutations: [
{ type: 'REQUEST_STAGE_DATA' },
{ type: 'RECEIVE_STAGE_DATA_SUCCESS', payload: reviewEvents },
{ type: 'SET_PAGINATION', payload: { hasNextPage: true, page: 1 } },
],
expectedActions: [],
}));

View File

@ -1,17 +1,42 @@
import * as getters from '~/cycle_analytics/store/getters';
import {
allowedStages,
stageMedians,
transformedProjectStagePathData,
selectedStage,
stageCounts,
basePaginationResult,
initialPaginationState,
} from '../mock_data';
describe('Value stream analytics getters', () => {
let state = {};
describe('pathNavigationData', () => {
it('returns the transformed data', () => {
const state = { stages: allowedStages, medians: stageMedians, selectedStage, stageCounts };
state = { stages: allowedStages, medians: stageMedians, selectedStage, stageCounts };
expect(getters.pathNavigationData(state)).toEqual(transformedProjectStagePathData);
});
});
describe('paginationParams', () => {
beforeEach(() => {
state = { pagination: initialPaginationState };
});
it('returns the `pagination` type', () => {
expect(getters.paginationParams(state)).toEqual(basePaginationResult);
});
it('returns the `sort` type', () => {
expect(getters.paginationParams(state)).toEqual(basePaginationResult);
});
it('with page=10, sets the `page` property', () => {
const page = 10;
state = { pagination: { ...initialPaginationState, page } };
expect(getters.paginationParams(state)).toEqual({ ...basePaginationResult, page });
});
});
});

View File

@ -1,6 +1,10 @@
import { useFakeDate } from 'helpers/fake_date';
import * as types from '~/cycle_analytics/store/mutation_types';
import mutations from '~/cycle_analytics/store/mutations';
import {
PAGINATION_SORT_FIELD_END_EVENT,
PAGINATION_SORT_DIRECTION_DESC,
} from '~/cycle_analytics/constants';
import {
selectedStage,
rawIssueEvents,
@ -12,6 +16,7 @@ import {
formattedStageMedians,
rawStageCounts,
stageCounts,
initialPaginationState as pagination,
} from '../mock_data';
let state;
@ -25,7 +30,7 @@ describe('Project Value Stream Analytics mutations', () => {
useFakeDate(2020, 6, 18);
beforeEach(() => {
state = {};
state = { pagination };
});
afterEach(() => {
@ -88,16 +93,18 @@ describe('Project Value Stream Analytics mutations', () => {
});
it.each`
mutation | payload | stateKey | value
${types.SET_DATE_RANGE} | ${mockSetDatePayload} | ${'createdAfter'} | ${mockCreatedAfter}
${types.SET_DATE_RANGE} | ${mockSetDatePayload} | ${'createdBefore'} | ${mockCreatedBefore}
${types.SET_LOADING} | ${true} | ${'isLoading'} | ${true}
${types.SET_LOADING} | ${false} | ${'isLoading'} | ${false}
${types.SET_SELECTED_VALUE_STREAM} | ${selectedValueStream} | ${'selectedValueStream'} | ${selectedValueStream}
${types.RECEIVE_VALUE_STREAMS_SUCCESS} | ${[selectedValueStream]} | ${'valueStreams'} | ${[selectedValueStream]}
${types.RECEIVE_VALUE_STREAM_STAGES_SUCCESS} | ${{ stages: rawValueStreamStages }} | ${'stages'} | ${valueStreamStages}
${types.RECEIVE_STAGE_MEDIANS_SUCCESS} | ${rawStageMedians} | ${'medians'} | ${formattedStageMedians}
${types.RECEIVE_STAGE_COUNTS_SUCCESS} | ${rawStageCounts} | ${'stageCounts'} | ${stageCounts}
mutation | payload | stateKey | value
${types.SET_DATE_RANGE} | ${mockSetDatePayload} | ${'createdAfter'} | ${mockCreatedAfter}
${types.SET_DATE_RANGE} | ${mockSetDatePayload} | ${'createdBefore'} | ${mockCreatedBefore}
${types.SET_LOADING} | ${true} | ${'isLoading'} | ${true}
${types.SET_LOADING} | ${false} | ${'isLoading'} | ${false}
${types.SET_SELECTED_VALUE_STREAM} | ${selectedValueStream} | ${'selectedValueStream'} | ${selectedValueStream}
${types.SET_PAGINATION} | ${pagination} | ${'pagination'} | ${{ ...pagination, sort: PAGINATION_SORT_FIELD_END_EVENT, direction: PAGINATION_SORT_DIRECTION_DESC }}
${types.SET_PAGINATION} | ${{ ...pagination, sort: 'duration', direction: 'asc' }} | ${'pagination'} | ${{ ...pagination, sort: 'duration', direction: 'asc' }}
${types.RECEIVE_VALUE_STREAMS_SUCCESS} | ${[selectedValueStream]} | ${'valueStreams'} | ${[selectedValueStream]}
${types.RECEIVE_VALUE_STREAM_STAGES_SUCCESS} | ${{ stages: rawValueStreamStages }} | ${'stages'} | ${valueStreamStages}
${types.RECEIVE_STAGE_MEDIANS_SUCCESS} | ${rawStageMedians} | ${'medians'} | ${formattedStageMedians}
${types.RECEIVE_STAGE_COUNTS_SUCCESS} | ${rawStageCounts} | ${'stageCounts'} | ${stageCounts}
`(
'$mutation with $payload will set $stateKey to $value',
({ mutation, payload, stateKey, value }) => {
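
To make the two SET_PAGINATION rows in the table above easier to read, here is an assumed sketch of a mutation that would satisfy them. The constants and import paths are the ones referenced in this spec; the mutation body itself is a guess, not the committed code:

import * as types from '~/cycle_analytics/store/mutation_types';
import {
  PAGINATION_SORT_FIELD_END_EVENT,
  PAGINATION_SORT_DIRECTION_DESC,
} from '~/cycle_analytics/constants';

export default {
  // Assumed sketch only: a payload without an explicit sort/direction falls
  // back to sorting by the stage end event in descending order; any other
  // pagination fields in the payload are stored as given.
  [types.SET_PAGINATION](state, { sort, direction, ...rest }) {
    state.pagination = {
      ...rest,
      sort: sort || PAGINATION_SORT_FIELD_END_EVENT,
      direction: direction || PAGINATION_SORT_DIRECTION_DESC,
    };
  },
};

That reading matches both rows: committing the bare initialPaginationState yields the end-event/descending defaults, while an explicit duration/asc payload is stored unchanged.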

View File

@ -18,7 +18,7 @@ RSpec.describe GitlabSchema.types['Issue'] do
confidential hidden discussion_locked upvotes downvotes merge_requests_count user_notes_count user_discussions_count web_path web_url relative_position
emails_disabled subscribed time_estimate total_time_spent human_time_estimate human_total_time_spent closed_at created_at updated_at task_completion_status
design_collection alert_management_alert severity current_user_todos moved moved_to
create_note_email timelogs project_id]
create_note_email timelogs project_id customer_relations_contacts]
fields.each do |field_name|
expect(described_class).to have_graphql_field(field_name)

View File

@ -13,13 +13,6 @@ RSpec.describe 'CI YML Templates' do
excluded + ["Terraform.gitlab-ci.yml"]
end
before do
stub_feature_flags(
redirect_to_latest_template_terraform: false,
redirect_to_latest_template_security_api_fuzzing: false,
redirect_to_latest_template_security_dast: false)
end
shared_examples 'require default stages to be included' do
it 'require default stages to be included' do
expect(subject.stages).to include(*Gitlab::Ci::Config::Entry::Stages.default)

View File

@ -92,6 +92,13 @@ RSpec.describe Gitlab::ImportExport::AttributesPermitter do
:boards | true
:custom_attributes | true
:labels | true
:protected_branches | true
:protected_tags | true
:create_access_levels | true
:merge_access_levels | true
:push_access_levels | true
:releases | true
:links | true
end
with_them do

View File

@ -9,6 +9,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
before do
stub_env('CUSTOMER_PORTAL_URL', env_value)
stub_feature_flags(new_customersdot_staging_url: false)
end
describe '.default_subscriptions_url' do

View File

@ -724,7 +724,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(counts_monthly[:projects_with_alerts_created]).to eq(1)
expect(counts_monthly[:projects]).to eq(1)
expect(counts_monthly[:packages]).to eq(1)
expect(counts_monthly[:promoted_issues]).to eq(1)
expect(counts_monthly[:promoted_issues]).to eq(Gitlab::UsageData::DEPRECATED_VALUE)
end
end
@ -1419,48 +1419,4 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
end
describe '.snowplow_event_counts' do
let_it_be(:time_period) { { collector_tstamp: 8.days.ago..1.day.ago } }
context 'when self-monitoring project exists' do
let_it_be(:project) { create(:project) }
before do
stub_application_setting(self_monitoring_project: project)
end
context 'and product_analytics FF is enabled for it' do
before do
stub_feature_flags(product_analytics_tracking: true)
create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote')
create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote', collector_tstamp: 2.days.ago)
create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote', collector_tstamp: 9.days.ago)
create(:product_analytics_event, project: project, se_category: 'foo', se_action: 'bar', collector_tstamp: 2.days.ago)
end
it 'returns promoted_issues for the time period' do
expect(described_class.snowplow_event_counts(time_period)[:promoted_issues]).to eq(1)
end
end
context 'and product_analytics FF is disabled' do
before do
stub_feature_flags(product_analytics_tracking: false)
end
it 'returns an empty hash' do
expect(described_class.snowplow_event_counts(time_period)).to eq({})
end
end
end
context 'when self-monitoring project does not exist' do
it 'returns an empty hash' do
expect(described_class.snowplow_event_counts(time_period)).to eq({})
end
end
end
end

View File

@ -456,18 +456,6 @@ RSpec.describe Deployment do
end
end
describe 'with_deployable' do
subject { described_class.with_deployable }
it 'retrieves deployments with deployable builds' do
with_deployable = create(:deployment)
create(:deployment, deployable: nil)
create(:deployment, deployable_type: 'CommitStatus', deployable_id: non_existing_record_id)
is_expected.to contain_exactly(with_deployable)
end
end
describe 'visible' do
subject { described_class.visible }
@ -613,6 +601,26 @@ RSpec.describe Deployment do
end
end
describe '.builds' do
let!(:deployment1) { create(:deployment) }
let!(:deployment2) { create(:deployment) }
let!(:deployment3) { create(:deployment) }
subject { described_class.builds }
it 'retrieves builds for the deployments' do
is_expected.to match_array(
[deployment1.deployable, deployment2.deployable, deployment3.deployable])
end
it 'does not fetch the null deployable_ids' do
deployment3.update!(deployable_id: nil, deployable_type: nil)
is_expected.to match_array(
[deployment1.deployable, deployment2.deployable])
end
end
describe '#previous_deployment' do
using RSpec::Parameterized::TableSyntax

View File

@ -36,17 +36,6 @@ RSpec.describe ProductAnalyticsEvent, type: :model do
it { expect(described_class.count_by_graph('platform', 30.days)).to eq({ 'app' => 1, 'mobile' => 1, 'web' => 2 }) }
end
describe '.by_category_and_action' do
let_it_be(:event) { create(:product_analytics_event, se_category: 'catA', se_action: 'actA') }
before do
create(:product_analytics_event, se_category: 'catA', se_action: 'actB')
create(:product_analytics_event, se_category: 'catB', se_action: 'actA')
end
it { expect(described_class.by_category_and_action('catA', 'actA')).to match_array([event]) }
end
describe '.count_collector_tstamp_by_day' do
let_it_be(:time_now) { Time.zone.now }
let_it_be(:time_ago) { Time.zone.now - 5.days }

View File

@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe 'getting an issue list for a project' do
include GraphqlHelpers
let_it_be(:project) { create(:project, :repository, :public) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository, :public, group: group) }
let_it_be(:current_user) { create(:user) }
let_it_be(:issue_a, reload: true) { create(:issue, project: project, discussion_locked: true) }
let_it_be(:issue_b, reload: true) { create(:issue, :with_alert, project: project) }
@ -409,6 +410,35 @@ RSpec.describe 'getting an issue list for a project' do
end
end
context 'when fetching customer_relations_contacts' do
let(:fields) do
<<~QUERY
nodes {
id
customerRelationsContacts {
nodes {
firstName
}
}
}
QUERY
end
def clean_state_query
run_with_clean_state(query, context: { current_user: current_user })
end
it 'avoids N+1 queries' do
create(:contact, group_id: group.id, issues: [issue_a])
control = ActiveRecord::QueryRecorder.new(skip_cached: false) { clean_state_query }
create(:contact, group_id: group.id, issues: [issue_a])
expect { clean_state_query }.not_to exceed_all_query_limit(control)
end
end
context 'when fetching labels' do
let(:fields) do
<<~QUERY

View File

@ -84,7 +84,7 @@ RSpec.describe Deployments::OlderDeploymentsDropService do
it 'does not drop an older deployment and tracks the exception' do
expect(Gitlab::ErrorTracking).to receive(:track_exception)
.with(kind_of(RuntimeError), subject_id: deployment.id, deployment_id: older_deployment.id)
.with(kind_of(RuntimeError), subject_id: deployment.id, build_id: older_deployment.deployable_id)
expect { subject }.not_to change { Ci::Build.failed.count }
end

View File

@ -1228,6 +1228,7 @@
- "./spec/requests/api/commit_statuses_spec.rb"
- "./spec/requests/api/graphql/ci/runner_spec.rb"
- "./spec/requests/api/graphql/mutations/ci/pipeline_destroy_spec.rb"
- "./spec/requests/api/graphql/project/issues_spec.rb"
- "./spec/requests/api/graphql/project/merge_request_spec.rb"
- "./spec/requests/api/graphql/project_query_spec.rb"
- "./spec/requests/api/issues/issues_spec.rb"

View File

@ -70,8 +70,6 @@
- "./spec/models/ci/job_artifact_spec.rb"
- "./spec/models/ci/pipeline_spec.rb"
- "./spec/models/ci/runner_spec.rb"
- "./spec/models/deployment_spec.rb"
- "./spec/models/environment_spec.rb"
- "./spec/models/merge_request_spec.rb"
- "./spec/models/project_spec.rb"
- "./spec/models/user_spec.rb"