Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-03-13 21:15:31 +00:00
parent 7b7bc31c5b
commit 5699348c82
58 changed files with 990 additions and 551 deletions

View File

@ -595,7 +595,6 @@ Gitlab/StrongMemoizeAttr:
- 'lib/gitlab/ci/reports/accessibility_reports_comparer.rb'
- 'lib/gitlab/ci/reports/codequality_reports_comparer.rb'
- 'lib/gitlab/ci/reports/security/locations/base.rb'
- 'lib/gitlab/ci/reports/security/vulnerability_reports_comparer.rb'
- 'lib/gitlab/ci/reports/test_reports_comparer.rb'
- 'lib/gitlab/ci/reports/test_suite_comparer.rb'
- 'lib/gitlab/ci/reports/test_suite_summary.rb'

View File

@ -4147,7 +4147,6 @@ Layout/LineLength:
- 'spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb'
- 'spec/lib/gitlab/ci/reports/security/flag_spec.rb'
- 'spec/lib/gitlab/ci/reports/security/scanner_spec.rb'
- 'spec/lib/gitlab/ci/reports/security/vulnerability_reports_comparer_spec.rb'
- 'spec/lib/gitlab/ci/runner_upgrade_check_spec.rb'
- 'spec/lib/gitlab/ci/status/bridge/factory_spec.rb'
- 'spec/lib/gitlab/ci/status/build/manual_spec.rb'

View File

@ -128,7 +128,6 @@ Layout/SpaceInsideParens:
- 'spec/lib/gitlab/ci/parsers/security/common_spec.rb'
- 'spec/lib/gitlab/ci/parsers_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/seed/build_spec.rb'
- 'spec/lib/gitlab/ci/reports/security/vulnerability_reports_comparer_spec.rb'
- 'spec/lib/gitlab/ci/reports/test_suite_spec.rb'
- 'spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb'
- 'spec/lib/gitlab/ci/templates/MATLAB_spec.rb'

View File

@ -1730,7 +1730,6 @@ RSpec/ContextWording:
- 'spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb'
- 'spec/lib/gitlab/ci/reports/reports_comparer_spec.rb'
- 'spec/lib/gitlab/ci/reports/security/aggregated_report_spec.rb'
- 'spec/lib/gitlab/ci/reports/security/vulnerability_reports_comparer_spec.rb'
- 'spec/lib/gitlab/ci/reports/test_suite_comparer_spec.rb'
- 'spec/lib/gitlab/ci/runner_instructions_spec.rb'
- 'spec/lib/gitlab/ci/runner_upgrade_check_spec.rb'

View File

@ -490,7 +490,6 @@ Style/GuardClause:
- 'lib/gitlab/ci/pipeline/expression/lexeme/base.rb'
- 'lib/gitlab/ci/pipeline/expression/lexeme/pattern.rb'
- 'lib/gitlab/ci/reports/codequality_reports_comparer.rb'
- 'lib/gitlab/ci/reports/security/vulnerability_reports_comparer.rb'
- 'lib/gitlab/ci/runner/backoff.rb'
- 'lib/gitlab/ci/runner_upgrade_check.rb'
- 'lib/gitlab/ci/trace.rb'

View File

@ -1 +1 @@
4fe33cae7dca4ca605d0f505743ba4aa861fa876
e41cf4607486623e97e584533158cd4071beff31

View File

@ -524,7 +524,7 @@
{"name":"rubocop-rails","version":"2.17.4","platform":"ruby","checksum":"8004149a14372d3d6cededd000357879fa7eb0421403a7a26bc717e2a98bbedb"},
{"name":"rubocop-rspec","version":"2.18.1","platform":"ruby","checksum":"41c6455630fc98b809ebca047413389e2b7e3f68975028365c07bfea878db5ee"},
{"name":"ruby-fogbugz","version":"0.3.0","platform":"ruby","checksum":"5e04cde474648f498a71cf1e1a7ab42c66b953862fbe224f793ec0a7a1d5f657"},
{"name":"ruby-magic","version":"0.5.4","platform":"ruby","checksum":"2c17b185130d10a83791f63a40baa358c4b138af37da3f4dab53690121c421d5"},
{"name":"ruby-magic","version":"0.5.5","platform":"ruby","checksum":"d2cc5b6b719831c3108a4f8a62bf3314c1af6cb09c98e2b5a3f9509bf8814e6c"},
{"name":"ruby-progressbar","version":"1.11.0","platform":"ruby","checksum":"cc127db3866dc414ffccbf92928a241e585b3aa2b758a5563e74a6ee0f57d50a"},
{"name":"ruby-saml","version":"1.13.0","platform":"ruby","checksum":"d31cbdf5fb8fdd6aa3187e48dba3085cfeb751af30276a5739aa3659a66f069c"},
{"name":"ruby-statistics","version":"3.0.0","platform":"ruby","checksum":"610301370346931cb701e3a8d3d3e28eb65681162cae6066c0c11abf20efdc81"},

View File

@ -1327,8 +1327,8 @@ GEM
ruby-fogbugz (0.3.0)
crack (~> 0.4)
multipart-post (~> 2.0)
ruby-magic (0.5.4)
mini_portile2 (~> 2.6)
ruby-magic (0.5.5)
mini_portile2 (~> 2.8)
ruby-progressbar (1.11.0)
ruby-saml (1.13.0)
nokogiri (>= 1.10.5)

View File

@ -2,15 +2,22 @@
import { setUrlParams, redirectTo, queryToObject, updateHistory } from '~/lib/utils/url_utility';
import { FILTERED_SEARCH_TERM } from '~/vue_shared/components/filtered_search_bar/constants';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import { FILTERED_SEARCH_TOKENS } from '~/admin/abuse_reports/constants';
import {
FILTERED_SEARCH_TOKENS,
DEFAULT_SORT,
SORT_OPTIONS,
isValidSortKey,
} from '~/admin/abuse_reports/constants';
export default {
name: 'AbuseReportsFilteredSearchBar',
components: { FilteredSearchBar },
tokens: FILTERED_SEARCH_TOKENS,
sortOptions: SORT_OPTIONS,
data() {
return {
initialFilterValue: [],
initialSortBy: DEFAULT_SORT,
};
},
created() {
@ -24,6 +31,11 @@ export default {
updateHistory({ url: setUrlParams(query), replace: true });
}
const sort = this.currentSortKey();
if (sort) {
this.initialSortBy = query.sort;
}
const tokens = this.$options.tokens
.filter((token) => query[token.type])
.map((token) => ({
@ -37,8 +49,13 @@ export default {
this.initialFilterValue = tokens;
},
methods: {
currentSortKey() {
const { sort } = queryToObject(window.location.search);
return isValidSortKey(sort) ? sort : undefined;
},
handleFilter(tokens) {
const params = tokens.reduce((accumulator, token) => {
let params = tokens.reduce((accumulator, token) => {
const { type, value } = token;
// We don't support filtering reports by search term for now
@ -52,8 +69,18 @@ export default {
};
}, {});
const sort = this.currentSortKey();
if (sort) {
params = { ...params, sort };
}
redirectTo(setUrlParams(params, window.location.href, true));
},
handleSort(sort) {
const { page, ...query } = queryToObject(window.location.search);
redirectTo(setUrlParams({ ...query, sort }, window.location.href, true));
},
},
filteredSearchNamespace: 'abuse_reports',
recentSearchesStorageKey: 'abuse_reports',
@ -67,6 +94,9 @@ export default {
:recent-searches-storage-key="$options.recentSearchesStorageKey"
:search-input-placeholder="__('Filter reports')"
:initial-filter-value="initialFilterValue"
:initial-sort-by="initialSortBy"
:sort-options="$options.sortOptions"
@onFilter="handleFilter"
@onSort="handleSort"
/>
</template>

View File

@ -33,4 +33,30 @@ export const FILTERED_SEARCH_TOKEN_STATUS = {
operators: OPERATORS_IS,
};
export const DEFAULT_SORT = 'created_at_desc';
export const SORT_OPTIONS = [
{
id: 10,
title: __('Created date'),
sortDirection: {
descending: DEFAULT_SORT,
ascending: 'created_at_asc',
},
},
{
id: 20,
title: __('Updated date'),
sortDirection: {
descending: 'updated_at_desc',
ascending: 'updated_at_asc',
},
},
];
export const isValidSortKey = (key) =>
SORT_OPTIONS.some(
(sort) => sort.sortDirection.ascending === key || sort.sortDirection.descending === key,
);
export const FILTERED_SEARCH_TOKENS = [FILTERED_SEARCH_TOKEN_USER, FILTERED_SEARCH_TOKEN_STATUS];

View File

@ -48,12 +48,13 @@ export default {
'selectedStageEvents',
'selectedStageError',
'stageCounts',
'endpoints',
'features',
'createdBefore',
'createdAfter',
'pagination',
'hasNoAccessError',
'groupPath',
'namespace',
]),
...mapGetters(['pathNavigationData', 'filterParams']),
isLoaded() {
@ -111,7 +112,8 @@ export default {
},
dashboardsPath() {
const {
endpoints: { groupPath, fullPath },
namespace: { fullPath },
groupPath,
} = this;
return this.showLinkToDashboard
? generateValueStreamsDashboardLink(groupPath, [fullPath])
@ -166,7 +168,7 @@ export default {
<div>
<h3>{{ $options.i18n.pageTitle }}</h3>
<value-stream-filters
:group-path="endpoints.groupPath"
:group-path="groupPath"
:has-project-filter="false"
:start-date="createdAfter"
:end-date="createdBefore"
@ -184,7 +186,7 @@ export default {
/>
</div>
<value-stream-metrics
:request-path="endpoints.fullPath"
:request-path="namespace.fullPath"
:request-params="filterParams"
:requests="metricsRequests"
:group-by="$options.VSA_METRICS_GROUPS"

View File

@ -40,3 +40,6 @@ export const METRICS_REQUESTS = [
{ endpoint: METRIC_TYPE_TIME_SUMMARY, name: __('time summary'), request: getValueStreamMetrics },
...SUMMARY_METRICS_REQUEST,
];
export const MILESTONES_ENDPOINT = '/-/milestones.json';
export const LABELS_ENDPOINT = '/-/labels.json';

View File

@ -8,7 +8,13 @@ import {
import { normalizeHeaders, parseIntPagination } from '~/lib/utils/common_utils';
import { createAlert } from '~/alert';
import { __ } from '~/locale';
import { DEFAULT_VALUE_STREAM, I18N_VSA_ERROR_STAGE_MEDIAN } from '../constants';
import {
DEFAULT_VALUE_STREAM,
I18N_VSA_ERROR_STAGE_MEDIAN,
LABELS_ENDPOINT,
MILESTONES_ENDPOINT,
} from '../constants';
import { constructPathWithNamespace } from '../utils';
import * as types from './mutation_types';
export const setSelectedValueStream = ({ commit, dispatch }, valueStream) => {
@ -18,7 +24,7 @@ export const setSelectedValueStream = ({ commit, dispatch }, valueStream) => {
export const fetchValueStreamStages = ({ commit, state }) => {
const {
endpoints: { fullPath },
namespace: { fullPath },
selectedValueStream: { id },
} = state;
commit(types.REQUEST_VALUE_STREAM_STAGES);
@ -41,7 +47,7 @@ export const receiveValueStreamsSuccess = ({ commit, dispatch }, data = []) => {
export const fetchValueStreams = ({ commit, dispatch, state }) => {
const {
endpoints: { fullPath },
namespace: { fullPath },
} = state;
commit(types.REQUEST_VALUE_STREAMS);
@ -180,7 +186,8 @@ export const initializeVsa = async ({ commit, dispatch }, initialData = {}) => {
commit(types.INITIALIZE_VSA, initialData);
const {
endpoints: { fullPath, groupPath, milestonesPath = '', labelsPath = '' },
groupPath,
namespace,
selectedAuthor,
selectedMilestone,
selectedAssigneeList,
@ -189,10 +196,10 @@ export const initializeVsa = async ({ commit, dispatch }, initialData = {}) => {
} = initialData;
dispatch('filters/setEndpoints', {
labelsEndpoint: labelsPath,
milestonesEndpoint: milestonesPath,
labelsEndpoint: constructPathWithNamespace(namespace, LABELS_ENDPOINT),
milestonesEndpoint: constructPathWithNamespace(namespace, MILESTONES_ENDPOINT),
groupEndpoint: groupPath,
projectEndpoint: fullPath,
projectEndpoint: namespace.fullPath,
});
dispatch('filters/initialize', {

View File

@ -15,7 +15,7 @@ export const pathNavigationData = ({ stages, medians, stageCounts, selectedStage
export const requestParams = (state) => {
const {
endpoints: { fullPath },
namespace: { fullPath },
selectedValueStream: { id: valueStreamId },
selectedStage: { id: stageId = null },
} = state;

View File

@ -1,15 +1,16 @@
import Vue from 'vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { PAGINATION_SORT_FIELD_END_EVENT, PAGINATION_SORT_DIRECTION_DESC } from '../constants';
import { formatMedianValues } from '../utils';
import { PAGINATION_SORT_FIELD_END_EVENT, PAGINATION_SORT_DIRECTION_DESC } from '../constants';
import * as types from './mutation_types';
export default {
[types.INITIALIZE_VSA](
state,
{ endpoints, features, createdBefore, createdAfter, pagination = {} },
{ groupPath, features, createdBefore, createdAfter, pagination = {}, namespace = {} },
) {
state.endpoints = endpoints;
state.groupPath = groupPath;
state.namespace = namespace;
state.createdBefore = createdBefore;
state.createdAfter = createdAfter;
state.features = features;

View File

@ -6,7 +6,11 @@ import {
export default () => ({
id: null,
features: {},
endpoints: {},
groupPath: {},
namespace: {
name: null,
fullPath: null,
},
createdAfter: null,
createdBefore: null,
stages: [],

View File

@ -1,5 +1,6 @@
import { parseSeconds } from '~/lib/utils/datetime_utility';
import { formatTimeAsSummary } from '~/lib/utils/datetime/date_format_utility';
import { joinPaths } from '~/lib/utils/url_utility';
/**
* Takes the stages and median data, combined with the selected stage, to build an
@ -91,25 +92,21 @@ const extractFeatures = (gon) => ({
* @returns {Object} - The initial data to load the app with
*/
export const buildCycleAnalyticsInitialData = ({
fullPath,
requestPath,
projectId,
groupPath,
labelsPath,
milestonesPath,
stage,
createdAfter,
createdBefore,
namespaceName,
namespaceFullPath,
gon,
} = {}) => {
return {
projectId: parseInt(projectId, 10),
endpoints: {
requestPath,
fullPath,
labelsPath,
milestonesPath,
groupPath: `groups/${groupPath}`,
groupPath: `groups/${groupPath}`,
namespace: {
name: namespaceName,
fullPath: namespaceFullPath,
},
createdAfter: new Date(createdAfter),
createdBefore: new Date(createdBefore),
@ -117,3 +114,6 @@ export const buildCycleAnalyticsInitialData = ({
features: extractFeatures(gon),
};
};
export const constructPathWithNamespace = ({ fullPath }, endpoint) =>
joinPaths('/', fullPath, endpoint);

View File

@ -322,7 +322,9 @@ export default {
this.$emit('saveDescription', newDescription);
},
renderTaskListItemActions() {
const taskListItems = this.$el.querySelectorAll?.('.task-list-item:not(.inapplicable)');
const taskListItems = this.$el.querySelectorAll?.(
'.task-list-item:not(.inapplicable, table .task-list-item)',
);
taskListItems?.forEach((item) => {
const dropdown = this.createTaskListItemActions({ canUpdate: this.canUpdate });

View File

@ -1,4 +1,5 @@
<script>
import Autosize from 'autosize';
import axios from '~/lib/utils/axios_utils';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import { updateDraft, clearDraft, getDraft } from '~/lib/utils/autosave';
@ -80,6 +81,7 @@ export default {
this.markdown = val;
this.saveDraft();
this.autosizeTextarea();
},
},
mounted() {
@ -99,6 +101,7 @@ export default {
this.$emit('input', target.value);
this.saveDraft();
this.autosizeTextarea();
},
renderMarkdown(markdown) {
return axios.post(this.renderMarkdownPath, { text: markdown }).then(({ data }) => data.body);
@ -129,6 +132,13 @@ export default {
if (this.markdown) updateDraft(this.autosaveKey, this.markdown);
else clearDraft(this.autosaveKey);
},
autosizeTextarea() {
if (this.editingMode === EDITING_MODE_MARKDOWN_FIELD) {
this.$nextTick(() => {
Autosize.update(this.$refs.textarea);
});
}
},
},
};
</script>
@ -156,7 +166,7 @@ export default {
v-bind="formFieldProps"
ref="textarea"
:value="markdown"
class="note-textarea js-gfm-input js-autosize markdown-area"
class="note-textarea js-gfm-input markdown-area"
dir="auto"
:data-supports-quick-actions="supportsQuickActions"
data-qa-selector="markdown_editor_form_field"

View File

@ -3,6 +3,9 @@
class AbuseReportsFinder
attr_reader :params, :reports
DEFAULT_SORT = 'created_at_desc'
ALLOWED_SORT = [DEFAULT_SORT, *%w[created_at_asc updated_at_desc updated_at_asc]].freeze
def initialize(params = {})
@params = params
@reports = AbuseReport.all
@ -10,10 +13,9 @@ class AbuseReportsFinder
def execute
filter_reports
sort_reports
reports.with_order_id_desc
.with_users
.page(params[:page])
reports.with_users.page(params[:page])
end
private
@ -57,4 +59,16 @@ class AbuseReportsFinder
@reports = @reports.by_user_id(params[:user_id])
end
def sort_reports
if Feature.disabled?(:abuse_reports_list)
@reports = @reports.with_order_id_desc
return
end
sort_by = params[:sort]
sort_by = DEFAULT_SORT unless sort_by.in?(ALLOWED_SORT)
@reports = @reports.order_by(sort_by)
end
end

View File

@ -0,0 +1,58 @@
# frozen_string_literal: true
module Resolvers
module Analytics
module CycleAnalytics
class BaseIssueResolver < BaseResolver
type Types::Analytics::CycleAnalytics::MetricType, null: true
argument :assignee_usernames, [GraphQL::Types::String],
required: false,
description: 'Usernames of users assigned to the issue.'
argument :author_username, GraphQL::Types::String,
required: false,
description: 'Username of the author of the issue.'
argument :milestone_title, GraphQL::Types::String,
required: false,
description: 'Milestone applied to the issue.'
argument :label_names, [GraphQL::Types::String],
required: false,
description: 'Labels applied to the issue.'
argument :from, Types::TimeType,
required: true,
description: 'Issues created after the date.'
argument :to, Types::TimeType,
required: true,
description: 'Issues created before the date.'
def finder_params
{ project_id: object.project.id }
end
# :project level: no customization, returning the original resolver
# :group level: add the project_ids argument
def self.[](context = :project)
case context
when :project
self
when :group
Class.new(self) do
argument :project_ids, [GraphQL::Types::ID],
required: false,
description: 'Project IDs within the group hierarchy.'
define_method :finder_params do
{ group_id: object.id, include_subgroups: true }
end
end
end
end
end
end
end
end

View File

@ -1,35 +1,10 @@
# frozen_string_literal: true
# rubocop:disable Graphql/ResolverType (inherited from Resolvers::Analytics::CycleAnalytics::BaseIssueResolver)
module Resolvers
module Analytics
module CycleAnalytics
class IssueCountResolver < BaseResolver
type Types::Analytics::CycleAnalytics::MetricType, null: true
argument :assignee_usernames, [GraphQL::Types::String],
required: false,
description: 'Usernames of users assigned to the issue.'
argument :author_username, GraphQL::Types::String,
required: false,
description: 'Username of the author of the issue.'
argument :milestone_title, GraphQL::Types::String,
required: false,
description: 'Milestone applied to the issue.'
argument :label_names, [GraphQL::Types::String],
required: false,
description: 'Labels applied to the issue.'
argument :from, Types::TimeType,
required: true,
description: 'Issues created after the date.'
argument :to, Types::TimeType,
required: true,
description: 'Issues created before the date.'
class IssueCountResolver < BaseIssueResolver
def resolve(**args)
value = IssuesFinder
.new(current_user, process_params(args))
@ -55,31 +30,8 @@ module Resolvers
params.merge(finder_params)
end
def finder_params
{ project_id: object.project.id }
end
# :project level: no customization, returning the original resolver
# :group level: add the project_ids argument
def self.[](context = :project)
case context
when :project
self
when :group
Class.new(self) do
argument :project_ids, [GraphQL::Types::ID],
required: false,
description: 'Project IDs within the group hierarchy.'
define_method :finder_params do
{ group_id: object.id, include_subgroups: true }
end
end
end
end
end
end
end
end
# rubocop:enable Graphql/ResolverType

View File

@ -25,3 +25,6 @@ module Types
end
end
end
mod = Types::Analytics::CycleAnalytics::FlowMetrics
mod.prepend_mod_with('Types::Analytics::CycleAnalytics::FlowMetrics')

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
module Types
module Analytics
module CycleAnalytics
# rubocop: disable Graphql/AuthorizeTypes
class LinkType < BaseObject
graphql_name 'ValueStreamMetricLinkType'
field :name,
GraphQL::Types::String,
null: false,
description: 'Name of the link group.'
field :label,
GraphQL::Types::String,
null: false,
description: 'Label for the link.'
field :url,
GraphQL::Types::String,
null: false,
description: 'Drill-down URL.'
field :docs_link,
GraphQL::Types::Boolean,
null: true,
description: 'Link to the metric documentation.'
end
end
# rubocop: enable Graphql/AuthorizeTypes
end
end

View File

@ -29,7 +29,7 @@ module Types
description: 'Title for the metric.'
field :links,
[GraphQL::Types::String],
[LinkType],
null: false,
description: 'Optional links for drilling down.'
end

View File

@ -58,7 +58,7 @@ module Types
Types::Ci::RunnerJobExecutionStatusEnum,
null: true,
description: 'Job execution status of the runner.',
deprecated: { milestone: '15.7', reason: :alpha }
alpha: { milestone: '15.7' }
field :jobs, ::Types::Ci::JobType.connection_type, null: true,
description: 'Jobs assigned to the runner. This field can only be resolved for one runner in any single request.',
authorize: :read_builds,
@ -67,7 +67,8 @@ module Types
description: 'Indicates the runner is locked.'
field :machines, ::Types::Ci::RunnerMachineType.connection_type, null: true,
description: 'Machines associated with the runner configuration.',
method: :runner_machines
method: :runner_machines,
alpha: { milestone: '15.10' }
field :maintenance_note, GraphQL::Types::String, null: true,
description: 'Runner\'s maintenance notes.'
field :maximum_timeout, GraphQL::Types::Int, null: true,

View File

@ -45,12 +45,12 @@ module Projects
end
def with_timeout
result = {
result = success(
original_size: 0,
before_delete_size: 0,
deleted_size: 0,
deleted: []
}
)
yield Time.zone.now, result

View File

@ -831,7 +831,7 @@ Gitlab.ee do
Settings.cron_jobs['abandoned_trial_emails']['cron'] ||= "0 1 * * *"
Settings.cron_jobs['abandoned_trial_emails']['job_class'] = 'Emails::AbandonedTrialEmailsCronWorker'
Settings.cron_jobs['package_metadata_sync_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['package_metadata_sync_worker']['cron'] ||= "0 * * * *"
Settings.cron_jobs['package_metadata_sync_worker']['cron'] ||= "*/5 * * * *"
Settings.cron_jobs['package_metadata_sync_worker']['job_class'] = 'PackageMetadata::SyncWorker'
Settings.cron_jobs['compliance_violations_consistency_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['compliance_violations_consistency_worker']['cron'] ||= '0 1 * * *'

View File

@ -33,7 +33,8 @@ class Gitlab::Seeder::CycleAnalytics # rubocop:disable Style/ClassAndModuleChild
code: 72,
test: 5,
review: 72,
deployment: 48
deployment: 48,
lead_time: 32
}.freeze
def self.seeder_based_on_env(project)
@ -69,6 +70,7 @@ class Gitlab::Seeder::CycleAnalytics # rubocop:disable Style/ClassAndModuleChild
create_developers!
create_issues!
seed_lead_time!
seed_issue_stage!
seed_plan_stage!
seed_code_stage!
@ -156,6 +158,13 @@ class Gitlab::Seeder::CycleAnalytics # rubocop:disable Style/ClassAndModuleChild
end
end
def seed_lead_time!
issues.each do |issue|
created_at = issue.created_at - MAX_DURATIONS[:lead_time].hours
issue.update!(created_at: created_at, closed_at: Time.now)
end
end
def create_issues!
@issue_count.times do
travel_to(start_time + rand(5).days) do

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
class AddStatusCreatedAtAndUpdatedAtIndexesToAbuseReports < Gitlab::Database::Migration[2.1]
STATUS_AND_CREATED_AT_INDEX = 'index_abuse_reports_on_status_and_created_at'
STATUS_AND_UPDATED_AT_INDEX = 'index_abuse_reports_on_status_and_updated_at'
disable_ddl_transaction!
def up
add_concurrent_index :abuse_reports, [:status, :created_at], name: STATUS_AND_CREATED_AT_INDEX
add_concurrent_index :abuse_reports, [:status, :updated_at], name: STATUS_AND_UPDATED_AT_INDEX
end
def down
remove_concurrent_index_by_name :abuse_reports, STATUS_AND_CREATED_AT_INDEX
remove_concurrent_index_by_name :abuse_reports, STATUS_AND_UPDATED_AT_INDEX
end
end

View File

@ -0,0 +1 @@
15c56632eafda4ab511368001a7bbfdf9f346049ab19a9df3ad2c96adc12f1a0

View File

@ -29145,8 +29145,12 @@ CREATE INDEX idx_vulnerability_reads_project_id_scanner_id_vulnerability_id ON v
CREATE UNIQUE INDEX idx_work_item_types_on_namespace_id_and_name_null_namespace ON work_item_types USING btree (btrim(lower(name)), ((namespace_id IS NULL))) WHERE (namespace_id IS NULL);
CREATE INDEX index_abuse_reports_on_status_and_created_at ON abuse_reports USING btree (status, created_at);
CREATE INDEX index_abuse_reports_on_status_and_id ON abuse_reports USING btree (status, id);
CREATE INDEX index_abuse_reports_on_status_and_updated_at ON abuse_reports USING btree (status, updated_at);
CREATE INDEX index_abuse_reports_on_status_category_and_id ON abuse_reports USING btree (status, category, id);
CREATE INDEX index_abuse_reports_on_user_id ON abuse_reports USING btree (user_id);

View File

@ -20,6 +20,8 @@ information:
## List repository commits
> Commits by author [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/114417) in GitLab 15.10.
Get a list of repository commits in a project.
```plaintext
@ -33,6 +35,7 @@ GET /projects/:id/repository/commits
| `since` | string | no | Only commits after or on this date are returned in ISO 8601 format `YYYY-MM-DDTHH:MM:SSZ` |
| `until` | string | no | Only commits before or on this date are returned in ISO 8601 format `YYYY-MM-DDTHH:MM:SSZ` |
| `path` | string | no | The file path |
| `author` | string | no | Search commits by commit author.|
| `all` | boolean | no | Retrieve every commit from the repository |
| `with_stats` | boolean | no | Stats about each commit are added to the response |
| `first_parent` | boolean | no | Follow only the first parent commit upon seeing a merge commit |

View File

@ -11858,7 +11858,7 @@ CI/CD variables for a project.
| <a id="cirunnerjobcount"></a>`jobCount` | [`Int`](#int) | Number of jobs processed by the runner (limited to 1000, plus one to indicate that more items exist). |
| <a id="cirunnerjobexecutionstatus"></a>`jobExecutionStatus` **{warning-solid}** | [`CiRunnerJobExecutionStatus`](#cirunnerjobexecutionstatus) | **Introduced** in 15.7. This feature is in Alpha. It can be changed or removed at any time. Job execution status of the runner. |
| <a id="cirunnerlocked"></a>`locked` | [`Boolean`](#boolean) | Indicates the runner is locked. |
| <a id="cirunnermachines"></a>`machines` | [`CiRunnerMachineConnection`](#cirunnermachineconnection) | Machines associated with the runner configuration. (see [Connections](#connections)) |
| <a id="cirunnermachines"></a>`machines` **{warning-solid}** | [`CiRunnerMachineConnection`](#cirunnermachineconnection) | **Introduced** in 15.10. This feature is in Alpha. It can be changed or removed at any time. Machines associated with the runner configuration. |
| <a id="cirunnermaintenancenote"></a>`maintenanceNote` | [`String`](#string) | Runner's maintenance notes. |
| <a id="cirunnermaintenancenotehtml"></a>`maintenanceNoteHtml` | [`String`](#string) | GitLab Flavored Markdown rendering of `maintenance_note`. |
| <a id="cirunnermaximumtimeout"></a>`maximumTimeout` | [`Int`](#int) | Maximum timeout (in seconds) for jobs processed by the runner. |
@ -15216,6 +15216,24 @@ Exposes aggregated value stream flow metrics.
#### Fields with arguments
##### `GroupValueStreamAnalyticsFlowMetrics.cycleTime`
Median time from first commit to issue closed.
Returns [`ValueStreamAnalyticsMetric`](#valuestreamanalyticsmetric).
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="groupvaluestreamanalyticsflowmetricscycletimeassigneeusernames"></a>`assigneeUsernames` | [`[String!]`](#string) | Usernames of users assigned to the issue. |
| <a id="groupvaluestreamanalyticsflowmetricscycletimeauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author of the issue. |
| <a id="groupvaluestreamanalyticsflowmetricscycletimefrom"></a>`from` | [`Time!`](#time) | Issues created after the date. |
| <a id="groupvaluestreamanalyticsflowmetricscycletimelabelnames"></a>`labelNames` | [`[String!]`](#string) | Labels applied to the issue. |
| <a id="groupvaluestreamanalyticsflowmetricscycletimemilestonetitle"></a>`milestoneTitle` | [`String`](#string) | Milestone applied to the issue. |
| <a id="groupvaluestreamanalyticsflowmetricscycletimeprojectids"></a>`projectIds` | [`[ID!]`](#id) | Project IDs within the group hierarchy. |
| <a id="groupvaluestreamanalyticsflowmetricscycletimeto"></a>`to` | [`Time!`](#time) | Issues created before the date. |
##### `GroupValueStreamAnalyticsFlowMetrics.deploymentCount`
Number of production deployments in the given period.
@ -15248,6 +15266,24 @@ Returns [`ValueStreamAnalyticsMetric`](#valuestreamanalyticsmetric).
| <a id="groupvaluestreamanalyticsflowmetricsissuecountprojectids"></a>`projectIds` | [`[ID!]`](#id) | Project IDs within the group hierarchy. |
| <a id="groupvaluestreamanalyticsflowmetricsissuecountto"></a>`to` | [`Time!`](#time) | Issues created before the date. |
##### `GroupValueStreamAnalyticsFlowMetrics.leadTime`
Median time from when the issue was created to when it was closed.
Returns [`ValueStreamAnalyticsMetric`](#valuestreamanalyticsmetric).
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="groupvaluestreamanalyticsflowmetricsleadtimeassigneeusernames"></a>`assigneeUsernames` | [`[String!]`](#string) | Usernames of users assigned to the issue. |
| <a id="groupvaluestreamanalyticsflowmetricsleadtimeauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author of the issue. |
| <a id="groupvaluestreamanalyticsflowmetricsleadtimefrom"></a>`from` | [`Time!`](#time) | Issues created after the date. |
| <a id="groupvaluestreamanalyticsflowmetricsleadtimelabelnames"></a>`labelNames` | [`[String!]`](#string) | Labels applied to the issue. |
| <a id="groupvaluestreamanalyticsflowmetricsleadtimemilestonetitle"></a>`milestoneTitle` | [`String`](#string) | Milestone applied to the issue. |
| <a id="groupvaluestreamanalyticsflowmetricsleadtimeprojectids"></a>`projectIds` | [`[ID!]`](#id) | Project IDs within the group hierarchy. |
| <a id="groupvaluestreamanalyticsflowmetricsleadtimeto"></a>`to` | [`Time!`](#time) | Issues created before the date. |
### `GroupWikiRepositoryRegistry`
Represents the Geo sync and verification state of a group wiki repository.
@ -19666,6 +19702,23 @@ Exposes aggregated value stream flow metrics.
#### Fields with arguments
##### `ProjectValueStreamAnalyticsFlowMetrics.cycleTime`
Median time from first commit to issue closed.
Returns [`ValueStreamAnalyticsMetric`](#valuestreamanalyticsmetric).
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="projectvaluestreamanalyticsflowmetricscycletimeassigneeusernames"></a>`assigneeUsernames` | [`[String!]`](#string) | Usernames of users assigned to the issue. |
| <a id="projectvaluestreamanalyticsflowmetricscycletimeauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author of the issue. |
| <a id="projectvaluestreamanalyticsflowmetricscycletimefrom"></a>`from` | [`Time!`](#time) | Issues created after the date. |
| <a id="projectvaluestreamanalyticsflowmetricscycletimelabelnames"></a>`labelNames` | [`[String!]`](#string) | Labels applied to the issue. |
| <a id="projectvaluestreamanalyticsflowmetricscycletimemilestonetitle"></a>`milestoneTitle` | [`String`](#string) | Milestone applied to the issue. |
| <a id="projectvaluestreamanalyticsflowmetricscycletimeto"></a>`to` | [`Time!`](#time) | Issues created before the date. |
##### `ProjectValueStreamAnalyticsFlowMetrics.deploymentCount`
Number of production deployments in the given period.
@ -19696,6 +19749,23 @@ Returns [`ValueStreamAnalyticsMetric`](#valuestreamanalyticsmetric).
| <a id="projectvaluestreamanalyticsflowmetricsissuecountmilestonetitle"></a>`milestoneTitle` | [`String`](#string) | Milestone applied to the issue. |
| <a id="projectvaluestreamanalyticsflowmetricsissuecountto"></a>`to` | [`Time!`](#time) | Issues created before the date. |
##### `ProjectValueStreamAnalyticsFlowMetrics.leadTime`
Median time from when the issue was created to when it was closed.
Returns [`ValueStreamAnalyticsMetric`](#valuestreamanalyticsmetric).
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="projectvaluestreamanalyticsflowmetricsleadtimeassigneeusernames"></a>`assigneeUsernames` | [`[String!]`](#string) | Usernames of users assigned to the issue. |
| <a id="projectvaluestreamanalyticsflowmetricsleadtimeauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author of the issue. |
| <a id="projectvaluestreamanalyticsflowmetricsleadtimefrom"></a>`from` | [`Time!`](#time) | Issues created after the date. |
| <a id="projectvaluestreamanalyticsflowmetricsleadtimelabelnames"></a>`labelNames` | [`[String!]`](#string) | Labels applied to the issue. |
| <a id="projectvaluestreamanalyticsflowmetricsleadtimemilestonetitle"></a>`milestoneTitle` | [`String`](#string) | Milestone applied to the issue. |
| <a id="projectvaluestreamanalyticsflowmetricsleadtimeto"></a>`to` | [`Time!`](#time) | Issues created before the date. |
### `PrometheusAlert`
The alert condition for Prometheus.
@ -21415,11 +21485,22 @@ fields relate to interactions between the two entities.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="valuestreamanalyticsmetricidentifier"></a>`identifier` | [`String!`](#string) | Identifier for the metric. |
| <a id="valuestreamanalyticsmetriclinks"></a>`links` | [`[String!]!`](#string) | Optional links for drilling down. |
| <a id="valuestreamanalyticsmetriclinks"></a>`links` | [`[ValueStreamMetricLinkType!]!`](#valuestreammetriclinktype) | Optional links for drilling down. |
| <a id="valuestreamanalyticsmetrictitle"></a>`title` | [`String!`](#string) | Title for the metric. |
| <a id="valuestreamanalyticsmetricunit"></a>`unit` | [`String`](#string) | Unit of measurement. |
| <a id="valuestreamanalyticsmetricvalue"></a>`value` | [`Float`](#float) | Value for the metric. |
### `ValueStreamMetricLinkType`
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="valuestreammetriclinktypedocslink"></a>`docsLink` | [`Boolean`](#boolean) | Link to the metric documentation. |
| <a id="valuestreammetriclinktypelabel"></a>`label` | [`String!`](#string) | Label for the link. |
| <a id="valuestreammetriclinktypename"></a>`name` | [`String!`](#string) | Name of the link group. |
| <a id="valuestreammetriclinktypeurl"></a>`url` | [`String!`](#string) | Drill-down URL. |
### `VulnerabilitiesCountByDay`
Represents the count of vulnerabilities by severity on a particular day. This data is retained for 365 days.

Binary file not shown.

After

Width:  |  Height:  |  Size: 139 KiB

View File

@ -136,3 +136,39 @@ For example, see the `server` component of the `web-pages` service:
![web-pages-server-component SLI](img/stage_group_dashboards_service_sli_detail.png)
To add more SLIs tailored to specific features, you can use an [Application SLI](../application_slis/index.md).
## Kibana dashboard for error budgets
For a detailed analysis you can use [a specialized Kibana dashboard](https://log.gprd.gitlab.net/goto/771b5c10-c0ec-11ed-85ed-e7557b0a598c), like this:
![Kibana dashboard](img/error_budgets_kibana_dashboard_v15_10.png)
Description:
- **Apdex requests over limit** (graph) - Displays only requests that exceeded their
target duration.
- **Apdex operations over-limit duration** (graph) - Displays the distribution of duration
components (database, Redis, Gitaly, and Rails app).
- **Apdex requests** (pie chart) - Displays the percentage of `2xx`, `3xx`, `4xx` and
`5xx` requests.
- **Slow request component distribution** - Highlights the component responsible
for the Apdex violation.
- **Apdex operations over limit** (table) - Displays the number of operations over
the limit for each endpoint.
- **Apdex requests over limit** - Displays a list of individual requests responsible
for the Apdex violation.
### Use the dashboard
1. Select the feature category you want to investigate.
1. Scroll to the **Feature Category** section. Enter the feature name.
1. Select **Apply changes**. Selected results contain only requests related to this feature category.
1. Select the time frame for the investigation.
1. Review dashboard and pay attention to the type of failures.
Questions to answer:
1. Does the failure pattern look like a spike? Or does it persist?
1. Does the failure look related to a particular component? (database, Redis, ...)
1. Does the failure affect a specific endpoint? Or is it system-wide?
1. Does the failure appear to be caused by infrastructure incidents?

View File

@ -0,0 +1,26 @@
---
stage: none
group: unassigned
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Contribute to UX design
## UX Design
These instructions are specifically for those wanting to make UX design contributions to GitLab.
The UX department at GitLab uses [Figma](https://www.figma.com/) for all of its designs, and you can see our [Design Repository documentation](https://gitlab.com/gitlab-org/gitlab-design/blob/master/README.md#getting-started) for details on working with our files.
You may leverage the [Pajamas UI Kit](https://www.figma.com/community/file/781156790581391771) in Figma to create mockups for your proposals. However, we will also gladly accept handmade drawings and sketches, wireframes, manipulated DOM screenshots, or prototypes. You can find design resources documentation in our [Design System](https://design.gitlab.com/). Use it to understand where and when to use common design solutions.
## Contributing to Pajamas
To contribute to [Pajamas design system](https://design.gitlab.com/) and the [UI kit](https://www.figma.com/community/file/781156790581391771), follow the [contribution guidelines](https://design.gitlab.com/get-started/contribute) documented in the handbook. While the instructions are code-focused, they will help you understand the overall process of contributing.
## Contributing to other issues
1. Review the list of available issues that are currently [accepting UX contribution](https://gitlab.com/groups/gitlab-org/-/issues/?sort=weight&state=opened&label_name%5B%5D=UX&label_name%5B%5D=workflow%3A%3Aready%20for%20design&label_name%5B%5D=Accepting%20UX%20contributions&first_page_size=20).
1. Find an issue that does not have an Assignee to ensure someone else is not working on a solution. Add the `~"workflow::design"` and `~"Community contribution"` labels and mention `@gitlab-com/gitlab-ux/reviewers` to request they assign the issue to you.
1. Add your design proposal to the issue description/[design management](../../user/project/issues/design_management.md) section. Remember to keep the scope of the proposal/change small following our [MVCs guidelines](https://about.gitlab.com/handbook/values/#minimal-viable-change-mvc).
1. If you have any questions or are ready for a review of your proposal, mention `@gitlab-com/gitlab-ux/reviewers` in a comment to make your request.

View File

@ -0,0 +1,11 @@
---
redirect_to: '../organization/index.md'
remove_date: '2023-06-13'
---
This document was moved to [another location](../organization/index.md).
<!-- This redirect file can be deleted after <2023-06-13>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->

View File

@ -0,0 +1,11 @@
---
redirect_to: '../organization/index.md'
remove_date: '2023-06-13'
---
This document was moved to [another location](../organization/index.md).
<!-- This redirect file can be deleted after <2023-06-13>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->

View File

@ -78,6 +78,10 @@ module API
type: String,
desc: 'The file path',
documentation: { example: 'README.md' }
optional :author,
type: String,
desc: 'Search commits by commit author',
documentation: { example: 'John Smith' }
optional :all, type: Boolean, desc: 'Every commit will be returned'
optional :with_stats, type: Boolean, desc: 'Stats about each commit will be added to the response'
optional :first_parent, type: Boolean, desc: 'Only include the first parent of merges'
@ -101,6 +105,7 @@ module API
with_stats = params[:with_stats]
first_parent = params[:first_parent]
order = params[:order]
author = params[:author]
commits = user_project.repository.commits(ref,
path: path,
@ -111,6 +116,7 @@ module API
all: all,
first_parent: first_parent,
order: order,
author: author,
trailers: params[:trailers])
serializer = with_stats ? Entities::CommitWithStats : Entities::Commit

View File

@ -1,174 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Ci
module Reports
module Security
class VulnerabilityReportsComparer
include Gitlab::Utils::StrongMemoize
attr_reader :base_report, :head_report
ACCEPTABLE_REPORT_AGE = 1.week
MAX_FINDINGS_COUNT = 25
def initialize(project, base_report, head_report)
@base_report = base_report
@head_report = head_report
@signatures_enabled = project.licensed_feature_available?(:vulnerability_finding_signatures)
if @signatures_enabled
@added_findings = []
@fixed_findings = []
calculate_changes
end
end
def base_report_created_at
@base_report.created_at
end
def head_report_created_at
@head_report.created_at
end
def base_report_out_of_date
return false unless @base_report.created_at
ACCEPTABLE_REPORT_AGE.ago > @base_report.created_at
end
def added
strong_memoize(:added) do
all_added_findings.take(MAX_FINDINGS_COUNT) # rubocop:disable CodeReuse/ActiveRecord (This is Array#take)
end
end
def fixed
strong_memoize(:fixed) do
all_fixed_findings.take(MAX_FINDINGS_COUNT) # rubocop:disable CodeReuse/ActiveRecord (This is Array#take)
end
end
private
def calculate_changes
# This is a deconstructed version of the eql? method on
# Ci::Reports::Security::Finding. It:
#
# * precomputes for the head_findings (using FindingMatcher):
# * sets of signature shas grouped by priority
# * mappings of signature shas to the head finding object
#
# These are then used when iterating the base findings to perform
# fast(er) prioritized, signature-based comparisons between each base finding
# and the head findings.
#
# Both the head_findings and base_findings arrays are iterated once
base_findings = base_report.findings
head_findings = head_report.findings
matcher = FindingMatcher.new(head_findings)
base_findings.each do |base_finding|
next if base_finding.requires_manual_resolution?
matched_head_finding = matcher.find_and_remove_match!(base_finding)
@fixed_findings << base_finding if matched_head_finding.nil?
end
@added_findings = matcher.unmatched_head_findings.values
end
def all_added_findings
if @signatures_enabled
@added_findings
else
head_report.findings - base_report.findings
end
end
def all_fixed_findings
if @signatures_enabled
@fixed_findings
else
base_report.findings - head_report.findings
end
end
end
class FindingMatcher
attr_reader :unmatched_head_findings, :head_findings
include Gitlab::Utils::StrongMemoize
def initialize(head_findings)
@head_findings = head_findings
@unmatched_head_findings = @head_findings.index_by(&:object_id)
end
def find_and_remove_match!(base_finding)
matched_head_finding = find_matched_head_finding_for(base_finding)
# no signatures matched, so check the normal uuids of the base and head findings
# for a match
matched_head_finding = head_signatures_shas[base_finding.uuid] if matched_head_finding.nil?
@unmatched_head_findings.delete(matched_head_finding.object_id) unless matched_head_finding.nil?
matched_head_finding
end
private
def find_matched_head_finding_for(base_finding)
base_signature = sorted_signatures_for(base_finding).find do |signature|
# at this point a head_finding exists that has a signature with a
# matching priority, and a matching sha --> lookup the actual finding
# object from head_signatures_shas
head_signatures_shas[signature.signature_sha].eql?(base_finding)
end
base_signature.present? ? head_signatures_shas[base_signature.signature_sha] : nil
end
def sorted_signatures_for(base_finding)
base_finding.signatures.select { |signature| head_finding_signature?(signature) }
.sort_by { |sig| -sig.priority }
end
def head_finding_signature?(signature)
head_signatures_priorities[signature.priority].include?(signature.signature_sha)
end
def head_signatures_priorities
strong_memoize(:head_signatures_priorities) do
signatures_priorities = Hash.new { |hash, key| hash[key] = Set.new }
head_findings.each_with_object(signatures_priorities) do |head_finding, memo|
head_finding.signatures.each do |signature|
memo[signature.priority].add(signature.signature_sha)
end
end
end
end
def head_signatures_shas
strong_memoize(:head_signatures_shas) do
head_findings.each_with_object({}) do |head_finding, memo|
head_finding.signatures.each do |signature|
memo[signature.signature_sha] = head_finding
end
# for the final uuid check when no signatures have matched
memo[head_finding.uuid] = head_finding
end
end
end
end
end
end
end
end

View File

@ -2427,6 +2427,9 @@ msgstr ""
msgid "Add request manually"
msgstr ""
msgid "Add start and due date"
msgstr ""
msgid "Add suggestion to batch"
msgstr ""

View File

@ -48,6 +48,30 @@ RSpec.describe 'Issue Detail', :js, feature_category: :team_planning do
end
end
context 'when issue description has task list items' do
before do
description = '- [ ] I am a task
| Table |
|-------|
| <ul><li>[ ] I am inside a table</li><ul> |'
issue.update!(description: description)
sign_in(user)
visit project_issue_path(project, issue)
end
it 'shows task actions ellipsis button when hovering over the task list item, but not within a table', :aggregate_failures do
find('li', text: 'I am a task').hover
expect(page).to have_button 'Task actions'
find('li', text: 'I am inside a table').hover
expect(page).not_to have_button 'Task actions'
end
end
context 'when issue description has xss snippet' do
before do
issue.update!(description: '![xss" onload=alert(1);//](a)')

View File

@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe AbuseReportsFinder, '#execute' do
let_it_be(:user1) { create(:user) }
let_it_be(:user2) { create(:user) }
let_it_be(:abuse_report_1) { create(:abuse_report, category: 'spam', user: user1) }
let_it_be(:abuse_report_2) { create(:abuse_report, :closed, category: 'phishing', user: user2) }
let_it_be(:abuse_report_1) { create(:abuse_report, id: 20, category: 'spam', user: user1) }
let_it_be(:abuse_report_2) { create(:abuse_report, :closed, id: 30, category: 'phishing', user: user2) }
let(:params) { {} }
@ -72,4 +72,40 @@ RSpec.describe AbuseReportsFinder, '#execute' do
expect(subject).to match_array([abuse_report_2])
end
end
describe 'sorting' do
let(:params) { { sort: 'created_at_asc' } }
it 'returns reports sorted by the specified sort attribute' do
expect(subject).to eq [abuse_report_1, abuse_report_2]
end
context 'when sort is not specified' do
let(:params) { {} }
it "returns reports sorted by #{described_class::DEFAULT_SORT}" do
expect(subject).to eq [abuse_report_2, abuse_report_1]
end
end
context 'when sort is not supported' do
let(:params) { { sort: 'superiority' } }
it "returns reports sorted by #{described_class::DEFAULT_SORT}" do
expect(subject).to eq [abuse_report_2, abuse_report_1]
end
end
context 'when abuse_reports_list feature flag is disabled' do
let_it_be(:abuse_report_3) { create(:abuse_report, id: 10) }
before do
stub_feature_flags(abuse_reports_list: false)
end
it 'returns reports sorted by id in descending order' do
expect(subject).to eq [abuse_report_2, abuse_report_1, abuse_report_3]
end
end
end
end

View File

@ -6,6 +6,8 @@ import {
FILTERED_SEARCH_TOKENS,
FILTERED_SEARCH_TOKEN_USER,
FILTERED_SEARCH_TOKEN_STATUS,
DEFAULT_SORT,
SORT_OPTIONS,
} from '~/admin/abuse_reports/constants';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import { FILTERED_SEARCH_TERM } from '~/vue_shared/components/filtered_search_bar/constants';
@ -42,6 +44,8 @@ describe('AbuseReportsFilteredSearchBar', () => {
recentSearchesStorageKey: 'abuse_reports',
searchInputPlaceholder: 'Filter reports',
tokens: FILTERED_SEARCH_TOKENS,
initialSortBy: DEFAULT_SORT,
sortOptions: SORT_OPTIONS,
});
});
@ -78,36 +82,113 @@ describe('AbuseReportsFilteredSearchBar', () => {
]);
});
describe('when filter bar is submitted', () => {
it('redirects with user query param', () => {
describe('initial sort', () => {
it.each(
SORT_OPTIONS.flatMap(({ sortDirection: { descending, ascending } }) => [
descending,
ascending,
]),
)(
'parses sort=%s query and passes it to `FilteredSearchBar` component as initialSortBy',
(sortBy) => {
setWindowLocation(`?sort=${sortBy}`);
createComponent();
expect(findFilteredSearchBar().props('initialSortBy')).toEqual(sortBy);
},
);
it(`uses ${DEFAULT_SORT} as initialSortBy when sort query param is invalid`, () => {
setWindowLocation(`?sort=unknown`);
createComponent();
findFilteredSearchBar().vm.$emit('onFilter', [
{ type: FILTERED_SEARCH_TOKEN_USER.type, value: { data: 'mr_abuser', operator: '=' } },
]);
expect(findFilteredSearchBar().props('initialSortBy')).toEqual(DEFAULT_SORT);
});
});
describe('onFilter', () => {
const USER_FILTER_TOKEN = {
type: FILTERED_SEARCH_TOKEN_USER.type,
value: { data: 'mr_abuser', operator: '=' },
};
const createComponentAndFilter = (filterTokens, initialLocation) => {
if (initialLocation) {
setWindowLocation(initialLocation);
}
createComponent();
findFilteredSearchBar().vm.$emit('onFilter', filterTokens);
};
it('redirects with user query param', () => {
createComponentAndFilter([USER_FILTER_TOKEN]);
expect(redirectTo).toHaveBeenCalledWith('https://localhost/?user=mr_abuser');
});
it('redirects with status query param', () => {
createComponent();
findFilteredSearchBar().vm.$emit('onFilter', [
{ type: FILTERED_SEARCH_TOKEN_STATUS.type, value: { data: 'open', operator: '=' } },
]);
const statusFilterToken = {
type: FILTERED_SEARCH_TOKEN_STATUS.type,
value: { data: 'open', operator: '=' },
};
createComponentAndFilter([statusFilterToken]);
expect(redirectTo).toHaveBeenCalledWith('https://localhost/?status=open');
});
it('ignores search query param', () => {
const searchFilterToken = { type: FILTERED_SEARCH_TERM, value: { data: 'ignored' } };
createComponentAndFilter([USER_FILTER_TOKEN, searchFilterToken]);
expect(redirectTo).toHaveBeenCalledWith('https://localhost/?user=mr_abuser');
});
it('redirects without page query param', () => {
createComponentAndFilter([USER_FILTER_TOKEN], '?page=2');
expect(redirectTo).toHaveBeenCalledWith('https://localhost/?user=mr_abuser');
});
it('redirects with existing sort query param', () => {
createComponentAndFilter([USER_FILTER_TOKEN], `?sort=${DEFAULT_SORT}`);
expect(redirectTo).toHaveBeenCalledWith(
`https://localhost/?user=mr_abuser&sort=${DEFAULT_SORT}`,
);
});
});
describe('onSort', () => {
const SORT_VALUE = 'updated_at_asc';
const EXISTING_QUERY = 'status=closed&user=mr_abuser';
const createComponentAndSort = (initialLocation) => {
setWindowLocation(initialLocation);
createComponent();
findFilteredSearchBar().vm.$emit('onSort', SORT_VALUE);
};
findFilteredSearchBar().vm.$emit('onFilter', [
{ type: FILTERED_SEARCH_TOKEN_STATUS.type, value: { data: 'open', operator: '=' } },
{ type: FILTERED_SEARCH_TERM, value: { data: 'ignored' } },
]);
it('redirects to URL with existing query params and the sort query param', () => {
createComponentAndSort(`?${EXISTING_QUERY}`);
expect(redirectTo).toHaveBeenCalledWith('https://localhost/?status=open');
expect(redirectTo).toHaveBeenCalledWith(
`https://localhost/?${EXISTING_QUERY}&sort=${SORT_VALUE}`,
);
});
it('redirects without page query param', () => {
createComponentAndSort(`?${EXISTING_QUERY}&page=2`);
expect(redirectTo).toHaveBeenCalledWith(
`https://localhost/?${EXISTING_QUERY}&sort=${SORT_VALUE}`,
);
});
it('redirects with existing sort query param replaced with the new one', () => {
createComponentAndSort(`?${EXISTING_QUERY}&sort=created_at_desc`);
expect(redirectTo).toHaveBeenCalledWith(
`https://localhost/?${EXISTING_QUERY}&sort=${SORT_VALUE}`,
);
});
});
});

View File

@ -38,7 +38,8 @@ const defaultState = {
createdBefore,
createdAfter,
stageCounts,
endpoints: { fullPath, groupPath },
groupPath,
namespace: { fullPath },
};
function createStore({ initialState = {}, initialGetters = {} }) {

View File

@ -219,6 +219,8 @@ export const group = {
};
export const currentGroup = convertObjectPropsToCamelCase(group, { deep: true });
export const groupNamespace = { id: currentGroup.id, fullPath: `groups/${currentGroup.path}` };
export const projectNamespace = { fullPath: 'some/cool/path' };
export const selectedProjects = [
{

View File

@ -13,21 +13,13 @@ import {
createdBefore,
initialPaginationState,
reviewEvents,
projectNamespace as namespace,
} from '../mock_data';
const { id: groupId, path: groupPath } = currentGroup;
const mockMilestonesPath = 'mock-milestones.json';
const mockLabelsPath = 'mock-labels.json';
const mockRequestPath = 'some/cool/path';
const { path: groupPath } = currentGroup;
const mockMilestonesPath = `/${namespace.fullPath}/-/milestones.json`;
const mockLabelsPath = `/${namespace.fullPath}/-/labels.json`;
const mockFullPath = '/namespace/-/analytics/value_stream_analytics/value_streams';
const mockEndpoints = {
fullPath: mockFullPath,
requestPath: mockRequestPath,
labelsPath: mockLabelsPath,
milestonesPath: mockMilestonesPath,
groupId,
groupPath,
};
const mockSetDateActionCommit = {
payload: { createdAfter, createdBefore },
type: 'SET_DATE_RANGE',
@ -35,6 +27,7 @@ const mockSetDateActionCommit = {
const defaultState = {
...getters,
namespace,
selectedValueStream,
createdAfter,
createdBefore,
@ -81,7 +74,8 @@ describe('Project Value Stream Analytics actions', () => {
const selectedAssigneeList = ['Assignee 1', 'Assignee 2'];
const selectedLabelList = ['Label 1', 'Label 2'];
const payload = {
endpoints: mockEndpoints,
namespace,
groupPath,
selectedAuthor,
selectedMilestone,
selectedAssigneeList,
@ -92,7 +86,7 @@ describe('Project Value Stream Analytics actions', () => {
groupEndpoint: 'foo',
labelsEndpoint: mockLabelsPath,
milestonesEndpoint: mockMilestonesPath,
projectEndpoint: '/namespace/-/analytics/value_stream_analytics/value_streams',
projectEndpoint: namespace.fullPath,
};
it('will dispatch fetchValueStreams actions and commit SET_LOADING and INITIALIZE_VSA', () => {
@ -193,7 +187,6 @@ describe('Project Value Stream Analytics actions', () => {
beforeEach(() => {
state = {
...defaultState,
endpoints: mockEndpoints,
selectedStage,
};
mock = new MockAdapter(axios);
@ -219,7 +212,6 @@ describe('Project Value Stream Analytics actions', () => {
beforeEach(() => {
state = {
...defaultState,
endpoints: mockEndpoints,
selectedStage,
};
mock = new MockAdapter(axios);
@ -243,7 +235,6 @@ describe('Project Value Stream Analytics actions', () => {
beforeEach(() => {
state = {
...defaultState,
endpoints: mockEndpoints,
selectedStage,
};
mock = new MockAdapter(axios);
@ -265,9 +256,7 @@ describe('Project Value Stream Analytics actions', () => {
const mockValueStreamPath = /\/analytics\/value_stream_analytics\/value_streams/;
beforeEach(() => {
state = {
endpoints: mockEndpoints,
};
state = { namespace };
mock = new MockAdapter(axios);
mock.onGet(mockValueStreamPath).reply(HTTP_STATUS_OK);
});
@ -333,7 +322,7 @@ describe('Project Value Stream Analytics actions', () => {
beforeEach(() => {
state = {
endpoints: mockEndpoints,
namespace,
selectedValueStream,
};
mock = new MockAdapter(axios);

View File

@ -17,12 +17,14 @@ import {
rawStageCounts,
stageCounts,
initialPaginationState as pagination,
projectNamespace as mockNamespace,
} from '../mock_data';
let state;
const rawEvents = rawIssueEvents.events;
const convertedEvents = issueEvents.events;
const mockRequestPath = 'fake/request/path';
const mockGroupPath = 'groups/path';
const mockFeatures = { some: 'feature' };
const mockCreatedAfter = '2020-06-18';
const mockCreatedBefore = '2020-07-18';
@ -64,19 +66,22 @@ describe('Project Value Stream Analytics mutations', () => {
const mockSetDatePayload = { createdAfter: mockCreatedAfter, createdBefore: mockCreatedBefore };
const mockInitialPayload = {
endpoints: { requestPath: mockRequestPath },
currentGroup: { title: 'cool-group' },
id: 1337,
groupPath: mockGroupPath,
namespace: mockNamespace,
features: mockFeatures,
...mockSetDatePayload,
};
const mockInitializedObj = {
endpoints: { requestPath: mockRequestPath },
...mockSetDatePayload,
};
it.each`
mutation | stateKey | value
${types.INITIALIZE_VSA} | ${'endpoints'} | ${{ requestPath: mockRequestPath }}
${types.INITIALIZE_VSA} | ${'features'} | ${mockFeatures}
${types.INITIALIZE_VSA} | ${'namespace'} | ${mockNamespace}
${types.INITIALIZE_VSA} | ${'groupPath'} | ${mockGroupPath}
${types.INITIALIZE_VSA} | ${'createdAfter'} | ${mockCreatedAfter}
${types.INITIALIZE_VSA} | ${'createdBefore'} | ${mockCreatedBefore}
`('$mutation will set $stateKey', ({ mutation, stateKey, value }) => {

View File

@ -92,7 +92,8 @@ describe('Value stream analytics utils', () => {
const createdAfter = '2021-09-01';
const createdBefore = '2021-11-06';
const groupPath = 'fake-group';
const fullPath = 'fake-group/fake-project';
const namespaceName = 'Fake project';
const namespaceFullPath = 'fake-group/fake-project';
const labelsPath = '/fake-group/fake-project/-/labels.json';
const milestonesPath = '/fake-group/fake-project/-/milestones.json';
const requestPath = '/fake-group/fake-project/-/value_stream_analytics';
@ -101,7 +102,8 @@ describe('Value stream analytics utils', () => {
projectId,
createdBefore,
createdAfter,
fullPath,
namespaceName,
namespaceFullPath,
requestPath,
labelsPath,
milestonesPath,
@ -122,13 +124,13 @@ describe('Value stream analytics utils', () => {
expect(res.createdAfter).toEqual(new Date(createdAfter));
});
it('sets the namespace', () => {
expect(res.namespace.name).toBe(namespaceName);
expect(res.namespace.fullPath).toBe(namespaceFullPath);
});
it('sets the endpoints', () => {
const { endpoints } = res;
expect(endpoints.fullPath).toBe(fullPath);
expect(endpoints.requestPath).toBe(requestPath);
expect(endpoints.labelsPath).toBe(labelsPath);
expect(endpoints.milestonesPath).toBe(milestonesPath);
expect(endpoints.groupPath).toBe(`groups/${groupPath}`);
expect(res.groupPath).toBe(`groups/${groupPath}`);
});
it('returns null when there is no stage', () => {

View File

@ -21,11 +21,7 @@ import {
getIssueDetailsResponse,
projectWorkItemTypesQueryResponse,
} from 'jest/work_items/mock_data';
import {
descriptionProps as initialProps,
descriptionHtmlWithList,
descriptionHtmlWithCheckboxes,
} from '../mock_data/mock_data';
import { descriptionProps as initialProps, descriptionHtmlWithList } from '../mock_data/mock_data';
jest.mock('~/flash');
jest.mock('~/lib/utils/url_utility', () => ({
@ -295,21 +291,6 @@ describe('Description component', () => {
});
});
describe('description with checkboxes', () => {
beforeEach(() => {
createComponent({
props: {
descriptionHtml: descriptionHtmlWithCheckboxes,
},
});
return nextTick();
});
it('renders a list of hidden buttons corresponding to checkboxes in description HTML', () => {
expect(findTaskActionButtons()).toHaveLength(3);
});
});
describe('task list item actions', () => {
describe('converting the task list item to a task', () => {
describe('when successful', () => {

View File

@ -66,17 +66,3 @@ export const descriptionHtmlWithList = `
<li data-sourcepos="3:1-3:8">todo 3</li>
</ul>
`;
export const descriptionHtmlWithCheckboxes = `
<ul dir="auto" class="task-list" data-sourcepos"3:1-5:12">
<li class="task-list-item" data-sourcepos="3:1-3:11">
<input class="task-list-item-checkbox" type="checkbox"> todo 1
</li>
<li class="task-list-item" data-sourcepos="4:1-4:12">
<input class="task-list-item-checkbox" type="checkbox"> todo 2
</li>
<li class="task-list-item" data-sourcepos="5:1-5:12">
<input class="task-list-item-checkbox" type="checkbox"> todo 3
</li>
</ul>
`;

View File

@ -1,4 +1,5 @@
import axios from 'axios';
import Autosize from 'autosize';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import { mountExtended } from 'helpers/vue_test_utils_helper';
@ -10,8 +11,10 @@ import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
import { stubComponent } from 'helpers/stub_component';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import waitForPromises from 'helpers/wait_for_promises';
jest.mock('~/emoji');
jest.mock('autosize');
describe('vue_shared/component/markdown/markdown_editor', () => {
useLocalStorageSpy();
@ -107,6 +110,35 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
});
});
describe('autosize', () => {
it('autosizes the textarea when the value changes', async () => {
buildWrapper();
await findTextarea().setValue('Lots of newlines\n\n\n\n\n\n\nMore content\n\n\nand newlines');
expect(Autosize.update).toHaveBeenCalled();
});
it('autosizes the textarea when the value changes from outside the component', async () => {
buildWrapper();
wrapper.setProps({ value: 'Lots of newlines\n\n\n\n\n\n\nMore content\n\n\nand newlines' });
await nextTick();
await waitForPromises();
expect(Autosize.update).toHaveBeenCalled();
});
it('does not autosize the textarea if markdown editor is disabled', async () => {
buildWrapper();
findMarkdownField().vm.$emit('enableContentEditor');
wrapper.setProps({ value: 'Lots of newlines\n\n\n\n\n\n\nMore content\n\n\nand newlines' });
await nextTick();
await waitForPromises();
expect(Autosize.update).not.toHaveBeenCalled();
});
});
describe('autosave', () => {
it('automatically saves the textarea value to local storage if autosaveKey is defined', () => {
buildWrapper({ propsData: { autosaveKey: 'issue/1234', value: 'This is **markdown**' } });

View File

@ -1,187 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for the comparer that diffs a base and a head aggregated security
# report to compute the added and fixed vulnerability findings for an MR.
RSpec.describe Gitlab::Ci::Reports::Security::VulnerabilityReportsComparer, feature_category: :vulnerability_management do
  let(:identifier) { build(:ci_reports_security_identifier) }

  let_it_be(:project) { create(:project, :repository) }

  let(:location_param) { build(:ci_reports_security_locations_sast, :dynamic) }
  let(:vulnerability_params) { vuln_params(project.id, [identifier], confidence: :low, severity: :critical) }
  let(:base_vulnerability) { build(:ci_reports_security_finding, location: location_param, **vulnerability_params) }
  let(:base_report) { build(:ci_reports_security_aggregated_reports, findings: [base_vulnerability]) }

  # Shares the base finding's UUID and location so the comparer treats the
  # head finding as the same vulnerability unless a spec overrides these lets.
  let(:head_vulnerability) { build(:ci_reports_security_finding, location: location_param, uuid: base_vulnerability.uuid, **vulnerability_params) }
  let(:head_report) { build(:ci_reports_security_aggregated_reports, findings: [head_vulnerability]) }

  # Rebuilds base/head findings with the :dynamic trait so their
  # fingerprints differ and the comparer finds no overlap between reports.
  shared_context 'comparing reports' do
    let(:vul_params) { vuln_params(project.id, [identifier]) }
    let(:base_vulnerability) { build(:ci_reports_security_finding, :dynamic, **vul_params) }
    let(:head_vulnerability) { build(:ci_reports_security_finding, :dynamic, **vul_params) }
    let(:head_vul_findings) { [head_vulnerability, vuln] }
  end

  subject { described_class.new(project, base_report, head_report) }

  # Exercise every example with the vulnerability_finding_signatures
  # licensed feature both enabled and disabled.
  where(vulnerability_finding_signatures: [true, false])

  with_them do
    before do
      stub_licensed_features(vulnerability_finding_signatures: vulnerability_finding_signatures)
    end

    describe '#base_report_out_of_date' do
      context 'no base report' do
        let(:base_report) { build(:ci_reports_security_aggregated_reports, reports: [], findings: []) }

        it 'is not out of date' do
          expect(subject.base_report_out_of_date).to be false
        end
      end

      context 'base report older than one week' do
        let(:report) { build(:ci_reports_security_report, created_at: 1.week.ago - 60.seconds) }
        let(:base_report) { build(:ci_reports_security_aggregated_reports, reports: [report]) }

        # NOTE(review): renamed from 'is not out of date' — the assertion
        # expects the stale base report to BE out of date.
        it 'is out of date' do
          expect(subject.base_report_out_of_date).to be true
        end
      end

      context 'base report less than one week old' do
        let(:report) { build(:ci_reports_security_report, created_at: 1.week.ago + 60.seconds) }
        let(:base_report) { build(:ci_reports_security_aggregated_reports, reports: [report]) }

        it 'is not out of date' do
          expect(subject.base_report_out_of_date).to be false
        end
      end
    end

    describe '#added' do
      let(:new_location) { build(:ci_reports_security_locations_sast, :dynamic) }
      let(:vul_params) { vuln_params(project.id, [identifier], confidence: :high) }
      let(:vuln) { build(:ci_reports_security_finding, severity: Enums::Vulnerability.severity_levels[:critical], location: new_location, **vul_params) }
      let(:low_vuln) { build(:ci_reports_security_finding, severity: Enums::Vulnerability.severity_levels[:low], location: new_location, **vul_params) }

      context 'with new vulnerability' do
        let(:head_report) { build(:ci_reports_security_aggregated_reports, findings: [head_vulnerability, vuln]) }

        it 'points to source tree' do
          expect(subject.added).to eq([vuln])
        end
      end

      context 'when comparing reports with different fingerprints' do
        include_context 'comparing reports'

        let(:head_report) { build(:ci_reports_security_aggregated_reports, findings: head_vul_findings) }

        it 'does not find any overlap' do
          expect(subject.added).to eq(head_vul_findings)
        end
      end

      context 'order' do
        let(:head_report) { build(:ci_reports_security_aggregated_reports, findings: [head_vulnerability, vuln, low_vuln]) }

        it 'does not change' do
          expect(subject.added).to eq([vuln, low_vuln])
        end
      end

      describe 'number of findings' do
        let(:head_report) { build(:ci_reports_security_aggregated_reports, findings: [head_vulnerability, vuln, low_vuln]) }

        before do
          stub_const("#{described_class}::MAX_FINDINGS_COUNT", 1)
        end

        it 'returns no more than `MAX_FINDINGS_COUNT`' do
          expect(subject.added).to eq([vuln])
        end
      end
    end

    describe '#fixed' do
      let(:vul_params) { vuln_params(project.id, [identifier]) }
      # FIX: removed the stray space in `**vul_params )` (Layout/SpaceInsideParens).
      let(:vuln) { build(:ci_reports_security_finding, :dynamic, **vul_params) }
      let(:medium_vuln) { build(:ci_reports_security_finding, confidence: ::Enums::Vulnerability.confidence_levels[:high], severity: Enums::Vulnerability.severity_levels[:medium], uuid: vuln.uuid, **vul_params) }

      context 'with fixed vulnerability' do
        let(:base_report) { build(:ci_reports_security_aggregated_reports, findings: [base_vulnerability, vuln]) }

        it 'points to base tree' do
          expect(subject.fixed).to eq([vuln])
        end
      end

      context 'when comparing reports with different fingerprints' do
        include_context 'comparing reports'

        let(:base_report) { build(:ci_reports_security_aggregated_reports, findings: [base_vulnerability, vuln]) }

        it 'does not find any overlap' do
          expect(subject.fixed).to eq([base_vulnerability, vuln])
        end
      end

      context 'order' do
        let(:vul_findings) { [vuln, medium_vuln] }
        let(:base_report) { build(:ci_reports_security_aggregated_reports, findings: [*vul_findings, base_vulnerability]) }

        it 'does not change' do
          expect(subject.fixed).to eq(vul_findings)
        end
      end

      describe 'number of findings' do
        let(:base_report) { build(:ci_reports_security_aggregated_reports, findings: [vuln, medium_vuln, base_vulnerability]) }

        before do
          stub_const("#{described_class}::MAX_FINDINGS_COUNT", 1)
        end

        it 'returns no more than `MAX_FINDINGS_COUNT`' do
          expect(subject.fixed).to eq([vuln])
        end
      end
    end

    describe 'with empty vulnerabilities' do
      let(:empty_report) { build(:ci_reports_security_aggregated_reports, reports: [], findings: []) }

      it 'returns empty array when reports are not present' do
        comparer = described_class.new(project, empty_report, empty_report)

        expect(comparer.fixed).to eq([])
        expect(comparer.added).to eq([])
      end

      it 'returns added vulnerability when base is empty and head is not empty' do
        comparer = described_class.new(project, empty_report, head_report)

        expect(comparer.fixed).to eq([])
        expect(comparer.added).to eq([head_vulnerability])
      end

      it 'returns fixed vulnerability when head is empty and base is not empty' do
        comparer = described_class.new(project, base_report, empty_report)

        expect(comparer.fixed).to eq([base_vulnerability])
        expect(comparer.added).to eq([])
      end
    end
  end

  # Builds the keyword arguments for a security finding factory, translating
  # confidence/severity symbols into their enum integer values.
  def vuln_params(project_id, identifiers, confidence: :high, severity: :critical)
    {
      project_id: project_id,
      report_type: :sast,
      identifiers: identifiers,
      confidence: ::Enums::Vulnerability.confidence_levels[confidence],
      severity: ::Enums::Vulnerability.severity_levels[severity]
    }
  end
end

View File

@ -132,6 +132,42 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
it_behaves_like 'project commits'
end
context 'with author parameter' do
let(:params) { { author: 'Zaporozhets' } }
it 'returns only this author commits' do
get api(route, user), params: params
expect(response).to have_gitlab_http_status(:ok)
author_names = json_response.map { |commit| commit['author_name'] }.uniq
expect(author_names).to contain_exactly('Dmitriy Zaporozhets')
end
context 'when author is missing' do
let(:params) { { author: '' } }
it 'returns all commits' do
get api(route, user), params: params
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to eq(20)
end
end
context 'when author does not exists' do
let(:params) { { author: 'does not exist' } }
it 'returns an empty list' do
get api(route, user), params: params
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq([])
end
end
end
context 'when repository does not exist' do
let(:project) { create(:project, creator: user, path: 'my.project') }

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService do
RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService, feature_category: :container_registry do
using RSpec::Parameterized::TableSyntax
include_context 'for a cleanup tags service'
@ -149,6 +149,20 @@ RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService do
it_behaves_like 'when running a container_expiration_policy',
delete_expectations: [%w[Ba Bb C]]
end
context 'with no tags page' do
let(:tags_page_size) { 1000 }
let(:deleted) { [] }
let(:params) { {} }
before do
allow(repository.gitlab_api_client)
.to receive(:tags)
.and_return({})
end
it { is_expected.to eq(expected_service_response(status: :success, deleted: [], original_size: 0)) }
end
end
private

View File

@ -543,7 +543,9 @@ RSpec.configure do |config|
end
end
Support::AbilityCheck.inject(Ability.singleton_class)
# Disabled because it's causing N+1 queries.
# See https://gitlab.com/gitlab-org/gitlab/-/issues/396352.
# Support::AbilityCheck.inject(Ability.singleton_class)
ActiveRecord::Migration.maintain_test_schema!

View File

@ -203,3 +203,262 @@ RSpec.shared_examples 'value stream analytics flow metrics deploymentCount examp
end
end
end
# Shared examples for the GraphQL `flowMetrics.leadTime` field, reusable for
# both group- and project-level contexts (the including spec defines
# `context`, `full_path`, `group`, `project1`, `project2` and `current_user`).
RSpec.shared_examples 'value stream analytics flow metrics leadTime examples' do
  let_it_be(:milestone) { create(:milestone, group: group) }
  let_it_be(:label) { create(:group_label, group: group) }

  let_it_be(:author) { create(:user) }
  let_it_be(:assignee) { create(:user) }

  # Lead times (created_at -> closed_at): 5, 3, 3, 5 days => average 4.
  let_it_be(:issue1) do
    create(:issue, project: project1, author: author, created_at: 17.days.ago, closed_at: 12.days.ago)
  end

  let_it_be(:issue2) do
    create(:issue, project: project2, author: author, created_at: 16.days.ago, closed_at: 13.days.ago)
  end

  let_it_be(:issue3) do
    create(:labeled_issue,
      project: project1,
      labels: [label],
      author: author,
      milestone: milestone,
      assignees: [assignee],
      created_at: 14.days.ago,
      closed_at: 11.days.ago)
  end

  let_it_be(:issue4) do
    create(:labeled_issue,
      project: project2,
      labels: [label],
      assignees: [assignee],
      created_at: 20.days.ago,
      closed_at: 15.days.ago)
  end

  before do
    # Populate the value stream analytics aggregation tables the metric reads.
    Analytics::CycleAnalytics::DataLoaderService.new(group: group, model: Issue).execute
  end

  let(:query) do
    <<~QUERY
      query($path: ID!, $assigneeUsernames: [String!], $authorUsername: String, $milestoneTitle: String, $labelNames: [String!], $from: Time!, $to: Time!) {
        #{context}(fullPath: $path) {
          flowMetrics {
            leadTime(assigneeUsernames: $assigneeUsernames, authorUsername: $authorUsername, milestoneTitle: $milestoneTitle, labelNames: $labelNames, from: $from, to: $to) {
              value
              unit
              identifier
              title
              links {
                label
                url
              }
            }
          }
        }
      }
    QUERY
  end

  let(:variables) do
    {
      path: full_path,
      from: 21.days.ago.iso8601,
      to: 10.days.ago.iso8601
    }
  end

  subject(:result) do
    post_graphql(query, current_user: current_user, variables: variables)

    graphql_data.dig(context.to_s, 'flowMetrics', 'leadTime')
  end

  it 'returns the correct value' do
    expect(result).to match(a_hash_including({
      'identifier' => 'lead_time',
      'unit' => n_('day', 'days', 4),
      'value' => 4,
      'title' => _('Lead Time'),
      'links' => [
        { 'label' => s_('ValueStreamAnalytics|Dashboard'), 'url' => match(/issues_analytics/) },
        { 'label' => s_('ValueStreamAnalytics|Go to docs'), 'url' => match(/definitions/) }
      ]
    }))
  end

  context 'when the user is not authorized' do
    let(:current_user) { create(:user) }

    it 'returns nil' do
      expect(result).to eq(nil)
    end
  end

  context 'when outside of the date range' do
    let(:variables) do
      {
        path: full_path,
        from: 30.days.ago.iso8601,
        to: 25.days.ago.iso8601
      }
    end

    # NOTE(review): renamed from 'returns 0 count' — the metric value is
    # nil (no data), not 0, and that is what the assertion checks.
    it 'returns nil value' do
      expect(result).to match(a_hash_including({ 'value' => nil }))
    end
  end

  context 'with all filters' do
    let(:variables) do
      {
        path: full_path,
        assigneeUsernames: [assignee.username],
        labelNames: [label.title],
        authorUsername: author.username,
        milestoneTitle: milestone.title,
        from: 20.days.ago.iso8601,
        to: 10.days.ago.iso8601
      }
    end

    # Only issue3 matches every filter; its lead time is 3 days.
    it 'returns filtered count' do
      expect(result).to match(a_hash_including({ 'value' => 3 }))
    end
  end
end
# Shared examples for the GraphQL `flowMetrics.cycleTime` field, reusable for
# both group- and project-level contexts (the including spec defines
# `context`, `full_path`, `group`, `project1`, `project2` and `current_user`).
RSpec.shared_examples 'value stream analytics flow metrics cycleTime examples' do
  let_it_be(:milestone) { create(:milestone, group: group) }
  let_it_be(:label) { create(:group_label, group: group) }

  let_it_be(:author) { create(:user) }
  let_it_be(:assignee) { create(:user) }

  # Cycle times (first_mentioned_in_commit_at -> closed_at):
  # 5, 3, 3, 5 days => average 4.
  let_it_be(:issue1) do
    create(:issue, project: project1, author: author, closed_at: 12.days.ago).tap do |issue|
      issue.metrics.update!(first_mentioned_in_commit_at: 17.days.ago)
    end
  end

  let_it_be(:issue2) do
    create(:issue, project: project2, author: author, closed_at: 13.days.ago).tap do |issue|
      issue.metrics.update!(first_mentioned_in_commit_at: 16.days.ago)
    end
  end

  let_it_be(:issue3) do
    create(:labeled_issue,
      project: project1,
      labels: [label],
      author: author,
      milestone: milestone,
      assignees: [assignee],
      closed_at: 11.days.ago).tap do |issue|
      issue.metrics.update!(first_mentioned_in_commit_at: 14.days.ago)
    end
  end

  let_it_be(:issue4) do
    create(:labeled_issue,
      project: project2,
      labels: [label],
      assignees: [assignee],
      closed_at: 15.days.ago).tap do |issue|
      issue.metrics.update!(first_mentioned_in_commit_at: 20.days.ago)
    end
  end

  before do
    # Populate the value stream analytics aggregation tables the metric reads.
    Analytics::CycleAnalytics::DataLoaderService.new(group: group, model: Issue).execute
  end

  let(:query) do
    <<~QUERY
      query($path: ID!, $assigneeUsernames: [String!], $authorUsername: String, $milestoneTitle: String, $labelNames: [String!], $from: Time!, $to: Time!) {
        #{context}(fullPath: $path) {
          flowMetrics {
            cycleTime(assigneeUsernames: $assigneeUsernames, authorUsername: $authorUsername, milestoneTitle: $milestoneTitle, labelNames: $labelNames, from: $from, to: $to) {
              value
              unit
              identifier
              title
              links {
                label
                url
              }
            }
          }
        }
      }
    QUERY
  end

  let(:variables) do
    {
      path: full_path,
      from: 21.days.ago.iso8601,
      to: 10.days.ago.iso8601
    }
  end

  subject(:result) do
    post_graphql(query, current_user: current_user, variables: variables)

    graphql_data.dig(context.to_s, 'flowMetrics', 'cycleTime')
  end

  it 'returns the correct value' do
    expect(result).to eq({
      'identifier' => 'cycle_time',
      'unit' => n_('day', 'days', 4),
      'value' => 4,
      'title' => _('Cycle Time'),
      'links' => []
    })
  end

  context 'when the user is not authorized' do
    let(:current_user) { create(:user) }

    it 'returns nil' do
      expect(result).to eq(nil)
    end
  end

  context 'when outside of the date range' do
    let(:variables) do
      {
        path: full_path,
        from: 30.days.ago.iso8601,
        to: 25.days.ago.iso8601
      }
    end

    # NOTE(review): renamed from 'returns 0 count' — the metric value is
    # nil (no data), not 0, and that is what the assertion checks.
    it 'returns nil value' do
      expect(result).to match(a_hash_including({ 'value' => nil }))
    end
  end

  context 'with all filters' do
    let(:variables) do
      {
        path: full_path,
        assigneeUsernames: [assignee.username],
        labelNames: [label.title],
        authorUsername: author.username,
        milestoneTitle: milestone.title,
        from: 20.days.ago.iso8601,
        to: 10.days.ago.iso8601
      }
    end

    # Only issue3 matches every filter; its cycle time is 3 days.
    it 'returns filtered count' do
      expect(result).to match(a_hash_including({ 'value' => 3 }))
    end
  end
end