Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-10-02 15:13:28 +00:00
parent b5cafdc023
commit 285aff2385
103 changed files with 1561 additions and 809 deletions

View File

@ -82,7 +82,7 @@ Instructions on how to start GitLab and how to run the tests can be found in the
GitLab is a Ruby on Rails application that runs on the following software:
- Ubuntu/Debian/CentOS/RHEL/OpenSUSE
- Ruby (MRI) 3.1.4
- Ruby (MRI) 3.2.5
- Git 2.33+
- Redis 6.0+
- PostgreSQL 14.9+

View File

@ -1,6 +1,6 @@
import $ from 'jquery';
import Pikaday from 'pikaday';
import { parsePikadayDate, pikadayToString } from '~/lib/utils/datetime_utility';
import { parsePikadayDate, toISODateFormat } from '~/lib/utils/datetime_utility';
export default function initDatePickers() {
$('.datepicker').each(function initPikaday() {
@ -13,7 +13,7 @@ export default function initDatePickers() {
format: 'yyyy-mm-dd',
container: $datePicker.parent().get(0),
parse: (dateString) => parsePikadayDate(dateString),
toString: (date) => pikadayToString(date),
toString: (date) => toISODateFormat(date),
onSelect(dateText) {
$datePicker.val(calendar.toString(dateText));
},

View File

@ -35,3 +35,6 @@ export const TYPE_ORGANIZATION = 'Organizations::Organization';
export const TYPE_USERS_SAVED_REPLY = 'Users::SavedReply';
export const TYPE_WORKSPACE = 'RemoteDevelopment::Workspace';
export const TYPE_COMPLIANCE_FRAMEWORK = 'ComplianceManagement::Framework';
export const QUERY_PARAM_START_CURSOR = 'start_cursor';
export const QUERY_PARAM_END_CURSOR = 'end_cursor';

View File

@ -1,5 +1,6 @@
import { isArray } from 'lodash';
import Visibility from 'visibilityjs';
import { QUERY_PARAM_START_CURSOR, QUERY_PARAM_END_CURSOR } from './constants';
/**
* Ids generated by GraphQL endpoints are usually in the format
@ -162,3 +163,25 @@ export const etagQueryHeaders = (featureCorrelation, etagResource = '') => {
},
};
};
export const calculateGraphQLPaginationQueryParams = ({
startCursor,
endCursor,
routeQuery: { start_cursor, end_cursor, ...routeQuery },
}) => {
if (startCursor) {
return {
...routeQuery,
[QUERY_PARAM_START_CURSOR]: startCursor,
};
}
if (endCursor) {
return {
...routeQuery,
[QUERY_PARAM_END_CURSOR]: endCursor,
};
}
return routeQuery;
};
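For illustration, a minimal sketch of how the relocated helper behaves, assuming a Vue Router query object like the ones the organizations views below pass in; any stale cursor params are dropped and unrelated params are kept:

import { calculateGraphQLPaginationQueryParams } from '~/graphql_shared/utils';

// Hypothetical route query for illustration only.
const routeQuery = { search: 'foo', start_cursor: 'oldCursor' };

// Paging forward replaces the stale start_cursor with the new end_cursor.
calculateGraphQLPaginationQueryParams({ startCursor: null, endCursor: 'abc123', routeQuery });
// => { search: 'foo', end_cursor: 'abc123' }

// Paging backward writes start_cursor instead.
calculateGraphQLPaginationQueryParams({ startCursor: 'xyz789', endCursor: null, routeQuery });
// => { search: 'foo', start_cursor: 'xyz789' }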

View File

@ -2,7 +2,7 @@ import $ from 'jquery';
import Pikaday from 'pikaday';
import GfmAutoComplete from 'ee_else_ce/gfm_auto_complete';
import Autosave from '~/autosave';
import { parsePikadayDate, pikadayToString } from '~/lib/utils/datetime_utility';
import { parsePikadayDate, toISODateFormat } from '~/lib/utils/datetime_utility';
import { queryToObject, objectToQuery } from '~/lib/utils/url_utility';
import UsersSelect from '~/users_select';
import ZenMode from '~/zen_mode';
@ -114,7 +114,7 @@ export default class IssuableForm {
format: 'yyyy-mm-dd',
container: $issuableDueDate.parent().get(0),
parse: (dateString) => parsePikadayDate(dateString),
toString: (date) => pikadayToString(date),
toString: (date) => toISODateFormat(date),
onSelect: (dateText) => {
$issuableDueDate.val(calendar.toString(dateText));
if (this.autosaves.has('due_date')) this.autosaves.get('due_date').save();

View File

@ -33,7 +33,7 @@ export const MILLISECONDS_IN_DAY = 24 * 60 * 60 * 1000;
* for UTC-8 timezone.
*
* @param {string|number|Date} date
* @returns {Date|null|undefined}
* @returns {Date|null|undefined} A Date object in local time
*/
export const newDate = (date) => {
if (date === null) {
@ -42,6 +42,7 @@ export const newDate = (date) => {
if (date === undefined) {
return undefined;
}
// Fix historical bug so we return a local time for `yyyy-mm-dd` date-only strings
if (typeof date === 'string' && DATE_ONLY_REGEX.test(date)) {
const parts = date.split('-');
const year = parseInt(parts[0], 10);
@ -545,48 +546,6 @@ export const dateAtFirstDayOfMonth = (date) => new Date(cloneDate(date).setDate(
*/
export const datesMatch = (date1, date2) => differenceInMilliseconds(date1, date2) === 0;
/**
* A utility function which checks if two date ranges overlap.
*
* @param {Object} givenPeriodLeft - the first period to compare.
* @param {Object} givenPeriodRight - the second period to compare.
* @returns {Object} { overlap: number of days the overlap is present, overlapStartDate: the start date of the overlap in time format, overlapEndDate: the end date of the overlap in time format }
* @throws {Error} Uncaught Error: Invalid period
*
* @example
* getOverlappingDaysInPeriods(
* { start: new Date(2021, 0, 11), end: new Date(2021, 0, 13) },
* { start: new Date(2021, 0, 11), end: new Date(2021, 0, 14) }
* ) => { daysOverlap: 2, overlapStartDate: 1610323200000, overlapEndDate: 1610496000000 }
*
*/
export const getOverlappingDaysInPeriods = (givenPeriodLeft = {}, givenPeriodRight = {}) => {
const leftStartTime = new Date(givenPeriodLeft.start).getTime();
const leftEndTime = new Date(givenPeriodLeft.end).getTime();
const rightStartTime = new Date(givenPeriodRight.start).getTime();
const rightEndTime = new Date(givenPeriodRight.end).getTime();
if (!(leftStartTime <= leftEndTime && rightStartTime <= rightEndTime)) {
throw new Error(__('Invalid period'));
}
const isOverlapping = leftStartTime < rightEndTime && rightStartTime < leftEndTime;
if (!isOverlapping) {
return { daysOverlap: 0 };
}
const overlapStartDate = Math.max(leftStartTime, rightStartTime);
const overlapEndDate = rightEndTime > leftEndTime ? leftEndTime : rightEndTime;
const differenceInMs = overlapEndDate - overlapStartDate;
return {
daysOverlap: Math.ceil(differenceInMs / MILLISECONDS_IN_DAY),
overlapStartDate,
overlapEndDate,
};
};
/**
* Mimics the behaviour of the rails distance_of_time_in_words function
* https://api.rubyonrails.org/classes/ActionView/Helpers/DateHelper.html#method-i-distance_of_time_in_words
@ -634,21 +593,6 @@ export const approximateDuration = (seconds = 0) => {
return n__('1 day', '%d days', seconds < ONE_DAY_LIMIT ? 1 : days);
};
/**
* A utility function which helps creating a date object
* for a specific date. Accepts the year, month and day
* returning a date object for the given params.
*
* @param {Int} year the full year as a number i.e. 2020
* @param {Int} month the month index i.e. January => 0
* @param {Int} day the day as a number i.e. 23
*
* @return {Date} the date object from the params
*/
export const dateFromParams = (year, month, day) => {
return new Date(year, month, day);
};
/**
* A utility function which computes a formatted 24 hour
* time string from a positive int in the range 0 - 24.

View File

@ -357,6 +357,19 @@ export const timeToHoursMinutes = (time = '') => {
return { hours, minutes };
};
/**
* Converts a Date object to a date-only string in the ISO format `yyyy-mm-dd`
*
* @param {Date} date A Date object
* @returns {string} A string in the format `yyyy-mm-dd`
*/
export const toISODateFormat = (date) => {
const day = padWithZeros(date.getDate());
const month = padWithZeros(date.getMonth() + 1);
const year = date.getFullYear();
return `${year}-${month}-${day}`;
};
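A quick usage sketch, assuming the padWithZeros helper already used in this file: the string is built from local date parts, so it matches the calendar date the user sees rather than the UTC date.

import { toISODateFormat } from '~/lib/utils/datetime_utility';

// new Date(year, monthIndex, day) is interpreted in local time.
toISODateFormat(new Date(2024, 9, 2)); // => '2024-10-02'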
/**
* This combines a date and a time and returns the computed Date's ISO string representation.
*
@ -516,3 +529,15 @@ export const humanTimeframe = (startDate, dueDate) => {
}
return '';
};
/**
* Formats seconds into a human readable value of elapsed time,
* optionally limiting it to hours.
* @param {Number} seconds Seconds to format
* @param {Boolean} limitToHours Whether or not to limit the elapsed time to be expressed in hours
* @return {String} Provided seconds in human readable elapsed time format
*/
export const formatTimeSpent = (seconds, limitToHours) => {
const negative = seconds < 0;
return (negative ? '- ' : '') + stringifyTime(parseSeconds(seconds, { limitToHours }));
};
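For illustration, a sketch of calling the relocated helper, assuming the existing stringifyTime and parseSeconds helpers in this file; only the leading sign depends on the sign of the input.

import { formatTimeSpent } from '~/lib/utils/datetime_utility';

// Exact output comes from stringifyTime; 3600 seconds renders along the lines of '1h'.
formatTimeSpent(3600);
// A negative value only prepends '- ' to the stringifyTime result; limitToHours is forwarded to parseSeconds.
formatTimeSpent(-3600, true);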

View File

@ -1,5 +1,3 @@
export const pad = (val, len = 2) => `0${val}`.slice(-len);
/**
* Formats dates in Pikaday
* @param {String} dateString Date in yyyy-mm-dd format
@ -13,16 +11,3 @@ export const parsePikadayDate = (dateString) => {
return new Date(year, month, day);
};
/**
* Used `onSelect` method in pickaday
* @param {Date} date UTC format
* @return {String} Date formatted in yyyy-mm-dd
*/
export const pikadayToString = (date) => {
const day = pad(date.getDate());
const month = pad(date.getMonth() + 1);
const year = date.getFullYear();
return `${year}-${month}-${day}`;
};

View File

@ -1,13 +0,0 @@
import { stringifyTime, parseSeconds } from './date_format_utility';
/**
* Formats seconds into a human readable value of elapsed time,
* optionally limiting it to hours.
* @param {Number} seconds Seconds to format
* @param {Boolean} limitToHours Whether or not to limit the elapsed time to be expressed in hours
* @return {String} Provided seconds in human readable elapsed time format
*/
export const formatTimeSpent = (seconds, limitToHours) => {
const negative = seconds < 0;
return (negative ? '- ' : '') + stringifyTime(parseSeconds(seconds, { limitToHours }));
};

View File

@ -2,5 +2,4 @@ export * from './datetime/timeago_utility';
export * from './datetime/date_format_utility';
export * from './datetime/date_calculation_utility';
export * from './datetime/pikaday_utility';
export * from './datetime/time_spent_utility';
export * from './datetime/locale_dateformat';

View File

@ -6,16 +6,15 @@ import GroupsView from '~/organizations/shared/components/groups_view.vue';
import ProjectsView from '~/organizations/shared/components/projects_view.vue';
import NewGroupButton from '~/organizations/shared/components/new_group_button.vue';
import NewProjectButton from '~/organizations/shared/components/new_project_button.vue';
import { onPageChange } from '~/organizations/shared/utils';
import { calculateGraphQLPaginationQueryParams } from '~/graphql_shared/utils';
import {
RESOURCE_TYPE_GROUPS,
RESOURCE_TYPE_PROJECTS,
QUERY_PARAM_END_CURSOR,
QUERY_PARAM_START_CURSOR,
SORT_DIRECTION_ASC,
SORT_DIRECTION_DESC,
SORT_ITEM_NAME,
} from '~/organizations/shared/constants';
import { QUERY_PARAM_END_CURSOR, QUERY_PARAM_START_CURSOR } from '~/graphql_shared/constants';
import FilteredSearchAndSort from '~/groups_projects/components/filtered_search_and_sort.vue';
import {
RECENT_SEARCHES_STORAGE_KEY_GROUPS,
@ -172,7 +171,9 @@ export default {
});
},
onPageChange(pagination) {
this.pushQuery(onPageChange({ ...pagination, routeQuery: this.$route.query }));
this.pushQuery(
calculateGraphQLPaginationQueryParams({ ...pagination, routeQuery: this.$route.query }),
);
},
async userPreferencesUpdateMutate(input) {
try {

View File

@ -30,9 +30,6 @@ export const FORM_FIELD_DESCRIPTION_VALIDATORS = [
),
];
export const QUERY_PARAM_START_CURSOR = 'start_cursor';
export const QUERY_PARAM_END_CURSOR = 'end_cursor';
export const SORT_DIRECTION_ASC = 'asc';
export const SORT_DIRECTION_DESC = 'desc';

View File

@ -4,12 +4,7 @@ import {
TIMESTAMP_TYPE_CREATED_AT,
TIMESTAMP_TYPE_UPDATED_AT,
} from '~/vue_shared/components/resource_lists/constants';
import {
SORT_CREATED_AT,
SORT_UPDATED_AT,
QUERY_PARAM_END_CURSOR,
QUERY_PARAM_START_CURSOR,
} from './constants';
import { SORT_CREATED_AT, SORT_UPDATED_AT } from './constants';
const availableGroupActions = (userPermissions) => {
const baseActions = [];
@ -52,28 +47,6 @@ export const formatGroups = (groups) =>
}),
);
export const onPageChange = ({
startCursor,
endCursor,
routeQuery: { start_cursor, end_cursor, ...routeQuery },
}) => {
if (startCursor) {
return {
...routeQuery,
[QUERY_PARAM_START_CURSOR]: startCursor,
};
}
if (endCursor) {
return {
...routeQuery,
[QUERY_PARAM_END_CURSOR]: endCursor,
};
}
return routeQuery;
};
export const timestampType = (sortName) => {
const SORT_MAP = {
[SORT_CREATED_AT]: TIMESTAMP_TYPE_CREATED_AT,

View File

@ -4,16 +4,15 @@ import { isEqual } from 'lodash';
import { s__, __ } from '~/locale';
import GroupsView from '~/organizations/shared/components/groups_view.vue';
import ProjectsView from '~/organizations/shared/components/projects_view.vue';
import { onPageChange } from '~/organizations/shared/utils';
import { calculateGraphQLPaginationQueryParams } from '~/graphql_shared/utils';
import {
RESOURCE_TYPE_GROUPS,
RESOURCE_TYPE_PROJECTS,
QUERY_PARAM_END_CURSOR,
QUERY_PARAM_START_CURSOR,
SORT_CREATED_AT,
SORT_UPDATED_AT,
SORT_DIRECTION_DESC,
} from '~/organizations/shared/constants';
import { QUERY_PARAM_END_CURSOR, QUERY_PARAM_START_CURSOR } from '~/graphql_shared/constants';
import { GROUPS_AND_PROJECTS_PER_PAGE } from '../constants';
import { buildDisplayListboxItem } from '../utils';
@ -108,7 +107,9 @@ export default {
this.pushQuery({ display });
},
onPageChange(pagination) {
this.pushQuery(onPageChange({ ...pagination, routeQuery: this.$route.query }));
this.pushQuery(
calculateGraphQLPaginationQueryParams({ ...pagination, routeQuery: this.$route.query }),
);
},
},
};

View File

@ -7,7 +7,7 @@ import {
getDayName,
getDayDifference,
localeDateFormat,
pikadayToString,
toISODateFormat,
newDate,
} from '~/lib/utils/datetime_utility';
import { n__, s__, __ } from '~/locale';
@ -117,7 +117,7 @@ export default class ActivityCalendar {
date.setDate(date.getDate() + i);
const day = date.getDay();
const count = timestamps[pikadayToString(date)] || 0;
const count = timestamps[toISODateFormat(date)] || 0;
// Create a new group array if this is the first day of the week
// or if is first object

View File

@ -3,9 +3,11 @@ import { GlTabs, GlTab, GlBadge, GlFilteredSearchToken } from '@gitlab/ui';
import { isEqual } from 'lodash';
import { __ } from '~/locale';
import { TIMESTAMP_TYPE_UPDATED_AT } from '~/vue_shared/components/resource_lists/constants';
import { QUERY_PARAM_END_CURSOR, QUERY_PARAM_START_CURSOR } from '~/graphql_shared/constants';
import { numberToMetricPrefix } from '~/lib/utils/number_utils';
import { createAlert } from '~/alert';
import FilteredSearchAndSort from '~/groups_projects/components/filtered_search_and_sort.vue';
import { calculateGraphQLPaginationQueryParams } from '~/graphql_shared/utils';
import { RECENT_SEARCHES_STORAGE_KEY_PROJECTS } from '~/filtered_search/recent_searches_storage_keys';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import { ACCESS_LEVEL_OWNER_INTEGER } from '~/access_level/constants';
@ -133,6 +135,12 @@ export default {
isAscending() {
return this.sort.endsWith(SORT_DIRECTION_ASC);
},
startCursor() {
return this.$route.query[QUERY_PARAM_START_CURSOR];
},
endCursor() {
return this.$route.query[QUERY_PARAM_END_CURSOR];
},
},
methods: {
numberToMetricPrefix,
@ -182,6 +190,11 @@ export default {
this.pushQuery({ sort, ...filters });
},
onPageChange(pagination) {
this.pushQuery(
calculateGraphQLPaginationQueryParams({ ...pagination, routeQuery: this.$route.query }),
);
},
},
};
</script>
@ -201,7 +214,13 @@ export default {
</div>
</template>
<tab-view v-if="tab.query" :tab="tab" />
<tab-view
v-if="tab.query"
:tab="tab"
:start-cursor="startCursor"
:end-cursor="endCursor"
@page-change="onPageChange"
/>
<template v-else>{{ tab.text }}</template>
</gl-tab>

View File

@ -1,7 +1,8 @@
<script>
import { GlLoadingIcon } from '@gitlab/ui';
import { GlLoadingIcon, GlKeysetPagination } from '@gitlab/ui';
import { get } from 'lodash';
import ProjectsList from '~/vue_shared/components/projects_list/projects_list.vue';
import { DEFAULT_PER_PAGE } from '~/api';
import { __ } from '~/locale';
import { createAlert } from '~/alert';
import { formatGraphQLProjects } from '~/vue_shared/components/projects_list/utils';
@ -17,6 +18,7 @@ export default {
},
components: {
GlLoadingIcon,
GlKeysetPagination,
ProjectsList,
},
props: {
@ -24,6 +26,16 @@ export default {
required: true,
type: Object,
},
startCursor: {
type: String,
required: false,
default: null,
},
endCursor: {
type: String,
required: false,
default: null,
},
},
data() {
return {
@ -34,6 +46,9 @@ export default {
projects() {
return {
query: this.tab.query,
variables() {
return this.pagination;
},
update(response) {
const { nodes, pageInfo } = get(response, this.tab.queryPath);
@ -52,6 +67,26 @@ export default {
nodes() {
return this.projects.nodes || [];
},
pageInfo() {
return this.projects.pageInfo || {};
},
pagination() {
if (!this.startCursor && !this.endCursor) {
return {
first: DEFAULT_PER_PAGE,
after: null,
last: null,
before: null,
};
}
return {
first: this.endCursor && DEFAULT_PER_PAGE,
after: this.endCursor,
last: this.startCursor && DEFAULT_PER_PAGE,
before: this.startCursor,
};
},
isLoading() {
return this.$apollo.queries.projects.loading;
},
@ -60,18 +95,34 @@ export default {
onDeleteComplete() {
this.$apollo.queries.projects.refetch();
},
onNext(endCursor) {
this.$emit('page-change', {
endCursor,
startCursor: null,
});
},
onPrev(startCursor) {
this.$emit('page-change', {
endCursor: null,
startCursor,
});
},
},
};
</script>
<template>
<gl-loading-icon v-if="isLoading" class="gl-mt-5" size="md" />
<projects-list
v-else-if="nodes.length"
:projects="nodes"
show-project-icon
list-item-class="gl-px-5"
:timestamp-type="$options.TIMESTAMP_TYPE_UPDATED_AT"
@delete-complete="onDeleteComplete"
/>
<div v-else-if="nodes.length">
<projects-list
:projects="nodes"
show-project-icon
list-item-class="gl-px-5"
:timestamp-type="$options.TIMESTAMP_TYPE_UPDATED_AT"
@delete-complete="onDeleteComplete"
/>
<div v-if="pageInfo.hasNextPage || pageInfo.hasPreviousPage" class="gl-mt-5 gl-text-center">
<gl-keyset-pagination v-bind="pageInfo" @prev="onPrev" @next="onNext" />
</div>
</div>
</template>
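For reference, a sketch of the variables object the pagination computed property above produces for the keyset-paginated queries, assuming DEFAULT_PER_PAGE from ~/api as the page size and placeholder cursor strings:

import { DEFAULT_PER_PAGE } from '~/api';

// Placeholder cursors for illustration only.
const endCursor = 'eyJpZCI6IjQyIn0';
const startCursor = 'eyJpZCI6IjIzIn0';

// No cursor in the route: fetch the first page going forward.
const firstPage = { first: DEFAULT_PER_PAGE, after: null, last: null, before: null };
// end_cursor present in the route (user paged forward):
const nextPage = { first: DEFAULT_PER_PAGE, after: endCursor, last: null, before: null };
// start_cursor present in the route (user paged backward):
const previousPage = { first: null, after: null, last: DEFAULT_PER_PAGE, before: startCursor };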

View File

@ -1,12 +1,16 @@
#import "~/graphql_shared/fragments/page_info.fragment.graphql"
#import "ee_else_ce/graphql_shared/fragments/project.fragment.graphql"
query getContributedProjects {
query getContributedProjects($first: Int, $last: Int, $before: String, $after: String) {
currentUser {
id
contributedProjects {
contributedProjects(first: $first, last: $last, before: $before, after: $after) {
nodes {
...Project
}
pageInfo {
...PageInfo
}
}
}
}

View File

@ -1,9 +1,20 @@
#import "~/graphql_shared/fragments/page_info.fragment.graphql"
#import "ee_else_ce/graphql_shared/fragments/project.fragment.graphql"
query getInactiveProjects {
projects(archived: ONLY, membership: true) {
query getInactiveProjects($first: Int, $last: Int, $before: String, $after: String) {
projects(
archived: ONLY
membership: true
first: $first
last: $last
before: $before
after: $after
) {
nodes {
...Project
}
pageInfo {
...PageInfo
}
}
}

View File

@ -1,9 +1,13 @@
#import "~/graphql_shared/fragments/page_info.fragment.graphql"
#import "ee_else_ce/graphql_shared/fragments/project.fragment.graphql"
query getMembershipProjects {
projects(membership: true) {
query getMembershipProjects($first: Int, $last: Int, $before: String, $after: String) {
projects(membership: true, first: $first, last: $last, before: $before, after: $after) {
nodes {
...Project
}
pageInfo {
...PageInfo
}
}
}

View File

@ -1,9 +1,13 @@
#import "~/graphql_shared/fragments/page_info.fragment.graphql"
#import "ee_else_ce/graphql_shared/fragments/project.fragment.graphql"
query getPersonalProjects {
projects(personal: true) {
query getPersonalProjects($first: Int, $last: Int, $before: String, $after: String) {
projects(personal: true, first: $first, last: $last, before: $before, after: $after) {
nodes {
...Project
}
pageInfo {
...PageInfo
}
}
}

View File

@ -1,12 +1,16 @@
#import "~/graphql_shared/fragments/page_info.fragment.graphql"
#import "ee_else_ce/graphql_shared/fragments/project.fragment.graphql"
query getStarredProjects {
query getStarredProjects($first: Int, $last: Int, $before: String, $after: String) {
currentUser {
id
starredProjects {
starredProjects(first: $first, last: $last, before: $before, after: $after) {
nodes {
...Project
}
pageInfo {
...PageInfo
}
}
}
}

View File

@ -0,0 +1,105 @@
<script>
import { GlFilteredSearchSuggestion } from '@gitlab/ui';
import { createAlert } from '~/alert';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { __ } from '~/locale';
import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
import searchGroupsQuery from '../queries/search_groups.query.graphql';
export default {
components: {
BaseToken,
GlFilteredSearchSuggestion,
},
props: {
config: {
type: Object,
required: true,
},
value: {
type: Object,
required: true,
},
active: {
type: Boolean,
required: true,
},
},
data() {
return {
groups: this.config.initialGroups || [],
loading: false,
};
},
computed: {
defaultGroups() {
return this.config.defaultGroups || [];
},
},
methods: {
fetchGroups(search = '') {
return this.$apollo
.query({
query: searchGroupsQuery,
variables: { search },
})
.then(({ data }) => data.groups.nodes);
},
fetchGroupsBySearchTerm(search) {
this.loading = true;
this.fetchGroups(search)
.then((response) => {
this.groups = response;
})
.catch(() => createAlert({ message: __('There was a problem fetching groups.') }))
.finally(() => {
this.loading = false;
});
},
getActiveGroup(groups, data) {
if (data && groups.length) {
return groups.find((group) => this.getValue(group) === data);
}
return undefined;
},
getValue(group) {
return String(this.getGroupIdProperty(group));
},
displayValue(group) {
return group?.fullName;
},
getGroupIdProperty(group) {
return getIdFromGraphQLId(group.id);
},
},
};
</script>
<template>
<base-token
:config="config"
:value="value"
:active="active"
:suggestions-loading="loading"
:suggestions="groups"
:get-active-token-value="getActiveGroup"
:default-suggestions="defaultGroups"
:value-identifier="getValue"
v-bind="$attrs"
@fetch-suggestions="fetchGroupsBySearchTerm"
v-on="$listeners"
>
<template #view="{ viewTokenProps: { inputValue, activeTokenValue } }">
{{ activeTokenValue ? displayValue(activeTokenValue) : inputValue }}
</template>
<template #suggestions-list="{ suggestions }">
<gl-filtered-search-suggestion
v-for="group in suggestions"
:key="group.id"
:value="getValue(group)"
>
{{ group.fullName }}
</gl-filtered-search-suggestion>
</template>
</base-token>
</template>

View File

@ -0,0 +1,105 @@
<script>
import { GlFilteredSearchSuggestion } from '@gitlab/ui';
import { createAlert } from '~/alert';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { __ } from '~/locale';
import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
import searchProjectsQuery from '../queries/search_projects.query.graphql';
export default {
components: {
BaseToken,
GlFilteredSearchSuggestion,
},
props: {
config: {
type: Object,
required: true,
},
value: {
type: Object,
required: true,
},
active: {
type: Boolean,
required: true,
},
},
data() {
return {
projects: this.config.initialProjects || [],
loading: false,
};
},
computed: {
defaultProjects() {
return this.config.defaultProjects || [];
},
},
methods: {
fetchProjects(search = '') {
return this.$apollo
.query({
query: searchProjectsQuery,
variables: { search },
})
.then(({ data }) => data.projects.nodes);
},
fetchProjectsBySearchTerm(search) {
this.loading = true;
this.fetchProjects(search)
.then((response) => {
this.projects = response;
})
.catch(() => createAlert({ message: __('There was a problem fetching projects.') }))
.finally(() => {
this.loading = false;
});
},
getActiveProject(projects, data) {
if (data && projects.length) {
return projects.find((project) => this.getValue(project) === data);
}
return undefined;
},
getValue(project) {
return String(this.getProjectIdProperty(project));
},
displayValue(project) {
return project?.name;
},
getProjectIdProperty(project) {
return getIdFromGraphQLId(project.id);
},
},
};
</script>
<template>
<base-token
:config="config"
:value="value"
:active="active"
:suggestions-loading="loading"
:suggestions="projects"
:get-active-token-value="getActiveProject"
:default-suggestions="defaultProjects"
:value-identifier="getValue"
v-bind="$attrs"
@fetch-suggestions="fetchProjectsBySearchTerm"
v-on="$listeners"
>
<template #view="{ viewTokenProps: { inputValue, activeTokenValue } }">
{{ activeTokenValue ? displayValue(activeTokenValue) : inputValue }}
</template>
<template #suggestions-list="{ suggestions }">
<gl-filtered-search-suggestion
v-for="project in suggestions"
:key="project.id"
:value="getValue(project)"
>
{{ project.name }}
</gl-filtered-search-suggestion>
</template>
</base-token>
</template>

View File

@ -0,0 +1,8 @@
query searchGroups($search: String) {
groups(search: $search, first: 20) {
nodes {
id
fullName
}
}
}

View File

@ -0,0 +1,8 @@
query searchTodosProjects($search: String) {
projects(search: $search, membership: true, first: 20) {
nodes {
id
name
}
}
}

View File

@ -49,6 +49,7 @@ export default {
action: [],
sort: `${SORT_OPTIONS[0].value}_DESC`,
},
alert: null,
};
},
apollo: {
@ -67,7 +68,7 @@ export default {
return nodes;
},
error(error) {
createAlert({ message: s__('Todos|Something went wrong. Please try again.') });
this.alert = createAlert({ message: s__('Todos|Something went wrong. Please try again.') });
Sentry.captureException(error);
},
},
@ -140,6 +141,7 @@ export default {
};
},
handleFiltersChanged(data) {
this.alert?.dismiss();
this.queryFilterValues = { ...data };
},
},

View File

@ -1,8 +1,15 @@
<script>
import { GlFormGroup, GlCollapsibleListbox, GlSorting } from '@gitlab/ui';
import ProjectSelect from '~/vue_shared/components/entity_select/project_select.vue';
import GroupSelect from '~/vue_shared/components/entity_select/group_select.vue';
import { GlSorting, GlFilteredSearch, GlFilteredSearchToken, GlAlert } from '@gitlab/ui';
import { s__ } from '~/locale';
import {
OPERATORS_IS,
TOKEN_TITLE_GROUP,
TOKEN_TYPE_GROUP,
TOKEN_TITLE_PROJECT,
TOKEN_TYPE_PROJECT,
ENTITY_TYPES,
FILTERED_SEARCH_TERM,
} from '~/vue_shared/components/filtered_search_bar/constants';
import {
TODO_TARGET_TYPE_ISSUE,
TODO_TARGET_TYPE_WORK_ITEM,
@ -24,6 +31,8 @@ import {
TODO_ACTION_TYPE_OKR_CHECKIN_REQUESTED,
TODO_ACTION_TYPE_ADDED_APPROVER,
} from '../constants';
import GroupToken from './filtered_search_tokens/group_token.vue';
import ProjectToken from './filtered_search_tokens/project_token.vue';
export const SORT_OPTIONS = [
{
@ -41,133 +50,174 @@ export const SORT_OPTIONS = [
];
export const TARGET_TYPES = [
{
value: 'any',
text: s__('Todos|Any'),
},
{
value: TODO_TARGET_TYPE_ISSUE,
text: s__('Todos|Issue'),
title: s__('Todos|Issue'),
},
{
value: TODO_TARGET_TYPE_WORK_ITEM,
text: s__('Todos|Work item'),
title: s__('Todos|Work item'),
},
{
value: TODO_TARGET_TYPE_MERGE_REQUEST,
text: s__('Todos|Merge request'),
title: s__('Todos|Merge request'),
},
{
value: TODO_TARGET_TYPE_DESIGN,
text: s__('Todos|Design'),
title: s__('Todos|Design'),
},
{
value: TODO_TARGET_TYPE_ALERT,
text: s__('Todos|Alert'),
title: s__('Todos|Alert'),
},
{
value: TODO_TARGET_TYPE_EPIC,
text: s__('Todos|Epic'),
title: s__('Todos|Epic'),
},
];
export const ACTION_TYPES = [
{
value: 'any',
text: s__('Todos|Any'),
},
{
value: TODO_ACTION_TYPE_ASSIGNED,
text: s__('Todos|Assigned'),
title: s__('Todos|Assigned'),
},
{
value: TODO_ACTION_TYPE_MENTIONED,
text: s__('Todos|Mentioned'),
title: s__('Todos|Mentioned'),
},
{
value: TODO_ACTION_TYPE_BUILD_FAILED,
text: s__('Todos|Build failed'),
title: s__('Todos|Build failed'),
},
{
value: TODO_ACTION_TYPE_MARKED,
text: s__('Todos|Marked'),
title: s__('Todos|Marked'),
},
{
value: TODO_ACTION_TYPE_APPROVAL_REQUIRED,
text: s__('Todos|Approval required'),
title: s__('Todos|Approval required'),
},
{
value: TODO_ACTION_TYPE_UNMERGEABLE,
text: s__('Todos|Unmergeable'),
title: s__('Todos|Unmergeable'),
},
{
value: TODO_ACTION_TYPE_DIRECTLY_ADDRESSED,
text: s__('Todos|Directly addressed'),
title: s__('Todos|Directly addressed'),
},
{
value: TODO_ACTION_TYPE_MERGE_TRAIN_REMOVED,
text: s__('Todos|Merge train removed'),
title: s__('Todos|Merge train removed'),
},
{
value: TODO_ACTION_TYPE_REVIEW_REQUESTED,
text: s__('Todos|Review requested'),
title: s__('Todos|Review requested'),
},
{
value: TODO_ACTION_TYPE_MEMBER_ACCESS_REQUESTED,
text: s__('Todos|Member access request'),
title: s__('Todos|Member access request'),
},
{
value: TODO_ACTION_TYPE_REVIEW_SUBMITTED,
text: s__('Todos|Review submitted'),
title: s__('Todos|Review submitted'),
},
{
value: TODO_ACTION_TYPE_OKR_CHECKIN_REQUESTED,
text: s__('Todos|OKR checkin requested'),
title: s__('Todos|OKR checkin requested'),
},
{
value: TODO_ACTION_TYPE_ADDED_APPROVER,
text: s__('Todos|Added approver'),
title: s__('Todos|Added approver'),
},
];
const DEFAULT_TOKEN_OPTIONS = {
unique: true,
operators: OPERATORS_IS,
};
const TOKEN_TYPE_CATEGORY = 'category';
const TOKEN_TYPE_REASON = 'reason';
const FILTERED_SEARCH_TOKENS = [
{
...DEFAULT_TOKEN_OPTIONS,
icon: 'group',
title: TOKEN_TITLE_GROUP,
type: TOKEN_TYPE_GROUP,
entityType: ENTITY_TYPES.GROUP,
token: GroupToken,
},
{
...DEFAULT_TOKEN_OPTIONS,
icon: 'project',
title: TOKEN_TITLE_PROJECT,
type: TOKEN_TYPE_PROJECT,
entityType: ENTITY_TYPES.PROJECT,
token: ProjectToken,
},
{
...DEFAULT_TOKEN_OPTIONS,
icon: 'overview',
title: s__('Todos|Category'),
type: TOKEN_TYPE_CATEGORY,
token: GlFilteredSearchToken,
options: TARGET_TYPES,
},
{
...DEFAULT_TOKEN_OPTIONS,
icon: 'trigger-source',
title: s__('Todos|Reason'),
type: TOKEN_TYPE_REASON,
token: GlFilteredSearchToken,
options: ACTION_TYPES,
},
];
export default {
FILTERED_SEARCH_TOKENS,
SORT_OPTIONS,
i18n: {
searchTextOptionLabel: s__('Todos|Raw text search is not currently supported'),
fullTextSearchWarning: s__(
'Todos|Raw text search is not currently supported. Please use the available search tokens.',
),
filteredSearchPlaceholder: s__('Todos|Filter to-do items'),
},
components: {
GlFormGroup,
GlCollapsibleListbox,
GlSorting,
GroupSelect,
ProjectSelect,
GlFilteredSearch,
GlAlert,
},
data() {
return {
selectedType: TARGET_TYPES[0].value,
selectedAction: ACTION_TYPES[0].value,
selectedProjectId: null,
selectedGroupId: null,
typeItems: TARGET_TYPES,
actionItems: ACTION_TYPES,
sortOptions: SORT_OPTIONS,
isAscending: false,
sortBy: SORT_OPTIONS[0].value,
filterTokens: [],
showFullTextSearchWarning: false,
};
},
computed: {
filters() {
return Object.fromEntries(
[
['groupId', TOKEN_TYPE_GROUP],
['projectId', TOKEN_TYPE_PROJECT],
['type', TOKEN_TYPE_CATEGORY],
['action', TOKEN_TYPE_REASON],
].map(([param, tokenType]) => {
const selectedValue = this.filterTokens.find((token) => token.type === tokenType);
return [param, selectedValue ? [selectedValue.value.data] : []];
}),
);
},
hasFullTextSearchToken() {
return this.filterTokens.some(
(token) => token.type === FILTERED_SEARCH_TERM && token.value.data.length,
);
},
},
methods: {
handleProjectSelected(data) {
this.selectedProjectId = data?.id;
this.sendFilterChanged();
},
handleGroupSelected(data) {
this.selectedGroupId = data?.id;
this.sendFilterChanged();
},
handleActionSelected(data) {
this.selectedAction = data;
this.sendFilterChanged();
},
handleTypeSelected(data) {
this.selectedType = data;
this.sendFilterChanged();
},
onSortByChange(value) {
this.sortBy = value;
this.sendFilterChanged();
@ -176,18 +226,17 @@ export default {
this.isAscending = isAscending;
this.sendFilterChanged();
},
dismissFullTextSearchWarning() {
this.showFullTextSearchWarning = false;
},
async onFiltersCleared() {
await this.$nextTick();
this.sendFilterChanged();
},
sendFilterChanged() {
this.showFullTextSearchWarning = this.hasFullTextSearchToken;
this.$emit('filters-changed', {
groupId: this.selectedGroupId ? [this.selectedGroupId] : [],
projectId: this.selectedProjectId ? [this.selectedProjectId] : [],
type:
this.selectedType && this.selectedType !== TARGET_TYPES[0].value
? [this.selectedType]
: [],
action:
this.selectedAction && this.selectedAction !== ACTION_TYPES[0].value
? [this.selectedAction]
: [],
...this.filters,
sort: this.isAscending ? `${this.sortBy}_ASC` : `${this.sortBy}_DESC`,
});
},
@ -197,55 +246,31 @@ export default {
<template>
<div class="todos-filters">
<gl-alert
v-if="showFullTextSearchWarning"
variant="warning"
class="gl-mt-3"
@dismiss="dismissFullTextSearchWarning"
>
{{ $options.i18n.fullTextSearchWarning }}
</gl-alert>
<div class="gl-border-b gl-flex gl-flex-col gl-gap-4 gl-bg-gray-10 gl-p-5 sm:gl-flex-row">
<group-select
class="gl-mb-0 gl-w-full sm:gl-w-3/20"
:label="__('Group')"
input-name="group"
input-id="group"
empty-text="Any"
:block="true"
:clearable="true"
@input="handleGroupSelected"
<gl-filtered-search
v-model="filterTokens"
terms-as-tokens
:placeholder="$options.i18n.filteredSearchPlaceholder"
:available-tokens="$options.FILTERED_SEARCH_TOKENS"
:search-text-option-label="$options.i18n.searchTextOptionLabel"
@submit="sendFilterChanged"
@clear="onFiltersCleared"
/>
<project-select
class="gl-mb-0 gl-w-full sm:gl-w-3/20"
:label="__('Project')"
input-name="project"
input-id="project"
empty-text="Any"
:block="true"
:include-subgroups="true"
@input="handleProjectSelected"
<gl-sorting
:sort-options="$options.SORT_OPTIONS"
:sort-by="sortBy"
:is-ascending="isAscending"
@sortByChange="onSortByChange"
@sortDirectionChange="onDirectionChange"
/>
<gl-form-group class="gl-mb-0 gl-w-full sm:gl-w-3/20" :label="__('Author')">
{{ __('Author') }}</gl-form-group
>
<gl-form-group class="gl-mb-0 gl-w-full sm:gl-w-3/20" :label="__('Action')">
<gl-collapsible-listbox
:block="true"
:items="actionItems"
:selected="selectedAction"
@select="handleActionSelected"
/>
</gl-form-group>
<gl-form-group class="gl-mb-0 gl-w-full sm:gl-w-3/20" :label="__('Type')">
<gl-collapsible-listbox
:block="true"
:items="typeItems"
:selected="selectedType"
@select="handleTypeSelected"
/>
</gl-form-group>
<gl-form-group class="gl-mb-0 sm:gl-ml-auto" :label="__('Sort by')">
<gl-sorting
:sort-options="sortOptions"
:sort-by="sortBy"
:is-ascending="isAscending"
@sortByChange="onSortByChange"
@sortDirectionChange="onDirectionChange"
/>
</gl-form-group>
</div>
</div>
</template>
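A standalone sketch of the mapping performed by the filters computed property above, with the token type strings and the selected value inlined as placeholders; GlFilteredSearch stores each selected token as a { type, value: { data } } object, and sendFilterChanged adds the sort key on top of this result:

// Placeholder token as it might appear in filterTokens after picking a Category.
const filterTokens = [{ type: 'category', value: { data: 'ISSUE' } }];

const filters = Object.fromEntries(
  [
    ['groupId', 'group'],
    ['projectId', 'project'],
    ['type', 'category'],
    ['action', 'reason'],
  ].map(([param, tokenType]) => {
    const selected = filterTokens.find((token) => token.type === tokenType);
    return [param, selected ? [selected.value.data] : []];
  }),
);
// => { groupId: [], projectId: [], type: ['ISSUE'], action: [] }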

View File

@ -637,7 +637,7 @@ function UsersSelect(currentUser, els, options = {}) {
)}</a></li>`;
} else {
// 0 margin, because it's now handled by a wrapper
img = `<img src='${avatar}' class='avatar avatar-inline !gl-m-0' width='32' />`;
img = `<img src='${avatar}' alt='' class='avatar avatar-inline !gl-m-0' width='32' />`;
}
return userSelect.renderRow(
@ -728,7 +728,7 @@ UsersSelect.prototype.renderRow = function (
</strong>
${
username
? `<span class="dropdown-menu-user-username gl-text-gray-400">${escape(
? `<span class="dropdown-menu-user-username gl-text-subtle">${escape(
username,
)}</span>`
: ''

View File

@ -33,4 +33,5 @@ export const FAILURE_REASONS = {
locked_paths: __('All paths must be unlocked'),
locked_lfs_files: __('All LFS files must be unlocked.'),
security_policy_evaluation: __('All security policies must be evaluated.'),
security_policy_violations: __('All policy rules must be satisfied.'),
};

View File

@ -130,3 +130,13 @@ export const TOKEN_TYPE_CLOSED = 'closed';
export const TOKEN_TYPE_DEPLOYED_BEFORE = 'deployed-before';
export const TOKEN_TYPE_DEPLOYED_AFTER = 'deployed-after';
export const TOKEN_TYPE_ENVIRONMENT = 'environment';
// Due to the i18n eslint rule we can't have a capitalized string even if it is a case-aware URL param
/* eslint-disable @gitlab/require-i18n-strings */
export const ENTITY_TYPES = {
USER: 'User',
AUTHOR: 'Author',
GROUP: 'Group',
PROJECT: 'Project',
};
/* eslint-enable @gitlab/require-i18n-strings */

View File

@ -507,12 +507,12 @@ export default {
<aside
v-if="hasWidgets"
data-testid="work-item-overview-right-sidebar"
class="work-item-overview-right-sidebar"
class="work-item-overview-right-sidebar gl-px-3"
:class="{ 'is-modal': true }"
>
<template v-if="workItemAssignees">
<work-item-assignees
class="js-assignee gl-mb-5"
class="js-assignee work-item-attributes-item"
:can-update="canUpdate"
:full-path="fullPath"
:is-group="isGroup"
@ -528,7 +528,7 @@ export default {
</template>
<template v-if="workItemLabels">
<work-item-labels
class="js-labels gl-mb-5"
class="js-labels work-item-attributes-item"
:can-update="canUpdate"
:full-path="fullPath"
:is-group="isGroup"
@ -540,7 +540,7 @@ export default {
</template>
<template v-if="workItemRolledupDates">
<work-item-rolledup-dates
class="gl-mb-5"
class="work-item-attributes-item"
:can-update="canUpdate"
:full-path="fullPath"
:due-date-is-fixed="workItemRolledupDates.dueDateIsFixed"
@ -556,7 +556,7 @@ export default {
</template>
<template v-if="workItemHealthStatus">
<work-item-health-status
class="gl-mb-5"
class="work-item-attributes-item"
:work-item-id="workItemId"
:work-item-iid="workItemIid"
:work-item-type="selectedWorkItemTypeName"
@ -566,7 +566,7 @@ export default {
</template>
<template v-if="workItemColor">
<work-item-color
class="gl-mb-5"
class="work-item-attributes-item"
:work-item="workItem"
:full-path="fullPath"
:can-update="canUpdate"
@ -575,7 +575,7 @@ export default {
</template>
<template v-if="workItemCrmContacts">
<work-item-crm-contacts
class="gl-mb-5"
class="work-item-attributes-item"
:full-path="fullPath"
:work-item-id="workItemId"
:work-item-iid="workItemIid"

View File

@ -3,7 +3,7 @@ import { GlButton, GlDatepicker, GlFormGroup, GlOutsideDirective as Outside } fr
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import { s__ } from '~/locale';
import Tracking from '~/tracking';
import { formatDate, newDate, pikadayToString } from '~/lib/utils/datetime_utility';
import { formatDate, newDate, toISODateFormat } from '~/lib/utils/datetime_utility';
import { Mousetrap } from '~/lib/mousetrap';
import { keysFor, SIDEBAR_CLOSE_WIDGET } from '~/behaviors/shortcuts/keybindings';
import {
@ -124,8 +124,8 @@ export default {
),
{
...workItemDatesWidget,
dueDate: this.dirtyDueDate ? pikadayToString(this.dirtyDueDate) : null,
startDate: this.dirtyStartDate ? pikadayToString(this.dirtyStartDate) : null,
dueDate: this.dirtyDueDate ? toISODateFormat(this.dirtyDueDate) : null,
startDate: this.dirtyStartDate ? toISODateFormat(this.dirtyStartDate) : null,
},
],
},
@ -181,8 +181,8 @@ export default {
input: {
id: this.workItemId,
startAndDueDateWidget: {
dueDate: this.dirtyDueDate ? pikadayToString(this.dirtyDueDate) : null,
startDate: this.dirtyStartDate ? pikadayToString(this.dirtyStartDate) : null,
dueDate: this.dirtyDueDate ? toISODateFormat(this.dirtyDueDate) : null,
startDate: this.dirtyStartDate ? toISODateFormat(this.dirtyStartDate) : null,
},
},
},

View File

@ -1,7 +1,7 @@
import { set, isEmpty } from 'lodash';
import { produce } from 'immer';
import { findWidget } from '~/issues/list/utils';
import { pikadayToString } from '~/lib/utils/datetime_utility';
import { toISODateFormat } from '~/lib/utils/datetime_utility';
import { updateDraft } from '~/lib/utils/autosave';
import { getNewWorkItemAutoSaveKey, newWorkItemFullPath } from '../utils';
import {
@ -37,10 +37,10 @@ const updateRolledUpDatesWidget = (draftData, rolledUpDates) => {
if (!rolledUpDates) return;
const dueDateFixed = rolledUpDates.dueDateFixed
? pikadayToString(rolledUpDates.dueDateFixed)
? toISODateFormat(rolledUpDates.dueDateFixed)
: null;
const startDateFixed = rolledUpDates.startDateFixed
? pikadayToString(rolledUpDates.startDateFixed)
? toISODateFormat(rolledUpDates.startDateFixed)
: null;
const widget = findWidget(WIDGET_TYPE_ROLLEDUP_DATES, draftData.workspace.workItem);

View File

@ -752,7 +752,7 @@
.dropdown-toggle-text {
&.is-default {
color: $gl-text-color-secondary;
color: var(--gl-text-color-subtle);
}
}

View File

@ -14,47 +14,6 @@
// - app/assets/javascripts/commit/pipelines/pipelines_bundle.js
.pipelines {
.pipeline-actions {
min-width: 170px; //Guarantees buttons don't break in several lines.
.btn-default {
color: var(--gray-500, $gray-500);
}
.btn.btn-retry:hover,
.btn.btn-retry:focus {
border-color: $dropdown-toggle-active-border-color;
background-color: $gray-50;
}
svg path {
fill: var(--gray-500, $gray-500);
}
.dropdown-menu {
max-height: $dropdown-max-height;
overflow-y: auto;
}
.dropdown-toggle,
.dropdown-menu {
color: var(--gray-500, $gray-500);
}
.btn-group.open .btn-default {
background-color: $gray-50;
border-color: $gray-100;
}
.btn .text-center {
display: inline;
}
.tooltip {
white-space: nowrap;
}
}
.pipeline-tags .label-container {
white-space: normal;
}

View File

@ -27,11 +27,6 @@
}
&.todo-pending.done-reversible {
.todo-item,
.todo-timestamp {
opacity: 0.5;
}
.todo-avatar {
filter: grayscale(1);
}

View File

@ -1,2 +1,3 @@
= content_tag (@inline ? :span : :div), **html_options do
%span{ class: spinner_class, aria: {label: @label} }>
%span{ class: spinner_class, aria: {hidden: true} }>
%span{ class: 'gl-sr-only !gl-absolute' }= @label

View File

@ -69,6 +69,9 @@ module Types
value 'MERGE_TIME',
value: :merge_time,
description: 'Merge request may not be merged until after the specified time.'
value 'SECURITY_POLICIES_VIOLATIONS',
value: :security_policy_violations,
description: 'All policy rules must be satisfied.'
end
end
end

View File

@ -235,7 +235,7 @@ module TodosHelper
is_overdue = todo.target.overdue?
css_class =
if is_due_today
'text-warning'
'gl-text-warning'
elsif is_overdue
'gl-text-danger'
else

View File

@ -1,7 +1,7 @@
%li.todo.gl-border-t.gl-border-gray-50.hover:gl-border-blue-200.hover:gl-bg-blue-50.hover:gl-cursor-pointer.gl-relative{ class: "hover:gl-z-1 todo-#{todo.done? ? 'done' : 'pending'}", id: dom_id(todo) }
.gl-flex.gl-flex-col.sm:gl-flex-row.sm:gl-items-center
.todo-item.gl-overflow-hidden.gl-overflow-x-auto.gl-self-center.gl-w-full{ data: { testid: "todo-item-container" } }
.todo-title.gl-pt-2.gl-pb-3.gl-px-2.gl-md-mb-1.gl-text-sm.gl-text-secondary
.todo-title.gl-pt-2.gl-pb-3.gl-px-2.gl-md-mb-1.gl-text-sm.gl-text-subtle
= todo_target_state_pill(todo)
@ -16,7 +16,7 @@
%span.todo-label
- if todo.target
= link_to todo_target_name(todo), todo_target_path(todo), class: 'todo-target-link !gl-text-secondary !gl-no-underline', :'aria-describedby' => dom_id(todo) + "_describer", :'aria-label' => todo_target_aria_label(todo)
= link_to todo_target_name(todo), todo_target_path(todo), class: 'todo-target-link !gl-text-subtle !gl-no-underline', :'aria-describedby' => dom_id(todo) + "_describer", :'aria-label' => todo_target_aria_label(todo)
- else
= _("(removed)")
@ -48,19 +48,19 @@
= first_line_in_markdown(todo, :body, 125, project: todo.project, group: todo.group)
.todo-timestamp.gl-whitespace-nowrap.sm:gl-ml-3.gl-mt-2.gl-mb-2.gl-sm-my-0.gl-px-2.gl-sm-px-0
%span.todo-timestamp.gl-text-sm.gl-text-secondary
%span.todo-timestamp.gl-text-sm.gl-text-subtle
= todo_due_date(todo)
#{time_ago_with_tooltip(todo.created_at)}
.todo-actions.gl-mr-4.gl-px-2.gl-sm-px-0.sm:gl-mx-0
- if todo.pending?
= render Pajamas::ButtonComponent.new(button_options: { class: 'btn-icon gl-flex js-done-todo has-tooltip', title: _('Mark as done')}, method: :delete, href: dashboard_todo_path(todo)), 'aria-label' => _('Mark as done') do
= render Pajamas::ButtonComponent.new(button_options: { class: 'btn-icon gl-flex js-done-todo has-tooltip', title: _('Mark as done'), aria: { label: _('Mark as done') }}, method: :delete, href: dashboard_todo_path(todo)) do
= gl_loading_icon(inline: true, css_class: 'hidden')
= sprite_icon('check', css_class: 'js-todo-button-icon')
= render Pajamas::ButtonComponent.new(button_options: { class: 'btn-icon gl-flex js-undo-todo hidden has-tooltip', title: _('Undo')}, method: :patch, href: restore_dashboard_todo_path(todo)), 'aria-label' => _('Undo') do
= render Pajamas::ButtonComponent.new(button_options: { class: 'btn-icon gl-flex js-undo-todo hidden has-tooltip', title: _('Undo'), aria: { label: _('Undo') }}, method: :patch, href: restore_dashboard_todo_path(todo)) do
= gl_loading_icon(inline: true, css_class: 'hidden')
= sprite_icon('redo', css_class: 'js-todo-button-icon')
- else
= render Pajamas::ButtonComponent.new(button_options: { class: 'btn-icon gl-flex js-add-todo has-tooltip', title: _('Re-add this to-do item')}, method: :patch, href: restore_dashboard_todo_path(todo)), 'aria-label' => _('Re-add this to-do item') do
= render Pajamas::ButtonComponent.new(button_options: { class: 'btn-icon gl-flex js-add-todo has-tooltip', title: _('Re-add this to-do item'), aria: { label: _('Re-add this to-do item') }}, method: :patch, href: restore_dashboard_todo_path(todo)) do
= gl_loading_icon(inline: true, css_class: 'hidden')
= sprite_icon('redo', css_class: 'js-todo-button-icon')

View File

@ -1,9 +0,0 @@
---
name: pipeline_run_keyword
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/440487
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/146333
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/471925
milestone: '17.2'
group: group::pipeline authoring
type: gitlab_com_derisk
default_enabled: false

View File

@ -0,0 +1,9 @@
---
migration_job_name: BackfillCiResourcesProjectId
description: Backfills sharding key `ci_resources.project_id` from `ci_resource_groups`.
feature_category: continuous_integration
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167645
milestone: '17.5'
queued_migration_version: 20240930154303
finalize_after: '2024-10-22'
finalized_by: # version of the migration that finalized this BBM

View File

@ -17,3 +17,4 @@ desired_sharding_key:
table: ci_resource_groups
sharding_key: project_id
belongs_to: resource_group
desired_sharding_key_migration_job_name: BackfillCiResourcesProjectId

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddProjectIdToCiResources < Gitlab::Database::Migration[2.2]
milestone '17.5'
def change
add_column :ci_resources, :project_id, :bigint
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class IndexCiResourcesOnProjectId < Gitlab::Database::Migration[2.2]
milestone '17.5'
disable_ddl_transaction!
INDEX_NAME = 'index_ci_resources_on_project_id'
def up
add_concurrent_index :ci_resources, :project_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :ci_resources, INDEX_NAME
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class AddCiResourcesProjectIdTrigger < Gitlab::Database::Migration[2.2]
milestone '17.5'
def up
install_sharding_key_assignment_trigger(
table: :ci_resources,
sharding_key: :project_id,
parent_table: :ci_resource_groups,
parent_sharding_key: :project_id,
foreign_key: :resource_group_id
)
end
def down
remove_sharding_key_assignment_trigger(
table: :ci_resources,
sharding_key: :project_id,
parent_table: :ci_resource_groups,
parent_sharding_key: :project_id,
foreign_key: :resource_group_id
)
end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
class QueueBackfillCiResourcesProjectId < Gitlab::Database::Migration[2.2]
milestone '17.5'
restrict_gitlab_migration gitlab_schema: :gitlab_ci
MIGRATION = "BackfillCiResourcesProjectId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:ci_resources,
:id,
:project_id,
:ci_resource_groups,
:project_id,
:resource_group_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(
MIGRATION,
:ci_resources,
:id,
[
:project_id,
:ci_resource_groups,
:project_id,
:resource_group_id
]
)
end
end

View File

@ -0,0 +1 @@
514b46352f4e88800a8bc7fbec6ffb04330000b8acc2959d1d3278f9c03cd73d

View File

@ -0,0 +1 @@
3bfe20df65af690a2471a64737b9021af4de2c0502369673820ee48a40d3f53a

View File

@ -0,0 +1 @@
afbcc248e293796fa532187be6c3eac72670cde338f6274e5eba36c378ff3af1

View File

@ -0,0 +1 @@
e02bd928089badc13d78bb9345667495534101b578dcf7d49d5902341b496277

View File

@ -985,6 +985,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_1c0f1ca199a3() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."project_id" IS NULL THEN
SELECT "project_id"
INTO NEW."project_id"
FROM "ci_resource_groups"
WHERE "ci_resource_groups"."id" = NEW."resource_group_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_1ed40f4d5f4e() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -8860,7 +8876,8 @@ CREATE TABLE ci_resources (
updated_at timestamp with time zone NOT NULL,
resource_group_id bigint NOT NULL,
build_id bigint,
partition_id bigint
partition_id bigint,
project_id bigint
);
CREATE SEQUENCE ci_resources_id_seq
@ -28149,6 +28166,8 @@ CREATE INDEX index_ci_resources_on_build_id ON ci_resources USING btree (build_i
CREATE INDEX index_ci_resources_on_partition_id_build_id ON ci_resources USING btree (partition_id, build_id);
CREATE INDEX index_ci_resources_on_project_id ON ci_resources USING btree (project_id);
CREATE UNIQUE INDEX index_ci_resources_on_resource_group_id_and_build_id ON ci_resources USING btree (resource_group_id, build_id);
CREATE INDEX index_ci_runner_machines_on_contacted_at_desc_and_id_desc ON ci_runner_machines USING btree (contacted_at DESC, id DESC);
@ -33407,6 +33426,8 @@ CREATE TRIGGER trigger_174b23fa3dfb BEFORE INSERT OR UPDATE ON approval_project_
CREATE TRIGGER trigger_18bc439a6741 BEFORE INSERT OR UPDATE ON packages_conan_metadata FOR EACH ROW EXECUTE FUNCTION trigger_18bc439a6741();
CREATE TRIGGER trigger_1c0f1ca199a3 BEFORE INSERT OR UPDATE ON ci_resources FOR EACH ROW EXECUTE FUNCTION trigger_1c0f1ca199a3();
CREATE TRIGGER trigger_1ed40f4d5f4e BEFORE INSERT OR UPDATE ON packages_maven_metadata FOR EACH ROW EXECUTE FUNCTION trigger_1ed40f4d5f4e();
CREATE TRIGGER trigger_206cbe2dc1a2 BEFORE INSERT OR UPDATE ON packages_package_files FOR EACH ROW EXECUTE FUNCTION trigger_206cbe2dc1a2();

View File

@ -36990,6 +36990,7 @@ Detailed representation of whether a GitLab merge request can be merged.
| <a id="detailedmergestatuspreparing"></a>`PREPARING` | Merge request diff is being created. |
| <a id="detailedmergestatusrequested_changes"></a>`REQUESTED_CHANGES` | Indicates a reviewer has requested changes. |
| <a id="detailedmergestatussecurity_policies_evaluating"></a>`SECURITY_POLICIES_EVALUATING` | All security policies must be evaluated. |
| <a id="detailedmergestatussecurity_policies_violations"></a>`SECURITY_POLICIES_VIOLATIONS` | All policy rules must be satisfied. |
| <a id="detailedmergestatusunchecked"></a>`UNCHECKED` | Merge status has not been checked. |
### `DiffPositionType`
@ -37814,6 +37815,7 @@ Representation of mergeability check identifier.
| <a id="mergeabilitycheckidentifiernot_open"></a>`NOT_OPEN` | Checks whether the merge request is open. |
| <a id="mergeabilitycheckidentifierrequested_changes"></a>`REQUESTED_CHANGES` | Checks whether the merge request has changes requested. |
| <a id="mergeabilitycheckidentifiersecurity_policy_evaluation"></a>`SECURITY_POLICY_EVALUATION` | Checks whether the security policies are evaluated. |
| <a id="mergeabilitycheckidentifiersecurity_policy_violations"></a>`SECURITY_POLICY_VIOLATIONS` | Checks whether the security policies are satisfied. |
| <a id="mergeabilitycheckidentifierstatus_checks_must_pass"></a>`STATUS_CHECKS_MUST_PASS` | Checks whether the external status checks pass. |
### `MergeabilityCheckStatus`

View File

@ -917,6 +917,8 @@ Use `detailed_merge_status` instead of `merge_status` to account for all potenti
- `requested_changes`: The merge request has reviewers who have requested changes.
- `security_policy_evaluation`: All security policies must be evaluated.
Requires the `policy_mergability_check` feature flag to be enabled.
- `security_policy_violations`: All security policies must be satisfied.
Requires the `policy_mergability_check` feature flag to be enabled.
- `status_checks_must_pass`: All status checks must pass before merge.
- `unchecked`: Git has not yet tested if a valid merge is possible.
- `locked_paths`: Paths locked by other users must be unlocked before merging to default branch.

View File

@ -4674,6 +4674,7 @@ DETAILS:
**Status:** Experiment
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/440487) in GitLab 17.3 [with a flag](../../administration/feature_flags.md) named `pipeline_run_keyword`. Disabled by default. Requires GitLab Runner 17.1.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/471925) in GitLab 17.5. Feature flag `pipeline_run_keyword` removed.
FLAG:
The availability of this feature is controlled by a feature flag.

View File

@ -133,10 +133,10 @@ Project Owners can perform any listed action, and can delete pipelines:
| Manage job triggers | | | | | ✓ | |
| Manage project CI/CD variables | | | | | ✓ | |
| Manage project [Secure Files](../api/secure_files.md) | | | | | ✓ | |
| View [agents for Kubernetes](clusters/agent/index.md) | | | | ✓ | ✓ | |
| Manage [agents for Kubernetes](clusters/agent/index.md) | | | | | ✓ | |
| Read [Terraform](infrastructure/index.md) state | | | | ✓ | ✓ | |
| Manage [Terraform](infrastructure/index.md) state | | | | | ✓ | |
| View [agents for Kubernetes](clusters/agent/index.md) | | | | ✓ | ✓ | |
| Manage [agents for Kubernetes](clusters/agent/index.md) | | | | | ✓ | |
| Read [Terraform](infrastructure/index.md) state | | | | ✓ | ✓ | |
| Manage [Terraform](infrastructure/index.md) state | | | | | ✓ | |
This table shows granted privileges for jobs triggered by specific roles.

View File

@ -64,8 +64,7 @@ A deploy token's scope determines the actions it can perform.
> - [Feature flag `ci_variable_for_group_gitlab_deploy_token`](https://gitlab.com/gitlab-org/gitlab/-/issues/363621) removed in GitLab 15.4.
A GitLab deploy token is a special type of deploy token. If you create a deploy token named
`gitlab-deploy-token`, the deploy token is automatically exposed to the CI/CD jobs as variables, for
use in a CI/CD pipeline:
`gitlab-deploy-token`, the deploy token is automatically exposed to project CI/CD jobs as variables:
- `CI_DEPLOY_USER`: Username
- `CI_DEPLOY_PASSWORD`: Token
@ -82,6 +81,9 @@ work for group deploy tokens. To make a group deploy token available for CI/CD j
`CI_DEPLOY_USER` and `CI_DEPLOY_PASSWORD` CI/CD variables in **Settings > CI/CD > Variables** to the
name and token of the group deploy token.
When `gitlab-deploy-token` is defined in a group, the `CI_DEPLOY_USER` and `CI_DEPLOY_PASSWORD`
CI/CD variables are available only to immediate child projects of the group.
### GitLab deploy token security
GitLab deploy tokens are long-lived, making them attractive for attackers.

View File

@ -156,10 +156,10 @@ To preserve historical context, the placeholder user name and username are deriv
Prerequisites:
- You must have the Owner role of the group.
- You must have the Owner role for the group.
Placeholder users are created in the top-level group on the destination instance where a group or project are imported
to. After the import, to view placeholder users for a group:
Placeholder users are created on the destination instance while a group or project is imported.
To view placeholder users created during imports to a top-level group and its subgroups:
1. On the left sidebar, select **Search or go to** and find your group.
1. Select **Manage > Members**.

View File

@ -11,6 +11,7 @@ module API
MAJOR_BROWSERS = %i[webkit firefox ie edge opera chrome].freeze
WEB_BROWSER_ERROR_MESSAGE = 'This endpoint is not meant to be accessed by a web browser.'
UPSTREAM_GID_HEADER = 'X-Gitlab-Virtual-Registry-Upstream-Global-Id'
MAX_FILE_SIZE = 5.gigabytes
included do
helpers do
@ -53,14 +54,24 @@ module API
upstream.headers,
url,
response_headers: NO_BROWSER_EXECUTION_RESPONSE_HEADERS,
upload_config: { headers: { UPSTREAM_GID_HEADER => upstream.to_global_id.to_s } },
allow_localhost: allow_localhost,
allowed_uris: allowed_uris,
ssrf_filter: true
ssrf_filter: true,
upload_config: {
headers: { UPSTREAM_GID_HEADER => upstream.to_global_id.to_s },
authorized_upload_response: authorized_upload_response
}
)
)
end
def authorized_upload_response
::VirtualRegistries::CachedResponseUploader.workhorse_authorize(
has_length: true,
maximum_size: MAX_FILE_SIZE
)
end
def send_workhorse_headers(headers)
header(*headers)
env['api.format'] = :binary

View File

@ -23,8 +23,7 @@ module API
expose :runner_variables, as: :variables
expose :steps, using: Entities::Ci::JobRequest::Step, unless: ->(job) do
::Feature.enabled?(:pipeline_run_keyword, job.project) &&
job.execution_config&.run_steps.present?
job.execution_config&.run_steps.present?
end
expose :runtime_hooks, as: :hooks, using: Entities::Ci::JobRequest::Hook
@ -38,10 +37,7 @@ module API
Entities::Ci::JobRequest::Dependency.represent(job.all_dependencies, options.merge(running_job: job))
end
expose :run, if: ->(job) {
::Feature.enabled?(:pipeline_run_keyword, job.project) &&
job.execution_config&.run_steps.present?
} do |job|
expose :run, if: ->(job) { job.execution_config&.run_steps.present? } do |job|
job.execution_config.run_steps.to_json
end
end

View File

@ -11,8 +11,8 @@ module API
helpers do
def queue_metrics
hash = {}
Gitlab::Redis::Queues.instances.each_value do |v| # rubocop:disable Cop/RedisQueueUsage -- allow iteration over shard instances
queue_metrics_from_shard(v.sidekiq_redis).each do |queue_name, queue_details|
Gitlab::SidekiqSharding::Router.with_routed_client do
queue_metrics_from_shard.each do |queue_name, queue_details|
if hash[queue_name].nil?
hash[queue_name] = queue_details
else
@ -24,38 +24,36 @@ module API
hash
end
def queue_metrics_from_shard(pool)
Sidekiq::Client.via(pool) do
::Gitlab::SidekiqConfig.routing_queues.each_with_object({}) do |queue_name, hash|
queue = Sidekiq::Queue.new(queue_name)
hash[queue.name] = {
backlog: queue.size,
latency: queue.latency.to_i
}
end
def queue_metrics_from_shard
::Gitlab::SidekiqConfig.routing_queues.each_with_object({}) do |queue_name, hash|
queue = Sidekiq::Queue.new(queue_name)
hash[queue.name] = {
backlog: queue.size,
latency: queue.latency.to_i
}
end
end
def process_metrics
Gitlab::Redis::Queues.instances.values.flat_map do |v| # rubocop:disable Cop/RedisQueueUsage -- allow iteration over shard instances
process_metrics_from_shard(v.sidekiq_redis)
metrics = []
Gitlab::SidekiqSharding::Router.with_routed_client do
metrics << process_metrics_from_shard
end
metrics.flatten
end
def process_metrics_from_shard(pool)
Sidekiq::Client.via(pool) do
Sidekiq::ProcessSet.new(false).map do |process|
{
hostname: process['hostname'],
pid: process['pid'],
tag: process['tag'],
started_at: Time.at(process['started_at']),
queues: process['queues'],
labels: process['labels'],
concurrency: process['concurrency'],
busy: process['busy']
}
end
def process_metrics_from_shard
Sidekiq::ProcessSet.new(false).map do |process|
{
hostname: process['hostname'],
pid: process['pid'],
tag: process['tag'],
started_at: Time.at(process['started_at']),
queues: process['queues'],
labels: process['labels'],
concurrency: process['concurrency'],
busy: process['busy']
}
end
end
@ -67,23 +65,21 @@ module API
dead: 0
}
Gitlab::Redis::Queues.instances.each_value do |shard| # rubocop:disable Cop/RedisQueueUsage -- allow iteration over shard instances
job_stats_from_shard(shard.sidekiq_redis).each { |k, v| stats[k] += v }
Gitlab::SidekiqSharding::Router.with_routed_client do
job_stats_from_shard.each { |k, v| stats[k] += v }
end
stats
end
def job_stats_from_shard(pool)
Sidekiq::Client.via(pool) do
stats = Sidekiq::Stats.new
{
processed: stats.processed,
failed: stats.failed,
enqueued: stats.enqueued,
dead: stats.dead_size
}
end
def job_stats_from_shard
stats = Sidekiq::Stats.new
{
processed: stats.processed,
failed: stats.failed,
enqueued: stats.enqueued,
dead: stats.dead_size
}
end
end

View File

@ -9,8 +9,6 @@ module API
feature_category :virtual_registry
urgency :low
MAX_FILE_SIZE = 5.gigabytes
authenticate_with do |accept|
accept.token_types(:personal_access_token).sent_through(:http_private_token_header)
accept.token_types(:deploy_token).sent_through(:http_deploy_token_header)
@ -107,82 +105,52 @@ module API
send_successful_response_from(service_response: service_response)
end
namespace 'upload' do
after_validation do
require_gitlab_workhorse!
authorize!(:read_virtual_registry, registry)
end
desc 'Workhorse upload endpoint of the Maven virtual registry. Only workhorse can access it.' do
detail 'This feature was introduced in GitLab 17.4. \
This feature is currently in experiment state. \
This feature is behind the `virtual_registry_maven` feature flag.'
success [
{ code: 200 }
]
failure [
{ code: 400, message: 'Bad request' },
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not Found' }
]
tags %w[maven_virtual_registries]
hidden true
end
params do
use :id_and_path
requires :file,
type: ::API::Validations::Types::WorkhorseFile,
desc: 'The file being uploaded',
documentation: { type: 'file' }
end
post 'upload' do
require_gitlab_workhorse!
authorize!(:read_virtual_registry, registry)
desc 'Workhorse authorize upload endpoint of the Maven virtual registry. Only workhorse can access it.' do
detail 'This feature was introduced in GitLab 17.4. \
This feature is currently in experiment state. \
This feature is behind the `virtual_registry_maven` feature flag.'
success [
{ code: 200 }
]
failure [
{ code: 400, message: 'Bad request' },
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not Found' }
]
tags %w[maven_virtual_registries]
hidden true
end
params do
use :id_and_path
end
post 'authorize' do
status 200
content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
::VirtualRegistries::CachedResponseUploader.workhorse_authorize(has_length: true,
maximum_size: MAX_FILE_SIZE)
end
etag, content_type, upstream_gid = request.headers.fetch_values(
'Etag',
::Gitlab::Workhorse::SEND_DEPENDENCY_CONTENT_TYPE_HEADER,
UPSTREAM_GID_HEADER
) { nil }
desc 'Workhorse upload endpoint of the Maven virtual registry. Only workhorse can access it.' do
detail 'This feature was introduced in GitLab 17.4. \
This feature is currently in experiment state. \
This feature is behind the `virtual_registry_maven` feature flag.'
success [
{ code: 200 }
]
failure [
{ code: 400, message: 'Bad request' },
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not Found' }
]
tags %w[maven_virtual_registries]
hidden true
end
params do
use :id_and_path
requires :file,
type: ::API::Validations::Types::WorkhorseFile,
desc: 'The file being uploaded',
documentation: { type: 'file' }
end
post do
etag, content_type, upstream_gid = request.headers.fetch_values(
'Etag',
::Gitlab::Workhorse::SEND_DEPENDENCY_CONTENT_TYPE_HEADER,
UPSTREAM_GID_HEADER
) { nil }
# TODO: revisit this part when multiple upstreams are supported
# https://gitlab.com/gitlab-org/gitlab/-/issues/480461
# coherence check
not_found!('Upstream') unless upstream == GlobalID::Locator.locate(upstream_gid)
# TODO: revisit this part when multiple upstreams are supported
# https://gitlab.com/gitlab-org/gitlab/-/issues/480461
# coherence check
not_found!('Upstream') unless upstream == GlobalID::Locator.locate(upstream_gid)
service_response = ::VirtualRegistries::Packages::Maven::CachedResponses::CreateOrUpdateService.new(
upstream: upstream,
current_user: current_user,
params: declared_params.merge(etag: etag, content_type: content_type)
).execute
service_response = ::VirtualRegistries::Packages::Maven::CachedResponses::CreateOrUpdateService.new(
upstream: upstream,
current_user: current_user,
params: declared_params.merge(etag: etag, content_type: content_type)
).execute
send_error_response_from!(service_response: service_response) if service_response.error?
created!
end
send_error_response_from!(service_response: service_response) if service_response.error?
status :ok
end
end
end

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillCiResourcesProjectId < BackfillDesiredShardingKeyJob
operation_name :backfill_ci_resources_project_id
feature_category :continuous_integration
end
end
end

View File

@ -147,11 +147,7 @@ module Gitlab
:allow_failure, :publish, :pages, :manual_confirmation, :run
def self.matching?(name, config)
if ::Gitlab::Ci::Config::FeatureFlags.enabled?(:pipeline_run_keyword, type: :gitlab_com_derisk)
!name.to_s.start_with?('.') && config.is_a?(Hash) && (config.key?(:script) || config.key?(:run))
else
!name.to_s.start_with?('.') && config.is_a?(Hash) && config.key?(:script)
end
!name.to_s.start_with?('.') && config.is_a?(Hash) && (config.key?(:script) || config.key?(:run))
end
def self.visible?
@ -189,7 +185,7 @@ module Gitlab
publish: publish,
pages: pages,
manual_confirmation: self.manual_confirmation,
run: ::Gitlab::Ci::Config::FeatureFlags.enabled?(:pipeline_run_keyword, type: :gitlab_com_derisk) ? run : nil
run: run
).compact
end
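Taken together with the spec fixtures removed later in this diff, a minimal sketch of what changes here: a job entry whose config defines `run` (and no `script`) now matches unconditionally, with no feature-flag check. The `:rspec` name and step contents are illustrative.

```ruby
# Illustrative only: a run-only job config satisfies Entry::Job.matching?
# regardless of the removed pipeline_run_keyword feature flag.
config = { run: [{ name: 'step1', step: 'some reference' }] }

Gitlab::Ci::Config::Entry::Job.matching?(:rspec, config)          # => true
Gitlab::Ci::Config::Entry::Job.matching?(:rspec, script: 'echo')  # => true, unchanged
```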

View File

@ -97,7 +97,7 @@ module Gitlab
delegate :logger, to: :@context
def build_execution_config_attribute
return {} unless ::Feature.enabled?(:pipeline_run_keyword, @pipeline.project) && @execution_config_attribute
return {} unless @execution_config_attribute
execution_config = @context.find_or_build_execution_config(@execution_config_attribute)
{ execution_config: execution_config }

View File

@ -54,11 +54,14 @@ module Gitlab
end
def workers_uncached
Gitlab::Redis::Queues.instances.values.flat_map do |instance| # rubocop:disable Cop/RedisQueueUsage -- iterating over instances is allowed as we pass the pool to Sidekiq
Sidekiq::Client.via(instance.sidekiq_redis) do
sidekiq_workers.map { |_process_id, _thread_id, work| ::Gitlab::Json.parse(work.payload)['class'] }
hash = []
Gitlab::SidekiqSharding::Router.with_routed_client do
workers = sidekiq_workers.map do |_process_id, _thread_id, work|
::Gitlab::Json.parse(work.payload)['class']
end
end.tally
hash.concat(workers)
end
hash.tally
end
def sidekiq_workers

View File

@ -39,6 +39,14 @@ module Gitlab
end
end
def with_routed_client
Gitlab::Redis::Queues.instances.each_value do |inst|
Sidekiq::Client.via(inst.sidekiq_redis) do
yield
end
end
end
def migrated_shards
@migrated_shards ||= Set.new(Gitlab::Json.parse(ENV.fetch('SIDEKIQ_MIGRATED_SHARDS', '[]')))
end
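For context, a rough sketch (not part of this change) of how callers such as the Sidekiq metrics API above use the new helper: the block runs once per configured queue shard, with `Sidekiq::Client` routed to that shard's Redis, so per-shard results can be accumulated outside the block.

```ruby
# Illustrative only: aggregate one Sidekiq statistic across every queue shard
# by letting the router re-point Sidekiq at each shard's Redis in turn.
total_enqueued = 0

Gitlab::SidekiqSharding::Router.with_routed_client do
  total_enqueued += Sidekiq::Stats.new.enqueued
end

total_enqueued # => sum of enqueued jobs over all shards
```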

View File

@ -242,7 +242,8 @@ module Gitlab
'UploadConfig' => {
'Method' => upload_config[:method],
'Url' => upload_config[:url],
'Headers' => (upload_config[:headers] || {}).transform_values { |v| Array.wrap(v) }
'Headers' => (upload_config[:headers] || {}).transform_values { |v| Array.wrap(v) },
'AuthorizedUploadResponse' => upload_config[:authorized_upload_response] || {}
}.compact_blank!
}
params.compact_blank!
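Read together with the virtual registry API concern earlier in this diff, a rough sketch (illustrative, not authoritative) of the `upload_config` hash that now reaches this serializer and the shape Workhorse receives; `upstream` refers to the registry upstream object from that endpoint, and the `TempPath` value shown in the comment is made up.

```ruby
# Illustrative only: upload_config as built by the Maven virtual registry
# endpoint, with the pre-authorized upload response inlined for Workhorse.
upload_config = {
  headers: { 'X-Gitlab-Virtual-Registry-Upstream-Global-Id' => upstream.to_global_id.to_s },
  authorized_upload_response: ::VirtualRegistries::CachedResponseUploader.workhorse_authorize(
    has_length: true,
    maximum_size: 5.gigabytes
  )
}

# Serialized above roughly as:
#   'UploadConfig' => {
#     'Headers' => { 'X-Gitlab-Virtual-Registry-Upstream-Global-Id' => [<gid>] },
#     'AuthorizedUploadResponse' => { 'TempPath' => '/tmp/uploads', ... }
#   }
```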

View File

@ -5621,6 +5621,9 @@ msgstr ""
msgid "All paths must be unlocked"
msgstr ""
msgid "All policy rules must be satisfied."
msgstr ""
msgid "All project members"
msgstr ""
@ -29470,9 +29473,6 @@ msgstr ""
msgid "Invalid name %{input} was given for this default stage, allowed names: %{names}"
msgstr ""
msgid "Invalid period"
msgstr ""
msgid "Invalid pin code."
msgstr ""
@ -57123,9 +57123,6 @@ msgstr ""
msgid "Todos|All"
msgstr ""
msgid "Todos|Any"
msgstr ""
msgid "Todos|Any Action"
msgstr ""
@ -57141,6 +57138,9 @@ msgstr ""
msgid "Todos|Build failed"
msgstr ""
msgid "Todos|Category"
msgstr ""
msgid "Todos|Could not merge"
msgstr ""
@ -57183,6 +57183,9 @@ msgstr ""
msgid "Todos|Filter by project"
msgstr ""
msgid "Todos|Filter to-do items"
msgstr ""
msgid "Todos|Give yourself a pat on the back!"
msgstr ""
@ -57249,6 +57252,15 @@ msgstr ""
msgid "Todos|Pipelines"
msgstr ""
msgid "Todos|Raw text search is not currently supported"
msgstr ""
msgid "Todos|Raw text search is not currently supported. Please use the available search tokens."
msgstr ""
msgid "Todos|Reason"
msgstr ""
msgid "Todos|Removed from Merge Train"
msgstr ""

View File

@ -82,7 +82,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component, feature_category: :de
end
it 'does not render a spinner' do
expect(page).not_to have_css ".gl-spinner[aria-label='Loading']"
expect(page).not_to have_css('.gl-sr-only', text: 'Loading')
end
end
@ -94,7 +94,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component, feature_category: :de
end
it 'renders a spinner' do
expect(page).to have_css ".gl-spinner[aria-label='Loading']"
expect(page).to have_css('.gl-sr-only', text: 'Loading')
end
end
end
@ -217,7 +217,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component, feature_category: :de
it 'renders only a loading icon' do
expect(page).not_to have_css "svg.gl-icon.gl-button-icon.custom-icon[data-testid='star-o-icon']"
expect(page).to have_css ".gl-spinner[aria-label='Loading']"
expect(page).to have_css('.gl-sr-only', text: 'Loading')
end
end
end

View File

@ -50,16 +50,16 @@ RSpec.describe Pajamas::SpinnerComponent, type: :component do
describe 'label' do
context 'by default' do
it 'has "Loading" as aria-label' do
expect(page).to have_css '.gl-spinner[aria-label="Loading"]'
it 'has "Loading" as screen reader available text' do
expect(page).to have_css('.gl-sr-only', text: 'Loading')
end
end
context 'when set to something else' do
let(:options) { { label: "Sending" } }
it 'has a custom aria-label' do
expect(page).to have_css '.gl-spinner[aria-label="Sending"]'
it 'has a custom label as screen reader available text' do
expect(page).to have_css('.gl-sr-only', text: 'Sending')
end
end
end

View File

@ -90,6 +90,7 @@ RSpec.describe 'Database schema', feature_category: :database do
ci_pipeline_variables: %w[partition_id pipeline_id project_id],
ci_pipelines: %w[partition_id auto_canceled_by_partition_id],
ci_unit_test_failures: %w[project_id],
ci_resources: %w[project_id],
p_ci_pipelines: %w[partition_id auto_canceled_by_partition_id auto_canceled_by_id],
p_ci_runner_machine_builds: %w[project_id],
ci_runners: %w[sharding_key_id], # This value is meant to populate the partitioned table, no other usage

View File

@ -0,0 +1,97 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Dashboard Todos', :js, feature_category: :team_planning do
let_it_be(:user) { create(:user, username: 'john') }
let_it_be(:user2) { create(:user, username: 'diane') }
let_it_be(:author) { create(:user) }
let_it_be(:project) { create(:project, :public, developers: user) }
let_it_be(:issue) { create(:issue, project: project, due_date: Date.today, title: "Fix bug") }
context 'when user does not have todos' do
before do
sign_in(user)
visit dashboard_todos_path
wait_for_requests # ensures page is fully loaded
end
it 'passes axe automated accessibility testing' do
expect(page).to be_axe_clean.within('#content-body')
end
end
context 'when user has todos' do
before do
allow(Todo).to receive(:default_per_page).and_return(2)
mr_merged = create(:merge_request, :simple, :merged, author: user, source_project: project)
note = create(
:note,
project: project,
note: "Check out #{mr_merged.to_reference}",
noteable: create(:issue, project: project)
)
create(:todo, :assigned, user: user, project: project, target: issue, author: user2)
create(:todo, :mentioned, user: user, project: project, target: mr_merged, author: author)
create(:todo, :mentioned, project: project, target: issue, user: user, note_id: note.id)
sign_in(user)
visit dashboard_todos_path
wait_for_requests # ensures page is fully loaded
end
it 'passes axe automated accessibility testing' do
expect(page).to be_axe_clean.within('#content-body')
end
context 'when user has the option to undo delete action' do
before do
within first('.todo') do
find_by_testid('check-icon').click
end
wait_for_requests # ensures page is fully loaded
end
it 'passes axe automated accessibility testing' do
expect(page).to be_axe_clean.within('#content-body')
end
end
context 'and filters with search options' do
it 'passes axe automated accessibility testing' do
click_button 'Author'
wait_for_requests # ensures page is fully loaded
expect(page).to be_axe_clean.within('.todos-filters')
end
end
context 'and filters with list options' do
it 'passes axe automated accessibility testing' do
click_button 'Action'
expect(page).to be_axe_clean.within('.todos-filters')
end
end
context 'and sorts the list' do
it 'passes axe automated accessibility testing' do
click_button 'Last created'
expect(page).to be_axe_clean.within('.todos-filters')
end
end
end
context 'when user has todos marked as done' do
before do
create(:todo, :mentioned, :done, user: user, project: project, target: issue, author: author)
sign_in(user)
visit dashboard_todos_path(state: :done)
wait_for_requests # ensures page is fully loaded
end
it 'passes axe automated accessibility testing' do
expect(page).to be_axe_clean.within('#content-body')
end
end
end

View File

@ -1,5 +1,7 @@
# frozen_string_literal: true
# covered by ./accessibility_spec.rb
require 'spec_helper'
RSpec.describe 'Dashboard > Todo target states', feature_category: :team_planning do

View File

@ -1,5 +1,7 @@
# frozen_string_literal: true
# covered by ./accessibility_spec.rb
require 'spec_helper'
RSpec.describe 'Dashboard > User filters todos', :js, feature_category: :team_planning do

View File

@ -1,5 +1,7 @@
# frozen_string_literal: true
# covered by ./accessibility_spec.rb
require 'spec_helper'
RSpec.describe 'Dashboard > User sorts todos', feature_category: :team_planning do

View File

@ -1,5 +1,7 @@
# frozen_string_literal: true
# covered by ./accessibility_spec.rb
require 'spec_helper'
RSpec.describe 'Dashboard Todos', :js, feature_category: :team_planning do

View File

@ -11,6 +11,7 @@ import {
getNodesOrDefault,
toggleQueryPollingByVisibility,
etagQueryHeaders,
calculateGraphQLPaginationQueryParams,
} from '~/graphql_shared/utils';
const mockType = 'Group';
@ -192,3 +193,29 @@ describe('etagQueryHeaders', () => {
});
});
});
describe('calculateGraphQLPaginationQueryParams', () => {
const mockRouteQuery = { start_cursor: 'mockStartCursor', end_cursor: 'mockEndCursor' };
describe('when `startCursor` is defined', () => {
it('sets start cursor query param', () => {
expect(
calculateGraphQLPaginationQueryParams({
startCursor: 'newMockStartCursor',
routeQuery: mockRouteQuery,
}),
).toEqual({ start_cursor: 'newMockStartCursor' });
});
});
describe('when `endCursor` is defined', () => {
it('sets end cursor query param', () => {
expect(
calculateGraphQLPaginationQueryParams({
endCursor: 'newMockEndCursor',
routeQuery: mockRouteQuery,
}),
).toEqual({ end_cursor: 'newMockEndCursor' });
});
});
});

View File

@ -117,53 +117,75 @@ describe('date_format_utility.js', () => {
);
});
});
});
describe('formatTimeAsSummary', () => {
it.each`
unit | value | result
${'months'} | ${1.5} | ${'1.5 months'}
${'weeks'} | ${1.25} | ${'1.5 weeks'}
${'days'} | ${2} | ${'2 days'}
${'hours'} | ${10} | ${'10 hours'}
${'minutes'} | ${20} | ${'20 minutes'}
${'seconds'} | ${10} | ${'<1 minute'}
${'seconds'} | ${0} | ${'-'}
`('will format $value $unit to $result', ({ unit, value, result }) => {
expect(utils.formatTimeAsSummary({ [unit]: value })).toBe(result);
describe('formatTimeAsSummary', () => {
it.each`
unit | value | result
${'months'} | ${1.5} | ${'1.5 months'}
${'weeks'} | ${1.25} | ${'1.5 weeks'}
${'days'} | ${2} | ${'2 days'}
${'hours'} | ${10} | ${'10 hours'}
${'minutes'} | ${20} | ${'20 minutes'}
${'seconds'} | ${10} | ${'<1 minute'}
${'seconds'} | ${0} | ${'-'}
`('will format $value $unit to $result', ({ unit, value, result }) => {
expect(utils.formatTimeAsSummary({ [unit]: value })).toBe(result);
});
});
describe('formatUtcOffset', () => {
it.each`
offset | expected
${-32400} | ${'-9'}
${'-12600'} | ${'-3.5'}
${0} | ${' 0'}
${'10800'} | ${'+3'}
${19800} | ${'+5.5'}
${0} | ${' 0'}
${[]} | ${' 0'}
${{}} | ${' 0'}
${true} | ${' 0'}
${null} | ${' 0'}
${undefined} | ${' 0'}
`('returns $expected given $offset', ({ offset, expected }) => {
expect(utils.formatUtcOffset(offset)).toEqual(expected);
});
});
describe('humanTimeframe', () => {
it.each`
startDate | dueDate | returnValue
${'2021-1-1'} | ${'2021-2-28'} | ${'Jan 1 Feb 28, 2021'}
${'2021-1-1'} | ${'2022-2-28'} | ${'Jan 1, 2021 Feb 28, 2022'}
${'2021-1-1'} | ${null} | ${'Jan 1, 2021 No due date'}
${null} | ${'2021-2-28'} | ${'No start date Feb 28, 2021'}
`(
'returns string "$returnValue" when startDate is $startDate and dueDate is $dueDate',
({ startDate, dueDate, returnValue }) => {
expect(utils.humanTimeframe(startDate, dueDate)).toBe(returnValue);
},
);
});
describe('formatTimeSpent', () => {
describe('with limitToHours false', () => {
it('formats 34500 seconds to `1d 1h 35m`', () => {
expect(utils.formatTimeSpent(34500)).toEqual('1d 1h 35m');
});
it('formats -34500 seconds to `- 1d 1h 35m`', () => {
expect(utils.formatTimeSpent(-34500)).toEqual('- 1d 1h 35m');
});
});
describe('with limitToHours true', () => {
it('formats 34500 seconds to `9h 35m`', () => {
expect(utils.formatTimeSpent(34500, true)).toEqual('9h 35m');
});
it('formats -34500 seconds to `- 9h 35m`', () => {
expect(utils.formatTimeSpent(-34500, true)).toEqual('- 9h 35m');
});
});
});
});
describe('formatUtcOffset', () => {
it.each`
offset | expected
${-32400} | ${'-9'}
${'-12600'} | ${'-3.5'}
${0} | ${' 0'}
${'10800'} | ${'+3'}
${19800} | ${'+5.5'}
${0} | ${' 0'}
${[]} | ${' 0'}
${{}} | ${' 0'}
${true} | ${' 0'}
${null} | ${' 0'}
${undefined} | ${' 0'}
`('returns $expected given $offset', ({ offset, expected }) => {
expect(utils.formatUtcOffset(offset)).toEqual(expected);
});
});
describe('humanTimeframe', () => {
it.each`
startDate | dueDate | returnValue
${'2021-1-1'} | ${'2021-2-28'} | ${'Jan 1 Feb 28, 2021'}
${'2021-1-1'} | ${'2022-2-28'} | ${'Jan 1, 2021 Feb 28, 2022'}
${'2021-1-1'} | ${null} | ${'Jan 1, 2021 No due date'}
${null} | ${'2021-2-28'} | ${'No start date Feb 28, 2021'}
`(
'returns string "$returnValue" when startDate is $startDate and dueDate is $dueDate',
({ startDate, dueDate, returnValue }) => {
expect(utils.humanTimeframe(startDate, dueDate)).toBe(returnValue);
},
);
});

View File

@ -1,25 +0,0 @@
import { formatTimeSpent } from '~/lib/utils/datetime/time_spent_utility';
describe('Time spent utils', () => {
describe('formatTimeSpent', () => {
describe('with limitToHours false', () => {
it('formats 34500 seconds to `1d 1h 35m`', () => {
expect(formatTimeSpent(34500)).toEqual('1d 1h 35m');
});
it('formats -34500 seconds to `- 1d 1h 35m`', () => {
expect(formatTimeSpent(-34500)).toEqual('- 1d 1h 35m');
});
});
describe('with limitToHours true', () => {
it('formats 34500 seconds to `9h 35m`', () => {
expect(formatTimeSpent(34500, true)).toEqual('9h 35m');
});
it('formats -34500 seconds to `- 9h 35m`', () => {
expect(formatTimeSpent(-34500, true)).toEqual('- 9h 35m');
});
});
});
});

View File

@ -316,29 +316,15 @@ describe('formatTime', () => {
});
describe('datefix', () => {
describe('pad', () => {
it('should add a 0 when length is smaller than 2', () => {
expect(datetimeUtility.pad(2)).toEqual('02');
});
it('should not add a zero when length matches the default', () => {
expect(datetimeUtility.pad(12)).toEqual('12');
});
it('should add a 0 when length is smaller than the provided', () => {
expect(datetimeUtility.pad(12, 3)).toEqual('012');
});
});
describe('parsePikadayDate', () => {
it('should return a UTC date', () => {
expect(datetimeUtility.parsePikadayDate('2020-01-29')).toEqual(new Date(2020, 0, 29));
});
});
describe('pikadayToString', () => {
it('should format a UTC date into yyyy-mm-dd format', () => {
expect(datetimeUtility.pikadayToString(new Date('2020-01-29:00:00'))).toEqual('2020-01-29');
describe('toISODateFormat', () => {
it('should format a Date object into yyyy-mm-dd format', () => {
expect(datetimeUtility.toISODateFormat(new Date('2020-01-29:00:00'))).toEqual('2020-01-29');
});
});
});

View File

@ -1,5 +1,5 @@
import organizationGroupsGraphQlResponse from 'test_fixtures/graphql/organizations/groups.query.graphql.json';
import { formatGroups, onPageChange, timestampType } from '~/organizations/shared/utils';
import { formatGroups, timestampType } from '~/organizations/shared/utils';
import { SORT_CREATED_AT, SORT_UPDATED_AT, SORT_NAME } from '~/organizations/shared/constants';
import { ACTION_EDIT, ACTION_DELETE } from '~/vue_shared/components/list_actions/constants';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
@ -65,32 +65,6 @@ describe('formatGroups', () => {
});
});
describe('onPageChange', () => {
const mockRouteQuery = { start_cursor: 'mockStartCursor', end_cursor: 'mockEndCursor' };
describe('when `startCursor` is defined', () => {
it('sets start cursor query param', () => {
expect(
onPageChange({
startCursor: 'newMockStartCursor',
routeQuery: mockRouteQuery,
}),
).toEqual({ start_cursor: 'newMockStartCursor' });
});
});
describe('when `endCursor` is defined', () => {
it('sets end cursor query param', () => {
expect(
onPageChange({
endCursor: 'newMockEndCursor',
routeQuery: mockRouteQuery,
}),
).toEqual({ end_cursor: 'newMockEndCursor' });
});
});
});
describe('timestampType', () => {
describe.each`
sortName | expectedTimestampType

View File

@ -59,6 +59,8 @@ const defaultProvide = {
};
const searchTerm = 'foo bar';
const mockEndCursor = 'mockEndCursor';
const mockStartCursor = 'mockStartCursor';
describe('YourWorkProjectsApp', () => {
let wrapper;
@ -89,6 +91,7 @@ describe('YourWorkProjectsApp', () => {
wrapper.findAllByRole('tab').wrappers.find((tab) => tab.text().includes(name));
const getTabCount = (tabName) => findTabByName(tabName).findComponent(GlBadge).text();
const findFilteredSearchAndSort = () => wrapper.findComponent(FilteredSearchAndSort);
const findTabView = () => wrapper.findComponent(TabView);
afterEach(() => {
router = null;
@ -273,7 +276,7 @@ describe('YourWorkProjectsApp', () => {
if (expectedTab.query) {
it('renders `TabView` component and passes `tab` prop', () => {
expect(wrapper.findComponent(TabView).props('tab')).toMatchObject(expectedTab);
expect(findTabView().props('tab')).toMatchObject(expectedTab);
});
}
});
@ -341,4 +344,56 @@ describe('YourWorkProjectsApp', () => {
});
});
});
describe('when page is changed', () => {
describe('when going to next page', () => {
beforeEach(async () => {
createComponent({
route: defaultRoute,
});
await nextTick();
findTabView().vm.$emit('page-change', {
endCursor: mockEndCursor,
startCursor: null,
hasPreviousPage: true,
});
});
it('sets `end_cursor` query string', () => {
expect(router.currentRoute.query).toMatchObject({
end_cursor: mockEndCursor,
});
});
});
describe('when going to previous page', () => {
beforeEach(async () => {
createComponent({
route: {
...defaultRoute,
query: {
start_cursor: mockStartCursor,
end_cursor: mockEndCursor,
},
},
});
await nextTick();
findTabView().vm.$emit('page-change', {
endCursor: null,
startCursor: mockStartCursor,
hasPreviousPage: true,
});
});
it('sets `start_cursor` query string', () => {
expect(router.currentRoute.query).toMatchObject({
start_cursor: mockStartCursor,
});
});
});
});
});

View File

@ -0,0 +1,7 @@
export const pageInfoMultiplePages = {
endCursor: 'eyJpZCI6IjEwNTMifQ',
hasNextPage: true,
hasPreviousPage: true,
startCursor: 'eyJpZCI6IjEwNzIifQ',
__typename: 'PageInfo',
};

View File

@ -1,14 +1,15 @@
import Vue from 'vue';
import { GlLoadingIcon } from '@gitlab/ui';
import { GlLoadingIcon, GlKeysetPagination } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import starredProjectsGraphQlResponse from 'test_fixtures/graphql/projects/your_work/starred_projects.query.graphql.json';
import inactiveProjectsGraphQlResponse from 'test_fixtures/graphql/projects/your_work/inactive_projects.query.graphql.json';
import personalProjectsGraphQlResponse from 'test_fixtures/graphql/projects/your_work/personal_projects.query.graphql.json';
import membershipProjectsGraphQlResponse from 'test_fixtures/graphql/projects/your_work/membership_projects.query.graphql.json';
import contributedProjectsGraphQlResponse from 'test_fixtures/graphql/projects/your_work/contributed_projects.query.graphql.json';
import starredProjectsGraphQlResponse from 'test_fixtures/graphql/projects/your_work/starred_projects.query.graphql.json';
import inactiveProjectsGraphQlResponse from 'test_fixtures/graphql/projects/your_work/inactive_projects.query.graphql.json';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import TabView from '~/projects/your_work/components/tab_view.vue';
import ProjectsList from '~/vue_shared/components/projects_list/projects_list.vue';
import { DEFAULT_PER_PAGE } from '~/api';
import contributedProjectsQuery from '~/projects/your_work/graphql/queries/contributed_projects.query.graphql';
import personalProjectsQuery from '~/projects/your_work/graphql/queries/personal_projects.query.graphql';
import membershipProjectsQuery from '~/projects/your_work/graphql/queries/membership_projects.query.graphql';
@ -25,6 +26,7 @@ import {
} from '~/projects/your_work/constants';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { pageInfoMultiplePages } from './mock_data';
jest.mock('~/alert');
@ -44,6 +46,7 @@ describe('TabView', () => {
};
const findProjectsList = () => wrapper.findComponent(ProjectsList);
const findPagination = () => wrapper.findComponent(GlKeysetPagination);
afterEach(() => {
mockApollo = null;
@ -73,6 +76,17 @@ describe('TabView', () => {
await waitForPromises();
});
it('calls GraphQL query with correct variables', async () => {
await waitForPromises();
expect(handler[1]).toHaveBeenCalledWith({
last: null,
first: DEFAULT_PER_PAGE,
before: null,
after: null,
});
});
it('passes projects to `ProjectsList` component', () => {
expect(findProjectsList().props('projects')).toEqual(
formatGraphQLProjects(expectedProjects),
@ -110,4 +124,115 @@ describe('TabView', () => {
});
});
});
describe('pagination', () => {
const propsData = { tab: PERSONAL_TAB };
describe('when there is one page of projects', () => {
beforeEach(async () => {
createComponent({
handler: [
personalProjectsQuery,
jest.fn().mockResolvedValue(personalProjectsGraphQlResponse),
],
propsData,
});
await waitForPromises();
});
it('does not render pagination', () => {
expect(findPagination().exists()).toBe(false);
});
});
describe('when there are multiple pages of projects', () => {
const mockEndCursor = 'mockEndCursor';
const mockStartCursor = 'mockStartCursor';
const handler = [
personalProjectsQuery,
jest.fn().mockResolvedValue({
data: {
projects: {
nodes: personalProjectsGraphQlResponse.data.projects.nodes,
pageInfo: pageInfoMultiplePages,
},
},
}),
];
beforeEach(async () => {
createComponent({
handler,
propsData,
});
await waitForPromises();
});
it('renders pagination', () => {
expect(findPagination().exists()).toBe(true);
});
describe('when next button is clicked', () => {
beforeEach(() => {
findPagination().vm.$emit('next', mockEndCursor);
});
it('emits `page-change` event', () => {
expect(wrapper.emitted('page-change')[0]).toEqual([
{
endCursor: mockEndCursor,
startCursor: null,
},
]);
});
});
describe('when `endCursor` prop is changed', () => {
beforeEach(async () => {
wrapper.setProps({ endCursor: mockEndCursor });
await waitForPromises();
});
it('calls query with correct variables', () => {
expect(handler[1]).toHaveBeenCalledWith({
after: mockEndCursor,
before: null,
first: DEFAULT_PER_PAGE,
last: null,
});
});
});
describe('when previous button is clicked', () => {
beforeEach(() => {
findPagination().vm.$emit('prev', mockStartCursor);
});
it('emits `page-change` event', () => {
expect(wrapper.emitted('page-change')[0]).toEqual([
{
endCursor: null,
startCursor: mockStartCursor,
},
]);
});
});
describe('when `startCursor` prop is changed', () => {
beforeEach(async () => {
wrapper.setProps({ startCursor: mockStartCursor });
await waitForPromises();
});
it('calls query with correct variables', () => {
expect(handler[1]).toHaveBeenCalledWith({
after: null,
before: mockStartCursor,
first: null,
last: DEFAULT_PER_PAGE,
});
});
});
});
});
});

View File

@ -11,10 +11,29 @@ function factory(propsData = {}) {
}
describe('Merge request merge checks message component', () => {
it('renders failure reason text', () => {
factory({ check: { status: 'success', identifier: 'discussions_not_resolved' } });
it.each`
identifier | expectedText
${'commits_status'} | ${'Source branch exists and contains commits.'}
${'ci_must_pass'} | ${'Pipeline must succeed.'}
${'conflict'} | ${'Merge conflicts must be resolved.'}
${'discussions_not_resolved'} | ${'Unresolved discussions must be resolved.'}
${'draft_status'} | ${'Merge request must not be draft.'}
${'not_open'} | ${'Merge request must be open.'}
${'need_rebase'} | ${'Merge request must be rebased, because a fast-forward merge is not possible.'}
${'not_approved'} | ${'All required approvals must be given.'}
${'merge_request_blocked'} | ${'Merge request dependencies must be merged.'}
${'status_checks_must_pass'} | ${'Status checks must pass.'}
${'jira_association_missing'} | ${'Either the title or description must reference a Jira issue.'}
${'requested_changes'} | ${'The change requests must be completed or resolved.'}
${'approvals_syncing'} | ${'The merge request approvals are currently syncing.'}
${'locked_paths'} | ${'All paths must be unlocked'}
${'locked_lfs_files'} | ${'All LFS files must be unlocked.'}
${'security_policy_evaluation'} | ${'All security policies must be evaluated.'}
${'security_policy_violations'} | ${'All policy rules must be satisfied.'}
`('renders failure reason text', ({ identifier, expectedText }) => {
factory({ check: { status: 'success', identifier } });
expect(wrapper.text()).toEqual('Unresolved discussions must be resolved.');
expect(wrapper.text()).toBe(expectedText);
});
it.each`

View File

@ -258,7 +258,8 @@ RSpec.describe IconsHelper do
describe 'gl_loading_icon' do
it 'returns the default spinner markup' do
expect(gl_loading_icon.to_s)
.to eq '<div class="gl-spinner-container" role="status"><span aria-label="Loading" class="gl-spinner gl-spinner-sm gl-spinner-dark !gl-align-text-bottom"></span></div>'
.to eq '<div class="gl-spinner-container" role="status"><span aria-hidden class="gl-spinner gl-spinner-sm gl-spinner-dark !gl-align-text-bottom"></span><span class="gl-sr-only !gl-absolute">Loading</span>
</div>'
end
context 'when css_class is provided' do

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillCiResourcesProjectId,
feature_category: :continuous_integration,
schema: 20240930154300,
migration: :gitlab_ci do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :ci_resources }
let(:backfill_column) { :project_id }
let(:backfill_via_table) { :ci_resource_groups }
let(:backfill_via_column) { :project_id }
let(:backfill_via_foreign_key) { :resource_group_id }
end
end

View File

@ -65,19 +65,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo
end
it { is_expected.to be_truthy }
context 'when pipeline_run_keyword feature flag is disabled' do
before do
stub_feature_flags(pipeline_run_keyword: false)
end
context 'when config has run key' do
let(:name) { :rspec }
let(:config) { { run: [{ name: 'step1', step: 'some reference' }] } }
it { is_expected.to be_falsey }
end
end
end
context 'when config is a bridge job' do
@ -969,16 +956,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo
end
end
end
context 'when feature flag is disabled' do
before do
stub_feature_flags(pipeline_run_keyword: false)
end
it 'return nil for run value' do
expect(entry.value[:run]).to be_nil
end
end
end
context 'with retry present in the config' do

View File

@ -73,16 +73,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build, feature_category: :pipeline_co
)
end
context 'when feature flag is disabled' do
before do
stub_feature_flags(pipeline_run_keyword: false)
end
it 'does not include execution_config attribute' do
expect(subject).not_to include(:execution_config)
end
end
context 'when job:run attribute is not specified' do
let(:attributes) do
{

View File

@ -275,6 +275,7 @@ merge_requests:
- scan_result_policy_reads_through_violations
- scan_result_policy_reads_through_approval_rules
- running_scan_result_policy_violations
- completed_scan_result_policy_violations
external_pull_requests:
- project
merge_request_diff:

View File

@ -630,7 +630,8 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
let(:upload_method) { nil }
let(:upload_url) { nil }
let(:upload_headers) { {} }
let(:upload_config) { { method: upload_method, headers: upload_headers, url: upload_url }.compact_blank! }
let(:authorized_upload_response) { {} }
let(:upload_config) { { method: upload_method, headers: upload_headers, url: upload_url, authorized_upload_response: authorized_upload_response }.compact_blank! }
let(:ssrf_filter) { false }
let(:allow_localhost) { true }
let(:allowed_uris) { [] }
@ -653,7 +654,8 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
'UploadConfig' => {
'Method' => upload_method,
'Url' => upload_url,
'Headers' => upload_headers.transform_values { |v| Array.wrap(v) }
'Headers' => upload_headers.transform_values { |v| Array.wrap(v) },
'AuthorizedUploadResponse' => authorized_upload_response
}.compact_blank!
}
expected_params.compact_blank!
@ -686,6 +688,12 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
it_behaves_like 'setting the header correctly', ensure_upload_config_field: 'Headers'
end
context 'with authorized upload response set' do
let(:authorized_upload_response) { { 'TempPath' => '/dev/null' } }
it_behaves_like 'setting the header correctly', ensure_upload_config_field: 'AuthorizedUploadResponse'
end
context 'when `ssrf_filter` parameter is set' do
let(:ssrf_filter) { true }

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillCiResourcesProjectId, migration: :gitlab_ci, feature_category: :continuous_integration do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :ci_resources,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE,
gitlab_schema: :gitlab_ci,
job_arguments: [
:project_id,
:ci_resource_groups,
:project_id,
:resource_group_id
]
)
}
end
end
end

View File

@ -505,19 +505,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
expect(response).to have_gitlab_http_status(:created)
expect(json_response['steps']).to be_nil
end
context 'when feature flag is disabled' do
before do
stub_feature_flags(pipeline_run_keyword: false)
end
it 'returns nil for run steps' do
request_job
expect(response).to have_gitlab_http_status(:created)
expect(json_response['run']).to be_nil
end
end
end
context 'when job does not have execution config' do
@ -551,20 +538,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
expect(response).to have_gitlab_http_status(:created)
expect(json_response['run']).to be_nil
end
context 'when feature flag is disabled' do
before do
stub_feature_flags(pipeline_run_keyword: false)
end
it 'returns nil for run steps' do
request_job
expect(response).to have_gitlab_http_status(:created)
expect(json_response['run']).to be_nil
expect(json_response['steps']).to eq(expected_steps)
end
end
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe API::SidekiqMetrics, :aggregate_failures, feature_category: :shared do
RSpec.describe API::SidekiqMetrics, :clean_gitlab_redis_queues, :aggregate_failures, feature_category: :shared do
let(:instance_count) { 1 }
let(:admin) { create(:user, :admin) }
@ -15,6 +15,19 @@ RSpec.describe API::SidekiqMetrics, :aggregate_failures, feature_category: :shar
end
shared_examples 'GET sidekiq metrics' do
before do
# ProcessSet looks up running processes in Redis.
# To ensure test coverage, stub some data so it actually performs some iteration.
Gitlab::SidekiqSharding::Validator.allow_unrouted_sidekiq_calls do
Sidekiq.redis do |r|
r.sadd('processes', 'teststub')
r.hset('teststub',
['info', Sidekiq.dump_json({ started_at: Time.now.to_i }), "busy", 1, "quiet", 1, "rss", 1, "rtt_us", 1]
)
end
end
end
it 'defines the `queue_metrics` endpoint' do
expect(Gitlab::SidekiqConfig).to receive(:routing_queues).exactly(instance_count).times.and_call_original
get api('/sidekiq/queue_metrics', admin, admin_mode: true)

View File

@ -1257,14 +1257,15 @@ RSpec.describe API::VirtualRegistries::Packages::Maven, :aggregate_failures, fea
end
expected_upload_config = {
'Headers' => { described_class::UPSTREAM_GID_HEADER => [upstream.to_global_id.to_s] }
'Headers' => { described_class::UPSTREAM_GID_HEADER => [upstream.to_global_id.to_s] },
'AuthorizedUploadResponse' => a_kind_of(Hash)
}
expect(send_data_type).to eq('send-dependency')
expect(send_data['Url']).to be_present
expect(send_data['Headers']).to eq(expected_headers)
expect(send_data['ResponseHeaders']).to eq(expected_resp_headers)
expect(send_data['UploadConfig']).to eq(expected_upload_config)
expect(send_data['UploadConfig']).to include(expected_upload_config)
end
end
@ -1357,53 +1358,6 @@ RSpec.describe API::VirtualRegistries::Packages::Maven, :aggregate_failures, fea
it_behaves_like 'not authenticated user'
end
describe 'POST /api/v4/virtual_registries/packages/maven/:id/*path/upload/authorize' do
include_context 'workhorse headers'
let(:path) { 'com/test/package/1.2.3/package-1.2.3.pom' }
let(:url) { "/virtual_registries/packages/maven/#{registry.id}/#{path}/upload/authorize" }
subject(:request) do
post api(url), headers: headers
end
shared_examples 'returning the workhorse authorization response' do
it 'authorizes the upload' do
request
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).not_to be_nil
end
end
it_behaves_like 'authenticated endpoint',
success_shared_example_name: 'returning the workhorse authorization response' do
let(:headers) { workhorse_headers }
end
context 'with a valid user' do
let(:headers) { workhorse_headers.merge(token_header(:personal_access_token)) }
context 'with no workhorse headers' do
let(:headers) { token_header(:personal_access_token) }
it_behaves_like 'returning response status', :forbidden
end
context 'with no permissions on registry' do
let_it_be(:user) { create(:user) }
it_behaves_like 'returning response status', :forbidden
end
it_behaves_like 'disabled feature flag'
it_behaves_like 'disabled dependency proxy'
end
it_behaves_like 'not authenticated user'
end
describe 'POST /api/v4/virtual_registries/packages/maven/:id/*path/upload' do
include_context 'workhorse headers'
@ -1431,7 +1385,7 @@ RSpec.describe API::VirtualRegistries::Packages::Maven, :aggregate_failures, fea
it 'accepts the upload', :freeze_time do
expect { request }.to change { upstream.cached_responses.count }.by(1)
expect(response).to have_gitlab_http_status(:created)
expect(response).to have_gitlab_http_status(:ok)
expect(upstream.cached_responses.last).to have_attributes(
relative_path: "/#{path}",
downloads_count: 1,

View File

@ -47,16 +47,6 @@ RSpec.describe Ci::CreatePipelineService, :ci_config_feature_flag_correctness,
}
])
end
context 'when feature flag is disabled' do
before do
stub_feature_flags(pipeline_run_keyword: false)
end
it 'does not create a pipeline' do
expect(pipeline).not_to be_created_successfully
end
end
end
context 'when job has multiple run steps with different configurations' do

View File

@ -20,7 +20,8 @@ internal/api/channel_settings.go:57:28: G402: TLS MinVersion too low. (gosec)
internal/channel/channel.go:128:31: response body must be closed (bodyclose)
internal/config/config.go:246:18: G204: Subprocess launched with variable (gosec)
internal/config/config.go:328:8: G101: Potential hardcoded credentials (gosec)
internal/dependencyproxy/dependencyproxy_test.go:476: internal/dependencyproxy/dependencyproxy_test.go:476: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "note that the timeout duration here is s..." (godox)
internal/dependencyproxy/dependencyproxy.go:114: Function 'Inject' is too long (61 > 60) (funlen)
internal/dependencyproxy/dependencyproxy_test.go:510: internal/dependencyproxy/dependencyproxy_test.go:510: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "note that the timeout duration here is s..." (godox)
internal/git/archive.go:35:2: var-naming: struct field CommitId should be CommitID (revive)
internal/git/archive.go:43:2: exported: exported var SendArchive should have comment or be unexported (revive)
internal/git/archive.go:53: Function 'Inject' has too many statements (47 > 40) (funlen)

View File

@ -14,9 +14,11 @@ import (
"gitlab.com/gitlab-org/labkit/log"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper/fail"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/senddata"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/transport"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
)
const dialTimeout = 10 * time.Second
@ -36,7 +38,7 @@ var httpClients sync.Map
// Injector provides functionality for injecting dependencies
type Injector struct {
senddata.Prefix
uploadHandler http.Handler
uploadHandler upload.BodyUploadHandler
}
type entryParams struct {
@ -50,9 +52,33 @@ type entryParams struct {
}
type uploadConfig struct {
Headers http.Header
Method string
URL string
Headers http.Header
Method string
URL string
AuthorizedUploadResponse authorizeUploadResponse
}
type authorizeUploadResponse struct {
TempPath string
RemoteObject api.RemoteObject
MaximumSize int64
UploadHashFunctions []string
}
func (u *uploadConfig) ExtractUploadAuthorizeFields() *api.Response {
tempPath := u.AuthorizedUploadResponse.TempPath
remoteID := u.AuthorizedUploadResponse.RemoteObject.RemoteTempObjectID
if tempPath == "" && remoteID == "" {
return nil
}
return &api.Response{
TempPath: tempPath,
RemoteObject: u.AuthorizedUploadResponse.RemoteObject,
MaximumSize: u.AuthorizedUploadResponse.MaximumSize,
UploadHashFunctions: u.AuthorizedUploadResponse.UploadHashFunctions,
}
}
type nullResponseWriter struct {
@ -80,7 +106,7 @@ func NewInjector() *Injector {
}
// SetUploadHandler sets the upload handler for the Injector
func (p *Injector) SetUploadHandler(uploadHandler http.Handler) {
func (p *Injector) SetUploadHandler(uploadHandler upload.BodyUploadHandler) {
p.uploadHandler = uploadHandler
}
@ -135,7 +161,12 @@ func (p *Injector) Inject(w http.ResponseWriter, r *http.Request, sendData strin
saveFileRequest.ContentLength = dependencyResponse.ContentLength
nrw := &nullResponseWriter{header: make(http.Header)}
p.uploadHandler.ServeHTTP(nrw, saveFileRequest)
apiResponse := params.UploadConfig.ExtractUploadAuthorizeFields()
if apiResponse != nil {
p.uploadHandler.ServeHTTPWithAPIResponse(nrw, saveFileRequest, apiResponse)
} else {
p.uploadHandler.ServeHTTP(nrw, saveFileRequest)
}
if nrw.status != http.StatusOK {
fields := log.Fields{"code": nrw.status}
@ -213,14 +244,14 @@ func (p *Injector) unpackParams(sendData string) (*entryParams, error) {
return nil, fmt.Errorf("dependency proxy: unpack sendData: %w", err)
}
if err := p.validateParams(params); err != nil {
if err := p.validateParams(&params); err != nil {
return nil, fmt.Errorf("dependency proxy: invalid params: %w", err)
}
return &params, nil
}
func (p *Injector) validateParams(params entryParams) error {
func (p *Injector) validateParams(params *entryParams) error {
var uploadMethod = params.UploadConfig.Method
if uploadMethod != "" && uploadMethod != http.MethodPost && uploadMethod != http.MethodPut {
return fmt.Errorf("invalid upload method %s", uploadMethod)

Some files were not shown because too many files have changed in this diff