Add latest changes from gitlab-org/gitlab@master
parent 720d75f7f9
commit 10adf63c4b
@@ -6,6 +6,7 @@ import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import { tableField } from '../utils';
import { I18N_STATUS_NEVER_CONTACTED } from '../constants';
import RunnerStatusBadge from './runner_status_badge.vue';
import RunnerJobStatusBadge from './runner_job_status_badge.vue';
export default {
name: 'RunnerManagersTable',
@@ -15,6 +16,7 @@ export default {
HelpPopover,
GlIntersperse,
RunnerStatusBadge,
RunnerJobStatusBadge,
RunnerUpgradeStatusIcon: () =>
import('ee_component/ci/runner/components/runner_upgrade_status_icon.vue'),
},
@@ -52,7 +54,15 @@ export default {
</help-popover>
</template>
<template #cell(status)="{ item = {} }">
<runner-status-badge :contacted-at="item.contactedAt" :status="item.status" />
<runner-status-badge
class="gl-vertical-align-middle"
:contacted-at="item.contactedAt"
:status="item.status"
/>
<runner-job-status-badge
class="gl-vertical-align-middle"
:job-status="item.jobExecutionStatus"
/>
</template>
<template #cell(version)="{ item = {} }">
{{ item.version }}
@@ -9,4 +9,5 @@ fragment CiRunnerManagerShared on CiRunnerManager {
platformName
ipAddress
contactedAt
jobExecutionStatus
}
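The added `jobExecutionStatus` field becomes available wherever this fragment is spread. A minimal sketch of a consuming query follows; the `runner(id:)` / `managers` connection shape is an assumption about the surrounding schema, not something introduced by this commit.

```graphql
# Illustrative only — query shape assumed, fragment as defined above.
query runnerManagersStatus($runnerId: CiRunnerID!) {
  runner(id: $runnerId) {
    id
    managers {
      nodes {
        ...CiRunnerManagerShared
      }
    }
  }
}
```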
@@ -10,6 +10,8 @@ import {
newDateAsLocaleTime,
} from '~/lib/utils/datetime_utility';
import { __ } from '~/locale';
import { STATE_CLOSED } from '~/work_items/constants';
import { isMilestoneWidget, isStartAndDueDateWidget } from '~/work_items/utils';
export default {
components: {
@@ -26,9 +28,12 @@ export default {
},
},
computed: {
milestone() {
return this.issue.milestone || this.issue.widgets?.find(isMilestoneWidget)?.milestone;
},
milestoneDate() {
if (this.issue.milestone?.dueDate) {
const { dueDate, startDate } = this.issue.milestone;
if (this.milestone.dueDate) {
const { dueDate, startDate } = this.milestone;
const date = dateInWords(newDateAsLocaleTime(dueDate), true);
const remainingTime = this.milestoneRemainingTime(dueDate, startDate);
return `${date} (${remainingTime})`;
@@ -36,15 +41,19 @@ export default {
return __('Milestone');
},
milestoneLink() {
return this.issue.milestone.webPath || this.issue.milestone.webUrl;
return this.milestone.webPath || this.milestone.webUrl;
},
dueDate() {
return this.issue.dueDate && dateInWords(newDateAsLocaleTime(this.issue.dueDate), true);
return this.issue.dueDate || this.issue.widgets?.find(isStartAndDueDateWidget)?.dueDate;
},
dueDateText() {
return this.dueDate && dateInWords(newDateAsLocaleTime(this.dueDate), true);
},
isClosed() {
return this.issue.state === STATUS_CLOSED || this.issue.state === STATE_CLOSED;
},
showDueDateInRed() {
return (
isInPast(newDateAsLocaleTime(this.issue.dueDate)) && this.issue.state !== STATUS_CLOSED
);
return isInPast(newDateAsLocaleTime(this.dueDate)) && !this.isClosed;
},
timeEstimate() {
return this.issue.humanTimeEstimate || this.issue.timeStats?.humanTimeEstimate;
@@ -73,7 +82,7 @@ export default {
<template>
<span>
<span
v-if="issue.milestone"
v-if="milestone"
class="issuable-milestone gl-mr-3 gl-text-truncate gl-max-w-26 gl-display-inline-block gl-vertical-align-bottom"
data-testid="issuable-milestone"
>
@@ -84,11 +93,11 @@ export default {
class="gl-font-sm gl-text-gray-500!"
>
<gl-icon name="clock" :size="12" />
{{ issue.milestone.title }}
{{ milestone.title }}
</gl-link>
</span>
<span
v-if="issue.dueDate"
v-if="dueDate"
v-gl-tooltip
class="issuable-due-date gl-mr-3"
:class="{ 'gl-text-red-500': showDueDateInRed }"
@@ -96,7 +105,7 @@ export default {
data-testid="issuable-due-date"
>
<gl-icon name="calendar" :size="12" />
{{ dueDate }}
{{ dueDateText }}
</span>
<span
v-if="timeEstimate"
@@ -260,7 +260,9 @@ export default {
</gl-intersection-observer>
</td>
<td class="tree-time-ago text-right cursor-default gl-text-secondary">
<timeago-tooltip v-if="commitData" :time="commitData.committedDate" />
<gl-intersection-observer @appear="rowAppeared" @disappear="rowDisappeared">
<timeago-tooltip v-if="commitData" :time="commitData.committedDate" />
</gl-intersection-observer>
<gl-skeleton-loader v-if="showSkeletonLoader" :lines="1" />
</td>
</tr>
@@ -6,6 +6,7 @@ import { fetchPolicies } from '~/lib/graphql';
import { isPositiveInteger } from '~/lib/utils/number_utils';
import axios from '~/lib/utils/axios_utils';
import { getParameterByName, joinPaths } from '~/lib/utils/url_utility';
import { scrollUp } from '~/lib/utils/scroll_utils';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
import { DEFAULT_PAGE_SIZE, issuableListTabs } from '~/vue_shared/issuable/list/constants';
@@ -206,6 +207,18 @@ export default {
[STATUS_ALL]: allIssues?.count,
};
},
currentTabCount() {
return this.tabCounts[this.state] ?? 0;
},
showPaginationControls() {
return (
this.serviceDeskIssues.length > 0 &&
(this.pageInfo.hasNextPage || this.pageInfo.hasPreviousPage)
);
},
showPageSizeControls() {
return this.currentTabCount > DEFAULT_PAGE_SIZE;
},
isLoading() {
return this.$apollo.queries.serviceDeskIssues.loading;
},
@@ -404,6 +417,32 @@ export default {
this.$router.push({ query: this.urlParams });
},
handleNextPage() {
this.pageParams = {
afterCursor: this.pageInfo.endCursor,
firstPageSize: this.pageSize,
};
scrollUp();
this.$router.push({ query: this.urlParams });
},
handlePreviousPage() {
this.pageParams = {
beforeCursor: this.pageInfo.startCursor,
lastPageSize: this.pageSize,
};
scrollUp();
this.$router.push({ query: this.urlParams });
},
handlePageSizeChange(newPageSize) {
const pageParam = getParameterByName(PARAM_LAST_PAGE_SIZE) ? 'lastPageSize' : 'firstPageSize';
this.pageParams[pageParam] = newPageSize;
this.pageSize = newPageSize;
scrollUp();
this.$router.push({ query: this.urlParams });
},
handleSort(sortKey) {
if (this.sortKey === sortKey) {
return;
@@ -525,6 +564,8 @@ export default {
:issuables-loading="isLoading"
:initial-filter-value="filterTokens"
:show-filtered-search-friendly-text="hasOrFeature"
:show-pagination-controls="showPaginationControls"
:show-page-size-change-controls="showPageSizeControls"
:sort-options="sortOptions"
:initial-sort-by="sortKey"
:is-manual-ordering="isManualOrdering"
@@ -533,11 +574,17 @@ export default {
:tab-counts="tabCounts"
:current-tab="state"
:default-page-size="pageSize"
:has-next-page="pageInfo.hasNextPage"
:has-previous-page="pageInfo.hasPreviousPage"
sync-filter-and-sort
use-keyset-pagination
@click-tab="handleClickTab"
@filter="handleFilter"
@sort="handleSort"
@reorder="handleReorder"
@next-page="handleNextPage"
@previous-page="handlePreviousPage"
@page-size-change="handlePageSizeChange"
>
<template #empty-state>
<empty-state-with-any-issues :has-search="hasSearch" :is-open-tab="isOpenTab" />
@@ -5,6 +5,7 @@ import { GlDisclosureDropdownGroup, GlLoadingIcon } from '@gitlab/ui';
import * as Sentry from '@sentry/browser';
import axios from '~/lib/utils/axios_utils';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import Tracking from '~/tracking';
import { getFormattedItem } from '../utils';
import {
@@ -18,6 +19,8 @@ import {
PATH_GROUP_TITLE,
GROUP_TITLES,
MAX_ROWS,
TRACKING_ACTIVATE_COMMAND_PALETTE,
TRACKING_HANDLE_LABEL_MAP,
} from './constants';
import SearchItem from './search_item.vue';
import { commandMapper, linksReducer, autocompleteQuery, fileMapper } from './utils';
@@ -29,6 +32,7 @@ export default {
GlLoadingIcon,
SearchItem,
},
mixins: [Tracking.mixin()],
inject: [
'commandPaletteCommands',
'commandPaletteLinks',
@@ -134,10 +138,15 @@ export default {
immediate: true,
},
handle: {
handler() {
this.debouncedSearch();
handler(value, oldValue) {
// Do not run search immediately on component creation
if (oldValue !== undefined) this.debouncedSearch();
// Track immediately on component creation
const label = TRACKING_HANDLE_LABEL_MAP[value] ?? 'unknown';
this.track(TRACKING_ACTIVATE_COMMAND_PALETTE, { label });
},
immediate: false,
immediate: true,
},
},
updated() {
@@ -6,6 +6,16 @@ export const PROJECT_HANDLE = ':';
export const ISSUE_HANDLE = '#';
export const PATH_HANDLE = '/';
export const TRACKING_ACTIVATE_COMMAND_PALETTE = 'activate_command_palette';
export const TRACKING_CLICK_COMMAND_PALETTE_ITEM = 'click_command_palette_item';
export const TRACKING_HANDLE_LABEL_MAP = {
[COMMAND_HANDLE]: 'command',
[USER_HANDLE]: 'user',
[PROJECT_HANDLE]: 'project',
[PATH_HANDLE]: 'path',
// No ISSUE_HANDLE. See https://gitlab.com/gitlab-org/gitlab/-/issues/417434.
};
export const COMMON_HANDLES = [COMMAND_HANDLE, USER_HANDLE, PROJECT_HANDLE];
export const SEARCH_OR_COMMAND_MODE_PLACEHOLDER = sprintf(
s__(
@@ -1,6 +1,11 @@
import { isNil, omitBy } from 'lodash';
import { objectToQuery, joinPaths } from '~/lib/utils/url_utility';
import { SEARCH_SCOPE, GLOBAL_COMMANDS_GROUP_TITLE } from './constants';
import { TRACKING_UNKNOWN_ID } from '~/super_sidebar/constants';
import {
SEARCH_SCOPE,
GLOBAL_COMMANDS_GROUP_TITLE,
TRACKING_CLICK_COMMAND_PALETTE_ITEM,
} from './constants';
export const commandMapper = ({ name, items }) => {
// TODO: we filter out invite_members for now, because it is complicated to add the invite members modal here
@@ -12,18 +17,34 @@ export const commandMapper = ({ name, items }) => {
};
export const linksReducer = (acc, menuItem) => {
const trackingAttrs = ({ id, title }) => {
return {
extraAttrs: {
'data-track-action': TRACKING_CLICK_COMMAND_PALETTE_ITEM,
'data-track-label': id || TRACKING_UNKNOWN_ID,
...(id
? {}
: {
'data-track-extra': JSON.stringify({ title }),
}),
},
};
};
acc.push({
text: menuItem.title,
keywords: menuItem.title,
icon: menuItem.icon,
href: menuItem.link,
...trackingAttrs(menuItem),
});
if (menuItem.items?.length) {
const items = menuItem.items.map(({ title, link }) => ({
keywords: title,
text: [menuItem.title, title].join(' > '),
href: link,
const items = menuItem.items.map((item) => ({
keywords: item.title,
text: [menuItem.title, item.title].join(' > '),
href: item.link,
icon: menuItem.icon,
...trackingAttrs(item),
}));
/* eslint-disable-next-line no-param-reassign */
@@ -37,6 +58,10 @@ export const fileMapper = (projectBlobPath, file) => {
icon: 'doc-code',
text: file,
href: joinPaths(projectBlobPath, file),
extraAttrs: {
'data-track-action': TRACKING_CLICK_COMMAND_PALETTE_ITEM,
'data-track-label': 'file',
},
};
};
@@ -2,6 +2,8 @@
import { GlDisclosureDropdownGroup, GlDisclosureDropdownItem, GlIcon } from '@gitlab/ui';
import { truncateNamespace } from '~/lib/utils/text_utility';
import { getItemsFromLocalStorage, removeItemFromLocalStorage } from '~/super_sidebar/utils';
import { TRACKING_UNKNOWN_PANEL } from '~/super_sidebar/constants';
import { TRACKING_CLICK_COMMAND_PALETTE_ITEM } from '../command_palette/constants';
import FrequentItem from './frequent_item.vue';
export default {
@@ -65,6 +67,12 @@ export default {
// validator, and the href field ensures it renders a link.
text: item.name,
href: item.webUrl,
extraAttrs: {
'data-track-action': TRACKING_CLICK_COMMAND_PALETTE_ITEM,
'data-track-label': item.id,
'data-track-property': TRACKING_UNKNOWN_PANEL,
'data-track-extra': JSON.stringify({ title: item.name }),
},
},
forRenderer: {
id: item.id,
@@ -1,6 +1,8 @@
<script>
import { GlDisclosureDropdownGroup } from '@gitlab/ui';
import { PLACES } from '~/vue_shared/global_search/constants';
import { TRACKING_UNKNOWN_ID, TRACKING_UNKNOWN_PANEL } from '~/super_sidebar/constants';
import { TRACKING_CLICK_COMMAND_PALETTE_ITEM } from '../command_palette/constants';
export default {
name: 'DefaultPlaces',
@@ -18,7 +20,19 @@ export default {
group() {
return {
name: this.$options.i18n.PLACES,
items: this.contextSwitcherLinks.map(({ title, link }) => ({ text: title, href: link })),
items: this.contextSwitcherLinks.map(({ title, link }) => ({
text: title,
href: link,
extraAttrs: {
'data-track-action': TRACKING_CLICK_COMMAND_PALETTE_ITEM,
// The label and property are hard-coded as unknown for now for
// parity with the existing corresponding context switcher items.
// Once the context switcher is removed, these can be changed.
'data-track-label': TRACKING_UNKNOWN_ID,
'data-track-property': TRACKING_UNKNOWN_PANEL,
'data-track-extra': JSON.stringify({ title }),
},
})),
};
},
},
@@ -14,6 +14,7 @@ import {
SEARCH_RESULTS_ORDER,
} from '~/vue_shared/global_search/constants';
import { getFormattedItem } from '../utils';
import { TRACKING_CLICK_COMMAND_PALETTE_ITEM } from '../command_palette/constants';
import {
ICON_GROUP,
@@ -172,6 +173,10 @@ export const scopedSearchOptions = (state, getters) => {
scopeCategory: PROJECTS_CATEGORY,
icon: ICON_PROJECT,
href: getters.projectUrl,
extraAttrs: {
'data-track-action': TRACKING_CLICK_COMMAND_PALETTE_ITEM,
'data-track-label': 'scoped_in_project',
},
});
}
@@ -182,6 +187,10 @@ export const scopedSearchOptions = (state, getters) => {
scopeCategory: GROUPS_CATEGORY,
icon: state.searchContext.group?.full_name?.includes('/') ? ICON_SUBGROUP : ICON_GROUP,
href: getters.groupUrl,
extraAttrs: {
'data-track-action': TRACKING_CLICK_COMMAND_PALETTE_ITEM,
'data-track-label': 'scoped_in_group',
},
});
}
@@ -189,6 +198,10 @@ export const scopedSearchOptions = (state, getters) => {
text: 'scoped-in-all',
description: MSG_IN_ALL_GITLAB,
href: getters.allUrl,
extraAttrs: {
'data-track-action': TRACKING_CLICK_COMMAND_PALETTE_ITEM,
'data-track-label': 'scoped_in_all',
},
});
return items;
@@ -1,5 +1,5 @@
import { pickBy } from 'lodash';
import { truncateNamespace } from '~/lib/utils/text_utility';
import { slugify, truncateNamespace } from '~/lib/utils/text_utility';
import {
GROUPS_CATEGORY,
PROJECTS_CATEGORY,
@@ -7,6 +7,7 @@ import {
ISSUES_CATEGORY,
RECENT_EPICS_CATEGORY,
} from '~/vue_shared/global_search/constants';
import { TRACKING_CLICK_COMMAND_PALETTE_ITEM } from './command_palette/constants';
import { LARGE_AVATAR_PX, SMALL_AVATAR_PX } from './constants';
const getTruncatedNamespace = (string) => {
@@ -61,6 +62,15 @@ export const getFormattedItem = (item, searchContext) => {
const avatarSize = getAvatarSize(category);
const entityId = getEntityId(item, searchContext);
const entityName = getEntityName(item, searchContext);
const trackingLabel = slugify(category ?? '');
const trackingAttrs = trackingLabel
? {
extraAttrs: {
'data-track-action': TRACKING_CLICK_COMMAND_PALETTE_ITEM,
'data-track-label': slugify(category, '_'),
},
}
: {};
return pickBy(
{
@@ -75,6 +85,7 @@ export const getFormattedItem = (item, searchContext) => {
namespace,
entity_id: entityId,
entity_name: entityName,
...trackingAttrs,
},
(val) => val !== undefined,
);
@@ -8,8 +8,10 @@ import {
function simplifyWidgetName(componentName) {
const noWidget = componentName.replace(/^Widget/, '');
const camelName = noWidget.charAt(0).toLowerCase() + noWidget.slice(1);
const tierlessName = camelName.replace(/(CE|EE)$/, '');
return noWidget.charAt(0).toLowerCase() + noWidget.slice(1);
return tierlessName;
}
function baseRedisEventName(extensionName) {
@@ -1,5 +1,7 @@
<script>
import * as Sentry from '@sentry/browser';
import IssueCardStatistics from 'ee_else_ce/issues/list/components/issue_card_statistics.vue';
import IssueCardTimeInfo from 'ee_else_ce/issues/list/components/issue_card_time_info.vue';
import { STATUS_OPEN } from '~/issues/constants';
import { __, s__ } from '~/locale';
import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
@@ -14,6 +16,8 @@ export default {
issuableListTabs,
components: {
IssuableList,
IssueCardStatistics,
IssueCardTimeInfo,
},
inject: ['fullPath'],
data() {
@@ -57,6 +61,7 @@ export default {
:current-tab="state"
:error="error"
:issuables="workItems"
:issuables-loading="$apollo.queries.workItems.loading"
namespace="work-items"
recent-searches-storage-key="issues"
:search-input-placeholder="$options.i18n.searchPlaceholder"
@@ -66,8 +71,16 @@ export default {
:tabs="$options.issuableListTabs"
@dismiss-alert="error = undefined"
>
<template #timeframe="{ issuable = {} }">
<issue-card-time-info :issue="issuable" />
</template>
<template #status="{ issuable }">
{{ getStatus(issuable) }}
</template>
<template #statistics="{ issuable = {} }">
<issue-card-statistics :issue="issuable" />
</template>
</issuable-list>
</template>
@@ -1,6 +1,7 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import { parseBoolean } from '~/lib/utils/common_utils';
import WorkItemsListApp from './components/work_items_list_app.vue';
export const mountWorkItemsListApp = () => {
@@ -12,6 +13,8 @@ export const mountWorkItemsListApp = () => {
Vue.use(VueApollo);
const { fullPath, hasIssuableHealthStatusFeature, hasIssueWeightsFeature } = el.dataset;
return new Vue({
el,
name: 'WorkItemsListRoot',
@@ -19,7 +22,9 @@ export const mountWorkItemsListApp = () => {
defaultClient: createDefaultClient(),
}),
provide: {
fullPath: el.dataset.fullPath,
fullPath,
hasIssuableHealthStatusFeature: parseBoolean(hasIssuableHealthStatusFeature),
hasIssueWeightsFeature: parseBoolean(hasIssueWeightsFeature),
},
render: (createComponent) => createComponent(WorkItemsListApp),
});
@@ -0,0 +1,38 @@
#import "~/graphql_shared/fragments/user.fragment.graphql"
fragment BaseWorkItemWidgets on WorkItemWidget {
... on WorkItemWidgetAssignees {
type
assignees {
nodes {
...User
}
}
}
... on WorkItemWidgetLabels {
type
allowsScopedLabels
labels {
nodes {
id
color
description
title
}
}
}
... on WorkItemWidgetMilestone {
type
milestone {
id
dueDate
startDate
title
webPath
}
}
... on WorkItemWidgetStartAndDueDate {
type
dueDate
}
}
@@ -1,3 +1,5 @@
#import "ee_else_ce/work_items/list/queries/work_item_widgets.fragment.graphql"
query getWorkItems($fullPath: ID!) {
group(fullPath: $fullPath) {
id
@@ -21,30 +23,7 @@ query getWorkItems($fullPath: ID!) {
updatedAt
webUrl
widgets {
... on WorkItemWidgetAssignees {
assignees {
nodes {
id
avatarUrl
name
username
webUrl
}
}
type
}
... on WorkItemWidgetLabels {
allowsScopedLabels
labels {
nodes {
id
color
description
title
}
}
type
}
...WorkItemWidgets
}
workItemType {
id
@@ -0,0 +1,5 @@
#import "./base_work_item_widgets.fragment.graphql"
fragment WorkItemWidgets on WorkItemWidget {
...BaseWorkItemWidgets
}
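The CE fragment above only re-exports `BaseWorkItemWidgets`; the `ee_else_ce/...` import added to the work items query lets an EE build substitute a wider fragment at the same path. A hypothetical sketch of such an override follows — the weight and health-status widget selections are assumptions for illustration and are not part of this commit.

```graphql
#import "./base_work_item_widgets.fragment.graphql"

# Hypothetical EE-side override of the same fragment name.
fragment WorkItemWidgets on WorkItemWidget {
  ...BaseWorkItemWidgets
  ... on WorkItemWidgetWeight {
    type
    weight
  }
  ... on WorkItemWidgetHealthStatus {
    type
    healthStatus
  }
}
```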
@@ -1,9 +1,25 @@
import { WIDGET_TYPE_ASSIGNEES, WIDGET_TYPE_HIERARCHY, WIDGET_TYPE_LABELS } from './constants';
import {
WIDGET_TYPE_ASSIGNEES,
WIDGET_TYPE_HEALTH_STATUS,
WIDGET_TYPE_HIERARCHY,
WIDGET_TYPE_LABELS,
WIDGET_TYPE_MILESTONE,
WIDGET_TYPE_START_AND_DUE_DATE,
WIDGET_TYPE_WEIGHT,
} from './constants';
export const isAssigneesWidget = (widget) => widget.type === WIDGET_TYPE_ASSIGNEES;
export const isHealthStatusWidget = (widget) => widget.type === WIDGET_TYPE_HEALTH_STATUS;
export const isLabelsWidget = (widget) => widget.type === WIDGET_TYPE_LABELS;
export const isMilestoneWidget = (widget) => widget.type === WIDGET_TYPE_MILESTONE;
export const isStartAndDueDateWidget = (widget) => widget.type === WIDGET_TYPE_START_AND_DUE_DATE;
export const isWeightWidget = (widget) => widget.type === WIDGET_TYPE_WEIGHT;
export const findHierarchyWidgets = (widgets) =>
widgets?.find((widget) => widget.type === WIDGET_TYPE_HIERARCHY);
@@ -66,15 +66,11 @@ module Projects
def integration
AlertManagement::HttpIntegrationsFinder.new(
project,
endpoint_identifier: endpoint_identifier,
endpoint_identifier: params[:endpoint_identifier],
active: true
).execute.first
end
def endpoint_identifier
params[:endpoint_identifier] || AlertManagement::HttpIntegration::LEGACY_IDENTIFIERS
end
def notification_payload
@notification_payload ||= params.permit![:notification]
end
@@ -1,43 +0,0 @@
# frozen_string_literal: true
module Projects
module Prometheus
class AlertsController < Projects::ApplicationController
respond_to :json
protect_from_forgery except: [:notify]
skip_before_action :project, only: [:notify]
prepend_before_action :repository, :project_without_auth, only: [:notify]
before_action :authorize_read_prometheus_alerts!, except: [:notify]
feature_category :incident_management
urgency :low
def notify
token = extract_alert_manager_token(request)
result = notify_service.execute(token)
head result.http_status
end
private
def notify_service
Projects::Prometheus::Alerts::NotifyService
.new(project, params.permit!)
end
def extract_alert_manager_token(request)
Doorkeeper::OAuth::Token.from_bearer_authorization(request)
end
def project_without_auth
@project ||= Project
.find_by_full_path("#{params[:namespace_id]}/#{params[:project_id]}")
end
end
end
end
@@ -9,6 +9,9 @@ module Mutations
argument :link_type, ::Types::WorkItems::RelatedLinkTypeEnum,
required: false, description: 'Type of link. Defaults to `RELATED`.'
argument :work_items_ids, [::Types::GlobalIDType[::WorkItem]],
required: true,
description: "Global IDs of the items to link. Maximum number of IDs you can provide: #{MAX_WORK_ITEMS}."
private
@@ -10,9 +10,6 @@ module Mutations
argument :id, ::Types::GlobalIDType[::WorkItem],
required: true, description: 'Global ID of the work item.'
argument :work_items_ids, [::Types::GlobalIDType[::WorkItem]],
required: true,
description: "Global IDs of the items to link. Maximum number of IDs you can provide: #{MAX_WORK_ITEMS}."
field :work_item, Types::WorkItemType,
null: true, description: 'Updated work item.'
@@ -26,7 +23,7 @@ module Mutations
if args[:work_items_ids].size > MAX_WORK_ITEMS
raise Gitlab::Graphql::Errors::ArgumentError,
format(
_('No more than %{max_work_items} work items can be linked at the same time.'),
_('No more than %{max_work_items} work items can be modified at the same time.'),
max_work_items: MAX_WORK_ITEMS
)
end
@@ -50,7 +47,7 @@ module Mutations
private
def update_links(work_item, params)
raise NotImplementedError
raise NotImplementedError, "#{self.class} does not implement #{__method__}"
end
end
end
@@ -0,0 +1,28 @@
# frozen_string_literal: true
module Mutations
module WorkItems
module LinkedItems
class Remove < Base
graphql_name 'WorkItemRemoveLinkedItems'
description 'Remove items linked to the work item.'
argument :work_items_ids, [::Types::GlobalIDType[::WorkItem]],
required: true,
description: "Global IDs of the items to unlink. Maximum number of IDs you can provide: #{MAX_WORK_ITEMS}."
private
def update_links(work_item, params)
gids = params.delete(:work_items_ids)
raise Gitlab::Graphql::Errors::ArgumentError, "workItemsIds cannot be empty" if gids.empty?
work_item_ids = gids.filter_map { |gid| gid.model_id.to_i }
::WorkItems::RelatedWorkItemLinks::DestroyService
.new(work_item, current_user, { item_ids: work_item_ids })
.execute
end
end
end
end
end
@@ -181,6 +181,7 @@ module Types
mount_mutation Mutations::WorkItems::Export, alpha: { milestone: '15.10' }
mount_mutation Mutations::WorkItems::Convert, alpha: { milestone: '15.11' }
mount_mutation Mutations::WorkItems::LinkedItems::Add, alpha: { milestone: '16.3' }
mount_mutation Mutations::WorkItems::LinkedItems::Remove, alpha: { milestone: '16.3' }
mount_mutation Mutations::SavedReplies::Create
mount_mutation Mutations::SavedReplies::Update
mount_mutation Mutations::Pages::MarkOnboardingComplete
@@ -11,4 +11,10 @@ module WorkItemsHelper
report_abuse_path: add_category_abuse_reports_path
}
end
def work_items_list_data(group)
{
full_path: group.full_path
}
end
end
@@ -1039,6 +1039,13 @@ module Ci
end
end
def time_in_queue_seconds
return if queued_at.nil?
(::Time.current - queued_at).seconds.to_i
end
strong_memoize_attr :time_in_queue_seconds
protected
def run_status_commit_hooks!
@@ -16,6 +16,7 @@ module LinkableItem
scope :for_source, ->(item) { where(source_id: item.id) }
scope :for_target, ->(item) { where(target_id: item.id) }
scope :for_source_and_target, ->(source, target) { where(source: source, target: target) }
scope :for_items, ->(source, target) do
where(source: source, target: target).or(where(source: target, target: source))
end
@@ -81,6 +81,8 @@ class Project < ApplicationRecord
MAX_SUGGESTIONS_TEMPLATE_LENGTH = 255
MAX_COMMIT_TEMPLATE_LENGTH = 500
INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET = 5
DEFAULT_MERGE_COMMIT_TEMPLATE = <<~MSG.rstrip.freeze
Merge branch '%{source_branch}' into '%{target_branch}'
@@ -3270,6 +3272,13 @@ class Project < ApplicationRecord
errors.add(:path, s_('Project|already in use'))
end
def instance_runner_running_jobs_count
# excluding currently started job
::Ci::RunningBuild.instance_type.where(project_id: self.id)
.limit(INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET + 1).count - 1
end
strong_memoize_attr :instance_runner_running_jobs_count
private
# overridden in EE
@@ -61,6 +61,16 @@ module Ci
end
# rubocop: enable CodeReuse/ActiveRecord
def project_jobs_running_on_instance_runners_count
# if not instance runner we don't care about that value and present `+Inf` as a placeholder for Prometheus
return '+Inf' unless runner.instance_type?
return project.instance_runner_running_jobs_count.to_s if
project.instance_runner_running_jobs_count < Project::INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET
"#{Project::INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET}+"
end
private
def create_archive(artifacts)
@@ -10,7 +10,7 @@ module Ci
TEMPORARY_LOCK_TIMEOUT = 3.seconds
Result = Struct.new(:build, :build_json, :valid?)
Result = Struct.new(:build, :build_json, :build_presented, :valid?)
##
# The queue depth limit number has been determined by observing 95
@@ -43,7 +43,7 @@ module Ci
if !db_all_caught_up && !result.build
metrics.increment_queue_operation(:queue_replication_lag)
::Ci::RegisterJobService::Result.new(nil, nil, false) # rubocop:disable Cop/AvoidReturnFromBlocks
::Ci::RegisterJobService::Result.new(nil, nil, nil, false) # rubocop:disable Cop/AvoidReturnFromBlocks
else
result
end
@@ -86,7 +86,7 @@ module Ci
next unless result
if result.valid?
@metrics.register_success(result.build)
@metrics.register_success(result.build_presented)
@metrics.observe_queue_depth(:found, depth)
return result # rubocop:disable Cop/AvoidReturnFromBlocks
@@ -102,7 +102,7 @@ module Ci
@metrics.observe_queue_depth(:not_found, depth) if valid
@metrics.register_failure
Result.new(nil, nil, valid)
Result.new(nil, nil, nil, valid)
end
# rubocop: disable CodeReuse/ActiveRecord
@@ -159,7 +159,7 @@ module Ci
# this operation.
#
if ::Ci::UpdateBuildQueueService.new.remove!(build)
return Result.new(nil, nil, false)
return Result.new(nil, nil, nil, false)
end
return
@@ -190,11 +190,11 @@ module Ci
# to make sure that this is properly handled by runner.
@metrics.increment_queue_operation(:build_conflict_lock)
Result.new(nil, nil, false)
Result.new(nil, nil, nil, false)
rescue StateMachines::InvalidTransition
@metrics.increment_queue_operation(:build_conflict_transition)
Result.new(nil, nil, false)
Result.new(nil, nil, nil, false)
rescue StandardError => ex
@metrics.increment_queue_operation(:build_conflict_exception)
@@ -221,7 +221,7 @@ module Ci
log_build_dependencies_size(presented_build)
build_json = Gitlab::Json.dump(::API::Entities::Ci::JobRequest::Response.new(presented_build))
Result.new(build, build_json, true)
Result.new(build, build_json, presented_build, true)
end
def log_build_dependencies_size(presented_build)
@@ -0,0 +1,85 @@
# frozen_string_literal: true
module WorkItems
module RelatedWorkItemLinks
class DestroyService < BaseService
def initialize(work_item, user, params)
@work_item = work_item
@current_user = user
@params = params.dup
@failed_ids = []
@removed_ids = []
end
def execute
return error(_('No work item found.'), 403) unless can?(current_user, :admin_work_item_link, work_item)
return error(_('No work item IDs provided.'), 409) if params[:item_ids].empty?
destroy_links_for(params[:item_ids])
if removed_ids.any?
success(message: response_message, items_removed: removed_ids, items_with_errors: failed_ids.flatten)
else
error(error_message)
end
end
private
attr_reader :work_item, :current_user, :failed_ids, :removed_ids
def destroy_links_for(item_ids)
destroy_links(source: work_item, target: item_ids, direction: :target)
destroy_links(source: item_ids, target: work_item, direction: :source)
end
def destroy_links(source:, target:, direction:)
WorkItems::RelatedWorkItemLink.for_source_and_target(source, target).each do |link|
linked_item = link.try(direction)
if can?(current_user, :admin_work_item_link, linked_item)
link.destroy!
removed_ids << linked_item.id
create_notes(link)
else
failed_ids << linked_item.id
end
end
end
def create_notes(link)
SystemNoteService.unrelate_issuable(link.source, link.target, current_user)
SystemNoteService.unrelate_issuable(link.target, link.source, current_user)
end
def error_message
not_linked = params[:item_ids] - (removed_ids + failed_ids)
error_messages = []
if failed_ids.any?
error_messages << format(
_('%{item_ids} could not be removed due to insufficient permissions'), item_ids: failed_ids.to_sentence
)
end
if not_linked.any?
error_messages << format(
_('%{item_ids} could not be removed due to not being linked'), item_ids: not_linked.to_sentence
)
end
return '' unless error_messages.any?
format(_('IDs with errors: %{error_messages}.'), error_messages: error_messages.join(', '))
end
def response_message
success_message = format(_('Successfully unlinked IDs: %{item_ids}.'), item_ids: removed_ids.to_sentence)
return success_message unless error_message.present?
"#{success_message} #{error_message}"
end
end
end
end
@@ -1,4 +1,4 @@
- page_title s_('WorkItem|Work items')
- add_page_specific_style 'page_bundles/issuable_list'
.js-work-items-list-root{ data: { full_path: @group.full_path } }
.js-work-items-list-root{ data: work_items_list_data(@group) }
@@ -1,6 +1,6 @@
.gl-md-ml-3.dropdown.gl-dropdown{ class: "gl-display-none! gl-md-display-flex!" }
#js-check-out-modal{ data: how_merge_modal_data(@merge_request) }
= button_tag type: 'button', class: "btn dropdown-toggle btn-confirm gl-button gl-dropdown-toggle", data: { toggle: 'dropdown', qa_selector: 'mr_code_dropdown' } do
= button_tag type: 'button', class: "btn dropdown-toggle btn-confirm gl-button gl-dropdown-toggle", data: { toggle: 'dropdown', testid: 'mr-code-dropdown' } do
%span.gl-dropdown-button-text= _('Code')
= sprite_icon "chevron-down", size: 16, css_class: "dropdown-icon gl-icon gl-ml-2 gl-mr-0!"
.dropdown-menu.dropdown-menu-right
@@ -16,7 +16,7 @@
= _('Check out branch')
- if current_user
%li.gl-dropdown-item
= link_to ide_merge_request_path(@merge_request), class: 'dropdown-item', target: '_blank', data: { qa_selector: 'open_in_web_ide_button' } do
= link_to ide_merge_request_path(@merge_request), class: 'dropdown-item', target: '_blank', data: { testid: 'open-in-web-ide-button' } do
.gl-dropdown-item-text-wrapper
= _('Open in Web IDE')
- if Gitlab::CurrentSettings.gitpod_enabled && current_user&.gitpod_enabled
@@ -30,10 +30,10 @@
%header.dropdown-header
= _('Download')
%li.gl-dropdown-item
= link_to merge_request_path(@merge_request, format: :patch), class: 'dropdown-item', download: '', data: { qa_selector: 'download_email_patches_menu_item' } do
= link_to merge_request_path(@merge_request, format: :patch), class: 'dropdown-item', download: '', data: { testid: 'download-email-patches-menu-item' } do
.gl-dropdown-item-text-wrapper
= _('Patches')
%li.gl-dropdown-item
= link_to merge_request_path(@merge_request, format: :diff), class: 'dropdown-item', download: '', data: { qa_selector: 'download_plain_diff_menu_item' } do
= link_to merge_request_path(@merge_request, format: :diff), class: 'dropdown-item', download: '', data: { testid: 'download-plain-diff-menu-item' } do
.gl-dropdown-item-text-wrapper
= _('Plain diff')
@@ -28,12 +28,12 @@
.merge-request-tabs-holder{ class: "#{'js-tabs-affix' unless ENV['RAILS_ENV'] == 'test'} #{'gl-static' if moved_mr_sidebar_enabled?}" }
.merge-request-tabs-container.gl-display-flex.gl-justify-content-space-between{ class: "#{'is-merge-request' if Feature.enabled?(:moved_mr_sidebar, @project) && !fluid_layout}" }
%ul.merge-request-tabs.nav-tabs.nav.nav-links.gl-display-flex.gl-flex-nowrap.gl-m-0.gl-p-0{ class: "#{'gl-w-full gl-lg-w-auto!' if Feature.enabled?(:moved_mr_sidebar, @project)}" }
= render "projects/merge_requests/tabs/tab", class: "notes-tab", qa_selector: "notes_tab" do
= render "projects/merge_requests/tabs/tab", class: "notes-tab", testid: "notes-tab" do
= tab_link_for @merge_request, :show, force_link: @commit.present? do
= _("Overview")
= gl_badge_tag @merge_request.related_notes.user.count, { size: :sm }, { class: 'js-discussions-count' }
- if @merge_request.source_project
= render "projects/merge_requests/tabs/tab", name: "commits", class: "commits-tab", qa_selector: "commits_tab" do
= render "projects/merge_requests/tabs/tab", name: "commits", class: "commits-tab", testid: "commits-tab" do
= tab_link_for @merge_request, :commits do
= _("Commits")
= gl_badge_tag tab_count_display(@merge_request, @commits_count), { size: :sm }, { class: 'js-commits-count' }
@@ -42,7 +42,7 @@
= tab_link_for @merge_request, :pipelines do
= _("Pipelines")
= gl_badge_tag @number_of_pipelines, { size: :sm }, { class: 'js-pipelines-mr-count' }
= render "projects/merge_requests/tabs/tab", name: "diffs", class: "diffs-tab js-diffs-tab", id: "diffs-tab", qa_selector: "diffs_tab" do
= render "projects/merge_requests/tabs/tab", name: "diffs", class: "diffs-tab js-diffs-tab", id: "diffs-tab", testid: "diffs-tab" do
= tab_link_for @merge_request, :diffs do
= _("Changes")
= gl_badge_tag tab_count_display(@merge_request, @diffs_count), { size: :sm }
@@ -31,4 +31,4 @@
= form_errors(@merge_request)
.row
.col-12
= f.submit _('Compare branches and continue'), data: { qa_selector: 'compare_branches_button' }, pajamas_button: true
= f.submit _('Compare branches and continue'), data: { testid: 'compare-branches-button' }, pajamas_button: true
@@ -50,7 +50,7 @@
= _("Pipelines")
= gl_badge_tag @pipelines.size, { size: :sm }, { class: 'gl-tab-counter-badge' }
%li.diffs-tab
= link_to url_for(safe_params.merge(action: 'diffs')), data: {target: 'div#diffs', action: 'diffs', toggle: 'tabvue', qa_selector: 'diffs_tab'} do
= link_to url_for(safe_params.merge(action: 'diffs')), data: {target: 'div#diffs', action: 'diffs', toggle: 'tabvue', testid: 'diffs-tab'} do
= _("Changes")
= gl_badge_tag @merge_request.diff_size, { size: :sm }, { class: 'gl-tab-counter-badge' }
@@ -1,8 +1,8 @@
- tab_name = local_assigns.fetch(:name, nil)
- tab_class = local_assigns.fetch(:class, nil)
- qa_selector = local_assigns.fetch(:qa_selector, nil)
- testid = local_assigns.fetch(:testid, nil)
- id = local_assigns.fetch(:id, nil)
- attrs = { class: [tab_class, ("active" if params[:tab] == tab_name)], data: { qa_selector: qa_selector } }
- attrs = { class: [tab_class, ("active" if params[:tab] == tab_name)], data: { testid: testid } }
- attrs[:id] = id if id.present?
%li{ attrs }
@@ -67,7 +67,6 @@ InitializerConnections.raise_if_new_database_connection do
Gitlab.ee do
resource :company, only: [:new, :create], controller: 'company'
resources :groups, only: [:new, :create]
draw :verification
end
end
@@ -525,20 +525,20 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
end
namespace :prometheus do
resources :alerts, constraints: { id: /\d+/ }, only: [] do # rubocop: disable Cop/PutProjectRoutesUnderScope
post :notify, on: :collection # rubocop:todo Cop/PutProjectRoutesUnderScope
member do
get :metrics_dashboard # rubocop:todo Cop/PutProjectRoutesUnderScope
end
end
resources :metrics, constraints: { id: %r{[^\/]+} }, only: [:index, :new, :create, :edit, :update, :destroy] do # rubocop: disable Cop/PutProjectRoutesUnderScope
get :active_common, on: :collection # rubocop:todo Cop/PutProjectRoutesUnderScope
post :validate_query, on: :collection # rubocop:todo Cop/PutProjectRoutesUnderScope
end
end
post 'alerts/notify', to: 'alerting/notifications#create' # rubocop:todo Cop/PutProjectRoutesUnderScope
scope :prometheus, as: :prometheus do
resources :alerts, constraints: { id: /\d+/ }, only: [] do # rubocop: disable Cop/PutProjectRoutesUnderScope
post :notify, on: :collection, to: 'alerting/notifications#create', defaults: { endpoint_identifier: 'legacy-prometheus' } # rubocop: disable Cop/PutProjectRoutesUnderScope
get :metrics_dashboard, on: :member # rubocop:todo Cop/PutProjectRoutesUnderScope
end
end
post 'alerts/notify', to: 'alerting/notifications#create', defaults: { endpoint_identifier: 'legacy' } # rubocop:todo Cop/PutProjectRoutesUnderScope
post 'alerts/notify/:name/:endpoint_identifier', # rubocop:todo Cop/PutProjectRoutesUnderScope
to: 'alerting/notifications#create',
as: :alert_http_integration,
@@ -0,0 +1,24 @@
# frozen_string_literal: true
class EnsureDumNoteIdBigintBackfillIsFinishedForSelfManaged < Gitlab::Database::Migration[2.1]
include Gitlab::Database::MigrationHelpers::ConvertToBigint
restrict_gitlab_migration gitlab_schema: :gitlab_main
disable_ddl_transaction!
def up
return if com_or_dev_or_test_but_not_jh?
ensure_batched_background_migration_is_finished(
job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
table_name: 'design_user_mentions',
column_name: 'id',
job_arguments: [['note_id'], ['note_id_convert_to_bigint']]
)
end
def down
# no-op
end
end
@@ -0,0 +1,62 @@
# frozen_string_literal: true
class SwapDesignUserMentionsNoteIdToBigIntForSelfManaged < Gitlab::Database::Migration[2.1]
include Gitlab::Database::MigrationHelpers::ConvertToBigint
disable_ddl_transaction!
TABLE_NAME = 'design_user_mentions'
def up
return if com_or_dev_or_test_but_not_jh?
return if temp_column_removed?(TABLE_NAME, :note_id)
return if columns_swapped?(TABLE_NAME, :note_id)
swap
end
def down
return if com_or_dev_or_test_but_not_jh?
return if temp_column_removed?(TABLE_NAME, :note_id)
return unless columns_swapped?(TABLE_NAME, :note_id)
swap
end
private
def swap
# This will replace the existing index_design_user_mentions_on_note_id
add_concurrent_index TABLE_NAME, :note_id_convert_to_bigint, unique: true,
name: 'index_design_user_mentions_on_note_id_convert_to_bigint'
# This will replace the existing fk_rails_8de8c6d632
add_concurrent_foreign_key TABLE_NAME, :notes, column: :note_id_convert_to_bigint,
name: 'fk_design_user_mentions_note_id_convert_to_bigint',
on_delete: :cascade
with_lock_retries(raise_on_exhaustion: true) do
execute "LOCK TABLE notes, #{TABLE_NAME} IN ACCESS EXCLUSIVE MODE"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN note_id TO note_id_tmp"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN note_id_convert_to_bigint TO note_id"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN note_id_tmp TO note_id_convert_to_bigint"
function_name = Gitlab::Database::UnidirectionalCopyTrigger
.on_table(TABLE_NAME, connection: connection)
.name(:note_id, :note_id_convert_to_bigint)
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
# Swap defaults
change_column_default TABLE_NAME, :note_id, nil
change_column_default TABLE_NAME, :note_id_convert_to_bigint, 0
execute 'DROP INDEX IF EXISTS index_design_user_mentions_on_note_id'
rename_index TABLE_NAME, 'index_design_user_mentions_on_note_id_convert_to_bigint',
'index_design_user_mentions_on_note_id'
execute "ALTER TABLE #{TABLE_NAME} DROP CONSTRAINT IF EXISTS fk_rails_8de8c6d632"
rename_constraint(TABLE_NAME, 'fk_design_user_mentions_note_id_convert_to_bigint', 'fk_rails_8de8c6d632')
end
end
end
@@ -0,0 +1 @@
29d6dd16a743370a09774a2ec1887ab1a82c69c3d8a41e1b4ad3f632c6d7c006

@@ -0,0 +1 @@
10e1a3a85c6ce4fa2fca10c1b95b8ba2775c1abe98360f27638878dda282d6d0
@@ -3750,6 +3750,7 @@ Input type: `GoogleCloudLoggingConfigurationCreateInput`
| <a id="mutationgooglecloudloggingconfigurationcreategoogleprojectidname"></a>`googleProjectIdName` | [`String!`](#string) | Unique identifier of the Google Cloud project to which the logging configuration belongs. |
| <a id="mutationgooglecloudloggingconfigurationcreategrouppath"></a>`groupPath` | [`ID!`](#id) | Group path. |
| <a id="mutationgooglecloudloggingconfigurationcreatelogidname"></a>`logIdName` | [`String`](#string) | Unique identifier used to distinguish and manage different logs within the same Google Cloud project.(defaults to `audit_events`). |
| <a id="mutationgooglecloudloggingconfigurationcreatename"></a>`name` | [`String`](#string) | Destination name. |
| <a id="mutationgooglecloudloggingconfigurationcreateprivatekey"></a>`privateKey` | [`String!`](#string) | Private Key associated with the service account. This key is used to authenticate the service account and authorize it to interact with the Google Cloud Logging service. |

#### Fields

@@ -3791,6 +3792,7 @@ Input type: `GoogleCloudLoggingConfigurationUpdateInput`
| <a id="mutationgooglecloudloggingconfigurationupdategoogleprojectidname"></a>`googleProjectIdName` | [`String`](#string) | Unique identifier of the Google Cloud project to which the logging configuration belongs. |
| <a id="mutationgooglecloudloggingconfigurationupdateid"></a>`id` | [`AuditEventsGoogleCloudLoggingConfigurationID!`](#auditeventsgooglecloudloggingconfigurationid) | ID of the google Cloud configuration to update. |
| <a id="mutationgooglecloudloggingconfigurationupdatelogidname"></a>`logIdName` | [`String`](#string) | Unique identifier used to distinguish and manage different logs within the same Google Cloud project. |
| <a id="mutationgooglecloudloggingconfigurationupdatename"></a>`name` | [`String`](#string) | Destination name. |
| <a id="mutationgooglecloudloggingconfigurationupdateprivatekey"></a>`privateKey` | [`String`](#string) | Private Key associated with the service account. This key is used to authenticate the service account and authorize it to interact with the Google Cloud Logging service. |

#### Fields

@@ -7472,6 +7474,33 @@ Input type: `WorkItemExportInput`
| <a id="mutationworkitemexporterrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationworkitemexportmessage"></a>`message` | [`String`](#string) | Export request result message. |

### `Mutation.workItemRemoveLinkedItems`

Remove items linked to the work item.

WARNING:
**Introduced** in 16.3.
This feature is an Experiment. It can be changed or removed at any time.

Input type: `WorkItemRemoveLinkedItemsInput`

#### Arguments

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationworkitemremovelinkeditemsclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationworkitemremovelinkeditemsid"></a>`id` | [`WorkItemID!`](#workitemid) | Global ID of the work item. |
| <a id="mutationworkitemremovelinkeditemsworkitemsids"></a>`workItemsIds` | [`[WorkItemID!]!`](#workitemid) | Global IDs of the items to unlink. Maximum number of IDs you can provide: 3. |

#### Fields

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationworkitemremovelinkeditemsclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationworkitemremovelinkeditemserrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationworkitemremovelinkeditemsmessage"></a>`message` | [`String`](#string) | Linked items update result message. |
| <a id="mutationworkitemremovelinkeditemsworkitem"></a>`workItem` | [`WorkItem`](#workitem) | Updated work item. |
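For reference, a minimal request is sketched below. The field and argument names come from the tables above; the single `input` argument wrapper and the example global IDs are assumptions based on the usual GraphQL mutation convention, not something shown in this commit. At most three IDs can be passed per call, per the `workItemsIds` limit documented above.

```graphql
# Illustrative sketch of calling the documented mutation.
mutation {
  workItemRemoveLinkedItems(
    input: {
      id: "gid://gitlab/WorkItem/1"
      workItemsIds: ["gid://gitlab/WorkItem/2", "gid://gitlab/WorkItem/3"]
    }
  ) {
    message
    errors
    workItem {
      id
    }
  }
}
```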

### `Mutation.workItemSubscribe`

WARNING:
@@ -16583,6 +16612,7 @@ Stores Google Cloud Logging configurations associated with IAM service accounts,
| <a id="googlecloudloggingconfigurationtypegroup"></a>`group` | [`Group!`](#group) | Group the configuration belongs to. |
| <a id="googlecloudloggingconfigurationtypeid"></a>`id` | [`ID!`](#id) | ID of the configuration. |
| <a id="googlecloudloggingconfigurationtypelogidname"></a>`logIdName` | [`String!`](#string) | Log ID. |
| <a id="googlecloudloggingconfigurationtypename"></a>`name` | [`String!`](#string) | Name of the external destination to send audit events to. |
| <a id="googlecloudloggingconfigurationtypeprivatekey"></a>`privateKey` | [`String!`](#string) | Private key. |

### `GpgSignature`
@@ -302,7 +302,7 @@ One iteration describes one quarter's worth of work.
1. Iteration 8 - FY25Q4
   - TBD

## Technical Proposals
## Technical proposals

The Cells architecture has long lasting implications to data processing, location, scalability and the GitLab architecture.
This section links all different technical proposals that are being evaluated.

@@ -310,6 +310,19 @@ This section links all different technical proposals that are being evaluated.
- [Stateless Router That Uses a Cache to Pick Cell and Is Redirected When Wrong Cell Is Reached](proposal-stateless-router-with-buffering-requests.md)
- [Stateless Router That Uses a Cache to Pick Cell and pre-flight `/api/v4/cells/learn`](proposal-stateless-router-with-routes-learning.md)

## Data pipeline ingestion

The Cells architecture will have a significant impact on the current [data pipeline](https://about.gitlab.com/handbook/business-technology/data-team/platform/pipelines/SAAS-Gitlab-com/) which exports data from Postgres to Snowflake for the use of data analytics. This data pipeline fulfils many use cases (i.e. SAAS Service ping, Gainsight metrics and Reporting and Analytics of the SAAS Platform).

The current data pipeline is limited by not having the possibility to get data via a CDC mechanism (which leads to data quality issues) and works by polling the Postgres database and looking for new and updated records or fully extracting data for certain tables which causes a lot of overhead.
At the moment the data pipeline runs against two instances that get created from a snapshot of both the `main` and `ci` databases.
This is done to avoid workload on the production databases.
In the Cells architecture there will be more Postgres instances because of which the current pipeline couldn't scale to pull data from all the Postgres instances. Requirements around the data pipeline moving forward are as follows:

- We need a process that allows capturing all the CDC (insert, update and delete) from all Cells, scaling automatically with N number of Cells.
- We need to have (direct or indirect) access to database instances which allows it to do data catch up in case of major failure or root cause analysis for data anomalies.
- We need monitoring in place to alert any incident that can delay the data ingestion.

## Impacted features

The Cells architecture will impact many features requiring some of them to be rewritten, or changed significantly.
@@ -66,29 +66,46 @@ the setup and maintenance of the registry database for new and existing deploys.

For the registry, we need to develop and validate import tooling which
coordinates with the core import functionality which was used to migrate all
container images on GitLab.com. Additionally, we must validate that each supported
storage driver works as expected with the import process and provide estimated
import times for admins.
container images on GitLab.com. Additionally, we should provide estimated import
times for admins for each supported storage driver.

We can structure our work to meet the standards outlined in support for
Experiment, Beta, and Alpha features. Doing so will help to prioritize core
functionality and to allow users who wish to be early adopters to begin using
the database and providing us with invaluable feedback.
During the beta phase, we can highlight key features of our work to provide a
quick reference for what features we have now, are planning, their statuses, and
an executive summary of the overall state of the migration experience.
|
||||
This could be advertised to self-managed users via a simple chart, allowing them
|
||||
to tell at a glance the status of this project and determine if it is feature-
|
||||
complete enough for their needs and level of risk tolerance.
|
||||
|
||||
These levels of support could be advertised to self-managed users via a simple
|
||||
chart, allowing them to tell at a glance the status of this project as it relates
|
||||
to their situation.
|
||||
This should be documented in the container registry administration documentation,
|
||||
rather than in this blueprint. Providing this information there will place it in
|
||||
a familiar place for self-managed admins and will allow for logical cross-linking
|
||||
from other sections of the same document, such as from the garbage collection
|
||||
section.
|
||||
|
||||
| Installation | GCS | AWS | Filesystem | Azure | OSS | Swift|
|
||||
| ------ | ------ |------ | ------ | ------ |------ | ------ |
|
||||
| Omnibus | GA | GA | Beta | Experimental | Experimental | Experimental |
|
||||
| Charts | GA | GA |Beta | Experimental | Experimental | Experimental |
|
||||
For example:
|
||||
|
||||
### Justification of Structuring Support by Driver
|
||||
The metadata database is in early beta for self-managed users. The core migration
|
||||
process for existing registries has been implemented, and online garbage collection
|
||||
is fully implemented. Certain database-enabled features are only enabled for GitLab.com,
|
||||
and automatic database provisioning for the registry database is not available.
|
||||
Please see the table below for the status of features related to the container
|
||||
registry database.
|
||||
|
||||
It's possible that we could simplify the proposed support matrix by structuring
|
||||
it only by deployment environment and not differentiating by storage driver. The
|
||||
following two sections briefly summarize several points for and against.
|
||||
| Feature | Description | Status | Link |
|
||||
| --------------------------- | ------------------------------------------------------------------- | ------------------ | ---------------------------------------------------------------------------------------------- |
|
||||
| Import Tool | Allows existing deployments to migrate to the database. | Completed | [Import Tool](https://gitlab.com/gitlab-org/container-registry/-/issues/884) |
|
||||
| Automatic Import Validation | Tests that the import maintained data integrity of imported images. | Backlog | [Validate self-managed imports](https://gitlab.com/gitlab-org/container-registry/-/issues/938) |
|
||||
| Foo Bar | Lorem ipsum dolor sit amet. | Scheduled for 16.5 | <LINK> |
|
||||
|
||||
### Structuring Support by Driver
|
||||
|
||||
The import operation heavily relies on the object storage driver implementation
|
||||
to iterate over all registry metadata so that it can be stored in the database.
|
||||
It's possible that implementation differences in the driver will have a
|
||||
meaningful impact on the performance and reliability of the import process.
|
||||
|
||||
The following two sections briefly summarize several points for and against
|
||||
structuring support by driver.
|
||||
|
||||
#### Arguments Opposed to Structuring Support by Driver
|
||||
|
||||
|
|
|
|||
|
|
@ -41,11 +41,16 @@ The majority of the code is not properly namespaced and organized:
|
|||
In June 2023 we started extracting gems out of the main codebase, into the
|
||||
[`gems/` directory inside the monorepo](https://gitlab.com/gitlab-org/gitlab/-/blob/4c6e120069abe751d3128c05ade45ea749a033df/doc/development/gems.md).
|
||||
|
||||
This is our first step towards modularization: externalize code that can be
|
||||
extracted to prevent coupling from being introduced into modules that have been
|
||||
designed as separate components.
|
||||
This is our first step towards modularization.
|
||||
|
||||
These gems are still part of the monorepo.
|
||||
- We want to separate generic code from domain code (that powers the business logic).
|
||||
- We want to clean up the `lib/` directory by moving generic code out of it.
|
||||
- We want to isolate code that could live in a separate project, to prevent it from depending on domain code.
|
||||
|
||||
These gems are still part of the monorepo but could be extracted into dedicated repositories if needed.
|
||||
|
||||
Extraction of gems does not block modularization, but the less generic code exists in `lib/`, the
|
||||
easier it will be to identify and separate bounded contexts.
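
As a purely illustrative sketch of the split between generic and domain code (the gem and class names below are invented, not actual GitLab code), generic code that moves to `gems/` should carry no knowledge of the domain, while domain code may depend on it:

```ruby
# gems/gitlab-example_utils/lib/gitlab/example_utils.rb
# Generic: knows nothing about CI, merge requests, or any other domain.
module Gitlab
  module ExampleUtils
    def self.parse_positive_integer(value)
      number = Integer(value, exception: false)
      number if number && number.positive?
    end
  end
end

# app/services/ci/example_timeout_service.rb
# Domain: may depend on the generic gem; the gem never depends on the domain.
module Ci
  class ExampleTimeoutService
    def execute(raw_value)
      Gitlab::ExampleUtils.parse_positive_integer(raw_value)
    end
  end
end
```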
|
||||
|
||||
### 1. What makes a bounded context?
|
||||
|
||||
|
|
@ -103,17 +108,3 @@ With this static list we could:
|
|||
- Understand where to place new classes and modules.
|
||||
- Enforce that no top-level namespaces are used that are not in the list of bounded contexts.
|
||||
- Autoload non-standard Rails directories based on the given list.
|
||||
|
||||
## Glossary
|
||||
|
||||
- `modules` are Ruby modules and can be used to nest code hierarchically.
|
||||
- `namespaces` are unique hierarchies of Ruby constants. For example, `Ci::` but also `Ci::JobArtifacts::` or `Ci::Pipeline::Chain::`.
|
||||
- `packages` are Packwerk packages to group together related functionalities. These packages can be big or small depending on the design and architecture. Inside a package all constants (classes and modules) have the same namespace. For example:
|
||||
- In a package `ci`, all the classes would be nested under the `Ci::` namespace. There can also be nested namespaces like `Ci::PipelineProcessing::`.
|
||||
- In a package `ci-pipeline_creation` all classes are nested under `Ci::PipelineCreation`, like `Ci::PipelineCreation::Chain::Command`.
|
||||
- In a package `ci` a class named `MergeRequests::UpdateHeadPipelineService` would not be allowed because it would not match the package's namespace.
|
||||
- This can be enforced easily with [Packwerk's based Rubocop Cops](https://github.com/rubyatscale/rubocop-packs/blob/main/lib/rubocop/cop/packs/root_namespace_is_pack_name.rb).
|
||||
- `bounded context` is a top-level Packwerk package that represents a macro aspect of the domain. For example: `Ci::`, `MergeRequests::`, `Packages::`, etc.
|
||||
- A bounded context is represented by a single Ruby module/namespace. For example, `Ci::` and not `Ci::JobArtifacts::`.
|
||||
- A bounded context can be made of 1 or multiple Packwerk packages. Nested packages would be recommended if the domain is quite complex and we want to enforce privacy among all the implementation details. For example: `Ci::PipelineProcessing::` and `Ci::PipelineCreation::` could be separate packages of the same bounded context and expose their public API while keeping implementation details private.
|
||||
- A new bounded context like `RemoteDevelopment::` can be represented by a single package, while large and complex bounded contexts like `Ci::` would need to be organized into smaller/nested packages.
|
||||
|
|
|
|||
|
|
@ -25,12 +25,22 @@ Use [Packwerk](https://github.com/Shopify/packwerk) to enforce privacy and depen
|
|||
|
||||
## Details
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
u([User]) -- interacts directly with --> AA[Application Adapter: WebUI, REST, GraphQL, git, ...]
|
||||
AA --uses abstractions from--> D[Application Domain]
|
||||
AA -- depends on --> Platform
|
||||
D -- depends on --> Platform[Platform: gems, configs, framework, ...]
|
||||
```
|
||||
|
||||
### Application domain
|
||||
|
||||
The application core (functional domains) is divided into separate top-level bounded contexts called after the
|
||||
[feature category](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/categories.yml) they represent.
|
||||
The application core (functional domains) is composed of all the code that describes the business logic, policies and data
|
||||
that is unique to the GitLab product.
|
||||
It is divided into separate top-level [bounded contexts](../bounded_contexts.md).
|
||||
A bounded context is represented in the form of a Ruby module.
|
||||
This follows the existing [guideline on naming namespaces](../../../../development/software_design.md#use-namespaces-to-define-bounded-contexts) but puts more structure to it.
|
||||
This follows the existing [guideline on naming namespaces](../../../../development/software_design.md#use-namespaces-to-define-bounded-contexts)
|
||||
but puts more structure to it.
|
||||
|
||||
Modules should:
|
||||
|
||||
|
|
@ -52,6 +62,12 @@ If a feature category is only relevant in the context of a parent feature catego
|
|||
parent's bounded context. For example: build artifacts exist in the context of the Continuous Integration feature category,
|
||||
and the two may be merged under a single bounded context.
|
||||
|
||||
The application domain has no knowledge of outer layers like the application adapters and only depends on the
|
||||
platform code. This makes the domain code the SSoT of the business logic, reusable and testable regardless of
|
||||
whether the request came from the WebUI or the REST API.
|
||||
|
||||
If a dependency between an outer layer and an inner layer is required (domain code depending on the interface of an adapter), this can be solved using inversion of control techniques, especially dependency injection.
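
As a minimal illustration of that technique (all class names here are invented for the example, not actual GitLab code), the domain object receives its collaborator from the outside instead of referencing an adapter constant directly:

```ruby
# Domain code: depends only on the interface of whatever notifier is injected.
module ExampleDomain
  class ApproveService
    def initialize(notifier:)
      @notifier = notifier
    end

    def execute(record)
      # ... business logic ...
      @notifier.call(record) # no reference to any adapter constant
    end
  end
end

# Adapter code: decides which concrete notifier to inject.
service = ExampleDomain::ApproveService.new(notifier: ->(record) { puts "approved #{record}" })
service.execute("example-record")
```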
|
||||
|
||||
### Application adapters
|
||||
|
||||
|
||||
|
|
@ -67,9 +83,14 @@ Application adapters would be:
|
|||
- Web UI (Rails controllers, view, JS and Vue client)
|
||||
- REST API endpoints
|
||||
- GraphQL Endpoints
|
||||
- Action Cable
|
||||
|
||||
TODO: continue describing how adapters are organized and why they are separate from the domain code.
|
||||
They are responsible for the interaction with the user. Each adapter should interpret the request, parse parameters
|
||||
and invoke the right abstraction from the application domain, then present the result back to the user.
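
A hedged sketch of that flow (controller, service, and response shape are hypothetical, not actual GitLab code) for a Rails controller acting as an adapter:

```ruby
# Hypothetical adapter: a Rails controller that delegates to the domain.
class ExampleIssuesController < ApplicationController
  def create
    # 1. Interpret the request and parse parameters at the boundary.
    issue_params = params.require(:issue).permit(:title, :description)

    # 2. Invoke the right abstraction from the application domain.
    result = ExampleDomain::CreateIssueService.new(current_user, issue_params).execute

    # 3. Present the result back to the user.
    if result.success?
      render json: { id: result.payload }, status: :created
    else
      render json: { errors: result.errors }, status: :unprocessable_entity
    end
  end
end
```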
|
||||
|
||||
Presentation logic, and possibly authentication, would be specific to the adapters layer.
|
||||
|
||||
The application adapters layer depends on the platform code to run: the Rails framework, the gems that power the adapter,
|
||||
the configurations and utilities.
|
||||
|
||||
### Platform code
|
||||
|
||||
|
|
@ -95,19 +116,76 @@ This means that aside from the Rails framework code, the rest of the platform co
|
|||
Eventually all code inside `gems/` could potentially be extracted in a separate repository or open sourced.
|
||||
Placing platform code inside `gems/` makes it clear that its purpose is to serve the application code.
|
||||
|
||||
### Why Packwerk?
|
||||
### Enforcing boundaries
|
||||
|
||||
TODO:
|
||||
Ruby does not have the concept of privacy of constants in a given module. Unlike other programming languages, even extracting
|
||||
well documented gems doesn't prevent other developers from coupling code to implementation details because all constants
|
||||
are public in Ruby.
|
||||
|
||||
- boundaries not enforced at runtime. Ruby code will still work as being all loaded in the same memory space.
|
||||
- can be introduced incrementally. Not everything requires to be moved to packs for the Rails autoloader to work.
|
||||
We can have a codebase perfectly organized in a hexagonal architecture but still have the application domain, the biggest
|
||||
part of the codebase, be a non-modularized [big ball of mud](https://en.wikipedia.org/wiki/Big_ball_of_mud).
|
||||
|
||||
Enforcing boundaries is also vital to maintaining the structure long term. We don't want to slowly fall back into a
|
||||
big ball of mud again by violating the boundaries after a big modularization effort.
|
||||
|
||||
We explored the idea of [using Packwerk in a proof of concept](../proof_of_concepts.md#use-packwerk-to-enforce-module-boundaries)
|
||||
to enforce module boundaries.
|
||||
|
||||
[Packwerk](https://github.com/Shopify/packwerk) is a static analyzer that allows us to gradually introduce packages in the
|
||||
codebase and enforce privacy and explicit dependencies. Packwerk can detect if some Ruby code is using private implementation
|
||||
details of another package or if it's using a package that wasn't declared explicitly as a dependency.
|
||||
|
||||
Being a static analyzer, it does not affect code execution, meaning that introducing Packwerk is safe and can be done
|
||||
gradually.
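
To make the kind of violation concrete (package layout and constant names below are invented, not actual GitLab packages), Packwerk flags code in one package that reaches into another package's private implementation details:

```ruby
# Hypothetical file in a `merge_requests` package.
module MergeRequests
  class ExampleService
    def execute
      # `Ci::InternalPipelineBuilder` is assumed to be a private constant of a
      # separate `ci` package. `packwerk check` would report a privacy (and, if
      # `ci` is not a declared dependency, a dependency) violation here, while
      # the code itself would still run unchanged.
      Ci::InternalPipelineBuilder.new.build
    end
  end
end
```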
|
||||
|
||||
Companies like Gusto have been developing and maintaining a list of [development and engineering tools](https://github.com/rubyatscale)
|
||||
for organizations that want to move to using a Rails modular monolith around Packwerk.
|
||||
|
||||
### EE and JH extensions
|
||||
|
||||
TODO:
|
||||
One of the unique challenges of modularizing the GitLab codebase is the presence of EE extensions (managed by GitLab)
|
||||
and JH extensions (managed by JiHu).
|
||||
|
||||
By moving related domain code (e.g. `Ci::`) under the same bounded context and Packwerk package, we would also need to
|
||||
move the `ee/` extensions into it.
|
||||
|
||||
For top-level bounded contexts to also match Packwerk packages, all code related to a specific domain
|
||||
needs to be placed under the same package directory, including EE extensions.
|
||||
|
||||
The following is just an example of a possible directory structure:
|
||||
|
||||
```shell
|
||||
domains
|
||||
├── ci
|
||||
│ ├── package.yml # package definition.
|
||||
│ ├── packwerk.yml # tool configurations for this package.
|
||||
│ ├── package_todo.yml # existing violations.
|
||||
│ ├── core # Core features available in Community Edition and always autoloaded.
|
||||
│ │ ├── app
|
||||
│ │ │ ├── models/...
|
||||
│ │ │ ├── services/...
|
||||
│ │ │ └── lib/... # domain-specific `lib` moved inside `app` together with other classes.
|
||||
│ │ └── spec
|
||||
│ │ └── models/...
|
||||
│ ├── ee # EE extensions specific to the bounded context, conditionally autoloaded.
|
||||
│ │ ├── models/...
|
||||
│ │ └── spec
|
||||
│ │ └── models/...
|
||||
│ └── public # Public constants are placed here so they can be referenced by other packages.
|
||||
│ ├── core
|
||||
│ │ ├── app
|
||||
│ │ │ └── models/...
|
||||
│ │ └── spec
|
||||
│ │ └── models/...
|
||||
│ └── ee
|
||||
│ ├── app
|
||||
│ │ └── models/...
|
||||
│ └── spec
|
||||
│ └── models/...
|
||||
├── merge_requests/
|
||||
├── repositories/
|
||||
└── ...
|
||||
```
|
||||
|
||||
## Challenges
|
||||
|
||||
|
|
|
|||
|
|
@ -93,12 +93,11 @@ There are many aspects and details required to make modularization of our
|
|||
monolith successful. We will work on the aspects listed below, refine them, and
|
||||
add more important details as we move forward towards the goal:
|
||||
|
||||
1. [Deliver modularization proof-of-concepts that will deliver key insights](proof_of_concepts.md)
|
||||
1. [Align modularization plans to the organizational structure](bounded_contexts.md)
|
||||
1. [Deliver modularization proof-of-concepts that will deliver key insights](proof_of_concepts.md).
|
||||
1. Align modularization plans to the organizational structure by [defining bounded contexts](bounded_contexts.md).
|
||||
1. Separate domains into modules that will reflect organizational structure (TODO)
|
||||
1. Start a training program for team members on how to work with decoupled domains (TODO)
|
||||
1. Build tools that will make it easier to build decoupled domains through inversion of control (TODO)
|
||||
1. Separate domains into modules that will reflect organizational structure (TODO)
|
||||
1. Build necessary services to align frontend and backend modularization (TODO)
|
||||
1. [Introduce hexagonal architecture within the monolith](hexagonal_monolith/index.md)
|
||||
1. Introduce clean architecture with one-way-dependencies and host application (TODO)
|
||||
1. Build abstractions that will make it possible to run and deploy domains separately (TODO)
|
||||
|
|
@ -107,6 +106,20 @@ add more important details as we move forward towards the goal:
|
|||
|
||||
In progress.
|
||||
|
||||
## Glossary
|
||||
|
||||
- `modules` are Ruby modules and can be used to nest code hierarchically.
|
||||
- `namespaces` are unique hierarchies of Ruby constants. For example, `Ci::` but also `Ci::JobArtifacts::` or `Ci::Pipeline::Chain::`.
|
||||
- `packages` are Packwerk packages to group together related functionalities. These packages can be big or small depending on the design and architecture. Inside a package all constants (classes and modules) have the same namespace. For example:
|
||||
- In a package `ci`, all the classes would be nested under the `Ci::` namespace. There can also be nested namespaces like `Ci::PipelineProcessing::`.
|
||||
- In a package `ci-pipeline_creation` all classes are nested under `Ci::PipelineCreation`, like `Ci::PipelineCreation::Chain::Command`.
|
||||
- In a package `ci` a class named `MergeRequests::UpdateHeadPipelineService` would not be allowed because it would not match the package's namespace.
|
||||
- This can be enforced easily with [Packwerk's based Rubocop Cops](https://github.com/rubyatscale/rubocop-packs/blob/main/lib/rubocop/cop/packs/root_namespace_is_pack_name.rb).
|
||||
- `bounded context` is a top-level Packwerk package that represents a macro aspect of the domain. For example: `Ci::`, `MergeRequests::`, `Packages::`, etc.
|
||||
- A bounded context is represented by a single Ruby module/namespace. For example, `Ci::` and not `Ci::JobArtifacts::`.
|
||||
- A bounded context can be made of 1 or multiple Packwerk packages. Nested packages would be recommended if the domain is quite complex and we want to enforce privacy among all the implementation details. For example: `Ci::PipelineProcessing::` and `Ci::PipelineCreation::` could be separate packages of the same bounded context and expose their public API while keeping implementation details private.
|
||||
- A new bounded context like `RemoteDevelopment::` can be represented by a single package, while large and complex bounded contexts like `Ci::` would need to be organized into smaller/nested packages. See the sketch after this list for how package names map to namespaces.
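
A minimal sketch of the namespace rule above (the package layout and class names are illustrative only):

```ruby
# Inside a hypothetical `ci` package, every constant lives under Ci::.
module Ci
  module PipelineCreation
    class ExampleStep
      def perform
        # ...
      end
    end
  end
end

# Defining MergeRequests::ExampleService inside the same `ci` package would
# break the rule and could be flagged by the RuboCop cop linked above.
```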
|
||||
|
||||
## References
|
||||
|
||||
[List of references](references.md)
|
||||
|
|
|
|||
|
|
@ -7,12 +7,12 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
# Use Sigstore for keyless signing and verification **(FREE SAAS)**
|
||||
|
||||
The [Sigstore](https://www.sigstore.dev/) project provides a CLI called
|
||||
[Cosign](https://docs.sigstore.dev/cosign/overview/) which can be used for keyless signing of container images built
|
||||
[Cosign](https://docs.sigstore.dev/signing/quickstart/) which can be used for keyless signing of container images built
|
||||
with GitLab CI/CD. Keyless signing has many advantages, including eliminating the need to manage, safeguard, and rotate a private
|
||||
key. Cosign requests a short-lived key pair to use for signing, records it on a certificate transparency log, and
|
||||
then discards it. The key is generated through a token obtained from the GitLab server using the OIDC identity of the user who
|
||||
ran the pipeline. This token includes unique claims that certify the token was generated by a CI/CD pipeline. To learn more,
|
||||
see Cosign [documentation](https://docs.sigstore.dev/cosign/overview/#example-working-with-containers) on keyless signatures.
|
||||
see Cosign [documentation](https://docs.sigstore.dev/signing/quickstart/#example-working-with-containers) on keyless signatures.
|
||||
|
||||
For details on the mapping between GitLab OIDC claims and Fulcio certificate extensions, see the GitLab column of
|
||||
[Mapping OIDC token claims to Fulcio OIDs](https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#mapping-oidc-token-claims-to-fulcio-oids).
|
||||
|
|
@ -36,11 +36,11 @@ You can use Cosign to sign and verify container images and build artifacts.
|
|||
- When signing container images, sign the digest (which is immutable) instead of the tag.
|
||||
|
||||
GitLab [ID tokens](../secrets/id_token_authentication.md#id-tokens) can be used by Cosign for
|
||||
[keyless signing](https://docs.sigstore.dev/cosign/overview/). The token must have
|
||||
[keyless signing](https://docs.sigstore.dev/signing/quickstart/). The token must have
|
||||
`sigstore` set as the [`aud`](../secrets/id_token_authentication.md#token-payload) claim. The token can be used by Cosign automatically when it is set in the
|
||||
`SIGSTORE_ID_TOKEN` environment variable.
|
||||
|
||||
To learn more about how to install Cosign, see [Cosign Installation documentation](https://docs.sigstore.dev/cosign/installation/).
|
||||
To learn more about how to install Cosign, see [Cosign Installation documentation](https://docs.sigstore.dev/system_config/installation/).
|
||||
|
||||
### Signing
|
||||
|
||||
|
|
@ -49,7 +49,7 @@ To learn more about how to install Cosign, see [Cosign Installation documentatio
|
|||
The example below demonstrates how to sign a container image in GitLab CI. The signature is automatically stored in the
|
||||
same container repository as the image.
|
||||
|
||||
To learn more about signing containers, see [Cosign Signing Containers documentation](https://docs.sigstore.dev/cosign/signing_with_containers/).
|
||||
To learn more about signing containers, see [Cosign Signing Containers documentation](https://docs.sigstore.dev/signing/signing_with_containers/).
|
||||
|
||||
```yaml
|
||||
build_and_sign_image:
|
||||
|
|
@ -77,7 +77,7 @@ build_and_sign_image:
|
|||
The example below demonstrates how to sign a build artifact in GitLab CI. You should save the `cosign.bundle` file
|
||||
produced by `cosign sign-blob`, which is used for signature verification.
|
||||
|
||||
To learn more about signing artifacts, see [Cosign Signing Blobs documentation](https://docs.sigstore.dev/cosign/signing_with_blobs/#keyless-signing-of-blobs-and-files).
|
||||
To learn more about signing artifacts, see [Cosign Signing Blobs documentation](https://docs.sigstore.dev/signing/signing_with_blobs/).
|
||||
|
||||
```yaml
|
||||
build_and_sign_artifact:
|
||||
|
|
@ -109,7 +109,7 @@ build_and_sign_artifact:
|
|||
| `--certificate-oidc-issuer` | The GitLab instance URL where the image/artifact was signed. For example, `https://gitlab.com`. |
|
||||
| `--bundle` | The `bundle` file produced by `cosign sign-blob`. Only used for verifying build artifacts. |
|
||||
|
||||
To learn more about verifying signed images/artifacts, see [Cosign Verifying documentation](https://docs.sigstore.dev/cosign/verify/#keyless-verification-using-openid-connect).
|
||||
To learn more about verifying signed images/artifacts, see [Cosign Verifying documentation](https://docs.sigstore.dev/verifying/verify/).
|
||||
|
||||
#### Container images
|
||||
|
||||
|
|
@ -149,7 +149,7 @@ You can use Sigstore and npm, together with GitLab CI/CD, to digitally sign buil
|
|||
|
||||
### About npm provenance
|
||||
|
||||
[npm CLI](https://docs.npmjs.com/cli) allows package maintainers to provide users with provenance attestations. Using npm
|
||||
[npm CLI](https://docs.npmjs.com/cli/) allows package maintainers to provide users with provenance attestations. Using npm
|
||||
CLI provenance generation allows users to trust and verify that the package they are downloading and using is from you and the
|
||||
build system that built it.
|
||||
|
||||
|
|
|
|||
|
|
@ -51,7 +51,7 @@ The default timeout for Jest is set in
|
|||
If your test exceeds that time, it fails.
|
||||
|
||||
If you cannot improve the performance of the tests, you can increase the timeout
|
||||
for the whole suite using [`jest.setTimeout`](https://jestjs.io/docs/28.x/jest-object#jestsettimeouttimeout)
|
||||
for the whole suite using [`jest.setTimeout`](https://jestjs.io/docs/next/jest-object#jestsettimeouttimeout)
|
||||
|
||||
```javascript
|
||||
jest.setTimeout(500);
|
||||
|
|
@ -63,7 +63,7 @@ describe('Component', () => {
|
|||
});
|
||||
```
|
||||
|
||||
or for a specific test by providing a third argument to [`it`](https://jestjs.io/docs/28.x/api#testname-fn-timeout)
|
||||
or for a specific test by providing a third argument to [`it`](https://jestjs.io/docs/next/api#testname-fn-timeout)
|
||||
|
||||
```javascript
|
||||
describe('Component', () => {
|
||||
|
|
|
|||
|
|
@ -301,7 +301,7 @@ Then you can use `yarn add` to install your packages.
|
|||
## Related topics
|
||||
|
||||
- [npm documentation](../npm_registry/index.md#helpful-hints)
|
||||
- [Yarn Migration Guide](https://yarnpkg.com/getting-started/migration)
|
||||
- [Yarn Migration Guide](https://yarnpkg.com/migration/guide/)
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
|
|
|
|||
|
|
@ -67,6 +67,44 @@ To search in a project:
|
|||
|
||||
The results are displayed. To filter the results, on the left sidebar, select a filter.
|
||||
|
||||
## Search for a project by full path
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108906) in GitLab 15.9 [with a flag](../../administration/feature_flags.md) named `full_path_project_search`. Disabled by default.
|
||||
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/114932) in GitLab 15.11. Feature flag `full_path_project_search` removed.
|
||||
|
||||
You can search for a project by entering its full path (including the namespace it belongs to) in the search box.
|
||||
As you type the project path, [autocomplete suggestions](#autocomplete-suggestions) are displayed.
|
||||
|
||||
For example:
|
||||
|
||||
- `gitlab-org/gitlab` searches for the `gitlab` project in the `gitlab-org` namespace.
|
||||
- `gitlab-org/` displays autocomplete suggestions for projects that belong to the `gitlab-org` namespace.
|
||||
|
||||
## Include archived projects in search results
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/121981) in GitLab 16.1 [with a flag](../../administration/feature_flags.md) named `search_projects_hide_archived`. Disabled by default.
|
||||
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/413821) in GitLab 16.3. Feature flag `search_projects_hide_archived` removed.
|
||||
|
||||
By default, archived projects are excluded from search results.
|
||||
To include archived projects:
|
||||
|
||||
1. On the project search page, on the left sidebar, select the **Include archived** checkbox.
|
||||
1. On the left sidebar, select **Apply**.
|
||||
|
||||
## Exclude issues in archived projects from search results
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/124846) in GitLab 16.2 [with a flag](../../administration/feature_flags.md) named `search_issues_hide_archived_projects`. Disabled by default.
|
||||
|
||||
FLAG:
|
||||
On self-managed GitLab, by default this feature is not available. To make it available,
|
||||
an administrator can [enable the feature flag](../../administration/feature_flags.md) named `search_issues_hide_archived_projects`. On GitLab.com, this feature is not available.
|
||||
|
||||
By default, issues in archived projects are included in search results.
|
||||
To exclude issues in archived projects, ensure the `search_issues_hide_archived_projects` flag is enabled.
|
||||
|
||||
To include issues in archived projects with `search_issues_hide_archived_projects` enabled,
|
||||
you must add the parameter `include_archived=true` to the URL.
|
||||
|
||||
## Search for code
|
||||
|
||||
To search for code in a project:
|
||||
|
|
@ -97,42 +135,6 @@ To filter code search results by one or more languages:
|
|||
1. On the code search page, on the left sidebar, select one or more languages.
|
||||
1. On the left sidebar, select **Apply**.
|
||||
|
||||
## Include archived projects in search results
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/121981) in GitLab 16.1 [with a flag](../../administration/feature_flags.md) named `search_projects_hide_archived`. Disabled by default.
|
||||
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/413821) in GitLab 16.3. Feature flag `search_projects_hide_archived` removed.
|
||||
|
||||
By default, archived projects are excluded from search results.
|
||||
To include archived projects in search results:
|
||||
|
||||
1. On the project search page, on the left sidebar, select the **Include archived** checkbox.
|
||||
1. On the left sidebar, select **Apply**.
|
||||
|
||||
## Exclude issues in archived projects from search results
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/124846) in GitLab 16.2 [with a flag](../../administration/feature_flags.md) named `search_issues_hide_archived_projects`. Disabled by default.
|
||||
|
||||
FLAG:
|
||||
On self-managed GitLab, by default this feature is not available. To make it available,
|
||||
an administrator can [enable the feature flag](../../administration/feature_flags.md) named `search_issues_hide_archived_projects`. On GitLab.com, this feature is not available.
|
||||
|
||||
Issues in archived projects are included in search results by default. To exclude issues in archived projects, ensure the `search_issues_hide_archived_projects` flag is enabled.
|
||||
|
||||
To include issues in archived projects with `search_issues_hide_archived_projects` enabled, you must add the parameter `include_archived=true` to the URL.
|
||||
|
||||
## Search for a project by full path
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108906) in GitLab 15.9 [with a flag](../../administration/feature_flags.md) named `full_path_project_search`. Disabled by default.
|
||||
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/114932) in GitLab 15.11. Feature flag `full_path_project_search` removed.
|
||||
|
||||
You can search for a project by entering its full path (including the namespace it belongs to) in the search box.
|
||||
As you type the project path, [autocomplete suggestions](#autocomplete-suggestions) are displayed.
|
||||
|
||||
For example:
|
||||
|
||||
- `gitlab-org/gitlab` searches for the `gitlab` project in the `gitlab-org` namespace.
|
||||
- `gitlab-org/` displays autocomplete suggestions for projects that belong to the `gitlab-org` namespace.
|
||||
|
||||
## Search for a commit SHA
|
||||
|
||||
To search for a commit SHA:
|
||||
|
|
|
|||
|
|
@ -7,6 +7,8 @@ module API
|
|||
class JobInfo < Grape::Entity
|
||||
expose :id, :name, :stage
|
||||
expose :project_id, :project_name
|
||||
expose :time_in_queue_seconds
|
||||
expose :project_jobs_running_on_instance_runners_count
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -14,7 +14,6 @@ module Gitlab
|
|||
|
||||
METRICS_SHARD_TAG_PREFIX = 'metrics_shard::'
|
||||
DEFAULT_METRICS_SHARD = 'default'
|
||||
JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET = 5
|
||||
|
||||
OPERATION_COUNTERS = [
|
||||
:build_can_pick,
|
||||
|
|
@ -57,7 +56,7 @@ module Gitlab
|
|||
|
||||
def register_success(job)
|
||||
labels = { shared_runner: runner.instance_type?,
|
||||
jobs_running_for_project: jobs_running_for_project(job),
|
||||
jobs_running_for_project: job.project_jobs_running_on_instance_runners_count,
|
||||
shard: DEFAULT_METRICS_SHARD }
|
||||
|
||||
if runner.instance_type?
|
||||
|
|
@ -65,7 +64,7 @@ module Gitlab
|
|||
labels[:shard] = shard.gsub(METRICS_SHARD_TAG_PREFIX, '') if shard
|
||||
end
|
||||
|
||||
self.class.job_queue_duration_seconds.observe(labels, Time.current - job.queued_at) unless job.queued_at.nil?
|
||||
self.class.job_queue_duration_seconds.observe(labels, job.time_in_queue_seconds) unless job.queued_at.nil?
|
||||
self.class.attempt_counter.increment
|
||||
end
|
||||
|
||||
|
|
@ -231,28 +230,6 @@ module Gitlab
|
|||
Gitlab::Metrics.histogram(name, comment, labels, buckets)
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
def jobs_running_for_project(job)
|
||||
return '+Inf' unless runner.instance_type?
|
||||
|
||||
# excluding currently started job
|
||||
running_jobs_count = running_jobs_relation(job)
|
||||
.limit(JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET + 1).count - 1
|
||||
|
||||
if running_jobs_count < JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET
|
||||
running_jobs_count
|
||||
else
|
||||
"#{JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET}+"
|
||||
end
|
||||
end
|
||||
|
||||
def running_jobs_relation(job)
|
||||
::Ci::RunningBuild.instance_type.where(project_id: job.project_id)
|
||||
end
|
||||
# rubocop: enable CodeReuse/ActiveRecord
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,22 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Usage
|
||||
module TimeSeriesStorable
|
||||
# requires a #redis_key(event, date) method to be defined
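# Illustrative usage (the includer below is hypothetical, not actual GitLab code):
#
#   module ExampleCounter
#     extend Gitlab::Usage::TimeSeriesStorable
#
#     def self.redis_key(event, date)
#       apply_time_aggregation("{example}_#{event[:name]}", date)
#     end
#   end
#
#   ExampleCounter.keys_for_aggregation(events: events, start_date: 4.weeks.ago, end_date: Date.current)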
|
||||
def keys_for_aggregation(events:, start_date:, end_date:)
|
||||
# we always keep 1 week of margin
|
||||
# .end_of_week is necessary to make sure this works for 1 week long periods too
|
||||
end_date = end_date.end_of_week - 1.week
|
||||
(start_date.to_date..end_date.to_date).flat_map do |date|
|
||||
events.map { |event| redis_key(event, date) }
|
||||
end.uniq
|
||||
end
|
||||
|
||||
def apply_time_aggregation(key, time)
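# %G-%V is the ISO 8601 week-based year and week number, for example "2023-37".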
|
||||
year_week = time.strftime('%G-%V')
|
||||
"#{key}-#{year_week}"
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -18,6 +18,7 @@ module Gitlab
|
|||
class << self
|
||||
include Gitlab::Utils::UsageData
|
||||
include Gitlab::Usage::TimeFrame
|
||||
include Gitlab::Usage::TimeSeriesStorable
|
||||
|
||||
# Track unique events
|
||||
#
|
||||
|
|
@ -78,13 +79,6 @@ module Gitlab
|
|||
redis_usage_data { Gitlab::Redis::HLL.count(keys: keys) }
|
||||
end
|
||||
|
||||
def keys_for_aggregation(events:, start_date:, end_date:)
|
||||
end_date = end_date.end_of_week - 1.week
|
||||
(start_date.to_date..end_date.to_date).map do |date|
|
||||
events.map { |event| redis_key(event, date) }
|
||||
end.flatten.uniq
|
||||
end
|
||||
|
||||
def load_events
|
||||
events = Gitlab::Usage::MetricDefinition.all.map do |d|
|
||||
next unless d.available?
|
||||
|
|
@ -109,7 +103,6 @@ module Gitlab
|
|||
known_events.select { |event| event_names.include?(event[:name]) }
|
||||
end
|
||||
|
||||
# Compose the key in order to store events daily or weekly
|
||||
def redis_key(event, time)
|
||||
raise UnknownEvent, "Unknown event #{event[:name]}" unless known_events_names.include?(event[:name].to_s)
|
||||
|
||||
|
|
|
|||
|
|
@ -796,6 +796,12 @@ msgstr ""
|
|||
msgid "%{italic_start}What's new%{italic_end} is inactive and cannot be viewed."
|
||||
msgstr ""
|
||||
|
||||
msgid "%{item_ids} could not be removed due to insufficient permissions"
|
||||
msgstr ""
|
||||
|
||||
msgid "%{item_ids} could not be removed due to not being linked"
|
||||
msgstr ""
|
||||
|
||||
msgid "%{itemsCount} issues with a limit of %{maxIssueCount}"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -23603,6 +23609,9 @@ msgstr ""
|
|||
msgid "IDE|This option is disabled because you don't have write permissions for the current branch."
|
||||
msgstr ""
|
||||
|
||||
msgid "IDs with errors: %{error_messages}."
|
||||
msgstr ""
|
||||
|
||||
msgid "IMPORTANT: Use this setting only for VERY strict auditing purposes. When turned on, nobody will be able to remove the label from any merge requests after they are merged. In addition, nobody will be able to turn off this setting or delete this label."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -31419,7 +31428,7 @@ msgstr ""
|
|||
msgid "No more than %{max_issues} issues can be updated at the same time"
|
||||
msgstr ""
|
||||
|
||||
msgid "No more than %{max_work_items} work items can be linked at the same time."
|
||||
msgid "No more than %{max_work_items} work items can be modified at the same time."
|
||||
msgstr ""
|
||||
|
||||
msgid "No other labels with such name or description"
|
||||
|
|
@ -31536,6 +31545,12 @@ msgstr ""
|
|||
msgid "No webhooks enabled. Select trigger events above."
|
||||
msgstr ""
|
||||
|
||||
msgid "No work item IDs provided."
|
||||
msgstr ""
|
||||
|
||||
msgid "No work item found."
|
||||
msgstr ""
|
||||
|
||||
msgid "No worries, you can still use all the %{strong}%{plan_name}%{strong_close} features for now. You have %{remaining_days} day to renew your subscription."
|
||||
msgid_plural "No worries, you can still use all the %{strong}%{plan_name}%{strong_close} features for now. You have %{remaining_days} days to renew your subscription."
|
||||
msgstr[0] ""
|
||||
|
|
@ -38854,36 +38869,6 @@ msgstr ""
|
|||
msgid "RegistrationFeatures|use this feature"
|
||||
msgstr ""
|
||||
|
||||
msgid "RegistrationVerification|Are you sure you want to skip this step?"
|
||||
msgstr ""
|
||||
|
||||
msgid "RegistrationVerification|Enable free compute minutes"
|
||||
msgstr ""
|
||||
|
||||
msgid "RegistrationVerification|GitLab will not charge your card, it will only be used for validation."
|
||||
msgstr ""
|
||||
|
||||
msgid "RegistrationVerification|Pipelines using shared GitLab runners will fail until you validate your account."
|
||||
msgstr ""
|
||||
|
||||
msgid "RegistrationVerification|Skip this for now"
|
||||
msgstr ""
|
||||
|
||||
msgid "RegistrationVerification|To keep GitLab spam and abuse free we ask that you verify your identity with a valid payment method, such as a debit or credit card. Until then, you can't use free compute minutes to build your application."
|
||||
msgstr ""
|
||||
|
||||
msgid "RegistrationVerification|Validate account"
|
||||
msgstr ""
|
||||
|
||||
msgid "RegistrationVerification|Verify your identity"
|
||||
msgstr ""
|
||||
|
||||
msgid "RegistrationVerification|Yes, I'd like to skip"
|
||||
msgstr ""
|
||||
|
||||
msgid "RegistrationVerification|You can alway verify your account at a later time."
|
||||
msgstr ""
|
||||
|
||||
msgid "Registry entry enqueued to be resynced"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -45956,6 +45941,9 @@ msgstr ""
|
|||
msgid "Successfully unblocked"
|
||||
msgstr ""
|
||||
|
||||
msgid "Successfully unlinked IDs: %{item_ids}."
|
||||
msgstr ""
|
||||
|
||||
msgid "Successfully unlocked"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -52212,16 +52200,19 @@ msgstr ""
|
|||
msgid "VulnerabilityExport|Vulnerability"
|
||||
msgstr ""
|
||||
|
||||
msgid "VulnerabilityManagement|%{statusStart}Confirmed%{statusEnd} %{timeago} by %{user}"
|
||||
msgid "VulnerabilityManagement|%{statusStart}Confirmed%{statusEnd} · %{timeago} by %{user}"
|
||||
msgstr ""
|
||||
|
||||
msgid "VulnerabilityManagement|%{statusStart}Detected%{statusEnd} %{timeago} in pipeline %{pipelineLink}"
|
||||
msgid "VulnerabilityManagement|%{statusStart}Detected%{statusEnd} · %{timeago} in pipeline %{pipelineLink}"
|
||||
msgstr ""
|
||||
|
||||
msgid "VulnerabilityManagement|%{statusStart}Dismissed%{statusEnd} %{timeago} by %{user}"
|
||||
msgid "VulnerabilityManagement|%{statusStart}Dismissed%{statusEnd} · %{timeago} by %{user}"
|
||||
msgstr ""
|
||||
|
||||
msgid "VulnerabilityManagement|%{statusStart}Resolved%{statusEnd} %{timeago} by %{user}"
|
||||
msgid "VulnerabilityManagement|%{statusStart}Dismissed%{statusEnd}: %{dismissalReason} · %{timeago} by %{user}"
|
||||
msgstr ""
|
||||
|
||||
msgid "VulnerabilityManagement|%{statusStart}Resolved%{statusEnd} · %{timeago} by %{user}"
|
||||
msgstr ""
|
||||
|
||||
msgid "VulnerabilityManagement|(optional) Include the solution to the vulnerability if available."
|
||||
|
|
@ -54803,9 +54794,6 @@ msgstr ""
|
|||
msgid "Your authorized applications"
|
||||
msgstr ""
|
||||
|
||||
msgid "Your browser does not support iFrames"
|
||||
msgstr ""
|
||||
|
||||
msgid "Your browser doesn't support WebAuthn. Please use a supported browser, e.g. Chrome (67+) or Firefox (60+)."
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,7 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module QA
|
||||
FactoryBot.define do
|
||||
factory :file, class: 'QA::Resource::File'
|
||||
end
|
||||
end
|
||||
|
|
@ -11,15 +11,15 @@ module QA
|
|||
end
|
||||
|
||||
view 'app/views/projects/merge_requests/creations/_new_compare.html.haml' do
|
||||
element :compare_branches_button
|
||||
element 'compare-branches-button'
|
||||
end
|
||||
|
||||
view 'app/assets/javascripts/merge_requests/components/compare_dropdown.vue' do
|
||||
element :source_branch_dropdown, ':data-qa-selector="qaSelector"' # rubocop:disable QA/ElementWithPattern
|
||||
end
|
||||
|
||||
view 'app/views/projects/merge_requests/_page.html.haml' do
|
||||
element :diffs_tab
|
||||
view 'app/views/projects/merge_requests/creations/_new_submit.html.haml' do
|
||||
element 'diffs-tab'
|
||||
end
|
||||
|
||||
view 'app/assets/javascripts/diffs/components/diff_file_header.vue' do
|
||||
|
|
@ -34,7 +34,7 @@ module QA
|
|||
end
|
||||
|
||||
def click_compare_branches_and_continue
|
||||
click_element(:compare_branches_button)
|
||||
click_element('compare-branches-button')
|
||||
end
|
||||
|
||||
def create_merge_request
|
||||
|
|
@ -42,7 +42,7 @@ module QA
|
|||
end
|
||||
|
||||
def click_diffs_tab
|
||||
click_element(:diffs_tab)
|
||||
click_element('diffs-tab')
|
||||
end
|
||||
|
||||
def has_file?(file_name)
|
||||
|
|
|
|||
|
|
@ -53,10 +53,10 @@ module QA
|
|||
end
|
||||
|
||||
view 'app/views/projects/merge_requests/_code_dropdown.html.haml' do
|
||||
element :mr_code_dropdown
|
||||
element :download_email_patches_menu_item
|
||||
element :download_plain_diff_menu_item
|
||||
element :open_in_web_ide_button
|
||||
element 'mr-code-dropdown'
|
||||
element 'download-email-patches-menu-item'
|
||||
element 'download-plain-diff-menu-item'
|
||||
element 'open-in-web-ide-button'
|
||||
end
|
||||
|
||||
view 'app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline.vue' do
|
||||
|
|
@ -124,9 +124,9 @@ module QA
|
|||
end
|
||||
|
||||
view 'app/views/projects/merge_requests/_page.html.haml' do
|
||||
element :notes_tab, required: true
|
||||
element :commits_tab, required: true
|
||||
element :diffs_tab, required: true
|
||||
element 'notes-tab', required: true
|
||||
element 'commits-tab', required: true
|
||||
element 'diffs-tab', required: true
|
||||
end
|
||||
|
||||
view 'app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled.vue' do
|
||||
|
|
@ -190,18 +190,18 @@ module QA
|
|||
end
|
||||
|
||||
def click_discussions_tab
|
||||
click_element(:notes_tab)
|
||||
click_element('notes-tab')
|
||||
|
||||
wait_for_requests
|
||||
end
|
||||
|
||||
def click_commits_tab
|
||||
click_element(:commits_tab)
|
||||
click_element('commits-tab')
|
||||
end
|
||||
|
||||
def click_diffs_tab
|
||||
# Do not wait for spinner due to https://gitlab.com/gitlab-org/gitlab/-/issues/398584
|
||||
click_element(:diffs_tab, skip_finished_loading_check: true)
|
||||
click_element('diffs-tab', skip_finished_loading_check: true)
|
||||
end
|
||||
|
||||
def click_pipeline_link
|
||||
|
|
@ -395,16 +395,16 @@ module QA
|
|||
# Click by JS is needed to bypass the Moved MR actions popover
|
||||
# Change back to regular click_element when moved_mr_sidebar FF is removed
|
||||
# Rollout issue: https://gitlab.com/gitlab-org/gitlab/-/issues/385460
|
||||
click_by_javascript(find_element(:mr_code_dropdown))
|
||||
visit_link_in_element(:download_email_patches_menu_item)
|
||||
click_by_javascript(find_element('mr-code-dropdown'))
|
||||
visit_link_in_element('download-email-patches-menu-item')
|
||||
end
|
||||
|
||||
def view_plain_diff
|
||||
# Click by JS is needed to bypass the Moved MR actions popover
|
||||
# Change back to regular click_element when moved_mr_sidebar FF is removed
|
||||
# Rollout issue: https://gitlab.com/gitlab-org/gitlab/-/issues/385460
|
||||
click_by_javascript(find_element(:mr_code_dropdown))
|
||||
visit_link_in_element(:download_plain_diff_menu_item)
|
||||
click_by_javascript(find_element('mr-code-dropdown'))
|
||||
visit_link_in_element('download-plain-diff-menu-item')
|
||||
end
|
||||
|
||||
def wait_for_merge_request_error_message
|
||||
|
|
@ -417,8 +417,8 @@ module QA
|
|||
# Click by JS is needed to bypass the Moved MR actions popover
|
||||
# Change back to regular click_element when moved_mr_sidebar FF is removed
|
||||
# Rollout issue: https://gitlab.com/gitlab-org/gitlab/-/issues/385460
|
||||
click_by_javascript(find_element(:mr_code_dropdown))
|
||||
click_element(:open_in_web_ide_button)
|
||||
click_by_javascript(find_element('mr-code-dropdown'))
|
||||
click_element('open-in-web-ide-button')
|
||||
page.driver.browser.switch_to.window(page.driver.browser.window_handles.last)
|
||||
wait_for_requests
|
||||
end
|
||||
|
|
|
|||
|
|
@ -18,14 +18,10 @@ module QA
|
|||
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/367064'
|
||||
) do
|
||||
expect do
|
||||
Resource::File.fabricate_via_api! do |file|
|
||||
file.api_client = api_client
|
||||
file.project = project
|
||||
file.branch = "new_branch_#{SecureRandom.hex(8)}"
|
||||
file.commit_message = 'Add new file'
|
||||
file.name = "text-#{SecureRandom.hex(8)}.txt"
|
||||
file.content = 'New file'
|
||||
end
|
||||
create(:file,
|
||||
api_client: api_client,
|
||||
project: project,
|
||||
branch: "new_branch_#{SecureRandom.hex(8)}")
|
||||
rescue StandardError => e
|
||||
QA::Runtime::Logger.error("Full failure message: #{e.message}")
|
||||
raise
|
||||
|
|
|
|||
|
|
@ -14,14 +14,10 @@ module QA
|
|||
context 'for the same project' do
|
||||
it 'can be used to create a file via the project API', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/347858' do
|
||||
expect do
|
||||
Resource::File.fabricate_via_api! do |file|
|
||||
file.api_client = @user_api_client
|
||||
file.project = @project_access_token.project
|
||||
file.branch = "new_branch_#{SecureRandom.hex(8)}"
|
||||
file.commit_message = 'Add new file'
|
||||
file.name = "text-#{SecureRandom.hex(8)}.txt"
|
||||
file.content = 'New file'
|
||||
end
|
||||
create(:file,
|
||||
api_client: @user_api_client,
|
||||
project: @project_access_token.project,
|
||||
branch: "new_branch_#{SecureRandom.hex(8)}")
|
||||
rescue StandardError => e
|
||||
QA::Runtime::Logger.error("Full failure message: #{e.message}")
|
||||
raise
|
||||
|
|
@ -52,14 +48,10 @@ module QA
|
|||
|
||||
it 'cannot be used to create a file via the project API', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/347860' do
|
||||
expect do
|
||||
Resource::File.fabricate_via_api! do |file|
|
||||
file.api_client = @user_api_client
|
||||
file.project = @different_project
|
||||
file.branch = "new_branch_#{SecureRandom.hex(8)}"
|
||||
file.commit_message = 'Add new file'
|
||||
file.name = "text-#{SecureRandom.hex(8)}.txt"
|
||||
file.content = 'New file'
|
||||
end
|
||||
create(:file,
|
||||
api_client: @user_api_client,
|
||||
project: @different_project,
|
||||
branch: "new_branch_#{SecureRandom.hex(8)}")
|
||||
end.to raise_error(Resource::ApiFabricator::ResourceFabricationFailedError, /403 Forbidden/)
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -45,14 +45,10 @@ module QA
|
|||
it 'is not allowed to create a file via the API', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/347864' do
|
||||
QA::Support::Retrier.retry_on_exception(max_attempts: 5, sleep_interval: 2) do
|
||||
expect do
|
||||
Resource::File.fabricate_via_api! do |file|
|
||||
file.api_client = @user_api_client
|
||||
file.project = @project
|
||||
file.branch = "new_branch_#{SecureRandom.hex(8)}"
|
||||
file.commit_message = 'Add new file'
|
||||
file.name = 'test.txt'
|
||||
file.content = "New file"
|
||||
end
|
||||
create(:file,
|
||||
api_client: @user_api_client,
|
||||
project: @project,
|
||||
branch: "new_branch_#{SecureRandom.hex(8)}")
|
||||
end.to raise_error(Resource::ApiFabricator::ResourceFabricationFailedError, /403 Forbidden/)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -57,14 +57,10 @@ module QA
|
|||
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/363348'
|
||||
) do
|
||||
expect do
|
||||
Resource::File.fabricate_via_api! do |file|
|
||||
file.api_client = parent_group_user_api_client
|
||||
file.project = sub_group_project
|
||||
file.branch = "new_branch_#{SecureRandom.hex(8)}"
|
||||
file.commit_message = 'Add new file'
|
||||
file.name = 'test.txt'
|
||||
file.content = "New file"
|
||||
end
|
||||
create(:file,
|
||||
api_client: parent_group_user_api_client,
|
||||
project: sub_group_project,
|
||||
branch: "new_branch_#{SecureRandom.hex(8)}")
|
||||
end.not_to raise_error
|
||||
end
|
||||
|
||||
|
|
@ -138,14 +134,10 @@ module QA
|
|||
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/363343'
|
||||
) do
|
||||
expect do
|
||||
Resource::File.fabricate_via_api! do |file|
|
||||
file.api_client = sub_group_user_api_client
|
||||
file.project = parent_group_project
|
||||
file.branch = "new_branch_#{SecureRandom.hex(8)}"
|
||||
file.commit_message = 'Add new file'
|
||||
file.name = 'test.txt'
|
||||
file.content = "New file"
|
||||
end
|
||||
create(:file,
|
||||
api_client: sub_group_user_api_client,
|
||||
project: parent_group_project,
|
||||
branch: "new_branch_#{SecureRandom.hex(8)}")
|
||||
end.to raise_error(Resource::ApiFabricator::ResourceFabricationFailedError, /403 Forbidden/)
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
module QA
|
||||
RSpec.describe 'Create' do
|
||||
describe 'File management', product_group: :source_code do
|
||||
let(:file) { Resource::File.fabricate_via_api! }
|
||||
let(:file) { create(:file) }
|
||||
|
||||
commit_message_for_delete = 'QA Test - Delete file'
|
||||
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
module QA
|
||||
RSpec.describe 'Create', :reliable do
|
||||
describe 'File management', product_group: :source_code do
|
||||
let(:file) { Resource::File.fabricate_via_api! }
|
||||
let(:file) { create(:file) }
|
||||
|
||||
updated_file_content = 'QA Test - Updated file content'
|
||||
commit_message_for_update = 'QA Test - Update file'
|
||||
|
|
|
|||
|
|
@ -12,12 +12,10 @@ module QA
|
|||
|
||||
context 'when file name starts with a dash and contains hash, semicolon, colon, and question mark' do
|
||||
it 'renders repository file tree correctly', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/347714' do
|
||||
Resource::File.fabricate_via_api! do |file|
|
||||
file.project = project
|
||||
file.commit_message = 'Add new file'
|
||||
file.name = "test-folder/#{file_name}"
|
||||
file.content = "### Heading\n\n[Example link](https://example.com/)"
|
||||
end
|
||||
create(:file,
|
||||
project: project,
|
||||
name: "test-folder/#{file_name}",
|
||||
content: "### Heading\n\n[Example link](https://example.com/)")
|
||||
|
||||
project.visit!
|
||||
|
||||
|
|
|
|||
|
|
@ -20,14 +20,13 @@ module QA
|
|||
# add second file to repo to enable diff from initial commit
|
||||
@commit_message = 'Add second file'
|
||||
|
||||
Resource::File.fabricate_via_api! do |file|
|
||||
file.project = @project
|
||||
file.name = 'second'
|
||||
file.content = 'second file content'
|
||||
file.commit_message = @commit_message
|
||||
file.author_name = @user.name
|
||||
file.author_email = @user.public_email
|
||||
end
|
||||
create(:file,
|
||||
project: @project,
|
||||
name: 'second',
|
||||
content: 'second file content',
|
||||
commit_message: @commit_message,
|
||||
author_name: @user.name,
|
||||
author_email: @user.public_email)
|
||||
end
|
||||
|
||||
def view_commit
|
||||
|
|
|
|||
|
|
@ -75,7 +75,7 @@ module QA
|
|||
super
|
||||
end
|
||||
|
||||
# @param name [Symbol] name of the data_qa_selector element
|
||||
# @param name [Symbol, String] name of the data_qa_selector or data-testid element
|
||||
# @param page [Class] a target page class to check existence of (class must inherit from QA::Page::Base)
|
||||
# @param kwargs [Hash] keyword arguments to pass to Capybara finder
|
||||
def click_element(name, page = nil, **kwargs)
|
||||
|
|
|
|||
|
|
@ -60,6 +60,7 @@ module RuboCop
|
|||
# Returns true for any parent class of format Gitlab::Database::Migration[version] if version < current_version
|
||||
def old_version_migration_class?(class_node)
|
||||
parent_class_node = class_node.parent_class
|
||||
return false if parent_class_node.nil?
|
||||
return false unless parent_class_node.send_type? && parent_class_node.arguments.last.float_type?
|
||||
return false unless parent_class_node.children[0].const_name == GITLAB_MIGRATION_CLASS
|
||||
|
||||
|
|
|
|||
|
|
@ -1,110 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Projects::Prometheus::AlertsController, feature_category: :incident_management do
|
||||
let_it_be(:user) { create(:user) }
|
||||
let_it_be(:project) { create(:project) }
|
||||
let_it_be(:environment) { create(:environment, project: project) }
|
||||
|
||||
before do
|
||||
project.add_maintainer(user)
|
||||
sign_in(user)
|
||||
end
|
||||
|
||||
shared_examples 'unprivileged' do
|
||||
before do
|
||||
project.add_developer(user)
|
||||
end
|
||||
|
||||
it 'returns not_found' do
|
||||
make_request
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples 'project non-specific environment' do |status|
|
||||
let(:other) { create(:environment) }
|
||||
|
||||
it "returns #{status}" do
|
||||
make_request(environment_id: other)
|
||||
|
||||
expect(response).to have_gitlab_http_status(status)
|
||||
end
|
||||
|
||||
if status == :ok
|
||||
it 'returns no prometheus alerts' do
|
||||
make_request(environment_id: other)
|
||||
|
||||
expect(json_response).to be_empty
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'POST #notify' do
|
||||
let(:alert_1) { build(:alert_management_alert, :prometheus, project: project) }
|
||||
let(:alert_2) { build(:alert_management_alert, :prometheus, project: project) }
|
||||
let(:service_response) { ServiceResponse.success(http_status: :created) }
|
||||
let(:notify_service) { instance_double(Projects::Prometheus::Alerts::NotifyService, execute: service_response) }
|
||||
|
||||
before do
|
||||
sign_out(user)
|
||||
|
||||
expect(Projects::Prometheus::Alerts::NotifyService)
|
||||
.to receive(:new)
|
||||
.with(project, duck_type(:permitted?))
|
||||
.and_return(notify_service)
|
||||
end
|
||||
|
||||
it 'returns created if notification succeeds' do
|
||||
expect(notify_service).to receive(:execute).and_return(service_response)
|
||||
|
||||
post :notify, params: project_params, session: { as: :json }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
end
|
||||
|
||||
it 'returns unprocessable entity if notification fails' do
|
||||
expect(notify_service).to receive(:execute).and_return(
|
||||
ServiceResponse.error(message: 'Unprocessable Entity', http_status: :unprocessable_entity)
|
||||
)
|
||||
|
||||
post :notify, params: project_params, session: { as: :json }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:unprocessable_entity)
|
||||
end
|
||||
|
||||
context 'bearer token' do
|
||||
context 'when set' do
|
||||
it 'extracts bearer token' do
|
||||
request.headers['HTTP_AUTHORIZATION'] = 'Bearer some token'
|
||||
|
||||
expect(notify_service).to receive(:execute).with('some token')
|
||||
|
||||
post :notify, params: project_params, as: :json
|
||||
end
|
||||
|
||||
it 'pass nil if cannot extract a non-bearer token' do
|
||||
request.headers['HTTP_AUTHORIZATION'] = 'some token'
|
||||
|
||||
expect(notify_service).to receive(:execute).with(nil)
|
||||
|
||||
post :notify, params: project_params, as: :json
|
||||
end
|
||||
end
|
||||
|
||||
context 'when missing' do
|
||||
it 'passes nil' do
|
||||
expect(notify_service).to receive(:execute).with(nil)
|
||||
|
||||
post :notify, params: project_params, as: :json
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def project_params(opts = {})
|
||||
opts.reverse_merge(namespace_id: project.namespace, project_id: project)
|
||||
end
|
||||
end
|
||||
|
|
@ -60,8 +60,8 @@ describe('RunnerJobs', () => {
|
|||
|
||||
it('shows status', () => {
|
||||
createComponent();
|
||||
expect(findCellText({ field: 'status', i: 0 })).toBe(s__('Runners|Online'));
|
||||
expect(findCellText({ field: 'status', i: 1 })).toBe(s__('Runners|Online'));
|
||||
expect(findCellText({ field: 'status', i: 0 })).toContain(s__('Runners|Online'));
|
||||
expect(findCellText({ field: 'status', i: 0 })).toContain(s__('Runners|Idle'));
|
||||
});
|
||||
|
||||
it('shows version', () => {
|
||||
|
|
|
|||
|
|
@ -3,13 +3,14 @@ import { shallowMount } from '@vue/test-utils';
|
|||
import { useFakeDate } from 'helpers/fake_date';
|
||||
import { STATUS_CLOSED, STATUS_OPEN } from '~/issues/constants';
|
||||
import IssueCardTimeInfo from '~/issues/list/components/issue_card_time_info.vue';
|
||||
import { WIDGET_TYPE_MILESTONE, WIDGET_TYPE_START_AND_DUE_DATE } from '~/work_items/constants';
|
||||
|
||||
describe('CE IssueCardTimeInfo component', () => {
|
||||
useFakeDate(2020, 11, 11); // 2020 Dec 11
|
||||
|
||||
let wrapper;
|
||||
|
||||
const issue = {
|
||||
const issueObject = {
|
||||
milestone: {
|
||||
dueDate: '2020-12-17',
|
||||
startDate: '2020-12-10',
|
||||
|
|
@ -20,22 +21,41 @@ describe('CE IssueCardTimeInfo component', () => {
|
|||
humanTimeEstimate: '1w',
|
||||
};
|
||||
|
||||
const workItemObject = {
|
||||
widgets: [
|
||||
{
|
||||
type: WIDGET_TYPE_MILESTONE,
|
||||
milestone: {
|
||||
dueDate: '2020-12-17',
|
||||
startDate: '2020-12-10',
|
||||
title: 'My milestone',
|
||||
webPath: '/milestone/webPath',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: WIDGET_TYPE_START_AND_DUE_DATE,
|
||||
dueDate: '2020-12-12',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const findMilestone = () => wrapper.find('[data-testid="issuable-milestone"]');
|
||||
const findMilestoneTitle = () => findMilestone().findComponent(GlLink).attributes('title');
|
||||
const findDueDate = () => wrapper.find('[data-testid="issuable-due-date"]');
|
||||
|
||||
const mountComponent = ({
|
||||
issue = issueObject,
|
||||
state = STATUS_OPEN,
|
||||
dueDate = issue.dueDate,
|
||||
milestoneDueDate = issue.milestone.dueDate,
|
||||
milestoneStartDate = issue.milestone.startDate,
|
||||
dueDate = issueObject.dueDate,
|
||||
milestoneDueDate = issueObject.milestone.dueDate,
|
||||
milestoneStartDate = issueObject.milestone.startDate,
|
||||
} = {}) =>
|
||||
shallowMount(IssueCardTimeInfo, {
|
||||
propsData: {
|
||||
issue: {
|
||||
...issue,
|
||||
milestone: {
|
||||
...issue.milestone,
|
||||
...issueObject.milestone,
|
||||
dueDate: milestoneDueDate,
|
||||
startDate: milestoneStartDate,
|
||||
},
|
||||
|
|
@ -45,63 +65,70 @@ describe('CE IssueCardTimeInfo component', () => {
|
|||
},
|
||||
});
|
||||
|
||||
describe('milestone', () => {
|
||||
it('renders', () => {
|
||||
wrapper = mountComponent();
|
||||
|
||||
const milestone = findMilestone();
|
||||
|
||||
expect(milestone.text()).toBe(issue.milestone.title);
|
||||
expect(milestone.findComponent(GlIcon).props('name')).toBe('clock');
|
||||
expect(milestone.findComponent(GlLink).attributes('href')).toBe(issue.milestone.webPath);
|
||||
});
|
||||
|
||||
describe.each`
|
||||
time | text | milestoneDueDate | milestoneStartDate | expected
|
||||
${'due date is in past'} | ${'Past due'} | ${'2020-09-09'} | ${null} | ${'Sep 9, 2020 (Past due)'}
|
||||
${'due date is today'} | ${'Today'} | ${'2020-12-11'} | ${null} | ${'Dec 11, 2020 (Today)'}
|
||||
${'start date is in future'} | ${'Upcoming'} | ${'2021-03-01'} | ${'2021-02-01'} | ${'Mar 1, 2021 (Upcoming)'}
|
||||
${'due date is in future'} | ${'2 weeks remaining'} | ${'2020-12-25'} | ${null} | ${'Dec 25, 2020 (2 weeks remaining)'}
|
||||
`('when $time', ({ text, milestoneDueDate, milestoneStartDate, expected }) => {
|
||||
it(`renders with "${text}"`, () => {
|
||||
wrapper = mountComponent({ milestoneDueDate, milestoneStartDate });
|
||||
|
||||
expect(findMilestoneTitle()).toBe(expected);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('due date', () => {
|
||||
describe('when upcoming', () => {
|
||||
describe.each`
|
||||
type | obj
|
||||
${'issue'} | ${issueObject}
|
||||
${'work item'} | ${workItemObject}
|
||||
`('with $type object', ({ obj }) => {
|
||||
describe('milestone', () => {
|
||||
it('renders', () => {
|
||||
wrapper = mountComponent();
|
||||
wrapper = mountComponent({ issue: obj });
|
||||
|
||||
const dueDate = findDueDate();
|
||||
const milestone = findMilestone();
|
||||
|
||||
expect(dueDate.text()).toBe('Dec 12, 2020');
|
||||
expect(dueDate.attributes('title')).toBe('Due date');
|
||||
expect(dueDate.findComponent(GlIcon).props('name')).toBe('calendar');
|
||||
expect(dueDate.classes()).not.toContain('gl-text-red-500');
|
||||
expect(milestone.text()).toBe('My milestone');
|
||||
expect(milestone.findComponent(GlIcon).props('name')).toBe('clock');
|
||||
expect(milestone.findComponent(GlLink).attributes('href')).toBe('/milestone/webPath');
|
||||
});
|
||||
|
||||
describe.each`
|
||||
time | text | milestoneDueDate | milestoneStartDate | expected
|
||||
${'due date is in past'} | ${'Past due'} | ${'2020-09-09'} | ${null} | ${'Sep 9, 2020 (Past due)'}
|
||||
${'due date is today'} | ${'Today'} | ${'2020-12-11'} | ${null} | ${'Dec 11, 2020 (Today)'}
|
||||
${'start date is in future'} | ${'Upcoming'} | ${'2021-03-01'} | ${'2021-02-01'} | ${'Mar 1, 2021 (Upcoming)'}
|
||||
${'due date is in future'} | ${'2 weeks remaining'} | ${'2020-12-25'} | ${null} | ${'Dec 25, 2020 (2 weeks remaining)'}
|
||||
`('when $time', ({ text, milestoneDueDate, milestoneStartDate, expected }) => {
|
||||
it(`renders with "${text}"`, () => {
|
||||
wrapper = mountComponent({ issue: obj, milestoneDueDate, milestoneStartDate });
|
||||
|
||||
expect(findMilestoneTitle()).toBe(expected);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when in the past', () => {
|
||||
describe('when issue is open', () => {
|
||||
it('renders in red', () => {
|
||||
wrapper = mountComponent({ dueDate: '2020-10-10' });
|
||||
describe('due date', () => {
|
||||
describe('when upcoming', () => {
|
||||
it('renders', () => {
|
||||
wrapper = mountComponent({ issue: obj });
|
||||
|
||||
expect(findDueDate().classes()).toContain('gl-text-red-500');
|
||||
const dueDate = findDueDate();
|
||||
|
||||
expect(dueDate.text()).toBe('Dec 12, 2020');
|
||||
expect(dueDate.attributes('title')).toBe('Due date');
|
||||
expect(dueDate.findComponent(GlIcon).props('name')).toBe('calendar');
|
||||
expect(dueDate.classes()).not.toContain('gl-text-red-500');
|
||||
});
|
||||
});
|
||||
|
||||
describe('when issue is closed', () => {
|
||||
it('does not render in red', () => {
|
||||
wrapper = mountComponent({
|
||||
dueDate: '2020-10-10',
|
||||
state: STATUS_CLOSED,
|
||||
});
|
||||
describe('when in the past', () => {
|
||||
describe('when issue is open', () => {
|
||||
it('renders in red', () => {
|
||||
wrapper = mountComponent({ issue: obj, dueDate: '2020-10-10' });
|
||||
|
||||
expect(findDueDate().classes()).not.toContain('gl-text-red-500');
|
||||
expect(findDueDate().classes()).toContain('gl-text-red-500');
|
||||
});
|
||||
});
|
||||
|
||||
describe('when issue is closed', () => {
|
||||
it('does not render in red', () => {
|
||||
wrapper = mountComponent({
|
||||
issue: obj,
|
||||
dueDate: '2020-10-10',
|
||||
state: STATUS_CLOSED,
|
||||
});
|
||||
|
||||
expect(findDueDate().classes()).not.toContain('gl-text-red-500');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -112,7 +139,7 @@ describe('CE IssueCardTimeInfo component', () => {
|
|||
|
||||
const timeEstimate = wrapper.find('[data-testid="time-estimate"]');
|
||||
|
||||
expect(timeEstimate.text()).toBe(issue.humanTimeEstimate);
|
||||
expect(timeEstimate.text()).toBe(issueObject.humanTimeEstimate);
|
||||
expect(timeEstimate.attributes('title')).toBe('Estimate');
|
||||
expect(timeEstimate.findComponent(GlIcon).props('name')).toBe('timer');
|
||||
});
|
||||
|
|
|
|||
|
|
@ -54,12 +54,14 @@ exports[`Repository table row component renders a symlink table row 1`] = `
|
|||
<td
|
||||
class="tree-time-ago text-right cursor-default gl-text-secondary"
|
||||
>
|
||||
<timeago-tooltip-stub
|
||||
cssclass=""
|
||||
datetimeformat="DATE_WITH_TIME_FORMAT"
|
||||
time="2019-01-01"
|
||||
tooltipplacement="top"
|
||||
/>
|
||||
<gl-intersection-observer-stub>
|
||||
<timeago-tooltip-stub
|
||||
cssclass=""
|
||||
datetimeformat="DATE_WITH_TIME_FORMAT"
|
||||
time="2019-01-01"
|
||||
tooltipplacement="top"
|
||||
/>
|
||||
</gl-intersection-observer-stub>
|
||||
|
||||
<!---->
|
||||
</td>
|
||||
|
|
@ -120,12 +122,14 @@ exports[`Repository table row component renders table row 1`] = `
|
|||
<td
|
||||
class="tree-time-ago text-right cursor-default gl-text-secondary"
|
||||
>
|
||||
<timeago-tooltip-stub
|
||||
cssclass=""
|
||||
datetimeformat="DATE_WITH_TIME_FORMAT"
|
||||
time="2019-01-01"
|
||||
tooltipplacement="top"
|
||||
/>
|
||||
<gl-intersection-observer-stub>
|
||||
<timeago-tooltip-stub
|
||||
cssclass=""
|
||||
datetimeformat="DATE_WITH_TIME_FORMAT"
|
||||
time="2019-01-01"
|
||||
tooltipplacement="top"
|
||||
/>
|
||||
</gl-intersection-observer-stub>
|
||||
|
||||
<!---->
|
||||
</td>
|
||||
|
|
@ -186,12 +190,14 @@ exports[`Repository table row component renders table row for path with special
|
|||
<td
|
||||
class="tree-time-ago text-right cursor-default gl-text-secondary"
|
||||
>
|
||||
<timeago-tooltip-stub
|
||||
cssclass=""
|
||||
datetimeformat="DATE_WITH_TIME_FORMAT"
|
||||
time="2019-01-01"
|
||||
tooltipplacement="top"
|
||||
/>
|
||||
<gl-intersection-observer-stub>
|
||||
<timeago-tooltip-stub
|
||||
cssclass=""
|
||||
datetimeformat="DATE_WITH_TIME_FORMAT"
|
||||
time="2019-01-01"
|
||||
tooltipplacement="top"
|
||||
/>
|
||||
</gl-intersection-observer-stub>
|
||||
|
||||
<!---->
|
||||
</td>
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ import { TEST_HOST } from 'helpers/test_constants';
|
|||
import { joinPaths } from '~/lib/utils/url_utility';
|
||||
import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { scrollUp } from '~/lib/utils/scroll_utils';
|
||||
import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
|
||||
import { issuableListTabs } from '~/vue_shared/issuable/list/constants';
|
||||
import { TYPENAME_USER } from '~/graphql_shared/constants';
|
||||
|
|
@ -56,6 +57,7 @@ import {
|
|||
|
||||
jest.mock('@sentry/browser');
|
||||
jest.mock('~/alert');
|
||||
jest.mock('~/lib/utils/scroll_utils', () => ({ scrollUp: jest.fn() }));
|
||||
|
||||
describe('CE ServiceDeskListApp', () => {
|
||||
let wrapper;
|
||||
|
|
@ -82,6 +84,10 @@ describe('CE ServiceDeskListApp', () => {
|
|||
initialSort: CREATED_DESC,
|
||||
isIssueRepositioningDisabled: false,
|
||||
issuablesLoading: false,
|
||||
showPaginationControls: true,
|
||||
useKeysetPagination: true,
|
||||
hasPreviousPage: getServiceDeskIssuesQueryResponse.data.project.issues.pageInfo.hasPreviousPage,
|
||||
hasNextPage: getServiceDeskIssuesQueryResponse.data.project.issues.pageInfo.hasNextPage,
|
||||
};
|
||||
|
||||
let defaultQueryResponse = getServiceDeskIssuesQueryResponse;
|
||||
|
|
@ -584,6 +590,37 @@ describe('CE ServiceDeskListApp', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe.each`
|
||||
event | params
|
||||
${'next-page'} | ${{ page_after: 'endcursor', page_before: undefined, first_page_size: 20, last_page_size: undefined }}
|
||||
${'previous-page'} | ${{ page_after: undefined, page_before: 'startcursor', first_page_size: undefined, last_page_size: 20 }}
|
||||
`('when "$event" event is emitted by IssuableList', ({ event, params }) => {
|
||||
beforeEach(async () => {
|
||||
wrapper = createComponent({
|
||||
data: {
|
||||
pageInfo: {
|
||||
endCursor: 'endCursor',
|
||||
startCursor: 'startCursor',
|
||||
},
|
||||
},
|
||||
});
|
||||
await waitForPromises();
|
||||
router.push = jest.fn();
|
||||
|
||||
findIssuableList().vm.$emit(event);
|
||||
});
|
||||
|
||||
it('scrolls to the top', () => {
|
||||
expect(scrollUp).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('updates url', () => {
|
||||
expect(router.push).toHaveBeenCalledWith({
|
||||
query: expect.objectContaining(params),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when "filter" event is emitted by IssuableList', () => {
|
||||
it('updates IssuableList with url params', async () => {
|
||||
wrapper = createComponent();
|
||||
|
|
@ -598,6 +635,22 @@ describe('CE ServiceDeskListApp', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when "page-size-change" event is emitted by IssuableList', () => {
|
||||
it('updates url params with new page size', async () => {
|
||||
wrapper = createComponent();
|
||||
router.push = jest.fn();
|
||||
await waitForPromises();
|
||||
|
||||
findIssuableList().vm.$emit('page-size-change', 50);
|
||||
await nextTick();
|
||||
|
||||
expect(router.push).toHaveBeenCalledTimes(1);
|
||||
expect(router.push).toHaveBeenCalledWith({
|
||||
query: expect.objectContaining({ first_page_size: 50 }),
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Errors', () => {
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ import {
|
|||
PATH_GROUP_TITLE,
|
||||
USER_HANDLE,
|
||||
PATH_HANDLE,
|
||||
PROJECT_HANDLE,
|
||||
SEARCH_SCOPE,
|
||||
MAX_ROWS,
|
||||
} from '~/super_sidebar/components/global_search/command_palette/constants';
|
||||
|
|
@ -20,6 +21,7 @@ import {
|
|||
import { getFormattedItem } from '~/super_sidebar/components/global_search/utils';
|
||||
import axios from '~/lib/utils/axios_utils';
|
||||
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
|
||||
import { mockTracking } from 'helpers/tracking_helper';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { COMMANDS, LINKS, USERS, FILES } from './mock_data';
|
||||
|
||||
|
|
@ -32,7 +34,7 @@ describe('CommandPaletteItems', () => {
|
|||
const projectFilesPath = 'project/files/path';
|
||||
const projectBlobPath = '/blob/main';
|
||||
|
||||
const createComponent = (props) => {
|
||||
const createComponent = (props, options = {}) => {
|
||||
wrapper = shallowMount(CommandPaletteItems, {
|
||||
propsData: {
|
||||
handle: COMMAND_HANDLE,
|
||||
|
|
@ -51,6 +53,7 @@ describe('CommandPaletteItems', () => {
|
|||
projectFilesPath,
|
||||
projectBlobPath,
|
||||
},
|
||||
...options,
|
||||
});
|
||||
};
|
||||
|
||||
|
|
@ -227,4 +230,41 @@ describe('CommandPaletteItems', () => {
|
|||
expect(axios.get).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Tracking', () => {
|
||||
let trackingSpy;
|
||||
let mockAxios;
|
||||
|
||||
beforeEach(() => {
|
||||
trackingSpy = mockTracking(undefined, undefined, jest.spyOn);
|
||||
mockAxios = new MockAdapter(axios);
|
||||
createComponent({ attachTo: document.body });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockAxios.restore();
|
||||
});
|
||||
|
||||
it('tracks event immediately', () => {
|
||||
expect(trackingSpy).toHaveBeenCalledTimes(1);
|
||||
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'activate_command_palette', {
|
||||
label: 'command',
|
||||
});
|
||||
});
|
||||
|
||||
it.each`
|
||||
handle | label
|
||||
${USER_HANDLE} | ${'user'}
|
||||
${PROJECT_HANDLE} | ${'project'}
|
||||
${PATH_HANDLE} | ${'path'}
|
||||
`('tracks changing the handle to "$handle"', async ({ handle, label }) => {
|
||||
trackingSpy.mockClear();
|
||||
|
||||
await wrapper.setProps({ handle });
|
||||
expect(trackingSpy).toHaveBeenCalledTimes(1);
|
||||
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'activate_command_palette', {
|
||||
label,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -69,24 +69,41 @@ export const TRANSFORMED_LINKS = [
|
|||
icon: 'users',
|
||||
keywords: 'Manage',
|
||||
text: 'Manage',
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'item_without_id',
|
||||
'data-track-extra': '{"title":"Manage"}',
|
||||
},
|
||||
},
|
||||
{
|
||||
href: '/flightjs/Flight/activity',
|
||||
icon: 'users',
|
||||
keywords: 'Activity',
|
||||
text: 'Manage > Activity',
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'activity',
|
||||
},
|
||||
},
|
||||
{
|
||||
href: '/flightjs/Flight/-/project_members',
|
||||
icon: 'users',
|
||||
keywords: 'Members',
|
||||
text: 'Manage > Members',
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'members',
|
||||
},
|
||||
},
|
||||
{
|
||||
href: '/flightjs/Flight/-/labels',
|
||||
icon: 'users',
|
||||
keywords: 'Labels',
|
||||
text: 'Manage > Labels',
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'labels',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
|
|
|
|||
|
|
@ -26,6 +26,10 @@ describe('fileMapper', () => {
|
|||
icon: 'doc-code',
|
||||
text: file,
|
||||
href: `${projectBlobPath}/${file}`,
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'file',
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -67,10 +67,22 @@ describe('GlobalSearchDefaultPlaces', () => {
|
|||
{
|
||||
text: 'Explore',
|
||||
href: '/explore',
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-extra': '{"title":"Explore"}',
|
||||
'data-track-label': 'item_without_id',
|
||||
'data-track-property': 'nav_panel_unknown',
|
||||
},
|
||||
},
|
||||
{
|
||||
text: 'Admin area',
|
||||
href: '/admin',
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-extra': '{"title":"Admin area"}',
|
||||
'data-track-label': 'item_without_id',
|
||||
'data-track-property': 'nav_panel_unknown',
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -109,6 +109,10 @@ export const MOCK_SCOPED_SEARCH_OPTIONS_DEF = [
|
|||
scopeCategory: PROJECTS_CATEGORY,
|
||||
icon: ICON_PROJECT,
|
||||
href: MOCK_PROJECT.path,
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'scoped_in_project',
|
||||
},
|
||||
},
|
||||
{
|
||||
text: 'scoped-in-group',
|
||||
|
|
@ -116,11 +120,19 @@ export const MOCK_SCOPED_SEARCH_OPTIONS_DEF = [
|
|||
scopeCategory: GROUPS_CATEGORY,
|
||||
icon: ICON_GROUP,
|
||||
href: MOCK_GROUP.path,
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'scoped_in_group',
|
||||
},
|
||||
},
|
||||
{
|
||||
text: 'scoped-in-all',
|
||||
description: MSG_IN_ALL_GITLAB,
|
||||
href: MOCK_ALL_PATH,
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'scoped_in_all',
|
||||
},
|
||||
},
|
||||
];
|
||||
export const MOCK_SCOPED_SEARCH_OPTIONS = [
|
||||
|
|
@ -263,6 +275,10 @@ export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS = [
|
|||
avatar_size: 32,
|
||||
entity_id: 1,
|
||||
entity_name: 'MockGroup1',
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'groups',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
|
|
@ -281,6 +297,10 @@ export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS = [
|
|||
avatar_size: 32,
|
||||
entity_id: 1,
|
||||
entity_name: 'MockProject1',
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'projects',
|
||||
},
|
||||
},
|
||||
{
|
||||
category: 'Projects',
|
||||
|
|
@ -294,6 +314,10 @@ export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS = [
|
|||
avatar_size: 32,
|
||||
entity_id: 2,
|
||||
entity_name: 'MockProject2',
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'projects',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
|
|
@ -307,6 +331,10 @@ export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS = [
|
|||
href: 'help/gitlab',
|
||||
avatar_size: 16,
|
||||
entity_name: 'GitLab Help',
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'help',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
|
|
@ -325,6 +353,10 @@ export const MOCK_SORTED_AUTOCOMPLETE_OPTIONS = [
|
|||
avatar_size: 32,
|
||||
entity_id: 1,
|
||||
entity_name: 'MockGroup1',
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'groups',
|
||||
},
|
||||
},
|
||||
{
|
||||
avatar_size: 32,
|
||||
|
|
@ -338,6 +370,10 @@ export const MOCK_SORTED_AUTOCOMPLETE_OPTIONS = [
|
|||
namespace: 'Gitlab Org / MockProject1',
|
||||
text: 'MockProject1',
|
||||
value: 'MockProject1',
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'projects',
|
||||
},
|
||||
},
|
||||
{
|
||||
avatar_size: 32,
|
||||
|
|
@ -351,6 +387,10 @@ export const MOCK_SORTED_AUTOCOMPLETE_OPTIONS = [
|
|||
namespace: 'Gitlab Org / MockProject2',
|
||||
text: 'MockProject2',
|
||||
value: 'MockProject2',
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'projects',
|
||||
},
|
||||
},
|
||||
{
|
||||
avatar_size: 16,
|
||||
|
|
@ -359,6 +399,10 @@ export const MOCK_SORTED_AUTOCOMPLETE_OPTIONS = [
|
|||
label: 'GitLab Help',
|
||||
text: 'GitLab Help',
|
||||
href: 'help/gitlab',
|
||||
extraAttrs: {
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': 'help',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
|
|
|
|||
|
|
@ -13,48 +13,58 @@ import {
|
|||
|
||||
describe('getFormattedItem', () => {
|
||||
describe.each`
|
||||
item | avatarSize | searchContext | entityId | entityName
|
||||
${{ category: PROJECTS_CATEGORY, label: 'project1' }} | ${LARGE_AVATAR_PX} | ${{ project: { id: 29 } }} | ${29} | ${'project1'}
|
||||
${{ category: GROUPS_CATEGORY, label: 'project1' }} | ${LARGE_AVATAR_PX} | ${{ group: { id: 12 } }} | ${12} | ${'project1'}
|
||||
${{ category: 'Help', label: 'project1' }} | ${SMALL_AVATAR_PX} | ${null} | ${undefined} | ${'project1'}
|
||||
${{ category: 'Settings', label: 'project1' }} | ${SMALL_AVATAR_PX} | ${null} | ${undefined} | ${'project1'}
|
||||
${{ category: GROUPS_CATEGORY, value: 'group1', label: 'Group 1' }} | ${LARGE_AVATAR_PX} | ${{ group: { id: 1, name: 'test1' } }} | ${1} | ${'group1'}
|
||||
${{ category: PROJECTS_CATEGORY, value: 'group2', label: 'Group2' }} | ${LARGE_AVATAR_PX} | ${{ project: { id: 2, name: 'test2' } }} | ${2} | ${'group2'}
|
||||
${{ category: ISSUES_CATEGORY }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 3, name: 'test3' } }} | ${3} | ${'test3'}
|
||||
${{ category: MERGE_REQUEST_CATEGORY }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 4, name: 'test4' } }} | ${4} | ${'test4'}
|
||||
${{ category: RECENT_EPICS_CATEGORY }} | ${SMALL_AVATAR_PX} | ${{ group: { id: 5, name: 'test5' } }} | ${5} | ${'test5'}
|
||||
${{ category: GROUPS_CATEGORY, group_id: 6, group_name: 'test6' }} | ${LARGE_AVATAR_PX} | ${null} | ${6} | ${'test6'}
|
||||
${{ category: PROJECTS_CATEGORY, project_id: 7, project_name: 'test7' }} | ${LARGE_AVATAR_PX} | ${null} | ${7} | ${'test7'}
|
||||
${{ category: ISSUES_CATEGORY, project_id: 8, project_name: 'test8' }} | ${SMALL_AVATAR_PX} | ${null} | ${8} | ${'test8'}
|
||||
${{ category: MERGE_REQUEST_CATEGORY, project_id: 9, project_name: 'test9' }} | ${SMALL_AVATAR_PX} | ${null} | ${9} | ${'test9'}
|
||||
${{ category: RECENT_EPICS_CATEGORY, group_id: 10, group_name: 'test10' }} | ${SMALL_AVATAR_PX} | ${null} | ${10} | ${'test10'}
|
||||
${{ category: GROUPS_CATEGORY, group_id: 11, group_name: 'test11' }} | ${LARGE_AVATAR_PX} | ${{ group: { id: 1, name: 'test1' } }} | ${11} | ${'test11'}
|
||||
${{ category: PROJECTS_CATEGORY, project_id: 12, project_name: 'test12' }} | ${LARGE_AVATAR_PX} | ${{ project: { id: 2, name: 'test2' } }} | ${12} | ${'test12'}
|
||||
${{ category: ISSUES_CATEGORY, project_id: 13, project_name: 'test13' }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 3, name: 'test3' } }} | ${13} | ${'test13'}
|
||||
${{ category: MERGE_REQUEST_CATEGORY, project_id: 14, project_name: 'test14' }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 4, name: 'test4' } }} | ${14} | ${'test14'}
|
||||
${{ category: RECENT_EPICS_CATEGORY, group_id: 15, group_name: 'test15' }} | ${SMALL_AVATAR_PX} | ${{ group: { id: 5, name: 'test5' } }} | ${15} | ${'test15'}
|
||||
`('formats the item', ({ item, avatarSize, searchContext, entityId, entityName }) => {
|
||||
describe(`when item is ${JSON.stringify(item)}`, () => {
|
||||
let formattedItem;
|
||||
beforeEach(() => {
|
||||
formattedItem = getFormattedItem(item, searchContext);
|
||||
});
|
||||
item | avatarSize | searchContext | entityId | entityName | trackingLabel
|
||||
${{ category: PROJECTS_CATEGORY, label: 'project1' }} | ${LARGE_AVATAR_PX} | ${{ project: { id: 29 } }} | ${29} | ${'project1'} | ${'projects'}
|
||||
${{ category: GROUPS_CATEGORY, label: 'project1' }} | ${LARGE_AVATAR_PX} | ${{ group: { id: 12 } }} | ${12} | ${'project1'} | ${'groups'}
|
||||
${{ category: 'Help', label: 'project1' }} | ${SMALL_AVATAR_PX} | ${null} | ${undefined} | ${'project1'} | ${'help'}
|
||||
${{ category: 'Settings', label: 'project1' }} | ${SMALL_AVATAR_PX} | ${null} | ${undefined} | ${'project1'} | ${'settings'}
|
||||
${{ category: GROUPS_CATEGORY, value: 'group1', label: 'Group 1' }} | ${LARGE_AVATAR_PX} | ${{ group: { id: 1, name: 'test1' } }} | ${1} | ${'group1'} | ${'groups'}
|
||||
${{ category: PROJECTS_CATEGORY, value: 'group2', label: 'Group2' }} | ${LARGE_AVATAR_PX} | ${{ project: { id: 2, name: 'test2' } }} | ${2} | ${'group2'} | ${'projects'}
|
||||
${{ category: ISSUES_CATEGORY }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 3, name: 'test3' } }} | ${3} | ${'test3'} | ${'recent_issues'}
|
||||
${{ category: MERGE_REQUEST_CATEGORY }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 4, name: 'test4' } }} | ${4} | ${'test4'} | ${'recent_merge_requests'}
|
||||
${{ category: RECENT_EPICS_CATEGORY }} | ${SMALL_AVATAR_PX} | ${{ group: { id: 5, name: 'test5' } }} | ${5} | ${'test5'} | ${'recent_epics'}
|
||||
${{ category: GROUPS_CATEGORY, group_id: 6, group_name: 'test6' }} | ${LARGE_AVATAR_PX} | ${null} | ${6} | ${'test6'} | ${'groups'}
|
||||
${{ category: PROJECTS_CATEGORY, project_id: 7, project_name: 'test7' }} | ${LARGE_AVATAR_PX} | ${null} | ${7} | ${'test7'} | ${'projects'}
|
||||
${{ category: ISSUES_CATEGORY, project_id: 8, project_name: 'test8' }} | ${SMALL_AVATAR_PX} | ${null} | ${8} | ${'test8'} | ${'recent_issues'}
|
||||
${{ category: MERGE_REQUEST_CATEGORY, project_id: 9, project_name: 'test9' }} | ${SMALL_AVATAR_PX} | ${null} | ${9} | ${'test9'} | ${'recent_merge_requests'}
|
||||
${{ category: RECENT_EPICS_CATEGORY, group_id: 10, group_name: 'test10' }} | ${SMALL_AVATAR_PX} | ${null} | ${10} | ${'test10'} | ${'recent_epics'}
|
||||
${{ category: GROUPS_CATEGORY, group_id: 11, group_name: 'test11' }} | ${LARGE_AVATAR_PX} | ${{ group: { id: 1, name: 'test1' } }} | ${11} | ${'test11'} | ${'groups'}
|
||||
${{ category: PROJECTS_CATEGORY, project_id: 12, project_name: 'test12' }} | ${LARGE_AVATAR_PX} | ${{ project: { id: 2, name: 'test2' } }} | ${12} | ${'test12'} | ${'projects'}
|
||||
${{ category: ISSUES_CATEGORY, project_id: 13, project_name: 'test13' }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 3, name: 'test3' } }} | ${13} | ${'test13'} | ${'recent_issues'}
|
||||
${{ category: MERGE_REQUEST_CATEGORY, project_id: 14, project_name: 'test14' }} | ${SMALL_AVATAR_PX} | ${{ project: { id: 4, name: 'test4' } }} | ${14} | ${'test14'} | ${'recent_merge_requests'}
|
||||
${{ category: RECENT_EPICS_CATEGORY, group_id: 15, group_name: 'test15' }} | ${SMALL_AVATAR_PX} | ${{ group: { id: 5, name: 'test5' } }} | ${15} | ${'test15'} | ${'recent_epics'}
|
||||
`(
|
||||
'formats the item',
|
||||
({ item, avatarSize, searchContext, entityId, entityName, trackingLabel }) => {
|
||||
describe(`when item is ${JSON.stringify(item)}`, () => {
|
||||
let formattedItem;
|
||||
beforeEach(() => {
|
||||
formattedItem = getFormattedItem(item, searchContext);
|
||||
});
|
||||
|
||||
it(`should set text to ${item.value || item.label}`, () => {
|
||||
expect(formattedItem.text).toBe(item.value || item.label);
|
||||
});
|
||||
it(`should set text to ${item.value || item.label}`, () => {
|
||||
expect(formattedItem.text).toBe(item.value || item.label);
|
||||
});
|
||||
|
||||
it(`should set avatarSize to ${avatarSize}`, () => {
|
||||
expect(formattedItem.avatar_size).toBe(avatarSize);
|
||||
});
|
||||
it(`should set avatarSize to ${avatarSize}`, () => {
|
||||
expect(formattedItem.avatar_size).toBe(avatarSize);
|
||||
});
|
||||
|
||||
it(`should set avatar entityId to ${entityId}`, () => {
|
||||
expect(formattedItem.entity_id).toBe(entityId);
|
||||
});
|
||||
it(`should set avatar entityId to ${entityId}`, () => {
|
||||
expect(formattedItem.entity_id).toBe(entityId);
|
||||
});
|
||||
|
||||
it(`should set avatar entityName to ${entityName}`, () => {
|
||||
expect(formattedItem.entity_name).toBe(entityName);
|
||||
it(`should set avatar entityName to ${entityName}`, () => {
|
||||
expect(formattedItem.entity_name).toBe(entityName);
|
||||
});
|
||||
|
||||
it('should add tracking label', () => {
|
||||
expect(formattedItem.extraAttrs).toEqual({
|
||||
'data-track-action': 'click_command_palette_item',
|
||||
'data-track-label': trackingLabel,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
},
|
||||
);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1111,6 +1111,67 @@ describe('MrWidgetOptions', () => {
|
|||
registeredExtensions.extensions = [];
|
||||
});
|
||||
|
||||
describe('component name tier suffixes', () => {
|
||||
let extension;
|
||||
|
||||
beforeEach(() => {
|
||||
extension = workingExtension();
|
||||
});
|
||||
|
||||
it('reports events without a CE suffix', () => {
|
||||
extension.name = `${extension.name}CE`;
|
||||
|
||||
registerExtension(extension);
|
||||
createComponent({ mountFn: mountExtended });
|
||||
|
||||
expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
|
||||
'i_code_review_merge_request_widget_test_extension_view',
|
||||
);
|
||||
expect(api.trackRedisHllUserEvent).not.toHaveBeenCalledWith(
|
||||
'i_code_review_merge_request_widget_test_extension_c_e_view',
|
||||
);
|
||||
});
|
||||
|
||||
it('reports events without a EE suffix', () => {
|
||||
extension.name = `${extension.name}EE`;
|
||||
|
||||
registerExtension(extension);
|
||||
createComponent({ mountFn: mountExtended });
|
||||
|
||||
expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
|
||||
'i_code_review_merge_request_widget_test_extension_view',
|
||||
);
|
||||
expect(api.trackRedisHllUserEvent).not.toHaveBeenCalledWith(
|
||||
'i_code_review_merge_request_widget_test_extension_e_e_view',
|
||||
);
|
||||
});
|
||||
|
||||
it('leaves non-CE & non-EE all caps suffixes intact', () => {
|
||||
extension.name = `${extension.name}HI`;
|
||||
|
||||
registerExtension(extension);
|
||||
createComponent({ mountFn: mountExtended });
|
||||
|
||||
expect(api.trackRedisHllUserEvent).not.toHaveBeenCalledWith(
|
||||
'i_code_review_merge_request_widget_test_extension_view',
|
||||
);
|
||||
expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
|
||||
'i_code_review_merge_request_widget_test_extension_h_i_view',
|
||||
);
|
||||
});
|
||||
|
||||
it("doesn't remove CE or EE from the middle of a widget name", () => {
|
||||
extension.name = 'TestCEExtensionEETest';
|
||||
|
||||
registerExtension(extension);
|
||||
createComponent({ mountFn: mountExtended });
|
||||
|
||||
expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
|
||||
'i_code_review_merge_request_widget_test_c_e_extension_e_e_test_view',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('triggers view events when mounted', () => {
|
||||
registerExtension(workingExtension());
|
||||
createComponent({ mountFn: mountExtended });
|
||||
|
|
|
|||
|
|
@ -2,6 +2,8 @@ import * as Sentry from '@sentry/browser';
|
|||
import { shallowMount } from '@vue/test-utils';
|
||||
import Vue, { nextTick } from 'vue';
|
||||
import VueApollo from 'vue-apollo';
|
||||
import IssueCardStatistics from 'ee_else_ce/issues/list/components/issue_card_statistics.vue';
|
||||
import IssueCardTimeInfo from 'ee_else_ce/issues/list/components/issue_card_time_info.vue';
|
||||
import createMockApollo from 'helpers/mock_apollo_helper';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { STATUS_OPEN } from '~/issues/constants';
|
||||
|
|
@ -20,6 +22,8 @@ describe('WorkItemsListApp component', () => {
|
|||
const defaultQueryHandler = jest.fn().mockResolvedValue(groupWorkItemsQueryResponse);
|
||||
|
||||
const findIssuableList = () => wrapper.findComponent(IssuableList);
|
||||
const findIssueCardStatistics = () => wrapper.findComponent(IssueCardStatistics);
|
||||
const findIssueCardTimeInfo = () => wrapper.findComponent(IssueCardTimeInfo);
|
||||
|
||||
const mountComponent = ({ queryHandler = defaultQueryHandler } = {}) => {
|
||||
wrapper = shallowMount(WorkItemsListApp, {
|
||||
|
|
@ -37,6 +41,7 @@ describe('WorkItemsListApp component', () => {
|
|||
currentTab: STATUS_OPEN,
|
||||
error: '',
|
||||
issuables: [],
|
||||
issuablesLoading: true,
|
||||
namespace: 'work-items',
|
||||
recentSearchesStorageKey: 'issues',
|
||||
searchInputPlaceholder: 'Search or filter results...',
|
||||
|
|
@ -47,6 +52,18 @@ describe('WorkItemsListApp component', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('renders IssueCardStatistics component', () => {
|
||||
mountComponent();
|
||||
|
||||
expect(findIssueCardStatistics().exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('renders IssueCardTimeInfo component', () => {
|
||||
mountComponent();
|
||||
|
||||
expect(findIssueCardTimeInfo().exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('renders work items', async () => {
|
||||
mountComponent();
|
||||
await waitForPromises();
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ RSpec.describe Mutations::WorkItems::LinkedItems::Base, feature_category: :group
|
|||
it 'raises a NotImplementedError error if the update_links method is called on the base class' do
|
||||
mutation = described_class.new(context: { current_user: user }, object: nil, field: nil)
|
||||
|
||||
expect { mutation.resolve(id: work_item.to_gid) }.to raise_error(NotImplementedError)
|
||||
expect { mutation.resolve(id: work_item.to_gid) }
|
||||
.to raise_error(NotImplementedError, "#{described_class} does not implement update_links")
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -21,4 +21,18 @@ RSpec.describe WorkItemsHelper, feature_category: :team_planning do
|
|||
)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#work_items_list_data' do
|
||||
let_it_be(:group) { build(:group) }
|
||||
|
||||
subject(:work_items_list_data) { helper.work_items_list_data(group) }
|
||||
|
||||
it 'returns expected data' do
|
||||
expect(work_items_list_data).to include(
|
||||
{
|
||||
full_path: group.full_path
|
||||
}
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,40 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Usage::TimeSeriesStorable, feature_category: :service_ping do
|
||||
let(:counter_class) do
|
||||
Class.new do
|
||||
include Gitlab::Usage::TimeSeriesStorable
|
||||
|
||||
def redis_key(event, date)
|
||||
key = apply_time_aggregation(event, date)
|
||||
"#{key}:"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
let(:counter_instance) { counter_class.new }
|
||||
|
||||
describe '#apply_time_aggregation' do
|
||||
let(:key) { "key3" }
|
||||
let(:time) { Date.new(2023, 5, 1) }
|
||||
|
||||
it 'returns proper key for given time' do
|
||||
expect(counter_instance.apply_time_aggregation(key, time)).to eq("key3-2023-18")
|
||||
end
|
||||
end
|
||||
|
||||
describe '#keys_for_aggregation' do
|
||||
let(:result) { counter_instance.keys_for_aggregation(**params) }
|
||||
let(:params) { base_params }
|
||||
let(:base_params) { { events: events, start_date: start_date, end_date: end_date } }
|
||||
let(:events) { %w[event1 event2] }
|
||||
let(:start_date) { Date.new(2023, 4, 1) }
|
||||
let(:end_date) { Date.new(2023, 4, 15) }
|
||||
|
||||
it 'returns proper keys' do
|
||||
expect(result).to match_array(["event1-2023-13:", "event1-2023-14:", "event2-2023-13:", "event2-2023-14:"])
|
||||
end
|
||||
end
|
||||
end
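The expectations above are consistent with weekly keys built from the event name, the year, and the ISO week number ("key3-2023-18" is 1 May 2023, ISO week 18). A minimal sketch of that key format, assuming a standalone method rather than the module's actual implementation (which may also handle year boundaries differently):

require 'date'

# Illustrative only: builds a weekly time-series key of the form "<event>-<year>-<ISO week>".
def weekly_key(event, date)
  "#{event}-#{date.year}-#{date.cweek}"
end

weekly_key('key3', Date.new(2023, 5, 1))   # => "key3-2023-18"
weekly_key('event1', Date.new(2023, 4, 1)) # => "event1-2023-13"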
|
||||
|
|
@ -50,6 +50,11 @@ RSpec.describe SwapEpicUserMentionsNoteIdToBigintForSelfHosts, feature_category:
|
|||
connection.execute('ALTER TABLE epic_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint integer')
|
||||
end
|
||||
|
||||
after do
|
||||
connection = described_class.new.connection
|
||||
connection.execute('ALTER TABLE epic_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
|
||||
end
|
||||
|
||||
it 'does not swap the columns' do
|
||||
# rubocop: disable RSpec/AnyInstanceOf
|
||||
allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
|
||||
|
|
@ -115,6 +120,11 @@ RSpec.describe SwapEpicUserMentionsNoteIdToBigintForSelfHosts, feature_category:
|
|||
BEGIN NEW."note_id_convert_to_bigint" := NEW."note_id"; RETURN NEW; END; $$;')
|
||||
end
|
||||
|
||||
after do
|
||||
connection = described_class.new.connection
|
||||
connection.execute('ALTER TABLE epic_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
|
||||
end
|
||||
|
||||
it 'swaps the columns' do
|
||||
# rubocop: disable RSpec/AnyInstanceOf
|
||||
allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
|
||||
|
|
|
|||
|
|
@ -50,6 +50,11 @@ RSpec.describe SwapSuggestionsNoteIdToBigintForSelfHosts, feature_category: :dat
|
|||
connection.execute('ALTER TABLE suggestions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint integer')
|
||||
end
|
||||
|
||||
after do
|
||||
connection = described_class.new.connection
|
||||
connection.execute('ALTER TABLE suggestions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
|
||||
end
|
||||
|
||||
it 'does not swap the columns' do
|
||||
# rubocop: disable RSpec/AnyInstanceOf
|
||||
allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
|
||||
|
|
@ -115,6 +120,11 @@ RSpec.describe SwapSuggestionsNoteIdToBigintForSelfHosts, feature_category: :dat
|
|||
BEGIN NEW."note_id_convert_to_bigint" := NEW."note_id"; RETURN NEW; END; $$;')
|
||||
end
|
||||
|
||||
after do
|
||||
connection = described_class.new.connection
|
||||
connection.execute('ALTER TABLE suggestions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
|
||||
end
|
||||
|
||||
it 'swaps the columns' do
|
||||
# rubocop: disable RSpec/AnyInstanceOf
|
||||
allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,36 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
require_migration!
|
||||
|
||||
RSpec.describe EnsureDumNoteIdBigintBackfillIsFinishedForSelfManaged, feature_category: :database do
|
||||
describe '#up' do
|
||||
let(:migration_arguments) do
|
||||
{
|
||||
job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
|
||||
table_name: 'design_user_mentions',
|
||||
column_name: 'id',
|
||||
job_arguments: [['note_id'], ['note_id_convert_to_bigint']]
|
||||
}
|
||||
end
|
||||
|
||||
it 'ensures the migration is completed for self-managed instances' do
|
||||
expect_next_instance_of(described_class) do |instance|
|
||||
expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
|
||||
expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments)
|
||||
end
|
||||
|
||||
migrate!
|
||||
end
|
||||
|
||||
it 'skips the check for GitLab.com, dev, or test' do
|
||||
expect_next_instance_of(described_class) do |instance|
|
||||
expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
|
||||
expect(instance).not_to receive(:ensure_batched_background_migration_is_finished)
|
||||
end
|
||||
|
||||
migrate!
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,122 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
require_migration!
|
||||
|
||||
RSpec.describe SwapDesignUserMentionsNoteIdToBigIntForSelfManaged, feature_category: :database do
|
||||
let(:connection) { described_class.new.connection }
|
||||
let(:design_user_mentions) { table(:design_user_mentions) }
|
||||
|
||||
shared_examples 'column `note_id_convert_to_bigint` is already dropped' do
|
||||
before do
|
||||
connection.execute('ALTER TABLE design_user_mentions ALTER COLUMN note_id TYPE bigint')
|
||||
connection.execute('ALTER TABLE design_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
|
||||
end
|
||||
|
||||
it 'does not swap the columns' do
|
||||
disable_migrations_output do
|
||||
reversible_migration do |migration|
|
||||
migration.before -> {
|
||||
design_user_mentions.reset_column_information
|
||||
|
||||
expect(design_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
|
||||
expect(design_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
|
||||
}
|
||||
|
||||
migration.after -> {
|
||||
design_user_mentions.reset_column_information
|
||||
|
||||
expect(design_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
|
||||
expect(design_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }).to be_nil
|
||||
}
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#up' do
|
||||
before do
|
||||
# rubocop:disable RSpec/AnyInstanceOf
|
||||
allow_any_instance_of(described_class).to(
|
||||
receive(:com_or_dev_or_test_but_not_jh?).and_return(com_or_dev_or_test_but_not_jh?)
|
||||
)
|
||||
# rubocop:enable RSpec/AnyInstanceOf
|
||||
end
|
||||
|
||||
context 'when GitLab.com, dev, or test' do
|
||||
let(:com_or_dev_or_test_but_not_jh?) { true }
|
||||
|
||||
it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
|
||||
end
|
||||
|
||||
context 'when self-managed instance with the `note_id_convert_to_bigint` column already dropped' do
|
||||
let(:com_or_dev_or_test_but_not_jh?) { false }
|
||||
|
||||
it_behaves_like 'column `note_id_convert_to_bigint` is already dropped'
|
||||
end
|
||||
|
||||
context 'when self-managed instance columns already swapped' do
|
||||
let(:com_or_dev_or_test_but_not_jh?) { false }
|
||||
|
||||
before do
|
||||
connection.execute('ALTER TABLE design_user_mentions ALTER COLUMN note_id TYPE bigint')
|
||||
connection.execute(
|
||||
'ALTER TABLE design_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint integer'
|
||||
)
|
||||
|
||||
disable_migrations_output { migrate! }
|
||||
end
|
||||
|
||||
after do
|
||||
connection.execute('ALTER TABLE design_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
|
||||
end
|
||||
|
||||
it 'does not swap the columns' do
|
||||
expect(design_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
|
||||
expect(design_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
|
||||
eq('integer')
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when self-managed instance' do
|
||||
let(:com_or_dev_or_test_but_not_jh?) { false }
|
||||
|
||||
before do
|
||||
connection.execute('ALTER TABLE design_user_mentions ALTER COLUMN note_id TYPE integer')
|
||||
connection.execute('ALTER TABLE design_user_mentions ADD COLUMN IF NOT EXISTS note_id_convert_to_bigint bigint')
|
||||
connection.execute('CREATE OR REPLACE FUNCTION trigger_3dc62927cae8() RETURNS trigger LANGUAGE plpgsql AS $$
|
||||
BEGIN NEW."note_id_convert_to_bigint" := NEW."note_id"; RETURN NEW; END; $$;')
|
||||
end
|
||||
|
||||
after do
|
||||
connection.execute('ALTER TABLE design_user_mentions DROP COLUMN IF EXISTS note_id_convert_to_bigint')
|
||||
end
|
||||
|
||||
it 'swaps the columns' do
|
||||
disable_migrations_output do
|
||||
reversible_migration do |migration|
|
||||
migration.before -> {
|
||||
design_user_mentions.reset_column_information
|
||||
|
||||
expect(design_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
|
||||
expect(design_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
|
||||
eq('bigint')
|
||||
)
|
||||
}
|
||||
|
||||
migration.after -> {
|
||||
design_user_mentions.reset_column_information
|
||||
|
||||
expect(design_user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
|
||||
expect(design_user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to(
|
||||
eq('integer')
|
||||
)
|
||||
}
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -260,7 +260,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
|
|||
expect(runner.reload.platform).to eq('darwin')
|
||||
expect(json_response['id']).to eq(job.id)
|
||||
expect(json_response['token']).to eq(job.token)
|
||||
expect(json_response['job_info']).to eq(expected_job_info)
|
||||
expect(json_response['job_info']).to include(expected_job_info)
|
||||
expect(json_response['git_info']).to eq(expected_git_info)
|
||||
expect(json_response['image']).to eq(
|
||||
{ 'name' => 'image:1.0', 'entrypoint' => '/bin/sh', 'ports' => [], 'pull_policy' => nil }
|
||||
|
|
@ -672,7 +672,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
|
|||
expect(runner.reload.platform).to eq('darwin')
|
||||
expect(json_response['id']).to eq(job.id)
|
||||
expect(json_response['token']).to eq(job.token)
|
||||
expect(json_response['job_info']).to eq(expected_job_info)
|
||||
expect(json_response['job_info']).to include(expected_job_info)
|
||||
expect(json_response['git_info']).to eq(expected_git_info)
|
||||
expect(json_response['artifacts']).to eq(expected_artifacts)
|
||||
end
|
||||
|
|
@ -785,6 +785,63 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'time_in_queue_seconds support' do
|
||||
let(:job) do
|
||||
create(:ci_build, :pending, :queued, pipeline: pipeline,
|
||||
name: 'spinach', stage: 'test', stage_idx: 0,
|
||||
queued_at: 60.seconds.ago)
|
||||
end
|
||||
|
||||
it 'presents the time_in_queue_seconds info in the payload' do
|
||||
request_job
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
expect(json_response['job_info']['time_in_queue_seconds']).to be >= 60.seconds
|
||||
end
|
||||
end
|
||||
|
||||
describe 'project_jobs_running_on_instance_runners_count support' do
|
||||
context 'when runner is not instance_type' do
|
||||
it 'presents the project_jobs_running_on_instance_runners_count info in the payload as +Inf' do
|
||||
request_job
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
expect(json_response['job_info']['project_jobs_running_on_instance_runners_count']).to eq('+Inf')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when runner is instance_type' do
|
||||
let(:project) { create(:project, namespace: group, shared_runners_enabled: true) }
|
||||
let(:runner) { create(:ci_runner, :instance) }
|
||||
|
||||
context 'when less than Project::INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET running jobs assigned to an instance runner are on the list' do
|
||||
it 'presents the project_jobs_running_on_instance_runners_count info in the payload as a correct number in a string format' do
|
||||
request_job
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
expect(json_response['job_info']['project_jobs_running_on_instance_runners_count']).to eq('0')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when at least Project::INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET running jobs assigned to an instance runner are on the list' do
|
||||
let(:other_runner) { create(:ci_runner, :instance) }
|
||||
|
||||
before do
|
||||
stub_const('Project::INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET', 1)
|
||||
|
||||
create(:ci_running_build, runner: other_runner, runner_type: other_runner.runner_type, project: project)
|
||||
end
|
||||
|
||||
it 'presents the project_jobs_running_on_instance_runners_count info in the payload as Project::INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET+' do
|
||||
request_job
|
||||
|
||||
expect(response).to have_gitlab_http_status(:created)
|
||||
expect(json_response['job_info']['project_jobs_running_on_instance_runners_count']).to eq('1+')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
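The three cases asserted above ('+Inf' for non-instance runners, the exact count as a string while under the bucket limit, and "<limit>+" once the limit is reached) describe a capped counter. A minimal sketch of that formatting, with method and parameter names chosen for illustration only:

# Illustrative only: formats a running-jobs count the way the specs above expect.
def bucketed_running_jobs_count(count, limit:, instance_runner: true)
  return '+Inf' unless instance_runner # non-instance runners are reported as unbounded
  return "#{limit}+" if count >= limit # cap the value once the bucket limit is reached

  count.to_s
end

bucketed_running_jobs_count(0, limit: 5)                         # => "0"
bucketed_running_jobs_count(1, limit: 1)                         # => "1+"
bucketed_running_jobs_count(3, limit: 5, instance_runner: false) # => "+Inf"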
|
||||
end
|
||||
|
||||
describe 'port support' do
|
||||
|
|
|
|||
|
|
@ -104,18 +104,15 @@ RSpec.describe "Add linked items to a work item", feature_category: :portfolio_m
|
|||
|
||||
context 'when there are more than the max allowed items to link' do
|
||||
let(:max_work_items) { Mutations::WorkItems::LinkedItems::Base::MAX_WORK_ITEMS }
|
||||
let(:error_msg) { "No more than #{max_work_items} work items can be linked at the same time." }
|
||||
|
||||
before do
|
||||
max_work_items.times { |i| ids_to_link.push("gid://gitlab/WorkItem/#{i}") }
|
||||
end
|
||||
let(:ids_to_link) { (0..max_work_items).map { |i| "gid://gitlab/WorkItem/#{i}" } }
|
||||
let(:error_msg) { "No more than #{max_work_items} work items can be modified at the same time." }
|
||||
|
||||
it 'returns an error message' do
|
||||
expect do
|
||||
post_graphql_mutation(mutation, current_user: current_user)
|
||||
end.not_to change { WorkItems::RelatedWorkItemLink.count }
|
||||
|
||||
expect_graphql_errors_to_include("No more than #{max_work_items} work items can be linked at the same time.")
|
||||
expect_graphql_errors_to_include(error_msg)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,120 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe "Remove items linked to a work item", feature_category: :portfolio_management do
|
||||
include GraphqlHelpers
|
||||
|
||||
let_it_be(:project) { create(:project, :private) }
|
||||
let_it_be(:guest) { create(:user).tap { |user| project.add_guest(user) } }
|
||||
let_it_be(:work_item) { create(:work_item, project: project) }
|
||||
let_it_be(:related1) { create(:work_item, project: project) }
|
||||
let_it_be(:related2) { create(:work_item, project: project) }
|
||||
let_it_be(:link1) { create(:work_item_link, source: work_item, target: related1) }
|
||||
let_it_be(:link2) { create(:work_item_link, source: work_item, target: related2) }
|
||||
|
||||
let(:mutation_response) { graphql_mutation_response(:work_item_remove_linked_items) }
|
||||
let(:mutation) { graphql_mutation(:workItemRemoveLinkedItems, input, fields) }
|
||||
let(:ids_to_unlink) { [related1.to_global_id.to_s, related2.to_global_id.to_s] }
|
||||
let(:input) { { 'id' => work_item.to_global_id.to_s, 'workItemsIds' => ids_to_unlink } }
|
||||
|
||||
let(:fields) do
|
||||
<<~FIELDS
|
||||
workItem {
|
||||
id
|
||||
widgets {
|
||||
type
|
||||
... on WorkItemWidgetLinkedItems {
|
||||
linkedItems {
|
||||
edges {
|
||||
node {
|
||||
linkType
|
||||
workItem {
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
errors
|
||||
message
|
||||
FIELDS
|
||||
end
|
||||
|
||||
context 'when the user is not allowed to read the work item' do
|
||||
let(:current_user) { create(:user) }
|
||||
|
||||
it_behaves_like 'a mutation that returns a top-level access error'
|
||||
end
|
||||
|
||||
context 'when user has permissions to read the work item' do
|
||||
let(:current_user) { guest }
|
||||
|
||||
it 'unlinks the work items' do
|
||||
expect do
|
||||
post_graphql_mutation(mutation, current_user: current_user)
|
||||
end.to change { WorkItems::RelatedWorkItemLink.count }.by(-2)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:success)
|
||||
expect(mutation_response['workItem']).to include('id' => work_item.to_global_id.to_s)
|
||||
expect(mutation_response['message']).to eq("Successfully unlinked IDs: #{related1.id} and #{related2.id}.")
|
||||
expect(mutation_response['workItem']['widgets']).to include(
|
||||
{
|
||||
'linkedItems' => { 'edges' => [] }, 'type' => 'LINKED_ITEMS'
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
context 'when some items fail' do
|
||||
let_it_be(:other_project) { create(:project, :private) }
|
||||
let_it_be(:not_related) { create(:work_item, project: project) }
|
||||
let_it_be(:no_access) { create(:work_item, project: other_project) }
|
||||
let_it_be(:no_access_link) { create(:work_item_link, source: work_item, target: no_access) }
|
||||
|
||||
let(:ids_to_unlink) { [related1.to_global_id.to_s, not_related.to_global_id.to_s, no_access.to_global_id.to_s] }
|
||||
let(:error_msg) do
|
||||
"Successfully unlinked IDs: #{related1.id}. " \
|
||||
"IDs with errors: #{no_access.id} could not be removed due to insufficient permissions, " \
|
||||
"#{not_related.id} could not be removed due to not being linked."
|
||||
end
|
||||
|
||||
it 'removes valid items and includes failing IDs in the response message' do
|
||||
expect do
|
||||
post_graphql_mutation(mutation, current_user: current_user)
|
||||
end.to change { WorkItems::RelatedWorkItemLink.count }.by(-1)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:success)
|
||||
expect(mutation_response['message']).to eq(error_msg)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there are more than the max allowed items to unlink' do
|
||||
let(:max_work_items) { Mutations::WorkItems::LinkedItems::Base::MAX_WORK_ITEMS }
|
||||
let(:ids_to_unlink) { (0..max_work_items).map { |i| "gid://gitlab/WorkItem/#{i}" } }
|
||||
|
||||
it 'returns an error message' do
|
||||
expect do
|
||||
post_graphql_mutation(mutation, current_user: current_user)
|
||||
end.not_to change { WorkItems::RelatedWorkItemLink.count }
|
||||
|
||||
expect_graphql_errors_to_include("No more than #{max_work_items} work items can be modified at the same time.")
|
||||
end
|
||||
end
|
||||
|
||||
context 'when workItemsIds is empty' do
|
||||
let(:ids_to_unlink) { [] }
|
||||
|
||||
it_behaves_like 'a mutation that returns top-level errors', errors: ['workItemsIds cannot be empty']
|
||||
end
|
||||
|
||||
context 'when `linked_work_items` feature flag is disabled' do
|
||||
before do
|
||||
stub_feature_flags(linked_work_items: false)
|
||||
end
|
||||
|
||||
it_behaves_like 'a mutation that returns a top-level access error'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -82,6 +82,15 @@ RSpec.describe RuboCop::Cop::Migration::VersionedMigrationClass, feature_categor
|
|||
end
|
||||
RUBY
|
||||
end
|
||||
|
||||
it 'excludes parentless classes defined inside the migration' do
|
||||
expect_no_offenses(<<~RUBY)
|
||||
class TestMigration < Gitlab::Database::Migration[2.1]
|
||||
class TestClass
|
||||
end
|
||||
end
|
||||
RUBY
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -948,8 +948,8 @@ module Ci
|
|||
let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 tag2)) }
|
||||
let(:expected_shared_runner) { true }
|
||||
let(:expected_shard) { ::Gitlab::Ci::Queue::Metrics::DEFAULT_METRICS_SHARD }
|
||||
let(:expected_jobs_running_for_project_first_job) { 0 }
|
||||
let(:expected_jobs_running_for_project_third_job) { 2 }
|
||||
let(:expected_jobs_running_for_project_first_job) { '0' }
|
||||
let(:expected_jobs_running_for_project_third_job) { '2' }
|
||||
|
||||
it_behaves_like 'metrics collector'
|
||||
|
||||
|
|
@ -969,7 +969,7 @@ module Ci
|
|||
|
||||
context 'when max running jobs bucket size is exceeded' do
|
||||
before do
|
||||
stub_const('Gitlab::Ci::Queue::Metrics::JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET', 1)
|
||||
stub_const('Project::INSTANCE_RUNNER_RUNNING_JOBS_MAX_BUCKET', 1)
|
||||
end
|
||||
|
||||
let(:expected_jobs_running_for_project_third_job) { '1+' }
|
||||
|
|
|
|||
|
|
@ -0,0 +1,82 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe WorkItems::RelatedWorkItemLinks::DestroyService, feature_category: :portfolio_management do
|
||||
describe '#execute' do
|
||||
let_it_be(:project) { create(:project_empty_repo, :private) }
|
||||
let_it_be(:other_project) { create(:project_empty_repo, :private) }
|
||||
let_it_be(:user) { create(:user) }
|
||||
let_it_be(:source) { create(:work_item, project: project) }
|
||||
let_it_be(:linked_item1) { create(:work_item, project: project) }
|
||||
let_it_be(:linked_item2) { create(:work_item, project: project) }
|
||||
let_it_be(:no_access_item) { create(:work_item, project: other_project) }
|
||||
let_it_be(:not_linked_item) { create(:work_item, project: project) }
|
||||
|
||||
let_it_be(:link1) { create(:work_item_link, source: source, target: linked_item1) }
|
||||
let_it_be(:link2) { create(:work_item_link, source: source, target: linked_item2) }
|
||||
let_it_be(:link3) { create(:work_item_link, source: source, target: no_access_item) }
|
||||
|
||||
let(:ids_to_remove) { [linked_item1.id, linked_item2.id, no_access_item.id, not_linked_item.id] }
|
||||
|
||||
subject(:destroy_links) { described_class.new(source, user, { item_ids: ids_to_remove }).execute }
|
||||
|
||||
context 'when user can `admin_work_item_link` for the work item' do
|
||||
before_all do
|
||||
project.add_guest(user)
|
||||
end
|
||||
|
||||
it 'removes existing linked items with access' do
|
||||
expect { destroy_links }.to change { WorkItems::RelatedWorkItemLink.count }.by(-2)
|
||||
end
|
||||
|
||||
it 'creates notes for the source and target of each removed link' do
|
||||
[linked_item1, linked_item2].each do |item|
|
||||
expect(SystemNoteService).to receive(:unrelate_issuable).with(source, item, user)
|
||||
expect(SystemNoteService).to receive(:unrelate_issuable).with(item, source, user)
|
||||
end
|
||||
|
||||
destroy_links
|
||||
end
|
||||
|
||||
it 'returns correct response message' do
|
||||
message = "Successfully unlinked IDs: #{linked_item1.id} and #{linked_item2.id}. IDs with errors: " \
|
||||
"#{no_access_item.id} could not be removed due to insufficient permissions, " \
|
||||
"#{not_linked_item.id} could not be removed due to not being linked."
|
||||
|
||||
is_expected.to eq(
|
||||
status: :success,
|
||||
message: message,
|
||||
items_removed: [linked_item1.id, linked_item2.id],
|
||||
items_with_errors: [no_access_item.id]
|
||||
)
|
||||
end
|
||||
|
||||
context 'when all items fail' do
|
||||
let(:ids_to_remove) { [no_access_item.id] }
|
||||
let(:params) { { item_ids: [no_access_item.id] } }
|
||||
let(:error_msg) { "IDs with errors: #{ids_to_remove[0]} could not be removed due to insufficient permissions." }
|
||||
|
||||
it 'returns an error response' do
|
||||
expect { destroy_links }.not_to change { WorkItems::RelatedWorkItemLink.count }
|
||||
|
||||
is_expected.to eq(status: :error, message: error_msg)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when item_ids is empty' do
|
||||
let(:ids_to_remove) { [] }
|
||||
|
||||
it 'returns error response' do
|
||||
is_expected.to eq(message: 'No work item IDs provided.', status: :error, http_status: 409)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user cannot `admin_work_item_link` for the work item' do
|
||||
it 'returns error response' do
|
||||
is_expected.to eq(message: 'No work item found.', status: :error, http_status: 403)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -78,5 +78,16 @@ RSpec.shared_examples 'includes LinkableItem concern' do
|
|||
expect(described_class.for_items(item, item2)).to contain_exactly(target_link)
|
||||
end
|
||||
end
|
||||
|
||||
describe '.for_source_and_target' do
|
||||
let_it_be(:item3) { create(:work_item, project: project) }
|
||||
let_it_be(:link1) { create(link_factory, source: item, target: item1) }
|
||||
let_it_be(:link2) { create(link_factory, source: item, target: item2) }
|
||||
let_it_be(:link3) { create(link_factory, source: item, target: item3) }
|
||||
|
||||
it 'includes links for provided source and target' do
|
||||
expect(described_class.for_source_and_target(item, [item1, item2])).to contain_exactly(link1, link2)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||