Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent ad3bb109f8
commit d5c2e5cc22
@@ -25,7 +25,7 @@ export const FILTERED_SEARCH_TOKEN_USER = {

export const FILTERED_SEARCH_TOKEN_REPORTER = {
...FILTERED_SEARCH_TOKEN_USER,
type: 'reporter',
- title: __('Reporter'),
+ title: s__('AbuseReport|Reporter'),
};

export const FILTERED_SEARCH_TOKEN_STATUS = {
@@ -80,6 +80,11 @@ export const config = {
},
},
},
+ Namespace: {
+ fields: {
+ merge: true,
+ },
+ },
WorkItemWidgetNotes: {
fields: {
// If we add any key args, the discussions field becomes discussions({"filter":"ONLY_ACTIVITY","first":10}) and
@@ -196,7 +196,7 @@ export default {

<!-- Flex order for slots is defined in the parent component: e.g. related_issues_block.vue -->
<span v-if="weight > 0" class="order-md-1">
- <issue-weight :weight="weight" class="item-weight gl-flex gl-items-center" />
+ <issue-weight :weight="weight" class="item-weight gl-items-center" />
</span>

<span v-if="dueDate" class="order-md-1">
@@ -69,6 +69,11 @@ export default {
required: false,
default: ALERT_VARIANTS.danger,
},
+ canReorderDesign: {
+ type: Boolean,
+ required: false,
+ default: false,
+ },
},
apollo: {
designCollection: {

@@ -155,6 +160,7 @@ export default {
return (
!this.$options.isLoggedIn ||
!this.isLatestVersion ||
+ !this.canReorderDesign ||
this.isReorderingInProgress ||
this.isMobile
);

@@ -332,6 +338,10 @@ export default {
dragOptions: {
animation: 200,
ghostClass: 'gl-invisible',
+ forceFallback: true,
+ tag: 'ol',
+ filter: '.no-drag',
+ draggable: '.js-design-tile',
},
i18n: {
designLoadingError: s__(

@@ -441,10 +451,6 @@ export default {
:value="designs"
:disabled="isDraggingDisabled"
v-bind="$options.dragOptions"
- :force-fallback="true"
- tag="ol"
- draggable=".js-design-tile"
- filter=".no-drag"
class="list-unstyled row -gl-my-1 gl-flex gl-gap-y-5"
:class="{ 'gl-px-3 gl-py-2': hasDesigns, 'gl-hidden': !hasDesigns }"
@end="onDragEnd"
@@ -18,6 +18,7 @@ import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import toast from '~/vue_shared/plugins/global_toast';
import { isLoggedIn } from '~/lib/utils/common_utils';

+ import WorkItemChangeTypeModal from 'ee_else_ce/work_items/components/work_item_change_type_modal.vue';
import {
sprintfWorkItem,
BASE_ALLOWED_CREATE_TYPES,

@@ -50,7 +51,6 @@ import updateWorkItemMutation from '../graphql/update_work_item.mutation.graphql
import updateWorkItemNotificationsMutation from '../graphql/update_work_item_notifications.mutation.graphql';
import convertWorkItemMutation from '../graphql/work_item_convert.mutation.graphql';
import namespaceWorkItemTypesQuery from '../graphql/namespace_work_item_types.query.graphql';
- import WorkItemChangeTypeModal from './work_item_change_type_modal.vue';
import WorkItemStateToggle from './work_item_state_toggle.vue';
import CreateWorkItemModal from './create_work_item_modal.vue';

@@ -227,6 +227,11 @@ export default {
required: false,
default: () => [],
},
+ namespaceFullName: {
+ type: String,
+ required: false,
+ default: '',
+ },
},
data() {
return {

@@ -326,7 +331,7 @@ export default {
: this.$options.i18n.confidentialityEnabled;
},
showChangeType() {
- return !this.isEpic && this.glFeatures.workItemsBeta;
+ return !this.isEpic && this.glFeatures.workItemsBeta && this.$options.isLoggedIn;
},
allowedWorkItemTypes() {
if (this.isGroup) {

@@ -644,13 +649,16 @@ export default {
v-if="showChangeType"
ref="workItemsChangeTypeModal"
:work-item-id="workItemId"
:work-item-iid="workItemIid"
:work-item-type="workItemType"
:full-path="fullPath"
:has-children="hasChildren"
:has-parent="hasParent"
:widgets="widgets"
:allowed-child-types="allowedChildTypes"
+ :namespace-full-name="namespaceFullName"
@workItemTypeChanged="$emit('workItemTypeChanged')"
@error="$emit('error', $event)"
/>
</div>
</template>
@@ -13,6 +13,8 @@ import {
WORK_ITEM_ALLOWED_CHANGE_TYPE_MAP,
WORK_ITEM_TYPE_ENUM_OBJECTIVE,
WORK_ITEM_TYPE_ENUM_KEY_RESULT,
+ WORK_ITEM_TYPE_ENUM_EPIC,
+ WORK_ITEM_TYPE_VALUE_EPIC,
sprintfWorkItem,
I18N_WORK_ITEM_CHANGE_TYPE_PARENT_ERROR,
I18N_WORK_ITEM_CHANGE_TYPE_CHILD_ERROR,

@@ -42,6 +44,11 @@ export default {
type: String,
required: true,
},
+ workItemIid: {
+ type: String,
+ required: false,
+ default: '',
+ },
workItemType: {
type: String,
required: false,

@@ -71,15 +78,36 @@ export default {
required: false,
default: () => [],
},
+ namespaceFullName: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ allowedWorkItemTypesEE: {
+ type: Array,
+ required: false,
+ default: () => [],
+ },
+ epicFieldNote: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ getEpicWidgetDefinitions: {
+ type: Function,
+ required: false,
+ default: () => () => {},
+ },
},
data() {
return {
selectedWorkItemType: null,
workItemTypes: [],
warningMessage: '',
- errorMessage: '',
changeTypeDisabled: true,
hasDesigns: false,
+ workItemFullPath: this.fullPath,
+ typeFieldNote: '',
};
},
apollo: {
@@ -87,14 +115,19 @@ export default {
query: namespaceWorkItemTypesQuery,
variables() {
return {
- fullPath: this.fullPath,
+ fullPath: this.workItemFullPath,
};
},
update(data) {
return data.workspace?.workItemTypes?.nodes;
},
error(e) {
- this.showErrorMessage(e);
+ this.throwError(e);
},
+ result() {
+ if (this.selectedWorkItemType !== null) {
+ this.validateWorkItemType();
+ }
+ },
},
hasDesigns: {

@@ -112,7 +145,7 @@ export default {
return !this.workItemId;
},
error(e) {
- this.showErrorMessage(e);
+ this.throwError(e);
},
},
},

@@ -120,7 +153,7 @@ export default {
allowedConversionWorkItemTypes() {
// The logic will be simplified once we implement
// https://gitlab.com/gitlab-org/gitlab/-/issues/498656
- return [
+ let allowedWorkItemTypes = [
{ text: __('Select type'), value: null },
...Object.entries(WORK_ITEMS_TYPE_MAP)
.map(([key, value]) => ({

@@ -141,12 +174,21 @@ export default {
return WORK_ITEM_ALLOWED_CHANGE_TYPE_MAP.includes(item.value);
}),
];
+ // Adding hardcoded EPIC till we have epic conversion support
+ // https://gitlab.com/gitlab-org/gitlab/-/issues/478486
+ if (this.allowedWorkItemTypesEE.length > 0) {
+ allowedWorkItemTypes = [...allowedWorkItemTypes, ...this.allowedWorkItemTypesEE];
+ }
+ return allowedWorkItemTypes;
},
isOkrsEnabled() {
return this.hasOkrsFeature && this.glFeatures.okrsMvc;
},
selectedWorkItemTypeWidgetDefinitions() {
- return this.getWidgetDefinitions(this.selectedWorkItemType?.text);
+ return this.selectedWorkItemType?.value === WORK_ITEM_TYPE_ENUM_EPIC
+ ? this.getEpicWidgetDefinitions({ workItemTypes: this.workItemTypes })
+ : this.getWidgetDefinitions(this.selectedWorkItemType?.text);
},
currentWorkItemTypeWidgetDefinitions() {
return this.getWidgetDefinitions(this.workItemType);

@@ -200,6 +242,9 @@ export default {
}));
},
hasWidgetDifference() {
+ if (this.hasParent || this.hasChildren) {
+ return false;
+ }
return this.widgetsWithExistingData.length > 0;
},
parentWorkItem() {

@@ -209,7 +254,7 @@ export default {
return this.parentWorkItem?.workItemType?.name;
},
workItemTypeId() {
- return this.workItemTypes.find((type) => type.name === this.selectedWorkItemType.text).id;
+ return this.workItemTypes.find((type) => type.name === this.selectedWorkItemType?.text).id;
},
selectedWorkItemTypeValue() {
return this.selectedWorkItemType?.value || null;

@@ -223,9 +268,19 @@ export default {
},
};
},
+ isWorkItemTypesQueryLoading() {
+ return this.$apollo.queries.workItemTypes.loading;
+ },
},
methods: {
- async changeType() {
+ changeType() {
+ if (this.selectedWorkItemType.value === WORK_ITEM_TYPE_ENUM_EPIC) {
+ this.$emit('promoteToEpic');
+ } else {
+ this.convertType();
+ }
+ },
+ async convertType() {
try {
const {
data: {
@@ -240,15 +295,15 @@ export default {
},
},
});
- if (errors.length > 0) {
- this.showErrorMessage(errors[0]);
+ if (errors?.length > 0) {
+ this.throwError(errors[0]);
return;
}
this.$toast.show(s__('WorkItem|Type changed.'));
this.$emit('workItemTypeChanged');
this.hide();
} catch (error) {
- this.showErrorMessage(error);
+ this.throwError(error.message);
Sentry.captureException(error);
}
},

@@ -258,9 +313,9 @@ export default {
}
return this.workItemTypes.find((widget) => widget.name === type)?.widgetDefinitions;
},
- validateWorkItemType(value) {
- this.changeTypeDisabled = false;
- this.warningMessage = '';
+ updateWorkItemFullPath(value) {
+ this.typeFieldNote = '';
if (!value) {
this.resetModal();
return;

@@ -270,27 +325,40 @@ export default {
(item) => item.value === value,
);
+ if (value === WORK_ITEM_TYPE_ENUM_EPIC) {
+ // triggers the `workItemTypes` to fetch Epic widget definitions
+ this.workItemFullPath = this.fullPath.substring(0, this.fullPath.lastIndexOf('/'));
+ this.typeFieldNote = this.epicFieldNote;
+ }
+ this.validateWorkItemType();
+ },
+ validateWorkItemType() {
+ this.changeTypeDisabled = false;
+ this.warningMessage = '';
if (this.hasParent) {
- this.showWarningMessage(
- sprintfWorkItem(
- I18N_WORK_ITEM_CHANGE_TYPE_PARENT_ERROR,
- this.selectedWorkItemType.text,
- this.parentWorkItemType,
- ),
+ this.warningMessage = sprintfWorkItem(
+ I18N_WORK_ITEM_CHANGE_TYPE_PARENT_ERROR,
+ this.selectedWorkItemType.value === WORK_ITEM_TYPE_ENUM_EPIC
+ ? WORK_ITEM_TYPE_VALUE_EPIC
+ : this.selectedWorkItemType.text,
+ this.parentWorkItemType,
);
this.changeTypeDisabled = true;
return;
}
if (this.hasChildren) {
- const msg = sprintf(I18N_WORK_ITEM_CHANGE_TYPE_CHILD_ERROR, {
+ this.warningMessage = sprintf(I18N_WORK_ITEM_CHANGE_TYPE_CHILD_ERROR, {
workItemType: capitalizeFirstCharacter(
- this.selectedWorkItemType.text.toLocaleLowerCase(),
+ this.selectedWorkItemType.value === WORK_ITEM_TYPE_ENUM_EPIC
+ ? WORK_ITEM_TYPE_VALUE_EPIC.toLocaleLowerCase()
+ : this.selectedWorkItemType.text.toLocaleLowerCase(),
),
childItemType: this.allowedChildTypes?.[0]?.name?.toLocaleLowerCase(),
});
- this.showWarningMessage(msg);
this.changeTypeDisabled = true;
return;
}
@@ -299,15 +367,14 @@ export default {
if (this.hasWidgetDifference) {
this.warningMessage = sprintfWorkItem(
I18N_WORK_ITEM_CHANGE_TYPE_MISSING_FIELDS_ERROR,
- this.selectedWorkItemType.text,
+ this.selectedWorkItemType.value === WORK_ITEM_TYPE_ENUM_EPIC
+ ? WORK_ITEM_TYPE_VALUE_EPIC
+ : this.selectedWorkItemType.text,
);
}
},
- showWarningMessage(message) {
- this.warningMessage = message;
- },
- showErrorMessage(message) {
- this.errorMessage = message;
+ throwError(message) {
+ this.$emit('error', message);
},
show() {
this.resetModal();

@@ -320,10 +387,10 @@ export default {
},
resetModal() {
this.warningMessage = '';
- this.errorMessage = '';
this.showDifferenceMessage = false;
this.selectedWorkItemType = null;
this.changeTypeDisabled = false;
+ this.typeFieldNote = '';
},
},
};
@@ -341,30 +408,23 @@ export default {
@primary="changeType"
@canceled="hide"
>
- <gl-alert
- v-if="errorMessage"
- data-testid="change-type-error-message"
- class="gl-mb-3"
- variant="danger"
- @dismiss="errorMessage = undefined"
- >
- {{ errorMessage }}
- </gl-alert>
<div class="gl-mb-4">
{{ s__('WorkItem|Select which type you would like to change this item to.') }}
</div>
<gl-form-group :label="__('Type')" label-for="work-item-type-select">
<gl-form-select
id="work-item-type-select"
class="gl-mb-2"
data-testid="work-item-change-type-select"
:value="selectedWorkItemTypeValue"
width="md"
:options="allowedConversionWorkItemTypes"
- @change="validateWorkItemType"
+ @change="updateWorkItemFullPath"
/>
+ <p v-if="typeFieldNote" class="gl-text-subtle">{{ typeFieldNote }}</p>
</gl-form-group>
<gl-alert
- v-if="warningMessage"
+ v-if="warningMessage && !isWorkItemTypesQueryLoading"
data-testid="change-type-warning-message"
variant="warning"
:dismissible="false"
@@ -83,6 +83,7 @@ import WorkItemCreateBranchMergeRequestSplitButton from './work_item_development

const defaultWorkspacePermissions = {
createDesign: false,
+ moveDesign: false,
};

export default {

@@ -371,6 +372,9 @@ export default {
showUploadDesign() {
return this.hasDesignWidget && this.workspacePermissions.createDesign;
},
+ canReorderDesign() {
+ return this.hasDesignWidget && this.workspacePermissions.moveDesign;
+ },
workItemNotificationsSubscribed() {
return Boolean(this.isWidgetPresent(WIDGET_TYPE_NOTIFICATIONS)?.subscribed);
},

@@ -477,6 +481,9 @@ export default {
showCreateBranchMergeRequestSplitButton() {
return this.workItemDevelopment && this.workItemIsOpen;
},
+ namespaceFullName() {
+ return this.workItem?.namespace?.fullName || '';
+ },
},
methods: {
handleWorkItemCreated() {

@@ -762,6 +769,7 @@ export default {
:is-group="isGroupWorkItem"
:allowed-child-types="allowedChildTypes"
:parent-id="parentWorkItemId"
+ :namespace-full-name="namespaceFullName"
@hideStickyHeader="hideStickyHeader"
@showStickyHeader="showStickyHeader"
@deleteWorkItem="$emit('deleteWorkItem', { workItemType, workItemId: workItem.id })"

@@ -866,6 +874,7 @@ export default {
:is-group="isGroupWorkItem"
:widgets="widgets"
:allowed-child-types="allowedChildTypes"
+ :namespace-full-name="namespaceFullName"
@deleteWorkItem="$emit('deleteWorkItem', { workItemType, workItemId: workItem.id })"
@toggleWorkItemConfidentiality="toggleConfidentiality"
@error="updateError = $event"

@@ -979,6 +988,7 @@ export default {
:upload-error="designUploadError"
:upload-error-variant="designUploadErrorVariant"
:is-saving="isSaving"
+ :can-reorder-design="canReorderDesign"
@upload="onUploadDesign"
@dismissError="designUploadError = null"
>
@@ -74,12 +74,14 @@ export default {
activeItem: {
deep: true,
immediate: true,
- handler(newValue) {
+ handler(newValue, oldValue) {
if (newValue?.iid) {
this.setDrawerParams();
// focus on header link when drawer is updated
this.$nextTick(() => {
- this.focusOnHeaderLink();
+ if (!oldValue || oldValue?.iid !== newValue?.iid) {
+ this.focusOnHeaderLink();
+ }
});
}
},
@@ -301,6 +301,7 @@ export default {
markFormSubmitInProgress(value) {
this.submitInProgress = value;
this.$emit('update-in-progress', this.submitInProgress);
+ if (!value) this.$refs.wiTitleInput?.$el?.focus();
},
addChild() {
this.markFormSubmitInProgress(true);

@@ -340,7 +341,6 @@ export default {
} else {
this.unsetError();
this.workItemsToAdd = [];
- this.closeForm();
}
})
.catch(() => {

@@ -379,7 +379,6 @@ export default {
} else {
this.unsetError();
this.$emit('addChild');
- this.closeForm();
}
})
.catch(() => {
@@ -85,6 +85,11 @@ export default {
required: false,
default: () => [],
},
+ namespaceFullName: {
+ type: String,
+ required: false,
+ default: '',
+ },
},
computed: {
canUpdate() {

@@ -197,6 +202,7 @@ export default {
:widgets="widgets"
:allowed-child-types="allowedChildTypes"
:parent-id="parentId"
+ :namespace-full-name="namespaceFullName"
@deleteWorkItem="$emit('deleteWorkItem')"
@toggleWorkItemConfidentiality="
$emit('toggleWorkItemConfidentiality', !workItem.confidential)
@@ -568,6 +568,7 @@ export const setNewWorkItemCache = async (
id: newWorkItemPath,
fullPath,
name: newWorkItemPath,
+ fullName: newWorkItemPath,
__typename: 'Namespace',
},
author: {

@@ -21,6 +21,7 @@ fragment WorkItem on WorkItem {
id
fullPath
name
+ fullName
}
author {
...Author

@@ -3,6 +3,7 @@ query workspacePermissions($fullPath: ID!) {
id
userPermissions {
createDesign
+ moveDesign
}
}
}
@@ -1,7 +1,7 @@
# frozen_string_literal: true

module Repositories
- class GitHttpClientController < Repositories::ApplicationController
+ class GitHttpClientController < ::Repositories::ApplicationController
include ActionController::HttpAuthentication::Basic
include KerberosHelper
include Gitlab::Utils::StrongMemoize

@@ -153,4 +153,4 @@ module Repositories
end
end

- Repositories::GitHttpClientController.prepend_mod_with('Repositories::GitHttpClientController')
+ ::Repositories::GitHttpClientController.prepend_mod_with('Repositories::GitHttpClientController')
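A note on the `::` prefixes added throughout the Ruby hunks below: inside `module Repositories` (or inside an EE module that nests its own `Repositories` namespace), an unqualified constant reference is resolved through the lexical scope and can pick up the nested namespace instead of the top-level one; a leading `::` pins resolution to the top level. A self-contained Ruby sketch with illustrative module names (not taken from the GitLab code base):

```ruby
# Standalone sketch of Ruby constant resolution; the module and class names
# here are illustrative only.
module Repositories
  class ApplicationController
    def self.origin
      'top-level Repositories::ApplicationController'
    end
  end
end

module EE
  module Repositories
    class ApplicationController
      def self.origin
        'EE::Repositories::ApplicationController'
      end
    end
  end

  class Consumer
    # Unqualified lookup walks the lexical scope outward, so inside EE this
    # resolves to EE::Repositories::ApplicationController.
    RELATIVE = Repositories::ApplicationController
    # The leading :: pins the lookup to the top-level namespace.
    ABSOLUTE = ::Repositories::ApplicationController
  end
end

puts EE::Consumer::RELATIVE.origin  # => EE::Repositories::ApplicationController
puts EE::Consumer::ABSOLUTE.origin  # => top-level Repositories::ApplicationController
```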
@@ -1,7 +1,7 @@
# frozen_string_literal: true

module Repositories
- class GitHttpController < Repositories::GitHttpClientController
+ class GitHttpController < ::Repositories::GitHttpClientController
include WorkhorseRequest

before_action :access_check

@@ -152,4 +152,4 @@ module Repositories
end
end

- Repositories::GitHttpController.prepend_mod_with('Repositories::GitHttpController')
+ ::Repositories::GitHttpController.prepend_mod_with('Repositories::GitHttpController')

@@ -1,7 +1,7 @@
# frozen_string_literal: true

module Repositories
- class LfsApiController < Repositories::GitHttpClientController
+ class LfsApiController < ::Repositories::GitHttpClientController
include LfsRequest
include Gitlab::Utils::StrongMemoize

@@ -211,4 +211,4 @@ module Repositories
end
end

- Repositories::LfsApiController.prepend_mod_with('Repositories::LfsApiController')
+ ::Repositories::LfsApiController.prepend_mod_with('Repositories::LfsApiController')
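The `prepend_mod_with` calls kept in these hunks are GitLab's hook for layering edition-specific overrides onto a CE class; the mechanism underneath is plain `Module#prepend`. A hedged, standalone sketch of that mechanism with made-up names (not the real helper, which looks the extension module up by name and only prepends it when it is defined):

```ruby
# Plain Module#prepend: the prepended module is inserted ahead of the class
# in the ancestor chain, so its methods run first and can call super.
class GitClient
  def authenticate
    'basic auth'
  end
end

module EEGitClientExtension
  def authenticate
    "kerberos, falling back to #{super}"
  end
end

# prepend_mod_with('Repositories::GitHttpClientController') does roughly this
# when the corresponding extension module exists (a sketch, not the real helper).
GitClient.prepend(EEGitClientExtension)

puts GitClient.new.authenticate
# => kerberos, falling back to basic auth
```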
@@ -1,7 +1,7 @@
# frozen_string_literal: true

module Repositories
- class LfsLocksApiController < Repositories::GitHttpClientController
+ class LfsLocksApiController < ::Repositories::GitHttpClientController
include LfsRequest

# added here as a part of the refactor, will be removed

@@ -1,7 +1,7 @@
# frozen_string_literal: true

module Repositories
- class LfsStorageController < Repositories::GitHttpClientController
+ class LfsStorageController < ::Repositories::GitHttpClientController
include LfsRequest
include WorkhorseRequest
include SendFileUpload

@@ -19,7 +19,7 @@ module Resolvers
description: 'Number of branch names to return.'

def resolve(search_pattern:, offset:, limit:)
- Repositories::BranchNamesFinder.new(object, offset: offset, limit: limit, search: search_pattern).execute
+ ::Repositories::BranchNamesFinder.new(object, offset: offset, limit: limit, search: search_pattern).execute
end
end
end

@@ -16,7 +16,7 @@ class Commit
include ActsAsPaginatedDiff
include CacheMarkdownField
include GlobalID::Identification
- include Repositories::StreamableDiff
+ include ::Repositories::StreamableDiff

participant :author
participant :committer

@@ -3,7 +3,7 @@
class Compare
include Gitlab::Utils::StrongMemoize
include ActsAsPaginatedDiff
- include Repositories::StreamableDiff
+ include ::Repositories::StreamableDiff

delegate :same, :head, :base, :generated_files, to: :@compare

@@ -125,7 +125,7 @@ module HasRepository
reload_default_branch

Gitlab::EventStore.publish(
- Repositories::DefaultBranchChangedEvent.new(data: { container_id: id, container_type: self.class.name }))
+ ::Repositories::DefaultBranchChangedEvent.new(data: { container_id: id, container_type: self.class.name }))
end

def after_change_head_branch_does_not_exist(branch)

@@ -33,7 +33,7 @@ class Project < ApplicationRecord
include FeatureGate
include OptionallySearch
include FromUnion
- include Repositories::CanHousekeepRepository
+ include ::Repositories::CanHousekeepRepository
include EachBatch
include GitlabRoutingHelper
include BulkMemberAccessLoad
@@ -3,7 +3,7 @@
class Wiki
extend ::Gitlab::Utils::Override
include HasRepository
- include Repositories::CanHousekeepRepository
+ include ::Repositories::CanHousekeepRepository
include Gitlab::Utils::StrongMemoize
include GlobalID::Identification
include Gitlab::Git::WrapsGitalyErrors

@@ -99,7 +99,7 @@ module UpdateRepositoryStorageMethods
object_pool = repository.project&.pool_repository&.object_pool
hint = object_pool ? object_pool.relative_path : ""

- Repositories::ReplicateService.new(raw_repository)
+ ::Repositories::ReplicateService.new(raw_repository)
.execute(new_repository, type.name, partition_hint: hint)
end

@@ -135,10 +135,10 @@ module Git
end

def perform_housekeeping
- housekeeping = Repositories::HousekeepingService.new(project)
+ housekeeping = ::Repositories::HousekeepingService.new(project)
housekeeping.increment!
housekeeping.execute if housekeeping.needed?
- rescue Repositories::HousekeepingService::LeaseTaken
+ rescue ::Repositories::HousekeepingService::LeaseTaken
end

def process_commit_worker_pool

@@ -77,10 +77,10 @@ module Git
end

def perform_housekeeping
- housekeeping = Repositories::HousekeepingService.new(wiki)
+ housekeeping = ::Repositories::HousekeepingService.new(wiki)
housekeeping.increment!
housekeeping.execute if housekeeping.needed?
- rescue Repositories::HousekeepingService::LeaseTaken
+ rescue ::Repositories::HousekeepingService::LeaseTaken
# no-op
end
end

@@ -136,7 +136,7 @@ module Projects
def remove_repository(repository)
return true unless repository

- result = Repositories::DestroyService.new(repository).execute
+ result = ::Repositories::DestroyService.new(repository).execute

result[:status] == :success
end

@@ -50,7 +50,7 @@ module Projects
)

begin
- Repositories::ReplicateService.new(pool_repository.object_pool.repository)
+ ::Repositories::ReplicateService.new(pool_repository.object_pool.repository)
.execute(target_pool_repository.object_pool.repository, :object_pool)
rescue StandardError => e
target_pool_repository.destroy!
@@ -1,6 +1,6 @@
# frozen_string_literal: true

- class Repositories::DestroyService < Repositories::BaseService
+ class Repositories::DestroyService < ::Repositories::BaseService
def execute
return success unless repository
return success unless repo_exists?(disk_path)
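The callers of `::Repositories::DestroyService` touched in this commit all follow the same convention: call `execute` and branch on the `:status` key of the returned hash (`:success` or `:error`). A minimal sketch of that calling convention; the service class here is a stand-in stub, and only the result-hash shape is taken from the diff:

```ruby
# Stand-in for ::Repositories::DestroyService, kept only to show the
# result-hash convention ({ status: :success } or { status: :error, ... }).
class FakeDestroyService
  def initialize(repository)
    @repository = repository
  end

  def execute
    return { status: :error, message: 'no repository given' } unless @repository

    # The real service removes the repository on disk.
    { status: :success }
  end
end

def repository_removed?(repository)
  result = FakeDestroyService.new(repository).execute
  result[:status] == :success
end

puts repository_removed?('some-repo.git')  # => true
puts repository_removed?(nil)              # => false
```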
@@ -3,8 +3,8 @@
# Used for git housekeeping
#
# Ex.
- # Repositories::HousekeepingService.new(project).execute
- # Repositories::HousekeepingService.new(project.wiki).execute
+ # ::Repositories::HousekeepingService.new(project).execute
+ # ::Repositories::HousekeepingService.new(project.wiki).execute
#
module Repositories
class HousekeepingService < BaseService
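The housekeeping call sites in this commit use the service either as a one-off `execute` (as in the doc comment above) or through the `increment!` / `needed?` pattern guarded by a `LeaseTaken` rescue (as in the push services earlier). A stub that mirrors just that interface, for illustration only; the real service persists its push counter and takes an exclusive lease before doing any work:

```ruby
# Stub mirroring only the interface used at the call sites in this diff:
# increment!, needed?, execute, and the LeaseTaken error.
module Repositories
  class FakeHousekeepingService
    LeaseTaken = Class.new(StandardError)

    def initialize(resource, task = :gc)
      @resource = resource
      @task = task
      @pushes = 0
    end

    def increment!
      @pushes += 1
    end

    def needed?
      # The real service consults a persisted push counter; this stub
      # always reports true so the example below does something visible.
      true
    end

    def execute
      # The real service raises LeaseTaken when another housekeeping run
      # already holds the exclusive lease for the repository.
      puts "running #{@task} housekeeping for #{@resource}"
    end
  end
end

def perform_housekeeping(project)
  housekeeping = Repositories::FakeHousekeepingService.new(project)
  housekeeping.increment!
  housekeeping.execute if housekeeping.needed?
rescue Repositories::FakeHousekeepingService::LeaseTaken
  # no-op, matching the wiki push service above
end

perform_housekeeping('my-group/my-project')
```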
@@ -1,7 +1,7 @@
# frozen_string_literal: true

module Repositories
- class ReplicateService < Repositories::BaseService
+ class ReplicateService < ::Repositories::BaseService
Error = Class.new(StandardError)

def execute(new_repository, type, partition_hint: "")
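Both replicate call sites above construct the service around the source repository and pass the target plus a type to `execute`, destroying the half-created target if replication fails. A stubbed sketch of that flow; the classes are fakes, with the `execute` signature copied from the diff and the error handling modeled on the object-pool call site:

```ruby
# Sketch of the replicate-then-clean-up flow used by the object pool call site.
module Repositories
  class FakeReplicateService
    Error = Class.new(StandardError)

    def initialize(source_repository)
      @source_repository = source_repository
    end

    # Same shape as the signature in the diff:
    # execute(new_repository, type, partition_hint: "")
    def execute(new_repository, type, partition_hint: "")
      raise Error, 'checksum mismatch' if new_repository.nil?

      puts "replicating #{@source_repository} -> #{new_repository} (#{type}, hint=#{partition_hint.inspect})"
    end
  end
end

def replicate_object_pool(source_pool, target_pool)
  Repositories::FakeReplicateService.new(source_pool)
    .execute(target_pool, :object_pool)
rescue StandardError => e
  # Mirrors the call site above: clean up the half-created target on failure.
  puts "replication failed (#{e.message}); destroying target"
end

replicate_object_pool('pool-source', 'pool-target')
replicate_object_pool('pool-source', nil)
```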
@@ -19,7 +19,7 @@ module Repositories
result = validate_input(blob_oids: blob_oids, redactions: redactions)
return result if result.error?

- Repositories::RewriteHistoryWorker.perform_async(
+ ::Repositories::RewriteHistoryWorker.perform_async(
project_id: project.id,
user_id: current_user.id,
blob_oids: blob_oids,

@@ -47,7 +47,7 @@ module Snippets

def attempt_delete_repositories!
snippets.each do |snippet|
- result = Repositories::DestroyService.new(snippet.repository).execute
+ result = ::Repositories::DestroyService.new(snippet.repository).execute

raise DeleteRepositoryError if result[:status] == :error
end

@@ -37,7 +37,7 @@ module Snippets
private

def attempt_destroy!
- result = Repositories::DestroyService.new(snippet.repository).execute
+ result = ::Repositories::DestroyService.new(snippet.repository).execute

raise DestroyError if result[:status] == :error

@@ -19,7 +19,7 @@ module ObjectPool

project.link_pool_repository

- Repositories::HousekeepingService.new(project).execute
+ ::Repositories::HousekeepingService.new(project).execute
end
end
end

@@ -14,7 +14,7 @@ module Projects
def perform(project_id)
@project = Project.find(project_id)

- service = Repositories::HousekeepingService.new(@project)
+ service = ::Repositories::HousekeepingService.new(@project)

service.execute do
import_failure_service.with_retry(action: 'delete_all_refs') do

@@ -26,7 +26,7 @@ module Projects
# import actually changed, so we increment the counter to avoid
# causing GC to run every time.
service.increment!
- rescue Repositories::HousekeepingService::LeaseTaken => e
+ rescue ::Repositories::HousekeepingService::LeaseTaken => e
::Import::Framework::Logger.info(
message: 'Project housekeeping failed',
project_full_path: @project.full_path,

@@ -19,7 +19,7 @@ module Repositories
user = User.find_by_id(args[:user_id])
return unless user

- result = Repositories::RewriteHistoryService.new(project, user).execute(
+ result = ::Repositories::RewriteHistoryService.new(project, user).execute(
blob_oids: args.fetch(:blob_oids, []),
redactions: args.fetch(:redactions, [])
)
@@ -133,6 +133,7 @@ To delete an on-demand scan:

> - Site profile features, scan method and file URL, were [enabled on GitLab.com and GitLab Self-Managed](https://gitlab.com/gitlab-org/gitlab/-/issues/345837) in GitLab 15.6.
> - GraphQL endpoint path feature was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/378692) in GitLab 15.7.
+ > - Additional variables [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/177703) in GitLab 17.9.

A site profile defines the attributes and configuration details of the deployed application,
website, or API to be scanned by DAST. A site profile can be referenced in `.gitlab-ci.yml` and

@@ -157,6 +158,7 @@ A site profile contains:
- **Scan method**: A type of method to perform API testing. The supported methods are OpenAPI, Postman Collections, HTTP Archive (HAR), or GraphQL.
- **GraphQL endpoint path**: The path to the GraphQL endpoint. This path is concatenated with the target URL to provide the URI for the scan to test. The GraphQL endpoint must support introspection queries.
- **File URL**: The URL of the OpenAPI, Postman Collection, or HTTP Archive file.
+ - **Additional variables**: A list of environment variables to configure specific scan behaviors. These variables provide the same configuration options as pipeline-based DAST scans, such as setting timeouts, adding an authentication success URL, or enabling advanced scan features.

When an API site type is selected, a host override is used to ensure the API being scanned is on the same host as the target. This is done to reduce the risk of running an active scan against the wrong API.

@@ -58,7 +58,7 @@ module Gitlab

# The initial fetch can bring in lots of loose refs and objects.
# Running a `git gc` will make importing pull requests faster.
- Repositories::HousekeepingService.new(project, :gc).execute
+ ::Repositories::HousekeepingService.new(project, :gc).execute

true
end

@@ -45,10 +45,10 @@ module Gitlab
message: %(Deleting existing "#{repository.disk_path}" to re-import it.)
)

- Repositories::DestroyService.new(repository).execute
+ ::Repositories::DestroyService.new(repository).execute

# Because Gitlab::Git::Repository#remove happens inside a run_after_commit
- # callback in the Repositories::DestroyService#execute we need to trigger
+ # callback in the ::Repositories::DestroyService#execute we need to trigger
# the callback.
repository.project.touch
end
@@ -2776,6 +2776,9 @@ msgstr ""
msgid "AbuseReport|Reported profile"
msgstr ""

+ msgid "AbuseReport|Reporter"
+ msgstr ""

msgid "AbuseReport|Screenshot of reported abuse"
msgstr ""

@@ -22703,6 +22706,9 @@ msgstr ""
msgid "Epic"
msgstr ""

+ msgid "Epic (Promote to group)"
+ msgstr ""

msgid "Epic Boards"
msgstr ""

@@ -64588,6 +64594,9 @@ msgstr ""
msgid "WorkItem|Epic"
msgstr ""

+ msgid "WorkItem|Epic will be moved to parent group %{groupName}."
+ msgstr ""

msgid "WorkItem|Existing %{workItemName}"
msgstr ""
@@ -74,11 +74,23 @@ class LintDocsRedirect
## The navigation.yaml equivalent is: ee/administration/appearance.html
##
def check_for_missing_nav_entry(file)
- file_sub = file["old_path"].gsub('doc', project_slug).gsub('index.md', '').gsub('.md', '.html')
+ file_sub = file["old_path"].gsub('doc', project_slug).gsub(/_?index\.md/, '').gsub('.md',
+ '.html')

result = navigation_file.include?(file_sub)
return unless result

+ # If we're here, the path exists in navigation
+ # Now check if this is a rename between index.md and _index.md
+ if renamed_doc_file?(file)
+ old_basename = File.basename(file['old_path'])
+ new_basename = File.basename(file['new_path'])
+ # Allow renames between index.md and _index.md
+ return if %w[index.md _index.md].include?(old_basename) &&
+ %w[index.md _index.md].include?(new_basename)
+ end

warning(file)

abort
@@ -654,7 +654,7 @@ RSpec.describe ProjectsController, feature_category: :groups_and_projects do

describe '#housekeeping' do
let_it_be(:group) { create(:group) }
- let(:housekeeping_service_dbl) { instance_double(Repositories::HousekeepingService) }
+ let(:housekeeping_service_dbl) { instance_double(::Repositories::HousekeepingService) }
let(:params) do
{
namespace_id: project.namespace.path,

@@ -665,7 +665,7 @@ RSpec.describe ProjectsController, feature_category: :groups_and_projects do

let(:prune) { nil }
let_it_be(:project) { create(:project, group: group) }
- let(:housekeeping) { Repositories::HousekeepingService.new(project) }
+ let(:housekeeping) { ::Repositories::HousekeepingService.new(project) }

subject { post :housekeeping, params: params }

@@ -674,7 +674,7 @@ RSpec.describe ProjectsController, feature_category: :groups_and_projects do
group.add_owner(user)
sign_in(user)

- allow(Repositories::HousekeepingService).to receive(:new).with(project, :eager).and_return(housekeeping)
+ allow(::Repositories::HousekeepingService).to receive(:new).with(project, :eager).and_return(housekeeping)
end

it 'forces a full garbage collection' do

@@ -707,7 +707,7 @@ RSpec.describe ProjectsController, feature_category: :groups_and_projects do
let(:prune) { true }

it 'enqueues pruning' do
- allow(Repositories::HousekeepingService).to receive(:new).with(project, :prune).and_return(housekeeping_service_dbl)
+ allow(::Repositories::HousekeepingService).to receive(:new).with(project, :prune).and_return(housekeeping_service_dbl)
expect(housekeeping_service_dbl).to receive(:execute)

subject

@@ -129,7 +129,7 @@ RSpec.describe 'Work item detail', :js, feature_category: :team_planning do
it 'change type action is not displayed' do
click_button _('More actions'), match: :first

- expect(find_by_testid('work-item-actions-dropdown')).to have_button(s_('WorkItem|Change type'))
+ expect(find_by_testid('work-item-actions-dropdown')).not_to have_button(s_('WorkItem|Change type'))
end
end
end
@@ -98,6 +98,7 @@ describe('DesignWidget', () => {
routeArg = MOCK_ROUTE,
uploadError = null,
uploadErrorVariant = ALERT_VARIANTS.danger,
+ canReorderDesign = true,
} = {}) {
wrapper = shallowMountExtended(DesignWidget, {
isLoggedIn: isLoggedIn(),

@@ -110,6 +111,7 @@ describe('DesignWidget', () => {
workItemId,
uploadError,
uploadErrorVariant,
+ canReorderDesign,
},
directives: {
GlTooltip: createMockDirective('gl-tooltip'),

@@ -163,6 +165,14 @@ describe('DesignWidget', () => {

it('renders VueDraggable component', () => {
expect(findVueDraggable().exists()).toBe(true);
+ expect(findVueDraggable().vm.$attrs.disabled).toBe(false);
});

+ it('renders VueDraggable component with dragging disabled when canReorderDesign prop is false', async () => {
+ await createComponent({ canReorderDesign: false });
+ await waitForPromises();
+
+ expect(findVueDraggable().vm.$attrs.disabled).toBe(true);
+ });

it('calls moveDesignMutation with correct parameters and reorders designs', async () => {
@@ -16,7 +16,7 @@ import WorkItemActions from '~/work_items/components/work_item_actions.vue';
import WorkItemAbuseModal from '~/work_items/components/work_item_abuse_modal.vue';
import WorkItemStateToggle from '~/work_items/components/work_item_state_toggle.vue';
import CreateWorkItemModal from '~/work_items/components/create_work_item_modal.vue';
- import WorkItemChangeTypeModal from '~/work_items/components/work_item_change_type_modal.vue';
+ import WorkItemChangeTypeModal from 'ee_else_ce/work_items/components/work_item_change_type_modal.vue';
import {
STATE_OPEN,
TEST_ID_CONFIDENTIALITY_TOGGLE_ACTION,

@@ -41,6 +41,7 @@ describe('WorkItemChangeTypeModal component', () => {
const convertWorkItemMutationSuccessHandler = jest
.fn()
.mockResolvedValue(convertWorkItemMutationResponse);

const graphqlError = 'GraphQL error';
const convertWorkItemMutationErrorResponse = {
errors: [

@@ -52,6 +53,7 @@ describe('WorkItemChangeTypeModal component', () => {
workItemConvert: null,
},
};

const noDesignQueryHandler = jest.fn().mockResolvedValue(designCollectionResponse([]));
const oneDesignQueryHandler = jest.fn().mockResolvedValue(designCollectionResponse([mockDesign]));

@@ -63,6 +65,7 @@ describe('WorkItemChangeTypeModal component', () => {
widgets = [],
workItemType = WORK_ITEM_TYPE_VALUE_TASK,
convertWorkItemMutationHandler = convertWorkItemMutationSuccessHandler,
+ designQueryHandler = noDesignQueryHandler,
} = {}) => {
wrapper = mountExtended(WorkItemChangeTypeModal, {

@@ -74,6 +77,7 @@ describe('WorkItemChangeTypeModal component', () => {
propsData: {
workItemId: 'gid://gitlab/WorkItem/1',
fullPath: 'gitlab-org/gitlab-test',
+ workItemIid: '1',
hasParent,
hasChildren,
widgets,

@@ -98,7 +102,6 @@ describe('WorkItemChangeTypeModal component', () => {
const findChangeTypeModal = () => wrapper.findComponent(GlModal);
const findGlFormSelect = () => wrapper.findComponent(GlFormSelect);
const findWarningAlert = () => wrapper.findByTestId('change-type-warning-message');
- const findErrorAlert = () => wrapper.findByTestId('change-type-error-message');

beforeEach(async () => {
createComponent();

@@ -137,6 +140,7 @@ describe('WorkItemChangeTypeModal component', () => {
findGlFormSelect().vm.$emit('change', WORK_ITEM_TYPE_ENUM_KEY_RESULT);

await nextTick();
+ await waitForPromises();

expect(findWarningAlert().text()).toBe(
'Parent item type issue is not supported on key result. Remove the parent item to change type.',

@@ -151,6 +155,7 @@ describe('WorkItemChangeTypeModal component', () => {
findGlFormSelect().vm.$emit('change', WORK_ITEM_TYPE_ENUM_KEY_RESULT);

await nextTick();
+ await waitForPromises();

expect(findWarningAlert().text()).toBe(
'Key result does not support the task child item types. Remove child items to change type.',

@@ -226,9 +231,9 @@ describe('WorkItemChangeTypeModal component', () => {
});

it.each`
- errorType | expectedErrorMessage | failureHandler
- ${'graphql error'} | ${graphqlError} | ${jest.fn().mockResolvedValue(convertWorkItemMutationErrorResponse)}
- ${'network error'} | ${'Error: Network error'} | ${jest.fn().mockRejectedValue(new Error('Network error'))}
+ errorType | expectedErrorMessage | failureHandler
+ ${'graphql error'} | ${graphqlError} | ${jest.fn().mockResolvedValue(convertWorkItemMutationErrorResponse)}
+ ${'network error'} | ${'Network error'} | ${jest.fn().mockRejectedValue(new Error('Network error'))}
`(
'emits an error when there is a $errorType',
async ({ expectedErrorMessage, failureHandler }) => {

@@ -246,7 +251,7 @@ describe('WorkItemChangeTypeModal component', () => {

await waitForPromises();

- expect(findErrorAlert().text()).toContain(expectedErrorMessage);
+ expect(wrapper.emitted('error')[0][0]).toEqual(expectedErrorMessage);
},
);
});
@@ -162,6 +162,7 @@ describe('WorkItemDescription', () => {
id: 'gid://gitlab/Group/24',
fullPath: 'gitlab-org',
name: 'Gitlab Org',
+ fullName: 'Gitlab Org',
__typename: 'Namespace',
},
},

@@ -88,7 +88,9 @@ describe('WorkItemDetail component', () => {
.mockResolvedValue(mockProjectPermissionsQueryResponse());
const workspacePermissionsNotAllowedHandler = jest
.fn()
- .mockResolvedValue(mockProjectPermissionsQueryResponse({ createDesign: false }));
+ .mockResolvedValue(
+ mockProjectPermissionsQueryResponse({ createDesign: false, moveDesign: false }),
+ );
const uploadSuccessDesignMutationHandler = jest
.fn()
.mockResolvedValue(mockUploadDesignMutationResponse);
@@ -200,7 +200,7 @@ describe('WorkItemLinksForm', () => {
expect(wrapper.emitted('update-in-progress')[1]).toEqual([false]);
});

- it('creates child task in non confidential parent and closes the form', async () => {
+ it('creates child task in non confidential parent', async () => {
submitForm({ title: 'Create task test' });

expect(wrapper.emitted('update-in-progress')).toEqual([[true]]);

@@ -219,7 +219,6 @@ describe('WorkItemLinksForm', () => {
},
});
expect(wrapper.emitted('addChild')).toEqual([[]]);
- expect(wrapper.emitted('cancel')).toEqual([[]]);
expect(wrapper.emitted('update-in-progress')[1]).toEqual([false]);
});

@@ -265,7 +264,7 @@ describe('WorkItemLinksForm', () => {
expect(findWorkItemTokenInput().exists()).toBe(false);
});

- it('creates child issue in non confidential parent and closes the form', async () => {
+ it('creates child issue in non confidential parent', async () => {
submitForm({ title: 'Create issue test', fullPath: projectData[0].fullPath });

expect(wrapper.emitted('update-in-progress')).toEqual([[true]]);

@@ -285,7 +284,6 @@ describe('WorkItemLinksForm', () => {
});
expect(wrapper.emitted('addChild')).toEqual([[]]);
expect(wrapper.emitted('update-in-progress')[1]).toEqual([false]);
- expect(wrapper.emitted('cancel')).toEqual([[]]);
});

it('creates child issue in confidential parent', async () => {

@@ -459,7 +457,7 @@ describe('WorkItemLinksForm', () => {
});
});

- it('selects, adds children and closes the form', async () => {
+ it('selects and adds children', async () => {
await selectAvailableWorkItemTokens();

expect(findAddChildButton().text()).toBe('Add tasks');

@@ -473,7 +471,6 @@ describe('WorkItemLinksForm', () => {
await waitForPromises();

expect(updateMutationResolver).toHaveBeenCalled();
- expect(wrapper.emitted('cancel')).toEqual([[]]);
});

it('shows validation error when non-confidential child items are being added to confidential parent', async () => {
@@ -198,6 +198,7 @@ export const workItemQueryResponse = {
id: '1',
fullPath: 'test-project-path',
name: 'Project name',
+ fullName: 'Group name',
},
workItemType: {
__typename: 'WorkItemType',

@@ -319,6 +320,7 @@ export const updateWorkItemMutationResponse = {
id: '1',
fullPath: 'test-project-path',
name: 'Project name',
+ fullName: 'Group name',
},
workItemType: {
__typename: 'WorkItemType',

@@ -454,6 +456,7 @@ export const convertWorkItemMutationResponse = {
id: '1',
fullPath: 'test-project-path',
name: 'Project name',
+ fullName: 'Group name',
},
workItemType: {
__typename: 'WorkItemType',

@@ -1397,6 +1400,7 @@ export const workItemResponseFactory = ({
id: '1',
fullPath: 'test-project-path',
name: 'Project name',
+ fullName: 'Group name',
},
workItemType,
userPermissions: {

@@ -1764,6 +1768,7 @@ export const createWorkItemMutationResponse = {
id: '1',
fullPath: 'test-project-path',
name: 'Project name',
+ fullName: 'Group name',
},
workItemType: {
__typename: 'WorkItemType',

@@ -2342,6 +2347,7 @@ export const workItemObjectiveWithChild = {
id: '1',
fullPath: 'test-project-path',
name: 'Project name',
+ fullName: 'Group name',
},
userPermissions: {
deleteWorkItem: true,

@@ -2431,6 +2437,7 @@ export const workItemObjectiveWithoutChild = {
id: '1',
fullPath: 'test-project-path',
name: 'Project name',
+ fullName: 'Group name',
},
userPermissions: {
deleteWorkItem: true,

@@ -3028,6 +3035,7 @@ export const changeWorkItemParentMutationResponse = {
id: '1',
fullPath: 'test-project-path',
name: 'Project name',
+ fullName: 'Group name',
},
reference: 'test-project-path#2',
createNoteEmail:

@@ -5452,6 +5460,7 @@ export const createWorkItemQueryResponse = {
id: 'full-path-epic-id',
fullPath: 'full-path',
name: 'Gitlab Org',
+ fullName: 'Group name',
__typename: 'Namespace',
},
author: {

@@ -5803,12 +5812,16 @@ export const mockUserPreferences = (useWorkItemsView = true) => ({
},
});

- export const mockProjectPermissionsQueryResponse = ({ createDesign = true } = {}) => ({
+ export const mockProjectPermissionsQueryResponse = ({
+ createDesign = true,
+ moveDesign = true,
+ } = {}) => ({
data: {
workspace: {
id: 'gid://gitlab/Project/1',
userPermissions: {
createDesign,
+ moveDesign,
__typename: 'ProjectPermissions',
},
__typename: 'Project',
@@ -89,9 +89,9 @@ RSpec.describe BulkImports::Projects::Pipelines::RepositoryPipeline, feature_cat

describe '#after_run' do
it 'executes housekeeping service after import' do
- service = instance_double(Repositories::HousekeepingService)
+ service = instance_double(::Repositories::HousekeepingService)

- expect(Repositories::HousekeepingService).to receive(:new).with(context.portable, :gc).and_return(service)
+ expect(::Repositories::HousekeepingService).to receive(:new).with(context.portable, :gc).and_return(service)
expect(service).to receive(:execute)

pipeline.after_run(context)

@@ -208,7 +208,7 @@ RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter, feature_categ
expect(importer).to receive(:validate_repository_size!)

service = double
- expect(Repositories::HousekeepingService)
+ expect(::Repositories::HousekeepingService)
.to receive(:new).with(project, :gc).and_return(service)
expect(service).to receive(:execute)

@@ -46,7 +46,7 @@ RSpec.describe Gitlab::ImportExport::RepoRestorer do
allow(project.repository).to receive(:exists?).and_return(true)
allow(project.repository).to receive(:disk_path).and_return('repository_path')

- expect_next_instance_of(Repositories::DestroyService) do |instance|
+ expect_next_instance_of(::Repositories::DestroyService) do |instance|
expect(instance).to receive(:execute).and_call_original
end

@@ -1611,7 +1611,7 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
it 'removes deployment without any errors' do
deployment = create(:deployment, environment: environment, project: project)

- Repositories::DestroyService.new(project.repository).execute
+ ::Repositories::DestroyService.new(project.repository).execute
project.save! # to trigger a repository removal

expect { described_class.where(id: deployment).fast_destroy_all }

@@ -18,7 +18,7 @@ RSpec.describe "projectBlobsRemove", feature_category: :source_code_management d

describe 'Removing blobs:' do
it 'processes text redaction asynchoronously' do
- expect(Repositories::RewriteHistoryWorker).to receive(:perform_async).with(
+ expect(::Repositories::RewriteHistoryWorker).to receive(:perform_async).with(
project_id: project.id, user_id: current_user.id, blob_oids: blob_oids, redactions: []
)

@@ -32,7 +32,7 @@ RSpec.describe "projectTextReplace", feature_category: :source_code_management d

describe 'Replacing text' do
it 'processes text redaction asynchoronously' do
- expect(Repositories::RewriteHistoryWorker).to receive(:perform_async).with(
+ expect(::Repositories::RewriteHistoryWorker).to receive(:perform_async).with(
project_id: project.id, user_id: current_user.id, redactions: literal_replacements, blob_oids: []
)

@@ -5700,14 +5700,14 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
end

describe 'POST /projects/:id/housekeeping' do
- let(:housekeeping) { Repositories::HousekeepingService.new(project) }
+ let(:housekeeping) { ::Repositories::HousekeepingService.new(project) }
let(:params) { {} }
let(:path) { "/projects/#{project.id}/housekeeping" }

subject(:request) { post api(path, user), params: params }

before do
- allow(Repositories::HousekeepingService).to receive(:new).with(project, :eager).and_return(housekeeping)
+ allow(::Repositories::HousekeepingService).to receive(:new).with(project, :eager).and_return(housekeeping)
end

context 'when authenticated as owner' do

@@ -5736,7 +5736,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
let(:params) { { task: :prune } }

it 'triggers a prune' do
- expect(Repositories::HousekeepingService).to receive(:new).with(project, :prune).and_return(housekeeping)
+ expect(::Repositories::HousekeepingService).to receive(:new).with(project, :prune).and_return(housekeeping)
expect(housekeeping).to receive(:execute).once

request

@@ -5749,7 +5749,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
let(:params) { { task: :unsupported_task } }

it 'responds with bad_request' do
- expect(Repositories::HousekeepingService).not_to receive(:new)
+ expect(::Repositories::HousekeepingService).not_to receive(:new)

request

@@ -5759,7 +5759,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and

context 'when housekeeping lease is taken' do
it 'returns conflict' do
- expect(housekeeping).to receive(:execute).once.and_raise(Repositories::HousekeepingService::LeaseTaken)
+ expect(housekeeping).to receive(:execute).once.and_raise(::Repositories::HousekeepingService::LeaseTaken)

request
@ -808,10 +808,10 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do
|
|||
|
||||
describe 'GET /projects/:id/repository/changelog' do
|
||||
 it 'generates the changelog for a version' do
-  spy = instance_spy(Repositories::ChangelogService)
+  spy = instance_spy(::Repositories::ChangelogService)
   release_notes = 'Release notes'

-  allow(Repositories::ChangelogService)
+  allow(::Repositories::ChangelogService)
     .to receive(:new)
     .with(
       project,

@@ -842,10 +842,10 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do
 end

 it 'returns generated changelog when using JOB-TOKEN auth' do
-  spy = instance_spy(Repositories::ChangelogService)
+  spy = instance_spy(::Repositories::ChangelogService)
   release_notes = 'Release notes'

-  allow(Repositories::ChangelogService)
+  allow(::Repositories::ChangelogService)
     .to receive(:new)
     .with(
       project,

@@ -877,9 +877,9 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do
 end

 it 'supports leaving out the from and to attribute' do
-  spy = instance_spy(Repositories::ChangelogService)
+  spy = instance_spy(::Repositories::ChangelogService)

-  allow(Repositories::ChangelogService)
+  allow(::Repositories::ChangelogService)
     .to receive(:new)
     .with(
       project,

@@ -906,9 +906,9 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do
 end

 it 'supports specified config file path' do
-  spy = instance_spy(Repositories::ChangelogService)
+  spy = instance_spy(::Repositories::ChangelogService)

-  expect(Repositories::ChangelogService)
+  expect(::Repositories::ChangelogService)
     .to receive(:new)
     .with(
       project,

@@ -962,9 +962,9 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do

 describe 'POST /projects/:id/repository/changelog' do
   it 'generates the changelog for a version' do
-    spy = instance_spy(Repositories::ChangelogService)
+    spy = instance_spy(::Repositories::ChangelogService)

-    allow(Repositories::ChangelogService)
+    allow(::Repositories::ChangelogService)
       .to receive(:new)
       .with(
         project,

@@ -1000,9 +1000,9 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do
 end

 it 'supports leaving out the from and to attribute' do
-  spy = instance_spy(Repositories::ChangelogService)
+  spy = instance_spy(::Repositories::ChangelogService)

-  allow(Repositories::ChangelogService)
+  allow(::Repositories::ChangelogService)
     .to receive(:new)
     .with(
       project,

@@ -1034,9 +1034,9 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do
 end

 it 'produces an error when generating the changelog fails' do
-  spy = instance_spy(Repositories::ChangelogService)
+  spy = instance_spy(::Repositories::ChangelogService)

-  allow(Repositories::ChangelogService)
+  allow(::Repositories::ChangelogService)
     .to receive(:new)
     .with(
       project,

@@ -1075,9 +1075,9 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do
 end

 it "support specified config file path" do
-  spy = instance_spy(Repositories::ChangelogService)
+  spy = instance_spy(::Repositories::ChangelogService)

-  expect(Repositories::ChangelogService)
+  expect(::Repositories::ChangelogService)
     .to receive(:new)
     .with(
       project,
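Every hunk in this spec file makes the same substitution: the bare constant Repositories::ChangelogService becomes ::Repositories::ChangelogService, which anchors constant lookup at the top level instead of letting Ruby search the lexical scope first. A minimal sketch of the ambiguity this guards against, using hypothetical constants rather than the real GitLab namespaces:

# Illustration only: every constant here is hypothetical.
module Repositories
  class ChangelogService; end            # stands in for the top-level service
end

module API
  module Repositories
    class ChangelogService; end          # a nested constant sharing the name
  end

  class Demo
    def implicit
      Repositories::ChangelogService     # lexical lookup finds API::Repositories first
    end

    def explicit
      ::Repositories::ChangelogService   # always resolves to the top-level class
    end
  end
end

puts API::Demo.new.implicit # => API::Repositories::ChangelogService
puts API::Demo.new.explicit # => Repositories::ChangelogService

RSpec describe blocks do not change Module.nesting, so whether the bare name is actually ambiguous depends on the constants loaded around these specs; the explicit :: prefix simply removes the question.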
@@ -201,10 +201,10 @@ RSpec.describe Git::ProcessRefChangesService, feature_category: :source_code_man
 end

 describe "housekeeping", :clean_gitlab_redis_cache, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
-  let(:housekeeping) { Repositories::HousekeepingService.new(project) }
+  let(:housekeeping) { ::Repositories::HousekeepingService.new(project) }

   before do
-    allow(Repositories::HousekeepingService).to receive(:new).and_return(housekeeping)
+    allow(::Repositories::HousekeepingService).to receive(:new).and_return(housekeeping)

     allow(push_service_class)
       .to receive(:new)
@@ -283,12 +283,12 @@ RSpec.describe Git::WikiPushService, :services, feature_category: :wiki do
 end

 describe '#perform_housekeeping', :clean_gitlab_redis_shared_state do
-  let(:housekeeping) { Repositories::HousekeepingService.new(wiki) }
+  let(:housekeeping) { ::Repositories::HousekeepingService.new(wiki) }

   subject { create_service(current_sha).execute }

   before do
-    allow(Repositories::HousekeepingService).to receive(:new).and_return(housekeeping)
+    allow(::Repositories::HousekeepingService).to receive(:new).and_return(housekeeping)
   end

   it 'does not perform housekeeping when not needed' do
@@ -110,7 +110,7 @@ RSpec.describe MergeRequests::CleanupRefsService, feature_category: :code_review

 context 'when repository no longer exists' do
   before do
-    Repositories::DestroyService.new(merge_request.project.repository).execute
+    ::Repositories::DestroyService.new(merge_request.project.repository).execute
   end

   it 'does not fail and still mark schedule as complete' do
@@ -331,7 +331,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
 before do
   allow(project.repository).to receive(:before_delete).and_raise(::Gitlab::Git::CommandError)
   allow(Gitlab::GitLogger).to receive(:warn).with(
-    class: Repositories::DestroyService.name,
+    class: ::Repositories::DestroyService.name,
     container_id: project.id,
     disk_path: project.disk_path,
     message: 'Gitlab::Git::CommandError').and_call_original

@@ -586,7 +586,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
 # 3. Design repository

 it 'Repositories::DestroyService is called for existing repos' do
-  expect_next_instances_of(Repositories::DestroyService, 3) do |instance|
+  expect_next_instances_of(::Repositories::DestroyService, 3) do |instance|
     expect(instance).to receive(:execute).and_return(status: :success)
   end

@@ -596,7 +596,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
 context 'when the removal has errors' do
   using RSpec::Parameterized::TableSyntax

-  let(:mock_error) { instance_double(Repositories::DestroyService, execute: { message: 'foo', status: :error }) }
+  let(:mock_error) { instance_double(::Repositories::DestroyService, execute: { message: 'foo', status: :error }) }
   let(:project_repository) { project.repository }
   let(:wiki_repository) { project.wiki.repository }
   let(:design_repository) { project.design_repository }

@@ -609,8 +609,8 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi

 with_them do
   before do
-    allow(Repositories::DestroyService).to receive(:new).with(anything).and_call_original
-    allow(Repositories::DestroyService).to receive(:new).with(repo).and_return(mock_error)
+    allow(::Repositories::DestroyService).to receive(:new).with(anything).and_call_original
+    allow(::Repositories::DestroyService).to receive(:new).with(repo).and_return(mock_error)
   end

   it 'raises correct error' do
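The with_them hunk above relies on a standard rspec-mocks pattern: .new stays callable for any argument, but returns a canned failing double for one specific repository. A self-contained sketch of that pattern with placeholder classes (not the real GitLab services):

require 'rspec/autorun'

# Placeholder service standing in for ::Repositories::DestroyService.
class DestroyService
  def initialize(repo)
    @repo = repo
  end

  def execute
    { status: :success }
  end
end

RSpec.describe DestroyService do
  let(:good_repo)  { :wiki_repo }
  let(:bad_repo)   { :project_repo }
  let(:mock_error) { instance_double(DestroyService, execute: { message: 'foo', status: :error }) }

  before do
    # The broad stub keeps real behaviour; the narrower stub declared afterwards
    # catches calls made with bad_repo and returns the canned failure.
    allow(DestroyService).to receive(:new).with(anything).and_call_original
    allow(DestroyService).to receive(:new).with(bad_repo).and_return(mock_error)
  end

  it 'fails only for the stubbed repository' do
    expect(DestroyService.new(good_repo).execute).to eq({ status: :success })
    expect(DestroyService.new(bad_repo).execute).to eq({ message: 'foo', status: :error })
  end
end

Because instance_double is a verifying double, the canned object only accepts messages that DestroyService actually defines, which keeps the error-path test honest.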
@@ -170,7 +170,7 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour

 expect do
   subject.execute
-end.to raise_error(Repositories::ReplicateService::Error, /Failed to verify project repository checksum/)
+end.to raise_error(::Repositories::ReplicateService::Error, /Failed to verify project repository checksum/)

 expect(project).not_to be_repository_read_only
 expect(project.repository_storage).to eq(storage_source)

@@ -361,7 +361,7 @@ RSpec.describe Projects::UpdateRepositoryStorageService, feature_category: :sour

 expect do
   subject.execute
-end.to raise_error(Repositories::ReplicateService::Error, /Failed to verify object_pool repository/)
+end.to raise_error(::Repositories::ReplicateService::Error, /Failed to verify object_pool repository/)

 project.reload
@@ -226,10 +226,10 @@ RSpec.describe Repositories::ChangelogService, feature_category: :source_code_ma
 service = described_class
   .new(project, user, version: '1.0.0', to: 'bar')

-finder_spy = instance_spy(Repositories::ChangelogTagFinder)
+finder_spy = instance_spy(::Repositories::ChangelogTagFinder)
 tag = double(:tag, target_commit: double(:commit, id: '123'))

-allow(Repositories::ChangelogTagFinder)
+allow(::Repositories::ChangelogTagFinder)
   .to receive(:new)
   .with(project, regex: an_instance_of(String))
   .and_return(finder_spy)

@@ -246,9 +246,9 @@ RSpec.describe Repositories::ChangelogService, feature_category: :source_code_ma
 service = described_class
   .new(project, user, version: '1.0.0', to: 'bar')

-finder_spy = instance_spy(Repositories::ChangelogTagFinder)
+finder_spy = instance_spy(::Repositories::ChangelogTagFinder)

-allow(Repositories::ChangelogTagFinder)
+allow(::Repositories::ChangelogTagFinder)
   .to receive(:new)
   .with(project, regex: an_instance_of(String))
   .and_return(finder_spy)
@@ -156,7 +156,7 @@ RSpec.describe Repositories::RewriteHistoryService, feature_category: :source_co
 let(:redactions) { ['p455w0rd'] }

 it 'triggers a RewriteHistoryWorker job' do
-  expect(Repositories::RewriteHistoryWorker).to receive(:perform_async).with(
+  expect(::Repositories::RewriteHistoryWorker).to receive(:perform_async).with(
     project_id: project.id, user_id: user.id, blob_oids: blob_oids, redactions: redactions
   )
@@ -22,8 +22,8 @@ RSpec.describe Snippets::BulkDestroyService, feature_category: :source_code_mana
 it 'deletes the snippets in bulk' do
   response = nil

-  expect(Repositories::DestroyService).to receive(:new).with(personal_snippet.repository).and_call_original
-  expect(Repositories::DestroyService).to receive(:new).with(project_snippet.repository).and_call_original
+  expect(::Repositories::DestroyService).to receive(:new).with(personal_snippet.repository).and_call_original
+  expect(::Repositories::DestroyService).to receive(:new).with(project_snippet.repository).and_call_original

   aggregate_failures do
     expect do

@@ -86,7 +86,7 @@ RSpec.describe Snippets::BulkDestroyService, feature_category: :source_code_mana

 context 'when an error is raised deleting the repository' do
   before do
-    allow_next_instance_of(Repositories::DestroyService) do |instance|
+    allow_next_instance_of(::Repositories::DestroyService) do |instance|
       allow(instance).to receive(:execute).and_return({ status: :error })
     end
   end

@@ -112,9 +112,9 @@ RSpec.describe Snippets::BulkDestroyService, feature_category: :source_code_mana
 it 'returns success' do
   response = nil

-  expect(Repositories::DestroyService).to receive(:new).with(personal_snippet.repository).and_call_original
-  expect(Repositories::DestroyService).to receive(:new).with(project_snippet.repository).and_call_original
-  expect(Repositories::DestroyService).to receive(:new).with(snippet_without_repo.repository).and_call_original
+  expect(::Repositories::DestroyService).to receive(:new).with(personal_snippet.repository).and_call_original
+  expect(::Repositories::DestroyService).to receive(:new).with(project_snippet.repository).and_call_original
+  expect(::Repositories::DestroyService).to receive(:new).with(snippet_without_repo.repository).and_call_original

   expect do
     response = subject.execute
@@ -41,7 +41,7 @@ RSpec.describe Snippets::DestroyService, feature_category: :source_code_manageme
 shared_examples 'deletes the snippet repository' do
   it 'removes the snippet repository' do
     expect(snippet.repository.exists?).to be_truthy
-    expect_next_instance_of(Repositories::DestroyService) do |instance|
+    expect_next_instance_of(::Repositories::DestroyService) do |instance|
       expect(instance).to receive(:execute).and_call_original
     end

@@ -50,7 +50,7 @@ RSpec.describe Snippets::DestroyService, feature_category: :source_code_manageme

 context 'when the repository deletion service raises an error' do
   before do
-    allow_next_instance_of(Repositories::DestroyService) do |instance|
+    allow_next_instance_of(::Repositories::DestroyService) do |instance|
       allow(instance).to receive(:execute).and_return({ status: :error })
     end
   end

@@ -70,7 +70,7 @@ RSpec.describe Snippets::DestroyService, feature_category: :source_code_manageme
 it 'does not schedule anything and return success' do
   allow(snippet).to receive(:repository).and_return(nil)

-  expect_next_instance_of(Repositories::DestroyService) do |instance|
+  expect_next_instance_of(::Repositories::DestroyService) do |instance|
     expect(instance).to receive(:execute).and_call_original
   end

@@ -150,7 +150,7 @@ RSpec.describe Snippets::DestroyService, feature_category: :source_code_manageme
 expect(snippet.repository).not_to be_nil
 expect(snippet.repository.exists?).to be_falsey

-expect_next_instance_of(Repositories::DestroyService) do |instance|
+expect_next_instance_of(::Repositories::DestroyService) do |instance|
   expect(instance).to receive(:execute).and_call_original
 end
@@ -109,7 +109,7 @@ RSpec.describe Snippets::UpdateRepositoryStorageService, feature_category: :sour

 expect do
   subject.execute
-end.to raise_error(Repositories::ReplicateService::Error, /Failed to verify snippet repository checksum from \w+ to not matching checksum/)
+end.to raise_error(::Repositories::ReplicateService::Error, /Failed to verify snippet repository checksum from \w+ to not matching checksum/)

 expect(snippet).not_to be_repository_read_only
 expect(snippet.repository_storage).to eq('default')
@@ -162,7 +162,7 @@ RSpec.shared_examples 'model with repository' do
 end

 describe '#after_repository_change_head' do
-  let(:event) { instance_double('Repositories::DefaultBranchChangedEvent') }
+  let(:event) { instance_double('::Repositories::DefaultBranchChangedEvent') }
   let(:event_data) { { container_id: stubbed_container.id, container_type: stubbed_container.class.name } }

   it 'calls #reload_default_branch' do

@@ -172,7 +172,7 @@ RSpec.shared_examples 'model with repository' do
 end

 it 'publishes an Repositories::DefaultBranchChangedEvent event' do
-  allow(Repositories::DefaultBranchChangedEvent)
+  allow(::Repositories::DefaultBranchChangedEvent)
     .to receive(:new)
     .with(data: event_data)
     .and_return(event)
@@ -9,7 +9,7 @@ end
 RSpec.shared_examples 'LFS http 200 blob response' do
   it_behaves_like 'LFS http expected response code and message' do
     let(:response_code) { :ok }
-    let(:content_type) { Repositories::LfsApiController::LFS_TRANSFER_CONTENT_TYPE }
+    let(:content_type) { ::Repositories::LfsApiController::LFS_TRANSFER_CONTENT_TYPE }
     let(:response_headers) { { 'X-Sendfile' => lfs_object.file.path } }
   end
 end
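The LFS hunk above shows a shared example that includes another shared example and feeds it the values it asserts on through let definitions. A small self-contained sketch of the same mechanism with generic names (not the actual GitLab shared examples):

require 'rspec/autorun'

RSpec.shared_examples 'http expected response code and content type' do
  it 'returns the expected code and content type' do
    expect(response[:code]).to eq(response_code)
    expect(response[:content_type]).to eq(content_type)
  end
end

RSpec.describe 'a blob endpoint' do
  # Stand-in for the controller response the real specs build.
  let(:response) { { code: :ok, content_type: 'application/octet-stream' } }

  it_behaves_like 'http expected response code and content type' do
    let(:response_code) { :ok }
    let(:content_type)  { 'application/octet-stream' }
  end
end

The block passed to it_behaves_like creates a nested example group, so those let definitions supply or override whatever the shared example expects without touching the outer context.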
@@ -139,7 +139,7 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|

 expect do
   subject.execute
-end.to raise_error(Repositories::ReplicateService::Error, /Failed to verify \w+ repository checksum from \w+ to not matching checksum/)
+end.to raise_error(::Repositories::ReplicateService::Error, /Failed to verify \w+ repository checksum from \w+ to not matching checksum/)

 expect(project).not_to be_repository_read_only
 expect(project.repository_storage).to eq('default')
@@ -40,19 +40,19 @@ RSpec.shared_examples 'housekeeps repository' do
 it 'does not enqueue a job' do
   expect(resource.git_garbage_collect_worker_klass).not_to receive(:perform_async)

-  expect { subject.execute }.to raise_error(Repositories::HousekeepingService::LeaseTaken)
+  expect { subject.execute }.to raise_error(::Repositories::HousekeepingService::LeaseTaken)
 end

 it 'does not reset pushes_since_gc' do
   expect do
-    expect { subject.execute }.to raise_error(Repositories::HousekeepingService::LeaseTaken)
+    expect { subject.execute }.to raise_error(::Repositories::HousekeepingService::LeaseTaken)
   end.not_to change { resource.pushes_since_gc }
 end

 it 'does not yield' do
   expect do |block|
     expect { subject.execute(&block) }
-      .to raise_error(Repositories::HousekeepingService::LeaseTaken)
+      .to raise_error(::Repositories::HousekeepingService::LeaseTaken)
   end.not_to yield_with_no_args
 end
 end
@@ -13,7 +13,7 @@ RSpec.describe Projects::AfterImportWorker, feature_category: :importers do

 describe '#execute' do
   before do
-    allow(Repositories::HousekeepingService)
+    allow(::Repositories::HousekeepingService)
       .to receive(:new).with(project).and_return(housekeeping_service)

     allow(housekeeping_service)

@@ -71,10 +71,10 @@ RSpec.describe Projects::AfterImportWorker, feature_category: :importers do
 end

 context 'when housekeeping service lease is taken' do
-  let(:exception) { Repositories::HousekeepingService::LeaseTaken.new }
+  let(:exception) { ::Repositories::HousekeepingService::LeaseTaken.new }

   it 'logs the error message' do
-    allow_next_instance_of(Repositories::HousekeepingService) do |instance|
+    allow_next_instance_of(::Repositories::HousekeepingService) do |instance|
       expect(instance).to receive(:execute).and_raise(exception)
     end
@@ -23,7 +23,7 @@ RSpec.describe Repositories::RewriteHistoryWorker, feature_category: :source_cod
 end

 it 'executes RewriteHistoryService service' do
-  allow_next_instance_of(Repositories::RewriteHistoryService) do |instance|
+  allow_next_instance_of(::Repositories::RewriteHistoryService) do |instance|
     expect(instance).to receive(:execute).with(blob_oids: blob_oids, redactions: []).and_call_original
   end

@@ -34,7 +34,7 @@ RSpec.describe Repositories::RewriteHistoryWorker, feature_category: :source_cod
 let(:project_id) { non_existing_record_id }

 it 'skips the execution' do
-  expect(Repositories::RewriteHistoryService).not_to receive(:new)
+  expect(::Repositories::RewriteHistoryService).not_to receive(:new)
   perform
 end
 end

@@ -43,14 +43,14 @@ RSpec.describe Repositories::RewriteHistoryWorker, feature_category: :source_cod
 let(:user_id) { non_existing_record_id }

 it 'skips the execution' do
-  expect(Repositories::RewriteHistoryService).not_to receive(:new)
+  expect(::Repositories::RewriteHistoryService).not_to receive(:new)
   perform
 end
 end

 describe 'Emails' do
   before do
-    allow_next_instance_of(Repositories::RewriteHistoryService) do |instance|
+    allow_next_instance_of(::Repositories::RewriteHistoryService) do |instance|
       allow(instance).to receive(:execute).and_return(response)
     end
   end
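Many of the hunks above go through GitLab's allow_next_instance_of and expect_next_instance_of spec helpers instead of stubbing a class method directly. The real helpers live in the GitLab spec support code; as a rough approximation only, they wrap receive(:new) so the freshly built instance can be stubbed before the code under test uses it:

# Approximate sketch, not GitLab's implementation: the real helper also limits
# the stub to the next instance, while this simplified version affects every
# instance created during the example.
module NextInstanceOfSketch
  def allow_next_instance_of(klass)
    allow(klass).to receive(:new).and_wrap_original do |original, *args, &blk|
      instance = original.call(*args, &blk)
      yield(instance) if block_given? # caller stubs methods on the new instance
      instance
    end
  end
end

RSpec.configure { |config| config.include NextInstanceOfSketch }

With a helper along these lines, allow_next_instance_of(::Repositories::RewriteHistoryService) { |instance| allow(instance).to receive(:execute) } intercepts the instance the worker is about to build, which is the shape of the stubs in the hunks above.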