Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent b59341a15a
commit 77ae95a49e

@@ -532,8 +532,7 @@
    - name: clickhouse/clickhouse-server:23.11.3.23-alpine
      alias: clickhouse
      variables:
        CLICKHOUSE_USER: clickhouse
        CLICKHOUSE_PASSWORD: clickhouse
        CLICKHOUSE_USER: default
        CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT: 1
        CLICKHOUSE_DB: gitlab_clickhouse_test
@@ -2001,7 +2001,7 @@
      when: never
    - <<: *if-merge-request-targeting-stable-branch
      when: never
    - <<: *if-default-refs
    - <<: *if-not-merge-request
      when: never
    - !reference [".rails:rules:ee-and-foss-migration", rules]
@@ -1,5 +1,6 @@
<script>
import Tracking from '~/tracking';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import setSelectedBoardItemsMutation from '~/boards/graphql/client/set_selected_board_items.mutation.graphql';
import unsetSelectedBoardItemsMutation from '~/boards/graphql/client/unset_selected_board_items.mutation.graphql';
import selectedBoardItemsQuery from '~/boards/graphql/client/selected_board_items.query.graphql';
@@ -13,7 +14,7 @@ export default {
    BoardCardInner,
  },
  mixins: [Tracking.mixin()],
  inject: ['disabled', 'isIssueBoard'],
  inject: ['disabled', 'isIssueBoard', 'isEpicBoard'],
  props: {
    list: {
      type: Object,
@@ -98,6 +99,16 @@ export default {
    showFocusBackground() {
      return !this.isActive && !this.multiSelectVisible;
    },
    itemPrefix() {
      return this.isEpicBoard ? '&' : '#';
    },
    itemReferencePath() {
      const { referencePath } = this.item;
      return referencePath.split(this.itemPrefix)[0];
    },
    boardItemUniqueId() {
      return `listItem-${this.itemReferencePath}/${getIdFromGraphQLId(this.item.id)}`;
    },
  },
  methods: {
    toggleIssue(e) {
@@ -222,6 +233,7 @@ export default {
    class="board-card gl-border gl-relative gl-mb-3 gl-rounded-base gl-border-section gl-bg-section gl-leading-normal hover:gl-bg-subtle dark:hover:gl-bg-gray-200"
  >
    <button
      :id="boardItemUniqueId"
      :class="[
        {
          'focus:gl-bg-subtle dark:focus:gl-bg-gray-200': showFocusBackground,
@@ -125,7 +125,6 @@ export default {
    itemPrefix() {
      return this.isEpicBoard ? '&' : '#';
    },

    itemId() {
      if (this.item.iid) {
        return `${this.itemPrefix}${this.item.iid}`;
@@ -16,12 +16,7 @@ import csrf from '~/lib/utils/csrf';
import { __, s__ } from '~/locale';
import validation, { initFormField } from '~/vue_shared/directives/validation';
import { helpPagePath } from '~/helpers/help_page_helper';
import {
  SECONDARY_OPTIONS_TEXT,
  COMMIT_LABEL,
  COMMIT_MESSAGE_SUBJECT_MAX_LENGTH,
  COMMIT_MESSAGE_BODY_MAX_LENGTH,
} from '../constants';
import { COMMIT_MESSAGE_SUBJECT_MAX_LENGTH, COMMIT_MESSAGE_BODY_MAX_LENGTH } from '../constants';

export default {
  csrf,
@@ -51,7 +46,7 @@ export default {
    COMMIT_IN_DEFAULT_BRANCH: __(
      'GitLab will create a default branch, %{branchName}, and commit your changes.',
    ),
    COMMIT_LABEL,
    COMMIT_LABEL: __('Commit message'),
    COMMIT_MESSAGE_HINT: __(
      'Try to keep the first line under 52 characters and the others under 72.',
    ),
@@ -70,7 +65,7 @@ export default {
    NO_PERMISSION_TO_COMMIT_MESSAGE: __(
      "You don't have permission to commit to %{branchName}. %{linkStart}Learn more.%{linkEnd}",
    ),
    SECONDARY_OPTIONS_TEXT,
    SECONDARY_OPTIONS_TEXT: __('Cancel'),
  },
  directives: {
    validation: validation(),
@@ -343,8 +343,8 @@ export default {
  <new-directory-modal
    v-if="showNewDirectoryModal"
    :can-push-code="canPushCode"
    :can-push-to-branch="canPushToBranch"
    :modal-id="$options.newDirectoryModalId"
    :commit-message="__('Add new directory')"
    :target-branch="selectedBranch"
    :original-branch="originalBranch"
    :path="newDirectoryPath"
@@ -1,68 +1,32 @@
<script>
import {
  GlAlert,
  GlForm,
  GlModal,
  GlFormGroup,
  GlFormInput,
  GlFormTextarea,
  GlFormCheckbox,
} from '@gitlab/ui';
import { GlFormGroup, GlFormInput } from '@gitlab/ui';
import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
import { visitUrl } from '~/lib/utils/url_utility';
import { logError } from '~/lib/logger';
import { __ } from '~/locale';
import {
  SECONDARY_OPTIONS_TEXT,
  COMMIT_LABEL,
  TARGET_BRANCH_LABEL,
  TOGGLE_CREATE_MR_LABEL,
  NEW_BRANCH_IN_FORK,
} from '../constants';
import CommitChangesModal from './commit_changes_modal.vue';

const MODAL_TITLE = __('Create new directory');
const PRIMARY_OPTIONS_TEXT = __('Create directory');
const DIR_LABEL = __('Directory name');
const ERROR_MESSAGE = __('Error creating new directory. Please try again.');
const COMMIT_MESSSAGE = __('Add new directory');

export default {
  components: {
    GlAlert,
    GlModal,
    GlForm,
    GlFormGroup,
    GlFormInput,
    GlFormTextarea,
    GlFormCheckbox,
    CommitChangesModal,
  },
  i18n: {
    DIR_LABEL,
    COMMIT_LABEL,
    TARGET_BRANCH_LABEL,
    TOGGLE_CREATE_MR_LABEL,
    NEW_BRANCH_IN_FORK,
    PRIMARY_OPTIONS_TEXT,
    ERROR_MESSAGE,
    COMMIT_MESSSAGE,
  },
  props: {
    modalTitle: {
      type: String,
      default: MODAL_TITLE,
      required: false,
    },
    modalId: {
      type: String,
      required: true,
    },
    primaryBtnText: {
      type: String,
      default: PRIMARY_OPTIONS_TEXT,
      required: false,
    },
    commitMessage: {
      type: String,
      required: true,
    },
    targetBranch: {
      type: String,
      required: true,
@@ -79,54 +43,29 @@ export default {
      type: Boolean,
      required: true,
    },
    canPushToBranch: {
      type: Boolean,
      required: true,
    },
  },
  data() {
    return {
      dir: null,
      commit: this.commitMessage,
      target: this.targetBranch,
      createNewMr: true,
      loading: false,
    };
  },
  computed: {
    primaryOptions() {
      return {
        text: this.primaryBtnText,
        attributes: {
          variant: 'confirm',
          loading: this.loading,
          disabled: !this.formCompleted || this.loading,
        },
      };
    },
    cancelOptions() {
      return {
        text: SECONDARY_OPTIONS_TEXT,
        attributes: {
          disabled: this.loading,
        },
      };
    },
    showCreateNewMrToggle() {
      return this.canPushCode;
    },
    formCompleted() {
      return this.dir && this.commit && this.target;
    isValid() {
      return Boolean(this.dir);
    },
  },
  methods: {
    submitForm() {
    submitForm(formData) {
      this.loading = true;

      const formData = new FormData();
      formData.append('dir_name', this.dir);
      formData.append('commit_message', this.commit);
      formData.append('branch_name', this.target);
      formData.append('original_branch', this.originalBranch);

      if (this.createNewMr) {
        formData.append('create_merge_request', this.createNewMr);
      if (!formData.has('branch_name')) {
        formData.append('branch_name', this.targetBranch);
      }

      return axios
@@ -134,8 +73,12 @@ export default {
        .then((response) => {
          visitUrl(response.data.filePath);
        })
        .catch(() => {
        .catch((e) => {
          this.loading = false;
          logError(
            __('Failed to create a new directory. See exception details for more information.'),
            e,
          );
          createAlert({ message: ERROR_MESSAGE });
        });
    },
@@ -144,33 +87,24 @@ export default {
</script>

<template>
  <gl-form>
    <gl-modal
      :modal-id="modalId"
      :title="modalTitle"
      :action-primary="primaryOptions"
      :action-cancel="cancelOptions"
      @primary.prevent="submitForm"
    >
  <commit-changes-modal
    v-bind="$attrs"
    :ref="modalId"
    :loading="loading"
    :valid="isValid"
    :modal-id="modalId"
    :can-push-code="canPushCode"
    :can-push-to-branch="canPushToBranch"
    :commit-message="$options.i18n.COMMIT_MESSSAGE"
    :target-branch="targetBranch"
    :original-branch="originalBranch"
    v-on="$listeners"
    @submit-form="submitForm"
  >
    <template #body>
      <gl-form-group :label="$options.i18n.DIR_LABEL" label-for="dir_name">
        <gl-form-input v-model="dir" :disabled="loading" name="dir_name" />
        <gl-form-input id="dir_name" v-model="dir" :disabled="loading" name="dir_name" />
      </gl-form-group>
      <gl-form-group :label="$options.i18n.COMMIT_LABEL" label-for="commit_message">
        <gl-form-textarea v-model="commit" name="commit_message" :disabled="loading" no-resize />
      </gl-form-group>
      <gl-form-group
        v-if="canPushCode"
        :label="$options.i18n.TARGET_BRANCH_LABEL"
        label-for="branch_name"
      >
        <gl-form-input v-model="target" :disabled="loading" name="branch_name" />
      </gl-form-group>
      <gl-form-checkbox v-if="showCreateNewMrToggle" v-model="createNewMr" :disabled="loading">
        {{ $options.i18n.TOGGLE_CREATE_MR_LABEL }}
      </gl-form-checkbox>
      <gl-alert v-if="!canPushCode" variant="info" :dismissible="false" class="gl-mt-3">
        {{ $options.i18n.NEW_BRANCH_IN_FORK }}
      </gl-alert>
    </gl-modal>
  </gl-form>
</template>
    </template>
  </commit-changes-modal>
</template>
@@ -6,14 +6,6 @@ export const TREE_PAGE_SIZE = 100; // the amount of items to be fetched per (bat

export const COMMIT_BATCH_SIZE = 25; // we request commit data in batches of 25

export const SECONDARY_OPTIONS_TEXT = __('Cancel');
export const COMMIT_LABEL = __('Commit message');
export const TARGET_BRANCH_LABEL = __('Target branch');
export const TOGGLE_CREATE_MR_LABEL = __('Create a merge request for this change');
export const NEW_BRANCH_IN_FORK = __(
  'GitLab will create a branch in your fork and start a merge request.',
);

export const COMMIT_MESSAGE_SUBJECT_MAX_LENGTH = 52;
export const COMMIT_MESSAGE_BODY_MAX_LENGTH = 72;
@@ -11,6 +11,8 @@ export default {
   * label: string;
   * value: number;
   * formattedValue: number | string;
   * color: string;
   * hideLabel: boolean,
   * }[]
   */
  sections: {
@@ -27,10 +29,9 @@ export default {
    computedSections() {
      return this.sections.map((section, index) => {
        const percentage = section.value / this.sectionsCombinedValue;

        return {
          ...section,
          backgroundColor: colorFromDefaultPalette(index),
          backgroundColor: section.color ?? colorFromDefaultPalette(index),
          cssPercentage: `${roundOffFloat(percentage * 100, 4)}%`,
          srLabelPercentage: formatNumber(percentage, {
            style: 'percent',
@@ -39,6 +40,9 @@ export default {
        };
      });
    },
    sectionLabels() {
      return this.computedSections.filter((s) => !s.hideLabel);
    },
  },
};
</script>
@@ -62,7 +66,7 @@ export default {
  <div class="gl-mt-5">
    <div class="-gl-mx-3 -gl-my-3 gl-flex gl-flex-wrap gl-items-center">
      <div
        v-for="{ id, label, backgroundColor, formattedValue } in computedSections"
        v-for="{ id, label, backgroundColor, formattedValue } in sectionLabels"
        :key="id"
        class="gl-flex gl-items-center gl-p-3"
        :data-testid="`percentage-bar-legend-section-${id}`"
@@ -272,6 +272,10 @@ export default {

      return __('This issue is hidden because its author has been banned.');
    },
    listItemUniqueId() {
      const availableFullPath = this.workItemFullPath || this.fullPath;
      return `listItem-${availableFullPath}/${getIdFromGraphQLId(this.issuable.id)}`;
    },
  },
  methods: {
    hasSlotContents(slotName) {
@@ -409,6 +413,7 @@ export default {
    >
      <template #default="{ prefetchWorkItem, clearPrefetching }">
        <gl-link
          :id="listItemUniqueId"
          class="issue-title-text gl-text-base"
          dir="auto"
          :href="issuableLinkHref"
@@ -425,6 +430,7 @@ export default {
      </work-item-prefetch>
      <gl-link
        v-else
        :id="listItemUniqueId"
        class="issue-title-text gl-text-base"
        dir="auto"
        :href="issuableLinkHref"
@@ -37,6 +37,7 @@ import {
  WIDGET_TYPE_ITERATION,
  WIDGET_TYPE_MILESTONE,
  DEFAULT_EPIC_COLORS,
  WIDGET_TYPE_HIERARCHY,
} from '../constants';
import createWorkItemMutation from '../graphql/create_work_item.mutation.graphql';
import namespaceWorkItemTypesQuery from '../graphql/namespace_work_item_types.query.graphql';
@@ -49,6 +50,7 @@ import WorkItemDescription from './work_item_description.vue';
import WorkItemAssignees from './work_item_assignees.vue';
import WorkItemLabels from './work_item_labels.vue';
import WorkItemMilestone from './work_item_milestone.vue';
import WorkItemParent from './work_item_parent.vue';
import WorkItemLoading from './work_item_loading.vue';
import WorkItemCrmContacts from './work_item_crm_contacts.vue';
@@ -69,6 +71,7 @@ export default {
    WorkItemCrmContacts,
    WorkItemProjectsListbox,
    TitleSuggestions,
    WorkItemParent,
    WorkItemWeight: () => import('ee_component/work_items/components/work_item_weight.vue'),
    WorkItemHealthStatus: () =>
      import('ee_component/work_items/components/work_item_health_status.vue'),
@@ -77,7 +80,7 @@ export default {
      import('ee_component/work_items/components/work_item_rolledup_dates.vue'),
    WorkItemIteration: () => import('ee_component/work_items/components/work_item_iteration.vue'),
  },
  inject: ['fullPath'],
  inject: ['fullPath', 'groupPath'],
  i18n: {
    suggestionTitle: s__('WorkItem|Similar items'),
    similarWorkItemHelpText: s__(
@@ -266,6 +269,9 @@ export default {
    workItemColor() {
      return findWidget(WIDGET_TYPE_COLOR, this.workItem);
    },
    workItemHierarchy() {
      return findWidget(WIDGET_TYPE_HIERARCHY, this.workItem);
    },
    workItemCrmContacts() {
      return findWidget(WIDGET_TYPE_CRM_CONTACTS, this.workItem);
    },
@@ -336,6 +342,9 @@ export default {
    workItemCrmContactIds() {
      return this.workItemCrmContacts?.contacts?.nodes?.map((item) => item.id) || [];
    },
    workItemParent() {
      return this.workItemHierarchy?.parent || null;
    },
    workItemColorValue() {
      const colorWidget = findWidget(WIDGET_TYPE_COLOR, this.workItem);
      return colorWidget?.color || '';
@@ -551,9 +560,12 @@ export default {
        };
      }

      if (this.parentId) {
      if (
        this.parentId ||
        (this.isWidgetSupported(WIDGET_TYPE_HIERARCHY) && this.workItemParent?.id)
      ) {
        workItemCreateInput.hierarchyWidget = {
          parentId: this.parentId,
          parentId: this.workItemParent?.id ?? this.parentId,
        };
      }
@@ -568,8 +580,15 @@ export default {
        update: (store, { data: { workItemCreate } }) => {
          const { workItem } = workItemCreate;

          if (this.parentId) {
            addHierarchyChild({ cache: store, id: this.parentId, workItem });
          if (
            this.parentId ||
            (this.isWidgetSupported(WIDGET_TYPE_HIERARCHY) && this.workItemParent?.id)
          ) {
            addHierarchyChild({
              cache: store,
              id: this.workItemParent?.id ?? this.parentId,
              workItem,
            });
          }
        },
      });
@@ -796,6 +815,18 @@ export default {
        :can-update="canUpdate"
        @error="$emit('error', $event)"
      />
      <work-item-parent
        v-if="workItemHierarchy"
        class="work-item-attributes-item"
        :can-update="canUpdate"
        :work-item-id="workItemId"
        :work-item-type="selectedWorkItemTypeName"
        :group-path="groupPath"
        :full-path="fullPath"
        :parent="workItemParent"
        :is-group="isGroup"
        @error="$emit('error', $event)"
      />
      <work-item-crm-contacts
        v-if="workItemCrmContacts"
        class="work-item-attributes-item"
@@ -13,6 +13,7 @@ import {
import { __, s__, sprintf } from '~/locale';
import { isScopedLabel } from '~/lib/utils/common_utils';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import WorkItemLinkChildMetadata from 'ee_else_ce/work_items/components/shared/work_item_link_child_metadata.vue';
import RichTimestampTooltip from '../rich_timestamp_tooltip.vue';
import WorkItemTypeIcon from '../work_item_type_icon.vue';
@@ -154,6 +155,9 @@ export default {
        gon.current_user_use_work_items_view
      );
    },
    childItemUniqueId() {
      return `listItem-${this.childItemFullPath}/${getIdFromGraphQLId(this.childItem.id)}`;
    },
  },
  methods: {
    showScopedLabel(label) {
@@ -217,6 +221,7 @@ export default {
        />
      </span>
      <gl-link
        :id="childItemUniqueId"
        :href="childItemWebUrl"
        :class="{ '!gl-text-subtle': !isChildItemOpen }"
        class="gl-hyphens-auto gl-break-words gl-font-semibold"
@@ -7,6 +7,7 @@ import { TYPE_EPIC, TYPE_ISSUE } from '~/issues/constants';
import { DETAIL_VIEW_QUERY_PARAM_NAME } from '~/work_items/constants';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import { visitUrl, setUrlParams, updateHistory, removeParams } from '~/lib/utils/url_utility';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { makeDrawerItemFullPath, makeDrawerUrlParam, canRouterNav } from '../utils';

export default {
@@ -151,8 +152,17 @@ export default {
        url: setUrlParams({ [DETAIL_VIEW_QUERY_PARAM_NAME]: params }),
      });
    },
    handleClose() {
    handleClose(isClickedOutside) {
      updateHistory({ url: removeParams([DETAIL_VIEW_QUERY_PARAM_NAME]) });

      if (!isClickedOutside) {
        document
          .getElementById(
            `listItem-${this.activeItemFullPath}/${getIdFromGraphQLId(this.activeItem.id)}`,
          )
          ?.focus();
      }

      this.$emit('close');
    },
    handleClickOutside(event) {
@@ -172,7 +182,7 @@ export default {
          }
        }
      }
      this.handleClose();
      this.handleClose(true);
    },
    focusOnHeaderLink() {
      this.$refs?.workItemUrl?.$el?.focus();
@@ -7,6 +7,7 @@ import WorkItemSidebarDropdownWidget from '~/work_items/components/shared/work_i
import updateParentMutation from '~/work_items/graphql/update_parent.mutation.graphql';
import { isValidURL } from '~/lib/utils/url_utility';

import updateNewWorkItemMutation from '~/work_items/graphql/update_new_work_item.mutation.graphql';
import { updateParent } from '../graphql/cache_utils';
import groupWorkItemsQuery from '../graphql/group_work_items.query.graphql';
import projectWorkItemsQuery from '../graphql/project_work_items.query.graphql';
@@ -15,10 +16,11 @@ import workItemAllowedParentTypesQuery from '../graphql/work_item_allowed_parent
import {
  I18N_WORK_ITEM_ERROR_UPDATING,
  sprintfWorkItem,
  WORK_ITEM_TYPE_ENUM_EPIC,
  WORK_ITEM_TYPE_VALUE_ISSUE,
  WORK_ITEM_TYPE_VALUE_MAP,
} from '../constants';
import { isReference, findHierarchyWidgetDefinition } from '../utils';
import { isReference, findHierarchyWidgetDefinition, newWorkItemId } from '../utils';

export default {
  name: 'WorkItemParent',
@@ -120,6 +122,9 @@ export default {
    isSearchingByReference() {
      return isReference(this.searchTerm) || isValidURL(this.searchTerm);
    },
    allowedParentTypesForNewWorkItem() {
      return this.workItemId === newWorkItemId(this.workItemType) ? [WORK_ITEM_TYPE_ENUM_EPIC] : [];
    },
  },
  watch: {
    parent: {
@@ -139,7 +144,7 @@ export default {
      return {
        fullPath: this.isIssue ? this.groupPath : this.fullPath,
        searchTerm: this.searchTerm,
        types: this.allowedParentTypes,
        types: [...this.allowedParentTypes, ...this.allowedParentTypesForNewWorkItem],
        in: this.searchTerm ? 'TITLE' : undefined,
        iid: null,
        isNumber: false,
@@ -182,7 +187,7 @@ export default {
      };
    },
    skip() {
      return !this.searchStarted && !this.workItemId;
      return this.workItemId === newWorkItemId(this.workItemType);
    },
    update(data) {
      return (
@@ -202,6 +207,32 @@ export default {
      if (this.parent?.id === this.localSelectedItem) return;

      this.updateInProgress = true;

      if (this.workItemId === newWorkItemId(this.workItemType)) {
        this.$apollo
          .mutate({
            mutation: updateNewWorkItemMutation,
            variables: {
              input: {
                fullPath: this.fullPath,
                parent: {
                  ...this.availableWorkItems?.find(({ id }) => id === this.localSelectedItem),
                  webUrl: this.parentWebUrl ?? null,
                },
                workItemType: this.workItemType,
              },
            },
          })
          .catch((error) => {
            Sentry.captureException(error);
          })
          .finally(() => {
            this.searchStarted = false;
            this.updateInProgress = false;
          });
        return;
      }

      try {
        const {
          data: {
@@ -23,6 +23,11 @@ query groupWorkItems(
        iid
        title
        confidential
        workItemType {
          id
          name
          iconName
        }
      }
    }
  workItemsByIid: workItems(
@@ -36,6 +41,11 @@ query groupWorkItems(
        iid
        title
        confidential
        workItemType {
          id
          name
          iconName
        }
      }
    }
}
@@ -16,6 +16,7 @@ import {
  WIDGET_TYPE_START_AND_DUE_DATE,
  NEW_WORK_ITEM_IID,
  WIDGET_TYPE_MILESTONE,
  WIDGET_TYPE_HIERARCHY,
} from '../constants';
import workItemByIidQuery from './work_item_by_iid.query.graphql';
@@ -63,6 +64,7 @@ export const updateNewWorkItemCache = (input, cache) => {
    iteration,
    weight,
    milestone,
    parent,
  } = input;

  const query = workItemByIidQuery;
@@ -119,6 +121,11 @@ export const updateNewWorkItemCache = (input, cache) => {
      newData: milestone,
      nodePath: 'milestone',
    },
    {
      widgetType: WIDGET_TYPE_HIERARCHY,
      newData: parent,
      nodePath: 'parent',
    },
  ];

  widgetUpdates.forEach(({ widgetType, newData, nodePath }) => {
@@ -9,6 +9,8 @@ module Types

    authorize :read_work_item

    present_using WorkItemPresenter

    field :author, Types::UserType, null: true,
      description: 'User that created the work item.',
      experiment: { milestone: '15.9' }
@@ -63,6 +65,11 @@ module Types
      description: 'Whether the work item belongs to an archived project. Always false for group level work items.',
      experiment: { milestone: '16.5' }

    field :duplicated_to_work_item_url, GraphQL::Types::String, null: true,
      description: 'URL of the work item that the work item is marked as a duplicate of.'
    field :moved_to_work_item_url, GraphQL::Types::String, null: true,
      description: 'URL of the work item that the work item was moved to.'

    markdown_field :title_html, null: true
    markdown_field :description_html, null: true
@@ -74,10 +81,6 @@ module Types
      object.work_item_type
    end

    def web_url
      Gitlab::UrlBuilder.build(object)
    end

    def create_note_email
      object.creatable_note_email_address(context[:current_user])
    end
@@ -89,3 +92,5 @@ module Types
    end
  end
end

Types::WorkItemType.prepend_mod_with('Types::WorkItemType')
@@ -7,8 +7,6 @@ class Namespace::Detail < ApplicationRecord
  validates :namespace, presence: true
  validates :description, length: { maximum: 255 }

  ignore_column :pending_delete, remove_with: '17.8', remove_after: '2024-12-13'

  self.primary_key = :namespace_id

  # This method should not be called directly. Instead, it is available on the namespace via delegation and should
@@ -74,12 +74,10 @@ class PagesDeployment < ApplicationRecord
    update(deleted_at: Time.now.utc)
  end

  def self.count_versioned_deployments_for(projects, limit)
    project_id_in(projects)
      .active
      .versioned
      .limit(limit)
      .count
  def self.count_versioned_deployments_for(projects, limit, group_by_project: false)
    query = project_id_in(projects).active.versioned
    query = query.group(:project_id) if group_by_project
    query.limit(limit).count
  end

  def active?
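The reworked `count_versioned_deployments_for` changes its return shape when `group_by_project: true` is passed. A minimal usage sketch, assuming standard ActiveRecord semantics (a grouped `.count` returns a hash keyed by the grouping column, while the ungrouped call keeps returning a single integer):

```ruby
# Hypothetical illustration, not part of this commit.
projects = Project.where(id: [1, 2])

# Ungrouped: one integer across all given projects (capped by limit).
PagesDeployment.count_versioned_deployments_for(projects, 100)
# => 42

# Grouped: a hash of project_id => count, useful for per-project checks.
PagesDeployment.count_versioned_deployments_for(projects, 100, group_by_project: true)
# => { 1 => 10, 2 => 32 }
```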
@@ -1,4 +1,27 @@
# frozen_string_literal: true

class WorkItemPresenter < IssuePresenter # rubocop:todo Gitlab/NamespacedClass
class WorkItemPresenter < IssuePresenter # rubocop:todo Gitlab/NamespacedClass -- WorkItem is not namespaced
  presents ::WorkItem, as: :work_item

  def duplicated_to_work_item_url
    return unless work_item.duplicated?
    return unless allowed_to_read_work_item?(work_item.duplicated_to)

    Gitlab::UrlBuilder.build(work_item.duplicated_to)
  end

  def moved_to_work_item_url
    return unless work_item.moved?
    return unless allowed_to_read_work_item?(work_item.moved_to)

    Gitlab::UrlBuilder.build(work_item.moved_to)
  end

  private

  def allowed_to_read_work_item?(item)
    Ability.allowed?(current_user, :read_work_item, item)
  end
end

WorkItemPresenter.prepend_mod_with('WorkItemPresenter')
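Both presenter methods return `nil` unless the work item was actually moved or duplicated and the current user can read the target, so the new GraphQL fields never leak URLs of restricted items. A hypothetical console sketch (the `present` entry point is assumed here, based on GitLab's `Presentable` concern):

```ruby
# Hypothetical illustration, not part of this commit.
presenter = work_item.present(current_user: user) # => WorkItemPresenter

presenter.moved_to_work_item_url
# => "https://gitlab.example.com/group/project/-/work_items/123", or nil when
#    the item was not moved or `user` cannot read the target item.
presenter.duplicated_to_work_item_url
# => same contract for duplicates.
```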
@@ -8,8 +8,8 @@ test:
  main:
    database: gitlab_clickhouse_test
    url: 'http://localhost:8123'
    username: clickhouse
    password: clickhouse
    username: default
    password:
    variables:
      # execute mutations (updates, deletes) synchronously to avoid flaky tests
      mutations_sync: 1
@@ -8,7 +8,7 @@ Coverband.configure do |config|
  config.reporting_wiggle = 0 # Since this is not run in production disable wiggle and report every second.
  config.ignore += %w[spec/.* lib/tasks/.*
    config/application.rb config/boot.rb config/initializers/.* db/post_migrate/.*
    config/puma.rb bin/.* config/environments/.* db/migrate/.*]
    config/puma.rb bin/.* config/environments/.* db/migrate/.* ee/app/workers/search/zoekt/.*]

  config.verbose = false # this spams logfile a lot, set to true for debugging locally
  config.logger = Gitlab::AppLogger.primary_logger
@@ -1,9 +0,0 @@
---
name: workhorse_use_aws_sdk_v2
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/482044
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/164597
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/483000
milestone: '17.4'
group: infrastructure
type: ops
default_enabled: true
@@ -2,7 +2,7 @@
name: merge_request_dashboard_new_lists
feature_issue_url:
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/173399
rollout_issue_url:
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/512602
milestone: '17.7'
group: group::code review
type: wip
@@ -5,4 +5,4 @@ feature_category: vulnerability_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156249
milestone: '17.1'
queued_migration_version: 20240613064362
finalized_by: # version of the migration that finalized this BBM
finalized_by: '20241120232704'
@@ -3,6 +3,6 @@ migration_job_name: DeleteOrphanedGroups
description: Deletes orphaned groups whose parents do not exist
feature_category: groups_and_projects
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/172420
milestone: '17.7'
queued_migration_version: 20241206154945
milestone: '17.8'
queued_migration_version: 20250109105904
finalized_by: # version of the migration that finalized this BBM
@@ -10,22 +10,12 @@ class QueueRequeueDeleteOrphanedGroups < Gitlab::Database::Migration[2.2]
  SUB_BATCH_SIZE = 100

  def up
    return unless Gitlab.com_except_jh? && !Gitlab.staging?

    # Clear previous background migration execution from QueueDeleteOrphanedGroups
    delete_batched_background_migration(MIGRATION, :namespaces, :id, [])

    queue_batched_background_migration(
      MIGRATION,
      :namespaces,
      :id,
      job_interval: DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
    # no-op because there was a bug in the original migration, which has been
    # fixed by https://gitlab.com/gitlab-org/gitlab/-/merge_requests/176705
  end

  def down
    delete_batched_background_migration(MIGRATION, :namespaces, :id, [])
    # no-op because there was a bug in the original migration, which has been
    # fixed by https://gitlab.com/gitlab-org/gitlab/-/merge_requests/176705
  end
end
@@ -0,0 +1,21 @@
# frozen_string_literal: true

class FinalizeBackfillVulnerabilityIssueLinksProjectId < Gitlab::Database::Migration[2.2]
  milestone '17.8'

  disable_ddl_transaction!

  restrict_gitlab_migration gitlab_schema: :gitlab_sec

  def up
    ensure_batched_background_migration_is_finished(
      job_class_name: 'BackfillVulnerabilityIssueLinksProjectId',
      table_name: :vulnerability_issue_links,
      column_name: :id,
      job_arguments: [:project_id, :vulnerabilities, :project_id, :vulnerability_id],
      finalize: true
    )
  end

  def down; end
end
@@ -0,0 +1,17 @@
# frozen_string_literal: true

class AddShardingKeyIdIndexToCiRunners < Gitlab::Database::Migration[2.2]
  disable_ddl_transaction!

  milestone '17.8'

  INDEX_NAME = 'index_ci_runners_on_sharding_key_id_when_not_null'

  def up
    add_concurrent_index :ci_runners, :sharding_key_id, name: INDEX_NAME, where: 'sharding_key_id IS NOT NULL'
  end

  def down
    remove_concurrent_index :ci_runners, :sharding_key_id, name: INDEX_NAME
  end
end
@@ -0,0 +1,17 @@
# frozen_string_literal: true

class AddShardingKeyIdIndexToCiRunnerMachines < Gitlab::Database::Migration[2.2]
  disable_ddl_transaction!

  milestone '17.8'

  INDEX_NAME = 'index_ci_runner_machines_on_sharding_key_id_when_not_null'

  def up
    add_concurrent_index :ci_runner_machines, :sharding_key_id, name: INDEX_NAME, where: 'sharding_key_id IS NOT NULL'
  end

  def down
    remove_concurrent_index :ci_runner_machines, :sharding_key_id, name: INDEX_NAME
  end
end
@@ -0,0 +1,31 @@
# frozen_string_literal: true

class QueueReEnqueueDeleteOrphanedGroups < Gitlab::Database::Migration[2.2]
  milestone '17.8'
  restrict_gitlab_migration gitlab_schema: :gitlab_main

  MIGRATION = "DeleteOrphanedGroups"
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 1000
  SUB_BATCH_SIZE = 100

  def up
    return unless Gitlab.com_except_jh? && !Gitlab.staging?

    # Clear previous background migration execution from QueueRequeueDeleteOrphanedGroups
    delete_batched_background_migration(MIGRATION, :namespaces, :id, [])

    queue_batched_background_migration(
      MIGRATION,
      :namespaces,
      :id,
      job_interval: DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    delete_batched_background_migration(MIGRATION, :namespaces, :id, [])
  end
end
@@ -0,0 +1 @@
c1b75439237a239e424c1ea2e044d4336bcde8bacab45aa134d509f9634a1c9d
@@ -0,0 +1 @@
a875225031c1e4fa081b538be50c6aa118303bb8a6bfd36b4c96c315a5ac2006
@@ -0,0 +1 @@
6b668dd08bbc97f3fddf904595e47bf286b13309d6eeddd55b2ece50f2c8066e
@@ -0,0 +1 @@
e5fe54e6057e8f2f8c0de076f9b43f9dd33085ccc1aea3fb209a76d53f528fe4
@@ -30358,6 +30358,8 @@ CREATE UNIQUE INDEX index_ci_runner_machines_on_runner_id_and_system_xid ON ci_r

CREATE INDEX index_ci_runner_machines_on_runner_type ON ci_runner_machines USING btree (runner_type);

CREATE INDEX index_ci_runner_machines_on_sharding_key_id_when_not_null ON ci_runner_machines USING btree (sharding_key_id) WHERE (sharding_key_id IS NOT NULL);

CREATE INDEX index_ci_runner_machines_on_version ON ci_runner_machines USING btree (version);

CREATE INDEX index_ci_runner_namespaces_on_namespace_id ON ci_runner_namespaces USING btree (namespace_id);
@@ -30390,6 +30392,8 @@ CREATE INDEX index_ci_runners_on_locked ON ci_runners USING btree (locked);

CREATE INDEX index_ci_runners_on_runner_type_and_id ON ci_runners USING btree (runner_type, id);

CREATE INDEX index_ci_runners_on_sharding_key_id_when_not_null ON ci_runners USING btree (sharding_key_id) WHERE (sharding_key_id IS NOT NULL);

CREATE INDEX index_ci_runners_on_token_expires_at_and_id_desc ON ci_runners USING btree (token_expires_at, id DESC);

CREATE INDEX index_ci_runners_on_token_expires_at_desc_and_id_desc ON ci_runners USING btree (token_expires_at DESC, id DESC);
@@ -42,13 +42,14 @@ To configure GitLab SCIM:
You can configure the following as an identity provider:

- [Okta](#configure-okta).
- [Microsoft Entra ID (formerly Azure Active Directory)](#configure-microsoft-entra-id-formerly-azure-active-directory)

NOTE:
Other identity providers can work with GitLab but they have not been tested and are not supported. You should contact the provider for support. GitLab support can assist by reviewing related log entries.

### Configure Okta

The SAML application created during [single sign-on](index.md) set up for Okta must be set up for SCIM.
The SAML application created during [single sign-on](../../integration/saml.md) set up for Okta must be set up for SCIM.

Prerequisites:
@@ -82,6 +83,142 @@ To configure Okta for SCIM:
1. Select **Save**.
1. Assign users in the **Assignments** tab. Assigned users are created and managed in your GitLab group.

### Configure Microsoft Entra ID (formerly Azure Active Directory)

> - [Changed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/143146) to Microsoft Entra ID terminology in GitLab 16.10.

Prerequisites:

- [GitLab is configured](#configure-gitlab) for SCIM.
- The [SAML application for Microsoft Entra ID is set up](../../integration/saml.md#set-up-microsoft-entra-id).

The SAML application created during [single sign-on](../../integration/saml.md) set up for
[Azure Active Directory](https://learn.microsoft.com/en-us/entra/identity/enterprise-apps/view-applications-portal)
must be set up for SCIM. For an example, see [example configuration](../../user/group/saml_sso/example_saml_config.md#scim-mapping).

NOTE:
You must configure SCIM provisioning exactly as detailed in the following instructions. If misconfigured, you will encounter issues with user provisioning
and sign in, which require a lot of effort to resolve. If you have any trouble or questions with any step, contact GitLab support.

To configure Microsoft Entra ID, you configure:

- Microsoft Entra ID for SCIM.
- Settings.
- Mappings, including attribute mappings.

#### Configure Microsoft Entra ID for SCIM

1. In your app, go to the **Provisioning** tab and select **Get started**.
1. Set the **Provisioning Mode** to **Automatic**.
1. Complete the **Admin Credentials** using the value of:
   - **SCIM API endpoint URL** in GitLab for the **Tenant URL** field.
   - **Your SCIM token** in GitLab for the **Secret Token** field.
1. Select **Test Connection**.

   If the test is successful, save your configuration.

   If the test is unsuccessful, see
   [troubleshooting](../../user/group/saml_sso/troubleshooting.md) to try to resolve this.

1. Select **Save**.

After saving, the **Mappings** and **Settings** sections appear.

#### Configure mappings

Under the **Mappings** section, first provision the groups:

1. Select **Provision Microsoft Entra ID Groups**.
1. On the Attribute Mapping page, turn off the **Enabled** toggle.

   SCIM group provisioning is not supported in GitLab. Leaving group provisioning enabled does not break the SCIM user provisioning, but it causes errors in the
   Entra ID SCIM provisioning log that might be confusing and misleading.

   NOTE:
   Even when **Provision Microsoft Entra ID Groups** is disabled, the mappings section may display "Enabled: Yes". This behavior is a display bug that you can safely ignore.

1. Select **Save**.

Next, provision the users:

1. Select **Provision Microsoft Entra ID Users**.
1. Ensure that the **Enabled** toggle is set to **Yes**.
1. Ensure that all **Target Object Actions** are enabled.
1. Under **Attribute Mappings**, configure mappings to match
   the [configured attribute mappings](#configure-attribute-mappings):
   1. Optional. In the **customappsso Attribute** column, find `externalId` and delete it.
   1. Edit the first attribute to have a:
      - **source attribute** of `objectId`.
      - **target attribute** of `externalId`.
      - **matching precedence** of `1`.
   1. Update the existing **customappsso** attributes to match the
      [configured attribute mappings](#configure-attribute-mappings).
   1. Delete any additional attributes that are not present in the [attribute mappings table](#configure-attribute-mappings). They do not cause problems if they are
      not deleted, but GitLab does not consume the attributes.
1. Under the mapping list, select the **Show advanced options** checkbox.
1. Select the **Edit attribute list for customappsso** link.
1. Ensure the `id` is the primary and required field, and `externalId` is also required.
1. Select **Save**, which returns you to the Attribute Mapping configuration page.
1. Close the **Attribute Mapping** configuration page by clicking the `X` in the top right corner.

##### Configure attribute mappings

NOTE:
While Microsoft transitions from Azure Active Directory to Entra ID naming schemes, you might notice inconsistencies in
your user interface. If you're having trouble, you can view an older version of this document or contact GitLab Support.

While [configuring Entra ID for SCIM](#configure-microsoft-entra-id-formerly-azure-active-directory), you configure
attribute mappings. For an example, see [example configuration](../../user/group/saml_sso/example_saml_config.md#scim-mapping).

The following table provides attribute mappings that are required for GitLab.

| Source attribute | Target attribute | Matching precedence |
|:---------------------------------------------------------------------------|:-------------------------------|:--------------------|
| `objectId` | `externalId` | 1 |
| `userPrincipalName` OR `mail` <sup>1</sup> | `emails[type eq "work"].value` | |
| `mailNickname` | `userName` | |
| `displayName` OR `Join(" ", [givenName], [surname])` <sup>2</sup> | `name.formatted` | |
| `Switch([IsSoftDeleted], , "False", "True", "True", "False")` <sup>3</sup> | `active` | |

**Footnotes:**

1. Use `mail` as a source attribute when the `userPrincipalName` is not an email address or is not deliverable.
1. Use the `Join` expression if your `displayName` does not match the format of `Firstname Lastname`.
1. This is an expression mapping type, not a direct mapping. Select **Expression** in the **Mapping type** dropdown list.

Each attribute mapping has:

- A **customappsso Attribute**, which corresponds to **target attribute**.
- A **Microsoft Entra ID Attribute**, which corresponds to **source attribute**.
- A matching precedence.

For each attribute:

1. Edit the existing attribute or add a new attribute.
1. Select the required source and target attribute mappings from the dropdown lists.
1. Select **Ok**.
1. Select **Save**.

If your SAML configuration differs from [the recommended SAML settings](../../integration/saml.md), select the mapping
attributes and modify them accordingly. The source attribute that you map to the `externalId`
target attribute must match the attribute used for the SAML `NameID`.

If a mapping is not listed in the table, use the Microsoft Entra ID defaults. For a list of required attributes,
refer to the [internal instance SCIM API](../../development/internal_api/index.md#instance-scim-api) documentation.

#### Configure settings

Under the **Settings** section:

1. Optional. If desired, select the **Send an email notification when a failure occurs** checkbox.
1. Optional. If desired, select the **Prevent accidental deletion** checkbox.
1. If necessary, select **Save** to ensure all changes have been saved.

After you have configured the mappings and the settings, return to the app overview page and select **Start provisioning** to start automatic SCIM provisioning of users in GitLab.

WARNING:
Once synchronized, changing the field mapped to `id` and `externalId` might cause errors. These include
provisioning errors, duplicate users, and might prevent existing users from accessing the GitLab group.

## Remove access

Removing or deactivating a user on the identity provider blocks the user on
@@ -37740,12 +37740,15 @@ four standard [pagination arguments](#pagination-arguments):
| <a id="workitemcreatedat"></a>`createdAt` | [`Time!`](#time) | Timestamp of when the work item was created. |
| <a id="workitemdescription"></a>`description` | [`String`](#string) | Description of the work item. |
| <a id="workitemdescriptionhtml"></a>`descriptionHtml` | [`String`](#string) | GitLab Flavored Markdown rendering of `description`. |
| <a id="workitemduplicatedtoworkitemurl"></a>`duplicatedToWorkItemUrl` | [`String`](#string) | URL of the work item that the work item is marked as a duplicate of. |
| <a id="workitemid"></a>`id` | [`WorkItemID!`](#workitemid) | Global ID of the work item. |
| <a id="workitemiid"></a>`iid` | [`String!`](#string) | Internal ID of the work item. |
| <a id="workitemlockversion"></a>`lockVersion` | [`Int!`](#int) | Lock version of the work item. Incremented each time the work item is updated. |
| <a id="workitemmovedtoworkitemurl"></a>`movedToWorkItemUrl` | [`String`](#string) | URL of the work item that the work item was moved to. |
| <a id="workitemname"></a>`name` | [`String`](#string) | Name or title of this object. |
| <a id="workitemnamespace"></a>`namespace` **{warning-solid}** | [`Namespace`](#namespace) | **Introduced** in GitLab 15.10. **Status**: Experiment. Namespace the work item belongs to. |
| <a id="workitemproject"></a>`project` **{warning-solid}** | [`Project`](#project) | **Introduced** in GitLab 15.3. **Status**: Experiment. Project the work item belongs to. |
| <a id="workitempromotedtoepicurl"></a>`promotedToEpicUrl` | [`String`](#string) | URL of the epic that the work item has been promoted to. |
| <a id="workitemstate"></a>`state` | [`WorkItemState!`](#workitemstate) | State of the work item. |
| <a id="workitemtitle"></a>`title` | [`String!`](#string) | Title of the work item. |
| <a id="workitemtitlehtml"></a>`titleHtml` | [`String`](#string) | GitLab Flavored Markdown rendering of `title`. |
@@ -49,6 +49,7 @@ Most of the tooling and APIs are considered unstable.

```sql
create database gitlab_clickhouse_development;
create database gitlab_clickhouse_test;
```

### Validate your setup
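The hunk cuts off at the heading, but a typical validation step (an assumption here, based on the GitLab ClickHouse client used elsewhere in the codebase) is to run a trivial query from the Rails console against the `:main` ClickHouse database:

```ruby
# Hypothetical validation sketch; assumes the databases above exist and the
# :main connection is configured in config/click_house.yml.
ClickHouse::Client.select('SELECT 1 AS value', :main)
# => [{ "value" => 1 }] when the connection is healthy
```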
@@ -0,0 +1,119 @@
---
stage: none
group: unassigned
info: Any user with at least the Maintainer role can merge updates to this content. For details, see https://docs.gitlab.com/ee/development/development_processes.html#development-guidelines-review.
---

# Getting Started with Python in GitLab

## Onboarding Guide

This guide helps non-Python developers get started with Python quickly and efficiently.

1. **Set up Python**:
   - Install Python from the official [Python website](https://www.python.org/downloads/).

1. **Install Poetry** for package management:
   - Poetry is a modern, Python-specific dependency manager that simplifies packaging and dependency handling. To install it, run:

     ```shell
     curl --silent --show-error --location "https://install.python-poetry.org" | python3 -
     ```

   - Once installed, create a new Python project with Poetry:

     ```shell
     poetry new my_project
     cd my_project
     poetry install
     ```

1. **Run and Debug Existing Code**
   - Familiarize yourself with the project's structure by following the `README.md`.
   - Use tools like `pdb` or IDE debugging features to debug code. Example:

     ```shell
     poetry shell
     python -m pdb <file_name>.py
     ```

---

## Learning resources

If you are new to Python or looking to refresh your knowledge, this section provides various materials for
learning the language.

1. **[Python Cheatsheet](https://www.pythoncheatsheet.org)**
   A comprehensive reference covering essential Python syntax, built-in functions, and useful libraries.
   This is ideal for both beginners and experienced users who want a quick, organized summary of Python's key features.

1. **[A Whirlwind Tour of Python (Jupyter Notebook)](https://github.com/jakevdp/WhirlwindTourOfPython)**
   A fast-paced introduction to Python fundamentals, tailored especially for data science practitioners but suitable for anyone who wants a basic understanding of the language.
   Because it is a Jupyter Notebook, this guide is an interactive resource as well as a good introduction to Jupyter Notebook itself.

1. **[100-page Python Intro](https://learnbyexample.github.io/100_page_python_intro)**
   This brief guide provides a straightforward introduction to Python, covering all the essentials needed to start programming effectively. It's a beginner-friendly option that covers everything from syntax to debugging and testing.

1. **[Learn X in Y Minutes: Python](https://learnxinyminutes.com/docs/python)**
   A very brief, high-level introduction that cuts directly to the core syntax and features of Python, making it a valuable quick start for developers transitioning to Python.

1. **[Exercism Python Track](https://exercism.io/tracks/python)**
   Use Exercism's Python track as a foundation for learning Python concepts and best practices. Exercism provides hands-on practice with mentoring support, making it an excellent resource for mastering Python through coding exercises and feedback.

When building Python APIs, we use FastAPI and Pydantic. To get started with building and reviewing these technologies, refer to the following resources:

1. **[FastAPI Documentation](https://fastapi.tiangolo.com/)**
   FastAPI is a modern web framework for building APIs with Python. This resource will help you learn how to create fast and efficient web applications and APIs. FastAPI is especially useful for building Python applications with high performance and scalability.

1. **[Pydantic Documentation](https://pydantic-docs.helpmanual.io/)**
   Pydantic is a Python library for data validation and settings management using Python type annotations. Learn how to integrate Pydantic into your Python projects for easier data validation and management, particularly when working with FastAPI.

We use pytest for testing Python code. To learn more about writing and running tests with pytest, refer to the following resources:

1. **[pytest Documentation](https://docs.pytest.org/en/stable/)**
   pytest is a popular testing framework for Python that makes it easy to write simple and scalable tests. This resource provides comprehensive documentation on how to write and run tests using pytest, including fixtures, plugins, and test discovery.

1. **[Python Testing with pytest (Book)](https://pragprog.com/titles/bopytest2/python-testing-with-pytest-second-edition/)**
   This book is a comprehensive guide to testing Python code with pytest. It covers everything from the basics of writing tests to advanced topics like fixtures, plugins, and test organization.

---

### Learning Group

A collaborative space for developers to study Python, FastAPI, and Pydantic, focusing on building real-world apps.

Refer to the [Track and Propose Sessions for Python Learning Group](https://gitlab.com/gitlab-org/gitlab/-/issues/512600) issue for ongoing updates and discussions.

**Core Topics for Group Learning**:

1. **Basic Python Syntax**:
   - Learn Python concepts such as variables, functions, loops, and conditionals.
   - Practice at the [Exercism Python Track](https://exercism.io/tracks/python).

1. **FastAPI and Pydantic**:
   - Learn how to build APIs using FastAPI and validate data with Pydantic.
   - Key resources:
     - [FastAPI Documentation](https://fastapi.tiangolo.com/)
     - [Pydantic Documentation](https://pydantic-docs.helpmanual.io/)

---

### Python Review Office Hours

- **Bi-weekly sessions** for code review and discussion, led by experienced Python developers.
- These sessions are designed to help you improve your Python skills through practical feedback.
- Please feel free to add the office hours to your calendar.

---

### Recorded Group Meetings

All review and study group meetings will be recorded and shared, covering key concepts in Python, FastAPI, and Pydantic. These recordings are great for revisiting topics or catching up if you miss a session.

Add any uploaded videos to the [Python Resources](https://www.youtube.com/playlist?list=PL05JrBw4t0Kq4i9FD276WtOL1dSSm9a1G) playlist.

---

### Mentorship Process

1:1 mentorship for Python is possible and encouraged. For more information on how to get started with a mentor, refer to the [GitLab Mentoring Handbook](https://handbook.gitlab.com/handbook/engineering/careers/mentoring/#mentoring).
@ -21,28 +21,31 @@ in a single container.
|
|||
|
||||
The Docker image for the AI gateway is around 340 MB (compressed) for the `linux/amd64` architecture and requires a minimum of 512 MB of RAM to operate. A GPU is not needed for the GitLab AI gateway. To ensure better performance, especially under heavy usage, consider allocating more disk space, memory, and resources than the minimum requirements. Higher RAM and disk capacity can enhance the AI gateway's efficiency during peak loads.
|
||||
|
||||
### Find the AI gateway release
|
||||
### Find the AI Gateway Release
|
||||
|
||||
Find the GitLab official Docker image at:
|
||||
|
||||
- [AI gateway Docker image on Container Registry](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/container_registry/).
|
||||
- [AI gateway Docker image on DockerHub](https://hub.docker.com/repository/docker/gitlab/model-gateway/tags).
|
||||
- [Release process for self-hosted AI gateway](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/-/blob/main/docs/release.md).
|
||||
- [AI Gateway Docker image on Container Registry](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/container_registry/).
|
||||
- [AI Gateway Docker image on DockerHub](https://hub.docker.com/repository/docker/gitlab/model-gateway/tags).
|
||||
- [Release process for self-hosted AI Gateway](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/-/blob/main/docs/release.md).
|
||||
|
||||
Use the image tag that corresponds to your GitLab version. For example, if the
|
||||
GitLab version is `v17.6.0`, use `self-hosted-v17.6.0-ee` tag.
|
||||
Use the image tag that corresponds to your GitLab version. For example, if your GitLab version is `v17.6.0`, use the `self-hosted-17.6.0-ee` tag. It is critical to ensure that the image version matches your GitLab version to avoid compatibility issues.
|
||||
|
||||
### Start a container from the image
|
||||
NOTE:
|
||||
Using the `:latest` tag is **not recommended** as it can cause incompatibility if your GitLab version lags behind or jumps ahead of the AI Gateway release. Always use an explicit version tag.
|
||||
|
||||
1. For Docker images with version `self-hosted-17.6.0-ee` and later, run the following:
|
||||
### Start a Container from the Image
|
||||
|
||||
1. For Docker images with version `self-hosted-17.6.0-ee` and later, run the following command, replacing `<your_gitlab_instance>` and `<your_gitlab_domain>` with your GitLab instance's URL and domain:
|
||||
|
||||
```shell
|
||||
docker run -p 5052:5052 \
|
||||
-e AIGW_GITLAB_URL=<your_gitlab_instance> \
|
||||
-e AIGW_GITLAB_API_URL=https://<your_gitlab_domain>/api/v4/ \
|
||||
<image>
|
||||
```
|
||||
registry.gitlab.com/gitlab-org/ai-gateway/self-hosted-17.6.0-ee:latest
|
||||
```
|
||||
|
||||
Replace `self-hosted-17.6.0-ee` with the version that matches your GitLab instance. For example, if your GitLab version is `v17.8.0`, use `self-hosted-17.8.0-ee`.
|
||||
From the container host, accessing `http://localhost:5052/docs` should open the AI gateway API documentation.
|
||||
|
||||
1. Ensure that port `5052` is forwarded to the container from the host and is included in the `AI_GATEWAY_URL` environment variable.
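
   As a quick sanity check, the following sketch (assuming the container from the previous step is running on the same host and port `5052` is published) confirms the gateway responds and shows one possible value for `AI_GATEWAY_URL`:

   ```shell
   # Confirm the AI gateway responds; /docs is the API documentation page
   # mentioned above.
   curl --silent --head http://localhost:5052/docs

   # Example only: point GitLab at the gateway. Replace localhost with the
   # address your GitLab instance uses to reach the container.
   export AI_GATEWAY_URL=http://localhost:5052
   ```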
|
||||
|
|
|
|||
|
|
@ -108,8 +108,6 @@ Prerequisites:
|
|||
|
||||
The user is sent a confirmation email.
|
||||
|
||||
To turn off these emails, an administrator can [disable the `duo_seat_assignment_email_for_sm` feature flag](../administration/feature_flags.md#how-to-enable-and-disable-features-behind-flags).
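
For example, on a Linux package installation, one way to disable the flag is from the command line. This is a sketch only; the flag name comes from the sentence above:

```shell
# Runs the one-liner through the GitLab Rails runner on the instance.
sudo gitlab-rails runner "Feature.disable(:duo_seat_assignment_email_for_sm)"
```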
|
||||
|
||||
#### Configure network and proxy settings
|
||||
|
||||
For self-managed instances, to enable GitLab Duo features,
|
||||
|
|
|
|||
|
|
@ -169,8 +169,10 @@ Prerequisites:
|
|||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/173480) in GitLab 17.7.
|
||||
|
||||
Use the `skip_ci` keyword to specify whether users are allowed to apply the `skip-ci` directive to skip the pipelines.
|
||||
When the keyword is not specified, the `skip-ci` directive is ignored, preventing all users
|
||||
Pipeline execution policies offer control over who can use the `[skip ci]` directive. You can specify certain users or service accounts that are allowed to use `[skip ci]` while still ensuring critical security and compliance checks are performed.
|
||||
|
||||
Use the `skip_ci` keyword to specify whether users are allowed to apply the `skip_ci` directive to skip the pipelines.
|
||||
When the keyword is not specified, the `skip_ci` directive is ignored, preventing all users
|
||||
from bypassing the pipeline execution policies.
|
||||
|
||||
| Field | Type | Possible values | Description |
|
||||
|
|
@ -351,7 +353,9 @@ You can [define project or group variables in the UI](../../../ci/variables/inde
|
|||
|
||||
## Behavior with `[skip ci]`
|
||||
|
||||
To prevent a regular pipeline from triggering, users can push a commit to a protected branch with `[skip ci]` in the commit message. However, jobs defined with a pipeline execution policy are always triggered, as the policy ignores the `[skip ci]` directive. This prevents developers from skipping the execution of jobs defined in the policy, which ensures that critical security and compliance checks are always performed.
|
||||
By default, to prevent a regular pipeline from triggering, users can push a commit to a protected branch with `[skip ci]` in the commit message. However, jobs defined with a pipeline execution policy are always triggered, as the policy ignores the `[skip ci]` directive. This prevents developers from skipping the execution of jobs defined in the policy, which ensures that critical security and compliance checks are always performed.
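
For illustration, this is what such a commit might look like; the branch name and commit message are hypothetical:

```shell
# The regular pipeline is skipped, but jobs from pipeline execution
# policies still run for this push.
git commit --allow-empty -m "Fix typo in README [skip ci]"
git push origin my-protected-branch
```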
|
||||
|
||||
For more flexible control over `[skip ci]` behavior, see the [`skip_ci` type](#skip_ci-type) section.
|
||||
|
||||
## Interaction with scan execution policies
|
||||
|
||||
|
|
|
|||
Binary file not shown.
|
After Width: | Height: | Size: 64 KiB |
|
|
@ -36,6 +36,18 @@ DevOps adoption shows feature adoption for development, security, and operations
|
|||
A feature shows as **adopted** when a group or subgroup has used the feature in a project in the last full calendar month.
|
||||
For example, if an issue was created in a project in a group, the group has adopted issues in that time.
|
||||
|
||||

|
||||
|
||||
The **Overview** tab illustrates the:
|
||||
|
||||
- Total number of features adopted.
|
||||
- Features adopted in each category.
|
||||
- Number of features adopted in each category by month in the **Adoption over time** bar chart.
|
||||
  The chart shows only data from the date you enabled DevOps adoption for the group.
|
||||
- Number of features adopted in each category by subgroup in the **Adoption by subgroup** table.
|
||||
|
||||
The **Dev**, **Sec**, and **Ops** tabs illustrate the features adopted in development, security, and operations by subgroup.
|
||||
|
||||
The DevOps adoption report excludes:
|
||||
|
||||
- Dormant projects. The number of projects that use a feature is not considered. Having many dormant projects doesn't lower adoption.
|
||||
|
|
@ -61,16 +73,7 @@ To view DevOps adoption:
|
|||
|
||||
1. On the left sidebar, select **Search or go to** and find your group.
|
||||
1. Select **Analyze > DevOps adoption**.
|
||||
|
||||
The **Overview** tab displays the:
|
||||
|
||||
- Total number of features adopted.
|
||||
- Features adopted in each category.
|
||||
- Number of features adopted in each category by month in the **Adoption over time** chart.
|
||||
  The chart shows only data from the date you enabled DevOps adoption for the group.
|
||||
- Number of features adopted in each category by subgroup in the **Adoption by subgroup** table.
|
||||
|
||||
The **Dev**, **Sec**, and **Ops** tabs display the features adopted in development, security, and operations by subgroup.
|
||||
1. To view the features adopted by category in a month, hover over a bar.
|
||||
|
||||
## Add a subgroup to DevOps adoption
|
||||
|
||||
|
|
|
|||
|
|
@ -5,17 +5,67 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
description: "Manage Git access to projects by adding CA certificates to your top-level group, instead of individual groups."
|
||||
---
|
||||
|
||||
# Manage group's SSH certificates
|
||||
# Manage group SSH certificates
|
||||
|
||||
DETAILS:
|
||||
**Tier:** Premium, Ultimate
|
||||
**Offering:** GitLab.com
|
||||
|
||||
Manage Git access to the projects by sharing public Certified Authority (`CA`) files in your organization's top-level group.
|
||||
You can control and manage Git access to your projects and groups with SSH certificates.
|
||||
|
||||
Git access control options on GitLab SaaS (SSH, HTTPS) rely on credentials (such as access tokens and SSH keys)
|
||||
setup in the user profile and are out of control of the organization.
|
||||
To temporarily grant Git access to your projects, you can use SSH certificates.
|
||||
SSH certificates are cryptographically signed documents that authenticate a user's identity and
|
||||
permissions.
|
||||
They are issued by a trusted Certificate Authority (CA) and contain information such as
|
||||
the user's identity, validity period, and permissions.
|
||||
|
||||
The benefits of SSH certificate authentication are:
|
||||
|
||||
- **Centralized access control**: You can manage access through a central CA, instead of individual
|
||||
user-managed SSH keys.
|
||||
- **Enhanced security**: SSH certificates are more secure than traditional SSH keys.
|
||||
- **Time-limited access**: You can set certificates to expire after a specific period.
|
||||
- **Simplified credential management**: Organizations can maintain a list of approved
|
||||
SSH certificate credentials for repository access.
|
||||
- **Independent from user-managed credentials**: Access is controlled with group-managed
|
||||
certificates, and not users' personal public SSH keys.
|
||||
|
||||
## SSH certificates and SSH keys
|
||||
|
||||
The following table compares SSH certificates and SSH keys:
|
||||
|
||||
| Feature | SSH certificates | SSH keys |
|
||||
| ------------------------- | ------------------------------------- | -------- |
|
||||
| **Access control** | Centralized through group-managed CA. | Distributed across individual user accounts. |
|
||||
| **Expiration** | Built-in expiration. | No built-in expiration. |
|
||||
| **Credential management** | Managed by group Owners. | Managed by individual users. |
|
||||
| **Setup complexity** | More complex initial setup. | Simpler initial setup. |
|
||||
|
||||
## Authentication flow
|
||||
|
||||
The following diagram illustrates how SSH certificate authentication works
|
||||
in GitLab, from requesting a certificate to accessing a repository:
|
||||
|
||||
```mermaid
|
||||
%%{init: { "fontFamily": "GitLab Sans" }}%%
|
||||
sequenceDiagram
|
||||
accTitle: SSH certificate authentication flow
|
||||
accDescr: Sequential diagram showing how a user obtains an SSH certificate from a Group Certificate Authority and uses it to access a Git repository through GitLab.
|
||||
|
||||
participant User
|
||||
participant GroupCA as Group Certificate Authority
|
||||
participant GitLab
|
||||
participant GitRepo as Git Repository
|
||||
|
||||
User->>GroupCA: Request SSH certificate
|
||||
GroupCA->>User: Issue signed SSH certificate
|
||||
User->>GitLab: Attempt to access repository via SSH
|
||||
GitLab->>GitLab: Verify certificate is valid and issued by Group CA
|
||||
GitLab->>GitRepo: Grant access
|
||||
GitRepo->>User: Allow repository operations
|
||||
```
|
||||
|
||||
The authentication process verifies that users have valid SSH certificates before
|
||||
allowing repository access.
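
As a concrete illustration of the first two steps in the diagram, a CA operator could issue a short-lived certificate with standard OpenSSH tooling. This is a sketch only; the file names, identity, principal, and one-hour validity are assumptions, not GitLab requirements:

```shell
# Create the CA key pair. The public half (ca_key.pub) is what you add
# to the top-level group.
ssh-keygen -t ed25519 -f ca_key -C "example-group-ca"

# Sign a user's public key with the CA key, producing user_key-cert.pub.
# -I sets the certificate identity, -n the principal (the GitLab username),
# and -V limits the validity period to one hour.
ssh-keygen -s ca_key -I user@example.com -n gitlab-username -V +1h user_key.pub
```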
|
||||
|
||||
## Add a CA certificate to a top-level group
|
||||
|
||||
|
|
@ -68,11 +118,17 @@ The user certificates can only be used to access the projects in the top-level g
|
|||
> - [Enabled on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues/426235) in GitLab 16.9.
|
||||
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/488635) in GitLab 17.7. Feature flag `enforce_ssh_certificates_via_settings` removed.
|
||||
|
||||
You can enforce usage of SSH certificates and forbid users from authenticating using SSH
|
||||
You can enforce the usage of SSH certificates and restrict users from authenticating using SSH
|
||||
keys and access tokens.
|
||||
|
||||
When SSH certificates are enforced, only individual user accounts are affected.
|
||||
It does not apply to service accounts, deploy keys, and other types of internal accounts.
|
||||
When SSH certificates are enforced:
|
||||
|
||||
- Only individual user accounts are affected.
|
||||
- It does not apply to service accounts, deploy keys, and other types of internal accounts.
|
||||
- Only SSH certificates added to the group by Owners are used to authenticate repository access.
|
||||
|
||||
NOTE:
|
||||
Enforcing SSH certificates disables HTTPS access for regular users.
|
||||
|
||||
Prerequisites:
|
||||
|
||||
|
|
|
|||
|
|
@ -185,6 +185,7 @@ To auto-format this table, use the VS Code Markdown Table formatter: `https://do
|
|||
| `/unassign` | **{dotted-circle}** No | **{check-circle}** Yes | **{check-circle}** Yes | Remove all assignees. |
|
||||
| `/unlabel ~label1 ~label2` or `/remove_label ~label1 ~label2` | **{check-circle}** Yes | **{check-circle}** Yes | **{check-circle}** Yes | Remove specified labels. |
|
||||
| `/unlabel` or `/remove_label` | **{check-circle}** Yes | **{check-circle}** Yes | **{check-circle}** Yes | Remove all labels. |
|
||||
| `/unlink` | **{check-circle}** Yes | **{check-circle}** Yes | **{check-circle}** Yes | Remove link to the provided work item. The `<work item>` value should be in the format of `#work_item`, `group/project#work_item`, or the full work item URL. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/481851) in GitLab 17.8. |
|
||||
| `/unsubscribe` | **{check-circle}** Yes | **{check-circle}** Yes | **{check-circle}** Yes | Unsubscribe from notifications. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/420796) in GitLab 16.4. |
|
||||
| `/weight <value>` | **{check-circle}** Yes | **{dotted-circle}** No | **{dotted-circle}** No | Set weight. Valid options for `<value>` include `0`, `1`, and `2`. |
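
For example, several of these commands can be combined in a single comment; the label and work item reference below are hypothetical:

```plaintext
/unlabel ~backend
/unlink #42
/unsubscribe
```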
|
||||
|
||||
|
|
|
|||
|
|
@ -89,7 +89,7 @@ module ObjectStorage
|
|||
UseIamProfile: config.use_iam_profile?,
|
||||
ServerSideEncryption: config.server_side_encryption,
|
||||
SSEKMSKeyID: config.server_side_encryption_kms_key_id,
|
||||
AwsSDK: Feature.enabled?(:workhorse_use_aws_sdk_v2, :instance) ? "v2" : "v1"
|
||||
AwsSDK: "v2"
|
||||
}.compact
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -23748,6 +23748,9 @@ msgstr ""
|
|||
msgid "Failed to create a branch for this issue. Please try again."
|
||||
msgstr ""
|
||||
|
||||
msgid "Failed to create a new directory. See exception details for more information."
|
||||
msgstr ""
|
||||
|
||||
msgid "Failed to create a to-do item for the design."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -40612,10 +40615,13 @@ msgstr ""
|
|||
msgid "PagesDomain|Certificate Key is too long. (Max %d bytes)"
|
||||
msgstr ""
|
||||
|
||||
msgid "PagesUsageQuota|Active parallel deployments"
|
||||
msgstr ""
|
||||
|
||||
msgid "PagesUsageQuota|Learn about limits for Pages deployments"
|
||||
msgstr ""
|
||||
|
||||
msgid "PagesUsageQuota|Parallel deployments"
|
||||
msgid "PagesUsageQuota|Remaining deployments"
|
||||
msgstr ""
|
||||
|
||||
msgid "Pages|+ %{n} more deployments"
|
||||
|
|
@ -40663,9 +40669,6 @@ msgstr ""
|
|||
msgid "Pages|No deployments yet"
|
||||
msgstr ""
|
||||
|
||||
msgid "Pages|Pages deployments"
|
||||
msgstr ""
|
||||
|
||||
msgid "Pages|Parallel deployments"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@ -51,6 +51,10 @@ module QA
|
|||
runner.tags.all? { |tag| has_content?(tag) }
|
||||
end
|
||||
end
|
||||
|
||||
def has_no_runner?(runner)
|
||||
has_no_element?("runner-row-#{runner.id}")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -12,6 +12,10 @@ module QA
|
|||
def has_online_runner?
|
||||
has_element?('runner-status-icon', status: 'online')
|
||||
end
|
||||
|
||||
def has_offline_runner?
|
||||
has_element?('runner-status-icon', status: 'offline')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -65,6 +65,10 @@ module QA
|
|||
end
|
||||
end
|
||||
|
||||
def unregister!
|
||||
unregister_runner
|
||||
end
|
||||
|
||||
def remove_via_api!
|
||||
super
|
||||
ensure
|
||||
|
|
@ -124,6 +128,12 @@ module QA
|
|||
raise(e)
|
||||
end
|
||||
|
||||
def unregister_runner
|
||||
raise "Cannot unregister runner: Docker container not initialized for runner '#{name}'" unless @docker_container
|
||||
|
||||
@docker_container.run_unregister_command!
|
||||
end
|
||||
|
||||
def populate_initial_id
|
||||
tag_list = tags ? { tag_list: tags.compact.join(',') } : {}
|
||||
runner = runner(**tag_list)
|
||||
|
|
|
|||
|
|
@ -39,6 +39,10 @@ module QA
|
|||
start_container_and_register
|
||||
end
|
||||
|
||||
def unregister!
|
||||
unregister_runner
|
||||
end
|
||||
|
||||
def remove_via_api!
|
||||
super
|
||||
ensure
|
||||
|
|
@ -106,6 +110,12 @@ module QA
|
|||
raise(e)
|
||||
end
|
||||
|
||||
def unregister_runner
|
||||
raise "Cannot unregister runner: Docker container not initialized for runner '#{name}'" unless @docker_container
|
||||
|
||||
@docker_container.run_unregister_command!
|
||||
end
|
||||
|
||||
def populate_initial_id
|
||||
tag_list = tags ? { tag_list: tags.compact.join(',') } : {}
|
||||
runner = runner(**tag_list)
|
||||
|
|
|
|||
|
|
@ -52,6 +52,14 @@ module QA
|
|||
shell("docker exec #{@name} sh -c '#{prove_airgap}'") if network == 'airgapped'
|
||||
end
|
||||
|
||||
def run_unregister_command!
|
||||
cmd = <<~CMD.tr("\n", ' ')
|
||||
docker exec --detach #{@name} sh -c "#{unregister_command}"
|
||||
CMD
|
||||
|
||||
shell(cmd, mask_secrets: [runner_auth_token])
|
||||
end
|
||||
|
||||
def tags=(tags)
|
||||
@tags = tags
|
||||
@run_untagged = false
|
||||
|
|
@ -105,6 +113,15 @@ module QA
|
|||
CMD
|
||||
end
|
||||
|
||||
def runner_auth_token
|
||||
runner_list = shell("docker exec #{@name} sh -c 'gitlab-runner list'")
|
||||
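# The `gitlab-runner list` output is colorized, so the pattern skips the
# ANSI reset sequence (\e[0;m) before capturing the token value.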
runner_list.match(/Token\e\[0;m=([a-zA-Z0-9_-]+)/i)&.[](1)
|
||||
end
|
||||
|
||||
def unregister_command
|
||||
"gitlab-runner unregister --url #{@address} --token #{runner_auth_token}"
|
||||
end
|
||||
|
||||
# Ping Cloudflare DNS, should fail
|
||||
# Ping Registry, should fail to resolve
|
||||
def prove_airgap
|
||||
|
|
|
|||
|
|
@ -0,0 +1,37 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module QA
|
||||
RSpec.describe 'Verify', product_group: :runner do
|
||||
describe 'Runner' do
|
||||
let(:executor) { "qa-runner-#{SecureRandom.hex(6)}" }
|
||||
let!(:runner) { create(:deprecated_group_runner, name: executor, tags: ["e2e-test-#{SecureRandom.hex(6)}"]) }
|
||||
|
||||
after do
|
||||
runner.remove_via_api!
|
||||
# Skip 404 since the test deletes the runner by unregistering in this case
|
||||
rescue StandardError => e
|
||||
raise e unless e.message.include?('404')
|
||||
end
|
||||
|
||||
it 'user unregisters a runner with deprecated registration token',
|
||||
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/510655' do
|
||||
Flow::Login.sign_in
|
||||
|
||||
runner.group.visit!
|
||||
|
||||
Page::Group::Menu.perform(&:go_to_runners)
|
||||
|
||||
Page::Group::Runners::Index.perform do |group_runners|
|
||||
expect { group_runners.has_active_runner?(runner) }.to eventually_be(true).within(sleep_interval: 2)
|
||||
end
|
||||
|
||||
runner.unregister!
|
||||
|
||||
Page::Group::Runners::Index.perform do |group_runners|
|
||||
group_runners.refresh
|
||||
expect { group_runners.has_no_runner?(runner) }.to eventually_be(true).within(sleep_interval: 2)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module QA
|
||||
RSpec.describe 'Verify', product_group: :runner do
|
||||
describe 'Runner' do
|
||||
let(:executor) { "qa-runner-#{SecureRandom.hex(6)}" }
|
||||
let!(:runner) { create(:project_runner, name: executor, tags: ["e2e-test-#{SecureRandom.hex(6)}"]) }
|
||||
|
||||
after do
|
||||
runner.remove_via_api!
|
||||
end
|
||||
|
||||
it 'user unregisters a runner with authentication token',
|
||||
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/510652' do
|
||||
Flow::Login.sign_in
|
||||
|
||||
runner.project.visit!
|
||||
|
||||
Page::Project::Menu.perform(&:go_to_ci_cd_settings)
|
||||
Page::Project::Settings::CiCd.perform do |settings|
|
||||
settings.expand_runners_settings do |page|
|
||||
expect(page).to have_content(executor)
|
||||
expect(page).to have_online_runner
|
||||
end
|
||||
end
|
||||
|
||||
runner.unregister!
|
||||
|
||||
page.refresh
|
||||
|
||||
Page::Project::Settings::CiCd.perform do |settings|
|
||||
settings.expand_runners_settings do |page|
|
||||
expect(page).to have_content(executor)
|
||||
expect(page).to have_offline_runner
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -200,6 +200,31 @@ module QA
|
|||
end
|
||||
end
|
||||
|
||||
describe '#unregister!' do
|
||||
let(:run_unregister_command) { subject.send(:run_unregister_command!) }
|
||||
|
||||
before do
|
||||
allow(subject).to receive(:shell)
|
||||
|
||||
subject.instance_eval do
|
||||
def runner_auth_token
|
||||
token
|
||||
end
|
||||
end
|
||||
|
||||
run_unregister_command
|
||||
end
|
||||
|
||||
it 'sets url' do
|
||||
expect(subject).to have_received_masked_shell_command(/ --url #{subject.address} /)
|
||||
end
|
||||
|
||||
it 'sets masked token' do
|
||||
auth_token = subject.runner_auth_token
|
||||
expect(subject).to have_received_masked_shell_command(/ --token #{auth_token}/)
|
||||
end
|
||||
end
|
||||
|
||||
RSpec::Matchers.define "have_received_masked_shell_command" do |cmd|
|
||||
match do |actual|
|
||||
expect(actual).to have_received(:shell).with(cmd, mask_secrets: anything)
|
||||
|
|
|
|||
|
|
@ -264,7 +264,7 @@ RSpec.describe 'Database schema',
|
|||
approval_merge_request_rules: 17,
|
||||
ci_builds: 27,
|
||||
ci_pipelines: 24,
|
||||
ci_runners: 16,
|
||||
ci_runners: 17,
|
||||
ci_runners_e59bb2812d: 16,
|
||||
deployments: 18,
|
||||
epics: 19,
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ RSpec.describe 'Projects > Files > User creates a directory', :js, feature_categ
|
|||
|
||||
it 'creates the directory in the default branch' do
|
||||
fill_in(:dir_name, with: 'new_directory')
|
||||
click_button('Create directory')
|
||||
click_button('Commit changes')
|
||||
|
||||
expect(page).to have_content('master')
|
||||
expect(page).to have_content('The directory has been successfully created')
|
||||
|
|
@ -39,7 +39,7 @@ RSpec.describe 'Projects > Files > User creates a directory', :js, feature_categ
|
|||
it 'does not create a directory with the name of an existing directory' do
|
||||
fill_in(:dir_name, with: 'files')
|
||||
fill_in(:commit_message, with: 'New commit message', visible: true)
|
||||
click_button('Create directory')
|
||||
click_button('Commit changes')
|
||||
|
||||
expect(page).to have_content('A directory with this name already exists')
|
||||
expect(page).to have_current_path(project_tree_path(project, 'master'), ignore_query: true)
|
||||
|
|
@ -58,7 +58,7 @@ RSpec.describe 'Projects > Files > User creates a directory', :js, feature_categ
|
|||
click_button('New directory')
|
||||
|
||||
fill_in(:dir_name, with: 'new_directory')
|
||||
click_button('Create directory')
|
||||
click_button('Commit changes')
|
||||
|
||||
expect(page).to have_content('files')
|
||||
expect(page).to have_content('new_directory')
|
||||
|
|
@ -70,18 +70,34 @@ RSpec.describe 'Projects > Files > User creates a directory', :js, feature_categ
|
|||
first('.add-to-tree').click
|
||||
click_button('New directory')
|
||||
fill_in(:dir_name, with: 'new_directory')
|
||||
choose('Commit to a new branch', option: true)
|
||||
fill_in(:branch_name, with: 'new-feature')
|
||||
click_button('Create directory')
|
||||
end
|
||||
|
||||
it 'creates the directory in the new branch and redirect to the merge request' do
|
||||
expect(page).to have_content('new-feature')
|
||||
expect(page).to have_content('The directory has been successfully created')
|
||||
expect(page).to have_content('New merge request')
|
||||
expect(page).to have_content('From new-feature into master')
|
||||
expect(page).to have_content('Add new directory')
|
||||
context 'when create a merge request for changes is selected' do
|
||||
it 'creates the directory in the new branch and redirect to the merge request' do
|
||||
click_button('Commit changes')
|
||||
|
||||
expect(page).to have_current_path(project_new_merge_request_path(project), ignore_query: true)
|
||||
expect(page).to have_content('new-feature')
|
||||
expect(page).to have_content('The directory has been successfully created')
|
||||
expect(page).to have_content('New merge request')
|
||||
expect(page).to have_content('From new-feature into master')
|
||||
expect(page).to have_content('Add new directory')
|
||||
|
||||
expect(page).to have_current_path(project_new_merge_request_path(project), ignore_query: true)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when create a merge request for changes is not selected' do
|
||||
it 'creates the directory in the new branch and redirect to that directory' do
|
||||
uncheck('Create a merge request for this change')
|
||||
click_button('Commit changes')
|
||||
|
||||
expect(page).to have_content('The directory has been successfully created')
|
||||
expect(page).to have_content('new_directory')
|
||||
expect(page).to have_current_path(project_tree_path(project, File.join('new-feature', 'new_directory')),
|
||||
ignore_query: true)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -102,7 +118,7 @@ RSpec.describe 'Projects > Files > User creates a directory', :js, feature_categ
|
|||
click_button('New directory')
|
||||
fill_in(:dir_name, with: 'new_directory')
|
||||
fill_in(:commit_message, with: 'New commit message', visible: true)
|
||||
click_button('Create directory')
|
||||
click_button('Commit changes')
|
||||
|
||||
fork = user.fork_of(project2.reload)
|
||||
wait_for_requests
|
||||
|
|
|
|||
|
|
@ -24,6 +24,7 @@ RSpec.describe 'Create issue work item', :js, feature_category: :team_planning d
|
|||
expect(page).to have_selector('[data-testid="work-item-assignees"]')
|
||||
expect(page).to have_selector('[data-testid="work-item-labels"]')
|
||||
expect(page).to have_selector('[data-testid="work-item-milestone"]')
|
||||
expect(page).to have_selector('[data-testid="work-item-parent"]')
|
||||
|
||||
send_keys 'I am a new issue'
|
||||
click_button 'Create issue'
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import { GlLabel } from '@gitlab/ui';
|
||||
import Vue, { nextTick } from 'vue';
|
||||
import VueApollo from 'vue-apollo';
|
||||
|
||||
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import createMockApollo from 'helpers/mock_apollo_helper';
|
||||
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
|
||||
|
|
@ -143,6 +143,14 @@ describe('Board card', () => {
|
|||
expect(wrapper.classes()).not.toContain('is-active');
|
||||
});
|
||||
|
||||
it('renders card with unique id', () => {
|
||||
mountComponent();
|
||||
|
||||
expect(findBoardCardButton().attributes().id).toBe(
|
||||
`listItem-${mockIssue.referencePath.split('#')[0]}/${getIdFromGraphQLId(mockIssue.id)}`,
|
||||
);
|
||||
});
|
||||
|
||||
describe('when mouseup event is called on the card', () => {
|
||||
beforeEach(() => {
|
||||
mountComponent({ mountOptions: { attachTo: document.body } });
|
||||
|
|
|
|||
|
|
@ -1,4 +1,3 @@
|
|||
import { GlModal, GlFormTextarea, GlFormCheckbox } from '@gitlab/ui';
|
||||
import { shallowMount } from '@vue/test-utils';
|
||||
import { nextTick } from 'vue';
|
||||
import axios from 'axios';
|
||||
|
|
@ -6,21 +5,22 @@ import MockAdapter from 'axios-mock-adapter';
|
|||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { createAlert } from '~/alert';
|
||||
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
|
||||
import { visitUrl } from '~/lib/utils/url_utility';
|
||||
import * as urlUtility from '~/lib/utils/url_utility';
|
||||
import { logError } from '~/lib/logger';
|
||||
import CommitChangesModal from '~/repository/components/commit_changes_modal.vue';
|
||||
import NewDirectoryModal from '~/repository/components/new_directory_modal.vue';
|
||||
|
||||
jest.mock('~/alert');
|
||||
jest.mock('~/lib/utils/url_utility', () => ({
|
||||
visitUrl: jest.fn(),
|
||||
}));
|
||||
jest.mock('~/lib/logger');
|
||||
|
||||
const initialProps = {
|
||||
modalTitle: 'Create new directory',
|
||||
modalId: 'modal-new-directory',
|
||||
commitMessage: 'Add new directory',
|
||||
targetBranch: 'some-target-branch',
|
||||
originalBranch: 'master',
|
||||
originalBranch: 'main',
|
||||
canPushCode: true,
|
||||
canPushToBranch: true,
|
||||
path: 'create_dir',
|
||||
};
|
||||
|
||||
|
|
@ -35,6 +35,7 @@ const defaultFormValue = {
|
|||
describe('NewDirectoryModal', () => {
|
||||
let wrapper;
|
||||
let mock;
|
||||
let visitUrlSpy;
|
||||
|
||||
const createComponent = (props = {}) => {
|
||||
wrapper = shallowMount(NewDirectoryModal, {
|
||||
|
|
@ -46,89 +47,50 @@ describe('NewDirectoryModal', () => {
|
|||
static: true,
|
||||
visible: true,
|
||||
},
|
||||
stubs: {
|
||||
CommitChangesModal,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const findModal = () => wrapper.findComponent(GlModal);
|
||||
const findCommitChangesModal = () => wrapper.findComponent(CommitChangesModal);
|
||||
const findDirName = () => wrapper.find('[name="dir_name"]');
|
||||
const findBranchName = () => wrapper.find('[name="branch_name"]');
|
||||
const findCommitMessage = () => wrapper.findComponent(GlFormTextarea);
|
||||
const findMrCheckbox = () => wrapper.findComponent(GlFormCheckbox);
|
||||
|
||||
const fillForm = async (inputValue = {}) => {
|
||||
const {
|
||||
dirName = defaultFormValue.dirName,
|
||||
branchName = defaultFormValue.branchName,
|
||||
commitMessage = defaultFormValue.commitMessage,
|
||||
createNewMr = true,
|
||||
} = inputValue;
|
||||
|
||||
const fillForm = async (dirName = defaultFormValue.dirName) => {
|
||||
await findDirName().vm.$emit('input', dirName);
|
||||
await findBranchName().vm.$emit('input', branchName);
|
||||
await findCommitMessage().vm.$emit('input', commitMessage);
|
||||
await findMrCheckbox().vm.$emit('input', createNewMr);
|
||||
await nextTick();
|
||||
};
|
||||
|
||||
const submitForm = async () => {
|
||||
const mockEvent = { preventDefault: jest.fn() };
|
||||
findModal().vm.$emit('primary', mockEvent);
|
||||
findCommitChangesModal().vm.$emit('submit-form', new FormData());
|
||||
await waitForPromises();
|
||||
};
|
||||
|
||||
it('renders modal component', () => {
|
||||
beforeEach(() => {
|
||||
visitUrlSpy = jest.spyOn(urlUtility, 'visitUrl');
|
||||
createComponent();
|
||||
|
||||
const { modalTitle: title } = initialProps;
|
||||
|
||||
expect(findModal().props()).toMatchObject({
|
||||
title,
|
||||
size: 'md',
|
||||
actionPrimary: {
|
||||
text: NewDirectoryModal.i18n.PRIMARY_OPTIONS_TEXT,
|
||||
},
|
||||
actionCancel: {
|
||||
text: 'Cancel',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
describe('form', () => {
|
||||
it.each`
|
||||
component | defaultValue | canPushCode | targetBranch | originalBranch | exist | attributes
|
||||
${findDirName} | ${undefined} | ${true} | ${initialProps.targetBranch} | ${initialProps.originalBranch} | ${true} | ${'value'}
|
||||
${findBranchName} | ${initialProps.targetBranch} | ${true} | ${initialProps.targetBranch} | ${initialProps.originalBranch} | ${true} | ${'value'}
|
||||
${findBranchName} | ${undefined} | ${false} | ${initialProps.targetBranch} | ${initialProps.originalBranch} | ${false} | ${'value'}
|
||||
${findCommitMessage} | ${initialProps.commitMessage} | ${true} | ${initialProps.targetBranch} | ${initialProps.originalBranch} | ${true} | ${'value'}
|
||||
${findMrCheckbox} | ${'true'} | ${true} | ${'new-target-branch'} | ${'master'} | ${true} | ${'checked'}
|
||||
${findMrCheckbox} | ${'true'} | ${true} | ${'master'} | ${'master'} | ${true} | ${'checked'}
|
||||
`(
|
||||
'has the correct form fields',
|
||||
({
|
||||
component,
|
||||
defaultValue,
|
||||
canPushCode,
|
||||
targetBranch,
|
||||
originalBranch,
|
||||
exist,
|
||||
attributes,
|
||||
}) => {
|
||||
createComponent({
|
||||
canPushCode,
|
||||
targetBranch,
|
||||
originalBranch,
|
||||
});
|
||||
const formField = component();
|
||||
describe('default', () => {
|
||||
beforeEach(() => {
|
||||
createComponent();
|
||||
});
|
||||
|
||||
if (!exist) {
|
||||
expect(formField.exists()).toBe(false);
|
||||
return;
|
||||
}
|
||||
it('renders commit changes modal', () => {
|
||||
expect(findCommitChangesModal().props()).toMatchObject({
|
||||
modalId: 'modal-new-directory',
|
||||
commitMessage: 'Add new directory',
|
||||
targetBranch: 'some-target-branch',
|
||||
originalBranch: 'main',
|
||||
canPushCode: true,
|
||||
canPushToBranch: true,
|
||||
valid: false,
|
||||
loading: false,
|
||||
});
|
||||
});
|
||||
|
||||
expect(formField.exists()).toBe(true);
|
||||
expect(formField.attributes(attributes)).toBe(defaultValue);
|
||||
},
|
||||
);
|
||||
it('includes directory name input', () => {
|
||||
expect(findDirName().exists()).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('form submission', () => {
|
||||
|
|
@ -141,61 +103,53 @@ describe('NewDirectoryModal', () => {
|
|||
});
|
||||
|
||||
describe('valid form', () => {
|
||||
beforeEach(() => {
|
||||
createComponent();
|
||||
it('enables submit button when form is complete', async () => {
|
||||
await fillForm({ dirName: 'test-dir' });
|
||||
expect(findCommitChangesModal().props('valid')).toBe(true);
|
||||
});
|
||||
|
||||
it('passes the formData', async () => {
|
||||
const { dirName, branchName, commitMessage, originalBranch, createNewMr } =
|
||||
defaultFormValue;
|
||||
it('passes additional formData', async () => {
|
||||
const { dirName, branchName } = defaultFormValue;
|
||||
mock.onPost(initialProps.path).reply(HTTP_STATUS_OK, {});
|
||||
await fillForm();
|
||||
await submitForm();
|
||||
|
||||
expect(mock.history.post[0].data.get('dir_name')).toEqual(dirName);
|
||||
expect(mock.history.post[0].data.get('branch_name')).toEqual(branchName);
|
||||
expect(mock.history.post[0].data.get('commit_message')).toEqual(commitMessage);
|
||||
expect(mock.history.post[0].data.get('original_branch')).toEqual(originalBranch);
|
||||
expect(mock.history.post[0].data.get('create_merge_request')).toEqual(String(createNewMr));
|
||||
});
|
||||
|
||||
it('does not submit "create_merge_request" formData if createNewMr is not checked', async () => {
|
||||
mock.onPost(initialProps.path).reply(HTTP_STATUS_OK, {});
|
||||
await fillForm({ createNewMr: false });
|
||||
await submitForm();
|
||||
expect(mock.history.post[0].data.get('create_merge_request')).toBeNull();
|
||||
const formData = mock.history.post[0].data;
|
||||
expect(formData.get('dir_name')).toBe(dirName);
|
||||
expect(formData.get('branch_name')).toBe(branchName);
|
||||
});
|
||||
|
||||
it('redirects to the new directory', async () => {
|
||||
const response = { filePath: 'new-dir-path' };
|
||||
mock.onPost(initialProps.path).reply(HTTP_STATUS_OK, response);
|
||||
|
||||
await fillForm({ dirName: 'foo', branchName: 'master', commitMessage: 'foo' });
|
||||
await fillForm('foo');
|
||||
await submitForm();
|
||||
|
||||
expect(visitUrl).toHaveBeenCalledWith(response.filePath);
|
||||
expect(visitUrlSpy).toHaveBeenCalledWith(response.filePath);
|
||||
});
|
||||
});
|
||||
|
||||
describe('invalid form', () => {
|
||||
beforeEach(() => {
|
||||
createComponent();
|
||||
it('passes correct prop for validity', async () => {
|
||||
await fillForm('');
|
||||
expect(findCommitChangesModal().props('valid')).toBe(false);
|
||||
});
|
||||
|
||||
it('disables submit button', async () => {
|
||||
await fillForm({ dirName: '', branchName: '', commitMessage: '' });
|
||||
expect(findModal().props('actionPrimary').attributes.disabled).toBe(true);
|
||||
});
|
||||
|
||||
it('creates an alert error', async () => {
|
||||
it('creates an alert error and logs the error', async () => {
|
||||
mock.onPost(initialProps.path).timeout();
|
||||
const mockError = new Error('timeout of 0ms exceeded');
|
||||
|
||||
await fillForm({ dirName: 'foo', branchName: 'master', commitMessage: 'foo' });
|
||||
await fillForm('foo');
|
||||
await submitForm();
|
||||
|
||||
expect(createAlert).toHaveBeenCalledWith({
|
||||
message: NewDirectoryModal.i18n.ERROR_MESSAGE,
|
||||
});
|
||||
expect(logError).toHaveBeenCalledWith(
|
||||
'Failed to create a new directory. See exception details for more information.',
|
||||
mockError,
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import { GRAY_100 } from '@gitlab/ui/src/tokens/build/js/tokens';
|
||||
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import SectionedPercentageBar from '~/usage_quotas/components/sectioned_percentage_bar.vue';
|
||||
|
||||
|
|
@ -98,4 +99,88 @@ describe('SectionedPercentageBar', () => {
|
|||
section4.find(`[data-testid="${LEGEND_SECTION_COLOR_TESTID}"]`).attributes('style'),
|
||||
).toBe('background-color: rgb(78, 127, 14);');
|
||||
});
|
||||
|
||||
describe('hiding labels', () => {
|
||||
beforeEach(() => {
|
||||
createComponent({
|
||||
propsData: {
|
||||
sections: [
|
||||
{
|
||||
id: SECTION_1,
|
||||
label: 'Section 1',
|
||||
value: 20,
|
||||
formattedValue: '20',
|
||||
hideLabel: true,
|
||||
},
|
||||
{
|
||||
id: SECTION_2,
|
||||
label: 'Section 2',
|
||||
value: 40,
|
||||
formattedValue: '40',
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('hides the label when hideLabel=true', () => {
|
||||
const section1 = wrapper.findByTestId(PERCENTAGE_BAR_SECTION_TESTID_PREFIX + SECTION_1);
|
||||
expect(section1.find(`[data-testid="${LEGEND_SECTION_COLOR_TESTID}"]`).exists()).toBe(false);
|
||||
});
|
||||
|
||||
it('does not hide the label when hideLabel=false', () => {
|
||||
const section2 = wrapper.findByTestId(
|
||||
PERCENTAGE_BAR_LEGEND_SECTION_TESTID_PREFIX + SECTION_2,
|
||||
);
|
||||
expect(section2.find(`[data-testid="${LEGEND_SECTION_COLOR_TESTID}"]`).exists()).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('custom colors', () => {
|
||||
beforeEach(() => {
|
||||
createComponent({
|
||||
propsData: {
|
||||
sections: [
|
||||
{
|
||||
id: SECTION_1,
|
||||
label: 'Section 1',
|
||||
value: 20,
|
||||
formattedValue: '20',
|
||||
color: GRAY_100,
|
||||
},
|
||||
{
|
||||
id: SECTION_2,
|
||||
label: 'Section 2',
|
||||
value: 40,
|
||||
formattedValue: '40',
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('uses the custom color in the percentage bar', () => {
|
||||
const section1PercentageBar = wrapper.findByTestId(
|
||||
PERCENTAGE_BAR_SECTION_TESTID_PREFIX + SECTION_1,
|
||||
);
|
||||
expect(section1PercentageBar.attributes('style')).toContain(
|
||||
'background-color: rgb(220, 220, 222);',
|
||||
);
|
||||
});
|
||||
|
||||
it('uses the custom color in the legend', () => {
|
||||
const section1Legend = wrapper.findByTestId(
|
||||
PERCENTAGE_BAR_LEGEND_SECTION_TESTID_PREFIX + SECTION_1,
|
||||
);
|
||||
|
||||
expect(
|
||||
section1Legend.find(`[data-testid="${LEGEND_SECTION_COLOR_TESTID}"]`).attributes('style'),
|
||||
).toBe('background-color: rgb(220, 220, 222);');
|
||||
});
|
||||
|
||||
it('falls back to the palette color when not specified', () => {
|
||||
const section2 = wrapper.findByTestId(PERCENTAGE_BAR_SECTION_TESTID_PREFIX + SECTION_2);
|
||||
expect(section2.attributes('style')).toContain('background-color: rgb(177, 79, 24);');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ import { useFakeDate } from 'helpers/fake_date';
|
|||
import { TEST_HOST } from 'helpers/test_constants';
|
||||
import { shallowMountExtended as shallowMount } from 'helpers/vue_test_utils_helper';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
|
||||
import { visitUrl } from '~/lib/utils/url_utility';
|
||||
import IssuableItem from '~/vue_shared/issuable/list/components/issuable_item.vue';
|
||||
import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
|
||||
|
|
@ -23,6 +24,7 @@ const createComponent = ({
|
|||
showWorkItemTypeIcon = false,
|
||||
isActive = false,
|
||||
preventRedirect = false,
|
||||
fullPath = 'gitlab-org/issuable-project-path',
|
||||
} = {}) =>
|
||||
shallowMount(IssuableItem, {
|
||||
propsData: {
|
||||
|
|
@ -34,6 +36,7 @@ const createComponent = ({
|
|||
showWorkItemTypeIcon,
|
||||
isActive,
|
||||
preventRedirect,
|
||||
fullPath,
|
||||
},
|
||||
slots,
|
||||
stubs: {
|
||||
|
|
@ -71,6 +74,7 @@ describe('IssuableItem', () => {
|
|||
const findIssuablePrefetchTrigger = () => wrapper.findByTestId('issuable-prefetch-trigger');
|
||||
const findStatusEl = () => wrapper.findByTestId('issuable-status');
|
||||
const findRelationshipIcons = () => wrapper.findComponent(WorkItemRelationshipIcons);
|
||||
const findIssuableTitleLink = () => wrapper.findByTestId('issuable-title-link');
|
||||
|
||||
describe('computed', () => {
|
||||
describe('author', () => {
|
||||
|
|
@ -692,6 +696,24 @@ describe('IssuableItem', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('renders link with unique id for issuable', () => {
|
||||
wrapper = createComponent({ issuable: { ...mockIssuable, namespace: { fullPath: '' } } });
|
||||
|
||||
expect(findIssuableTitleLink().attributes().id).toBe(
|
||||
`listItem-${'gitlab-org/issuable-project-path'}/${getIdFromGraphQLId(mockIssuable.id)}`,
|
||||
);
|
||||
});
|
||||
|
||||
it('renders link with unique id for work item', () => {
|
||||
wrapper = createComponent({
|
||||
issuable: { ...mockIssuable, namespace: { fullPath: 'gitlab-org/test-project-path' } },
|
||||
});
|
||||
|
||||
expect(findIssuableTitleLink().attributes().id).toBe(
|
||||
`listItem-${'gitlab-org/test-project-path'}/${getIdFromGraphQLId(mockIssuable.id)}`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when preventing redirect on clicking the link', () => {
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@ import WorkItemAssignees from '~/work_items/components/work_item_assignees.vue';
|
|||
import WorkItemLabels from '~/work_items/components/work_item_labels.vue';
|
||||
import WorkItemCrmContacts from '~/work_items/components/work_item_crm_contacts.vue';
|
||||
import WorkItemMilestone from '~/work_items/components/work_item_milestone.vue';
|
||||
import WorkItemParent from '~/work_items/components/work_item_parent.vue';
|
||||
import WorkItemProjectsListbox from '~/work_items/components/work_item_links/work_item_projects_listbox.vue';
|
||||
import TitleSuggestions from '~/issues/new/components/title_suggestions.vue';
|
||||
import {
|
||||
|
|
@ -78,6 +79,7 @@ describe('Create work item component', () => {
|
|||
const findLabelsWidget = () => wrapper.findComponent(WorkItemLabels);
|
||||
const findCrmContactsWidget = () => wrapper.findComponent(WorkItemCrmContacts);
|
||||
const findMilestoneWidget = () => wrapper.findComponent(WorkItemMilestone);
|
||||
const findParentWidget = () => wrapper.findComponent(WorkItemParent);
|
||||
const findProjectsSelector = () => wrapper.findComponent(WorkItemProjectsListbox);
|
||||
const findSelect = () => wrapper.findComponent(GlFormSelect);
|
||||
const findTitleSuggestions = () => wrapper.findComponent(TitleSuggestions);
|
||||
|
|
@ -123,6 +125,7 @@ describe('Create work item component', () => {
|
|||
},
|
||||
provide: {
|
||||
fullPath: 'full-path',
|
||||
groupPath: 'group-path',
|
||||
hasIssuableHealthStatusFeature: false,
|
||||
hasIterationsFeature: true,
|
||||
hasIssueWeightsFeature: false,
|
||||
|
|
@ -509,6 +512,10 @@ describe('Create work item component', () => {
|
|||
it('renders the work item milestone widget', () => {
|
||||
expect(findMilestoneWidget().exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('renders the work item parent widget', () => {
|
||||
expect(findParentWidget().exists()).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import { GlLabel, GlLink, GlButton, GlAvatarsInline } from '@gitlab/ui';
|
||||
import Vue from 'vue';
|
||||
import VueApollo from 'vue-apollo';
|
||||
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
|
||||
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
|
||||
|
||||
import WorkItemLinkChildMetadata from 'ee_else_ce/work_items/components/shared/work_item_link_child_metadata.vue';
|
||||
|
|
@ -114,6 +115,14 @@ describe('WorkItemLinkChildContents', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('renders link with unique id', () => {
|
||||
createComponent();
|
||||
|
||||
expect(findTitleEl().attributes().id).toBe(
|
||||
`listItem-${workItemTask.namespace.fullPath}/${getIdFromGraphQLId(workItemTask.id)}`,
|
||||
);
|
||||
});
|
||||
|
||||
describe('item title', () => {
|
||||
beforeEach(() => {
|
||||
createComponent();
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import VueApollo from 'vue-apollo';
|
|||
import createMockApollo from 'helpers/mock_apollo_helper';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { stubComponent } from 'helpers/stub_component';
|
||||
import { resetHTMLFixture, setHTMLFixture } from 'helpers/fixtures';
|
||||
|
||||
import { TYPE_EPIC, TYPE_ISSUE } from '~/issues/constants';
|
||||
import { DETAIL_VIEW_QUERY_PARAM_NAME } from '~/work_items/constants';
|
||||
|
|
@ -41,7 +42,7 @@ describe('WorkItemDrawer', () => {
|
|||
|
||||
const createComponent = ({
|
||||
open = false,
|
||||
activeItem = { iid: '1', webUrl: 'test', fullPath: 'gitlab-org/gitlab' },
|
||||
activeItem = { id: '1', iid: '1', webUrl: 'test', fullPath: 'gitlab-org/gitlab' },
|
||||
issuableType = TYPE_ISSUE,
|
||||
clickOutsideExcludeSelector = undefined,
|
||||
isGroup = true,
|
||||
|
|
@ -395,4 +396,25 @@ describe('WorkItemDrawer', () => {
|
|||
expect(document.activeElement).toBe(findReferenceLink().element);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when drawer is opened from a link', () => {
|
||||
beforeEach(() => {
|
||||
setHTMLFixture(
|
||||
`<div><a id="listItem-gitlab-org/gitlab/1" tabIndex="1">Link 1</a><div id="drawer-container"></div></div>`,
|
||||
);
|
||||
});
|
||||
afterEach(() => {
|
||||
resetHTMLFixture();
|
||||
});
|
||||
|
||||
it('focuses on the link when drawer is closed', async () => {
|
||||
createComponent({ attachTo: '#drawer-container', open: true });
|
||||
|
||||
findGlDrawer().vm.$emit('close');
|
||||
|
||||
await nextTick();
|
||||
|
||||
expect(document.activeElement).toBe(document.getElementById('listItem-gitlab-org/gitlab/1'));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -35,9 +35,11 @@ RSpec.describe GitlabSchema.types['WorkItem'], feature_category: :team_planning
|
|||
reference
|
||||
archived
|
||||
name
|
||||
duplicatedToWorkItemUrl
|
||||
movedToWorkItemUrl
|
||||
]
|
||||
|
||||
expect(described_class).to have_graphql_fields(*fields)
|
||||
expect(described_class).to have_graphql_fields(*fields).at_least
|
||||
end
|
||||
|
||||
describe 'pagination and count' do
|
||||
|
|
|
|||
|
|
@ -137,16 +137,6 @@ RSpec.describe ObjectStorage::DirectUpload, feature_category: :shared do
|
|||
end
|
||||
end
|
||||
|
||||
context 'when workhorse_use_aws_sdk_v2 is set to false' do
|
||||
before do
|
||||
stub_feature_flags(workhorse_use_aws_sdk_v2: false)
|
||||
end
|
||||
|
||||
it 'sets AwsSDK to v1' do
|
||||
expect(subject[:ObjectStorage][:S3Config][:AwsSDK]).to eq("v1")
|
||||
end
|
||||
end
|
||||
|
||||
context 'when V2 signatures are used' do
|
||||
before do
|
||||
credentials[:aws_signature_version] = 2
|
||||
|
|
|
|||
|
|
@ -8,42 +8,8 @@ RSpec.describe QueueRequeueDeleteOrphanedGroups, migration: :gitlab_main, featur
|
|||
|
||||
it 'does not schedule a new batched migration' do
|
||||
reversible_migration do |migration|
|
||||
migration.before -> {
|
||||
expect(batched_migration).not_to have_scheduled_batched_migration
|
||||
}
|
||||
|
||||
migration.after -> {
|
||||
expect(batched_migration).not_to have_scheduled_batched_migration
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
context 'when executed on .com' do
|
||||
before do
|
||||
allow(Gitlab).to receive(:com_except_jh?).and_return(true)
|
||||
end
|
||||
|
||||
describe '#up' do
|
||||
it 'schedules background migration' do
|
||||
migrate!
|
||||
|
||||
expect(batched_migration).to have_scheduled_batched_migration(
|
||||
table_name: :namespaces,
|
||||
column_name: :id,
|
||||
interval: described_class::DELAY_INTERVAL,
|
||||
batch_size: described_class::BATCH_SIZE,
|
||||
sub_batch_size: described_class::SUB_BATCH_SIZE
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#down' do
|
||||
it 'removes scheduled background migrations' do
|
||||
migrate!
|
||||
schema_migrate_down!
|
||||
|
||||
expect(batched_migration).not_to have_scheduled_batched_migration
|
||||
end
|
||||
migration.before -> { expect(batched_migration).not_to have_scheduled_batched_migration }
|
||||
migration.after -> { expect(batched_migration).not_to have_scheduled_batched_migration }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,49 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
require_migration!
|
||||
|
||||
RSpec.describe QueueReEnqueueDeleteOrphanedGroups, migration: :gitlab_main, feature_category: :groups_and_projects do
|
||||
let!(:batched_migration) { described_class::MIGRATION }
|
||||
|
||||
it 'does not schedule a new batched migration' do
|
||||
reversible_migration do |migration|
|
||||
migration.before -> {
|
||||
expect(batched_migration).not_to have_scheduled_batched_migration
|
||||
}
|
||||
|
||||
migration.after -> {
|
||||
expect(batched_migration).not_to have_scheduled_batched_migration
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
context 'when executed on .com' do
|
||||
before do
|
||||
allow(Gitlab).to receive(:com_except_jh?).and_return(true)
|
||||
end
|
||||
|
||||
describe '#up' do
|
||||
it 'schedules background migration' do
|
||||
migrate!
|
||||
|
||||
expect(batched_migration).to have_scheduled_batched_migration(
|
||||
table_name: :namespaces,
|
||||
column_name: :id,
|
||||
interval: described_class::DELAY_INTERVAL,
|
||||
batch_size: described_class::BATCH_SIZE,
|
||||
sub_batch_size: described_class::SUB_BATCH_SIZE
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#down' do
|
||||
it 'removes scheduled background migrations' do
|
||||
migrate!
|
||||
schema_migrate_down!
|
||||
|
||||
expect(batched_migration).not_to have_scheduled_batched_migration
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -3,12 +3,74 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe WorkItemPresenter, feature_category: :portfolio_management do
|
||||
let(:work_item) { build_stubbed(:work_item) }
|
||||
let(:user) { build_stubbed(:user) }
|
||||
let(:project) { build_stubbed(:project) }
|
||||
let(:original_work_item) { build_stubbed(:work_item, project: project) }
|
||||
let(:target_work_item) { build_stubbed(:work_item, project: project) }
|
||||
let(:target_work_item_url) { Gitlab::UrlBuilder.build(target_work_item) }
|
||||
|
||||
subject(:presenter) { described_class.new(original_work_item, current_user: user) }
|
||||
|
||||
it 'presents a work item and uses methods defined in IssuePresenter' do
|
||||
user = build_stubbed(:user)
|
||||
presenter = work_item.present(current_user: user)
|
||||
|
||||
expect(presenter.issue_path).to eq(presenter.web_path)
|
||||
end
|
||||
|
||||
shared_examples 'returns target work item url based on permissions' do
|
||||
context 'when anonymous' do
|
||||
let(:user) { nil }
|
||||
|
||||
it { is_expected.to be_nil }
|
||||
end
|
||||
|
||||
context 'with signed in user' do
|
||||
before do
|
||||
stub_member_access_level(project, access_level => user) if access_level
|
||||
end
|
||||
|
||||
context 'when user has no role in project' do
|
||||
let(:access_level) { nil }
|
||||
|
||||
it { is_expected.to be_nil }
|
||||
end
|
||||
|
||||
context 'when user has guest role in project' do
|
||||
let(:access_level) { :guest }
|
||||
|
||||
it { is_expected.to eq(target_work_item_url) }
|
||||
end
|
||||
|
||||
context 'when user has reporter role in project' do
|
||||
let(:access_level) { :reporter }
|
||||
|
||||
it { is_expected.to eq(target_work_item_url) }
|
||||
end
|
||||
|
||||
context 'when user has developer role in project' do
|
||||
let(:access_level) { :developer }
|
||||
|
||||
it { is_expected.to eq(target_work_item_url) }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#duplicated_to_work_item_url' do
|
||||
subject { presenter.duplicated_to_work_item_url }
|
||||
|
||||
it { is_expected.to be_nil }
|
||||
|
||||
it_behaves_like 'returns target work item url based on permissions' do
|
||||
let(:original_work_item) { build_stubbed(:work_item, project: project, duplicated_to: target_work_item) }
|
||||
end
|
||||
end
|
||||
|
||||
describe '#moved_to_work_item_url' do
|
||||
subject { presenter.moved_to_work_item_url }
|
||||
|
||||
it { is_expected.to be_nil }
|
||||
|
||||
it_behaves_like 'returns target work item url based on permissions' do
|
||||
# Create the original work item in another project
|
||||
let(:original_work_item) { build_stubbed(:work_item, moved_to: target_work_item) }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ require (
|
|||
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.4.1
|
||||
github.com/BurntSushi/toml v1.4.0
|
||||
github.com/alecthomas/chroma/v2 v2.14.0
|
||||
github.com/aws/aws-sdk-go v1.55.5
|
||||
github.com/aws/aws-sdk-go-v2 v1.32.3
|
||||
github.com/aws/aws-sdk-go-v2/config v1.28.1
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.17.42
|
||||
|
|
@ -59,6 +58,7 @@ require (
|
|||
github.com/DataDog/datadog-go v4.4.0+incompatible // indirect
|
||||
github.com/DataDog/sketches-go v1.0.0 // indirect
|
||||
github.com/Microsoft/go-winio v0.6.1 // indirect
|
||||
github.com/aws/aws-sdk-go v1.55.5 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.18 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.22 // indirect
|
||||
|
|
|
|||
|
|
@ -103,7 +103,7 @@ type S3Config struct {
|
|||
UseIamProfile bool `toml:"-"`
|
||||
ServerSideEncryption string `toml:"-"` // Server-side encryption mode (e.g. AES256, aws:kms)
|
||||
SSEKMSKeyID string `toml:"-"` // Server-side encryption key-management service key ID (e.g. arn:aws:xxx)
|
||||
AwsSDK string `toml:"-"` // Use "v1" to force using AWS SDK v1. Default is v2.
|
||||
AwsSDK string `toml:"-"` // DEPRECATED. AWS SDK v2 is always used now.
|
||||
}
|
||||
|
||||
// GoCloudConfig holds GoCloud-specific configuration
|
||||
|
|
|
|||
|
|
@ -203,21 +203,12 @@ func getClientInformation(ctx context.Context, opts *UploadOpts, fh *FileHandler
|
|||
}
|
||||
uploadDestination, err = objectstore.NewGoCloudObject(p)
|
||||
case opts.UseWorkhorseClientEnabled() && opts.ObjectStorageConfig.IsAWS() && opts.ObjectStorageConfig.IsValid():
|
||||
if opts.ObjectStorageConfig.S3Config.AwsSDK == "v1" {
|
||||
clientMode = "s3_client"
|
||||
uploadDestination, err = objectstore.NewS3Object(
|
||||
opts.RemoteTempObjectID,
|
||||
opts.ObjectStorageConfig.S3Credentials,
|
||||
opts.ObjectStorageConfig.S3Config,
|
||||
)
|
||||
} else {
|
||||
clientMode = "s3_client_v2"
|
||||
uploadDestination, err = objectstore.NewS3v2Object(
|
||||
opts.RemoteTempObjectID,
|
||||
opts.ObjectStorageConfig.S3Credentials,
|
||||
opts.ObjectStorageConfig.S3Config,
|
||||
)
|
||||
}
|
||||
clientMode = "s3_client_v2"
|
||||
uploadDestination, err = objectstore.NewS3v2Object(
|
||||
opts.RemoteTempObjectID,
|
||||
opts.ObjectStorageConfig.S3Credentials,
|
||||
opts.ObjectStorageConfig.S3Config,
|
||||
)
|
||||
case opts.IsMultipart():
|
||||
clientMode = "s3_multipart"
|
||||
uploadDestination, err = objectstore.NewMultipart(
|
||||
|
|
|
|||
|
|
@ -298,7 +298,7 @@ func TestUploadWithS3WorkhorseClient(t *testing.T) {
|
|||
|
||||
for _, tc := range tests {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
s3Creds, s3Config, sess, ts := test.SetupS3(t, "")
|
||||
s3Creds, s3Config, client, ts := test.SetupS3(t, "")
|
||||
defer ts.Close()
|
||||
|
||||
if tc.awsSDK != "" {
|
||||
|
|
@ -325,10 +325,10 @@ func TestUploadWithS3WorkhorseClient(t *testing.T) {
|
|||
|
||||
if tc.expectedErr == nil {
|
||||
require.NoError(t, err)
|
||||
test.S3ObjectExists(t, sess, s3Config, remoteObject, test.ObjectContent)
|
||||
test.S3ObjectExists(ctx, t, client, s3Config, remoteObject, test.ObjectContent)
|
||||
} else {
|
||||
require.Equal(t, tc.expectedErr, err)
|
||||
test.S3ObjectDoesNotExist(t, sess, s3Config, remoteObject)
|
||||
test.S3ObjectDoesNotExist(ctx, t, client, s3Config, remoteObject)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,7 +4,6 @@ import (
|
|||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
|
||||
|
|
@ -22,7 +21,7 @@ func TestS3ClientSetup(t *testing.T) {
|
|||
options := client.Options()
|
||||
require.Nil(t, options.BaseEndpoint)
|
||||
require.Equal(t, "us-west-1", options.Region)
|
||||
require.True(t, aws.BoolValue(&options.UsePathStyle))
|
||||
require.True(t, options.UsePathStyle)
|
||||
|
||||
clientCache.Lock()
|
||||
require.Len(t, clientCache.clients, 1)
|
||||
|
|
|
|||
|
|
@@ -1,125 +0,0 @@
-package objectstore
-
-import (
-    "context"
-    "io"
-    "time"
-
-    "github.com/aws/aws-sdk-go/aws"
-    "github.com/aws/aws-sdk-go/aws/awserr"
-    "github.com/aws/aws-sdk-go/service/s3"
-    "github.com/aws/aws-sdk-go/service/s3/s3manager"
-    "gitlab.com/gitlab-org/labkit/log"
-
-    "gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
-)
-
-// S3Object represents an object stored in Amazon S3.
-type S3Object struct {
-    credentials config.S3Credentials
-    config      config.S3Config
-    objectName  string
-    uploaded    bool
-
-    *uploader
-}
-
-// NewS3Object creates a new S3Object with the provided object name, S3 credentials, and S3 config.
-func NewS3Object(objectName string, s3Credentials config.S3Credentials, s3Config config.S3Config) (*S3Object, error) {
-    o := &S3Object{
-        credentials: s3Credentials,
-        config:      s3Config,
-        objectName:  objectName,
-    }
-
-    o.uploader = newUploader(o)
-    return o, nil
-}
-
-func setEncryptionOptions(input *s3manager.UploadInput, s3Config config.S3Config) {
-    if s3Config.ServerSideEncryption != "" {
-        input.ServerSideEncryption = aws.String(s3Config.ServerSideEncryption)
-
-        if s3Config.ServerSideEncryption == s3.ServerSideEncryptionAwsKms && s3Config.SSEKMSKeyID != "" {
-            input.SSEKMSKeyId = aws.String(s3Config.SSEKMSKeyID)
-        }
-    }
-}
-
-// Upload uploads the S3 object with the provided context and reader.
-func (s *S3Object) Upload(ctx context.Context, r io.Reader) error {
-    sess, err := setupS3Session(s.credentials, s.config)
-    if err != nil {
-        log.WithError(err).Error("error creating S3 session")
-        return err
-    }
-
-    uploader := s3manager.NewUploader(sess)
-
-    input := &s3manager.UploadInput{
-        Bucket: aws.String(s.config.Bucket),
-        Key:    aws.String(s.objectName),
-        Body:   r,
-    }
-
-    setEncryptionOptions(input, s.config)
-
-    _, err = uploader.UploadWithContext(ctx, input)
-    if err != nil {
-        log.WithError(err).Error("error uploading S3 session")
-        // Get the root cause, such as ErrEntityTooLarge, so we can return the proper HTTP status code
-        return unwrapAWSError(err)
-    }
-
-    s.uploaded = true
-
-    return nil
-}
-
-// ETag returns the ETag of the S3 object.
-func (s *S3Object) ETag() string {
-    return ""
-}
-
-// Abort aborts the multipart upload by deleting the object.
-func (s *S3Object) Abort() {
-    s.Delete()
-}
-
-// Delete deletes the S3 object if it has been uploaded.
-func (s *S3Object) Delete() {
-    if !s.uploaded {
-        return
-    }
-
-    session, err := setupS3Session(s.credentials, s.config)
-    if err != nil {
-        log.WithError(err).Error("error setting up S3 session in delete")
-        return
-    }
-
-    svc := s3.New(session)
-    input := &s3.DeleteObjectInput{
-        Bucket: aws.String(s.config.Bucket),
-        Key:    aws.String(s.objectName),
-    }
-
-    // We can't use the request context because in a successful
-    // case, the original request has already completed.
-    deleteCtx, cancel := context.WithTimeout(context.Background(), 60*time.Second) // lint:allow context.Background
-    defer cancel()
-
-    _, err = svc.DeleteObjectWithContext(deleteCtx, input)
-    if err != nil {
-        log.WithError(err).Error("error deleting S3 object", err)
-    }
-}
-
-// This is needed until https://github.com/aws/aws-sdk-go/issues/2820 is closed.
-func unwrapAWSError(e error) error {
-    if awsErr, ok := e.(awserr.Error); ok {
-        return unwrapAWSError(awsErr.OrigErr())
-    }
-
-    return e
-}
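The v1 implementation deleted above is superseded by the S3v2Object type edited later in this diff. For orientation, here is a minimal standalone sketch of the same upload path on aws-sdk-go-v2; the bucket and key are hypothetical, and this is not the Workhorse code itself:

package main

import (
    "context"
    "log"
    "strings"

    "github.com/aws/aws-sdk-go-v2/aws"
    awscfg "github.com/aws/aws-sdk-go-v2/config"
    "github.com/aws/aws-sdk-go-v2/feature/s3/manager"
    "github.com/aws/aws-sdk-go-v2/service/s3"
)

func main() {
    ctx := context.Background()

    // Shared config replaces the v1 session; credentials and region
    // come from the usual environment/config chain.
    cfg, err := awscfg.LoadDefaultConfig(ctx, awscfg.WithRegion("us-west-1"))
    if err != nil {
        log.Fatal(err)
    }

    client := s3.NewFromConfig(cfg)

    // manager.Uploader is the v2 counterpart of v1's s3manager.Uploader
    // and, like it, switches to multipart uploads for large bodies.
    uploader := manager.NewUploader(client)

    _, err = uploader.Upload(ctx, &s3.PutObjectInput{
        Bucket: aws.String("example-bucket"), // hypothetical
        Key:    aws.String("example-object"), // hypothetical
        Body:   strings.NewReader("hello"),
    })
    if err != nil {
        log.Fatal(err)
    }
}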
@@ -1,159 +0,0 @@
-package objectstore
-
-import (
-    "context"
-    "fmt"
-    "io"
-    "path/filepath"
-    "strings"
-    "sync"
-    "testing"
-    "time"
-
-    "github.com/aws/aws-sdk-go/aws/awserr"
-    "github.com/aws/aws-sdk-go/aws/session"
-    "github.com/aws/aws-sdk-go/service/s3"
-    "github.com/stretchr/testify/assert"
-    "github.com/stretchr/testify/require"
-
-    "gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
-    "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
-)
-
-type failedReader struct {
-    io.Reader
-}
-
-func (r *failedReader) Read(_ []byte) (int, error) {
-    origErr := fmt.Errorf("entity is too large")
-    return 0, awserr.New("Read", "read failed", origErr)
-}
-
-func TestS3ObjectUpload(t *testing.T) {
-    testCases := []struct {
-        encryption string
-    }{
-        {encryption: ""},
-        {encryption: s3.ServerSideEncryptionAes256},
-        {encryption: s3.ServerSideEncryptionAwsKms},
-    }
-
-    for _, tc := range testCases {
-        t.Run(fmt.Sprintf("encryption=%v", tc.encryption), func(t *testing.T) {
-            creds, config, sess, ts := test.SetupS3(t, tc.encryption)
-            defer ts.Close()
-
-            deadline := time.Now().Add(testTimeout)
-            tmpDir := t.TempDir()
-
-            objectName := filepath.Join(tmpDir, "s3-test-data")
-            ctx, cancel := context.WithCancel(context.Background())
-
-            object, err := NewS3Object(objectName, creds, config)
-            require.NoError(t, err)
-
-            // copy data
-            n, err := object.Consume(ctx, strings.NewReader(test.ObjectContent), deadline)
-            require.NoError(t, err)
-            require.Equal(t, test.ObjectSize, n, "Uploaded file mismatch")
-
-            test.S3ObjectExists(t, sess, config, objectName, test.ObjectContent)
-            test.CheckS3Metadata(t, sess, config, objectName)
-
-            cancel()
-
-            require.Eventually(t, func() bool {
-                return (test.S3ObjectDoesNotExist(t, sess, config, objectName))
-            }, 5*time.Second, time.Millisecond, "file is still present")
-        })
-    }
-}
-
-func TestConcurrentS3ObjectUpload(t *testing.T) {
-    creds, uploadsConfig, uploadsSession, uploadServer := test.SetupS3WithBucket(t, "uploads", "")
-    defer uploadServer.Close()
-
-    // This will return a separate S3 endpoint
-    _, artifactsConfig, artifactsSession, artifactsServer := test.SetupS3WithBucket(t, "artifacts", "")
-    defer artifactsServer.Close()
-
-    deadline := time.Now().Add(testTimeout)
-    tmpDir := t.TempDir()
-
-    var wg sync.WaitGroup
-
-    for i := 0; i < 4; i++ {
-        wg.Add(1)
-
-        go func(index int) {
-            var sess *session.Session
-            var config config.S3Config
-
-            if index%2 == 0 {
-                sess = uploadsSession
-                config = uploadsConfig
-            } else {
-                sess = artifactsSession
-                config = artifactsConfig
-            }
-
-            name := fmt.Sprintf("s3-test-data-%d", index)
-            objectName := filepath.Join(tmpDir, name)
-            ctx, cancel := context.WithCancel(context.Background())
-            defer cancel()
-
-            object, err := NewS3Object(objectName, creds, config)
-            assert.NoError(t, err)
-
-            // copy data
-            n, err := object.Consume(ctx, strings.NewReader(test.ObjectContent), deadline)
-            assert.NoError(t, err)
-            assert.Equal(t, test.ObjectSize, n, "Uploaded file mismatch")
-
-            test.S3ObjectExists(t, sess, config, objectName, test.ObjectContent)
-            wg.Done()
-        }(i)
-    }
-
-    wg.Wait()
-}
-
-func TestS3ObjectUploadCancel(t *testing.T) {
-    creds, config, _, ts := test.SetupS3(t, "")
-    defer ts.Close()
-
-    ctx, cancel := context.WithCancel(context.Background())
-
-    deadline := time.Now().Add(testTimeout)
-    tmpDir := t.TempDir()
-
-    objectName := filepath.Join(tmpDir, "s3-test-data")
-
-    object, err := NewS3Object(objectName, creds, config)
-
-    require.NoError(t, err)
-
-    // Cancel the transfer before the data has been copied to ensure
-    // we handle this gracefully.
-    cancel()
-
-    _, err = object.Consume(ctx, strings.NewReader(test.ObjectContent), deadline)
-    require.Error(t, err)
-    require.Equal(t, "context canceled", err.Error())
-}
-
-func TestS3ObjectUploadLimitReached(t *testing.T) {
-    creds, config, _, ts := test.SetupS3(t, "")
-    defer ts.Close()
-
-    deadline := time.Now().Add(testTimeout)
-    tmpDir := t.TempDir()
-
-    objectName := filepath.Join(tmpDir, "s3-test-data")
-    object, err := NewS3Object(objectName, creds, config)
-    require.NoError(t, err)
-
-    _, err = object.Consume(context.Background(), &failedReader{}, deadline)
-    require.Error(t, err)
-    require.Equal(t, "entity is too large", err.Error())
-}
@@ -1,104 +0,0 @@
-package objectstore
-
-import (
-    "sync"
-    "time"
-
-    "github.com/aws/aws-sdk-go/aws"
-    "github.com/aws/aws-sdk-go/aws/credentials"
-    "github.com/aws/aws-sdk-go/aws/endpoints"
-    "github.com/aws/aws-sdk-go/aws/session"
-    "github.com/aws/aws-sdk-go/service/s3"
-
-    "gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
-
-    "gitlab.com/gitlab-org/labkit/fips"
-)
-
-type s3Session struct {
-    session *session.Session
-    expiry  time.Time
-}
-
-type s3SessionCache struct {
-    // An S3 session is cached by its input configuration (e.g. region,
-    // endpoint, path style, etc.), but the bucket is actually
-    // determined by the type of object to be uploaded (e.g. CI
-    // artifact, LFS, etc.) during runtime. In practice, we should only
-    // need one session per Workhorse process if we only allow one
-    // configuration for many different buckets. However, using a map
-    // indexed by the config avoids potential pitfalls in case the
-    // bucket configuration is supplied at startup or we need to support
-    // multiple S3 endpoints.
-    sessions map[config.S3Config]*s3Session
-    sync.Mutex
-}
-
-func (s *s3Session) isExpired() bool {
-    return time.Now().After(s.expiry)
-}
-
-var (
-    // By default, it looks like IAM instance profiles may last 6 hours
-    // (via curl http://169.254.169.254/latest/meta-data/iam/security-credentials/<role_name>),
-    // but this may be configurable from anywhere for 15 minutes to 12
-    // hours. To be safe, refresh AWS sessions every 10 minutes.
-    sessionExpiration = 10 * time.Minute
-    sessionCache      = &s3SessionCache{sessions: make(map[config.S3Config]*s3Session)}
-)
-
-// SetupS3Session initializes a new AWS S3 session and refreshes one if
-// necessary. As recommended in https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/sessions.html,
-// sessions should be cached when possible. Sessions are safe to use
-// concurrently as long as the session isn't modified.
-func setupS3Session(s3Credentials config.S3Credentials, s3Config config.S3Config) (*session.Session, error) {
-    sessionCache.Lock()
-    defer sessionCache.Unlock()
-
-    if s, ok := sessionCache.sessions[s3Config]; ok && !s.isExpired() {
-        return s.session, nil
-    }
-
-    cfg := &aws.Config{
-        Region:                        aws.String(s3Config.Region),
-        S3ForcePathStyle:              aws.Bool(s3Config.PathStyle),
-        S3DisableContentMD5Validation: aws.Bool(fips.Enabled()),
-    }
-
-    // In case IAM profiles aren't being used, use the static credentials
-    if s3Credentials.AwsAccessKeyID != "" && s3Credentials.AwsSecretAccessKey != "" {
-        cfg.Credentials = credentials.NewStaticCredentials(s3Credentials.AwsAccessKeyID, s3Credentials.AwsSecretAccessKey, "")
-    }
-
-    if s3Config.Endpoint != "" {
-        // The administrator has configured an S3 endpoint override,
-        // e.g. to make use of S3 IPv6 support or S3 FIPS mode. We
-        // need to configure a custom resolver to make sure that
-        // the custom endpoint is only used for S3 API calls, and not
-        // for STS API calls.
-        s3CustomResolver := func(service, region string, optFns ...func(*endpoints.Options)) (endpoints.ResolvedEndpoint, error) {
-            if service == s3.EndpointsID {
-                return endpoints.ResolvedEndpoint{
-                    URL:           s3Config.Endpoint,
-                    SigningRegion: region,
-                }, nil
-            }
-
-            return endpoints.DefaultResolver().EndpointFor(service, region, optFns...)
-        }
-
-        cfg.EndpointResolver = endpoints.ResolverFunc(s3CustomResolver)
-    }
-
-    sess, err := session.NewSession(cfg)
-    if err != nil {
-        return nil, err
-    }
-
-    sessionCache.sessions[s3Config] = &s3Session{
-        expiry:  time.Now().Add(sessionExpiration),
-        session: sess,
-    }
-
-    return sess, nil
-}
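With the session cache gone, client construction in v2 collapses into config loading plus per-client options; the pattern below mirrors what the test stub at the end of this diff does. The helper name and parameter list are assumptions for illustration. Note that BaseEndpoint is set per client, so an endpoint override no longer needs the custom resolver the deleted code used to keep STS on its default endpoint.

package main

import (
    "context"

    "github.com/aws/aws-sdk-go-v2/aws"
    awscfg "github.com/aws/aws-sdk-go-v2/config"
    "github.com/aws/aws-sdk-go-v2/credentials"
    "github.com/aws/aws-sdk-go-v2/service/s3"
)

// buildS3Client is a hypothetical helper mirroring what the deleted
// setupS3Session did, in v2 terms: static credentials when provided,
// and a per-client endpoint override that only affects this S3 client.
func buildS3Client(ctx context.Context, accessKey, secretKey, region, endpoint string, pathStyle bool) (*s3.Client, error) {
    opts := []func(*awscfg.LoadOptions) error{awscfg.WithRegion(region)}
    if accessKey != "" && secretKey != "" {
        opts = append(opts, awscfg.WithCredentialsProvider(
            credentials.NewStaticCredentialsProvider(accessKey, secretKey, "")))
    }

    cfg, err := awscfg.LoadDefaultConfig(ctx, opts...)
    if err != nil {
        return nil, err
    }

    return s3.NewFromConfig(cfg, func(o *s3.Options) {
        o.UsePathStyle = pathStyle
        if endpoint != "" {
            o.BaseEndpoint = aws.String(endpoint)
        }
    }), nil
}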
@@ -1,100 +0,0 @@
-package objectstore
-
-import (
-    "testing"
-    "time"
-
-    "github.com/aws/aws-sdk-go/aws"
-    "github.com/aws/aws-sdk-go/service/s3"
-    "github.com/aws/aws-sdk-go/service/sts"
-    "github.com/stretchr/testify/require"
-
-    "gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
-)
-
-func TestS3SessionSetup(t *testing.T) {
-    resetS3Sessions()
-
-    credentials := config.S3Credentials{}
-    cfg := config.S3Config{Region: "us-west-1", PathStyle: true}
-
-    sess, err := setupS3Session(credentials, cfg)
-    require.NoError(t, err)
-
-    s3Config := sess.ClientConfig(s3.EndpointsID)
-    require.Equal(t, "https://s3.us-west-1.amazonaws.com", s3Config.Endpoint)
-    require.Equal(t, "us-west-1", s3Config.SigningRegion)
-    require.True(t, aws.BoolValue(sess.Config.S3ForcePathStyle))
-
-    sessionCache.Lock()
-    require.Len(t, sessionCache.sessions, 1)
-    sessionCache.Unlock()
-
-    anotherConfig := cfg
-    _, err = setupS3Session(credentials, anotherConfig)
-    require.NoError(t, err)
-
-    sessionCache.Lock()
-    require.Len(t, sessionCache.sessions, 1)
-    sessionCache.Unlock()
-}
-
-func TestS3SessionEndpointSetup(t *testing.T) {
-    resetS3Sessions()
-
-    credentials := config.S3Credentials{}
-    const customS3Endpoint = "https://example.com"
-    const region = "us-west-2"
-    cfg := config.S3Config{Region: region, PathStyle: true, Endpoint: customS3Endpoint}
-
-    sess, err := setupS3Session(credentials, cfg)
-    require.NoError(t, err)
-
-    // ClientConfig is what is ultimately used by an S3 client
-    s3Config := sess.ClientConfig(s3.EndpointsID)
-    require.Equal(t, customS3Endpoint, s3Config.Endpoint)
-    require.Equal(t, region, s3Config.SigningRegion)
-
-    stsConfig := sess.ClientConfig(sts.EndpointsID)
-    require.Equal(t, "https://sts.amazonaws.com", stsConfig.Endpoint, "STS should use default endpoint")
-}
-
-func TestS3SessionExpiry(t *testing.T) {
-    resetS3Sessions()
-
-    credentials := config.S3Credentials{}
-    cfg := config.S3Config{Region: "us-west-1", PathStyle: true}
-
-    sess, err := setupS3Session(credentials, cfg)
-    require.NoError(t, err)
-
-    require.Equal(t, "us-west-1", aws.StringValue(sess.Config.Region))
-    require.True(t, aws.BoolValue(sess.Config.S3ForcePathStyle))
-
-    firstSession, ok := getS3Session(cfg)
-    require.True(t, ok)
-    require.False(t, firstSession.isExpired())
-
-    firstSession.expiry = time.Now().Add(-1 * time.Second)
-    require.True(t, firstSession.isExpired())
-
-    _, err = setupS3Session(credentials, cfg)
-    require.NoError(t, err)
-
-    nextSession, ok := getS3Session(cfg)
-    require.True(t, ok)
-    require.False(t, nextSession.isExpired())
-}
-
-func resetS3Sessions() {
-    sessionCache.Lock()
-    defer sessionCache.Unlock()
-    sessionCache.sessions = make(map[config.S3Config]*s3Session)
-}
-
-func getS3Session(cfg config.S3Config) (*s3Session, bool) {
-    sessionCache.Lock()
-    defer sessionCache.Unlock()
-    session, ok := sessionCache.sessions[cfg]
-    return session, ok
-}
@@ -84,7 +84,7 @@ func (s *S3v2Object) Upload(ctx context.Context, r io.Reader) error {

     input := &s3.PutObjectInput{
         Bucket: aws.String(s.config.Bucket),
-        Key:    aws.String(normalizeKey(s.objectName)),
+        Key:    aws.String(s.Name()),
         Body:   r,
     }

@@ -125,7 +125,7 @@ func (s *S3v2Object) Delete() {

     input := &s3.DeleteObjectInput{
         Bucket: aws.String(s.config.Bucket),
-        Key:    aws.String(normalizeKey(s.objectName)),
+        Key:    aws.String(s.Name()),
     }

     // We can't use the request context because in a successful
@@ -138,3 +138,8 @@ func (s *S3v2Object) Delete() {
         log.WithError(err).Error("error deleting S3 object", err)
     }
 }
+
+// Name returns the object name without a leading slash.
+func (s *S3v2Object) Name() string {
+    return normalizeKey(s.objectName)
+}
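normalizeKey itself is not part of this diff; judging from the doc comment on Name(), something along these lines would fit (an assumption, not the actual Workhorse implementation):

package objectstore

import "strings"

// Hypothetical sketch of normalizeKey, inferred from the Name() doc
// comment above; the real implementation is not shown in this diff.
func normalizeKey(key string) string {
    return strings.TrimPrefix(key, "/")
}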
@@ -10,9 +10,8 @@ import (
     "testing"
     "time"

-    "github.com/aws/aws-sdk-go/aws/awserr"
-    "github.com/aws/aws-sdk-go/aws/session"
-    "github.com/aws/aws-sdk-go/service/s3"
+    "github.com/aws/aws-sdk-go-v2/service/s3"
+    "github.com/aws/aws-sdk-go-v2/service/s3/types"
     "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/require"
@@ -25,22 +24,21 @@ type s3FailedReader struct {
 }

 func (r *s3FailedReader) Read(_ []byte) (int, error) {
-    origErr := fmt.Errorf("entity is too large")
-    return 0, awserr.New("Read", "read failed", origErr)
+    return 0, fmt.Errorf("entity is too large")
 }

 func TestS3v2ObjectUpload(t *testing.T) {
     testCases := []struct {
-        encryption string
+        encryption types.ServerSideEncryption
     }{
         {encryption: ""},
-        {encryption: s3.ServerSideEncryptionAes256},
-        {encryption: s3.ServerSideEncryptionAwsKms},
+        {encryption: types.ServerSideEncryptionAes256},
+        {encryption: types.ServerSideEncryptionAwsKms},
     }

     for _, tc := range testCases {
-        t.Run(fmt.Sprintf("encryption=%v", tc.encryption), func(t *testing.T) {
-            creds, config, sess, ts := test.SetupS3(t, tc.encryption)
+        t.Run(fmt.Sprintf("encryption=%s", string(tc.encryption)), func(t *testing.T) {
+            creds, config, client, ts := test.SetupS3(t, string(tc.encryption))
             defer ts.Close()

             deadline := time.Now().Add(testTimeout)
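The test reader no longer wraps its error with awserr.New because the awserr-based unwrapping (see the deleted unwrapAWSError above) has no place in v2. As a general v2-era sketch, not something this diff adds, typed errors are usually inspected with errors.As and smithy-go:

package objectstore

import (
    "errors"

    "github.com/aws/smithy-go"
)

// classifyS3Error is a sketch of v2-style error inspection: instead of
// recursively unwrapping awserr.Error chains, pull out the API error
// with errors.As and read its code.
func classifyS3Error(err error) string {
    var apiErr smithy.APIError
    if errors.As(err, &apiErr) {
        return apiErr.ErrorCode() // e.g. "NoSuchKey"
    }
    return "unknown"
}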
@@ -48,6 +46,7 @@ func TestS3v2ObjectUpload(t *testing.T) {
             objectName := filepath.Join(tmpDir, "s3-test-data")
             ctx, cancel := context.WithCancel(context.Background())
+            defer cancel()

             object, err := NewS3v2Object(objectName, creds, config)
             require.NoError(t, err)
@@ -57,24 +56,22 @@ func TestS3v2ObjectUpload(t *testing.T) {
             require.NoError(t, err)
             require.Equal(t, test.ObjectSize, n, "Uploaded file mismatch")

-            test.S3ObjectExists(t, sess, config, objectName, test.ObjectContent)
-            test.CheckS3Metadata(t, sess, config, objectName)
-
-            cancel()
+            test.S3ObjectExists(ctx, t, client, config, object.Name(), test.ObjectContent)
+            test.CheckS3Metadata(ctx, t, client, config, object.Name())

             require.Eventually(t, func() bool {
-                return (test.S3ObjectDoesNotExist(t, sess, config, objectName))
+                return (test.S3ObjectDoesNotExist(ctx, t, client, config, objectName))
             }, 5*time.Second, time.Millisecond, "file is still present")
         })
     }
 }

 func TestConcurrentS3v2ObjectUpload(t *testing.T) {
-    creds, uploadsConfig, uploadsSession, uploadServer := test.SetupS3WithBucket(t, "uploads", "")
+    creds, uploadsConfig, uploadsClient, uploadServer := test.SetupS3WithBucket(t, "uploads", "")
     defer uploadServer.Close()

     // This will return a separate S3 endpoint
-    _, artifactsConfig, artifactsSession, artifactsServer := test.SetupS3WithBucket(t, "artifacts", "")
+    _, artifactsConfig, artifactsClient, artifactsServer := test.SetupS3WithBucket(t, "artifacts", "")
     defer artifactsServer.Close()

     deadline := time.Now().Add(testTimeout)
@@ -86,14 +83,14 @@ func TestConcurrentS3v2ObjectUpload(t *testing.T) {
         wg.Add(1)

         go func(index int) {
-            var sess *session.Session
+            var client *s3.Client
             var config config.S3Config

             if index%2 == 0 {
-                sess = uploadsSession
+                client = uploadsClient
                 config = uploadsConfig
             } else {
-                sess = artifactsSession
+                client = artifactsClient
                 config = artifactsConfig
             }

@@ -110,7 +107,7 @@ func TestConcurrentS3v2ObjectUpload(t *testing.T) {
             assert.NoError(t, err)
             assert.Equal(t, test.ObjectSize, n, "Uploaded file mismatch")

-            test.S3ObjectExists(t, sess, config, objectName, test.ObjectContent)
+            test.S3ObjectExists(ctx, t, client, config, object.Name(), test.ObjectContent)
             wg.Done()
         }(i)
     }
@@ -2,32 +2,33 @@
 package test

 import (
     "context"
     "io"
     "net/http/httptest"
     "os"
     "strings"
     "testing"

-    "github.com/aws/aws-sdk-go/aws"
-    "github.com/aws/aws-sdk-go/aws/credentials"
-    "github.com/aws/aws-sdk-go/aws/session"
+    "github.com/aws/aws-sdk-go-v2/aws"
+    awscfg "github.com/aws/aws-sdk-go-v2/config"
+    "github.com/aws/aws-sdk-go-v2/credentials"
+    "github.com/aws/aws-sdk-go-v2/service/s3"
+    "github.com/aws/aws-sdk-go-v2/service/s3/types"
     "github.com/stretchr/testify/require"

     "gitlab.com/gitlab-org/gitlab/workhorse/internal/config"

-    "github.com/aws/aws-sdk-go/service/s3"
-    "github.com/aws/aws-sdk-go/service/s3/s3manager"
-
     "github.com/johannesboyne/gofakes3"
     "github.com/johannesboyne/gofakes3/backend/s3mem"
 )

 // SetupS3 sets up a local S3 server with a default bucket for testing purposes and returns the necessary credentials, configuration, session, and server.
-func SetupS3(t *testing.T, encryption string) (config.S3Credentials, config.S3Config, *session.Session, *httptest.Server) {
+func SetupS3(t *testing.T, encryption string) (config.S3Credentials, config.S3Config, *s3.Client, *httptest.Server) {
     return SetupS3WithBucket(t, "test-bucket", encryption)
 }

 // SetupS3WithBucket sets up a local S3 server for testing purposes and returns the necessary credentials, configuration, session, and server.
-func SetupS3WithBucket(t *testing.T, bucket string, encryption string) (config.S3Credentials, config.S3Config, *session.Session, *httptest.Server) {
+func SetupS3WithBucket(t *testing.T, bucket string, encryption string) (config.S3Credentials, config.S3Config, *s3.Client, *httptest.Server) {
     backend := s3mem.New()
     faker := gofakes3.New(backend)
     ts := httptest.NewServer(faker.Server())
@@ -37,7 +38,7 @@ func SetupS3WithBucket(t *testing.T, bucket string, encryption string) (config.S
         AwsSecretAccessKey: "YOUR-SECRETACCESSKEY",
     }

-    config := config.S3Config{
+    cfg := config.S3Config{
         Bucket:   bucket,
         Endpoint: ts.URL,
         Region:   "eu-central-1",
@@ -45,36 +46,38 @@ func SetupS3WithBucket(t *testing.T, bucket string, encryption string) (config.S
     }

     if encryption != "" {
-        config.ServerSideEncryption = encryption
+        cfg.ServerSideEncryption = encryption

-        if encryption == s3.ServerSideEncryptionAwsKms {
-            config.SSEKMSKeyID = "arn:aws:1234"
+        if encryption == string(types.ServerSideEncryptionAwsKms) {
+            cfg.SSEKMSKeyID = "arn:aws:1234"
         }
     }

-    sess, err := session.NewSession(&aws.Config{
-        Credentials:      credentials.NewStaticCredentials(creds.AwsAccessKeyID, creds.AwsSecretAccessKey, ""),
-        Endpoint:         aws.String(ts.URL),
-        Region:           aws.String(config.Region),
-        DisableSSL:       aws.Bool(true),
-        S3ForcePathStyle: aws.Bool(true),
-    })
+    ctx := context.Background() // lint:allow context.Background
+    awsCfg, err := awscfg.LoadDefaultConfig(ctx,
+        awscfg.WithRegion(cfg.Region),
+        awscfg.WithCredentialsProvider(credentials.NewStaticCredentialsProvider(creds.AwsAccessKeyID, creds.AwsSecretAccessKey, "")),
+    )
     require.NoError(t, err)

     // Create S3 service client
-    svc := s3.New(sess)
+    client := s3.NewFromConfig(awsCfg, func(o *s3.Options) {
+        o.UsePathStyle = true
+        o.BaseEndpoint = aws.String(ts.URL)
+    })

-    _, err = svc.CreateBucket(&s3.CreateBucketInput{
+    _, err = client.CreateBucket(ctx, &s3.CreateBucketInput{
         Bucket: aws.String(bucket),
     })

     require.NoError(t, err)

-    return creds, config, sess, ts
+    return creds, cfg, client, ts
 }

 // S3ObjectExists will fail the test if the file does not exist.
-func S3ObjectExists(t *testing.T, sess *session.Session, config config.S3Config, objectName string, expectedBytes string) {
-    downloadObject(t, sess, config, objectName, func(tmpfile *os.File, numBytes int64, err error) {
+func S3ObjectExists(ctx context.Context, t *testing.T, client *s3.Client, config config.S3Config, objectName string, expectedBytes string) {
+    downloadObject(ctx, t, client, config, objectName, func(tmpfile *os.File, numBytes int64, err error) {
         require.NoError(t, err)
         require.Equal(t, int64(len(expectedBytes)), numBytes)
@@ -86,26 +89,24 @@ func S3ObjectExists(t *testing.T, sess *session.Session, config config.S3Config,
 }

 // CheckS3Metadata is a helper function for testing S3 metadata.
-func CheckS3Metadata(t *testing.T, sess *session.Session, config config.S3Config, objectName string) {
-    // In a real S3 provider, s3crypto.NewDecryptionClient should probably be used
-    svc := s3.New(sess)
-    result, err := svc.GetObject(&s3.GetObjectInput{
+func CheckS3Metadata(ctx context.Context, t *testing.T, client *s3.Client, config config.S3Config, objectName string) {
+    result, err := client.GetObject(ctx, &s3.GetObjectInput{
         Bucket: aws.String(config.Bucket),
         Key:    aws.String(objectName),
     })
     require.NoError(t, err)

     if config.ServerSideEncryption != "" {
-        require.Equal(t, aws.String(config.ServerSideEncryption), result.ServerSideEncryption)
+        require.Equal(t, config.ServerSideEncryption, string(result.ServerSideEncryption))

-        if config.ServerSideEncryption == s3.ServerSideEncryptionAwsKms {
+        if config.ServerSideEncryption == string(types.ServerSideEncryptionAwsKms) {
             require.Equal(t, aws.String(config.SSEKMSKeyID), result.SSEKMSKeyId)
         } else {
             require.Nil(t, result.SSEKMSKeyId)
         }
     } else {
-        require.Nil(t, result.ServerSideEncryption)
-        require.Nil(t, result.SSEKMSKeyId)
+        require.Empty(t, result.ServerSideEncryption)
+        require.Empty(t, result.SSEKMSKeyId)
     }
 }
@@ -114,10 +115,10 @@ func CheckS3Metadata(t *testing.T, sess *session.Session, config config.S3Config
 // S3ObjectExists because deletion may need to be retried since deferred
 // clean up calls may cause the actual deletion to happen after the
 // initial check.
-func S3ObjectDoesNotExist(t *testing.T, sess *session.Session, config config.S3Config, objectName string) bool {
+func S3ObjectDoesNotExist(ctx context.Context, t *testing.T, client *s3.Client, config config.S3Config, objectName string) bool {
     deleted := false

-    downloadObject(t, sess, config, objectName, func(_ *os.File, _ int64, err error) {
+    downloadObject(ctx, t, client, config, objectName, func(_ *os.File, _ int64, err error) {
         if err != nil && strings.Contains(err.Error(), "NoSuchKey") {
             deleted = true
         }
@@ -126,17 +127,24 @@ func S3ObjectDoesNotExist(t *testing.T, sess *session.Session, config config.S3C
     return deleted
 }

-func downloadObject(t *testing.T, sess *session.Session, config config.S3Config, objectName string, handler func(tmpfile *os.File, numBytes int64, err error)) {
+func downloadObject(ctx context.Context, t *testing.T, client *s3.Client, config config.S3Config, objectName string, handler func(tmpfile *os.File, numBytes int64, err error)) {
     tmpDir := t.TempDir()

     tmpfile, err := os.CreateTemp(tmpDir, "s3-output")
     require.NoError(t, err)

-    downloadSvc := s3manager.NewDownloader(sess)
-    numBytes, err := downloadSvc.Download(tmpfile, &s3.GetObjectInput{
+    result, err := client.GetObject(ctx, &s3.GetObjectInput{
         Bucket: aws.String(config.Bucket),
         Key:    aws.String(objectName),
     })

+    numBytes := int64(0)
+    if err == nil {
+        var copyErr error
+        defer func() { _ = result.Body.Close() }()
+        numBytes, copyErr = io.Copy(tmpfile, result.Body)
+        require.NoError(t, copyErr)
+    }
+
     handler(tmpfile, numBytes, err)
 }
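Taken together, the reworked stub is used roughly like this; the test below is hypothetical, composed only to show the new signatures in one place:

package test

import (
    "context"
    "testing"
)

func TestStubRoundTrip(t *testing.T) {
    ctx := context.Background() // lint:allow context.Background

    // SetupS3 now returns a v2 *s3.Client rather than a v1 *session.Session.
    _, cfg, client, ts := SetupS3(t, "")
    defer ts.Close()

    // The assertion helpers take the context and client explicitly.
    S3ObjectDoesNotExist(ctx, t, client, cfg, "missing-object")
}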