Add latest changes from gitlab-org/gitlab@master
parent 79c469c065
commit d00599dae8
@@ -18,7 +18,7 @@ variables:
# Helm chart ref used by test-on-cng pipeline
GITLAB_HELM_CHART_REF: "6cdb0e1cd4ceb7c9fd01ffa2f62c4a7a4c77a23b"
# Specific ref for cng-mirror project to trigger builds for
GITLAB_CNG_MIRROR_REF: "df7aafcccafdbab732a7cf757efb3b7b74c851dd"
GITLAB_CNG_MIRROR_REF: "8c4bbd04b509dc6cc3cb0469066ef053db028607"
# Makes sure some of the common scripts from pipeline-common use bundler to execute commands
RUN_WITH_BUNDLE: "true"
# Makes sure reporting script defined in .gitlab-qa-report from pipeline-common is executed from correct folder
@@ -1,15 +0,0 @@
---
# Cop supports --autocorrect.
InternalAffairs/RedundantSourceRange:
Details: grace period
Exclude:
- 'rubocop/cop/gitlab/const_get_inherit_false.rb'
- 'rubocop/cop/gitlab/documentation_links/link.rb'
- 'rubocop/cop/gitlab/http_v2.rb'
- 'rubocop/cop/gitlab/httparty.rb'
- 'rubocop/cop/gitlab/json.rb'
- 'rubocop/cop/inject_enterprise_edition_module.rb'
- 'rubocop/cop/project_path_helper.rb'
- 'rubocop/cop/rspec/before_all.rb'
- 'rubocop/cop/rspec/factory_bot/inline_association.rb'
- 'rubocop/cop/rspec/have_gitlab_http_status.rb'
@@ -1,25 +0,0 @@
---
# Cop supports --autocorrect.
RSpec/ChangeByZero:
Exclude:
- 'ee/spec/controllers/groups/todos_controller_spec.rb'
- 'ee/spec/lib/ee/gitlab/scim/group/deprovisioning_service_spec.rb'
- 'ee/spec/lib/merge_requests/external_status_check_changes_auditor_spec.rb'
- 'ee/spec/models/ee/project_member_spec.rb'
- 'spec/features/admin/users/users_spec.rb'
- 'spec/lib/api/helpers_spec.rb'
- 'spec/lib/gitlab/background_migration/backfill_user_details_spec.rb'
- 'spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb'
- 'spec/lib/gitlab/checks/matching_merge_request_spec.rb'
- 'spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb'
- 'spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb'
- 'spec/lib/gitlab/database/tables_truncate_spec.rb'
- 'spec/lib/gitlab/git_access_project_spec.rb'
- 'spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb'
- 'spec/lib/gitlab/github_import/importer/releases_importer_spec.rb'
- 'spec/lib/gitlab/seeders/ci/catalog/resource_seeder_spec.rb'
- 'spec/lib/gitlab/seeders/ci/variables_group_seeder_spec.rb'
- 'spec/lib/gitlab/seeders/ci/variables_project_seeder_spec.rb'
- 'spec/lib/gitlab/seeders/project_environment_seeder_spec.rb'
- 'spec/migrations/db/migrate/20240806100120_remove_records_without_project_from_project_saved_replies_table_spec.rb'
- 'spec/models/integrations/slack_workspace/api_scope_spec.rb'
@@ -1 +1 @@
42fc100c92311d4989681df8c62b91cd18edb886
79d381bd3d4933427e3f3f08821b0e81310764d0
@@ -349,7 +349,7 @@ export default {
class="board-title gl-m-0 gl-flex gl-h-9 gl-items-center gl-px-3 gl-text-base"
>
<gl-button
v-gl-tooltip.hover
v-gl-tooltip
:aria-label="chevronTooltip"
:title="chevronTooltip"
class="board-title-caret no-drag btn-icon gl-cursor-pointer hover:gl-bg-strong"
@@ -377,7 +377,7 @@ export default {
}"
>
<gl-avatar
v-gl-tooltip.hover.bottom
v-gl-tooltip.bottom
:title="listAssignee"
:alt="list.assignee.name"
:src="list.assignee.avatarUrl"
@@ -411,7 +411,7 @@ export default {

<span
v-if="listType !== 'label'"
v-gl-tooltip.hover
v-gl-tooltip
:class="{
'!gl-ml-2': list.collapsed && !showAssigneeListDetails,
'gl-text-subtle': list.collapsed,
@@ -432,7 +432,7 @@ export default {
<!-- EE end -->
<gl-label
v-if="listType === 'label'"
v-gl-tooltip.hover.bottom
v-gl-tooltip.bottom
:background-color="list.label.color"
:description="list.label.description"
:scoped="showScopedLabels(list.label)"
@@ -521,7 +521,7 @@ export default {
<gl-button
v-if="isNewIssueShown"
ref="newIssueBtn"
v-gl-tooltip.hover
v-gl-tooltip
:aria-label="$options.i18n.newIssue"
:title="$options.i18n.newIssue"
size="small"
@@ -532,7 +532,7 @@ export default {

<gl-button
v-if="isNewEpicShown"
v-gl-tooltip.hover
v-gl-tooltip
:aria-label="$options.i18n.newEpic"
:title="$options.i18n.newEpic"
size="small"
@@ -544,7 +544,7 @@ export default {
<gl-button
v-if="isSettingsShown"
ref="settingsBtn"
v-gl-tooltip.hover
v-gl-tooltip
:aria-label="$options.i18n.listSettings"
size="small"
:title="$options.i18n.listSettings"
@@ -98,13 +98,13 @@ export default {
<template>
<work-item-attribute
anchor-id="board-card-due-date"
wrapper-component="span"
:wrapper-component-class="`${cssClass} board-card-info gl-mr-3 gl-cursor-help gl-text-subtle`"
wrapper-component="button"
:wrapper-component-class="`${cssClass} board-card-info gl-mr-3 !gl-cursor-help gl-text-subtle gl-bg-transparent gl-border-0 gl-p-0 focus-visible:gl-focus-inset`"
>
<template #icon>
<gl-icon
:variant="isOverdue ? 'danger' : 'subtle'"
class="board-card-info-icon gl-mr-2"
class="board-card-info-icon"
:name="iconName"
/>
</template>
@@ -1,12 +1,16 @@
<script>
import { GlIcon, GlLink } from '@gitlab/ui';
import { GlIcon, GlLink, GlBadge } from '@gitlab/ui';
import timeagoMixin from '~/vue_shared/mixins/timeago';
import workItemsWidgetMetadataQuery from '../graphql/queries/work_items_widget_metadata.query.graphql';

export default {
name: 'WorkItemsWidget',
components: {
GlIcon,
GlLink,
GlBadge,
},
mixins: [timeagoMixin],
props: {
assignedToYouPath: {
type: String,
@@ -17,16 +21,49 @@ export default {
required: true,
},
},
data() {
return {
metadata: {},
};
},
apollo: {
metadata: {
query: workItemsWidgetMetadataQuery,
variables() {
return { username: gon.current_username };
},
update({ currentUser }) {
return currentUser;
},
},
},
computed: {
isLoadingMetadata() {
return this.$apollo.queries.metadata.loading;
},
assignedCount() {
return this.metadata?.assigned?.count ?? 0;
},
assignedLastUpdatedAt() {
return this.metadata?.assigned?.nodes?.[0]?.updatedAt ?? null;
},
authoredCount() {
return this.metadata?.authored?.count ?? 0;
},
authoredLastUpdatedAt() {
return this.metadata?.authored?.nodes?.[0]?.updatedAt ?? null;
},
},
};
</script>

<template>
<div class="gl-border gl-rounded-lg gl-px-4 gl-py-1">
<h4 class="gl-flex gl-items-center gl-gap-2">
<gl-icon name="work-items" :size="16" />{{ __('Work items') }}
<gl-icon name="issues" :size="16" />{{ __('Issues') }}
</h4>
<ul class="gl-list-none gl-p-0">
<li>
<li class="gl-flex gl-items-center gl-gap-3">
<gl-link
class="gl-flex gl-items-center gl-gap-3 gl-rounded-small gl-px-1 gl-py-1 !gl-no-underline hover:gl-bg-gray-10 dark:hover:gl-bg-alpha-light-8"
variant="meta"
@@ -34,8 +71,17 @@ export default {
>
{{ s__('HomePageWorkItemsWidget|Assigned to you') }}
</gl-link>
<template v-if="!isLoadingMetadata">
<gl-badge data-testid="assigned-count">{{ assignedCount }}</gl-badge>
<span
v-if="assignedLastUpdatedAt"
data-testid="assigned-last-updated-at"
class="gl-ml-auto gl-text-sm gl-text-subtle"
>{{ timeFormatted(assignedLastUpdatedAt) }}</span
>
</template>
</li>
<li>
<li class="gl-flex gl-items-center gl-gap-3">
<gl-link
class="gl-flex gl-items-center gl-gap-3 gl-rounded-small gl-px-1 gl-py-1 !gl-no-underline hover:gl-bg-gray-10 dark:hover:gl-bg-alpha-light-8"
variant="meta"
@@ -43,6 +89,15 @@ export default {
>
{{ s__('HomePageWorkItemsWidget|Authored by you') }}
</gl-link>
<template v-if="!isLoadingMetadata">
<gl-badge data-testid="authored-count">{{ authoredCount }}</gl-badge>
<span
v-if="authoredLastUpdatedAt"
data-testid="authored-last-updated-at"
class="gl-ml-auto gl-text-sm gl-text-subtle"
>{{ timeFormatted(authoredLastUpdatedAt) }}</span
>
</template>
</li>
</ul>
</div>
@@ -0,0 +1,24 @@
query WorkItemsWidgetMetadata($username: String!) {
currentUser {
id
assigned: workItems(
assigneeUsernames: [$username]
state: opened
sort: UPDATED_DESC
first: 1
) {
count
nodes {
id
updatedAt
}
}
authored: workItems(authorUsername: $username, state: opened, sort: UPDATED_DESC, first: 1) {
count
nodes {
id
updatedAt
}
}
}
}
@@ -73,10 +73,10 @@ export default {
<template>
<work-item-attribute
anchor-id="board-card-milestone"
wrapper-component="div"
wrapper-component-class="issue-milestone-details gl-flex gl-max-w-15 gl-gap-2 gl-items-center"
wrapper-component="button"
wrapper-component-class="issue-milestone-details gl-flex gl-max-w-15 gl-gap-2 gl-items-center !gl-cursor-help gl-bg-transparent gl-border-0 gl-p-0 focus-visible:gl-focus-inset"
icon-name="milestone"
icon-class="flex-shrink-0"
icon-class="flex-shrink-0 gl-text-subtle"
:title="milestone.title"
title-component-class="milestone-title gl-inline-block gl-truncate"
>
@@ -0,0 +1,105 @@
/**
 * Manages a Websocket Connection
 * @param {string} url - WebSocket URL
 * @param {Object} options - Configuration options
 * @param {Function} options.onOpen - Open handler
 * @param {Function} options.onMessage - Message handler
 * @param {Function} options.onError - Error handler
 * @param {Function} options.onClose - Close handler
 * @returns {Object} WebSocket connection object with utility methods
 */
export const createWebSocket = (url, options = {}) => {
const {
onOpen = () => {},
onMessage = () => {},
onError = () => {},
onClose = () => {},
} = options;

let socket = null;

const close = () => {
if (socket?.readyState === WebSocket.OPEN || socket?.readyState === WebSocket.CONNECTING) {
socket.close();
}
socket = null;
};

const isConnected = () => {
return socket?.readyState === WebSocket.OPEN;
};

const send = (message) => {
if (isConnected()) {
const payload = typeof message === 'string' ? message : JSON.stringify(message);
socket.send(payload);
}
};

const connect = (initialMessage = null) => {
close(); // Close any existing connection

try {
socket = new WebSocket(url);

socket.onopen = (event) => {
if (initialMessage) {
send(initialMessage);
}

onOpen(event);
};

socket.onmessage = (event) => {
onMessage(event);
};

socket.onclose = (event) => {
socket = null;
onClose(event);
};

socket.onerror = (error) => {
onError(error);
};
} catch (error) {
onError(error);
}
};

const isConnecting = () => {
return socket?.readyState === WebSocket.CONNECTING;
};

return {
connect,
send,
isConnected,
isConnecting,
close,
};
};

/**
 * Parses JSON message from WebSocket event
 * @param {MessageEvent} event - The WebSocket message event
 * @returns {Promise<Object|null>} Parsed message data or null if parsing fails
 */
export const parseMessage = async (event) => {
try {
const data = typeof event.data === 'string' ? event.data : await event.data.text();
return JSON.parse(data);
} catch (error) {
return null;
}
};

/**
 * Safe socket cleanup utility
 * @param {Object|WebSocket} socket - The socket to close
 */
export const closeSocket = (socket) => {
if (typeof socket?.close === 'function') {
socket.close();
}
};
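A minimal usage sketch for the WebSocket helpers added above (illustrative only; the import path, URL, and message shape are assumptions, not part of this commit):

```javascript
// Illustrative usage of createWebSocket, parseMessage, and closeSocket.
// The import path below is hypothetical; the diff does not show where the file lives.
import { createWebSocket, parseMessage, closeSocket } from '~/lib/utils/web_socket';

const connection = createWebSocket('wss://gitlab.example.com/-/cable', {
  onOpen: () => {
    // The socket is open; any initial message passed to connect() has already been sent.
    connection.send({ command: 'subscribe', identifier: 'demo-channel' });
  },
  onMessage: async (event) => {
    // parseMessage resolves to null when the payload is not valid JSON.
    const message = await parseMessage(event);
    if (message) {
      // handle the parsed message here
    }
  },
  onError: () => {
    // surface or log the error
  },
  onClose: () => {
    // the helper resets its internal socket reference before calling this
  },
});

// Open the connection; the initial message is sent as soon as the socket opens.
connection.connect({ command: 'hello' });

// Later, during teardown: closeSocket works on anything exposing a close() function.
closeSocket(connection);
```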
@@ -1,8 +1,7 @@
<script>
import { GlAlert, GlButton, GlFormCheckbox, GlTooltipDirective } from '@gitlab/ui';
import $ from 'jquery';
// eslint-disable-next-line no-restricted-imports
import { mapActions, mapGetters, mapState } from 'vuex';
import { mapActions, mapState } from 'pinia';
import { createAlert } from '~/alert';
import { STATUS_CLOSED, STATUS_MERGED, STATUS_OPEN, STATUS_REOPENED } from '~/issues/constants';
import { detectAndConfirmSensitiveTokens } from '~/lib/utils/secret_detection';
@@ -19,13 +18,12 @@ import HelpIcon from '~/vue_shared/components/help_icon/help_icon.vue';
import { trackSavedUsingEditor } from '~/vue_shared/components/markdown/tracking';
import glAbilitiesMixin from '~/vue_shared/mixins/gl_abilities_mixin';
import { fetchUserCounts } from '~/super_sidebar/user_counts_fetch';

import { badgeState } from '~/merge_requests/badge_state';
import { useNotes } from '~/notes/store/legacy_notes';
import * as constants from '../constants';
import eventHub from '../event_hub';
import { COMMENT_FORM } from '../i18n';
import { createNoteErrorMessages, isSlashCommand } from '../utils';

import issuableStateMixin from '../mixins/issuable_state';
import CommentFieldLayout from './comment_field_layout.vue';
import CommentTypeDropdown from './comment_type_dropdown.vue';
@@ -80,16 +78,15 @@ export default {
};
},
computed: {
...mapGetters([
...mapState(useNotes, [
'isToggleStateButtonLoading',
'getCurrentUserLastNote',
'getUserData',
'getNoteableData',
'getNoteableDataByProp',
'getNotesData',
'getUserData',
'openState',
'hasDrafts',
]),
...mapState(['isToggleStateButtonLoading']),
autocompleteDataSources() {
return gl.GfmAutoComplete?.dataSources;
},
@@ -220,7 +217,7 @@ export default {
});
},
methods: {
...mapActions([
...mapActions(useNotes, [
'saveNote',
'removePlaceholderNotes',
'closeIssuable',
@@ -1,12 +1,12 @@
<script>
// eslint-disable-next-line no-restricted-imports
import { mapGetters, mapActions } from 'vuex';
import { mapState, mapActions } from 'pinia';
import DuoCodeReviewSystemNote from 'ee_component/vue_shared/components/notes/duo_code_review_system_note.vue';
import { __ } from '~/locale';
import PlaceholderNote from '~/vue_shared/components/notes/placeholder_note.vue';
import PlaceholderSystemNote from '~/vue_shared/components/notes/placeholder_system_note.vue';
import SystemNote from '~/vue_shared/components/notes/system_note.vue';
import { FILE_DIFF_POSITION_TYPE } from '~/diffs/constants';
import { useNotes } from '~/notes/store/legacy_notes';
import { SYSTEM_NOTE } from '../constants';
import DiscussionNotesRepliesWrapper from './discussion_notes_replies_wrapper.vue';
import NoteEditedText from './note_edited_text.vue';
@@ -57,7 +57,7 @@ export default {
},
},
computed: {
...mapGetters(['userCanReply']),
...mapState(useNotes, ['userCanReply']),
hasReplies() {
return Boolean(this.replies.length);
},
@@ -88,7 +88,7 @@ export default {
},
},
methods: {
...mapActions(['toggleDiscussion', 'setSelectedCommentPositionHover']),
...mapActions(useNotes, ['toggleDiscussion', 'setSelectedCommentPositionHover']),
componentName(note) {
if (note.isPlaceholderNote) {
if (note.placeholderType === SYSTEM_NOTE) {
@@ -1,14 +1,13 @@
<script>
import { GlButton, GlSprintf, GlLink, GlFormCheckbox } from '@gitlab/ui';
import { mapState } from 'pinia';
// eslint-disable-next-line no-restricted-imports
import { mapGetters as mapVuexGetters, mapActions as mapVuexActions } from 'vuex';
import { mapState, mapActions } from 'pinia';
import { mergeUrlParams } from '~/lib/utils/url_utility';
import { __ } from '~/locale';
import glAbilitiesMixin from '~/vue_shared/mixins/gl_abilities_mixin';
import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
import { trackSavedUsingEditor } from '~/vue_shared/components/markdown/tracking';
import { useBatchComments } from '~/batch_comments/store';
import { useNotes } from '~/notes/store/legacy_notes';
import eventHub from '../event_hub';
import issuableStateMixin from '../mixins/issuable_state';
import resolvable from '../mixins/resolvable';
@@ -136,10 +135,9 @@ export default {
};
},
computed: {
...mapVuexGetters([
...mapState(useNotes, [
'getDiscussionLastNote',
'getNoteableData',
'getNoteableDataByProp',
'getNotesDataByProp',
'getUserDataByProp',
]),
@@ -191,7 +189,7 @@ export default {
return null;
},
markdownPreviewPath() {
const notable = this.getNoteableDataByProp('preview_note_path');
const notable = this.getNoteableData.preview_note_path;

const previewSuggestions = this.line && this.diffParams;
const params = previewSuggestions
@@ -267,7 +265,7 @@ export default {
this.updatePlaceholder();
},
methods: {
...mapVuexActions(['toggleResolveNote']),
...mapActions(useNotes, ['toggleResolveNote']),
shouldToggleResolved(beforeSubmitDiscussionState) {
return (
this.showResolveDiscussionToggle && beforeSubmitDiscussionState !== this.newResolvedState()
@@ -1,7 +1,6 @@
<script>
import { GlTooltipDirective, GlIcon } from '@gitlab/ui';
// eslint-disable-next-line no-restricted-imports
import { mapActions, mapGetters } from 'vuex';
import { mapActions, mapState } from 'pinia';
import DraftNote from '~/batch_comments/components/draft_note.vue';
import { createAlert } from '~/alert';
import { clearDraft, getDraft, getAutoSaveKeyFromDiscussion } from '~/lib/utils/autosave';
@@ -14,6 +13,7 @@ import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item
import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
import { detectAndConfirmSensitiveTokens } from '~/lib/utils/secret_detection';
import { FILE_DIFF_POSITION_TYPE, IMAGE_DIFF_POSITION_TYPE } from '~/diffs/constants';
import { useNotes } from '~/notes/store/legacy_notes';
import eventHub from '../event_hub';
import noteable from '../mixins/noteable';
import resolvable from '../mixins/resolvable';
@@ -86,7 +86,7 @@ export default {
};
},
computed: {
...mapGetters([
...mapState(useNotes, [
'convertedDisscussionIds',
'getNoteableData',
'userCanReply',
@@ -209,7 +209,7 @@ export default {
eventHub.$off('startReplying', this.onStartReplying);
},
methods: {
...mapActions([
...mapActions(useNotes, [
'saveNote',
'removePlaceholderNotes',
'toggleResolveNote',
@@ -1,9 +1,7 @@
<script>
import { GlSprintf, GlAvatarLink, GlAvatar } from '@gitlab/ui';
import { escape } from 'lodash';
// eslint-disable-next-line no-restricted-imports
import { mapGetters as mapVuexGetters, mapActions as mapVuexActions } from 'vuex';
import { mapState } from 'pinia';
import { mapState, mapActions } from 'pinia';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import SafeHtml from '~/vue_shared/directives/safe_html';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
@@ -17,6 +15,7 @@ import { __, s__, sprintf } from '~/locale';
import { renderGFM } from '~/behaviors/markdown/render_gfm';
import { detectAndConfirmSensitiveTokens } from '~/lib/utils/secret_detection';
import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
import { useNotes } from '~/notes/store/legacy_notes';
import eventHub from '../event_hub';
import noteable from '../mixins/noteable';
import resolvable from '../mixins/resolvable';
@@ -129,7 +128,7 @@ export default {
},
computed: {
...mapState(useLegacyDiffs, ['getDiffFileByHash']),
...mapVuexGetters(['targetNoteHash', 'getNoteableData', 'getUserData', 'commentsDisabled']),
...mapState(useNotes, ['targetNoteHash', 'getNoteableData', 'getUserData', 'commentsDisabled']),
isEditing: {
get() {
return this.note.isEditing ?? this.isEditingLocal;
@@ -274,7 +273,7 @@ export default {
},

methods: {
...mapVuexActions([
...mapActions(useNotes, [
'deleteNote',
'removeNote',
'updateNote',
@@ -1,8 +1,7 @@
// eslint-disable-next-line no-restricted-imports
import { mapGetters, mapActions as mapVuexActions, mapState } from 'vuex';
import { mapActions } from 'pinia';
import { mapActions, mapState } from 'pinia';
import { scrollToElement, contentTop } from '~/lib/utils/common_utils';
import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
import { useNotes } from '~/notes/store/legacy_notes';

function isOverviewPage() {
return window.mrTabs?.currentAction === 'show';
@@ -85,17 +84,15 @@ function handleJumpForBothPages(getDiscussion, ctx, fn, scrollOptions) {

export default {
computed: {
...mapGetters([
...mapState(useNotes, [
'nextUnresolvedDiscussionId',
'previousUnresolvedDiscussionId',
'getDiscussion',
'currentDiscussionId',
]),
...mapState({
currentDiscussionId: (state) => state.notes.currentDiscussionId,
}),
},
methods: {
...mapVuexActions(['expandDiscussion', 'setCurrentDiscussionId']),
...mapActions(useNotes, ['expandDiscussion', 'setCurrentDiscussionId']),
...mapActions(useLegacyDiffs, ['scrollToFile', 'disableVirtualScroller']),

async jumpToNextDiscussion(scrollOptions) {
@@ -1,10 +1,10 @@
// eslint-disable-next-line no-restricted-imports
import { mapGetters } from 'vuex';
import { mapState } from 'pinia';
import { helpPagePath } from '~/helpers/help_page_helper';
import { useNotes } from '~/notes/store/legacy_notes';

export default {
computed: {
...mapGetters(['getNoteableDataByProp']),
...mapState(useNotes, ['getNoteableDataByProp']),
isProjectArchived() {
return this.getNoteableDataByProp('is_project_archived');
},
@@ -154,7 +154,12 @@ export default {
</project-setting-row>

<template #footer>
<gl-button variant="confirm" type="submit" data-testid="gitlab-duo-save-button">
<gl-button
variant="confirm"
type="submit"
data-testid="gitlab-duo-save-button"
:disabled="duoFeaturesLocked"
>
{{ $options.i18n.saveChanges }}
</gl-button>
</template>
@@ -1,4 +1,5 @@
import Vue from 'vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import GitlabDuoSettings from './components/gitlab_duo_settings.vue';

export default function initGitlabDuoSettings() {
@@ -10,7 +11,9 @@ export default function initGitlabDuoSettings() {
}

const componentProps = JSON.parse(componentPropsEl.innerHTML);

const componentPropsParsed = convertObjectPropsToCamelCase(componentProps, {
deep: true,
});
const { targetFormId } = mountPoint.dataset;

return new Vue({
@@ -18,7 +21,7 @@ export default function initGitlabDuoSettings() {
name: 'GitlabDuoSettingsRoot',
render: (createElement) =>
createElement(GitlabDuoSettings, {
props: componentProps,
props: componentPropsParsed,
on: {
confirm: () => {
if (targetFormId) document.getElementById(targetFormId)?.submit();
@@ -17,11 +17,11 @@
 * />
 */
import { GlAvatarLink, GlAvatar } from '@gitlab/ui';
// eslint-disable-next-line no-restricted-imports
import { mapGetters } from 'vuex';
import { mapState } from 'pinia';
import SafeHtml from '~/vue_shared/directives/safe_html';
import { renderMarkdown } from '~/notes/utils';
import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue';
import { useNotes } from '~/notes/store/legacy_notes';

export default {
name: 'PlaceholderNote',
@@ -43,7 +43,7 @@ export default {
},
},
computed: {
...mapGetters(['getUserData']),
...mapState(useNotes, ['getUserData']),
renderedNote() {
return renderMarkdown(this.note.body);
},
@@ -18,14 +18,14 @@
 */
import { GlButton, GlSkeletonLoader, GlTooltipDirective, GlIcon } from '@gitlab/ui';
import $ from 'jquery';
// eslint-disable-next-line no-restricted-imports
import { mapGetters, mapActions, mapState } from 'vuex';
import { mapActions, mapState } from 'pinia';
import SafeHtml from '~/vue_shared/directives/safe_html';
import descriptionVersionHistoryMixin from 'ee_else_ce/notes/mixins/description_version_history';
import axios from '~/lib/utils/axios_utils';
import { __ } from '~/locale';
import NoteHeader from '~/notes/components/note_header.vue';
import { renderGFM } from '~/behaviors/markdown/render_gfm';
import { useNotes } from '~/notes/store/legacy_notes';
import TimelineEntryItem from './timeline_entry_item.vue';

const MAX_VISIBLE_COMMIT_LIST_COUNT = 3;
@@ -72,8 +72,7 @@ export default {
};
},
computed: {
...mapGetters(['targetNoteHash', 'descriptionVersions']),
...mapState(['isLoadingDescriptionVersion']),
...mapState(useNotes, ['targetNoteHash', 'descriptionVersions', 'isLoadingDescriptionVersion']),
noteAnchorId() {
return `note_${this.note.id}`;
},
@@ -123,7 +122,7 @@ export default {
},
methods: {
// eslint-disable-next-line vue/no-unused-properties -- These are used by the `descriptionVersionHistoryMixin` mixin
...mapActions(['fetchDescriptionVersion', 'softDeleteDescriptionVersion']),
...mapActions(useNotes, ['fetchDescriptionVersion', 'softDeleteDescriptionVersion']),
async toggleDiff() {
this.showLines = !this.showLines;
@@ -64,7 +64,7 @@ export default {
data-testid="work-item-type-icon"
:title="workItemTooltipTitle"
:aria-label="workItemTypeText"
class="!gl-cursor-default gl-border-none gl-bg-transparent gl-p-0"
class="!gl-cursor-default gl-border-none gl-bg-transparent gl-p-0 focus-visible:gl-focus-inset"
>
<gl-icon :name="iconName" :variant="iconVariant" :class="iconClass" />
<span v-if="workItemTypeText" :class="{ 'gl-sr-only !gl-absolute': !showText }">{{
@@ -4,6 +4,14 @@ module Packages
module Downloadable
extend ActiveSupport::Concern

class_methods do
def touch_last_downloaded_at(id)
::Gitlab::Database::LoadBalancing::SessionMap.current(load_balancer).without_sticky_writes do
id_in(id).update_all(last_downloaded_at: Time.zone.now)
end
end
end

def touch_last_downloaded_at
::Gitlab::Database::LoadBalancing::SessionMap.current(load_balancer).without_sticky_writes do
update_column(:last_downloaded_at, Time.zone.now)
@@ -13,3 +21,4 @@ module Packages
end

Packages::Downloadable.prepend_mod
Packages::Downloadable::ClassMethods.prepend_mod
@@ -414,7 +414,7 @@ class MergeRequest < ApplicationRecord
preload_routables.preload(
:assignees, :author, :unresolved_notes, :labels, :milestone,
:timelogs, :latest_merge_request_diff, :reviewers,
:merge_schedule,
:merge_schedule, :merge_user,
target_project: [:project_feature, :project_setting],
metrics: [:latest_closed_by, :merged_by]
)
@@ -2111,6 +2111,15 @@ class User < ApplicationRecord
def owns_runner?(runner)
runner = runner.__getobj__ if runner.is_a?(Ci::RunnerPresenter)

# NOTE: This is a workaround to the fact that `ci_owned_group_runners` does not return the group runners that the
# user has access to in group A, when the user is owner of group B, and group B has been invited as owner
# to group A. Instead it only returns group runners that belong to a group that the user is a direct owner of.
# Ideally, we'd add a `min_access_level` argument to `User#authorized_groups`, similar to `User#authorized_projects`
# and that would get used by `ci_owned_group_runners`, but that would require deeper changes
# from the ~"group::authorization" team.
# TODO: Remove this workaround when https://gitlab.com/gitlab-org/gitlab/-/issues/549985 is resolved
return Ability.allowed?(self, :admin_runner, runner.owner) if runner.group_type?

ci_owned_runners.include?(runner)
end
@@ -6,7 +6,7 @@ module Authn
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext -- does not perform work scoped to a context

idempotent!
deduplicate :until_executed
deduplicate :until_executing, including_scheduled: true
data_consistency :sticky
feature_category :system_access
concurrency_limit -> { 1 }
@@ -775,7 +775,7 @@ Settings.cron_jobs['import_placeholder_user_cleanup_worker'] ||= {}
Settings.cron_jobs['import_placeholder_user_cleanup_worker']['cron'] ||= "0 0 * * *"
Settings.cron_jobs['import_placeholder_user_cleanup_worker']['job_class'] = 'Import::PlaceholderUserCleanupWorker'
Settings.cron_jobs['authn_oauth_access_token_cleanup_worker'] ||= {}
Settings.cron_jobs['authn_oauth_access_token_cleanup_worker']['cron'] ||= '0 9 2 * *'
Settings.cron_jobs['authn_oauth_access_token_cleanup_worker']['cron'] ||= '5 6 * * *'
Settings.cron_jobs['authn_oauth_access_token_cleanup_worker']['job_class'] = 'Authn::OauthAccessTokenCleanupWorker'

Gitlab.ee do
@@ -1144,6 +1144,8 @@ Settings.cell.topology_service_client['address'] ||= 'topology-service.example.c
Settings.cell.topology_service_client['ca_file'] ||= nil
Settings.cell.topology_service_client['certificate_file'] ||= nil
Settings.cell.topology_service_client['private_key_file'] ||= nil
Settings.cell.topology_service_client['tls'] ||= {}
Settings.cell.topology_service_client['tls']['enabled'] = true if Settings.cell.topology_service_client['tls']['enabled'].nil?

#
# GitLab KAS
@@ -8,24 +8,7 @@ description: Stores access settings for protected branch unprotection
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/5103
milestone: '10.7'
gitlab_schema: gitlab_main_cell
desired_sharding_key:
protected_branch_project_id:
references: projects
backfill_via:
parent:
foreign_key: protected_branch_id
table: protected_branches
sharding_key: project_id
belongs_to: protected_branch
protected_branch_namespace_id:
references: namespaces
backfill_via:
parent:
foreign_key: protected_branch_id
table: protected_branches
sharding_key: namespace_id
belongs_to: protected_branch
sharding_key:
protected_branch_project_id: projects
protected_branch_namespace_id: namespaces
table_size: small
desired_sharding_key_migration_job_name:
- BackfillProtectedBranchUnprotectAccessLevelsProtectedBranchProjectId
- BackfillProtectedBranchUnprotectAccessLevelsProtectedBranchNamespaceId
@@ -0,0 +1,16 @@
# frozen_string_literal: true

class AddMultiColumnNotNullConstraintToProtectedBranchUnprotectAccessLevels < Gitlab::Database::Migration[2.3]
milestone '18.2'
disable_ddl_transaction!

def up
add_multi_column_not_null_constraint(:protected_branch_unprotect_access_levels, :protected_branch_project_id,
:protected_branch_namespace_id)
end

def down
remove_multi_column_not_null_constraint(:protected_branch_unprotect_access_levels, :protected_branch_project_id,
:protected_branch_namespace_id)
end
end
@@ -0,0 +1 @@
550036e67e6a0956e8c53d6a6e262a9cbc9ca249a898360e0cf578c9d1dc634e
@@ -22192,7 +22192,8 @@ CREATE TABLE protected_branch_unprotect_access_levels (
user_id bigint,
group_id bigint,
protected_branch_project_id bigint,
protected_branch_namespace_id bigint
protected_branch_namespace_id bigint,
CONSTRAINT check_a5a558921b CHECK ((num_nonnulls(protected_branch_namespace_id, protected_branch_project_id) = 1))
);

CREATE SEQUENCE protected_branch_unprotect_access_levels_id_seq
@@ -483,7 +483,7 @@ end

override :as_indexed_json
def as_indexed_json
# a hash containing the document represenation for this reference
# a hash containing the document representation for this reference
end

override :index_name
@@ -509,6 +509,12 @@ The logs show the updates. To check the document in the index, run this command:
curl "http://localhost:9200/gitlab-development-<type>/_search"
```

##### Common gotchas

- Index operations actually perform an upsert. If the document exists, it performs a partial update by merging fields sent
with the existing document fields. If you want to explicitly remove fields or set them to empty, the `as_indexed_json`
must send `nil` or an empty array.

#### Data consistency

Now that we have an index and a way to bulk index the new document type into Elasticsearch, we need to add data into the index. This consists of doing a backfill and doing continuous updates to ensure the index data is up to date.
@@ -23,7 +23,7 @@ Use pipeline execution policies to manage and enforce CI/CD jobs for multiple pr

- <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> For a video walkthrough, see [Security Policies: Pipeline Execution Policy Type](https://www.youtube.com/watch?v=QQAOpkZ__pA).

## Pipeline execution policies schema
## Schema

{{< history >}}
@@ -45,7 +45,7 @@ the following sections and tables provide an alternative.
|-------|------|----------|-------------|
| `pipeline_execution_policy` | `array` of pipeline execution policy | true | List of pipeline execution policies (maximum five) |

## Pipeline execution policy schema
## `pipeline_execution_policy` schema

| Field | Type | Required | Description |
|-------|------|----------|-------------|
@@ -297,7 +297,7 @@ the policy configuration is not as well protected as when using the `allowlist`

{{< /alert >}}

### Policy scope schema
### `policy scope` schema

To customize policy enforcement, you can define a policy's scope to either include, or exclude,
specified projects, groups, or compliance framework labels. For more details, see
@@ -416,7 +416,7 @@ If you don't see the **CI/CD** settings, go to **Settings > General > Visibility

Pipeline configuration strategy defines the method for merging the policy configuration with the project pipeline. Pipeline execution policies execute the jobs defined in the `.gitlab-ci.yml` file in isolated pipelines, which are merged into the pipelines of the target projects.

### `inject_policy`
### `inject_policy` type

{{< history >}}
@@ -579,7 +579,7 @@ Special cases:

{{< alert type="warning" >}}

This feature was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/475152) in GitLab 17.9. Use [`inject_policy`](#inject_policy) instead as it supports the enforcement of custom policy stages.
This feature was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/475152) in GitLab 17.9. Use [`inject_policy`](#inject_policy-type) instead as it supports the enforcement of custom policy stages.

{{< /alert >}}
@@ -160,7 +160,7 @@ To unlink a security policy project, follow the same steps but instead select th
the dialog.
You can link to a security policy project from a different subgroup in the same top-level group, or from an entirely different top-level group.
However, when you enforce a
[pipeline execution policy](pipeline_execution_policies.md#pipeline-execution-policy-schema), users must have at least read-only access to the project that contains the CI/CD configuration referenced in the policy to trigger the pipeline.
[pipeline execution policy](pipeline_execution_policies.md#schema), users must have at least read-only access to the project that contains the CI/CD configuration referenced in the policy to trigger the pipeline.

### Viewing the linked security policy project
@@ -8,7 +8,7 @@ title: Use Model Context Protocol with AI-native features

{{< details >}}

- Tier: Ultimate
- Tier: Free, Premium, Ultimate
- Offering: GitLab.com
- Status: Experiment
@@ -62,7 +62,7 @@ To turn MCP on or off for your group:

To specify the MCP servers you want the AI-native feature to connect to:

1. In VS Code, create an `mcp.json` file in `~/gitlab/duo/`.
1. In VS Code, create an `mcp.json` file in `~/.gitlab/duo/`.
1. Populate this file with the MCP servers you want the feature to connect to.

For more information and examples, see the [MCP example servers documentation](https://modelcontextprotocol.io/examples). You can also find other example servers at [Smithery.ai](https://smithery.ai/)
Binary file not shown.
@@ -151,41 +151,6 @@ To update multiple epics at the same time:
1. Select the appropriate fields and their values from the sidebar.
1. Select **Update selected**.

### Open epics in a drawer

{{< details >}}

- Offering: GitLab Self-Managed

{{< /details >}}

{{< history >}}

- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/464063) in GitLab 17.4 [with a flag](../../../administration/feature_flags/_index.md) named `issues_list_drawer`. Disabled by default.
- Feature flag [changed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/170066) from `issues_list_drawer` to `epics_list_drawer` in GitLab 17.6.

{{< /history >}}

{{< alert type="flag" >}}

The availability of this feature is controlled by a feature flag.
For more information, see the history.
This feature is available for testing, but not ready for production use.

{{< /alert >}}

When this feature is enabled, when you select an epic from the list or epic board, it opens in a drawer.
You can then edit the epic or create comments.

To open the epic in full view, either:

- Open the epic in a new tab. From the list of epics, you can either:
- Right-click the epic and open it in a new browser tab.
- Hold <kbd>Cmd</kbd> or <kbd>Ctrl</kbd> and select the epic.
- From the drawer, in the top-left corner, select **Open in full view**.



## Prevent truncating descriptions with "Read more"

{{< history >}}
@@ -429,6 +394,54 @@ the epic's [confidentiality status](#make-an-epic-confidential):
- Confidential epic (regardless of group visibility): You must have at least the Planner
role for the group.

### Open epics in a drawer

{{< history >}}

- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/464698) in GitLab 17.4 [with a flag](../../../administration/feature_flags/_index.md) named `work_item_view_for_issues`. Enabled by default.
- Ability to toggle between drawer and full page view [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/536620) in GitLab 18.2.

{{< /history >}}

{{< alert type="flag" >}}

The availability of this feature is controlled by a feature flag.
For more information, see the history.

{{< /alert >}}

When you select an epic from the Epics page or an epic board, it opens in a drawer.
You can then view and edit its details without losing context of the epic list or board.

When using the drawer:

- Select an epic from the list to open it in the drawer.
- The drawer appears on the right side of the screen.
- You can edit the epic directly in the drawer.
- To close the drawer, select the close icon ({{< icon name="close" >}}) or press **Escape**.

#### Open an epic in full page view

To open an epic in the full page view:

- Open the epic in a new tab. From the list of epics, either:
- Right-click the epic and open it in a new browser tab.
- Hold <kbd>Cmd</kbd> or <kbd>Ctrl</kbd> and select the epic.
- Select an epic, and from the drawer, in the top-left corner, select **Open in full page** ({{< icon name="maximize" >}}).

#### Set preference whether to open epics in a drawer

To configure how epics open on the Epics page:

1. On the left sidebar, select **Search or go to** and find your group.
1. Select **Plan > Epics**.
1. In the top right corner, select **Display options** ({{< icon name="preferences" >}}).
1. Toggle **Open items in side panel**:
- **On** (default): Epics open in a drawer overlay.
- **Off**: Epics open in a full page view.

Your preference is saved and remembered across all your sessions and devices.

### Cached epic count

The total count of open epics displayed in the sidebar is cached if higher
@@ -157,16 +157,15 @@ build:
- docker push $IMAGE_TAG
```

In this example, `$CI_REGISTRY_IMAGE` resolves to the address of the registry tied
to this project. `$CI_COMMIT_REF_NAME` resolves to the branch or tag name, which
can contain forward slashes. Image tags can't contain forward slashes. Use
`$CI_COMMIT_REF_SLUG` as the image tag. You can declare the variable, `$IMAGE_TAG`,
combining `$CI_REGISTRY_IMAGE` and `$CI_COMMIT_REF_NAME` to save some typing in the
`script` section.
In the previous example:

This example splits the tasks into 4 pipeline stages, including two tests that run in parallel. The `build` is stored in the container
registry and used by subsequent stages, downloading the container image when needed. Changes to `main` also get tagged as
`latest` and deployed using an application-specific deploy script:
- `$CI_REGISTRY_IMAGE` resolves to the address of the registry tied
to this project.
- `$IMAGE_TAG` is a custom variable that combines the registry address with `$CI_COMMIT_REF_SLUG`, the image tag. The [`$CI_COMMIT_REF_NAME` predefined variable](../../../ci/variables/predefined_variables.md#predefined-variables) resolves to the branch or tag name and can contain forward slashes. Image tags cannot contain forward slashes. Use `$CI_COMMIT_REF_SLUG` instead.

The following example splits CI/CD tasks into four pipeline stages, including two tests that run in parallel.

The `build` is stored in the container registry and used by subsequent stages that download the container image when needed. When you push changes to the `main` branch, the pipeline tags the image as `latest` and deploys it using an application-specific deploy script:

```yaml
default:
@@ -227,8 +226,8 @@ deploy:

{{< alert type="note" >}}

This example explicitly calls `docker pull`. If you prefer to implicitly pull the container image using `image:`,
The previous example explicitly calls `docker pull`. If you prefer to implicitly pull the container image using `image:`,
and use either the [Docker](https://docs.gitlab.com/runner/executors/docker.html) or [Kubernetes](https://docs.gitlab.com/runner/executors/kubernetes/) executor,
make sure that [`pull_policy`](https://docs.gitlab.com/runner/executors/docker.html#how-pull-policies-work) is set to `always`.
make sure that [`pull_policy`](https://docs.gitlab.com/runner/executors/docker.html#set-the-always-pull-policy) is set to `always`.

{{< /alert >}}
@@ -42,12 +42,15 @@ Find a bug or have a request? Leave feedback in [issue 523713](https://gitlab.co

The new issues experience includes these improvements:

- **Drawer view**: When you open an issue from the issue list, board, or child or linked item list, the issue opens in a
- **Drawer view**: When you open an issue from the issue list, the issue opens in a
drawer without leaving the current page.
The drawer provides a complete view of the issue.

To view the full page instead, either:
1. Select **View in full page** at the top of the drawer.
1. Open the link in a new tab.

To always open issues in the full page view on the Epics page, in the top right corner, select **Display options** ({{< icon name="preferences" >}}) and turn off the **Open items in side panel** toggle.
- **Issue controls**: All issue controls, including confidentiality settings, are now in the top actions menu.
This menu stays visible as you scroll through the page.
- **Redesigned sidebar**: The sidebar is now embedded in the page, similar to merge requests and epics.
@@ -416,6 +416,29 @@ see [Create a custom workspace image that supports arbitrary user IDs](create_im
For more information, see the
[OpenShift documentation](https://docs.openshift.com/container-platform/4.12/openshift_images/create-images.html#use-uid_create-images).

## Shallow cloning

{{< history >}}

- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/543982) in GitLab 18.2 [with a flag](../../administration/feature_flags/_index.md) named `workspaces_shallow_clone_project`. Disabled by default.

{{< /history >}}

{{< alert type="flag" >}}

The availability of this feature is controlled by a feature flag.
For more information, see the history.
This feature is available for testing, but not ready for production use.

{{< /alert >}}

When you create a workspace, GitLab uses shallow cloning to improve performance.
A shallow clone downloads only the latest commit history instead of the complete Git history,
which significantly reduces the initial clone time for large repositories.

After the workspace starts, Git converts the shallow clone to a full clone in the background.
This process is transparent and doesn't affect your development workflow.

## Related topics

- [Troubleshooting Workspaces](workspaces_troubleshooting.md)
@@ -23,9 +23,11 @@ module Gitlab
end

def service_credentials
config = Gitlab.config.cell.topology_service_client
return :this_channel_is_insecure unless topology_service_config.tls.enabled

ca_file, key_file, cert_file = config.values_at('ca_file', 'private_key_file', 'certificate_file')
ca_file, key_file, cert_file = topology_service_config.values_at(
'ca_file', 'private_key_file', 'certificate_file'
)

return GRPC::Core::ChannelCredentials.new unless key_file && cert_file
return GRPC::Core::ChannelCredentials.new unless File.exist?(key_file) && File.exist?(cert_file)
@@ -38,12 +40,16 @@ module Gitlab
end

def topology_service_address
Gitlab.config.cell.topology_service_client.address
topology_service_config.address
end

def enabled?
Gitlab.config.cell.enabled
end

def topology_service_config
@topology_service_config ||= Gitlab.config.cell.topology_service_client
end
end
end
end
@@ -3,7 +3,7 @@
module QA
RSpec.describe 'Analytics' do
describe 'Service ping default checked', product_group: :analytics_instrumentation do
context 'when using default gitlab.yml config', :requires_admin do
context 'when using default gitlab.yml config', :requires_admin, :skip_live_env do
before do
Flow::Login.sign_in_as_admin
@@ -22,10 +22,10 @@ module RuboCop

add_offense(node.loc.selector) do |corrector|
if arg = second_argument(node)
corrector.replace(arg.source_range, 'false')
corrector.replace(arg, 'false')
else
first_argument = node.first_argument
corrector.insert_after(first_argument.source_range, ', false')
corrector.insert_after(first_argument, ', false')
end
end
end
@@ -110,7 +110,7 @@ module RuboCop
path_without_extension = path_without_anchor.gsub(/#{extension_pattern}$/, '')
arg_with_md_extension = path.gsub(/#{path_without_extension}#{extension_pattern}(\#.+)?$/,
"#{path_without_extension}.md\\2")
corrector.replace(node.first_argument.source_range, "'#{arg_with_md_extension}'")
corrector.replace(node.first_argument, "'#{arg_with_md_extension}'")
end
path_without_anchor += ".md"
end
@@ -34,7 +34,7 @@ module RuboCop

replacement = "Gitlab::HTTP.#{method_name}(#{arg_nodes.map(&:source).join(', ')})"

corrector.replace(node.source_range, replacement)
corrector.replace(node, replacement)
end
end
end
@@ -35,11 +35,11 @@ module RuboCop

replacement = "Gitlab::HTTP.#{method_name}(#{arg_nodes.map(&:source).join(', ')})"

corrector.replace(node.source_range, replacement)
corrector.replace(node, replacement)
end
elsif includes_httparty?(node)
add_offense(node, message: MSG_INCLUDE) do |corrector|
corrector.remove(node.source_range)
corrector.remove(node)
end
end
end
@@ -24,7 +24,7 @@ module RuboCop
add_offense(node) do |corrector|
replacement = "#{cbased(node)}Gitlab::Json.#{method_name}(#{arg_source})"

corrector.replace(node.source_range, replacement)
corrector.replace(node, replacement)
end
end
@@ -84,7 +84,7 @@ module RuboCop
def corrector(node)
->(corrector) do
corrector.insert_after(
node.source_range,
node,
" # rubocop: disable #{cop_name}"
)
end
@@ -29,7 +29,7 @@ module RuboCop

replacement = "#{helper_name}(#{arguments.map(&:source).join(', ')})"

corrector.replace(node.source_range, replacement)
corrector.replace(node, replacement)
end
end
@@ -37,7 +37,7 @@ module Rubocop

add_offense(node) do |corrector|
replacement = 'before_all'
corrector.replace(node.source_range, replacement)
corrector.replace(node, replacement)
end
end
end
@@ -90,7 +90,7 @@ module RuboCop
receiver = "#{receiver.source}." if receiver
expression = "#{receiver}#{type}"
replacement = node.source.sub(expression, REPLACEMENT)
corrector.replace(node.source_range, replacement)
corrector.replace(node, replacement)
end
end
@@ -93,7 +93,7 @@ module RuboCop
def corrector(node)
->(corrector) do
replacement = replace_matcher(node) || replace_response_status(node)
corrector.replace(node.source_range, replacement) if node.source_range.source != replacement
corrector.replace(node, replacement) if node.source != replacement
end
end
@@ -436,7 +436,7 @@ RSpec.describe 'Admin::Users', :with_current_organization, feature_category: :us
let_it_be(:user_username) { 'Bing bang' }

it "doesn't create the user and shows an error message" do
expect { click_button 'Create user' }.to change { User.count }.by(0)
expect { click_button 'Create user' }.not_to change { User.count }

expect(page).to have_content('The form contains the following error')
expect(page).to have_content('Username can contain only letters, digits')
@@ -142,7 +142,10 @@ RSpec.describe 'Issue board filters', :js, feature_category: :team_planning do
expect(dropdown_nodes[4]).to have_content(milestone_2.title)
expect(dropdown_nodes.last).to have_content(milestone_1.title)

click_on milestone_1.title
within('.gl-filtered-search-suggestion-list') do
click_on milestone_1.title
end

filter_submit.click

expect(find('[data-testid="board-list"]:nth-child(1)')).to have_selector('.board-card', count: 1)
@@ -0,0 +1,49 @@
export const withItems = {
data: {
currentUser: {
id: 'gid://gitlab/User/1',
assigned: {
count: 5,
nodes: [
{
id: 'gid://gitlab/WorkItem/20',
updatedAt: '2025-06-27T19:25:04Z',
__typename: 'WorkItem',
},
],
__typename: 'WorkItemConnection',
},
authored: {
count: 32,
nodes: [
{
id: 'gid://gitlab/WorkItem/619',
updatedAt: '2025-06-25T15:52:05Z',
__typename: 'WorkItem',
},
],
__typename: 'WorkItemConnection',
},
__typename: 'CurrentUser',
},
},
};

export const withoutItems = {
data: {
currentUser: {
id: 'gid://gitlab/User/1',
assigned: {
count: 0,
nodes: [],
__typename: 'WorkItemConnection',
},
authored: {
count: 0,
nodes: [],
__typename: 'WorkItemConnection',
},
__typename: 'CurrentUser',
},
},
};
@@ -1,17 +1,42 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { GlLink } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import { useFakeDate } from 'helpers/fake_date';
import WorkItemsWidget from '~/homepage/components/work_items_widget.vue';
import workItemsWidgetMetadataQuery from '~/homepage/graphql/queries/work_items_widget_metadata.query.graphql';
import { withItems, withoutItems } from './mocks/work_items_widget_metadata_query_mocks';

describe('WorkItemsWidget', () => {
Vue.use(VueApollo);

const MOCK_ASSIGNED_TO_YOU_PATH = '/assigned/to/you/path';
const MOCK_AUTHORED_BY_YOU_PATH = '/authored/to/you/path';
const MOCK_CURRENT_TIME = new Date('2025-06-29T18:13:25Z');

useFakeDate(MOCK_CURRENT_TIME);

const workItemsWidgetMetadataQueryHandler = (data) => jest.fn().mockResolvedValue(data);

let wrapper;

const findGlLinks = () => wrapper.findAllComponents(GlLink);
const findAssignedCount = () => wrapper.findByTestId('assigned-count');
const findAssignedLastUpdatedAt = () => wrapper.findByTestId('assigned-last-updated-at');
const findAuthoredCount = () => wrapper.findByTestId('authored-count');
const findAuthoredLastUpdatedAt = () => wrapper.findByTestId('authored-last-updated-at');

function createWrapper() {
function createWrapper({ workItemsWidgetMetadataQueryMock = withItems } = {}) {
const mockApollo = createMockApollo([
[
workItemsWidgetMetadataQuery,
workItemsWidgetMetadataQueryHandler(workItemsWidgetMetadataQueryMock),
],
]);
wrapper = shallowMountExtended(WorkItemsWidget, {
apolloProvider: mockApollo,
propsData: {
assignedToYouPath: MOCK_ASSIGNED_TO_YOU_PATH,
authoredByYouPath: MOCK_AUTHORED_BY_YOU_PATH,

@@ -38,4 +63,36 @@ describe('WorkItemsWidget', () => {
expect(link.text()).toBe('Authored by you');
});
});

describe('metadata', () => {
it('does not show any metadata until the query has resolved', () => {
createWrapper();

expect(findAssignedCount().exists()).toBe(false);
expect(findAssignedLastUpdatedAt().exists()).toBe(false);
expect(findAuthoredCount().exists()).toBe(false);
expect(findAuthoredLastUpdatedAt().exists()).toBe(false);
});

it('shows the metadata once the query has resolved', async () => {
createWrapper();
await waitForPromises();

expect(findAssignedCount().text()).toBe('5');
expect(findAssignedLastUpdatedAt().text()).toBe('1 day ago');
expect(findAuthoredCount().text()).toBe('32');
expect(findAuthoredLastUpdatedAt().text()).toBe('4 days ago');
});

it('shows partial metadata when the user has no relevant items', async () => {
createWrapper({ workItemsWidgetMetadataQueryMock: withoutItems });
await waitForPromises();

expect(findAssignedLastUpdatedAt().exists()).toBe(false);
expect(findAuthoredLastUpdatedAt().exists()).toBe(false);

expect(findAssignedCount().text()).toBe('0');
expect(findAuthoredCount().text()).toBe('0');
});
});
});

@ -0,0 +1,251 @@
|
|||
import { createWebSocket, parseMessage, closeSocket } from '~/lib/utils/websocket_utils';
|
||||
|
||||
const TEST_URL = 'ws://test.com';
|
||||
const INVALID_INPUTS = [
|
||||
['null', null],
|
||||
['undefined', undefined],
|
||||
['object without required method', { someOtherMethod: jest.fn() }],
|
||||
];
|
||||
global.WebSocket = jest.fn();
|
||||
|
||||
global.WebSocket.CONNECTING = 0;
|
||||
global.WebSocket.OPEN = 1;
|
||||
global.WebSocket.CLOSING = 2;
|
||||
global.WebSocket.CLOSED = 3;
|
||||
|
||||
const createAndConnectSocket = (url = TEST_URL, handlers = {}) => {
|
||||
const connection = createWebSocket(url, handlers);
|
||||
connection.connect();
|
||||
return connection;
|
||||
};
|
||||
|
||||
describe('WebSocket Utils', () => {
|
||||
let mockWebSocket;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
mockWebSocket = {
|
||||
readyState: global.WebSocket.CONNECTING,
|
||||
send: jest.fn(),
|
||||
close: jest.fn(),
|
||||
onopen: null,
|
||||
onmessage: null,
|
||||
onclose: null,
|
||||
onerror: null,
|
||||
};
|
||||
|
||||
global.WebSocket.mockImplementation(() => mockWebSocket);
|
||||
});
|
||||
|
||||
describe('createWebSocket', () => {
|
||||
describe('initialization', () => {
|
||||
it('should create connection object with default handlers', () => {
|
||||
const connection = createWebSocket(TEST_URL);
|
||||
|
||||
expect(connection).toHaveProperty('connect');
|
||||
expect(connection).toHaveProperty('send');
|
||||
expect(connection).toHaveProperty('isConnected');
|
||||
expect(connection).toHaveProperty('isConnecting');
|
||||
expect(connection).toHaveProperty('close');
|
||||
|
||||
expect(typeof connection.connect).toBe('function');
|
||||
expect(typeof connection.send).toBe('function');
|
||||
expect(typeof connection.isConnected).toBe('function');
|
||||
expect(typeof connection.isConnecting).toBe('function');
|
||||
expect(typeof connection.close).toBe('function');
|
||||
});
|
||||
|
||||
it('should create WebSocket instance when connect is called', () => {
|
||||
createAndConnectSocket();
|
||||
|
||||
expect(global.WebSocket).toHaveBeenCalledWith(TEST_URL);
|
||||
expect(global.WebSocket).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('event handlers', () => {
|
||||
it('should call custom event handlers when WebSocket events occur', () => {
|
||||
const mockOnOpen = jest.fn();
|
||||
const mockOnMessage = jest.fn();
|
||||
createAndConnectSocket(TEST_URL, {
|
||||
onOpen: mockOnOpen,
|
||||
onMessage: mockOnMessage,
|
||||
});
|
||||
|
||||
mockWebSocket.onopen({ type: 'open' });
|
||||
expect(mockOnOpen).toHaveBeenCalledTimes(1);
|
||||
expect(mockOnOpen).toHaveBeenCalledWith({ type: 'open' });
|
||||
|
||||
const messageEvent = { type: 'message', data: 'test message' };
|
||||
mockWebSocket.onmessage(messageEvent);
|
||||
expect(mockOnMessage).toHaveBeenCalledTimes(1);
|
||||
expect(mockOnMessage).toHaveBeenCalledWith(messageEvent);
|
||||
});
|
||||
|
||||
it('should call onError and onClose handlers', () => {
|
||||
const mockOnError = jest.fn();
|
||||
const mockOnClose = jest.fn();
|
||||
createAndConnectSocket(TEST_URL, {
|
||||
onError: mockOnError,
|
||||
onClose: mockOnClose,
|
||||
});
|
||||
|
||||
const errorEvent = { type: 'error', message: 'Connection failed' };
|
||||
mockWebSocket.onerror(errorEvent);
|
||||
expect(mockOnError).toHaveBeenCalledTimes(1);
|
||||
expect(mockOnError).toHaveBeenCalledWith(errorEvent);
|
||||
|
||||
const closeEvent = { type: 'close', code: 1000 };
|
||||
mockWebSocket.onclose(closeEvent);
|
||||
expect(mockOnClose).toHaveBeenCalledTimes(1);
|
||||
expect(mockOnClose).toHaveBeenCalledWith(closeEvent);
|
||||
});
|
||||
|
||||
it('should call onError handler when WebSocket constructor throws', () => {
|
||||
const mockOnError = jest.fn();
|
||||
|
||||
const constructorError = new Error('WebSocket constructor failed');
|
||||
global.WebSocket.mockImplementation(() => {
|
||||
throw constructorError;
|
||||
});
|
||||
createAndConnectSocket(TEST_URL, { onError: mockOnError });
|
||||
|
||||
expect(mockOnError).toHaveBeenCalledTimes(1);
|
||||
expect(mockOnError).toHaveBeenCalledWith(constructorError);
|
||||
});
|
||||
});
|
||||
|
||||
describe('connection state', () => {
|
||||
describe('isConnected', () => {
|
||||
it.each([
|
||||
[global.WebSocket.CONNECTING, false],
|
||||
[global.WebSocket.OPEN, true],
|
||||
[global.WebSocket.CLOSING, false],
|
||||
[global.WebSocket.CLOSED, false],
|
||||
])('should return %s when readyState is %s', (readyState, expected) => {
|
||||
const connection = createAndConnectSocket();
|
||||
|
||||
mockWebSocket.readyState = readyState;
|
||||
expect(connection.isConnected()).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isConnecting', () => {
|
||||
it.each([
|
||||
[global.WebSocket.CONNECTING, true],
|
||||
[global.WebSocket.OPEN, false],
|
||||
[global.WebSocket.CLOSING, false],
|
||||
[global.WebSocket.CLOSED, false],
|
||||
])('should return %s when readyState is %s', (readyState, expected) => {
|
||||
const connection = createAndConnectSocket();
|
||||
|
||||
mockWebSocket.readyState = readyState;
|
||||
expect(connection.isConnecting()).toBe(expected);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('send', () => {
|
||||
it('should send string message when connected', () => {
|
||||
const connection = createAndConnectSocket();
|
||||
|
||||
mockWebSocket.readyState = global.WebSocket.OPEN;
|
||||
|
||||
connection.send('test message');
|
||||
|
||||
expect(mockWebSocket.send).toHaveBeenCalledTimes(1);
|
||||
expect(mockWebSocket.send).toHaveBeenCalledWith('test message');
|
||||
});
|
||||
|
||||
it('should stringify object message when connected', () => {
|
||||
const connection = createAndConnectSocket();
|
||||
|
||||
mockWebSocket.readyState = global.WebSocket.OPEN;
|
||||
|
||||
const message = { type: 'test', data: 'value' };
|
||||
connection.send(message);
|
||||
|
||||
expect(mockWebSocket.send).toHaveBeenCalledTimes(1);
|
||||
expect(mockWebSocket.send).toHaveBeenCalledWith(JSON.stringify(message));
|
||||
});
|
||||
|
||||
it('should not send message when not connected', () => {
|
||||
const connection = createAndConnectSocket();
|
||||
|
||||
mockWebSocket.readyState = global.WebSocket.CONNECTING;
|
||||
|
||||
connection.send('test message');
|
||||
|
||||
expect(mockWebSocket.send).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
describe('close', () => {
|
||||
it('should close socket when in OPEN state', () => {
|
||||
const connection = createAndConnectSocket();
|
||||
|
||||
mockWebSocket.readyState = global.WebSocket.OPEN;
|
||||
connection.close();
|
||||
|
||||
expect(mockWebSocket.close).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should close socket when in CONNECTING state', () => {
|
||||
const connection = createAndConnectSocket();
|
||||
|
||||
mockWebSocket.readyState = global.WebSocket.CONNECTING;
|
||||
connection.close();
|
||||
|
||||
expect(mockWebSocket.close).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should not close socket when in CLOSED state', () => {
|
||||
const connection = createAndConnectSocket();
|
||||
|
||||
mockWebSocket.readyState = global.WebSocket.CLOSED;
|
||||
connection.close();
|
||||
|
||||
expect(mockWebSocket.close).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseMessage', () => {
|
||||
it('should parse JSON string message', async () => {
|
||||
const event = { data: '{"type": "test", "value": 123}' };
|
||||
const result = await parseMessage(event);
|
||||
|
||||
expect(result).toEqual({ type: 'test', value: 123 });
|
||||
});
|
||||
|
||||
it('should return null for invalid JSON', async () => {
|
||||
const event = { data: 'invalid json' };
|
||||
const result = await parseMessage(event);
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle empty string', async () => {
|
||||
const event = { data: '' };
|
||||
const result = await parseMessage(event);
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('closeSocket', () => {
|
||||
it('should call close method when socket has close method', () => {
|
||||
const mockSocket = {
|
||||
close: jest.fn(),
|
||||
};
|
||||
|
||||
closeSocket(mockSocket);
|
||||
|
||||
expect(mockSocket.close).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it.each(INVALID_INPUTS)('should not throw when socket is %s', (description, socket) => {
|
||||
expect(() => closeSocket(socket)).not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@@ -2,8 +2,6 @@ import { GlAlert } from '@gitlab/ui';
import Autosize from 'autosize';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { PiniaVuePlugin } from 'pinia';
import { createTestingPinia } from '@pinia/testing';
import waitForPromises from 'helpers/wait_for_promises';

@@ -13,7 +11,6 @@ import {
shallowMountExtended,
} from 'helpers/vue_test_utils_helper';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import batchComments from '~/batch_comments/stores/modules/batch_comments';
import { fetchUserCounts } from '~/super_sidebar/user_counts_fetch';
import { createAlert } from '~/alert';
import { STATUS_CLOSED, STATUS_OPEN } from '~/issues/constants';

@@ -24,13 +21,13 @@ import CommentForm from '~/notes/components/comment_form.vue';
import * as constants from '~/notes/constants';
import eventHub from '~/notes/event_hub';
import { COMMENT_FORM } from '~/notes/i18n';
import notesModule from '~/notes/stores/modules';
import { sprintf } from '~/locale';
import { mockTracking } from 'helpers/tracking_helper';
import { detectAndConfirmSensitiveTokens } from '~/lib/utils/secret_detection';
import { globalAccessorPlugin } from '~/pinia/plugins';
import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
import { useNotes } from '~/notes/store/legacy_notes';
import { useBatchComments } from '~/batch_comments/store';
import { loggedOutnoteableData, notesDataMock, userDataMock, noteableDataMock } from '../mock_data';

jest.mock('autosize');

@@ -42,7 +39,6 @@ jest.mock('~/lib/utils/secret_detection', () => {
};
});

Vue.use(Vuex);
Vue.use(PiniaVuePlugin);

describe('issue_comment_form component', () => {

@ -65,26 +61,6 @@ describe('issue_comment_form component', () => {
|
|||
const findCommentButton = () => findCommentTypeDropdown().find('button');
|
||||
const findErrorAlerts = () => wrapper.findAllComponents(GlAlert).wrappers;
|
||||
|
||||
const createStore = ({ actions = { saveNote: jest.fn() }, state = {}, getters = {} } = {}) => {
|
||||
const baseModule = notesModule();
|
||||
|
||||
return new Vuex.Store({
|
||||
...baseModule,
|
||||
actions: {
|
||||
...baseModule.actions,
|
||||
...actions,
|
||||
},
|
||||
state: {
|
||||
...baseModule.state,
|
||||
...state,
|
||||
},
|
||||
getters: {
|
||||
...baseModule.getters,
|
||||
...getters,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const createNotableDataMock = (data = {}) => {
|
||||
return {
|
||||
...noteableDataMock,
|
||||
|
|
@ -116,12 +92,11 @@ describe('issue_comment_form component', () => {
|
|||
userData = userDataMock,
|
||||
features = {},
|
||||
mountFunction = shallowMountExtended,
|
||||
store = createStore(),
|
||||
stubs = {},
|
||||
} = {}) => {
|
||||
store.dispatch('setNoteableData', noteableData);
|
||||
store.dispatch('setNotesData', notesData);
|
||||
store.dispatch('setUserData', userData);
|
||||
useNotes().setNoteableData(noteableData);
|
||||
useNotes().setNotesData(notesData);
|
||||
useNotes().setUserData(userData);
|
||||
|
||||
wrapper = mountFunction(CommentForm, {
|
||||
propsData: {
|
||||
|
|
@ -132,7 +107,6 @@ describe('issue_comment_form component', () => {
|
|||
...initialData,
|
||||
};
|
||||
},
|
||||
store,
|
||||
pinia,
|
||||
provide: {
|
||||
glFeatures: features,
|
||||
|
|
@ -144,7 +118,8 @@ describe('issue_comment_form component', () => {
|
|||
beforeEach(() => {
|
||||
pinia = createTestingPinia({ plugins: [globalAccessorPlugin], stubActions: false });
|
||||
useLegacyDiffs();
|
||||
useNotes();
|
||||
useNotes().saveNote.mockResolvedValue();
|
||||
useBatchComments();
|
||||
axiosMock = new MockAdapter(axios);
|
||||
trackingSpy = mockTracking(undefined, null, jest.spyOn);
|
||||
detectAndConfirmSensitiveTokens.mockReturnValue(true);
|
||||
|
|
@ -160,20 +135,17 @@ describe('issue_comment_form component', () => {
|
|||
const note = 'hello world';
|
||||
|
||||
it('should request to save note when note is entered', async () => {
|
||||
const store = createStore();
|
||||
jest.spyOn(store, 'dispatch');
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note }, store });
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note } });
|
||||
expect(findCloseReopenButton().props('disabled')).toBe(false);
|
||||
expect(findMarkdownEditor().props('value')).toBe(note);
|
||||
await findCloseReopenButton().trigger('click');
|
||||
expect(findCloseReopenButton().props('disabled')).toBe(true);
|
||||
expect(findMarkdownEditor().props('value')).toBe('');
|
||||
expect(store.dispatch).toHaveBeenLastCalledWith('saveNote', expect.objectContaining({}));
|
||||
expect(useNotes().saveNote).toHaveBeenLastCalledWith(expect.objectContaining({}));
|
||||
});
|
||||
|
||||
it('tracks event', async () => {
|
||||
const store = createStore();
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note }, store });
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note } });
|
||||
await findCloseReopenButton().trigger('click');
|
||||
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'save_markdown', {
|
||||
label: 'markdown_editor',
|
||||
|
|
@ -182,11 +154,9 @@ describe('issue_comment_form component', () => {
|
|||
});
|
||||
|
||||
it('does not report errors in the UI when the save succeeds', async () => {
|
||||
const store = createStore();
|
||||
mountComponent({
|
||||
mountFunction: mountExtended,
|
||||
initialData: { note: '/label ~sdfghj' },
|
||||
store,
|
||||
});
|
||||
await findCommentButton().trigger('click');
|
||||
// findErrorAlerts().exists returns false if *any* wrapper is empty,
|
||||
|
|
@ -205,20 +175,15 @@ describe('issue_comment_form component', () => {
|
|||
`(
|
||||
'displays the correct errors ($errors) for a $httpStatus network response',
|
||||
async ({ errors, httpStatus }) => {
|
||||
const store = createStore({
|
||||
actions: {
|
||||
saveNote: jest.fn().mockRejectedValue({
|
||||
response: {
|
||||
status: httpStatus,
|
||||
data: { quick_actions_status: { error_messages: errors } },
|
||||
},
|
||||
}),
|
||||
useNotes().saveNote.mockRejectedValue({
|
||||
response: {
|
||||
status: httpStatus,
|
||||
data: { quick_actions_status: { error_messages: errors } },
|
||||
},
|
||||
});
|
||||
mountComponent({
|
||||
mountFunction: mountExtended,
|
||||
initialData: { note: '/label ~sdfghj' },
|
||||
store,
|
||||
});
|
||||
await findCommentButton().trigger('click');
|
||||
await waitForPromises();
|
||||
|
|
@ -234,21 +199,15 @@ describe('issue_comment_form component', () => {
|
|||
|
||||
describe('if response contains validation errors', () => {
|
||||
beforeEach(async () => {
|
||||
const store = createStore({
|
||||
actions: {
|
||||
saveNote: jest.fn().mockRejectedValue({
|
||||
response: {
|
||||
status: HTTP_STATUS_UNPROCESSABLE_ENTITY,
|
||||
data: { errors: 'error 1 and error 2' },
|
||||
},
|
||||
}),
|
||||
useNotes().saveNote.mockRejectedValue({
|
||||
response: {
|
||||
status: HTTP_STATUS_UNPROCESSABLE_ENTITY,
|
||||
data: { errors: 'error 1 and error 2' },
|
||||
},
|
||||
});
|
||||
|
||||
mountComponent({
|
||||
mountFunction: mountExtended,
|
||||
initialData: { note: 'invalid note' },
|
||||
store,
|
||||
});
|
||||
|
||||
findCommentButton().trigger('click');
|
||||
|
|
@ -268,20 +227,15 @@ describe('issue_comment_form component', () => {
|
|||
|
||||
it('should remove the correct error from the list when it is dismissed', async () => {
|
||||
const commandErrors = ['1', '2', '3'];
|
||||
const store = createStore({
|
||||
actions: {
|
||||
saveNote: jest.fn().mockRejectedValue({
|
||||
response: {
|
||||
status: HTTP_STATUS_UNPROCESSABLE_ENTITY,
|
||||
data: { quick_actions_status: { error_messages: [...commandErrors] } },
|
||||
},
|
||||
}),
|
||||
useNotes().saveNote.mockRejectedValue({
|
||||
response: {
|
||||
status: HTTP_STATUS_UNPROCESSABLE_ENTITY,
|
||||
data: { quick_actions_status: { error_messages: [...commandErrors] } },
|
||||
},
|
||||
});
|
||||
mountComponent({
|
||||
mountFunction: mountExtended,
|
||||
initialData: { note: '/label ~sdfghj' },
|
||||
store,
|
||||
});
|
||||
await findCommentButton().trigger('click');
|
||||
await waitForPromises();
|
||||
|
|
@ -312,15 +266,10 @@ describe('issue_comment_form component', () => {
|
|||
});
|
||||
|
||||
it('should disable action button while submitting', async () => {
|
||||
const store = createStore({
|
||||
actions: {
|
||||
saveNote: jest.fn().mockReturnValue(),
|
||||
},
|
||||
});
|
||||
useNotes().saveNote.mockResolvedValue();
|
||||
mountComponent({
|
||||
mountFunction: mountExtended,
|
||||
initialData: { note: 'hello world' },
|
||||
store,
|
||||
});
|
||||
const actionButton = findCloseReopenButton();
|
||||
await actionButton.trigger('click');
|
||||
|
|
@ -377,13 +326,10 @@ describe('issue_comment_form component', () => {
|
|||
});
|
||||
|
||||
it('should resize textarea after note is saved', async () => {
|
||||
const store = createStore();
|
||||
store.registerModule('batchComments', batchComments());
|
||||
store.state.batchComments.drafts = [{ note: 'A' }];
|
||||
useBatchComments().drafts = [{ note: 'A' }];
|
||||
await mountComponent({
|
||||
mountFunction: mountExtended,
|
||||
initialData: { note: 'foo' },
|
||||
store,
|
||||
});
|
||||
await findAddCommentNowButton().trigger('click');
|
||||
await waitForPromises();
|
||||
|
|
@ -394,12 +340,8 @@ describe('issue_comment_form component', () => {
|
|||
describe('edit mode', () => {
|
||||
it('should enter edit mode when arrow up is pressed', async () => {
|
||||
const noteId = 2;
|
||||
const store = createStore({
|
||||
state: {
|
||||
discussions: [{ notes: [{ id: noteId, author: userDataMock }] }],
|
||||
},
|
||||
});
|
||||
mountComponent({ mountFunction: mountExtended, store });
|
||||
useNotes().discussions = [{ notes: [{ id: noteId, author: userDataMock }] }];
|
||||
mountComponent({ mountFunction: mountExtended });
|
||||
jest.spyOn(eventHub, '$emit');
|
||||
await findMarkdownEditorTextarea().trigger('keydown.up');
|
||||
expect(eventHub.$emit).toHaveBeenCalledWith('enterEditMode', { noteId });
|
||||
|
|
@ -407,57 +349,56 @@ describe('issue_comment_form component', () => {
|
|||
|
||||
describe('event enter', () => {
|
||||
describe('when no draft exists', () => {
|
||||
const store = createStore({ actions: {} });
|
||||
|
||||
it('should save note when cmd+enter is pressed', async () => {
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note: 'a' }, store });
|
||||
jest.spyOn(axios, 'post');
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note: 'a' } });
|
||||
await findMarkdownEditorTextarea().trigger('keydown.enter', { metaKey: true });
|
||||
expect(axios.post).toHaveBeenCalledWith(noteableDataMock.create_note_path, {
|
||||
merge_request_diff_head_sha: undefined,
|
||||
note: {
|
||||
internal: false,
|
||||
note: 'a',
|
||||
noteable_id: noteableDataMock.id,
|
||||
noteable_type: 'Issue',
|
||||
expect(useNotes().saveNote).toHaveBeenCalledWith({
|
||||
data: {
|
||||
merge_request_diff_head_sha: undefined,
|
||||
note: {
|
||||
internal: false,
|
||||
note: 'a',
|
||||
noteable_id: noteableDataMock.id,
|
||||
noteable_type: 'Issue',
|
||||
},
|
||||
},
|
||||
endpoint: noteableDataMock.create_note_path,
|
||||
flashContainer: expect.anything(),
|
||||
isDraft: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should save note when ctrl+enter is pressed', async () => {
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note: 'a' }, store });
|
||||
jest.spyOn(axios, 'post');
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note: 'a' } });
|
||||
await findMarkdownEditorTextarea().trigger('keydown.enter', { ctrlKey: true });
|
||||
expect(axios.post).toHaveBeenCalledWith(noteableDataMock.create_note_path, {
|
||||
merge_request_diff_head_sha: undefined,
|
||||
note: {
|
||||
internal: false,
|
||||
note: 'a',
|
||||
noteable_id: noteableDataMock.id,
|
||||
noteable_type: 'Issue',
|
||||
expect(useNotes().saveNote).toHaveBeenCalledWith({
|
||||
data: {
|
||||
merge_request_diff_head_sha: undefined,
|
||||
note: {
|
||||
internal: false,
|
||||
note: 'a',
|
||||
noteable_id: noteableDataMock.id,
|
||||
noteable_type: 'Issue',
|
||||
},
|
||||
},
|
||||
endpoint: noteableDataMock.create_note_path,
|
||||
flashContainer: expect.anything(),
|
||||
isDraft: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when a draft exists', () => {
|
||||
let store;
|
||||
|
||||
beforeEach(() => {
|
||||
store = createStore({
|
||||
actions: {
|
||||
saveNote: jest.fn().mockResolvedValue(),
|
||||
},
|
||||
});
|
||||
store.registerModule('batchComments', batchComments());
|
||||
store.state.batchComments.drafts = [{ note: 'A' }];
|
||||
useNotes().saveNote.mockResolvedValue();
|
||||
useBatchComments().drafts = [{ note: 'A' }];
|
||||
});
|
||||
|
||||
it('sends the event to indicate that a new draft comment has been added', async () => {
|
||||
const note = 'some note text which enables actually adding a draft note';
|
||||
|
||||
jest.spyOn(eventHub, '$emit');
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note }, store });
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note } });
|
||||
|
||||
findAddToReviewButton().trigger('click');
|
||||
|
||||
|
|
@ -469,10 +410,9 @@ describe('issue_comment_form component', () => {
|
|||
});
|
||||
|
||||
it('should save note draft when cmd+enter is pressed', async () => {
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note: 'a' }, store });
|
||||
jest.spyOn(store, 'dispatch').mockResolvedValue();
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note: 'a' } });
|
||||
await findMarkdownEditorTextarea().trigger('keydown.enter', { metaKey: true });
|
||||
expect(store.dispatch).toHaveBeenCalledWith('saveNote', {
|
||||
expect(useNotes().saveNote).toHaveBeenCalledWith({
|
||||
data: {
|
||||
merge_request_diff_head_sha: undefined,
|
||||
note: {
|
||||
|
|
@ -490,10 +430,9 @@ describe('issue_comment_form component', () => {
|
|||
});
|
||||
|
||||
it('should save note draft when ctrl+enter is pressed', async () => {
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note: 'a' }, store });
|
||||
jest.spyOn(store, 'dispatch').mockResolvedValue();
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note: 'a' } });
|
||||
await findMarkdownEditorTextarea().trigger('keydown.enter', { ctrlKey: true });
|
||||
expect(store.dispatch).toHaveBeenCalledWith('saveNote', {
|
||||
expect(useNotes().saveNote).toHaveBeenCalledWith({
|
||||
data: {
|
||||
merge_request_diff_head_sha: undefined,
|
||||
note: {
|
||||
|
|
@ -511,13 +450,12 @@ describe('issue_comment_form component', () => {
|
|||
});
|
||||
|
||||
it('should add comment when shift+cmd+enter is pressed', async () => {
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note: 'a' }, store });
|
||||
jest.spyOn(store, 'dispatch').mockResolvedValue();
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note: 'a' } });
|
||||
await findMarkdownEditorTextarea().trigger('keydown.enter', {
|
||||
shiftKey: true,
|
||||
metaKey: true,
|
||||
});
|
||||
expect(store.dispatch).toHaveBeenCalledWith('saveNote', {
|
||||
expect(useNotes().saveNote).toHaveBeenCalledWith({
|
||||
data: {
|
||||
merge_request_diff_head_sha: undefined,
|
||||
note: {
|
||||
|
|
@ -534,13 +472,12 @@ describe('issue_comment_form component', () => {
|
|||
});
|
||||
|
||||
it('should add comment when shift+ctrl+enter is pressed', async () => {
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note: 'a' }, store });
|
||||
jest.spyOn(store, 'dispatch').mockResolvedValue();
|
||||
mountComponent({ mountFunction: mountExtended, initialData: { note: 'a' } });
|
||||
await findMarkdownEditorTextarea().trigger('keydown.enter', {
|
||||
shiftKey: true,
|
||||
ctrlKey: true,
|
||||
});
|
||||
expect(store.dispatch).toHaveBeenCalledWith('saveNote', {
|
||||
expect(useNotes().saveNote).toHaveBeenCalledWith({
|
||||
data: {
|
||||
merge_request_diff_head_sha: undefined,
|
||||
note: {
|
||||
|
|
@ -730,10 +667,8 @@ describe('issue_comment_form component', () => {
|
|||
});
|
||||
|
||||
it('renders checkbox when hasDrafts is true', () => {
|
||||
const store = createStore({ getters: { hasDrafts: () => true } });
|
||||
|
||||
mountComponent({ store });
|
||||
|
||||
useBatchComments().drafts = [{}];
|
||||
mountComponent();
|
||||
expect(findConfidentialNoteCheckbox().exists()).toBe(true);
|
||||
});
|
||||
|
||||
|
|
@ -785,16 +720,13 @@ describe('issue_comment_form component', () => {
|
|||
${false}
|
||||
`('when checkbox value is `$shouldCheckboxBeChecked`', ({ shouldCheckboxBeChecked }) => {
|
||||
it(`sets \`internal\` to \`${shouldCheckboxBeChecked}\``, async () => {
|
||||
const store = createStore();
|
||||
const note = 'internal note';
|
||||
mountComponent({
|
||||
mountFunction: mountExtended,
|
||||
initialData: { note },
|
||||
noteableData: { ...notableDataMockCanUpdateIssuable },
|
||||
store,
|
||||
});
|
||||
|
||||
jest.spyOn(store, 'dispatch');
|
||||
const checkbox = findConfidentialNoteCheckbox();
|
||||
|
||||
// check checkbox
|
||||
|
|
@ -806,7 +738,7 @@ describe('issue_comment_form component', () => {
|
|||
findCommentButton().trigger('click');
|
||||
await waitForPromises();
|
||||
|
||||
expect(store.dispatch).toHaveBeenCalledWith('saveNote', {
|
||||
expect(useNotes().saveNote).toHaveBeenCalledWith({
|
||||
data: {
|
||||
merge_request_diff_head_sha: undefined,
|
||||
note: {
|
||||
|
|
@ -839,31 +771,26 @@ describe('issue_comment_form component', () => {
|
|||
describe('check sensitive tokens', () => {
|
||||
const sensitiveMessage = 'token: glpat-1234567890abcdefghij';
|
||||
const nonSensitiveMessage = 'text';
|
||||
const store = createStore();
|
||||
|
||||
it('should not save note when it contains sensitive token', async () => {
|
||||
detectAndConfirmSensitiveTokens.mockReturnValue(false);
|
||||
mountComponent({
|
||||
mountFunction: mountExtended,
|
||||
initialData: { note: sensitiveMessage },
|
||||
store,
|
||||
});
|
||||
jest.spyOn(store, 'dispatch');
|
||||
findCommentButton().trigger('click');
|
||||
await waitForPromises();
|
||||
expect(store.dispatch).not.toHaveBeenCalled();
|
||||
expect(useNotes().saveNote).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should save note it does not contain sensitive token', async () => {
|
||||
mountComponent({
|
||||
mountFunction: mountExtended,
|
||||
initialData: { note: nonSensitiveMessage },
|
||||
store,
|
||||
});
|
||||
jest.spyOn(store, 'dispatch');
|
||||
await findCommentButton().trigger('click');
|
||||
await waitForPromises();
|
||||
expect(store.dispatch).toHaveBeenCalledWith('saveNote', expect.objectContaining({}));
|
||||
expect(useNotes().saveNote).toHaveBeenCalledWith(expect.objectContaining({}));
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@ -887,15 +814,8 @@ describe('issue_comment_form component', () => {
|
|||
|
||||
describe('with batchComments in store', () => {
|
||||
describe('start review, add to review and comment now buttons', () => {
|
||||
let store;
|
||||
|
||||
beforeEach(() => {
|
||||
store = createStore();
|
||||
store.registerModule('batchComments', batchComments());
|
||||
});
|
||||
|
||||
it('when no drafts exist on non-merge request, should not render', () => {
|
||||
mountComponent({ store });
|
||||
mountComponent();
|
||||
expect(findCommentTypeDropdown().exists()).toBe(true);
|
||||
expect(findStartReviewButton().exists()).toBe(false);
|
||||
expect(findAddToReviewButton().exists()).toBe(false);
|
||||
|
|
@ -903,7 +823,7 @@ describe('issue_comment_form component', () => {
|
|||
});
|
||||
|
||||
it('when no drafts exist in a merge request, should render', () => {
|
||||
mountComponent({ noteableType: constants.MERGE_REQUEST_NOTEABLE_TYPE, store });
|
||||
mountComponent({ noteableType: constants.MERGE_REQUEST_NOTEABLE_TYPE });
|
||||
expect(findCommentTypeDropdown().exists()).toBe(true);
|
||||
expect(findStartReviewButton().exists()).toBe(true);
|
||||
expect(findAddToReviewButton().exists()).toBe(false);
|
||||
|
|
@ -912,11 +832,11 @@ describe('issue_comment_form component', () => {
|
|||
|
||||
describe('when drafts exist', () => {
|
||||
beforeEach(() => {
|
||||
store.state.batchComments.drafts = [{ note: 'A' }];
|
||||
useBatchComments().drafts = [{ note: 'A' }];
|
||||
});
|
||||
|
||||
it('should render proper action elements', async () => {
|
||||
await mountComponent({ store });
|
||||
await mountComponent();
|
||||
expect(findCommentTypeDropdown().exists()).toBe(false);
|
||||
expect(findAddToReviewButton().exists()).toBe(true);
|
||||
expect(findAddCommentNowButton().exists()).toBe(true);
|
||||
|
|
@ -927,12 +847,9 @@ describe('issue_comment_form component', () => {
|
|||
mountComponent({
|
||||
mountFunction: mountExtended,
|
||||
initialData: { note: 'a draft note' },
|
||||
store,
|
||||
});
|
||||
jest.spyOn(store, 'dispatch').mockResolvedValue();
|
||||
await findAddToReviewButton().trigger('click');
|
||||
expect(store.dispatch).toHaveBeenCalledWith(
|
||||
'saveNote',
|
||||
expect(useNotes().saveNote).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
endpoint: notesDataMock.draftsPath,
|
||||
isDraft: true,
|
||||
|
|
@ -944,12 +861,9 @@ describe('issue_comment_form component', () => {
|
|||
await mountComponent({
|
||||
mountFunction: mountExtended,
|
||||
initialData: { note: 'a comment' },
|
||||
store,
|
||||
});
|
||||
jest.spyOn(store, 'dispatch').mockResolvedValue();
|
||||
await findAddCommentNowButton().trigger('click');
|
||||
expect(store.dispatch).toHaveBeenCalledWith(
|
||||
'saveNote',
|
||||
expect(useNotes().saveNote).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
endpoint: noteableDataMock.create_note_path,
|
||||
isDraft: false,
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@ import DiscussionActions from '~/notes/components/discussion_actions.vue';
|
|||
import DiscussionReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';
|
||||
import ResolveDiscussionButton from '~/notes/components/discussion_resolve_button.vue';
|
||||
import ResolveWithIssueButton from '~/notes/components/discussion_resolve_with_issue_button.vue';
|
||||
import createStore from '~/notes/stores';
|
||||
import { discussionMock } from '../mock_data';
|
||||
|
||||
// NOTE: clone mock_data so that it is not accidentally mutated
|
||||
|
|
@ -23,11 +22,9 @@ describe('DiscussionActions', () => {
|
|||
const createComponentFactory =
|
||||
(shallow = true) =>
|
||||
(props, options) => {
|
||||
const store = createStore();
|
||||
const mountFn = shallow ? shallowMount : mount;
|
||||
|
||||
wrapper = mountFn(DiscussionActions, {
|
||||
store,
|
||||
propsData: {
|
||||
discussion: discussionMock,
|
||||
isResolving: false,
|
||||
|
|
|
|||
|
|
@ -6,7 +6,6 @@ import { PiniaVuePlugin } from 'pinia';
|
|||
import DiscussionNotes from '~/notes/components/discussion_notes.vue';
|
||||
import NoteableNote from '~/notes/components/noteable_note.vue';
|
||||
import { SYSTEM_NOTE } from '~/notes/constants';
|
||||
import createStore from '~/notes/stores';
|
||||
import PlaceholderNote from '~/vue_shared/components/notes/placeholder_note.vue';
|
||||
import PlaceholderSystemNote from '~/vue_shared/components/notes/placeholder_system_note.vue';
|
||||
import SystemNote from '~/vue_shared/components/notes/system_note.vue';
|
||||
|
|
@ -28,7 +27,6 @@ const DISCUSSION_WITH_LINE_RANGE = {
|
|||
Vue.use(PiniaVuePlugin);
|
||||
|
||||
describe('DiscussionNotes', () => {
|
||||
let store;
|
||||
let pinia;
|
||||
let wrapper;
|
||||
|
||||
|
|
@ -37,7 +35,6 @@ describe('DiscussionNotes', () => {
|
|||
|
||||
const createComponent = (props, mountingMethod = shallowMount) => {
|
||||
wrapper = mountingMethod(DiscussionNotes, {
|
||||
store,
|
||||
pinia,
|
||||
propsData: {
|
||||
discussion: discussionMock,
|
||||
|
|
@ -61,10 +58,8 @@ describe('DiscussionNotes', () => {
|
|||
beforeEach(() => {
|
||||
pinia = createTestingPinia({ plugins: [globalAccessorPlugin] });
|
||||
useLegacyDiffs();
|
||||
useNotes();
|
||||
store = createStore();
|
||||
store.dispatch('setNoteableData', noteableDataMock);
|
||||
store.dispatch('setNotesData', notesDataMock);
|
||||
useNotes().noteableData = noteableDataMock;
|
||||
useNotes().notesData = notesDataMock;
|
||||
});
|
||||
|
||||
describe('rendering', () => {
|
||||
|
|
@ -178,20 +173,27 @@ describe('DiscussionNotes', () => {
|
|||
});
|
||||
|
||||
describe.each`
|
||||
desc | props | event | expectedCalls
|
||||
${'with `discussion.position`'} | ${{ discussion: DISCUSSION_WITH_LINE_RANGE }} | ${'mouseenter'} | ${[['setSelectedCommentPositionHover', LINE_RANGE]]}
|
||||
${'with `discussion.position`'} | ${{ discussion: DISCUSSION_WITH_LINE_RANGE }} | ${'mouseleave'} | ${[['setSelectedCommentPositionHover']]}
|
||||
${'without `discussion.position`'} | ${{}} | ${'mouseenter'} | ${[]}
|
||||
${'without `discussion.position`'} | ${{}} | ${'mouseleave'} | ${[]}
|
||||
`('$desc', ({ props, event, expectedCalls }) => {
|
||||
desc | props | event | shouldSelectPosition | shouldIncludeRange
|
||||
${'with `discussion.position`'} | ${{ discussion: DISCUSSION_WITH_LINE_RANGE }} | ${'mouseenter'} | ${true} | ${true}
|
||||
${'with `discussion.position`'} | ${{ discussion: DISCUSSION_WITH_LINE_RANGE }} | ${'mouseleave'} | ${true} | ${false}
|
||||
${'without `discussion.position`'} | ${{}} | ${'mouseenter'} | ${false} | ${false}
|
||||
${'without `discussion.position`'} | ${{}} | ${'mouseleave'} | ${false} | ${false}
|
||||
`('$desc', ({ props, event, shouldSelectPosition, shouldIncludeRange }) => {
|
||||
beforeEach(() => {
|
||||
createComponent(props);
|
||||
jest.spyOn(store, 'dispatch');
|
||||
});
|
||||
|
||||
it(`calls store ${expectedCalls.length} times on ${event}`, () => {
|
||||
it(`calls store on ${event}`, () => {
|
||||
getList().dispatchEvent(new MouseEvent(event));
|
||||
expect(store.dispatch.mock.calls).toEqual(expectedCalls);
|
||||
if (shouldSelectPosition) {
|
||||
if (shouldIncludeRange) {
|
||||
expect(useNotes().setSelectedCommentPositionHover).toHaveBeenCalledWith(LINE_RANGE);
|
||||
} else {
|
||||
expect(useNotes().setSelectedCommentPositionHover).toHaveBeenCalledWith();
|
||||
}
|
||||
} else {
|
||||
expect(useNotes().setSelectedCommentPositionHover).not.toHaveBeenCalled();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@ import Vue, { nextTick } from 'vue';
|
|||
import { createTestingPinia } from '@pinia/testing';
|
||||
import { PiniaVuePlugin } from 'pinia';
|
||||
import NoteForm from '~/notes/components/note_form.vue';
|
||||
import createStore from '~/notes/stores';
|
||||
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
|
||||
import CommentFieldLayout from '~/notes/components/comment_field_layout.vue';
|
||||
import { AT_WHO_ACTIVE_CLASS } from '~/gfm_auto_complete';
|
||||
|
|
@ -22,7 +21,6 @@ jest.mock('~/lib/utils/autosave');
|
|||
Vue.use(PiniaVuePlugin);
|
||||
|
||||
describe('issue_note_form component', () => {
|
||||
let store;
|
||||
let pinia;
|
||||
let wrapper;
|
||||
let textarea;
|
||||
|
|
@ -31,7 +29,6 @@ describe('issue_note_form component', () => {
|
|||
|
||||
const createComponentWrapper = (propsData = {}, provide = {}, stubs = {}) => {
|
||||
wrapper = mountExtended(NoteForm, {
|
||||
store,
|
||||
pinia,
|
||||
propsData: {
|
||||
...props,
|
||||
|
|
@ -63,13 +60,10 @@ describe('issue_note_form component', () => {
|
|||
beforeEach(() => {
|
||||
pinia = createTestingPinia({ plugins: [globalAccessorPlugin] });
|
||||
useLegacyDiffs();
|
||||
useNotes();
|
||||
useNotes().noteableData = noteableDataMock;
|
||||
useNotes().notesData = notesDataMock;
|
||||
useBatchComments().$patch({ isMergeRequest: true });
|
||||
|
||||
store = createStore();
|
||||
store.dispatch('setNoteableData', noteableDataMock);
|
||||
store.dispatch('setNotesData', notesDataMock);
|
||||
|
||||
props = {
|
||||
isEditing: false,
|
||||
noteBody: 'Magni suscipit eius consectetur enim et ex et commodi.',
|
||||
|
|
|
|||
|
|
@ -1,7 +1,5 @@
|
|||
import { mount } from '@vue/test-utils';
|
||||
import Vue, { nextTick } from 'vue';
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
import Vuex from 'vuex';
|
||||
import MockAdapter from 'axios-mock-adapter';
|
||||
import { PiniaVuePlugin } from 'pinia';
|
||||
import { createTestingPinia } from '@pinia/testing';
|
||||
|
|
@ -18,7 +16,6 @@ import ResolveWithIssueButton from '~/notes/components/discussion_resolve_with_i
|
|||
import NoteForm from '~/notes/components/note_form.vue';
|
||||
import NoteableDiscussion from '~/notes/components/noteable_discussion.vue';
|
||||
import { COMMENT_FORM } from '~/notes/i18n';
|
||||
import notesModule from '~/notes/stores/modules';
|
||||
import { sprintf } from '~/locale';
|
||||
import { createAlert } from '~/alert';
|
||||
import { globalAccessorPlugin } from '~/pinia/plugins';
|
||||
|
|
@ -26,45 +23,26 @@ import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
|
|||
import { useNotes } from '~/notes/store/legacy_notes';
|
||||
import { useBatchComments } from '~/batch_comments/store';
|
||||
import {
|
||||
noteableDataMock,
|
||||
discussionMock,
|
||||
notesDataMock,
|
||||
loggedOutnoteableData,
|
||||
userDataMock,
|
||||
noteableDataMock,
|
||||
} from '../mock_data';
|
||||
import { useLocalStorageSpy } from '../../__helpers__/local_storage_helper';
|
||||
|
||||
Vue.use(Vuex);
|
||||
Vue.use(PiniaVuePlugin);
|
||||
|
||||
jest.mock('~/behaviors/markdown/render_gfm');
|
||||
jest.mock('~/alert');
|
||||
|
||||
describe('noteable_discussion component', () => {
|
||||
let store;
|
||||
let pinia;
|
||||
let wrapper;
|
||||
let axiosMock;
|
||||
|
||||
const createStore = ({ saveNoteMock = jest.fn() } = {}) => {
|
||||
const baseModule = notesModule();
|
||||
|
||||
return new Vuex.Store({
|
||||
...baseModule,
|
||||
actions: {
|
||||
...baseModule.actions,
|
||||
saveNote: saveNoteMock,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const createComponent = ({ storeMock = createStore(), discussion = discussionMock } = {}) => {
|
||||
store = storeMock;
|
||||
store.dispatch('setNoteableData', noteableDataMock);
|
||||
store.dispatch('setNotesData', notesDataMock);
|
||||
|
||||
const createComponent = ({ discussion = discussionMock } = {}) => {
|
||||
wrapper = mountExtended(NoteableDiscussion, {
|
||||
store,
|
||||
pinia,
|
||||
propsData: { discussion },
|
||||
});
|
||||
|
|
@ -73,6 +51,8 @@ describe('noteable_discussion component', () => {
|
|||
beforeEach(() => {
|
||||
pinia = createTestingPinia({ plugins: [globalAccessorPlugin] });
|
||||
useLegacyDiffs();
|
||||
useNotes().noteableData = noteableDataMock;
|
||||
useNotes().notesData = notesDataMock;
|
||||
useNotes().saveNote.mockResolvedValue();
|
||||
useNotes().fetchDiscussionDiffLines.mockResolvedValue();
|
||||
useBatchComments();
|
||||
|
|
@ -137,9 +117,7 @@ describe('noteable_discussion component', () => {
|
|||
localStorage.setItem(`autosave/Note/Issue/${discussionMock.id}/Reply`, 'draft');
|
||||
}
|
||||
window.gon.current_user_id = userDataMock.id;
|
||||
store.dispatch('setUserData', userDataMock);
|
||||
wrapper = mount(NoteableDiscussion, {
|
||||
store,
|
||||
pinia,
|
||||
propsData: { discussion: discussionMock },
|
||||
});
|
||||
|
|
@ -193,7 +171,6 @@ describe('noteable_discussion component', () => {
|
|||
const discussion = { ...discussionMock, expanded: false };
|
||||
|
||||
wrapper.setProps({ discussion });
|
||||
store.dispatch = jest.fn();
|
||||
|
||||
await nextTick();
|
||||
|
||||
|
|
@ -201,7 +178,7 @@ describe('noteable_discussion component', () => {
|
|||
|
||||
await nextTick();
|
||||
|
||||
expect(store.dispatch).toHaveBeenCalledWith('expandDiscussion', {
|
||||
expect(useNotes().expandDiscussion).toHaveBeenCalledWith({
|
||||
discussionId: discussion.id,
|
||||
});
|
||||
});
|
||||
|
|
@ -266,16 +243,14 @@ describe('noteable_discussion component', () => {
|
|||
describe('save reply', () => {
|
||||
describe('if response contains validation errors', () => {
|
||||
beforeEach(async () => {
|
||||
const storeMock = createStore({
|
||||
saveNoteMock: jest.fn().mockRejectedValue({
|
||||
response: {
|
||||
status: HTTP_STATUS_UNPROCESSABLE_ENTITY,
|
||||
data: { errors: 'error 1 and error 2' },
|
||||
},
|
||||
}),
|
||||
useNotes().saveNote.mockRejectedValue({
|
||||
response: {
|
||||
status: HTTP_STATUS_UNPROCESSABLE_ENTITY,
|
||||
data: { errors: 'error 1 and error 2' },
|
||||
},
|
||||
});
|
||||
|
||||
createComponent({ storeMock });
|
||||
createComponent();
|
||||
|
||||
wrapper.findComponent(DiscussionReplyPlaceholder).vm.$emit('focus');
|
||||
await nextTick();
|
||||
|
|
@ -300,10 +275,8 @@ describe('noteable_discussion component', () => {
|
|||
describe('user is logged in', () => {
|
||||
beforeEach(() => {
|
||||
window.gon.current_user_id = userDataMock.id;
|
||||
store.dispatch('setUserData', userDataMock);
|
||||
|
||||
wrapper = mount(NoteableDiscussion, {
|
||||
store,
|
||||
pinia,
|
||||
propsData: { discussion: discussionMock },
|
||||
});
|
||||
|
|
@ -318,11 +291,9 @@ describe('noteable_discussion component', () => {
|
|||
describe('user is not logged in', () => {
|
||||
beforeEach(() => {
|
||||
window.gon.current_user_id = null;
|
||||
store.dispatch('setNoteableData', loggedOutnoteableData);
|
||||
store.dispatch('setNotesData', notesDataMock);
|
||||
useNotes().noteableData = loggedOutnoteableData;
|
||||
|
||||
wrapper = mount(NoteableDiscussion, {
|
||||
store,
|
||||
pinia,
|
||||
propsData: { discussion: discussionMock },
|
||||
});
|
||||
|
|
@ -336,10 +307,8 @@ describe('noteable_discussion component', () => {
|
|||
});
|
||||
|
||||
it('supports direct call on showReplyForm', async () => {
|
||||
store = createStore();
|
||||
const mock = jest.fn();
|
||||
wrapper = mount(NoteableDiscussion, {
|
||||
store,
|
||||
pinia,
|
||||
propsData: { discussion: discussionMock },
|
||||
stubs: { NoteForm: { methods: { append: mock }, render() {} } },
|
||||
|
|
|
|||
|
|
@ -1,18 +1,14 @@
|
|||
import Vue, { nextTick } from 'vue';
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
import Vuex from 'vuex';
|
||||
import { GlAvatarLink, GlAvatar } from '@gitlab/ui';
|
||||
import { clone } from 'lodash';
|
||||
import { createTestingPinia } from '@pinia/testing';
|
||||
import { PiniaVuePlugin } from 'pinia';
|
||||
import { mountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import DiffsModule from '~/diffs/store/modules';
|
||||
import NoteActions from '~/notes/components/note_actions.vue';
|
||||
import NoteBody from '~/notes/components/note_body.vue';
|
||||
import NoteHeader from '~/notes/components/note_header.vue';
|
||||
import issueNote from '~/notes/components/noteable_note.vue';
|
||||
import NotesModule from '~/notes/stores/modules';
|
||||
import { NOTEABLE_TYPE_MAPPING } from '~/notes/constants';
|
||||
import { createAlert } from '~/alert';
|
||||
import { UPDATE_COMMENT_FORM } from '~/notes/i18n';
|
||||
|
|
@ -23,9 +19,9 @@ import { HTTP_STATUS_UNPROCESSABLE_ENTITY } from '~/lib/utils/http_status';
|
|||
import { useMockInternalEventsTracking } from 'helpers/tracking_internal_events_helper';
|
||||
import { globalAccessorPlugin } from '~/pinia/plugins';
|
||||
import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
|
||||
import { useNotes } from '~/notes/store/legacy_notes';
|
||||
import { noteableDataMock, notesDataMock, note } from '../mock_data';
|
||||
|
||||
Vue.use(Vuex);
|
||||
Vue.use(PiniaVuePlugin);
|
||||
|
||||
jest.mock('~/alert');
|
||||
|
|
@ -51,7 +47,6 @@ const singleLineNotePosition = {
|
|||
};
|
||||
|
||||
describe('issue_note', () => {
|
||||
let store;
|
||||
let pinia;
|
||||
let wrapper;
|
||||
|
||||
|
|
@ -61,24 +56,11 @@ describe('issue_note', () => {
|
|||
|
||||
const findMultilineComment = () => wrapper.findByTestId('multiline-comment');
|
||||
|
||||
const createWrapper = (props = {}, storeUpdater = (s) => s) => {
|
||||
store = new Vuex.Store(
|
||||
storeUpdater({
|
||||
modules: {
|
||||
notes: NotesModule(),
|
||||
diffs: DiffsModule(),
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
store.dispatch('setNoteableData', noteableDataMock);
|
||||
store.dispatch('setNotesData', notesDataMock);
|
||||
|
||||
const createWrapper = (props = {}) => {
|
||||
// the component overwrites the `note` prop with every action, hence create a copy
|
||||
const noteCopy = clone(props.note || note);
|
||||
|
||||
wrapper = mountExtended(issueNote, {
|
||||
store,
|
||||
pinia,
|
||||
propsData: {
|
||||
note: noteCopy,
|
||||
|
|
@ -100,6 +82,9 @@ describe('issue_note', () => {
|
|||
beforeEach(() => {
|
||||
pinia = createTestingPinia({ plugins: [globalAccessorPlugin] });
|
||||
useLegacyDiffs();
|
||||
useNotes().noteableData = noteableDataMock;
|
||||
useNotes().notesData = notesDataMock;
|
||||
useNotes().updateNote.mockResolvedValue();
|
||||
});
|
||||
|
||||
describe('mutiline comments', () => {
|
||||
|
|
@ -306,17 +291,6 @@ describe('issue_note', () => {
|
|||
'<img src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7" onload="alert(1)" />';
|
||||
const alertSpy = jest.spyOn(window, 'alert').mockImplementation(() => {});
|
||||
|
||||
store.hotUpdate({
|
||||
modules: {
|
||||
notes: {
|
||||
actions: {
|
||||
updateNote() {},
|
||||
setSelectedCommentPositionHover() {},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
findNoteBody().vm.$emit('handleFormUpdate', {
|
||||
noteText: noteBody,
|
||||
parentElement: null,
|
||||
|
|
@ -346,7 +320,6 @@ describe('issue_note', () => {
|
|||
});
|
||||
|
||||
describe('formUpdateHandler', () => {
|
||||
const updateNote = jest.fn();
|
||||
const params = {
|
||||
noteText: 'updated note text',
|
||||
parentElement: null,
|
||||
|
|
@ -354,26 +327,10 @@ describe('issue_note', () => {
|
|||
resolveDiscussion: false,
|
||||
};
|
||||
|
||||
const updateActions = () => {
|
||||
store.hotUpdate({
|
||||
modules: {
|
||||
notes: {
|
||||
actions: {
|
||||
updateNote,
|
||||
setSelectedCommentPositionHover() {},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
createWrapper();
|
||||
updateActions();
|
||||
});
|
||||
|
||||
afterEach(() => updateNote.mockReset());
|
||||
|
||||
it('emits handleUpdateNote', async () => {
|
||||
const updatedNote = { ...note, note_html: `<p dir="auto">${params.noteText}</p>\n` };
|
||||
|
||||
|
|
@ -435,7 +392,7 @@ describe('issue_note', () => {
|
|||
|
||||
describe('when updateNote returns errors', () => {
|
||||
beforeEach(() => {
|
||||
updateNote.mockRejectedValue({
|
||||
useNotes().updateNote.mockRejectedValue({
|
||||
response: {
|
||||
status: HTTP_STATUS_UNPROCESSABLE_ENTITY,
|
||||
data: { errors: 'error 1 and error 2' },
|
||||
|
|
@ -462,27 +419,14 @@ describe('issue_note', () => {
|
|||
|
||||
describe('diffFile', () => {
|
||||
it.each`
|
||||
scenario | files | noteDef
|
||||
${'the note has no position'} | ${undefined} | ${note}
|
||||
${'the Diffs store has no data'} | ${[]} | ${{ ...note, position: singleLineNotePosition }}
|
||||
`(
|
||||
'returns `null` when $scenario and no diff file is provided as a prop',
|
||||
({ noteDef, diffs }) => {
|
||||
const storeUpdater = (rawStore) => {
|
||||
const updatedStore = { ...rawStore };
|
||||
|
||||
if (diffs) {
|
||||
updatedStore.modules.diffs.state.diffFiles = diffs;
|
||||
}
|
||||
|
||||
return updatedStore;
|
||||
};
|
||||
|
||||
createWrapper({ note: noteDef, discussionFile: null }, storeUpdater);
|
||||
|
||||
expect(findNoteBody().props().file).toBe(null);
|
||||
},
|
||||
);
|
||||
scenario | noteDef
|
||||
${'the note has no position'} | ${note}
|
||||
${'the Diffs store has no data'} | ${{ ...note, position: singleLineNotePosition }}
|
||||
`('returns `null` when $scenario and no diff file is provided as a prop', ({ noteDef }) => {
|
||||
useLegacyDiffs().diffFiles = [];
|
||||
createWrapper({ note: noteDef, discussionFile: null });
|
||||
expect(findNoteBody().props().file).toBe(null);
|
||||
});
|
||||
|
||||
it("returns the correct diff file from the Diffs store if it's available", () => {
|
||||
useLegacyDiffs().diffFiles = [{ file_hash: 'abc', testId: 'diffFileTest' }];
|
||||
|
|
@ -494,17 +438,10 @@ describe('issue_note', () => {
|
|||
});
|
||||
|
||||
it('returns the provided diff file if the more robust getters fail', () => {
|
||||
createWrapper(
|
||||
{
|
||||
note: { ...note, position: singleLineNotePosition },
|
||||
discussionFile: { testId: 'diffFileTest' },
|
||||
},
|
||||
(rawStore) => {
|
||||
const updatedStore = { ...rawStore };
|
||||
updatedStore.modules.diffs.state.diffFiles = [];
|
||||
return updatedStore;
|
||||
},
|
||||
);
|
||||
createWrapper({
|
||||
note: { ...note, position: singleLineNotePosition },
|
||||
discussionFile: { testId: 'diffFileTest' },
|
||||
});
|
||||
|
||||
expect(findNoteBody().props().file.testId).toBe('diffFileTest');
|
||||
});
|
||||
|
|
|
|||
|
|
@ -5,7 +5,6 @@ import { createTestingPinia } from '@pinia/testing';
|
|||
import NotesActivityHeader from '~/notes/components/notes_activity_header.vue';
|
||||
import DiscussionFilter from '~/notes/components/discussion_filter.vue';
|
||||
import TimelineToggle from '~/notes/components/timeline_toggle.vue';
|
||||
import createStore from '~/notes/stores';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { globalAccessorPlugin } from '~/pinia/plugins';
|
||||
import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
|
||||
|
|
@ -26,8 +25,6 @@ describe('~/notes/components/notes_activity_header.vue', () => {
|
|||
notesFilters,
|
||||
...props,
|
||||
},
|
||||
// why: Rendering async timeline toggle requires store
|
||||
store: createStore(),
|
||||
pinia,
|
||||
...options,
|
||||
});
|
||||
|
|
|
|||
|
|
@@ -1,16 +1,14 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { PiniaVuePlugin } from 'pinia';
import { createTestingPinia } from '@pinia/testing';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import createEventHub from '~/helpers/event_hub_factory';
import * as utils from '~/lib/utils/common_utils';
import discussionNavigation from '~/notes/mixins/discussion_navigation';
import notesModule from '~/notes/stores/modules';
import { globalAccessorPlugin } from '~/pinia/plugins';
import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
import { useNotes } from '~/notes/store/legacy_notes';

const discussion = (id, index) => ({
  id,

@@ -30,14 +28,11 @@ const createComponent = () => ({
  },
});

Vue.use(Vuex);
Vue.use(PiniaVuePlugin);

describe('Discussion navigation mixin', () => {
  let wrapper;
  let store;
  let pinia;
  let expandDiscussion;

  const findDiscussionEl = (id) => document.querySelector(`div[data-discussion-id="${id}"]`);

@@ -60,20 +55,9 @@ describe('Discussion navigation mixin', () => {

    pinia = createTestingPinia({ plugins: [globalAccessorPlugin] });
    useLegacyDiffs();

    expandDiscussion = jest.fn();
    const { actions, ...notesRest } = notesModule();
    store = new Vuex.Store({
      modules: {
        notes: {
          ...notesRest,
          actions: { ...actions, expandDiscussion },
        },
      },
    });
    store.state.notes.discussions = createDiscussions();

    wrapper = shallowMount(createComponent(), { store, pinia });
    useNotes().discussions = createDiscussions();
    useNotes().setCurrentDiscussionId.mockResolvedValue();
    wrapper = shallowMount(createComponent(), { pinia });
  });

  afterEach(() => {

@@ -88,19 +72,16 @@ describe('Discussion navigation mixin', () => {

      ({ vm } = wrapper);

      jest.spyOn(store, 'dispatch');
      jest.spyOn(vm, 'jumpToNextDiscussion');
    });

    it('triggers the setCurrentDiscussionId action with null as the value', () => {
      vm.jumpToFirstUnresolvedDiscussion();

      expect(store.dispatch).toHaveBeenCalledWith('setCurrentDiscussionId', null);
      expect(useNotes().setCurrentDiscussionId).toHaveBeenCalledWith(null);
    });

    it('triggers the jumpToNextDiscussion action when the previous store action succeeds', async () => {
      store.dispatch.mockResolvedValue();

      vm.jumpToFirstUnresolvedDiscussion();

      await nextTick();

@@ -153,7 +134,7 @@ describe('Discussion navigation mixin', () => {
      });

      it('expands discussion', () => {
        expect(expandDiscussion).toHaveBeenCalledWith(expect.any(Object), {
        expect(useNotes().expandDiscussion).toHaveBeenCalledWith({
          discussionId: expectedId,
        });
      });

@@ -1,26 +1,24 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { PiniaVuePlugin } from 'pinia';
import { createTestingPinia } from '@pinia/testing';
import IssuePlaceholderNote from '~/vue_shared/components/notes/placeholder_note.vue';
import { userDataMock } from 'jest/notes/mock_data';
import { globalAccessorPlugin } from '~/pinia/plugins';
import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
import { useNotes } from '~/notes/store/legacy_notes';

Vue.use(Vuex);

const getters = {
  getUserData: () => userDataMock,
};
Vue.use(PiniaVuePlugin);

describe('Issue placeholder note component', () => {
  let pinia;
  let wrapper;

  const findNote = () => wrapper.findComponent({ ref: 'note' });

  const createComponent = (isIndividual = false, propsData = {}) => {
    wrapper = shallowMount(IssuePlaceholderNote, {
      store: new Vuex.Store({
        getters,
      }),
      pinia,
      propsData: {
        note: {
          body: 'Foo',

@@ -31,6 +29,12 @@ describe('Issue placeholder note component', () => {
    });
  };

  beforeEach(() => {
    pinia = createTestingPinia({ plugins: [globalAccessorPlugin] });
    useLegacyDiffs();
    useNotes().userData = userDataMock;
  });

  it('matches snapshot', () => {
    createComponent();

@@ -1,30 +1,38 @@
import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import { mount } from '@vue/test-utils';
import { PiniaVuePlugin } from 'pinia';
import { createTestingPinia } from '@pinia/testing';
import waitForPromises from 'helpers/wait_for_promises';
import createStore from '~/notes/stores';
import IssueSystemNote from '~/vue_shared/components/notes/system_note.vue';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { renderGFM } from '~/behaviors/markdown/render_gfm';
import { globalAccessorPlugin } from '~/pinia/plugins';
import { useLegacyDiffs } from '~/diffs/stores/legacy_diffs';
import { useNotes } from '~/notes/store/legacy_notes';

jest.mock('~/behaviors/markdown/render_gfm');

Vue.use(PiniaVuePlugin);

describe('system note component', () => {
  let pinia;
  let vm;
  let props;
  let mock;

  function createComponent(propsData = {}) {
    const store = createStore();
    store.dispatch('setTargetNoteHash', `note_${props.note.id}`);
    useNotes().setTargetNoteHash(`note_${props.note.id}`);

    vm = mount(IssueSystemNote, {
      store,
      pinia,
      propsData,
    });
  }

  beforeEach(() => {
    pinia = createTestingPinia({ plugins: [globalAccessorPlugin], stubActions: false });
    props = {
      note: {
        id: '1424',

@@ -43,6 +51,8 @@ describe('system note component', () => {
    };

    mock = new MockAdapter(axios);
    useLegacyDiffs();
    useNotes();
  });

  afterEach(() => {

@@ -51,8 +51,7 @@ RSpec.describe ::API::Entities::MergeRequestBasic, feature_category: :code_revie
        entities.to_json
      end

      # The current threshold is 3 query per entity maximum.
      expect(batch.count).to be_within(3 * query.count).of(control.count)
      expect(batch.count).to be <= control.count * query.count
    end
  end

@@ -1271,7 +1271,7 @@ RSpec.describe API::Helpers, feature_category: :shared do
      it 'does not destroy given project' do
        expect(project).not_to receive(:destroy)

        expect { helper.destroy_conditionally!(project) }.to throw_symbol(:error).and change { Project.count }.by(0)
        expect { helper.destroy_conditionally!(project) }.to throw_symbol(:error).and not_change { Project.count }
      end
    end
  end

@@ -49,7 +49,7 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillUserDetails, schema: 2024071
    it 'creates only the needed user_details entries' do
      expect(user_details.count).to eq(3)

      expect { migration.perform }.to change { user_details.count }.by(0)
      expect { migration.perform }.not_to change { user_details.count }
    end
  end

@@ -95,7 +95,7 @@ RSpec.describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cac
      end

      it 'makes no Gitaly calls' do
        expect { pipeline_status.load_status }.to change { Gitlab::GitalyClient.get_request_count }.by(0)
        expect { pipeline_status.load_status }.not_to change { Gitlab::GitalyClient.get_request_count }
      end
    end
  end

@@ -73,7 +73,7 @@ RSpec.describe Gitlab::Checks::MatchingMergeRequest, feature_category: :source_c
      it 'only increments total counter' do
        expect { subject.match? }
          .to change { total_counter.get }.by(1)
          .and change { stale_counter.get }.by(0)
          .and not_change { stale_counter.get }
      end
    end

@@ -212,7 +212,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner, fe
          expect(job_record).to eq(previous_job)
        end

        expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(0)
        expect { runner.run_migration_job(migration) }.not_to change { job_relation.count }
      end

      context 'when failed job has reached the maximum number of attempts' do

@@ -223,7 +223,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner, fe
        it 'marks the migration as failed' do
          expect(migration_wrapper).not_to receive(:perform)

          expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(0)
          expect { runner.run_migration_job(migration) }.not_to change { job_relation.count }

          expect(migration).to be_failed
        end

@@ -240,7 +240,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner, fe
          expect(job_record).to eq(previous_job)
        end

        expect { runner.run_migration_job(migration.reload) }.to change { job_relation.count }.by(0)
        expect { runner.run_migration_job(migration.reload) }.not_to change { job_relation.count }
      end
    end

@@ -252,7 +252,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner, fe
      it 'keeps the migration active' do
        expect(migration_wrapper).not_to receive(:perform)

        expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(0)
        expect { runner.run_migration_job(migration) }.not_to change { job_relation.count }

        expect(migration.reload).to be_active
      end

@@ -276,7 +276,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner, fe
          expect(job_record).to eq(previous_job)
        end

        expect { runner.run_migration_job(migration.reload) }.to change { job_relation.count }.by(0)
        expect { runner.run_migration_job(migration.reload) }.not_to change { job_relation.count }
      end
    end
  end

@@ -42,12 +42,12 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
      it 'raises UnableToPartition error' do
        expect { prepare }
          .to raise_error(described_class::UnableToPartition)
          .and change {
            Gitlab::Database::PostgresConstraint
              .check_constraints
              .by_table_identifier(table_identifier)
              .count
          }.by(0)
          .and not_change {
            Gitlab::Database::PostgresConstraint
              .check_constraints
              .by_table_identifier(table_identifier)
              .count
          }
      end
    end

@@ -105,12 +105,12 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
        expect { prepare }
          .to raise_error(described_class::UnableToPartition,
            starting_with('Error validating partitioning constraint'))
          .and change {
            Gitlab::Database::PostgresConstraint
              .check_constraints
              .by_table_identifier(table_identifier)
              .count
          }.by(0)
          .and not_change {
            Gitlab::Database::PostgresConstraint
              .check_constraints
              .by_table_identifier(table_identifier)
              .count
          }
      end
    end
  end

@@ -219,10 +219,9 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
      let(:until_table) { referencing_table_model.table_name }

      it 'only truncates until the table specified' do
        expect do
          truncate_legacy_tables
        end.to change(referencing_table_model, :count).by(-5)
          .and change(referenced_table_model, :count).by(0)
        expect { truncate_legacy_tables }
          .to change(referencing_table_model, :count).by(-5)
          .and not_change(referenced_table_model, :count)
      end
    end

@@ -78,7 +78,7 @@ RSpec.describe Gitlab::GitAccessProject do
      it 'does not create a new project' do
        expect { action }
          .to raise_specific_error
          .and change { Project.count }.by(0)
          .and not_change { Project.count }
      end
    end

@@ -129,8 +129,8 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewImporter, :cl

      it 'does not import second approve and note' do
        expect { subject.execute }
          .to change { Note.count }.by(0)
          .and change { Approval.count }.by(0)
          .to not_change { Note.count }
          .and not_change { Approval.count }
      end

      it 'only pushes placeholder references for reviewer' do

@@ -315,8 +315,8 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewImporter, :cl

      it 'does not import second approve and note' do
        expect { subject.execute }
          .to change { Note.count }.by(0)
          .and change { Approval.count }.by(0)
          .to not_change { Note.count }
          .and not_change { Approval.count }
      end
    end
  end

@@ -95,7 +95,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter, feature_categor
    it 'is idempotent' do
      allow(importer).to receive(:each_release).and_return([github_release])
      expect { importer.execute }.to change { Release.count }.by(1)
      expect { importer.execute }.to change { Release.count }.by(0) # Idempotency check
      expect { importer.execute }.not_to change { Release.count } # Idempotency check
    end

    context 'when the body has user mentions' do

@@ -324,7 +324,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter, feature_categor
    it 'is idempotent' do
      allow(importer).to receive(:each_release).and_return([github_release])
      expect { importer.execute }.to change { Release.count }.by(1)
      expect { importer.execute }.to change { Release.count }.by(0) # Idempotency check
      expect { importer.execute }.not_to change { Release.count } # Idempotency check
    end

    context 'when the body has user mentions' do

@@ -100,7 +100,7 @@ RSpec.describe ::Gitlab::Seeders::Ci::Catalog::ResourceSeeder, feature_category:
        # We call the same command twice, as it means it would try to recreate
        # projects that were already created!
        expect { seed }.to change { group.projects.count }.by(seed_count)
        expect { seed }.to change { group.projects.count }.by(0)
        expect { seed }.not_to change { group.projects.count }
      end

      it 'creates as many projects as specific in the argument' do

@@ -64,9 +64,8 @@ RSpec.describe ::Gitlab::Seeders::Ci::VariablesGroupSeeder, feature_category: :c
    end

    it 'skips seeding when group name is invalid' do
      expect { invalid_group_name_seeder.seed }.to change {
        group.variables.count
      }.by(0)
      expect { invalid_group_name_seeder.seed }
        .not_to change { group.variables.count }
    end

    it 'skips CI variable creation if CI variable already exists' do

@@ -64,9 +64,8 @@ RSpec.describe ::Gitlab::Seeders::Ci::VariablesProjectSeeder, feature_category:
    end

    it 'skips seeding when project path is invalid' do
      expect { invalid_project_path_seeder.seed }.to change {
        project.variables.count
      }.by(0)
      expect { invalid_project_path_seeder.seed }
        .not_to change { project.variables.count }
    end

    it 'skips CI variable creation if CI variable already exists' do

@@ -32,9 +32,8 @@ RSpec.describe ::Gitlab::Seeders::ProjectEnvironmentSeeder, feature_category: :c
    end

    it 'skips seeding when project path is invalid' do
      expect { invalid_project_path_seeder.seed }.to change {
        project.environments.count
      }.by(0)
      expect { invalid_project_path_seeder.seed }
        .not_to change { project.environments.count }
    end

    it 'skips environment creation if environment already exists' do

@@ -41,6 +41,8 @@ RSpec.describe Gitlab::TopologyServiceClient::BaseService, feature_category: :ce
      end
    end

    let(:tls_config) { { tls: { enabled: true } } }

    let(:config) do
      {
        ca_file: ca_file.path,

@@ -52,7 +54,7 @@ RSpec.describe Gitlab::TopologyServiceClient::BaseService, feature_category: :ce
    subject(:service_credentials) { base_service.send(:service_credentials) }

    before do
      stub_config(cell: { enabled: true, topology_service_client: config })
      stub_config(cell: { enabled: true, topology_service_client: tls_config.merge(config) })
    end

    after do

@@ -161,5 +163,11 @@ RSpec.describe Gitlab::TopologyServiceClient::BaseService, feature_category: :ce

      include_examples 'insecure credentials'
    end

    context 'when TLS is disabled' do
      let(:tls_config) { { tls: { enabled: false } } }

      it { expect(service_credentials).to eq(:this_channel_is_insecure) }
    end
  end
end

@@ -22,7 +22,7 @@ RSpec.describe RemoveRecordsWithoutProjectFromProjectSavedRepliesTable, feature_

      migrate!

      expect { migrate! }.to change { project_saved_replies.count }.by(0)
      expect { migrate! }.not_to change { project_saved_replies.count }
    end
  end
end

@@ -3,18 +3,28 @@
require 'spec_helper'

RSpec.describe Packages::Downloadable, feature_category: :package_registry do
  context 'with a package' do
    describe '#touch_last_downloaded_at' do
      let_it_be(:package) { create(:generic_package) }
  context 'with a package', :aggregate_failures do
    let_it_be_with_reload(:package) { create(:generic_package) }

      subject { package.touch_last_downloaded_at }

      it 'updates the downloaded_at' do
    shared_examples 'updating the last_downloaded_at column' do
      specify do
        expect(::Gitlab::Database::LoadBalancing::SessionMap.current(package.load_balancer))
          .to receive(:without_sticky_writes).and_call_original
        expect { subject }
          .to change { package.last_downloaded_at }.from(nil).to(instance_of(ActiveSupport::TimeWithZone))
        expect { execute }
          .to change { package.reload.last_downloaded_at }.from(nil).to(instance_of(ActiveSupport::TimeWithZone))
      end
    end

    describe '#touch_last_downloaded_at' do
      subject(:execute) { package.touch_last_downloaded_at }

      it_behaves_like 'updating the last_downloaded_at column'
    end

    describe '.touch_last_downloaded_at' do
      subject(:execute) { ::Packages::Generic::Package.touch_last_downloaded_at(package.id) }

      it_behaves_like 'updating the last_downloaded_at column'
    end
  end
end

@@ -12,7 +12,7 @@ RSpec.describe Integrations::SlackWorkspace::ApiScope, feature_category: :integr
        .to change { described_class.count }.by(1)

      expect { described_class.find_or_initialize_by_names(%w[baz foo]) }
        .to change { described_class.count }.by(0)
        .not_to change { described_class.count }

      expect(described_class.pluck(:name)).to match_array(%w[foo bar baz buzz])
    end

@@ -5768,6 +5768,25 @@ RSpec.describe User, feature_category: :user_profile do

      context 'when owner is a non-owned group' do
        it_behaves_like 'group member'

        context 'when access is provided by group invitation' do
          let_it_be(:invited_group) { create(:group) }
          let_it_be(:user) { create(:user, owner_of: invited_group) }

          it 'returns false for owns_runner?' do
            expect(user.owns_runner?(runner)).to eq(false)
          end

          context 'when invited_group is invited to group' do
            before do
              create(:group_group_link, :owner, shared_group: group, shared_with_group: invited_group)
            end

            it 'returns true for owns_runner?' do
              expect(user.owns_runner?(runner)).to eq(true)
            end
          end
        end
      end

      context 'when in an owned group' do

@@ -81,6 +81,31 @@ RSpec.describe Ci::RunnerPolicy, feature_category: :runner do

        it { expect_allowed :update_runner }
      end

      context 'when access is provided by group invitation' do
        let_it_be(:invited_group) { create(:group) }
        let_it_be(:user) { create(:user, owner_of: invited_group) }

        it { expect_disallowed :update_runner }

        context 'when invited_group is invited to group' do
          before do
            create(:group_group_link, access_level, shared_group: group, shared_with_group: invited_group)
          end

          context 'as owner' do
            let(:access_level) { :owner }

            it { expect_allowed :update_runner }
          end

          context 'as maintainer' do
            let(:access_level) { :maintainer }

            it { expect_disallowed :update_runner }
          end
        end
      end
    end

    context 'with project runner' do

@@ -79,7 +79,7 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
    end

    context 'when authenticated' do
      it 'avoids N+1 queries', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/330335' do
      it 'avoids N+1 queries' do
        control = ActiveRecord::QueryRecorder.new do
          get api(endpoint_path, user)
        end

@@ -100,6 +100,22 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
        end.not_to exceed_query_limit(control)
      end

      context 'when merge requests are merged' do
        it 'avoids N+1 queries' do
          create(:merge_request, state: :merged, source_project: project, target_project: project, merge_user: create(:user))

          control = ActiveRecord::QueryRecorder.new do
            get api(endpoint_path, user)
          end

          create(:merge_request, state: :merged, source_project: project, target_project: project, merge_user: create(:user))

          expect do
            get api(endpoint_path, user)
          end.not_to exceed_query_limit(control)
        end
      end

      context 'when merge request is unchecked' do
        let(:check_service_class) { MergeRequests::MergeabilityCheckService }
        let(:mr_entity) { json_response.find { |mr| mr['id'] == merge_request.id } }

@@ -1332,7 +1332,6 @@
- './ee/spec/models/project_ci_cd_setting_spec.rb'
- './ee/spec/models/project_import_data_spec.rb'
- './ee/spec/models/ee/project_member_spec.rb'
- './ee/spec/models/project_security_setting_spec.rb'
- './ee/spec/models/protected_branch/required_code_owners_section_spec.rb'
- './ee/spec/models/protected_branch/unprotect_access_level_spec.rb'
- './ee/spec/models/protected_environments/approval_rule_spec.rb'