Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-02-22 12:07:55 +00:00
parent 2b0b59094a
commit fb336d5f6b
49 changed files with 877 additions and 206 deletions

View File

@ -1,6 +1,6 @@
<!--
When creating a new cop that could be applied to multiple applications,
we encourage you to add it to https://gitlab.com/gitlab-org/gitlab-styles gem.
we encourage you to add it to https://gitlab.com/gitlab-org/ruby/gems/gitlab-styles gem.
-->
## Description of the proposal

View File

@ -11,10 +11,11 @@ export default {
BoardSettingsSidebar,
BoardTopBar,
},
inject: ['initialBoardId'],
inject: ['initialBoardId', 'initialFilterParams'],
data() {
return {
boardId: this.initialBoardId,
filterParams: { ...this.initialFilterParams },
};
},
computed: {
@ -30,14 +31,19 @@ export default {
switchBoard(id) {
this.boardId = id;
},
setFilters(filters) {
const filterParams = { ...filters };
if (filterParams.groupBy) delete filterParams.groupBy;
this.filterParams = filterParams;
},
},
};
</script>
<template>
<div class="boards-app gl-relative" :class="{ 'is-compact': isSidebarOpen }">
<board-top-bar :board-id="boardId" @switchBoard="switchBoard" />
<board-content :board-id="boardId" />
<board-top-bar :board-id="boardId" @switchBoard="switchBoard" @setFilters="setFilters" />
<board-content :board-id="boardId" :filter-params="filterParams" />
<board-settings-sidebar />
</div>
</template>

View File

@ -20,6 +20,10 @@ export default {
type: String,
required: true,
},
filters: {
type: Object,
required: true,
},
},
computed: {
...mapState(['filterParams', 'highlightedLists']),
@ -33,6 +37,9 @@ export default {
isListDraggable() {
return isListDraggable(this.list);
},
filtersToUse() {
return this.isApolloBoard ? this.filters : this.filterParams;
},
},
watch: {
filterParams: {
@ -83,13 +90,13 @@ export default {
class="board-inner gl-display-flex gl-flex-direction-column gl-relative gl-h-full gl-rounded-base gl-bg-gray-50"
:class="{ 'board-column-highlighted': highlighted }"
>
<board-list-header :list="list" />
<board-list-header :list="list" :filter-params="filtersToUse" />
<board-list
ref="board-list"
:board-id="boardId"
:board-items="listItems"
:list="list"
:filter-params="filterParams"
:filter-params="filtersToUse"
/>
</div>
</div>

View File

@ -44,6 +44,10 @@ export default {
type: String,
required: true,
},
filterParams: {
type: Object,
required: true,
},
},
data() {
return {
@ -92,7 +96,7 @@ export default {
}),
fullPath: this.fullPath,
boardId: this.boardId,
filterParams: this.filterParams,
filters: this.filterParams,
};
},
boardListsToUse() {
@ -176,6 +180,7 @@ export default {
ref="board"
:board-id="boardId"
:list="list"
:filters="filterParams"
:data-draggable-item-type="$options.draggableItemTypes.list"
:class="{ 'gl-xs-display-none!': addColumnFormVisible }"
/>
@ -190,6 +195,7 @@ export default {
ref="swimlanes"
:lists="boardListsToUse"
:can-admin-list="canAdminList"
:filters="filterParams"
:style="{ height: boardHeight }"
/>

View File

@ -1,7 +1,7 @@
<script>
import { pickBy, isEmpty, mapValues } from 'lodash';
import { mapActions } from 'vuex';
import { getIdFromGraphQLId, isGid } from '~/graphql_shared/utils';
import { getIdFromGraphQLId, isGid, convertToGraphQLId } from '~/graphql_shared/utils';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { updateHistory, setUrlParams, queryToObject } from '~/lib/utils/url_utility';
import { __ } from '~/locale';
@ -23,6 +23,7 @@ import {
} from '~/vue_shared/components/filtered_search_bar/constants';
import FilteredSearch from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import { AssigneeFilterType } from '~/boards/constants';
import { TYPENAME_ITERATION } from '~/graphql_shared/constants';
import eventHub from '../eventhub';
export default {
@ -30,7 +31,7 @@ export default {
search: __('Search'),
},
components: { FilteredSearch },
inject: ['initialFilterParams'],
inject: ['initialFilterParams', 'isApolloBoard'],
props: {
tokens: {
type: Array,
@ -334,6 +335,17 @@ export default {
},
);
},
formattedFilterParams() {
const filtersCopy = { ...this.filterParams };
if (this.filterParams?.iterationId) {
filtersCopy.iterationId = convertToGraphQLId(
TYPENAME_ITERATION,
this.filterParams.iterationId,
);
}
return filtersCopy;
},
},
created() {
eventHub.$on('updateTokens', this.updateTokens);
@ -360,7 +372,11 @@ export default {
replace: true,
});
this.performSearch();
if (this.isApolloBoard) {
this.$emit('setFilters', this.formattedFilterParams);
} else {
this.performSearch();
}
},
getFilterParams(filters = []) {
const notFilters = filters.filter((item) => item.value.operator === '!=');

View File

@ -59,6 +59,10 @@ export default {
type: Array,
required: true,
},
filterParams: {
type: Object,
required: true,
},
},
data() {
return {
@ -108,7 +112,7 @@ export default {
},
},
computed: {
...mapState(['pageInfoByListId', 'listsFlags', 'filterParams', 'isUpdateIssueOrderInProgress']),
...mapState(['pageInfoByListId', 'listsFlags', 'isUpdateIssueOrderInProgress']),
boardListItems() {
return this.isApolloBoard
? this.currentList?.[`${this.issuableType}s`].nodes || []

View File

@ -76,9 +76,13 @@ export default {
required: false,
default: false,
},
filterParams: {
type: Object,
required: true,
},
},
computed: {
...mapState(['activeId', 'filterParams', 'boardId']),
...mapState(['activeId', 'boardId']),
...mapGetters(['isSwimlanesOn']),
isLoggedIn() {
return Boolean(this.currentUserId);

View File

@ -73,8 +73,11 @@ export default {
>
<boards-selector :board-apollo="board" @switchBoard="$emit('switchBoard', $event)" />
<new-board-button />
<issue-board-filtered-search v-if="isIssueBoard" />
<epic-board-filtered-search v-else />
<issue-board-filtered-search
v-if="isIssueBoard"
@setFilters="$emit('setFilters', $event)"
/>
<epic-board-filtered-search v-else @setFilters="$emit('setFilters', $event)" />
</div>
<div
class="filter-dropdown-container gl-md-display-flex gl-flex-direction-column gl-md-flex-direction-row gl-align-items-flex-start"

View File

@ -1,12 +1,11 @@
<script>
import { GlFilteredSearchToken } from '@gitlab/ui';
import fuzzaldrinPlus from 'fuzzaldrin-plus';
import { mapActions } from 'vuex';
import { orderBy } from 'lodash';
import BoardFilteredSearch from 'ee_else_ce/boards/components/board_filtered_search.vue';
import axios from '~/lib/utils/axios_utils';
import { joinPaths } from '~/lib/utils/url_utility';
import issueBoardFilters from '~/boards/issue_board_filters';
import issueBoardFilters from 'ee_else_ce/boards/issue_board_filters';
import { TYPENAME_USER } from '~/graphql_shared/constants';
import { convertToGraphQLId } from '~/graphql_shared/utils';
import { __ } from '~/locale';
@ -51,7 +50,7 @@ export default {
tokensCE() {
const { issue, incident } = this.$options.i18n;
const { types } = this.$options;
const { fetchUsers, fetchLabels } = issueBoardFilters(
const { fetchUsers, fetchLabels, fetchMilestones } = issueBoardFilters(
this.$apollo,
this.fullPath,
this.isGroupBoard,
@ -135,7 +134,7 @@ export default {
token: MilestoneToken,
unique: true,
shouldSkipSort: true,
fetchMilestones: this.fetchMilestones,
fetchMilestones,
},
{
icon: 'issues',
@ -176,7 +175,6 @@ export default {
},
},
methods: {
...mapActions(['fetchMilestones']),
preloadedUsers() {
return gon?.current_user_id
? [
@ -194,5 +192,9 @@ export default {
</script>
<template>
<board-filtered-search data-testid="issue-board-filtered-search" :tokens="tokens" />
<board-filtered-search
data-testid="issue-board-filtered-search"
:tokens="tokens"
@setFilters="$emit('setFilters', $event)"
/>
</template>

View File

@ -1,5 +1,5 @@
import boardListsQuery from 'ee_else_ce/boards/graphql/board_lists.query.graphql';
import { TYPE_ISSUE } from '~/issues/constants';
import { TYPE_EPIC, TYPE_ISSUE } from '~/issues/constants';
import { s__, __ } from '~/locale';
import updateEpicSubscriptionMutation from '~/sidebar/queries/update_epic_subscription.mutation.graphql';
import updateEpicTitleMutation from '~/sidebar/queries/update_epic_title.mutation.graphql';
@ -12,14 +12,6 @@ import groupBoardQuery from './graphql/group_board.query.graphql';
import projectBoardQuery from './graphql/project_board.query.graphql';
import listIssuesQuery from './graphql/lists_issues.query.graphql';
/* eslint-disable-next-line @gitlab/require-i18n-strings */
export const AssigneeIdParamValues = ['Any', 'None'];
export const issuableTypes = {
issue: 'issue',
epic: 'epic',
};
export const BoardType = {
project: 'project',
group: 'group',
@ -94,7 +86,7 @@ export const titleQueries = {
[TYPE_ISSUE]: {
mutation: issueSetTitleMutation,
},
[issuableTypes.epic]: {
[TYPE_EPIC]: {
mutation: updateEpicTitleMutation,
},
};
@ -103,7 +95,7 @@ export const subscriptionQueries = {
[TYPE_ISSUE]: {
mutation: issueSetSubscriptionMutation,
},
[issuableTypes.epic]: {
[TYPE_EPIC]: {
mutation: updateEpicSubscriptionMutation,
},
};

View File

@ -1,5 +1,5 @@
query GroupBoardMilestones($fullPath: ID!, $searchTerm: String, $state: MilestoneStateEnum) {
group(fullPath: $fullPath) {
workspace: group(fullPath: $fullPath) {
id
milestones(
includeAncestors: true

View File

@ -1,5 +1,5 @@
query ProjectBoardMilestones($fullPath: ID!, $searchTerm: String, $state: MilestoneStateEnum) {
project(fullPath: $fullPath) {
workspace: project(fullPath: $fullPath) {
id
milestones(
searchTitle: $searchTerm

View File

@ -1,5 +1,7 @@
import groupBoardMembers from '~/boards/graphql/group_board_members.query.graphql';
import projectBoardMembers from '~/boards/graphql/project_board_members.query.graphql';
import groupBoardMilestonesQuery from './graphql/group_board_milestones.query.graphql';
import projectBoardMilestonesQuery from './graphql/project_board_milestones.query.graphql';
import boardLabels from './graphql/board_labels.query.graphql';
export default function issueBoardFilters(apollo, fullPath, isGroupBoard) {
@ -37,8 +39,27 @@ export default function issueBoardFilters(apollo, fullPath, isGroupBoard) {
.then(transformLabels);
};
const fetchMilestones = (searchTerm) => {
const variables = {
fullPath,
searchTerm,
};
const query = isGroupBoard ? groupBoardMilestonesQuery : projectBoardMilestonesQuery;
return apollo
.query({
query,
variables,
})
.then(({ data }) => {
return data.workspace?.milestones.nodes;
});
};
return {
fetchLabels,
fetchUsers,
fetchMilestones,
};
}

View File

@ -286,8 +286,8 @@ export default {
variables,
})
.then(({ data }) => {
const errors = data[boardType]?.errors;
const milestones = data[boardType]?.milestones.nodes;
const errors = data.workspace?.errors;
const milestones = data.workspace?.milestones.nodes;
if (errors?.[0]) {
throw new Error(errors[0]);

View File

@ -1,14 +1,14 @@
import { cloneDeep, pull, union } from 'lodash';
import Vue from 'vue';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { TYPE_EPIC } from '~/issues/constants';
import { s__, __ } from '~/locale';
import { formatIssue } from '../boards_util';
import { issuableTypes } from '../constants';
import * as mutationTypes from './mutation_types';
const updateListItemsCount = ({ state, listId, value }) => {
const list = state.boardLists[listId];
if (state.issuableType === issuableTypes.epic) {
if (state.issuableType === TYPE_EPIC) {
Vue.set(state.boardLists, listId, { ...list, epicsCount: list.epicsCount + value });
} else {
Vue.set(state.boardLists, listId, { ...list });

View File

@ -1,17 +1,10 @@
<script>
import { GlButton } from '@gitlab/ui';
import { __ } from '~/locale';
import Tracking from '~/tracking';
import eventHub from '../event_hub';
import updateMixin from '../mixins/update';
import getIssueStateQuery from '../queries/get_issue_state.query.graphql';
const issuableTypes = {
issue: __('Issue'),
epic: __('Epic'),
incident: __('Incident'),
};
const trackingMixin = Tracking.mixin({ label: 'delete_issue' });
export default {
@ -55,11 +48,6 @@ export default {
isSubmitEnabled() {
return this.formState.title.trim() !== '';
},
typeToShow() {
const { issueState, issuableType } = this;
const type = issueState.issueType ?? issuableType;
return issuableTypes[type];
},
},
methods: {
closeForm() {

View File

@ -1,5 +1,4 @@
import { issuableTypes } from '~/boards/constants';
import { TYPE_ISSUE } from '~/issues/constants';
import { TYPE_EPIC, TYPE_ISSUE } from '~/issues/constants';
import blockingIssuesQuery from './graphql/blocking_issues.query.graphql';
import blockingEpicsQuery from './graphql/blocking_epics.query.graphql';
@ -7,7 +6,7 @@ export const blockingIssuablesQueries = {
[TYPE_ISSUE]: {
query: blockingIssuesQuery,
},
[issuableTypes.epic]: {
[TYPE_EPIC]: {
query: blockingEpicsQuery,
},
};

View File

@ -1,9 +1,8 @@
<script>
import { GlIcon, GlLink, GlPopover, GlLoadingIcon } from '@gitlab/ui';
import { issuableTypes } from '~/boards/constants';
import { TYPENAME_ISSUE, TYPENAME_EPIC } from '~/graphql_shared/constants';
import { convertToGraphQLId } from '~/graphql_shared/utils';
import { TYPE_ISSUE } from '~/issues/constants';
import { TYPE_EPIC, TYPE_ISSUE } from '~/issues/constants';
import { truncate } from '~/lib/utils/text_utility';
import { __, n__, s__, sprintf } from '~/locale';
import { blockingIssuablesQueries } from './constants';
@ -12,12 +11,12 @@ export default {
i18n: {
issuableType: {
[TYPE_ISSUE]: __('issue'),
[issuableTypes.epic]: __('epic'),
[TYPE_EPIC]: __('epic'),
},
},
graphQLIdType: {
[TYPE_ISSUE]: TYPENAME_ISSUE,
[issuableTypes.epic]: TYPENAME_EPIC,
[TYPE_EPIC]: TYPENAME_EPIC,
},
referenceFormatter: {
[TYPE_ISSUE]: (r) => r.split('/')[1],
@ -43,7 +42,7 @@ export default {
type: String,
required: true,
validator(value) {
return [TYPE_ISSUE, issuableTypes.epic].includes(value);
return [TYPE_ISSUE, TYPE_EPIC].includes(value);
},
},
},
@ -88,7 +87,7 @@ export default {
},
computed: {
isEpic() {
return this.issuableType === issuableTypes.epic;
return this.issuableType === TYPE_EPIC;
},
displayedIssuables() {
const { defaultDisplayLimit, referenceFormatter } = this.$options;

View File

@ -5,7 +5,6 @@ import { clearDraft } from '~/lib/utils/autosave';
import Tracking from '~/tracking';
import { ASC } from '~/notes/constants';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { updateCommentState } from '~/work_items/graphql/cache_utils';
import { getWorkItemQuery } from '../../utils';
import createNoteMutation from '../../graphql/notes/create_work_item_note.mutation.graphql';
import { TRACKING_CATEGORY_SHOW, i18n } from '../../constants';
@ -142,7 +141,6 @@ export default {
async updateWorkItem(commentText) {
this.isSubmitting = true;
this.$emit('replying', commentText);
const { queryVariables, fetchByIid } = this;
try {
this.track('add_work_item_comment');
@ -160,7 +158,6 @@ export default {
if (createNoteData.data?.createNote?.errors?.length) {
throw new Error(createNoteData.data?.createNote?.errors[0]);
}
updateCommentState(store, createNoteData, fetchByIid, queryVariables);
},
});
clearDraft(this.autosaveKey);

View File

@ -8,7 +8,14 @@ import ActivityFilter from '~/work_items/components/notes/activity_filter.vue';
import { i18n, DEFAULT_PAGE_SIZE_NOTES } from '~/work_items/constants';
import { ASC, DESC } from '~/notes/constants';
import { getWorkItemNotesQuery } from '~/work_items/utils';
import {
updateCacheAfterCreatingNote,
updateCacheAfterDeletingNote,
} from '~/work_items/graphql/cache_utils';
import WorkItemDiscussion from '~/work_items/components/notes/work_item_discussion.vue';
import workItemNoteCreatedSubscription from '~/work_items/graphql/notes/work_item_note_created.subscription.graphql';
import workItemNoteUpdatedSubscription from '~/work_items/graphql/notes/work_item_note_updated.subscription.graphql';
import workItemNoteDeletedSubscription from '~/work_items/graphql/notes/work_item_note_deleted.subscription.graphql';
import deleteNoteMutation from '../graphql/notes/delete_work_item_notes.mutation.graphql';
import WorkItemAddNote from './notes/work_item_add_note.vue';
@ -137,6 +144,47 @@ export default {
this.fetchMoreNotes();
}
},
subscribeToMore: [
{
document: workItemNoteCreatedSubscription,
updateQuery(previousResult, { subscriptionData }) {
return updateCacheAfterCreatingNote(previousResult, subscriptionData, this.fetchByIid);
},
variables() {
return {
noteableId: this.workItemId,
};
},
skip() {
return !this.workItemId || this.hasNextPage;
},
},
{
document: workItemNoteDeletedSubscription,
updateQuery(previousResult, { subscriptionData }) {
return updateCacheAfterDeletingNote(previousResult, subscriptionData, this.fetchByIid);
},
variables() {
return {
noteableId: this.workItemId,
};
},
skip() {
return !this.workItemId || this.hasNextPage;
},
},
{
document: workItemNoteUpdatedSubscription,
variables() {
return {
noteableId: this.workItemId,
};
},
skip() {
return !this.workItemId;
},
},
],
},
},
methods: {

View File

@ -1,62 +1,100 @@
import { produce } from 'immer';
import { WIDGET_TYPE_NOTES } from '~/work_items/constants';
import { getWorkItemNotesQuery } from '~/work_items/utils';
const isNotesWidget = (widget) => widget.type === WIDGET_TYPE_NOTES;
const getNotesWidgetFromSourceData = (draftData, fetchByIid) => {
return fetchByIid
? draftData.workspace.workItems.nodes[0].widgets.find(isNotesWidget)
: draftData.workItem.widgets.find(isNotesWidget);
};
const updateNotesWidgetDataInDraftData = (draftData, notesWidget, fetchByIid) => {
const noteWidgetIndex = fetchByIid
? draftData.workspace.workItems.nodes[0].widgets.findIndex(isNotesWidget)
: draftData.workItem.widgets.findIndex(isNotesWidget);
if (fetchByIid) {
draftData.workspace.workItems.nodes[0].widgets[noteWidgetIndex] = notesWidget;
} else {
draftData.workItem.widgets[noteWidgetIndex] = notesWidget;
}
};
/**
* Updates the cache manually when adding a main comment
* Work Item note create subscription update query callback
*
* @param store
* @param createNoteData
* @param currentNotes
* @param subscriptionData
* @param fetchByIid
* @param queryVariables
* @param sortOrder
*/
export const updateCommentState = (store, { data: { createNote } }, fetchByIid, queryVariables) => {
const notesQuery = getWorkItemNotesQuery(fetchByIid);
const variables = {
...queryVariables,
pageSize: 100,
};
const sourceData = store.readQuery({
query: notesQuery,
variables,
});
const finalData = produce(sourceData, (draftData) => {
const notesWidget = fetchByIid
? draftData.workspace.workItems.nodes[0].widgets.find(
(widget) => widget.type === WIDGET_TYPE_NOTES,
)
: draftData.workItem.widgets.find((widget) => widget.type === WIDGET_TYPE_NOTES);
export const updateCacheAfterCreatingNote = (currentNotes, subscriptionData, fetchByIid) => {
if (!subscriptionData.data?.workItemNoteCreated) {
return currentNotes;
}
const newNote = subscriptionData.data.workItemNoteCreated;
// as notes are currently sorted/reversed on the frontend rather than in the query
// we only ever push.
// const arrayPushMethod = sortOrder === ASC ? 'push' : 'unshift';
const arrayPushMethod = 'push';
return produce(currentNotes, (draftData) => {
const notesWidget = getNotesWidgetFromSourceData(draftData, fetchByIid);
// manual update of cache with a completely new discussion
if (createNote.note.discussion.notes.nodes.length === 1) {
notesWidget.discussions.nodes[arrayPushMethod]({
id: createNote.note.discussion.id,
notes: {
nodes: createNote.note.discussion.notes.nodes,
__typename: 'NoteConnection',
},
// eslint-disable-next-line @gitlab/require-i18n-strings
__typename: 'Discussion',
});
if (!notesWidget.discussions) {
return;
}
if (fetchByIid) {
draftData.workspace.workItems.nodes[0].widgets[6] = notesWidget;
} else {
draftData.workItem.widgets[6] = notesWidget;
}
});
const discussion = notesWidget.discussions.nodes.find((d) => d.id === newNote.discussion.id);
store.writeQuery({
query: notesQuery,
variables,
data: finalData,
// handle the case where discussion already exists - we don't need to do anything, update will happen automatically
if (discussion) {
return;
}
notesWidget.discussions.nodes.push(newNote.discussion);
updateNotesWidgetDataInDraftData(draftData, notesWidget, fetchByIid);
});
};
/**
* Work Item note delete subscription update query callback
*
* @param currentNotes
* @param subscriptionData
* @param fetchByIid
*/
export const updateCacheAfterDeletingNote = (currentNotes, subscriptionData, fetchByIid) => {
if (!subscriptionData.data?.workItemNoteDeleted) {
return currentNotes;
}
const deletedNote = subscriptionData.data.workItemNoteDeleted;
const { id, discussionId, lastDiscussionNote } = deletedNote;
return produce(currentNotes, (draftData) => {
const notesWidget = getNotesWidgetFromSourceData(draftData, fetchByIid);
if (!notesWidget.discussions) {
return;
}
const discussionIndex = notesWidget.discussions.nodes.findIndex(
(discussion) => discussion.id === discussionId,
);
if (discussionIndex === -1) {
return;
}
if (lastDiscussionNote) {
notesWidget.discussions.nodes.splice(discussionIndex, 1);
} else {
const deletedThreadDiscussion = notesWidget.discussions.nodes[discussionIndex];
const deletedThreadIndex = deletedThreadDiscussion.notes.nodes.findIndex(
(note) => note.id === id,
);
deletedThreadDiscussion.notes.nodes.splice(deletedThreadIndex, 1);
notesWidget.discussions.nodes[discussionIndex] = deletedThreadDiscussion;
}
updateNotesWidgetDataInDraftData(draftData, notesWidget, fetchByIid);
});
};

View File

@ -27,15 +27,11 @@ class UserSyncedAttributesMetadata < ApplicationRecord
class << self
def syncable_attributes
return SYNCABLE_ATTRIBUTES if sync_name?
SYNCABLE_ATTRIBUTES - %i[name]
end
private
def sync_name?
Gitlab.config.ldap.sync_name
if Gitlab.config.ldap.enabled && !Gitlab.config.ldap.sync_name
SYNCABLE_ATTRIBUTES - %i[name]
else
SYNCABLE_ATTRIBUTES
end
end
end

View File

@ -63,6 +63,8 @@ Adds a member role to a group.
POST /groups/:id/member_roles
```
To add a member role to a group, the group must be at root-level (have no parent group).
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the group](rest/index.md#namespaced-path-encoding) owned by the authenticated user. |

View File

@ -124,7 +124,7 @@ lint:
# Write the code coverage report to gl-code-quality-report.json
# and print linting issues to stdout in the format: path/to/file:line description
# remove `--issues-exit-code 0` or set to non-zero to fail the job if linting issues are detected
- golangci-lint run --issues-exit-code 0 --out-format code-climate | tee gl-code-quality-report.json | jq -r '.[] | "\(.location.path):\(.location.lines.begin) \(.description)"'
- golangci-lint run --issues-exit-code 0 --print-issued-lines=false --out-format code-climate:gl-code-quality-report.json,line-number
artifacts:
reports:
codequality: gl-code-quality-report.json

View File

@ -43,7 +43,7 @@ Before adding a new cop to enforce a given style, make sure to discuss it with y
We maintain cops across several Ruby code bases, and not all of them are
specific to the GitLab application.
When creating a new cop that could be applied to multiple applications, we encourage you
to add it to our [`gitlab-styles`](https://gitlab.com/gitlab-org/gitlab-styles) gem.
to add it to our [`gitlab-styles`](https://gitlab.com/gitlab-org/ruby/gems/gitlab-styles) gem.
If the cop targets rules that only apply to the main GitLab application,
it should be added to [GitLab](https://gitlab.com/gitlab-org/gitlab) instead.

View File

@ -360,10 +360,10 @@ Implemented using Redis methods [PFADD](https://redis.io/commands/pfadd/) and [P
1. Use one of the following methods to track the event:
- In the controller using the `RedisTracking` module and the following format:
- In the controller using the `ProductAnalyticsTracking` module and the following format:
```ruby
track_event(*controller_actions, name:, conditions: nil, destinations: [:redis_hll], &block)
track_custom_event(*controller_actions, name:, action:, label:, conditions: nil, destinations: [:redis_hll], &block)
```
Arguments:
@ -371,6 +371,8 @@ Implemented using Redis methods [PFADD](https://redis.io/commands/pfadd/) and [P
- `controller_actions`: the controller actions to track.
- `name`: the event name.
- `conditions`: optional custom conditions. Uses the same format as Rails callbacks.
- `action`: optional action name for the triggered event. See [event schema](../snowplow/index.md#event-schema) for more details.
- `label`: optional label for the triggered event. See [event schema](../snowplow/index.md#event-schema) for more details.
- `destinations`: optional list of destinations. Currently supports `:redis_hll` and `:snowplow`. Default: `:redis_hll`.
- `&block`: optional block that computes and returns the `custom_id` that we want to track. This overrides the `visitor_id`.
@ -379,10 +381,14 @@ Implemented using Redis methods [PFADD](https://redis.io/commands/pfadd/) and [P
```ruby
# controller
class ProjectsController < Projects::ApplicationController
include RedisTracking
include ProductAnalyticsTracking
skip_before_action :authenticate_user!, only: :show
track_event :index, :show, name: 'users_visiting_projects'
track_custom_event :index, :show,
name: 'users_visiting_projects',
action: 'user_perform_visit',
label: 'redis_hll_counters.users_visiting_project_monthly',
destinations: %i[redis_hll snowplow]
def index
render html: 'index'

View File

@ -22,10 +22,7 @@ For a demo of Group Sync using Azure, see [Demo: SAML Group Sync](https://youtu.
## Configure SAML Group Sync
NOTE:
You must include the SAML configuration block on all Sidekiq nodes in addition to Rails application nodes if you:
- Use SAML Group Sync.
- Have multiple GitLab nodes, for example in a distributed or highly available architecture.
You must include the SAML configuration block on all Sidekiq nodes in addition to Rails application nodes if you use SAML Group Sync and have multiple GitLab nodes, for example in a distributed or highly available architecture.
NOTE:
SAML Group Sync is only supported for the [SAML provider named `saml`](../../../integration/saml.md#configure-gitlab-to-use-multiple-saml-idps).

View File

@ -205,3 +205,47 @@ The `afterDate` filter is not supported. Please use `beforeDate` or `inDateRange
}
}
```
## Raw data export
Exporting the raw event data from the underlying storage engine can help you debug issues and create datasets for data analysis.
### Export raw data with Cube queries
You can [query the raw data with the REST API](../../api/product_analytics.md#send-query-request-to-cube) and convert the JSON output to any required format.
You can export the raw data for specific dimensions by passing a list of dimensions to the `dimensions` key. For example, the following query outputs the raw data for the attributes listed:
```json
POST /api/v4/projects/PROJECT_ID/product_analytics/request/load?queryType=multi
{
"dimensions": [
"TrackedEvents.docEncoding",
"TrackedEvents.docHost",
"TrackedEvents.docPath",
"TrackedEvents.docSearch",
"TrackedEvents.eventType",
"TrackedEvents.idsAjsAnonymousId",
"TrackedEvents.localTzOffset",
"TrackedEvents.pageTitle",
"TrackedEvents.src",
"TrackedEvents.utcTime",
"TrackedEvents.vpSize"
],
"order": {
"TrackedEvents.apiKey": "asc"
}
}
```
If the request is successful, the returned JSON includes an array of rows of results.
### Caveats
Because Cube acts as an abstraction layer between the raw data and the API, the exported raw data has some caveats:
- Data is grouped by the selected dimensions. Therefore, the exported data might be incomplete unless you include both `utcTime` and `userAnonymousId` in the selected dimensions.
- Data is limited to 10,000 rows by default, but you can increase the limit to a maximum of 50,000 rows. If your dataset has more than 50,000 rows, you must paginate through the results by using the `limit` and `offset` parameters, as shown in the sketch after this list.
- Data is always returned in JSON format. If you need it in a different format, you must convert the JSON to the required format with a scripting language of your choice.
- [Issue 391683](https://gitlab.com/gitlab-org/gitlab/-/issues/391683) tracks the implementation of a more scalable export solution.
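For illustration, here is a minimal sketch of paginating through the raw export with the `limit` and `offset` parameters. It assumes the endpoint and request body from the example query above; the `exportRawEvents` helper, the `PRIVATE-TOKEN` authentication header, and the parsing of the response shape are illustrative assumptions, not part of the documented API:

```javascript
// Hypothetical sketch: page through the raw event export using limit/offset.
// Assumes the Cube load endpoint and JSON query body shown in the example above.
async function exportRawEvents(projectId, token, dimensions, pageSize = 10000) {
  const rows = [];
  for (let offset = 0; ; offset += pageSize) {
    const response = await fetch(
      `/api/v4/projects/${projectId}/product_analytics/request/load?queryType=multi`,
      {
        method: 'POST',
        headers: { 'Content-Type': 'application/json', 'PRIVATE-TOKEN': token },
        body: JSON.stringify({
          dimensions,
          // Keep a stable order so offset-based pagination is deterministic.
          order: { 'TrackedEvents.utcTime': 'asc' },
          limit: pageSize,
          offset,
        }),
      },
    );
    // Response shape assumed from the example output: an array of result rows.
    const { results } = await response.json();
    const page = results?.[0]?.data ?? [];
    rows.push(...page);
    if (page.length < pageSize) break; // last page reached
  }
  return rows;
}
```

A page size below the 50,000-row maximum keeps each request within the documented limit while the loop collects the full dataset.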

View File

@ -19,7 +19,7 @@ module Gitlab
[429, { 'Content-Type' => 'text/plain' }.merge(throttled_headers), [Gitlab::Throttle.rate_limiting_response_text]]
end
rack_attack.cache.store = Gitlab::RackAttack::InstrumentedCacheStore.new
rack_attack.cache.store = cache_store
# Configure the throttles
configure_throttles(rack_attack)
@ -27,6 +27,14 @@ module Gitlab
configure_user_allowlist
end
def self.cache_store
if ENV['GITLAB_RACK_ATTACK_NEW_STORE'] == '1'
Gitlab::RackAttack::Store.new
else
Gitlab::RackAttack::InstrumentedCacheStore.new
end
end
# Rate Limit HTTP headers are not standardized anywhere. This is the latest
# draft submitted to IETF:
# https://github.com/ietf-wg-httpapi/ratelimit-headers/blob/main/draft-ietf-httpapi-ratelimit-headers.md

View File

@ -0,0 +1,57 @@
# frozen_string_literal: true
module Gitlab
module RackAttack
class Store
InvalidAmount = Class.new(StandardError)
# The increment method gets called very often. The implementation below
# aims to minimize the number of Redis calls we make.
def increment(key, amount = 1, options = {})
# Our code below that prevents calling EXPIRE after every INCR assumes
# we always increment by 1. This is true in Rack::Attack as of v6.6.1.
# This guard should alert us if Rack::Attack changes its behavior in a
# future version.
raise InvalidAmount unless amount == 1
with do |redis|
key = namespace(key)
new_value = redis.incr(key)
expires_in = options[:expires_in]
redis.expire(key, expires_in) if new_value == 1 && expires_in
new_value
end
end
def read(key, _options = {})
with { |redis| redis.get(namespace(key)) }
end
def write(key, value, options = {})
with { |redis| redis.set(namespace(key), value, ex: options[:expires_in]) }
end
def delete(key, _options = {})
with { |redis| redis.del(namespace(key)) }
end
private
def with(&block)
# rubocop: disable CodeReuse/ActiveRecord
Gitlab::Redis::RateLimiting.with(&block)
# rubocop: enable CodeReuse/ActiveRecord
rescue ::Redis::BaseConnectionError
# Following the example of
# https://github.com/rack/rack-attack/blob/v6.6.1/lib/rack/attack/store_proxy/redis_proxy.rb#L61-L65,
# do not raise an error if we cannot connect to Redis. If
# Redis::RateLimiting is unavailable it should not take the site down.
nil
end
def namespace(key)
"#{Gitlab::Redis::Cache::CACHE_NAMESPACE}:#{key}"
end
end
end
end

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
namespace :gitlab do
namespace :db do
namespace :decomposition do
desc 'Check if PostgreSQL max_connections needs to be increased'
task connection_status: :environment do
if Gitlab::Database.database_base_models.has_key?(:ci)
puts "GitLab database already running on two connections"
next
end
sql = <<~SQL
select q1.active, q2.max from
(select count(*) as active from pg_stat_activity) q1,
(select setting::int as max from pg_settings where name='max_connections') q2
SQL
active, max = ApplicationRecord.connection.select_one(sql).values
puts "Currently using #{active} connections out of #{max} max_connections,"
if active / max.to_f > 0.5
puts <<~ADVISE_INCREASE
which may run out when you switch to two database connections.
Consider increasing PostgreSQL 'max_connections' setting.
Depending on the installation method, there are different ways to
increase that setting. Please consult the GitLab documentation.
ADVISE_INCREASE
else
puts "which is enough for running GitLab using two database connections."
end
end
end
end
end

View File

@ -32419,75 +32419,18 @@ msgstr ""
msgid "Proceed"
msgstr ""
msgid "Product Analytics|Add the NPM package to your package.json using your preferred package manager:"
msgstr ""
msgid "Product Analytics|Add the script to the page and assign the client SDK to window:"
msgstr ""
msgid "Product Analytics|Analyze your product with Product Analytics"
msgstr ""
msgid "Product Analytics|Back to dashboards"
msgstr ""
msgid "Product Analytics|Creating your product analytics instance..."
msgstr ""
msgid "Product Analytics|Details on how to configure product analytics to collect data."
msgstr ""
msgid "Product Analytics|For the product analytics dashboard to start showing you some data, you need to add the analytics tracking code to your project."
msgstr ""
msgid "Product Analytics|Identifies the sender of tracking events"
msgstr ""
msgid "Product Analytics|Import the new package into your JS code:"
msgstr ""
msgid "Product Analytics|Instrument your application"
msgstr ""
msgid "Product Analytics|Instrumentation details"
msgstr ""
msgid "Product Analytics|SDK App ID"
msgstr ""
msgid "Product Analytics|SDK Host"
msgstr ""
msgid "Product Analytics|Set up Product Analytics to track how your product is performing. Combine it with your GitLab data to better understand where you can improve your product and development processes."
msgstr ""
msgid "Product Analytics|Set up product analytics"
msgstr ""
msgid "Product Analytics|Steps to add product analytics as a CommonJS module"
msgstr ""
msgid "Product Analytics|Steps to add product analytics as a HTML script tag"
msgstr ""
msgid "Product Analytics|Steps to add product analytics as an ESM module"
msgstr ""
msgid "Product Analytics|The host to send all tracking events to"
msgstr ""
msgid "Product Analytics|This might take a while, feel free to navigate away from this page and come back later."
msgstr ""
msgid "Product Analytics|To instrument your application, select one of the options below. After an option has been instrumented and data is being collected, this page will progress to the next step."
msgstr ""
msgid "Product analytics"
msgstr ""
msgid "ProductAnalytics|Add another dimension"
msgstr ""
msgid "ProductAnalytics|Add the NPM package to your package.json using your preferred package manager:"
msgstr ""
msgid "ProductAnalytics|Add the script to the page and assign the client SDK to window:"
msgstr ""
msgid "ProductAnalytics|Add to Dashboard"
msgstr ""
@ -32515,6 +32458,9 @@ msgstr ""
msgid "ProductAnalytics|Analytics dashboards"
msgstr ""
msgid "ProductAnalytics|Analyze your product with Product Analytics"
msgstr ""
msgid "ProductAnalytics|Any Click on elements"
msgstr ""
@ -32530,6 +32476,9 @@ msgstr ""
msgid "ProductAnalytics|Average per User"
msgstr ""
msgid "ProductAnalytics|Back to dashboards"
msgstr ""
msgid "ProductAnalytics|Browser"
msgstr ""
@ -32566,6 +32515,9 @@ msgstr ""
msgid "ProductAnalytics|Compares pageviews of all pages against each other"
msgstr ""
msgid "ProductAnalytics|Creating your product analytics instance..."
msgstr ""
msgid "ProductAnalytics|Dashboards are created by editing the projects dashboard files."
msgstr ""
@ -32575,6 +32527,9 @@ msgstr ""
msgid "ProductAnalytics|Data Table"
msgstr ""
msgid "ProductAnalytics|Details on how to configure product analytics to collect data."
msgstr ""
msgid "ProductAnalytics|Dimensions"
msgstr ""
@ -32599,6 +32554,9 @@ msgstr ""
msgid "ProductAnalytics|Feature usage"
msgstr ""
msgid "ProductAnalytics|For the product analytics dashboard to start showing you some data, you need to add the analytics tracking code to your project."
msgstr ""
msgid "ProductAnalytics|Go back"
msgstr ""
@ -32611,6 +32569,18 @@ msgstr ""
msgid "ProductAnalytics|How often sesions are repeated"
msgstr ""
msgid "ProductAnalytics|Identifies the sender of tracking events"
msgstr ""
msgid "ProductAnalytics|Import the new package into your JS code:"
msgstr ""
msgid "ProductAnalytics|Instrument your application"
msgstr ""
msgid "ProductAnalytics|Instrumentation details"
msgstr ""
msgid "ProductAnalytics|Language"
msgstr ""
@ -32671,15 +32641,45 @@ msgstr ""
msgid "ProductAnalytics|Resulting Data"
msgstr ""
msgid "ProductAnalytics|SDK App ID"
msgstr ""
msgid "ProductAnalytics|SDK Host"
msgstr ""
msgid "ProductAnalytics|Sessions"
msgstr ""
msgid "ProductAnalytics|Set up Product Analytics to track how your product is performing. Combine it with your GitLab data to better understand where you can improve your product and development processes."
msgstr ""
msgid "ProductAnalytics|Set up product analytics"
msgstr ""
msgid "ProductAnalytics|Single Statistic"
msgstr ""
msgid "ProductAnalytics|Steps to add product analytics as a CommonJS module"
msgstr ""
msgid "ProductAnalytics|Steps to add product analytics as a HTML script tag"
msgstr ""
msgid "ProductAnalytics|Steps to add product analytics as an ESM module"
msgstr ""
msgid "ProductAnalytics|The host to send all tracking events to"
msgstr ""
msgid "ProductAnalytics|There is no data for this type of chart currently. Please see the Setup tab if you have not configured the product analytics tool already."
msgstr ""
msgid "ProductAnalytics|This might take a while, feel free to navigate away from this page and come back later."
msgstr ""
msgid "ProductAnalytics|To instrument your application, select one of the options below. After an option has been instrumented and data is being collected, this page will progress to the next step."
msgstr ""
msgid "ProductAnalytics|Track specific features"
msgstr ""

View File

@ -92,6 +92,7 @@ export default function createComponent({
boardItems: [issue],
canAdminList: true,
boardId: 'gid://gitlab/Board/1',
filterParams: {},
...componentProps,
},
provide: {

View File

@ -28,6 +28,7 @@ describe('BoardApp', () => {
store,
provide: {
initialBoardId: 'gid://gitlab/Board/1',
initialFilterParams: {},
},
});
};

View File

@ -36,6 +36,7 @@ describe('Board Column Component', () => {
propsData: {
list: listMock,
boardId: 'gid://gitlab/Board/1',
filters: {},
},
provide: {
isApolloBoard: false,

View File

@ -61,6 +61,7 @@ describe('BoardContent', () => {
apolloProvider: fakeApollo,
propsData: {
boardId: 'gid://gitlab/Board/1',
filterParams: {},
...props,
},
provide: {

View File

@ -55,10 +55,10 @@ describe('BoardFilteredSearch', () => {
},
];
const createComponent = ({ initialFilterParams = {}, props = {} } = {}) => {
const createComponent = ({ initialFilterParams = {}, props = {}, provide = {} } = {}) => {
store = createStore();
wrapper = shallowMount(BoardFilteredSearch, {
provide: { initialFilterParams, fullPath: '' },
provide: { initialFilterParams, fullPath: '', isApolloBoard: false, ...provide },
store,
propsData: {
...props,
@ -191,4 +191,24 @@ describe('BoardFilteredSearch', () => {
]);
});
});
describe('when Apollo boards FF is on', () => {
beforeEach(() => {
createComponent({ provide: { isApolloBoard: true } });
});
it('emits setFilters and updates URL when onFilter is emitted', () => {
jest.spyOn(urlUtility, 'updateHistory');
findFilteredSearch().vm.$emit('onFilter', [{ value: { data: '' } }]);
expect(urlUtility.updateHistory).toHaveBeenCalledWith({
title: '',
replace: true,
url: 'http://test.host/',
});
expect(wrapper.emitted('setFilters')).toHaveLength(1);
});
});
});

View File

@ -68,6 +68,7 @@ describe('Board List Header Component', () => {
store,
propsData: {
list: listMock,
filterParams: {},
},
provide: {
boardId,

View File

@ -96,6 +96,11 @@ describe('BoardTopBar', () => {
it('does not render BoardAddNewColumnTrigger component', () => {
expect(wrapper.findComponent(BoardAddNewColumnTrigger).exists()).toBe(false);
});
it('emits setFilters when setFilters is emitted by filtered search', () => {
wrapper.findComponent(IssueBoardFilteredSearch).vm.$emit('setFilters');
expect(wrapper.emitted('setFilters')).toHaveLength(1);
});
});
describe('when user can admin list', () => {

View File

@ -2,10 +2,10 @@ import { orderBy } from 'lodash';
import { shallowMount } from '@vue/test-utils';
import BoardFilteredSearch from 'ee_else_ce/boards/components/board_filtered_search.vue';
import IssueBoardFilteredSpec from '~/boards/components/issue_board_filtered_search.vue';
import issueBoardFilters from '~/boards/issue_board_filters';
import issueBoardFilters from 'ee_else_ce/boards/issue_board_filters';
import { mockTokens } from '../mock_data';
jest.mock('~/boards/issue_board_filters');
jest.mock('ee_else_ce/boards/issue_board_filters');
describe('IssueBoardFilter', () => {
let wrapper;
@ -14,6 +14,9 @@ describe('IssueBoardFilter', () => {
const createComponent = ({ isSignedIn = false } = {}) => {
wrapper = shallowMount(IssueBoardFilteredSpec, {
propsData: {
boardId: 'gid://gitlab/Board/1',
},
provide: {
isSignedIn,
releasesFetchPath: '/releases',
@ -48,6 +51,11 @@ describe('IssueBoardFilter', () => {
expect(findBoardsFilteredSearch().exists()).toBe(true);
});
it('emits setFilters when setFilters is emitted', () => {
findBoardsFilteredSearch().vm.$emit('setFilters');
expect(wrapper.emitted('setFilters')).toHaveLength(1);
});
it.each`
isSignedIn
${true}

View File

@ -336,7 +336,7 @@ describe('fetchLists', () => {
describe('fetchMilestones', () => {
const queryResponse = {
data: {
project: {
workspace: {
milestones: {
nodes: mockMilestones,
},
@ -346,7 +346,7 @@ describe('fetchMilestones', () => {
const queryErrors = {
data: {
project: {
workspace: {
errors: ['You cannot view these milestones'],
milestones: {},
},

View File

@ -7,8 +7,7 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import IssuableBlockedIcon from '~/vue_shared/components/issuable_blocked_icon/issuable_blocked_icon.vue';
import { blockingIssuablesQueries } from '~/vue_shared/components/issuable_blocked_icon/constants';
import { issuableTypes } from '~/boards/constants';
import { TYPE_ISSUE } from '~/issues/constants';
import { TYPE_EPIC, TYPE_ISSUE } from '~/issues/constants';
import { truncate } from '~/lib/utils/text_utility';
import {
mockIssue,
@ -121,9 +120,9 @@ describe('IssuableBlockedIcon', () => {
};
it.each`
mockIssuable | issuableType | expectedIcon
${mockIssue} | ${TYPE_ISSUE} | ${'issue-block'}
${mockEpic} | ${issuableTypes.epic} | ${'entity-blocked'}
mockIssuable | issuableType | expectedIcon
${mockIssue} | ${TYPE_ISSUE} | ${'issue-block'}
${mockEpic} | ${TYPE_EPIC} | ${'entity-blocked'}
`(
'should render blocked icon for $issuableType',
({ mockIssuable, issuableType, expectedIcon }) => {
@ -153,9 +152,9 @@ describe('IssuableBlockedIcon', () => {
describe('on mouseenter on blocked icon', () => {
it.each`
item | issuableType | mockBlockingIssuable | issuableItem | blockingIssuablesSpy
${mockBlockedIssue1} | ${TYPE_ISSUE} | ${mockBlockingIssue1} | ${mockIssue} | ${jest.fn().mockResolvedValue(mockBlockingIssuablesResponse1)}
${mockBlockedEpic1} | ${issuableTypes.epic} | ${mockBlockingEpic1} | ${mockEpic} | ${jest.fn().mockResolvedValue(mockBlockingEpicIssuablesResponse1)}
item | issuableType | mockBlockingIssuable | issuableItem | blockingIssuablesSpy
${mockBlockedIssue1} | ${TYPE_ISSUE} | ${mockBlockingIssue1} | ${mockIssue} | ${jest.fn().mockResolvedValue(mockBlockingIssuablesResponse1)}
${mockBlockedEpic1} | ${TYPE_EPIC} | ${mockBlockingEpic1} | ${mockEpic} | ${jest.fn().mockResolvedValue(mockBlockingEpicIssuablesResponse1)}
`(
'should query for blocking issuables and render the result for $issuableType',
async ({ item, issuableType, issuableItem, mockBlockingIssuable, blockingIssuablesSpy }) => {

View File

@ -13,6 +13,9 @@ import ActivityFilter from '~/work_items/components/notes/activity_filter.vue';
import workItemNotesQuery from '~/work_items/graphql/notes/work_item_notes.query.graphql';
import workItemNotesByIidQuery from '~/work_items/graphql/notes/work_item_notes_by_iid.query.graphql';
import deleteWorkItemNoteMutation from '~/work_items/graphql/notes/delete_work_item_notes.mutation.graphql';
import workItemNoteCreatedSubscription from '~/work_items/graphql/notes/work_item_note_created.subscription.graphql';
import workItemNoteUpdatedSubscription from '~/work_items/graphql/notes/work_item_note_updated.subscription.graphql';
import workItemNoteDeletedSubscription from '~/work_items/graphql/notes/work_item_note_deleted.subscription.graphql';
import { DEFAULT_PAGE_SIZE_NOTES, WIDGET_TYPE_NOTES } from '~/work_items/constants';
import { ASC, DESC } from '~/notes/constants';
import {
@ -21,6 +24,9 @@ import {
mockWorkItemNotesByIidResponse,
mockMoreWorkItemNotesResponse,
mockWorkItemNotesResponseWithComments,
workItemNotesCreateSubscriptionResponse,
workItemNotesUpdateSubscriptionResponse,
workItemNotesDeleteSubscriptionResponse,
} from '../mock_data';
const mockWorkItemId = workItemQueryResponse.data.workItem.id;
@ -73,6 +79,15 @@ describe('WorkItemNotes component', () => {
const deleteWorkItemNoteMutationSuccessHandler = jest.fn().mockResolvedValue({
data: { destroyNote: { note: null, __typename: 'DestroyNote' } },
});
const notesCreateSubscriptionHandler = jest
.fn()
.mockResolvedValue(workItemNotesCreateSubscriptionResponse);
const notesUpdateSubscriptionHandler = jest
.fn()
.mockResolvedValue(workItemNotesUpdateSubscriptionResponse);
const notesDeleteSubscriptionHandler = jest
.fn()
.mockResolvedValue(workItemNotesDeleteSubscriptionResponse);
const errorHandler = jest.fn().mockRejectedValue('Houston, we have a problem');
const createComponent = ({
@ -86,6 +101,9 @@ describe('WorkItemNotes component', () => {
[workItemNotesQuery, defaultWorkItemNotesQueryHandler],
[workItemNotesByIidQuery, workItemNotesByIidQueryHandler],
[deleteWorkItemNoteMutation, deleteWINoteMutationHandler],
[workItemNoteCreatedSubscription, notesCreateSubscriptionHandler],
[workItemNoteUpdatedSubscription, notesUpdateSubscriptionHandler],
[workItemNoteDeletedSubscription, notesDeleteSubscriptionHandler],
]),
propsData: {
workItemId,
@ -334,4 +352,31 @@ describe('WorkItemNotes component', () => {
['Something went wrong when deleting a comment. Please try again'],
]);
});
describe('Notes subscriptions', () => {
beforeEach(async () => {
createComponent({
defaultWorkItemNotesQueryHandler: workItemNotesWithCommentsQueryHandler,
});
await waitForPromises();
});
it('has create notes subscription', () => {
expect(notesCreateSubscriptionHandler).toHaveBeenCalledWith({
noteableId: mockWorkItemId,
});
});
it('has delete notes subscription', () => {
expect(notesDeleteSubscriptionHandler).toHaveBeenCalledWith({
noteableId: mockWorkItemId,
});
});
it('has update notes subscription', () => {
expect(notesUpdateSubscriptionHandler).toHaveBeenCalledWith({
noteableId: mockWorkItemId,
});
});
});
});

View File

@ -2444,3 +2444,126 @@ export const mockWorkItemNotesResponseWithComments = {
},
},
};
export const workItemNotesCreateSubscriptionResponse = {
data: {
workItemNoteCreated: {
id: 'gid://gitlab/WeightNote/0f2f195ec0d1ef95ee9d5b10446b8e96a7d81864',
body: 'changed weight to **89**',
bodyHtml: '<p dir="auto">changed weight to <strong>89</strong></p>',
systemNoteIconName: 'weight',
createdAt: '2022-11-25T07:16:20Z',
lastEditedAt: null,
lastEditedBy: null,
system: true,
internal: false,
discussion: {
id: 'gid://gitlab/Discussion/8bbc4890b6ff0f2cde93a5a0947cd2b8a13d3b6e',
notes: {
nodes: [
{
id: 'gid://gitlab/WeightNote/0f2f195ec0d1ef95ee9d5b10446b8e96a9881864',
body: 'changed weight to **89**',
bodyHtml: '<p dir="auto">changed weight to <strong>89</strong></p>',
systemNoteIconName: 'weight',
createdAt: '2022-11-25T07:16:20Z',
lastEditedAt: null,
lastEditedBy: null,
system: true,
internal: false,
discussion: {
id: 'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da12723560987',
},
userPermissions: {
adminNote: false,
awardEmoji: true,
readNote: true,
createNote: true,
resolveNote: true,
repositionNote: true,
__typename: 'NotePermissions',
},
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
id: 'gid://gitlab/User/1',
name: 'Administrator',
username: 'root',
webUrl: 'http://127.0.0.1:3000/root',
__typename: 'UserCore',
},
__typename: 'Note',
},
],
},
},
userPermissions: {
adminNote: false,
awardEmoji: true,
readNote: true,
createNote: true,
resolveNote: true,
repositionNote: true,
__typename: 'NotePermissions',
},
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
id: 'gid://gitlab/User/1',
name: 'Administrator',
username: 'root',
webUrl: 'http://127.0.0.1:3000/root',
__typename: 'UserCore',
},
__typename: 'Note',
},
},
};
export const workItemNotesUpdateSubscriptionResponse = {
data: {
workItemNoteUpdated: {
id: 'gid://gitlab/Note/0f2f195ec0d1ef95ee9d5b10446b8e96a9883894',
body: 'changed title',
bodyHtml: '<p dir="auto">changed title<strong>89</strong></p>',
systemNoteIconName: 'pencil',
createdAt: '2022-11-25T07:16:20Z',
lastEditedAt: null,
lastEditedBy: null,
system: true,
internal: false,
discussion: {
id: 'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da12723560987',
},
userPermissions: {
adminNote: false,
awardEmoji: true,
readNote: true,
createNote: true,
resolveNote: true,
repositionNote: true,
__typename: 'NotePermissions',
},
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
id: 'gid://gitlab/User/1',
name: 'Administrator',
username: 'root',
webUrl: 'http://127.0.0.1:3000/root',
__typename: 'UserCore',
},
__typename: 'Note',
},
},
};
export const workItemNotesDeleteSubscriptionResponse = {
data: {
workItemNoteDeleted: {
id: 'gid://gitlab/DiscussionNote/235',
discussionId: 'gid://gitlab/Discussion/2bb1162fd0d39297d1a68fdd7d4083d3780af0f3',
lastDiscussionNote: false,
},
},
};

View File

@ -420,6 +420,7 @@ RSpec.describe Gitlab::Auth::OAuth::User, feature_category: :authentication_and_
context "and LDAP user has an account already" do
context 'when sync_name is disabled' do
before do
allow(Gitlab.config.ldap).to receive(:enabled).and_return(true)
allow(Gitlab.config.ldap).to receive(:sync_name).and_return(false)
end

View File

@ -0,0 +1,113 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::RackAttack::Store, :clean_gitlab_redis_rate_limiting, feature_category: :scalability do
let(:store) { described_class.new }
let(:key) { 'foobar' }
let(:namespaced_key) { "cache:gitlab:#{key}" }
def with_redis(&block)
Gitlab::Redis::RateLimiting.with(&block)
end
describe '#increment' do
it 'increments without expiry' do
5.times do |i|
expect(store.increment(key, 1)).to eq(i + 1)
with_redis do |redis|
expect(redis.get(namespaced_key).to_i).to eq(i + 1)
expect(redis.ttl(namespaced_key)).to eq(-1)
end
end
end
it 'rejects amounts other than 1' do
expect { store.increment(key, 2) }.to raise_exception(described_class::InvalidAmount)
end
context 'with expiry' do
it 'increments and sets expiry' do
5.times do |i|
expect(store.increment(key, 1, expires_in: 456)).to eq(i + 1)
with_redis do |redis|
expect(redis.get(namespaced_key).to_i).to eq(i + 1)
expect(redis.ttl(namespaced_key)).to be_within(10).of(456)
end
end
end
end
end
describe '#read' do
subject { store.read(key) }
it 'reads the namespaced key' do
with_redis { |r| r.set(namespaced_key, '123') }
expect(subject).to eq('123')
end
end
describe '#write' do
subject { store.write(key, '123', options) }
let(:options) { {} }
it 'sets the key' do
subject
with_redis do |redis|
expect(redis.get(namespaced_key)).to eq('123')
expect(redis.ttl(namespaced_key)).to eq(-1)
end
end
context 'with expiry' do
let(:options) { { expires_in: 456 } }
it 'sets the key with expiry' do
subject
with_redis do |redis|
expect(redis.get(namespaced_key)).to eq('123')
expect(redis.ttl(namespaced_key)).to be_within(10).of(456)
end
end
end
end
describe '#delete' do
subject { store.delete(key) }
it { expect(subject).to eq(0) }
context 'when the key exists' do
before do
with_redis { |r| r.set(namespaced_key, '123') }
end
it { expect(subject).to eq(1) }
end
end
describe '#with' do
subject { store.send(:with, &:ping) }
it { expect(subject).to eq('PONG') }
context 'when redis is unavailable' do
before do
broken_redis = Redis.new(
url: 'redis://127.0.0.0:0',
instrumentation_class: Gitlab::Redis::RateLimiting.instrumentation_class
)
allow(Gitlab::Redis::RateLimiting).to receive(:with).and_yield(broken_redis)
end
it { expect(subject).to eq(nil) }
end
end
end

View File

@ -320,4 +320,18 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
end
end
end
describe '.cache_store' do
subject { described_class.cache_store }
it { expect(subject).to be_a(Gitlab::RackAttack::InstrumentedCacheStore) }
context 'when GITLAB_RACK_ATTACK_NEW_STORE is set' do
before do
stub_env('GITLAB_RACK_ATTACK_NEW_STORE', '1')
end
it { expect(subject).to be_a(Gitlab::RackAttack::Store) }
end
end
end

View File

@ -51,7 +51,7 @@ RSpec.describe ImportExportUpload do
let(:after_commit_callbacks) { described_class._commit_callbacks.select { |cb| cb.kind == :after } }
def find_callback(callbacks, key)
callbacks.find { |cb| cb.instance_variable_get(:@key) == key }
callbacks.find { |cb| cb.filter == key }
end
it 'export file is stored in after_commit callback' do

View File

@ -0,0 +1,61 @@
# frozen_string_literal: true
require 'rake_helper'
RSpec.describe 'gitlab:db:decomposition:connection_status', feature_category: :pods do
let(:max_connections) { 500 }
let(:current_connections) { 300 }
subject { run_rake_task('gitlab:db:decomposition:connection_status') }
before :all do
Rake.application.rake_require 'tasks/gitlab/db/decomposition/connection_status'
end
before do
allow(ApplicationRecord.connection).to receive(:select_one).with(any_args).and_return(
{ "active" => current_connections, "max" => max_connections }
)
end
context 'when separate ci database is not configured' do
before do
skip_if_multiple_databases_are_setup
end
context "when PostgreSQL max_connections is too low" do
it 'suggests to increase it' do
expect { subject }.to output(
"Currently using #{current_connections} connections out of #{max_connections} max_connections,\n" \
"which may run out when you switch to two database connections.\n\n" \
"Consider increasing PostgreSQL 'max_connections' setting.\n" \
"Depending on the installation method, there are different ways to\n" \
"increase that setting. Please consult the GitLab documentation.\n"
).to_stdout
end
end
context "when PostgreSQL max_connections is high enough" do
let(:max_connections) { 1000 }
it 'only shows current status' do
expect { subject }.to output(
"Currently using #{current_connections} connections out of #{max_connections} max_connections,\n" \
"which is enough for running GitLab using two database connections.\n"
).to_stdout
end
end
end
context 'when separate ci database is configured' do
before do
skip_if_multiple_databases_not_setup
end
it "does not show connection information" do
expect { subject }.to output(
"GitLab database already running on two connections\n"
).to_stdout
end
end
end