Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-08-03 12:09:25 +00:00
parent 3d8459c18b
commit aeee636c18
102 changed files with 2093 additions and 582 deletions

View File

@ -2,6 +2,10 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 16.2.3 (2023-08-03)
No changes.
## 16.2.2 (2023-08-01)
### Added (1 change)

View File

@ -404,7 +404,7 @@ group :development, :test do
gem 'parser', '~> 3.2', '>= 3.2.2.3'
gem 'pry-byebug'
gem 'pry-rails', '~> 0.3.9'
gem 'pry-shell', '~> 0.6.3'
gem 'pry-shell', '~> 0.6.1'
gem 'awesome_print', require: false

View File

@ -461,7 +461,7 @@
{"name":"pry","version":"0.14.2","platform":"ruby","checksum":"c4fe54efedaca1d351280b45b8849af363184696fcac1c72e0415f9bdac4334d"},
{"name":"pry-byebug","version":"3.10.1","platform":"ruby","checksum":"c8f975c32255bfdb29e151f5532130be64ff3d0042dc858d0907e849125581f8"},
{"name":"pry-rails","version":"0.3.9","platform":"ruby","checksum":"468662575abb6b67f4a9831219f99290d5eae7bf186e64dd810d0a3e4a8cc4b1"},
{"name":"pry-shell","version":"0.6.3","platform":"ruby","checksum":"17b9cdf0e318ab50dc12698da3e1b8f532518cbceb0353c42a9ce2dd066676c2"},
{"name":"pry-shell","version":"0.6.1","platform":"ruby","checksum":"a99a6b3dffe4df274ea1751866816906861a23851f13346e10a8e8f61b53360c"},
{"name":"public_suffix","version":"5.0.0","platform":"ruby","checksum":"26ee4fbce33ada25eb117ac71f2c24bf4d8b3414ab6b34f05b4708a3e90f1c6b"},
{"name":"puma","version":"6.3.0","platform":"java","checksum":"5e2ff95953608d1ba0350b80a3961a43e9bbb78ec60ebd5e4db1940c2921d5d8"},
{"name":"puma","version":"6.3.0","platform":"ruby","checksum":"b0e35b4fe7ae440237a9ff1647c6bb252a1c0951ff356020670d2e62c1aeeeec"},

View File

@ -1217,7 +1217,7 @@ GEM
pry (>= 0.13, < 0.15)
pry-rails (0.3.9)
pry (>= 0.10.4)
pry-shell (0.6.3)
pry-shell (0.6.1)
pry (>= 0.13.0)
tty-markdown
tty-prompt
@ -1941,7 +1941,7 @@ DEPENDENCIES
prometheus-client-mmap (~> 0.27)
pry-byebug
pry-rails (~> 0.3.9)
pry-shell (~> 0.6.3)
pry-shell (~> 0.6.1)
puma (~> 6.3)
rack (~> 2.2.7)
rack-attack (~> 6.6.1)

View File

@ -10,9 +10,10 @@ import {
import { mapActions, mapGetters, mapState } from 'vuex';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import BoardAddNewColumnForm from '~/boards/components/board_add_new_column_form.vue';
import { __ } from '~/locale';
import { __, s__ } from '~/locale';
import { createListMutations, listsQuery, BoardType, ListType } from 'ee_else_ce/boards/constants';
import boardLabelsQuery from '../graphql/board_labels.query.graphql';
import { setError } from '../graphql/cache_updates';
import { getListByTypeId } from '../boards_util';
export default {
@ -70,6 +71,12 @@ export default {
skip() {
return !this.isApolloBoard;
},
error(error) {
setError({
error,
message: s__('Boards|An error occurred while fetching labels. Please try again.'),
});
},
},
},
computed: {
@ -102,36 +109,43 @@ export default {
},
methods: {
...mapActions(['createList', 'fetchLabels', 'highlightList']),
createListApollo({ labelId }) {
return this.$apollo.mutate({
mutation: createListMutations[this.issuableType].mutation,
variables: {
labelId,
boardId: this.boardId,
},
update: (
store,
{
data: {
boardListCreate: { list },
},
async createListApollo({ labelId }) {
try {
await this.$apollo.mutate({
mutation: createListMutations[this.issuableType].mutation,
variables: {
labelId,
boardId: this.boardId,
},
) => {
const sourceData = store.readQuery({
query: listsQuery[this.issuableType].query,
variables: this.listQueryVariables,
});
const data = produce(sourceData, (draftData) => {
draftData[this.boardType].board.lists.nodes.push(list);
});
store.writeQuery({
query: listsQuery[this.issuableType].query,
variables: this.listQueryVariables,
data,
});
this.$emit('highlight-list', list.id);
},
});
update: (
store,
{
data: {
boardListCreate: { list },
},
},
) => {
const sourceData = store.readQuery({
query: listsQuery[this.issuableType].query,
variables: this.listQueryVariables,
});
const data = produce(sourceData, (draftData) => {
draftData[this.boardType].board.lists.nodes.push(list);
});
store.writeQuery({
query: listsQuery[this.issuableType].query,
variables: this.listQueryVariables,
data,
});
this.$emit('highlight-list', list.id);
},
});
} catch (error) {
setError({
error,
message: s__('Boards|An error occurred while creating the list. Please try again.'),
});
}
},
addList() {
if (!this.selectedLabel) {

View File

@ -10,6 +10,7 @@ import { listsQuery } from 'ee_else_ce/boards/constants';
import { formatBoardLists } from 'ee_else_ce/boards/boards_util';
import activeBoardItemQuery from 'ee_else_ce/boards/graphql/client/active_board_item.query.graphql';
import errorQuery from '../graphql/client/error.query.graphql';
import { setError } from '../graphql/cache_updates';
export default {
i18n: {
@ -34,12 +35,12 @@ export default {
],
data() {
return {
boardListsApollo: {},
activeListId: '',
boardId: this.initialBoardId,
filterParams: { ...this.initialFilterParams },
addColumnFormVisible: false,
isShowingEpicsSwimlanes: Boolean(queryToObject(window.location.search).group_by),
apolloError: null,
error: null,
};
},
@ -74,8 +75,11 @@ export default {
const { lists } = data[this.boardType].board;
return formatBoardLists(lists);
},
error() {
this.apolloError = this.$options.i18n.fetchError;
error(error) {
setError({
error,
message: this.$options.i18n.fetchError,
});
},
},
error: {
@ -151,13 +155,12 @@ export default {
@toggleSwimlanes="isShowingEpicsSwimlanes = $event"
/>
<board-content
v-if="!isApolloBoard || boardListsApollo"
:board-id="boardId"
:add-column-form-visible="addColumnFormVisible"
:is-swimlanes-on="isSwimlanesOn"
:filter-params="filterParams"
:board-lists-apollo="boardListsApollo"
:apollo-error="apolloError || error"
:apollo-error="error"
:list-query-variables="listQueryVariables"
@setActiveList="setActiveId"
@setAddColumnFormVisibility="addColumnFormVisible = $event"

View File

@ -5,6 +5,7 @@ import produce from 'immer';
import Draggable from 'vuedraggable';
import { mapState, mapActions } from 'vuex';
import BoardAddNewColumn from 'ee_else_ce/boards/components/board_add_new_column.vue';
import { s__ } from '~/locale';
import { defaultSortableOptions } from '~/sortable/constants';
import {
DraggableItemTypes,
@ -13,6 +14,7 @@ import {
updateListQueries,
} from 'ee_else_ce/boards/constants';
import { calculateNewPosition } from 'ee_else_ce/boards/boards_util';
import { setError } from '../graphql/cache_updates';
import BoardColumn from './board_column.vue';
export default {
@ -122,7 +124,14 @@ export default {
this.highlightedLists = this.highlightedLists.filter((id) => id !== listId);
}, flashAnimationDuration);
},
updateListPosition({
dismissError() {
if (this.isApolloBoard) {
setError({ message: null, captureError: false });
} else {
this.unsetError();
}
},
async updateListPosition({
item: {
dataset: { listId: movedListId, draggableItemType },
},
@ -153,7 +162,7 @@ export default {
const targetPosition = this.boardListsById[displacedListId].position;
try {
this.$apollo.mutate({
await this.$apollo.mutate({
mutation: updateListQueries[this.issuableType].mutation,
variables: {
listId: movedListId,
@ -195,8 +204,11 @@ export default {
},
},
});
} catch {
// handle error
} catch (error) {
setError({
error,
message: s__('Boards|An error occurred while moving the list. Please try again.'),
});
}
},
},
@ -209,7 +221,7 @@ export default {
data-qa-selector="boards_list"
class="gl-flex-grow-1 gl-display-flex gl-flex-direction-column gl-min-h-0"
>
<gl-alert v-if="errorToDisplay" variant="danger" :dismissible="true" @dismiss="unsetError">
<gl-alert v-if="errorToDisplay" variant="danger" :dismissible="true" @dismiss="dismissError">
{{ errorToDisplay }}
</gl-alert>
<component

View File

@ -5,7 +5,7 @@ import { mapState, mapActions, mapGetters } from 'vuex';
import SidebarDropdownWidget from 'ee_else_ce/sidebar/components/sidebar_dropdown_widget.vue';
import activeBoardItemQuery from 'ee_else_ce/boards/graphql/client/active_board_item.query.graphql';
import setActiveBoardItemMutation from 'ee_else_ce/boards/graphql/client/set_active_board_item.mutation.graphql';
import { __, sprintf } from '~/locale';
import { __, s__, sprintf } from '~/locale';
import BoardSidebarTimeTracker from '~/boards/components/sidebar/board_sidebar_time_tracker.vue';
import BoardSidebarTitle from '~/boards/components/sidebar/board_sidebar_title.vue';
import { INCIDENT } from '~/boards/constants';
@ -18,6 +18,7 @@ import SidebarSeverityWidget from '~/sidebar/components/severity/sidebar_severit
import SidebarSubscriptionsWidget from '~/sidebar/components/subscriptions/sidebar_subscriptions_widget.vue';
import SidebarTodoWidget from '~/sidebar/components/todo_toggle/sidebar_todo_widget.vue';
import SidebarLabelsWidget from '~/sidebar/components/labels/labels_select_widget/labels_select_root.vue';
import { setError } from '../graphql/cache_updates';
export default {
components: {
@ -94,6 +95,12 @@ export default {
skip() {
return !this.isApolloBoard;
},
error(error) {
setError({
error,
message: s__('Boards|An error occurred while selecting the card. Please try again.'),
});
},
},
},
computed: {

View File

@ -3,7 +3,7 @@ import { GlLoadingIcon, GlIntersectionObserver } from '@gitlab/ui';
import Draggable from 'vuedraggable';
import { mapActions, mapState } from 'vuex';
import { STATUS_CLOSED } from '~/issues/constants';
import { sprintf, __ } from '~/locale';
import { sprintf, __, s__ } from '~/locale';
import { defaultSortableOptions } from '~/sortable/constants';
import { sortableStart, sortableEnd } from '~/sortable/utils';
import Tracking from '~/tracking';
@ -122,6 +122,12 @@ export default {
context: {
isSingleRequest: true,
},
error(error) {
setError({
error,
message: s__('Boards|An error occurred while fetching a list. Please try again.'),
});
},
},
toList: {
query() {
@ -142,8 +148,16 @@ export default {
context: {
isSingleRequest: true,
},
error() {
// handle error
error(error) {
setError({
error,
message: sprintf(
s__('Boards|An error occurred while moving the %{issuableType}. Please try again.'),
{
issuableType: this.isEpicBoard ? 'epic' : 'issue',
},
),
});
},
},
},
@ -442,8 +456,16 @@ export default {
},
},
});
} catch {
// handle error
} catch (error) {
setError({
error,
message: sprintf(
s__('Boards|An error occurred while moving the %{issuableType}. Please try again.'),
{
issuableType: this.isEpicBoard ? 'epic' : 'issue',
},
),
});
}
},
updateCacheAfterMovingItem({ issuableMoveList, fromListId, toListId, newIndex, cache }) {
@ -494,52 +516,64 @@ export default {
});
}
},
moveToPosition(positionInList, oldIndex, item) {
this.$apollo.mutate({
mutation: listIssuablesQueries[this.issuableType].moveMutation,
variables: {
...moveItemVariables({
iid: item.iid,
epicId: item.id,
fromListId: this.currentList.id,
toListId: this.currentList.id,
isIssue: !this.isEpicBoard,
boardId: this.boardId,
itemToMove: item,
}),
positionInList,
withColor: this.isEpicBoard && this.glFeatures.epicColorHighlight,
},
optimisticResponse: {
issuableMoveList: {
issuable: item,
errors: [],
async moveToPosition(positionInList, oldIndex, item) {
try {
await this.$apollo.mutate({
mutation: listIssuablesQueries[this.issuableType].moveMutation,
variables: {
...moveItemVariables({
iid: item.iid,
epicId: item.id,
fromListId: this.currentList.id,
toListId: this.currentList.id,
isIssue: !this.isEpicBoard,
boardId: this.boardId,
itemToMove: item,
}),
positionInList,
withColor: this.isEpicBoard && this.glFeatures.epicColorHighlight,
},
},
update: (cache, { data: { issuableMoveList } }) => {
const { issuable } = issuableMoveList;
removeItemFromList({
query: listIssuablesQueries[this.issuableType].query,
variables: { ...this.listQueryVariables, id: this.currentList.id },
boardType: this.boardType,
id: issuable.id,
issuableType: this.issuableType,
cache,
});
if (positionInList === 0 || this.listItemsCount <= this.boardListItems.length) {
const newIndex = positionInList === 0 ? 0 : this.boardListItems.length - 1;
addItemToList({
optimisticResponse: {
issuableMoveList: {
issuable: item,
errors: [],
},
},
update: (cache, { data: { issuableMoveList } }) => {
const { issuable } = issuableMoveList;
removeItemFromList({
query: listIssuablesQueries[this.issuableType].query,
variables: { ...this.listQueryVariables, id: this.currentList.id },
issuable,
newIndex,
boardType: this.boardType,
id: issuable.id,
issuableType: this.issuableType,
cache,
});
}
},
});
if (positionInList === 0 || this.listItemsCount <= this.boardListItems.length) {
const newIndex = positionInList === 0 ? 0 : this.boardListItems.length - 1;
addItemToList({
query: listIssuablesQueries[this.issuableType].query,
variables: { ...this.listQueryVariables, id: this.currentList.id },
issuable,
newIndex,
boardType: this.boardType,
issuableType: this.issuableType,
cache,
});
}
},
});
} catch (error) {
setError({
error,
message: sprintf(
s__('Boards|An error occurred while moving the %{issuableType}. Please try again.'),
{
issuableType: this.isEpicBoard ? 'epic' : 'issue',
},
),
});
}
},
async addListItem(input) {
this.toggleForm();
@ -583,7 +617,7 @@ export default {
} catch (error) {
setError({
message: sprintf(
__('An error occurred while creating the %{issuableType}. Please try again.'),
s__('Boards|An error occurred while creating the %{issuableType}. Please try again.'),
{
issuableType: this.isEpicBoard ? 'epic' : 'issue',
},

View File

@ -30,6 +30,7 @@ import {
toggleCollapsedMutations,
} from 'ee_else_ce/boards/constants';
import eventHub from '../eventhub';
import { setError } from '../graphql/cache_updates';
import ItemCount from './item_count.vue';
export default {
@ -39,6 +40,9 @@ export default {
listSettings: s__('Boards|Edit list settings'),
expand: s__('Boards|Expand'),
collapse: s__('Boards|Collapse'),
fetchError: s__(
"Boards|An error occurred while fetching list's information. Please try again.",
),
},
components: {
GlButton,
@ -206,6 +210,12 @@ export default {
context: {
isSingleRequest: true,
},
error(error) {
setError({
error,
message: this.$options.i18n.fetchError,
});
},
},
},
created() {
@ -293,8 +303,11 @@ export default {
},
},
});
} catch {
this.$emit('error');
} catch (error) {
setError({
error,
message: s__('Boards|An error occurred while updating the list. Please try again.'),
});
}
} else {
this.updateList({ listId: this.list.id, collapsed });

View File

@ -11,10 +11,11 @@ import {
deleteListQueries,
} from 'ee_else_ce/boards/constants';
import { isScopedLabel } from '~/lib/utils/common_utils';
import { __ } from '~/locale';
import { __, s__ } from '~/locale';
import eventHub from '~/sidebar/event_hub';
import Tracking from '~/tracking';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { setError } from '../graphql/cache_updates';
export default {
listSettingsText: __('List settings'),
@ -131,23 +132,30 @@ export default {
}
},
async deleteList(listId) {
await this.$apollo.mutate({
mutation: deleteListQueries[this.issuableType].mutation,
variables: {
listId,
},
update: (store) => {
store.updateQuery(
{ query: listsQuery[this.issuableType].query, variables: this.queryVariables },
(sourceData) =>
produce(sourceData, (draftData) => {
draftData[this.boardType].board.lists.nodes = draftData[
this.boardType
].board.lists.nodes.filter((list) => list.id !== listId);
}),
);
},
});
try {
await this.$apollo.mutate({
mutation: deleteListQueries[this.issuableType].mutation,
variables: {
listId,
},
update: (store) => {
store.updateQuery(
{ query: listsQuery[this.issuableType].query, variables: this.queryVariables },
(sourceData) =>
produce(sourceData, (draftData) => {
draftData[this.boardType].board.lists.nodes = draftData[
this.boardType
].board.lists.nodes.filter((list) => list.id !== listId);
}),
);
},
});
} catch (error) {
setError({
error,
message: s__('Boards|An error occurred while deleting the list. Please try again.'),
});
}
},
},
};

View File

@ -1,8 +1,10 @@
<script>
import BoardAddNewColumnTrigger from '~/boards/components/board_add_new_column_trigger.vue';
import { s__ } from '~/locale';
import BoardsSelector from 'ee_else_ce/boards/components/boards_selector.vue';
import IssueBoardFilteredSearch from 'ee_else_ce/boards/components/issue_board_filtered_search.vue';
import { getBoardQuery } from 'ee_else_ce/boards/boards_util';
import { setError } from '../graphql/cache_updates';
import ConfigToggle from './config_toggle.vue';
import NewBoardButton from './new_board_button.vue';
import ToggleFocus from './toggle_focus.vue';
@ -70,6 +72,12 @@ export default {
labels: board.labels?.nodes,
};
},
error(error) {
setError({
error,
message: s__('Boards|An error occurred while fetching board details. Please try again.'),
});
},
},
},
computed: {

View File

@ -24,12 +24,16 @@ import groupBoardsQuery from '../graphql/group_boards.query.graphql';
import projectBoardsQuery from '../graphql/project_boards.query.graphql';
import groupRecentBoardsQuery from '../graphql/group_recent_boards.query.graphql';
import projectRecentBoardsQuery from '../graphql/project_recent_boards.query.graphql';
import { setError } from '../graphql/cache_updates';
import { fullBoardId } from '../boards_util';
const MIN_BOARDS_TO_VIEW_RECENT = 10;
export default {
name: 'BoardsSelector',
i18n: {
fetchBoardsError: s__('Boards|An error occurred while fetching boards. Please try again.'),
},
components: {
BoardForm,
GlLoadingIcon,
@ -143,7 +147,7 @@ export default {
eventHub.$off('showBoardModal', this.showPage);
},
methods: {
...mapActions(['setError', 'fetchBoard', 'unsetActiveId']),
...mapActions(['fetchBoard', 'unsetActiveId']),
fullBoardId(boardId) {
return fullBoardId(boardId);
},
@ -179,6 +183,12 @@ export default {
watchLoading: (isLoading) => {
this.loadingBoards = isLoading;
},
error(error) {
setError({
error,
message: this.$options.i18n.fetchBoardsError,
});
},
});
this.loadRecentBoards();
@ -193,6 +203,14 @@ export default {
watchLoading: (isLoading) => {
this.loadingRecentBoards = isLoading;
},
error(error) {
setError({
error,
message: s__(
'Boards|An error occurred while fetching recent boards. Please try again.',
),
});
},
});
},
addBoard(board) {
@ -267,9 +285,6 @@ export default {
}
},
},
i18n: {
errorFetchingBoard: s__('Board|An error occurred while fetching the board, please try again.'),
},
};
</script>

View File

@ -326,6 +326,7 @@ export default {
</div>
<div
:title="$options.coverageStateLeft(props).text"
:data-tooltip-custom-class="$options.coverageStateLeft(props).class"
:class="[
$options.parallelViewLeftLineType(props),
$options.coverageStateLeft(props).class,
@ -466,6 +467,7 @@ export default {
</div>
<div
:title="$options.coverageStateRight(props).text"
:data-tooltip-custom-class="$options.coverageStateRight(props).class"
:class="[
props.line.right.type,
$options.coverageStateRight(props).class,

View File

@ -1,7 +1,7 @@
<!-- eslint-disable vue/multi-word-component-names -->
<script>
import { getLocationHash } from '~/lib/utils/url_utility';
import { linkRegex } from '../../utils';
import LineNumber from './line_number.vue';
export default {
@ -64,10 +64,19 @@ export default {
});
}
if (window.location.hash) {
const hash = getLocationHash();
const lineToMatch = `L${line.lineNumber + 1}`;
if (hash === lineToMatch) {
applyHighlight = true;
}
}
return h(
'div',
{
class: ['js-line', 'log-line', applyHighlight ? 'gl-bg-gray-500' : ''],
class: ['js-line', 'log-line', applyHighlight ? 'gl-bg-gray-700' : ''],
},
[
h(LineNumber, {

View File

@ -1,5 +1,6 @@
<script>
import { GlIcon } from '@gitlab/ui';
import { getLocationHash } from '~/lib/utils/url_utility';
import DurationBadge from './duration_badge.vue';
import LineNumber from './line_number.vue';
@ -32,6 +33,12 @@ export default {
iconName() {
return this.isClosed ? 'chevron-lg-right' : 'chevron-lg-down';
},
applyHighlight() {
const hash = getLocationHash();
const lineToMatch = `L${this.line.lineNumber + 1}`;
return hash === lineToMatch;
},
},
methods: {
handleOnClick() {
@ -44,6 +51,7 @@ export default {
<template>
<div
class="log-line collapsible-line d-flex justify-content-between ws-normal gl-align-items-flex-start"
:class="{ 'gl-bg-gray-700': applyHighlight }"
role="button"
@click="handleOnClick"
>

View File

@ -83,6 +83,11 @@ export default {
header: s__('PerformanceBar|Zoekt calls'),
keys: ['request', 'body'],
},
{
metric: 'ch',
header: s__('PerformanceBar|ClickHouse queries'),
keys: ['sql', 'database', 'statistics'],
},
{
metric: 'external-http',
title: 'external',

View File

@ -76,6 +76,11 @@ export default {
type: Boolean,
required: true,
},
isUsingLfs: {
type: Boolean,
required: false,
default: false,
},
},
computed: {
replaceModalTitle() {
@ -148,6 +153,7 @@ export default {
:can-push-code="canPushCode"
:can-push-to-branch="canPushToBranch"
:empty-repo="emptyRepo"
:is-using-lfs="isUsingLfs"
/>
</div>
</template>

View File

@ -233,7 +233,9 @@ export default {
const { createMergeRequestIn, forkProject } = this.userPermissions;
const { canModifyBlob } = this.blobInfo;
return this.isLoggedIn && !canModifyBlob && createMergeRequestIn && forkProject;
return (
this.isLoggedIn && !this.isUsingLfs && !canModifyBlob && createMergeRequestIn && forkProject
);
},
forkPath() {
const forkPaths = {
@ -406,6 +408,7 @@ export default {
:is-locked="Boolean(pathLockedByUser)"
:can-lock="canLock"
:show-fork-suggestion="showForkSuggestion"
:is-using-lfs="isUsingLfs"
@fork="setForkTarget('view')"
/>
</template>

View File

@ -1,4 +1,6 @@
const viewers = {
import { TEXT_FILE_TYPE, JSON_LANGUAGE } from '../../constants';
export const viewers = {
csv: () => import('./csv_viewer.vue'),
download: () => import('./download_viewer.vue'),
image: () => import('./image_viewer.vue'),
@ -18,7 +20,7 @@ const viewers = {
export const loadViewer = (type, isUsingLfs, hljsWorkerEnabled, language) => {
let viewer = viewers[type];
if (hljsWorkerEnabled && language === 'json') {
if (hljsWorkerEnabled && language === JSON_LANGUAGE && type === TEXT_FILE_TYPE) {
// The New Source Viewer currently only supports JSON files.
// More language support will be added in: https://gitlab.com/gitlab-org/gitlab/-/issues/415753
viewer = () => import('~/vue_shared/components/source_viewer/source_viewer_new.vue');

View File

@ -1,8 +1,18 @@
<script>
import { GlModal, GlFormGroup, GlFormInput, GlFormTextarea, GlToggle, GlForm } from '@gitlab/ui';
import {
GlModal,
GlFormGroup,
GlFormInput,
GlFormTextarea,
GlToggle,
GlForm,
GlSprintf,
GlLink,
} from '@gitlab/ui';
import csrf from '~/lib/utils/csrf';
import { __ } from '~/locale';
import { __, s__ } from '~/locale';
import validation from '~/vue_shared/directives/validation';
import { helpPagePath } from '~/helpers/help_page_helper';
import {
SECONDARY_OPTIONS_TEXT,
COMMIT_LABEL,
@ -28,8 +38,19 @@ export default {
GlFormTextarea,
GlToggle,
GlForm,
GlSprintf,
GlLink,
},
i18n: {
LFS_WARNING_TITLE: __("The file you're about to delete is tracked by LFS"),
LFS_WARNING_PRIMARY_CONTENT: s__(
'BlobViewer|If you delete the file, it will be removed from the branch %{branch}.',
),
LFS_WARNING_SECONDARY_CONTENT: s__(
'BlobViewer|This file will still take up space in your LFS storage. %{linkStart}How do I remove tracked objects from Git LFS?%{linkEnd}',
),
LFS_CONTINUE_TEXT: __('Continue…'),
LFS_CANCEL_TEXT: __('Cancel'),
PRIMARY_OPTIONS_TEXT: __('Delete file'),
SECONDARY_OPTIONS_TEXT,
COMMIT_LABEL,
@ -79,6 +100,11 @@ export default {
type: Boolean,
required: true,
},
isUsingLfs: {
type: Boolean,
required: false,
default: false,
},
},
data() {
const form = {
@ -91,6 +117,7 @@ export default {
},
};
return {
lfsWarningDismissed: false,
loading: false,
createNewMr: true,
error: '',
@ -99,7 +126,7 @@ export default {
},
computed: {
primaryOptions() {
return {
const defaultOptions = {
text: this.$options.i18n.PRIMARY_OPTIONS_TEXT,
attributes: {
variant: 'danger',
@ -107,6 +134,13 @@ export default {
disabled: this.loading || !this.form.state,
},
};
const lfsWarningOptions = {
text: this.$options.i18n.LFS_CONTINUE_TEXT,
attributes: { variant: 'confirm' },
};
return this.showLfsWarning ? lfsWarningOptions : defaultOptions;
},
cancelOptions() {
return {
@ -139,14 +173,39 @@ export default {
(hasFirstLineExceedMaxLength || hasOtherLineExceedMaxLength)
);
},
/* eslint-enable dot-notation */
showLfsWarning() {
return this.isUsingLfs && !this.lfsWarningDismissed;
},
title() {
return this.showLfsWarning ? this.$options.i18n.LFS_WARNING_TITLE : this.modalTitle;
},
showDeleteForm() {
return !this.isUsingLfs || (this.isUsingLfs && this.lfsWarningDismissed);
},
},
methods: {
show() {
this.$refs[this.modalId].show();
this.lfsWarningDismissed = false;
},
submitForm(e) {
cancel() {
this.$refs[this.modalId].hide();
},
async handleContinueLfsWarning() {
this.lfsWarningDismissed = true;
await this.$nextTick();
this.$refs.message?.$el.focus();
},
async handlePrimaryAction(e) {
e.preventDefault(); // Prevent modal from closing
if (this.showLfsWarning) {
this.lfsWarningDismissed = true;
await this.$nextTick();
this.$refs.message?.$el.focus();
return;
}
this.form.showValidation = true;
if (!this.form.state) {
@ -158,6 +217,7 @@ export default {
this.$refs.form.$el.submit();
},
},
deleteLfsHelpPath: helpPagePath('topics/git/lfs/index', { anchor: 'removing-objects-from-lfs' }),
};
</script>
@ -167,65 +227,85 @@ export default {
v-bind="$attrs"
data-testid="modal-delete"
:modal-id="modalId"
:title="modalTitle"
:title="title"
:action-primary="primaryOptions"
:action-cancel="cancelOptions"
@primary="submitForm"
@primary="handlePrimaryAction"
>
<gl-form ref="form" novalidate :action="deletePath" method="post">
<input type="hidden" name="_method" value="delete" />
<input :value="$options.csrf.token" type="hidden" name="authenticity_token" />
<template v-if="emptyRepo">
<input type="hidden" name="branch_name" :value="originalBranch" class="js-branch-name" />
</template>
<template v-else>
<input type="hidden" name="original_branch" :value="originalBranch" />
<input
v-if="createNewMr || !canPushToBranch"
type="hidden"
name="create_merge_request"
value="1"
/>
<gl-form-group
:label="$options.i18n.COMMIT_LABEL"
label-for="commit_message"
:invalid-feedback="form.fields['commit_message'].feedback"
>
<gl-form-textarea
v-model="form.fields['commit_message'].value"
v-validation:[form.showValidation]
name="commit_message"
data-qa-selector="commit_message_field"
:state="form.fields['commit_message'].state"
:disabled="loading"
required
<div v-if="showLfsWarning">
<p>
<gl-sprintf :message="$options.i18n.LFS_WARNING_PRIMARY_CONTENT">
<template #branch>
<code>{{ targetBranch }}</code>
</template>
</gl-sprintf>
</p>
<p>
<gl-sprintf :message="$options.i18n.LFS_WARNING_SECONDARY_CONTENT">
<template #link="{ content }">
<gl-link :href="$options.deleteLfsHelpPath">{{ content }}</gl-link>
</template>
</gl-sprintf>
</p>
</div>
<div v-if="showDeleteForm">
<gl-form ref="form" novalidate :action="deletePath" method="post">
<input type="hidden" name="_method" value="delete" />
<input :value="$options.csrf.token" type="hidden" name="authenticity_token" />
<template v-if="emptyRepo">
<input type="hidden" name="branch_name" :value="originalBranch" class="js-branch-name" />
</template>
<template v-else>
<input type="hidden" name="original_branch" :value="originalBranch" />
<input
v-if="createNewMr || !canPushToBranch"
type="hidden"
name="create_merge_request"
value="1"
/>
<p v-if="showHint" class="form-text gl-text-gray-600" data-testid="hint">
{{ $options.i18n.COMMIT_MESSAGE_HINT }}
</p>
</gl-form-group>
<gl-form-group
v-if="canPushCode"
:label="$options.i18n.TARGET_BRANCH_LABEL"
label-for="branch_name"
:invalid-feedback="form.fields['branch_name'].feedback"
>
<gl-form-input
v-model="form.fields['branch_name'].value"
v-validation:[form.showValidation]
:state="form.fields['branch_name'].state"
<gl-form-group
:label="$options.i18n.COMMIT_LABEL"
label-for="commit_message"
:invalid-feedback="form.fields['commit_message'].feedback"
>
<gl-form-textarea
ref="message"
v-model="form.fields['commit_message'].value"
v-validation:[form.showValidation]
name="commit_message"
data-qa-selector="commit_message_field"
:state="form.fields['commit_message'].state"
:disabled="loading"
required
/>
<p v-if="showHint" class="form-text gl-text-gray-600" data-testid="hint">
{{ $options.i18n.COMMIT_MESSAGE_HINT }}
</p>
</gl-form-group>
<gl-form-group
v-if="canPushCode"
:label="$options.i18n.TARGET_BRANCH_LABEL"
label-for="branch_name"
:invalid-feedback="form.fields['branch_name'].feedback"
>
<gl-form-input
v-model="form.fields['branch_name'].value"
v-validation:[form.showValidation]
:state="form.fields['branch_name'].state"
:disabled="loading"
name="branch_name"
required
/>
</gl-form-group>
<gl-toggle
v-if="showCreateNewMrToggle"
v-model="createNewMr"
:disabled="loading"
name="branch_name"
required
:label="$options.i18n.TOGGLE_CREATE_MR_LABEL"
/>
</gl-form-group>
<gl-toggle
v-if="showCreateNewMrToggle"
v-model="createNewMr"
:disabled="loading"
:label="$options.i18n.TOGGLE_CREATE_MR_LABEL"
/>
</template>
</gl-form>
</template>
</gl-form>
</div>
</gl-modal>
</template>

View File

@ -83,6 +83,8 @@ export const DEFAULT_BLOB_INFO = {
},
};
export const JSON_LANGUAGE = 'json';
export const OPENAPI_FILE_TYPE = 'openapi';
export const TEXT_FILE_TYPE = 'text';
export const LFS_STORAGE = 'lfs';

View File

@ -8,9 +8,14 @@ const getTooltipTitle = (element) => {
return element.getAttribute('title') || element.dataset.title;
};
const getTooltipCustomClass = (element) => {
return element.dataset.tooltipCustomClass;
};
const newTooltip = (element, config = {}) => {
const { placement, container, boundary, html, triggers } = element.dataset;
const title = getTooltipTitle(element);
const customClass = getTooltipCustomClass(element);
return {
id: uniqueId('gl-tooltip'),
@ -22,6 +27,7 @@ const newTooltip = (element, config = {}) => {
boundary,
triggers,
disabled: !title,
customClass,
...config,
};
};
@ -116,6 +122,7 @@ export default {
:boundary="tooltip.boundary"
:disabled="tooltip.disabled"
:show="tooltip.show"
:custom-class="tooltip.customClass"
@hidden="$emit('hidden', tooltip)"
>
<span v-if="tooltip.html" v-safe-html:[$options.safeHtmlConfig]="tooltip.title"></span>

View File

@ -1,7 +1,7 @@
<script>
import { GlDropdown, GlDropdownSectionHeader, GlDropdownItem, GlDropdownDivider } from '@gitlab/ui';
import { GlDisclosureDropdown } from '@gitlab/ui';
import { s__ } from '~/locale';
import { s__, __ } from '~/locale';
const objectiveActionItems = [
{
@ -29,10 +29,30 @@ export default {
keyResultActionItems,
objectiveActionItems,
components: {
GlDropdown,
GlDropdownSectionHeader,
GlDropdownItem,
GlDropdownDivider,
GlDisclosureDropdown,
},
computed: {
objectiveDropdownItems() {
return {
name: __('Objective'),
items: this.$options.objectiveActionItems.map((item) => ({
text: item.title,
action: () => this.change(item),
})),
};
},
keyResultDropdownItems() {
return {
name: __('Key result'),
items: this.$options.keyResultActionItems.map((item) => ({
text: item.title,
action: () => this.change(item),
})),
};
},
dropdownItems() {
return [this.objectiveDropdownItems, this.keyResultDropdownItems];
},
},
methods: {
change({ eventName }) {
@ -43,24 +63,10 @@ export default {
</script>
<template>
<gl-dropdown :text="__('Add')" size="small" right>
<gl-dropdown-section-header>{{ __('Objective') }}</gl-dropdown-section-header>
<gl-dropdown-item
v-for="item in $options.objectiveActionItems"
:key="item.eventName"
@click="change(item)"
>
{{ item.title }}
</gl-dropdown-item>
<gl-dropdown-divider />
<gl-dropdown-section-header>{{ __('Key result') }}</gl-dropdown-section-header>
<gl-dropdown-item
v-for="item in $options.keyResultActionItems"
:key="item.eventName"
@click="change(item)"
>
{{ item.title }}
</gl-dropdown-item>
</gl-dropdown>
<gl-disclosure-dropdown
:toggle-text="__('Add')"
size="small"
placement="right"
:items="dropdownItems"
/>
</template>

View File

@ -929,3 +929,13 @@ table.code {
border-bottom: 0;
}
}
.tooltip {
  // Pixel offsets for the coverage-gutter tooltips so the arrow lines up
  // with the gutter indicator.
  // NOTE(review): the use of !important suggests these must beat an inline
  // `left` set by the tooltip library — confirm before removing.
  &.coverage {
    left: -3px !important;
  }
  &.no-coverage {
    left: -2px !important;
  }
}

View File

@ -36,7 +36,9 @@ module WebHooks
# - and either:
# - disabled_until is nil (i.e. this was set by WebHook#fail!)
# - or disabled_until is in the future (i.e. this was set by WebHook#backoff!)
# - OR silent mode is enabled.
scope :disabled, -> do
return all if Gitlab::SilentMode.enabled?
return none unless auto_disabling_enabled?
where(
@ -52,7 +54,9 @@ module WebHooks
# - OR we have exceeded the grace period and neither of the following is true:
# - disabled_until is nil (i.e. this was set by WebHook#fail!)
# - disabled_until is in the future (i.e. this was set by WebHook#backoff!)
# - AND silent mode is not enabled.
scope :executable, -> do
return none if Gitlab::SilentMode.enabled?
return all unless auto_disabling_enabled?
where(

View File

@ -2467,7 +2467,7 @@ class Project < ApplicationRecord
break unless pages_enabled?
variables.append(key: 'CI_PAGES_DOMAIN', value: Gitlab.config.pages.host)
variables.append(key: 'CI_PAGES_URL', value: Gitlab::Pages::UrlBuilder.new(self).pages_url)
variables.append(key: 'CI_PAGES_URL', value: Gitlab::Pages::UrlBuilder.new(self).pages_url(with_unique_domain: true))
end
end

View File

@ -57,6 +57,11 @@ class WebHookService
end
def execute
if Gitlab::SilentMode.enabled?
log_silent_mode_enabled
return ServiceResponse.error(message: 'Silent mode enabled')
end
return ServiceResponse.error(message: 'Hook disabled') if disabled?
if recursion_blocked?
@ -98,6 +103,7 @@ class WebHookService
def async_execute
Gitlab::ApplicationContext.with_context(hook.application_context) do
break log_silent_mode_enabled if Gitlab::SilentMode.enabled?
break log_rate_limited if rate_limit!
break log_recursion_blocked if recursion_blocked?
@ -237,6 +243,10 @@ class WebHookService
)
end
def log_silent_mode_enabled
log_auth_error('GitLab is in silent mode')
end
def log_auth_error(message, params = {})
Gitlab::AuthLogger.error(
params.merge(

View File

@ -42,8 +42,8 @@
%td= _('Maximum snippet size')
%td= instance_configuration_human_size_cell(size_limits[:snippet_size_limit])
%tr
%td= s_('Import|Maximum import remote file size (MiB)')
%td= s_('Import|Maximum import remote file size (MB)')
%td= instance_configuration_human_size_cell(size_limits[:max_import_remote_file_size])
%tr
%td= s_('BulkImport|Direct Transfer maximum download file size (MiB)')
%td= s_('BulkImport|Direct transfer maximum download file size (MB)')
%td= instance_configuration_human_size_cell(size_limits[:max_import_remote_file_size])

View File

@ -15,16 +15,21 @@ module Users
return unless ::Gitlab::CurrentSettings.current_application_settings.deactivate_dormant_users
deactivate_users(User.dormant)
deactivate_users(User.with_no_activity)
admin_bot = User.admin_bot
return unless admin_bot
deactivate_users(User.dormant, admin_bot)
deactivate_users(User.with_no_activity, admin_bot)
end
private
def deactivate_users(scope)
def deactivate_users(scope, admin_bot)
with_context(caller_id: self.class.name.to_s) do
scope.each_batch do |batch|
batch.each(&:deactivate)
batch.each do |user|
Users::DeactivateService.new(admin_bot).execute(user)
end
end
end
end

View File

@ -26,6 +26,6 @@ ClickHouse::Client.configure do |config|
}
response = Gitlab::HTTP.post(url, options)
ClickHouse::Client::Response.new(response.body, response.code)
ClickHouse::Client::Response.new(response.body, response.code, response.headers)
end
end

View File

@ -18,6 +18,7 @@ Peek.into Peek::Views::Elasticsearch
Peek.into Peek::Views::Zoekt
Peek.into Peek::Views::Rugged
Peek.into Peek::Views::ExternalHttp
Peek.into Peek::Views::ClickHouse
Peek.into Peek::Views::BulletDetailed if defined?(Bullet)
Peek.into Peek::Views::Memory

View File

@ -0,0 +1,23 @@
---
# Service Ping metric definition: weekly count of failed jobs for batched
# background migrations, reported per migration (shape described by the
# value_json_schema file referenced below).
key_path: counts_weekly.batched_background_migration_failed_jobs_metric
description: Count the number of failed jobs per batched background migration
product_section: enablement
product_stage: data_stores
product_group: database
value_type: number
status: active
milestone: "16.3"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/126165
time_frame: 7d
data_source: database
data_category: optional
instrumentation_class: BatchedBackgroundMigrationFailedJobsMetric
performance_indicator_type: []
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
value_json_schema: "config/metrics/objects_schemas/batched_background_migration_failed_jobs_metric.json"

View File

@ -0,0 +1,21 @@
{
  "type": "array",
  "items": {
    "type": "object",
    "properties": {
      "job_class_name": {
        "type": "string"
      },
      "table_name": {
        "type": "string"
      },
      "failed_jobs": {
        "type": "integer"
      }
    }
  }
}

View File

@ -596,9 +596,6 @@ in the second step, do not supply the `EXTERNAL_URL` value.
# available database connections.
patroni['postgresql']['max_wal_senders'] = 7
# Incoming recommended value for max connections is 500. See https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5691.
patroni['postgresql']['max_connections'] = 500
# Prevent database migrations from running on upgrade automatically
gitlab_rails['auto_migrate'] = false
@ -726,8 +723,6 @@ The following IPs will be used as an example:
password: '<pgbouncer_password_hash>'
}
}
# Incoming recommended value for max db connections is 150. See https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5691.
pgbouncer['max_db_connections'] = 150
# Configure Consul agent
consul['watchers'] = %w(postgresql)
@ -1222,7 +1217,6 @@ in the second step, do not supply the `EXTERNAL_URL` value.
# PostgreSQL configuration
postgresql['listen_address'] = '0.0.0.0'
postgresql['max_connections'] = 500
# Prevent database migrations from running on upgrade automatically
gitlab_rails['auto_migrate'] = false

View File

@ -613,9 +613,6 @@ in the second step, do not supply the `EXTERNAL_URL` value.
# available database connections.
patroni['postgresql']['max_wal_senders'] = 7
# Incoming recommended value for max connections is 500. See https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5691.
patroni['postgresql']['max_connections'] = 500
# Prevent database migrations from running on upgrade automatically
gitlab_rails['auto_migrate'] = false
@ -743,8 +740,6 @@ The following IPs will be used as an example:
password: '<pgbouncer_password_hash>'
}
}
# Incoming recommended value for max db connections is 150. See https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5691.
pgbouncer['max_db_connections'] = 150
# Configure Consul agent
consul['watchers'] = %w(postgresql)
@ -1241,7 +1236,6 @@ in the second step, do not supply the `EXTERNAL_URL` value.
# PostgreSQL configuration
postgresql['listen_address'] = '0.0.0.0'
postgresql['max_connections'] = 500
# Prevent database migrations from running on upgrade automatically
gitlab_rails['auto_migrate'] = false

View File

@ -883,9 +883,6 @@ in the second step, do not supply the `EXTERNAL_URL` value.
# available database connections.
patroni['postgresql']['max_wal_senders'] = 7
# Incoming recommended value for max connections is 500. See https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5691.
patroni['postgresql']['max_connections'] = 500
# Prevent database migrations from running on upgrade automatically
gitlab_rails['auto_migrate'] = false
@ -1013,8 +1010,6 @@ The following IPs will be used as an example:
password: '<pgbouncer_password_hash>'
}
}
# Incoming recommended value for max db connections is 150. See https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5691.
pgbouncer['max_db_connections'] = 150
# Configure Consul agent
consul['watchers'] = %w(postgresql)
@ -1169,7 +1164,6 @@ in the second step, do not supply the `EXTERNAL_URL` value.
# PostgreSQL configuration
postgresql['listen_address'] = '0.0.0.0'
postgresql['max_connections'] = 500
# Prevent database migrations from running on upgrade automatically
gitlab_rails['auto_migrate'] = false

View File

@ -605,9 +605,6 @@ in the second step, do not supply the `EXTERNAL_URL` value.
# available database connections.
patroni['postgresql']['max_wal_senders'] = 7
# Incoming recommended value for max connections is 500. See https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5691.
patroni['postgresql']['max_connections'] = 500
# Prevent database migrations from running on upgrade automatically
gitlab_rails['auto_migrate'] = false
@ -736,8 +733,6 @@ The following IPs will be used as an example:
password: '<pgbouncer_password_hash>'
}
}
# Incoming recommended value for max db connections is 150. See https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5691.
pgbouncer['max_db_connections'] = 150
# Configure Consul agent
consul['watchers'] = %w(postgresql)
@ -1235,7 +1230,6 @@ in the second step, do not supply the `EXTERNAL_URL` value.
# PostgreSQL configuration
postgresql['listen_address'] = '0.0.0.0'
postgresql['max_connections'] = 500
# Prevent database migrations from running on upgrade automatically
gitlab_rails['auto_migrate'] = false

View File

@ -876,9 +876,6 @@ in the second step, do not supply the `EXTERNAL_URL` value.
# available database connections.
patroni['postgresql']['max_wal_senders'] = 7
# Incoming recommended value for max connections is 500. See https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5691.
patroni['postgresql']['max_connections'] = 500
# Prevent database migrations from running on upgrade automatically
gitlab_rails['auto_migrate'] = false
@ -1006,8 +1003,6 @@ The following IPs are used as an example:
password: '<pgbouncer_password_hash>'
}
}
# Incoming recommended value for max db connections is 150. See https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5691.
pgbouncer['max_db_connections'] = 150
# Configure Consul agent
consul['watchers'] = %w(postgresql)
@ -1162,7 +1157,6 @@ in the second step, do not supply the `EXTERNAL_URL` value.
# PostgreSQL configuration
postgresql['listen_address'] = '0.0.0.0'
postgresql['max_connections'] = 500
# Prevent database migrations from running on upgrade automatically
gitlab_rails['auto_migrate'] = false

View File

@ -67,7 +67,11 @@ Incoming emails still raise issues, but the users who sent the emails to [Servic
### Webhooks
[Project and group webhooks](../../user/project/integrations/webhooks.md), and [system hooks](../system_hooks.md) are suppressed. The relevant Sidekiq jobs fail 4 times and then disappear, while Silent Mode is enabled. [Issue 393639](https://gitlab.com/gitlab-org/gitlab/-/issues/393639) discusses preventing the Sidekiq jobs from running in the first place.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/393639) in GitLab 16.3.
[Project and group webhooks](../../user/project/integrations/webhooks.md) and [system hooks](../system_hooks.md) are suppressed.
In GitLab 16.2 and earlier, webhooks were triggered when Silent Mode was enabled, but the [webhook HTTP request was blocked](#outbound-http-requests).
Triggering webhook tests via the UI results in HTTP status 500 responses.

View File

@ -84,6 +84,8 @@ Example response:
"terms": "Hello world!",
"performance_bar_allowed_group_id": 42,
"user_show_add_ssh_key_message": true,
"allow_account_deletion": true,
"updating_name_disabled_for_users": false,
"local_markdown_version": 0,
"allow_local_requests_from_hooks_and_services": true,
"allow_local_requests_from_web_hooks_and_services": true,
@ -228,6 +230,7 @@ Example response:
"asset_proxy_enabled": true,
"asset_proxy_url": "https://assets.example.com",
"asset_proxy_allowlist": ["example.com", "*.example.com", "your-instance.com"],
"globally_allowed_ips": "",
"geo_node_allowed_ips": "0.0.0.0/0, ::/0",
"allow_local_requests_from_hooks_and_services": true,
"allow_local_requests_from_web_hooks_and_services": true,
@ -299,7 +302,10 @@ listed in the descriptions of the relevant settings.
| `admin_mode` | boolean | no | Require administrators to enable Admin Mode by re-authenticating for administrative tasks. |
| `admin_notification_email` | string | no | Deprecated: Use `abuse_notification_email` instead. If set, [abuse reports](../administration/review_abuse_reports.md) are sent to this address. Abuse reports are always available in the Admin Area. |
| `abuse_notification_email` | string | no | If set, [abuse reports](../administration/review_abuse_reports.md) are sent to this address. Abuse reports are always available in the Admin Area. |
| `notify_on_unknown_sign_in` | boolean | no | Enable sending notification if sign in from unknown IP address happens. |
| `after_sign_out_path` | string | no | Where to redirect users after logout. |
| `email_restrictions_enabled` | boolean | no | Enable restriction for sign-up by email. |
| `email_restrictions` | string | required by: `email_restrictions_enabled` | Regular expression that is checked against the email used during registration. |
| `after_sign_up_text` | string | no | Text shown to the user after signing up. |
| `akismet_api_key` | string | required by: `akismet_enabled` | API key for Akismet spam protection. |
| `akismet_enabled` | boolean | no | (**If enabled, requires:** `akismet_api_key`) Enable or disable Akismet spam protection. |
@ -331,6 +337,8 @@ listed in the descriptions of the relevant settings.
| `container_registry_expiration_policies_worker_capacity` | integer | no | Number of workers for [cleanup policies](../user/packages/container_registry/reduce_container_registry_storage.md#set-cleanup-limits-to-conserve-resources). |
| `container_registry_token_expire_delay` | integer | no | Container Registry token duration in minutes. |
| `package_registry_cleanup_policies_worker_capacity` | integer | no | Number of workers assigned to the packages cleanup policies. |
| `updating_name_disabled_for_users` | boolean | no | [Disable user profile name changes](../administration/settings/account_and_limit_settings.md#disable-user-profile-name-changes). |
| `allow_account_deletion` | boolean | no | Enable [users to delete their accounts](../administration/settings/account_and_limit_settings.md#prevent-users-from-deleting-their-accounts). |
| `deactivate_dormant_users` | boolean | no | Enable [automatic deactivation of dormant users](../administration/moderate_users.md#automatically-deactivate-dormant-users). |
| `deactivate_dormant_users_period` | integer | no | Length of time (in days) after which a user is considered dormant. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/336747) in GitLab 15.3. |
| `default_artifacts_expire_in` | string | no | Set the default expiration time for each job's artifacts. |
@ -392,6 +400,7 @@ listed in the descriptions of the relevant settings.
| `email_additional_text` **(PREMIUM)** | string | no | Additional text added to the bottom of every email for legal/auditing/compliance reasons. |
| `email_author_in_body` | boolean | no | Some email servers do not support overriding the email sender name. Enable this option to include the name of the author of the issue, merge request or comment in the email body instead. |
| `email_confirmation_setting` | string | no | Specifies whether users must confirm their email before sign in. Possible values are `off`, `soft`, and `hard`. |
| `custom_http_clone_url_root` | string | no | Set a custom Git clone URL for HTTP(S). |
| `enabled_git_access_protocol` | string | no | Enabled protocols for Git access. Allowed values are: `ssh`, `http`, and `nil` to allow both protocols. |
| `enforce_namespace_storage_limit` | boolean | no | Enabling this permits enforcement of namespace storage limits. |
| `enforce_terms` | boolean | no | (**If enabled, requires:** `terms`) Enforce application ToS to all users. |
@ -405,8 +414,11 @@ listed in the descriptions of the relevant settings.
| `external_pipeline_validation_service_url` | string | no | URL to use for pipeline validation requests. |
| `external_pipeline_validation_service_token` | string | no | Optional. Token to include as the `X-Gitlab-Token` header in requests to the URL in `external_pipeline_validation_service_url`. |
| `external_pipeline_validation_service_timeout` | integer | no | How long to wait for a response from the pipeline validation service. Assumes `OK` if it times out. |
| `static_objects_external_storage_url` | string | no | URL to an external storage for repository static objects. |
| `static_objects_external_storage_auth_token` | string | required by: `static_objects_external_storage_url` | Authentication token for the external storage linked in `static_objects_external_storage_url`. |
| `file_template_project_id` **(PREMIUM)** | integer | no | The ID of a project to load custom file templates from. |
| `first_day_of_week` | integer | no | Start day of the week for calendar views and date pickers. Valid values are `0` (default) for Sunday, `1` for Monday, and `6` for Saturday. |
| `globally_allowed_ips` | string | no | Comma-separated list of IP addresses and CIDRs always allowed for inbound traffic. For example, `1.1.1.1, 2.2.2.0/24`. |
| `geo_node_allowed_ips` **(PREMIUM)** | string | yes | Comma-separated list of IPs and CIDRs of allowed secondary nodes. For example, `1.1.1.1, 2.2.2.0/24`. |
| `geo_status_timeout` **(PREMIUM)** | integer | no | The amount of seconds after which a request to get a secondary node status times out. |
| `git_two_factor_session_expiry` **(PREMIUM)** | integer | no | Maximum duration (in minutes) of a session for Git operations when 2FA is enabled. |
@ -446,7 +458,7 @@ listed in the descriptions of the relevant settings.
| `max_attachment_size` | integer | no | Limit attachment size in MB. |
| `max_export_size` | integer | no | Maximum export size in MB. 0 for unlimited. Default = 0 (unlimited). |
| `max_import_size` | integer | no | Maximum import size in MB. 0 for unlimited. Default = 0 (unlimited). [Changed](https://gitlab.com/gitlab-org/gitlab/-/issues/251106) from 50 MB to 0 in GitLab 13.8. |
| `max_import_size` | integer | no | Maximum remote file size for imports from external object storages. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/384976) in GitLab 16.3. |
| `max_import_remote_file_size` | integer | no | Maximum remote file size for imports from external object storages. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/384976) in GitLab 16.3. |
| `max_pages_size` | integer | no | Maximum size of pages repositories in MB. |
| `max_personal_access_token_lifetime` **(ULTIMATE SELF)** | integer | no | Maximum allowable lifetime for access tokens in days. When left blank, default value of 365 is applied. When set, value must be 365 or less. When changed, existing access tokens with an expiration date beyond the maximum allowable lifetime are revoked.|
| `max_ssh_key_lifetime` **(ULTIMATE SELF)** | integer | no | Maximum allowable lifetime for SSH keys in days. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/1007) in GitLab 14.6. |
@ -471,6 +483,7 @@ listed in the descriptions of the relevant settings.
| `pages_domain_verification_enabled` | boolean | no | Require users to prove ownership of custom domains. Domain verification is an essential security measure for public GitLab sites. Users are required to demonstrate they control a domain before it is enabled. |
| `password_authentication_enabled_for_git` | boolean | no | Enable authentication for Git over HTTP(S) via a GitLab account password. Default is `true`. |
| `password_authentication_enabled_for_web` | boolean | no | Enable authentication for the web interface via a GitLab account password. Default is `true`. |
| `minimum_password_length` **(PREMIUM)** | integer | no | Indicates whether passwords require a minimum length. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/85763) in GitLab 15.1. |
| `password_number_required` **(PREMIUM)** | boolean | no | Indicates whether passwords require at least one number. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/85763) in GitLab 15.1. |
| `password_symbol_required` **(PREMIUM)** | boolean | no | Indicates whether passwords require at least one symbol character. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/85763) in GitLab 15.1. |
| `password_uppercase_required` **(PREMIUM)** | boolean | no | Indicates whether passwords require at least one uppercase letter. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/85763) in GitLab 15.1. |
@ -480,6 +493,11 @@ listed in the descriptions of the relevant settings.
| `performance_bar_enabled` | boolean | no | (Deprecated: Pass `performance_bar_allowed_group_path: nil` instead) Allow enabling the performance bar. |
| `personal_access_token_prefix` | string | no | Prefix for all generated personal access tokens. |
| `pipeline_limit_per_project_user_sha` | integer | no | Maximum number of pipeline creation requests per minute per user and commit. Disabled by default. |
| `gitpod_enabled` | boolean | no | (**If enabled, requires:** `gitpod_url`) Enable [Gitpod integration](../integration/gitpod.md). Default is `false`. |
| `gitpod_url` | boolean | required by: `gitpod_enabled` | The Gitpod instance URL for integration. |
| `kroki_enabled` | boolean | no | (**If enabled, requires:** `kroki_url`) Enable [Kroki integration](../administration/integration/kroki.md). Default is `false`. |
| `kroki_url` | boolean | required by: `kroki_enabled` | The Kroki instance URL for integration. |
| `kroki_formats` | object | no | Additional formats supported by the Kroki instance. Possible values are: <code>bpmn: (true|false)</code>, <code>blockdiag: (true|false)</code> and <code>excalidraw: (true|false)</code> |
| `plantuml_enabled` | boolean | no | (**If enabled, requires:** `plantuml_url`) Enable [PlantUML integration](../administration/integration/plantuml.md). Default is `false`. |
| `plantuml_url` | string | required by: `plantuml_enabled` | The PlantUML instance URL for integration. |
| `polling_interval_multiplier` | decimal | no | Interval multiplier used by endpoints that perform polling. Set to `0` to disable polling. |
@ -487,6 +505,9 @@ listed in the descriptions of the relevant settings.
| `projects_api_rate_limit_unauthenticated` | integer | no | [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/112283) in GitLab 15.10. Max number of requests per 10 minutes per IP address for unauthenticated requests to the [list all projects API](projects.md#list-all-projects). Default: 400. To disable throttling set to 0.|
| `prometheus_metrics_enabled` | boolean | no | Enable Prometheus metrics. |
| `protected_ci_variables` | boolean | no | CI/CD variables are protected by default. |
| `disable_overriding_approvers_per_merge_request` | boolean | no | Prevent editing approval rules in projects and merge requests |
| `prevent_merge_requests_author_approval` | boolean | no | Prevent approval by author |
| `prevent_merge_requests_committers_approval` | boolean | no | Prevent editing approval rules in projects and merge requests |
| `push_event_activities_limit` | integer | no | Maximum number of changes (branches or tags) in a single push above which a [bulk push event is created](../administration/settings/push_event_activities_limit.md). Setting to `0` does not disable throttling. |
| `push_event_hooks_limit` | integer | no | Maximum number of changes (branches or tags) in a single push above which webhooks and integrations are not triggered. Setting to `0` does not disable throttling. |
| `rate_limiting_response_text` | string | no | When rate limiting is enabled via the `throttle_*` settings, send this plain text response when a rate limit is exceeded. 'Retry later' is sent if this is blank. |
@ -495,6 +516,7 @@ listed in the descriptions of the relevant settings.
| `search_rate_limit` | integer | no | Max number of requests per minute for performing a search while authenticated. Default: 30. To disable throttling set to 0.|
| `search_rate_limit_unauthenticated` | integer | no | Max number of requests per minute for performing a search while unauthenticated. Default: 10. To disable throttling set to 0.|
| `recaptcha_enabled` | boolean | no | (**If enabled, requires:** `recaptcha_private_key` and `recaptcha_site_key`) Enable reCAPTCHA. |
| `login_recaptcha_protection_enabled` | boolean | no | Enable reCAPTCHA for login. |
| `recaptcha_private_key` | string | required by: `recaptcha_enabled` | Private key for reCAPTCHA. |
| `recaptcha_site_key` | string | required by: `recaptcha_enabled` | Site key for reCAPTCHA. |
| `receive_max_input_size` | integer | no | Maximum push size (MB). |
@ -511,6 +533,9 @@ listed in the descriptions of the relevant settings.
| `shared_runners_enabled` | boolean | no | (**If enabled, requires:** `shared_runners_text` and `shared_runners_minutes`) Enable shared runners for new projects. |
| `shared_runners_minutes` **(PREMIUM)** | integer | required by: `shared_runners_enabled` | Set the maximum number of compute minutes that a group can use on shared runners per month. |
| `shared_runners_text` | string | required by: `shared_runners_enabled` | Shared runners text. |
| `runner_token_expiration_interval` | integer | no | Set the expiration time (in seconds) of authentication tokens of newly registered instance runners. Minimum value is 7200 seconds. For more information, see [Automatically rotate authentication tokens](../ci/runners/configure_runners.md#automatically-rotate-authentication-tokens). |
| `group_runner_token_expiration_interval` | integer | no | Set the expiration time (in seconds) of authentication tokens of newly registered group runners. Minimum value is 7200 seconds. For more information, see [Automatically rotate authentication tokens](../ci/runners/configure_runners.md#automatically-rotate-authentication-tokens). |
| `project_runner_token_expiration_interval` | integer | no | Set the expiration time (in seconds) of authentication tokens of newly registered project runners. Minimum value is 7200 seconds. For more information, see [Automatically rotate authentication tokens](../ci/runners/configure_runners.md#automatically-rotate-authentication-tokens). |
| `sidekiq_job_limiter_mode` | string | no | `track` or `compress`. Sets the behavior for [Sidekiq job size limits](../administration/settings/sidekiq_job_limits.md). Default: 'compress'. |
| `sidekiq_job_limiter_compression_threshold_bytes` | integer | no | The threshold in bytes at which Sidekiq jobs are compressed before being stored in Redis. Default: 100,000 bytes (100 KB). |
| `sidekiq_job_limiter_limit_bytes` | integer | no | The threshold in bytes at which Sidekiq jobs are rejected. Default: 0 bytes (doesn't reject any job). |

View File

@ -152,7 +152,7 @@ For example, with a GitLab SaaS Premium license:
If you use `13,000` compute minutes during the month, the next month your additional compute minutes become
`2,000`. If you use `9,000` compute minutes during the month, your additional compute minutes remain the same.
If you bought additional compute minutes while on a trial subscription, those compute minutes are available after the trial ends or you upgrade to a paid plan.
Additional compute minutes bought on a trial subscription are available after the trial ends or after you upgrade to a paid plan.
You can find pricing for additional compute minutes on the
[GitLab Pricing page](https://about.gitlab.com/pricing/).

View File

@ -346,7 +346,7 @@ module Gitlab
module PauseControl
module Strategies
class CustomStrategy < Base
def enabled?
def should_pause?
ApplicationSetting.current.elasticsearch_pause_indexing?
end
end

View File

@ -378,4 +378,11 @@ If you're receiving multiple webhook requests, the webhook might have timed out.
GitLab expects a response in [10 seconds](../../../user/gitlab_com/index.md#other-limits). On self-managed GitLab instances, you can [change the webhook timeout limit](../../../administration/instance_limits.md#webhook-timeout).
If a webhook is not triggered, the webhook might be [automatically disabled](#failing-webhooks).
### Webhook is not triggered
> Webhooks not triggered in Silent Mode [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/393639) in GitLab 16.3.
If a webhook is not triggered, check that:
- The webhook was not [automatically disabled](#failing-webhooks).
- The GitLab instance is not in [Silent Mode](../../../administration/silent_mode/index.md).

View File

@ -3,6 +3,7 @@
require 'addressable'
require 'json'
require 'active_support/time'
require 'active_support/notifications'
require_relative "client/database"
require_relative "client/configuration"
require_relative "client/formatter"
@ -29,28 +30,41 @@ module ClickHouse
def self.select(query, database, configuration = self.configuration)
db = lookup_database(configuration, database)
response = configuration.http_post_proc.call(
db.uri.to_s,
db.headers,
"#{query} FORMAT JSON" # always return JSON
)
ActiveSupport::Notifications.instrument('sql.click_house', { query: query, database: database }) do |instrument|
response = configuration.http_post_proc.call(
db.uri.to_s,
db.headers,
"#{query} FORMAT JSON" # always return JSON
)
raise DatabaseError, response.body unless response.success?
raise DatabaseError, response.body unless response.success?
Formatter.format(configuration.json_parser.parse(response.body))
parsed_response = configuration.json_parser.parse(response.body)
instrument[:statistics] = parsed_response['statistics']&.symbolize_keys
Formatter.format(parsed_response)
end
end
# Executes any kinds of database query without returning any data (INSERT, DELETE)
def self.execute(query, database, configuration = self.configuration)
db = lookup_database(configuration, database)
response = configuration.http_post_proc.call(
db.uri.to_s,
db.headers,
query
)
ActiveSupport::Notifications.instrument('sql.click_house', { query: query, database: database }) do |instrument|
response = configuration.http_post_proc.call(
db.uri.to_s,
db.headers,
query
)
raise DatabaseError, response.body unless response.success?
raise DatabaseError, response.body unless response.success?
if response.headers['x-clickhouse-summary']
instrument[:statistics] =
Gitlab::Json.parse(response.headers['x-clickhouse-summary']).symbolize_keys
end
end
true
end

View File

@ -3,11 +3,12 @@
module ClickHouse
module Client
class Response
attr_reader :body
attr_reader :body, :headers
def initialize(body, http_status_code)
def initialize(body, http_status_code, headers = {})
@body = body
@http_status_code = http_status_code
@headers = headers
end
def success?

View File

@ -2,11 +2,13 @@
require 'csv'
require 'tempfile'
require 'zlib'
require_relative "csv_builder/version"
require_relative "csv_builder/builder"
require_relative "csv_builder/single_batch"
require_relative "csv_builder/stream"
require_relative "csv_builder/gzip"
# Generates CSV when given a collection and a mapping.
#

View File

@ -59,8 +59,10 @@ module CsvBuilder
@collection.each_batch(order_hint: :created_at) do |relation|
relation.preload(@associations_to_preload).order(:id).each(&block)
end
else
elsif @collection.respond_to?(:find_each)
@collection.find_each(&block)
else
@collection.each(&block)
end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true

module CsvBuilder
  # Builder variant that writes the rendered CSV through a gzip stream.
  class Gzip < CsvBuilder::Builder
    # Renders the collection as a gzip-compressed CSV file and yields the
    # tempfile holding the compressed bytes. The tempfile is cleaned up when
    # the block returns, so callers must consume it inside the block.
    #
    # Example:
    # > CsvBuilder::Gzip.new(Issue, { title: -> (row) { row.title.upcase }, id: :id }).render do |tempfile|
    # >   puts tempfile.path
    # >   puts `zcat #{tempfile.path}`
    # > end
    def render
      Tempfile.open(['csv_builder_gzip', '.csv.gz']) do |file|
        compressed_csv = CSV.new(Zlib::GzipWriter.open(file.path))
        # No-op until_condition: truncation must be handled outside of the CsvBuilder.
        write_csv(compressed_csv, until_condition: -> {})
        # Closing the CSV flushes and finalizes the underlying GzipWriter
        # before the caller reads the file.
        compressed_csv.close
        yield file
      end
    end
  end
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe CsvBuilder::Gzip do
  let(:event_1) { double(title: 'Added salt', description: 'A teaspoon') }
  let(:event_2) { double(title: 'Added sugar', description: 'Just a pinch') }
  let(:items) { [event_1, event_2] }

  subject(:builder) { described_class.new(items, 'Title' => 'title', 'Description' => 'description') }

  describe '#render' do
    # Fixed description typo: was "returns yields a tempfile".
    it 'yields a tempfile' do
      written_content = nil

      builder.render do |tempfile|
        # Output is gzip-compressed; decompress before asserting contents.
        reader = Zlib::GzipReader.new(tempfile)
        written_content = reader.read.split("\n")
      end

      expect(written_content).to eq(
        [
          "Title,Description",
          "Added salt,A teaspoon",
          "Added sugar,Just a pinch"
        ])
    end

    it 'requires a block' do
      # render unconditionally yields, so calling without a block raises.
      expect { builder.render }.to raise_error(LocalJumpError)
    end
  end
end

View File

@ -2,126 +2,141 @@
RSpec.describe CsvBuilder do
let(:object) { double(question: :answer) }
let(:fake_relation) { described_class::FakeRelation.new([object]) }
let(:csv_data) { subject.render }
let(:subject) do
described_class.new(
fake_relation, 'Q & A' => :question, 'Reversed' => ->(o) { o.question.to_s.reverse })
enumerable, 'Q & A' => :question, 'Reversed' => ->(o) { o.question.to_s.reverse })
end
before do
stub_const("#{described_class}::FakeRelation", Array)
shared_examples 'csv builder examples' do
let(:items) { [object] }
described_class::FakeRelation.class_eval do
def find_each(&block)
each(&block)
end
it "has a version number" do
expect(CsvBuilder::Version::VERSION).not_to be nil
end
end
it "has a version number" do
expect(CsvBuilder::Version::VERSION).not_to be nil
end
it 'generates a csv' do
expect(csv_data.scan(/(,|\n)/).join).to include ",\n,"
end
it 'generates a csv' do
expect(csv_data.scan(/(,|\n)/).join).to include ",\n,"
end
it 'uses a temporary file to reduce memory allocation' do
expect(CSV).to receive(:new).with(instance_of(Tempfile)).and_call_original
it 'uses a temporary file to reduce memory allocation' do
expect(CSV).to receive(:new).with(instance_of(Tempfile)).and_call_original
subject.render
end
subject.render
end
it 'counts the number of rows' do
subject.render
expect(subject.rows_written).to eq 1
end
describe 'rows_expected' do
it 'uses rows_written if CSV rendered successfully' do
it 'counts the number of rows' do
subject.render
expect(fake_relation).not_to receive(:count)
expect(subject.rows_expected).to eq 1
expect(subject.rows_written).to eq 1
end
it 'falls back to calling .count before rendering begins' do
expect(subject.rows_expected).to eq 1
end
end
describe 'rows_expected' do
it 'uses rows_written if CSV rendered successfully' do
subject.render
describe 'truncation' do
let(:big_object) { double(question: 'Long' * 1024) }
let(:row_size) { big_object.question.length * 2 }
let(:fake_relation) { described_class::FakeRelation.new([big_object, big_object, big_object]) }
expect(enumerable).not_to receive(:count)
expect(subject.rows_expected).to eq 1
end
it 'occurs after given number of bytes' do
expect(subject.render(row_size * 2).length).to be_between(row_size * 2, row_size * 3)
expect(subject).to be_truncated
expect(subject.rows_written).to eq 2
it 'falls back to calling .count before rendering begins' do
expect(subject.rows_expected).to eq 1
end
end
it 'is ignored by default' do
expect(subject.render.length).to be > row_size * 3
expect(subject.rows_written).to eq 3
describe 'truncation' do
let(:big_object) { double(question: 'Long' * 1024) }
let(:row_size) { big_object.question.length * 2 }
let(:items) { [big_object, big_object, big_object] }
it 'occurs after given number of bytes' do
expect(subject.render(row_size * 2).length).to be_between(row_size * 2, row_size * 3)
expect(subject).to be_truncated
expect(subject.rows_written).to eq 2
end
it 'is ignored by default' do
expect(subject.render.length).to be > row_size * 3
expect(subject.rows_written).to eq 3
end
it 'causes rows_expected to fall back to .count' do
subject.render(0)
expect(enumerable).to receive(:count).and_call_original
expect(subject.rows_expected).to eq 3
end
end
it 'causes rows_expected to fall back to .count' do
subject.render(0)
it 'avoids loading all data in a single query' do
expect(enumerable).to receive(:find_each)
expect(fake_relation).to receive(:count).and_call_original
expect(subject.rows_expected).to eq 3
end
end
it 'avoids loading all data in a single query' do
expect(fake_relation).to receive(:find_each)
subject.render
end
it 'uses hash keys as headers' do
expect(csv_data).to start_with 'Q & A'
end
it 'gets data by calling method provided as hash value' do
expect(csv_data).to include 'answer'
end
it 'allows lamdas to look up more complicated data' do
expect(csv_data).to include 'rewsna'
end
describe 'excel sanitization' do
let(:dangerous_title) { double(title: "=cmd|' /C calc'!A0 title", description: "*safe_desc") }
let(:dangerous_desc) { double(title: "*safe_title", description: "=cmd|' /C calc'!A0 desc") }
let(:fake_relation) { described_class::FakeRelation.new([dangerous_title, dangerous_desc]) }
let(:subject) { described_class.new(fake_relation, 'Title' => 'title', 'Description' => 'description') }
let(:csv_data) { subject.render }
it 'sanitizes dangerous characters at the beginning of a column' do
expect(csv_data).to include "'=cmd|' /C calc'!A0 title"
expect(csv_data).to include "'=cmd|' /C calc'!A0 desc"
subject.render
end
it 'does not sanitize safe symbols at the beginning of a column' do
expect(csv_data).not_to include "'*safe_desc"
expect(csv_data).not_to include "'*safe_title"
it 'uses hash keys as headers' do
expect(csv_data).to start_with 'Q & A'
end
context 'when dangerous characters are after a line break' do
it 'does not append single quote to description' do
fake_object = double(title: "Safe title", description: "With task list\n-[x] todo 1")
fake_relation = described_class::FakeRelation.new([fake_object])
builder = described_class.new(fake_relation, 'Title' => 'title', 'Description' => 'description')
it 'gets data by calling method provided as hash value' do
expect(csv_data).to include 'answer'
end
csv_data = builder.render
it 'allows lamdas to look up more complicated data' do
expect(csv_data).to include 'rewsna'
end
expect(csv_data).to eq("Title,Description\nSafe title,\"With task list\n-[x] todo 1\"\n")
describe 'excel sanitization' do
let(:dangerous_title) { double(title: "=cmd|' /C calc'!A0 title", description: "*safe_desc") }
let(:dangerous_desc) { double(title: "*safe_title", description: "=cmd|' /C calc'!A0 desc") }
let(:items) { [dangerous_title, dangerous_desc] }
let(:subject) { described_class.new(enumerable, 'Title' => 'title', 'Description' => 'description') }
let(:csv_data) { subject.render }
it 'sanitizes dangerous characters at the beginning of a column' do
expect(csv_data).to include "'=cmd|' /C calc'!A0 title"
expect(csv_data).to include "'=cmd|' /C calc'!A0 desc"
end
it 'does not sanitize safe symbols at the beginning of a column' do
expect(csv_data).not_to include "'*safe_desc"
expect(csv_data).not_to include "'*safe_title"
end
context 'when dangerous characters are after a line break' do
let(:items) { [double(title: "Safe title", description: "With task list\n-[x] todo 1")] }
it 'does not append single quote to description' do
builder = described_class.new(enumerable, 'Title' => 'title', 'Description' => 'description')
csv_data = builder.render
expect(csv_data).to eq("Title,Description\nSafe title,\"With task list\n-[x] todo 1\"\n")
end
end
end
end
context 'when ActiveRecord::Relation like object is given' do
let(:enumerable) { described_class::FakeRelation.new(items) }
before do
stub_const("#{described_class}::FakeRelation", Array)
described_class::FakeRelation.class_eval do
def find_each(&block)
each(&block)
end
end
end
it_behaves_like 'csv builder examples'
end
context 'when Enumerable like object is given' do
let(:enumerable) { items }
it_behaves_like 'csv builder examples'
end
end

View File

@ -40,6 +40,7 @@ GEM
parser (3.2.2.3)
ast (~> 2.4.1)
racc
pg (1.5.3)
pg_query (4.2.1)
google-protobuf (>= 3.22.3)
proc_to_ast (0.1.0)
@ -126,6 +127,7 @@ PLATFORMS
DEPENDENCIES
gitlab-schema-validation!
gitlab-styles (~> 10.1.0)
pg (~> 1.5.3)
pry
rspec (~> 3.0)
rspec-benchmark (~> 0.6.0)

View File

@ -23,6 +23,7 @@ Gem::Specification.new do |spec|
spec.add_runtime_dependency "pg_query"
spec.add_development_dependency "gitlab-styles", "~> 10.1.0"
spec.add_development_dependency "pg", "~> 1.5.3"
spec.add_development_dependency "pry"
spec.add_development_dependency "rspec", "~> 3.0"
spec.add_development_dependency "rspec-benchmark", "~> 0.6.0"

View File

@ -28,8 +28,12 @@ require_relative 'validation/validators/different_definition_triggers'
require_relative 'validation/validators/extra_triggers'
require_relative 'validation/validators/missing_triggers'
require_relative 'validation/sources/connection_adapters/base'
require_relative 'validation/sources/connection_adapters/active_record_adapter'
require_relative 'validation/sources/connection_adapters/pg_adapter'
require_relative 'validation/sources/structure_sql'
require_relative 'validation/sources/database'
require_relative 'validation/sources/connection'
require_relative 'validation/schema_objects/base'
require_relative 'validation/schema_objects/column'

View File

@ -0,0 +1,57 @@
# frozen_string_literal: true

module Gitlab
  module Schema
    module Validation
      module Sources
        # Raised when a connection object's class has no registered adapter.
        class AdapterNotSupportedError < StandardError
          def initialize(adapter)
            @adapter = adapter
          end

          def message
            "#{adapter} is not supported"
          end

          private

          attr_reader :adapter
        end

        # Wraps a raw database connection and delegates schema queries to the
        # adapter registered for the connection's class.
        class Connection
          # Maps connection class names to their adapter implementations.
          CONNECTION_ADAPTERS = {
            'Gitlab::Database::LoadBalancing::ConnectionProxy' => ConnectionAdapters::ActiveRecordAdapter,
            'ActiveRecord::ConnectionAdapters::PostgreSQLAdapter' => ConnectionAdapters::ActiveRecordAdapter,
            'PG::Connection' => ConnectionAdapters::PgAdapter
          }.freeze

          def initialize(connection)
            @connection_adapter = fetch_adapter(connection)
          end

          def current_schema
            connection_adapter.current_schema
          end

          def select_rows(sql, schemas = [])
            connection_adapter.select_rows(sql, schemas)
          end

          def exec_query(sql, schemas = [])
            connection_adapter.exec_query(sql, schemas)
          end

          private

          attr_reader :connection_adapter

          # Looks up the adapter class by the connection's class name and
          # instantiates it; raises AdapterNotSupportedError for unknown types.
          def fetch_adapter(connection)
            CONNECTION_ADAPTERS.fetch(connection.class.name).new(connection)
          rescue KeyError => e
            raise AdapterNotSupportedError, e.key
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true

module Gitlab
  module Schema
    module Validation
      module Sources
        module ConnectionAdapters
          # Adapter for ActiveRecord-backed connections (PostgreSQLAdapter and
          # GitLab's load-balancing ConnectionProxy).
          class ActiveRecordAdapter < Base
            extend Forwardable

            # current_schema is answered directly by the wrapped connection.
            def_delegators :@connection, :current_schema

            # Executes +sql+ with +schemas+ as bind values. The nil argument is
            # the query name slot in ActiveRecord's exec_query signature.
            def exec_query(sql, schemas)
              connection.exec_query(sql, nil, schemas)
            end

            # Executes +sql+ with +schemas+ as bind values and returns raw rows.
            def select_rows(sql, schemas)
              connection.select_rows(sql, nil, schemas)
            end
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true

module Gitlab
  module Schema
    module Validation
      module Sources
        module ConnectionAdapters
          # Abstract base class for connection adapters. Subclasses must
          # implement current_schema, select_rows and exec_query.
          class Base
            def initialize(connection)
              @connection = connection
            end

            # Returns the connection's current schema name.
            def current_schema
              raise NotImplementedError, "#{self.class} does not implement #{__method__}"
            end

            # Returns result rows for +sql+, optionally bound to +schemas+.
            def select_rows(sql, schemas = [])
              raise NotImplementedError, "#{self.class} does not implement #{__method__}"
            end

            # Returns the full query result for +sql+, optionally bound to +schemas+.
            def exec_query(sql, schemas = [])
              raise NotImplementedError, "#{self.class} does not implement #{__method__}"
            end

            private

            attr_reader :connection
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true

module Gitlab
  module Schema
    module Validation
      module Sources
        module ConnectionAdapters
          # Adapter for raw PG::Connection objects (no ActiveRecord layer).
          class PgAdapter < Base
            def initialize(connection)
              @connection = connection
              # Map PG result values to native Ruby types instead of strings.
              @connection.type_map_for_results = PG::BasicTypeMapForResults.new(connection)
            end

            def current_schema
              connection.exec('SELECT current_schema').first['current_schema']
            end

            # +schemas+ are positional bind parameters ($1, $2, ...) for +sql+.
            # NOTE(review): PG::Connection#exec with a params argument is the
            # deprecated form of #exec_params — verify against the pg gem version in use.
            def exec_query(sql, schemas)
              connection.exec(sql, schemas)
            end

            def select_rows(sql, schemas)
              exec_query(sql, schemas).values
            end
          end
        end
      end
    end
  end
end

View File

@ -8,7 +8,7 @@ module Gitlab
STATIC_PARTITIONS_SCHEMA = 'gitlab_partitions_static'
def initialize(connection)
@connection = connection
@connection = Connection.new(connection)
end
def fetch_index_by_name(index_name)
@ -102,7 +102,7 @@ module Gitlab
SQL
# rubocop:enable Rails/SquishedSQLHeredocs
connection.select_rows(sql, nil, schemas).to_h
connection.select_rows(sql, schemas).to_h
end
def table_map
@ -136,7 +136,7 @@ module Gitlab
SQL
# rubocop:enable Rails/SquishedSQLHeredocs
connection.exec_query(sql, nil, schemas).group_by { |row| row['table_name'] }
connection.exec_query(sql, schemas).group_by { |row| row['table_name'] }
end
def fetch_indexes
@ -148,7 +148,7 @@ module Gitlab
SQL
# rubocop:enable Rails/SquishedSQLHeredocs
connection.select_rows(sql, nil, schemas).to_h
connection.select_rows(sql, schemas).to_h
end
def index_map
@ -183,7 +183,7 @@ module Gitlab
SQL
# rubocop:enable Rails/SquishedSQLHeredocs
connection.exec_query(sql, nil, [connection.current_schema])
connection.exec_query(sql, [connection.current_schema])
end
end
end

View File

@ -0,0 +1,57 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Schema::Validation::Sources::Connection, feature_category: :database do
  let(:sql) { 'SELECT column_one, column_two FROM my_table WHERE schema_name IN ($1);' }
  let(:schemas) { ['public'] }
  let(:query_result) do
    [
      { 'name' => 'Person one', 'email' => 'person.one@gitlab.com' },
      { 'name' => 'Person two', 'email' => 'person.two@gitlab.com' }
    ]
  end

  # select_rows is expected to return the same data as exec_query, as bare arrays.
  let(:rows) { query_result.map(&:values) }

  context 'when using active record for postgres adapter' do
    let(:schema) { 'public' }
    let(:connection_class_name) { 'ActiveRecord::ConnectionAdapters::PostgreSQLAdapter' }
    let(:adapter_class) { Gitlab::Schema::Validation::Sources::ConnectionAdapters::ActiveRecordAdapter }

    it_behaves_like 'connection adapter'
  end

  context 'when using gitlab active record adapter' do
    let(:schema) { 'gitlab_main' }
    let(:connection_class_name) { 'Gitlab::Database::LoadBalancing::ConnectionProxy' }
    let(:adapter_class) { Gitlab::Schema::Validation::Sources::ConnectionAdapters::ActiveRecordAdapter }

    it_behaves_like 'connection adapter'
  end

  context 'when using postgres adapter' do
    let(:schema) { 'public' }
    let(:connection_class_name) { 'PG::Connection' }
    let(:adapter_class) { Gitlab::Schema::Validation::Sources::ConnectionAdapters::PgAdapter }

    before do
      # PgAdapter's constructor issues a query and assigns a type map; stub both.
      allow(connection_object).to receive(:exec)
      allow(connection_object).to receive(:type_map_for_results=)
    end

    it_behaves_like 'connection adapter'
  end

  context 'when using an unsupported connection adapter' do
    subject(:connection) { described_class.new(connection_object) }

    let(:connection_class_name) { 'ActiveRecord::ConnectionAdapters::InvalidAdapter' }
    let(:connection_class) { class_double(Class, name: connection_class_name) }
    let(:connection_object) { instance_double(connection_class_name, class: connection_class) }
    let(:error_class) { Gitlab::Schema::Validation::Sources::AdapterNotSupportedError }
    let(:error_message) { 'ActiveRecord::ConnectionAdapters::InvalidAdapter is not supported' }

    # The adapter lookup happens in the constructor, so instantiation raises.
    it { expect { connection }.to raise_error(error_class, error_message) }
  end
end

View File

@ -2,6 +2,7 @@
require "gitlab/schema/validation"
require 'rspec-parameterized'
require 'pg'
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure

View File

@ -0,0 +1,36 @@
# frozen_string_literal: true

require 'spec_helper'

# Shared examples for Sources::Connection wrapped around any supported adapter.
# Including specs must define: connection_class_name, schema, sql, schemas,
# query_result and rows.
RSpec.shared_examples 'connection adapter' do
  subject(:connection) { described_class.new(connection_object) }

  let(:connection_class) { class_double(Class, name: connection_class_name) }
  let(:connection_object) { instance_double(connection_class_name, class: connection_class) }

  # A double of the adapter that CONNECTION_ADAPTERS maps this class name to.
  let(:adapter) do
    instance_double(
      described_class::CONNECTION_ADAPTERS[connection_class_name],
      current_schema: schema,
      exec_query: query_result,
      select_rows: rows
    )
  end

  before do
    # Bypass the real adapter instantiation; these examples only verify delegation.
    allow(connection).to receive(:connection_adapter).and_return(adapter)
  end

  context 'when using a valid connection adapter' do
    describe '#current_schema' do
      it { expect(connection.current_schema).to eq(schema) }
    end

    describe '#select_rows' do
      it { expect(connection.select_rows(sql, schemas)).to eq(rows) }
    end

    describe '#exec_query' do
      it { expect(connection.exec_query(sql, schemas)).to eq(query_result) }
    end
  end
end

View File

@ -10,8 +10,11 @@ RSpec.shared_examples 'foreign key validators' do |validator, expected_result|
let(:inconsistency_type) { validator.to_s }
let(:database_name) { 'main' }
let(:schema) { 'public' }
let(:connection_class) { class_double(Class, name: 'ActiveRecord::ConnectionAdapters::PostgreSQLAdapter') }
# rubocop:disable RSpec/VerifiedDoubleReference
let(:connection) { instance_double('connection', exec_query: database_query, current_schema: 'public') }
let(:connection) do
instance_double('connection', class: connection_class, exec_query: database_query, current_schema: 'public')
end
# rubocop:enable RSpec/VerifiedDoubleReference
let(:database) { Gitlab::Schema::Validation::Sources::Database.new(connection) }

View File

@ -13,9 +13,12 @@ RSpec.shared_examples 'index validators' do |validator, expected_result|
end
let(:inconsistency_type) { validator.name }
let(:connection_class) { class_double(Class, name: 'ActiveRecord::ConnectionAdapters::PostgreSQLAdapter') }
# rubocop:disable RSpec/VerifiedDoubleReference
let(:connection) { instance_double('connection', select_rows: database_indexes, current_schema: 'public') }
let(:connection) do
instance_double('connection', class: connection_class, select_rows: database_indexes, current_schema: 'public')
end
# rubocop:enable RSpec/VerifiedDoubleReference
let(:schema) { 'public' }

View File

@ -7,9 +7,13 @@ RSpec.shared_examples "table validators" do |validator, expected_result|
let(:structure_file_path) { 'spec/fixtures/structure.sql' }
let(:inconsistency_type) { validator.to_s }
let(:connection_class) { class_double(Class, name: 'ActiveRecord::ConnectionAdapters::PostgreSQLAdapter') }
# rubocop:disable RSpec/VerifiedDoubleReference
let(:connection) { instance_double('connection', exec_query: database_tables, current_schema: 'public') }
let(:connection) do
instance_double('connection', class: connection_class, exec_query: database_tables, current_schema: 'public')
end
# rubocop:enable RSpec/VerifiedDoubleReference
let(:schema) { 'public' }
let(:database) { Gitlab::Schema::Validation::Sources::Database.new(connection) }
let(:structure_file) { Gitlab::Schema::Validation::Sources::StructureSql.new(structure_file_path, schema) }

View File

@ -11,9 +11,12 @@ RSpec.shared_examples 'trigger validators' do |validator, expected_result|
let(:database_name) { 'main' }
let(:schema) { 'public' }
let(:database) { Gitlab::Schema::Validation::Sources::Database.new(connection) }
let(:connection_class) { class_double(Class, name: 'ActiveRecord::ConnectionAdapters::PostgreSQLAdapter') }
# rubocop:disable RSpec/VerifiedDoubleReference
let(:connection) { instance_double('connection', select_rows: database_triggers, current_schema: 'public') }
let(:connection) do
instance_double('connection', class: connection_class, select_rows: database_triggers, current_schema: 'public')
end
# rubocop:enable RSpec/VerifiedDoubleReference
let(:database_triggers) do

View File

@ -201,7 +201,7 @@ module API
optional :jira_connect_application_key, type: String, desc: "Application ID of the OAuth application that should be used to authenticate with the GitLab for Jira Cloud app"
optional :jira_connect_proxy_url, type: String, desc: "URL of the GitLab instance that should be used as a proxy for the GitLab for Jira Cloud app"
optional :bulk_import_enabled, type: Boolean, desc: 'Enable migrating GitLab groups and projects by direct transfer'
optional :bulk_import_max_download_file, type: Integer, desc: 'Maximum download file size when importing from source GitLab instances by direct transfer'
optional :bulk_import_max_download_file, type: Integer, desc: 'Maximum download file size in MB when importing from source GitLab instances by direct transfer'
optional :allow_runner_registration_token, type: Boolean, desc: 'Allow registering runners using a registration token'
optional :ci_max_includes, type: Integer, desc: 'Maximum number of includes per pipeline'
optional :security_policy_global_group_approvers_enabled, type: Boolean, desc: 'Query scan result policy approval groups globally'

View File

@ -1,5 +1,5 @@
variables:
DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.51.1'
DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.52.0'
.dast-auto-deploy:
image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${DAST_AUTO_DEPLOY_IMAGE_VERSION}"

View File

@ -1,5 +1,5 @@
variables:
AUTO_DEPLOY_IMAGE_VERSION: 'v2.51.1'
AUTO_DEPLOY_IMAGE_VERSION: 'v2.52.0'
.auto-deploy:
image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}"

View File

@ -1,5 +1,5 @@
variables:
AUTO_DEPLOY_IMAGE_VERSION: 'v2.51.1'
AUTO_DEPLOY_IMAGE_VERSION: 'v2.52.0'
.auto-deploy:
image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}"

View File

@ -0,0 +1,34 @@
# frozen_string_literal: true

module Gitlab
  module Usage
    module Metrics
      module Instrumentations
        # Usage metric: counts failed batched background migration jobs,
        # grouped per (table_name, job_class_name) pair.
        class BatchedBackgroundMigrationFailedJobsMetric < DatabaseMetric
          relation do
            Gitlab::Database::BackgroundMigration::BatchedMigration
              .joins(:batched_jobs)
              # NOTE(review): '2' presumably maps to the "failed" job status
              # enum value — confirm against the BatchedJob model.
              .where(batched_jobs: { status: '2' })
              .group(%w[table_name job_class_name])
              .order(%w[table_name job_class_name])
              .select(['table_name', 'job_class_name', 'COUNT(batched_jobs) AS number_of_failed_jobs'])
          end

          timestamp_column(:created_at)

          operation :count

          # Returns an array of hashes, one per (table, job class) group, each
          # carrying the aggregated number_of_failed_jobs from the SELECT above.
          def value
            relation.map do |batched_migration|
              {
                job_class_name: batched_migration.job_class_name,
                table_name: batched_migration.table_name,
                number_of_failed_jobs: batched_migration.number_of_failed_jobs
              }
            end
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,50 @@
# frozen_string_literal: true

module Peek
  module Views
    # Performance-bar panel for ClickHouse queries, fed by the
    # 'sql.click_house' ActiveSupport notification.
    class ClickHouse < DetailedView
      DEFAULT_THRESHOLDS = {
        calls: 5,
        duration: 1000,
        individual_call: 1000
      }.freeze

      # Currently identical to DEFAULT_THRESHOLDS; kept as a separate constant
      # so per-environment values can diverge later.
      THRESHOLDS = {
        production: {
          calls: 5,
          duration: 1000,
          individual_call: 1000
        }
      }.freeze

      def key
        'ch'
      end

      def self.thresholds
        @thresholds ||= THRESHOLDS.fetch(Rails.env.to_sym, DEFAULT_THRESHOLDS)
      end

      private

      def setup_subscribers
        super

        # Record a detail row per ClickHouse query, but only when the
        # performance bar is enabled for the current request.
        subscribe('sql.click_house') do |_, start, finish, _, data|
          detail_store << generate_detail(start, finish, data) if Gitlab::PerformanceBar.enabled_for_request?
        end
      end

      # Builds one performance-bar detail row from a notification payload.
      def generate_detail(start, finish, data)
        {
          start: start,
          duration: finish - start,
          sql: data[:query].strip,
          backtrace: Gitlab::BacktraceCleaner.clean_backtrace(caller),
          database: "database: #{data[:database]}",
          statistics: "query stats: #{data[:statistics]}"
        }
      end
    end
  end
end

View File

@ -4942,9 +4942,6 @@ msgstr ""
msgid "An error occurred while checking group path. Please refresh and try again."
msgstr ""
msgid "An error occurred while creating the %{issuableType}. Please try again."
msgstr ""
msgid "An error occurred while creating the issue. Please try again."
msgstr ""
@ -7809,6 +7806,12 @@ msgstr ""
msgid "Blame for binary files is not supported."
msgstr ""
msgid "BlobViewer|If you delete the file, it will be removed from the branch %{branch}."
msgstr ""
msgid "BlobViewer|This file will still take up space in your LFS storage. %{linkStart}How do I remove tracked objects from Git LFS?%{linkEnd}"
msgstr ""
msgid "BlobViewer|View on %{environmentName}"
msgstr ""
@ -7972,6 +7975,9 @@ msgid_plural "Boards|+ %{displayedIssuablesCount} more %{issuableType}s"
msgstr[0] ""
msgstr[1] ""
msgid "Boards|An error occurred while creating the %{issuableType}. Please try again."
msgstr ""
msgid "Boards|An error occurred while creating the epic. Please try again."
msgstr ""
@ -7981,12 +7987,27 @@ msgstr ""
msgid "Boards|An error occurred while creating the list. Please try again."
msgstr ""
msgid "Boards|An error occurred while deleting the list. Please try again."
msgstr ""
msgid "Boards|An error occurred while fetching a list. Please try again."
msgstr ""
msgid "Boards|An error occurred while fetching board details. Please try again."
msgstr ""
msgid "Boards|An error occurred while fetching board. Please try again."
msgstr ""
msgid "Boards|An error occurred while fetching boards. Please try again."
msgstr ""
msgid "Boards|An error occurred while fetching child groups. Please try again."
msgstr ""
msgid "Boards|An error occurred while fetching epics. Please try again."
msgstr ""
msgid "Boards|An error occurred while fetching group projects. Please try again."
msgstr ""
@ -7996,9 +8017,27 @@ msgstr ""
msgid "Boards|An error occurred while fetching issues. Please reload the page."
msgstr ""
msgid "Boards|An error occurred while fetching issues. Please try again."
msgstr ""
msgid "Boards|An error occurred while fetching iterations. Please try again."
msgstr ""
msgid "Boards|An error occurred while fetching labels. Please reload the page."
msgstr ""
msgid "Boards|An error occurred while fetching labels. Please try again."
msgstr ""
msgid "Boards|An error occurred while fetching list's information. Please try again."
msgstr ""
msgid "Boards|An error occurred while fetching milestones. Please try again."
msgstr ""
msgid "Boards|An error occurred while fetching recent boards. Please try again."
msgstr ""
msgid "Boards|An error occurred while fetching the board epics. Please reload the page."
msgstr ""
@ -8014,21 +8053,39 @@ msgstr ""
msgid "Boards|An error occurred while fetching the board. Please reload the page."
msgstr ""
msgid "Boards|An error occurred while fetching unassigned issues. Please try again."
msgstr ""
msgid "Boards|An error occurred while fetching users. Please try again."
msgstr ""
msgid "Boards|An error occurred while generating lists. Please reload the page."
msgstr ""
msgid "Boards|An error occurred while moving the %{issuableType}. Please try again."
msgstr ""
msgid "Boards|An error occurred while moving the epic. Please try again."
msgstr ""
msgid "Boards|An error occurred while moving the issue. Please try again."
msgstr ""
msgid "Boards|An error occurred while moving the list. Please try again."
msgstr ""
msgid "Boards|An error occurred while removing the list. Please try again."
msgstr ""
msgid "Boards|An error occurred while selecting the card. Please try again."
msgstr ""
msgid "Boards|An error occurred while updating the board list. Please try again."
msgstr ""
msgid "Boards|An error occurred while updating the list. Please try again."
msgstr ""
msgid "Boards|Blocked by %{blockedByCount} %{issuableType}"
msgid_plural "Boards|Blocked by %{blockedByCount} %{issuableType}s"
msgstr[0] ""
@ -8076,9 +8133,6 @@ msgstr ""
msgid "Boards|View scope"
msgstr ""
msgid "Board|An error occurred while fetching the board, please try again."
msgstr ""
msgid "Board|Are you sure you want to delete this board?"
msgstr ""
@ -8649,9 +8703,6 @@ msgstr ""
msgid "BulkImport|Destination"
msgstr ""
msgid "BulkImport|Direct Transfer maximum download file size (MiB)"
msgstr ""
msgid "BulkImport|Direct transfer maximum download file size (MB)"
msgstr ""
@ -12842,6 +12893,9 @@ msgstr ""
msgid "Continue with overages"
msgstr ""
msgid "Continue…"
msgstr ""
msgid "Continuous Integration and Deployment"
msgstr ""
@ -23918,9 +23972,6 @@ msgstr ""
msgid "Import|Maximum import remote file size (MB)"
msgstr ""
msgid "Import|Maximum import remote file size (MiB)"
msgstr ""
msgid "Import|Maximum remote file size for imports from external object storages. For example, AWS S3."
msgstr ""
@ -25776,6 +25827,9 @@ msgstr ""
msgid "Issue title"
msgstr ""
msgid "Issue type"
msgstr ""
msgid "Issue types"
msgstr ""
@ -33583,6 +33637,9 @@ msgstr ""
msgid "PerformanceBar|Bullet notifications"
msgstr ""
msgid "PerformanceBar|ClickHouse queries"
msgstr ""
msgid "PerformanceBar|DOM Content Loaded"
msgstr ""
@ -46714,6 +46771,9 @@ msgstr ""
msgid "The file has been successfully deleted."
msgstr ""
msgid "The file you're about to delete is tracked by LFS"
msgstr ""
msgid "The finding is not a vulnerability because it is part of a test or is test data."
msgstr ""

View File

@ -249,10 +249,10 @@
"cheerio": "^1.0.0-rc.9",
"commander": "^2.20.3",
"custom-jquery-matchers": "^2.1.0",
"eslint": "8.45.0",
"eslint": "8.46.0",
"eslint-import-resolver-jest": "3.0.2",
"eslint-import-resolver-webpack": "0.13.2",
"eslint-plugin-import": "^2.27.5",
"eslint-plugin-import": "^2.28.0",
"eslint-plugin-no-jquery": "2.7.0",
"eslint-plugin-no-unsanitized": "^4.0.2",
"fake-indexeddb": "^4.0.1",

View File

@ -31,7 +31,7 @@ printf "${Color_Off}"
printf "${BBlue}Running Remote Development backend specs${Color_Off}\n\n"
bin/spring rspec -r spec_helper \
bin/rspec -r spec_helper \
$(find . -path '**/remote_development/**/*_spec.rb') \
ee/spec/graphql/types/query_type_spec.rb \
ee/spec/graphql/types/subscription_type_spec.rb \

View File

@ -0,0 +1,37 @@
import * as Sentry from '@sentry/browser';
import { setError } from '~/boards/graphql/cache_updates';
import { defaultClient } from '~/graphql_shared/issuable_client';
import setErrorMutation from '~/boards/graphql/client/set_error.mutation.graphql';

describe('setError', () => {
  let sentryCaptureExceptionSpy;
  const errorMessage = 'Error';
  const error = new Error(errorMessage);

  beforeEach(() => {
    // Stub the Apollo mutation and spy on Sentry so no real network call or
    // error reporting happens.
    jest.spyOn(defaultClient, 'mutate').mockResolvedValue();
    sentryCaptureExceptionSpy = jest.spyOn(Sentry, 'captureException');
  });

  it('calls setErrorMutation and capture Sentry error', () => {
    setError({ message: errorMessage, error });

    // The message is written to the Apollo cache and the error reported.
    expect(defaultClient.mutate).toHaveBeenCalledWith({
      mutation: setErrorMutation,
      variables: { error: errorMessage },
    });
    expect(sentryCaptureExceptionSpy).toHaveBeenCalledWith(error);
  });

  it('does not capture Sentry error when captureError is false', () => {
    setError({ message: errorMessage, error, captureError: false });

    // The cache mutation still runs; only the Sentry report is suppressed.
    expect(defaultClient.mutate).toHaveBeenCalledWith({
      mutation: setErrorMutation,
      variables: { error: errorMessage },
    });
    expect(sentryCaptureExceptionSpy).not.toHaveBeenCalled();
  });
});

View File

@ -3,12 +3,14 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import BoardAddNewColumn from '~/boards/components/board_add_new_column.vue';
import BoardAddNewColumnForm from '~/boards/components/board_add_new_column_form.vue';
import defaultState from '~/boards/stores/state';
import createBoardListMutation from 'ee_else_ce/boards/graphql/board_list_create.mutation.graphql';
import boardLabelsQuery from '~/boards/graphql/board_labels.query.graphql';
import * as cacheUpdates from '~/boards/graphql/cache_updates';
import {
mockLabelList,
createBoardListResponse,
@ -21,13 +23,14 @@ Vue.use(VueApollo);
describe('BoardAddNewColumn', () => {
let wrapper;
let mockApollo;
const createBoardListQueryHandler = jest.fn().mockResolvedValue(createBoardListResponse);
const labelsQueryHandler = jest.fn().mockResolvedValue(labelsQueryResponse);
const mockApollo = createMockApollo([
[boardLabelsQuery, labelsQueryHandler],
[createBoardListMutation, createBoardListQueryHandler],
]);
const errorMessage = 'Failed to create list';
const createBoardListQueryHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
const errorMessageLabels = 'Failed to fetch labels';
const labelsQueryHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessageLabels));
const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
const findAddNewColumnForm = () => wrapper.findComponent(BoardAddNewColumnForm);
@ -53,7 +56,14 @@ describe('BoardAddNewColumn', () => {
actions = {},
provide = {},
lists = {},
labelsHandler = labelsQueryHandler,
createHandler = createBoardListQueryHandler,
} = {}) => {
mockApollo = createMockApollo([
[boardLabelsQuery, labelsHandler],
[createBoardListMutation, createHandler],
]);
wrapper = shallowMountExtended(BoardAddNewColumn, {
apolloProvider: mockApollo,
propsData: {
@ -111,6 +121,10 @@ describe('BoardAddNewColumn', () => {
mockApollo.clients.defaultClient.cache.writeQuery = jest.fn();
};
beforeEach(() => {
cacheUpdates.setError = jest.fn();
});
describe('Add list button', () => {
it('calls addList', async () => {
const getListByLabelId = jest.fn().mockReturnValue(null);
@ -208,11 +222,52 @@ describe('BoardAddNewColumn', () => {
findAddNewColumnForm().vm.$emit('add-list');
await nextTick();
await waitForPromises();
expect(wrapper.emitted('highlight-list')).toEqual([[mockLabelList.id]]);
expect(createBoardListQueryHandler).not.toHaveBeenCalledWith();
});
});
describe('when fetch labels query fails', () => {
beforeEach(() => {
mountComponent({
provide: { isApolloBoard: true },
labelsHandler: labelsQueryHandlerFailure,
});
});
it('sets error', async () => {
findDropdown().vm.$emit('show');
await waitForPromises();
expect(cacheUpdates.setError).toHaveBeenCalled();
});
});
describe('when create list mutation fails', () => {
beforeEach(() => {
mountComponent({
selectedId: mockLabelList.label.id,
provide: { isApolloBoard: true },
createHandler: createBoardListQueryHandlerFailure,
});
});
it('sets error', async () => {
findDropdown().vm.$emit('show');
await nextTick();
expect(labelsQueryHandler).toHaveBeenCalled();
selectLabel(mockLabelList.label.id);
findAddNewColumnForm().vm.$emit('add-list');
await waitForPromises();
expect(cacheUpdates.setError).toHaveBeenCalled();
});
});
});
});

View File

@ -9,13 +9,17 @@ import BoardApp from '~/boards/components/board_app.vue';
import eventHub from '~/boards/eventhub';
import activeBoardItemQuery from 'ee_else_ce/boards/graphql/client/active_board_item.query.graphql';
import boardListsQuery from 'ee_else_ce/boards/graphql/board_lists.query.graphql';
import * as cacheUpdates from '~/boards/graphql/cache_updates';
import { rawIssue, boardListsQueryResponse } from '../mock_data';
describe('BoardApp', () => {
let wrapper;
let store;
let mockApollo;
const errorMessage = 'Failed to fetch lists';
const boardListQueryHandler = jest.fn().mockResolvedValue(boardListsQueryResponse);
const mockApollo = createMockApollo([[boardListsQuery, boardListQueryHandler]]);
const boardListQueryHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
Vue.use(Vuex);
Vue.use(VueApollo);
@ -33,7 +37,12 @@ describe('BoardApp', () => {
});
};
const createComponent = ({ isApolloBoard = false, issue = rawIssue } = {}) => {
const createComponent = ({
isApolloBoard = false,
issue = rawIssue,
handler = boardListQueryHandler,
} = {}) => {
mockApollo = createMockApollo([[boardListsQuery, handler]]);
mockApollo.clients.defaultClient.cache.writeQuery({
query: activeBoardItemQuery,
data: {
@ -57,6 +66,10 @@ describe('BoardApp', () => {
});
};
beforeEach(() => {
cacheUpdates.setError = jest.fn();
});
afterEach(() => {
store = null;
});
@ -104,5 +117,13 @@ describe('BoardApp', () => {
expect(eventHub.$on).toHaveBeenCalledWith('updateBoard', wrapper.vm.refetchLists);
});
it('sets error on fetch lists failure', async () => {
createComponent({ isApolloBoard: true, handler: boardListQueryHandlerFailure });
await waitForPromises();
expect(cacheUpdates.setError).toHaveBeenCalled();
});
});
});

View File

@ -1,7 +1,7 @@
import { GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import Vue from 'vue';
import Vue, { nextTick } from 'vue';
import Draggable from 'vuedraggable';
import Vuex from 'vuex';
@ -10,6 +10,7 @@ import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import EpicsSwimlanes from 'ee_component/boards/components/epics_swimlanes.vue';
import getters from 'ee_else_ce/boards/stores/getters';
import * as cacheUpdates from '~/boards/graphql/cache_updates';
import BoardColumn from '~/boards/components/board_column.vue';
import BoardContent from '~/boards/components/board_content.vue';
import BoardContentSidebar from '~/boards/components/board_content_sidebar.vue';
@ -36,6 +37,8 @@ describe('BoardContent', () => {
let mockApollo;
const updateListHandler = jest.fn().mockResolvedValue(updateBoardListResponse);
const errorMessage = 'Failed to update list';
const updateListHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
const defaultState = {
isShowingEpicsSwimlanes: false,
@ -60,8 +63,9 @@ describe('BoardContent', () => {
issuableType = 'issue',
isIssueBoard = true,
isEpicBoard = false,
handler = updateListHandler,
} = {}) => {
mockApollo = createMockApollo([[updateBoardListMutation, updateListHandler]]);
mockApollo = createMockApollo([[updateBoardListMutation, handler]]);
const listQueryVariables = { isProject: true };
mockApollo.clients.defaultClient.writeQuery({
@ -107,6 +111,11 @@ describe('BoardContent', () => {
const findBoardColumns = () => wrapper.findAllComponents(BoardColumn);
const findBoardAddNewColumn = () => wrapper.findComponent(BoardAddNewColumn);
const findDraggable = () => wrapper.findComponent(Draggable);
const findError = () => wrapper.findComponent(GlAlert);
beforeEach(() => {
cacheUpdates.setError = jest.fn();
});
describe('default', () => {
beforeEach(() => {
@ -123,7 +132,7 @@ describe('BoardContent', () => {
it('does not display EpicsSwimlanes component', () => {
expect(wrapper.findComponent(EpicsSwimlanes).exists()).toBe(false);
expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
expect(findError().exists()).toBe(false);
});
it('sets delay and delayOnTouchOnly attributes on board list', () => {
@ -169,6 +178,18 @@ describe('BoardContent', () => {
});
describe('when Apollo boards FF is on', () => {
const moveList = () => {
const movableListsOrder = [mockLists[0].id, mockLists[1].id];
findDraggable().vm.$emit('end', {
item: { dataset: { listId: mockLists[0].id, draggableItemType: DraggableItemTypes.list } },
newIndex: 1,
to: {
children: movableListsOrder.map((listId) => ({ dataset: { listId } })),
},
});
};
beforeEach(async () => {
createComponent({ isApolloBoard: true });
await waitForPromises();
@ -183,19 +204,38 @@ describe('BoardContent', () => {
});
it('reorders lists', async () => {
const movableListsOrder = [mockLists[0].id, mockLists[1].id];
findDraggable().vm.$emit('end', {
item: { dataset: { listId: mockLists[0].id, draggableItemType: DraggableItemTypes.list } },
newIndex: 1,
to: {
children: movableListsOrder.map((listId) => ({ dataset: { listId } })),
},
});
moveList();
await waitForPromises();
expect(updateListHandler).toHaveBeenCalled();
});
it('sets error on reorder lists failure', async () => {
createComponent({ isApolloBoard: true, handler: updateListHandlerFailure });
moveList();
await waitForPromises();
expect(cacheUpdates.setError).toHaveBeenCalled();
});
describe('when error is passed', () => {
beforeEach(async () => {
createComponent({ isApolloBoard: true, props: { apolloError: 'Error' } });
await waitForPromises();
});
it('displays error banner', () => {
expect(findError().exists()).toBe(true);
});
it('dismisses error', async () => {
findError().vm.$emit('dismiss');
await nextTick();
expect(cacheUpdates.setError).toHaveBeenCalledWith({ message: null, captureError: false });
});
});
});
describe('when "add column" form is visible', () => {

View File

@ -4,6 +4,7 @@ import VueApollo from 'vue-apollo';
import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import {
boardListQueryResponse,
mockLabelList,
@ -12,6 +13,7 @@ import {
import BoardListHeader from '~/boards/components/board_list_header.vue';
import updateBoardListMutation from '~/boards/graphql/board_list_update.mutation.graphql';
import { ListType } from '~/boards/constants';
import * as cacheUpdates from '~/boards/graphql/cache_updates';
import listQuery from 'ee_else_ce/boards/graphql/board_lists_deferred.query.graphql';
Vue.use(VueApollo);
@ -25,7 +27,11 @@ describe('Board List Header Component', () => {
const updateListSpy = jest.fn();
const toggleListCollapsedSpy = jest.fn();
const mockClientToggleListCollapsedResolver = jest.fn();
const updateListHandler = jest.fn().mockResolvedValue(updateBoardListResponse);
const updateListHandlerSuccess = jest.fn().mockResolvedValue(updateBoardListResponse);
beforeEach(() => {
cacheUpdates.setError = jest.fn();
});
afterEach(() => {
fakeApollo = null;
@ -39,6 +45,7 @@ describe('Board List Header Component', () => {
withLocalStorage = true,
currentUserId = 1,
listQueryHandler = jest.fn().mockResolvedValue(boardListQueryResponse()),
updateListHandler = updateListHandlerSuccess,
injectedProps = {},
} = {}) => {
const boardId = 'gid://gitlab/Board/1';
@ -271,7 +278,7 @@ describe('Board List Header Component', () => {
findCaret().vm.$emit('click');
await nextTick();
expect(updateListHandler).not.toHaveBeenCalled();
expect(updateListHandlerSuccess).not.toHaveBeenCalled();
});
it('calls update list mutation when user is logged in', async () => {
@ -280,7 +287,50 @@ describe('Board List Header Component', () => {
findCaret().vm.$emit('click');
await nextTick();
expect(updateListHandler).toHaveBeenCalledWith({ listId: mockLabelList.id, collapsed: true });
expect(updateListHandlerSuccess).toHaveBeenCalledWith({
listId: mockLabelList.id,
collapsed: true,
});
});
describe('when fetch list query fails', () => {
const errorMessage = 'Failed to fetch list';
const listQueryHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
beforeEach(() => {
createComponent({
listQueryHandler: listQueryHandlerFailure,
injectedProps: { isApolloBoard: true },
});
});
it('sets error', async () => {
await waitForPromises();
expect(cacheUpdates.setError).toHaveBeenCalled();
});
});
describe('when update list mutation fails', () => {
const errorMessage = 'Failed to update list';
const updateListHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
beforeEach(() => {
createComponent({
currentUserId: 1,
updateListHandler: updateListHandlerFailure,
injectedProps: { isApolloBoard: true },
});
});
it('sets error', async () => {
await waitForPromises();
findCaret().vm.$emit('click');
await waitForPromises();
expect(cacheUpdates.setError).toHaveBeenCalled();
});
});
});
});

View File

@ -6,11 +6,13 @@ import VueApollo from 'vue-apollo';
import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import waitForPromises from 'helpers/wait_for_promises';
import { stubComponent } from 'helpers/stub_component';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import BoardSettingsSidebar from '~/boards/components/board_settings_sidebar.vue';
import { inactiveId, LIST } from '~/boards/constants';
import destroyBoardListMutation from '~/boards/graphql/board_list_destroy.mutation.graphql';
import * as cacheUpdates from '~/boards/graphql/cache_updates';
import actions from '~/boards/stores/actions';
import getters from '~/boards/stores/getters';
import mutations from '~/boards/stores/mutations';
@ -31,12 +33,17 @@ describe('BoardSettingsSidebar', () => {
const destroyBoardListMutationHandlerSuccess = jest
.fn()
.mockResolvedValue(destroyBoardListMutationResponse);
const errorMessage = 'Failed to delete list';
const destroyBoardListMutationHandlerFailure = jest
.fn()
.mockRejectedValue(new Error(errorMessage));
const createComponent = ({
canAdminList = false,
list = {},
sidebarType = LIST,
activeId = inactiveId,
destroyBoardListMutationHandler = destroyBoardListMutationHandlerSuccess,
isApolloBoard = false,
} = {}) => {
const boardLists = {
@ -49,9 +56,7 @@ describe('BoardSettingsSidebar', () => {
actions,
});
mockApollo = createMockApollo([
[destroyBoardListMutation, destroyBoardListMutationHandlerSuccess],
]);
mockApollo = createMockApollo([[destroyBoardListMutation, destroyBoardListMutationHandler]]);
wrapper = extendedWrapper(
shallowMount(BoardSettingsSidebar, {
@ -90,6 +95,10 @@ describe('BoardSettingsSidebar', () => {
const findModal = () => wrapper.findComponent(GlModal);
const findRemoveButton = () => wrapper.findComponent(GlButton);
beforeEach(() => {
cacheUpdates.setError = jest.fn();
});
it('finds a MountingPortal component', () => {
createComponent();
@ -214,5 +223,23 @@ describe('BoardSettingsSidebar', () => {
createComponent({ canAdminList: true, activeId: listId, list: mockLabelList });
expect(findModal().props('modalId')).toBe(modalID);
});
it('sets error when destroy list mutation fails', async () => {
createComponent({
canAdminList: true,
activeId: listId,
list: mockLabelList,
destroyBoardListMutationHandler: destroyBoardListMutationHandlerFailure,
isApolloBoard: true,
});
findRemoveButton().vm.$emit('click');
wrapper.findComponent(GlModal).vm.$emit('primary');
await waitForPromises();
expect(cacheUpdates.setError).toHaveBeenCalled();
});
});
});

View File

@ -3,6 +3,7 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import Vuex from 'vuex';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import BoardTopBar from '~/boards/components/board_top_bar.vue';
import BoardAddNewColumnTrigger from '~/boards/components/board_add_new_column_trigger.vue';
@ -11,6 +12,7 @@ import ConfigToggle from '~/boards/components/config_toggle.vue';
import IssueBoardFilteredSearch from '~/boards/components/issue_board_filtered_search.vue';
import NewBoardButton from '~/boards/components/new_board_button.vue';
import ToggleFocus from '~/boards/components/toggle_focus.vue';
import * as cacheUpdates from '~/boards/graphql/cache_updates';
import { WORKSPACE_GROUP, WORKSPACE_PROJECT } from '~/issues/constants';
import groupBoardQuery from '~/boards/graphql/group_board.query.graphql';
@ -32,12 +34,18 @@ describe('BoardTopBar', () => {
const projectBoardQueryHandlerSuccess = jest.fn().mockResolvedValue(mockProjectBoardResponse);
const groupBoardQueryHandlerSuccess = jest.fn().mockResolvedValue(mockGroupBoardResponse);
const errorMessage = 'Failed to fetch board';
const boardQueryHandlerFailure = jest.fn().mockRejectedValue(new Error(errorMessage));
const createComponent = ({ provide = {} } = {}) => {
const createComponent = ({
provide = {},
projectBoardQueryHandler = projectBoardQueryHandlerSuccess,
groupBoardQueryHandler = groupBoardQueryHandlerSuccess,
} = {}) => {
const store = createStore();
mockApollo = createMockApollo([
[projectBoardQuery, projectBoardQueryHandlerSuccess],
[groupBoardQuery, groupBoardQueryHandlerSuccess],
[projectBoardQuery, projectBoardQueryHandler],
[groupBoardQuery, groupBoardQueryHandler],
]);
wrapper = shallowMount(BoardTopBar, {
@ -65,6 +73,10 @@ describe('BoardTopBar', () => {
});
};
beforeEach(() => {
cacheUpdates.setError = jest.fn();
});
afterEach(() => {
mockApollo = null;
});
@ -134,5 +146,25 @@ describe('BoardTopBar', () => {
expect(queryHandler).toHaveBeenCalled();
expect(notCalledHandler).not.toHaveBeenCalled();
});
it.each`
boardType
${WORKSPACE_GROUP}
${WORKSPACE_PROJECT}
`('sets error when $boardType board query fails', async ({ boardType }) => {
createComponent({
provide: {
boardType,
isProjectBoard: boardType === WORKSPACE_PROJECT,
isGroupBoard: boardType === WORKSPACE_GROUP,
isApolloBoard: true,
},
groupBoardQueryHandler: boardQueryHandlerFailure,
projectBoardQueryHandler: boardQueryHandlerFailure,
});
await waitForPromises();
expect(cacheUpdates.setError).toHaveBeenCalled();
});
});
});

View File

@ -9,6 +9,7 @@ import groupBoardsQuery from '~/boards/graphql/group_boards.query.graphql';
import projectBoardsQuery from '~/boards/graphql/project_boards.query.graphql';
import groupRecentBoardsQuery from '~/boards/graphql/group_recent_boards.query.graphql';
import projectRecentBoardsQuery from '~/boards/graphql/project_recent_boards.query.graphql';
import * as cacheUpdates from '~/boards/graphql/cache_updates';
import { WORKSPACE_GROUP, WORKSPACE_PROJECT } from '~/issues/constants';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
@ -37,7 +38,6 @@ describe('BoardsSelector', () => {
const createStore = () => {
store = new Vuex.Store({
actions: {
setError: jest.fn(),
setBoardConfig: jest.fn(),
},
state: {
@ -77,16 +77,19 @@ describe('BoardsSelector', () => {
.fn()
.mockResolvedValue(mockEmptyProjectRecentBoardsResponse);
const boardsHandlerFailure = jest.fn().mockRejectedValue(new Error('error'));
const createComponent = ({
projectBoardsQueryHandler = projectBoardsQueryHandlerSuccess,
projectRecentBoardsQueryHandler = projectRecentBoardsQueryHandlerSuccess,
groupBoardsQueryHandler = groupBoardsQueryHandlerSuccess,
isGroupBoard = false,
isProjectBoard = false,
provide = {},
} = {}) => {
fakeApollo = createMockApollo([
[projectBoardsQuery, projectBoardsQueryHandler],
[groupBoardsQuery, groupBoardsQueryHandlerSuccess],
[groupBoardsQuery, groupBoardsQueryHandler],
[projectRecentBoardsQuery, projectRecentBoardsQueryHandler],
[groupRecentBoardsQuery, groupRecentBoardsQueryHandlerSuccess],
]);
@ -115,6 +118,10 @@ describe('BoardsSelector', () => {
});
};
beforeEach(() => {
cacheUpdates.setError = jest.fn();
});
afterEach(() => {
fakeApollo = null;
});
@ -246,6 +253,29 @@ describe('BoardsSelector', () => {
expect(queryHandler).toHaveBeenCalled();
expect(notCalledHandler).not.toHaveBeenCalled();
});
it.each`
boardType
${WORKSPACE_GROUP}
${WORKSPACE_PROJECT}
`('sets error when fetching $boardType boards fails', async ({ boardType }) => {
createStore();
createComponent({
isGroupBoard: boardType === WORKSPACE_GROUP,
isProjectBoard: boardType === WORKSPACE_PROJECT,
projectBoardsQueryHandler: boardsHandlerFailure,
groupBoardsQueryHandler: boardsHandlerFailure,
});
await nextTick();
// Emits gl-dropdown show event to simulate the dropdown is opened at initialization time
findDropdown().vm.$emit('show');
await waitForPromises();
expect(cacheUpdates.setError).toHaveBeenCalled();
});
});
describe('dropdown visibility', () => {

View File

@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import setWindowLocation from 'helpers/set_window_location_helper';
import DurationBadge from '~/jobs/components/log/duration_badge.vue';
import LineHeader from '~/jobs/components/log/line_header.vue';
import LineNumber from '~/jobs/components/log/line_number.vue';
@ -15,7 +16,7 @@ describe('Job Log Header Line', () => {
style: 'term-fg-l-green',
},
],
lineNumber: 0,
lineNumber: 76,
},
isClosed: true,
path: '/jashkenas/underscore/-/jobs/335',
@ -89,4 +90,30 @@ describe('Job Log Header Line', () => {
expect(wrapper.findComponent(DurationBadge).exists()).toBe(true);
});
});
describe('line highlighting', () => {
describe('with hash', () => {
beforeEach(() => {
setWindowLocation(`http://foo.com/root/ci-project/-/jobs/6353#L77`);
createComponent(data);
});
it('highlights line', () => {
expect(wrapper.classes()).toContain('gl-bg-gray-700');
});
});
describe('without hash', () => {
beforeEach(() => {
setWindowLocation(`http://foo.com/root/ci-project/-/jobs/6353`);
createComponent(data);
});
it('does not highlight line', () => {
expect(wrapper.classes()).not.toContain('gl-bg-gray-700');
});
});
});
});

View File

@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import Line from '~/jobs/components/log/line.vue';
import LineNumber from '~/jobs/components/log/line_number.vue';
import setWindowLocation from 'helpers/set_window_location_helper';
const httpUrl = 'http://example.com';
const httpsUrl = 'https://example.com';
@ -203,7 +204,7 @@ describe('Job Log Line', () => {
searchResults: mockSearchResults,
});
expect(wrapper.classes()).toContain('gl-bg-gray-500');
expect(wrapper.classes()).toContain('gl-bg-gray-700');
});
it('does not apply highlight class to search result elements', () => {
@ -218,7 +219,49 @@ describe('Job Log Line', () => {
searchResults: mockSearchResults,
});
expect(wrapper.classes()).not.toContain('gl-bg-gray-500');
expect(wrapper.classes()).not.toContain('gl-bg-gray-700');
});
});
describe('job log hash highlighting', () => {
describe('with hash', () => {
beforeEach(() => {
setWindowLocation(`http://foo.com/root/ci-project/-/jobs/6353#L77`);
});
it('applies highlight class to job log line', () => {
createComponent({
line: {
offset: 24526,
content: [{ text: 'job log content' }],
section: 'custom-section',
lineNumber: 76,
},
path: '/root/ci-project/-/jobs/6353',
});
expect(wrapper.classes()).toContain('gl-bg-gray-700');
});
});
describe('without hash', () => {
beforeEach(() => {
setWindowLocation(`http://foo.com/root/ci-project/-/jobs/6353`);
});
it('does not apply highlight class to job log line', () => {
createComponent({
line: {
offset: 24500,
content: [{ text: 'line' }],
section: 'custom-section',
lineNumber: 10,
},
path: '/root/ci-project/-/jobs/6353',
});
expect(wrapper.classes()).not.toContain('gl-bg-gray-700');
});
});
});
});

View File

@ -17,6 +17,7 @@ const DEFAULT_PROPS = {
isLocked: false,
canLock: true,
showForkSuggestion: false,
isUsingLfs: true,
};
const DEFAULT_INJECT = {
@ -146,7 +147,7 @@ describe('BlobButtonGroup component', () => {
createComponent();
const { targetBranch, originalBranch } = DEFAULT_INJECT;
const { name, canPushCode, deletePath, emptyRepo } = DEFAULT_PROPS;
const { name, canPushCode, deletePath, emptyRepo, isUsingLfs } = DEFAULT_PROPS;
const title = `Delete ${name}`;
expect(findDeleteBlobModal().props()).toMatchObject({
@ -157,6 +158,7 @@ describe('BlobButtonGroup component', () => {
canPushCode,
deletePath,
emptyRepo,
isUsingLfs,
});
});
});

View File

@ -0,0 +1,11 @@
import { loadViewer, viewers } from '~/repository/components/blob_viewers';
import { OPENAPI_FILE_TYPE, JSON_LANGUAGE } from '~/repository/constants';
describe('Blob Viewers index', () => {
describe('loadViewer', () => {
it('loads the openapi viewer', () => {
const result = loadViewer(OPENAPI_FILE_TYPE, false, true, JSON_LANGUAGE);
expect(result).toBe(viewers[OPENAPI_FILE_TYPE]);
});
});
});

View File

@ -1,7 +1,10 @@
import { GlFormTextarea, GlModal, GlFormInput, GlToggle, GlForm } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import { GlFormTextarea, GlModal, GlFormInput, GlToggle, GlForm, GlSprintf } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { RENDER_ALL_SLOTS_TEMPLATE, stubComponent } from 'helpers/stub_component';
import DeleteBlobModal from '~/repository/components/delete_blob_modal.vue';
import { sprintf } from '~/locale';
jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
@ -17,6 +20,8 @@ const initialProps = {
emptyRepo: false,
};
const { i18n } = DeleteBlobModal;
describe('DeleteBlobModal', () => {
let wrapper;
@ -30,10 +35,14 @@ describe('DeleteBlobModal', () => {
static: true,
visible: true,
},
stubs: {
GlSprintf,
GlModal: stubComponent(GlModal, { template: RENDER_ALL_SLOTS_TEMPLATE }),
},
});
};
const createComponent = createComponentFactory(shallowMount);
const createComponent = createComponentFactory(shallowMountExtended);
const createFullComponent = createComponentFactory(mount);
const findModal = () => wrapper.findComponent(GlModal);
@ -49,6 +58,35 @@ describe('DeleteBlobModal', () => {
await findCommitTextarea().vm.$emit('input', commitText);
};
describe('LFS files', () => {
const lfsTitleText = i18n.LFS_WARNING_TITLE;
const primaryLfsText = sprintf(i18n.LFS_WARNING_PRIMARY_CONTENT, {
branch: initialProps.targetBranch,
});
const secondaryLfsText = sprintf(i18n.LFS_WARNING_SECONDARY_CONTENT, {
linkStart: '',
linkEnd: '',
});
beforeEach(() => createComponent({ isUsingLfs: true }));
it('renders a modal containing LFS text', () => {
expect(findModal().props('title')).toBe(lfsTitleText);
expect(findModal().text()).toContain(primaryLfsText);
expect(findModal().text()).toContain(secondaryLfsText);
});
it('hides the LFS content if the continue button is clicked', async () => {
findModal().vm.$emit('primary', { preventDefault: jest.fn() });
await nextTick();
expect(findModal().props('title')).not.toBe(lfsTitleText);
expect(findModal().text()).not.toContain(primaryLfsText);
expect(findModal().text()).not.toContain(secondaryLfsText);
});
});
it('renders Modal component', () => {
createComponent();

View File

@ -1,11 +1,17 @@
import { GlDropdownSectionHeader } from '@gitlab/ui';
import { GlDisclosureDropdown, GlDisclosureDropdownGroup } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import OkrActionsSplitButton from '~/work_items/components/work_item_links/okr_actions_split_button.vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
const createComponent = () => {
return extendedWrapper(shallowMount(OkrActionsSplitButton));
return extendedWrapper(
shallowMount(OkrActionsSplitButton, {
stubs: {
GlDisclosureDropdown,
},
}),
);
};
describe('RelatedItemsTree', () => {
@ -18,11 +24,11 @@ describe('RelatedItemsTree', () => {
describe('OkrActionsSplitButton', () => {
describe('template', () => {
it('renders objective and key results sections', () => {
expect(wrapper.findAllComponents(GlDropdownSectionHeader).at(0).text()).toContain(
expect(wrapper.findAllComponents(GlDisclosureDropdownGroup).at(0).props('group').name).toBe(
'Objective',
);
expect(wrapper.findAllComponents(GlDropdownSectionHeader).at(1).text()).toContain(
expect(wrapper.findAllComponents(GlDisclosureDropdownGroup).at(1).props('group').name).toBe(
'Key result',
);
});

View File

@ -24,13 +24,6 @@ RSpec.describe 'ClickHouse::Client', feature_category: :database do
expect(databases).not_to be_empty
end
it 'returns data from the DB via `select` method' do
result = ClickHouse::Client.select("SELECT 1 AS value", :main)
# returns JSON if successful. Otherwise error
expect(result).to eq([{ 'value' => 1 }])
end
it 'does not return data via `execute` method' do
result = ClickHouse::Client.execute("SELECT 1 AS value", :main)

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::BatchedBackgroundMigrationFailedJobsMetric, feature_category: :database do
let(:expected_value) do
[
{
job_class_name: 'job',
number_of_failed_jobs: 1,
table_name: 'jobs'
},
{
job_class_name: 'test',
number_of_failed_jobs: 2,
table_name: 'users'
}
]
end
let_it_be(:active_migration) do
create(:batched_background_migration, :active, table_name: 'users', job_class_name: 'test', created_at: 5.days.ago)
end
let_it_be(:failed_migration) do
create(:batched_background_migration, :failed, table_name: 'jobs', job_class_name: 'job', created_at: 4.days.ago)
end
let_it_be(:batched_job) { create(:batched_background_migration_job, :failed, batched_migration: active_migration) }
let_it_be(:batched_job_2) { create(:batched_background_migration_job, :failed, batched_migration: active_migration) }
let_it_be(:batched_job_3) { create(:batched_background_migration_job, :failed, batched_migration: failed_migration) }
let_it_be(:old_migration) { create(:batched_background_migration, :failed, created_at: 99.days.ago) }
let_it_be(:old_batched_job) { create(:batched_background_migration_job, :failed, batched_migration: old_migration) }
it_behaves_like 'a correct instrumented metric value', { time_frame: '7d' }
end

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Peek::Views::ClickHouse, :click_house, :request_store, feature_category: :database do
before do
allow(::Gitlab::PerformanceBar).to receive(:enabled_for_request?).and_return(true)
end
describe '#results' do
let(:results) { described_class.new.results }
it 'includes performance details' do
::Gitlab::SafeRequestStore.clear!
data = ClickHouse::Client.select('SELECT 1 AS value', :main)
ClickHouse::Client.execute('INSERT INTO events (id) VALUES (1)', :main)
expect(data).to eq([{ 'value' => 1 }])
expect(results[:calls]).to eq(2)
expect(results[:duration]).to be_kind_of(String)
expect(results[:details]).to match_array([
a_hash_including({
sql: 'SELECT 1 AS value',
database: 'database: main'
}),
a_hash_including({
sql: 'INSERT INTO events (id) VALUES (1)',
database: 'database: main',
statistics: include('written_rows=>"1"')
})
])
end
end
end

View File

@ -7300,6 +7300,32 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
describe '#pages_variables' do
let(:group) { build(:group, path: 'group') }
let(:project) { build(:project, path: 'project', namespace: group) }
it 'returns the pages variables' do
expect(project.pages_variables.to_hash).to eq({
'CI_PAGES_DOMAIN' => 'example.com',
'CI_PAGES_URL' => 'http://group.example.com/project'
})
end
it 'returns the pages variables' do
build(
:project_setting,
project: project,
pages_unique_domain_enabled: true,
pages_unique_domain: 'unique-domain'
)
expect(project.pages_variables.to_hash).to eq({
'CI_PAGES_DOMAIN' => 'example.com',
'CI_PAGES_URL' => 'http://unique-domain.example.com'
})
end
end
describe '#closest_setting' do
shared_examples_for 'fetching closest setting' do
let!(:namespace) { create(:namespace) }

View File

@ -295,6 +295,20 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state,
expect(WebMock).not_to have_requested(:post, stubbed_hostname(project_hook.url))
end
context 'when silent mode is enabled' do
before do
stub_application_setting(silent_mode_enabled: true)
end
it 'blocks and logs an error' do
stub_full_request(project_hook.url, method: :post)
expect(Gitlab::AuthLogger).to receive(:error).with(include(message: 'GitLab is in silent mode'))
expect(service_instance.execute).to be_error
expect(WebMock).not_to have_requested(:post, stubbed_hostname(project_hook.url))
end
end
it 'handles exceptions' do
exceptions = Gitlab::HTTP::HTTP_ERRORS + [
Gitlab::Json::LimitedEncoder::LimitExceeded, URI::InvalidURIError
@ -733,6 +747,19 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state,
end
end
context 'when silent mode is enabled' do
before do
stub_application_setting(silent_mode_enabled: true)
end
it 'does not queue a worker and logs an error' do
expect(WebHookWorker).not_to receive(:perform_async)
expect(Gitlab::AuthLogger).to receive(:error).with(include(message: 'GitLab is in silent mode'))
service_instance.async_execute
end
end
context 'when hook has custom context attributes' do
it 'includes the attributes in the worker context' do
expect(WebHookWorker).to receive(:perform_async) do

View File

@ -83,6 +83,20 @@ RSpec.shared_examples 'a hook that gets automatically disabled on failure' do
expect(find_hooks.disabled).to be_empty
end
end
context 'when silent mode is enabled' do
before do
stub_application_setting(silent_mode_enabled: true)
end
it 'causes no hooks to be considered executable' do
expect(find_hooks.executable).to be_empty
end
it 'causes all hooks to be considered disabled' do
expect(find_hooks.disabled.count).to eq(16)
end
end
end
describe '#executable?', :freeze_time do

View File

@ -2,7 +2,7 @@
RSpec.shared_examples 'a hook that does not get automatically disabled on failure' do
describe '.executable/.disabled', :freeze_time do
let!(:executables) do
let!(:webhooks) do
[
[0, Time.current],
[0, 1.minute.from_now],
@ -29,9 +29,23 @@ RSpec.shared_examples 'a hook that does not get automatically disabled on failur
it 'finds the correct set of project hooks' do
expect(find_hooks).to all(be_executable)
expect(find_hooks.executable).to match_array executables
expect(find_hooks.executable).to match_array(webhooks)
expect(find_hooks.disabled).to be_empty
end
context 'when silent mode is enabled' do
before do
stub_application_setting(silent_mode_enabled: true)
end
it 'causes no hooks to be considered executable' do
expect(find_hooks.executable).to be_empty
end
it 'causes all hooks to be considered disabled' do
expect(find_hooks.disabled).to match_array(webhooks)
end
end
end
describe '#executable?', :freeze_time do

View File

@ -10,6 +10,13 @@ RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :seat_cost
let_it_be(:inactive) { create(:user, last_activity_on: nil, created_at: User::MINIMUM_DAYS_CREATED.days.ago.to_date) }
let_it_be(:inactive_recently_created) { create(:user, last_activity_on: nil, created_at: (User::MINIMUM_DAYS_CREATED - 1).days.ago.to_date) }
let(:admin_bot) { create(:user, :admin_bot) }
let(:deactivation_service) { instance_spy(Users::DeactivateService) }
before do
allow(Users::DeactivateService).to receive(:new).and_return(deactivation_service)
end
subject(:worker) { described_class.new }
it 'does not run for SaaS', :saas do
@ -17,8 +24,7 @@ RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :seat_cost
worker.perform
expect(User.dormant.count).to eq(1)
expect(User.with_no_activity.count).to eq(1)
expect(deactivation_service).not_to have_received(:execute)
end
context 'when automatic deactivation of dormant users is enabled' do
@ -29,29 +35,33 @@ RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :seat_cost
it 'deactivates dormant users' do
worker.perform
expect(User.dormant.count).to eq(0)
expect(User.with_no_activity.count).to eq(0)
expect(deactivation_service).to have_received(:execute).twice
end
where(:user_type, :expected_state) do
:human | 'deactivated'
:support_bot | 'active'
:alert_bot | 'active'
:human | 'deactivated'
:support_bot | 'active'
:alert_bot | 'active'
:visual_review_bot | 'active'
:service_user | 'deactivated'
:ghost | 'active'
:project_bot | 'active'
:migration_bot | 'active'
:security_bot | 'active'
:automation_bot | 'active'
:service_user | 'deactivated'
:ghost | 'active'
:project_bot | 'active'
:migration_bot | 'active'
:security_bot | 'active'
:automation_bot | 'active'
end
# Driven by the `where(:user_type, :expected_state)` table above: humans and
# service users are deactivated; bot/internal user types stay active.
with_them do
  it 'deactivates certain user types' do
    # Create a user dormant for exactly the configured inactivity period.
    user = create(:user, user_type: user_type, state: :active, last_activity_on: Gitlab::CurrentSettings.deactivate_dormant_users_period.days.ago.to_date)

    worker.perform

    expect(user.reload.state).to eq(expected_state)
    # The worker must delegate the state change to Users::DeactivateService
    # (stubbed as `deactivation_service`) only for deactivatable types.
    if expected_state == 'deactivated'
      expect(deactivation_service).to have_received(:execute).with(user)
    else
      expect(deactivation_service).not_to have_received(:execute).with(user)
    end
  end
end
@ -61,22 +71,14 @@ RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :seat_cost
worker.perform
expect(human_user.reload.state).to eq('blocked')
expect(service_user.reload.state).to eq('blocked')
expect(deactivation_service).not_to have_received(:execute).with(human_user)
expect(deactivation_service).not_to have_received(:execute).with(service_user)
end
# Users created more recently than User::MINIMUM_DAYS_CREATED (see the
# `inactive_recently_created` fixture) are exempt even with no activity.
it 'does not deactivate recently created users' do
  worker.perform

  expect(inactive_recently_created.reload.state).to eq('active')
end
it 'triggers update of highest user role for deactivated users', :clean_gitlab_redis_shared_state do
[dormant, inactive].each do |user|
expect(UpdateHighestRoleWorker).to receive(:perform_in).with(anything, user.id)
end
worker.perform
expect(deactivation_service).not_to have_received(:execute).with(inactive_recently_created)
end
end
@ -88,8 +90,7 @@ RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :seat_cost
it 'does nothing' do
worker.perform
expect(User.dormant.count).to eq(1)
expect(User.with_no_activity.count).to eq(1)
expect(deactivation_service).not_to have_received(:execute)
end
end
end

View File

@ -2,6 +2,7 @@ package builds
import (
"bytes"
"context"
"encoding/json"
"errors"
"io"
@ -55,7 +56,7 @@ var (
type largeBodyError struct{ error }
type WatchKeyHandler func(key, value string, timeout time.Duration) (redis.WatchKeyStatus, error)
type WatchKeyHandler func(ctx context.Context, key, value string, timeout time.Duration) (redis.WatchKeyStatus, error)
type runnerRequest struct {
Token string `json:"token,omitempty"`
@ -102,11 +103,11 @@ func proxyRegisterRequest(h http.Handler, w http.ResponseWriter, r *http.Request
h.ServeHTTP(w, r)
}
func watchForRunnerChange(watchHandler WatchKeyHandler, token, lastUpdate string, duration time.Duration) (redis.WatchKeyStatus, error) {
// watchForRunnerChange blocks (via watchHandler) until the build-queue entry
// for the given runner token changes away from lastUpdate, the duration
// elapses, or ctx is canceled. The openAtWatching gauge counts requests
// currently parked in this wait.
func watchForRunnerChange(ctx context.Context, watchHandler WatchKeyHandler, token, lastUpdate string, duration time.Duration) (redis.WatchKeyStatus, error) {
	registerHandlerOpenAtWatching.Inc()
	defer registerHandlerOpenAtWatching.Dec()

	// Keys are namespaced per runner: runnerBuildQueue + token.
	return watchHandler(ctx, runnerBuildQueue+token, lastUpdate, duration)
}
func RegisterHandler(h http.Handler, watchHandler WatchKeyHandler, pollingDuration time.Duration) http.Handler {
@ -140,7 +141,7 @@ func RegisterHandler(h http.Handler, watchHandler WatchKeyHandler, pollingDurati
return
}
result, err := watchForRunnerChange(watchHandler, runnerRequest.Token,
result, err := watchForRunnerChange(r.Context(), watchHandler, runnerRequest.Token,
runnerRequest.LastUpdate, pollingDuration)
if err != nil {
registerHandlerWatchErrors.Inc()

View File

@ -2,6 +2,7 @@ package builds
import (
"bytes"
"context"
"errors"
"io"
"net/http"
@ -71,7 +72,7 @@ func TestRegisterHandlerMissingData(t *testing.T) {
func expectWatcherToBeExecuted(t *testing.T, watchKeyStatus redis.WatchKeyStatus, watchKeyError error,
httpStatus int, msgAndArgs ...interface{}) {
executed := false
watchKeyHandler := func(key, value string, timeout time.Duration) (redis.WatchKeyStatus, error) {
watchKeyHandler := func(ctx context.Context, key, value string, timeout time.Duration) (redis.WatchKeyStatus, error) {
executed = true
return watchKeyStatus, watchKeyError
}

View File

@ -1,6 +1,7 @@
package redis
import (
"context"
"errors"
"fmt"
"strings"
@ -251,7 +252,7 @@ const (
WatchKeyStatusNoChange
)
func (kw *KeyWatcher) WatchKey(key, value string, timeout time.Duration) (WatchKeyStatus, error) {
func (kw *KeyWatcher) WatchKey(_ context.Context, key, value string, timeout time.Duration) (WatchKeyStatus, error) {
notify := make(chan string, 1)
if err := kw.addSubscription(key, notify); err != nil {
return WatchKeyStatusNoChange, err

Some files were not shown because too many files have changed in this diff. Show More