Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
a2f3b3e5cf
commit
71c85847eb
|
|
@ -230,9 +230,6 @@ export default {
|
|||
}
|
||||
},
|
||||
diffViewType() {
|
||||
if (!this.glFeatures.unifiedDiffLines && (this.needsReload() || this.needsFirstLoad())) {
|
||||
this.refetchDiffData();
|
||||
}
|
||||
this.adjustView();
|
||||
},
|
||||
shouldShow() {
|
||||
|
|
|
|||
|
|
@ -87,7 +87,7 @@ export default {
|
|||
return this.getUserData;
|
||||
},
|
||||
mappedLines() {
|
||||
if (this.glFeatures.unifiedDiffLines && this.glFeatures.unifiedDiffComponents) {
|
||||
if (this.glFeatures.unifiedDiffComponents) {
|
||||
return this.diffLines(this.diffFile, true).map(mapParallel(this)) || [];
|
||||
}
|
||||
|
||||
|
|
@ -95,9 +95,7 @@ export default {
|
|||
if (this.isInlineView) {
|
||||
return this.diffFile.highlighted_diff_lines.map(mapInline(this));
|
||||
}
|
||||
return this.glFeatures.unifiedDiffLines
|
||||
? this.diffLines(this.diffFile).map(mapParallel(this))
|
||||
: this.diffFile.parallel_diff_lines.map(mapParallel(this)) || [];
|
||||
return this.diffLines(this.diffFile).map(mapParallel(this));
|
||||
},
|
||||
},
|
||||
updated() {
|
||||
|
|
@ -129,9 +127,7 @@ export default {
|
|||
<template>
|
||||
<div class="diff-content">
|
||||
<div class="diff-viewer">
|
||||
<template
|
||||
v-if="isTextFile && glFeatures.unifiedDiffLines && glFeatures.unifiedDiffComponents"
|
||||
>
|
||||
<template v-if="isTextFile && glFeatures.unifiedDiffComponents">
|
||||
<diff-view
|
||||
:diff-file="diffFile"
|
||||
:diff-lines="mappedLines"
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ import { GlIcon } from '@gitlab/ui';
|
|||
import { deprecatedCreateFlash as createFlash } from '~/flash';
|
||||
import { s__, sprintf } from '~/locale';
|
||||
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
|
||||
import { UNFOLD_COUNT, INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE } from '../constants';
|
||||
import { UNFOLD_COUNT, INLINE_DIFF_VIEW_TYPE, INLINE_DIFF_LINES_KEY } from '../constants';
|
||||
import * as utils from '../store/utils';
|
||||
|
||||
const EXPAND_ALL = 0;
|
||||
|
|
@ -14,7 +14,6 @@ const EXPAND_DOWN = 2;
|
|||
const lineNumberByViewType = (viewType, diffLine) => {
|
||||
const numberGetters = {
|
||||
[INLINE_DIFF_VIEW_TYPE]: line => line?.new_line,
|
||||
[PARALLEL_DIFF_VIEW_TYPE]: line => (line?.right || line?.left)?.new_line,
|
||||
};
|
||||
const numberGetter = numberGetters[viewType];
|
||||
return numberGetter && numberGetter(diffLine);
|
||||
|
|
@ -57,9 +56,6 @@ export default {
|
|||
},
|
||||
computed: {
|
||||
...mapState({
|
||||
diffViewType(state) {
|
||||
return this.glFeatures.unifiedDiffLines ? INLINE_DIFF_VIEW_TYPE : state.diffs.diffViewType;
|
||||
},
|
||||
diffFiles: state => state.diffs.diffFiles,
|
||||
}),
|
||||
canExpandUp() {
|
||||
|
|
@ -77,16 +73,14 @@ export default {
|
|||
...mapActions('diffs', ['loadMoreLines']),
|
||||
getPrevLineNumber(oldLineNumber, newLineNumber) {
|
||||
const diffFile = utils.findDiffFile(this.diffFiles, this.fileHash);
|
||||
const lines = {
|
||||
[INLINE_DIFF_VIEW_TYPE]: diffFile.highlighted_diff_lines,
|
||||
[PARALLEL_DIFF_VIEW_TYPE]: diffFile.parallel_diff_lines,
|
||||
};
|
||||
const index = utils.getPreviousLineIndex(this.diffViewType, diffFile, {
|
||||
const index = utils.getPreviousLineIndex(INLINE_DIFF_VIEW_TYPE, diffFile, {
|
||||
oldLineNumber,
|
||||
newLineNumber,
|
||||
});
|
||||
|
||||
return lineNumberByViewType(this.diffViewType, lines[this.diffViewType][index - 2]) || 0;
|
||||
return (
|
||||
lineNumberByViewType(INLINE_DIFF_VIEW_TYPE, diffFile[INLINE_DIFF_LINES_KEY][index - 2]) || 0
|
||||
);
|
||||
},
|
||||
callLoadMoreLines(
|
||||
endpoint,
|
||||
|
|
@ -113,7 +107,7 @@ export default {
|
|||
this.isRequesting = true;
|
||||
const endpoint = this.contextLinesPath;
|
||||
const { fileHash } = this;
|
||||
const view = this.diffViewType;
|
||||
const view = INLINE_DIFF_VIEW_TYPE;
|
||||
const oldLineNumber = this.line.meta_data.old_pos || 0;
|
||||
const newLineNumber = this.line.meta_data.new_pos || 0;
|
||||
const offset = newLineNumber - oldLineNumber;
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ import noteForm from '../../notes/components/note_form.vue';
|
|||
import MultilineCommentForm from '../../notes/components/multiline_comment_form.vue';
|
||||
import autosave from '../../notes/mixins/autosave';
|
||||
import userAvatarLink from '../../vue_shared/components/user_avatar/user_avatar_link.vue';
|
||||
import { DIFF_NOTE_TYPE, PARALLEL_DIFF_VIEW_TYPE } from '../constants';
|
||||
import { DIFF_NOTE_TYPE, INLINE_DIFF_LINES_KEY, PARALLEL_DIFF_VIEW_TYPE } from '../constants';
|
||||
import {
|
||||
commentLineOptions,
|
||||
formatLineRange,
|
||||
|
|
@ -102,13 +102,13 @@ export default {
|
|||
};
|
||||
const getDiffLines = () => {
|
||||
if (this.diffViewType === PARALLEL_DIFF_VIEW_TYPE) {
|
||||
return (this.glFeatures.unifiedDiffLines
|
||||
? this.diffLines(this.diffFile)
|
||||
: this.diffFile.parallel_diff_lines
|
||||
).reduce(combineSides, []);
|
||||
return this.diffLines(this.diffFile, this.glFeatures.unifiedDiffComponents).reduce(
|
||||
combineSides,
|
||||
[],
|
||||
);
|
||||
}
|
||||
|
||||
return this.diffFile.highlighted_diff_lines;
|
||||
return this.diffFile[INLINE_DIFF_LINES_KEY];
|
||||
};
|
||||
const side = this.line.type === 'new' ? 'right' : 'left';
|
||||
const lines = getDiffLines();
|
||||
|
|
|
|||
|
|
@ -30,13 +30,11 @@ import {
|
|||
OLD_LINE_KEY,
|
||||
NEW_LINE_KEY,
|
||||
TYPE_KEY,
|
||||
LEFT_LINE_KEY,
|
||||
MAX_RENDERING_DIFF_LINES,
|
||||
MAX_RENDERING_BULK_ROWS,
|
||||
MIN_RENDERING_MS,
|
||||
START_RENDERING_INDEX,
|
||||
INLINE_DIFF_LINES_KEY,
|
||||
PARALLEL_DIFF_LINES_KEY,
|
||||
DIFFS_PER_PAGE,
|
||||
DIFF_WHITESPACE_COOKIE_NAME,
|
||||
SHOW_WHITESPACE,
|
||||
|
|
@ -77,7 +75,7 @@ export const fetchDiffFilesBatch = ({ commit, state, dispatch }) => {
|
|||
const urlParams = {
|
||||
per_page: DIFFS_PER_PAGE,
|
||||
w: state.showWhitespace ? '0' : '1',
|
||||
view: window.gon?.features?.unifiedDiffLines ? 'inline' : state.diffViewType,
|
||||
view: 'inline',
|
||||
};
|
||||
|
||||
commit(types.SET_BATCH_LOADING, true);
|
||||
|
|
@ -140,7 +138,7 @@ export const fetchDiffFilesBatch = ({ commit, state, dispatch }) => {
|
|||
export const fetchDiffFilesMeta = ({ commit, state }) => {
|
||||
const worker = new TreeWorker();
|
||||
const urlParams = {
|
||||
view: window.gon?.features?.unifiedDiffLines ? 'inline' : state.diffViewType,
|
||||
view: 'inline',
|
||||
};
|
||||
|
||||
commit(types.SET_LOADING, true);
|
||||
|
|
@ -401,15 +399,10 @@ export const toggleFileDiscussions = ({ getters, dispatch }, diff) => {
|
|||
export const toggleFileDiscussionWrappers = ({ commit }, diff) => {
|
||||
const discussionWrappersExpanded = allDiscussionWrappersExpanded(diff);
|
||||
const lineCodesWithDiscussions = new Set();
|
||||
const { parallel_diff_lines: parallelLines, highlighted_diff_lines: inlineLines } = diff;
|
||||
const allLines = inlineLines.concat(
|
||||
parallelLines.map(line => line.left),
|
||||
parallelLines.map(line => line.right),
|
||||
);
|
||||
const lineHasDiscussion = line => Boolean(line?.discussions.length);
|
||||
const registerDiscussionLine = line => lineCodesWithDiscussions.add(line.line_code);
|
||||
|
||||
allLines.filter(lineHasDiscussion).forEach(registerDiscussionLine);
|
||||
diff[INLINE_DIFF_LINES_KEY].filter(lineHasDiscussion).forEach(registerDiscussionLine);
|
||||
|
||||
if (lineCodesWithDiscussions.size) {
|
||||
Array.from(lineCodesWithDiscussions).forEach(lineCode => {
|
||||
|
|
@ -508,61 +501,26 @@ export const receiveFullDiffError = ({ commit }, filePath) => {
|
|||
createFlash(s__('MergeRequest|Error loading full diff. Please try again.'));
|
||||
};
|
||||
|
||||
export const setExpandedDiffLines = ({ commit, state }, { file, data }) => {
|
||||
const expandedDiffLines = {
|
||||
highlighted_diff_lines: convertExpandLines({
|
||||
diffLines: file.highlighted_diff_lines,
|
||||
typeKey: TYPE_KEY,
|
||||
oldLineKey: OLD_LINE_KEY,
|
||||
newLineKey: NEW_LINE_KEY,
|
||||
data,
|
||||
mapLine: ({ line, oldLine, newLine }) =>
|
||||
Object.assign(line, {
|
||||
old_line: oldLine,
|
||||
new_line: newLine,
|
||||
line_code: `${file.file_hash}_${oldLine}_${newLine}`,
|
||||
}),
|
||||
}),
|
||||
parallel_diff_lines: convertExpandLines({
|
||||
diffLines: file.parallel_diff_lines,
|
||||
typeKey: [LEFT_LINE_KEY, TYPE_KEY],
|
||||
oldLineKey: [LEFT_LINE_KEY, OLD_LINE_KEY],
|
||||
newLineKey: [LEFT_LINE_KEY, NEW_LINE_KEY],
|
||||
data,
|
||||
mapLine: ({ line, oldLine, newLine }) => ({
|
||||
left: {
|
||||
...line,
|
||||
old_line: oldLine,
|
||||
line_code: `${file.file_hash}_${oldLine}_${newLine}`,
|
||||
},
|
||||
right: {
|
||||
...line,
|
||||
new_line: newLine,
|
||||
line_code: `${file.file_hash}_${newLine}_${oldLine}`,
|
||||
},
|
||||
export const setExpandedDiffLines = ({ commit }, { file, data }) => {
|
||||
const expandedDiffLines = convertExpandLines({
|
||||
diffLines: file[INLINE_DIFF_LINES_KEY],
|
||||
typeKey: TYPE_KEY,
|
||||
oldLineKey: OLD_LINE_KEY,
|
||||
newLineKey: NEW_LINE_KEY,
|
||||
data,
|
||||
mapLine: ({ line, oldLine, newLine }) =>
|
||||
Object.assign(line, {
|
||||
old_line: oldLine,
|
||||
new_line: newLine,
|
||||
line_code: `${file.file_hash}_${oldLine}_${newLine}`,
|
||||
}),
|
||||
}),
|
||||
};
|
||||
const unifiedDiffLinesEnabled = window.gon?.features?.unifiedDiffLines;
|
||||
const currentDiffLinesKey =
|
||||
state.diffViewType === INLINE_DIFF_VIEW_TYPE || unifiedDiffLinesEnabled
|
||||
? INLINE_DIFF_LINES_KEY
|
||||
: PARALLEL_DIFF_LINES_KEY;
|
||||
const hiddenDiffLinesKey =
|
||||
state.diffViewType === INLINE_DIFF_VIEW_TYPE ? PARALLEL_DIFF_LINES_KEY : INLINE_DIFF_LINES_KEY;
|
||||
});
|
||||
|
||||
if (!unifiedDiffLinesEnabled) {
|
||||
commit(types.SET_HIDDEN_VIEW_DIFF_FILE_LINES, {
|
||||
filePath: file.file_path,
|
||||
lines: expandedDiffLines[hiddenDiffLinesKey],
|
||||
});
|
||||
}
|
||||
|
||||
if (expandedDiffLines[currentDiffLinesKey].length > MAX_RENDERING_DIFF_LINES) {
|
||||
if (expandedDiffLines.length > MAX_RENDERING_DIFF_LINES) {
|
||||
let index = START_RENDERING_INDEX;
|
||||
commit(types.SET_CURRENT_VIEW_DIFF_FILE_LINES, {
|
||||
filePath: file.file_path,
|
||||
lines: expandedDiffLines[currentDiffLinesKey].slice(0, index),
|
||||
lines: expandedDiffLines.slice(0, index),
|
||||
});
|
||||
commit(types.TOGGLE_DIFF_FILE_RENDERING_MORE, file.file_path);
|
||||
|
||||
|
|
@ -571,10 +529,10 @@ export const setExpandedDiffLines = ({ commit, state }, { file, data }) => {
|
|||
|
||||
while (
|
||||
t.timeRemaining() >= MIN_RENDERING_MS &&
|
||||
index !== expandedDiffLines[currentDiffLinesKey].length &&
|
||||
index !== expandedDiffLines.length &&
|
||||
index - startIndex !== MAX_RENDERING_BULK_ROWS
|
||||
) {
|
||||
const line = expandedDiffLines[currentDiffLinesKey][index];
|
||||
const line = expandedDiffLines[index];
|
||||
|
||||
if (line) {
|
||||
commit(types.ADD_CURRENT_VIEW_DIFF_FILE_LINES, { filePath: file.file_path, line });
|
||||
|
|
@ -582,7 +540,7 @@ export const setExpandedDiffLines = ({ commit, state }, { file, data }) => {
|
|||
}
|
||||
}
|
||||
|
||||
if (index !== expandedDiffLines[currentDiffLinesKey].length) {
|
||||
if (index !== expandedDiffLines.length) {
|
||||
idleCallback(idleCb);
|
||||
} else {
|
||||
commit(types.TOGGLE_DIFF_FILE_RENDERING_MORE, file.file_path);
|
||||
|
|
@ -593,7 +551,7 @@ export const setExpandedDiffLines = ({ commit, state }, { file, data }) => {
|
|||
} else {
|
||||
commit(types.SET_CURRENT_VIEW_DIFF_FILE_LINES, {
|
||||
filePath: file.file_path,
|
||||
lines: expandedDiffLines[currentDiffLinesKey],
|
||||
lines: expandedDiffLines,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
|
@ -627,7 +585,7 @@ export const toggleFullDiff = ({ dispatch, commit, getters, state }, filePath) =
|
|||
}
|
||||
};
|
||||
|
||||
export function switchToFullDiffFromRenamedFile({ commit, dispatch, state }, { diffFile }) {
|
||||
export function switchToFullDiffFromRenamedFile({ commit, dispatch }, { diffFile }) {
|
||||
return axios
|
||||
.get(diffFile.context_lines_path, {
|
||||
params: {
|
||||
|
|
@ -638,7 +596,7 @@ export function switchToFullDiffFromRenamedFile({ commit, dispatch, state }, { d
|
|||
.then(({ data }) => {
|
||||
const lines = data.map((line, index) =>
|
||||
prepareLineForRenamedFile({
|
||||
diffViewType: window.gon?.features?.unifiedDiffLines ? 'inline' : state.diffViewType,
|
||||
diffViewType: 'inline',
|
||||
line,
|
||||
diffFile,
|
||||
index,
|
||||
|
|
|
|||
|
|
@ -1,6 +1,10 @@
|
|||
import { __, n__ } from '~/locale';
|
||||
import { parallelizeDiffLines } from './utils';
|
||||
import { PARALLEL_DIFF_VIEW_TYPE, INLINE_DIFF_VIEW_TYPE } from '../constants';
|
||||
import {
|
||||
PARALLEL_DIFF_VIEW_TYPE,
|
||||
INLINE_DIFF_VIEW_TYPE,
|
||||
INLINE_DIFF_LINES_KEY,
|
||||
} from '../constants';
|
||||
|
||||
export * from './getters_versions_dropdowns';
|
||||
|
||||
|
|
@ -54,24 +58,10 @@ export const diffHasAllCollapsedDiscussions = (state, getters) => diff => {
|
|||
* @param {Object} diff
|
||||
* @returns {Boolean}
|
||||
*/
|
||||
export const diffHasExpandedDiscussions = state => diff => {
|
||||
const lines = {
|
||||
[INLINE_DIFF_VIEW_TYPE]: diff.highlighted_diff_lines || [],
|
||||
[PARALLEL_DIFF_VIEW_TYPE]: (diff.parallel_diff_lines || []).reduce((acc, line) => {
|
||||
if (line.left) {
|
||||
acc.push(line.left);
|
||||
}
|
||||
|
||||
if (line.right) {
|
||||
acc.push(line.right);
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, []),
|
||||
};
|
||||
return lines[window.gon?.features?.unifiedDiffLines ? 'inline' : state.diffViewType]
|
||||
.filter(l => l.discussions.length >= 1)
|
||||
.some(l => l.discussionsExpanded);
|
||||
export const diffHasExpandedDiscussions = () => diff => {
|
||||
return diff[INLINE_DIFF_LINES_KEY].filter(l => l.discussions.length >= 1).some(
|
||||
l => l.discussionsExpanded,
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
|
|
@ -79,24 +69,8 @@ export const diffHasExpandedDiscussions = state => diff => {
|
|||
* @param {Boolean} diff
|
||||
* @returns {Boolean}
|
||||
*/
|
||||
export const diffHasDiscussions = state => diff => {
|
||||
const lines = {
|
||||
[INLINE_DIFF_VIEW_TYPE]: diff.highlighted_diff_lines || [],
|
||||
[PARALLEL_DIFF_VIEW_TYPE]: (diff.parallel_diff_lines || []).reduce((acc, line) => {
|
||||
if (line.left) {
|
||||
acc.push(line.left);
|
||||
}
|
||||
|
||||
if (line.right) {
|
||||
acc.push(line.right);
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, []),
|
||||
};
|
||||
return lines[window.gon?.features?.unifiedDiffLines ? 'inline' : state.diffViewType].some(
|
||||
l => l.discussions.length >= 1,
|
||||
);
|
||||
export const diffHasDiscussions = () => diff => {
|
||||
return diff[INLINE_DIFF_LINES_KEY].some(l => l.discussions.length >= 1);
|
||||
};
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -35,7 +35,6 @@ export const RECEIVE_FULL_DIFF_SUCCESS = 'RECEIVE_FULL_DIFF_SUCCESS';
|
|||
export const RECEIVE_FULL_DIFF_ERROR = 'RECEIVE_FULL_DIFF_ERROR';
|
||||
export const SET_FILE_COLLAPSED = 'SET_FILE_COLLAPSED';
|
||||
|
||||
export const SET_HIDDEN_VIEW_DIFF_FILE_LINES = 'SET_HIDDEN_VIEW_DIFF_FILE_LINES';
|
||||
export const SET_CURRENT_VIEW_DIFF_FILE_LINES = 'SET_CURRENT_VIEW_DIFF_FILE_LINES';
|
||||
export const ADD_CURRENT_VIEW_DIFF_FILE_LINES = 'ADD_CURRENT_VIEW_DIFF_FILE_LINES';
|
||||
export const TOGGLE_DIFF_FILE_RENDERING_MORE = 'TOGGLE_DIFF_FILE_RENDERING_MORE';
|
||||
|
|
|
|||
|
|
@ -1,10 +1,5 @@
|
|||
import Vue from 'vue';
|
||||
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
|
||||
import {
|
||||
DIFF_FILE_MANUAL_COLLAPSE,
|
||||
DIFF_FILE_AUTOMATIC_COLLAPSE,
|
||||
INLINE_DIFF_VIEW_TYPE,
|
||||
} from '../constants';
|
||||
import {
|
||||
findDiffFile,
|
||||
addLineReferences,
|
||||
|
|
@ -14,6 +9,11 @@ import {
|
|||
isDiscussionApplicableToLine,
|
||||
updateLineInFile,
|
||||
} from './utils';
|
||||
import {
|
||||
DIFF_FILE_MANUAL_COLLAPSE,
|
||||
DIFF_FILE_AUTOMATIC_COLLAPSE,
|
||||
INLINE_DIFF_LINES_KEY,
|
||||
} from '../constants';
|
||||
import * as types from './mutation_types';
|
||||
|
||||
function updateDiffFilesInState(state, files) {
|
||||
|
|
@ -109,25 +109,7 @@ export default {
|
|||
|
||||
if (!diffFile) return;
|
||||
|
||||
if (diffFile.highlighted_diff_lines.length) {
|
||||
diffFile.highlighted_diff_lines.find(l => l.line_code === lineCode).hasForm = hasForm;
|
||||
}
|
||||
|
||||
if (diffFile.parallel_diff_lines.length) {
|
||||
const line = diffFile.parallel_diff_lines.find(l => {
|
||||
const { left, right } = l;
|
||||
|
||||
return (left && left.line_code === lineCode) || (right && right.line_code === lineCode);
|
||||
});
|
||||
|
||||
if (line.left && line.left.line_code === lineCode) {
|
||||
line.left.hasForm = hasForm;
|
||||
}
|
||||
|
||||
if (line.right && line.right.line_code === lineCode) {
|
||||
line.right.hasForm = hasForm;
|
||||
}
|
||||
}
|
||||
diffFile[INLINE_DIFF_LINES_KEY].find(l => l.line_code === lineCode).hasForm = hasForm;
|
||||
},
|
||||
|
||||
[types.ADD_CONTEXT_LINES](state, options) {
|
||||
|
|
@ -157,11 +139,7 @@ export default {
|
|||
});
|
||||
|
||||
addContextLines({
|
||||
inlineLines: diffFile.highlighted_diff_lines,
|
||||
parallelLines: diffFile.parallel_diff_lines,
|
||||
diffViewType: window.gon?.features?.unifiedDiffLines
|
||||
? INLINE_DIFF_VIEW_TYPE
|
||||
: state.diffViewType,
|
||||
inlineLines: diffFile[INLINE_DIFF_LINES_KEY],
|
||||
contextLines: lines,
|
||||
bottom,
|
||||
lineNumbers,
|
||||
|
|
@ -219,8 +197,8 @@ export default {
|
|||
|
||||
state.diffFiles.forEach(file => {
|
||||
if (file.file_hash === fileHash) {
|
||||
if (file.highlighted_diff_lines.length) {
|
||||
file.highlighted_diff_lines.forEach(line => {
|
||||
if (file[INLINE_DIFF_LINES_KEY].length) {
|
||||
file[INLINE_DIFF_LINES_KEY].forEach(line => {
|
||||
Object.assign(
|
||||
line,
|
||||
setDiscussionsExpanded(lineCheck(line) ? mapDiscussions(line) : line),
|
||||
|
|
@ -228,25 +206,7 @@ export default {
|
|||
});
|
||||
}
|
||||
|
||||
if (file.parallel_diff_lines.length) {
|
||||
file.parallel_diff_lines.forEach(line => {
|
||||
const left = line.left && lineCheck(line.left);
|
||||
const right = line.right && lineCheck(line.right);
|
||||
|
||||
if (left || right) {
|
||||
Object.assign(line, {
|
||||
left: line.left ? setDiscussionsExpanded(mapDiscussions(line.left)) : null,
|
||||
right: line.right
|
||||
? setDiscussionsExpanded(mapDiscussions(line.right, () => !left))
|
||||
: null,
|
||||
});
|
||||
}
|
||||
|
||||
return line;
|
||||
});
|
||||
}
|
||||
|
||||
if (!file.parallel_diff_lines.length || !file.highlighted_diff_lines.length) {
|
||||
if (!file[INLINE_DIFF_LINES_KEY].length) {
|
||||
const newDiscussions = (file.discussions || [])
|
||||
.filter(d => d.id !== discussion.id)
|
||||
.concat(discussion);
|
||||
|
|
@ -369,31 +329,15 @@ export default {
|
|||
renderFile(file);
|
||||
}
|
||||
},
|
||||
[types.SET_HIDDEN_VIEW_DIFF_FILE_LINES](state, { filePath, lines }) {
|
||||
const file = state.diffFiles.find(f => f.file_path === filePath);
|
||||
const hiddenDiffLinesKey =
|
||||
state.diffViewType === 'inline' ? 'parallel_diff_lines' : 'highlighted_diff_lines';
|
||||
|
||||
file[hiddenDiffLinesKey] = lines;
|
||||
},
|
||||
[types.SET_CURRENT_VIEW_DIFF_FILE_LINES](state, { filePath, lines }) {
|
||||
const file = state.diffFiles.find(f => f.file_path === filePath);
|
||||
let currentDiffLinesKey;
|
||||
|
||||
if (window.gon?.features?.unifiedDiffLines || state.diffViewType === 'inline') {
|
||||
currentDiffLinesKey = 'highlighted_diff_lines';
|
||||
} else {
|
||||
currentDiffLinesKey = 'parallel_diff_lines';
|
||||
}
|
||||
|
||||
file[currentDiffLinesKey] = lines;
|
||||
file[INLINE_DIFF_LINES_KEY] = lines;
|
||||
},
|
||||
[types.ADD_CURRENT_VIEW_DIFF_FILE_LINES](state, { filePath, line }) {
|
||||
const file = state.diffFiles.find(f => f.file_path === filePath);
|
||||
const currentDiffLinesKey =
|
||||
state.diffViewType === 'inline' ? 'highlighted_diff_lines' : 'parallel_diff_lines';
|
||||
|
||||
file[currentDiffLinesKey].push(line);
|
||||
file[INLINE_DIFF_LINES_KEY].push(line);
|
||||
},
|
||||
[types.TOGGLE_DIFF_FILE_RENDERING_MORE](state, filePath) {
|
||||
const file = state.diffFiles.find(f => f.file_path === filePath);
|
||||
|
|
|
|||
|
|
@ -12,8 +12,7 @@ import {
|
|||
MATCH_LINE_TYPE,
|
||||
LINES_TO_BE_RENDERED_DIRECTLY,
|
||||
TREE_TYPE,
|
||||
INLINE_DIFF_VIEW_TYPE,
|
||||
PARALLEL_DIFF_VIEW_TYPE,
|
||||
INLINE_DIFF_LINES_KEY,
|
||||
SHOW_WHITESPACE,
|
||||
NO_SHOW_WHITESPACE,
|
||||
} from '../constants';
|
||||
|
|
@ -178,43 +177,16 @@ export const findIndexInInlineLines = (lines, lineNumbers) => {
|
|||
);
|
||||
};
|
||||
|
||||
export const findIndexInParallelLines = (lines, lineNumbers) => {
|
||||
const { oldLineNumber, newLineNumber } = lineNumbers;
|
||||
|
||||
return lines.findIndex(
|
||||
line =>
|
||||
line.left &&
|
||||
line.right &&
|
||||
line.left.old_line === oldLineNumber &&
|
||||
line.right.new_line === newLineNumber,
|
||||
);
|
||||
};
|
||||
|
||||
const indexGettersByViewType = {
|
||||
[INLINE_DIFF_VIEW_TYPE]: findIndexInInlineLines,
|
||||
[PARALLEL_DIFF_VIEW_TYPE]: findIndexInParallelLines,
|
||||
};
|
||||
|
||||
export const getPreviousLineIndex = (diffViewType, file, lineNumbers) => {
|
||||
const findIndex = indexGettersByViewType[diffViewType];
|
||||
const lines = {
|
||||
[INLINE_DIFF_VIEW_TYPE]: file.highlighted_diff_lines,
|
||||
[PARALLEL_DIFF_VIEW_TYPE]: file.parallel_diff_lines,
|
||||
};
|
||||
|
||||
return findIndex && findIndex(lines[diffViewType], lineNumbers);
|
||||
return findIndexInInlineLines(file[INLINE_DIFF_LINES_KEY], lineNumbers);
|
||||
};
|
||||
|
||||
export function removeMatchLine(diffFile, lineNumbers, bottom) {
|
||||
const indexForInline = findIndexInInlineLines(diffFile.highlighted_diff_lines, lineNumbers);
|
||||
const indexForParallel = findIndexInParallelLines(diffFile.parallel_diff_lines, lineNumbers);
|
||||
const indexForInline = findIndexInInlineLines(diffFile[INLINE_DIFF_LINES_KEY], lineNumbers);
|
||||
const factor = bottom ? 1 : -1;
|
||||
|
||||
if (indexForInline > -1) {
|
||||
diffFile.highlighted_diff_lines.splice(indexForInline + factor, 1);
|
||||
}
|
||||
if (indexForParallel > -1) {
|
||||
diffFile.parallel_diff_lines.splice(indexForParallel + factor, 1);
|
||||
diffFile[INLINE_DIFF_LINES_KEY].splice(indexForInline + factor, 1);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -257,24 +229,6 @@ export function addLineReferences(lines, lineNumbers, bottom, isExpandDown, next
|
|||
return linesWithNumbers;
|
||||
}
|
||||
|
||||
function addParallelContextLines(options) {
|
||||
const { parallelLines, contextLines, lineNumbers, isExpandDown } = options;
|
||||
const normalizedParallelLines = contextLines.map(line => ({
|
||||
left: line,
|
||||
right: line,
|
||||
line_code: line.line_code,
|
||||
}));
|
||||
const factor = isExpandDown ? 1 : 0;
|
||||
|
||||
if (!isExpandDown && options.bottom) {
|
||||
parallelLines.push(...normalizedParallelLines);
|
||||
} else {
|
||||
const parallelIndex = findIndexInParallelLines(parallelLines, lineNumbers);
|
||||
|
||||
parallelLines.splice(parallelIndex + factor, 0, ...normalizedParallelLines);
|
||||
}
|
||||
}
|
||||
|
||||
function addInlineContextLines(options) {
|
||||
const { inlineLines, contextLines, lineNumbers, isExpandDown } = options;
|
||||
const factor = isExpandDown ? 1 : 0;
|
||||
|
|
@ -289,16 +243,7 @@ function addInlineContextLines(options) {
|
|||
}
|
||||
|
||||
export function addContextLines(options) {
|
||||
const { diffViewType } = options;
|
||||
const contextLineHandlers = {
|
||||
[INLINE_DIFF_VIEW_TYPE]: addInlineContextLines,
|
||||
[PARALLEL_DIFF_VIEW_TYPE]: addParallelContextLines,
|
||||
};
|
||||
const contextLineHandler = contextLineHandlers[diffViewType];
|
||||
|
||||
if (contextLineHandler) {
|
||||
contextLineHandler(options);
|
||||
}
|
||||
addInlineContextLines(options);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -324,41 +269,29 @@ export function trimFirstCharOfLineContent(line = {}) {
|
|||
return parsedLine;
|
||||
}
|
||||
|
||||
function getLineCode({ left, right }, index) {
|
||||
if (left && left.line_code) {
|
||||
return left.line_code;
|
||||
} else if (right && right.line_code) {
|
||||
return right.line_code;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
|
||||
function diffFileUniqueId(file) {
|
||||
return `${file.content_sha}-${file.file_hash}`;
|
||||
}
|
||||
|
||||
function mergeTwoFiles(target, source) {
|
||||
const originalInline = target.highlighted_diff_lines;
|
||||
const originalParallel = target.parallel_diff_lines;
|
||||
const originalInline = target[INLINE_DIFF_LINES_KEY];
|
||||
const missingInline = !originalInline.length;
|
||||
const missingParallel = !originalParallel.length;
|
||||
|
||||
return {
|
||||
...target,
|
||||
highlighted_diff_lines: missingInline ? source.highlighted_diff_lines : originalInline,
|
||||
parallel_diff_lines: missingParallel ? source.parallel_diff_lines : originalParallel,
|
||||
[INLINE_DIFF_LINES_KEY]: missingInline ? source[INLINE_DIFF_LINES_KEY] : originalInline,
|
||||
parallel_diff_lines: null,
|
||||
renderIt: source.renderIt,
|
||||
collapsed: source.collapsed,
|
||||
};
|
||||
}
|
||||
|
||||
function ensureBasicDiffFileLines(file) {
|
||||
const missingInline = !file.highlighted_diff_lines;
|
||||
const missingParallel = !file.parallel_diff_lines || window.gon?.features?.unifiedDiffLines;
|
||||
const missingInline = !file[INLINE_DIFF_LINES_KEY];
|
||||
|
||||
Object.assign(file, {
|
||||
highlighted_diff_lines: missingInline ? [] : file.highlighted_diff_lines,
|
||||
parallel_diff_lines: missingParallel ? [] : file.parallel_diff_lines,
|
||||
[INLINE_DIFF_LINES_KEY]: missingInline ? [] : file[INLINE_DIFF_LINES_KEY],
|
||||
parallel_diff_lines: null,
|
||||
});
|
||||
|
||||
return file;
|
||||
|
|
@ -382,7 +315,7 @@ function prepareLine(line, file) {
|
|||
}
|
||||
}
|
||||
|
||||
export function prepareLineForRenamedFile({ line, diffViewType, diffFile, index = 0 }) {
|
||||
export function prepareLineForRenamedFile({ line, diffFile, index = 0 }) {
|
||||
/*
|
||||
Renamed files are a little different than other diffs, which
|
||||
is why this is distinct from `prepareDiffFileLines` below.
|
||||
|
|
@ -407,48 +340,23 @@ export function prepareLineForRenamedFile({ line, diffViewType, diffFile, index
|
|||
|
||||
prepareLine(cleanLine, diffFile); // WARNING: In-Place Mutations!
|
||||
|
||||
if (diffViewType === PARALLEL_DIFF_VIEW_TYPE) {
|
||||
return {
|
||||
left: { ...cleanLine },
|
||||
right: { ...cleanLine },
|
||||
line_code: cleanLine.line_code,
|
||||
};
|
||||
}
|
||||
|
||||
return cleanLine;
|
||||
}
|
||||
|
||||
function prepareDiffFileLines(file) {
|
||||
const inlineLines = file.highlighted_diff_lines;
|
||||
const parallelLines = file.parallel_diff_lines;
|
||||
let parallelLinesCount = 0;
|
||||
const inlineLines = file[INLINE_DIFF_LINES_KEY];
|
||||
|
||||
inlineLines.forEach(line => prepareLine(line, file)); // WARNING: In-Place Mutations!
|
||||
|
||||
parallelLines.forEach((line, index) => {
|
||||
Object.assign(line, { line_code: getLineCode(line, index) });
|
||||
|
||||
if (line.left) {
|
||||
parallelLinesCount += 1;
|
||||
prepareLine(line.left, file); // WARNING: In-Place Mutations!
|
||||
}
|
||||
|
||||
if (line.right) {
|
||||
parallelLinesCount += 1;
|
||||
prepareLine(line.right, file); // WARNING: In-Place Mutations!
|
||||
}
|
||||
});
|
||||
|
||||
Object.assign(file, {
|
||||
inlineLinesCount: inlineLines.length,
|
||||
parallelLinesCount,
|
||||
});
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
function getVisibleDiffLines(file) {
|
||||
return Math.max(file.inlineLinesCount, file.parallelLinesCount);
|
||||
return file.inlineLinesCount;
|
||||
}
|
||||
|
||||
function finalizeDiffFile(file) {
|
||||
|
|
@ -490,43 +398,14 @@ export function prepareDiffData(diff, priorFiles = []) {
|
|||
|
||||
export function getDiffPositionByLineCode(diffFiles) {
|
||||
let lines = [];
|
||||
const hasInlineDiffs = diffFiles.some(file => file.highlighted_diff_lines.length > 0);
|
||||
|
||||
if (hasInlineDiffs) {
|
||||
// In either of these cases, we can use `highlighted_diff_lines` because
|
||||
// that will include all of the parallel diff lines, too
|
||||
lines = diffFiles.reduce((acc, diffFile) => {
|
||||
diffFile[INLINE_DIFF_LINES_KEY].forEach(line => {
|
||||
acc.push({ file: diffFile, line });
|
||||
});
|
||||
|
||||
lines = diffFiles.reduce((acc, diffFile) => {
|
||||
diffFile.highlighted_diff_lines.forEach(line => {
|
||||
acc.push({ file: diffFile, line });
|
||||
});
|
||||
|
||||
return acc;
|
||||
}, []);
|
||||
} else {
|
||||
// If we're in single diff view mode and the inline lines haven't been
|
||||
// loaded yet, we need to parse the parallel lines
|
||||
|
||||
lines = diffFiles.reduce((acc, diffFile) => {
|
||||
diffFile.parallel_diff_lines.forEach(pair => {
|
||||
// It's possible for a parallel line to have an opposite line that doesn't exist
|
||||
// For example: *deleted* lines will have `null` right lines, while
|
||||
// *added* lines will have `null` left lines.
|
||||
// So we have to check each line before we push it onto the array so we're not
|
||||
// pushing null line diffs
|
||||
|
||||
if (pair.left) {
|
||||
acc.push({ file: diffFile, line: pair.left });
|
||||
}
|
||||
|
||||
if (pair.right) {
|
||||
acc.push({ file: diffFile, line: pair.right });
|
||||
}
|
||||
});
|
||||
|
||||
return acc;
|
||||
}, []);
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
|
||||
return lines.reduce((acc, { file, line }) => {
|
||||
if (line.line_code) {
|
||||
|
|
@ -739,24 +618,10 @@ export const convertExpandLines = ({
|
|||
export const idleCallback = cb => requestIdleCallback(cb);
|
||||
|
||||
function getLinesFromFileByLineCode(file, lineCode) {
|
||||
const parallelLines = file.parallel_diff_lines;
|
||||
const inlineLines = file.highlighted_diff_lines;
|
||||
const inlineLines = file[INLINE_DIFF_LINES_KEY];
|
||||
const matchesCode = line => line.line_code === lineCode;
|
||||
|
||||
return [
|
||||
...parallelLines.reduce((acc, line) => {
|
||||
if (line.left) {
|
||||
acc.push(line.left);
|
||||
}
|
||||
|
||||
if (line.right) {
|
||||
acc.push(line.right);
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, []),
|
||||
...inlineLines,
|
||||
].filter(matchesCode);
|
||||
return inlineLines.filter(matchesCode);
|
||||
}
|
||||
|
||||
export const updateLineInFile = (selectedFile, lineCode, updateFn) => {
|
||||
|
|
@ -771,12 +636,7 @@ export const allDiscussionWrappersExpanded = diff => {
|
|||
}
|
||||
};
|
||||
|
||||
diff.parallel_diff_lines.forEach(line => {
|
||||
changeExpandedResult(line.left);
|
||||
changeExpandedResult(line.right);
|
||||
});
|
||||
|
||||
diff.highlighted_diff_lines.forEach(line => {
|
||||
diff[INLINE_DIFF_LINES_KEY].forEach(line => {
|
||||
changeExpandedResult(line);
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ import {
|
|||
commentLineOptions,
|
||||
formatLineRange,
|
||||
} from './multiline_comment_utils';
|
||||
import { INLINE_DIFF_LINES_KEY } from '~/diffs/constants';
|
||||
|
||||
export default {
|
||||
name: 'NoteableNote',
|
||||
|
|
@ -169,12 +170,8 @@ export default {
|
|||
return this.line && this.startLineNumber !== this.endLineNumber;
|
||||
},
|
||||
commentLineOptions() {
|
||||
const sideA = this.line.type === 'new' ? 'right' : 'left';
|
||||
const sideB = sideA === 'left' ? 'right' : 'left';
|
||||
const lines = this.diffFile.highlighted_diff_lines.length
|
||||
? this.diffFile.highlighted_diff_lines
|
||||
: this.diffFile.parallel_diff_lines.map(l => l[sideA] || l[sideB]);
|
||||
return commentLineOptions(lines, this.commentLineStart, this.line.line_code, sideA);
|
||||
const lines = this.diffFile[INLINE_DIFF_LINES_KEY].length;
|
||||
return commentLineOptions(lines, this.commentLineStart, this.line.line_code);
|
||||
},
|
||||
diffFile() {
|
||||
if (this.commentLineStart.line_code) {
|
||||
|
|
|
|||
|
|
@ -1,6 +1,13 @@
|
|||
<script>
|
||||
import { mapGetters } from 'vuex';
|
||||
import { GlModalDirective, GlTooltipDirective, GlFriendlyWrap, GlIcon, GlButton } from '@gitlab/ui';
|
||||
import { mapState, mapGetters, mapActions } from 'vuex';
|
||||
import {
|
||||
GlModalDirective,
|
||||
GlTooltipDirective,
|
||||
GlFriendlyWrap,
|
||||
GlIcon,
|
||||
GlButton,
|
||||
GlPagination,
|
||||
} from '@gitlab/ui';
|
||||
import { __ } from '~/locale';
|
||||
import TestCaseDetails from './test_case_details.vue';
|
||||
|
||||
|
|
@ -10,6 +17,7 @@ export default {
|
|||
GlIcon,
|
||||
GlFriendlyWrap,
|
||||
GlButton,
|
||||
GlPagination,
|
||||
TestCaseDetails,
|
||||
},
|
||||
directives: {
|
||||
|
|
@ -24,11 +32,15 @@ export default {
|
|||
},
|
||||
},
|
||||
computed: {
|
||||
...mapGetters(['getSuiteTests']),
|
||||
...mapState(['pageInfo']),
|
||||
...mapGetters(['getSuiteTests', 'getSuiteTestCount']),
|
||||
hasSuites() {
|
||||
return this.getSuiteTests.length > 0;
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
...mapActions(['setPage']),
|
||||
},
|
||||
wrapSymbols: ['::', '#', '.', '_', '-', '/', '\\'],
|
||||
};
|
||||
</script>
|
||||
|
|
@ -129,6 +141,14 @@ export default {
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<gl-pagination
|
||||
v-model="pageInfo.page"
|
||||
class="gl-display-flex gl-justify-content-center"
|
||||
:per-page="pageInfo.perPage"
|
||||
:total-items="getSuiteTestCount"
|
||||
@input="setPage"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div v-else>
|
||||
|
|
|
|||
|
|
@ -47,6 +47,7 @@ export const fetchTestSuite = ({ state, commit, dispatch }, index) => {
|
|||
});
|
||||
};
|
||||
|
||||
export const setPage = ({ commit }, page) => commit(types.SET_PAGE, page);
|
||||
export const setSelectedSuiteIndex = ({ commit }, data) =>
|
||||
commit(types.SET_SELECTED_SUITE_INDEX, data);
|
||||
export const removeSelectedSuiteIndex = ({ commit }) =>
|
||||
|
|
|
|||
|
|
@ -14,5 +14,10 @@ export const getSelectedSuite = state =>
|
|||
|
||||
export const getSuiteTests = state => {
|
||||
const { test_cases: testCases = [] } = getSelectedSuite(state);
|
||||
return testCases.map(addIconStatus);
|
||||
const { page, perPage } = state.pageInfo;
|
||||
const start = (page - 1) * perPage;
|
||||
|
||||
return testCases.map(addIconStatus).slice(start, start + perPage);
|
||||
};
|
||||
|
||||
export const getSuiteTestCount = state => getSelectedSuite(state)?.test_cases?.length || 0;
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
export const SET_PAGE = 'SET_PAGE';
|
||||
export const SET_SELECTED_SUITE_INDEX = 'SET_SELECTED_SUITE_INDEX';
|
||||
export const SET_SUMMARY = 'SET_SUMMARY';
|
||||
export const SET_SUITE = 'SET_SUITE';
|
||||
|
|
|
|||
|
|
@ -1,6 +1,14 @@
|
|||
import * as types from './mutation_types';
|
||||
|
||||
export default {
|
||||
[types.SET_PAGE](state, page) {
|
||||
Object.assign(state, {
|
||||
pageInfo: Object.assign(state.pageInfo, {
|
||||
page,
|
||||
}),
|
||||
});
|
||||
},
|
||||
|
||||
[types.SET_SUITE](state, { suite = {}, index = null }) {
|
||||
state.testReports.test_suites[index] = { ...suite, hasFullSuite: true };
|
||||
},
|
||||
|
|
|
|||
|
|
@ -4,4 +4,8 @@ export default ({ summaryEndpoint = '', suiteEndpoint = '' }) => ({
|
|||
testReports: {},
|
||||
selectedSuiteIndex: null,
|
||||
isLoading: false,
|
||||
pageInfo: {
|
||||
page: 1,
|
||||
perPage: 20,
|
||||
},
|
||||
});
|
||||
|
|
|
|||
|
|
@ -69,7 +69,7 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic
|
|||
}
|
||||
|
||||
options = additional_attributes.merge(
|
||||
diff_view: unified_diff_lines_view_type(@merge_request.project),
|
||||
diff_view: "inline",
|
||||
merge_ref_head_diff: render_merge_ref_head_diff?
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -36,7 +36,6 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
|
|||
push_frontend_feature_flag(:approvals_commented_by, @project, default_enabled: true)
|
||||
push_frontend_feature_flag(:hide_jump_to_next_unresolved_in_threads, default_enabled: true)
|
||||
push_frontend_feature_flag(:merge_request_widget_graphql, @project)
|
||||
push_frontend_feature_flag(:unified_diff_lines, @project, default_enabled: true)
|
||||
push_frontend_feature_flag(:unified_diff_components, @project)
|
||||
push_frontend_feature_flag(:highlight_current_diff_row, @project)
|
||||
push_frontend_feature_flag(:default_merge_ref_for_diffs, @project)
|
||||
|
|
@ -481,7 +480,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
|
|||
|
||||
def endpoint_metadata_url(project, merge_request)
|
||||
params = request.query_parameters
|
||||
params[:view] = unified_diff_lines_view_type(project)
|
||||
params[:view] = "inline"
|
||||
|
||||
if Feature.enabled?(:default_merge_ref_for_diffs, project)
|
||||
params = params.merge(diff_head: true)
|
||||
|
|
|
|||
|
|
@ -203,14 +203,6 @@ module DiffHelper
|
|||
set_secure_cookie(:diff_view, params.delete(:view), type: CookiesHelper::COOKIE_TYPE_PERMANENT) if params[:view].present?
|
||||
end
|
||||
|
||||
def unified_diff_lines_view_type(project)
|
||||
if Feature.enabled?(:unified_diff_lines, project, default_enabled: true)
|
||||
'inline'
|
||||
else
|
||||
diff_view
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def diff_btn(title, name, selected)
|
||||
|
|
|
|||
|
|
@ -15,9 +15,11 @@ module StorageHelper
|
|||
counter_wikis: storage_counter(statistics.wiki_size),
|
||||
counter_build_artifacts: storage_counter(statistics.build_artifacts_size),
|
||||
counter_lfs_objects: storage_counter(statistics.lfs_objects_size),
|
||||
counter_snippets: storage_counter(statistics.snippets_size)
|
||||
counter_snippets: storage_counter(statistics.snippets_size),
|
||||
counter_packages: storage_counter(statistics.packages_size),
|
||||
counter_uploads: storage_counter(statistics.uploads_size)
|
||||
}
|
||||
|
||||
_("Repository: %{counter_repositories} / Wikis: %{counter_wikis} / Build Artifacts: %{counter_build_artifacts} / LFS: %{counter_lfs_objects} / Snippets: %{counter_snippets}") % counters
|
||||
_("Repository: %{counter_repositories} / Wikis: %{counter_wikis} / Build Artifacts: %{counter_build_artifacts} / LFS: %{counter_lfs_objects} / Snippets: %{counter_snippets} / Packages: %{counter_packages} / Uploads: %{counter_uploads}") % counters
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ class Analytics::DevopsAdoption::Segment < ApplicationRecord
|
|||
has_many :groups, through: :segment_selections
|
||||
|
||||
validates :name, presence: true, uniqueness: true, length: { maximum: 255 }
|
||||
validate :validate_segment_count
|
||||
validate :validate_segment_count, on: :create
|
||||
|
||||
accepts_nested_attributes_for :segment_selections, allow_destroy: true
|
||||
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ class Analytics::DevopsAdoption::SegmentSelection < ApplicationRecord
|
|||
validates :group_id, uniqueness: { scope: :segment_id, if: :group }
|
||||
|
||||
validate :exclusive_project_or_group
|
||||
validate :validate_selection_count
|
||||
validate :validate_selection_count, on: :create
|
||||
|
||||
private
|
||||
|
||||
|
|
@ -27,9 +27,9 @@ class Analytics::DevopsAdoption::SegmentSelection < ApplicationRecord
|
|||
def validate_selection_count
|
||||
return unless segment
|
||||
|
||||
selection_count_for_segment = self.class.where(segment: segment).count
|
||||
|
||||
if selection_count_for_segment >= ALLOWED_SELECTIONS_PER_SEGMENT
|
||||
# handle single model creation and bulk creation from accepts_nested_attributes_for
|
||||
selections = segment.segment_selections + [self]
|
||||
if selections.reject(&:marked_for_destruction?).uniq.size > ALLOWED_SELECTIONS_PER_SEGMENT
|
||||
errors.add(:segment, s_('DevopsAdoptionSegmentSelection|The maximum number of selections has been reached'))
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Add migration that updated users that don't need to have 2fa established
|
||||
merge_request: 47193
|
||||
author:
|
||||
type: fixed
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Add GraphQL mutations for Devops Adoption Segment
|
||||
merge_request: 47066
|
||||
author:
|
||||
type: added
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Paginate unit test report
|
||||
merge_request: 47953
|
||||
author:
|
||||
type: performance
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
---
|
||||
name: unified_diff_lines
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/40131
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/241188
|
||||
milestone: '13.4'
|
||||
type: development
|
||||
group: group::source code
|
||||
default_enabled: true
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
# # frozen_string_literal: true
|
||||
|
||||
class ScheduleUpdateExistingUsersThatRequireTwoFactorAuth < ActiveRecord::Migration[6.0]
|
||||
include Gitlab::Database::MigrationHelpers
|
||||
|
||||
DOWNTIME = false
|
||||
MIGRATION = 'UpdateExistingUsersThatRequireTwoFactorAuth'
|
||||
DELAY_INTERVAL = 2.minutes
|
||||
BATCH_SIZE = 1000
|
||||
INDEX_NAME = 'index_users_on_require_two_factor_authentication_from_group'
|
||||
|
||||
disable_ddl_transaction!
|
||||
|
||||
class User < ActiveRecord::Base
|
||||
include EachBatch
|
||||
|
||||
self.table_name = 'users'
|
||||
end
|
||||
|
||||
def up
|
||||
add_concurrent_index :users,
|
||||
:require_two_factor_authentication_from_group,
|
||||
where: 'require_two_factor_authentication_from_group = TRUE',
|
||||
name: INDEX_NAME
|
||||
|
||||
relation = User.where(require_two_factor_authentication_from_group: true)
|
||||
|
||||
queue_background_migration_jobs_by_range_at_intervals(
|
||||
relation, MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE)
|
||||
end
|
||||
|
||||
def down
|
||||
remove_concurrent_index_by_name :users, INDEX_NAME
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1 @@
|
|||
4875c1def91676d73f14c2fbff9318fc4ab1f26535503fd9700044b687e9714e
|
||||
|
|
@ -22203,6 +22203,8 @@ CREATE INDEX index_users_on_name_trigram ON users USING gin (name gin_trgm_ops);
|
|||
|
||||
CREATE INDEX index_users_on_public_email ON users USING btree (public_email) WHERE ((public_email)::text <> ''::text);
|
||||
|
||||
CREATE INDEX index_users_on_require_two_factor_authentication_from_group ON users USING btree (require_two_factor_authentication_from_group);
|
||||
|
||||
CREATE UNIQUE INDEX index_users_on_reset_password_token ON users USING btree (reset_password_token);
|
||||
|
||||
CREATE INDEX index_users_on_state ON users USING btree (state);
|
||||
|
|
|
|||
|
|
@ -180,12 +180,6 @@ the steps bellow.
|
|||
Feature.enable(:repository_push_audit_event)
|
||||
```
|
||||
|
||||
## Retention policy
|
||||
|
||||
On GitLab.com, Audit Event records become subject to deletion after 400 days, or when your license is downgraded to a tier that does not include access to Audit Events. Data that is subject to deletion will be deleted at GitLab's discretion, possibly without additional notice.
|
||||
|
||||
If you require a longer retention period, you should independently archive your Audit Event data, which you can retrieve through the [Audit Events API](../api/audit_events.md).
|
||||
|
||||
## Export to CSV **(PREMIUM ONLY)**
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/1449) in [GitLab Premium](https://about.gitlab.com/pricing/) 13.4.
|
||||
|
|
|
|||
|
|
@ -54,7 +54,7 @@ PUT /application/appearance
|
|||
| --------------------------------- | ------- | -------- | ----------- |
|
||||
| `title` | string | no | Instance title on the sign in / sign up page
|
||||
| `description` | string | no | Markdown text shown on the sign in / sign up page
|
||||
| `logo` | mixed | no | Instance image used on the sign in / sign up page
|
||||
| `logo` | mixed | no | Instance image used on the sign in / sign up page. See [Change logo](#change-logo)
|
||||
| `header_logo` | mixed | no | Instance image used for the main navigation bar
|
||||
| `favicon` | mixed | no | Instance favicon in `.ico` or `.png` format
|
||||
| `new_project_guidelines` | string | no | Markdown text shown on the new project page
|
||||
|
|
@ -87,3 +87,36 @@ Example response:
|
|||
"email_header_and_footer_enabled": true
|
||||
}
|
||||
```
|
||||
|
||||
## Change logo
|
||||
|
||||
Upload a logo to your GitLab instance.
|
||||
|
||||
To upload an avatar from your file system, use the `--form` argument. This causes
|
||||
cURL to post data using the header `Content-Type: multipart/form-data`. The
|
||||
`file=` parameter must point to an image file on your file system and be
|
||||
preceded by `@`.
|
||||
|
||||
```plaintext
|
||||
PUT /application/appearance
|
||||
```
|
||||
|
||||
| Attribute | Type | Required | Description |
|
||||
| --------- | ------ | -------- | -------------- |
|
||||
| `logo` | string | Yes | File to upload |
|
||||
|
||||
Example request:
|
||||
|
||||
```shell
|
||||
curl --location --request PUT "https://gitlab.example.com/api/v4/application/appearance?data=image/png" \
|
||||
--header "Content-Type: multipart/form-data" \
|
||||
--header "PRIVATE-TOKEN: <your_access_token>" \
|
||||
--form "logo=@/path/to/logo.png"
|
||||
```
|
||||
|
||||
Returned object:
|
||||
|
||||
```json
|
||||
{
|
||||
"logo":"/uploads/-/system/appearance/logo/1/logo.png"
|
||||
```
|
||||
|
|
|
|||
|
|
@ -932,6 +932,11 @@ type AlertTodoCreatePayload {
|
|||
todo: Todo
|
||||
}
|
||||
|
||||
"""
|
||||
Identifier of Analytics::DevopsAdoption::Segment
|
||||
"""
|
||||
scalar AnalyticsDevopsAdoptionSegmentID
|
||||
|
||||
"""
|
||||
User availability status
|
||||
"""
|
||||
|
|
@ -3894,6 +3899,46 @@ type CreateCustomEmojiPayload {
|
|||
errors: [String!]!
|
||||
}
|
||||
|
||||
"""
|
||||
Autogenerated input type of CreateDevopsAdoptionSegment
|
||||
"""
|
||||
input CreateDevopsAdoptionSegmentInput {
|
||||
"""
|
||||
A unique identifier for the client performing the mutation.
|
||||
"""
|
||||
clientMutationId: String
|
||||
|
||||
"""
|
||||
The array of group IDs to set for the segment
|
||||
"""
|
||||
groupIds: [GroupID!]
|
||||
|
||||
"""
|
||||
Name of the segment
|
||||
"""
|
||||
name: String!
|
||||
}
|
||||
|
||||
"""
|
||||
Autogenerated return type of CreateDevopsAdoptionSegment
|
||||
"""
|
||||
type CreateDevopsAdoptionSegmentPayload {
|
||||
"""
|
||||
A unique identifier for the client performing the mutation.
|
||||
"""
|
||||
clientMutationId: String
|
||||
|
||||
"""
|
||||
Errors encountered during execution of the mutation.
|
||||
"""
|
||||
errors: [String!]!
|
||||
|
||||
"""
|
||||
The segment after mutation
|
||||
"""
|
||||
segment: DevopsAdoptionSegment
|
||||
}
|
||||
|
||||
"""
|
||||
Autogenerated input type of CreateDiffNote
|
||||
"""
|
||||
|
|
@ -5298,6 +5343,36 @@ type DeleteAnnotationPayload {
|
|||
errors: [String!]!
|
||||
}
|
||||
|
||||
"""
|
||||
Autogenerated input type of DeleteDevopsAdoptionSegment
|
||||
"""
|
||||
input DeleteDevopsAdoptionSegmentInput {
|
||||
"""
|
||||
A unique identifier for the client performing the mutation.
|
||||
"""
|
||||
clientMutationId: String
|
||||
|
||||
"""
|
||||
ID of the segment
|
||||
"""
|
||||
id: AnalyticsDevopsAdoptionSegmentID!
|
||||
}
|
||||
|
||||
"""
|
||||
Autogenerated return type of DeleteDevopsAdoptionSegment
|
||||
"""
|
||||
type DeleteDevopsAdoptionSegmentPayload {
|
||||
"""
|
||||
A unique identifier for the client performing the mutation.
|
||||
"""
|
||||
clientMutationId: String
|
||||
|
||||
"""
|
||||
Errors encountered during execution of the mutation.
|
||||
"""
|
||||
errors: [String!]!
|
||||
}
|
||||
|
||||
"""
|
||||
The response from the AdminSidekiqQueuesDeleteJobs mutation
|
||||
"""
|
||||
|
|
@ -13704,6 +13779,7 @@ type Mutation {
|
|||
. Available only when feature flag `custom_emoji` is enabled
|
||||
"""
|
||||
createCustomEmoji(input: CreateCustomEmojiInput!): CreateCustomEmojiPayload
|
||||
createDevopsAdoptionSegment(input: CreateDevopsAdoptionSegmentInput!): CreateDevopsAdoptionSegmentPayload
|
||||
createDiffNote(input: CreateDiffNoteInput!): CreateDiffNotePayload
|
||||
createEpic(input: CreateEpicInput!): CreateEpicPayload
|
||||
createImageDiffNote(input: CreateImageDiffNoteInput!): CreateImageDiffNotePayload
|
||||
|
|
@ -13723,6 +13799,7 @@ type Mutation {
|
|||
dastSiteTokenCreate(input: DastSiteTokenCreateInput!): DastSiteTokenCreatePayload
|
||||
dastSiteValidationCreate(input: DastSiteValidationCreateInput!): DastSiteValidationCreatePayload
|
||||
deleteAnnotation(input: DeleteAnnotationInput!): DeleteAnnotationPayload
|
||||
deleteDevopsAdoptionSegment(input: DeleteDevopsAdoptionSegmentInput!): DeleteDevopsAdoptionSegmentPayload
|
||||
designManagementDelete(input: DesignManagementDeleteInput!): DesignManagementDeletePayload
|
||||
designManagementMove(input: DesignManagementMoveInput!): DesignManagementMovePayload
|
||||
designManagementUpload(input: DesignManagementUploadInput!): DesignManagementUploadPayload
|
||||
|
|
@ -13804,6 +13881,7 @@ type Mutation {
|
|||
updateBoardEpicUserPreferences(input: UpdateBoardEpicUserPreferencesInput!): UpdateBoardEpicUserPreferencesPayload
|
||||
updateBoardList(input: UpdateBoardListInput!): UpdateBoardListPayload
|
||||
updateContainerExpirationPolicy(input: UpdateContainerExpirationPolicyInput!): UpdateContainerExpirationPolicyPayload
|
||||
updateDevopsAdoptionSegment(input: UpdateDevopsAdoptionSegmentInput!): UpdateDevopsAdoptionSegmentPayload
|
||||
updateEpic(input: UpdateEpicInput!): UpdateEpicPayload
|
||||
|
||||
"""
|
||||
|
|
@ -22167,6 +22245,51 @@ type UpdateContainerExpirationPolicyPayload {
|
|||
errors: [String!]!
|
||||
}
|
||||
|
||||
"""
|
||||
Autogenerated input type of UpdateDevopsAdoptionSegment
|
||||
"""
|
||||
input UpdateDevopsAdoptionSegmentInput {
|
||||
"""
|
||||
A unique identifier for the client performing the mutation.
|
||||
"""
|
||||
clientMutationId: String
|
||||
|
||||
"""
|
||||
The array of group IDs to set for the segment
|
||||
"""
|
||||
groupIds: [GroupID!]
|
||||
|
||||
"""
|
||||
ID of the segment
|
||||
"""
|
||||
id: AnalyticsDevopsAdoptionSegmentID!
|
||||
|
||||
"""
|
||||
Name of the segment
|
||||
"""
|
||||
name: String!
|
||||
}
|
||||
|
||||
"""
|
||||
Autogenerated return type of UpdateDevopsAdoptionSegment
|
||||
"""
|
||||
type UpdateDevopsAdoptionSegmentPayload {
|
||||
"""
|
||||
A unique identifier for the client performing the mutation.
|
||||
"""
|
||||
clientMutationId: String
|
||||
|
||||
"""
|
||||
Errors encountered during execution of the mutation.
|
||||
"""
|
||||
errors: [String!]!
|
||||
|
||||
"""
|
||||
The segment after mutation
|
||||
"""
|
||||
segment: DevopsAdoptionSegment
|
||||
}
|
||||
|
||||
input UpdateDiffImagePositionInput {
|
||||
"""
|
||||
Total height of the image
|
||||
|
|
|
|||
|
|
@ -2401,6 +2401,16 @@
|
|||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "SCALAR",
|
||||
"name": "AnalyticsDevopsAdoptionSegmentID",
|
||||
"description": "Identifier of Analytics::DevopsAdoption::Segment",
|
||||
"fields": null,
|
||||
"inputFields": null,
|
||||
"interfaces": null,
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "ENUM",
|
||||
"name": "AvailabilityEnum",
|
||||
|
|
@ -10627,6 +10637,126 @@
|
|||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "CreateDevopsAdoptionSegmentInput",
|
||||
"description": "Autogenerated input type of CreateDevopsAdoptionSegment",
|
||||
"fields": null,
|
||||
"inputFields": [
|
||||
{
|
||||
"name": "name",
|
||||
"description": "Name of the segment",
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "SCALAR",
|
||||
"name": "String",
|
||||
"ofType": null
|
||||
}
|
||||
},
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "groupIds",
|
||||
"description": "The array of group IDs to set for the segment",
|
||||
"type": {
|
||||
"kind": "LIST",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "SCALAR",
|
||||
"name": "GroupID",
|
||||
"ofType": null
|
||||
}
|
||||
}
|
||||
},
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "clientMutationId",
|
||||
"description": "A unique identifier for the client performing the mutation.",
|
||||
"type": {
|
||||
"kind": "SCALAR",
|
||||
"name": "String",
|
||||
"ofType": null
|
||||
},
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"interfaces": null,
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "OBJECT",
|
||||
"name": "CreateDevopsAdoptionSegmentPayload",
|
||||
"description": "Autogenerated return type of CreateDevopsAdoptionSegment",
|
||||
"fields": [
|
||||
{
|
||||
"name": "clientMutationId",
|
||||
"description": "A unique identifier for the client performing the mutation.",
|
||||
"args": [
|
||||
|
||||
],
|
||||
"type": {
|
||||
"kind": "SCALAR",
|
||||
"name": "String",
|
||||
"ofType": null
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "errors",
|
||||
"description": "Errors encountered during execution of the mutation.",
|
||||
"args": [
|
||||
|
||||
],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "LIST",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "SCALAR",
|
||||
"name": "String",
|
||||
"ofType": null
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "segment",
|
||||
"description": "The segment after mutation",
|
||||
"args": [
|
||||
|
||||
],
|
||||
"type": {
|
||||
"kind": "OBJECT",
|
||||
"name": "DevopsAdoptionSegment",
|
||||
"ofType": null
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
}
|
||||
],
|
||||
"inputFields": null,
|
||||
"interfaces": [
|
||||
|
||||
],
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "CreateDiffNoteInput",
|
||||
|
|
@ -14498,6 +14628,94 @@
|
|||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "DeleteDevopsAdoptionSegmentInput",
|
||||
"description": "Autogenerated input type of DeleteDevopsAdoptionSegment",
|
||||
"fields": null,
|
||||
"inputFields": [
|
||||
{
|
||||
"name": "id",
|
||||
"description": "ID of the segment",
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "SCALAR",
|
||||
"name": "AnalyticsDevopsAdoptionSegmentID",
|
||||
"ofType": null
|
||||
}
|
||||
},
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "clientMutationId",
|
||||
"description": "A unique identifier for the client performing the mutation.",
|
||||
"type": {
|
||||
"kind": "SCALAR",
|
||||
"name": "String",
|
||||
"ofType": null
|
||||
},
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"interfaces": null,
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "OBJECT",
|
||||
"name": "DeleteDevopsAdoptionSegmentPayload",
|
||||
"description": "Autogenerated return type of DeleteDevopsAdoptionSegment",
|
||||
"fields": [
|
||||
{
|
||||
"name": "clientMutationId",
|
||||
"description": "A unique identifier for the client performing the mutation.",
|
||||
"args": [
|
||||
|
||||
],
|
||||
"type": {
|
||||
"kind": "SCALAR",
|
||||
"name": "String",
|
||||
"ofType": null
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "errors",
|
||||
"description": "Errors encountered during execution of the mutation.",
|
||||
"args": [
|
||||
|
||||
],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "LIST",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "SCALAR",
|
||||
"name": "String",
|
||||
"ofType": null
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
}
|
||||
],
|
||||
"inputFields": null,
|
||||
"interfaces": [
|
||||
|
||||
],
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "OBJECT",
|
||||
"name": "DeleteJobsResponse",
|
||||
|
|
@ -38192,6 +38410,33 @@
|
|||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "createDevopsAdoptionSegment",
|
||||
"description": null,
|
||||
"args": [
|
||||
{
|
||||
"name": "input",
|
||||
"description": null,
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "CreateDevopsAdoptionSegmentInput",
|
||||
"ofType": null
|
||||
}
|
||||
},
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"type": {
|
||||
"kind": "OBJECT",
|
||||
"name": "CreateDevopsAdoptionSegmentPayload",
|
||||
"ofType": null
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "createDiffNote",
|
||||
"description": null,
|
||||
|
|
@ -38705,6 +38950,33 @@
|
|||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "deleteDevopsAdoptionSegment",
|
||||
"description": null,
|
||||
"args": [
|
||||
{
|
||||
"name": "input",
|
||||
"description": null,
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "DeleteDevopsAdoptionSegmentInput",
|
||||
"ofType": null
|
||||
}
|
||||
},
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"type": {
|
||||
"kind": "OBJECT",
|
||||
"name": "DeleteDevopsAdoptionSegmentPayload",
|
||||
"ofType": null
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "designManagementDelete",
|
||||
"description": null,
|
||||
|
|
@ -40568,6 +40840,33 @@
|
|||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "updateDevopsAdoptionSegment",
|
||||
"description": null,
|
||||
"args": [
|
||||
{
|
||||
"name": "input",
|
||||
"description": null,
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "UpdateDevopsAdoptionSegmentInput",
|
||||
"ofType": null
|
||||
}
|
||||
},
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"type": {
|
||||
"kind": "OBJECT",
|
||||
"name": "UpdateDevopsAdoptionSegmentPayload",
|
||||
"ofType": null
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "updateEpic",
|
||||
"description": null,
|
||||
|
|
@ -64429,6 +64728,140 @@
|
|||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "UpdateDevopsAdoptionSegmentInput",
|
||||
"description": "Autogenerated input type of UpdateDevopsAdoptionSegment",
|
||||
"fields": null,
|
||||
"inputFields": [
|
||||
{
|
||||
"name": "name",
|
||||
"description": "Name of the segment",
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "SCALAR",
|
||||
"name": "String",
|
||||
"ofType": null
|
||||
}
|
||||
},
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "groupIds",
|
||||
"description": "The array of group IDs to set for the segment",
|
||||
"type": {
|
||||
"kind": "LIST",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "SCALAR",
|
||||
"name": "GroupID",
|
||||
"ofType": null
|
||||
}
|
||||
}
|
||||
},
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "id",
|
||||
"description": "ID of the segment",
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "SCALAR",
|
||||
"name": "AnalyticsDevopsAdoptionSegmentID",
|
||||
"ofType": null
|
||||
}
|
||||
},
|
||||
"defaultValue": null
|
||||
},
|
||||
{
|
||||
"name": "clientMutationId",
|
||||
"description": "A unique identifier for the client performing the mutation.",
|
||||
"type": {
|
||||
"kind": "SCALAR",
|
||||
"name": "String",
|
||||
"ofType": null
|
||||
},
|
||||
"defaultValue": null
|
||||
}
|
||||
],
|
||||
"interfaces": null,
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "OBJECT",
|
||||
"name": "UpdateDevopsAdoptionSegmentPayload",
|
||||
"description": "Autogenerated return type of UpdateDevopsAdoptionSegment",
|
||||
"fields": [
|
||||
{
|
||||
"name": "clientMutationId",
|
||||
"description": "A unique identifier for the client performing the mutation.",
|
||||
"args": [
|
||||
|
||||
],
|
||||
"type": {
|
||||
"kind": "SCALAR",
|
||||
"name": "String",
|
||||
"ofType": null
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "errors",
|
||||
"description": "Errors encountered during execution of the mutation.",
|
||||
"args": [
|
||||
|
||||
],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "LIST",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "SCALAR",
|
||||
"name": "String",
|
||||
"ofType": null
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
},
|
||||
{
|
||||
"name": "segment",
|
||||
"description": "The segment after mutation",
|
||||
"args": [
|
||||
|
||||
],
|
||||
"type": {
|
||||
"kind": "OBJECT",
|
||||
"name": "DevopsAdoptionSegment",
|
||||
"ofType": null
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
}
|
||||
],
|
||||
"inputFields": null,
|
||||
"interfaces": [
|
||||
|
||||
],
|
||||
"enumValues": null,
|
||||
"possibleTypes": null
|
||||
},
|
||||
{
|
||||
"kind": "INPUT_OBJECT",
|
||||
"name": "UpdateDiffImagePositionInput",
|
||||
|
|
|
|||
|
|
@ -643,6 +643,16 @@ Autogenerated return type of CreateCustomEmoji.
|
|||
| `customEmoji` | CustomEmoji | The new custom emoji |
|
||||
| `errors` | String! => Array | Errors encountered during execution of the mutation. |
|
||||
|
||||
### CreateDevopsAdoptionSegmentPayload
|
||||
|
||||
Autogenerated return type of CreateDevopsAdoptionSegment.
|
||||
|
||||
| Field | Type | Description |
|
||||
| ----- | ---- | ----------- |
|
||||
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
|
||||
| `errors` | String! => Array | Errors encountered during execution of the mutation. |
|
||||
| `segment` | DevopsAdoptionSegment | The segment after mutation |
|
||||
|
||||
### CreateDiffNotePayload
|
||||
|
||||
Autogenerated return type of CreateDiffNote.
|
||||
|
|
@ -892,6 +902,15 @@ Autogenerated return type of DeleteAnnotation.
|
|||
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
|
||||
| `errors` | String! => Array | Errors encountered during execution of the mutation. |
|
||||
|
||||
### DeleteDevopsAdoptionSegmentPayload
|
||||
|
||||
Autogenerated return type of DeleteDevopsAdoptionSegment.
|
||||
|
||||
| Field | Type | Description |
|
||||
| ----- | ---- | ----------- |
|
||||
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
|
||||
| `errors` | String! => Array | Errors encountered during execution of the mutation. |
|
||||
|
||||
### DeleteJobsResponse
|
||||
|
||||
The response from the AdminSidekiqQueuesDeleteJobs mutation.
|
||||
|
|
@ -3325,6 +3344,16 @@ Autogenerated return type of UpdateContainerExpirationPolicy.
|
|||
| `containerExpirationPolicy` | ContainerExpirationPolicy | The container expiration policy after mutation |
|
||||
| `errors` | String! => Array | Errors encountered during execution of the mutation. |
|
||||
|
||||
### UpdateDevopsAdoptionSegmentPayload
|
||||
|
||||
Autogenerated return type of UpdateDevopsAdoptionSegment.
|
||||
|
||||
| Field | Type | Description |
|
||||
| ----- | ---- | ----------- |
|
||||
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
|
||||
| `errors` | String! => Array | Errors encountered during execution of the mutation. |
|
||||
| `segment` | DevopsAdoptionSegment | The segment after mutation |
|
||||
|
||||
### UpdateEpicPayload
|
||||
|
||||
Autogenerated return type of UpdateEpic.
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ Every API call to vulnerabilities must be [authenticated](README.md#authenticati
|
|||
|
||||
Vulnerability permissions inherit permissions from their project. If a project is
|
||||
private, and a user isn't a member of the project to which the vulnerability
|
||||
belongs, requests to that project will return a `404 Not Found` status code.
|
||||
belongs, requests to that project returns a `404 Not Found` status code.
|
||||
|
||||
## Single vulnerability
|
||||
|
||||
|
|
@ -77,7 +77,7 @@ Confirms a given vulnerability. Returns status code `304` if the vulnerability i
|
|||
|
||||
If an authenticated user does not have permission to
|
||||
[confirm vulnerabilities](../user/permissions.md#project-members-permissions),
|
||||
this request will result in a `403` status code.
|
||||
this request results in a `403` status code.
|
||||
|
||||
```plaintext
|
||||
POST /vulnerabilities/:id/confirm
|
||||
|
|
@ -127,7 +127,7 @@ Resolves a given vulnerability. Returns status code `304` if the vulnerability i
|
|||
|
||||
If an authenticated user does not have permission to
|
||||
[resolve vulnerabilities](../user/permissions.md#project-members-permissions),
|
||||
this request will result in a `403` status code.
|
||||
this request results in a `403` status code.
|
||||
|
||||
```plaintext
|
||||
POST /vulnerabilities/:id/resolve
|
||||
|
|
@ -177,7 +177,7 @@ Dismisses a given vulnerability. Returns status code `304` if the vulnerability
|
|||
|
||||
If an authenticated user does not have permission to
|
||||
[dismiss vulnerabilities](../user/permissions.md#project-members-permissions),
|
||||
this request will result in a `403` status code.
|
||||
this request results in a `403` status code.
|
||||
|
||||
```plaintext
|
||||
POST /vulnerabilities/:id/dismiss
|
||||
|
|
@ -227,7 +227,7 @@ Reverts a given vulnerability to detected state. Returns status code `304` if th
|
|||
|
||||
If an authenticated user does not have permission to
|
||||
[revert vulnerability to detected state](../user/permissions.md#project-members-permissions),
|
||||
this request will result in a `403` status code.
|
||||
this request results in a `403` status code.
|
||||
|
||||
```plaintext
|
||||
POST /vulnerabilities/:id/revert
|
||||
|
|
|
|||
|
|
@ -193,7 +193,7 @@ GET /security/vulnerability_exports/:id/download
|
|||
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/security/vulnerability_exports/2/download"
|
||||
```
|
||||
|
||||
The response will be `404 Not Found` if the vulnerability export is not finished yet or was not found.
|
||||
The response is `404 Not Found` if the vulnerability export is not finished yet or was not found.
|
||||
|
||||
Example response:
|
||||
|
||||
|
|
|
|||
|
|
@ -18,11 +18,11 @@ Every API call to vulnerability findings must be [authenticated](README.md#authe
|
|||
|
||||
Vulnerability findings are project-bound entities. If a user is not
|
||||
a member of a project and the project is private, a request on
|
||||
that project will result in a `404` status code.
|
||||
that project results in a `404` status code.
|
||||
|
||||
If a user is able to access the project but does not have permission to
|
||||
[use the Project Security Dashboard](../user/permissions.md#project-members-permissions),
|
||||
any request for vulnerability findings of this project will result in a `403` status code.
|
||||
any request for vulnerability findings of this project results in a `403` status code.
|
||||
|
||||
CAUTION: **Caution:**
|
||||
This API is in an alpha stage and considered unstable.
|
||||
|
|
|
|||
|
|
@ -152,9 +152,9 @@ Docker-in-Docker service and
|
|||
[GitLab.com shared runners](../../user/gitlab_com/index.md#shared-runners)
|
||||
support this.
|
||||
|
||||
GitLab Runner 11.11 or later is required, but it is not supported if GitLab
|
||||
Runner is installed using the [Helm chart](https://docs.gitlab.com/runner/install/kubernetes.html).
|
||||
See the [related issue](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/issues/83) for details.
|
||||
##### Docker
|
||||
|
||||
> Introduced in GitLab Runner 11.11.
|
||||
|
||||
1. Install [GitLab Runner](https://docs.gitlab.com/runner/install/).
|
||||
1. Register GitLab Runner from the command line to use `docker` and `privileged`
|
||||
|
|
@ -217,6 +217,62 @@ See the [related issue](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/iss
|
|||
# The 'docker' hostname is the alias of the service container as described at
|
||||
# https://docs.gitlab.com/ee/ci/docker/using_docker_images.html#accessing-the-services.
|
||||
#
|
||||
# Specify to Docker where to create the certificates, Docker will
|
||||
# create them automatically on boot, and will create
|
||||
# `/certs/client` that will be shared between the service and job
|
||||
# container, thanks to volume mount from config.toml
|
||||
DOCKER_TLS_CERTDIR: "/certs"
|
||||
|
||||
services:
|
||||
- docker:19.03.12-dind
|
||||
|
||||
before_script:
|
||||
- docker info
|
||||
|
||||
build:
|
||||
stage: build
|
||||
script:
|
||||
- docker build -t my-docker-image .
|
||||
- docker run my-docker-image /script/to/run/tests
|
||||
```
|
||||
|
||||
##### Kubernetes
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/issues/106) in GitLab Runner Helm Chart 0.23.0.
|
||||
|
||||
1. Using the
|
||||
[Helm chart](https://docs.gitlab.com/runner/install/kubernetes.html), update the
|
||||
[`values.yml` file](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/blob/00c1a2098f303dffb910714752e9a981e119f5b5/values.yaml#L133-137)
|
||||
to specify a volume mount.
|
||||
|
||||
```yaml
|
||||
runners:
|
||||
config: |
|
||||
[[runners]]
|
||||
[runners.kubernetes]
|
||||
image = "ubuntu:20.04"
|
||||
privileged = true
|
||||
[[runners.kubernetes.volumes.empty_dir]]
|
||||
name = "docker-certs"
|
||||
mount_path = "/certs/client"
|
||||
medium = "Memory"
|
||||
```
|
||||
|
||||
1. You can now use `docker` in the build script (note the inclusion of the
|
||||
`docker:19.03.13-dind` service):
|
||||
|
||||
```yaml
|
||||
image: docker:19.03.13
|
||||
|
||||
variables:
|
||||
# When using dind service, we need to instruct docker to talk with
|
||||
# the daemon started inside of the service. The daemon is available
|
||||
# with a network connection instead of the default
|
||||
# /var/run/docker.sock socket.
|
||||
DOCKER_HOST: tcp://docker:2376
|
||||
#
|
||||
# The 'docker' hostname is the alias of the service container as described at
|
||||
# https://docs.gitlab.com/ee/ci/docker/using_docker_images.html#accessing-the-services.
|
||||
# If you're using GitLab Runner 12.7 or earlier with the Kubernetes executor and Kubernetes 1.6 or earlier,
|
||||
# the variable must be set to tcp://localhost:2376 because of how the
|
||||
# Kubernetes executor connects services to the job container
|
||||
|
|
@ -227,9 +283,14 @@ See the [related issue](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/iss
|
|||
# `/certs/client` that will be shared between the service and job
|
||||
# container, thanks to volume mount from config.toml
|
||||
DOCKER_TLS_CERTDIR: "/certs"
|
||||
# These are usually specified by the entrypoint, however the
|
||||
# Kubernetes executor doesn't run entrypoints
|
||||
# https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4125
|
||||
DOCKER_TLS_VERIFY: 1
|
||||
DOCKER_CERT_PATH: "$DOCKER_TLS_CERTDIR/client"
|
||||
|
||||
services:
|
||||
- docker:19.03.12-dind
|
||||
- docker:19.03.13-dind
|
||||
|
||||
before_script:
|
||||
- docker info
|
||||
|
|
|
|||
|
|
@ -1,80 +1,101 @@
|
|||
---
|
||||
stage: none
|
||||
group: unassigned
|
||||
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
|
||||
---
|
||||
|
||||
# Cached queries guidelines
|
||||
|
||||
Rails provides an [SQL query cache](https://guides.rubyonrails.org/caching_with_rails.html#sql-caching),
|
||||
used to cache the results of database queries for the duration of the request.
|
||||
Rails provides an [SQL query cache](https://guides.rubyonrails.org/caching_with_rails.html#sql-caching)
|
||||
which is used to cache the results of database queries for the duration of a request.
|
||||
When Rails encounters the same query again within the same request, it uses the cached
|
||||
result set instead of running the query against the database again.
|
||||
|
||||
If Rails encounters the same query again for that request,
|
||||
it will use the cached result set as opposed to running the query against the database again.
|
||||
|
||||
The query results are only cached for the duration of that single request, it does not persist across multiple requests.
|
||||
The query results are only cached for the duration of that single request, and
|
||||
don't persist across multiple requests.
|
||||
|
||||
## Why cached queries are considered bad
|
||||
|
||||
The cached queries help with reducing DB load, but they still:
|
||||
Cached queries help by reducing the load on the database, but they still:
|
||||
|
||||
- Consume memory.
|
||||
- Require as to re-instantiate each `ActiveRecord` object.
|
||||
- Require as to re-instantiate each relation of the object.
|
||||
- Make us spend additional CPU-cycles to look into a list of cached queries.
|
||||
|
||||
The Cached SQL queries are cheaper, but they are not cheap at all from `memory` perspective.
|
||||
They could mask [N+1 query problem](https://guides.rubyonrails.org/active_record_querying.html#eager-loading-associations),
|
||||
so we should threat them the same way we threat regular N+1 queries.
|
||||
- Require Rails to re-instantiate each `ActiveRecord` object.
|
||||
- Require Rails to re-instantiate each relation of the object.
|
||||
- Make us spend additional CPU cycles to look into a list of cached queries.
|
||||
|
||||
In case of N+1 queries, masked with cached queries, we are executing the same query N times.
|
||||
It will not hit the database N times, it will return the cached results instead.
|
||||
This is still expensive since we need to re-initialize objects each time, and this is CPU/Memory expensive.
|
||||
Instead, we should use the same in-memory objects, if possible.
|
||||
Although cached queries are cheaper from a database perspective, they are potentially
|
||||
more expensive from a memory perspective. They could mask
|
||||
[N+1 query problems](https://guides.rubyonrails.org/active_record_querying.html#eager-loading-associations),
|
||||
so you should treat them the same way you treat regular N+1 queries.
|
||||
|
||||
When we introduce a new feature, we should avoid N+1 problems,
|
||||
minimize the [query count](merge_request_performance_guidelines.md#query-counts), and pay special attention that [cached
|
||||
queries](merge_request_performance_guidelines.md#cached-queries) are not masking N+1 problems.
|
||||
In cases of N+1 queries masked by cached queries, the same query is executed N times.
|
||||
It will not hit the database N times but instead returns the cached results N times.
|
||||
This is still expensive because you need to re-initialize objects each time at a
|
||||
greater expense to the CPU and memory resources. Instead, you should use the same
|
||||
in-memory objects whenever possible.
|
||||
|
||||
## How to detect
|
||||
When you introduce a new feature, you should:
|
||||
|
||||
- Avoid N+1 queries.
|
||||
- Minimize the [query count](merge_request_performance_guidelines.md#query-counts).
|
||||
- Pay special attention to ensure
|
||||
[cached queries](merge_request_performance_guidelines.md#cached-queries) are not
|
||||
masking N+1 problems.
|
||||
|
||||
## How to detect cached queries
|
||||
|
||||
### Detect potential offenders by using Kibana
|
||||
|
||||
On GitLab.com, we are logging entries with the number of executed cached queries in the
|
||||
`pubsub-redis-inf-gprd*` index with the [`db_cached_count`](https://log.gprd.gitlab.net/goto/77d18d80ad84c5df1bf1da5c2cd35b82).
|
||||
We can filter endpoints that have a large number of executed cached queries. For example, if we encounter an endpoint
|
||||
that has 100+ `db_cached_count`, this could indicate that there is an N+1 problem masked with cached queries.
|
||||
We should probably investigate this endpoint further, to check if we are executing duplicated cached queries.
|
||||
GitLab.com, logs entries with the number of executed cached queries in the
|
||||
`pubsub-redis-inf-gprd*` index as
|
||||
[`db_cached_count`](https://log.gprd.gitlab.net/goto/77d18d80ad84c5df1bf1da5c2cd35b82).
|
||||
You can filter by endpoints that have a large number of executed cached queries. For
|
||||
example, an endpoint with a `db_cached_count` greater than 100 can indicate an N+1 problem which
|
||||
is masked by cached queries. You should investigate this endpoint further to determine
|
||||
if it is indeed executing duplicated cached queries.
|
||||
|
||||
For more cached queries Kibana visualizations see [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/259007).
|
||||
For more Kibana visualizations related to cached queries, read
|
||||
[issue #259007, 'Provide metrics that would help us to detect the potential N+1 CACHED SQL calls'](https://gitlab.com/gitlab-org/gitlab/-/issues/259007).
|
||||
|
||||
### Inspect suspicious endpoint using Performance Bar
|
||||
### Inspect suspicious endpoints using the Performance Bar
|
||||
|
||||
When building features, you could use the [performance bar](../administration/monitoring/performance/performance_bar.md)
|
||||
to list database queries, which will include cached queries as well. The performance bar will show a warning
|
||||
when threshold of total executed queries (including cached ones) has exceeded 100 queries.
|
||||
When building features, use the
|
||||
[performance bar](../administration/monitoring/performance/performance_bar.md)
|
||||
to view the list of database queries, including cached queries. The
|
||||
performance bar shows a warning when the number of total executed and cached queries is
|
||||
greater than 100.
|
||||
|
||||
To learn more about the statistics available to you, read the
|
||||
[Performance Bar documentation](../administration/monitoring/performance/performance_bar.md).
|
||||
|
||||
## What to look for
|
||||
|
||||
Using [Kibana](cached_queries.md#detect-potential-offenders-by-using-kibana), you can look for a large number
|
||||
of executed cached queries. End-points with large number of `db_cached_count` could indicate that there
|
||||
are probably a lot of duplicated cached queries, which often indicates a masked N+1 problem.
|
||||
Using [Kibana](#detect-potential-offenders-by-using-kibana), you can look for a large number
|
||||
of executed cached queries. Endpoints with a large `db_cached_count` could suggest a large number
|
||||
of duplicated cached queries, which often indicates a masked N+1 problem.
|
||||
|
||||
When you investigate specific endpoint, you could use
|
||||
the [performance bar](cached_queries.md#inspect-suspicious-endpoint-using-performance-bar).
|
||||
If you see a lot of similar queries, this often indicates an N+1 query issue (or a similar kind of query batching problem).
|
||||
If you see same cached query executed multiple times, this often indicates a masked N+1 query problem.
|
||||
When you investigate a specific endpoint, use
|
||||
the [performance bar](#inspect-suspicious-endpoints-using-the-performance-bar)
|
||||
to identify similar and cached queries, which may also indicate an N+1 query issue
|
||||
(or a similar kind of query batching problem).
|
||||
|
||||
For example, let's say you wanted to debug `GroupMembers` page.
|
||||
### An example
|
||||
|
||||
In the left corner of the performance bar you could see **Database queries** showing the total number of database queries
|
||||
For example, let's debug the "Group Members" page. In the left corner of the
|
||||
performance bar, **Database queries** shows the total number of database queries
|
||||
and the number of executed cached queries:
|
||||
|
||||

|
||||
|
||||
We can see that there are 55 cached queries. By clicking on the number, a modal window with more details is shown.
|
||||
Cached queries are marked with the `cached` label, so they are easy to spot. We can see that there are multiple duplicated
|
||||
cached queries:
|
||||
The page included 55 cached queries. Clicking the number displays a modal window
|
||||
with more details about queries. Cached queries are marked with the `cached` label
|
||||
below the query. You can see multiple duplicate cached queries in this modal window:
|
||||
|
||||

|
||||
|
||||
If we click on `...` for one of them, it will expand the actual stack trace:
|
||||
Click **...** to expand the actual stack trace:
|
||||
|
||||
```shell
|
||||
```ruby
|
||||
[
|
||||
"app/models/group.rb:305:in `has_owner?'",
|
||||
"ee/app/views/shared/members/ee/_license_badge.html.haml:1",
|
||||
|
|
@ -99,24 +120,30 @@ If we click on `...` for one of them, it will expand the actual stack trace:
|
|||
]
|
||||
```
|
||||
|
||||
The stack trace, shows us that we obviously have an N+1 problem, since we are repeatably executing for each group member:
|
||||
The stack trace shows an N+1 problem, because the code repeatedly executes
|
||||
`group.has_owner?(current_user)` for each group member. To solve this issue,
|
||||
move the repeated line of code outside of the loop, passing the result to each rendered member instead:
|
||||
|
||||
```ruby
|
||||
group.has_owner?(current_user)
|
||||
```erb
|
||||
- current_user_is_group_owner = @group && @group.has_owner?(current_user)
|
||||
|
||||
= render partial: 'shared/members/member',
|
||||
collection: @members, as: :member,
|
||||
locals: { membership_source: @group,
|
||||
group: @group,
|
||||
current_user_is_group_owner: current_user_is_group_owner }
|
||||
```
|
||||
|
||||
This is easily solvable by extracting this check, above the loop.
|
||||
|
||||
After [the fix](https://gitlab.com/gitlab-org/gitlab/-/issues/231468), we now have:
|
||||
After [fixing the cached query](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/44626/diffs#27c2761d66e496495be07d0925697f7e62b5bd14), the performance bar now shows only
|
||||
6 cached queries:
|
||||
|
||||

|
||||
|
||||
## How to measure the impact of the change
|
||||
|
||||
We can use the [memory profiler](performance.md#using-memory-profiler) to profile our code.
|
||||
For the previous example, we could wrap the profiler around the `Groups::GroupMembersController#index` action.
|
||||
|
||||
We had:
|
||||
Use the [memory profiler](performance.md#using-memory-profiler) to profile your code.
|
||||
For [this example](#an-example), wrap the profiler around the `Groups::GroupMembersController#index` action. Before the fix, the application had
|
||||
the following statistics:
|
||||
|
||||
- Total allocated: 7133601 bytes (84858 objects)
|
||||
- Total retained: 757595 bytes (6070 objects)
|
||||
|
|
@ -124,7 +151,8 @@ We had:
|
|||
- `db_cached_count`: 55
|
||||
- `db_duration`: 303ms
|
||||
|
||||
After the fix, we can see that we have reduced the allocated memory as well as the number of cached queries and improved execution time:
|
||||
The fix reduced the allocated memory, and the number of cached queries. These
|
||||
factors help improve the overall execution time:
|
||||
|
||||
- Total allocated: 5313899 bytes (65290 objects), 1810KB (25%) less
|
||||
- Total retained: 685593 bytes (5278 objects), 72KB (9%) less
|
||||
|
|
@ -132,7 +160,7 @@ After the fix, we can see that we have reduced the allocated memory as well as t
|
|||
- `db_cached_count`: 6 (89% less)
|
||||
- `db_duration`: 162ms (87% faster)
|
||||
|
||||
## See also
|
||||
## For more information
|
||||
|
||||
- [Metrics that would help us detect the potential N+1 Cached SQL calls](https://gitlab.com/gitlab-org/gitlab/-/issues/259007)
|
||||
- [Merge Request performance guidelines for cached queries](merge_request_performance_guidelines.md#cached-queries)
|
||||
|
|
|
|||
|
|
@ -23,6 +23,12 @@ All newly-introduced feature flags should be [disabled by default](process.md#fe
|
|||
NOTE: **Note:**
|
||||
This document is the subject of continued work as part of an epic to [improve internal usage of Feature Flags](https://gitlab.com/groups/gitlab-org/-/epics/3551). Raise any suggestions as new issues and attach them to the epic.
|
||||
|
||||
## Risk of a broken master (main) branch
|
||||
|
||||
Feature flags **must** be used in the MR that introduces them. Not doing so causes a
|
||||
[broken master](https://about.gitlab.com/handbook/engineering/workflow/#broken-master) scenario due
|
||||
to the `rspec:feature-flags` job that only runs on the `master` branch.
|
||||
|
||||
## Types of feature flags
|
||||
|
||||
Choose a feature flag type that matches the expected usage.
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
# Secure Partner Integration - Onboarding Process
|
||||
|
||||
If you want to integrate your product with the [Secure Stage](https://about.gitlab.com/direction/secure/),
|
||||
this page will help you understand the developer workflow GitLab intends for
|
||||
this page describes the developer workflow GitLab intends for
|
||||
our users to follow with regards to security results. These should be used as
|
||||
guidelines so you can build an integration that fits with the workflow GitLab
|
||||
users are already familiar with.
|
||||
|
|
@ -29,7 +29,7 @@ tiers so that we can provide the most value to our mutual customers.
|
|||
## What is the GitLab Developer Workflow?
|
||||
|
||||
This workflow is how GitLab users interact with our product and expect it to
|
||||
function. Understanding how users use GitLab today will help you choose the
|
||||
function. Understanding how users use GitLab today helps you choose the
|
||||
best place to integrate your own product and its results into GitLab.
|
||||
|
||||
- Developers want to write code without using a new tool to consume results
|
||||
|
|
@ -101,7 +101,7 @@ and complete an integration with the Secure stage.
|
|||
- Users can interact with the findings from your artifact within their workflow. They can dismiss the findings or accept them and create a backlog issue.
|
||||
- To automatically create issues without user interaction, use the [issue API](../../api/issues.md). This will be replaced by [Standalone Vulnerabilities](https://gitlab.com/groups/gitlab-org/-/epics/634) in the future.
|
||||
1. Optional: Provide auto-remediation steps:
|
||||
- If you specified `remediations` in your artifact, it is proposed through our [auto-remediation](../../user/application_security/index.md#automatic-remediation-for-vulnerabilities)
|
||||
- If you specified `remediations` in your artifact, it is proposed through our [automatic remediation](../../user/application_security/index.md#automatic-remediation-for-vulnerabilities)
|
||||
interface.
|
||||
1. Demo the integration to GitLab:
|
||||
- After you have tested and are ready to demo your integration please
|
||||
|
|
@ -112,7 +112,7 @@ and complete an integration with the Secure stage.
|
|||
to support your go-to-market as appropriate.
|
||||
- Examples of supported marketing could include being listed on our [Security Partner page](https://about.gitlab.com/partners/#security),
|
||||
doing an [Unfiltered blog post](https://about.gitlab.com/handbook/marketing/blog/unfiltered/),
|
||||
doing a co-branded webinar, or producing a co-branded whitepaper.
|
||||
doing a co-branded webinar, or producing a co-branded white paper.
|
||||
|
||||
We have a [video playlist](https://www.youtube.com/playlist?list=PL05JrBw4t0KpMqYxJiOLz-uBIr5w-yP4A)
|
||||
that may be helpful as part of this process. This covers various topics related to integrating your
|
||||
|
|
|
|||
|
|
@ -166,7 +166,7 @@ Rails provides an [SQL Query Cache](cached_queries.md#cached-queries-guidelines)
|
|||
used to cache the results of database queries for the duration of the request.
|
||||
|
||||
See [why cached queries are considered bad](cached_queries.md#why-cached-queries-are-considered-bad) and
|
||||
[how to detect them](cached_queries.md#how-to-detect).
|
||||
[how to detect them](cached_queries.md#how-to-detect-cached-queries).
|
||||
|
||||
The code introduced by a merge request, should not execute multiple duplicated cached queries.
|
||||
|
||||
|
|
|
|||
|
|
@ -64,7 +64,7 @@ Integration with services such as Campfire, Flowdock, HipChat, Pivotal Tracker,
|
|||
|
||||
### SSL certificate errors
|
||||
|
||||
When trying to integrate GitLab with services that are using self-signed certificates, it is very likely that SSL certificate errors will occur in different parts of the application, most likely Sidekiq.
|
||||
When trying to integrate GitLab with services that are using self-signed certificates, it is very likely that SSL certificate errors occur in different parts of the application, most likely Sidekiq.
|
||||
|
||||
There are two approaches you can take to solve this:
|
||||
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
|
||||
# Facebook OAuth2 OmniAuth Provider
|
||||
|
||||
To enable the Facebook OmniAuth provider you must register your application with Facebook. Facebook will generate an app ID and secret key for you to use.
|
||||
To enable the Facebook OmniAuth provider you must register your application with Facebook. Facebook generates an app ID and secret key for you to use.
|
||||
|
||||
1. Sign in to the [Facebook Developer Platform](https://developers.facebook.com/).
|
||||
|
||||
|
|
@ -101,4 +101,4 @@ To enable the Facebook OmniAuth provider you must register your application with
|
|||
1. [Reconfigure](../administration/restart_gitlab.md#omnibus-gitlab-reconfigure) or [restart GitLab](../administration/restart_gitlab.md#installations-from-source) for the changes to take effect if you
|
||||
installed GitLab via Omnibus or from source respectively.
|
||||
|
||||
On the sign in page there should now be a Facebook icon below the regular sign in form. Click the icon to begin the authentication process. Facebook will ask the user to sign in and authorize the GitLab application. If everything goes well the user will be returned to GitLab and will be signed in.
|
||||
On the sign in page there should now be a Facebook icon below the regular sign in form. Click the icon to begin the authentication process. Facebook asks the user to sign in and authorize the GitLab application. If everything goes well the user is returned to GitLab and signed in.
|
||||
|
|
|
|||
|
|
@ -12,9 +12,9 @@ with your GitHub account.
|
|||
|
||||
## Enabling GitHub OAuth
|
||||
|
||||
To enable the GitHub OmniAuth provider, you'll need an OAuth 2 Client ID and Client Secret from GitHub. To get these credentials, sign into GitHub and follow their procedure for [Creating an OAuth App](https://developer.github.com/apps/building-oauth-apps/creating-an-oauth-app/).
|
||||
To enable the GitHub OmniAuth provider, you need an OAuth 2 Client ID and Client Secret from GitHub. To get these credentials, sign into GitHub and follow their procedure for [Creating an OAuth App](https://developer.github.com/apps/building-oauth-apps/creating-an-oauth-app/).
|
||||
|
||||
When you create an OAuth 2 app in GitHub, you'll need the following information:
|
||||
When you create an OAuth 2 app in GitHub, you need the following information:
|
||||
|
||||
- The URL of your GitLab instance, such as `https://gitlab.example.com`.
|
||||
- The authorization callback URL; in this case, `https://gitlab.example.com/users/auth`. Include the port number if your GitLab instance uses a non-default port.
|
||||
|
|
@ -24,7 +24,7 @@ To prevent an [OAuth2 covert redirect](https://oauth.net/advisories/2014-1-cover
|
|||
|
||||
See [Initial OmniAuth Configuration](omniauth.md#initial-omniauth-configuration) for initial settings.
|
||||
|
||||
Once you have configured the GitHub provider, you'll need the following information, which you'll need to substitute in the GitLab configuration file, in the steps shown next.
|
||||
After you have configured the GitHub provider, you need the following information, which you must substitute in the GitLab configuration file, in the steps shown next.
|
||||
|
||||
| Setting from GitHub | Substitute in the GitLab configuration file | Description |
|
||||
|:---------------------|:---------------------------------------------|:------------|
|
||||
|
|
@ -101,12 +101,12 @@ Follow these steps to incorporate the GitHub OAuth 2 app in your GitLab server:
|
|||
|
||||
1. Refresh the GitLab sign in page. You should now see a GitHub icon below the regular sign in form.
|
||||
|
||||
1. Click the icon to begin the authentication process. GitHub will ask the user to sign in and authorize the GitLab application.
|
||||
1. Click the icon to begin the authentication process. GitHub asks the user to sign in and authorize the GitLab application.
|
||||
|
||||
## GitHub Enterprise with self-signed Certificate
|
||||
|
||||
If you are attempting to import projects from GitHub Enterprise with a self-signed
|
||||
certificate and the imports are failing, you will need to disable SSL verification.
|
||||
certificate and the imports are failing, you must disable SSL verification.
|
||||
It should be disabled by adding `verify_ssl` to `false` in the provider configuration
|
||||
and changing the global Git `sslVerify` option to `false` in the GitLab server.
|
||||
|
||||
|
|
@ -125,7 +125,7 @@ gitlab_rails['omniauth_providers'] = [
|
|||
]
|
||||
```
|
||||
|
||||
You will also need to disable Git SSL verification on the server hosting GitLab.
|
||||
You must also disable Git SSL verification on the server hosting GitLab.
|
||||
|
||||
```ruby
|
||||
omnibus_gitconfig['system'] = { "http" => ["sslVerify = false"] }
|
||||
|
|
@ -142,7 +142,7 @@ For installation from source:
|
|||
args: { scope: 'user:email' } }
|
||||
```
|
||||
|
||||
You will also need to disable Git SSL verification on the server hosting GitLab.
|
||||
You must also disable Git SSL verification on the server hosting GitLab.
|
||||
|
||||
```shell
|
||||
git config --global http.sslVerify false
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
Import projects from GitLab.com and login to your GitLab instance with your GitLab.com account.
|
||||
|
||||
To enable the GitLab.com OmniAuth provider you must register your application with GitLab.com.
|
||||
GitLab.com will generate an application ID and secret key for you to use.
|
||||
GitLab.com generates an application ID and secret key for you to use.
|
||||
|
||||
1. Sign in to GitLab.com
|
||||
|
||||
|
|
@ -85,5 +85,5 @@ GitLab.com will generate an application ID and secret key for you to use.
|
|||
installed GitLab via Omnibus or from source respectively.
|
||||
|
||||
On the sign in page there should now be a GitLab.com icon below the regular sign in form.
|
||||
Click the icon to begin the authentication process. GitLab.com will ask the user to sign in and authorize the GitLab application.
|
||||
If everything goes well the user will be returned to your GitLab instance and will be signed in.
|
||||
Click the icon to begin the authentication process. GitLab.com asks the user to sign in and authorize the GitLab application.
|
||||
If everything goes well the user is returned to your GitLab instance and is signed in.
|
||||
|
|
|
|||
|
|
@ -8,15 +8,14 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
|
||||
GitLab supports [Google actions in email](https://developers.google.com/gmail/markup/actions/actions-overview).
|
||||
|
||||
If correctly set up, emails that require an action will be marked in Gmail.
|
||||
If correctly set up, emails that require an action are marked in Gmail.
|
||||
|
||||

|
||||
|
||||
To get this functioning, you need to be registered with Google. For instructions, see
|
||||
[Register with Google](https://developers.google.com/gmail/markup/registering-with-google).
|
||||
|
||||
*This process has a lot of steps so make sure that you fulfill all requirements set by Google.*
|
||||
*Your application will be rejected by Google if you fail to do so.*
|
||||
*This process has a lot of steps, so make sure that you fulfill all requirements set by Google to avoid your application being rejected.*
|
||||
|
||||
In particular, note:
|
||||
|
||||
|
|
@ -25,6 +24,6 @@ In particular, note:
|
|||
(order of hundred emails a day minimum to Gmail) for a few weeks at least".
|
||||
- Have a very low rate of spam complaints from users.
|
||||
- Emails must be authenticated via DKIM or SPF.
|
||||
- Before sending the final form ("Gmail Schema Whitelist Request"), you must send a real email from your production server. This means that you will have to find a way to send this email from the email address you are registering. You can do this by, for example, forwarding the real email from the email address you are registering or going into the rails console on the GitLab server and triggering the email sending from there.
|
||||
- Before sending the final form ("Gmail Schema Whitelist Request"), you must send a real email from your production server. This means that you must find a way to send this email from the email address you are registering. You can do this by, for example, forwarding the real email from the email address you are registering or going into the rails console on the GitLab server and triggering the email sending from there.
|
||||
|
||||
You can check how it looks going through all the steps laid out in the "Registering with Google" doc in [this GitLab.com issue](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/1517).
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
# Google OAuth2 OmniAuth Provider
|
||||
|
||||
To enable the Google OAuth2 OmniAuth provider you must register your application
|
||||
with Google. Google will generate a client ID and secret key for you to use.
|
||||
with Google. Google generates a client ID and secret key for you to use.
|
||||
|
||||
## Enabling Google OAuth
|
||||
|
||||
|
|
@ -40,7 +40,7 @@ In Google's side:
|
|||
```
|
||||
|
||||
1. You should now be able to see a Client ID and Client secret. Note them down
|
||||
or keep this page open as you will need them later.
|
||||
or keep this page open as you need them later.
|
||||
1. To enable projects to access [Google Kubernetes Engine](../user/project/clusters/index.md), you must also
|
||||
enable these APIs:
|
||||
- Google Kubernetes Engine API
|
||||
|
|
@ -98,7 +98,7 @@ On your GitLab server:
|
|||
|
||||
1. Change `YOUR_APP_ID` to the client ID from the Google Developer page
|
||||
1. Similarly, change `YOUR_APP_SECRET` to the client secret
|
||||
1. Make sure that you configure GitLab to use an FQDN as Google will not accept
|
||||
1. Make sure that you configure GitLab to use a fully-qualified domain name, as Google doesn't accept
|
||||
raw IP addresses.
|
||||
|
||||
For Omnibus packages:
|
||||
|
|
@ -119,6 +119,6 @@ On your GitLab server:
|
|||
installed GitLab via Omnibus or from source respectively.
|
||||
|
||||
On the sign in page there should now be a Google icon below the regular sign in
|
||||
form. Click the icon to begin the authentication process. Google will ask the
|
||||
form. Click the icon to begin the authentication process. Google asks the
|
||||
user to sign in and authorize the GitLab application. If everything goes well
|
||||
the user will be returned to GitLab and will be signed in.
|
||||
the user is returned to GitLab and is signed in.
|
||||
|
|
|
|||
|
|
@ -41,7 +41,7 @@ In GitLab, perform the following steps.
|
|||
|
||||
Jenkins needs read access to the GitLab repository. We already specified a
|
||||
private key to use in Jenkins, now we need to add a public one to the GitLab
|
||||
project. For that case we will need a Deploy key. Read the documentation on
|
||||
project. For that case we need a Deploy key. Read the documentation on
|
||||
[how to set up a Deploy key](../ssh/README.md#deploy-keys).
|
||||
|
||||
### Jenkins service
|
||||
|
|
@ -50,14 +50,13 @@ Now navigate to GitLab services page and activate Jenkins
|
|||
|
||||

|
||||
|
||||
Done! Now when you push to GitLab - it will create a build for Jenkins.
|
||||
And also you will be able to see merge request build status with a link to the Jenkins build.
|
||||
Done! Now when you push to GitLab - it creates a build for Jenkins, and you can view the merge request build status with a link to the Jenkins build.
|
||||
|
||||
### Multi-project Configuration
|
||||
|
||||
The GitLab Hook plugin in Jenkins supports the automatic creation of a project
|
||||
for each feature branch. After configuration GitLab will trigger feature branch
|
||||
builds and a corresponding project will be created in Jenkins.
|
||||
for each feature branch. After configuration GitLab triggers feature branch
|
||||
builds and a corresponding project is created in Jenkins.
|
||||
|
||||
Configure the GitLab Hook plugin in Jenkins. Go to 'Manage Jenkins' and then
|
||||
'Configure System'. Find the 'GitLab Web Hook' section and configure as shown below.
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ This strategy is designed to allow configuration of the simple OmniAuth SSO proc
|
|||
|
||||
## Limitations of this Strategy
|
||||
|
||||
- It can only be used for Single Sign on, and will not provide any other access granted by any OAuth provider
|
||||
- It can only be used for Single Sign on, and doesn't provide any other access granted by any OAuth provider
|
||||
(importing projects or users, etc)
|
||||
- It only supports the Authorization Grant flow (most common for client-server applications, like GitLab)
|
||||
- It is not able to fetch user information from more than one URL
|
||||
|
|
@ -37,7 +37,7 @@ This strategy is designed to allow configuration of the simple OmniAuth SSO proc
|
|||
```
|
||||
|
||||
1. You should now be able to get a Client ID and Client Secret.
|
||||
Where this shows up will differ for each provider.
|
||||
Where this shows up differs for each provider.
|
||||
This may also be called Application ID and Secret
|
||||
|
||||
1. On your GitLab server, open the configuration file.
|
||||
|
|
@ -64,6 +64,6 @@ This strategy is designed to allow configuration of the simple OmniAuth SSO proc
|
|||
1. Restart GitLab for the changes to take effect
|
||||
|
||||
On the sign in page there should now be a new button below the regular sign in form.
|
||||
Click the button to begin your provider's authentication process. This will direct
|
||||
Click the button to begin your provider's authentication process. This directs
|
||||
the browser to your OAuth2 Provider's authentication page. If everything goes well
|
||||
the user will be returned to your GitLab instance and will be signed in.
|
||||
the user is returned to your GitLab instance and is signed in.
|
||||
|
|
|
|||
|
|
@ -48,12 +48,12 @@ In order to add a new application via your profile, navigate to
|
|||

|
||||
|
||||
In the application form, enter a **Name** (arbitrary), and make sure to set up
|
||||
correctly the **Redirect URI** which is the URL where users will be sent after
|
||||
correctly the **Redirect URI** which is the URL where users are sent after
|
||||
they authorize with GitLab.
|
||||
|
||||

|
||||
|
||||
When you hit **Submit** you will be provided with the application ID and
|
||||
When you click **Submit** you are provided with the application ID and
|
||||
the application secret which you can then use with your application that
|
||||
connects to GitLab.
|
||||
|
||||
|
|
@ -71,7 +71,7 @@ the user authorization step is automatically skipped for this application.
|
|||
|
||||
## Authorized applications
|
||||
|
||||
Every application you authorized to use your GitLab credentials will be shown
|
||||
Every application you authorized to use your GitLab credentials is shown
|
||||
in the **Authorized applications** section under **Profile Settings > Applications**.
|
||||
|
||||

|
||||
|
|
|
|||
|
|
@ -51,23 +51,23 @@ that are in common for all providers that we need to consider.
|
|||
|
||||
NOTE: **Note:**
|
||||
Starting from GitLab 11.4, OmniAuth is enabled by default. If you're using an
|
||||
earlier version, you'll need to explicitly enable it.
|
||||
earlier version, you must explicitly enable it.
|
||||
|
||||
- `allow_single_sign_on` allows you to specify the providers you want to allow to
|
||||
automatically create an account. It defaults to `false`. If `false` users must
|
||||
be created manually or they will not be able to sign in via OmniAuth.
|
||||
be created manually or they can't sign in via OmniAuth.
|
||||
- `auto_link_ldap_user` can be used if you have [LDAP / ActiveDirectory](../administration/auth/ldap/index.md)
|
||||
integration enabled. It defaults to `false`. When enabled, users automatically
|
||||
created through an OmniAuth provider will have their LDAP identity created in GitLab as well.
|
||||
created through an OmniAuth provider have their LDAP identity created in GitLab as well.
|
||||
- `block_auto_created_users` defaults to `true`. If `true` auto created users will
|
||||
be blocked by default and will have to be unblocked by an administrator before
|
||||
be blocked by default and must be unblocked by an administrator before
|
||||
they are able to sign in.
|
||||
|
||||
NOTE: **Note:**
|
||||
If you set `block_auto_created_users` to `false`, make sure to only
|
||||
define providers under `allow_single_sign_on` that you are able to control, like
|
||||
SAML, Shibboleth, Crowd or Google, or set it to `false` otherwise any user on
|
||||
the Internet will be able to successfully sign in to your GitLab without
|
||||
the Internet can successfully sign in to your GitLab without
|
||||
administrative approval.
|
||||
|
||||
NOTE: **Note:**
|
||||
|
|
@ -141,8 +141,8 @@ OmniAuth provider for an existing user.
|
|||
1. Go to profile settings (the silhouette icon in the top right corner).
|
||||
1. Select the "Account" tab.
|
||||
1. Under "Connected Accounts" select the desired OmniAuth provider, such as Twitter.
|
||||
1. The user will be redirected to the provider. Once the user authorized GitLab
|
||||
they will be redirected back to GitLab.
|
||||
1. The user is redirected to the provider. After the user authorizes GitLab,
|
||||
they are redirected back to GitLab.
|
||||
|
||||
The chosen OmniAuth provider is now active and can be used to sign in to GitLab from then on.
|
||||
|
||||
|
|
@ -171,8 +171,8 @@ omniauth:
|
|||
> Introduced in GitLab 8.7.
|
||||
|
||||
You can define which OmniAuth providers you want to be `external` so that all users
|
||||
**creating accounts, or logging in via these providers** will not be able to have
|
||||
access to internal projects. You will need to use the full name of the provider,
|
||||
**creating accounts, or logging in via these providers** can't have
|
||||
access to internal projects. You must use the full name of the provider,
|
||||
like `google_oauth2` for Google. Refer to the examples for the full names of the
|
||||
supported providers.
|
||||
|
||||
|
|
@ -206,7 +206,7 @@ these cases you can use the OmniAuth provider.
|
|||
|
||||
### Steps
|
||||
|
||||
These steps are fairly general and you will need to figure out the exact details
|
||||
These steps are fairly general and you must figure out the exact details
|
||||
from the OmniAuth provider's documentation.
|
||||
|
||||
- Stop GitLab:
|
||||
|
|
@ -343,8 +343,8 @@ omniauth:
|
|||
auto_sign_in_with_provider: azure_oauth2
|
||||
```
|
||||
|
||||
Keep in mind that every sign-in attempt will be redirected to the OmniAuth
|
||||
provider; you won't be able to sign in using local credentials. Ensure at least
|
||||
Keep in mind that every sign-in attempt is redirected to the OmniAuth
|
||||
provider; you can't sign in using local credentials. Ensure at least
|
||||
one of the OmniAuth users has admin permissions.
|
||||
|
||||
You may also bypass the auto sign in feature by browsing to
|
||||
|
|
|
|||
|
|
@ -82,8 +82,8 @@ To get the credentials (a pair of Client ID and Client Secret), you must [create
|
|||
1. [Reconfigure GitLab]( ../administration/restart_gitlab.md#omnibus-gitlab-reconfigure ) or [restart GitLab]( ../administration/restart_gitlab.md#installations-from-source ) for the changes to take effect if you installed GitLab via Omnibus or from source respectively.
|
||||
|
||||
On the sign in page, there should now be a Salesforce icon below the regular sign in form.
|
||||
Click the icon to begin the authentication process. Salesforce will ask the user to sign in and authorize the GitLab application.
|
||||
If everything goes well, the user will be returned to GitLab and will be signed in.
|
||||
Click the icon to begin the authentication process. Salesforce asks the user to sign in and authorize the GitLab application.
|
||||
If everything goes well, the user is returned to GitLab and is signed in.
|
||||
|
||||
NOTE: **Note:**
|
||||
GitLab requires the email address of each new user. Once the user is logged in using Salesforce, GitLab will redirect the user to the profile page where they will have to provide the email and verify the email.
|
||||
GitLab requires the email address of each new user. Once the user is logged in using Salesforce, GitLab redirects the user to the profile page where they must provide the email and verify the email.
|
||||
|
|
|
|||
|
|
@ -54,10 +54,10 @@ The following changes are needed to enable Shibboleth:
|
|||
|
||||
NOTE: **Note:**
|
||||
Starting from GitLab 11.4, OmniAuth is enabled by default. If you're using an
|
||||
earlier version, you'll need to explicitly enable it in `/etc/gitlab/gitlab.rb`.
|
||||
earlier version, you must explicitly enable it in `/etc/gitlab/gitlab.rb`.
|
||||
|
||||
1. In addition, add Shibboleth to `/etc/gitlab/gitlab.rb` as an OmniAuth provider.
|
||||
User attributes will be sent from the
|
||||
User attributes are sent from the
|
||||
Apache reverse proxy to GitLab as headers with the names from the Shibboleth
|
||||
attribute mapping. Therefore the values of the `args` hash
|
||||
should be in the form of `"HTTP_ATTRIBUTE"`. The keys in the hash are arguments
|
||||
|
|
@ -100,12 +100,12 @@ The following changes are needed to enable Shibboleth:
|
|||
1. [Reconfigure](../administration/restart_gitlab.md#omnibus-gitlab-reconfigure) or [restart](../administration/restart_gitlab.md#installations-from-source) GitLab for the changes to take effect if you
|
||||
installed GitLab via Omnibus or from source respectively.
|
||||
|
||||
On the sign in page, there should now be a "Sign in with: Shibboleth" icon below the regular sign in form. Click the icon to begin the authentication process. You will be redirected to IdP server (depends on your Shibboleth module configuration). If everything goes well the user will be returned to GitLab and will be signed in.
|
||||
On the sign in page, there should now be a "Sign in with: Shibboleth" icon below the regular sign in form. Click the icon to begin the authentication process. You are redirected to the IdP server (this depends on your Shibboleth module configuration). If everything goes well, the user is returned to GitLab and is signed in.
|
||||
|
||||
## Apache 2.4 / GitLab 8.6 update
|
||||
|
||||
The order of the first 2 Location directives is important. If they are reversed,
|
||||
you will not get a Shibboleth session!
|
||||
requesting a Shibboleth session fails!
|
||||
|
||||
```plaintext
|
||||
<Location />
|
||||
|
|
|
|||
|
|
@ -37,11 +37,11 @@ It is possible to create new issue, display issue details and search up to 5 iss
|
|||
|
||||
## Deploy command
|
||||
|
||||
In order to deploy to an environment, GitLab will try to find a deployment
|
||||
In order to deploy to an environment, GitLab tries to find a deployment
|
||||
manual action in the pipeline.
|
||||
|
||||
If there is only one action for a given environment, it is going to be triggered.
|
||||
If there is more than one action defined, GitLab will try to find an action
|
||||
If there is only one action for a given environment, it is triggered.
|
||||
If there is more than one action defined, GitLab tries to find an action
|
||||
whose name equals the environment name we want to deploy to.
|
||||
|
||||
Command will return an error when no matching action has been found.
|
||||
The command returns an error when no matching action has been found.
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ GitLab **merge requests** to Trello cards.
|
|||
|
||||
## Configuring the Power-Up
|
||||
|
||||
In order to get started, you will need to configure your Power-Up.
|
||||
In order to get started, you must configure your Power-Up.
|
||||
|
||||
In Trello:
|
||||
|
||||
|
|
@ -23,19 +23,19 @@ In Trello:
|
|||
1. Select the `Settings` (gear) icon
|
||||
1. In the popup menu, select `Authorize Account`
|
||||
|
||||
In this popup, fill in your `API URL` and `Personal Access Token`. After that, you will be able to attach any merge request to any Trello card on your selected Trello board.
|
||||
In this popup, fill in your `API URL` and `Personal Access Token`. After that, you can attach any merge request to any Trello card on your selected Trello board.
|
||||
|
||||
## What is my API URL?
|
||||
|
||||
Your API URL should be your GitLab instance URL with `/api/v4` appended at the end of the URL.
|
||||
For example, if your GitLab instance URL is `https://gitlab.com`, your API URL would be `https://gitlab.com/api/v4`.
|
||||
If your instance's URL is `https://example.com`, your API URL will be `https://example.com/api/v4`.
|
||||
If your instance's URL is `https://example.com`, your API URL is `https://example.com/api/v4`.
|
||||
|
||||

|
||||
|
||||
## What is my Personal Access Token?
|
||||
|
||||
Your GitLab's personal access token will enable your GitLab account to be accessed
|
||||
Your GitLab's personal access token enables your GitLab account to be accessed
|
||||
from Trello.
|
||||
|
||||
> Find it in GitLab by clicking on your avatar (upright corner), from which you access
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
|
||||
# Twitter OAuth2 OmniAuth Provider
|
||||
|
||||
To enable the Twitter OmniAuth provider you must register your application with Twitter. Twitter will generate a client ID and secret key for you to use.
|
||||
To enable the Twitter OmniAuth provider you must register your application with Twitter. Twitter generates a client ID and secret key for you to use.
|
||||
|
||||
1. Sign in to [Twitter Application Management](https://developer.twitter.com/apps).
|
||||
|
||||
|
|
@ -85,4 +85,4 @@ To enable the Twitter OmniAuth provider you must register your application with
|
|||
1. [Reconfigure](../administration/restart_gitlab.md#omnibus-gitlab-reconfigure) or [restart GitLab](../administration/restart_gitlab.md#installations-from-source) for the changes to take effect if you
|
||||
installed GitLab via Omnibus or from source respectively.
|
||||
|
||||
On the sign in page there should now be a Twitter icon below the regular sign in form. Click the icon to begin the authentication process. Twitter will ask the user to sign in and authorize the GitLab application. If everything goes well the user will be returned to GitLab and will be signed in.
|
||||
On the sign in page there should now be a Twitter icon below the regular sign in form. Click the icon to begin the authentication process. Twitter asks the user to sign in and authorize the GitLab application. If everything goes well the user is returned to GitLab and signed in.
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
---
|
||||
stage: Manage
|
||||
group: Value Stream Management
|
||||
group: Optimize
|
||||
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
|
||||
---
|
||||
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ to go for help. You can customize and display this information on the GitLab ser
|
|||
|
||||
## Adding a help message to the help page
|
||||
|
||||
You can add a help message, which will be shown on the GitLab `/help` page (e.g.,
|
||||
You can add a help message, which is shown on the GitLab `/help` page (e.g.,
|
||||
<https://gitlab.com/help>) in a new section at the top of the `/help` page:
|
||||
|
||||
1. Navigate to **Admin Area > Settings > Preferences**, then expand **Help page**.
|
||||
|
|
@ -27,7 +27,7 @@ You can add a help message, which will be shown on the GitLab `/help` page (e.g.
|
|||
|
||||
## Adding a help message to the login page **(STARTER)**
|
||||
|
||||
You can add a help message, which will be shown on the GitLab login page in a new section
|
||||
You can add a help message, which is shown on the GitLab login page in a new section
|
||||
titled `Need Help?`, located below the login page message:
|
||||
|
||||
1. Navigate to **Admin Area > Settings > Preferences**, then expand **Help page**.
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ type: reference
|
|||
This setting allows you to rate limit the requests to raw endpoints, defaults to `300` requests per minute.
|
||||
It can be modified in **Admin Area > Settings > Network > Performance Optimization**.
|
||||
|
||||
For example, requests over `300` per minute to `https://gitlab.com/gitlab-org/gitlab-foss/raw/master/app/controllers/application_controller.rb` will be blocked. Access to the raw file will be released after 1 minute.
|
||||
For example, requests over `300` per minute to `https://gitlab.com/gitlab-org/gitlab-foss/raw/master/app/controllers/application_controller.rb` are blocked. Access to the raw file is released after 1 minute.
|
||||
|
||||

|
||||
|
||||
|
|
|
|||
|
|
@ -25,9 +25,9 @@ You can restrict the password authentication for web interface and Git over HTTP
|
|||
|
||||
## Two-factor authentication
|
||||
|
||||
When this feature enabled, all users will have to use the [two-factor authentication](../../profile/account/two_factor_authentication.md).
|
||||
When this feature is enabled, all users must use the [two-factor authentication](../../profile/account/two_factor_authentication.md).
|
||||
|
||||
Once the two-factor authentication is configured as mandatory, the users will be allowed
|
||||
Once the two-factor authentication is configured as mandatory, the users are allowed
|
||||
to skip forced configuration of two-factor authentication for the configurable grace
|
||||
period in hours.
|
||||
|
||||
|
|
@ -44,13 +44,13 @@ see [Email notification for unknown sign-ins](../../profile/unknown_sign_in_noti
|
|||
|
||||
## Sign-in information
|
||||
|
||||
All users that are not logged-in will be redirected to the page represented by the configured
|
||||
"Home page URL" if value is not empty.
|
||||
All users that are not logged in are redirected to the page represented by the configured
|
||||
**Home page URL** if value is not empty.
|
||||
|
||||
All users will be redirect to the page represented by the configured "After sign out path"
|
||||
All users are redirected to the page represented by the configured **After sign out path**
|
||||
after sign out if value is not empty.
|
||||
|
||||
In the Sign-in restrictions section, scroll to the "Sign-in text" text box. You can add a
|
||||
In the **Sign-in restrictions** section, scroll to the **Sign-in text** field. You can add a
|
||||
custom message for your users in Markdown format.
|
||||
|
||||
For example, if you include the following information in the noted text box:
|
||||
|
|
@ -61,7 +61,7 @@ For example, if you include the following information in the noted text box:
|
|||
To access this text box, navigate to Admin Area > Settings > General, and expand the "Sign-in restrictions" section.
|
||||
```
|
||||
|
||||
Your users will see the "Custom sign-in text" when they navigate to the sign-in screen for your
|
||||
Your users see the **Custom sign-in text** when they navigate to the sign-in screen for your
|
||||
GitLab instance:
|
||||
|
||||

|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ type: reference
|
|||
|
||||
# Usage statistics **(CORE ONLY)**
|
||||
|
||||
GitLab Inc. will periodically collect information about your instance in order
|
||||
GitLab Inc. periodically collects information about your instance in order
|
||||
to perform various actions.
|
||||
|
||||
All statistics are opt-out. You can enable/disable them in the
|
||||
|
|
@ -22,7 +22,7 @@ If your GitLab instance is behind a proxy, set the appropriate [proxy configurat
|
|||
|
||||
## Version Check **(CORE ONLY)**
|
||||
|
||||
If enabled, version check will inform you if a new version is available and the
|
||||
If enabled, version check informs you if a new version is available and the
|
||||
importance of it through a status. This is shown on the help page (i.e. `/help`)
|
||||
for all signed in users, and on the admin pages. The statuses are:
|
||||
|
||||
|
|
@ -37,10 +37,10 @@ GitLab Inc. collects your instance's version and hostname (through the HTTP
|
|||
referer) as part of the version check. No other information is collected.
|
||||
|
||||
This information is used, among other things, to identify to which versions
|
||||
patches will need to be backported, making sure active GitLab instances remain
|
||||
patches must be backported, making sure active GitLab instances remain
|
||||
secure.
|
||||
|
||||
If you disable version check, this information will not be collected. Enable or
|
||||
If you disable version check, this information isn't collected. Enable or
|
||||
disable the version check in **Admin Area > Settings > Metrics and profiling > Usage statistics**.
|
||||
|
||||
### Request flow example
|
||||
|
|
@ -65,8 +65,8 @@ See [Usage Ping guide](../../../development/product_analytics/usage_ping.md).
|
|||
|
||||
## Instance-level statistics **(CORE ONLY)**
|
||||
|
||||
Once usage ping is enabled, GitLab will gather data from other instances and
|
||||
will be able to show [usage statistics](../analytics/index.md)
|
||||
After usage ping is enabled, GitLab gathers data from other instances and
|
||||
can show [usage statistics](../analytics/index.md)
|
||||
of your instance to your admins in **Admin Area > Analytics**.
|
||||
|
||||
<!-- ## Troubleshooting
|
||||
|
|
|
|||
|
|
@ -53,7 +53,7 @@ users to not set that header and bypass the GitLab rate limiter.
|
|||
Note that the bypass only works if the header is set to `1`.
|
||||
|
||||
Requests that bypassed the rate limiter because of the bypass header
|
||||
will be marked with `"throttle_safelist":"throttle_bypass_header"` in
|
||||
are marked with `"throttle_safelist":"throttle_bypass_header"` in
|
||||
[`production_json.log`](../../../administration/logs.md#production_jsonlog).
|
||||
|
||||
To disable the bypass mechanism, make sure the environment variable
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
---
|
||||
description: "Learn how long your open merge requests have spent in code review, and what distinguishes the longest-running." # Up to ~200 chars long. They will be displayed in Google Search snippets. It may help to write the page intro first, and then reuse it here.
|
||||
stage: Manage
|
||||
group: Value Stream Management
|
||||
group: Optimize
|
||||
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
|
||||
---
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
---
|
||||
stage: Manage
|
||||
group: Value Stream Management
|
||||
group: Optimize
|
||||
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
|
||||
---
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
---
|
||||
description: "Merge Request Analytics help you understand the efficiency of your code review process, and the productivity of your team." # Up to ~200 chars long. They will be displayed in Google Search snippets. It may help to write the page intro first, and then reuse it here.
|
||||
stage: Manage
|
||||
group: Value Stream Management
|
||||
group: Optimize
|
||||
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
|
||||
---
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
---
|
||||
stage: Manage
|
||||
group: Value Stream Management
|
||||
group: Optimize
|
||||
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
|
||||
---
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
---
|
||||
stage: Manage
|
||||
group: Value Stream Management
|
||||
group: Optimize
|
||||
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
|
||||
---
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
---
|
||||
stage: Manage
|
||||
group: Value Stream Management
|
||||
group: Optimize
|
||||
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
|
||||
---
|
||||
|
||||
|
|
|
|||
|
|
@ -161,8 +161,9 @@ headers whose values you want masked. For details on how to mask headers, see
|
|||
|
||||
It's also possible to authenticate the user before performing the DAST checks.
|
||||
|
||||
**Important:** It is highly recommended that you configure the scanner to authenticate to the application,
|
||||
or it will not be able to check most of the application for security risks, as most
|
||||
NOTE: **Note:**
|
||||
We highly recommended that you configure the scanner to authenticate to the application,
|
||||
otherwise it cannot check most of the application for security risks, as most
|
||||
of your application is likely not accessible without authentication. It is also recommended
|
||||
that you periodically confirm the scanner's authentication is still working as this tends to break over
|
||||
time due to authentication changes to the application.
|
||||
|
|
@ -488,8 +489,8 @@ variables:
|
|||
|
||||
When using `DAST_PATHS` and `DAST_PATHS_FILE`, note the following:
|
||||
|
||||
- `DAST_WEBSITE` must be defined when using either `DAST_PATHS_FILE` or `DAST_PATHS`. The paths listed in either will use `DAST_WEBSITE` to build the URLs to scan
|
||||
- Spidering is disabed when `DAST_PATHS` or `DAST_PATHS_FILE` are defined
|
||||
- `DAST_WEBSITE` must be defined when using either `DAST_PATHS_FILE` or `DAST_PATHS`. The paths listed in either use `DAST_WEBSITE` to build the URLs to scan
|
||||
- Spidering is disabled when `DAST_PATHS` or `DAST_PATHS_FILE` are defined
|
||||
- `DAST_PATHS_FILE` and `DAST_PATHS` can not be used together
|
||||
- The `DAST_PATHS` environment variable has a limit of about 130kb. If you have a list or paths
|
||||
greater than this, use `DAST_PATHS_FILE`.
|
||||
|
|
@ -531,7 +532,7 @@ DAST can be [configured](#customizing-the-dast-settings) using environment varia
|
|||
| `SECURE_ANALYZERS_PREFIX` | URL | Set the Docker registry base address from which to download the analyzer. |
|
||||
| `DAST_WEBSITE` | URL | The URL of the website to scan. `DAST_API_SPECIFICATION` must be specified if this is omitted. |
|
||||
| `DAST_API_SPECIFICATION` | URL or string | The API specification to import. The specification can be hosted at a URL, or the name of a file present in the `/zap/wrk` directory. `DAST_WEBSITE` must be specified if this is omitted. |
|
||||
| `DAST_SPIDER_START_AT_HOST` | boolean | Set to `false` to prevent DAST from resetting the target to its host before scanning. When `true`, non-host targets `http://test.site/some_path` will be reset to `http://test.site` before scan. Default: `true`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/258805) in GitLab 13.6. |
|
||||
| `DAST_SPIDER_START_AT_HOST` | boolean | Set to `false` to prevent DAST from resetting the target to its host before scanning. When `true`, non-host targets `http://test.site/some_path` is reset to `http://test.site` before scan. Default: `true`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/258805) in GitLab 13.6. |
|
||||
| `DAST_AUTH_URL` | URL | The URL of the page containing the sign-in HTML form on the target website. `DAST_USERNAME` and `DAST_PASSWORD` are submitted with the login form to create an authenticated scan. Not supported for API scans. |
|
||||
| `DAST_USERNAME` | string | The username to authenticate to in the website. |
|
||||
| `DAST_PASSWORD` | string | The password to authenticate to in the website. |
|
||||
|
|
@ -825,8 +826,8 @@ sample reports can be found in the
|
|||
|
||||
There are two formats of data in the JSON report that are used side by side:
|
||||
|
||||
- The proprietary ZAP format that will be eventually deprecated.
|
||||
- A common format that will be the default in the future.
|
||||
- The proprietary ZAP format, which is planned to be deprecated.
|
||||
- A common format that is planned to the default in the future.
|
||||
|
||||
### Other formats
|
||||
|
||||
|
|
|
|||
|
|
@ -62,7 +62,7 @@ Dependency Paths are supported for the following package managers:
|
|||
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/10536) in GitLab Ultimate 12.3.
|
||||
|
||||
If the [License Compliance](../../compliance/license_compliance/index.md) CI job is configured,
|
||||
the [discovered licenses](../../compliance/license_compliance/index.md#supported-languages-and-package-managers) will be displayed on this page.
|
||||
the [discovered licenses](../../compliance/license_compliance/index.md#supported-languages-and-package-managers) are displayed on this page.
|
||||
|
||||
## Downloading the Dependency List
|
||||
|
||||
|
|
|
|||
|
|
@ -356,10 +356,10 @@ Here are the requirements for using dependency scanning in an offline environmen
|
|||
|
||||
- GitLab Runner with the [`docker` or `kubernetes` executor](#requirements).
|
||||
- Docker Container Registry with locally available copies of dependency scanning [analyzer](https://gitlab.com/gitlab-org/security-products/analyzers) images.
|
||||
- If you have a limited access environment you will need to allow access, such as using a proxy, to the advisory database: `https://gitlab.com/gitlab-org/security-products/gemnasium-db.git`.
|
||||
- If you have a limited access environment you need to allow access, such as using a proxy, to the advisory database: `https://gitlab.com/gitlab-org/security-products/gemnasium-db.git`.
|
||||
If you are unable to permit access to `https://gitlab.com/gitlab-org/security-products/gemnasium-db.git` you must host an offline copy of this `git` repository and set the `GEMNASIUM_DB_REMOTE_URL` variable to the URL of this repository. For more information on configuration variables, see [Dependency Scanning](#configuring-dependency-scanning).
|
||||
|
||||
This advisory database is constantly being updated, so you will need to periodically sync your local copy with GitLab's.
|
||||
This advisory database is constantly being updated, so you must periodically sync your local copy with GitLab's.
|
||||
|
||||
- _Only if scanning Ruby projects_: Host an offline Git copy of the [advisory database](https://github.com/rubysec/ruby-advisory-db).
|
||||
- _Only if scanning npm/yarn projects_: Host an offline copy of the [retire.js](https://github.com/RetireJS/retire.js/) [node](https://github.com/RetireJS/retire.js/blob/master/repository/npmrepository.json) and [js](https://github.com/RetireJS/retire.js/blob/master/repository/jsrepository.json) advisory databases.
|
||||
|
|
|
|||
|
|
@ -34,7 +34,7 @@ must come in through physical media (USB drive, hard drive, writeable DVD, etc.)
|
|||
|
||||
## Overview
|
||||
|
||||
GitLab scanners generally will connect to the internet to download the
|
||||
GitLab scanners usually connect to the internet to download the
|
||||
latest sets of signatures, rules, and patches. A few extra steps are necessary
|
||||
to configure the tools to function properly by using resources available on your local network.
|
||||
|
||||
|
|
@ -73,7 +73,7 @@ hosting the latest versions of that dependency or image.
|
|||
|
||||
### Scanner signature and rule updates
|
||||
|
||||
When connected to the internet, some scanners will reference public databases
|
||||
When connected to the internet, some scanners reference public databases
|
||||
for the latest sets of signatures and rules to check against. Without connectivity,
|
||||
this is not possible. Depending on the scanner, you must therefore disable
|
||||
these automatic update checks and either use the databases that they came
|
||||
|
|
@ -131,7 +131,7 @@ a bastion, and used only for this specific project.
|
|||
|
||||
#### Scheduling the updates
|
||||
|
||||
By default, this project's pipeline will run only once, when the `.gitlab-ci.yml` is added to the
|
||||
By default, this project's pipeline runs only once, when the `.gitlab-ci.yml` is added to the
|
||||
repo. To update the GitLab security scanners and signatures, it's necessary to run this pipeline
|
||||
regularly. GitLab provides a way to [schedule pipelines](../../../ci/pipelines/schedules.md). For
|
||||
example, you can set this up to download and store the Docker images every week.
|
||||
|
|
@ -139,7 +139,7 @@ example, you can set this up to download and store the Docker images every week.
|
|||
Some images can be updated more frequently than others. For example, the [vulnerability database](https://hub.docker.com/r/arminc/clair-db/tags)
|
||||
for Container Scanning is updated daily. To update this single image, create a new Scheduled
|
||||
Pipeline that runs daily and set `SECURE_BINARIES_ANALYZERS` to `clair-vulnerabilities-db`. Only
|
||||
this job will be triggered, and the image will be updated daily and made available in the project
|
||||
this job is triggered, and the image is updated daily and made available in the project
|
||||
registry.
|
||||
|
||||
#### Using the secure bundle created
|
||||
|
|
|
|||
|
|
@ -59,7 +59,7 @@ is **not** `19.03.0`. See [troubleshooting information](#error-response-from-dae
|
|||
|
||||
## Supported languages and frameworks
|
||||
|
||||
GitLab SAST supports a variety of languages, package managers, and frameworks. Our SAST security scanners also feature automatic language detection which works even for mixed-language projects. If any supported language is detected in project source code we will automatically run the appropriate SAST analyzers.
|
||||
GitLab SAST supports a variety of languages, package managers, and frameworks. Our SAST security scanners also feature automatic language detection which works even for mixed-language projects. If any supported language is detected in project source code we automatically run the appropriate SAST analyzers.
|
||||
|
||||
You can also [view our language roadmap](https://about.gitlab.com/direction/secure/static-analysis/sast/#language-support) and [request other language support by opening an issue](https://gitlab.com/groups/gitlab-org/-/epics/297).
|
||||
|
||||
|
|
@ -336,7 +336,7 @@ a `before_script` execution to prepare your scan job.
|
|||
To pass your project's dependencies as artifacts, the dependencies must be included
|
||||
in the project's working directory and specified using the `artifacts:path` configuration.
|
||||
If all dependencies are present, the `COMPILE=false` variable can be provided to the
|
||||
analyzer and compilation will be skipped:
|
||||
analyzer and compilation is skipped:
|
||||
|
||||
```yaml
|
||||
image: maven:3.6-jdk-8-alpine
|
||||
|
|
@ -410,7 +410,7 @@ Some analyzers make it possible to filter out vulnerabilities under a given thre
|
|||
|
||||
| Environment variable | Default value | Description |
|
||||
|-------------------------------|--------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `SAST_EXCLUDED_PATHS` | `spec, test, tests, tmp` | Exclude vulnerabilities from output based on the paths. This is a comma-separated list of patterns. Patterns can be globs, or file or folder paths (for example, `doc,spec` ). Parent directories will also match patterns. |
|
||||
| `SAST_EXCLUDED_PATHS` | `spec, test, tests, tmp` | Exclude vulnerabilities from output based on the paths. This is a comma-separated list of patterns. Patterns can be globs, or file or folder paths (for example, `doc,spec` ). Parent directories also match patterns. |
|
||||
| `SEARCH_MAX_DEPTH` | 4 | Maximum number of directories traversed when searching for source code files. |
|
||||
| `SAST_BANDIT_EXCLUDED_PATHS` | | Comma-separated list of paths to exclude from scan. Uses Python's [`fnmatch` syntax](https://docs.python.org/2/library/fnmatch.html); For example: `'*/tests/*, */venv/*'` |
|
||||
| `SAST_BRAKEMAN_LEVEL` | 1 | Ignore Brakeman vulnerabilities under given confidence level. Integer, 1=Low 3=High. |
|
||||
|
|
@ -424,7 +424,7 @@ Some analyzers can be customized with environment variables.
|
|||
| Environment variable | Analyzer | Description |
|
||||
|---------------------------------------|----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `SCAN_KUBERNETES_MANIFESTS` | Kubesec | Set to `"true"` to scan Kubernetes manifests. |
|
||||
| `KUBESEC_HELM_CHARTS_PATH` | Kubesec | Optional path to Helm charts that `helm` uses to generate a Kubernetes manifest that `kubesec` will scan. If dependencies are defined, `helm dependency build` should be ran in a `before_script` to fetch the necessary dependencies. |
|
||||
| `KUBESEC_HELM_CHARTS_PATH` | Kubesec | Optional path to Helm charts that `helm` uses to generate a Kubernetes manifest that `kubesec` scans. If dependencies are defined, `helm dependency build` should be ran in a `before_script` to fetch the necessary dependencies. |
|
||||
| `KUBESEC_HELM_OPTIONS` | Kubesec | Additional arguments for the `helm` executable. |
|
||||
| `COMPILE` | SpotBugs | Set to `false` to disable project compilation and dependency fetching. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/195252) in GitLab 13.1. |
|
||||
| `ANT_HOME` | SpotBugs | The `ANT_HOME` environment variable. |
|
||||
|
|
@ -459,7 +459,7 @@ analyzer containers: `DOCKER_`, `CI`, `GITLAB_`, `FF_`, `HOME`, `PWD`, `OLDPWD`,
|
|||
|
||||
Receive early access to experimental features.
|
||||
|
||||
Currently, this will enable scanning of iOS and Android apps via the [MobSF analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/mobsf/).
|
||||
Currently, this enables scanning of iOS and Android apps via the [MobSF analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/mobsf/).
|
||||
|
||||
To enable experimental features, add the following to your `.gitlab-ci.yml` file:
|
||||
|
||||
|
|
|
|||
|
|
@ -87,7 +87,7 @@ display all detected and confirmed vulnerabilities.
|
|||
|
||||
The Vulnerability Report first displays the time at which the last pipeline completed on the project's
|
||||
default branch. There's also a link to view this in more detail. In the case of any pipeline failures,
|
||||
you will see the number of failures clearly indicated. The failure notification takes you directly to
|
||||
the number of failures is indicated. The failure notification takes you directly to
|
||||
the **Failed jobs** tab of the pipeline page.
|
||||
|
||||
The Vulnerability Report next displays the total number of vulnerabilities by severity (for example,
|
||||
|
|
@ -142,7 +142,7 @@ Next to the timeline chart is a list of projects, grouped and sorted by the seve
|
|||
| B | One or more "low" |
|
||||
| A | Zero vulnerabilities |
|
||||
|
||||
Projects with no vulnerability tests configured will not appear in the list. Additionally, dismissed
|
||||
Projects with no vulnerability tests configured don't appear in the list. Additionally, dismissed
|
||||
vulnerabilities are excluded.
|
||||
|
||||
Navigate to the group's [vulnerability report](#vulnerability-report-1) to view the vulnerabilities found.
|
||||
|
|
@ -225,7 +225,7 @@ are discovered.
|
|||
|
||||
To ensure the information on the Security Dashboard is regularly updated,
|
||||
[configure a scheduled pipeline](../../../ci/pipelines/schedules.md) to run a
|
||||
daily security scan. This will update the information displayed on the Security
|
||||
daily security scan. This updates the information displayed on the Security
|
||||
Dashboard regardless of how often the default branch is updated.
|
||||
|
||||
That way, reports are created even if no code change happens.
|
||||
|
|
|
|||
|
|
@ -37,7 +37,7 @@ the following values:
|
|||
|-----------|------------------------------------------------------------------------------------------------------------------|
|
||||
| Detected | The default state for a newly discovered vulnerability |
|
||||
| Confirmed | A user has seen this vulnerability and confirmed it to be accurate |
|
||||
| Dismissed | A user has seen this vulnerability and dismissed it because it is not accurate or otherwise will not be resolved |
|
||||
| Dismissed | A user has seen this vulnerability and dismissed it because it is not accurate or otherwise not to be resolved |
|
||||
| Resolved | The vulnerability has been fixed and is no longer valid |
|
||||
|
||||
A timeline shows you when the vulnerability status has changed
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ that is provided by [Auto DevOps](../../../topics/autodevops/index.md).
|
|||
|
||||
GitLab checks the License Compliance report, compares the licenses between the
|
||||
source and target branches, and shows the information right on the merge request.
|
||||
Denied licenses will be clearly visible with an `x` red icon next to them
|
||||
Denied licenses are notated with an `x` red icon next to them
|
||||
as well as new licenses which need a decision from you. In addition, you can
|
||||
[manually allow or deny](#policies)
|
||||
licenses in your project's license compliance policy section. If GitLab detects a denied license
|
||||
|
|
@ -30,10 +30,10 @@ to remove the license.
|
|||
|
||||
NOTE: **Note:**
|
||||
If the license compliance report doesn't have anything to compare to, no information
|
||||
will be displayed in the merge request area. That is the case when you add the
|
||||
is displayed in the merge request area. That is the case when you add the
|
||||
`license_scanning` job in your `.gitlab-ci.yml` for the first time.
|
||||
Consecutive merge requests will have something to compare to and the license
|
||||
compliance report will be shown properly.
|
||||
Consecutive merge requests have something to compare to and the license
|
||||
compliance report is shown properly.
|
||||
|
||||

|
||||
|
||||
|
|
@ -114,7 +114,7 @@ Before GitLab 12.8, the `license_scanning` job was named `license_management`. G
|
|||
the `license_management` job, so you must migrate to the `license_scanning` job and use the new
|
||||
`License-Scanning.gitlab-ci.yml` template.
|
||||
|
||||
The results will be saved as a
|
||||
The results are saved as a
|
||||
[License Compliance report artifact](../../../ci/pipelines/job_artifacts.md#artifactsreportslicense_scanning)
|
||||
that you can later download and analyze. Due to implementation limitations, we
|
||||
always take the latest License Compliance artifact available. Behind the scenes, the
|
||||
|
|
@ -160,7 +160,7 @@ in the project automated setup, like the download and installation of a certific
|
|||
For that, a `LICENSE_MANAGEMENT_SETUP_CMD` environment variable can be passed to the container,
|
||||
with the required commands to run before the license detection.
|
||||
|
||||
If present, this variable will override the setup step necessary to install all the packages
|
||||
If present, this variable overrides the setup step necessary to install all the packages
|
||||
of your application (e.g.: for a project with a `Gemfile`, the setup step could be
|
||||
`bundle install`).
|
||||
|
||||
|
|
@ -695,7 +695,7 @@ requirements must be met:
|
|||
[supported languages and package managers](#supported-languages-and-package-managers).
|
||||
|
||||
Once everything is set, navigate to **Security & Compliance > License Compliance**
|
||||
in your project's sidebar, and you'll see the licenses displayed, where:
|
||||
in your project's sidebar, and the licenses are displayed, where:
|
||||
|
||||
- **Name:** The name of the license.
|
||||
- **Component:** The components which have this license.
|
||||
|
|
@ -708,8 +708,8 @@ in your project's sidebar, and you'll see the licenses displayed, where:
|
|||
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22465) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.9.
|
||||
|
||||
Policies allow you to specify licenses that are `allowed` or `denied` in a project. If a `denied`
|
||||
license is newly committed it will disallow a merge request and instruct the developer to remove it.
|
||||
Note, the merge request will not be able to be merged until the `denied` license is removed.
|
||||
license is newly committed it blocks the merge request and instructs the developer to remove it.
|
||||
Note, the merge request is not able to be merged until the `denied` license is removed.
|
||||
You may add a [`License-Check` approval rule](#enabling-license-approvals-within-a-project),
|
||||
which enables a designated approver that can approve and then merge a merge request with `denied` license.
|
||||
|
||||
|
|
@ -771,7 +771,7 @@ specify the desired version by adding a
|
|||
or using the appropriate [`ASDF_<tool>_VERSION`](https://asdf-vm.com/#/core-configuration?id=environment-variables) environment variable to
|
||||
activate the appropriate version.
|
||||
|
||||
For example, the following `.tool-versions` file will activate version `12.16.3` of [Node.js](https://nodejs.org/)
|
||||
For example, the following `.tool-versions` file activates version `12.16.3` of [Node.js](https://nodejs.org/)
|
||||
and version `2.7.2` of [Ruby](https://www.ruby-lang.org/).
|
||||
|
||||
```plaintext
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
---
|
||||
type: reference
|
||||
stage: Manage
|
||||
group: Value Stream Management
|
||||
group: Optimize
|
||||
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
|
||||
---
|
||||
# Contribution Analytics **(STARTER)**
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
---
|
||||
type: reference, howto
|
||||
stage: Manage
|
||||
group: Value Stream Management
|
||||
group: Optimize
|
||||
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
|
||||
---
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
---
|
||||
type: reference
|
||||
stage: Manage
|
||||
group: Value Stream Management
|
||||
group: Optimize
|
||||
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
|
||||
---
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,110 @@
|
|||
# frozen_string_literal: true
|
||||
# rubocop:disable Style/Documentation
|
||||
|
||||
module Gitlab
|
||||
module BackgroundMigration
|
||||
class UpdateExistingUsersThatRequireTwoFactorAuth # rubocop:disable Metrics/ClassLength
|
||||
def perform(start_id, stop_id)
|
||||
ActiveRecord::Base.connection.execute <<~SQL
|
||||
UPDATE
|
||||
users
|
||||
SET
|
||||
require_two_factor_authentication_from_group = FALSE
|
||||
WHERE
|
||||
users.id BETWEEN #{start_id}
|
||||
AND #{stop_id}
|
||||
AND users.require_two_factor_authentication_from_group = TRUE
|
||||
AND users.id NOT IN ( SELECT DISTINCT
|
||||
users_groups_query.user_id
|
||||
FROM (
|
||||
SELECT
|
||||
users.id AS user_id,
|
||||
members.source_id AS group_ids
|
||||
FROM
|
||||
users
|
||||
LEFT JOIN members ON members.source_type = 'Namespace'
|
||||
AND members.requested_at IS NULL
|
||||
AND members.user_id = users.id
|
||||
AND members.type = 'GroupMember'
|
||||
WHERE
|
||||
users.require_two_factor_authentication_from_group = TRUE
|
||||
AND users.id BETWEEN #{start_id}
|
||||
AND #{stop_id}) AS users_groups_query
|
||||
INNER JOIN LATERAL ( WITH RECURSIVE "base_and_ancestors" AS (
|
||||
(
|
||||
SELECT
|
||||
"namespaces"."type",
|
||||
"namespaces"."id",
|
||||
"namespaces"."parent_id",
|
||||
"namespaces"."require_two_factor_authentication"
|
||||
FROM
|
||||
"namespaces"
|
||||
WHERE
|
||||
"namespaces"."type" = 'Group'
|
||||
AND "namespaces"."id" = users_groups_query.group_ids)
|
||||
UNION (
|
||||
SELECT
|
||||
"namespaces"."type",
|
||||
"namespaces"."id",
|
||||
"namespaces"."parent_id",
|
||||
"namespaces"."require_two_factor_authentication"
|
||||
FROM
|
||||
"namespaces",
|
||||
"base_and_ancestors"
|
||||
WHERE
|
||||
"namespaces"."type" = 'Group'
|
||||
AND "namespaces"."id" = "base_and_ancestors"."parent_id")),
|
||||
"base_and_descendants" AS (
|
||||
(
|
||||
SELECT
|
||||
"namespaces"."type",
|
||||
"namespaces"."id",
|
||||
"namespaces"."parent_id",
|
||||
"namespaces"."require_two_factor_authentication"
|
||||
FROM
|
||||
"namespaces"
|
||||
WHERE
|
||||
"namespaces"."type" = 'Group'
|
||||
AND "namespaces"."id" = users_groups_query.group_ids)
|
||||
UNION (
|
||||
SELECT
|
||||
"namespaces"."type",
|
||||
"namespaces"."id",
|
||||
"namespaces"."parent_id",
|
||||
"namespaces"."require_two_factor_authentication"
|
||||
FROM
|
||||
"namespaces",
|
||||
"base_and_descendants"
|
||||
WHERE
|
||||
"namespaces"."type" = 'Group'
|
||||
AND "namespaces"."parent_id" = "base_and_descendants"."id"))
|
||||
SELECT
|
||||
"namespaces".*
|
||||
FROM ((
|
||||
SELECT
|
||||
"namespaces"."type",
|
||||
"namespaces"."id",
|
||||
"namespaces"."parent_id",
|
||||
"namespaces"."require_two_factor_authentication"
|
||||
FROM
|
||||
"base_and_ancestors" AS "namespaces"
|
||||
WHERE
|
||||
"namespaces"."type" = 'Group')
|
||||
UNION (
|
||||
SELECT
|
||||
"namespaces"."type",
|
||||
"namespaces"."id",
|
||||
"namespaces"."parent_id",
|
||||
"namespaces"."require_two_factor_authentication"
|
||||
FROM
|
||||
"base_and_descendants" AS "namespaces"
|
||||
WHERE
|
||||
"namespaces"."type" = 'Group')) namespaces
|
||||
WHERE
|
||||
"namespaces"."type" = 'Group'
|
||||
AND "namespaces"."require_two_factor_authentication" = TRUE) AS hierarchy_tree ON TRUE);
|
||||
SQL
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,156 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Database
|
||||
# For large tables, PostgreSQL can take a long time to count rows due to MVCC.
|
||||
# Implements a distinct batch counter based on HyperLogLog algorithm
|
||||
# Needs indexes on the column below to calculate max, min and range queries
|
||||
# For larger tables just set higher batch_size with index optimization
|
||||
#
|
||||
# In order to not use a possible complex time consuming query when calculating min and max values,
|
||||
# the start and finish can be sent specifically, start and finish should contain max and min values for PRIMARY KEY of
|
||||
# relation (most cases `id` column) rather than counted attribute eg:
|
||||
# estimate_distinct_count(start: ::Project.with_active_services.minimum(:id), finish: ::Project.with_active_services.maximum(:id))
|
||||
#
|
||||
# Grouped relations are NOT supported yet.
|
||||
#
|
||||
# @example Usage
|
||||
# ::Gitlab::Database::PostgresHllBatchDistinctCount.new(::Project, :creator_id).estimate_distinct_count
|
||||
# ::Gitlab::Database::PostgresHllBatchDistinctCount.new(::Project.with_active_services.service_desk_enabled.where(time_period))
|
||||
# .estimate_distinct_count(
|
||||
# batch_size: 1_000,
|
||||
# start: ::Project.with_active_services.service_desk_enabled.where(time_period).minimum(:id),
|
||||
# finish: ::Project.with_active_services.service_desk_enabled.where(time_period).maximum(:id)
|
||||
# )
|
||||
#
|
||||
# @note HyperLogLog is an PROBABILISTIC algorithm that ESTIMATES distinct count of given attribute value for supplied relation
|
||||
# Like all probabilistic algorithm is has ERROR RATE margin, that can affect values,
|
||||
# for given implementation no higher value was reported (https://gitlab.com/gitlab-org/gitlab/-/merge_requests/45673#accuracy-estimation) than 5.3%
|
||||
# for the most of a cases this value is lower. However, if the exact value is necessary other tools has to be used.
|
||||
class PostgresHllBatchDistinctCounter
|
||||
FALLBACK = -1
|
||||
MIN_REQUIRED_BATCH_SIZE = 1_250
|
||||
MAX_ALLOWED_LOOPS = 10_000
|
||||
SLEEP_TIME_IN_SECONDS = 0.01 # 10 msec sleep
|
||||
|
||||
# Each query should take < 500ms https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22705
|
||||
DEFAULT_BATCH_SIZE = 100_000
|
||||
|
||||
BIT_31_MASK = "B'0#{'1' * 31}'"
|
||||
BIT_9_MASK = "B'#{'0' * 23}#{'1' * 9}'"
|
||||
# @example source_query
|
||||
# SELECT CAST(('X' || md5(CAST(%{column} as text))) as bit(32)) attr_hash_32_bits
|
||||
# FROM %{relation}
|
||||
# WHERE %{pkey} >= %{batch_start}
|
||||
# AND %{pkey} < %{batch_end}
|
||||
# AND %{column} IS NOT NULL
|
||||
BUCKETED_DATA_SQL = <<~SQL
|
||||
WITH hashed_attributes AS (%{source_query})
|
||||
SELECT (attr_hash_32_bits & #{BIT_9_MASK})::int AS bucket_num,
|
||||
(31 - floor(log(2, min((attr_hash_32_bits & #{BIT_31_MASK})::int))))::int as bucket_hash
|
||||
FROM hashed_attributes
|
||||
GROUP BY 1 ORDER BY 1
|
||||
SQL
|
||||
|
||||
TOTAL_BUCKETS_NUMBER = 512
|
||||
|
||||
def initialize(relation, column = nil)
|
||||
@relation = relation
|
||||
@column = column || relation.primary_key
|
||||
end
|
||||
|
||||
def unwanted_configuration?(finish, batch_size, start)
|
||||
batch_size <= MIN_REQUIRED_BATCH_SIZE ||
|
||||
(finish - start) / batch_size >= MAX_ALLOWED_LOOPS ||
|
||||
start > finish
|
||||
end
|
||||
|
||||
def estimate_distinct_count(batch_size: nil, start: nil, finish: nil)
|
||||
raise 'BatchCount can not be run inside a transaction' if ActiveRecord::Base.connection.transaction_open?
|
||||
|
||||
batch_size ||= DEFAULT_BATCH_SIZE
|
||||
|
||||
start = actual_start(start)
|
||||
finish = actual_finish(finish)
|
||||
|
||||
raise "Batch counting expects positive values only for #{@column}" if start < 0 || finish < 0
|
||||
return FALLBACK if unwanted_configuration?(finish, batch_size, start)
|
||||
|
||||
batch_start = start
|
||||
hll_blob = {}
|
||||
|
||||
while batch_start <= finish
|
||||
begin
|
||||
hll_blob.merge!(hll_blob_for_batch(batch_start, batch_start + batch_size)) {|_key, old, new| new > old ? new : old }
|
||||
batch_start += batch_size
|
||||
end
|
||||
sleep(SLEEP_TIME_IN_SECONDS)
|
||||
end
|
||||
|
||||
estimate_cardinality(hll_blob)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# arbitrary values that are present in #estimate_cardinality
|
||||
# are sourced from https://www.sisense.com/blog/hyperloglog-in-pure-sql/
|
||||
# article, they are not representing any entity and serves as tune value
|
||||
# for the whole equation
|
||||
def estimate_cardinality(hll_blob)
|
||||
num_zero_buckets = TOTAL_BUCKETS_NUMBER - hll_blob.size
|
||||
|
||||
num_uniques = (
|
||||
((TOTAL_BUCKETS_NUMBER**2) * (0.7213 / (1 + 1.079 / TOTAL_BUCKETS_NUMBER))) /
|
||||
(num_zero_buckets + hll_blob.values.sum { |bucket_hash, _| 2**(-1 * bucket_hash)} )
|
||||
).to_i
|
||||
|
||||
if num_zero_buckets > 0 && num_uniques < 2.5 * TOTAL_BUCKETS_NUMBER
|
||||
((0.7213 / (1 + 1.079 / TOTAL_BUCKETS_NUMBER)) * (TOTAL_BUCKETS_NUMBER *
|
||||
Math.log2(TOTAL_BUCKETS_NUMBER.to_f / num_zero_buckets)))
|
||||
else
|
||||
num_uniques
|
||||
end
|
||||
end
|
||||
|
||||
def hll_blob_for_batch(start, finish)
|
||||
@relation
|
||||
.connection
|
||||
.execute(BUCKETED_DATA_SQL % { source_query: source_query(start, finish) })
|
||||
.map(&:values)
|
||||
.to_h
|
||||
end
|
||||
|
||||
# Generate the source query SQL snippet for the provided id range
|
||||
#
|
||||
# @example SQL query template
|
||||
# SELECT CAST(('X' || md5(CAST(%{column} as text))) as bit(32)) attr_hash_32_bits
|
||||
# FROM %{relation}
|
||||
# WHERE %{pkey} >= %{batch_start} AND %{pkey} < %{batch_end}
|
||||
# AND %{column} IS NOT NULL
|
||||
#
|
||||
# @param start initial id range
|
||||
# @param finish final id range
|
||||
# @return [String] SQL query fragment
|
||||
def source_query(start, finish)
|
||||
col_as_arel = @column.is_a?(Arel::Attributes::Attribute) ? @column : Arel.sql(@column.to_s)
|
||||
col_as_text = Arel::Nodes::NamedFunction.new('CAST', [col_as_arel.as('text')])
|
||||
md5_of_col = Arel::Nodes::NamedFunction.new('md5', [col_as_text])
|
||||
md5_as_hex = Arel::Nodes::Concat.new(Arel.sql("'X'"), md5_of_col)
|
||||
bits = Arel::Nodes::NamedFunction.new('CAST', [md5_as_hex.as('bit(32)')])
|
||||
|
||||
@relation
|
||||
.where(@relation.primary_key => (start...finish))
|
||||
.where(col_as_arel.not_eq(nil))
|
||||
.select(bits.as('attr_hash_32_bits')).to_sql
|
||||
end
|
||||
|
||||
def actual_start(start)
|
||||
start || @relation.unscope(:group, :having).minimum(@relation.primary_key) || 0
|
||||
end
|
||||
|
||||
def actual_finish(finish)
|
||||
finish || @relation.unscope(:group, :having).maximum(@relation.primary_key) || 0
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -22998,7 +22998,7 @@ msgstr ""
|
|||
msgid "Repository synchronization concurrency limit"
|
||||
msgstr ""
|
||||
|
||||
msgid "Repository: %{counter_repositories} / Wikis: %{counter_wikis} / Build Artifacts: %{counter_build_artifacts} / LFS: %{counter_lfs_objects} / Snippets: %{counter_snippets}"
|
||||
msgid "Repository: %{counter_repositories} / Wikis: %{counter_wikis} / Build Artifacts: %{counter_build_artifacts} / LFS: %{counter_lfs_objects} / Snippets: %{counter_snippets} / Packages: %{counter_packages} / Uploads: %{counter_uploads}"
|
||||
msgstr ""
|
||||
|
||||
msgid "RepositorySettingsAccessLevel|Select"
|
||||
|
|
@ -29375,6 +29375,9 @@ msgstr ""
|
|||
msgid "UsageQuota|Unlimited"
|
||||
msgstr ""
|
||||
|
||||
msgid "UsageQuota|Uploads"
|
||||
msgstr ""
|
||||
|
||||
msgid "UsageQuota|Usage"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@ -122,13 +122,17 @@ module QA
|
|||
|
||||
def select_all_activities_filter
|
||||
select_filter_with_text('Show all activity')
|
||||
|
||||
wait_until do
|
||||
has_no_element?(:discussion_filter_container) && has_element?(:comment_field)
|
||||
end
|
||||
end
|
||||
|
||||
def select_comments_only_filter
|
||||
select_filter_with_text('Show comments only')
|
||||
|
||||
wait_until do
|
||||
has_no_element?(:system_note_content)
|
||||
has_no_element?(:discussion_filter_container) && has_no_element?(:system_note_content)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -145,6 +149,8 @@ module QA
|
|||
click_element :note_dropdown
|
||||
click_element :discussion_menu_item
|
||||
click_element :comment_button
|
||||
|
||||
has_comment?(text)
|
||||
end
|
||||
|
||||
def toggle_comments(position)
|
||||
|
|
|
|||
|
|
@ -4,22 +4,20 @@ module QA
|
|||
RSpec.describe 'Plan', :reliable do
|
||||
describe 'collapse comments in issue discussions' do
|
||||
let(:my_first_reply) { 'My first reply' }
|
||||
let(:one_reply) { '1 reply' }
|
||||
let(:issue) { Resource::Issue.fabricate_via_api! }
|
||||
|
||||
before do
|
||||
Flow::Login.sign_in
|
||||
|
||||
Resource::Issue.fabricate_via_api!.visit!
|
||||
|
||||
Page::Project::Issue::Show.perform do |show|
|
||||
show.select_all_activities_filter
|
||||
show.start_discussion('My first discussion')
|
||||
show.reply_to_discussion(1, my_first_reply)
|
||||
end
|
||||
issue.visit!
|
||||
end
|
||||
|
||||
it 'collapses and expands reply for comments in an issue', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/434' do
|
||||
Page::Project::Issue::Show.perform do |show|
|
||||
one_reply = "1 reply"
|
||||
show.select_all_activities_filter
|
||||
show.start_discussion('My first discussion')
|
||||
show.reply_to_discussion(1, my_first_reply)
|
||||
|
||||
show.collapse_replies
|
||||
expect(show).to have_content(one_reply)
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ RSpec.describe "Admin > Admin sees project statistics" do
|
|||
let(:project) { create(:project, :repository) }
|
||||
|
||||
it "shows project statistics" do
|
||||
expect(page).to have_content("Storage: 0 Bytes (Repository: 0 Bytes / Wikis: 0 Bytes / Build Artifacts: 0 Bytes / LFS: 0 Bytes / Snippets: 0 Bytes)")
|
||||
expect(page).to have_content("Storage: 0 Bytes (Repository: 0 Bytes / Wikis: 0 Bytes / Build Artifacts: 0 Bytes / LFS: 0 Bytes / Snippets: 0 Bytes / Packages: 0 Bytes / Uploads: 0 Bytes)")
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -8,11 +8,6 @@ RSpec.describe 'Merge request > User resolves conflicts', :js do
|
|||
let(:project) { create(:project, :repository) }
|
||||
let(:user) { project.creator }
|
||||
|
||||
before do
|
||||
# In order to have the diffs collapsed, we need to disable the increase feature
|
||||
stub_feature_flags(gitlab_git_diff_size_limit_increase: false)
|
||||
end
|
||||
|
||||
def create_merge_request(source_branch)
|
||||
create(:merge_request, source_branch: source_branch, target_branch: 'conflict-start', source_project: project, merge_status: :unchecked) do |mr|
|
||||
mr.mark_as_unmergeable
|
||||
|
|
|
|||
|
|
@ -27,7 +27,6 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
|
|||
|
||||
before do
|
||||
stub_application_setting(auto_devops_enabled: false)
|
||||
stub_feature_flags(ci_merge_request_pipeline: true)
|
||||
stub_ci_pipeline_yaml_file(YAML.dump(config))
|
||||
project.add_maintainer(user)
|
||||
sign_in(user)
|
||||
|
|
|
|||
|
|
@ -155,10 +155,6 @@ RSpec.describe 'Diff file viewer', :js do
|
|||
|
||||
context 'binary file that appears to be text in the first 1024 bytes' do
|
||||
before do
|
||||
# The file we're visiting is smaller than 10 KB and we want it collapsed
|
||||
# so we need to disable the size increase feature.
|
||||
stub_feature_flags(gitlab_git_diff_size_limit_increase: false)
|
||||
|
||||
visit_commit('7b1cf4336b528e0f3d1d140ee50cafdbc703597c')
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -14,8 +14,6 @@ RSpec.describe ForkProjectsFinder do
|
|||
let(:private_fork_member) { create(:user) }
|
||||
|
||||
before do
|
||||
stub_feature_flags(object_pools: source_project)
|
||||
|
||||
private_fork.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
|
||||
private_fork.add_developer(private_fork_member)
|
||||
|
||||
|
|
|
|||
|
|
@ -12,7 +12,6 @@ import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
|
|||
import CollapsedFilesWarning from '~/diffs/components/collapsed_files_warning.vue';
|
||||
import CommitWidget from '~/diffs/components/commit_widget.vue';
|
||||
import TreeList from '~/diffs/components/tree_list.vue';
|
||||
import { INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE } from '~/diffs/constants';
|
||||
import createDiffsStore from '../create_diffs_store';
|
||||
import axios from '~/lib/utils/axios_utils';
|
||||
import * as urlUtils from '~/lib/utils/url_utility';
|
||||
|
|
@ -75,12 +74,6 @@ describe('diffs/components/app', () => {
|
|||
});
|
||||
}
|
||||
|
||||
function getOppositeViewType(currentViewType) {
|
||||
return currentViewType === INLINE_DIFF_VIEW_TYPE
|
||||
? PARALLEL_DIFF_VIEW_TYPE
|
||||
: INLINE_DIFF_VIEW_TYPE;
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
// setup globals (needed for component to mount :/)
|
||||
window.mrTabs = {
|
||||
|
|
@ -125,104 +118,6 @@ describe('diffs/components/app', () => {
|
|||
wrapper.vm.$nextTick(done);
|
||||
});
|
||||
|
||||
describe('when the diff view type changes and it should load a single diff view style', () => {
|
||||
const noLinesDiff = {
|
||||
highlighted_diff_lines: [],
|
||||
parallel_diff_lines: [],
|
||||
};
|
||||
const parallelLinesDiff = {
|
||||
highlighted_diff_lines: [],
|
||||
parallel_diff_lines: ['line'],
|
||||
};
|
||||
const inlineLinesDiff = {
|
||||
highlighted_diff_lines: ['line'],
|
||||
parallel_diff_lines: [],
|
||||
};
|
||||
const fullDiff = {
|
||||
highlighted_diff_lines: ['line'],
|
||||
parallel_diff_lines: ['line'],
|
||||
};
|
||||
|
||||
function expectFetchToOccur({ vueInstance, done = () => {}, existingFiles = 1 } = {}) {
|
||||
vueInstance.$nextTick(() => {
|
||||
expect(vueInstance.diffFiles.length).toEqual(existingFiles);
|
||||
expect(vueInstance.fetchDiffFilesBatch).toHaveBeenCalled();
|
||||
|
||||
done();
|
||||
});
|
||||
}
|
||||
|
||||
it('fetches diffs if it has none', done => {
|
||||
wrapper.vm.isLatestVersion = () => false;
|
||||
|
||||
store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
|
||||
|
||||
expectFetchToOccur({ vueInstance: wrapper.vm, existingFiles: 0, done });
|
||||
});
|
||||
|
||||
it('fetches diffs if it has both view styles, but no lines in either', done => {
|
||||
wrapper.vm.isLatestVersion = () => false;
|
||||
|
||||
store.state.diffs.diffFiles.push(noLinesDiff);
|
||||
store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
|
||||
|
||||
expectFetchToOccur({ vueInstance: wrapper.vm, done });
|
||||
});
|
||||
|
||||
it('fetches diffs if it only has inline view style', done => {
|
||||
wrapper.vm.isLatestVersion = () => false;
|
||||
|
||||
store.state.diffs.diffFiles.push(inlineLinesDiff);
|
||||
store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
|
||||
|
||||
expectFetchToOccur({ vueInstance: wrapper.vm, done });
|
||||
});
|
||||
|
||||
it('fetches diffs if it only has parallel view style', done => {
|
||||
wrapper.vm.isLatestVersion = () => false;
|
||||
|
||||
store.state.diffs.diffFiles.push(parallelLinesDiff);
|
||||
store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
|
||||
|
||||
expectFetchToOccur({ vueInstance: wrapper.vm, done });
|
||||
});
|
||||
|
||||
it('fetches batch diffs if it has none', done => {
|
||||
store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
|
||||
|
||||
expectFetchToOccur({ vueInstance: wrapper.vm, existingFiles: 0, done });
|
||||
});
|
||||
|
||||
it('fetches batch diffs if it has both view styles, but no lines in either', done => {
|
||||
store.state.diffs.diffFiles.push(noLinesDiff);
|
||||
store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
|
||||
|
||||
expectFetchToOccur({ vueInstance: wrapper.vm, done });
|
||||
});
|
||||
|
||||
it('fetches batch diffs if it only has inline view style', done => {
|
||||
store.state.diffs.diffFiles.push(inlineLinesDiff);
|
||||
store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
|
||||
|
||||
expectFetchToOccur({ vueInstance: wrapper.vm, done });
|
||||
});
|
||||
|
||||
it('fetches batch diffs if it only has parallel view style', done => {
|
||||
store.state.diffs.diffFiles.push(parallelLinesDiff);
|
||||
store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
|
||||
|
||||
expectFetchToOccur({ vueInstance: wrapper.vm, done });
|
||||
});
|
||||
|
||||
it('does not fetch batch diffs if it has already fetched both styles of diff', () => {
|
||||
store.state.diffs.diffFiles.push(fullDiff);
|
||||
store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
|
||||
|
||||
expect(wrapper.vm.diffFiles.length).toEqual(1);
|
||||
expect(wrapper.vm.fetchDiffFilesBatch).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('calls batch methods if diffsBatchLoad is enabled, and not latest version', done => {
|
||||
expect(wrapper.vm.diffFilesLength).toEqual(0);
|
||||
wrapper.vm.isLatestVersion = () => false;
|
||||
|
|
|
|||
|
|
@ -12,7 +12,6 @@ import DiffDiscussions from '~/diffs/components/diff_discussions.vue';
|
|||
import { IMAGE_DIFF_POSITION_TYPE } from '~/diffs/constants';
|
||||
import diffFileMockData from '../mock_data/diff_file';
|
||||
import { diffViewerModes } from '~/ide/constants';
|
||||
import { diffLines } from '~/diffs/store/getters';
|
||||
import DiffView from '~/diffs/components/diff_view.vue';
|
||||
|
||||
const localVue = createLocalVue();
|
||||
|
|
@ -74,7 +73,7 @@ describe('DiffContent', () => {
|
|||
isInlineView: isInlineViewGetterMock,
|
||||
isParallelView: isParallelViewGetterMock,
|
||||
getCommentFormForDiffFile: getCommentFormForDiffFileGetterMock,
|
||||
diffLines,
|
||||
diffLines: () => () => [...diffFileMockData.parallel_diff_lines],
|
||||
},
|
||||
actions: {
|
||||
saveDiffDiscussion: saveDiffDiscussionMock,
|
||||
|
|
@ -122,11 +121,11 @@ describe('DiffContent', () => {
|
|||
expect(wrapper.find(ParallelDiffView).exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should render diff view if `unifiedDiffLines` & `unifiedDiffComponents` are true', () => {
|
||||
it('should render diff view if `unifiedDiffComponents` are true', () => {
|
||||
isParallelViewGetterMock.mockReturnValue(true);
|
||||
createComponent({
|
||||
props: { diffFile: textDiffFile },
|
||||
provide: { glFeatures: { unifiedDiffLines: true, unifiedDiffComponents: true } },
|
||||
provide: { glFeatures: { unifiedDiffComponents: true } },
|
||||
});
|
||||
|
||||
expect(wrapper.find(DiffView).exists()).toBe(true);
|
||||
|
|
|
|||
|
|
@ -5,18 +5,16 @@ import { getByText } from '@testing-library/dom';
|
|||
import { createStore } from '~/mr_notes/stores';
|
||||
import DiffExpansionCell from '~/diffs/components/diff_expansion_cell.vue';
|
||||
import { getPreviousLineIndex } from '~/diffs/store/utils';
|
||||
import { INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE } from '~/diffs/constants';
|
||||
import { INLINE_DIFF_VIEW_TYPE } from '~/diffs/constants';
|
||||
import diffFileMockData from '../mock_data/diff_file';
|
||||
|
||||
const EXPAND_UP_CLASS = '.js-unfold';
|
||||
const EXPAND_DOWN_CLASS = '.js-unfold-down';
|
||||
const lineSources = {
|
||||
[INLINE_DIFF_VIEW_TYPE]: 'highlighted_diff_lines',
|
||||
[PARALLEL_DIFF_VIEW_TYPE]: 'parallel_diff_lines',
|
||||
};
|
||||
const lineHandlers = {
|
||||
[INLINE_DIFF_VIEW_TYPE]: line => line,
|
||||
[PARALLEL_DIFF_VIEW_TYPE]: line => line.right || line.left,
|
||||
};
|
||||
|
||||
function makeLoadMoreLinesPayload({
|
||||
|
|
@ -126,7 +124,6 @@ describe('DiffExpansionCell', () => {
|
|||
describe('any row', () => {
|
||||
[
|
||||
{ diffViewType: INLINE_DIFF_VIEW_TYPE, lineIndex: 8, file: { parallel_diff_lines: [] } },
|
||||
{ diffViewType: PARALLEL_DIFF_VIEW_TYPE, lineIndex: 7, file: { highlighted_diff_lines: [] } },
|
||||
].forEach(({ diffViewType, file, lineIndex }) => {
|
||||
describe(`with diffViewType (${diffViewType})`, () => {
|
||||
beforeEach(() => {
|
||||
|
|
|
|||
|
|
@ -1235,10 +1235,6 @@ describe('DiffsStoreActions', () => {
|
|||
{ file: { file_path: 'path' }, data: [] },
|
||||
{ diffViewType: 'inline' },
|
||||
[
|
||||
{
|
||||
type: 'SET_HIDDEN_VIEW_DIFF_FILE_LINES',
|
||||
payload: { filePath: 'path', lines: ['test'] },
|
||||
},
|
||||
{
|
||||
type: 'SET_CURRENT_VIEW_DIFF_FILE_LINES',
|
||||
payload: { filePath: 'path', lines: ['test'] },
|
||||
|
|
@ -1258,10 +1254,6 @@ describe('DiffsStoreActions', () => {
|
|||
{ file: { file_path: 'path' }, data: [] },
|
||||
{ diffViewType: 'inline' },
|
||||
[
|
||||
{
|
||||
type: 'SET_HIDDEN_VIEW_DIFF_FILE_LINES',
|
||||
payload: { filePath: 'path', lines },
|
||||
},
|
||||
{
|
||||
type: 'SET_CURRENT_VIEW_DIFF_FILE_LINES',
|
||||
payload: { filePath: 'path', lines: lines.slice(0, 200) },
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import createState from '~/diffs/store/modules/diff_state';
|
||||
import mutations from '~/diffs/store/mutations';
|
||||
import * as types from '~/diffs/store/mutation_types';
|
||||
import { INLINE_DIFF_VIEW_TYPE } from '~/diffs/constants';
|
||||
import { INLINE_DIFF_VIEW_TYPE, INLINE_DIFF_LINES_KEY } from '~/diffs/constants';
|
||||
import diffFileMockData from '../mock_data/diff_file';
|
||||
import * as utils from '~/diffs/store/utils';
|
||||
|
||||
|
|
@ -74,7 +74,7 @@ describe('DiffsStoreMutations', () => {
|
|||
{
|
||||
content_sha: diffFileMockData.content_sha,
|
||||
file_hash: diffFileMockData.file_hash,
|
||||
highlighted_diff_lines: [],
|
||||
[INLINE_DIFF_LINES_KEY]: [],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
|
@ -84,11 +84,11 @@ describe('DiffsStoreMutations', () => {
|
|||
|
||||
mutations[types.SET_DIFF_DATA](state, diffMock);
|
||||
|
||||
expect(state.diffFiles[0].parallel_diff_lines).toBeUndefined();
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY]).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('SET_DIFFSET_DIFF_DATA_BATCH_DATA', () => {
|
||||
describe('SET_DIFF_DATA_BATCH_DATA', () => {
|
||||
it('should set diff data batch type properly', () => {
|
||||
const state = { diffFiles: [] };
|
||||
const diffMock = {
|
||||
|
|
@ -97,9 +97,6 @@ describe('DiffsStoreMutations', () => {
|
|||
|
||||
mutations[types.SET_DIFF_DATA_BATCH](state, diffMock);
|
||||
|
||||
const firstLine = state.diffFiles[0].parallel_diff_lines[0];
|
||||
|
||||
expect(firstLine.right.text).toBeUndefined();
|
||||
expect(state.diffFiles[0].renderIt).toEqual(true);
|
||||
expect(state.diffFiles[0].collapsed).toEqual(false);
|
||||
});
|
||||
|
|
@ -142,8 +139,7 @@ describe('DiffsStoreMutations', () => {
|
|||
};
|
||||
const diffFile = {
|
||||
file_hash: options.fileHash,
|
||||
highlighted_diff_lines: [],
|
||||
parallel_diff_lines: [],
|
||||
[INLINE_DIFF_LINES_KEY]: [],
|
||||
};
|
||||
const state = { diffFiles: [diffFile], diffViewType: 'viewType' };
|
||||
const lines = [{ old_line: 1, new_line: 1 }];
|
||||
|
|
@ -171,9 +167,7 @@ describe('DiffsStoreMutations', () => {
|
|||
);
|
||||
|
||||
expect(utils.addContextLines).toHaveBeenCalledWith({
|
||||
inlineLines: diffFile.highlighted_diff_lines,
|
||||
parallelLines: diffFile.parallel_diff_lines,
|
||||
diffViewType: 'viewType',
|
||||
inlineLines: diffFile[INLINE_DIFF_LINES_KEY],
|
||||
contextLines: options.contextLines,
|
||||
bottom: options.params.bottom,
|
||||
lineNumbers: options.lineNumbers,
|
||||
|
|
@ -225,19 +219,7 @@ describe('DiffsStoreMutations', () => {
|
|||
diffFiles: [
|
||||
{
|
||||
file_hash: 'ABC',
|
||||
parallel_diff_lines: [
|
||||
{
|
||||
left: {
|
||||
line_code: 'ABC_1',
|
||||
discussions: [],
|
||||
},
|
||||
right: {
|
||||
line_code: 'ABC_2',
|
||||
discussions: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
highlighted_diff_lines: [
|
||||
[INLINE_DIFF_LINES_KEY]: [
|
||||
{
|
||||
line_code: 'ABC_1',
|
||||
discussions: [],
|
||||
|
|
@ -267,12 +249,8 @@ describe('DiffsStoreMutations', () => {
|
|||
diffPositionByLineCode,
|
||||
});
|
||||
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions[0].id).toEqual(1);
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].right.discussions).toEqual([]);
|
||||
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toEqual(1);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions[0].id).toEqual(1);
|
||||
});
|
||||
|
||||
it('should not duplicate discussions on line', () => {
|
||||
|
|
@ -291,19 +269,7 @@ describe('DiffsStoreMutations', () => {
|
|||
diffFiles: [
|
||||
{
|
||||
file_hash: 'ABC',
|
||||
parallel_diff_lines: [
|
||||
{
|
||||
left: {
|
||||
line_code: 'ABC_1',
|
||||
discussions: [],
|
||||
},
|
||||
right: {
|
||||
line_code: 'ABC_2',
|
||||
discussions: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
highlighted_diff_lines: [
|
||||
[INLINE_DIFF_LINES_KEY]: [
|
||||
{
|
||||
line_code: 'ABC_1',
|
||||
discussions: [],
|
||||
|
|
@ -333,24 +299,16 @@ describe('DiffsStoreMutations', () => {
|
|||
diffPositionByLineCode,
|
||||
});
|
||||
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions[0].id).toEqual(1);
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].right.discussions).toEqual([]);
|
||||
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toEqual(1);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions[0].id).toEqual(1);
|
||||
|
||||
mutations[types.SET_LINE_DISCUSSIONS_FOR_FILE](state, {
|
||||
discussion,
|
||||
diffPositionByLineCode,
|
||||
});
|
||||
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions[0].id).toEqual(1);
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].right.discussions).toEqual([]);
|
||||
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toEqual(1);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions[0].id).toEqual(1);
|
||||
});
|
||||
|
||||
it('updates existing discussion', () => {
|
||||
|
|
@ -369,19 +327,7 @@ describe('DiffsStoreMutations', () => {
|
|||
diffFiles: [
|
||||
{
|
||||
file_hash: 'ABC',
|
||||
parallel_diff_lines: [
|
||||
{
|
||||
left: {
|
||||
line_code: 'ABC_1',
|
||||
discussions: [],
|
||||
},
|
||||
right: {
|
||||
line_code: 'ABC_2',
|
||||
discussions: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
highlighted_diff_lines: [
|
||||
[INLINE_DIFF_LINES_KEY]: [
|
||||
{
|
||||
line_code: 'ABC_1',
|
||||
discussions: [],
|
||||
|
|
@ -411,12 +357,8 @@ describe('DiffsStoreMutations', () => {
|
|||
diffPositionByLineCode,
|
||||
});
|
||||
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions[0].id).toEqual(1);
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].right.discussions).toEqual([]);
|
||||
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toEqual(1);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions[0].id).toEqual(1);
|
||||
|
||||
mutations[types.SET_LINE_DISCUSSIONS_FOR_FILE](state, {
|
||||
discussion: {
|
||||
|
|
@ -427,11 +369,8 @@ describe('DiffsStoreMutations', () => {
|
|||
diffPositionByLineCode,
|
||||
});
|
||||
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions[0].notes.length).toBe(1);
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].notes.length).toBe(1);
|
||||
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions[0].resolved).toBe(true);
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].resolved).toBe(true);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions[0].notes.length).toBe(1);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions[0].resolved).toBe(true);
|
||||
});
|
||||
|
||||
it('should not duplicate inline diff discussions', () => {
|
||||
|
|
@ -450,7 +389,7 @@ describe('DiffsStoreMutations', () => {
|
|||
diffFiles: [
|
||||
{
|
||||
file_hash: 'ABC',
|
||||
highlighted_diff_lines: [
|
||||
[INLINE_DIFF_LINES_KEY]: [
|
||||
{
|
||||
line_code: 'ABC_1',
|
||||
discussions: [
|
||||
|
|
@ -472,7 +411,6 @@ describe('DiffsStoreMutations', () => {
|
|||
discussions: [],
|
||||
},
|
||||
],
|
||||
parallel_diff_lines: [],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
|
@ -497,7 +435,7 @@ describe('DiffsStoreMutations', () => {
|
|||
diffPositionByLineCode,
|
||||
});
|
||||
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions.length).toBe(1);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions.length).toBe(1);
|
||||
});
|
||||
|
||||
it('should add legacy discussions to the given line', () => {
|
||||
|
|
@ -517,19 +455,7 @@ describe('DiffsStoreMutations', () => {
|
|||
diffFiles: [
|
||||
{
|
||||
file_hash: 'ABC',
|
||||
parallel_diff_lines: [
|
||||
{
|
||||
left: {
|
||||
line_code: 'ABC_1',
|
||||
discussions: [],
|
||||
},
|
||||
right: {
|
||||
line_code: 'ABC_1',
|
||||
discussions: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
highlighted_diff_lines: [
|
||||
[INLINE_DIFF_LINES_KEY]: [
|
||||
{
|
||||
line_code: 'ABC_1',
|
||||
discussions: [],
|
||||
|
|
@ -557,11 +483,8 @@ describe('DiffsStoreMutations', () => {
|
|||
diffPositionByLineCode,
|
||||
});
|
||||
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions[0].id).toEqual(1);
|
||||
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toEqual(1);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions.length).toEqual(1);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions[0].id).toEqual(1);
|
||||
});
|
||||
|
||||
it('should add discussions by line_codes and positions attributes', () => {
|
||||
|
|
@ -580,19 +503,7 @@ describe('DiffsStoreMutations', () => {
|
|||
diffFiles: [
|
||||
{
|
||||
file_hash: 'ABC',
|
||||
parallel_diff_lines: [
|
||||
{
|
||||
left: {
|
||||
line_code: 'ABC_1',
|
||||
discussions: [],
|
||||
},
|
||||
right: {
|
||||
line_code: 'ABC_1',
|
||||
discussions: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
highlighted_diff_lines: [
|
||||
[INLINE_DIFF_LINES_KEY]: [
|
||||
{
|
||||
line_code: 'ABC_1',
|
||||
discussions: [],
|
||||
|
|
@ -624,11 +535,8 @@ describe('DiffsStoreMutations', () => {
|
|||
diffPositionByLineCode,
|
||||
});
|
||||
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions).toHaveLength(1);
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions[0].id).toBe(1);
|
||||
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions).toHaveLength(1);
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toBe(1);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions).toHaveLength(1);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions[0].id).toBe(1);
|
||||
});
|
||||
|
||||
it('should add discussion to file', () => {
|
||||
|
|
@ -638,8 +546,7 @@ describe('DiffsStoreMutations', () => {
|
|||
{
|
||||
file_hash: 'ABC',
|
||||
discussions: [],
|
||||
parallel_diff_lines: [],
|
||||
highlighted_diff_lines: [],
|
||||
[INLINE_DIFF_LINES_KEY]: [],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
|
@ -668,30 +575,7 @@ describe('DiffsStoreMutations', () => {
|
|||
diffFiles: [
|
||||
{
|
||||
file_hash: 'ABC',
|
||||
parallel_diff_lines: [
|
||||
{
|
||||
left: {
|
||||
line_code: 'ABC_1',
|
||||
discussions: [
|
||||
{
|
||||
id: 1,
|
||||
line_code: 'ABC_1',
|
||||
notes: [],
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
line_code: 'ABC_1',
|
||||
notes: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
right: {
|
||||
line_code: 'ABC_1',
|
||||
discussions: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
highlighted_diff_lines: [
|
||||
[INLINE_DIFF_LINES_KEY]: [
|
||||
{
|
||||
line_code: 'ABC_1',
|
||||
discussions: [
|
||||
|
|
@ -717,8 +601,7 @@ describe('DiffsStoreMutations', () => {
|
|||
lineCode: 'ABC_1',
|
||||
});
|
||||
|
||||
expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions.length).toEqual(0);
|
||||
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions.length).toEqual(0);
|
||||
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions.length).toEqual(0);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@ -776,11 +659,7 @@ describe('DiffsStoreMutations', () => {
|
|||
it('sets hasForm on lines', () => {
|
||||
const file = {
|
||||
file_hash: 'hash',
|
||||
parallel_diff_lines: [
|
||||
{ left: { line_code: '123', hasForm: false }, right: {} },
|
||||
{ left: {}, right: { line_code: '124', hasForm: false } },
|
||||
],
|
||||
highlighted_diff_lines: [
|
||||
[INLINE_DIFF_LINES_KEY]: [
|
||||
{ line_code: '123', hasForm: false },
|
||||
{ line_code: '124', hasForm: false },
|
||||
],
|
||||
|
|
@ -795,11 +674,8 @@ describe('DiffsStoreMutations', () => {
|
|||
fileHash: 'hash',
|
||||
});
|
||||
|
||||
expect(file.highlighted_diff_lines[0].hasForm).toBe(true);
|
||||
expect(file.highlighted_diff_lines[1].hasForm).toBe(false);
|
||||
|
||||
expect(file.parallel_diff_lines[0].left.hasForm).toBe(true);
|
||||
expect(file.parallel_diff_lines[1].right.hasForm).toBe(false);
|
||||
expect(file[INLINE_DIFF_LINES_KEY][0].hasForm).toBe(true);
|
||||
expect(file[INLINE_DIFF_LINES_KEY][1].hasForm).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@ -885,8 +761,7 @@ describe('DiffsStoreMutations', () => {
|
|||
file_path: 'test',
|
||||
isLoadingFullFile: true,
|
||||
isShowingFullFile: false,
|
||||
highlighted_diff_lines: [],
|
||||
parallel_diff_lines: [],
|
||||
[INLINE_DIFF_LINES_KEY]: [],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
|
@ -903,8 +778,7 @@ describe('DiffsStoreMutations', () => {
|
|||
file_path: 'test',
|
||||
isLoadingFullFile: true,
|
||||
isShowingFullFile: false,
|
||||
highlighted_diff_lines: [],
|
||||
parallel_diff_lines: [],
|
||||
[INLINE_DIFF_LINES_KEY]: [],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
|
@ -927,80 +801,42 @@ describe('DiffsStoreMutations', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('SET_HIDDEN_VIEW_DIFF_FILE_LINES', () => {
|
||||
[
|
||||
{ current: 'highlighted', hidden: 'parallel', diffViewType: 'inline' },
|
||||
{ current: 'parallel', hidden: 'highlighted', diffViewType: 'parallel' },
|
||||
].forEach(({ current, hidden, diffViewType }) => {
|
||||
it(`sets the ${hidden} lines when diff view is ${diffViewType}`, () => {
|
||||
const file = { file_path: 'test', parallel_diff_lines: [], highlighted_diff_lines: [] };
|
||||
const state = {
|
||||
diffFiles: [file],
|
||||
diffViewType,
|
||||
};
|
||||
|
||||
mutations[types.SET_HIDDEN_VIEW_DIFF_FILE_LINES](state, {
|
||||
filePath: 'test',
|
||||
lines: ['test'],
|
||||
});
|
||||
|
||||
expect(file[`${current}_diff_lines`]).toEqual([]);
|
||||
expect(file[`${hidden}_diff_lines`]).toEqual(['test']);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('SET_CURRENT_VIEW_DIFF_FILE_LINES', () => {
|
||||
[
|
||||
{ current: 'highlighted', hidden: 'parallel', diffViewType: 'inline' },
|
||||
{ current: 'parallel', hidden: 'highlighted', diffViewType: 'parallel' },
|
||||
].forEach(({ current, hidden, diffViewType }) => {
|
||||
it(`sets the ${current} lines when diff view is ${diffViewType}`, () => {
|
||||
const file = { file_path: 'test', parallel_diff_lines: [], highlighted_diff_lines: [] };
|
||||
const state = {
|
||||
diffFiles: [file],
|
||||
diffViewType,
|
||||
};
|
||||
it(`sets the highlighted lines`, () => {
|
||||
const file = { file_path: 'test', [INLINE_DIFF_LINES_KEY]: [] };
|
||||
const state = {
|
||||
diffFiles: [file],
|
||||
};
|
||||
|
||||
mutations[types.SET_CURRENT_VIEW_DIFF_FILE_LINES](state, {
|
||||
filePath: 'test',
|
||||
lines: ['test'],
|
||||
});
|
||||
|
||||
expect(file[`${current}_diff_lines`]).toEqual(['test']);
|
||||
expect(file[`${hidden}_diff_lines`]).toEqual([]);
|
||||
mutations[types.SET_CURRENT_VIEW_DIFF_FILE_LINES](state, {
|
||||
filePath: 'test',
|
||||
lines: ['test'],
|
||||
});
|
||||
|
||||
expect(file[INLINE_DIFF_LINES_KEY]).toEqual(['test']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('ADD_CURRENT_VIEW_DIFF_FILE_LINES', () => {
|
||||
[
|
||||
{ current: 'highlighted', hidden: 'parallel', diffViewType: 'inline' },
|
||||
{ current: 'parallel', hidden: 'highlighted', diffViewType: 'parallel' },
|
||||
].forEach(({ current, hidden, diffViewType }) => {
|
||||
it(`pushes to ${current} lines when diff view is ${diffViewType}`, () => {
|
||||
const file = { file_path: 'test', parallel_diff_lines: [], highlighted_diff_lines: [] };
|
||||
const state = {
|
||||
diffFiles: [file],
|
||||
diffViewType,
|
||||
};
|
||||
it('pushes to inline lines', () => {
|
||||
const file = { file_path: 'test', [INLINE_DIFF_LINES_KEY]: [] };
|
||||
const state = {
|
||||
diffFiles: [file],
|
||||
};
|
||||
|
||||
mutations[types.ADD_CURRENT_VIEW_DIFF_FILE_LINES](state, {
|
||||
filePath: 'test',
|
||||
line: 'test',
|
||||
});
|
||||
|
||||
expect(file[`${current}_diff_lines`]).toEqual(['test']);
|
||||
expect(file[`${hidden}_diff_lines`]).toEqual([]);
|
||||
|
||||
mutations[types.ADD_CURRENT_VIEW_DIFF_FILE_LINES](state, {
|
||||
filePath: 'test',
|
||||
line: 'test2',
|
||||
});
|
||||
|
||||
expect(file[`${current}_diff_lines`]).toEqual(['test', 'test2']);
|
||||
expect(file[`${hidden}_diff_lines`]).toEqual([]);
|
||||
mutations[types.ADD_CURRENT_VIEW_DIFF_FILE_LINES](state, {
|
||||
filePath: 'test',
|
||||
line: 'test',
|
||||
});
|
||||
|
||||
expect(file[INLINE_DIFF_LINES_KEY]).toEqual(['test']);
|
||||
|
||||
mutations[types.ADD_CURRENT_VIEW_DIFF_FILE_LINES](state, {
|
||||
filePath: 'test',
|
||||
line: 'test2',
|
||||
});
|
||||
|
||||
expect(file[INLINE_DIFF_LINES_KEY]).toEqual(['test', 'test2']);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ import {
|
|||
OLD_LINE_TYPE,
|
||||
MATCH_LINE_TYPE,
|
||||
INLINE_DIFF_VIEW_TYPE,
|
||||
PARALLEL_DIFF_VIEW_TYPE,
|
||||
INLINE_DIFF_LINES_KEY,
|
||||
} from '~/diffs/constants';
|
||||
import { MERGE_REQUEST_NOTEABLE_TYPE } from '~/notes/constants';
|
||||
import diffFileMockData from '../mock_data/diff_file';
|
||||
|
|
@ -20,14 +20,6 @@ import { noteableDataMock } from '../../notes/mock_data';
|
|||
const getDiffFileMock = () => JSON.parse(JSON.stringify(diffFileMockData));
|
||||
const getDiffMetadataMock = () => JSON.parse(JSON.stringify(diffMetadata));
|
||||
|
||||
function extractLinesFromFile(file) {
|
||||
const unpackedParallel = file.parallel_diff_lines
|
||||
.flatMap(({ left, right }) => [left, right])
|
||||
.filter(Boolean);
|
||||
|
||||
return [...file.highlighted_diff_lines, ...unpackedParallel];
|
||||
}
|
||||
|
||||
describe('DiffsStoreUtils', () => {
|
||||
describe('findDiffFile', () => {
|
||||
const files = [{ file_hash: 1, name: 'one' }];
|
||||
|
|
@ -45,7 +37,7 @@ describe('DiffsStoreUtils', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('findIndexInInlineLines and findIndexInParallelLines', () => {
|
||||
describe('findIndexInInlineLines', () => {
|
||||
const expectSet = (method, lines, invalidLines) => {
|
||||
expect(method(lines, { oldLineNumber: 3, newLineNumber: 5 })).toEqual(4);
|
||||
expect(method(invalidLines || lines, { oldLineNumber: 32, newLineNumber: 53 })).toEqual(-1);
|
||||
|
|
@ -53,44 +45,26 @@ describe('DiffsStoreUtils', () => {
|
|||
|
||||
describe('findIndexInInlineLines', () => {
|
||||
it('should return correct index for given line numbers', () => {
|
||||
expectSet(utils.findIndexInInlineLines, getDiffFileMock().highlighted_diff_lines);
|
||||
});
|
||||
});
|
||||
|
||||
describe('findIndexInParallelLines', () => {
|
||||
it('should return correct index for given line numbers', () => {
|
||||
expectSet(utils.findIndexInParallelLines, getDiffFileMock().parallel_diff_lines, []);
|
||||
expectSet(utils.findIndexInInlineLines, getDiffFileMock()[INLINE_DIFF_LINES_KEY]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getPreviousLineIndex', () => {
|
||||
[
|
||||
{ diffViewType: INLINE_DIFF_VIEW_TYPE, file: { parallel_diff_lines: [] } },
|
||||
{ diffViewType: PARALLEL_DIFF_VIEW_TYPE, file: { highlighted_diff_lines: [] } },
|
||||
].forEach(({ diffViewType, file }) => {
|
||||
describe(`with diffViewType (${diffViewType}) in split diffs`, () => {
|
||||
let diffFile;
|
||||
describe(`with diffViewType (inline) in split diffs`, () => {
|
||||
let diffFile;
|
||||
|
||||
beforeEach(() => {
|
||||
diffFile = { ...clone(diffFileMockData), ...file };
|
||||
});
|
||||
beforeEach(() => {
|
||||
diffFile = { ...clone(diffFileMockData) };
|
||||
});
|
||||
|
||||
it('should return the correct previous line number', () => {
|
||||
const emptyLines =
|
||||
diffViewType === INLINE_DIFF_VIEW_TYPE
|
||||
? diffFile.parallel_diff_lines
|
||||
: diffFile.highlighted_diff_lines;
|
||||
|
||||
// This expectation asserts that we cannot possibly be using the opposite view type lines in the next expectation
|
||||
expect(emptyLines.length).toBe(0);
|
||||
expect(
|
||||
utils.getPreviousLineIndex(diffViewType, diffFile, {
|
||||
oldLineNumber: 3,
|
||||
newLineNumber: 5,
|
||||
}),
|
||||
).toBe(4);
|
||||
});
|
||||
it('should return the correct previous line number', () => {
|
||||
expect(
|
||||
utils.getPreviousLineIndex(INLINE_DIFF_VIEW_TYPE, diffFile, {
|
||||
oldLineNumber: 3,
|
||||
newLineNumber: 5,
|
||||
}),
|
||||
).toBe(4);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -100,82 +74,50 @@ describe('DiffsStoreUtils', () => {
|
|||
const diffFile = getDiffFileMock();
|
||||
const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 };
|
||||
const inlineIndex = utils.findIndexInInlineLines(
|
||||
diffFile.highlighted_diff_lines,
|
||||
diffFile[INLINE_DIFF_LINES_KEY],
|
||||
lineNumbers,
|
||||
);
|
||||
const parallelIndex = utils.findIndexInParallelLines(
|
||||
diffFile.parallel_diff_lines,
|
||||
lineNumbers,
|
||||
);
|
||||
const atInlineIndex = diffFile.highlighted_diff_lines[inlineIndex];
|
||||
const atParallelIndex = diffFile.parallel_diff_lines[parallelIndex];
|
||||
const atInlineIndex = diffFile[INLINE_DIFF_LINES_KEY][inlineIndex];
|
||||
|
||||
utils.removeMatchLine(diffFile, lineNumbers, false);
|
||||
|
||||
expect(diffFile.highlighted_diff_lines[inlineIndex]).not.toEqual(atInlineIndex);
|
||||
expect(diffFile.parallel_diff_lines[parallelIndex]).not.toEqual(atParallelIndex);
|
||||
expect(diffFile[INLINE_DIFF_LINES_KEY][inlineIndex]).not.toEqual(atInlineIndex);
|
||||
|
||||
utils.removeMatchLine(diffFile, lineNumbers, true);
|
||||
|
||||
expect(diffFile.highlighted_diff_lines[inlineIndex + 1]).not.toEqual(atInlineIndex);
|
||||
expect(diffFile.parallel_diff_lines[parallelIndex + 1]).not.toEqual(atParallelIndex);
|
||||
expect(diffFile[INLINE_DIFF_LINES_KEY][inlineIndex + 1]).not.toEqual(atInlineIndex);
|
||||
});
|
||||
});
|
||||
|
||||
describe('addContextLines', () => {
|
||||
[INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE].forEach(diffViewType => {
|
||||
it(`should add context lines for ${diffViewType}`, () => {
|
||||
const diffFile = getDiffFileMock();
|
||||
const inlineLines = diffFile.highlighted_diff_lines;
|
||||
const parallelLines = diffFile.parallel_diff_lines;
|
||||
const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 };
|
||||
const contextLines = [{ lineNumber: 42, line_code: '123' }];
|
||||
const options = { inlineLines, parallelLines, contextLines, lineNumbers, diffViewType };
|
||||
const inlineIndex = utils.findIndexInInlineLines(inlineLines, lineNumbers);
|
||||
const parallelIndex = utils.findIndexInParallelLines(parallelLines, lineNumbers);
|
||||
const normalizedParallelLine = {
|
||||
left: options.contextLines[0],
|
||||
right: options.contextLines[0],
|
||||
line_code: '123',
|
||||
};
|
||||
it(`should add context lines`, () => {
|
||||
const diffFile = getDiffFileMock();
|
||||
const inlineLines = diffFile[INLINE_DIFF_LINES_KEY];
|
||||
const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 };
|
||||
const contextLines = [{ lineNumber: 42, line_code: '123' }];
|
||||
const options = { inlineLines, contextLines, lineNumbers };
|
||||
const inlineIndex = utils.findIndexInInlineLines(inlineLines, lineNumbers);
|
||||
|
||||
utils.addContextLines(options);
|
||||
utils.addContextLines(options);
|
||||
|
||||
if (diffViewType === INLINE_DIFF_VIEW_TYPE) {
|
||||
expect(inlineLines[inlineIndex]).toEqual(contextLines[0]);
|
||||
} else {
|
||||
expect(parallelLines[parallelIndex]).toEqual(normalizedParallelLine);
|
||||
}
|
||||
});
|
||||
expect(inlineLines[inlineIndex]).toEqual(contextLines[0]);
|
||||
});
|
||||
|
||||
it(`should add context lines properly with bottom parameter for ${diffViewType}`, () => {
|
||||
const diffFile = getDiffFileMock();
|
||||
const inlineLines = diffFile.highlighted_diff_lines;
|
||||
const parallelLines = diffFile.parallel_diff_lines;
|
||||
const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 };
|
||||
const contextLines = [{ lineNumber: 42, line_code: '123' }];
|
||||
const options = {
|
||||
inlineLines,
|
||||
parallelLines,
|
||||
contextLines,
|
||||
lineNumbers,
|
||||
bottom: true,
|
||||
diffViewType,
|
||||
};
|
||||
const normalizedParallelLine = {
|
||||
left: options.contextLines[0],
|
||||
right: options.contextLines[0],
|
||||
line_code: '123',
|
||||
};
|
||||
it(`should add context lines properly with bottom parameter`, () => {
|
||||
const diffFile = getDiffFileMock();
|
||||
const inlineLines = diffFile[INLINE_DIFF_LINES_KEY];
|
||||
const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 };
|
||||
const contextLines = [{ lineNumber: 42, line_code: '123' }];
|
||||
const options = {
|
||||
inlineLines,
|
||||
contextLines,
|
||||
lineNumbers,
|
||||
bottom: true,
|
||||
};
|
||||
|
||||
utils.addContextLines(options);
|
||||
utils.addContextLines(options);
|
||||
|
||||
if (diffViewType === INLINE_DIFF_VIEW_TYPE) {
|
||||
expect(inlineLines[inlineLines.length - 1]).toEqual(contextLines[0]);
|
||||
} else {
|
||||
expect(parallelLines[parallelLines.length - 1]).toEqual(normalizedParallelLine);
|
||||
}
|
||||
});
|
||||
expect(inlineLines[inlineLines.length - 1]).toEqual(contextLines[0]);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@ -195,7 +137,6 @@ describe('DiffsStoreUtils', () => {
|
|||
new_line: 3,
|
||||
old_line: 1,
|
||||
},
|
||||
diffViewType: PARALLEL_DIFF_VIEW_TYPE,
|
||||
linePosition: LINE_POSITION_LEFT,
|
||||
lineRange: { start_line_code: 'abc_1_1', end_line_code: 'abc_2_2' },
|
||||
};
|
||||
|
|
@ -256,7 +197,6 @@ describe('DiffsStoreUtils', () => {
|
|||
new_line: 3,
|
||||
old_line: 1,
|
||||
},
|
||||
diffViewType: PARALLEL_DIFF_VIEW_TYPE,
|
||||
linePosition: LINE_POSITION_LEFT,
|
||||
};
|
||||
|
||||
|
|
@ -424,20 +364,6 @@ describe('DiffsStoreUtils', () => {
|
|||
expect(preppedLine).toEqual(correctLine);
|
||||
});
|
||||
|
||||
it('returns a nested object with "left" and "right" lines + the line code for `parallel` lines', () => {
|
||||
preppedLine = utils.prepareLineForRenamedFile({
|
||||
diffViewType: PARALLEL_DIFF_VIEW_TYPE,
|
||||
line: sourceLine,
|
||||
index: lineIndex,
|
||||
diffFile,
|
||||
});
|
||||
|
||||
expect(Object.keys(preppedLine)).toEqual(['left', 'right', 'line_code']);
|
||||
expect(preppedLine.left).toEqual(correctLine);
|
||||
expect(preppedLine.right).toEqual(correctLine);
|
||||
expect(preppedLine.line_code).toEqual(correctLine.line_code);
|
||||
});
|
||||
|
||||
it.each`
|
||||
brokenSymlink
|
||||
${false}
|
||||
|
|
@ -474,13 +400,13 @@ describe('DiffsStoreUtils', () => {
|
|||
|
||||
preparedDiff = { diff_files: [mock] };
|
||||
splitInlineDiff = {
|
||||
diff_files: [{ ...mock, parallel_diff_lines: undefined }],
|
||||
diff_files: [{ ...mock }],
|
||||
};
|
||||
splitParallelDiff = {
|
||||
diff_files: [{ ...mock, highlighted_diff_lines: undefined }],
|
||||
diff_files: [{ ...mock, [INLINE_DIFF_LINES_KEY]: undefined }],
|
||||
};
|
||||
completedDiff = {
|
||||
diff_files: [{ ...mock, highlighted_diff_lines: undefined }],
|
||||
diff_files: [{ ...mock, [INLINE_DIFF_LINES_KEY]: undefined }],
|
||||
};
|
||||
|
||||
preparedDiff.diff_files = utils.prepareDiffData(preparedDiff);
|
||||
|
|
@ -490,19 +416,7 @@ describe('DiffsStoreUtils', () => {
|
|||
});
|
||||
|
||||
it('sets the renderIt and collapsed attribute on files', () => {
|
||||
const firstParallelDiffLine = preparedDiff.diff_files[0].parallel_diff_lines[2];
|
||||
|
||||
expect(firstParallelDiffLine.left.discussions.length).toBe(0);
|
||||
expect(firstParallelDiffLine.left).not.toHaveAttr('text');
|
||||
expect(firstParallelDiffLine.right.discussions.length).toBe(0);
|
||||
expect(firstParallelDiffLine.right).not.toHaveAttr('text');
|
||||
const firstParallelChar = firstParallelDiffLine.right.rich_text.charAt(0);
|
||||
|
||||
expect(firstParallelChar).not.toBe(' ');
|
||||
expect(firstParallelChar).not.toBe('+');
|
||||
expect(firstParallelChar).not.toBe('-');
|
||||
|
||||
const checkLine = preparedDiff.diff_files[0].highlighted_diff_lines[0];
|
||||
const checkLine = preparedDiff.diff_files[0][INLINE_DIFF_LINES_KEY][0];
|
||||
|
||||
expect(checkLine.discussions.length).toBe(0);
|
||||
expect(checkLine).not.toHaveAttr('text');
|
||||
|
|
@ -516,29 +430,14 @@ describe('DiffsStoreUtils', () => {
|
|||
expect(preparedDiff.diff_files[0].collapsed).toBeFalsy();
|
||||
});
|
||||
|
||||
it('adds line_code to all lines', () => {
|
||||
expect(
|
||||
preparedDiff.diff_files[0].parallel_diff_lines.filter(line => !line.line_code),
|
||||
).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('uses right line code if left has none', () => {
|
||||
const firstLine = preparedDiff.diff_files[0].parallel_diff_lines[0];
|
||||
|
||||
expect(firstLine.line_code).toEqual(firstLine.right.line_code);
|
||||
});
|
||||
|
||||
it('guarantees an empty array for both diff styles', () => {
|
||||
expect(splitInlineDiff.diff_files[0].parallel_diff_lines.length).toEqual(0);
|
||||
expect(splitInlineDiff.diff_files[0].highlighted_diff_lines.length).toBeGreaterThan(0);
|
||||
expect(splitParallelDiff.diff_files[0].parallel_diff_lines.length).toBeGreaterThan(0);
|
||||
expect(splitParallelDiff.diff_files[0].highlighted_diff_lines.length).toEqual(0);
|
||||
expect(splitInlineDiff.diff_files[0][INLINE_DIFF_LINES_KEY].length).toBeGreaterThan(0);
|
||||
expect(splitParallelDiff.diff_files[0][INLINE_DIFF_LINES_KEY].length).toEqual(0);
|
||||
});
|
||||
|
||||
it('merges existing diff files with newly loaded diff files to ensure split diffs are eventually completed', () => {
|
||||
expect(completedDiff.diff_files.length).toEqual(1);
|
||||
expect(completedDiff.diff_files[0].parallel_diff_lines.length).toBeGreaterThan(0);
|
||||
expect(completedDiff.diff_files[0].highlighted_diff_lines.length).toBeGreaterThan(0);
|
||||
expect(completedDiff.diff_files[0][INLINE_DIFF_LINES_KEY].length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('leaves files in the existing state', () => {
|
||||
|
|
@ -555,11 +454,11 @@ describe('DiffsStoreUtils', () => {
|
|||
|
||||
it('completes an existing split diff without overwriting existing diffs', () => {
|
||||
// The current state has a file that has only loaded inline lines
|
||||
const priorFiles = [{ ...mock, parallel_diff_lines: [] }];
|
||||
const priorFiles = [{ ...mock }];
|
||||
// The next (batch) load loads two files: the other half of that file, and a new file
|
||||
const fakeBatch = [
|
||||
{ ...mock, highlighted_diff_lines: undefined },
|
||||
{ ...mock, highlighted_diff_lines: undefined, content_sha: 'ABC', file_hash: 'DEF' },
|
||||
{ ...mock, [INLINE_DIFF_LINES_KEY]: undefined },
|
||||
{ ...mock, [INLINE_DIFF_LINES_KEY]: undefined, content_sha: 'ABC', file_hash: 'DEF' },
|
||||
];
|
||||
const updatedFilesList = utils.prepareDiffData({ diff_files: fakeBatch }, priorFiles);
|
||||
|
||||
|
|
@ -584,7 +483,7 @@ describe('DiffsStoreUtils', () => {
|
|||
...splitInlineDiff.diff_files,
|
||||
...splitParallelDiff.diff_files,
|
||||
...completedDiff.diff_files,
|
||||
].flatMap(file => extractLinesFromFile(file));
|
||||
].flatMap(file => [...file[INLINE_DIFF_LINES_KEY]]);
|
||||
|
||||
lines.forEach(line => {
|
||||
expect(line.commentsDisabled).toBe(false);
|
||||
|
|
@ -608,8 +507,7 @@ describe('DiffsStoreUtils', () => {
|
|||
});
|
||||
|
||||
it('guarantees an empty array of lines for both diff styles', () => {
|
||||
expect(preparedDiffFiles[0].parallel_diff_lines.length).toEqual(0);
|
||||
expect(preparedDiffFiles[0].highlighted_diff_lines.length).toEqual(0);
|
||||
expect(preparedDiffFiles[0][INLINE_DIFF_LINES_KEY].length).toEqual(0);
|
||||
});
|
||||
|
||||
it('leaves files in the existing state', () => {
|
||||
|
|
@ -647,8 +545,7 @@ describe('DiffsStoreUtils', () => {
|
|||
fileMock,
|
||||
{
|
||||
...metaMock.diff_files[0],
|
||||
highlighted_diff_lines: [],
|
||||
parallel_diff_lines: [],
|
||||
[INLINE_DIFF_LINES_KEY]: [],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
|
@ -1217,7 +1114,7 @@ describe('DiffsStoreUtils', () => {
|
|||
it('converts inline diff lines to parallel diff lines', () => {
|
||||
const file = getDiffFileMock();
|
||||
|
||||
expect(utils.parallelizeDiffLines(file.highlighted_diff_lines)).toEqual(
|
||||
expect(utils.parallelizeDiffLines(file[INLINE_DIFF_LINES_KEY])).toEqual(
|
||||
file.parallel_diff_lines,
|
||||
);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -10,11 +10,19 @@ describe('Getters TestReports Store', () => {
|
|||
const defaultState = {
|
||||
testReports,
|
||||
selectedSuiteIndex: 0,
|
||||
pageInfo: {
|
||||
page: 1,
|
||||
perPage: 2,
|
||||
},
|
||||
};
|
||||
|
||||
const emptyState = {
|
||||
testReports: {},
|
||||
selectedSuite: null,
|
||||
pageInfo: {
|
||||
page: 1,
|
||||
perPage: 2,
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
|
|
@ -59,15 +67,17 @@ describe('Getters TestReports Store', () => {
|
|||
});
|
||||
|
||||
describe('getSuiteTests', () => {
|
||||
it('should return the test cases inside the suite', () => {
|
||||
it('should return the current page of test cases inside the suite', () => {
|
||||
setupState();
|
||||
|
||||
const cases = getters.getSuiteTests(state);
|
||||
const expected = testReports.test_suites[0].test_cases.map(x => ({
|
||||
...x,
|
||||
formattedTime: formattedTime(x.execution_time),
|
||||
icon: iconForTestStatus(x.status),
|
||||
}));
|
||||
const expected = testReports.test_suites[0].test_cases
|
||||
.map(x => ({
|
||||
...x,
|
||||
formattedTime: formattedTime(x.execution_time),
|
||||
icon: iconForTestStatus(x.status),
|
||||
}))
|
||||
.slice(0, state.pageInfo.perPage);
|
||||
|
||||
expect(cases).toEqual(expected);
|
||||
});
|
||||
|
|
@ -78,4 +88,15 @@ describe('Getters TestReports Store', () => {
|
|||
expect(getters.getSuiteTests(state)).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getSuiteTestCount', () => {
|
||||
it('should return the total number of test cases', () => {
|
||||
setupState();
|
||||
|
||||
const testCount = getters.getSuiteTestCount(state);
|
||||
const expected = testReports.test_suites[0].test_cases.length;
|
||||
|
||||
expect(testCount).toEqual(expected);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -12,12 +12,25 @@ describe('Mutations TestReports Store', () => {
|
|||
testReports: {},
|
||||
selectedSuite: null,
|
||||
isLoading: false,
|
||||
pageInfo: {
|
||||
page: 1,
|
||||
perPage: 2,
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
mockState = { ...defaultState };
|
||||
});
|
||||
|
||||
describe('set page', () => {
|
||||
it('should set the current page to display', () => {
|
||||
const pageToDisplay = 3;
|
||||
mutations[types.SET_PAGE](mockState, pageToDisplay);
|
||||
|
||||
expect(mockState.pageInfo.page).toEqual(pageToDisplay);
|
||||
});
|
||||
});
|
||||
|
||||
describe('set suite', () => {
|
||||
it('should set the suite at the given index', () => {
|
||||
mockState.testReports = testReports;
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import Vuex from 'vuex';
|
||||
import { shallowMount, createLocalVue } from '@vue/test-utils';
|
||||
import { getJSONFixture } from 'helpers/fixtures';
|
||||
import { GlButton, GlFriendlyWrap } from '@gitlab/ui';
|
||||
import { GlButton, GlFriendlyWrap, GlPagination } from '@gitlab/ui';
|
||||
import SuiteTable from '~/pipelines/components/test_reports/test_suite_table.vue';
|
||||
import * as getters from '~/pipelines/stores/test_reports/getters';
|
||||
import { TestStatus } from '~/pipelines/constants';
|
||||
|
|
@ -26,13 +26,17 @@ describe('Test reports suite table', () => {
|
|||
const findCaseRowAtIndex = index => wrapper.findAll('.js-case-row').at(index);
|
||||
const findIconForRow = (row, status) => row.find(`.ci-status-icon-${status}`);
|
||||
|
||||
const createComponent = (suite = testSuite) => {
|
||||
const createComponent = (suite = testSuite, perPage = 20) => {
|
||||
store = new Vuex.Store({
|
||||
state: {
|
||||
testReports: {
|
||||
test_suites: [suite],
|
||||
},
|
||||
selectedSuiteIndex: 0,
|
||||
pageInfo: {
|
||||
page: 1,
|
||||
perPage,
|
||||
},
|
||||
},
|
||||
getters,
|
||||
});
|
||||
|
|
@ -86,4 +90,20 @@ describe('Test reports suite table', () => {
|
|||
expect(button.attributes('data-clipboard-text')).toBe(file);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when a test suite has more test cases than the pagination size', () => {
|
||||
const perPage = 2;
|
||||
|
||||
beforeEach(() => {
|
||||
createComponent(testSuite, perPage);
|
||||
});
|
||||
|
||||
it('renders one page of test cases', () => {
|
||||
expect(allCaseRows().length).toBe(perPage);
|
||||
});
|
||||
|
||||
it('renders a pagination component', () => {
|
||||
expect(wrapper.find(GlPagination).exists()).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -358,30 +358,4 @@ RSpec.describe DiffHelper do
|
|||
expect(diff_file_path_text(diff_file, max: 10)).to eq("...open.rb")
|
||||
end
|
||||
end
|
||||
|
||||
describe 'unified_diff_lines_view_type' do
|
||||
before do
|
||||
controller.params[:view] = 'parallel'
|
||||
end
|
||||
|
||||
describe 'unified diffs enabled' do
|
||||
before do
|
||||
stub_feature_flags(unified_diff_lines: true)
|
||||
end
|
||||
|
||||
it 'returns inline view' do
|
||||
expect(helper.unified_diff_lines_view_type(project)).to eq 'inline'
|
||||
end
|
||||
end
|
||||
|
||||
describe 'unified diffs disabled' do
|
||||
before do
|
||||
stub_feature_flags(unified_diff_lines: false)
|
||||
end
|
||||
|
||||
it 'returns parallel view' do
|
||||
expect(helper.unified_diff_lines_view_type(project)).to eq :parallel
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue