Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-02-22 15:10:48 +00:00
parent 980d813e90
commit 232e7582b0
97 changed files with 1255 additions and 741 deletions

View File

@ -111,4 +111,4 @@ include:
- local: .gitlab/ci/dast.gitlab-ci.yml
- local: .gitlab/ci/workhorse.gitlab-ci.yml
- local: .gitlab/ci/graphql.gitlab-ci.yml
- local: .gitlab/ci/verify-lockfile.gitlab-ci.yml
- remote: 'https://gitlab.com/gitlab-org/frontend/untamper-my-lockfile/-/raw/main/.gitlab-ci-template.yml'

View File

@ -1136,7 +1136,6 @@
- ".gitlab/ci/test-metadata.gitlab-ci.yml"
- "scripts/rspec_helpers.sh"
- <<: *if-dot-com-ee-schedule
changes: *code-backstage-patterns
###################
# workhorse rules #

View File

@ -1,11 +0,0 @@
verify_lockfile:
stage: test
image: registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.7.2-git-2.29-lfs-2.9-node-14.15-yarn-1.22-graphicsmagick-1.3.34
needs: []
rules:
- changes:
- yarn.lock
script:
- npm config set @dappelt:registry https://gitlab.com/api/v4/projects/22564149/packages/npm/
- npx lockfile-lint@4.3.7 --path yarn.lock --allowed-hosts yarn --validate-https
- npx @dappelt/untamper-my-lockfile --lockfile yarn.lock

View File

@ -216,8 +216,6 @@ linters:
- 'app/views/projects/merge_requests/conflicts/_commit_stats.html.haml'
- 'app/views/projects/merge_requests/conflicts/_file_actions.html.haml'
- 'app/views/projects/merge_requests/conflicts/_submit_form.html.haml'
- 'app/views/projects/merge_requests/conflicts/components/_diff_file_editor.html.haml'
- 'app/views/projects/merge_requests/conflicts/components/_inline_conflict_lines.html.haml'
- 'app/views/projects/merge_requests/conflicts/show.html.haml'
- 'app/views/projects/merge_requests/creations/_diffs.html.haml'
- 'app/views/projects/merge_requests/creations/_new_compare.html.haml'

View File

@ -1977,7 +1977,6 @@ Gitlab/NamespacedClass:
- 'app/validators/untrusted_regexp_validator.rb'
- 'app/validators/nested_attributes_duplicates_validator.rb'
- 'app/validators/x509_certificate_credentials_validator.rb'
- 'app/validators/zoom_url_validator.rb'
- 'app/workers/admin_email_worker.rb'
- 'app/workers/approve_blocked_pending_approval_users_worker.rb'
- 'app/workers/archive_trace_worker.rb'

View File

@ -253,7 +253,7 @@ export default {
:class="{ 'gl-bg-blue-50': isDiscussionActive }"
@error="$emit('update-note-error', $event)"
/>
<li v-show="isReplyPlaceholderVisible" class="reply-wrapper">
<li v-show="isReplyPlaceholderVisible" class="reply-wrapper discussion-reply-holder">
<reply-placeholder
v-if="!isFormVisible"
class="qa-discussion-reply"

View File

@ -23,9 +23,7 @@ import {
ALERT_OVERFLOW_HIDDEN,
ALERT_MERGE_CONFLICT,
ALERT_COLLAPSED_FILES,
EVT_VIEW_FILE_BY_FILE,
} from '../constants';
import eventHub from '../event_hub';
import { reviewStatuses } from '../utils/file_reviews';
import { diffsApp } from '../utils/performance';
@ -332,16 +330,11 @@ export default {
subscribeToEvents() {
notesEventHub.$once('fetchDiffData', this.fetchData);
notesEventHub.$on('refetchDiffData', this.refetchDiffData);
eventHub.$on(EVT_VIEW_FILE_BY_FILE, this.fileByFileListener);
},
unsubscribeFromEvents() {
eventHub.$off(EVT_VIEW_FILE_BY_FILE, this.fileByFileListener);
notesEventHub.$off('refetchDiffData', this.refetchDiffData);
notesEventHub.$off('fetchDiffData', this.fetchData);
},
fileByFileListener({ setting } = {}) {
this.setFileByFile({ fileByFile: setting });
},
navigateToDiffFileNumber(number) {
this.navigateToDiffFileIndex(number - 1);
},

View File

@ -1,9 +1,6 @@
<script>
import { GlButtonGroup, GlButton, GlDropdown, GlFormCheckbox } from '@gitlab/ui';
import { mapActions, mapGetters, mapState } from 'vuex';
import { EVT_VIEW_FILE_BY_FILE } from '../constants';
import eventHub from '../event_hub';
import { SETTINGS_DROPDOWN } from '../i18n';
export default {
@ -24,9 +21,10 @@ export default {
'setParallelDiffViewType',
'setRenderTreeList',
'setShowWhitespace',
'setFileByFile',
]),
toggleFileByFile() {
eventHub.$emit(EVT_VIEW_FILE_BY_FILE, { setting: !this.viewDiffsFileByFile });
this.setFileByFile({ fileByFile: !this.viewDiffsFileByFile });
},
},
};

View File

@ -103,7 +103,6 @@ export const RENAMED_DIFF_TRANSITIONS = {
// MR Diffs known events
export const EVT_EXPAND_ALL_FILES = 'mr:diffs:expandAllFiles';
export const EVT_VIEW_FILE_BY_FILE = 'mr:diffs:preference:fileByFile';
export const EVT_PERF_MARK_FILE_TREE_START = 'mr:diffs:perf:fileTreeStart';
export const EVT_PERF_MARK_FILE_TREE_END = 'mr:diffs:perf:fileTreeEnd';
export const EVT_PERF_MARK_DIFF_FILES_START = 'mr:diffs:perf:filesStart';

View File

@ -741,12 +741,7 @@ export const navigateToDiffFileIndex = ({ commit, state }, index) => {
export const setFileByFile = ({ commit }, { fileByFile }) => {
const fileViewMode = fileByFile ? DIFF_VIEW_FILE_BY_FILE : DIFF_VIEW_ALL_FILES;
commit(types.SET_FILE_BY_FILE, fileByFile);
Cookies.set(DIFF_FILE_BY_FILE_COOKIE_NAME, fileViewMode);
historyPushState(
mergeUrlParams({ [DIFF_FILE_BY_FILE_COOKIE_NAME]: fileViewMode }, window.location.href),
);
};
export function reviewFile({ commit, state }, { file, reviewed = true }) {

View File

@ -1,22 +1,13 @@
import Cookies from 'js-cookie';
import { getParameterValues } from '~/lib/utils/url_utility';
import { DIFF_FILE_BY_FILE_COOKIE_NAME, DIFF_VIEW_FILE_BY_FILE } from '../constants';
export function fileByFile(pref = false) {
const search = getParameterValues(DIFF_FILE_BY_FILE_COOKIE_NAME)?.[0];
const cookie = Cookies.get(DIFF_FILE_BY_FILE_COOKIE_NAME);
let viewFileByFile = pref;
// use the cookie first, if it exists
if (cookie) {
viewFileByFile = cookie === DIFF_VIEW_FILE_BY_FILE;
return cookie === DIFF_VIEW_FILE_BY_FILE;
}
// the search parameter of the URL should override, if it exists
if (search) {
viewFileByFile = search === DIFF_VIEW_FILE_BY_FILE;
}
return viewFileByFile;
return pref;
}

View File

@ -7,6 +7,7 @@ export const STATUSES = {
FINISHED: 'finished',
FAILED: 'failed',
SCHEDULED: 'scheduled',
CREATED: 'created',
STARTED: 'started',
NONE: 'none',
SCHEDULING: 'scheduling',
@ -23,6 +24,11 @@ const STATUS_MAP = {
text: __('Failed'),
textClass: 'text-danger',
},
[STATUSES.CREATED]: {
icon: 'pending',
text: __('Scheduled'),
textClass: 'text-warning',
},
[STATUSES.SCHEDULED]: {
icon: 'pending',
text: __('Scheduled'),

View File

@ -33,11 +33,6 @@ export default {
type: String,
required: true,
},
canCreateGroup: {
type: Boolean,
required: false,
default: false,
},
},
data() {
@ -176,7 +171,6 @@ export default {
:key="group.id"
:group="group"
:available-namespaces="availableNamespaces"
:can-create-group="canCreateGroup"
@update-target-namespace="updateTargetNamespace(group.id, $event)"
@update-new-name="updateNewName(group.id, $event)"
@import-group="importGroup(group.id)"

View File

@ -24,11 +24,6 @@ export default {
type: Array,
required: true,
},
canCreateGroup: {
type: Boolean,
required: false,
default: false,
},
},
computed: {
isDisabled() {
@ -45,19 +40,18 @@ export default {
text: namespace.full_path,
}));
if (!this.canCreateGroup) {
return { data: availableNamespacesData };
const select2Config = {
data: [{ id: '', text: s__('BulkImport|No parent') }],
};
if (availableNamespacesData.length) {
select2Config.data.push({
text: s__('BulkImport|Existing groups'),
children: availableNamespacesData,
});
}
return {
data: [
{ id: '', text: s__('BulkImport|No parent') },
{
text: s__('BulkImport|Existing groups'),
children: availableNamespacesData,
},
],
};
return select2Config;
},
},
methods: {

View File

@ -15,52 +15,71 @@ export const clientTypenames = {
BulkImportPageInfo: 'ClientBulkImportPageInfo',
};
export function createResolvers({ endpoints }) {
export function createResolvers({ endpoints, sourceUrl, GroupsManager = SourceGroupsManager }) {
let statusPoller;
let sourceGroupManager;
const getGroupsManager = (client) => {
if (!sourceGroupManager) {
sourceGroupManager = new GroupsManager({ client, sourceUrl });
}
return sourceGroupManager;
};
return {
Query: {
async bulkImportSourceGroups(_, vars, { client }) {
const {
data: { availableNamespaces },
} = await client.query({ query: availableNamespacesQuery });
if (!statusPoller) {
statusPoller = new StatusPoller({
client,
groupManager: getGroupsManager(client),
pollPath: endpoints.jobs,
});
statusPoller.startPolling();
}
return axios
.get(endpoints.status, {
const groupsManager = getGroupsManager(client);
return Promise.all([
axios.get(endpoints.status, {
params: {
page: vars.page,
per_page: vars.perPage,
filter: vars.filter,
},
})
.then(({ headers, data }) => {
}),
client.query({ query: availableNamespacesQuery }),
]).then(
([
{ headers, data },
{
data: { availableNamespaces },
},
]) => {
const pagination = parseIntPagination(normalizeHeaders(headers));
return {
__typename: clientTypenames.BulkImportSourceGroupConnection,
nodes: data.importable_data.map((group) => ({
__typename: clientTypenames.BulkImportSourceGroup,
...group,
status: STATUSES.NONE,
import_target: {
new_name: group.full_path,
target_namespace: availableNamespaces[0].full_path,
},
})),
nodes: data.importable_data.map((group) => {
const cachedImportState = groupsManager.getImportStateFromStorageByGroupId(
group.id,
);
return {
__typename: clientTypenames.BulkImportSourceGroup,
...group,
status: cachedImportState?.status ?? STATUSES.NONE,
import_target: cachedImportState?.importTarget ?? {
new_name: group.full_path,
target_namespace: availableNamespaces[0]?.full_path ?? '',
},
};
}),
pageInfo: {
__typename: clientTypenames.BulkImportPageInfo,
...pagination,
},
};
});
},
);
},
availableNamespaces: () =>
@ -73,21 +92,21 @@ export function createResolvers({ endpoints }) {
},
Mutation: {
setTargetNamespace(_, { targetNamespace, sourceGroupId }, { client }) {
new SourceGroupsManager({ client }).updateById(sourceGroupId, (sourceGroup) => {
getGroupsManager(client).updateById(sourceGroupId, (sourceGroup) => {
// eslint-disable-next-line no-param-reassign
sourceGroup.import_target.target_namespace = targetNamespace;
});
},
setNewName(_, { newName, sourceGroupId }, { client }) {
new SourceGroupsManager({ client }).updateById(sourceGroupId, (sourceGroup) => {
getGroupsManager(client).updateById(sourceGroupId, (sourceGroup) => {
// eslint-disable-next-line no-param-reassign
sourceGroup.import_target.new_name = newName;
});
},
async importGroup(_, { sourceGroupId }, { client }) {
const groupManager = new SourceGroupsManager({ client });
const groupManager = getGroupsManager(client);
const group = groupManager.findById(sourceGroupId);
groupManager.setImportStatus(group, STATUSES.SCHEDULING);
try {
@ -101,8 +120,7 @@ export function createResolvers({ endpoints }) {
},
],
});
groupManager.setImportStatus(group, STATUSES.STARTED);
SourceGroupsManager.attachImportId(group, response.data.id);
groupManager.startImport({ group, importId: response.data.id });
} catch (e) {
createFlash({
message: s__('BulkImport|Importing the group failed'),
@ -116,5 +134,5 @@ export function createResolvers({ endpoints }) {
};
}
export const createApolloClient = ({ endpoints }) =>
createDefaultClient(createResolvers({ endpoints }), { assumeImmutableResults: true });
export const createApolloClient = ({ sourceUrl, endpoints }) =>
createDefaultClient(createResolvers({ sourceUrl, endpoints }), { assumeImmutableResults: true });

View File

@ -1,5 +1,7 @@
import { defaultDataIdFromObject } from 'apollo-cache-inmemory';
import produce from 'immer';
import { debounce, merge } from 'lodash';
import { STATUSES } from '../../../constants';
import ImportSourceGroupFragment from '../fragments/bulk_import_source_group_item.fragment.graphql';
function extractTypeConditionFromFragment(fragment) {
@ -13,15 +15,24 @@ function generateGroupId(id) {
});
}
export class SourceGroupsManager {
static importMap = new Map();
export const KEY = 'gl-bulk-imports-import-state';
export const DEBOUNCE_INTERVAL = 200;
static attachImportId(group, importId) {
SourceGroupsManager.importMap.set(importId, group.id);
export class SourceGroupsManager {
constructor({ client, sourceUrl, storage = window.localStorage }) {
this.client = client;
this.sourceUrl = sourceUrl;
this.storage = storage;
this.importStates = this.loadImportStatesFromStorage();
}
constructor({ client }) {
this.client = client;
loadImportStatesFromStorage() {
try {
return JSON.parse(this.storage.getItem(KEY)) ?? {};
} catch {
return {};
}
}
findById(id) {
@ -42,8 +53,48 @@ export class SourceGroupsManager {
this.update(group, fn);
}
findByImportId(importId) {
return this.findById(SourceGroupsManager.importMap.get(importId));
saveImportState(importId, group) {
this.importStates[this.getStorageKey(importId)] = {
id: group.id,
importTarget: group.import_target,
status: group.status,
};
this.saveImportStatesToStorage();
}
getImportStateFromStorage(importId) {
return this.importStates[this.getStorageKey(importId)];
}
getImportStateFromStorageByGroupId(groupId) {
const PREFIX = this.getStorageKey('');
const [, importState] =
Object.entries(this.importStates).find(
([key, group]) => key.startsWith(PREFIX) && group.id === groupId,
) ?? [];
return importState;
}
getStorageKey(importId) {
return `${this.sourceUrl}|${importId}`;
}
saveImportStatesToStorage = debounce(() => {
try {
// storage might be changed in another tab, so fetch first
this.storage.setItem(
KEY,
JSON.stringify(merge({}, this.loadImportStatesFromStorage(), this.importStates)),
);
} catch {
// empty catch intentional: storage might be unavailable or full
}
}, DEBOUNCE_INTERVAL);
startImport({ group, importId }) {
this.setImportStatus(group, STATUSES.CREATED);
this.saveImportState(importId, group);
}
setImportStatus(group, status) {
@ -52,4 +103,22 @@ export class SourceGroupsManager {
sourceGroup.status = status;
});
}
setImportStatusByImportId(importId, status) {
const importState = this.getImportStateFromStorage(importId);
if (!importState) {
return;
}
if (importState.status !== status) {
importState.status = status;
}
const group = this.findById(importState.id);
if (group?.id) {
this.setImportStatus(group, status);
}
this.saveImportStatesToStorage();
}
}

View File

@ -3,12 +3,9 @@ import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import Poll from '~/lib/utils/poll';
import { s__ } from '~/locale';
import { SourceGroupsManager } from './source_groups_manager';
export class StatusPoller {
constructor({ client, pollPath }) {
this.client = client;
constructor({ groupManager, pollPath }) {
this.eTagPoll = new Poll({
resource: {
fetchJobs: () => axios.get(pollPath),
@ -29,7 +26,7 @@ export class StatusPoller {
}
});
this.groupManager = new SourceGroupsManager({ client });
this.groupManager = groupManager;
}
startPolling() {
@ -38,10 +35,7 @@ export class StatusPoller {
async updateImportsStatuses(importStatuses) {
importStatuses.forEach(({ id, status_name: statusName }) => {
const group = this.groupManager.findByImportId(id);
if (group.id) {
this.groupManager.setImportStatus(group, statusName);
}
this.groupManager.setImportStatusByImportId(id, statusName);
});
}
}

View File

@ -21,6 +21,7 @@ export function mountImportGroupsApp(mountElement) {
} = mountElement.dataset;
const apolloProvider = new VueApollo({
defaultClient: createApolloClient({
sourceUrl,
endpoints: {
status: statusPath,
availableNamespaces: availableNamespacesPath,

View File

@ -1,115 +0,0 @@
// This is a true violation of @gitlab/no-runtime-template-compiler, as it relies on
// app/views/projects/merge_requests/conflicts/components/_diff_file_editor.html.haml
// for its template.
/* eslint-disable no-param-reassign, @gitlab/no-runtime-template-compiler */
import { debounce } from 'lodash';
import Vue from 'vue';
import { deprecatedCreateFlash as flash } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { __ } from '~/locale';
((global) => {
global.mergeConflicts = global.mergeConflicts || {};
global.mergeConflicts.diffFileEditor = Vue.extend({
props: {
file: {
type: Object,
required: true,
},
onCancelDiscardConfirmation: {
type: Function,
required: true,
},
onAcceptDiscardConfirmation: {
type: Function,
required: true,
},
},
data() {
return {
saved: false,
fileLoaded: false,
originalContent: '',
};
},
computed: {
classObject() {
return {
saved: this.saved,
};
},
},
watch: {
'file.showEditor': function showEditorWatcher(val) {
this.resetEditorContent();
if (!val || this.fileLoaded) {
return;
}
this.loadEditor();
},
},
mounted() {
if (this.file.loadEditor) {
this.loadEditor();
}
},
methods: {
loadEditor() {
const EditorPromise = import(/* webpackChunkName: 'EditorLite' */ '~/editor/editor_lite');
const DataPromise = axios.get(this.file.content_path);
Promise.all([EditorPromise, DataPromise])
.then(
([
{ default: EditorLite },
{
data: { content, new_path: path },
},
]) => {
const contentEl = this.$el.querySelector('.editor');
this.originalContent = content;
this.fileLoaded = true;
this.editor = new EditorLite().createInstance({
el: contentEl,
blobPath: path,
blobContent: content,
});
this.editor.onDidChangeModelContent(
debounce(this.saveDiffResolution.bind(this), 250),
);
},
)
.catch(() => {
flash(__('An error occurred while loading the file'));
});
},
saveDiffResolution() {
this.saved = true;
// This would probably be better placed in the data provider
/* eslint-disable vue/no-mutating-props */
this.file.content = this.editor.getValue();
this.file.resolveEditChanged = this.file.content !== this.originalContent;
this.file.promptDiscardConfirmation = false;
/* eslint-enable vue/no-mutating-props */
},
resetEditorContent() {
if (this.fileLoaded) {
this.editor.setValue(this.originalContent);
}
},
cancelDiscardConfirmation(file) {
this.onCancelDiscardConfirmation(file);
},
acceptDiscardConfirmation(file) {
this.onAcceptDiscardConfirmation(file);
},
},
});
})(window.gl || (window.gl = {}));

View File

@ -0,0 +1,128 @@
<script>
import { debounce } from 'lodash';
import { deprecatedCreateFlash as flash } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { __ } from '~/locale';
export default {
props: {
file: {
type: Object,
required: true,
},
onCancelDiscardConfirmation: {
type: Function,
required: true,
},
onAcceptDiscardConfirmation: {
type: Function,
required: true,
},
},
data() {
return {
saved: false,
fileLoaded: false,
originalContent: '',
};
},
computed: {
classObject() {
return {
saved: this.saved,
};
},
},
watch: {
'file.showEditor': function showEditorWatcher(val) {
this.resetEditorContent();
if (!val || this.fileLoaded) {
return;
}
this.loadEditor();
},
},
mounted() {
if (this.file.loadEditor) {
this.loadEditor();
}
},
methods: {
loadEditor() {
const EditorPromise = import(/* webpackChunkName: 'EditorLite' */ '~/editor/editor_lite');
const DataPromise = axios.get(this.file.content_path);
Promise.all([EditorPromise, DataPromise])
.then(
([
{ default: EditorLite },
{
data: { content, new_path: path },
},
]) => {
const contentEl = this.$el.querySelector('.editor');
this.originalContent = content;
this.fileLoaded = true;
this.editor = new EditorLite().createInstance({
el: contentEl,
blobPath: path,
blobContent: content,
});
this.editor.onDidChangeModelContent(debounce(this.saveDiffResolution.bind(this), 250));
},
)
.catch(() => {
flash(__('An error occurred while loading the file'));
});
},
saveDiffResolution() {
this.saved = true;
// This would probably be better placed in the data provider
/* eslint-disable vue/no-mutating-props */
this.file.content = this.editor.getValue();
this.file.resolveEditChanged = this.file.content !== this.originalContent;
this.file.promptDiscardConfirmation = false;
/* eslint-enable vue/no-mutating-props */
},
resetEditorContent() {
if (this.fileLoaded) {
this.editor.setValue(this.originalContent);
}
},
cancelDiscardConfirmation(file) {
this.onCancelDiscardConfirmation(file);
},
acceptDiscardConfirmation(file) {
this.onAcceptDiscardConfirmation(file);
},
},
};
</script>
<template>
<div v-show="file.showEditor" class="diff-editor-wrap">
<div v-if="file.promptDiscardConfirmation" class="discard-changes-alert-wrap">
<div class="discard-changes-alert">
{{ __('Are you sure you want to discard your changes?') }}
<div class="discard-actions">
<button
class="btn btn-sm btn-danger-secondary gl-button"
@click="acceptDiscardConfirmation(file)"
>
{{ __('Discard changes') }}
</button>
<button class="btn btn-default btn-sm gl-button" @click="cancelDiscardConfirmation(file)">
{{ __('Cancel') }}
</button>
</div>
</div>
</div>
<div :class="classObject" class="editor-wrap">
<div class="editor" style="height: 350px" data-editor-loading="true"></div>
</div>
</div>
</template>

View File

@ -1,22 +0,0 @@
// This is a true violation of @gitlab/no-runtime-template-compiler, as it relies on
// app/views/projects/merge_requests/conflicts/components/_inline_conflict_lines.html.haml
// for its template.
/* eslint-disable no-param-reassign, @gitlab/no-runtime-template-compiler */
import Vue from 'vue';
import actionsMixin from '../mixins/line_conflict_actions';
import utilsMixin from '../mixins/line_conflict_utils';
((global) => {
global.mergeConflicts = global.mergeConflicts || {};
global.mergeConflicts.inlineConflictLines = Vue.extend({
mixins: [utilsMixin, actionsMixin],
props: {
file: {
type: Object,
required: true,
},
},
});
})(window.gl || (window.gl = {}));

View File

@ -0,0 +1,47 @@
<script>
import { GlSafeHtmlDirective as SafeHtml } from '@gitlab/ui';
import actionsMixin from '../mixins/line_conflict_actions';
import utilsMixin from '../mixins/line_conflict_utils';
export default {
directives: {
SafeHtml,
},
mixins: [utilsMixin, actionsMixin],
props: {
file: {
type: Object,
required: true,
},
},
};
</script>
<template>
<table class="diff-wrap-lines code code-commit js-syntax-highlight">
<tr
v-for="line in file.inlineLines"
:key="(line.isHeader ? line.id : line.new_line) + line.richText"
class="line_holder diff-inline"
>
<template v-if="line.isHeader">
<td :class="lineCssClass(line)" class="diff-line-num header"></td>
<td :class="lineCssClass(line)" class="diff-line-num header"></td>
<td :class="lineCssClass(line)" class="line_content header">
<strong>{{ line.richText }}</strong>
<button class="btn" @click="handleSelected(file, line.id, line.section)">
{{ line.buttonTitle }}
</button>
</td>
</template>
<template v-else>
<td :class="lineCssClass(line)" class="diff-line-num new_line">
<a>{{ line.new_line }}</a>
</td>
<td :class="lineCssClass(line)" class="diff-line-num old_line">
<a>{{ line.old_line }}</a>
</td>
<td v-safe-html="line.richText" :class="lineCssClass(line)" class="line_content"></td>
</template>
</tr>
</table>
</template>

View File

@ -1,37 +0,0 @@
/* eslint-disable no-param-reassign */
import Vue from 'vue';
import actionsMixin from '../mixins/line_conflict_actions';
import utilsMixin from '../mixins/line_conflict_utils';
((global) => {
global.mergeConflicts = global.mergeConflicts || {};
global.mergeConflicts.parallelConflictLines = Vue.extend({
mixins: [utilsMixin, actionsMixin],
props: {
file: {
type: Object,
required: true,
},
},
// This is a true violation of @gitlab/no-runtime-template-compiler, as it
// has a template string.
// eslint-disable-next-line @gitlab/no-runtime-template-compiler
template: `
<table class="diff-wrap-lines code js-syntax-highlight">
<tr class="line_holder parallel" v-for="section in file.parallelLines">
<template v-for="line in section">
<td class="diff-line-num header" :class="lineCssClass(line)" v-if="line.isHeader"></td>
<td class="line_content header" :class="lineCssClass(line)" v-if="line.isHeader">
<strong>{{line.richText}}</strong>
<button class="btn" @click="handleSelected(file, line.id, line.section)">{{line.buttonTitle}}</button>
</td>
<td class="diff-line-num old_line" :class="lineCssClass(line)" v-if="!line.isHeader">{{line.lineNumber}}</td>
<td class="line_content parallel" :class="lineCssClass(line)" v-if="!line.isHeader" v-html="line.richText"></td>
</template>
</tr>
</table>
`,
});
})(window.gl || (window.gl = {}));

View File

@ -0,0 +1,47 @@
<script>
import { GlSafeHtmlDirective as SafeHtml } from '@gitlab/ui';
import actionsMixin from '../mixins/line_conflict_actions';
import utilsMixin from '../mixins/line_conflict_utils';
export default {
directives: {
SafeHtml,
},
mixins: [utilsMixin, actionsMixin],
props: {
file: {
type: Object,
required: true,
},
},
};
</script>
<template>
<!-- Unfortunately there isn't a good key for these sections -->
<!-- eslint-disable vue/require-v-for-key -->
<table class="diff-wrap-lines code js-syntax-highlight">
<tr v-for="section in file.parallelLines" class="line_holder parallel">
<template v-for="line in section">
<template v-if="line.isHeader">
<td class="diff-line-num header" :class="lineCssClass(line)"></td>
<td class="line_content header" :class="lineCssClass(line)">
<strong>{{ line.richText }}</strong>
<button class="btn" @click="handleSelected(file, line.id, line.section)">
{{ line.buttonTitle }}
</button>
</td>
</template>
<template v-else>
<td class="diff-line-num old_line" :class="lineCssClass(line)">
{{ line.lineNumber }}
</td>
<td
v-safe-html="line.richText"
class="line_content parallel"
:class="lineCssClass(line)"
></td>
</template>
</template>
</tr>
</table>
</template>

View File

@ -10,10 +10,10 @@ import { deprecatedCreateFlash as createFlash } from '../flash';
import initIssuableSidebar from '../init_issuable_sidebar';
import './merge_conflict_store';
import syntaxHighlight from '../syntax_highlight';
import DiffFileEditor from './components/diff_file_editor.vue';
import InlineConflictLines from './components/inline_conflict_lines.vue';
import ParallelConflictLines from './components/parallel_conflict_lines.vue';
import MergeConflictsService from './merge_conflict_service';
import './components/diff_file_editor';
import './components/inline_conflict_lines';
import './components/parallel_conflict_lines';
export default function initMergeConflicts() {
const INTERACTIVE_RESOLVE_MODE = 'interactive';
@ -30,9 +30,9 @@ export default function initMergeConflicts() {
el: '#conflicts',
components: {
FileIcon,
'diff-file-editor': gl.mergeConflicts.diffFileEditor,
'inline-conflict-lines': gl.mergeConflicts.inlineConflictLines,
'parallel-conflict-lines': gl.mergeConflicts.parallelConflictLines,
DiffFileEditor,
InlineConflictLines,
ParallelConflictLines,
},
data: mergeConflictsStore.state,
computed: {

View File

@ -7,6 +7,12 @@ export default {
GlLink,
GlSprintf,
},
inject: {
upgradePath: {
from: 'upgradePath',
default: '#',
},
},
i18n: {
UPGRADE_CTA,
},
@ -17,7 +23,7 @@ export default {
<span>
<gl-sprintf :message="$options.i18n.UPGRADE_CTA">
<template #link="{ content }">
<gl-link target="_blank" href="https://about.gitlab.com/pricing/">
<gl-link target="_blank" :href="upgradePath">
{{ content }}
</gl-link>
</template>

View File

@ -14,13 +14,14 @@ export const initStaticSecurityConfiguration = (el) => {
defaultClient: createDefaultClient(),
});
const { projectPath } = el.dataset;
const { projectPath, upgradePath } = el.dataset;
return new Vue({
el,
apolloProvider,
provide: {
projectPath,
upgradePath,
},
render(createElement) {
return createElement(SecurityConfigurationApp);

View File

@ -1,9 +1,11 @@
import { omitBy, isUndefined } from 'lodash';
const standardContext = { ...window.gl?.snowplowStandardContext };
export const STANDARD_CONTEXT = {
schema: 'iglu:com.gitlab/gitlab_standard/jsonschema/1-0-3',
schema: standardContext.schema,
data: {
environment: process.env.NODE_ENV,
...(standardContext.data || {}),
source: 'gitlab-javascript',
},
};

View File

@ -20,6 +20,12 @@ export default {
},
},
watch: {
value() {
$(this.$refs.dropdownInput).val(this.value).trigger('change');
},
},
mounted() {
loadCSSFile(gon.select2_css_path)
.then(() => {

View File

@ -6,6 +6,7 @@ class Projects::CompareController < Projects::ApplicationController
include DiffForPath
include DiffHelper
include RendersCommits
include CompareHelper
# Authorize
before_action :require_non_empty_project
@ -37,16 +38,18 @@ class Projects::CompareController < Projects::ApplicationController
end
def create
if params[:from].blank? || params[:to].blank?
from_to_vars = {
from: params[:from].presence,
to: params[:to].presence,
from_project_id: params[:from_project_id].presence
}
if from_to_vars[:from].blank? || from_to_vars[:to].blank?
flash[:alert] = "You must select a Source and a Target revision"
from_to_vars = {
from: params[:from].presence,
to: params[:to].presence
}
redirect_to project_compare_index_path(@project, from_to_vars)
redirect_to project_compare_index_path(source_project, from_to_vars)
else
redirect_to project_compare_path(@project,
params[:from], params[:to])
redirect_to project_compare_path(source_project, from_to_vars)
end
end
@ -73,13 +76,34 @@ class Projects::CompareController < Projects::ApplicationController
return if valid.all?
flash[:alert] = "Invalid branch name"
redirect_to project_compare_index_path(@project)
redirect_to project_compare_index_path(source_project)
end
# target == start_ref == from
def target_project
strong_memoize(:target_project) do
next source_project unless params.key?(:from_project_id)
next source_project unless Feature.enabled?(:compare_repo_dropdown, source_project, default_enabled: :yaml)
next source_project if params[:from_project_id].to_i == source_project.id
target_project = target_projects(source_project).find_by_id(params[:from_project_id])
# Just ignore the field if it points at a non-existent or hidden project
next source_project unless target_project && can?(current_user, :download_code, target_project)
target_project
end
end
# source == head_ref == to
def source_project
project
end
def compare
return @compare if defined?(@compare)
@compare = CompareService.new(@project, head_ref).execute(@project, start_ref)
@compare = CompareService.new(source_project, head_ref).execute(target_project, start_ref)
end
def start_ref
@ -102,9 +126,9 @@ class Projects::CompareController < Projects::ApplicationController
def define_environment
if compare
environment_params = @repository.branch_exists?(head_ref) ? { ref: head_ref } : { commit: compare.commit }
environment_params = source_project.repository.branch_exists?(head_ref) ? { ref: head_ref } : { commit: compare.commit }
environment_params[:find_latest] = true
@environment = EnvironmentsFinder.new(@project, current_user, environment_params).execute.last
@environment = EnvironmentsFinder.new(source_project, current_user, environment_params).execute.last
end
end
@ -114,8 +138,8 @@ class Projects::CompareController < Projects::ApplicationController
# rubocop: disable CodeReuse/ActiveRecord
def merge_request
@merge_request ||= MergeRequestsFinder.new(current_user, project_id: @project.id).execute.opened
.find_by(source_project: @project, source_branch: head_ref, target_branch: start_ref)
@merge_request ||= MergeRequestsFinder.new(current_user, project_id: target_project.id).execute.opened
.find_by(source_project: source_project, source_branch: head_ref, target_branch: start_ref)
end
# rubocop: enable CodeReuse/ActiveRecord
end

View File

@ -47,6 +47,7 @@ class IssuableFinder
NEGATABLE_PARAMS_HELPER_KEYS = %i[project_id scope status include_subgroups].freeze
attr_accessor :current_user, :params
attr_reader :original_params
attr_writer :parent
delegate(*%i[assignee milestones], to: :params)
@ -87,7 +88,7 @@ class IssuableFinder
end
def valid_params
@valid_params ||= scalar_params + [array_params.merge(not: {})]
@valid_params ||= scalar_params + [array_params.merge(or: {}, not: {})]
end
end
@ -101,6 +102,7 @@ class IssuableFinder
def initialize(current_user, params = {})
@current_user = current_user
@original_params = params
@params = params_class.new(params, current_user, klass)
end
@ -142,7 +144,7 @@ class IssuableFinder
end
def should_filter_negated_args?
return false unless Feature.enabled?(:not_issuable_queries, params.group || params.project, default_enabled: true)
return false unless not_filters_enabled?
# API endpoints send in `nil` values so we test if there are any non-nil
not_params.present? && not_params.values.any?
@ -150,7 +152,6 @@ class IssuableFinder
# Negates all params found in `negatable_params`
def filter_negated_items(items)
items = by_negated_author(items)
items = by_negated_assignee(items)
items = by_negated_label(items)
items = by_negated_milestone(items)
@ -372,31 +373,14 @@ class IssuableFinder
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def by_author(items)
if params.author
items.where(author_id: params.author.id)
elsif params.no_author?
items.where(author_id: nil)
elsif params.author_id? || params.author_username? # author not found
items.none
else
items
end
Issuables::AuthorFilter.new(
items,
params: original_params,
or_filters_enabled: or_filters_enabled?,
not_filters_enabled: not_filters_enabled?
).filter
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def by_negated_author(items)
if not_params.author
items.where.not(author_id: not_params.author.id)
elsif not_params.author_id? || not_params.author_username? # author not found
items.none
else
items
end
end
# rubocop: enable CodeReuse/ActiveRecord
def by_assignee(items)
if params.filter_by_no_assignee?
@ -514,4 +498,20 @@ class IssuableFinder
def by_non_archived(items)
params[:non_archived].present? ? items.non_archived : items
end
def or_filters_enabled?
strong_memoize(:or_filters_enabled) do
Feature.enabled?(:or_issuable_queries, feature_flag_scope, default_enabled: :yaml)
end
end
def not_filters_enabled?
strong_memoize(:not_filters_enabled) do
Feature.enabled?(:not_issuable_queries, feature_flag_scope, default_enabled: :yaml)
end
end
def feature_flag_scope
params.group || params.project
end
end

View File

@ -27,19 +27,6 @@ class IssuableFinder
params.present?
end
def author_id?
params[:author_id].present? && params[:author_id] != NONE
end
def author_username?
params[:author_username].present? && params[:author_username] != NONE
end
def no_author?
# author_id takes precedence over author_username
params[:author_id] == NONE || params[:author_username] == NONE
end
def filter_by_no_assignee?
params[:assignee_id].to_s.downcase == FILTER_NONE
end
@ -169,20 +156,6 @@ class IssuableFinder
end
end
# rubocop: disable CodeReuse/ActiveRecord
def author
strong_memoize(:author) do
if author_id?
User.find_by(id: params[:author_id])
elsif author_username?
User.find_by_username(params[:author_username])
else
nil
end
end
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def assignees
strong_memoize(:assignees) do

View File

@ -0,0 +1,41 @@
# frozen_string_literal: true
module Issuables
class AuthorFilter < BaseFilter
def filter
filtered = by_author(issuables)
filtered = by_author_union(filtered)
by_negated_author(filtered)
end
private
def by_author(issuables)
if params[:author_id].present?
issuables.authored(params[:author_id])
elsif params[:author_username].present?
issuables.authored(User.by_username(params[:author_username]))
else
issuables
end
end
def by_author_union(issuables)
return issuables unless or_filters_enabled? && or_params&.fetch(:author_username).present?
issuables.authored(User.by_username(or_params[:author_username]))
end
def by_negated_author(issuables)
return issuables unless not_filters_enabled? && not_params.present?
if not_params[:author_id].present?
issuables.not_authored(not_params[:author_id])
elsif not_params[:author_username].present?
issuables.not_authored(User.by_username(not_params[:author_username]))
else
issuables
end
end
end
end

View File

@ -0,0 +1,36 @@
# frozen_string_literal: true
module Issuables
class BaseFilter
attr_reader :issuables, :params
def initialize(issuables, params:, or_filters_enabled: false, not_filters_enabled: false)
@issuables = issuables
@params = params
@or_filters_enabled = or_filters_enabled
@not_filters_enabled = not_filters_enabled
end
def filter
raise NotImplementedError
end
private
def or_params
params[:or]
end
def not_params
params[:not]
end
def or_filters_enabled?
@or_filters_enabled
end
def not_filters_enabled?
@not_filters_enabled
end
end
end

View File

@ -5,29 +5,30 @@ class MergeRequestTargetProjectFinder
attr_reader :current_user, :source_project
def initialize(current_user: nil, source_project:)
def initialize(current_user: nil, source_project:, project_feature: :merge_requests)
@current_user = current_user
@source_project = source_project
@project_feature = project_feature
end
# rubocop: disable CodeReuse/ActiveRecord
def execute(include_routes: false)
if source_project.fork_network
include_routes ? projects.inc_routes : projects
else
Project.where(id: source_project)
Project.id_in(source_project.id)
end
end
# rubocop: enable CodeReuse/ActiveRecord
private
attr_reader :project_feature
def projects
source_project
.fork_network
.projects
.public_or_visible_to_user(current_user)
.non_archived
.with_feature_available_for_user(:merge_requests, current_user)
.with_feature_available_for_user(project_feature, current_user)
end
end

View File

@ -252,6 +252,7 @@ module ApplicationSettingsHelper
:housekeeping_incremental_repack_period,
:html_emails_enabled,
:import_sources,
:in_product_marketing_emails_enabled,
:invisible_captcha_enabled,
:max_artifacts_size,
:max_attachment_size,

View File

@ -1,22 +1,31 @@
# frozen_string_literal: true
module CompareHelper
def create_mr_button?(from = params[:from], to = params[:to], project = @project)
def create_mr_button?(from: params[:from], to: params[:to], source_project: @project, target_project: @target_project)
from.present? &&
to.present? &&
from != to &&
can?(current_user, :create_merge_request_from, project) &&
project.repository.branch_exists?(from) &&
project.repository.branch_exists?(to)
can?(current_user, :create_merge_request_from, source_project) &&
can?(current_user, :create_merge_request_in, target_project) &&
target_project.repository.branch_exists?(from) &&
source_project.repository.branch_exists?(to)
end
def create_mr_path(from = params[:from], to = params[:to], project = @project)
def create_mr_path(from: params[:from], to: params[:to], source_project: @project, target_project: @target_project)
project_new_merge_request_path(
project,
target_project,
merge_request: {
source_project_id: source_project.id,
source_branch: to,
target_project_id: target_project.id,
target_branch: from
}
)
end
def target_projects(source_project)
MergeRequestTargetProjectFinder
.new(current_user: current_user, source_project: source_project, project_feature: :repository)
.execute(include_routes: true)
end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
module Projects
module Security
module ConfigurationHelper
def security_upgrade_path
'https://about.gitlab.com/pricing/'
end
end
end
end
::Projects::Security::ConfigurationHelper.prepend_if_ee('::EE::Projects::Security::ConfigurationHelper')

View File

@ -4,10 +4,17 @@ module Ci
class RunnerNamespace < ApplicationRecord
extend Gitlab::Ci::Model
belongs_to :runner, inverse_of: :runner_namespaces, validate: true
belongs_to :runner, inverse_of: :runner_namespaces
belongs_to :namespace, inverse_of: :runner_namespaces, class_name: '::Namespace'
belongs_to :group, class_name: '::Group', foreign_key: :namespace_id
validates :runner_id, uniqueness: { scope: :namespace_id }
validate :group_runner_type
private
def group_runner_type
errors.add(:runner, 'is not a group runner') unless runner&.group_type?
end
end
end

View File

@ -86,6 +86,7 @@ module Issuable
before_validation :truncate_description_on_import!
scope :authored, ->(user) { where(author_id: user) }
scope :not_authored, ->(user) { where.not(author_id: user) }
scope :recent, -> { reorder(id: :desc) }
scope :of_projects, ->(ids) { where(project_id: ids) }
scope :opened, -> { with_state(:opened) }

View File

@ -67,8 +67,6 @@ class Namespace < ApplicationRecord
validate :changing_shared_runners_enabled_is_allowed
validate :changing_allow_descendants_override_disabled_shared_runners_is_allowed
validates_associated :runners
delegate :name, to: :owner, allow_nil: true, prefix: true
delegate :avatar_url, to: :owner, allow_nil: true

View File

@ -10,7 +10,7 @@ class ZoomMeeting < ApplicationRecord
validates :project, presence: true, unless: :importing?
validates :issue, presence: true, unless: :importing?
validates :url, presence: true, length: { maximum: 255 }, zoom_url: true
validates :url, presence: true, length: { maximum: 255 }, 'gitlab/utils/zoom_url': true
validates :issue, same_project_association: true, unless: :importing?
enum issue_status: {

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
# Gitlab::Utils::ZoomUrlValidator
#
# Custom validator for zoom urls
#
module Gitlab
module Utils
class ZoomUrlValidator < ActiveModel::EachValidator
ALLOWED_SCHEMES = %w(https).freeze
def validate_each(record, attribute, value)
links_count = Gitlab::ZoomLinkExtractor.new(value).links.size
valid = Gitlab::UrlSanitizer.valid?(value, allowed_schemes: ALLOWED_SCHEMES)
return if links_count == 1 && valid
record.errors.add(:url, 'must contain one valid Zoom URL')
end
end
end
end

View File

@ -1,18 +0,0 @@
# frozen_string_literal: true
# ZoomUrlValidator
#
# Custom validator for zoom urls
#
class ZoomUrlValidator < ActiveModel::EachValidator
ALLOWED_SCHEMES = %w(https).freeze
def validate_each(record, attribute, value)
links_count = Gitlab::ZoomLinkExtractor.new(value).links.size
valid = Gitlab::UrlSanitizer.valid?(value, allowed_schemes: ALLOWED_SCHEMES)
return if links_count == 1 && valid
record.errors.add(:url, 'must contain one valid Zoom URL')
end
end

View File

@ -25,4 +25,12 @@
= render_if_exists 'admin/application_settings/email_additional_text_setting', form: f
.form-group
.form-check
= f.check_box :in_product_marketing_emails_enabled, class: 'form-check-input'
= f.label :in_product_marketing_emails_enabled, class: 'form-check-label' do
= _('Enable in-product marketing emails')
.form-text.text-muted
= _('By default, GitLab sends emails to help guide users through the onboarding process.')
= f.submit _('Save changes'), class: "gl-button btn btn-success", data: { qa_selector: 'save_changes_button' }

View File

@ -21,7 +21,8 @@
%ul.content-list.event_commits
= render "events/commit", project: project, event: event
- create_mr = event.new_ref? && create_mr_button?(project.default_branch, event.ref_name, project) && event.authored_by?(current_user)
- create_mr = event.new_ref? && create_mr_button?(from: project.default_branch, to: event.ref_name, source_project: project, target_project: project) && event.authored_by?(current_user)
- create_mr_path = create_mr_path(from: project.default_branch, to: event.ref_name, source_project: project, target_project: project) if create_mr
- if event.commits_count > 1
%li.commits-stat
%span ... and #{pluralize(event.commits_count - 1, 'more commit')}.
@ -40,9 +41,9 @@
- if create_mr
%span
or
= link_to create_mr_path(project.default_branch, event.ref_name, project) do
= link_to create_mr_path do
create a merge request
- elsif create_mr
%li.commits-stat
= link_to create_mr_path(project.default_branch, event.ref_name, project) do
= link_to create_mr_path do
Create Merge Request

View File

@ -9,5 +9,4 @@
available_namespaces_path: import_available_namespaces_path(format: :json),
create_bulk_import_path: import_bulk_imports_path(format: :json),
jobs_path: realtime_changes_import_bulk_imports_path(format: :json),
can_create_group: current_user.can_create_group?.to_s,
source_url: @source_url } }

View File

@ -8,3 +8,6 @@
n.src=w;g.parentNode.insertBefore(n,g)}}(window,document,"script","#{asset_url('snowplow/sp.js')}","snowplow"));
window.snowplowOptions = #{Gitlab::Tracking.snowplow_options(@group).to_json}
gl = window.gl || {};
gl.snowplowStandardContext = #{Gitlab::Tracking::StandardContext.new.to_context.to_json.to_json}

View File

@ -35,8 +35,8 @@
.gl-display-inline-flex.gl-vertical-align-middle.gl-mr-5
%svg.s24
- if merge_project && create_mr_button?(@repository.root_ref, branch.name)
= link_to create_mr_path(@repository.root_ref, branch.name), class: 'gl-button btn btn-default' do
- if merge_project && create_mr_button?(from: @repository.root_ref, to: branch.name, source_project: @project, target_project: @project)
= link_to create_mr_path(from: @repository.root_ref, to: branch.name, source_project: @project, target_project: @project), class: 'gl-button btn btn-default' do
= _('Merge request')
- if branch.name != @repository.root_ref

View File

@ -18,9 +18,9 @@
- if @merge_request.present?
.control.d-none.d-md-block
= link_to _("View open merge request"), project_merge_request_path(@project, @merge_request), class: 'btn gl-button'
- elsif create_mr_button?(@repository.root_ref, @ref)
- elsif create_mr_button?(from: @repository.root_ref, to: @ref, source_project: @project, target_project: @project)
.control.d-none.d-md-block
= link_to _("Create merge request"), create_mr_path(@repository.root_ref, @ref), class: 'btn gl-button btn-success'
= link_to _("Create merge request"), create_mr_path(from: @repository.root_ref, to: @ref, source_project: @project, target_project: @project), class: 'btn gl-button btn-success'
.control
= form_tag(project_commits_path(@project, @id), method: :get, class: 'commits-search-form js-signature-container', data: { 'signatures-path' => namespace_project_signatures_path }) do

View File

@ -1,10 +0,0 @@
%diff-file-editor{ "inline-template" => "true", ":file" => "file", ":on-cancel-discard-confirmation" => "cancelDiscardConfirmation", ":on-accept-discard-confirmation" => "acceptDiscardConfirmation" }
.diff-editor-wrap{ "v-show" => "file.showEditor" }
.discard-changes-alert-wrap{ "v-if" => "file.promptDiscardConfirmation" }
.discard-changes-alert
Are you sure you want to discard your changes?
.discard-actions
%button.btn.btn-sm.btn-danger-secondary.gl-button{ "@click" => "acceptDiscardConfirmation(file)" } Discard changes
%button.btn.btn-default.btn-sm.gl-button{ "@click" => "cancelDiscardConfirmation(file)" } Cancel
.editor-wrap{ ":class" => "classObject" }
.editor{ "style" => "height: 350px", data: { 'editor-loading': true } }

View File

@ -1,14 +0,0 @@
%inline-conflict-lines{ "inline-template" => "true", ":file" => "file" }
%table.diff-wrap-lines.code.code-commit.js-syntax-highlight
%tr.line_holder.diff-inline{ "v-for" => "line in file.inlineLines" }
%td.diff-line-num.new_line{ ":class" => "lineCssClass(line)", "v-if" => "!line.isHeader" }
%a {{line.new_line}}
%td.diff-line-num.old_line{ ":class" => "lineCssClass(line)", "v-if" => "!line.isHeader" }
%a {{line.old_line}}
%td.line_content{ ":class" => "lineCssClass(line)", "v-if" => "!line.isHeader", "v-html" => "line.richText" }
%td.diff-line-num.header{ ":class" => "lineCssClass(line)", "v-if" => "line.isHeader" }
%td.diff-line-num.header{ ":class" => "lineCssClass(line)", "v-if" => "line.isHeader" }
%td.line_content.header{ ":class" => "lineCssClass(line)", "v-if" => "line.isHeader" }
%strong{ "v-html" => "line.richText" }
%button.btn{ "@click" => "handleSelected(file, line.id, line.section)" }
{{line.buttonTitle}}

View File

@ -27,10 +27,10 @@
= render partial: 'projects/merge_requests/conflicts/file_actions'
.diff-content.diff-wrap-lines
.file-content{ "v-show" => "!isParallel && file.resolveMode === 'interactive' && file.type === 'text'" }
= render partial: "projects/merge_requests/conflicts/components/inline_conflict_lines"
%inline-conflict-lines{ ":file" => "file" }
.file-content{ "v-show" => "isParallel && file.resolveMode === 'interactive' && file.type === 'text'" }
%parallel-conflict-lines{ ":file" => "file" }
%div{ "v-show" => "file.resolveMode === 'edit' || file.type === 'text-editor'" }
= render partial: "projects/merge_requests/conflicts/components/diff_file_editor"
%diff-file-editor{ ":file" => "file", ":on-cancel-discard-confirmation" => "cancelDiscardConfirmation", ":on-accept-discard-confirmation" => "acceptDiscardConfirmation" }
= render partial: "projects/merge_requests/conflicts/submit_form"

View File

@ -1,4 +1,4 @@
- breadcrumb_title _("Security Configuration")
- page_title _("Security Configuration")
#js-security-configuration-static{ data: {project_path: @project.full_path} }
#js-security-configuration-static{ data: { project_path: @project.full_path, upgrade_path: security_upgrade_path } }

View File

@ -9,6 +9,7 @@ module Namespaces
urgency :low
def perform
return unless Gitlab::CurrentSettings.in_product_marketing_emails_enabled
return unless Gitlab::Experimentation.active?(:in_product_marketing_emails)
Namespaces::InProductMarketingEmailsService.send_for_all_tracks_and_intervals

View File

@ -0,0 +1,5 @@
---
title: Improve performance of validations when a group has a lot of runners
merge_request: 54774
author:
type: performance

View File

@ -0,0 +1,5 @@
---
title: Add application setting for enabling in-product marketing emails
merge_request: 54324
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Added a missing class to reply placeholder
merge_request: 54817
author:
type: fixed

View File

@ -0,0 +1,8 @@
---
name: compare_repo_dropdown
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/issues/14615
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/322141
milestone: '13.9'
type: development
group: group::source code
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: or_issuable_queries
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/54444
rollout_issue_url:
milestone: '13.10'
type: development
group: group::project management
default_enabled: false

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddInProductMarketingEmailsEnabledSetting < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :application_settings, :in_product_marketing_emails_enabled, :boolean, null: false, default: true
end
end

View File

@ -0,0 +1 @@
0aa6f7385cf13c2b0ee9b7d2a51b63dd208feccffecee8f08ea3d183ebb5ffb4

View File

@ -9398,6 +9398,7 @@ CREATE TABLE application_settings (
keep_latest_artifact boolean DEFAULT true NOT NULL,
notes_create_limit integer DEFAULT 300 NOT NULL,
notes_create_limit_allowlist text[] DEFAULT '{}'::text[] NOT NULL,
in_product_marketing_emails_enabled boolean DEFAULT true NOT NULL,
kroki_formats jsonb DEFAULT '{}'::jsonb NOT NULL,
CONSTRAINT app_settings_container_reg_cleanup_tags_max_list_size_positive CHECK ((container_registry_cleanup_tags_service_max_list_size >= 0)),
CONSTRAINT app_settings_registry_exp_policies_worker_capacity_positive CHECK ((container_registry_expiration_policies_worker_capacity >= 0)),

View File

@ -305,6 +305,7 @@ listed in the descriptions of the relevant settings.
| `housekeeping_incremental_repack_period` | integer | required by: `housekeeping_enabled` | Number of Git pushes after which an incremental `git repack` is run. |
| `html_emails_enabled` | boolean | no | Enable HTML emails. |
| `import_sources` | array of strings | no | Sources to allow project import from, possible values: `github`, `bitbucket`, `bitbucket_server`, `gitlab`, `fogbugz`, `git`, `gitlab_project`, `gitea`, `manifest`, and `phabricator`. |
| `in_product_marketing_emails_enabled` | boolean | no | Enable in-product marketing emails. Enabled by default. |
| `invisible_captcha_enabled` | boolean | no | <!-- vale gitlab.Spelling = NO --> Enable Invisible Captcha <!-- vale gitlab.Spelling = YES --> spam detection during sign-up. Disabled by default. |
| `issues_create_limit` | integer | no | Max number of issue creation requests per minute per user. Disabled by default.|
| `local_markdown_version` | integer | no | Increase this value when any cached Markdown should be invalidated. |

View File

@ -355,8 +355,8 @@ You can also store template files in a central repository and `include` them in
otherwise the external file is not included.
You can't use [YAML anchors](#anchors) across different YAML files sourced by `include`.
You can only refer to anchors in the same file. Instead of YAML anchors, you can
use the [`extends` keyword](#extends).
You can only refer to anchors in the same file. To reuse configuration from different
YAML files, use [`!reference` tags](#reference-tags) or the [`extends` keyword](#extends).
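
For illustration only, a minimal sketch of the `extends` approach under these rules; the file path and job names are hypothetical:

```yaml
# .gitlab/ci/templates.gitlab-ci.yml (hypothetical included file)
.base-test:
  image: ruby:2.7
  before_script:
    - bundle install

# .gitlab-ci.yml
include:
  - local: .gitlab/ci/templates.gitlab-ci.yml

rspec:
  extends: .base-test
  script:
    - bundle exec rspec
```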
`include` supports the following inclusion methods:
@ -4391,9 +4391,10 @@ Use anchors to duplicate or inherit properties. Use anchors with [hidden jobs](#
to provide templates for your jobs. When there are duplicate keys, GitLab
performs a reverse deep merge based on the keys.
You can't use YAML anchors across multiple files when leveraging the [`include`](#include)
feature. Anchors are only valid in the file they were defined in. Instead
of using YAML anchors, you can use the [`extends` keyword](#extends).
You can't use YAML anchors across multiple files when using the [`include`](#include)
keyword. Anchors are only valid in the file they were defined in. To reuse configuration
from different YAML files, use [`!reference` tags](#reference-tags) or the
[`extends` keyword](#extends).
The following example uses anchors and map merging. It creates two jobs,
`test1` and `test2`, that inherit the `.job_template` configuration, each

View File

@ -121,7 +121,7 @@ job:
- echo -e "\e[31mThis text is red,\e[0m but this text isn't\e[31m however this text is red again."
```
You can define the color codes in Shell variables, or even [custom environment variables](../variables/README.md#custom-cicd-variables),
You can define the color codes in Shell variables, or even [custom CI/CD variables](../variables/README.md#custom-cicd-variables),
which makes the commands easier to read and reusable.
For example, using the same example as above and variables defined in a `before_script`:

View File

@ -7312,8 +7312,8 @@ Projects with a GitHub service pipeline enabled
| --- | --- |
| `key_path` | **`counts.projects_reporting_ci_cd_back_to_github`** |
| `product_section` | ops |
| `product_stage` | |
| `product_group` | `group::verify` |
| `product_stage` | verify |
| `product_group` | `group::continuous_integration` |
| `product_category` | `continuous_integration` |
| `value_type` | number |
| `status` | data_available |
@ -7321,7 +7321,6 @@ Projects with a GitHub service pipeline enabled
| `data_source` | Database |
| `distribution` | ee |
| `tier` | premium, ultimate |
| `skip_validation` | true |
## `counts.projects_slack_active`

View File

@ -28,15 +28,16 @@ Each metric is defined in a separate YAML file consisting of a number of fields:
|---------------------|----------|----------------------------------------------------------------|
| `key_path` | yes | JSON key path for the metric, location in Usage Ping payload. |
| `description` | yes | |
| `value_type` | yes | |
| `status` | yes | |
| `product_group` | yes | The [group](https://about.gitlab.com/handbook/product/categories/#devops-stages) that owns the metric. |
| `time_frame` | yes | `string`; may be set to a value like "7d" |
| `data_source` | yes | `string`: may be set to a value like `database` or `redis_hll`. |
| `product_section` | yes | The [section](https://gitlab.com/gitlab-com/www-gitlab-com/-/blob/master/data/sections.yml). |
| `product_stage` | no | The [stage](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/stages.yml) for the metric. |
| `product_group` | yes | The [group](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/stages.yml) that owns the metric. |
| `product_category` | no | The [product category](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/categories.yml) for the metric. |
| `value_type` | yes | `string`; one of `string`, `number`, `boolean`. |
| `status` | yes | `string`; status of the metric, may be set to `data_available`, `implemented`. |
| `time_frame` | yes | `string`; may be set to a value like `7d`, `28d`, `all`, `none`. |
| `data_source` | yes | `string`: may be set to a value like `database`, `redis`, `redis_hll`, `prometheus`, `ruby`. |
| `distribution` | yes | The [distribution](https://about.gitlab.com/handbook/marketing/strategic-marketing/tiers/#definitions) where the metric applies. |
| `tier` | yes | The [tier]( https://about.gitlab.com/handbook/marketing/strategic-marketing/tiers/) where the metric applies. |
| `product_category` | no | The [product category](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/categories.yml) for the metric. |
| `product_stage` | no | The [stage](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/stages.yml) for the metric. |
| `milestone` | no | The milestone when the metric is introduced. |
| `milestone_removed` | no | The milestone when the metric is removed. |
| `introduced_by_url` | no | The URL to the Merge Request that introduced the metric. |
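
As a hypothetical illustration of how these fields fit together in a single metric definition file (all values below are made up, not taken from a real metric):

```yaml
key_path: counts.deployments
description: Total number of deployments
product_section: ops
product_stage: release
product_group: group::release
product_category: continuous_delivery
value_type: number
status: data_available
time_frame: all
data_source: database
distribution: [ce, ee]
tier: [free, premium, ultimate]
milestone: "13.9"
```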

View File

@ -116,7 +116,7 @@ and the CI YAML file:
image: registry.gitlab.com/gitlab-org/terraform-images/stable:latest
```
1. In the `.gitlab-ci.yml` file, define some environment variables to ease
1. In the `.gitlab-ci.yml` file, define some CI/CD variables to ease
development. In this example, `TF_ROOT` is the directory where the Terraform
commands must be executed, `TF_ADDRESS` is the URL to the state on the GitLab
instance where this pipeline runs, and the final path segment in `TF_ADDRESS`
@ -212,7 +212,7 @@ as [Terraform input variables](https://www.terraform.io/docs/language/values/var
- **username**: The username to authenticate with the data source. If you are using a [Personal Access Token](../profile/personal_access_tokens.md) for
authentication, this is your GitLab username. If you are using GitLab CI, this is `'gitlab-ci-token'`.
- **password**: The password to authenticate with the data source. If you are using a Personal Access Token for
authentication, this is the token value. If you are using GitLab CI, it is the contents of the `${CI_JOB_TOKEN}` CI variable.
authentication, this is the token value. If you are using GitLab CI, it is the contents of the `${CI_JOB_TOKEN}` CI/CD variable.
An example setup is shown below:
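For instance, the same credentials can be wired in from a pipeline when initializing GitLab-managed state over the `http` backend. This is only a hedged sketch: the job name is illustrative, and `TF_ROOT` and `TF_ADDRESS` are the variables defined earlier:

```yaml
init:
  image: registry.gitlab.com/gitlab-org/terraform-images/stable:latest
  script:
    - cd "${TF_ROOT}"
    # gitlab-ci-token and CI_JOB_TOKEN stand in for the username and password described above.
    - terraform init -backend-config="address=${TF_ADDRESS}" -backend-config="username=gitlab-ci-token" -backend-config="password=${CI_JOB_TOKEN}"
```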

View File

@ -86,7 +86,7 @@ differentiates the new cluster from the rest.
When adding more than one Kubernetes cluster to your project, you need to differentiate
them with an environment scope. The environment scope associates clusters with [environments](../../../ci/environments/index.md) similar to how the
[environment-specific variables](../../../ci/variables/README.md#limit-the-environment-scopes-of-cicd-variables) work.
[environment-specific CI/CD variables](../../../ci/variables/README.md#limit-the-environment-scopes-of-cicd-variables) work.
The default environment scope is `*`, which means all jobs, regardless of their
environment, use that cluster. Each scope can be used only by a single cluster
@ -200,7 +200,7 @@ To clear the cache:
You do not need to specify a base domain on cluster settings when using GitLab Serverless. The domain in that case
is specified as part of the Knative installation. See [Installing Applications](#installing-applications).
Specifying a base domain automatically sets `KUBE_INGRESS_BASE_DOMAIN` as an environment variable.
Specifying a base domain automatically sets `KUBE_INGRESS_BASE_DOMAIN` as a deployment variable.
If you are using [Auto DevOps](../../../topics/autodevops/index.md), this domain is used for the different
stages. For example, Auto Review Apps and Auto Deploy.
@ -288,7 +288,7 @@ A Kubernetes cluster can be the destination for a deployment job. If
the cluster from your jobs using tools such as `kubectl` or `helm`.
- You don't use the GitLab cluster integration, you can still deploy to your
cluster. However, you must configure Kubernetes tools yourself
using [environment variables](../../../ci/variables/README.md#custom-cicd-variables)
using [CI/CD variables](../../../ci/variables/README.md#custom-cicd-variables)
before you can interact with the cluster from your jobs.
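For instance, if the cluster credentials are stored in a file-type CI/CD variable named `KUBECONFIG`, a deployment job can use them directly. This is only a sketch; the image and manifest path are placeholders:

```yaml
deploy:
  stage: deploy
  image:
    name: bitnami/kubectl:latest
    entrypoint: [""]
  script:
    # kubectl reads the KUBECONFIG environment variable, which points at the
    # temporary file GitLab writes for file-type CI/CD variables.
    - kubectl config current-context
    - kubectl apply -f manifests/
```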
### Deployment variables
@ -314,7 +314,7 @@ The Kubernetes cluster integration exposes these
GitLab CI/CD build environment to deployment jobs. Deployment jobs have
[defined a target environment](../../../ci/environments/index.md#defining-environments).
| Variable | Description |
| Deployment Variable | Description |
|----------------------------|-------------|
| `KUBE_URL` | Equal to the API URL. |
| `KUBE_TOKEN` | The Kubernetes token of the [environment service account](add_remove_clusters.md#access-controls). Prior to GitLab 11.5, `KUBE_TOKEN` was the Kubernetes token of the main service account of the cluster integration. |
@ -352,7 +352,7 @@ When you customize the namespace, existing environments remain linked to their c
namespaces until you [clear the cluster cache](#clearing-the-cluster-cache).
WARNING:
By default, anyone who can create a deployment job can access any CI variable in
By default, anyone who can create a deployment job can access any CI/CD variable in
an environment's deployment job. This includes `KUBECONFIG`, which gives access to
any secret available to the associated service account in your cluster.
To keep your production credentials safe, consider using
@ -406,7 +406,7 @@ deployments, replica sets, and pods are annotated with:
- `app.gitlab.com/app: $CI_PROJECT_PATH_SLUG`
`$CI_ENVIRONMENT_SLUG` and `$CI_PROJECT_PATH_SLUG` are the values of
the CI variables.
the CI/CD variables.
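In a manifest, that typically means adding the annotations to the workload and its Pod template, for example (the names and values here are illustrative):

```yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: my-app
  annotations:
    app.gitlab.com/env: production       # the value of $CI_ENVIRONMENT_SLUG
    app.gitlab.com/app: mygroup-my-app   # the value of $CI_PROJECT_PATH_SLUG
spec:
  selector:
    matchLabels:
      app: my-app
  template:
    metadata:
      labels:
        app: my-app
      annotations:
        app.gitlab.com/env: production
        app.gitlab.com/app: mygroup-my-app
    spec:
      containers:
        - name: my-app
          image: registry.example.com/mygroup/my-app:latest
```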
You must be the project owner or have `maintainer` permissions to use terminals.
Support is limited to the first container in the first pod of your environment.
@ -431,7 +431,7 @@ Reasons for failure include:
- The token you gave GitLab does not have [`cluster-admin`](https://kubernetes.io/docs/reference/access-authn-authz/rbac/#user-facing-roles)
privileges required by GitLab.
- Missing `KUBECONFIG` or `KUBE_TOKEN` variables. To be passed to your job, they must have a matching
- Missing `KUBECONFIG` or `KUBE_TOKEN` deployment variables. To be passed to your job, they must have a matching
[`environment:name`](../../../ci/environments/index.md#defining-environments). If your job has no
`environment:name` set, the Kubernetes credentials are not passed to it.
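In practice, that means the job that talks to the cluster declares an environment, roughly like this (the job name and environment name are examples):

```yaml
deploy_review:
  stage: deploy
  script:
    # KUBECONFIG, KUBE_TOKEN, and KUBE_NAMESPACE are only injected because the
    # job has an environment name matching the cluster's environment scope.
    - kubectl get pods --namespace "$KUBE_NAMESPACE"
  environment:
    name: review/$CI_COMMIT_REF_SLUG
```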

View File

@ -367,8 +367,7 @@ sam init -h
### Setting up your AWS credentials with your GitLab account
In order to interact with your AWS account, the GitLab CI/CD pipelines require both
`AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` to be set in the project's CI/CD
variables.
`AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` to be set in the project's CI/CD variables.
To set these:
@ -424,8 +423,8 @@ deploys your application. If your:
- Incompatible versions of software. For example, Python runtime version might be
different from the Python on the build machine. Address this by installing the
required versions of the software.
- You may not be able to access your AWS account from GitLab. Check the environment
variables you set up with AWS credentials.
- You may not be able to access your AWS account from GitLab. Check the CI/CD variables
you set up with AWS credentials.
- You may not have permission to deploy a serverless application. Make sure you
provide all required permissions to deploy a serverless application.

View File

@ -396,7 +396,7 @@ kubectl create secret generic my-secrets -n "$KUBE_NAMESPACE" --from-literal MY_
#### Part of deployment job
You can extend your `.gitlab-ci.yml` to create the secrets during deployment using the [environment variables](../../../../ci/variables/README.md)
You can extend your `.gitlab-ci.yml` to create the secrets during deployment using the [CI/CD variables](../../../../ci/variables/README.md)
stored securely under your GitLab project.
```yaml
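# Hedged sketch only: the job name, stage, and the source CI/CD variable are illustrative.
# It reuses the `kubectl create secret` command referenced above to create the secret at deploy time.
deploy:
  stage: deploy
  script:
    - kubectl create secret generic my-secrets -n "$KUBE_NAMESPACE" --from-literal MY_SECRET="$PROJECT_LEVEL_CI_VARIABLE"
```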

View File

@ -90,7 +90,7 @@ To display the Deploy Boards for a specific [environment](../../ci/environments/
[`kubernetes`](https://docs.gitlab.com/runner/executors/kubernetes.html) executor.
1. Configure the [Kubernetes integration](clusters/index.md) in your project for the
cluster. The Kubernetes namespace is of particular note as you need it
for your deployment scripts (exposed by the `KUBE_NAMESPACE` environment variable).
for your deployment scripts (exposed by the `KUBE_NAMESPACE` deployment variable).
1. Ensure Kubernetes annotations of `app.gitlab.com/env: $CI_ENVIRONMENT_SLUG`
and `app.gitlab.com/app: $CI_PROJECT_PATH_SLUG` are applied to the
deployments, replica sets, and pods, where `$CI_ENVIRONMENT_SLUG` and
@ -163,6 +163,6 @@ version of your application.
## Further reading
- [GitLab Auto deploy](../../topics/autodevops/stages.md#auto-deploy)
- [GitLab CI/CD environment variables](../../ci/variables/README.md)
- [GitLab CI/CD variables](../../ci/variables/README.md)
- [Environments and deployments](../../ci/environments/index.md)
- [Kubernetes deploy example](https://gitlab.com/gitlab-examples/kubernetes-deploy)

View File

@ -169,7 +169,7 @@ apply consistently when cloning the repository of related projects.
There's a special case when it comes to Deploy Tokens. If a user creates one
named `gitlab-deploy-token`, the username and token of the Deploy Token is
automatically exposed to the CI/CD jobs as environment variables: `CI_DEPLOY_USER`
automatically exposed to the CI/CD jobs as CI/CD variables: `CI_DEPLOY_USER`
and `CI_DEPLOY_PASSWORD`, respectively.
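For example, a job can reuse these variables to authenticate before pulling or pushing images (a minimal sketch; the job layout is illustrative):

```yaml
build:
  stage: build
  image: docker:latest
  services:
    - docker:dind
  before_script:
    - docker login -u "$CI_DEPLOY_USER" -p "$CI_DEPLOY_PASSWORD" "$CI_REGISTRY"
  script:
    - docker build -t "$CI_REGISTRY_IMAGE" .
    - docker push "$CI_REGISTRY_IMAGE"
```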
After you create the token, you can sign in to the Container Registry by using

View File

@ -34,12 +34,12 @@ git push -o <push_option>
## Push options for GitLab CI/CD
You can use push options to skip a CI/CD pipeline, or pass environment variables.
You can use push options to skip a CI/CD pipeline, or pass CI/CD variables.
| Push option | Description | Introduced in version |
| ------------------------------ | ------------------------------------------------------------------------------------------- |---------------------- |
| `ci.skip` | Do not create a CI pipeline for the latest push. | [11.7](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/15643) |
| `ci.variable="<name>=<value>"` | Provide [environment variables](../../ci/variables/README.md) to be used in a CI pipeline, if one is created due to the push. | [12.6](https://gitlab.com/gitlab-org/gitlab/-/issues/27983) |
| `ci.variable="<name>=<value>"` | Provide [CI/CD variables](../../ci/variables/README.md) to be used in a CI pipeline, if one is created due to the push. | [12.6](https://gitlab.com/gitlab-org/gitlab/-/issues/27983) |
An example of using `ci.skip`:
@ -47,7 +47,7 @@ An example of using `ci.skip`:
git push -o ci.skip
```
An example of passing some environment variables for a pipeline:
An example of passing some CI/CD variables for a pipeline:
```shell
git push -o ci.variable="MAX_RETRIES=10" -o ci.variable="MAX_TIME=600"

View File

@ -421,7 +421,7 @@ terminal:
See [this issue](https://gitlab.com/gitlab-org/webide-file-sync/-/issues/7) for
more information.
- `$CI_PROJECT_DIR` is a
[predefined environment variable](../../../ci/variables/predefined_variables.md)
[predefined CI/CD variable](../../../ci/variables/predefined_variables.md)
for GitLab Runners. This is where your project's repository resides.
After you have configured the web terminal for file syncing, then when the web

View File

@ -91,6 +91,7 @@ module API
optional :import_sources, type: Array[String], coerce_with: Validations::Types::CommaSeparatedToArray.coerce,
values: %w[github bitbucket bitbucket_server gitlab google_code fogbugz git gitlab_project gitea manifest phabricator],
desc: 'Enabled sources for code import during project creation. OmniAuth must be configured for GitHub, Bitbucket, and GitLab.com'
optional :in_product_marketing_emails_enabled, type: Boolean, desc: 'By default, in-product marketing emails are enabled. To disable these emails, disable this option.'
optional :invisible_captcha_enabled, type: Boolean, desc: 'Enable Invisible Captcha spam detection during signup.'
optional :max_artifacts_size, type: Integer, desc: "Set the maximum file size for each job's artifacts"
optional :max_attachment_size, type: Integer, desc: 'Maximum attachment size in MB'

View File

@ -3960,6 +3960,9 @@ msgstr ""
msgid "Are you sure you want to discard this comment?"
msgstr ""
msgid "Are you sure you want to discard your changes?"
msgstr ""
msgid "Are you sure you want to erase this build?"
msgstr ""
@ -5162,6 +5165,9 @@ msgstr ""
msgid "By default GitLab sends emails in HTML and plain text formats so mail clients can choose what format to use. Disable this option if you only want to send emails in plain text format."
msgstr ""
msgid "By default, GitLab sends emails to help guide users through the onboarding process."
msgstr ""
msgid "By default, all projects and groups will use the global notifications setting."
msgstr ""
@ -11198,6 +11204,9 @@ msgstr ""
msgid "Enable header and footer in emails"
msgstr ""
msgid "Enable in-product marketing emails"
msgstr ""
msgid "Enable integration"
msgstr ""

View File

@ -3,8 +3,21 @@
require 'spec_helper'
RSpec.describe Projects::CompareController do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
include ProjectForksHelper
using RSpec::Parameterized::TableSyntax
let_it_be(:project) { create(:project, :repository, :public) }
let_it_be(:user) { create(:user) }
let(:private_fork) { fork_project(project, nil, repository: true).tap { |fork| fork.update!(visibility: 'private') } }
let(:public_fork) do
fork_project(project, nil, repository: true).tap do |fork|
fork.update!(visibility: 'public')
# Create a reference that only exists in this project
fork.repository.create_ref('refs/heads/improve/awesome', 'refs/heads/improve/more-awesome')
end
end
before do
sign_in(user)
@ -32,18 +45,20 @@ RSpec.describe Projects::CompareController do
{
namespace_id: project.namespace,
project_id: project,
from: source_ref,
to: target_ref,
from_project_id: from_project_id,
from: from_ref,
to: to_ref,
w: whitespace
}
end
let(:whitespace) { nil }
context 'when the refs exist' do
context 'when the refs exist in the same project' do
context 'when we set the white space param' do
let(:source_ref) { "08f22f25" }
let(:target_ref) { "66eceea0" }
let(:from_project_id) { nil }
let(:from_ref) { '08f22f25' }
let(:to_ref) { '66eceea0' }
let(:whitespace) { 1 }
it 'shows some diffs with ignore whitespace change option' do
@ -60,8 +75,9 @@ RSpec.describe Projects::CompareController do
end
context 'when we do not set the white space param' do
let(:source_ref) { "improve%2Fawesome" }
let(:target_ref) { "feature" }
let(:from_project_id) { nil }
let(:from_ref) { 'improve%2Fawesome' }
let(:to_ref) { 'feature' }
let(:whitespace) { nil }
it 'sets the diffs and commits ivars' do
@ -74,9 +90,40 @@ RSpec.describe Projects::CompareController do
end
end
context 'when the refs exist in different projects that the user can see' do
let(:from_project_id) { public_fork.id }
let(:from_ref) { 'improve%2Fmore-awesome' }
let(:to_ref) { 'feature' }
let(:whitespace) { nil }
it 'shows the diff' do
show_request
expect(response).to be_successful
expect(assigns(:diffs).diff_files.first).not_to be_nil
expect(assigns(:commits).length).to be >= 1
end
end
context 'when the refs exist in different projects but the user cannot see' do
let(:from_project_id) { private_fork.id }
let(:from_ref) { 'improve%2Fmore-awesome' }
let(:to_ref) { 'feature' }
let(:whitespace) { nil }
it 'does not show the diff' do
show_request
expect(response).to be_successful
expect(assigns(:diffs)).to be_empty
expect(assigns(:commits)).to be_empty
end
end
context 'when the source ref does not exist' do
let(:source_ref) { 'non-existent-source-ref' }
let(:target_ref) { "feature" }
let(:from_project_id) { nil }
let(:from_ref) { 'non-existent-source-ref' }
let(:to_ref) { 'feature' }
it 'sets empty diff and commit ivars' do
show_request
@ -88,8 +135,9 @@ RSpec.describe Projects::CompareController do
end
context 'when the target ref does not exist' do
let(:target_ref) { 'non-existent-target-ref' }
let(:source_ref) { "improve%2Fawesome" }
let(:from_project_id) { nil }
let(:from_ref) { 'improve%2Fawesome' }
let(:to_ref) { 'non-existent-target-ref' }
it 'sets empty diff and commit ivars' do
show_request
@ -101,8 +149,9 @@ RSpec.describe Projects::CompareController do
end
context 'when the target ref is invalid' do
let(:target_ref) { "master%' AND 2554=4423 AND '%'='" }
let(:source_ref) { "improve%2Fawesome" }
let(:from_project_id) { nil }
let(:from_ref) { 'improve%2Fawesome' }
let(:to_ref) { "master%' AND 2554=4423 AND '%'='" }
it 'shows a flash message and redirects' do
show_request
@ -113,8 +162,9 @@ RSpec.describe Projects::CompareController do
end
context 'when the source ref is invalid' do
let(:source_ref) { "master%' AND 2554=4423 AND '%'='" }
let(:target_ref) { "improve%2Fawesome" }
let(:from_project_id) { nil }
let(:from_ref) { "master%' AND 2554=4423 AND '%'='" }
let(:to_ref) { 'improve%2Fawesome' }
it 'shows a flash message and redirects' do
show_request
@ -126,24 +176,33 @@ RSpec.describe Projects::CompareController do
end
describe 'GET diff_for_path' do
def diff_for_path(extra_params = {})
params = {
namespace_id: project.namespace,
project_id: project
}
subject(:diff_for_path_request) { get :diff_for_path, params: request_params }
get :diff_for_path, params: params.merge(extra_params)
let(:request_params) do
{
from_project_id: from_project_id,
from: from_ref,
to: to_ref,
namespace_id: project.namespace,
project_id: project,
old_path: old_path,
new_path: new_path
}
end
let(:existing_path) { 'files/ruby/feature.rb' }
let(:source_ref) { "improve%2Fawesome" }
let(:target_ref) { "feature" }
context 'when the source and target refs exist' do
let(:from_project_id) { nil }
let(:from_ref) { 'improve%2Fawesome' }
let(:to_ref) { 'feature' }
let(:old_path) { existing_path }
let(:new_path) { existing_path }
context 'when the source and target refs exist in the same project' do
context 'when the user has access to the project' do
context 'when the path exists in the diff' do
it 'disables diff notes' do
diff_for_path(from: source_ref, to: target_ref, old_path: existing_path, new_path: existing_path)
diff_for_path_request
expect(assigns(:diff_notes_disabled)).to be_truthy
end
@ -154,16 +213,17 @@ RSpec.describe Projects::CompareController do
meth.call(diffs)
end
diff_for_path(from: source_ref, to: target_ref, old_path: existing_path, new_path: existing_path)
diff_for_path_request
end
end
context 'when the path does not exist in the diff' do
before do
diff_for_path(from: source_ref, to: target_ref, old_path: existing_path.succ, new_path: existing_path.succ)
end
let(:old_path) { existing_path.succ }
let(:new_path) { existing_path.succ }
it 'returns a 404' do
diff_for_path_request
expect(response).to have_gitlab_http_status(:not_found)
end
end
@ -172,31 +232,56 @@ RSpec.describe Projects::CompareController do
context 'when the user does not have access to the project' do
before do
project.team.truncate
diff_for_path(from: source_ref, to: target_ref, old_path: existing_path, new_path: existing_path)
end
it 'returns a 404' do
diff_for_path_request
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context 'when the source ref does not exist' do
before do
diff_for_path(from: source_ref.succ, to: target_ref, old_path: existing_path, new_path: existing_path)
context 'when the source and target refs exist in different projects and the user can see' do
let(:from_project_id) { public_fork.id }
let(:from_ref) { 'improve%2Fmore-awesome' }
it 'shows the diff for that path' do
expect(controller).to receive(:render_diff_for_path).and_wrap_original do |meth, diffs|
expect(diffs.diff_files.map(&:new_path)).to contain_exactly(existing_path)
meth.call(diffs)
end
diff_for_path_request
end
end
context 'when the source and target refs exist in different projects and the user cannot see' do
let(:from_project_id) { private_fork.id }
it 'does not show the diff for that path' do
diff_for_path_request
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when the source ref does not exist' do
let(:from_ref) { 'this-ref-does-not-exist' }
it 'returns a 404' do
diff_for_path_request
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when the target ref does not exist' do
before do
diff_for_path(from: source_ref, to: target_ref.succ, old_path: existing_path, new_path: existing_path)
end
let(:to_ref) { 'this-ref-does-not-exist' }
it 'returns a 404' do
diff_for_path_request
expect(response).to have_gitlab_http_status(:not_found)
end
end
@ -209,53 +294,54 @@ RSpec.describe Projects::CompareController do
{
namespace_id: project.namespace,
project_id: project,
from: source_ref,
to: target_ref
from_project_id: from_project_id,
from: from_ref,
to: to_ref
}
end
context 'when sending valid params' do
let(:source_ref) { "improve%2Fawesome" }
let(:target_ref) { "feature" }
let(:from_ref) { 'awesome%2Ffeature' }
let(:to_ref) { 'feature' }
it 'redirects back to show' do
create_request
context 'without a from_project_id' do
let(:from_project_id) { nil }
expect(response).to redirect_to(project_compare_path(project, to: target_ref, from: source_ref))
it 'redirects to the show page' do
create_request
expect(response).to redirect_to(project_compare_path(project, from: from_ref, to: to_ref))
end
end
context 'with a from_project_id' do
let(:from_project_id) { 'something or another' }
it 'redirects to the show page without interpreting from_project_id' do
create_request
expect(response).to redirect_to(project_compare_path(project, from: from_ref, to: to_ref, from_project_id: from_project_id))
end
end
end
context 'when sending invalid params' do
context 'when the source ref is empty and target ref is set' do
let(:source_ref) { '' }
let(:target_ref) { 'master' }
it 'redirects back to index and preserves the target ref' do
create_request
expect(response).to redirect_to(project_compare_index_path(project, to: target_ref))
end
where(:from_ref, :to_ref, :from_project_id, :expected_redirect_params) do
'' | '' | '' | {}
'main' | '' | '' | { from: 'main' }
'' | 'main' | '' | { to: 'main' }
'' | '' | '1' | { from_project_id: 1 }
'main' | '' | '1' | { from: 'main', from_project_id: 1 }
'' | 'main' | '1' | { to: 'main', from_project_id: 1 }
end
context 'when the target ref is empty and source ref is set' do
let(:source_ref) { 'master' }
let(:target_ref) { '' }
with_them do
let(:expected_redirect) { project_compare_index_path(project, expected_redirect_params) }
it 'redirects back to index and preserves source ref' do
it 'redirects back to the index' do
create_request
expect(response).to redirect_to(project_compare_index_path(project, from: source_ref))
end
end
context 'when the target and source ref are empty' do
let(:source_ref) { '' }
let(:target_ref) { '' }
it 'redirects back to index' do
create_request
expect(response).to redirect_to(namespace_project_compare_index_path)
expect(response).to redirect_to(expected_redirect)
end
end
end
@ -268,15 +354,15 @@ RSpec.describe Projects::CompareController do
{
namespace_id: project.namespace,
project_id: project,
from: source_ref,
to: target_ref,
from: from_ref,
to: to_ref,
format: :json
}
end
context 'when the source and target refs exist' do
let(:source_ref) { "improve%2Fawesome" }
let(:target_ref) { "feature" }
let(:from_ref) { 'improve%2Fawesome' }
let(:to_ref) { 'feature' }
context 'when the user has access to the project' do
render_views
@ -285,14 +371,14 @@ RSpec.describe Projects::CompareController do
let(:non_signature_commit) { build(:commit, project: project, safe_message: "message", sha: 'non_signature_commit') }
before do
escaped_source_ref = Addressable::URI.unescape(source_ref)
escaped_target_ref = Addressable::URI.unescape(target_ref)
escaped_from_ref = Addressable::URI.unescape(from_ref)
escaped_to_ref = Addressable::URI.unescape(to_ref)
compare_service = CompareService.new(project, escaped_target_ref)
compare = compare_service.execute(project, escaped_source_ref)
compare_service = CompareService.new(project, escaped_to_ref)
compare = compare_service.execute(project, escaped_from_ref)
expect(CompareService).to receive(:new).with(project, escaped_target_ref).and_return(compare_service)
expect(compare_service).to receive(:execute).with(project, escaped_source_ref).and_return(compare)
expect(CompareService).to receive(:new).with(project, escaped_to_ref).and_return(compare_service)
expect(compare_service).to receive(:execute).with(project, escaped_from_ref).and_return(compare)
expect(compare).to receive(:commits).and_return([signature_commit, non_signature_commit])
expect(non_signature_commit).to receive(:has_signature?).and_return(false)
@ -313,6 +399,7 @@ RSpec.describe Projects::CompareController do
context 'when the user does not have access to the project' do
before do
project.team.truncate
project.update!(visibility: 'private')
end
it 'returns a 404' do
@ -324,8 +411,8 @@ RSpec.describe Projects::CompareController do
end
context 'when the source ref does not exist' do
let(:source_ref) { 'non-existent-ref-source' }
let(:target_ref) { "feature" }
let(:from_ref) { 'non-existent-ref-source' }
let(:to_ref) { 'feature' }
it 'returns no signatures' do
signatures_request
@ -336,8 +423,8 @@ RSpec.describe Projects::CompareController do
end
context 'when the target ref does not exist' do
let(:target_ref) { 'non-existent-ref-target' }
let(:source_ref) { "improve%2Fawesome" }
let(:from_ref) { 'improve%2Fawesome' }
let(:to_ref) { 'non-existent-ref-target' }
it 'returns no signatures' do
signatures_request

View File

@ -3,11 +3,13 @@
require 'spec_helper'
RSpec.describe 'Merge Request button' do
shared_examples 'Merge request button only shown when allowed' do
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let(:forked_project) { create(:project, :public, :repository, forked_from_project: project) }
include ProjectForksHelper
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
let(:forked_project) { fork_project(project, user, repository: true) }
shared_examples 'Merge request button only shown when allowed' do
context 'not logged in' do
it 'does not show Create merge request button' do
visit url
@ -23,9 +25,15 @@ RSpec.describe 'Merge Request button' do
end
it 'shows Create merge request button' do
href = project_new_merge_request_path(project,
merge_request: { source_branch: 'feature',
target_branch: 'master' })
href = project_new_merge_request_path(
project,
merge_request: {
source_project_id: project.id,
source_branch: 'feature',
target_project_id: project.id,
target_branch: 'master'
}
)
visit url
@ -75,12 +83,16 @@ RSpec.describe 'Merge Request button' do
end
context 'on own fork of project' do
let(:user) { forked_project.owner }
it 'shows Create merge request button' do
href = project_new_merge_request_path(forked_project,
merge_request: { source_branch: 'feature',
target_branch: 'master' })
href = project_new_merge_request_path(
forked_project,
merge_request: {
source_project_id: forked_project.id,
source_branch: 'feature',
target_project_id: forked_project.id,
target_branch: 'master'
}
)
visit fork_url
@ -101,11 +113,33 @@ RSpec.describe 'Merge Request button' do
end
context 'on compare page' do
let(:label) { 'Create merge request' }
it_behaves_like 'Merge request button only shown when allowed' do
let(:label) { 'Create merge request' }
let(:url) { project_compare_path(project, from: 'master', to: 'feature') }
let(:fork_url) { project_compare_path(forked_project, from: 'master', to: 'feature') }
end
it 'shows the correct merge request button when viewing across forks' do
sign_in(user)
project.add_developer(user)
href = project_new_merge_request_path(
project,
merge_request: {
source_project_id: forked_project.id,
source_branch: 'feature',
target_project_id: project.id,
target_branch: 'master'
}
)
visit project_compare_path(forked_project, from: 'master', to: 'feature', from_project_id: project.id)
within("#content-body") do
expect(page).to have_link(label, href: href)
end
end
end
context 'on commits page' do

View File

@ -179,33 +179,54 @@ RSpec.describe IssuesFinder do
end
end
context 'filtering by author ID' do
let(:params) { { author_id: user2.id } }
context 'filtering by author' do
context 'by author ID' do
let(:params) { { author_id: user2.id } }
it 'returns issues created by that user' do
expect(issues).to contain_exactly(issue3)
end
end
context 'filtering by not author ID' do
let(:params) { { not: { author_id: user2.id } } }
it 'returns issues not created by that user' do
expect(issues).to contain_exactly(issue1, issue2, issue4, issue5)
end
end
context 'filtering by nonexistent author ID and issue term using CTE for search' do
let(:params) do
{
author_id: 'does-not-exist',
search: 'git',
attempt_group_search_optimizations: true
}
it 'returns issues created by that user' do
expect(issues).to contain_exactly(issue3)
end
end
it 'returns no results' do
expect(issues).to be_empty
context 'using OR' do
let(:issue6) { create(:issue, project: project2) }
let(:params) { { or: { author_username: [issue3.author.username, issue6.author.username] } } }
it 'returns issues created by any of the given users' do
expect(issues).to contain_exactly(issue3, issue6)
end
context 'when feature flag is disabled' do
before do
stub_feature_flags(or_issuable_queries: false)
end
it 'does not add any filter' do
expect(issues).to contain_exactly(issue1, issue2, issue3, issue4, issue5, issue6)
end
end
end
context 'filtering by NOT author ID' do
let(:params) { { not: { author_id: user2.id } } }
it 'returns issues not created by that user' do
expect(issues).to contain_exactly(issue1, issue2, issue4, issue5)
end
end
context 'filtering by nonexistent author ID and issue term using CTE for search' do
let(:params) do
{
author_id: 'does-not-exist',
search: 'git',
attempt_group_search_optimizations: true
}
end
it 'returns no results' do
expect(issues).to be_empty
end
end
end

View File

@ -16,13 +16,22 @@ RSpec.describe MergeRequestTargetProjectFinder do
expect(finder.execute).to contain_exactly(base_project, other_fork, forked_project)
end
it 'does not include projects that have merge requests turned off' do
it 'does not include projects that have merge requests turned off by default' do
other_fork.project_feature.update!(merge_requests_access_level: ProjectFeature::DISABLED)
base_project.project_feature.update!(merge_requests_access_level: ProjectFeature::DISABLED)
expect(finder.execute).to contain_exactly(forked_project)
end
it 'includes projects that have merge requests turned off by default with a more-permissive project feature' do
finder = described_class.new(current_user: user, source_project: forked_project, project_feature: :repository)
other_fork.project_feature.update!(merge_requests_access_level: ProjectFeature::DISABLED)
base_project.project_feature.update!(merge_requests_access_level: ProjectFeature::DISABLED)
expect(finder.execute).to contain_exactly(base_project, other_fork, forked_project)
end
it 'does not contain archived projects' do
base_project.update!(archived: true)

View File

@ -41,30 +41,51 @@ RSpec.describe MergeRequestsFinder do
expect(merge_requests).to contain_exactly(merge_request1)
end
it 'filters by nonexistent author ID and MR term using CTE for search' do
params = {
author_id: 'does-not-exist',
search: 'git',
attempt_group_search_optimizations: true
}
context 'filtering by author' do
subject(:merge_requests) { described_class.new(user, params).execute }
merge_requests = described_class.new(user, params).execute
context 'using OR' do
let(:params) { { or: { author_username: [merge_request1.author.username, merge_request2.author.username] } } }
expect(merge_requests).to be_empty
end
before do
merge_request1.update!(author: create(:user))
merge_request2.update!(author: create(:user))
end
context 'filtering by not author ID' do
let(:params) { { not: { author_id: user2.id } } }
it 'returns merge requests created by any of the given users' do
expect(merge_requests).to contain_exactly(merge_request1, merge_request2)
end
before do
merge_request2.update!(author: user2)
merge_request3.update!(author: user2)
context 'when feature flag is disabled' do
before do
stub_feature_flags(or_issuable_queries: false)
end
it 'does not add any filter' do
expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request3, merge_request4, merge_request5)
end
end
end
it 'returns merge requests not created by that user' do
merge_requests = described_class.new(user, params).execute
context 'with nonexistent author ID and MR term using CTE for search' do
let(:params) { { author_id: 'does-not-exist', search: 'git', attempt_group_search_optimizations: true } }
expect(merge_requests).to contain_exactly(merge_request1, merge_request4, merge_request5)
it 'returns no results' do
expect(merge_requests).to be_empty
end
end
context 'filtering by not author ID' do
let(:params) { { not: { author_id: user2.id } } }
before do
merge_request2.update!(author: user2)
merge_request3.update!(author: user2)
end
it 'returns merge requests not created by that user' do
expect(merge_requests).to contain_exactly(merge_request1, merge_request4, merge_request5)
end
end
end

View File

@ -14,9 +14,6 @@ import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
import NoChanges from '~/diffs/components/no_changes.vue';
import TreeList from '~/diffs/components/tree_list.vue';
import { EVT_VIEW_FILE_BY_FILE } from '~/diffs/constants';
import eventHub from '~/diffs/event_hub';
import axios from '~/lib/utils/axios_utils';
import * as urlUtils from '~/lib/utils/url_utility';
import createDiffsStore from '../create_diffs_store';
@ -699,24 +696,5 @@ describe('diffs/components/app', () => {
},
);
});
describe('control via event stream', () => {
it.each`
setting
${true}
${false}
`(
'triggers the action with the new fileByFile setting - $setting - when the event with that setting is received',
async ({ setting }) => {
createComponent();
await nextTick();
eventHub.$emit(EVT_VIEW_FILE_BY_FILE, { setting });
await nextTick();
expect(store.state.diffs.viewDiffsFileByFile).toBe(setting);
},
);
});
});
});

View File

@ -1,11 +1,7 @@
import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import SettingsDropdown from '~/diffs/components/settings_dropdown.vue';
import {
EVT_VIEW_FILE_BY_FILE,
PARALLEL_DIFF_VIEW_TYPE,
INLINE_DIFF_VIEW_TYPE,
} from '~/diffs/constants';
import { PARALLEL_DIFF_VIEW_TYPE, INLINE_DIFF_VIEW_TYPE } from '~/diffs/constants';
import eventHub from '~/diffs/event_hub';
import diffModule from '~/diffs/store/modules';
@ -48,6 +44,7 @@ describe('Diff settings dropdown component', () => {
setParallelDiffViewType: jest.fn(),
setRenderTreeList: jest.fn(),
setShowWhitespace: jest.fn(),
setFileByFile: jest.fn(),
};
});
@ -196,12 +193,12 @@ describe('Diff settings dropdown component', () => {
);
it.each`
start | emit
start | setting
${true} | ${false}
${false} | ${true}
`(
'when the file by file setting starts as $start, toggling the checkbox should emit an event set to $emit',
async ({ start, emit }) => {
'when the file by file setting starts as $start, toggling the checkbox should call setFileByFile with $setting',
async ({ start, setting }) => {
createComponent((store) => {
Object.assign(store.state.diffs, {
viewDiffsFileByFile: start,
@ -214,7 +211,9 @@ describe('Diff settings dropdown component', () => {
await vm.$nextTick();
expect(eventHub.$emit).toHaveBeenCalledWith(EVT_VIEW_FILE_BY_FILE, { setting: emit });
expect(actions.setFileByFile).toHaveBeenLastCalledWith(expect.anything(), {
fileByFile: setting,
});
},
);
});

View File

@ -5,32 +5,25 @@ import {
DIFF_VIEW_ALL_FILES,
} from '~/diffs/constants';
import { fileByFile } from '~/diffs/utils/preferences';
import { getParameterValues } from '~/lib/utils/url_utility';
jest.mock('~/lib/utils/url_utility');
describe('diffs preferences', () => {
describe('fileByFile', () => {
it.each`
result | preference | cookie | searchParam
${false} | ${false} | ${undefined} | ${undefined}
${true} | ${true} | ${undefined} | ${undefined}
${true} | ${false} | ${DIFF_VIEW_FILE_BY_FILE} | ${undefined}
${false} | ${true} | ${DIFF_VIEW_ALL_FILES} | ${undefined}
${true} | ${false} | ${undefined} | ${[DIFF_VIEW_FILE_BY_FILE]}
${false} | ${true} | ${undefined} | ${[DIFF_VIEW_ALL_FILES]}
${true} | ${false} | ${DIFF_VIEW_FILE_BY_FILE} | ${[DIFF_VIEW_FILE_BY_FILE]}
${true} | ${true} | ${DIFF_VIEW_ALL_FILES} | ${[DIFF_VIEW_FILE_BY_FILE]}
${false} | ${false} | ${DIFF_VIEW_ALL_FILES} | ${[DIFF_VIEW_ALL_FILES]}
${false} | ${true} | ${DIFF_VIEW_FILE_BY_FILE} | ${[DIFF_VIEW_ALL_FILES]}
`(
'should return $result when { preference: $preference, cookie: $cookie, search: $searchParam }',
({ result, preference, cookie, searchParam }) => {
if (cookie) {
Cookies.set(DIFF_FILE_BY_FILE_COOKIE_NAME, cookie);
}
afterEach(() => {
Cookies.remove(DIFF_FILE_BY_FILE_COOKIE_NAME);
});
getParameterValues.mockReturnValue(searchParam);
it.each`
result | preference | cookie
${true} | ${false} | ${DIFF_VIEW_FILE_BY_FILE}
${false} | ${true} | ${DIFF_VIEW_ALL_FILES}
${true} | ${false} | ${DIFF_VIEW_FILE_BY_FILE}
${false} | ${true} | ${DIFF_VIEW_ALL_FILES}
${false} | ${false} | ${DIFF_VIEW_ALL_FILES}
${true} | ${true} | ${DIFF_VIEW_FILE_BY_FILE}
`(
'should return $result when { preference: $preference, cookie: $cookie }',
({ result, preference, cookie }) => {
Cookies.set(DIFF_FILE_BY_FILE_COOKIE_NAME, cookie);
expect(fileByFile(preference)).toBe(result);
},

View File

@ -75,23 +75,24 @@ describe('import table row', () => {
});
});
it('renders only namespaces if user cannot create new group', () => {
it('renders only no parent option if available namespaces list is empty', () => {
createComponent({
canCreateGroup: false,
group: getFakeGroup(STATUSES.NONE),
availableNamespaces: [],
});
const dropdownData = findNamespaceDropdown().props().options.data;
const noParentOption = dropdownData.find((o) => o.text === 'No parent');
const existingGroupOption = dropdownData.find((o) => o.text === 'Existing groups');
expect(noParentOption).toBeUndefined();
expect(dropdownData).toHaveLength(availableNamespacesFixture.length);
expect(noParentOption.id).toBe('');
expect(existingGroupOption).toBeUndefined();
});
it('renders no parent option in available namespaces if user can create new group', () => {
it('renders both no parent option and available namespaces list when available namespaces list is not empty', () => {
createComponent({
canCreateGroup: true,
group: getFakeGroup(STATUSES.NONE),
availableNamespaces: availableNamespacesFixture,
});
const dropdownData = findNamespaceDropdown().props().options.data;

View File

@ -27,7 +27,7 @@ describe('import table', () => {
];
const FAKE_PAGE_INFO = { page: 1, perPage: 20, total: 40, totalPages: 2 };
const createComponent = ({ bulkImportSourceGroups, canCreateGroup }) => {
const createComponent = ({ bulkImportSourceGroups }) => {
apolloProvider = createMockApollo([], {
Query: {
availableNamespaces: () => availableNamespacesFixture,
@ -43,7 +43,6 @@ describe('import table', () => {
wrapper = shallowMount(ImportTable, {
propsData: {
sourceUrl: 'https://demo.host',
canCreateGroup,
},
stubs: {
GlSprintf,
@ -100,25 +99,6 @@ describe('import table', () => {
expect(wrapper.findAll(ImportTableRow)).toHaveLength(FAKE_GROUPS.length);
});
it.each`
canCreateGroup | userPermissions
${true} | ${'user can create new top-level group'}
${false} | ${'user cannot create new top-level group'}
`('correctly passes canCreateGroup to rows when $userPermissions', async ({ canCreateGroup }) => {
createComponent({
bulkImportSourceGroups: () => ({
nodes: FAKE_GROUPS,
pageInfo: FAKE_PAGE_INFO,
}),
canCreateGroup,
});
await waitForPromises();
wrapper.findAllComponents(ImportTableRow).wrappers.forEach((w) => {
expect(w.props().canCreateGroup).toBe(canCreateGroup);
});
});
it('does not render status string when result list is empty', async () => {
createComponent({
bulkImportSourceGroups: jest.fn().mockResolvedValue({

View File

@ -35,15 +35,19 @@ describe('Bulk import resolvers', () => {
let axiosMockAdapter;
let client;
beforeEach(() => {
axiosMockAdapter = new MockAdapter(axios);
client = createMockClient({
const createClient = (extraResolverArgs) => {
return createMockClient({
cache: new InMemoryCache({
fragmentMatcher: { match: () => true },
addTypename: false,
}),
resolvers: createResolvers({ endpoints: FAKE_ENDPOINTS }),
resolvers: createResolvers({ endpoints: FAKE_ENDPOINTS, ...extraResolverArgs }),
});
};
beforeEach(() => {
axiosMockAdapter = new MockAdapter(axios);
client = createClient();
});
afterEach(() => {
@ -82,6 +86,44 @@ describe('Bulk import resolvers', () => {
.reply(httpStatus.OK, availableNamespacesFixture);
});
it('respects cached import state when provided by group manager', async () => {
const FAKE_STATUS = 'DEMO_STATUS';
const FAKE_IMPORT_TARGET = {};
const TARGET_INDEX = 0;
const clientWithMockedManager = createClient({
GroupsManager: jest.fn().mockImplementation(() => ({
getImportStateFromStorageByGroupId(groupId) {
if (groupId === statusEndpointFixture.importable_data[TARGET_INDEX].id) {
return {
status: FAKE_STATUS,
importTarget: FAKE_IMPORT_TARGET,
};
}
return null;
},
})),
});
const clientResponse = await clientWithMockedManager.query({
query: bulkImportSourceGroupsQuery,
});
const clientResults = clientResponse.data.bulkImportSourceGroups.nodes;
expect(clientResults[TARGET_INDEX].import_target).toBe(FAKE_IMPORT_TARGET);
expect(clientResults[TARGET_INDEX].status).toBe(FAKE_STATUS);
});
it('populates each result instance with empty import_target when there are no available namespaces', async () => {
axiosMockAdapter.onGet(FAKE_ENDPOINTS.availableNamespaces).reply(httpStatus.OK, []);
const response = await client.query({ query: bulkImportSourceGroupsQuery });
results = response.data.bulkImportSourceGroups.nodes;
expect(results.every((r) => r.import_target.target_namespace === '')).toBe(true);
});
describe('when called', () => {
beforeEach(async () => {
const response = await client.query({ query: bulkImportSourceGroupsQuery });
@ -220,14 +262,14 @@ describe('Bulk import resolvers', () => {
expect(intermediateResults[0].status).toBe(STATUSES.SCHEDULING);
});
it('sets group status to STARTED when request completes', async () => {
it('sets import status to CREATED when request completes', async () => {
axiosMockAdapter.onPost(FAKE_ENDPOINTS.createBulkImport).reply(httpStatus.OK, { id: 1 });
await client.mutate({
mutation: importGroupMutation,
variables: { sourceGroupId: GROUP_ID },
});
expect(results[0].status).toBe(STATUSES.STARTED);
expect(results[0].status).toBe(STATUSES.CREATED);
});
it('resets status to NONE if request fails', async () => {

View File

@ -1,11 +1,17 @@
import { defaultDataIdFromObject } from 'apollo-cache-inmemory';
import { clientTypenames } from '~/import_entities/import_groups/graphql/client_factory';
import ImportSourceGroupFragment from '~/import_entities/import_groups/graphql/fragments/bulk_import_source_group_item.fragment.graphql';
import { SourceGroupsManager } from '~/import_entities/import_groups/graphql/services/source_groups_manager';
import {
KEY,
SourceGroupsManager,
} from '~/import_entities/import_groups/graphql/services/source_groups_manager';
const FAKE_SOURCE_URL = 'http://demo.host';
describe('SourceGroupsManager', () => {
let manager;
let client;
let storage;
const getFakeGroup = () => ({
__typename: clientTypenames.BulkImportSourceGroup,
@ -17,8 +23,53 @@ describe('SourceGroupsManager', () => {
readFragment: jest.fn(),
writeFragment: jest.fn(),
};
storage = {
getItem: jest.fn(),
setItem: jest.fn(),
};
manager = new SourceGroupsManager({ client });
manager = new SourceGroupsManager({ client, storage, sourceUrl: FAKE_SOURCE_URL });
});
describe('storage management', () => {
const IMPORT_ID = 1;
const IMPORT_TARGET = { destination_name: 'demo', destination_namespace: 'foo' };
const STATUS = 'FAKE_STATUS';
const FAKE_GROUP = { id: 1, import_target: IMPORT_TARGET, status: STATUS };
it('loads state from storage on creation', () => {
expect(storage.getItem).toHaveBeenCalledWith(KEY);
});
it('saves to storage when import is starting', () => {
manager.startImport({
importId: IMPORT_ID,
group: FAKE_GROUP,
});
const storedObject = JSON.parse(storage.setItem.mock.calls[0][1]);
expect(Object.values(storedObject)[0]).toStrictEqual({
id: FAKE_GROUP.id,
importTarget: IMPORT_TARGET,
status: STATUS,
});
});
it('saves to storage when import status is updated', () => {
const CHANGED_STATUS = 'changed';
manager.startImport({
importId: IMPORT_ID,
group: FAKE_GROUP,
});
manager.setImportStatusByImportId(IMPORT_ID, CHANGED_STATUS);
const storedObject = JSON.parse(storage.setItem.mock.calls[1][1]);
expect(Object.values(storedObject)[0]).toStrictEqual({
id: FAKE_GROUP.id,
importTarget: IMPORT_TARGET,
status: CHANGED_STATUS,
});
});
});
it('finds item by group id', () => {

View File

@ -2,7 +2,6 @@ import MockAdapter from 'axios-mock-adapter';
import Visibility from 'visibilityjs';
import createFlash from '~/flash';
import { STATUSES } from '~/import_entities/constants';
import { SourceGroupsManager } from '~/import_entities/import_groups/graphql/services/source_groups_manager';
import { StatusPoller } from '~/import_entities/import_groups/graphql/services/status_poller';
import axios from '~/lib/utils/axios_utils';
import Poll from '~/lib/utils/poll';
@ -18,24 +17,21 @@ jest.mock('~/import_entities/import_groups/graphql/services/source_groups_manage
}));
const FAKE_POLL_PATH = '/fake/poll/path';
const CLIENT_MOCK = {};
describe('Bulk import status poller', () => {
let poller;
let mockAdapter;
let groupManager;
const getPollHistory = () => mockAdapter.history.get.filter((x) => x.url === FAKE_POLL_PATH);
beforeEach(() => {
mockAdapter = new MockAdapter(axios);
mockAdapter.onGet(FAKE_POLL_PATH).reply(200, {});
poller = new StatusPoller({ client: CLIENT_MOCK, pollPath: FAKE_POLL_PATH });
});
it('creates source group manager with proper client', () => {
expect(SourceGroupsManager.mock.calls).toHaveLength(1);
const [[{ client }]] = SourceGroupsManager.mock.calls;
expect(client).toBe(CLIENT_MOCK);
groupManager = {
setImportStatusByImportId: jest.fn(),
};
poller = new StatusPoller({ groupManager, pollPath: FAKE_POLL_PATH });
});
it('creates poller with proper config', () => {
@ -100,14 +96,9 @@ describe('Bulk import status poller', () => {
it('when success response arrives updates relevant group status', () => {
const FAKE_ID = 5;
const [[pollConfig]] = Poll.mock.calls;
const [managerInstance] = SourceGroupsManager.mock.instances;
managerInstance.findByImportId.mockReturnValue({ id: FAKE_ID });
pollConfig.successCallback({ data: [{ id: FAKE_ID, status_name: STATUSES.FINISHED }] });
expect(managerInstance.setImportStatus).toHaveBeenCalledWith(
expect.objectContaining({ id: FAKE_ID }),
STATUSES.FINISHED,
);
expect(groupManager.setImportStatusByImportId).toHaveBeenCalledWith(FAKE_ID, STATUSES.FINISHED);
});
});

View File

@ -2,27 +2,28 @@ import { mount } from '@vue/test-utils';
import { UPGRADE_CTA } from '~/security_configuration/components/features_constants';
import Upgrade from '~/security_configuration/components/upgrade.vue';
const TEST_URL = 'http://www.example.test';
let wrapper;
const createComponent = () => {
wrapper = mount(Upgrade, {});
const createComponent = (componentData = {}) => {
wrapper = mount(Upgrade, componentData);
};
beforeEach(() => {
createComponent();
});
afterEach(() => {
wrapper.destroy();
});
describe('Upgrade component', () => {
beforeEach(() => {
createComponent({ provide: { upgradePath: TEST_URL } });
});
it('renders correct text in link', () => {
expect(wrapper.text()).toMatchInterpolatedText(UPGRADE_CTA);
});
it('renders link with correct attributes', () => {
it('renders link with correct default attributes', () => {
expect(wrapper.find('a').attributes()).toMatchObject({
href: 'https://about.gitlab.com/pricing/',
href: TEST_URL,
target: '_blank',
});
});

View File

@ -78,6 +78,34 @@ describe('Tracking', () => {
navigator.msDoNotTrack = undefined;
});
describe('builds the standard context', () => {
let standardContext;
beforeAll(async () => {
window.gl = window.gl || {};
window.gl.snowplowStandardContext = {
schema: 'iglu:com.gitlab/gitlab_standard',
data: {
environment: 'testing',
source: 'unknown',
},
};
jest.resetModules();
({ STANDARD_CONTEXT: standardContext } = await import('~/tracking'));
});
it('uses server data', () => {
expect(standardContext.schema).toBe('iglu:com.gitlab/gitlab_standard');
expect(standardContext.data.environment).toBe('testing');
});
it('overrides schema source', () => {
expect(standardContext.data.source).toBe('gitlab-javascript');
});
});
it('tracks to snowplow (our current tracking system)', () => {
Tracking.event('_category_', '_eventName_', { label: '_label_' });

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::Security::ConfigurationHelper do
let(:current_user) { create(:user) }
describe 'security_upgrade_path' do
subject { security_upgrade_path }
it { is_expected.to eq('https://about.gitlab.com/pricing/') }
end
end

View File

@ -40,41 +40,39 @@ RSpec.describe Ci::Runner do
context 'runner_type validations' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project) }
let(:group_runner) { create(:ci_runner, :group, groups: [group]) }
let(:project_runner) { create(:ci_runner, :project, projects: [project]) }
let(:instance_runner) { create(:ci_runner, :instance) }
it 'disallows assigning group to project_type runner' do
project_runner.groups << build(:group)
project_runner = build(:ci_runner, :project, groups: [group])
expect(project_runner).not_to be_valid
expect(project_runner.errors.full_messages).to include('Runner cannot have groups assigned')
end
it 'disallows assigning group to instance_type runner' do
instance_runner.groups << build(:group)
instance_runner = build(:ci_runner, :instance, groups: [group])
expect(instance_runner).not_to be_valid
expect(instance_runner.errors.full_messages).to include('Runner cannot have groups assigned')
end
it 'disallows assigning project to group_type runner' do
group_runner.projects << build(:project)
group_runner = build(:ci_runner, :group, projects: [project])
expect(group_runner).not_to be_valid
expect(group_runner.errors.full_messages).to include('Runner cannot have projects assigned')
end
it 'disallows assigning project to instance_type runner' do
instance_runner.projects << build(:project)
instance_runner = build(:ci_runner, :instance, projects: [project])
expect(instance_runner).not_to be_valid
expect(instance_runner.errors.full_messages).to include('Runner cannot have projects assigned')
end
it 'fails to save a group assigned to a project runner even if the runner is already saved' do
group.runners << project_runner
expect { group.save! }
project_runner = create(:ci_runner, :project, projects: [project])
expect { create(:group, runners: [project_runner]) }
.to raise_error(ActiveRecord::RecordInvalid)
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe ZoomUrlValidator do
RSpec.describe Gitlab::Utils::ZoomUrlValidator do
let(:zoom_meeting) { build(:zoom_meeting) }
describe 'validations' do

View File

@ -3,9 +3,39 @@
require 'spec_helper'
RSpec.describe Namespaces::InProductMarketingEmailsWorker, '#perform' do
context 'when the experiment is inactive' do
context 'when the application setting is enabled' do
before do
stub_experiment(in_product_marketing_emails: false)
stub_application_setting(in_product_marketing_emails_enabled: true)
end
context 'when the experiment is inactive' do
before do
stub_experiment(in_product_marketing_emails: false)
end
it 'does not execute the in product marketing emails service' do
expect(Namespaces::InProductMarketingEmailsService).not_to receive(:send_for_all_tracks_and_intervals)
subject.perform
end
end
context 'when the experiment is active' do
before do
stub_experiment(in_product_marketing_emails: true)
end
it 'calls the send_for_all_tracks_and_intervals method on the in product marketing emails service' do
expect(Namespaces::InProductMarketingEmailsService).to receive(:send_for_all_tracks_and_intervals)
subject.perform
end
end
end
context 'when the application setting is disabled' do
before do
stub_application_setting(in_product_marketing_emails_enabled: false)
end
it 'does not execute the in product marketing emails service' do
@ -14,16 +44,4 @@ RSpec.describe Namespaces::InProductMarketingEmailsWorker, '#perform' do
subject.perform
end
end
context 'when the experiment is active' do
before do
stub_experiment(in_product_marketing_emails: true)
end
it 'calls the send_for_all_tracks_and_intervals method on the in product marketing emails service' do
expect(Namespaces::InProductMarketingEmailsService).to receive(:send_for_all_tracks_and_intervals)
subject.perform
end
end
end