Add latest changes from gitlab-org/gitlab@master

parent 846a84f2e9
commit 49089d4fb1

@@ -1 +1 @@
-1.20.0
+1.21.0

@@ -9,3 +9,5 @@ export const FILTER_TYPE = {
  none: 'none',
  any: 'any',
};
+
+export const MAX_HISTORY_SIZE = 5;

@@ -1,4 +1,6 @@
-import { uniq } from 'lodash';
+import { uniqWith, isEqual } from 'lodash';

+import { MAX_HISTORY_SIZE } from '../constants';
+
class RecentSearchesStore {
  constructor(initialState = {}, allowedKeys) {

@@ -17,8 +19,12 @@ class RecentSearchesStore {
  }

  setRecentSearches(searches = []) {
-    const trimmedSearches = searches.map(search => search.trim());
-    this.state.recentSearches = uniq(trimmedSearches).slice(0, 5);
+    const trimmedSearches = searches.map(search =>
+      typeof search === 'string' ? search.trim() : search,
+    );
+
+    // Do object equality check to remove duplicates.
+    this.state.recentSearches = uniqWith(trimmedSearches, isEqual).slice(0, MAX_HISTORY_SIZE);
    return this.state.recentSearches;
  }
}

@@ -79,11 +79,13 @@ export const getFileData = (
  return service
    .getFileData(url)
    .then(({ data }) => {
-      setPageTitleForFile(state, file);
-
      if (data) commit(types.SET_FILE_DATA, { data, file });
      if (openFile) commit(types.TOGGLE_FILE_OPEN, path);
-      if (makeFileActive) dispatch('setFileActive', path);
+
+      if (makeFileActive) {
+        setPageTitleForFile(state, file);
+        dispatch('setFileActive', path);
+      }
    })
    .catch(() => {
      dispatch('setErrorMessage', {

@@ -1,4 +1,5 @@
<script>
+import { get } from 'lodash';
import { mapActions, mapState, mapGetters } from 'vuex';
import { GlCard, GlDeprecatedButton, GlLoadingIcon } from '@gitlab/ui';
import Tracking from '~/tracking';

@@ -31,7 +32,8 @@ export default {
      tracking: {
        label: 'docker_container_retention_and_expiration_policies',
      },
-      formIsValid: true,
+      fieldsAreValid: true,
+      apiErrors: null,
    };
  },
  computed: {

@@ -39,7 +41,7 @@ export default {
    ...mapGetters({ isEdited: 'getIsEdited' }),
    ...mapComputed([{ key: 'settings', getter: 'getSettings' }], 'updateSettings'),
    isSubmitButtonDisabled() {
-      return !this.formIsValid || this.isLoading;
+      return !this.fieldsAreValid || this.isLoading;
    },
    isCancelButtonDisabled() {
      return !this.isEdited || this.isLoading;

@@ -49,13 +51,35 @@ export default {
    ...mapActions(['resetSettings', 'saveSettings']),
    reset() {
      this.track('reset_form');
+      this.apiErrors = null;
      this.resetSettings();
    },
+    setApiErrors(response) {
+      const messages = get(response, 'data.message', []);
+
+      this.apiErrors = Object.keys(messages).reduce((acc, curr) => {
+        if (curr.startsWith('container_expiration_policy.')) {
+          const key = curr.replace('container_expiration_policy.', '');
+          acc[key] = get(messages, [curr, 0], '');
+        }
+        return acc;
+      }, {});
+    },
    submit() {
      this.track('submit_form');
+      this.apiErrors = null;
      this.saveSettings()
        .then(() => this.$toast.show(UPDATE_SETTINGS_SUCCESS_MESSAGE, { type: 'success' }))
-        .catch(() => this.$toast.show(UPDATE_SETTINGS_ERROR_MESSAGE, { type: 'error' }));
+        .catch(({ response }) => {
+          this.setApiErrors(response);
+          this.$toast.show(UPDATE_SETTINGS_ERROR_MESSAGE, { type: 'error' });
+        });
    },
+    onModelChange(changePayload) {
+      this.settings = changePayload.newValue;
+      if (this.apiErrors) {
+        this.apiErrors[changePayload.modified] = undefined;
+      }
+    },
  },
};

@@ -69,11 +93,13 @@ export default {
    </template>
    <template #default>
      <expiration-policy-fields
-        v-model="settings"
+        :value="settings"
        :form-options="formOptions"
        :is-loading="isLoading"
-        @validated="formIsValid = true"
-        @invalidated="formIsValid = false"
+        :api-errors="apiErrors"
+        @validated="fieldsAreValid = true"
+        @invalidated="fieldsAreValid = false"
+        @input="onModelChange"
      />
    </template>
    <template #footer>

@@ -34,6 +34,11 @@ export default {
      required: false,
      default: () => ({}),
    },
+    apiErrors: {
+      type: Object,
+      required: false,
+      default: null,
+    },
    isLoading: {
      type: Boolean,
      required: false,

@@ -56,9 +61,8 @@ export default {
      },
    },
  i18n: {
-    textAreaInvalidFeedback: TEXT_AREA_INVALID_FEEDBACK,
-    enableToggleLabel: ENABLE_TOGGLE_LABEL,
-    enableToggleDescription: ENABLE_TOGGLE_DESCRIPTION,
+    ENABLE_TOGGLE_LABEL,
+    ENABLE_TOGGLE_DESCRIPTION,
  },
  selectList: [
    {

@@ -86,7 +90,6 @@ export default {
      label: NAME_REGEX_LABEL,
      model: 'name_regex',
      placeholder: NAME_REGEX_PLACEHOLDER,
-      stateVariable: 'nameRegexState',
      description: NAME_REGEX_DESCRIPTION,
    },
    {

@@ -94,7 +97,6 @@ export default {
      label: NAME_REGEX_KEEP_LABEL,
      model: 'name_regex_keep',
      placeholder: NAME_REGEX_KEEP_PLACEHOLDER,
-      stateVariable: 'nameKeepRegexState',
      description: NAME_REGEX_KEEP_DESCRIPTION,
    },
  ],

@@ -111,16 +113,34 @@ export default {
    policyEnabledText() {
      return this.enabled ? ENABLED_TEXT : DISABLED_TEXT;
    },
-    textAreaState() {
+    textAreaValidation() {
+      const nameRegexErrors =
+        this.apiErrors?.name_regex || this.validateRegexLength(this.name_regex);
+      const nameKeepRegexErrors =
+        this.apiErrors?.name_regex_keep || this.validateRegexLength(this.name_regex_keep);
+
      return {
-        nameRegexState: this.validateNameRegex(this.name_regex),
-        nameKeepRegexState: this.validateNameRegex(this.name_regex_keep),
+        /*
+         * The state has this form:
+         * null: gray border, no message
+         * true: green border, no message (because none is configured)
+         * false: red border, error message
+         * So in this function we keep null if there are no messages; otherwise we 'invert' the error message
+         */
+        name_regex: {
+          state: nameRegexErrors === null ? null : !nameRegexErrors,
+          message: nameRegexErrors,
+        },
+        name_regex_keep: {
+          state: nameKeepRegexErrors === null ? null : !nameKeepRegexErrors,
+          message: nameKeepRegexErrors,
+        },
      };
    },
    fieldsValidity() {
      return (
-        this.textAreaState.nameRegexState !== false &&
-        this.textAreaState.nameKeepRegexState !== false
+        this.textAreaValidation.name_regex.state !== false &&
+        this.textAreaValidation.name_regex_keep.state !== false
      );
    },
    isFormElementDisabled() {

@@ -140,8 +160,11 @@ export default {
    },
  },
  methods: {
-    validateNameRegex(value) {
-      return value ? value.length <= NAME_REGEX_LENGTH : null;
+    validateRegexLength(value) {
+      if (!value) {
+        return null;
+      }
+      return value.length <= NAME_REGEX_LENGTH ? '' : TEXT_AREA_INVALID_FEEDBACK;
    },
    idGenerator(id) {
      return `${id}_${this.uniqueId}`;

@@ -160,7 +183,7 @@ export default {
      :label-cols="labelCols"
      :label-align="labelAlign"
      :label-for="idGenerator('expiration-policy-toggle')"
-      :label="$options.i18n.enableToggleLabel"
+      :label="$options.i18n.ENABLE_TOGGLE_LABEL"
    >
      <div class="d-flex align-items-start">
        <gl-toggle

@@ -169,7 +192,7 @@ export default {
          :disabled="isLoading"
        />
        <span class="mb-2 ml-2 lh-2">
-          <gl-sprintf :message="$options.i18n.enableToggleDescription">
+          <gl-sprintf :message="$options.i18n.ENABLE_TOGGLE_DESCRIPTION">
            <template #toggleStatus>
              <strong>{{ policyEnabledText }}</strong>
            </template>

@@ -210,8 +233,8 @@ export default {
      :label-cols="labelCols"
      :label-align="labelAlign"
      :label-for="idGenerator(textarea.name)"
-      :state="textAreaState[textarea.stateVariable]"
-      :invalid-feedback="$options.i18n.textAreaInvalidFeedback"
+      :state="textAreaValidation[textarea.model].state"
+      :invalid-feedback="textAreaValidation[textarea.model].message"
    >
      <template #label>
        <gl-sprintf :message="textarea.label">

@@ -224,7 +247,7 @@ export default {
        :id="idGenerator(textarea.name)"
        :value="value[textarea.model]"
        :placeholder="textarea.placeholder"
-        :state="textAreaState[textarea.stateVariable]"
+        :state="textAreaValidation[textarea.model].state"
        :disabled="isFormElementDisabled"
        trim
        @input="updateModel($event, textarea.model)"

@@ -23,7 +23,7 @@ export const ENABLE_TOGGLE_DESCRIPTION = s__(
);

export const TEXT_AREA_INVALID_FEEDBACK = s__(
-  'ContainerRegistry|The value of this input should be less than 255 characters',
+  'ContainerRegistry|The value of this input should be less than 256 characters',
);

export const EXPIRATION_INTERVAL_LABEL = s__('ContainerRegistry|Expiration interval:');

@@ -11,7 +11,7 @@ export const mapComputedToEvent = (list, root) => {
      return this[root][e];
    },
    set(value) {
-      this.$emit('input', { ...this[root], [e]: value });
+      this.$emit('input', { newValue: { ...this[root], [e]: value }, modified: e });
    },
  };
});

@@ -98,6 +98,15 @@ export default {
        {},
      );
    },
+    tokenTitles() {
+      return this.tokens.reduce(
+        (tokenSymbols, token) => ({
+          ...tokenSymbols,
+          [token.type]: token.title,
+        }),
+        {},
+      );
+    },
    sortDirectionIcon() {
      return this.selectedSortDirection === SortDirection.ascending
        ? 'sort-lowest'

@@ -112,11 +121,10 @@ export default {
  watch: {
    /**
     * GlFilteredSearch currently doesn't emit any event when
-     * search field is cleared, but we still want our parent
-     * component to know that filters were cleared and do
-     * necessary data refetch, so this watcher is basically
-     * a dirty hack/workaround to identify if filter input
-     * was cleared. :(
+     * tokens are manually removed from search field so we'd
+     * never know when user actually clears all the tokens.
+     * This watcher listens for updates to `filterValue` on
+     * such instances. :(
     */
    filterValue(value) {
      const [firstVal] = value;

@@ -188,25 +196,16 @@ export default {
        : SortDirection.ascending;
      this.$emit('onSort', this.selectedSortOption.sortDirection[this.selectedSortDirection]);
    },
+    handleClearHistory() {
+      const resultantSearches = this.recentSearchesStore.setRecentSearches([]);
+      this.recentSearchesService.save(resultantSearches);
+    },
    handleFilterSubmit(filters) {
      if (this.recentSearchesStorageKey) {
        this.recentSearchesPromise
          .then(() => {
            if (filters.length) {
-              const searchTokens = filters.map(filter => {
-                // check filter was plain text search
-                if (typeof filter === 'string') {
-                  return filter;
-                }
-                // filter was a token.
-                return `${filter.type}:${filter.value.operator}${this.tokenSymbols[filter.type]}${
-                  filter.value.data
-                }`;
-              });
-
-              const resultantSearches = this.recentSearchesStore.addRecentSearch(
-                searchTokens.join(' '),
-              );
+              const resultantSearches = this.recentSearchesStore.addRecentSearch(filters);
              this.recentSearchesService.save(resultantSearches);
            }
          })

@@ -228,8 +227,23 @@ export default {
      :available-tokens="tokens"
      :history-items="getRecentSearches()"
      class="flex-grow-1"
+      @history-item-selected="$emit('onFilter', filters)"
+      @clear-history="handleClearHistory"
      @submit="handleFilterSubmit"
-    />
+      @clear="$emit('onFilter', [])"
+    >
+      <template #history-item="{ historyItem }">
+        <template v-for="token in historyItem">
+          <span v-if="typeof token === 'string'" :key="token" class="gl-px-1">"{{ token }}"</span>
+          <span v-else :key="`${token.type}-${token.value.data}`" class="gl-px-1">
+            <span v-if="tokenTitles[token.type]"
+              >{{ tokenTitles[token.type] }} :{{ token.value.operator }}</span
+            >
+            <strong>{{ tokenSymbols[token.type] }}{{ token.value.data }}</strong>
+          </span>
+        </template>
+      </template>
+    </gl-filtered-search>
    <gl-button-group class="sort-dropdown-container d-flex">
      <gl-dropdown :text="selectedSortOption.title" :right="true" class="w-100">
        <gl-dropdown-item

@@ -46,6 +46,16 @@ export default {
      return this.authors.find(author => author.username.toLowerCase() === this.currentValue);
    },
  },
+  watch: {
+    active: {
+      immediate: true,
+      handler(newValue) {
+        if (!newValue && !this.authors.length) {
+          this.fetchAuthorBySearchTerm(this.value.data);
+        }
+      },
+    },
+  },
  methods: {
    fetchAuthorBySearchTerm(searchTerm) {
      const fetchPromise = this.config.fetchPath

@@ -89,9 +99,9 @@ export default {
      <span>{{ activeAuthor ? activeAuthor.name : inputValue }}</span>
    </template>
    <template #suggestions>
-      <gl-filtered-search-suggestion :value="$options.anyAuthor">{{
-        __('Any')
-      }}</gl-filtered-search-suggestion>
+      <gl-filtered-search-suggestion :value="$options.anyAuthor">
+        {{ __('Any') }}
+      </gl-filtered-search-suggestion>
      <gl-dropdown-divider />
      <gl-loading-icon v-if="loading" />
      <template v-else>

@@ -3,7 +3,9 @@ import renderKramdownList from './renderers/render_kramdown_list';
import renderKramdownText from './renderers/render_kramdown_text';
import renderIdentifierParagraph from './renderers/render_identifier_paragraph';
import renderEmbeddedRubyText from './renderers/render_embedded_ruby_text';
+import renderFontAwesomeHtmlInline from './renderers/render_font_awesome_html_inline';

+const htmlInlineRenderers = [renderFontAwesomeHtmlInline];
const htmlRenderers = [renderHtml];
const listRenderers = [renderKramdownList];
const paragraphRenderers = [renderIdentifierParagraph];

@@ -26,7 +28,7 @@ const buildCustomRendererFunctions = (customRenderers, defaults) => {
};

const buildCustomHTMLRenderer = (
-  customRenderers = { htmlBlock: [], list: [], paragraph: [], text: [] },
+  customRenderers = { htmlBlock: [], htmlInline: [], list: [], paragraph: [], text: [] },
) => {
  const defaults = {
    htmlBlock(node, context) {

@@ -34,6 +36,11 @@ const buildCustomHTMLRenderer = (

      return executeRenderer(allHtmlRenderers, node, context);
    },
+    htmlInline(node, context) {
+      const allHtmlInlineRenderers = [...customRenderers.htmlInline, ...htmlInlineRenderers];
+
+      return executeRenderer(allHtmlInlineRenderers, node, context);
+    },
    list(node, context) {
      const allListRenderers = [...customRenderers.list, ...listRenderers];

@@ -2,9 +2,14 @@ const buildToken = (type, tagName, props) => {
  return { type, tagName, ...props };
};

-export const buildUneditableOpenTokens = token => {
+const TAG_TYPES = {
+  block: 'div',
+  inline: 'span',
+};
+
+export const buildUneditableOpenTokens = (token, type = TAG_TYPES.block) => {
  return [
-    buildToken('openTag', 'div', {
+    buildToken('openTag', type, {
      attributes: { contenteditable: false },
      classNames: [
        'gl-px-4 gl-py-2 gl-opacity-5 gl-bg-gray-100 gl-user-select-none gl-cursor-not-allowed',

@@ -14,10 +19,17 @@ export const buildUneditableOpenTokens = token => {
  ];
};

-export const buildUneditableCloseToken = () => buildToken('closeTag', 'div');
+export const buildUneditableCloseToken = (type = TAG_TYPES.block) => buildToken('closeTag', type);

-export const buildUneditableCloseTokens = token => {
-  return [token, buildToken('closeTag', 'div')];
+export const buildUneditableCloseTokens = (token, type = TAG_TYPES.block) => {
+  return [token, buildUneditableCloseToken(type)];
};

+export const buildUneditableInlineTokens = token => {
+  return [
+    ...buildUneditableOpenTokens(token, TAG_TYPES.inline),
+    buildUneditableCloseToken(TAG_TYPES.inline),
+  ];
+};
+
export const buildUneditableTokens = token => {

@@ -0,0 +1,11 @@
+import { buildUneditableInlineTokens } from './build_uneditable_token';
+
+const fontAwesomeRegexOpen = /<i class="fa.+>/;
+
+const canRender = ({ literal }) => {
+  return fontAwesomeRegexOpen.test(literal);
+};
+
+const render = (_, { origin }) => buildUneditableInlineTokens(origin());
+
+export default { canRender, render };

@@ -22,6 +22,7 @@ module ServiceParams
      :comment_on_event_enabled,
      :comment_detail,
      :confidential_issues_events,
+      :confluence_url,
      :default_irc_uri,
      :device,
      :disable_diffs,

@@ -61,7 +61,7 @@ class Projects::ServicesController < Projects::ApplicationController
      return { error: true, message: _('Validations failed.'), service_response: @service.errors.full_messages.join(','), test_failed: false }
    end

-    result = Integrations::Test::ProjectService.new(@service, current_user, params[:event]).execute
+    result = ::Integrations::Test::ProjectService.new(@service, current_user, params[:event]).execute

    unless result[:success]
      return { error: true, message: _('Test failed.'), service_response: result[:message].to_s, test_failed: true }

@@ -28,10 +28,12 @@ module IconsHelper
  end

  def sprite_icon_path
-    # SVG Sprites currently don't work across domains, so in the case of a CDN
-    # we have to set the current path deliberately to prevent addition of asset_host
-    sprite_base_url = Gitlab.config.gitlab.url if ActionController::Base.asset_host
-    ActionController::Base.helpers.image_path('icons.svg', host: sprite_base_url)
+    @sprite_icon_path ||= begin
+      # SVG Sprites currently don't work across domains, so in the case of a CDN
+      # we have to set the current path deliberately to prevent addition of asset_host
+      sprite_base_url = Gitlab.config.gitlab.url if ActionController::Base.asset_host
+      ActionController::Base.helpers.image_path('icons.svg', host: sprite_base_url)
+    end
  end

  def sprite_file_icons_path

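The `IconsHelper` hunk above caches the computed sprite path in an instance variable, so a page that renders hundreds of icons (a commit list, for example) resolves the path once per request instead of once per icon; this is the change the "Avoid N+1 calls for image_path" changelog entry below refers to. A minimal sketch of the same `||= begin ... end` pattern, using a hypothetical helper:

```ruby
# Sketch of instance-variable memoization (hypothetical helper, not GitLab code).
module AvatarPathHelper
  def avatar_base_path
    # `||=` runs the block on the first call and caches the result;
    # later calls in the same request return the cached string.
    @avatar_base_path ||= begin
      host = cdn_configured? ? nil : default_host # assume both helpers exist
      image_path('avatars.svg', host: host)       # the expensive call
    end
  end
end
```

One caveat of `||=`: it re-runs the block whenever the cached value is `nil` or `false`, which is harmless here because `image_path` always returns a string.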
@@ -400,7 +400,7 @@ module ProjectsHelper
    nav_tabs = [:home]

    unless project.empty_repo?
-      nav_tabs << [:files, :commits, :network, :graphs, :forks] if can?(current_user, :download_code, project)
+      nav_tabs += [:files, :commits, :network, :graphs, :forks] if can?(current_user, :download_code, project)
      nav_tabs << :releases if can?(current_user, :read_release, project)
    end

@@ -421,30 +421,30 @@ module ProjectsHelper
      nav_tabs << :operations
    end

-    if can?(current_user, :read_cycle_analytics, project)
-      nav_tabs << :cycle_analytics
-    end
-
    tab_ability_map.each do |tab, ability|
      if can?(current_user, ability, project)
        nav_tabs << tab
      end
    end

-    nav_tabs << external_nav_tabs(project)
+    apply_external_nav_tabs(nav_tabs, project)

-    nav_tabs.flatten
+    nav_tabs
  end

-  def external_nav_tabs(project)
-    [].tap do |tabs|
-      tabs << :external_issue_tracker if project.external_issue_tracker
-      tabs << :external_wiki if project.external_wiki
+  def apply_external_nav_tabs(nav_tabs, project)
+    nav_tabs << :external_issue_tracker if project.external_issue_tracker
+    nav_tabs << :external_wiki if project.external_wiki
+
+    if project.has_confluence?
+      nav_tabs.delete(:wiki)
+      nav_tabs << :confluence
    end
  end

  def tab_ability_map
    {
+      cycle_analytics: :read_cycle_analytics,
      environments: :read_environment,
      metrics_dashboards: :metrics_dashboard,
      milestones: :read_milestone,

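The `ProjectsHelper` hunk swaps `nav_tabs <<` for `nav_tabs +=` when appending several tabs at once, which is why the later `nav_tabs.flatten` call could be dropped. The difference between the two operators:

```ruby
tabs = [:home]
tabs << [:files, :commits]  # pushes the array as a single nested element
tabs                        # => [:home, [:files, :commits]]

tabs = [:home]
tabs += [:files, :commits]  # concatenates element by element
tabs                        # => [:home, :files, :commits]
```

`apply_external_nav_tabs` follows the same idea: it mutates the caller's array in place instead of returning a nested array that needs flattening.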
@@ -7,10 +7,13 @@ module Ci
    include UpdateProjectStatistics
    include UsageStatistics
    include Sortable
+    include IgnorableColumns
    extend Gitlab::Ci::Model

    NotSupportedAdapterError = Class.new(StandardError)

+    ignore_columns :locked, remove_after: '2020-07-22', remove_with: '13.4'
+
    TEST_REPORT_FILE_TYPES = %w[junit].freeze
    COVERAGE_REPORT_FILE_TYPES = %w[cobertura].freeze
    ACCESSIBILITY_REPORT_FILE_TYPES = %w[accessibility].freeze

@@ -108,10 +111,6 @@ module Ci

    PLAN_LIMIT_PREFIX = 'ci_max_artifact_size_'

-    # This is required since we cannot add a default to the database
-    # https://gitlab.com/gitlab-org/gitlab/-/issues/215418
-    attribute :locked, :boolean, default: false
-
    belongs_to :project
    belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id

@@ -130,7 +129,6 @@ module Ci
    scope :with_files_stored_locally, -> { where(file_store: ::JobArtifactUploader::Store::LOCAL) }
    scope :with_files_stored_remotely, -> { where(file_store: ::JobArtifactUploader::Store::REMOTE) }
    scope :for_sha, ->(sha, project_id) { joins(job: :pipeline).where(ci_pipelines: { sha: sha, project_id: project_id }) }
-    scope :for_ref, ->(ref, project_id) { joins(job: :pipeline).where(ci_pipelines: { ref: ref, project_id: project_id }) }
    scope :for_job_name, ->(name) { joins(:job).where(ci_builds: { name: name }) }

    scope :with_file_types, -> (file_types) do

@@ -167,8 +165,7 @@ module Ci

    scope :expired, -> (limit) { where('expire_at < ?', Time.current).limit(limit) }
    scope :downloadable, -> { where(file_type: DOWNLOADABLE_TYPES) }
-    scope :locked, -> { where(locked: true) }
-    scope :unlocked, -> { where(locked: [false, nil]) }
+    scope :unlocked, -> { joins(job: :pipeline).merge(::Ci::Pipeline.unlocked).order(expire_at: :desc) }

    scope :scoped_project, -> { where('ci_job_artifacts.project_id = projects.id') }

@@ -113,6 +113,8 @@ module Ci
    # extend this `Hash` with new values.
    enum failure_reason: ::Ci::PipelineEnums.failure_reasons

+    enum locked: { unlocked: 0, artifacts_locked: 1 }
+
    state_machine :status, initial: :created do
      event :enqueue do
        transition [:created, :manual, :waiting_for_resource, :preparing, :skipped, :scheduled] => :pending

@@ -247,6 +249,14 @@ module Ci

      pipeline.run_after_commit { AutoDevops::DisableWorker.perform_async(pipeline.id) }
    end
+
+    after_transition any => [:success] do |pipeline|
+      next unless Gitlab::Ci::Features.keep_latest_artifacts_for_ref_enabled?(pipeline.project)
+
+      pipeline.run_after_commit do
+        Ci::PipelineSuccessUnlockArtifactsWorker.perform_async(pipeline.id)
+      end
+    end
  end

  scope :internal, -> { where(source: internal_sources) }

@@ -260,6 +270,12 @@ module Ci
    scope :for_id, -> (id) { where(id: id) }
    scope :for_iid, -> (iid) { where(iid: iid) }
    scope :created_after, -> (time) { where('ci_pipelines.created_at > ?', time) }
+    scope :created_before_id, -> (id) { where('ci_pipelines.id < ?', id) }
+    scope :before_pipeline, -> (pipeline) { created_before_id(pipeline.id).outside_pipeline_family(pipeline) }
+
+    scope :outside_pipeline_family, ->(pipeline) do
+      where.not(id: pipeline.same_family_pipeline_ids)
+    end

    scope :with_reports, -> (reports_scope) do
      where('EXISTS (?)', ::Ci::Build.latest.with_reports(reports_scope).where('ci_pipelines.id=ci_builds.commit_id').select(1))

@@ -801,12 +817,16 @@ module Ci
    end

    # If pipeline is a child of another pipeline, include the parent
-    # and the siblings, otherwise return only itself.
+    # and the siblings, otherwise return only itself and children.
    def same_family_pipeline_ids
      if (parent = parent_pipeline)
-        [parent.id] + parent.child_pipelines.pluck(:id)
+        Ci::Pipeline.where(id: parent.id)
+          .or(Ci::Pipeline.where(id: parent.child_pipelines.select(:id)))
+          .select(:id)
      else
-        [self.id]
+        Ci::Pipeline.where(id: self.id)
+          .or(Ci::Pipeline.where(id: self.child_pipelines.select(:id)))
+          .select(:id)
      end
    end

@@ -897,6 +917,10 @@ module Ci
      end
    end

+    def has_archive_artifacts?
+      complete? && builds.latest.with_existing_job_artifacts(Ci::JobArtifact.archive.or(Ci::JobArtifact.metadata)).exists?
+    end
+
    def has_exposed_artifacts?
      complete? && builds.latest.with_exposed_artifacts.exists?
    end

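Two patterns in the `Ci::Pipeline` hunks are worth spelling out. `enum locked:` maps the new integer column to named values, which generates the `unlocked`/`artifacts_locked` scopes and the `Ci::Pipeline.lockeds` name-to-integer hash used in raw SQL later in this commit. And `same_family_pipeline_ids` now returns an id-only relation rather than a plucked array, so callers such as `outside_pipeline_family` embed it as a SQL subquery. A hedged sketch of that difference, assuming a `ref` with pipelines:

```ruby
# pluck runs immediately and loads every id into Ruby memory:
ids = Ci::Pipeline.where(ci_ref_id: ref.id).pluck(:id)
Ci::Pipeline.where.not(id: ids)
# => ... WHERE id NOT IN (1, 2, 3, ...)  -- literal list, unbounded size

# select(:id) stays a lazy relation and is inlined as a subquery:
subquery = Ci::Pipeline.where(ci_ref_id: ref.id).select(:id)
Ci::Pipeline.where.not(id: subquery)
# => ... WHERE id NOT IN (SELECT id FROM ci_pipelines WHERE ci_ref_id = ?)
```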
@@ -1117,14 +1117,8 @@ class MergeRequest < ApplicationRecord
  end

  def source_branch_exists?
-    if Feature.enabled?(:memoize_source_branch_merge_request, project)
-      strong_memoize(:source_branch_exists) do
-        next false unless self.source_project
-
-        self.source_project.repository.branch_exists?(self.source_branch)
-      end
-    else
-      return false unless self.source_project
+    strong_memoize(:source_branch_exists) do
+      next false unless self.source_project

      self.source_project.repository.branch_exists?(self.source_branch)
    end

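The `MergeRequest` hunk can memoize unconditionally because `strong_memoize` (a GitLab utility) remembers falsey results, which plain `||=` cannot. A rough sketch of the distinction:

```ruby
# `||=` re-evaluates whenever the cached value is nil or false:
def branch_exists?
  @exists ||= repository.branch_exists?(branch) # recomputed on every call if false
end

# strong_memoize-style caching asks "was this computed?" instead:
def branch_exists?
  return @exists if defined?(@exists)

  @exists = repository.branch_exists?(branch)
end
```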
@@ -169,6 +169,7 @@ class Project < ApplicationRecord
  has_one :custom_issue_tracker_service
  has_one :bugzilla_service
  has_one :gitlab_issue_tracker_service, inverse_of: :project
+  has_one :confluence_service
  has_one :external_wiki_service
  has_one :prometheus_service, inverse_of: :project
  has_one :mock_ci_service

@@ -1286,6 +1287,11 @@ class Project < ApplicationRecord
    update_column(:has_external_wiki, services.external_wikis.any?) if Gitlab::Database.read_write?
  end

+  def has_confluence?
+    ConfluenceService.feature_enabled?(self) && # rubocop:disable CodeReuse/ServiceClass
+      project_setting.has_confluence?
+  end
+
  def find_or_initialize_services
    available_services_names = Service.available_services_names - disabled_services

@@ -1295,7 +1301,11 @@ class Project < ApplicationRecord
  end

  def disabled_services
-    []
+    strong_memoize(:disabled_services) do
+      [].tap do |disabled_services|
+        disabled_services.push(ConfluenceService.to_param) unless ConfluenceService.feature_enabled?(self) # rubocop:disable CodeReuse/ServiceClass
+      end
+    end
  end

  def find_or_initialize_service(name)

@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+class ConfluenceService < Service
+  include ActionView::Helpers::UrlHelper
+
+  VALID_SCHEME_MATCH = %r{\Ahttps?\Z}.freeze
+  VALID_HOST_MATCH = %r{\A.+\.atlassian\.net\Z}.freeze
+  VALID_PATH_MATCH = %r{\A/wiki(/|\Z)}.freeze
+
+  FEATURE_FLAG = :confluence_integration
+
+  prop_accessor :confluence_url
+
+  validates :confluence_url, presence: true, if: :activated?
+  validate :validate_confluence_url_is_cloud, if: :activated?
+
+  after_commit :cache_project_has_confluence
+
+  def self.feature_enabled?(actor)
+    ::Feature.enabled?(FEATURE_FLAG, actor)
+  end
+
+  def self.to_param
+    'confluence'
+  end
+
+  def self.supported_events
+    %w()
+  end
+
+  def title
+    s_('ConfluenceService|Confluence Workspace')
+  end
+
+  def description
+    s_('ConfluenceService|Connect a Confluence Cloud Workspace to your GitLab project')
+  end
+
+  def detailed_description
+    return unless project.wiki_enabled?
+
+    if activated?
+      wiki_url = project.wiki.web_url
+
+      s_(
+        'ConfluenceService|Your GitLab Wiki can be accessed here: %{wiki_link}. To re-enable your GitLab Wiki, disable this integration' %
+          { wiki_link: link_to(wiki_url, wiki_url) }
+      ).html_safe
+    else
+      s_('ConfluenceService|Enabling the Confluence Workspace will disable the default GitLab Wiki. Your GitLab Wiki data will be saved and you can always re-enable it later by turning off this integration').html_safe
+    end
+  end
+
+  def fields
+    [
+      {
+        type: 'text',
+        name: 'confluence_url',
+        title: 'Confluence Cloud Workspace URL',
+        placeholder: s_('ConfluenceService|The URL of the Confluence Workspace'),
+        required: true
+      }
+    ]
+  end
+
+  def can_test?
+    false
+  end
+
+  private
+
+  def validate_confluence_url_is_cloud
+    unless confluence_uri_valid?
+      errors.add(:confluence_url, 'URL must be to a Confluence Cloud Workspace hosted on atlassian.net')
+    end
+  end
+
+  def confluence_uri_valid?
+    return false unless confluence_url
+
+    uri = URI.parse(confluence_url)
+
+    (uri.scheme&.match(VALID_SCHEME_MATCH) &&
+      uri.host&.match(VALID_HOST_MATCH) &&
+      uri.path&.match(VALID_PATH_MATCH)).present?
+
+  rescue URI::InvalidURIError
+    false
+  end
+
+  def cache_project_has_confluence
+    return unless project && !project.destroyed?
+
+    project.project_setting.save! unless project.project_setting.persisted?
+    project.project_setting.update_column(:has_confluence, active?)
+  end
+end

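A detail of the validator above: the regexes anchor with `\A` and `\Z` rather than `^` and `$`. In Ruby, `^` and `$` match at every line boundary, so `%r{^https?$}` would accept a multi-line value whose second line happens to be `http`; `\A`/`\Z` pin the match to the whole string. A quick check against the same host pattern:

```ruby
require 'uri'

VALID_HOST_MATCH = %r{\A.+\.atlassian\.net\Z}.freeze

URI.parse('https://example.atlassian.net/wiki/').host =~ VALID_HOST_MATCH
# => 0 (match: the host ends in .atlassian.net)

URI.parse('https://atlassian.net.evil.com/wiki/').host =~ VALID_HOST_MATCH
# => nil (rejected: .atlassian.net is not the suffix)
```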
@@ -12,7 +12,7 @@ class Service < ApplicationRecord
  ignore_columns %i[title description], remove_with: '13.4', remove_after: '2020-09-22'

  SERVICE_NAMES = %w[
-    alerts asana assembla bamboo bugzilla buildkite campfire custom_issue_tracker discord
+    alerts asana assembla bamboo bugzilla buildkite campfire confluence custom_issue_tracker discord
    drone_ci emails_on_push external_wiki flowdock hangouts_chat hipchat irker jira
    mattermost mattermost_slash_commands microsoft_teams packagist pipelines_email
    pivotaltracker prometheus pushover redmine slack slack_slash_commands teamcity unify_circuit webex_teams youtrack

@@ -19,6 +19,7 @@ module Branches
      end

      if repository.rm_branch(current_user, branch_name)
+        unlock_artifacts(branch_name)
        ServiceResponse.success(message: 'Branch was deleted')
      else
        ServiceResponse.error(

@@ -28,5 +29,11 @@ module Branches
    rescue Gitlab::Git::PreReceiveError => ex
      ServiceResponse.error(message: ex.message, http_status: 400)
    end
+
+    private
+
+    def unlock_artifacts(branch_name)
+      Ci::RefDeleteUnlockArtifactsWorker.perform_async(project.id, current_user.id, "#{::Gitlab::Git::BRANCH_REF_PREFIX}#{branch_name}")
+    end
  end
end

@@ -104,11 +104,6 @@ module Ci
        expire_in: expire_in)
      end

-      if Feature.enabled?(:keep_latest_artifact_for_ref, project)
-        artifact.locked = true
-        artifact_metadata&.locked = true
-      end
-
      [artifact, artifact_metadata]
    end

@@ -128,7 +123,6 @@ module Ci
      Ci::JobArtifact.transaction do
        artifact.save!
        artifact_metadata&.save!
-        unlock_previous_artifacts!

        # NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
        job.update_column(:artifacts_expire_at, artifact.expire_at)

@@ -146,12 +140,6 @@ module Ci
      error(error.message, :bad_request)
    end

-    def unlock_previous_artifacts!
-      return unless Feature.enabled?(:keep_latest_artifact_for_ref, project)
-
-      Ci::JobArtifact.for_ref(job.ref, project.id).locked.update_all(locked: false)
-    end
-
    def sha256_matches_existing_artifact?(artifact_type, artifacts_file)
      existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
      return false unless existing_artifact

@@ -28,7 +28,7 @@ module Ci
    private

    def destroy_batch
-      artifact_batch = if Feature.enabled?(:keep_latest_artifact_for_ref)
+      artifact_batch = if Gitlab::Ci::Features.destroy_only_unlocked_expired_artifacts_enabled?
                         Ci::JobArtifact.expired(BATCH_SIZE).unlocked
                       else
                         Ci::JobArtifact.expired(BATCH_SIZE)

@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Ci
+  class UnlockArtifactsService < ::BaseService
+    BATCH_SIZE = 100
+
+    def execute(ci_ref, before_pipeline = nil)
+      query = <<~SQL.squish
+        UPDATE "ci_pipelines"
+        SET "locked" = #{::Ci::Pipeline.lockeds[:unlocked]}
+        WHERE "ci_pipelines"."id" in (
+          #{collect_pipelines(ci_ref, before_pipeline).select(:id).to_sql}
+          LIMIT #{BATCH_SIZE}
+          FOR UPDATE SKIP LOCKED
+        )
+        RETURNING "ci_pipelines"."id";
+      SQL
+
+      loop do
+        break if ActiveRecord::Base.connection.exec_query(query).empty?
+      end
+    end
+
+    private
+
+    def collect_pipelines(ci_ref, before_pipeline)
+      pipeline_scope = ci_ref.pipelines
+      pipeline_scope = pipeline_scope.before_pipeline(before_pipeline) if before_pipeline
+
+      pipeline_scope.artifacts_locked
+    end
+  end
+end

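The service above unlocks pipelines in bounded batches: each `exec_query` flips at most `BATCH_SIZE` rows, `FOR UPDATE SKIP LOCKED` lets concurrent workers skip rows another transaction already holds instead of blocking on them, and the loop ends once `RETURNING` produces an empty set. A stripped-down sketch of the same loop shape against a hypothetical table:

```ruby
# Hedged sketch: batched UPDATE with SKIP LOCKED (hypothetical `tasks` table).
BATCH = 100
sql = <<~SQL
  UPDATE tasks SET done = TRUE
  WHERE id IN (
    SELECT id FROM tasks WHERE done = FALSE
    LIMIT #{BATCH}
    FOR UPDATE SKIP LOCKED
  )
  RETURNING id;
SQL

loop do
  # An empty result means every remaining row is either done or locked
  # by a concurrent worker, so this worker can stop.
  break if ActiveRecord::Base.connection.exec_query(sql).empty?
end
```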
@@ -29,6 +29,7 @@ module Git
      perform_housekeeping

      stop_environments
+      unlock_artifacts

      true
    end

@@ -60,6 +61,12 @@ module Git
      Ci::StopEnvironmentsService.new(project, current_user).execute(branch_name)
    end

+    def unlock_artifacts
+      return unless removing_branch?
+
+      Ci::RefDeleteUnlockArtifactsWorker.perform_async(project.id, current_user.id, ref)
+    end
+
    def execute_related_hooks
      BranchHooksService.new(project, current_user, params).execute
    end

@@ -10,7 +10,25 @@ module Git
      project.repository.before_push_tag
      TagHooksService.new(project, current_user, params).execute

+      unlock_artifacts
+
      true
    end

+    private
+
+    def unlock_artifacts
+      return unless removing_tag?
+
+      Ci::RefDeleteUnlockArtifactsWorker.perform_async(project.id, current_user.id, ref)
+    end
+
+    def removing_tag?
+      Gitlab::Git.blank_ref?(newrev)
+    end
+
+    def tag_name
+      Gitlab::Git.ref_name(ref)
+    end
  end
end

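Both push services detect a deletion the same way: when a branch or tag is removed, Git reports the new revision as forty zeros, and `Gitlab::Git.blank_ref?` wraps that comparison. The underlying idea, sketched:

```ruby
# On `git push --delete`, the post-receive hook sees an all-zero newrev.
BLANK_SHA = ('0' * 40).freeze

def removing_ref?(newrev)
  newrev == BLANK_SHA
end

removing_ref?('0' * 40)   # => true  (ref is being deleted)
removing_ref?('af34c1d9') # => false (ref is being updated)
```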
@@ -18,6 +18,8 @@ module Tags
        .new(project, current_user, tag: tag_name)
        .execute

+      unlock_artifacts(tag_name)
+
      success('Tag was removed')
    else
      error('Failed to remove tag')

@@ -33,5 +35,11 @@ module Tags
    def success(message)
      super().merge(message: message)
    end
+
+    private
+
+    def unlock_artifacts(tag_name)
+      Ci::RefDeleteUnlockArtifactsWorker.perform_async(project.id, current_user.id, "#{::Gitlab::Git::TAG_REF_PREFIX}#{tag_name}")
+    end
  end
end

@@ -293,6 +293,20 @@

  = render 'layouts/nav/sidebar/analytics_links', links: project_analytics_navbar_links(@project, current_user)

+  - if project_nav_tab?(:confluence)
+    - confluence_url = @project.confluence_service.confluence_url
+    = nav_link do
+      = link_to confluence_url, class: 'shortcuts-confluence' do
+        .nav-icon-container
+          = sprite_icon('external-link')
+        %span.nav-item-name
+          = _('Confluence')
+      %ul.sidebar-sub-level-items.is-fly-out-only
+        = nav_link(html_options: { class: 'fly-out-top-item' } ) do
+          = link_to confluence_url, target: '_blank', rel: 'noopener noreferrer' do
+            %strong.fly-out-top-item-name
+              = _('Confluence')
+
  - if project_nav_tab? :wiki
    - wiki_url = wiki_path(@project.wiki)
    = nav_link(controller: :wikis) do

@@ -859,6 +859,22 @@
  :weight: 1
  :idempotent: true
  :tags: []
+- :name: pipeline_background:ci_pipeline_success_unlock_artifacts
+  :feature_category: :continuous_integration
+  :has_external_dependencies:
+  :urgency: :low
+  :resource_boundary: :unknown
+  :weight: 1
+  :idempotent: true
+  :tags: []
+- :name: pipeline_background:ci_ref_delete_unlock_artifacts
+  :feature_category: :continuous_integration
+  :has_external_dependencies:
+  :urgency: :low
+  :resource_boundary: :unknown
+  :weight: 1
+  :idempotent: true
+  :tags: []
- :name: pipeline_cache:expire_job_cache
  :feature_category: :continuous_integration
  :has_external_dependencies:

@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module Ci
+  class PipelineSuccessUnlockArtifactsWorker
+    include ApplicationWorker
+    include PipelineBackgroundQueue
+
+    idempotent!
+
+    def perform(pipeline_id)
+      ::Ci::Pipeline.find_by_id(pipeline_id).try do |pipeline|
+        break unless pipeline.has_archive_artifacts?
+
+        ::Ci::UnlockArtifactsService
+          .new(pipeline.project, pipeline.user)
+          .execute(pipeline.ci_ref, pipeline)
+      end
+    end
+  end
+end

@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module Ci
+  class RefDeleteUnlockArtifactsWorker
+    include ApplicationWorker
+    include PipelineBackgroundQueue
+
+    idempotent!
+
+    def perform(project_id, user_id, ref_path)
+      ::Project.find_by_id(project_id).try do |project|
+        ::User.find_by_id(user_id).try do |user|
+          ::Ci::Ref.find_by_ref_path(ref_path).try do |ci_ref|
+            ::Ci::UnlockArtifactsService
+              .new(project, user)
+              .execute(ci_ref)
+          end
+        end
+      end
+    end
+  end
+end

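Both workers are declared `idempotent!` and defend against records that disappear between enqueue and execution: `find_by_id` returns `nil` instead of raising, and `nil.try { ... }` skips the block, so a retried job whose project, user, or ref was deleted exits cleanly. The pattern in isolation:

```ruby
# Sketch of the nil-tolerant lookup chain used by the workers above.
def perform(project_id)
  ::Project.find_by_id(project_id).try do |project|
    # Reached only when the record still exists; otherwise the job
    # completes as a no-op instead of raising RecordNotFound.
    do_something_with(project)
  end
end
```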
@@ -0,0 +1,5 @@
+---
+title: Avoid N+1 calls for image_path when rendering commits
+merge_request: 36724
+author:
+type: performance

@@ -0,0 +1,5 @@
+---
+title: 'Cleanup policies: display API error messages under form field'
+merge_request: 36190
+author:
+type: changed

@@ -0,0 +1,5 @@
+---
+title: 'Web IDE: Page title should not be .editorconfig when the IDE is first loaded.'
+merge_request: 36783
+author:
+type: fixed

@@ -0,0 +1,5 @@
+---
+title: Do not depend on artifacts from previous stages in Auto DevOps deployments
+merge_request: 36741
+author:
+type: fixed

@@ -0,0 +1,5 @@
+---
+title: Add a custom HTML renderer to the Static Site Editor for font awesome inline HTML syntax
+merge_request: 36361
+author:
+type: added

@@ -0,0 +1,5 @@
+---
+title: Fix error message when saving an integration and testing the settings.
+merge_request: 36700
+author:
+type: fixed

@@ -0,0 +1,5 @@
+---
+title: Upgrade GitLab Pages to 1.21.0
+merge_request: 36214
+author:
+type: added

@@ -158,6 +158,8 @@ module Gitlab
    # Webpack dev server configuration is handled in initializers/static_files.rb
    config.webpack.dev_server.enabled = false

+    config.action_mailer.delivery_job = "ActionMailer::MailDeliveryJob"
+
    # Enable the asset pipeline
    config.assets.enabled = true

@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddLockedToCiPipelines < ActiveRecord::Migration[6.0]
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  def up
+    with_lock_retries do
+      add_column :ci_pipelines, :locked, :integer, limit: 2, null: false, default: 0
+    end
+  end
+
+  def down
+    with_lock_retries do
+      remove_column :ci_pipelines, :locked
+    end
+  end
+end

@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddPartialIndexToLockedPipelines < ActiveRecord::Migration[6.0]
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  disable_ddl_transaction!
+
+  def up
+    add_concurrent_index :ci_pipelines, [:ci_ref_id, :id], name: 'idx_ci_pipelines_artifacts_locked', where: 'locked = 1'
+  end
+
+  def down
+    remove_concurrent_index :ci_pipelines, 'idx_ci_pipelines_artifacts_locked'
+  end
+end

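The index added above is partial (`WHERE locked = 1`, the `artifacts_locked` enum value), so it only contains rows the unlock service can still act on and shrinks as pipelines are unlocked. Its `(ci_ref_id, id)` columns line up with the shape of the lookup the service issues, roughly:

```ruby
# Roughly the lookup the partial index serves (sketch, not the exact service code):
Ci::Pipeline
  .where(ci_ref_id: ci_ref.id) # leading index column
  .artifacts_locked            # locked = 1, matching the index predicate
  .select(:id)                 # second index column
```

`disable_ddl_transaction!` plus `add_concurrent_index` builds the index with `CREATE INDEX CONCURRENTLY`, avoiding a long write lock on `ci_pipelines`.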
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+class AddDefaultValueStreamToGroupsWithGroupStages < ActiveRecord::Migration[6.0]
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  disable_ddl_transaction!
+
+  class Group < ActiveRecord::Base
+    def self.find_sti_class(typename)
+      if typename == 'Group'
+        Group
+      else
+        super
+      end
+    end
+    self.table_name = 'namespaces'
+    has_many :group_value_streams
+    has_many :group_stages
+  end
+
+  class GroupValueStream < ActiveRecord::Base
+    self.table_name = 'analytics_cycle_analytics_group_value_streams'
+    has_many :group_stages
+    belongs_to :group
+  end
+
+  class GroupStage < ActiveRecord::Base
+    self.table_name = 'analytics_cycle_analytics_group_stages'
+    belongs_to :group_value_stream
+  end
+
+  def up
+    Group.where(type: 'Group').joins(:group_stages).distinct.find_each do |group|
+      Group.transaction do
+        group_value_stream = group.group_value_streams.first_or_create!(name: 'default')
+        group.group_stages.update_all(group_value_stream_id: group_value_stream.id)
+      end
+    end
+
+    change_column_null :analytics_cycle_analytics_group_stages, :group_value_stream_id, false
+  end
+
+  def down
+    change_column_null :analytics_cycle_analytics_group_stages, :group_value_stream_id, true
+
+    GroupValueStream.where(name: 'default').includes(:group_stages).find_each do |value_stream|
+      GroupValueStream.transaction do
+        value_stream.group_stages.update_all(group_value_stream_id: nil)
+        value_stream.destroy!
+      end
+    end
+  end
+end

@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class ValidateForeignKeyOnCycleAnalyticsGroupStages < ActiveRecord::Migration[6.0]
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+  # same as in db/migrate/20200701064756_add_not_valid_foreign_key_to_cycle_analytics_group_stages.rb
+  CONSTRAINT_NAME = 'fk_analytics_cycle_analytics_group_stages_group_value_stream_id'
+
+  def up
+    validate_foreign_key :analytics_cycle_analytics_group_stages, :group_value_stream_id, name: CONSTRAINT_NAME
+  end
+
+  def down
+    remove_foreign_key_if_exists :analytics_cycle_analytics_group_stages, column: :group_value_stream_id, name: CONSTRAINT_NAME
+    add_foreign_key :analytics_cycle_analytics_group_stages, :analytics_cycle_analytics_group_value_streams,
+      column: :group_value_stream_id, name: CONSTRAINT_NAME, on_delete: :cascade, validate: false
+  end
+end

@@ -8801,7 +8801,7 @@ CREATE TABLE public.analytics_cycle_analytics_group_stages (
    hidden boolean DEFAULT false NOT NULL,
    custom boolean DEFAULT true NOT NULL,
    name character varying(255) NOT NULL,
-    group_value_stream_id bigint
+    group_value_stream_id bigint NOT NULL
);

CREATE SEQUENCE public.analytics_cycle_analytics_group_stages_id_seq

@@ -10091,7 +10091,8 @@ CREATE TABLE public.ci_pipelines (
    source_sha bytea,
    target_sha bytea,
    external_pull_request_id bigint,
-    ci_ref_id bigint
+    ci_ref_id bigint,
+    locked smallint DEFAULT 0 NOT NULL
);

CREATE TABLE public.ci_pipelines_config (

@@ -18478,6 +18479,8 @@ CREATE UNIQUE INDEX epic_user_mentions_on_epic_id_and_note_id_index ON public.ep

CREATE UNIQUE INDEX epic_user_mentions_on_epic_id_index ON public.epic_user_mentions USING btree (epic_id) WHERE (note_id IS NULL);

+CREATE INDEX idx_ci_pipelines_artifacts_locked ON public.ci_pipelines USING btree (ci_ref_id, id) WHERE (locked = 1);
+
CREATE INDEX idx_deployment_clusters_on_cluster_id_and_kubernetes_namespace ON public.deployment_clusters USING btree (cluster_id, kubernetes_namespace);

CREATE UNIQUE INDEX idx_deployment_merge_requests_unique_index ON public.deployment_merge_requests USING btree (deployment_id, merge_request_id);

@@ -21260,7 +21263,7 @@ ALTER TABLE ONLY public.merge_request_metrics
    ADD CONSTRAINT fk_ae440388cc FOREIGN KEY (latest_closed_by_id) REFERENCES public.users(id) ON DELETE SET NULL;

ALTER TABLE ONLY public.analytics_cycle_analytics_group_stages
-    ADD CONSTRAINT fk_analytics_cycle_analytics_group_stages_group_value_stream_id FOREIGN KEY (group_value_stream_id) REFERENCES public.analytics_cycle_analytics_group_value_streams(id) ON DELETE CASCADE NOT VALID;
+    ADD CONSTRAINT fk_analytics_cycle_analytics_group_stages_group_value_stream_id FOREIGN KEY (group_value_stream_id) REFERENCES public.analytics_cycle_analytics_group_value_streams(id) ON DELETE CASCADE;

ALTER TABLE ONLY public.fork_network_members
    ADD CONSTRAINT fk_b01280dae4 FOREIGN KEY (forked_from_project_id) REFERENCES public.projects(id) ON DELETE SET NULL;

@@ -23635,6 +23638,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200527152657
20200527170649
20200527211000
+20200527211605
20200528054112
20200528123703
20200528125905

@@ -23710,6 +23714,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200625045442
20200625082258
20200625113337
+20200625174052
20200625190458
20200626060151
20200626130220

@@ -23717,6 +23722,8 @@ COPY "schema_migrations" (version) FROM STDIN;
20200630091656
20200630110826
20200701064756
+20200701070435
+20200701091253
20200701093859
20200701205710
20200702123805

@@ -186,7 +186,7 @@ user.save
Which would return:

```ruby
-Enqueued ActionMailer::DeliveryJob (Job ID: 05915c4e-c849-4e14-80bb-696d5ae22065) to Sidekiq(mailers) with arguments: "DeviseMailer", "password_change", "deliver_now", #<GlobalID:0x00007f42d8ccebe8 @uri=#<URI::GID gid://gitlab/User/1>>
+Enqueued ActionMailer::MailDeliveryJob (Job ID: 05915c4e-c849-4e14-80bb-696d5ae22065) to Sidekiq(mailers) with arguments: "DeviseMailer", "password_change", "deliver_now", #<GlobalID:0x00007f42d8ccebe8 @uri=#<URI::GID gid://gitlab/User/1>>
=> true
```

@@ -38,7 +38,7 @@ Example log output:

```json
{"severity":"INFO","time":"2020-06-08T14:37:37.892Z","class":"AdminEmailsWorker","args":["[FILTERED]","[FILTERED]","[FILTERED]"],"retry":3,"queue":"admin_emails","backtrace":true,"jid":"9e35e2674ac7b12d123e13cc","created_at":"2020-06-08T14:37:37.373Z","meta.user":"root","meta.caller_id":"Admin::EmailsController#create","correlation_id":"37D3lArJmT1","uber-trace-id":"2d942cc98cc1b561:6dc94409cfdd4d77:9fbe19bdee865293:1","enqueued_at":"2020-06-08T14:37:37.410Z","pid":65011,"message":"AdminEmailsWorker JID-9e35e2674ac7b12d123e13cc: done: 0.48085 sec","job_status":"done","scheduling_latency_s":0.001012,"redis_calls":9,"redis_duration_s":0.004608,"redis_read_bytes":696,"redis_write_bytes":6141,"duration_s":0.48085,"cpu_s":0.308849,"completed_at":"2020-06-08T14:37:37.892Z","db_duration_s":0.010742}
-{"severity":"INFO","time":"2020-06-08T14:37:37.894Z","class":"ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper","wrapped":"ActionMailer::DeliveryJob","queue":"mailers","args":["[FILTERED]"],"retry":3,"backtrace":true,"jid":"e47a4f6793d475378432e3c8","created_at":"2020-06-08T14:37:37.884Z","meta.user":"root","meta.caller_id":"AdminEmailsWorker","correlation_id":"37D3lArJmT1","uber-trace-id":"2d942cc98cc1b561:29344de0f966446d:5c3b0e0e1bef987b:1","enqueued_at":"2020-06-08T14:37:37.885Z","pid":65011,"message":"ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper JID-e47a4f6793d475378432e3c8: start","job_status":"start","scheduling_latency_s":0.009473}
+{"severity":"INFO","time":"2020-06-08T14:37:37.894Z","class":"ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper","wrapped":"ActionMailer::MailDeliveryJob","queue":"mailers","args":["[FILTERED]"],"retry":3,"backtrace":true,"jid":"e47a4f6793d475378432e3c8","created_at":"2020-06-08T14:37:37.884Z","meta.user":"root","meta.caller_id":"AdminEmailsWorker","correlation_id":"37D3lArJmT1","uber-trace-id":"2d942cc98cc1b561:29344de0f966446d:5c3b0e0e1bef987b:1","enqueued_at":"2020-06-08T14:37:37.885Z","pid":65011,"message":"ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper JID-e47a4f6793d475378432e3c8: start","job_status":"start","scheduling_latency_s":0.009473}
{"severity":"INFO","time":"2020-06-08T14:39:50.648Z","class":"NewIssueWorker","args":["455","1"],"retry":3,"queue":"new_issue","backtrace":true,"jid":"a24af71f96fd129ec47f5d1e","created_at":"2020-06-08T14:39:50.643Z","meta.user":"root","meta.project":"h5bp/html5-boilerplate","meta.root_namespace":"h5bp","meta.caller_id":"Projects::IssuesController#create","correlation_id":"f9UCZHqhuP7","uber-trace-id":"28f65730f99f55a3:a5d2b62dec38dffc:48ddd092707fa1b7:1","enqueued_at":"2020-06-08T14:39:50.646Z","pid":65011,"message":"NewIssueWorker JID-a24af71f96fd129ec47f5d1e: start","job_status":"start","scheduling_latency_s":0.001144}
```

@@ -12408,6 +12408,7 @@ enum ServiceType {
  BUGZILLA_SERVICE
  BUILDKITE_SERVICE
  CAMPFIRE_SERVICE
+  CONFLUENCE_SERVICE
  CUSTOM_ISSUE_TRACKER_SERVICE
  DISCORD_SERVICE
  DRONE_CI_SERVICE

@@ -36439,6 +36439,12 @@
      "isDeprecated": false,
      "deprecationReason": null
    },
+    {
+      "name": "CONFLUENCE_SERVICE",
+      "description": null,
+      "isDeprecated": false,
+      "deprecationReason": null
+    },
    {
      "name": "CUSTOM_ISSUE_TRACKER_SERVICE",
      "description": null,

@@ -493,6 +493,73 @@ Get Emails on push service settings for a project.
GET /projects/:id/services/emails-on-push
```

+## Confluence service
+
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/220934) in GitLab 13.2.
+> - It's deployed behind a feature flag, disabled by default.
+> - It's disabled on GitLab.com.
+> - It's able to be enabled or disabled per-project.
+> - It's not recommended for production use.
+> - To use it in GitLab self-managed instances, ask a GitLab administrator to
+>   [enable it](#enable-or-disable-the-confluence-service-core-only). **(CORE ONLY)**
+
+Replaces the link to the internal wiki with a link to a Confluence Cloud Workspace.
+
+### Create/Edit Confluence service
+
+Set Confluence service for a project.
+
+```plaintext
+PUT /projects/:id/services/confluence
+```
+
+Parameters:
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `confluence_url` | string | true | The URL of the Confluence Cloud Workspace hosted on atlassian.net. |
+
+### Delete Confluence service
+
+Delete Confluence service for a project.
+
+```plaintext
+DELETE /projects/:id/services/confluence
+```
+
+### Get Confluence service settings
+
+Get Confluence service settings for a project.
+
+```plaintext
+GET /projects/:id/services/confluence
+```
+
+### Enable or disable the Confluence service **(CORE ONLY)**
+
+The Confluence service is under development and not ready for production use. It is
+deployed behind a feature flag that is **disabled by default**.
+[GitLab administrators with access to the GitLab Rails console](../administration/feature_flags.md)
+can enable it for your instance. The Confluence service can be enabled or disabled per-project.
+
+To enable it:
+
+```ruby
+# Instance-wide
+Feature.enable(:confluence_integration)
+# or by project
+Feature.enable(:confluence_integration, Project.find(<project id>))
+```
+
+To disable it:
+
+```ruby
+# Instance-wide
+Feature.disable(:confluence_integration)
+# or by project
+Feature.disable(:confluence_integration, Project.find(<project id>))
+```
+
## External Wiki

Replaces the link to the internal wiki with a link to an external wiki.

@@ -90,7 +90,7 @@ store:
  - |
    echo "-----BEGIN CERTIFICATE-----
    ...
-    -----END CERTIFICATE-----" >> /kaniko/ssl/certs/ca-certificates.crt
+    -----END CERTIFICATE-----" >> /kaniko/ssl/certs/additional-ca-cert-bundle.crt
```

## Video walkthrough of a working example

@@ -28,6 +28,7 @@ Click on the service links to see further configuration instructions and details
| Buildkite | Continuous integration and deployments | Yes |
| [Bugzilla](bugzilla.md) | Bugzilla issue tracker | No |
| Campfire | Simple web-based real-time group chat | No |
+| Confluence | Replaces the link to the internal wiki with a link to a Confluence Cloud Workspace. Service is behind a feature flag, disabled by default ([see details](../../../api/services.md#enable-or-disable-the-confluence-service-core-only)). | No |
| Custom Issue Tracker | Custom issue tracker | No |
| [Discord Notifications](discord_notifications.md) | Receive event notifications in Discord | No |
| Drone CI | Continuous Integration platform built on Docker, written in Go | Yes |

@@ -288,6 +288,14 @@ module API
        desc: 'Campfire room'
      }
    ],
+    'confluence' => [
+      {
+        required: true,
+        name: :confluence_url,
+        type: String,
+        desc: 'The URL of the Confluence Cloud Workspace hosted on atlassian.net'
+      }
+    ],
    'custom-issue-tracker' => [
      {
        required: true,

@@ -757,6 +765,7 @@ module API
      ::BambooService,
      ::BugzillaService,
      ::BuildkiteService,
+      ::ConfluenceService,
      ::CampfireService,
      ::CustomIssueTrackerService,
      ::DiscordService,

@@ -53,6 +53,14 @@ module Gitlab
      def self.raise_job_rules_without_workflow_rules_warning?
        ::Feature.enabled?(:ci_raise_job_rules_without_workflow_rules_warning)
      end
+
+      def self.keep_latest_artifacts_for_ref_enabled?(project)
+        ::Feature.enabled?(:keep_latest_artifacts_for_ref, project, default_enabled: false)
+      end
+
+      def self.destroy_only_unlocked_expired_artifacts_enabled?
+        ::Feature.enabled?(:destroy_only_unlocked_expired_artifacts, default_enabled: false)
+      end
    end
  end
end

@@ -20,7 +20,11 @@ module Gitlab
          pipeline_schedule: @command.schedule,
          merge_request: @command.merge_request,
          external_pull_request: @command.external_pull_request,
          variables_attributes: Array(@command.variables_attributes)
          variables_attributes: Array(@command.variables_attributes),
          # This should be removed and set on the database column default
          # level when the keep_latest_artifacts_for_ref feature flag is
          # removed.
          locked: ::Gitlab::Ci::Features.keep_latest_artifacts_for_ref_enabled?(@command.project) ? :artifacts_locked : :unlocked
        )
      end
@@ -1,5 +1,6 @@
.auto-deploy:
  image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.17.0"
  dependencies: []

include:
  - template: Jobs/Deploy/ECS.gitlab-ci.yml

@@ -42,7 +43,6 @@ stop_review:
  environment:
    name: review/$CI_COMMIT_REF_NAME
    action: stop
  dependencies: []
  allow_failure: true
  rules:
    - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
@@ -29,13 +29,13 @@ module Gitlab
      override :write_cache
      def write_cache
        highlight_cache.write_if_empty
        diff_stats_cache&.write_if_empty(diff_stats_collection)
        diff_stats_cache.write_if_empty(diff_stats_collection)
      end

      override :clear_cache
      def clear_cache
        highlight_cache.clear
        diff_stats_cache&.clear
        diff_stats_cache.clear
      end

      def real_size

@@ -52,9 +52,7 @@ module Gitlab

      def diff_stats_cache
        strong_memoize(:diff_stats_cache) do
          if Feature.enabled?(:cache_diff_stats_merge_request, project)
            Gitlab::Diff::StatsCache.new(cachable_key: @merge_request_diff.cache_key)
          end
          Gitlab::Diff::StatsCache.new(cachable_key: @merge_request_diff.cache_key)
        end
      end

@@ -63,7 +61,7 @@ module Gitlab
        strong_memoize(:diff_stats) do
          next unless fetch_diff_stats?

          diff_stats_cache&.read || super
          diff_stats_cache.read || super
        end
      end
    end
@@ -309,6 +309,7 @@ excluded_attributes:
    - :merge_request_id
    - :external_pull_request_id
    - :ci_ref_id
    - :locked
  stages:
    - :pipeline_id
  merge_access_levels:
@@ -26,9 +26,9 @@ module Gitlab
        job = ::Marginalia::Comment.marginalia_job

        # We are using 'Marginalia::SidekiqInstrumentation' which does not support 'ActiveJob::Base'.
        # Gitlab also uses 'ActionMailer::DeliveryJob' which inherits from ActiveJob::Base.
        # Gitlab also uses 'ActionMailer::MailDeliveryJob' which inherits from ActiveJob::Base.
        # So below condition is used to return metadata for such jobs.
        if job && job.is_a?(ActionMailer::DeliveryJob)
        if job.is_a?(ActionMailer::MailDeliveryJob) || job.is_a?(ActionMailer::DeliveryJob)
          {
            "class" => job.arguments.first,
            "jid" => job.job_id
@@ -6199,6 +6199,24 @@ msgstr ""
msgid "Confirmation required"
msgstr ""

msgid "Confluence"
msgstr ""

msgid "ConfluenceService|Confluence Workspace"
msgstr ""

msgid "ConfluenceService|Connect a Confluence Cloud Workspace to your GitLab project"
msgstr ""

msgid "ConfluenceService|Enabling the Confluence Workspace will disable the default GitLab Wiki. Your GitLab Wiki data will be saved and you can always re-enable it later by turning off this integration"
msgstr ""

msgid "ConfluenceService|The URL of the Confluence Workspace"
msgstr ""

msgid "ConfluenceService|Your GitLab Wiki can be accessed here: %{wiki_link}. To re-enable your GitLab Wiki, disable this integration"
msgstr ""

msgid "Congratulations! You have enabled Two-factor Authentication!"
msgstr ""

@@ -6433,7 +6451,7 @@ msgstr ""
msgid "ContainerRegistry|The last tag related to this image was recently removed. This empty image and any associated data will be automatically removed as part of the regular Garbage Collection process. If you have any questions, contact your administrator."
msgstr ""

msgid "ContainerRegistry|The value of this input should be less than 255 characters"
msgid "ContainerRegistry|The value of this input should be less than 256 characters"
msgstr ""

msgid "ContainerRegistry|There are no container images available in this group"

@@ -20549,6 +20567,9 @@ msgstr ""
msgid "SecurityReports|Add projects"
msgstr ""

msgid "SecurityReports|Add projects to your group"
msgstr ""

msgid "SecurityReports|Comment added to '%{vulnerabilityName}'"
msgstr ""

@@ -20621,7 +20642,7 @@ msgstr ""
msgid "SecurityReports|More information"
msgstr ""

msgid "SecurityReports|No vulnerabilities found for dashboard"
msgid "SecurityReports|No vulnerabilities found"
msgstr ""

msgid "SecurityReports|No vulnerabilities found for this group"

@@ -20672,12 +20693,18 @@ msgstr ""
msgid "SecurityReports|Severity"
msgstr ""

msgid "SecurityReports|Sorry, your filter produced no results"
msgstr ""

msgid "SecurityReports|Status"
msgstr ""

msgid "SecurityReports|The rating \"unknown\" indicates that the underlying scanner doesn’t contain or provide a severity rating."
msgstr ""

msgid "SecurityReports|The security dashboard displays the latest security findings for projects you wish to monitor. Add projects to your group to view their vulnerabilities here."
msgstr ""

msgid "SecurityReports|The security dashboard displays the latest security findings for projects you wish to monitor. Select \"Edit dashboard\" to add and remove projects."
msgstr ""

@@ -20711,6 +20738,9 @@ msgstr ""
msgid "SecurityReports|There was an error while generating the report."
msgstr ""

msgid "SecurityReports|To widen your search, change or remove filters above"
msgstr ""

msgid "SecurityReports|Unable to add %{invalidProjectsMessage}"
msgstr ""

@@ -20729,7 +20759,7 @@ msgstr ""
msgid "SecurityReports|While it's rare to have no vulnerabilities for your project, it can happen. In any event, we ask that you double check your settings to make sure you've set up your dashboard correctly."
msgstr ""

msgid "SecurityReports|While it's rare to have no vulnerabilities, it can happen. In any event, we ask that you please double check your settings to make sure you've set up your dashboard correctly."
msgid "SecurityReports|While it's rare to have no vulnerabilities, it can happen. In any event, we ask that you double check your settings to make sure you've set up your dashboard correctly."
msgstr ""

msgid "SecurityReports|Won't fix / Accept risk"
@@ -89,6 +89,12 @@ FactoryBot.define do
    end
  end

  factory :confluence_service do
    project
    active { true }
    confluence_url { 'https://example.atlassian.net/wiki' }
  end

  factory :bugzilla_service do
    project
    active { true }
@@ -44,6 +44,15 @@ describe('RecentSearchesStore', () => {
    expect(store.state.recentSearches).toEqual(['baz', 'qux']);
  });

  it('handles non-string values', () => {
    store.setRecentSearches(['foo ', { foo: 'bar' }, { foo: 'bar' }, ['foobar']]);

    // 1. String values are trimmed of leading/trailing spaces.
    // 2. Duplicates are removed using deep object comparison.
    // 3. The existing behaviour for string values is unchanged.
    expect(store.state.recentSearches).toEqual(['foo', { foo: 'bar' }, ['foobar']]);
  });

  it('only keeps track of 5 items', () => {
    store.setRecentSearches(['1', '2', '3', '4', '5', '6', '7']);
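The dedup behaviour exercised above comes straight from lodash: `uniqWith` paired with `isEqual` compares entries by deep equality, so structurally identical objects collapse into one entry while distinct values survive. A minimal sketch of that semantics:

```javascript
import { uniqWith, isEqual } from 'lodash';

// Deep-equality dedup: the two structurally identical objects collapse into one.
const searches = ['foo', { foo: 'bar' }, { foo: 'bar' }, ['foobar']];

console.log(uniqWith(searches, isEqual));
// => ['foo', { foo: 'bar' }, ['foobar']]
```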
@@ -51,35 +51,27 @@ describe('IDE store file actions', () => {
      store.state.entries[localFile.path] = localFile;
    });

    it('closes open files', done => {
      store
        .dispatch('closeFile', localFile)
        .then(() => {
          expect(localFile.opened).toBeFalsy();
          expect(localFile.active).toBeFalsy();
          expect(store.state.openFiles.length).toBe(0);

          done();
        })
        .catch(done.fail);
    it('closes open files', () => {
      return store.dispatch('closeFile', localFile).then(() => {
        expect(localFile.opened).toBeFalsy();
        expect(localFile.active).toBeFalsy();
        expect(store.state.openFiles.length).toBe(0);
      });
    });

    it('closes file even if file has changes', done => {
    it('closes file even if file has changes', () => {
      store.state.changedFiles.push(localFile);

      store
      return store
        .dispatch('closeFile', localFile)
        .then(Vue.nextTick)
        .then(() => {
          expect(store.state.openFiles.length).toBe(0);
          expect(store.state.changedFiles.length).toBe(1);

          done();
        })
        .catch(done.fail);
        });
    });

    it('closes file & opens next available file', done => {
    it('closes file & opens next available file', () => {
      const f = {
        ...file('newOpenFile'),
        url: '/newOpenFile',

@@ -88,31 +80,23 @@ describe('IDE store file actions', () => {
      store.state.openFiles.push(f);
      store.state.entries[f.path] = f;

      store
      return store
        .dispatch('closeFile', localFile)
        .then(Vue.nextTick)
        .then(() => {
          expect(router.push).toHaveBeenCalledWith(`/project${f.url}`);

          done();
        })
        .catch(done.fail);
        });
    });

    it('removes file if it pending', done => {
    it('removes file if it pending', () => {
      store.state.openFiles.push({
        ...localFile,
        pending: true,
      });

      store
        .dispatch('closeFile', localFile)
        .then(() => {
          expect(store.state.openFiles.length).toBe(0);

          done();
        })
        .catch(done.fail);
      return store.dispatch('closeFile', localFile).then(() => {
        expect(store.state.openFiles.length).toBe(0);
      });
    });
  });

@@ -264,61 +248,48 @@ describe('IDE store file actions', () => {
        );
      });

      it('calls the service', done => {
        store
          .dispatch('getFileData', { path: localFile.path })
          .then(() => {
            expect(service.getFileData).toHaveBeenCalledWith(
              `${RELATIVE_URL_ROOT}/test/test/-/7297abc/${localFile.path}`,
            );

            done();
          })
          .catch(done.fail);
      it('calls the service', () => {
        return store.dispatch('getFileData', { path: localFile.path }).then(() => {
          expect(service.getFileData).toHaveBeenCalledWith(
            `${RELATIVE_URL_ROOT}/test/test/-/7297abc/${localFile.path}`,
          );
        });
      });

      it('sets document title with the branchId', done => {
        store
          .dispatch('getFileData', { path: localFile.path })
          .then(() => {
            expect(document.title).toBe(`${localFile.path} · master · test/test · GitLab`);
            done();
          })
          .catch(done.fail);
      it('sets document title with the branchId', () => {
        return store.dispatch('getFileData', { path: localFile.path }).then(() => {
          expect(document.title).toBe(`${localFile.path} · master · test/test · GitLab`);
        });
      });

      it('sets the file as active', done => {
        store
          .dispatch('getFileData', { path: localFile.path })
          .then(() => {
            expect(localFile.active).toBeTruthy();

            done();
          })
          .catch(done.fail);
      it('sets the file as active', () => {
        return store.dispatch('getFileData', { path: localFile.path }).then(() => {
          expect(localFile.active).toBeTruthy();
        });
      });

      it('sets the file not as active if we pass makeFileActive false', done => {
        store
      it('sets the file not as active if we pass makeFileActive false', () => {
        return store
          .dispatch('getFileData', { path: localFile.path, makeFileActive: false })
          .then(() => {
            expect(localFile.active).toBeFalsy();

            done();
          })
          .catch(done.fail);
          });
      });

      it('adds the file to open files', done => {
        store
          .dispatch('getFileData', { path: localFile.path })
      it('does not update the page title with the path of the file if makeFileActive is false', () => {
        document.title = 'dummy title';
        return store
          .dispatch('getFileData', { path: localFile.path, makeFileActive: false })
          .then(() => {
            expect(store.state.openFiles.length).toBe(1);
            expect(store.state.openFiles[0].name).toBe(localFile.name);
            expect(document.title).toBe(`dummy title`);
          });
      });

            done();
          })
          .catch(done.fail);
      it('adds the file to open files', () => {
        return store.dispatch('getFileData', { path: localFile.path }).then(() => {
          expect(store.state.openFiles.length).toBe(1);
          expect(store.state.openFiles[0].name).toBe(localFile.name);
        });
      });
    });

@@ -342,15 +313,10 @@ describe('IDE store file actions', () => {
        );
      });

      it('sets document title considering `prevPath` on a file', done => {
        store
          .dispatch('getFileData', { path: localFile.path })
          .then(() => {
            expect(document.title).toBe(`new-shiny-file · master · test/test · GitLab`);

            done();
          })
          .catch(done.fail);
      it('sets document title considering `prevPath` on a file', () => {
        return store.dispatch('getFileData', { path: localFile.path }).then(() => {
          expect(document.title).toBe(`new-shiny-file · master · test/test · GitLab`);
        });
      });
    });

@@ -397,29 +363,19 @@ describe('IDE store file actions', () => {
        mock.onGet(/(.*)/).replyOnce(200, 'raw');
      });

      it('calls getRawFileData service method', done => {
        store
          .dispatch('getRawFileData', { path: tmpFile.path })
          .then(() => {
            expect(service.getRawFileData).toHaveBeenCalledWith(tmpFile);

            done();
          })
          .catch(done.fail);
      it('calls getRawFileData service method', () => {
        return store.dispatch('getRawFileData', { path: tmpFile.path }).then(() => {
          expect(service.getRawFileData).toHaveBeenCalledWith(tmpFile);
        });
      });

      it('updates file raw data', done => {
        store
          .dispatch('getRawFileData', { path: tmpFile.path })
          .then(() => {
            expect(tmpFile.raw).toBe('raw');

            done();
          })
          .catch(done.fail);
      it('updates file raw data', () => {
        return store.dispatch('getRawFileData', { path: tmpFile.path }).then(() => {
          expect(tmpFile.raw).toBe('raw');
        });
      });

      it('calls also getBaseRawFileData service method', done => {
      it('calls also getBaseRawFileData service method', () => {
        jest.spyOn(service, 'getBaseRawFileData').mockReturnValue(Promise.resolve('baseraw'));

        store.state.currentProjectId = 'gitlab-org/gitlab-ce';

@@ -436,15 +392,10 @@ describe('IDE store file actions', () => {

        tmpFile.mrChange = { new_file: false };

        store
          .dispatch('getRawFileData', { path: tmpFile.path })
          .then(() => {
            expect(service.getBaseRawFileData).toHaveBeenCalledWith(tmpFile, 'SHA');
            expect(tmpFile.baseRaw).toBe('baseraw');

            done();
          })
          .catch(done.fail);
        return store.dispatch('getRawFileData', { path: tmpFile.path }).then(() => {
          expect(service.getBaseRawFileData).toHaveBeenCalledWith(tmpFile, 'SHA');
          expect(tmpFile.baseRaw).toBe('baseraw');
        });
      });

      describe('sets file loading to true', () => {

@@ -501,15 +452,10 @@ describe('IDE store file actions', () => {
        mock.onGet(/(.*)/).replyOnce(200, JSON.stringify({ test: '123' }));
      });

      it('does not parse returned JSON', done => {
        store
          .dispatch('getRawFileData', { path: tmpFile.path })
          .then(() => {
            expect(tmpFile.raw).toEqual('{"test":"123"}');

            done();
          })
          .catch(done.fail);
      it('does not parse returned JSON', () => {
        return store.dispatch('getRawFileData', { path: tmpFile.path }).then(() => {
          expect(tmpFile.raw).toEqual('{"test":"123"}');
        });
      });
    });

@@ -558,32 +504,25 @@ describe('IDE store file actions', () => {
      store.state.entries[tmpFile.path] = tmpFile;
    });

    it('updates file content', done => {
      callAction()
        .then(() => {
          expect(tmpFile.content).toBe('content\n');

          done();
        })
        .catch(done.fail);
    it('updates file content', () => {
      return callAction().then(() => {
        expect(tmpFile.content).toBe('content\n');
      });
    });

    it('adds file into stagedFiles array', done => {
      store
    it('adds file into stagedFiles array', () => {
      return store
        .dispatch('changeFileContent', {
          path: tmpFile.path,
          content: 'content',
        })
        .then(() => {
          expect(store.state.stagedFiles.length).toBe(1);

          done();
        })
        .catch(done.fail);
        });
    });

    it('adds file not more than once into stagedFiles array', done => {
      store
    it('adds file not more than once into stagedFiles array', () => {
      return store
        .dispatch('changeFileContent', {
          path: tmpFile.path,
          content: 'content',

@@ -596,14 +535,11 @@ describe('IDE store file actions', () => {
        )
        .then(() => {
          expect(store.state.stagedFiles.length).toBe(1);

          done();
        })
        .catch(done.fail);
        });
    });

    it('removes file from changedFiles array if not changed', done => {
      store
    it('removes file from changedFiles array if not changed', () => {
      return store
        .dispatch('changeFileContent', {
          path: tmpFile.path,
          content: 'content\n',

@@ -616,10 +552,7 @@ describe('IDE store file actions', () => {
        )
        .then(() => {
          expect(store.state.changedFiles.length).toBe(0);

          done();
        })
        .catch(done.fail);
        });
    });
  });

@@ -777,52 +710,36 @@ describe('IDE store file actions', () => {
      store.state.entries[f.path] = f;
    });

    it('makes file pending in openFiles', done => {
      store
        .dispatch('openPendingTab', { file: f, keyPrefix: 'pending' })
        .then(() => {
          expect(store.state.openFiles[0].pending).toBe(true);
        })
        .then(done)
        .catch(done.fail);
    it('makes file pending in openFiles', () => {
      return store.dispatch('openPendingTab', { file: f, keyPrefix: 'pending' }).then(() => {
        expect(store.state.openFiles[0].pending).toBe(true);
      });
    });

    it('returns true when opened', done => {
      store
        .dispatch('openPendingTab', { file: f, keyPrefix: 'pending' })
        .then(added => {
          expect(added).toBe(true);
        })
        .then(done)
        .catch(done.fail);
    it('returns true when opened', () => {
      return store.dispatch('openPendingTab', { file: f, keyPrefix: 'pending' }).then(added => {
        expect(added).toBe(true);
      });
    });

    it('returns false when already opened', done => {
    it('returns false when already opened', () => {
      store.state.openFiles.push({
        ...f,
        active: true,
        key: `pending-${f.key}`,
      });

      store
        .dispatch('openPendingTab', { file: f, keyPrefix: 'pending' })
        .then(added => {
          expect(added).toBe(false);
        })
        .then(done)
        .catch(done.fail);
      return store.dispatch('openPendingTab', { file: f, keyPrefix: 'pending' }).then(added => {
        expect(added).toBe(false);
      });
    });

    it('pushes router URL when added', done => {
    it('pushes router URL when added', () => {
      store.state.currentBranchId = 'master';

      store
        .dispatch('openPendingTab', { file: f, keyPrefix: 'pending' })
        .then(() => {
          expect(router.push).toHaveBeenCalledWith('/project/123/tree/master/');
        })
        .then(done)
        .catch(done.fail);
      return store.dispatch('openPendingTab', { file: f, keyPrefix: 'pending' }).then(() => {
        expect(router.push).toHaveBeenCalledWith('/project/123/tree/master/');
      });
    });
  });

@@ -838,26 +755,18 @@ describe('IDE store file actions', () => {
      };
    });

    it('removes pending file from open files', done => {
    it('removes pending file from open files', () => {
      store.state.openFiles.push(f);

      store
        .dispatch('removePendingTab', f)
        .then(() => {
          expect(store.state.openFiles.length).toBe(0);
        })
        .then(done)
        .catch(done.fail);
      return store.dispatch('removePendingTab', f).then(() => {
        expect(store.state.openFiles.length).toBe(0);
      });
    });

    it('emits event to dispose model', done => {
      store
        .dispatch('removePendingTab', f)
        .then(() => {
          expect(eventHub.$emit).toHaveBeenCalledWith(`editor.update.model.dispose.${f.key}`);
        })
        .then(done)
        .catch(done.fail);
    it('emits event to dispose model', () => {
      return store.dispatch('removePendingTab', f).then(() => {
        expect(eventHub.$emit).toHaveBeenCalledWith(`editor.update.model.dispose.${f.key}`);
      });
    });
  });

@@ -866,14 +775,10 @@ describe('IDE store file actions', () => {
      jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
    });

    it('emits event that files have changed', done => {
      store
        .dispatch('triggerFilesChange')
        .then(() => {
          expect(eventHub.$emit).toHaveBeenCalledWith('ide.files.change');
        })
        .then(done)
        .catch(done.fail);
    it('emits event that files have changed', () => {
      return store.dispatch('triggerFilesChange').then(() => {
        expect(eventHub.$emit).toHaveBeenCalledWith('ide.files.change');
      });
    });
  });
});
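The pattern applied throughout the spec above is Jest's promise-return style: instead of accepting a `done` callback and wiring up `done()`/`done.fail`, the test returns the promise and lets Jest await it, so a rejection fails the test automatically. A minimal sketch of the two styles, using a hypothetical async action:

```javascript
// Hypothetical async action used only to illustrate the two styles.
const saveDraft = () => Promise.resolve('saved');

// Callback style: the test must call done() and route failures to done.fail.
it('saves the draft (callback style)', done => {
  saveDraft()
    .then(result => {
      expect(result).toBe('saved');
      done();
    })
    .catch(done.fail);
});

// Returned-promise style: Jest awaits the promise; a rejection fails the test.
it('saves the draft (promise style)', () => {
  return saveDraft().then(result => {
    expect(result).toBe('saved');
  });
});
```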
@@ -3,28 +3,14 @@ import { TEST_HOST } from 'helpers/test_constants';
import Anomaly from '~/monitoring/components/charts/anomaly.vue';

import { colorValues } from '~/monitoring/constants';
import {
  anomalyDeploymentData,
  mockProjectDir,
  anomalyMockGraphData,
  anomalyMockResultValues,
} from '../../mock_data';
import { anomalyDeploymentData, mockProjectDir } from '../../mock_data';
import { anomalyGraphData } from '../../graph_data';
import MonitorTimeSeriesChart from '~/monitoring/components/charts/time_series.vue';

const mockProjectPath = `${TEST_HOST}${mockProjectDir}`;

const makeAnomalyGraphData = (datasetName, template = anomalyMockGraphData) => {
  const metrics = anomalyMockResultValues[datasetName].map((values, index) => ({
    ...template.metrics[index],
    result: [
      {
        metrics: {},
        values,
      },
    ],
  }));
  return { ...template, metrics };
};
const TEST_UPPER = 11;
const TEST_LOWER = 9;

describe('Anomaly chart component', () => {
  let wrapper;

@@ -38,13 +24,22 @@ describe('Anomaly chart component', () => {
  const getTimeSeriesProps = () => findTimeSeries().props();

  describe('wrapped monitor-time-series-chart component', () => {
    const dataSetName = 'noAnomaly';
    const dataSet = anomalyMockResultValues[dataSetName];
    const mockValues = ['10', '10', '10'];

    const mockGraphData = anomalyGraphData(
      {},
      {
        upper: mockValues.map(() => String(TEST_UPPER)),
        values: mockValues,
        lower: mockValues.map(() => String(TEST_LOWER)),
      },
    );

    const inputThresholds = ['some threshold'];

    beforeEach(() => {
      setupAnomalyChart({
        graphData: makeAnomalyGraphData(dataSetName),
        graphData: mockGraphData,
        deploymentData: anomalyDeploymentData,
        thresholds: inputThresholds,
        projectPath: mockProjectPath,

@@ -65,21 +60,21 @@ describe('Anomaly chart component', () => {

    it('receives "metric" with all data', () => {
      const { graphData } = getTimeSeriesProps();
      const query = graphData.metrics[0];
      const expectedQuery = makeAnomalyGraphData(dataSetName).metrics[0];
      expect(query).toEqual(expectedQuery);
      const metric = graphData.metrics[0];
      const expectedMetric = mockGraphData.metrics[0];
      expect(metric).toEqual(expectedMetric);
    });

    it('receives the "metric" results', () => {
      const { graphData } = getTimeSeriesProps();
      const { result } = graphData.metrics[0];
      const { values } = result[0];
      const [metricDataset] = dataSet;
      expect(values).toEqual(expect.any(Array));

      values.forEach(([, y], index) => {
        expect(y).toBeCloseTo(metricDataset[index][1]);
      });
      expect(values).toEqual([
        [expect.any(String), 10],
        [expect.any(String), 10],
        [expect.any(String), 10],
      ]);
    });
  });

@@ -108,14 +103,13 @@ describe('Anomaly chart component', () => {

    it('upper boundary values are stacked on top of lower boundary', () => {
      const [lowerSeries, upperSeries] = series;
      const [, upperDataset, lowerDataset] = dataSet;

      lowerSeries.data.forEach(([, y], i) => {
        expect(y).toBeCloseTo(lowerDataset[i][1]);
      lowerSeries.data.forEach(([, y]) => {
        expect(y).toBeCloseTo(TEST_LOWER);
      });

      upperSeries.data.forEach(([, y], i) => {
        expect(y).toBeCloseTo(upperDataset[i][1] - lowerDataset[i][1]);
      upperSeries.data.forEach(([, y]) => {
        expect(y).toBeCloseTo(TEST_UPPER - TEST_LOWER);
      });
    });
  });
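The stacking assertion above reflects how the boundary band is built: the lower series carries the absolute offset, and the upper series carries only the delta above it, so stacking the two reconstructs the true upper bound. A sketch of that transformation, assuming plain `[timestamp, value]` pairs:

```javascript
// Assumed shape: [timestamp, value] pairs for the lower and upper boundaries.
const lower = [['t0', 9], ['t1', 9]];
const upper = [['t0', 11], ['t1', 11]];

// Store the upper series as a delta on top of the lower series, so a
// stacked chart renders the band between the two boundaries.
const upperAsDelta = upper.map(([t, y], i) => [t, y - lower[i][1]]);
// => [['t0', 2], ['t1', 2]]
```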
@@ -140,11 +134,10 @@ describe('Anomaly chart component', () => {
        }),
      );
    });

    it('does not display anomalies', () => {
      const { symbolSize, itemStyle } = seriesConfig;
      const [metricDataset] = dataSet;

      metricDataset.forEach((v, dataIndex) => {
      mockValues.forEach((v, dataIndex) => {
        const size = symbolSize(null, { dataIndex });
        const color = itemStyle.color({ dataIndex });

@@ -155,9 +148,10 @@ describe('Anomaly chart component', () => {
    });

    it('can format y values (to use in tooltips)', () => {
      expect(parseFloat(wrapper.vm.yValueFormatted(0, 0))).toEqual(dataSet[0][0][1]);
      expect(parseFloat(wrapper.vm.yValueFormatted(1, 0))).toEqual(dataSet[1][0][1]);
      expect(parseFloat(wrapper.vm.yValueFormatted(2, 0))).toEqual(dataSet[2][0][1]);
      mockValues.forEach((v, dataIndex) => {
        const formatted = wrapper.vm.yValueFormatted(0, dataIndex);
        expect(parseFloat(formatted)).toEqual(parseFloat(v));
      });
    });
  });

@@ -179,12 +173,18 @@ describe('Anomaly chart component', () => {
  });

  describe('with no boundary data', () => {
    const dataSetName = 'noBoundary';
    const dataSet = anomalyMockResultValues[dataSetName];
    const noBoundaryData = anomalyGraphData(
      {},
      {
        upper: [],
        values: ['10', '10', '10'],
        lower: [],
      },
    );

    beforeEach(() => {
      setupAnomalyChart({
        graphData: makeAnomalyGraphData(dataSetName),
        graphData: noBoundaryData,
        deploymentData: anomalyDeploymentData,
      });
    });

@@ -204,7 +204,7 @@ describe('Anomaly chart component', () => {
    });

    it('can format y values (to use in tooltips)', () => {
      expect(parseFloat(wrapper.vm.yValueFormatted(0, 0))).toEqual(dataSet[0][0][1]);
      expect(parseFloat(wrapper.vm.yValueFormatted(0, 0))).toEqual(10);
      expect(wrapper.vm.yValueFormatted(1, 0)).toBe(''); // missing boundary
      expect(wrapper.vm.yValueFormatted(2, 0)).toBe(''); // missing boundary
    });

@@ -212,12 +212,20 @@ describe('Anomaly chart component', () => {
  });

  describe('with one anomaly', () => {
    const dataSetName = 'oneAnomaly';
    const dataSet = anomalyMockResultValues[dataSetName];
    const mockValues = ['10', '20', '10'];

    const oneAnomalyData = anomalyGraphData(
      {},
      {
        upper: mockValues.map(() => TEST_UPPER),
        values: mockValues,
        lower: mockValues.map(() => TEST_LOWER),
      },
    );

    beforeEach(() => {
      setupAnomalyChart({
        graphData: makeAnomalyGraphData(dataSetName),
        graphData: oneAnomalyData,
        deploymentData: anomalyDeploymentData,
      });
    });

@@ -226,13 +234,12 @@ describe('Anomaly chart component', () => {
    it('displays one anomaly', () => {
      const { seriesConfig } = getTimeSeriesProps();
      const { symbolSize, itemStyle } = seriesConfig;
      const [metricDataset] = dataSet;

      const bigDots = metricDataset.filter((v, dataIndex) => {
      const bigDots = mockValues.filter((v, dataIndex) => {
        const size = symbolSize(null, { dataIndex });
        return size > 0.1;
      });
      const redDots = metricDataset.filter((v, dataIndex) => {
      const redDots = mockValues.filter((v, dataIndex) => {
        const color = itemStyle.color({ dataIndex });
        return color === colorValues.anomalySymbol;
      });

@@ -244,13 +251,21 @@ describe('Anomaly chart component', () => {
  });

  describe('with offset', () => {
    const dataSetName = 'negativeBoundary';
    const dataSet = anomalyMockResultValues[dataSetName];
    const expectedOffset = 4; // Lowst point in mock data is -3.70, it gets rounded
    const mockValues = ['10', '11', '12'];
    const mockUpper = ['20', '20', '20'];
    const mockLower = ['-1', '-2', '-3.70'];
    const expectedOffset = 4; // Lowest point in mock data is -3.70, it gets rounded

    beforeEach(() => {
      setupAnomalyChart({
        graphData: makeAnomalyGraphData(dataSetName),
        graphData: anomalyGraphData(
          {},
          {
            upper: mockUpper,
            values: mockValues,
            lower: mockLower,
          },
        ),
        deploymentData: anomalyDeploymentData,
      });
    });

@@ -266,11 +281,11 @@ describe('Anomaly chart component', () => {
      const { graphData } = getTimeSeriesProps();
      const { result } = graphData.metrics[0];
      const { values } = result[0];
      const [metricDataset] = dataSet;

      expect(values).toEqual(expect.any(Array));

      values.forEach(([, y], index) => {
        expect(y).toBeCloseTo(metricDataset[index][1] + expectedOffset);
        expect(y).toBeCloseTo(parseFloat(mockValues[index]) + expectedOffset);
      });
    });
  });

@@ -281,14 +296,12 @@ describe('Anomaly chart component', () => {
      const { option } = getTimeSeriesProps();
      const { series } = option;
      const [lowerSeries, upperSeries] = series;
      const [, upperDataset, lowerDataset] = dataSet;

      lowerSeries.data.forEach(([, y], i) => {
        expect(y).toBeCloseTo(lowerDataset[i][1] + expectedOffset);
        expect(y).toBeCloseTo(parseFloat(mockLower[i]) + expectedOffset);
      });

      upperSeries.data.forEach(([, y], i) => {
        expect(y).toBeCloseTo(upperDataset[i][1] - lowerDataset[i][1]);
        expect(y).toBeCloseTo(parseFloat(mockUpper[i] - mockLower[i]));
      });
    });
  });
@@ -9,7 +9,6 @@ import AlertWidget from '~/monitoring/components/alert_widget.vue';

import DashboardPanel from '~/monitoring/components/dashboard_panel.vue';
import {
  anomalyMockGraphData,
  mockLogsHref,
  mockLogsPath,
  mockNamespace,

@@ -19,7 +18,7 @@ import {
  barMockData,
} from '../mock_data';
import { dashboardProps, graphData, graphDataEmpty } from '../fixture_data';
import { singleStatGraphData } from '../graph_data';
import { anomalyGraphData, singleStatGraphData } from '../graph_data';

import { panelTypes } from '~/monitoring/constants';

@@ -233,7 +232,7 @@ describe('Dashboard Panel', () => {
    ${dataWithType(panelTypes.AREA_CHART)} | ${MonitorTimeSeriesChart} | ${true}
    ${dataWithType(panelTypes.LINE_CHART)} | ${MonitorTimeSeriesChart} | ${true}
    ${singleStatGraphData()} | ${MonitorSingleStatChart} | ${true}
    ${anomalyMockGraphData} | ${MonitorAnomalyChart} | ${false}
    ${anomalyGraphData()} | ${MonitorAnomalyChart} | ${false}
    ${dataWithType(panelTypes.COLUMN)} | ${MonitorColumnChart} | ${false}
    ${dataWithType(panelTypes.STACKED_COLUMN)} | ${MonitorStackedColumnChart} | ${false}
    ${graphDataPrometheusQueryRangeMultiTrack} | ${MonitorHeatmapChart} | ${false}
@@ -124,3 +124,41 @@ export const singleStatGraphData = (panelOptions = {}, dataOptions = {}) => {
    ...panelOptions,
  });
};

/**
 * Generate mock graph data according to options
 *
 * @param {Object} panelOptions - Panel options as in YML.
 * @param {Object} dataOptions
 * @param {Array} dataOptions.values - Metric values
 * @param {Array} dataOptions.upper - Upper boundary values
 * @param {Array} dataOptions.lower - Lower boundary values
 */
export const anomalyGraphData = (panelOptions = {}, dataOptions = {}) => {
  const { values, upper, lower } = dataOptions;

  return mapPanelToViewModel({
    title: 'Anomaly Panel',
    type: panelTypes.ANOMALY_CHART,
    x_label: 'X Axis',
    y_label: 'Y Axis',
    metrics: [
      {
        label: `Metric`,
        state: metricStates.OK,
        result: matrixSingleResult({ values }),
      },
      {
        label: `Upper boundary`,
        state: metricStates.OK,
        result: matrixSingleResult({ values: upper }),
      },
      {
        label: `Lower boundary`,
        state: metricStates.OK,
        result: matrixSingleResult({ values: lower }),
      },
    ],
    ...panelOptions,
  });
};
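Given the documented `dataOptions`, callers build fixtures by passing parallel value arrays for the metric and its boundaries; a usage sketch mirroring how the anomaly spec invokes the factory:

```javascript
// Builds an anomaly panel fixture with a flat metric between fixed boundaries.
const graphData = anomalyGraphData(
  {}, // no panel overrides
  {
    upper: ['11', '11', '11'],
    values: ['10', '10', '10'],
    lower: ['9', '9', '9'],
  },
);
```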
@@ -51,136 +51,6 @@ export const anomalyDeploymentData = [
  },
];

export const anomalyMockResultValues = {
  noAnomaly: [
    [
      ['2019-08-19T19:00:00.000Z', 1.25],
      ['2019-08-19T20:00:00.000Z', 1.45],
      ['2019-08-19T21:00:00.000Z', 1.55],
      ['2019-08-19T22:00:00.000Z', 1.48],
    ],
    [
      // upper boundary
      ['2019-08-19T19:00:00.000Z', 2],
      ['2019-08-19T20:00:00.000Z', 2.55],
      ['2019-08-19T21:00:00.000Z', 2.65],
      ['2019-08-19T22:00:00.000Z', 3.0],
    ],
    [
      // lower boundary
      ['2019-08-19T19:00:00.000Z', 0.45],
      ['2019-08-19T20:00:00.000Z', 0.65],
      ['2019-08-19T21:00:00.000Z', 0.7],
      ['2019-08-19T22:00:00.000Z', 0.8],
    ],
  ],
  noBoundary: [
    [
      ['2019-08-19T19:00:00.000Z', 1.25],
      ['2019-08-19T20:00:00.000Z', 1.45],
      ['2019-08-19T21:00:00.000Z', 1.55],
      ['2019-08-19T22:00:00.000Z', 1.48],
    ],
    [
      // empty upper boundary
    ],
    [
      // empty lower boundary
    ],
  ],
  oneAnomaly: [
    [
      ['2019-08-19T19:00:00.000Z', 1.25],
      ['2019-08-19T20:00:00.000Z', 3.45], // anomaly
      ['2019-08-19T21:00:00.000Z', 1.55],
    ],
    [
      // upper boundary
      ['2019-08-19T19:00:00.000Z', 2],
      ['2019-08-19T20:00:00.000Z', 2.55],
      ['2019-08-19T21:00:00.000Z', 2.65],
    ],
    [
      // lower boundary
      ['2019-08-19T19:00:00.000Z', 0.45],
      ['2019-08-19T20:00:00.000Z', 0.65],
      ['2019-08-19T21:00:00.000Z', 0.7],
    ],
  ],
  negativeBoundary: [
    [
      ['2019-08-19T19:00:00.000Z', 1.25],
      ['2019-08-19T20:00:00.000Z', 3.45], // anomaly
      ['2019-08-19T21:00:00.000Z', 1.55],
    ],
    [
      // upper boundary
      ['2019-08-19T19:00:00.000Z', 2],
      ['2019-08-19T20:00:00.000Z', 2.55],
      ['2019-08-19T21:00:00.000Z', 2.65],
    ],
    [
      // lower boundary
      ['2019-08-19T19:00:00.000Z', -1.25],
      ['2019-08-19T20:00:00.000Z', -2.65],
      ['2019-08-19T21:00:00.000Z', -3.7], // lowest point
    ],
  ],
};

export const anomalyMockGraphData = {
  title: 'Requests Per Second Mock Data',
  type: 'anomaly-chart',
  weight: 3,
  metrics: [
    {
      metricId: '90',
      id: 'metric',
      query_range: 'MOCK_PROMETHEUS_METRIC_QUERY_RANGE',
      unit: 'RPS',
      label: 'Metrics RPS',
      metric_id: 90,
      prometheus_endpoint_path: 'MOCK_METRIC_PEP',
      result: [
        {
          metric: {},
          values: [['2019-08-19T19:00:00.000Z', 0]],
        },
      ],
    },
    {
      metricId: '91',
      id: 'upper',
      query_range: '...',
      unit: 'RPS',
      label: 'Upper Limit Metrics RPS',
      metric_id: 91,
      prometheus_endpoint_path: 'MOCK_UPPER_PEP',
      result: [
        {
          metric: {},
          values: [['2019-08-19T19:00:00.000Z', 0]],
        },
      ],
    },
    {
      metricId: '92',
      id: 'lower',
      query_range: '...',
      unit: 'RPS',
      label: 'Lower Limit Metrics RPS',
      metric_id: 92,
      prometheus_endpoint_path: 'MOCK_LOWER_PEP',
      result: [
        {
          metric: {},
          values: [['2019-08-19T19:00:00.000Z', 0]],
        },
      ],
    },
  ],
};

export const deploymentData = [
  {
    id: 111,
@@ -1,9 +1,9 @@
import * as monitoringUtils from '~/monitoring/utils';
import * as urlUtils from '~/lib/utils/url_utility';
import { TEST_HOST } from 'jest/helpers/test_constants';
import { mockProjectDir, anomalyMockGraphData, barMockData } from './mock_data';
import { mockProjectDir, barMockData } from './mock_data';
import { singleStatGraphData, anomalyGraphData } from './graph_data';
import { metricsDashboardViewModel, graphData } from './fixture_data';
import { singleStatGraphData } from './graph_data';

const mockPath = `${TEST_HOST}${mockProjectDir}/-/environments/29/metrics`;

@@ -102,12 +102,12 @@ describe('monitoring/utils', () => {
    let fourMetrics;
    beforeEach(() => {
      oneMetric = singleStatGraphData();
      threeMetrics = anomalyMockGraphData;
      threeMetrics = anomalyGraphData();

      const metrics = [...threeMetrics.metrics];
      metrics.push(threeMetrics.metrics[0]);
      fourMetrics = {
        ...anomalyMockGraphData,
        ...anomalyGraphData(),
        metrics,
      };
    });
@@ -7,6 +7,7 @@ import {
  UPDATE_SETTINGS_ERROR_MESSAGE,
  UPDATE_SETTINGS_SUCCESS_MESSAGE,
} from '~/registry/shared/constants';
import waitForPromises from 'helpers/wait_for_promises';
import { stringifiedFormOptions } from '../../shared/mock_data';

describe('Settings Form', () => {

@@ -36,12 +37,17 @@ describe('Settings Form', () => {
  const findSaveButton = () => wrapper.find({ ref: 'save-button' });
  const findLoadingIcon = (parent = wrapper) => parent.find(GlLoadingIcon);

  const mountComponent = () => {
  const mountComponent = (data = {}) => {
    wrapper = shallowMount(component, {
      stubs: {
        GlCard,
        GlLoadingIcon,
      },
      data() {
        return {
          ...data,
        };
      },
      mocks: {
        $toast: {
          show: jest.fn(),

@@ -55,7 +61,6 @@ describe('Settings Form', () => {
    store = createStore();
    store.dispatch('setInitialState', stringifiedFormOptions);
    dispatchSpy = jest.spyOn(store, 'dispatch');
    mountComponent();
    jest.spyOn(Tracking, 'event');
  });

@@ -63,20 +68,30 @@ describe('Settings Form', () => {
    wrapper.destroy();
  });

  describe('data binding', () => {
    it('v-model change update the settings property', () => {
      mountComponent();
      findFields().vm.$emit('input', { newValue: 'foo' });
      expect(dispatchSpy).toHaveBeenCalledWith('updateSettings', { settings: 'foo' });
    });

    it('v-model change update the api error property', () => {
      const apiErrors = { baz: 'bar' };
      mountComponent({ apiErrors });
      expect(findFields().props('apiErrors')).toEqual(apiErrors);
      findFields().vm.$emit('input', { newValue: 'foo', modified: 'baz' });
      expect(findFields().props('apiErrors')).toEqual({});
    });
  });

  describe('form', () => {
    let form;
    beforeEach(() => {
      mountComponent();
      form = findForm();
      dispatchSpy.mockReturnValue();
    });

    describe('data binding', () => {
      it('v-model change update the settings property', () => {
        findFields().vm.$emit('input', 'foo');
        expect(dispatchSpy).toHaveBeenCalledWith('updateSettings', { settings: 'foo' });
      });
    });

    describe('form reset event', () => {
      beforeEach(() => {
        form.trigger('reset');
@@ -108,24 +123,40 @@ describe('Settings Form', () => {
        expect(Tracking.event).toHaveBeenCalledWith(undefined, 'submit_form', trackingPayload);
      });

      it('show a success toast when submit succeed', () => {
      it('show a success toast when submit succeed', async () => {
        dispatchSpy.mockResolvedValue();
        form.trigger('submit');
        return wrapper.vm.$nextTick().then(() => {
          expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_SUCCESS_MESSAGE, {
            type: 'success',
          });
        await waitForPromises();
        expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_SUCCESS_MESSAGE, {
          type: 'success',
        });
      });

      it('show an error toast when submit fails', () => {
        dispatchSpy.mockRejectedValue();
        form.trigger('submit');
        return wrapper.vm.$nextTick().then(() => {
      describe('when submit fails', () => {
        it('shows an error', async () => {
          dispatchSpy.mockRejectedValue({ response: {} });
          form.trigger('submit');
          await waitForPromises();
          expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_ERROR_MESSAGE, {
            type: 'error',
          });
        });

        it('parses the error messages', async () => {
          dispatchSpy.mockRejectedValue({
            response: {
              data: {
                message: {
                  foo: 'bar',
                  'container_expiration_policy.name': ['baz'],
                },
              },
            },
          });
          form.trigger('submit');
          await waitForPromises();
          expect(findFields().props('apiErrors')).toEqual({ name: 'baz' });
        });
      });
    });
  });

@@ -134,6 +165,7 @@ describe('Settings Form', () => {
  describe('cancel button', () => {
    beforeEach(() => {
      store.commit('SET_SETTINGS', { foo: 'bar' });
      mountComponent();
    });

    it('has type reset', () => {

@@ -165,6 +197,7 @@ describe('Settings Form', () => {
    describe('when isLoading is true', () => {
      beforeEach(() => {
        store.commit('TOGGLE_LOADING');
        mountComponent();
      });
      afterEach(() => {
        store.commit('TOGGLE_LOADING');
@@ -114,7 +114,6 @@ exports[`Expiration Policy Form renders 1`] = `

<gl-form-group-stub
  id="expiration-policy-name-matching-group"
  invalid-feedback="The value of this input should be less than 255 characters"
  label-align="right"
  label-cols="3"
  label-for="expiration-policy-name-matching"

@@ -131,7 +130,6 @@ exports[`Expiration Policy Form renders 1`] = `
</gl-form-group-stub>
<gl-form-group-stub
  id="expiration-policy-keep-name-group"
  invalid-feedback="The value of this input should be less than 255 characters"
  label-align="right"
  label-cols="3"
  label-for="expiration-policy-keep-name"
@@ -94,7 +94,9 @@ describe('Expiration Policy Form', () => {
        : 'input';
      element.vm.$emit(modelUpdateEvent, value);
      return wrapper.vm.$nextTick().then(() => {
        expect(wrapper.emitted('input')).toEqual([[{ [modelName]: value }]]);
        expect(wrapper.emitted('input')).toEqual([
          [{ newValue: { [modelName]: value }, modified: modelName }],
        ]);
      });
    });

@@ -126,42 +128,61 @@ describe('Expiration Policy Form', () => {
  });

  describe.each`
    modelName | elementName | stateVariable
    ${'name_regex'} | ${'name-matching'} | ${'nameRegexState'}
    ${'name_regex_keep'} | ${'keep-name'} | ${'nameKeepRegexState'}
  `('regex textarea validation', ({ modelName, elementName, stateVariable }) => {
    describe(`when name regex is longer than ${NAME_REGEX_LENGTH}`, () => {
      const invalidString = new Array(NAME_REGEX_LENGTH + 2).join(',');
    modelName | elementName
    ${'name_regex'} | ${'name-matching'}
    ${'name_regex_keep'} | ${'keep-name'}
  `('regex textarea validation', ({ modelName, elementName }) => {
    const invalidString = new Array(NAME_REGEX_LENGTH + 2).join(',');

      beforeEach(() => {
        mountComponent({ value: { [modelName]: invalidString } });
    describe('when apiError contains an error message', () => {
      const errorMessage = 'something went wrong';

      it('shows the error message on the relevant field', () => {
        mountComponent({ apiErrors: { [modelName]: errorMessage } });
        expect(findFormGroup(elementName).attributes('invalid-feedback')).toBe(errorMessage);
      });

      it(`${stateVariable} is false`, () => {
        expect(wrapper.vm.textAreaState[stateVariable]).toBe(false);
      });

      it('emit the @invalidated event', () => {
        expect(wrapper.emitted('invalidated')).toBeTruthy();
      it('gives precedence to API errors compared to local ones', () => {
        mountComponent({
          apiErrors: { [modelName]: errorMessage },
          value: { [modelName]: invalidString },
        });
        expect(findFormGroup(elementName).attributes('invalid-feedback')).toBe(errorMessage);
      });
    });

    it('if the user did not type validation is null', () => {
      mountComponent({ value: { [modelName]: '' } });
      return wrapper.vm.$nextTick().then(() => {
        expect(wrapper.vm.textAreaState[stateVariable]).toBe(null);
    describe('when apiErrors is empty', () => {
      it('if the user did not type validation is null', async () => {
        mountComponent({ value: { [modelName]: '' } });
        expect(findFormGroup(elementName).attributes('state')).toBeUndefined();
        expect(wrapper.emitted('validated')).toBeTruthy();
      });
    });

    it(`if the user typed and is less than ${NAME_REGEX_LENGTH} state is true`, () => {
      mountComponent({ value: { [modelName]: 'foo' } });
      return wrapper.vm.$nextTick().then(() => {
      it(`if the user typed and is less than ${NAME_REGEX_LENGTH} state is true`, () => {
        mountComponent({ value: { [modelName]: 'foo' } });

        const formGroup = findFormGroup(elementName);
        const formElement = findFormElements(elementName, formGroup);
        expect(formGroup.attributes('state')).toBeTruthy();
        expect(formElement.attributes('state')).toBeTruthy();
      });

      describe(`when name regex is longer than ${NAME_REGEX_LENGTH}`, () => {
        beforeEach(() => {
          mountComponent({ value: { [modelName]: invalidString } });
        });

        it('textAreaValidation state is false', () => {
          expect(findFormGroup(elementName).attributes('state')).toBeUndefined();
          // we are forced to check the model attribute because falsy attrs are all casted to undefined in attrs
          // while in this case false shows an error and null instead shows nothing.
          expect(wrapper.vm.textAreaValidation[modelName].state).toBe(false);
        });

        it('emit the @invalidated event', () => {
          expect(wrapper.emitted('invalidated')).toBeTruthy();
        });
      });
    });
  });
@@ -13,7 +13,7 @@ import { SortDirection } from '~/vue_shared/components/filtered_search_bar/constants';
import RecentSearchesStore from '~/filtered_search/stores/recent_searches_store';
import RecentSearchesService from '~/filtered_search/services/recent_searches_service';

import { mockAvailableTokens, mockSortOptions } from './mock_data';
import { mockAvailableTokens, mockSortOptions, mockHistoryItems } from './mock_data';

const createComponent = ({
  namespace = 'gitlab-org/gitlab-test',

@@ -53,11 +53,17 @@ describe('FilteredSearchBarRoot', () => {

  describe('computed', () => {
    describe('tokenSymbols', () => {
      it('returns array of map containing type and symbols from `tokens` prop', () => {
      it('returns a map containing type and symbols from `tokens` prop', () => {
        expect(wrapper.vm.tokenSymbols).toEqual({ author_username: '@' });
      });
    });

    describe('tokenTitles', () => {
      it('returns a map containing type and title from `tokens` prop', () => {
        expect(wrapper.vm.tokenTitles).toEqual({ author_username: 'Author' });
      });
    });

    describe('sortDirectionIcon', () => {
      it('returns string "sort-lowest" when `selectedSortDirection` is "ascending"', () => {
        wrapper.setData({

@@ -172,6 +178,19 @@ describe('FilteredSearchBarRoot', () => {
      });
    });

    describe('handleClearHistory', () => {
      it('clears search history from recent searches store', () => {
        jest.spyOn(wrapper.vm.recentSearchesStore, 'setRecentSearches').mockReturnValue([]);
        jest.spyOn(wrapper.vm.recentSearchesService, 'save');

        wrapper.vm.handleClearHistory();

        expect(wrapper.vm.recentSearchesStore.setRecentSearches).toHaveBeenCalledWith([]);
        expect(wrapper.vm.recentSearchesService.save).toHaveBeenCalledWith([]);
        expect(wrapper.vm.getRecentSearches()).toEqual([]);
      });
    });

    describe('handleFilterSubmit', () => {
      const mockFilters = [
        {

@@ -186,14 +205,11 @@ describe('FilteredSearchBarRoot', () => {

      it('calls `recentSearchesStore.addRecentSearch` with serialized value of provided `filters` param', () => {
        jest.spyOn(wrapper.vm.recentSearchesStore, 'addRecentSearch');
        // jest.spyOn(wrapper.vm.recentSearchesService, 'save');

        wrapper.vm.handleFilterSubmit(mockFilters);

        return wrapper.vm.recentSearchesPromise.then(() => {
          expect(wrapper.vm.recentSearchesStore.addRecentSearch).toHaveBeenCalledWith(
            'author_username:=@root foo',
          );
          expect(wrapper.vm.recentSearchesStore.addRecentSearch).toHaveBeenCalledWith(mockFilters);
        });
      });

@@ -203,9 +219,7 @@ describe('FilteredSearchBarRoot', () => {
        wrapper.vm.handleFilterSubmit(mockFilters);

        return wrapper.vm.recentSearchesPromise.then(() => {
          expect(wrapper.vm.recentSearchesService.save).toHaveBeenCalledWith([
            'author_username:=@root foo',
          ]);
          expect(wrapper.vm.recentSearchesService.save).toHaveBeenCalledWith([mockFilters]);
        });
      });

@@ -224,6 +238,8 @@ describe('FilteredSearchBarRoot', () => {
        selectedSortDirection: SortDirection.descending,
      });

      wrapper.vm.recentSearchesStore.setRecentSearches(mockHistoryItems);

      return wrapper.vm.$nextTick();
    });

@@ -232,6 +248,7 @@ describe('FilteredSearchBarRoot', () => {

      expect(glFilteredSearchEl.props('placeholder')).toBe('Filter requirements');
      expect(glFilteredSearchEl.props('availableTokens')).toEqual(mockAvailableTokens);
      expect(glFilteredSearchEl.props('historyItems')).toEqual(mockHistoryItems);
    });

    it('renders sort dropdown component', () => {
@@ -44,6 +44,29 @@ export const mockAuthorToken = {

export const mockAvailableTokens = [mockAuthorToken];

export const mockHistoryItems = [
  [
    {
      type: 'author_username',
      value: {
        data: 'toby',
        operator: '=',
      },
    },
    'duo',
  ],
  [
    {
      type: 'author_username',
      value: {
        data: 'root',
        operator: '=',
      },
    },
    'si',
  ],
];

export const mockSortOptions = [
  {
    id: 1,
@@ -11,11 +11,12 @@ import { mockAuthorToken, mockAuthors } from '../mock_data';

jest.mock('~/flash');

const createComponent = ({ config = mockAuthorToken, value = { data: '' } } = {}) =>
const createComponent = ({ config = mockAuthorToken, value = { data: '' }, active = false } = {}) =>
  mount(AuthorToken, {
    propsData: {
      config,
      value,
      active,
    },
    provide: {
      portalName: 'fake target',

@@ -51,29 +52,23 @@ describe('AuthorToken', () => {
  describe('computed', () => {
    describe('currentValue', () => {
      it('returns lowercase string for `value.data`', () => {
        wrapper.setProps({
          value: { data: 'FOO' },
        });
        wrapper = createComponent({ value: { data: 'FOO' } });

        return wrapper.vm.$nextTick(() => {
          expect(wrapper.vm.currentValue).toBe('foo');
        });
        expect(wrapper.vm.currentValue).toBe('foo');
      });
    });

    describe('activeAuthor', () => {
      it('returns object for currently present `value.data`', () => {
      it('returns object for currently present `value.data`', async () => {
        wrapper = createComponent({ value: { data: mockAuthors[0].username } });

        wrapper.setData({
          authors: mockAuthors,
        });

        wrapper.setProps({
          value: { data: mockAuthors[0].username },
        });
        await wrapper.vm.$nextTick();

        return wrapper.vm.$nextTick(() => {
          expect(wrapper.vm.activeAuthor).toEqual(mockAuthors[0]);
        });
        expect(wrapper.vm.activeAuthor).toEqual(mockAuthors[0]);
      });
    });
  });
@@ -2,14 +2,17 @@ import {
  buildUneditableOpenTokens,
  buildUneditableCloseToken,
  buildUneditableCloseTokens,
  buildUneditableInlineTokens,
  buildUneditableTokens,
} from '~/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token';

import {
  originInlineToken,
  originToken,
  uneditableOpenTokens,
  uneditableCloseToken,
  uneditableCloseTokens,
  uneditableInlineTokens,
  uneditableTokens,
} from './mock_data';

@@ -38,8 +41,17 @@ describe('Build Uneditable Token renderer helper', () => {
    });
  });

  describe('buildUneditableInlineTokens', () => {
    it('returns a 3-item array of tokens with the originInlineToken wrapped in the middle of inline tokens', () => {
      const result = buildUneditableInlineTokens(originInlineToken);

      expect(result).toHaveLength(3);
      expect(result).toStrictEqual(uneditableInlineTokens);
    });
  });

  describe('buildUneditableTokens', () => {
    it('returns a 3-item array of tokens with the originToken wrapped in the middle', () => {
    it('returns a 3-item array of tokens with the originToken wrapped in the middle of block tokens', () => {
      const result = buildUneditableTokens(originToken);

      expect(result).toHaveLength(3);
@@ -12,20 +12,36 @@ export const normalTextNode = buildMockTextNode('This is just normal text.');

// Token spec helpers

const uneditableOpenToken = {
  type: 'openTag',
  tagName: 'div',
  attributes: { contenteditable: false },
  classNames: [
    'gl-px-4 gl-py-2 gl-opacity-5 gl-bg-gray-100 gl-user-select-none gl-cursor-not-allowed',
  ],
const buildUneditableOpenToken = type => {
  return {
    type: 'openTag',
    tagName: type,
    attributes: { contenteditable: false },
    classNames: [
      'gl-px-4 gl-py-2 gl-opacity-5 gl-bg-gray-100 gl-user-select-none gl-cursor-not-allowed',
    ],
  };
};

const buildUneditableCloseToken = type => {
  return { type: 'closeTag', tagName: type };
};

export const uneditableCloseToken = { type: 'closeTag', tagName: 'div' };
export const originToken = {
  type: 'text',
  content: '{:.no_toc .hidden-md .hidden-lg}',
};
export const uneditableOpenTokens = [uneditableOpenToken, originToken];
export const uneditableCloseToken = buildUneditableCloseToken('div');
export const uneditableOpenTokens = [buildUneditableOpenToken('div'), originToken];
export const uneditableCloseTokens = [originToken, uneditableCloseToken];
export const uneditableTokens = [...uneditableOpenTokens, uneditableCloseToken];

export const originInlineToken = {
  type: 'text',
  content: '<i>Inline</i> content',
};
export const uneditableInlineTokens = [
  buildUneditableOpenToken('span'),
  originInlineToken,
  buildUneditableCloseToken('span'),
];
@@ -0,0 +1,33 @@
import renderer from '~/vue_shared/components/rich_content_editor/services/renderers/render_font_awesome_html_inline';
import { buildUneditableInlineTokens } from '~/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token';

import { normalTextNode } from './mock_data';

const fontAwesomeInlineHtmlNode = {
  firstChild: null,
  literal: '<i class="far fa-paper-plane" id="biz-tech-icons">',
  type: 'html',
};

describe('Render Font Awesome Inline HTML renderer', () => {
  describe('canRender', () => {
    it('should return true when the argument `literal` has font awesome inline html syntax', () => {
      expect(renderer.canRender(fontAwesomeInlineHtmlNode)).toBe(true);
    });

    it('should return false when the argument `literal` lacks font awesome inline html syntax', () => {
      expect(renderer.canRender(normalTextNode)).toBe(false);
    });
  });

  describe('render', () => {
    it('should return uneditable inline tokens', () => {
      const token = { type: 'text', tagName: null, content: fontAwesomeInlineHtmlNode.literal };
      const context = { origin: () => token };

      expect(renderer.render(fontAwesomeInlineHtmlNode, context)).toStrictEqual(
        buildUneditableInlineTokens(token),
      );
    });
  });
});
@@ -22,8 +22,13 @@ RSpec.describe IconsHelper do

  describe 'sprite_icon_path' do
    it 'returns relative path' do
      expect(sprite_icon_path)
        .to eq icons_path
      expect(sprite_icon_path).to eq(icons_path)
    end

    it 'only calls image_path once when called multiple times' do
      expect(ActionController::Base.helpers).to receive(:image_path).once.and_call_original

      2.times { sprite_icon_path }
    end

    context 'when an asset_host is set in the config it will return an absolute local URL' do
@@ -444,8 +444,8 @@ RSpec.describe ProjectsHelper do
  end

  describe '#get_project_nav_tabs' do
    let_it_be(:user) { create(:user) }
    let(:project) { create(:project) }
    let(:user) { create(:user) }

    before do
      allow(helper).to receive(:can?) { true }

@@ -501,6 +501,20 @@ RSpec.describe ProjectsHelper do
      is_expected.not_to include(:external_wiki)
    end
  end

  context 'when project has confluence enabled' do
    before do
      allow(project).to receive(:has_confluence?).and_return(true)
    end

    it { is_expected.to include(:confluence) }
    it { is_expected.not_to include(:wiki) }
  end

  context 'when project does not have confluence enabled' do
    it { is_expected.not_to include(:confluence) }
    it { is_expected.to include(:wiki) }
  end
end

describe '#can_view_operations_tab?' do
@@ -322,6 +322,7 @@ project:
  - last_event
  - services
  - campfire_service
  - confluence_service
  - discord_service
  - drone_ci_service
  - emails_on_push_service
@@ -0,0 +1,44 @@
# frozen_string_literal: true

require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200701070435_add_default_value_stream_to_groups_with_group_stages.rb')

RSpec.describe AddDefaultValueStreamToGroupsWithGroupStages, schema: 20200624142207 do
  let(:groups) { table(:namespaces) }
  let(:group_stages) { table(:analytics_cycle_analytics_group_stages) }
  let(:value_streams) { table(:analytics_cycle_analytics_group_value_streams) }

  let!(:group) { groups.create!(name: 'test', path: 'path', type: 'Group') }
  let!(:group_stage) { group_stages.create!(name: 'test', group_id: group.id, start_event_identifier: 1, end_event_identifier: 2) }

  describe '#up' do
    it 'creates default value stream record for the group' do
      migrate!

      group_value_streams = value_streams.where(group_id: group.id)
      expect(group_value_streams.size).to eq(1)

      value_stream = group_value_streams.first
      expect(value_stream.name).to eq('default')
    end

    it 'migrates existing stages to the default value stream' do
      migrate!

      group_stage.reload

      value_stream = value_streams.find_by(group_id: group.id, name: 'default')
      expect(group_stage.group_value_stream_id).to eq(value_stream.id)
    end
  end

  describe '#down' do
    it 'sets the group_value_stream_id to nil' do
      described_class.new.down

      group_stage.reload

      expect(group_stage.group_value_stream_id).to be_nil
    end
  end
end
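The migration exercised above, in rough outline, does two things: it gives each group that already has cycle-analytics stages a 'default' value stream, and points those stages at it. The sketch below is a hypothetical reconstruction for orientation only, not the shipped code; `Group`, `GroupStage`, and `ValueStream` stand in for whatever models or `table(...)` helpers the real post-migration defines.

# Hypothetical sketch of the post-migration's behaviour.
def up
  # Only groups that actually have stages need a default value stream.
  Group.where(id: GroupStage.select(:group_id)).find_each do |group|
    value_stream = ValueStream.create!(group_id: group.id, name: 'default')
    GroupStage.where(group_id: group.id).update_all(group_value_stream_id: value_stream.id)
  end
end

def down
  # Reversing simply detaches stages from any value stream.
  GroupStage.update_all(group_value_stream_id: nil)
end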
@@ -174,18 +174,6 @@ RSpec.describe Ci::JobArtifact do
    end
  end

  describe '.for_ref' do
    let(:first_pipeline) { create(:ci_pipeline, ref: 'first_ref') }
    let(:second_pipeline) { create(:ci_pipeline, ref: 'second_ref', project: first_pipeline.project) }
    let!(:first_artifact) { create(:ci_job_artifact, job: create(:ci_build, pipeline: first_pipeline)) }
    let!(:second_artifact) { create(:ci_job_artifact, job: create(:ci_build, pipeline: second_pipeline)) }

    it 'returns job artifacts for a given pipeline ref' do
      expect(described_class.for_ref(first_pipeline.ref, first_pipeline.project.id)).to eq([first_artifact])
      expect(described_class.for_ref(second_pipeline.ref, first_pipeline.project.id)).to eq([second_artifact])
    end
  end

  describe '.for_job_name' do
    it 'returns job artifacts for a given job name' do
      first_job = create(:ci_build, name: 'first')
@@ -219,6 +219,50 @@ RSpec.describe Ci::Pipeline, :mailer do
    end
  end

  describe '.outside_pipeline_family' do
    subject(:outside_pipeline_family) { described_class.outside_pipeline_family(upstream_pipeline) }

    let(:upstream_pipeline) { create(:ci_pipeline, project: project) }
    let(:child_pipeline) { create(:ci_pipeline, project: project) }

    let!(:other_pipeline) { create(:ci_pipeline, project: project) }

    before do
      create(:ci_sources_pipeline,
             source_job: create(:ci_build, pipeline: upstream_pipeline),
             source_project: project,
             pipeline: child_pipeline,
             project: project)
    end

    it 'only returns pipelines outside pipeline family' do
      expect(outside_pipeline_family).to contain_exactly(other_pipeline)
    end
  end

  describe '.before_pipeline' do
    subject(:before_pipeline) { described_class.before_pipeline(child_pipeline) }

    let!(:older_other_pipeline) { create(:ci_pipeline, project: project) }

    let!(:upstream_pipeline) { create(:ci_pipeline, project: project) }
    let!(:child_pipeline) { create(:ci_pipeline, project: project) }

    let!(:other_pipeline) { create(:ci_pipeline, project: project) }

    before do
      create(:ci_sources_pipeline,
             source_job: create(:ci_build, pipeline: upstream_pipeline),
             source_project: project,
             pipeline: child_pipeline,
             project: project)
    end

    it 'only returns older pipelines outside pipeline family' do
      expect(before_pipeline).to contain_exactly(older_other_pipeline)
    end
  end

  describe '#merge_request?' do
    let(:pipeline) { create(:ci_pipeline, merge_request: merge_request) }
    let(:merge_request) { create(:merge_request) }

@@ -2635,6 +2679,55 @@ RSpec.describe Ci::Pipeline, :mailer do
    end
  end

  describe '#same_family_pipeline_ids' do
    subject(:same_family_pipeline_ids) { pipeline.same_family_pipeline_ids }

    context 'when pipeline is not child nor parent' do
      it 'returns just the pipeline id' do
        expect(same_family_pipeline_ids).to contain_exactly(pipeline)
      end
    end

    context 'when pipeline is child' do
      let(:parent) { create(:ci_pipeline, project: pipeline.project) }
      let(:sibling) { create(:ci_pipeline, project: pipeline.project) }

      before do
        create(:ci_sources_pipeline,
               source_job: create(:ci_build, pipeline: parent),
               source_project: parent.project,
               pipeline: pipeline,
               project: pipeline.project)

        create(:ci_sources_pipeline,
               source_job: create(:ci_build, pipeline: parent),
               source_project: parent.project,
               pipeline: sibling,
               project: sibling.project)
      end

      it 'returns parent sibling and self ids' do
        expect(same_family_pipeline_ids).to contain_exactly(parent, pipeline, sibling)
      end
    end

    context 'when pipeline is parent' do
      let(:child) { create(:ci_pipeline, project: pipeline.project) }

      before do
        create(:ci_sources_pipeline,
               source_job: create(:ci_build, pipeline: pipeline),
               source_project: pipeline.project,
               pipeline: child,
               project: child.project)
      end

      it 'returns self and child ids' do
        expect(same_family_pipeline_ids).to contain_exactly(pipeline, child)
      end
    end
  end

  describe '#stuck?' do
    before do
      create(:ci_build, :pending, pipeline: pipeline)
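To make the family scopes above concrete: parent/child membership comes from `ci_sources_pipeline` rows, which link a child pipeline to the build in its parent that triggered it, so a family is the parent, its children, and the pipeline itself. A hypothetical traversal sketch follows; the helper name and exact associations are illustrative, not the real model code.

# Illustrative only: climb one level via the source link, then gather
# every pipeline triggered from that parent.
def family_ids_for(pipeline)
  parent = pipeline.source_pipeline&.source_pipeline || pipeline
  child_ids = Ci::Sources::Pipeline.where(source_pipeline_id: parent.id).pluck(:pipeline_id)
  ([parent.id] + child_ids + [pipeline.id]).uniq
end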
@@ -3179,6 +3272,32 @@ RSpec.describe Ci::Pipeline, :mailer do
      end
    end
  end

  context 'when transitioning to success' do
    context 'when feature is enabled' do
      before do
        stub_feature_flags(keep_latest_artifacts_for_ref: true)
      end

      it 'calls the PipelineSuccessUnlockArtifactsWorker' do
        expect(Ci::PipelineSuccessUnlockArtifactsWorker).to receive(:perform_async).with(pipeline.id)

        pipeline.succeed!
      end
    end

    context 'when feature is disabled' do
      before do
        stub_feature_flags(keep_latest_artifacts_for_ref: false)
      end

      it 'does not call the PipelineSuccessUnlockArtifactsWorker' do
        expect(Ci::PipelineSuccessUnlockArtifactsWorker).not_to receive(:perform_async)

        pipeline.succeed!
      end
    end
  end
end

describe '#default_branch?' do
@@ -1248,51 +1248,21 @@ RSpec.describe MergeRequest do
    let(:merge_request) { subject }
    let(:repository) { merge_request.source_project.repository }

    context 'when memoize_source_branch_merge_request feature is enabled' do
      before do
        stub_feature_flags(memoize_source_branch_merge_request: true)
      end
    context 'when the source project is set' do
      it 'memoizes the value and returns the result' do
        expect(repository).to receive(:branch_exists?).once.with(merge_request.source_branch).and_return(true)

      context 'when the source project is set' do
        it 'memoizes the value and returns the result' do
          expect(repository).to receive(:branch_exists?).once.with(merge_request.source_branch).and_return(true)

          2.times { expect(merge_request.source_branch_exists?).to eq(true) }
        end
      end

      context 'when the source project is not set' do
        before do
          merge_request.source_project = nil
        end

        it 'returns false' do
          expect(merge_request.source_branch_exists?).to eq(false)
        end
        2.times { expect(merge_request.source_branch_exists?).to eq(true) }
      end
    end

    context 'when memoize_source_branch_merge_request feature is disabled' do
    context 'when the source project is not set' do
      before do
        stub_feature_flags(memoize_source_branch_merge_request: false)
        merge_request.source_project = nil
      end

      context 'when the source project is set' do
        it 'does not memoize the value and returns the result' do
          expect(repository).to receive(:branch_exists?).twice.with(merge_request.source_branch).and_return(true)

          2.times { expect(merge_request.source_branch_exists?).to eq(true) }
        end
      end

      context 'when the source project is not set' do
        before do
          merge_request.source_project = nil
        end

        it 'returns false' do
          expect(merge_request.source_branch_exists?).to eq(false)
        end
        it 'returns false' do
          expect(merge_request.source_branch_exists?).to eq(false)
        end
      end
    end
@@ -0,0 +1,90 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe ConfluenceService do
  describe 'Associations' do
    it { is_expected.to belong_to :project }
    it { is_expected.to have_one :service_hook }
  end

  describe 'Validations' do
    before do
      subject.active = active
    end

    context 'when service is active' do
      let(:active) { true }

      it { is_expected.not_to allow_value('https://example.com').for(:confluence_url) }
      it { is_expected.not_to allow_value('example.com').for(:confluence_url) }
      it { is_expected.not_to allow_value('foo').for(:confluence_url) }
      it { is_expected.not_to allow_value('ftp://example.atlassian.net/wiki').for(:confluence_url) }
      it { is_expected.not_to allow_value('https://example.atlassian.net').for(:confluence_url) }
      it { is_expected.not_to allow_value('https://.atlassian.net/wiki').for(:confluence_url) }
      it { is_expected.not_to allow_value('https://example.atlassian.net/wikifoo').for(:confluence_url) }
      it { is_expected.not_to allow_value('').for(:confluence_url) }
      it { is_expected.not_to allow_value(nil).for(:confluence_url) }
      it { is_expected.not_to allow_value('😊').for(:confluence_url) }
      it { is_expected.to allow_value('https://example.atlassian.net/wiki').for(:confluence_url) }
      it { is_expected.to allow_value('http://example.atlassian.net/wiki').for(:confluence_url) }
      it { is_expected.to allow_value('https://example.atlassian.net/wiki/').for(:confluence_url) }
      it { is_expected.to allow_value('http://example.atlassian.net/wiki/').for(:confluence_url) }
      it { is_expected.to allow_value('https://example.atlassian.net/wiki/foo').for(:confluence_url) }

      it { is_expected.to validate_presence_of(:confluence_url) }
    end

    context 'when service is inactive' do
      let(:active) { false }

      it { is_expected.not_to validate_presence_of(:confluence_url) }
      it { is_expected.to allow_value('foo').for(:confluence_url) }
    end
  end

  describe '#detailed_description' do
    it 'can correctly return a link to the project wiki when active' do
      project = create(:project)
      subject.project = project
      subject.active = true

      expect(subject.detailed_description).to include(Gitlab::Routing.url_helpers.project_wikis_url(project))
    end

    context 'when the project wiki is not enabled' do
      it 'returns nil when both active or inactive', :aggregate_failures do
        project = create(:project, :wiki_disabled)
        subject.project = project

        [true, false].each do |active|
          subject.active = active

          expect(subject.detailed_description).to be_nil
        end
      end
    end
  end

  describe 'Caching has_confluence on project_settings' do
    let(:project) { create(:project) }

    subject { project.project_setting.has_confluence? }

    it 'sets the property to true when service is active' do
      create(:confluence_service, project: project, active: true)

      is_expected.to be(true)
    end

    it 'sets the property to false when service is not active' do
      create(:confluence_service, project: project, active: false)

      is_expected.to be(false)
    end

    it 'creates a project_setting record if one was not already created' do
      expect { create(:confluence_service) }.to change { ProjectSetting.count }.by(1)
    end
  end
end
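The allow/deny URL cases above pin the rule down fairly tightly: an http(s) URL for an Atlassian cloud wiki. As a hedged sketch, the pattern below satisfies exactly those examples; the real validator may well be implemented differently.

# Hypothetical regexp derived only from the spec's examples: http or
# https, a non-empty atlassian.net subdomain, and a path that is /wiki
# on its own or followed by further segments.
CONFLUENCE_URL_PATTERN = %r{\Ahttps?://[\w-]+\.atlassian\.net/wiki(/|\z)}.freeze

def confluence_url_like?(url)
  CONFLUENCE_URL_PATTERN.match?(url.to_s)
end

confluence_url_like?('https://example.atlassian.net/wiki')    # => true
confluence_url_like?('https://example.atlassian.net/wikifoo') # => false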
@@ -63,6 +63,7 @@ RSpec.describe Project do
    it { is_expected.to have_one(:bugzilla_service) }
    it { is_expected.to have_one(:gitlab_issue_tracker_service) }
    it { is_expected.to have_one(:external_wiki_service) }
    it { is_expected.to have_one(:confluence_service) }
    it { is_expected.to have_one(:project_feature) }
    it { is_expected.to have_one(:project_repository) }
    it { is_expected.to have_one(:container_expiration_policy) }

@@ -1041,6 +1042,32 @@ RSpec.describe Project do
    end
  end

  describe '#has_confluence?' do
    let_it_be(:project) { build_stubbed(:project) }

    it 'returns false when project_setting.has_confluence property is false' do
      project.project_setting.has_confluence = false

      expect(project.has_confluence?).to be(false)
    end

    context 'when project_setting.has_confluence property is true' do
      before do
        project.project_setting.has_confluence = true
      end

      it 'returns true' do
        expect(project.has_confluence?).to be(true)
      end

      it 'returns false when confluence integration feature flag is disabled' do
        stub_feature_flags(ConfluenceService::FEATURE_FLAG => false)

        expect(project.has_confluence?).to be(false)
      end
    end
  end

  describe '#external_wiki' do
    let(:project) { create(:project) }

@@ -5385,6 +5412,20 @@ RSpec.describe Project do
      expect(services.count).to eq(2)
      expect(services.map(&:title)).to eq(['JetBrains TeamCity CI', 'Pushover'])
    end

    describe 'interaction with the confluence integration feature flag' do
      it 'contains a ConfluenceService when feature flag is enabled' do
        stub_feature_flags(ConfluenceService::FEATURE_FLAG => true)

        expect(subject.find_or_initialize_services).to include(ConfluenceService)
      end

      it 'does not contain a ConfluenceService when the confluence integration feature flag is disabled' do
        stub_feature_flags(ConfluenceService::FEATURE_FLAG => false)

        expect(subject.find_or_initialize_services).not_to include(ConfluenceService)
      end
    end
  end

  describe '#find_or_initialize_service' do
@@ -36,9 +36,9 @@ RSpec.describe API::Jobs do
  end

  let_it_be(:pipeline, reload: true) do
    create(:ci_empty_pipeline, project: project,
                               sha: project.commit.id,
                               ref: project.default_branch)
    create(:ci_pipeline, project: project,
                         sha: project.commit.id,
                         ref: project.default_branch)
  end

  let!(:job) do
@@ -10,6 +10,10 @@ RSpec.describe Branches::DeleteService do
  subject(:service) { described_class.new(project, user) }

  shared_examples 'a deleted branch' do |branch_name|
    before do
      allow(Ci::RefDeleteUnlockArtifactsWorker).to receive(:perform_async)
    end

    it 'removes the branch' do
      expect(branch_exists?(branch_name)).to be true

@@ -18,6 +22,12 @@ RSpec.describe Branches::DeleteService do
      expect(result.status).to eq :success
      expect(branch_exists?(branch_name)).to be false
    end

    it 'calls the RefDeleteUnlockArtifactsWorker' do
      expect(Ci::RefDeleteUnlockArtifactsWorker).to receive(:perform_async).with(project.id, user.id, "refs/heads/#{branch_name}")

      service.execute(branch_name)
    end
  end

  describe '#execute' do
@@ -30,26 +30,6 @@ RSpec.describe Ci::CreateJobArtifactsService do
  describe '#execute' do
    subject { service.execute(artifacts_file, params, metadata_file: metadata_file) }

    context 'locking' do
      let(:old_job) { create(:ci_build, pipeline: create(:ci_pipeline, project: job.project, ref: job.ref)) }
      let!(:latest_artifact) { create(:ci_job_artifact, job: old_job, locked: true) }
      let!(:other_artifact) { create(:ci_job_artifact, locked: true) }

      it 'locks the new artifact' do
        subject

        expect(Ci::JobArtifact.last).to have_attributes(locked: true)
      end

      it 'unlocks all other artifacts for the same ref' do
        expect { subject }.to change { latest_artifact.reload.locked }.from(true).to(false)
      end

      it 'does not unlock artifacts for other refs' do
        expect { subject }.not_to change { other_artifact.reload.locked }.from(true)
      end
    end

    context 'when artifacts file is uploaded' do
      it 'saves artifact for the given type' do
        expect { subject }.to change { Ci::JobArtifact.count }.by(1)
@@ -14,7 +14,7 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
    context 'when artifact is expired' do
      context 'when artifact is not locked' do
        before do
          artifact.update!(locked: false)
          artifact.job.pipeline.unlocked!
        end

        it 'destroys job artifact' do

@@ -24,7 +24,7 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared

      context 'when artifact is locked' do
        before do
          artifact.update!(locked: true)
          artifact.job.pipeline.artifacts_locked!
        end

        it 'does not destroy job artifact' do
@@ -0,0 +1,97 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Ci::UnlockArtifactsService do
  describe '#execute' do
    subject(:execute) { described_class.new(pipeline.project, pipeline.user).execute(ci_ref, before_pipeline) }

    before do
      stub_const("#{described_class}::BATCH_SIZE", 1)
    end

    [true, false].each do |tag|
      context "when tag is #{tag}" do
        let(:ref) { 'master' }
        let(:ref_path) { tag ? "#{::Gitlab::Git::TAG_REF_PREFIX}#{ref}" : "#{::Gitlab::Git::BRANCH_REF_PREFIX}#{ref}" }
        let(:ci_ref) { create(:ci_ref, ref_path: ref_path) }

        let!(:old_unlocked_pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :unlocked) }
        let!(:older_pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :artifacts_locked) }
        let!(:older_ambiguous_pipeline) { create(:ci_pipeline, ref: ref, tag: !tag, project: ci_ref.project, locked: :artifacts_locked) }
        let!(:pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :artifacts_locked) }
        let!(:child_pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :artifacts_locked) }
        let!(:newer_pipeline) { create(:ci_pipeline, ref: ref, tag: tag, project: ci_ref.project, locked: :artifacts_locked) }
        let!(:other_ref_pipeline) { create(:ci_pipeline, ref: 'other_ref', tag: tag, project: ci_ref.project, locked: :artifacts_locked) }

        before do
          create(:ci_sources_pipeline,
                 source_job: create(:ci_build, pipeline: pipeline),
                 source_project: ci_ref.project,
                 pipeline: child_pipeline,
                 project: ci_ref.project)
        end

        context 'when running on a ref before a pipeline' do
          let(:before_pipeline) { pipeline }

          it 'unlocks artifacts from older pipelines' do
            expect { execute }.to change { older_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
          end

          it 'does not unlock artifacts for tag or branch with same name as ref' do
            expect { execute }.not_to change { older_ambiguous_pipeline.reload.locked }.from('artifacts_locked')
          end

          it 'does not unlock artifacts from newer pipelines' do
            expect { execute }.not_to change { newer_pipeline.reload.locked }.from('artifacts_locked')
          end

          it 'does not lock artifacts from old unlocked pipelines' do
            expect { execute }.not_to change { old_unlocked_pipeline.reload.locked }.from('unlocked')
          end

          it 'does not unlock artifacts from the same pipeline' do
            expect { execute }.not_to change { pipeline.reload.locked }.from('artifacts_locked')
          end

          it 'does not unlock artifacts for other refs' do
            expect { execute }.not_to change { other_ref_pipeline.reload.locked }.from('artifacts_locked')
          end

          it 'does not unlock artifacts for child pipeline' do
            expect { execute }.not_to change { child_pipeline.reload.locked }.from('artifacts_locked')
          end
        end

        context 'when running on just the ref' do
          let(:before_pipeline) { nil }

          it 'unlocks artifacts from older pipelines' do
            expect { execute }.to change { older_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
          end

          it 'unlocks artifacts from newer pipelines' do
            expect { execute }.to change { newer_pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
          end

          it 'unlocks artifacts from the same pipeline' do
            expect { execute }.to change { pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
          end

          it 'does not unlock artifacts for tag or branch with same name as ref' do
            expect { execute }.not_to change { older_ambiguous_pipeline.reload.locked }.from('artifacts_locked')
          end

          it 'does not lock artifacts from old unlocked pipelines' do
            expect { execute }.not_to change { old_unlocked_pipeline.reload.locked }.from('unlocked')
          end

          it 'does not unlock artifacts for other refs' do
            expect { execute }.not_to change { other_ref_pipeline.reload.locked }.from('artifacts_locked')
          end
        end
      end
    end
  end
end
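Taken together, the two contexts describe two unlock modes: given a cut-off pipeline, only strictly older pipelines on the ref are unlocked and the cut-off's own family is spared; given no cut-off, everything on the ref is unlocked. A hypothetical sketch of the scope the service might build (names are illustrative, and `before_pipeline` here refers to the scope tested earlier in this diff):

# Illustrative only: locked pipelines on this ref, optionally limited
# to those created before the cut-off pipeline.
def unlockable_pipelines(ci_ref, before_pipeline = nil)
  scope = ci_ref.pipelines.where(locked: :artifacts_locked)
  scope = scope.before_pipeline(before_pipeline) if before_pipeline
  scope
end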
@@ -635,6 +635,37 @@ RSpec.describe Git::BranchPushService, services: true do
    end
  end

  describe 'artifacts' do
    context 'create branch' do
      let(:oldrev) { blankrev }

      it 'does nothing' do
        expect(::Ci::RefDeleteUnlockArtifactsWorker).not_to receive(:perform_async)

        execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
      end
    end

    context 'update branch' do
      it 'does nothing' do
        expect(::Ci::RefDeleteUnlockArtifactsWorker).not_to receive(:perform_async)

        execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
      end
    end

    context 'delete branch' do
      let(:newrev) { blankrev }

      it 'unlocks artifacts' do
        expect(::Ci::RefDeleteUnlockArtifactsWorker)
          .to receive(:perform_async).with(project.id, user.id, "refs/heads/#{branch}")

        execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
      end
    end
  end

  describe 'Hooks' do
    context 'run on a branch' do
      it 'delegates to Git::BranchHooksService' do
@@ -10,9 +10,11 @@ RSpec.describe Git::TagPushService do
  let(:project) { create(:project, :repository) }
  let(:service) { described_class.new(project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref }) }

  let(:oldrev) { Gitlab::Git::BLANK_SHA }
  let(:blankrev) { Gitlab::Git::BLANK_SHA }
  let(:oldrev) { blankrev }
  let(:newrev) { "8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b" } # gitlab-test: git rev-parse refs/tags/v1.1.0
  let(:ref) { 'refs/tags/v1.1.0' }
  let(:tag) { 'v1.1.0' }
  let(:ref) { "refs/tags/#{tag}" }

  describe "Push tags" do
    subject do

@@ -58,4 +60,35 @@ RSpec.describe Git::TagPushService do
      end
    end
  end

  describe 'artifacts' do
    context 'create tag' do
      let(:oldrev) { blankrev }

      it 'does nothing' do
        expect(::Ci::RefDeleteUnlockArtifactsWorker).not_to receive(:perform_async)

        service.execute
      end
    end

    context 'update tag' do
      it 'does nothing' do
        expect(::Ci::RefDeleteUnlockArtifactsWorker).not_to receive(:perform_async)

        service.execute
      end
    end

    context 'delete tag' do
      let(:newrev) { blankrev }

      it 'unlocks artifacts' do
        expect(::Ci::RefDeleteUnlockArtifactsWorker)
          .to receive(:perform_async).with(project.id, user.id, "refs/tags/#{tag}")

        service.execute
      end
    end
  end
end
@@ -11,6 +11,10 @@ RSpec.describe Tags::DestroyService do
  describe '#execute' do
    subject { service.execute(tag_name) }

    before do
      allow(Ci::RefDeleteUnlockArtifactsWorker).to receive(:perform_async)
    end

    it 'removes the tag' do
      expect(repository).to receive(:before_remove_tag)
      expect(service).to receive(:success)

@@ -18,6 +22,12 @@ RSpec.describe Tags::DestroyService do
      service.execute('v1.1.0')
    end

    it 'calls the RefDeleteUnlockArtifactsWorker' do
      expect(Ci::RefDeleteUnlockArtifactsWorker).to receive(:perform_async).with(project.id, user.id, 'refs/tags/v1.1.0')

      service.execute('v1.1.0')
    end

    context 'when there is an associated release on the tag' do
      let(:tag) { repository.tags.first }
      let(:tag_name) { tag.name }
@@ -38,26 +38,26 @@ module NotificationHelpers
  end

  def expect_delivery_jobs_count(count)
    expect(ActionMailer::DeliveryJob).to have_been_enqueued.exactly(count).times
    expect(ActionMailer::MailDeliveryJob).to have_been_enqueued.exactly(count).times
  end

  def expect_no_delivery_jobs
    expect(ActionMailer::DeliveryJob).not_to have_been_enqueued
    expect(ActionMailer::MailDeliveryJob).not_to have_been_enqueued
  end

  def expect_any_delivery_jobs
    expect(ActionMailer::DeliveryJob).to have_been_enqueued.at_least(:once)
    expect(ActionMailer::MailDeliveryJob).to have_been_enqueued.at_least(:once)
  end

  def have_enqueued_email(*args, mailer: "Notify", mail: "", delivery: "deliver_now")
    have_enqueued_job(ActionMailer::DeliveryJob).with(mailer, mail, delivery, *args)
    have_enqueued_job(ActionMailer::MailDeliveryJob).with(mailer, mail, delivery, args: args)
  end

  def expect_enqueud_email(*args, mailer: "Notify", mail: "", delivery: "deliver_now")
    expect(ActionMailer::DeliveryJob).to have_been_enqueued.with(mailer, mail, delivery, *args)
    expect(ActionMailer::MailDeliveryJob).to have_been_enqueued.with(mailer, mail, delivery, args: args)
  end

  def expect_not_enqueud_email(*args, mailer: "Notify", mail: "")
    expect(ActionMailer::DeliveryJob).not_to have_been_enqueued.with(mailer, mail, *args, any_args)
    expect(ActionMailer::MailDeliveryJob).not_to have_been_enqueued.with(mailer, mail, args: any_args)
  end
end
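The paired old/new lines above track the Rails 6.0 move from ActionMailer::DeliveryJob to ActionMailer::MailDeliveryJob, which also changed the enqueued payload from positional mailer arguments to an `args:` keyword, hence the `args: args` in the rewritten matchers. A hypothetical spec using the updated helpers (the notification call and ids are illustrative):

it 'enqueues the reassignment email once' do
  # `notification`, `issue`, and `user` are assumed to exist in the
  # surrounding spec; the helper names come from the module above.
  notification.reassigned_issue(issue, user)

  expect_enqueud_email(issue.id, user.id, mail: 'reassigned_issue_email')
  expect_delivery_jobs_count(1)
end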
@@ -12,6 +12,8 @@ Service.available_services_names.each do |service|
  service_attrs_list.inject({}) do |hash, k|
    if k =~ /^(token*|.*_token|.*_key)/
      hash.merge!(k => 'secrettoken')
    elsif service == 'confluence' && k == :confluence_url
      hash.merge!(k => 'https://example.atlassian.net/wiki')
    elsif k =~ /^(.*_url|url|webhook)/
      hash.merge!(k => "http://example.com")
    elsif service_klass.method_defined?("#{k}?")
@@ -67,77 +67,51 @@ RSpec.shared_examples 'cacheable diff collection' do
  end

  describe '#write_cache' do
    before do
      expect(Gitlab::Diff::StatsCache).to receive(:new).with(cachable_key: diffable.cache_key) { stats_cache }
    end

    it 'calls Gitlab::Diff::HighlightCache#write_if_empty' do
      expect(highlight_cache).to receive(:write_if_empty).once

      subject.write_cache
    end

    context 'when the feature flag is enabled' do
      before do
        stub_feature_flags(cache_diff_stats_merge_request: true)
        expect(Gitlab::Diff::StatsCache).to receive(:new).with(cachable_key: diffable.cache_key) { stats_cache }
      end
    it 'calls Gitlab::Diff::StatsCache#write_if_empty with diff stats' do
      diff_stats = Gitlab::Git::DiffStatsCollection.new([])

      it 'calls Gitlab::Diff::StatsCache#write_if_empty with diff stats' do
        diff_stats = Gitlab::Git::DiffStatsCollection.new([])
      expect(diffable.project.repository)
        .to receive(:diff_stats).and_return(diff_stats)

        expect(diffable.project.repository)
          .to receive(:diff_stats).and_return(diff_stats)
      expect(stats_cache).to receive(:write_if_empty).once.with(diff_stats)

        expect(stats_cache).to receive(:write_if_empty).once.with(diff_stats)

        subject.write_cache
      end
    end

    context 'when the feature flag is disabled' do
      before do
        stub_feature_flags(cache_diff_stats_merge_request: false)
      end

      it 'does not call Gitlab::Diff::StatsCache#write_if_empty' do
        expect(stats_cache).not_to receive(:write_if_empty)

        subject.write_cache
      end
      subject.write_cache
    end
  end

  describe '#clear_cache' do
    before do
      expect(Gitlab::Diff::StatsCache).to receive(:new).with(cachable_key: diffable.cache_key) { stats_cache }
    end

    it 'calls Gitlab::Diff::HighlightCache#clear' do
      expect(highlight_cache).to receive(:clear).once

      subject.clear_cache
    end

    context 'when the feature flag is enabled' do
      before do
        stub_feature_flags(cache_diff_stats_merge_request: true)
        expect(Gitlab::Diff::StatsCache).to receive(:new).with(cachable_key: diffable.cache_key) { stats_cache }
      end
    it 'calls Gitlab::Diff::StatsCache#clear' do
      expect(stats_cache).to receive(:clear).once

      it 'calls Gitlab::Diff::StatsCache#clear' do
        expect(stats_cache).to receive(:clear).once

        subject.clear_cache
      end
    end

    context 'when the feature flag is disabled' do
      before do
        stub_feature_flags(cache_diff_stats_merge_request: false)
      end

      it 'does not calls Gitlab::Diff::StatsCache#clear' do
        expect(stats_cache).not_to receive(:clear)

        subject.clear_cache
      end
      subject.clear_cache
    end
  end

  describe '#diff_files' do
    before do
      expect(Gitlab::Diff::StatsCache).to receive(:new).with(cachable_key: diffable.cache_key) { stats_cache }
    end

    it 'calls Gitlab::Diff::HighlightCache#decorate' do
      expect(highlight_cache).to receive(:decorate)
        .with(instance_of(Gitlab::Diff::File))

@@ -146,40 +120,19 @@ RSpec.shared_examples 'cacheable diff collection' do
      subject.diff_files
    end

    context 'when the feature swtich is enabled' do
    context 'when there are stats cached' do
      before do
        stub_feature_flags(cache_diff_stats_merge_request: true)
        expect(Gitlab::Diff::StatsCache).to receive(:new).with(cachable_key: diffable.cache_key) { stats_cache }
        allow(stats_cache).to receive(:read).and_return(Gitlab::Git::DiffStatsCollection.new([]))
      end

      context 'when there are stats cached' do
        before do
          allow(stats_cache).to receive(:read).and_return(Gitlab::Git::DiffStatsCollection.new([]))
        end
      it 'does not make a diff stats rpc call' do
        expect(diffable.project.repository).not_to receive(:diff_stats)

        it 'does not make a diff stats rpc call' do
          expect(diffable.project.repository).not_to receive(:diff_stats)

          subject.diff_files
        end
      end

      context 'when there are no stats cached' do
        it 'makes a diff stats rpc call' do
          expect(diffable.project.repository)
            .to receive(:diff_stats)
            .with(diffable.diff_refs.base_sha, diffable.diff_refs.head_sha)

          subject.diff_files
        end
        subject.diff_files
      end
    end

    context 'when the feature switch is disabled' do
      before do
        stub_feature_flags(cache_diff_stats_merge_request: false)
      end

      context 'when there are no stats cached' do
        it 'makes a diff stats rpc call' do
          expect(diffable.project.repository)
            .to receive(:diff_stats)
@@ -76,7 +76,7 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
    it 'does not show the wiki tab' do
      render

      expect(rendered).not_to have_link('Wiki', href: wiki_path(project.wiki))
      expect(rendered).not_to have_link('Wiki')
    end
  end
end

@@ -109,6 +109,38 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
    end
  end

  describe 'confluence tab' do
    let!(:service) { create(:confluence_service, project: project, active: active) }

    before do
      render
    end

    context 'when the Confluence integration is active' do
      let(:active) { true }

      it 'shows the Confluence tab' do
        expect(rendered).to have_link('Confluence', href: service.confluence_url)
      end

      it 'does not show the GitLab wiki tab' do
        expect(rendered).not_to have_link('Wiki')
      end
    end

    context 'when it is disabled' do
      let(:active) { false }

      it 'does not show the Confluence tab' do
        expect(rendered).not_to have_link('Confluence')
      end

      it 'shows the GitLab wiki tab' do
        expect(rendered).to have_link('Wiki', href: wiki_path(project.wiki))
      end
    end
  end

  describe 'ci/cd settings tab' do
    before do
      project.update!(archived: project_archived)
Some files were not shown because too many files have changed in this diff.