Add latest changes from gitlab-org/gitlab@master

parent 10052df753
commit 2516f0d87b
@@ -91,9 +91,9 @@ export default {
<h3 class="page-title gl-m-0">{{ title }}</h3>
</div>

<div v-if="error.length" class="alert alert-danger">
<gl-alert v-if="error.length" variant="warning" class="gl-mb-5" :dismissible="false">
<p v-for="(message, index) in error" :key="index" class="gl-mb-0">{{ message }}</p>
</div>
</gl-alert>

<feature-flag-form
:name="name"

@@ -1,4 +1,5 @@
<script>
import { GlAlert } from '@gitlab/ui';
import { mapState, mapActions } from 'vuex';
import axios from '~/lib/utils/axios_utils';
import FeatureFlagForm from './form.vue';

@@ -10,6 +11,7 @@ import featureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
export default {
components: {
FeatureFlagForm,
GlAlert,
},
mixins: [featureFlagsMixin()],
inject: {

@@ -61,9 +63,9 @@ export default {
<div>
<h3 class="page-title">{{ s__('FeatureFlags|New feature flag') }}</h3>

<div v-if="error.length" class="alert alert-danger">
<p v-for="(message, index) in error" :key="index" class="mb-0">{{ message }}</p>
</div>
<gl-alert v-if="error.length" variant="warning" class="gl-mb-5" :dismissible="false">
<p v-for="(message, index) in error" :key="index" class="gl-mb-0">{{ message }}</p>
</gl-alert>

<feature-flag-form
:cancel-path="path"
@@ -0,0 +1,97 @@
<script>
import { GlSorting, GlSortingItem, GlFilteredSearch } from '@gitlab/ui';
import { mapState, mapActions } from 'vuex';
import { __, s__ } from '~/locale';
import PackageTypeToken from './tokens/package_type_token.vue';
import { ASCENDING_ODER, DESCENDING_ORDER } from '../constants';
import getTableHeaders from '../utils';

export default {
components: {
GlSorting,
GlSortingItem,
GlFilteredSearch,
},
computed: {
...mapState({
isGroupPage: (state) => state.config.isGroupPage,
orderBy: (state) => state.sorting.orderBy,
sort: (state) => state.sorting.sort,
filter: (state) => state.filter,
}),
internalFilter: {
get() {
return this.filter;
},
set(value) {
this.setFilter(value);
},
},
sortText() {
const field = this.sortableFields.find((s) => s.orderBy === this.orderBy);
return field ? field.label : '';
},
sortableFields() {
return getTableHeaders(this.isGroupPage);
},
isSortAscending() {
return this.sort === ASCENDING_ODER;
},
tokens() {
return [
{
type: 'type',
icon: 'package',
title: s__('PackageRegistry|Type'),
unique: true,
token: PackageTypeToken,
operators: [{ value: '=', description: __('is'), default: 'true' }],
},
];
},
},
methods: {
...mapActions(['setSorting', 'setFilter']),
onDirectionChange() {
const sort = this.isSortAscending ? DESCENDING_ORDER : ASCENDING_ODER;
this.setSorting({ sort });
this.$emit('sort:changed');
},
onSortItemClick(item) {
this.setSorting({ orderBy: item });
this.$emit('sort:changed');
},
clearSearch() {
this.setFilter([]);
this.$emit('filter:changed');
},
},
};
</script>

<template>
<div class="gl-display-flex gl-p-5 gl-bg-gray-10 gl-border-solid gl-border-1 gl-border-gray-100">
<gl-filtered-search
v-model="internalFilter"
class="gl-mr-4 gl-flex-fill-1"
:placeholder="__('Filter results')"
:available-tokens="tokens"
@submit="$emit('filter:changed')"
@clear="clearSearch"
/>
<gl-sorting
:text="sortText"
:is-ascending="isSortAscending"
@sortDirectionChange="onDirectionChange"
>
<gl-sorting-item
v-for="item in sortableFields"
ref="packageListSortItem"
:key="item.orderBy"
@click="onSortItemClick(item.orderBy)"
>
{{ item.label }}
</gl-sorting-item>
</gl-sorting>
</div>
</template>
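The new search component above binds `GlFilteredSearch` to the Vuex `filter` state through the `internalFilter` computed. A minimal sketch of the token array that flows through `setFilter`; the exact token shape is an assumption inferred from how this commit reads it (`f.type`, `f.value?.data`), not a documented contract:

```javascript
// Hypothetical filter value written by GlFilteredSearch via `internalFilter`
// and stored with `setFilter`. Field names mirror the usages in this commit.
const filter = [
  // a "Type" token chosen from the PackageTypeToken suggestions
  { type: 'type', value: { data: 'composer', operator: '=' } },
  // free-text search terms arrive as filtered-search-term tokens
  { type: 'filtered-search-term', value: { data: 'my-package' } },
];

// Clearing the search resets the store and asks the parent to refetch:
// this.setFilter([]); this.$emit('filter:changed');
```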
@@ -1,21 +0,0 @@
<script>
import { GlSearchBoxByClick } from '@gitlab/ui';
import { mapActions } from 'vuex';

export default {
components: {
GlSearchBoxByClick,
},
methods: {
...mapActions(['setFilter']),
},
};
</script>

<template>
<gl-search-box-by-click
:placeholder="s__('PackageRegistry|Filter by name')"
@submit="$emit('filter')"
@input="setFilter"
/>
</template>
@@ -1,39 +1,43 @@
<script>
import { mapActions, mapState } from 'vuex';
import { GlEmptyState, GlTab, GlTabs, GlLink, GlSprintf } from '@gitlab/ui';
import { s__, sprintf } from '~/locale';
import { GlEmptyState, GlLink, GlSprintf } from '@gitlab/ui';
import { s__ } from '~/locale';
import createFlash from '~/flash';
import { historyReplaceState } from '~/lib/utils/common_utils';
import { SHOW_DELETE_SUCCESS_ALERT } from '~/packages/shared/constants';
import PackageFilter from './packages_filter.vue';
import PackageList from './packages_list.vue';
import PackageSort from './packages_sort.vue';
import { PACKAGE_REGISTRY_TABS, DELETE_PACKAGE_SUCCESS_MESSAGE } from '../constants';
import { DELETE_PACKAGE_SUCCESS_MESSAGE } from '../constants';
import PackageTitle from './package_title.vue';
import PackageSearch from './package_search.vue';

export default {
components: {
GlEmptyState,
GlTab,
GlTabs,
GlLink,
GlSprintf,
PackageFilter,
PackageList,
PackageSort,
PackageTitle,
PackageSearch,
},
computed: {
...mapState({
emptyListIllustration: (state) => state.config.emptyListIllustration,
emptyListHelpUrl: (state) => state.config.emptyListHelpUrl,
filterQuery: (state) => state.filterQuery,
filter: (state) => state.filter,
selectedType: (state) => state.selectedType,
packageHelpUrl: (state) => state.config.packageHelpUrl,
packagesCount: (state) => state.pagination?.total,
}),
tabsToRender() {
return PACKAGE_REGISTRY_TABS;
emptySearch() {
return (
this.filter.filter((f) => f.type !== 'filtered-search-term' || f.value?.data).length === 0
);
},

emptyStateTitle() {
return this.emptySearch
? s__('PackageRegistry|There are no packages yet')
: s__('PackageRegistry|Sorry, your filter produced no results');
},
},
mounted() {

@@ -48,27 +52,6 @@ export default {
onPackageDeleteRequest(item) {
return this.requestDeletePackage(item);
},
tabChanged(index) {
const selected = PACKAGE_REGISTRY_TABS[index];

if (selected !== this.selectedType) {
this.setSelectedType(selected);
this.requestPackagesList();
}
},
emptyStateTitle({ title, type }) {
if (this.filterQuery) {
return s__('PackageRegistry|Sorry, your filter produced no results');
}

if (type) {
return sprintf(s__('PackageRegistry|There are no %{packageType} packages yet'), {
packageType: title,
});
}

return s__('PackageRegistry|There are no packages yet');
},
checkDeleteAlert() {
const urlParams = new URLSearchParams(window.location.search);
const showAlert = urlParams.get(SHOW_DELETE_SUCCESS_ALERT);

@@ -92,33 +75,21 @@ export default {
<template>
<div>
<package-title :package-help-url="packageHelpUrl" :packages-count="packagesCount" />
<package-search @sort:changed="requestPackagesList" @filter:changed="requestPackagesList" />

<gl-tabs @input="tabChanged">
<template #tabs-end>
<div
class="gl-display-flex gl-align-self-center gl-py-2 gl-flex-grow-1 gl-justify-content-end"
>
<package-filter class="gl-mr-2" @filter="requestPackagesList" />
<package-sort @sort:changed="requestPackagesList" />
</div>
</template>

<gl-tab v-for="(tab, index) in tabsToRender" :key="index" :title="tab.title">
<package-list @page:changed="onPageChanged" @package:delete="onPackageDeleteRequest">
<template #empty-state>
<gl-empty-state :title="emptyStateTitle(tab)" :svg-path="emptyListIllustration">
<template #description>
<gl-sprintf v-if="filterQuery" :message="$options.i18n.widenFilters" />
<gl-sprintf v-else :message="$options.i18n.noResults">
<template #noPackagesLink="{ content }">
<gl-link :href="emptyListHelpUrl" target="_blank">{{ content }}</gl-link>
</template>
</gl-sprintf>
<package-list @page:changed="onPageChanged" @package:delete="onPackageDeleteRequest">
<template #empty-state>
<gl-empty-state :title="emptyStateTitle" :svg-path="emptyListIllustration">
<template #description>
<gl-sprintf v-if="!emptySearch" :message="$options.i18n.widenFilters" />
<gl-sprintf v-else :message="$options.i18n.noResults">
<template #noPackagesLink="{ content }">
<gl-link :href="emptyListHelpUrl" target="_blank">{{ content }}</gl-link>
</template>
</gl-empty-state>
</gl-sprintf>
</template>
</package-list>
</gl-tab>
</gl-tabs>
</gl-empty-state>
</template>
</package-list>
</div>
</template>
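The `emptySearch` computed above decides which empty-state title and description to show. A small sketch of how that predicate behaves for a few filter values (token shape assumed as in the earlier note):

```javascript
// Stand-alone mirror of the emptySearch computed property, for illustration.
const emptySearch = (filter) =>
  filter.filter((f) => f.type !== 'filtered-search-term' || f.value?.data).length === 0;

emptySearch([]); // => true: no active search, "There are no packages yet"
emptySearch([{ type: 'filtered-search-term', value: { data: '' } }]); // => true: empty text term is ignored
emptySearch([{ type: 'type', value: { data: 'npm' } }]); // => false: "Sorry, your filter produced no results"
```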
@@ -1,60 +0,0 @@
<script>
import { GlSorting, GlSortingItem } from '@gitlab/ui';
import { mapState, mapActions } from 'vuex';
import { ASCENDING_ODER, DESCENDING_ORDER } from '../constants';
import getTableHeaders from '../utils';

export default {
name: 'PackageSort',
components: {
GlSorting,
GlSortingItem,
},
computed: {
...mapState({
isGroupPage: (state) => state.config.isGroupPage,
orderBy: (state) => state.sorting.orderBy,
sort: (state) => state.sorting.sort,
}),
sortText() {
const field = this.sortableFields.find((s) => s.orderBy === this.orderBy);
return field ? field.label : '';
},
sortableFields() {
return getTableHeaders(this.isGroupPage);
},
isSortAscending() {
return this.sort === ASCENDING_ODER;
},
},
methods: {
...mapActions(['setSorting']),
onDirectionChange() {
const sort = this.isSortAscending ? DESCENDING_ORDER : ASCENDING_ODER;
this.setSorting({ sort });
this.$emit('sort:changed');
},
onSortItemClick(item) {
this.setSorting({ orderBy: item });
this.$emit('sort:changed');
},
},
};
</script>

<template>
<gl-sorting
:text="sortText"
:is-ascending="isSortAscending"
@sortDirectionChange="onDirectionChange"
>
<gl-sorting-item
v-for="item in sortableFields"
ref="packageListSortItem"
:key="item.orderBy"
@click="onSortItemClick(item.orderBy)"
>
{{ item.label }}
</gl-sorting-item>
</gl-sorting>
</template>
@@ -0,0 +1,26 @@
<script>
import { GlFilteredSearchToken, GlFilteredSearchSuggestion } from '@gitlab/ui';
import { PACKAGE_TYPES } from '../../constants';

export default {
components: {
GlFilteredSearchToken,
GlFilteredSearchSuggestion,
},
PACKAGE_TYPES,
};
</script>

<template>
<gl-filtered-search-token v-bind="{ ...$attrs }" v-on="$listeners">
<template #suggestions>
<gl-filtered-search-suggestion
v-for="(type, index) in $options.PACKAGE_TYPES"
:key="index"
:value="type.type"
>
{{ type.title }}
</gl-filtered-search-suggestion>
</template>
</gl-filtered-search-token>
</template>
@@ -55,11 +55,7 @@ export const SORT_FIELDS = [
},
];

export const PACKAGE_REGISTRY_TABS = [
{
title: __('All'),
type: null,
},
export const PACKAGE_TYPES = [
{
title: s__('PackageRegistry|Composer'),
type: PackageType.COMPOSER,
@@ -15,7 +15,6 @@ import { getNewPaginationPage } from '../utils';
export const setInitialState = ({ commit }, data) => commit(types.SET_INITIAL_STATE, data);
export const setLoading = ({ commit }, data) => commit(types.SET_MAIN_LOADING, data);
export const setSorting = ({ commit }, data) => commit(types.SET_SORTING, data);
export const setSelectedType = ({ commit }, data) => commit(types.SET_SELECTED_TYPE, data);
export const setFilter = ({ commit }, data) => commit(types.SET_FILTER, data);

export const receivePackagesListSuccess = ({ commit }, { data, headers }) => {

@@ -29,9 +28,9 @@ export const requestPackagesList = ({ dispatch, state }, params = {}) => {
const { page = DEFAULT_PAGE, per_page = DEFAULT_PAGE_SIZE } = params;
const { sort, orderBy } = state.sorting;

const type = state.selectedType?.type?.toLowerCase();
const nameFilter = state.filterQuery?.toLowerCase();
const packageFilters = { package_type: type, package_name: nameFilter };
const type = state.filter.find((f) => f.type === 'type');
const name = state.filter.find((f) => f.type === 'filtered-search-term');
const packageFilters = { package_type: type?.value?.data, package_name: name?.value?.data };

const apiMethod = state.config.isGroupPage ? 'groupPackages' : 'projectPackages';
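With the tabs gone, `requestPackagesList` above derives the API parameters from the filtered-search tokens instead of `selectedType`/`filterQuery`. A hedged sketch of that mapping as a stand-alone function (token shape assumed, as noted earlier):

```javascript
// Hypothetical stand-alone version of the parameter mapping inside requestPackagesList.
function packageFiltersFrom(filter) {
  const type = filter.find((f) => f.type === 'type');
  const name = filter.find((f) => f.type === 'filtered-search-term');

  return { package_type: type?.value?.data, package_name: name?.value?.data };
}

packageFiltersFrom([
  { type: 'type', value: { data: 'npm' } },
  { type: 'filtered-search-term', value: { data: 'http' } },
]);
// => { package_type: 'npm', package_name: 'http' }
```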
@@ -4,5 +4,4 @@ export const SET_PACKAGE_LIST_SUCCESS = 'SET_PACKAGE_LIST_SUCCESS';
export const SET_PAGINATION = 'SET_PAGINATION';
export const SET_MAIN_LOADING = 'SET_MAIN_LOADING';
export const SET_SORTING = 'SET_SORTING';
export const SET_SELECTED_TYPE = 'SET_SELECTED_TYPE';
export const SET_FILTER = 'SET_FILTER';
@@ -28,11 +28,7 @@ export default {
state.sorting = { ...state.sorting, ...sorting };
},

[types.SET_SELECTED_TYPE](state, type) {
state.selectedType = type;
},

[types.SET_FILTER](state, query) {
state.filterQuery = query;
[types.SET_FILTER](state, filter) {
state.filter = filter;
},
};
@@ -1,5 +1,3 @@
import { PACKAGE_REGISTRY_TABS } from '../constants';

export default () => ({
/**
* Determine if the component is loading data from the API

@@ -49,9 +47,8 @@ export default () => ({
/**
* The search query that is used to filter packages by name
*/
filterQuery: '',
filter: [],
/**
* The selected TAB of the package types tabs
*/
selectedType: PACKAGE_REGISTRY_TABS[0],
});
@@ -2,6 +2,7 @@
import { mapState, mapActions, mapGetters } from 'vuex';
import { componentNames } from '~/reports/components/issue_body';
import { s__, sprintf } from '~/locale';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import ReportSection from '~/reports/components/report_section.vue';
import createStore from './store';

@@ -11,6 +12,7 @@ export default {
components: {
ReportSection,
},
mixins: [glFeatureFlagsMixin()],
props: {
headPath: {
type: String,

@@ -30,6 +32,11 @@ export default {
required: false,
default: null,
},
codequalityReportsPath: {
type: String,
required: false,
default: '',
},
codequalityHelpPath: {
type: String,
required: true,

@@ -37,7 +44,7 @@ export default {
},
componentNames,
computed: {
...mapState(['newIssues', 'resolvedIssues']),
...mapState(['newIssues', 'resolvedIssues', 'hasError', 'statusReason']),
...mapGetters([
'hasCodequalityIssues',
'codequalityStatus',

@@ -51,10 +58,11 @@ export default {
headPath: this.headPath,
baseBlobPath: this.baseBlobPath,
headBlobPath: this.headBlobPath,
reportsPath: this.codequalityReportsPath,
helpPath: this.codequalityHelpPath,
});

this.fetchReports();
this.fetchReports(this.glFeatures.codequalityMrDiff);
},
methods: {
...mapActions(['fetchReports', 'setPaths']),

@@ -80,5 +88,7 @@ export default {
:popover-options="codequalityPopover"
:show-report-section-status-icon="false"
class="js-codequality-widget mr-widget-border-top mr-report"
/>
>
<template v-if="hasError" #sub-heading>{{ statusReason }}</template>
</report-section>
</template>
@@ -4,9 +4,20 @@ import { parseCodeclimateMetrics, doCodeClimateComparison } from './utils/codequ

export const setPaths = ({ commit }, paths) => commit(types.SET_PATHS, paths);

export const fetchReports = ({ state, dispatch, commit }) => {
export const fetchReports = ({ state, dispatch, commit }, diffFeatureFlagEnabled) => {
commit(types.REQUEST_REPORTS);

if (diffFeatureFlagEnabled) {
return axios
.get(state.reportsPath)
.then(({ data }) => {
return dispatch('receiveReportsSuccess', {
newIssues: parseCodeclimateMetrics(data.new_errors, state.headBlobPath),
resolvedIssues: parseCodeclimateMetrics(data.resolved_errors, state.baseBlobPath),
});
})
.catch((error) => dispatch('receiveReportsError', error));
}
if (!state.basePath) {
return dispatch('receiveReportsError');
}

@@ -18,13 +29,13 @@ export const fetchReports = ({ state, dispatch, commit }) => {
),
)
.then((data) => dispatch('receiveReportsSuccess', data))
.catch(() => dispatch('receiveReportsError'));
.catch((error) => dispatch('receiveReportsError', error));
};

export const receiveReportsSuccess = ({ commit }, data) => {
commit(types.RECEIVE_REPORTS_SUCCESS, data);
};

export const receiveReportsError = ({ commit }) => {
commit(types.RECEIVE_REPORTS_ERROR);
export const receiveReportsError = ({ commit }, error) => {
commit(types.RECEIVE_REPORTS_ERROR, error);
};
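The reworked actions above now forward the caught error so the widget can surface a reason. A minimal sketch of the assumed error path; the `status_reason` field in the response body is inferred from the mutation added in this commit, not from a documented API:

```javascript
// Sketch: what RECEIVE_REPORTS_ERROR ends up storing when the codequality
// endpoint fails with an error payload (payload shape is an assumption).
const error = { response: { data: { status_reason: 'Base pipeline codequality artifact not found' } } };

const state = { isLoading: true, hasError: false, statusReason: '' };
state.isLoading = false;
state.hasError = true;
state.statusReason = error?.response?.data?.status_reason;
// The MR widget then renders statusReason in the report section's sub-heading slot.
```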
@@ -6,6 +6,7 @@ export default {
state.headPath = paths.headPath;
state.baseBlobPath = paths.baseBlobPath;
state.headBlobPath = paths.headBlobPath;
state.reportsPath = paths.reportsPath;
state.helpPath = paths.helpPath;
},
[types.REQUEST_REPORTS](state) {

@@ -13,12 +14,14 @@ export default {
},
[types.RECEIVE_REPORTS_SUCCESS](state, data) {
state.hasError = false;
state.statusReason = '';
state.isLoading = false;
state.newIssues = data.newIssues;
state.resolvedIssues = data.resolvedIssues;
},
[types.RECEIVE_REPORTS_ERROR](state) {
[types.RECEIVE_REPORTS_ERROR](state, error) {
state.isLoading = false;
state.hasError = true;
state.statusReason = error?.response?.data?.status_reason;
},
};
@@ -1,12 +1,14 @@
export default () => ({
basePath: null,
headPath: null,
reportsPath: null,

baseBlobPath: null,
headBlobPath: null,

isLoading: false,
hasError: false,
statusReason: '',

newIssues: [],
resolvedIssues: [],
@@ -3,8 +3,10 @@ import CodeQualityComparisonWorker from '../../workers/codequality_comparison_wo
export const parseCodeclimateMetrics = (issues = [], path = '') => {
return issues.map((issue) => {
const parsedIssue = {
...issue,
name: issue.description,
path: issue.file_path,
urlPath: `${path}/${issue.file_path}#L${issue.line}`,
...issue,
};

if (issue?.location?.path) {
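The hunk above moves the `...issue` spread ahead of the derived fields. Because later keys win in an object literal, the computed `name`, `path`, and `urlPath` can no longer be clobbered by same-named keys on the raw issue; a two-line illustration:

```javascript
const issue = { description: 'Method too long', name: 'raw-name', file_path: 'app.js', line: 10 };

({ name: issue.description, ...issue }).name; // spread last: raw key wins  => 'raw-name'
({ ...issue, name: issue.description }).name; // spread first: derived key wins => 'Method too long'
```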
@@ -464,6 +464,7 @@ export default {
:head-path="mr.codeclimate.head_path"
:head-blob-path="mr.headBlobPath"
:base-blob-path="mr.baseBlobPath"
:codequality-reports-path="mr.codequalityReportsPath"
:codequality-help-path="mr.codequalityHelpPath"
/>
@@ -241,10 +241,11 @@ export default class MergeRequestStore {
this.isDismissedSuggestPipeline = data.is_dismissed_suggest_pipeline;
this.securityReportsDocsPath = data.security_reports_docs_path;

// codeclimate
// code quality
const blobPath = data.blob_path || {};
this.headBlobPath = blobPath.head_path || '';
this.baseBlobPath = blobPath.base_path || '';
this.codequalityReportsPath = data.codequality_reports_path;
this.codequalityHelpPath = data.codequality_help_path;
this.codeclimate = data.codeclimate;
@@ -0,0 +1,33 @@
# frozen_string_literal: true

module MergeRequests
# OldestPerCommitFinder is used to retrieve the oldest merge requests for
# every given commit, grouped per commit SHA.
#
# This finder is useful when you need to efficiently retrieve the first/oldest
# merge requests for multiple commits, and you want to do so in batches;
# instead of running a query for every commit.
class OldestPerCommitFinder
def initialize(project)
@project = project
end

# Returns a Hash that maps a commit ID to the oldest merge request that
# introduced that commit.
def execute(commits)
id_rows = MergeRequestDiffCommit
.oldest_merge_request_id_per_commit(@project.id, commits.map(&:id))

mrs = MergeRequest
.preload_target_project
.id_in(id_rows.map { |r| r[:merge_request_id] })
.index_by(&:id)

id_rows.each_with_object({}) do |row, hash|
if (mr = mrs[row[:merge_request_id]])
hash[row[:sha]] = mr
end
end
end
end
end
@@ -31,11 +31,10 @@ module AvatarsHelper
end

def avatar_icon_for_user(user = nil, size = nil, scale = 2, only_path: true)
if user
user.avatar_url(size: size, only_path: only_path) || default_avatar
else
gravatar_icon(nil, size, scale)
end
return gravatar_icon(nil, size, scale) unless user
return default_avatar if user.blocked?

user.avatar_url(size: size, only_path: only_path) || default_avatar
end

def gravatar_icon(user_email = '', size = nil, scale = 2)
@@ -35,4 +35,23 @@ class MergeRequestDiffCommit < ApplicationRecord

Gitlab::Database.bulk_insert(self.table_name, rows) # rubocop:disable Gitlab/BulkInsert
end

def self.oldest_merge_request_id_per_commit(project_id, shas)
# This method is defined here and not on MergeRequest, otherwise the SHA
# values used in the WHERE below won't be encoded correctly.
select(['merge_request_diff_commits.sha AS sha', 'min(merge_requests.id) AS merge_request_id'])
.joins(:merge_request_diff)
.joins(
'INNER JOIN merge_requests ' \
'ON merge_requests.latest_merge_request_diff_id = merge_request_diffs.id'
)
.where(sha: shas)
.where(
merge_requests: {
target_project_id: project_id,
state_id: MergeRequest.available_states[:merged]
}
)
.group(:sha)
end
end
@@ -0,0 +1,99 @@
# frozen_string_literal: true

module Repositories
# A service class for generating a changelog section.
class ChangelogService
DEFAULT_TRAILER = 'Changelog'
DEFAULT_FILE = 'CHANGELOG.md'

# The `project` specifies the `Project` to generate the changelog section
# for.
#
# The `user` argument specifies a `User` to use for committing the changes
# to the Git repository.
#
# The `version` arguments must be a version `String` using semantic
# versioning as the format.
#
# The arguments `from` and `to` must specify a Git ref or SHA to use for
# fetching the commits to include in the changelog. The SHA/ref set in the
# `from` argument isn't included in the list.
#
# The `date` argument specifies the date of the release, and defaults to the
# current time/date.
#
# The `branch` argument specifies the branch to commit the changes to. The
# branch must already exist.
#
# The `trailer` argument is the Git trailer to use for determining what
# commits to include in the changelog.
#
# The `file` arguments specifies the name/path of the file to commit the
# changes to. If the file doesn't exist, it's created automatically.
#
# The `message` argument specifies the commit message to use when committing
# the changelog changes.
#
# rubocop: disable Metrics/ParameterLists
def initialize(
project,
user,
version:,
from:,
to:,
date: DateTime.now,
branch: project.default_branch_or_master,
trailer: DEFAULT_TRAILER,
file: DEFAULT_FILE,
message: "Add changelog for version #{version}"
)
@project = project
@user = user
@version = version
@from = from
@to = to
@date = date
@branch = branch
@trailer = trailer
@file = file
@message = message
end
# rubocop: enable Metrics/ParameterLists

def execute
# For every entry we want to only include the merge request that
# originally introduced the commit, which is the oldest merge request that
# contains the commit. We fetch there merge requests in batches, reducing
# the number of SQL queries needed to get this data.
mrs_finder = MergeRequests::OldestPerCommitFinder.new(@project)
config = Gitlab::Changelog::Config.from_git(@project)
release = Gitlab::Changelog::Release
.new(version: @version, date: @date, config: config)

commits =
CommitsWithTrailerFinder.new(project: @project, from: @from, to: @to)

commits.each_page(@trailer) do |page|
mrs = mrs_finder.execute(page)

# Preload the authors. This ensures we only need a single SQL query per
# batch of commits, instead of needing a query for every commit.
page.each(&:lazy_author)

page.each do |commit|
release.add_entry(
title: commit.title,
commit: commit,
category: commit.trailers.fetch(@trailer),
author: commit.author,
merge_request: mrs[commit.id]
)
end
end

Gitlab::Changelog::Committer
.new(@project, @user)
.commit(release: release, file: @file, branch: @branch, message: @message)
end
end
end
@@ -0,0 +1,5 @@
---
title: Redesign the search UI for the package list
merge_request: 52575
author:
type: changed

@@ -0,0 +1,5 @@
---
title: Remove avatar of the blocked user
merge_request: 52051
author: Yogi (@yo)
type: fixed

@@ -0,0 +1,5 @@
---
title: Add API for generating Markdown changelogs
merge_request: 52116
author:
type: added

@@ -0,0 +1,5 @@
---
title: Update styling of validation messages in New Feature Flag form
merge_request: 52217
author:
type: changed

@@ -0,0 +1,5 @@
---
title: Add `gl-button` to promotion buttons on issue sidebar
merge_request: 51287
author: Yogi (@yo)
type: other

@@ -0,0 +1,8 @@
---
name: changelog_api
introduced_by_url: '13.9'
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/300043
milestone: '13.9'
type: development
group: group::source code
default_enabled: false
@@ -78,7 +78,7 @@ class ObjectStoreSettings
# "background_upload" => false,
# "proxy_download" => false,
# "remote_directory" => "artifacts"
# }
# }
#
# Settings.lfs['object_store'] = {
# "enabled" => true,

@@ -97,7 +97,7 @@ class ObjectStoreSettings
# "background_upload" => false,
# "proxy_download" => true,
# "remote_directory" => "lfs-objects"
# }
# }
#
# Note that with the common config:
# 1. Only one object store credentials can now be used. This is

@@ -124,9 +124,9 @@ class ObjectStoreSettings
target_config = common_config.merge(overrides.slice(*ALLOWED_OBJECT_STORE_OVERRIDES))
section = settings.try(store_type)

next unless section
next unless uses_object_storage?(section)

if section['enabled'] && target_config['bucket'].blank?
if target_config['bucket'].blank?
missing_bucket_for(store_type)
next
end

@@ -140,10 +140,26 @@ class ObjectStoreSettings
target_config['consolidated_settings'] = true
section['object_store'] = target_config
end

settings
end

private

# Admins can selectively disable object storage for a specific type. If
# this hasn't been set, we assume that the consolidated settings
# should be used.
def uses_object_storage?(section)
# Use to_h to avoid https://gitlab.com/gitlab-org/gitlab/-/issues/286873
section = section.to_h

enabled_globally = section.fetch('enabled', false)
object_store_settings = section.fetch('object_store', {})
os_enabled = object_store_settings.fetch('enabled', true)

enabled_globally && os_enabled
end

# We only can use the common object storage settings if:
# 1. The common settings are defined
# 2. The legacy settings are not defined

@@ -152,8 +168,9 @@ class ObjectStoreSettings
return false unless settings.dig('object_store', 'connection').present?

WORKHORSE_ACCELERATED_TYPES.each do |store|
# to_h is needed because something strange happens to
# Settingslogic#dig when stub_storage_settings is run in tests:
# to_h is needed because we define `default` as a Gitaly storage name
# in stub_storage_settings. This causes Settingslogic to redefine Hash#default,
# which causes Hash#dig to fail when the key doesn't exist: https://gitlab.com/gitlab-org/gitlab/-/issues/286873
#
# (byebug) section.dig
# *** ArgumentError Exception: wrong number of arguments (given 0, expected 1+)
@@ -0,0 +1,32 @@
# frozen_string_literal: true

class AddOldestMergeRequestsIndex < ActiveRecord::Migration[6.0]
include Gitlab::Database::SchemaHelpers
include Gitlab::Database::MigrationHelpers

disable_ddl_transaction!

# Set this constant to true if this migration requires downtime.
DOWNTIME = false

INDEX = 'index_on_merge_requests_for_latest_diffs'

def up
return if index_exists_by_name?('merge_requests', INDEX)

execute "CREATE INDEX CONCURRENTLY #{INDEX} ON merge_requests " \
'USING btree (target_project_id) INCLUDE (id, latest_merge_request_diff_id)'

create_comment(
'INDEX',
INDEX,
'Index used to efficiently obtain the oldest merge request for a commit SHA'
)
end

def down
return unless index_exists_by_name?('merge_requests', INDEX)

execute "DROP INDEX CONCURRENTLY #{INDEX}"
end
end

@@ -0,0 +1 @@
c173ba86340efe39977f1b319d1ebcead634e3bfe819a30e230fb4f81766f28a
@@ -22487,6 +22487,10 @@ CREATE UNIQUE INDEX index_on_instance_statistics_recorded_at_and_identifier ON a

CREATE INDEX index_on_label_links_all_columns ON label_links USING btree (target_id, label_id, target_type);

CREATE INDEX index_on_merge_requests_for_latest_diffs ON merge_requests USING btree (target_project_id) INCLUDE (id, latest_merge_request_diff_id);

COMMENT ON INDEX index_on_merge_requests_for_latest_diffs IS 'Index used to efficiently obtain the oldest merge request for a commit SHA';

CREATE INDEX index_on_namespaces_lower_name ON namespaces USING btree (lower((name)::text));

CREATE INDEX index_on_namespaces_lower_path ON namespaces USING btree (lower((path)::text));
@@ -2,6 +2,7 @@ Akismet
Alertmanager
Algolia
Alibaba
Aliyun
allowlist
allowlisted
allowlisting

@@ -34,6 +35,7 @@ autoscaler
autoscales
autoscaling
awardable
awardables
Axios
Azure
B-tree

@@ -111,6 +113,7 @@ Dangerfile
datetime
Debian
Decompressor
decryptable
deduplicate
deduplicated
deduplicates

@@ -168,6 +171,7 @@ Figma
Filebeat
Fio
firewalled
firewalling
Flawfinder
Flowdock
Fluentd

@@ -272,6 +276,7 @@ libFuzzer
Libravatar
liveness
Lograge
logrotate
Logstash
lookahead
lookaheads

@@ -282,6 +287,7 @@ loopback
Lucene
Maildir
Mailgun
Mailroom
Makefile
Makefiles
Markdown

@@ -505,6 +511,8 @@ spidering
Splunk
SpotBugs
Stackdriver
starrer
starrers
storable
storages
strace

@@ -525,6 +533,8 @@ subnet
subnets
subnetting
subpath
subproject
subprojects
subqueried
subqueries
subquery

@@ -534,9 +544,12 @@ substrings
subtree
subtrees
sudo
supercookie
supercookies
swappiness
swimlane
swimlanes
syncable
Sysbench
syslog
tanuki

@@ -633,6 +646,9 @@ unresolved
unresolving
unschedule
unscoped
unshare
unshared
unshares
unstage
unstaged
unstages
@@ -530,7 +530,12 @@ The process executes the following access checks:
In Active Directory, a user is marked as disabled/blocked if the user
account control attribute (`userAccountControl:1.2.840.113556.1.4.803`)
has bit 2 set.
For more information, see <https://ctovswild.com/2009/09/03/bitmask-searches-in-ldap/>

<!-- vale gitlab.Spelling = NO -->

For more information, see [Bitmask Searches in LDAP](https://ctovswild.com/2009/09/03/bitmask-searches-in-ldap/).

<!-- vale gitlab.Spelling = YES -->

The user is set to an `ldap_blocked` state in GitLab if the previous conditions
fail. This means the user is not able to sign in or push/pull code.

@@ -175,6 +175,6 @@ If you're having trouble, here are some tips:
OAuth2 access token if `client_auth_method` is not defined or if set to `basic`.
If you are seeing 401 errors upon retrieving the `userinfo` endpoint, you may
want to check your OpenID Web server configuration. For example, for
[oauth2-server-php](https://github.com/bshaffer/oauth2-server-php), you
[`oauth2-server-php`](https://github.com/bshaffer/oauth2-server-php), you
may need to [add a configuration parameter to
Apache](https://github.com/bshaffer/oauth2-server-php/issues/926#issuecomment-387502778).

@@ -53,7 +53,7 @@ The following are GitLab upgrade validation tests we performed.
- Outcome: Partial success because we did not run the looping pipeline during the demo to validate
zero-downtime.
- Follow up issues:
- [Clarify hup Puma/Unicorn should include deploy node](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5460)
- [Clarify how Puma/Unicorn should include deploy node](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5460)
- [Investigate MR creation failure after upgrade to 12.9.10](https://gitlab.com/gitlab-org/gitlab/-/issues/223282) Closed as false positive.

### February 2020

@@ -128,7 +128,7 @@ The following are PostgreSQL upgrade validation tests we performed.
PostgreSQL 12 with a database cluster on the primary is not recommended until the issues are resolved.
- Known issues for PostgreSQL clusters:
- [Ensure Patroni detects PostgreSQL update](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5423)
- [Allow configuring permanent replication slots in patroni](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5628)
- [Allow configuring permanent replication slots in Patroni](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5628)

### August 2020
@@ -489,7 +489,7 @@ This experimental implementation has the following limitations:
- Whenever `gitlab-ctl reconfigure` runs on a Patroni Leader instance, there's a
chance the node will be demoted due to the required short-time restart. To
avoid this, you can pause auto-failover by running `gitlab-ctl patroni pause`.
After a reconfigure, it unpauses on its own.
After a reconfigure, it resumes on its own.

For instructions about how to set up Patroni on the primary node, see the
[PostgreSQL replication and failover with Omnibus GitLab](../../postgresql/replication_and_failover.md#patroni) page.

@@ -644,8 +644,8 @@ With Patroni it's now possible to support that. In order to migrate the existing
1. [Configure a permanent replication slot](#step-1-configure-patroni-permanent-replication-slot-on-the-primary-site).
1. [Configure a Standby Cluster](#step-2-configure-a-standby-cluster-on-the-secondary-site)
on that single node machine.

You will end up with a "Standby Cluster" with a single node. That allows you to later on add additional patroni nodes

You will end up with a "Standby Cluster" with a single node. That allows you to later on add additional Patroni nodes
by following the same instructions above.

## Troubleshooting

@@ -121,7 +121,7 @@ The following list depicts the network architecture of Gitaly:
- GitLab Shell.
- Elasticsearch indexer.
- Gitaly itself.
- A Gitaly server must be able to make RPC calls **to itself** by uing its own
- A Gitaly server must be able to make RPC calls **to itself** by using its own
`(Gitaly address, Gitaly token)` pair as specified in `/config/gitlab.yml`.
- Authentication is done through a static token which is shared among the Gitaly and GitLab Rails
nodes.

@@ -65,7 +65,7 @@ Gitaly Cluster and [Geo](../geo/index.md) both provide redundancy. However the r
not aware when Gitaly Cluster is used.
- Geo provides [replication](../geo/index.md) and [disaster recovery](../geo/disaster_recovery/index.md) for
an entire instance of GitLab. Users know when they are using Geo for
[replication](../geo/index.md). Geo [replicates multiple datatypes](../geo/replication/datatypes.md#limitations-on-replicationverification),
[replication](../geo/index.md). Geo [replicates multiple data types](../geo/replication/datatypes.md#limitations-on-replicationverification),
including Git data.

The following table outlines the major differences between Gitaly Cluster and Geo:

@@ -24,6 +24,8 @@ The **Kroki URL** is the hostname of the server running the container.

The [`yuzutech/kroki`](https://hub.docker.com/r/yuzutech/kroki) image contains the following diagrams libraries out-of-the-box:

<!-- vale gitlab.Spelling = NO -->

- [Bytefield](https://bytefield-svg.deepsymmetry.org/)
- [Ditaa](http://ditaa.sourceforge.net)
- [Erd](https://github.com/BurntSushi/erd)

@@ -37,6 +39,8 @@ The [`yuzutech/kroki`](https://hub.docker.com/r/yuzutech/kroki) image contains t
- [Vega-Lite](https://github.com/vega/vega-lite)
- [WaveDrom](https://wavedrom.com/)

<!-- vale gitlab.Spelling = YES -->

If you want to use additional diagram libraries,
read the [Kroki installation](https://docs.kroki.io/kroki/setup/install/#_images) to learn how to start Kroki companion containers.

@@ -138,8 +142,12 @@ Rel(banking_system, mainframe, "Uses")



<!-- vale gitlab.Spelling = NO -->

**Nomnoml**

<!-- vale gitlab.Spelling = YES -->

```plaintext
[nomnoml]
....

@@ -159,4 +167,4 @@ Rel(banking_system, mainframe, "Uses")
....
```



@@ -73,7 +73,7 @@ these steps to move the logs to a new location without losing any data.
```

Use `--ignore-existing` so you don't override new job logs with older versions of the same log.
1. Unpause continuous integration data processing by editing `/etc/gitlab/gitlab.rb` and removing the `sidekiq` setting you updated earlier.
1. Resume continuous integration data processing by editing `/etc/gitlab/gitlab.rb` and removing the `sidekiq` setting you updated earlier.
1. Save the file and [reconfigure GitLab](restart_gitlab.md#omnibus-gitlab-reconfigure) for the
changes to take effect.
1. Remove the old job logs storage location:

@@ -970,9 +970,13 @@ For Omnibus GitLab installations, Redis logs reside in `/var/log/gitlab/redis/`.

For Omnibus GitLab installations, Alertmanager logs reside in `/var/log/gitlab/alertmanager/`.

<!-- vale gitlab.Spelling = NO -->

## Crond Logs

For Omnibus GitLab installations, `crond` logs reside in `/var/log/gitlab/crond/`.
For Omnibus GitLab installations, crond logs reside in `/var/log/gitlab/crond/`.

<!-- vale gitlab.Spelling = YES -->

## Grafana Logs

@@ -980,7 +984,7 @@ For Omnibus GitLab installations, Grafana logs reside in `/var/log/gitlab/grafan

## LogRotate Logs

For Omnibus GitLab installations, logrotate logs reside in `/var/log/gitlab/logrotate/`.
For Omnibus GitLab installations, `logrotate` logs reside in `/var/log/gitlab/logrotate/`.

## GitLab Monitor Logs

@@ -1023,14 +1027,14 @@ GitLab Support often asks for one of these, and maintains the required tools.
### Briefly tail the main logs

If the bug or error is readily reproducible, save the main GitLab logs
[to a file](troubleshooting/linux_cheat_sheet.md#files--dirs) while reproducing the
[to a file](troubleshooting/linux_cheat_sheet.md#files-and-directories) while reproducing the
problem a few times:

```shell
sudo gitlab-ctl tail | tee /tmp/<case-ID-and-keywords>.log
```

Conclude the log gathering with <kbd>Ctrl</kbd> + <kbd>C</kbd>.
Conclude the log gathering with <kbd>Control</kbd> + <kbd>C</kbd>.

### GitLabSOS
@@ -108,7 +108,7 @@ both primary and secondaries will fail.

### Merge requests, issues, epics

All write actions except those mentioned above will fail. So, in maintenace mode, a user cannot update merge requests, issues, etc.
All write actions except those mentioned above will fail. So, in maintenance mode, a user cannot update merge requests, issues, etc.

### Container Registry

@@ -31,7 +31,7 @@ Unicorn in GitLab 14.0.
When switching to Puma, Unicorn server configuration
will _not_ carry over automatically, due to differences between the two application servers. For Omnibus-based
deployments, see [Configuring Puma Settings](https://docs.gitlab.com/omnibus/settings/puma.html#configuring-puma-settings).
For Helm based deployments, see the [Webservice Chart documentation](https://docs.gitlab.com/charts/charts/gitlab/webservice/index.html).
For Helm based deployments, see the [`webservice` chart documentation](https://docs.gitlab.com/charts/charts/gitlab/webservice/index.html).

Additionally we strongly recommend that multi-node deployments [configure their load balancers to use the readiness check](../load_balancer.md#readiness-check) due to a difference between Unicorn and Puma in how they handle connections during a restart of the service.

@@ -312,14 +312,14 @@ configuration.

The different supported drivers are:

| Driver | Description |
|------------|-------------------------------------|
| filesystem | Uses a path on the local filesystem |
| Azure | Microsoft Azure Blob Storage |
| gcs | Google Cloud Storage |
| s3 | Amazon Simple Storage Service. Be sure to configure your storage bucket with the correct [S3 Permission Scopes](https://docs.docker.com/registry/storage-drivers/s3/#s3-permission-scopes). |
| swift | OpenStack Swift Object Storage |
| oss | Aliyun OSS |
| Driver | Description |
|--------------|--------------------------------------|
| `filesystem` | Uses a path on the local file system |
| `Azure` | Microsoft Azure Blob Storage |
| `gcs` | Google Cloud Storage |
| `s3` | Amazon Simple Storage Service. Be sure to configure your storage bucket with the correct [S3 Permission Scopes](https://docs.docker.com/registry/storage-drivers/s3/#s3-permission-scopes). |
| `swift` | OpenStack Swift Object Storage |
| `oss` | Aliyun OSS |

Although most S3 compatible services (like [MinIO](https://min.io/)) should work with the Container Registry, we only guarantee support for AWS S3. Because we cannot assert the correctness of third-party S3 implementations, we can debug issues, but we cannot patch the registry unless an issue is reproducible against an AWS S3 bucket.
@@ -35,6 +35,8 @@ The Package Registry supports the following formats:
The below table lists formats that are not supported, but are accepting Community contributions for. Consider contributing to GitLab. This [development documentation](../../development/packages.md)
guides you through the process.

<!-- vale gitlab.Spelling = NO -->

| Format | Status |
| ------ | ------ |
| Chef | [#36889](https://gitlab.com/gitlab-org/gitlab/-/issues/36889) |

@@ -51,6 +53,8 @@ guides you through the process.
| Terraform | [WIP: Merge Request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/18834) |
| Vagrant | [#36899](https://gitlab.com/gitlab-org/gitlab/-/issues/36899) |

<!-- vale gitlab.Spelling = YES -->

## Enabling the Packages feature

NOTE:
@@ -190,7 +190,7 @@ outside world.
### Additional configuration for Docker container

The GitLab Pages daemon doesn't have permissions to bind mounts when it runs
in a Docker container. To overcome this issue, you must change the chroot
in a Docker container. To overcome this issue, you must change the `chroot`
behavior:

1. Edit `/etc/gitlab/gitlab.rb`.

@@ -236,7 +236,7 @@ control over how the Pages daemon runs and serves content in your environment.
| `gitlab_secret` | The OAuth application secret. Leave blank to automatically fill when Pages authenticates with GitLab.
| `gitlab_server` | Server to use for authentication when access control is enabled; defaults to GitLab `external_url`.
| `headers` | Specify any additional http headers that should be sent to the client with each response.
| `inplace_chroot` | On [systems that don't support bind-mounts](index.md#additional-configuration-for-docker-container), this instructs GitLab Pages to chroot into its `pages_path` directory. Some caveats exist when using inplace chroot; refer to the GitLab Pages [README](https://gitlab.com/gitlab-org/gitlab-pages/blob/master/README.md#caveats) for more information.
| `inplace_chroot` | On [systems that don't support bind-mounts](index.md#additional-configuration-for-docker-container), this instructs GitLab Pages to `chroot` into its `pages_path` directory. Some caveats exist when using in-place `chroot`; refer to the GitLab Pages [README](https://gitlab.com/gitlab-org/gitlab-pages/blob/master/README.md#caveats) for more information.
| `insecure_ciphers` | Use default list of cipher suites, may contain insecure ones like 3DES and RC4.
| `internal_gitlab_server` | Internal GitLab server address used exclusively for API requests. Useful if you want to send that traffic over an internal load balancer. Defaults to GitLab `external_url`.
| `listen_proxy` | The addresses to listen on for reverse-proxy requests. Pages binds to these addresses' network sockets and receives incoming requests from them. Sets the value of `proxy_pass` in `$nginx-dir/conf/gitlab-pages.conf`.

@@ -538,7 +538,7 @@ the below steps to do a no downtime transfer to a new storage location.

1. [Reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure).
1. Verify Pages are still being served up as expected.
1. Unpause Pages deployments by removing from `/etc/gitlab/gitlab.rb` the `sidekiq` setting set above.
1. Resume Pages deployments by removing from `/etc/gitlab/gitlab.rb` the `sidekiq` setting set above.
1. [Reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure).
1. Trigger a new Pages deployment and verify it's working as expected.
1. Remove the old Pages storage location: `sudo rm -rf /var/opt/gitlab/gitlab-rails/shared/pages`

@@ -629,7 +629,7 @@ database encryption. Proceed with caution.
on the **Pages server** and configure this share to
allow access from your main **GitLab server**.
Note that the example there is more general and
shares several sub-directories from `/home` to several `/nfs/home` mountpoints.
shares several sub-directories from `/home` to several `/nfs/home` mount points.
For our Pages-specific example here, we instead share only the
default GitLab Pages folder `/var/opt/gitlab/gitlab-rails/shared/pages`
from the **Pages server** and we mount it to `/mnt/pages`
@@ -818,7 +818,7 @@ but commented out to help encourage others to add to it in the future. -->

### `open /etc/ssl/ca-bundle.pem: permission denied`

GitLab Pages runs inside a chroot jail, usually in a uniquely numbered directory like
GitLab Pages runs inside a `chroot` jail, usually in a uniquely numbered directory like
`/tmp/gitlab-pages-*`.

Within the jail, a bundle of trusted certificates is

@@ -828,7 +828,7 @@ from `/opt/gitlab/embedded/ssl/certs/cacert.pem`
as part of starting up Pages.

If the permissions on the source file are incorrect (they should be `0644`), then
the file inside the chroot jail is also wrong.
the file inside the `chroot` jail is also wrong.

Pages logs errors in `/var/log/gitlab/gitlab-pages/current` like:

@@ -837,7 +837,7 @@ x509: failed to load system roots and no roots provided
open /etc/ssl/ca-bundle.pem: permission denied
```

The use of a chroot jail makes this error misleading, as it is not
The use of a `chroot` jail makes this error misleading, as it is not
referring to `/etc/ssl` on the root filesystem.

The fix is to correct the source file permissions and restart Pages:

@@ -862,8 +862,8 @@ open /opt/gitlab/embedded/ssl/certs/cacert.pem: no such file or directory
x509: certificate signed by unknown authority
```

The reason for those errors is that the files `resolv.conf` and `ca-bundle.pem` are missing inside the chroot.
The fix is to copy the host's `/etc/resolv.conf` and the GitLab certificate bundle inside the chroot:
The reason for those errors is that the files `resolv.conf` and `ca-bundle.pem` are missing inside the `chroot`.
The fix is to copy the host's `/etc/resolv.conf` and the GitLab certificate bundle inside the `chroot`:

```shell
sudo mkdir -p /var/opt/gitlab/gitlab-rails/shared/pages/etc/ssl

@@ -895,7 +895,7 @@ gitlab_pages['listen_proxy'] = '127.0.0.1:8090'
### 404 error after transferring project to a different group or user

If you encounter a `404 Not Found` error a Pages site after transferring a project to
another group or user, you must trigger adomain configuration update for Pages. To do
another group or user, you must trigger a domain configuration update for Pages. To do
so, write something in the `.update` file. The Pages daemon monitors for changes to this
file, and reloads the configuration when changes occur.

@@ -945,8 +945,8 @@ in all of your GitLab Pages instances.
### 500 error with `securecookie: failed to generate random iv` and `Failed to save the session`

This problem most likely results from an [out-dated operating system](https://docs.gitlab.com/omnibus/package-information/deprecated_os.html).
The [Pages daemon uses the `securecookie` library](https://gitlab.com/search?group_id=9970&project_id=734943&repository_ref=master&scope=blobs&search=securecookie&snippets=false) to get random strings via [crypto/rand in Go](https://golang.org/pkg/crypto/rand/#pkg-variables).
This requires the `getrandom` syscall or `/dev/urandom` to be available on the host OS.
The [Pages daemon uses the `securecookie` library](https://gitlab.com/search?group_id=9970&project_id=734943&repository_ref=master&scope=blobs&search=securecookie&snippets=false) to get random strings via [`crypto/rand` in Go](https://golang.org/pkg/crypto/rand/#pkg-variables).
This requires the `getrandom` system call or `/dev/urandom` to be available on the host OS.
Upgrading to an [officially supported operating system](https://about.gitlab.com/install/) is recommended.

### The requested scope is invalid, malformed, or unknown
@@ -46,7 +46,7 @@ Each database node runs three services:

`PostgreSQL` - The database itself.

`Patroni` - Communicates with other patroni services in the cluster and handles
`Patroni` - Communicates with other Patroni services in the cluster and handles
failover when issues with the leader server occurs. The failover procedure
consists of:
@@ -233,9 +233,9 @@ It can also be used as a receiving application for content encrypted with a KMS:
gcloud kms decrypt --key my-key --keyring my-test-kms --plaintext-file=- --ciphertext-file=my-file --location=us-west1 | sudo gitlab-rake gitlab:ldap:secret:write
```

**gcloud secret integration example**
**Google Cloud secret integration example**

It can also be used as a receiving application for secrets out of gcloud:
It can also be used as a receiving application for secrets out of Google Cloud:

```shell
gcloud secrets versions access latest --secret="my-test-secret" > $1 | sudo gitlab-rake gitlab:ldap:secret:write
@ -140,7 +140,7 @@ your server in `/etc/init.d/gitlab`.
|
|||
|
||||
---
|
||||
|
||||
If you are using other init systems, like systemd, you can check the
|
||||
If you are using other init systems, like `systemd`, you can check the
|
||||
[GitLab Recipes](https://gitlab.com/gitlab-org/gitlab-recipes/tree/master/init) repository for some unofficial services. These are
|
||||
**not** officially supported so use them at your own risk.
|
||||
|
||||
|
|
|
|||
|
|
@ -126,7 +126,7 @@ an SMTP server, but you're not seeing mail delivered. Here's how to check the se
|
|||
|
||||
For more advanced issues, `gdb` is a must-have tool for debugging issues.
|
||||
|
||||
### The GNU Project Debugger (gdb)
|
||||
### The GNU Project Debugger (GDB)
|
||||
|
||||
To install on Ubuntu/Debian:
|
||||
|
||||
|
|
@ -140,9 +140,13 @@ On CentOS:
|
|||
sudo yum install gdb
|
||||
```
|
||||
|
||||
<!-- vale gitlab.Spelling = NO -->
|
||||
|
||||
### rbtrace
|
||||
|
||||
GitLab 11.2 ships with [rbtrace](https://github.com/tmm1/rbtrace), which
|
||||
<!-- vale gitlab.Spelling = YES -->
|
||||
|
||||
GitLab 11.2 ships with [`rbtrace`](https://github.com/tmm1/rbtrace), which
|
||||
allows you to trace Ruby code, view all running threads, take memory dumps,
|
||||
and more. However, this is not enabled by default. To enable it, define the
|
||||
`ENABLE_RBTRACE` variable to the environment. For example, in Omnibus:
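A minimal sketch for an Omnibus installation (assumption: you manage `/etc/gitlab/gitlab.rb` directly and can reconfigure):

```shell
# Add the variable to the Rails environment and apply the change.
echo "gitlab_rails['env'] = { 'ENABLE_RBTRACE' => '1' }" | sudo tee -a /etc/gitlab/gitlab.rb
sudo gitlab-ctl reconfigure
```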
|
||||
|
|
@ -175,7 +179,7 @@ downtime. Otherwise skip to the next section.
|
|||
|
||||
1. Load the problematic URL
|
||||
1. Run `sudo gdb -p <PID>` to attach to the Unicorn process.
|
||||
1. In the gdb window, type:
|
||||
1. In the GDB window, type:
|
||||
|
||||
```plaintext
|
||||
call (void) rb_backtrace()
|
||||
|
|
@ -210,7 +214,7 @@ downtime. Otherwise skip to the next section.
|
|||
```
|
||||
|
||||
Note that if the Unicorn process terminates before you are able to run these
|
||||
commands, gdb will report an error. To buy more time, you can always raise the
|
||||
commands, GDB will report an error. To buy more time, you can always raise the
|
||||
Unicorn timeout. For omnibus users, you can edit `/etc/gitlab/gitlab.rb` and
|
||||
increase it from 60 seconds to 300:
|
||||
|
||||
|
|
@ -246,7 +250,7 @@ separate Rails process to debug the issue:
|
|||
```
|
||||
|
||||
1. In a new window, run `top`. It should show this Ruby process using 100% CPU. Write down the PID.
|
||||
1. Follow step 2 from the previous section on using gdb.
|
||||
1. Follow step 2 from the previous section on using GDB.
|
||||
|
||||
### GitLab: API is not accessible
|
||||
|
||||
|
|
@ -279,4 +283,4 @@ The output in `/tmp/unicorn.txt` may help diagnose the root cause.
|
|||
## More information
|
||||
|
||||
- [Debugging Stuck Ruby Processes](https://blog.newrelic.com/engineering/debugging-stuck-ruby-processes-what-to-do-before-you-kill-9/)
|
||||
- [Cheatsheet of using gdb and Ruby processes](gdb-stuck-ruby.txt)
|
||||
- [Cheat sheet of using GDB and Ruby processes](gdb-stuck-ruby.txt)
|
||||
|
|
|
|||
|
|
@ -55,7 +55,7 @@ chown root:git <file_or_dir>
|
|||
chmod u+x <file>
|
||||
```
|
||||
|
||||
### Files & Dirs
|
||||
### Files and directories
|
||||
|
||||
```shell
|
||||
# create a new directory and all subdirectories
|
||||
|
|
@ -202,7 +202,7 @@ or you can build it from source if you have the Rust compiler.
|
|||
|
||||
First run the tool with no arguments other than the strace output filename to get
|
||||
a summary of the top processes sorted by time spent actively performing tasks. You
|
||||
can also sort based on total time, # of syscalls made, PID #, and # of child processes
|
||||
can also sort based on total time, # of system calls made, PID #, and # of child processes
|
||||
using the `-S` or `--sort` flag. The number of results defaults to 25 processes, but
|
||||
can be changed using the `-c`/`--count` option. See `--help` for full details.
|
||||
|
||||
|
|
@ -220,7 +220,7 @@ Top 25 PIDs
|
|||
...
|
||||
```
|
||||
|
||||
Based on the summary, you can then view the details of syscalls made by one or more
|
||||
Based on the summary, you can then view the details of system calls made by one or more
|
||||
processes using the `-p`/`--pid` for a specific process, or `-s`/`--stats` flags for
|
||||
a sorted list. `--stats` takes the same sorting and count options as summary.
|
||||
|
||||
|
|
|
|||
|
|
@ -207,7 +207,7 @@ to authenticate with the API:
|
|||
- [Go Proxy](../user/packages/go_proxy/index.md)
|
||||
- [Maven Repository](../user/packages/maven_repository/index.md#authenticate-with-a-ci-job-token-in-maven)
|
||||
- [NPM Repository](../user/packages/npm_registry/index.md#authenticate-with-a-ci-job-token)
|
||||
- [Nuget Repository](../user/packages/nuget_repository/index.md)
|
||||
- [NuGet Repository](../user/packages/nuget_repository/index.md)
|
||||
- [PyPI Repository](../user/packages/pypi_repository/index.md#authenticate-with-a-ci-job-token)
|
||||
- [Generic packages](../user/packages/generic_packages/index.md#publish-a-generic-package-by-using-cicd)
|
||||
- [Get job artifacts](job_artifacts.md#get-job-artifacts)
|
||||
|
|
|
|||
|
|
@ -25,7 +25,7 @@ Available action types for the `action` parameter are:
|
|||
- `destroyed`
|
||||
- `expired`
|
||||
|
||||
Note that these options are downcased.
|
||||
Note that these options are in lower case.
|
||||
|
||||
### Target Types
|
||||
|
||||
|
|
@ -39,7 +39,7 @@ Available target types for the `target_type` parameter are:
|
|||
- `snippet`
|
||||
- `user`
|
||||
|
||||
Note that these options are downcased.
|
||||
Note that these options are in lower case.
|
||||
|
||||
### Date formatting
|
||||
|
||||
|
|
|
|||
|
|
@ -128,7 +128,7 @@ POST /features/:name
|
|||
| `user` | string | no | A GitLab username |
|
||||
| `group` | string | no | A GitLab group's path, for example `gitlab-org` |
|
||||
| `project` | string | no | A project's path, for example `gitlab-org/gitlab-foss` |
|
||||
| `force` | boolean | no | Skip feature flag validation checks, ie. YAML definition |
|
||||
| `force` | boolean | no | Skip feature flag validation checks, such as a YAML definition |
|
||||
|
||||
Note that you can enable or disable a feature for a `feature_group`, a `user`,
|
||||
a `group`, and a `project` in a single API call.
|
||||
|
|
|
|||
|
|
@ -12,11 +12,15 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
|
||||
Badges support placeholders that are replaced in real time in both the link and image URL. The allowed placeholders are:
|
||||
|
||||
<!-- vale gitlab.Spelling = NO -->
|
||||
|
||||
- **%{project_path}**: replaced by the project path.
|
||||
- **%{project_id}**: replaced by the project ID.
|
||||
- **%{default_branch}**: replaced by the project default branch.
|
||||
- **%{commit_sha}**: replaced by the last project's commit SHA.
|
||||
|
||||
<!-- vale gitlab.Spelling = YES -->
|
||||
|
||||
Because these endpoints aren't inside a project's context, the information used to replace the placeholders comes
|
||||
from the first group's project by creation date. If the group doesn't have any projects, the original URL with the placeholders is returned.
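As an illustration (the host and badge URLs are placeholders), a group badge that relies on these placeholders could be created like this:

```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
     --data-urlencode "link_url=https://gitlab.example.com/%{project_path}/-/pipelines" \
     --data-urlencode "image_url=https://gitlab.example.com/%{project_path}/badges/%{default_branch}/pipeline.svg" \
     "https://gitlab.example.com/api/v4/groups/:id/badges"
```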
|
||||
|
||||
|
|
|
|||
|
|
@ -319,7 +319,7 @@ POST /projects/:id/members
|
|||
| `id` | integer/string | yes | The ID or [URL-encoded path of the project or group](README.md#namespaced-path-encoding) owned by the authenticated user |
|
||||
| `user_id` | integer/string | yes | The user ID of the new member or multiple IDs separated by commas |
|
||||
| `access_level` | integer | yes | A valid access level |
|
||||
| `expires_at` | string | no | A date string in the format YEAR-MONTH-DAY |
|
||||
| `expires_at` | string | no | A date string in the format `YEAR-MONTH-DAY` |
|
||||
|
||||
```shell
|
||||
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" --data "user_id=1&access_level=30" "https://gitlab.example.com/api/v4/groups/:id/members"
|
||||
|
|
@ -357,7 +357,7 @@ PUT /projects/:id/members/:user_id
|
|||
| `id` | integer/string | yes | The ID or [URL-encoded path of the project or group](README.md#namespaced-path-encoding) owned by the authenticated user |
|
||||
| `user_id` | integer | yes | The user ID of the member |
|
||||
| `access_level` | integer | yes | A valid access level |
|
||||
| `expires_at` | string | no | A date string in the format YEAR-MONTH-DAY |
|
||||
| `expires_at` | string | no | A date string in the format `YEAR-MONTH-DAY` |
|
||||
|
||||
```shell
|
||||
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/:id/members/:user_id?access_level=40"
|
||||
|
|
|
|||
|
|
@ -34,7 +34,7 @@ GET /projects/:id/templates/:type
|
|||
| Attribute | Type | Required | Description |
|
||||
| ---------- | ------ | -------- | ----------- |
|
||||
| `id` | integer / string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) |
|
||||
| `type` | string | yes| The type `(dockerfiles|gitignores|gitlab_ci_ymls|licenses|issues|merge_requests)` of the template |
|
||||
| `type` | string | yes | The type `(dockerfiles|gitignores|gitlab_ci_ymls|licenses|issues|merge_requests)` of the template |
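A hedged example request listing the license templates of a project (project ID `5` is only illustrative):

```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/templates/licenses"
```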
|
||||
|
||||
Example response (licenses):
|
||||
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ info: "To determine the technical writer assigned to the Stage/Group associated
|
|||
type: reference, api
|
||||
---
|
||||
|
||||
# Repositories API
|
||||
# Repositories API **(CORE)**
|
||||
|
||||
## List repository tree
|
||||
|
||||
|
|
@ -18,14 +18,15 @@ This command provides essentially the same functionality as the `git ls-tree` co
|
|||
GET /projects/:id/repository/tree
|
||||
```
|
||||
|
||||
Parameters:
|
||||
Supported attributes:
|
||||
|
||||
- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
|
||||
- `path` (optional) - The path inside repository. Used to get content of subdirectories
|
||||
- `ref` (optional) - The name of a repository branch or tag or if not given the default branch
|
||||
- `recursive` (optional) - Boolean value used to get a recursive tree (false by default)
|
||||
- `per_page` (optional) - Number of results to show per page. If not specified, defaults to `20`.
|
||||
Read more on [pagination](README.md#pagination).
|
||||
| Attribute | Type | Required | Description |
|
||||
| :---------- | :------------- | :------- | :---------- |
|
||||
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. |
| `path` | string | no | The path inside the repository. Used to get the content of subdirectories. |
| `ref` | string | no | The name of a repository branch or tag. Defaults to the default branch if not given. |
| `recursive` | boolean | no | Boolean value used to get a recursive tree. Default is `false`. |
| `per_page` | integer | no | Number of results to show per page. If not specified, defaults to `20`. [Learn more on pagination](README.md#pagination). |
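Example request (a sketch; the project ID, branch, and path are illustrative):

```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" \
     "https://gitlab.example.com/api/v4/projects/5/repository/tree?ref=master&path=docs&per_page=50"
```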
|
||||
|
||||
```json
|
||||
[
|
||||
|
|
@ -91,10 +92,12 @@ without authentication if the repository is publicly accessible.
|
|||
GET /projects/:id/repository/blobs/:sha
|
||||
```
|
||||
|
||||
Parameters:
|
||||
Supported attributes:
|
||||
|
||||
- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
|
||||
- `sha` (required) - The blob SHA
|
||||
| Attribute | Type | Required | Description |
|
||||
| :-------- | :------------- | :------- | :---------- |
|
||||
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. |
|
||||
| `sha` | string | yes | The blob SHA. |
|
||||
|
||||
## Raw blob content
|
||||
|
||||
|
|
@ -105,10 +108,12 @@ without authentication if the repository is publicly accessible.
|
|||
GET /projects/:id/repository/blobs/:sha/raw
|
||||
```
|
||||
|
||||
Parameters:
|
||||
Supported attributes:
|
||||
|
||||
- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
|
||||
- `sha` (required) - The blob SHA
|
||||
| Attribute | Type | Required | Description |
|
||||
| :-------- | :------- | :------- | :---------- |
|
||||
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. |
| `sha` | string | yes | The blob SHA. |
|
||||
|
||||
## Get file archive
|
||||
|
||||
|
|
@ -128,10 +133,14 @@ GET /projects/:id/repository/archive[.format]
|
|||
`bz2`, `tar`, and `zip`. For example, specifying `archive.zip`
|
||||
would send an archive in ZIP format.
|
||||
|
||||
Parameters:
|
||||
Supported attributes:
|
||||
|
||||
- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
|
||||
- `sha` (optional) - The commit SHA to download. A tag, branch reference, or SHA can be used. This defaults to the tip of the default branch if not specified. For example:
|
||||
| Attribute | Type | Required | Description |
|
||||
|:------------|:---------------|:---------|:----------------------|
|
||||
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. |
|
||||
| `sha` | string | no | The commit SHA to download. A tag, branch reference, or SHA can be used. This defaults to the tip of the default branch if not specified. |
|
||||
|
||||
Example request:
|
||||
|
||||
```shell
|
||||
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.com/api/v4/projects/<project_id>/repository/archive?sha=<commit_sha>"
|
||||
|
|
@ -146,21 +155,22 @@ publicly accessible. Note that diffs could have an empty diff string if [diff li
|
|||
GET /projects/:id/repository/compare
|
||||
```
|
||||
|
||||
Parameters:
|
||||
Supported attributes:
|
||||
|
||||
- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
|
||||
- `from` (required) - the commit SHA or branch name
|
||||
- `to` (required) - the commit SHA or branch name
|
||||
- `straight` (optional) - comparison method, `true` for direct comparison between `from` and `to` (`from`..`to`), `false` to compare using merge base (`from`...`to`)'. Default is `false`.
|
||||
| Attribute | Type | Required | Description |
|
||||
| :--------- | :------------- | :------- | :---------- |
|
||||
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. |
|
||||
| `from` | string | yes | The commit SHA or branch name. |
|
||||
| `to` | string | yes | The commit SHA or branch name. |
|
||||
| `straight` | boolean | no | Comparison method: `true` for a direct comparison between `from` and `to` (`from`..`to`), `false` to compare using merge base (`from`...`to`). Default is `false`. |
|
||||
|
||||
```plaintext
|
||||
GET /projects/:id/repository/compare?from=master&to=feature
|
||||
```
|
||||
|
||||
Response:
|
||||
Example response:
|
||||
|
||||
```json
|
||||
|
||||
{
|
||||
"commit": {
|
||||
"id": "12d65c8dd2b2676fa3ac47d955accc085a37a9c1",
|
||||
|
|
@ -203,15 +213,17 @@ GET /projects/:id/repository/contributors
|
|||
```
|
||||
|
||||
WARNING:
|
||||
The `additions` and `deletions` attributes are deprecated [as of GitLab 13.4](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39653) because they [always return `0`](https://gitlab.com/gitlab-org/gitlab/-/issues/233119).
|
||||
The `additions` and `deletions` attributes are deprecated [as of GitLab 13.4](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39653), because they [always return `0`](https://gitlab.com/gitlab-org/gitlab/-/issues/233119).
|
||||
|
||||
Parameters:
|
||||
Supported attributes:
|
||||
|
||||
- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
|
||||
- `order_by` (optional) - Return contributors ordered by `name`, `email`, or `commits` (orders by commit date) fields. Default is `commits`
|
||||
- `sort` (optional) - Return contributors sorted in `asc` or `desc` order. Default is `asc`
|
||||
| Attribute | Type | Required | Description |
|
||||
| :--------- | :------------- | :------- | :---------- |
|
||||
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. |
|
||||
| `order_by` | string | no | Return contributors ordered by `name`, `email`, or `commits` (orders by commit date) fields. Default is `commits`. |
|
||||
| `sort` | string | no | Return contributors sorted in `asc` or `desc` order. Default is `asc`. |
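Example request (illustrative project ID):

```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" \
     "https://gitlab.example.com/api/v4/projects/5/repository/contributors?order_by=commits&sort=desc"
```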
|
||||
|
||||
Response:
|
||||
Example response:
|
||||
|
||||
```json
|
||||
[{
|
||||
|
|
@ -237,10 +249,12 @@ Get the common ancestor for 2 or more refs (commit SHAs, branch names or tags).
|
|||
GET /projects/:id/repository/merge_base
|
||||
```
|
||||
|
||||
| Attribute | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) |
|
||||
| `refs` | array | yes | The refs to find the common ancestor of, multiple refs can be passed |
|
||||
| Attribute | Type | Required | Description |
|
||||
| --------- | -------------- | -------- | ------------------------------------------------------------------------------- |
|
||||
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) |
|
||||
| `refs` | array | yes | The refs to find the common ancestor of, multiple refs can be passed |
|
||||
|
||||
Example request:
|
||||
|
||||
```shell
|
||||
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/repository/merge_base?refs[]=304d257dcb821665ab5110318fc58a007bd104ed&refs[]=0031876facac3f2b2702a0e53a26e89939a42209"
|
||||
|
|
@ -264,3 +278,252 @@ Example response:
|
|||
"committed_date": "2014-02-27T08:03:18.000Z"
|
||||
}
|
||||
```
|
||||
|
||||
## Generate changelog data
|
||||
|
||||
> - [Introduced](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/351) in GitLab 13.9.
|
||||
> - It's [deployed behind a feature flag](../user/feature_flags.md), disabled by default.
|
||||
> - It's disabled on GitLab.com.
|
||||
> - It's not yet recommended for production use.
|
||||
> - To use it in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enable-or-disable-generating-changelog-data).
|
||||
|
||||
WARNING:
|
||||
This feature might not be available to you. Check the **version history** note above for details.
|
||||
|
||||
Generate changelog data based on commits in a repository.
|
||||
|
||||
Given a version (using semantic versioning) and a range of commits,
|
||||
GitLab generates a changelog for all commits that use a particular
|
||||
[Git trailer](https://git-scm.com/docs/git-interpret-trailers).
|
||||
|
||||
The output of this process is a new section in a changelog file in the Git
|
||||
repository of the given project. The output format is in Markdown, and can be
|
||||
customized.
|
||||
|
||||
```plaintext
|
||||
POST /projects/:id/repository/changelog
|
||||
```
|
||||
|
||||
Supported attributes:
|
||||
|
||||
| Attribute | Type | Required | Description |
|
||||
| :-------- | :------- | :--------- | :---------- |
|
||||
| `version` | string | yes | The version to generate the changelog for. The format must follow [semantic versioning](https://semver.org/). |
|
||||
| `from` | string | yes | The start of the range of commits (as a SHA) to use for generating the changelog. This commit itself isn't included in the list. |
|
||||
| `to` | string | yes | The end of the range of commits (as a SHA) to use for the changelog. This commit _is_ included in the list. |
|
||||
| `date` | datetime | no | The date and time of the release, defaults to the current time. |
|
||||
| `branch` | string | no | The branch to commit the changelog changes to, defaults to the project's default branch. |
|
||||
| `trailer` | string | no | The Git trailer to use for including commits, defaults to `Changelog`. |
|
||||
| `file` | string | no | The file to commit the changes to, defaults to `CHANGELOG.md`. |
|
||||
| `message` | string | no | The commit message to produce when committing the changes, defaults to `Add changelog for version X` where X is the value of the `version` argument. |
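As a sketch, a request that generates the `1.0.0` changelog for an assumed commit range looks like this (the project ID and SHAs are placeholders):

```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
     --data "version=1.0.0&from=ed899a2f4b50b4370feeea94676502b42383c746&to=6104942438c14ec7bd21c6cd5bd995272b3faff6" \
     "https://gitlab.example.com/api/v4/projects/42/repository/changelog"
```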
|
||||
|
||||
### How it works
|
||||
|
||||
Changelogs are generated based on commit titles. Commits are only included if
|
||||
they contain a specific Git trailer. GitLab uses the value of this trailer to
|
||||
categorize the changes.
|
||||
|
||||
GitLab uses Git trailers, because Git trailers are
|
||||
supported by Git out of the box. We use commits as input, as this is the only
|
||||
source of data every project uses. In addition, commits can be retrieved when
|
||||
operating on a mirror. This is important for GitLab itself, because during a security
|
||||
release we might need to include changes from both public projects and private
|
||||
security mirrors.
|
||||
|
||||
Changelogs are generated by taking the title of the commits to include and using
|
||||
these as the changelog entries. You can enrich entries with additional data,
|
||||
such as a link to the merge request or details about the commit author. You can
|
||||
[customize the format of a changelog](#customize-the-changelog-output) section with a template.
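For example, a commit whose message carries the default `Changelog` trailer (a hypothetical commit) is picked up and categorized under `feature`:

```shell
# The second -m adds a final paragraph, which Git treats as the trailer block.
git commit -m "Add support for exporting widgets" -m "Changelog: feature"
```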
|
||||
|
||||
### Customize the changelog output
|
||||
|
||||
The output is customized using a YAML configuration file stored in your
|
||||
project's Git repository. This file must reside in
|
||||
`.gitlab/changelog_config.yml`.
|
||||
|
||||
You can set the following variables in this file:
|
||||
|
||||
- `date_format`: the date format to use in the title of the newly added
|
||||
changelog data. This uses regular `strftime` formatting.
|
||||
- `template`: a custom template to use for generating the changelog data.
|
||||
- `categories`: a hash that maps raw category names to the names to use in the
|
||||
changelog.
|
||||
|
||||
Using the default settings, generating a changelog results in a section along
|
||||
the lines of the following:
|
||||
|
||||
```markdown
|
||||
## 1.0.0 (2021-01-05)
|
||||
|
||||
### Features (4 changes)
|
||||
|
||||
- [Feature 1](gitlab-org/gitlab@123abc) by @alice ([merge request](gitlab-org/gitlab!123))
|
||||
- [Feature 2](gitlab-org/gitlab@456abc) ([merge request](gitlab-org/gitlab!456))
|
||||
- [Feature 3](gitlab-org/gitlab@234abc) by @steve
|
||||
- [Feature 4](gitlab-org/gitlab@456)
|
||||
```
|
||||
|
||||
Each section starts with a title that contains the version and release date.
|
||||
While the format of the date can be customized, the rest of the title can't be
|
||||
changed. When adding a new section, GitLab parses these titles to determine
|
||||
where in the file the new section should be placed. GitLab sorts sections
|
||||
according to their versions, not their dates.
|
||||
|
||||
Each section can have categories, each with their
|
||||
corresponding changes. In the above example, "Features" is one such category.
|
||||
You can customize the format of these sections.
|
||||
|
||||
The section names are derived from the values of the Git trailer used to include
|
||||
or exclude commits.
|
||||
|
||||
For example, if the trailer to use is called `Changelog`,
|
||||
and its value is `feature`, then the commit is grouped in the `feature`
|
||||
category. The names of these raw values might differ from what you want to
|
||||
show in a changelog, so you can remap them. Let's say we use the `Changelog`
|
||||
trailer and developers use the following values: `feature`, `bug`, and
|
||||
`performance`.
|
||||
|
||||
You can remap these using the following YAML configuration file:
|
||||
|
||||
```yaml
|
||||
---
|
||||
categories:
|
||||
feature: Features
|
||||
bug: Bug fixes
|
||||
performance: Performance improvements
|
||||
```
|
||||
|
||||
When generating the changelog data, the category titles are then `### Features`,
|
||||
`### Bug fixes`, and `### Performance improvements`.
|
||||
|
||||
### Custom templates
|
||||
|
||||
The category sections are generated using a template. The default template is as
|
||||
follows:
|
||||
|
||||
```plaintext
|
||||
{% if categories %}
|
||||
{% each categories %}
|
||||
### {{ title }} ({% if single_change %}1 change{% else %}{{ count }} changes{% end %})
|
||||
|
||||
{% each entries %}
|
||||
- [{{ title }}]({{ commit.reference }})\
|
||||
{% if author.contributor %} by {{ author.reference }}{% end %}\
|
||||
{% if merge_request %} ([merge request]({{ merge_request.reference }})){% end %}
|
||||
{% end %}
|
||||
|
||||
{% end %}
|
||||
{% else %}
|
||||
No changes.
|
||||
{% end %}
|
||||
```
|
||||
|
||||
The `{% ... %}` tags are for statements, and `{{ ... }}` is used for printing
|
||||
data. Statements must be terminated using a `{% end %}` tag. Both the `if` and
|
||||
`each` statements require a single argument.
|
||||
|
||||
For example, if we have a variable `valid`, and we want to display "yes"
|
||||
when this value is true, and display "nope" otherwise. We can do so as follows:
|
||||
|
||||
```plaintext
|
||||
{% if valid %}
|
||||
yes
|
||||
{% else %}
|
||||
nope
|
||||
{% end %}
|
||||
```
|
||||
|
||||
The use of `else` is optional. A value is considered true when it's a non-empty
|
||||
value or boolean `true`. Empty arrays and hashes are considered false.
|
||||
|
||||
Looping is done using `each`, and variables inside a loop are scoped to it.
|
||||
Referring to the current value in a loop is done using the variable tag `{{ it
|
||||
}}`. Other variables read their value from the current loop value. Take
|
||||
this template for example:
|
||||
|
||||
```plaintext
|
||||
{% each users %}
|
||||
{{name}}
|
||||
{% end %}
|
||||
```
|
||||
|
||||
Assuming `users` is an array of objects, each with a `name` field, this would
|
||||
then print the name of every user.
|
||||
|
||||
Using variable tags, you can access nested objects. For example, `{{
|
||||
users.0.name }}` prints the name of the first user in the `users` variable.
|
||||
|
||||
If a line ends in a backslash, the next newline is ignored. This allows you to
|
||||
wrap code across multiple lines, without introducing unnecessary newlines in the
|
||||
Markdown output.
|
||||
|
||||
You can specify a custom template in your configuration like so:
|
||||
|
||||
```yaml
|
||||
---
|
||||
template: >
|
||||
{% if categories %}
|
||||
{% each categories %}
|
||||
### {{ title }}
|
||||
|
||||
{% each entries %}
|
||||
- [{{ title }}]({{ commit.reference }})\
|
||||
{% if author.contributor %} by {{ author.reference }}{% end %}
|
||||
{% end %}
|
||||
|
||||
{% end %}
|
||||
{% else %}
|
||||
No changes.
|
||||
{% end %}
|
||||
```
|
||||
|
||||
### Template data
|
||||
|
||||
At the top level, the following variable is available:
|
||||
|
||||
- `categories`: an array of objects, one for every changelog category.
|
||||
|
||||
In a category, the following variables are available:
|
||||
|
||||
- `title`: the title of the category (after it has been remapped).
|
||||
- `count`: the number of entries in this category.
|
||||
- `single_change`: a boolean that indicates if there is only one change (`true`),
|
||||
or multiple changes (`false`).
|
||||
- `entries`: the entries that belong to this category.
|
||||
|
||||
In an entry, the following variables are available (here `foo.bar` means that
|
||||
`bar` is a sub-field of `foo`):
|
||||
|
||||
- `title`: the title of the changelog entry (this is the commit title).
|
||||
- `commit.reference`: a reference to the commit, for example,
|
||||
`gitlab-org/gitlab@0a4cdd86ab31748ba6dac0f69a8653f206e5cfc7`.
|
||||
- `commit.trailers`: an object containing all the Git trailers that were present
|
||||
in the commit body.
|
||||
- `author.reference`: a reference to the commit author (for example, `@alice`).
|
||||
- `author.contributor`: a boolean set to `true` when the author is an external
|
||||
contributor, otherwise this is set to `false`.
|
||||
- `merge_request.reference`: a reference to the merge request that first
|
||||
introduced the change (for example, `gitlab-org/gitlab!50063`).
|
||||
|
||||
The `author` and `merge_request` objects might not be present if the data couldn't
|
||||
be determined (for example, when a commit was created without a corresponding merge
|
||||
request).
|
||||
|
||||
### Enable or disable generating changelog data **(CORE ONLY)**
|
||||
|
||||
This feature is under development and not ready for production use. It is
|
||||
deployed behind a feature flag that is **disabled by default**.
|
||||
[GitLab administrators with access to the GitLab Rails console](../administration/feature_flags.md)
|
||||
can enable it.
|
||||
|
||||
To enable it for a project:
|
||||
|
||||
```ruby
|
||||
Feature.enable(:changelog_api, Project.find(id_of_the_project))
|
||||
```
|
||||
|
||||
To disable it for a project:
|
||||
|
||||
```ruby
|
||||
Feature.disable(:changelog_api, Project.find(id_of_the_project))
|
||||
```
|
||||
|
|
|
|||
|
|
@ -71,7 +71,7 @@ Below are the changes made between V3 and V4.
|
|||
- Notes do not return deprecated field `upvote` and `downvote` [!9384](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9384)
|
||||
- Return HTTP status code `400` for all validation errors when creating or updating a member instead of sometimes `422` error. [!9523](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9523)
|
||||
- Remove `GET /groups/owned`. Use `GET /groups?owned=true` instead [!9505](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9505)
|
||||
- Return 202 with JSON body on async removals on V4 API (`DELETE /projects/:id/repository/merged_branches` and `DELETE /projects/:id`) [!9449](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9449)
|
||||
- Return 202 with JSON body on asynchronous removals on V4 API (`DELETE /projects/:id/repository/merged_branches` and `DELETE /projects/:id`) [!9449](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9449)
|
||||
- `GET /projects/:id/milestones?iid[]=x&iid[]=y` array filter has been renamed to `iids` [!9096](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9096)
|
||||
- Return basic information about pipeline in `GET /projects/:id/pipelines` [!8875](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/8875)
|
||||
- Renamed all `build` references to `job` [!9463](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9463)
|
||||
|
|
|
|||
|
|
@ -109,6 +109,48 @@ Refer to [`override.rb`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gi
|
|||
Because only a class or prepended module can actually override a method.
|
||||
Including or extending a module into another cannot override anything.
|
||||
|
||||
### Interactions with `ActiveSupport::Concern`, `prepend`, and `class_methods`
|
||||
|
||||
When you use `ActiveSupport::Concern` that includes class methods, you do not
|
||||
get expected results because `ActiveSupport::Concern` doesn't work like a
|
||||
regular Ruby module.
|
||||
|
||||
Since we already have `Prependable` as a patch for `ActiveSupport::Concern`
|
||||
to enable `prepend`, this affects how it interacts with
|
||||
`override` and `class_methods`. We add a workaround directly into
|
||||
`Prependable` to resolve the problem, by `extend`ing `ClassMethods` into the
|
||||
defining module.
|
||||
|
||||
This allows us to use `override` to verify `class_methods` used in the
|
||||
context mentioned above. This workaround only applies when we run the
|
||||
verification, not when running the application itself.
|
||||
|
||||
Here are example code blocks that demonstrate the effect of this workaround:
|
||||
|
||||
|
||||
```ruby
|
||||
module Base
|
||||
extend ActiveSupport::Concern
|
||||
|
||||
class_methods do
|
||||
def f
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
module Derived
|
||||
include Base
|
||||
end
|
||||
|
||||
# Without the workaround
|
||||
Base.f # => NoMethodError
|
||||
Derived.f # => nil
|
||||
|
||||
# With the workaround
|
||||
Base.f # => nil
|
||||
Derived.f # => nil
|
||||
```
|
||||
|
||||
## `StrongMemoize`
|
||||
|
||||
Refer to [`strong_memoize.rb`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/utils/strong_memoize.rb):
|
||||
|
|
|
|||
|
|
@ -555,7 +555,7 @@ username, you can create a new group and transfer projects to it.
|
|||
|
||||
You can change settings that are specific to repositories in your group.
|
||||
|
||||
#### Custom initial branch name **(FREE SELF)**
|
||||
#### Custom initial branch name **(FREE)**
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/43290) in GitLab 13.6.
|
||||
|
||||
|
|
|
|||
|
|
@ -170,6 +170,67 @@ module API
|
|||
not_found!("Merge Base")
|
||||
end
|
||||
end
|
||||
|
||||
desc 'Generates a changelog section for a release' do
|
||||
detail 'This feature was introduced in GitLab 13.9'
|
||||
end
|
||||
params do
|
||||
requires :version,
|
||||
type: String,
|
||||
regexp: Gitlab::Regex.unbounded_semver_regex,
|
||||
desc: 'The version of the release, using the semantic versioning format'
|
||||
|
||||
requires :from,
|
||||
type: String,
|
||||
desc: 'The first commit in the range of commits to use for the changelog'
|
||||
|
||||
requires :to,
|
||||
type: String,
|
||||
desc: 'The last commit in the range of commits to use for the changelog'
|
||||
|
||||
optional :date,
|
||||
type: DateTime,
|
||||
desc: 'The date and time of the release'
|
||||
|
||||
optional :branch,
|
||||
type: String,
|
||||
desc: 'The branch to commit the changelog changes to'
|
||||
|
||||
optional :trailer,
|
||||
type: String,
|
||||
desc: 'The Git trailer to use for determining if commits are to be included in the changelog',
|
||||
default: ::Repositories::ChangelogService::DEFAULT_TRAILER
|
||||
|
||||
optional :file,
|
||||
type: String,
|
||||
desc: 'The file to commit the changelog changes to',
|
||||
default: ::Repositories::ChangelogService::DEFAULT_FILE
|
||||
|
||||
optional :message,
|
||||
type: String,
|
||||
desc: 'The commit message to use when committing the changelog'
|
||||
end
|
||||
post ':id/repository/changelog' do
|
||||
not_found! unless Feature.enabled?(:changelog_api, user_project)
|
||||
|
||||
branch = params[:branch] || user_project.default_branch_or_master
|
||||
access = Gitlab::UserAccess.new(current_user, container: user_project)
|
||||
|
||||
unless access.can_push_to_branch?(branch)
|
||||
forbidden!("You are not allowed to commit a changelog on this branch")
|
||||
end
|
||||
|
||||
service = ::Repositories::ChangelogService.new(
|
||||
user_project,
|
||||
current_user,
|
||||
**declared_params(include_missing: false)
|
||||
)
|
||||
|
||||
service.execute
|
||||
status(200)
|
||||
rescue => ex
|
||||
render_api_error!("Failed to generate the changelog: #{ex.message}", 500)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -26,7 +26,13 @@ module Gitlab
|
|||
# scratch, otherwise we may end up throwing away changes. As such, all
|
||||
# the logic is contained within the retry block.
|
||||
Retriable.retriable(on: CommitError) do
|
||||
commit = @project.commit(branch)
|
||||
commit = Gitlab::Git::Commit.last_for_path(
|
||||
@project.repository,
|
||||
branch,
|
||||
file,
|
||||
literal_pathspec: true
|
||||
)
|
||||
|
||||
content = blob_content(file, commit)
|
||||
|
||||
# If the release has already been added (e.g. concurrently by another
|
||||
|
|
|
|||
|
|
@ -37,7 +37,10 @@ module Gitlab
|
|||
end
|
||||
|
||||
if (template = hash['template'])
|
||||
config.template = Template::Compiler.new.compile(template)
|
||||
# We use the full namespace here (and further down) as otherwise Rails
|
||||
# may use the wrong constant when autoloading is used.
|
||||
config.template =
|
||||
::Gitlab::Changelog::Template::Compiler.new.compile(template)
|
||||
end
|
||||
|
||||
if (categories = hash['categories'])
|
||||
|
|
@ -54,7 +57,8 @@ module Gitlab
|
|||
def initialize(project)
|
||||
@project = project
|
||||
@date_format = DEFAULT_DATE_FORMAT
|
||||
@template = Template::Compiler.new.compile(DEFAULT_TEMPLATE)
|
||||
@template =
|
||||
::Gitlab::Changelog::Template::Compiler.new.compile(DEFAULT_TEMPLATE)
|
||||
@categories = {}
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -98,19 +98,27 @@ module Gitlab
|
|||
ESCAPED_NEWLINE = /\\\n$/.freeze
|
||||
|
||||
# The start tag for ERB tags. These tags will be escaped, preventing
|
||||
# users FROM USING erb DIRECTLY.
|
||||
ERB_START_TAG = '<%'
|
||||
# users from using ERB directly.
|
||||
ERB_START_TAG = /<\\?\s*\\?\s*%/.freeze
|
||||
|
||||
def compile(template)
|
||||
transformed_lines = ['<% it = variables %>']
|
||||
|
||||
# ERB tags must be stripped here, otherwise a user may introduce ERB
|
||||
# tags by making clever use of whitespace. See
|
||||
# https://gitlab.com/gitlab-org/gitlab/-/issues/300224 for more
|
||||
# information.
|
||||
template = template.gsub(ERB_START_TAG, '<%%')
|
||||
|
||||
template.each_line { |line| transformed_lines << transform(line) }
|
||||
Template.new(transformed_lines.join)
|
||||
|
||||
# We use the full namespace here as otherwise Rails may use the wrong
|
||||
# constant when autoloading is used.
|
||||
::Gitlab::Changelog::Template::Template.new(transformed_lines.join)
|
||||
end
|
||||
|
||||
def transform(line)
|
||||
line.gsub!(ESCAPED_NEWLINE, '')
|
||||
line.gsub!(ERB_START_TAG, '<%%')
|
||||
|
||||
# This replacement ensures that "end" blocks on their own lines
|
||||
# don't add extra newlines. Using an ERB -%> tag sadly swallows too
|
||||
|
|
|
|||
|
|
@ -0,0 +1,219 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Database
|
||||
module MigrationHelpers
|
||||
module V2
|
||||
include Gitlab::Database::MigrationHelpers
|
||||
|
||||
# Renames a column without requiring downtime.
|
||||
#
|
||||
# Concurrent renames work by using database triggers to ensure both the
|
||||
# old and new column are in sync. However, this method will _not_ remove
|
||||
# the triggers or the old column automatically; this needs to be done
|
||||
# manually in a post-deployment migration. This can be done using the
|
||||
# method `cleanup_concurrent_column_rename`.
|
||||
#
|
||||
# table - The name of the database table containing the column.
|
||||
# old_column - The old column name.
|
||||
# new_column - The new column name.
|
||||
# type - The type of the new column. If no type is given the old column's
|
||||
# type is used.
|
||||
# batch_column_name - option is for tables without primary key, in this
|
||||
# case another unique integer column can be used. Example: :user_id
|
||||
def rename_column_concurrently(table, old_column, new_column, type: nil, batch_column_name: :id)
|
||||
setup_renamed_column(__callee__, table, old_column, new_column, type, batch_column_name)
|
||||
|
||||
with_lock_retries do
|
||||
install_bidirectional_triggers(table, old_column, new_column)
|
||||
end
|
||||
end
|
||||
|
||||
# Reverses operations performed by rename_column_concurrently.
|
||||
#
|
||||
# This method takes care of removing previously installed triggers as well
|
||||
# as removing the new column.
|
||||
#
|
||||
# table - The name of the database table.
|
||||
# old_column - The name of the old column.
|
||||
# new_column - The name of the new column.
|
||||
def undo_rename_column_concurrently(table, old_column, new_column)
|
||||
teardown_rename_mechanism(table, old_column, new_column, column_to_remove: new_column)
|
||||
end
|
||||
|
||||
# Cleans up a concurrent column rename.
|
||||
#
|
||||
# This method takes care of removing previously installed triggers as well
|
||||
# as removing the old column.
|
||||
#
|
||||
# table - The name of the database table.
|
||||
# old_column - The name of the old column.
|
||||
# new_column - The name of the new column.
|
||||
def cleanup_concurrent_column_rename(table, old_column, new_column)
|
||||
teardown_rename_mechanism(table, old_column, new_column, column_to_remove: old_column)
|
||||
end
|
||||
|
||||
# Reverses the operations performed by cleanup_concurrent_column_rename.
|
||||
#
|
||||
# This method adds back the old_column removed
|
||||
# by cleanup_concurrent_column_rename.
|
||||
# It also adds back the triggers that are removed
|
||||
# by cleanup_concurrent_column_rename.
|
||||
#
|
||||
# table - The name of the database table containing the column.
|
||||
# old_column - The old column name.
|
||||
# new_column - The new column name.
|
||||
# type - The type of the old column. If no type is given the new column's
|
||||
# type is used.
|
||||
# batch_column_name - option is for tables without primary key, in this
|
||||
# case another unique integer column can be used. Example: :user_id
|
||||
#
|
||||
def undo_cleanup_concurrent_column_rename(table, old_column, new_column, type: nil, batch_column_name: :id)
|
||||
setup_renamed_column(__callee__, table, new_column, old_column, type, batch_column_name)
|
||||
|
||||
with_lock_retries do
|
||||
install_bidirectional_triggers(table, old_column, new_column)
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def setup_renamed_column(calling_operation, table, old_column, new_column, type, batch_column_name)
|
||||
if transaction_open?
|
||||
raise "#{calling_operation} can not be run inside a transaction"
|
||||
end
|
||||
|
||||
column = columns(table).find { |column| column.name == old_column.to_s }
|
||||
|
||||
unless column
|
||||
raise "Column #{old_column} does not exist on #{table}"
|
||||
end
|
||||
|
||||
if column.default
|
||||
raise "#{calling_operation} does not currently support columns with default values"
|
||||
end
|
||||
|
||||
unless column_exists?(table, batch_column_name)
|
||||
raise "Column #{batch_column_name} does not exist on #{table}"
|
||||
end
|
||||
|
||||
check_trigger_permissions!(table)
|
||||
|
||||
unless column_exists?(table, new_column)
|
||||
create_column_from(table, old_column, new_column, type: type, batch_column_name: batch_column_name)
|
||||
end
|
||||
end
|
||||
|
||||
def teardown_rename_mechanism(table, old_column, new_column, column_to_remove:)
|
||||
return unless column_exists?(table, column_to_remove)
|
||||
|
||||
with_lock_retries do
|
||||
check_trigger_permissions!(table)
|
||||
|
||||
remove_bidirectional_triggers(table, old_column, new_column)
|
||||
|
||||
remove_column(table, column_to_remove)
|
||||
end
|
||||
end
|
||||
|
||||
def install_bidirectional_triggers(table, old_column, new_column)
|
||||
insert_trigger_name, update_old_trigger_name, update_new_trigger_name =
|
||||
bidirectional_trigger_names(table, old_column, new_column)
|
||||
|
||||
quoted_table = quote_table_name(table)
|
||||
quoted_old = quote_column_name(old_column)
|
||||
quoted_new = quote_column_name(new_column)
|
||||
|
||||
create_insert_trigger(insert_trigger_name, quoted_table, quoted_old, quoted_new)
|
||||
create_update_trigger(update_old_trigger_name, quoted_table, quoted_new, quoted_old)
|
||||
create_update_trigger(update_new_trigger_name, quoted_table, quoted_old, quoted_new)
|
||||
end
|
||||
|
||||
def remove_bidirectional_triggers(table, old_column, new_column)
|
||||
insert_trigger_name, update_old_trigger_name, update_new_trigger_name =
|
||||
bidirectional_trigger_names(table, old_column, new_column)
|
||||
|
||||
quoted_table = quote_table_name(table)
|
||||
|
||||
drop_trigger(insert_trigger_name, quoted_table)
|
||||
drop_trigger(update_old_trigger_name, quoted_table)
|
||||
drop_trigger(update_new_trigger_name, quoted_table)
|
||||
end
|
||||
|
||||
def bidirectional_trigger_names(table, old_column, new_column)
|
||||
%w[insert update_old update_new].map do |operation|
|
||||
'trigger_' + Digest::SHA256.hexdigest("#{table}_#{old_column}_#{new_column}_#{operation}").first(12)
|
||||
end
|
||||
end
|
||||
|
||||
def function_name_for_trigger(trigger_name)
|
||||
"function_for_#{trigger_name}"
|
||||
end
|
||||
|
||||
def create_insert_trigger(trigger_name, quoted_table, quoted_old_column, quoted_new_column)
|
||||
function_name = function_name_for_trigger(trigger_name)
|
||||
|
||||
execute(<<~SQL)
|
||||
CREATE OR REPLACE FUNCTION #{function_name}()
|
||||
RETURNS trigger
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
BEGIN
|
||||
IF NEW.#{quoted_old_column} IS NULL AND NEW.#{quoted_new_column} IS NOT NULL THEN
|
||||
NEW.#{quoted_old_column} = NEW.#{quoted_new_column};
|
||||
END IF;
|
||||
|
||||
IF NEW.#{quoted_new_column} IS NULL AND NEW.#{quoted_old_column} IS NOT NULL THEN
|
||||
NEW.#{quoted_new_column} = NEW.#{quoted_old_column};
|
||||
END IF;
|
||||
|
||||
RETURN NEW;
|
||||
END
|
||||
$$;
|
||||
|
||||
DROP TRIGGER IF EXISTS #{trigger_name}
|
||||
ON #{quoted_table};
|
||||
|
||||
CREATE TRIGGER #{trigger_name}
|
||||
BEFORE INSERT ON #{quoted_table}
|
||||
FOR EACH ROW EXECUTE FUNCTION #{function_name}();
|
||||
SQL
|
||||
end
|
||||
|
||||
def create_update_trigger(trigger_name, quoted_table, quoted_source_column, quoted_target_column)
|
||||
function_name = function_name_for_trigger(trigger_name)
|
||||
|
||||
execute(<<~SQL)
|
||||
CREATE OR REPLACE FUNCTION #{function_name}()
|
||||
RETURNS trigger
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
BEGIN
|
||||
NEW.#{quoted_target_column} := NEW.#{quoted_source_column};
|
||||
RETURN NEW;
|
||||
END
|
||||
$$;
|
||||
|
||||
DROP TRIGGER IF EXISTS #{trigger_name}
|
||||
ON #{quoted_table};
|
||||
|
||||
CREATE TRIGGER #{trigger_name}
|
||||
BEFORE UPDATE OF #{quoted_source_column} ON #{quoted_table}
|
||||
FOR EACH ROW EXECUTE FUNCTION #{function_name}();
|
||||
SQL
|
||||
end
|
||||
|
||||
def drop_trigger(trigger_name, quoted_table)
|
||||
function_name = function_name_for_trigger(trigger_name)
|
||||
|
||||
execute(<<~SQL)
|
||||
DROP TRIGGER IF EXISTS #{trigger_name}
|
||||
ON #{quoted_table};
|
||||
|
||||
DROP FUNCTION IF EXISTS #{function_name};
|
||||
SQL
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -39,9 +39,14 @@ module Gitlab
|
|||
def class_methods
|
||||
super
|
||||
|
||||
class_methods_module = const_get(:ClassMethods, false)
|
||||
|
||||
if instance_variable_defined?(:@_prepended_class_methods)
|
||||
const_get(:ClassMethods, false).prepend @_prepended_class_methods
|
||||
class_methods_module.prepend @_prepended_class_methods
|
||||
end
|
||||
|
||||
# Hack to resolve https://gitlab.com/gitlab-org/gitlab/-/issues/23932
|
||||
extend class_methods_module if ENV['STATIC_VERIFICATION']
|
||||
end
|
||||
|
||||
def prepended(base = nil, &block)
|
||||
|
|
|
|||
|
|
@ -153,7 +153,13 @@ module Gitlab
|
|||
def extended(mod = nil)
|
||||
super
|
||||
|
||||
queue_verification(mod.singleton_class) if mod
|
||||
# Hack to resolve https://gitlab.com/gitlab-org/gitlab/-/issues/23932
|
||||
is_not_concern_hack =
|
||||
(mod.is_a?(Class) || !name&.end_with?('::ClassMethods'))
|
||||
|
||||
if mod && is_not_concern_hack
|
||||
queue_verification(mod.singleton_class)
|
||||
end
|
||||
end
|
||||
|
||||
def queue_verification(base, verify: false)
|
||||
|
|
@ -174,7 +180,7 @@ module Gitlab
|
|||
end
|
||||
|
||||
def self.verify!
|
||||
extensions.values.each(&:verify!)
|
||||
extensions.each_value(&:verify!)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -19884,9 +19884,6 @@ msgstr ""
|
|||
msgid "No start date"
|
||||
msgstr ""
|
||||
|
||||
msgid "No status"
|
||||
msgstr ""
|
||||
|
||||
msgid "No template"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -20804,9 +20801,6 @@ msgstr ""
|
|||
msgid "PackageRegistry|Delete package"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Filter by name"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|For more information on Composer packages in GitLab, %{linkStart}see the documentation.%{linkEnd}"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -20900,9 +20894,6 @@ msgstr ""
|
|||
msgid "PackageRegistry|Source project located at %{link}"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|There are no %{packageType} packages yet"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|There are no other versions of this package."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -20918,6 +20909,9 @@ msgstr ""
|
|||
msgid "PackageRegistry|To widen your search, change or remove the filters above."
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Type"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Unable to fetch package version information."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -33393,15 +33387,15 @@ msgstr ""
|
|||
msgid "Your GPG keys (%{count})"
|
||||
msgstr ""
|
||||
|
||||
msgid "Your GitLab Ultimate trial will last 30 days after which point you can keep your free GitLab account forever. We just need some additional information to activate your trial."
|
||||
msgstr ""
|
||||
|
||||
msgid "Your GitLab account request has been approved!"
|
||||
msgstr ""
|
||||
|
||||
msgid "Your GitLab group"
|
||||
msgstr ""
|
||||
|
||||
msgid "Your Gitlab Ultimate trial will last 30 days after which point you can keep your free Gitlab account forever. We just need some additional information to activate your trial."
|
||||
msgstr ""
|
||||
|
||||
msgid "Your Groups"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@ -49,6 +49,20 @@ RSpec.describe ObjectStoreSettings do
|
|||
}
|
||||
end
|
||||
|
||||
shared_examples 'consolidated settings for objects accelerated by Workhorse' do
|
||||
it 'consolidates active object storage settings' do
|
||||
described_class::WORKHORSE_ACCELERATED_TYPES.each do |object_type|
|
||||
# Use to_h to avoid https://gitlab.com/gitlab-org/gitlab/-/issues/286873
|
||||
section = subject.try(object_type).to_h
|
||||
|
||||
next unless section.dig('object_store', 'enabled')
|
||||
|
||||
expect(section['object_store']['connection']).to eq(connection)
|
||||
expect(section['object_store']['consolidated_settings']).to be true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
it 'sets correct default values' do
|
||||
subject
|
||||
|
||||
|
|
@ -77,9 +91,7 @@ RSpec.describe ObjectStoreSettings do
|
|||
expect(settings.pages['object_store']['consolidated_settings']).to be true
|
||||
|
||||
expect(settings.external_diffs['enabled']).to be false
|
||||
expect(settings.external_diffs['object_store']['enabled']).to be false
|
||||
expect(settings.external_diffs['object_store']['remote_directory']).to eq('external_diffs')
|
||||
expect(settings.external_diffs['object_store']['consolidated_settings']).to be true
|
||||
expect(settings.external_diffs['object_store']).to be_nil
|
||||
end
|
||||
|
||||
it 'raises an error when a bucket is missing' do
|
||||
|
|
@ -95,29 +107,49 @@ RSpec.describe ObjectStoreSettings do
|
|||
expect(settings.pages['object_store']).to eq(nil)
|
||||
end
|
||||
|
||||
it 'allows pages to define its own connection' do
|
||||
pages_connection = { 'provider' => 'Google', 'google_application_default' => true }
|
||||
config['pages'] = {
|
||||
'enabled' => true,
|
||||
'object_store' => {
|
||||
context 'GitLab Pages' do
|
||||
let(:pages_connection) { { 'provider' => 'Google', 'google_application_default' => true } }
|
||||
|
||||
before do
|
||||
config['pages'] = {
|
||||
'enabled' => true,
|
||||
'connection' => pages_connection
|
||||
'object_store' => {
|
||||
'enabled' => true,
|
||||
'connection' => pages_connection
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
expect { subject }.not_to raise_error
|
||||
|
||||
described_class::WORKHORSE_ACCELERATED_TYPES.each do |object_type|
|
||||
section = settings.try(object_type)
|
||||
|
||||
next unless section
|
||||
|
||||
expect(section['object_store']['connection']).to eq(connection)
|
||||
expect(section['object_store']['consolidated_settings']).to be true
|
||||
end
|
||||
|
||||
expect(settings.pages['object_store']['connection']).to eq(pages_connection)
|
||||
expect(settings.pages['object_store']['consolidated_settings']).to be_falsey
|
||||
it_behaves_like 'consolidated settings for objects accelerated by Workhorse'
|
||||
|
||||
it 'allows pages to define its own connection' do
|
||||
expect { subject }.not_to raise_error
|
||||
|
||||
expect(settings.pages['object_store']['connection']).to eq(pages_connection)
|
||||
expect(settings.pages['object_store']['consolidated_settings']).to be_falsey
|
||||
end
|
||||
end
|
||||
|
||||
context 'when object storage is selectively disabled for artifacts' do
|
||||
before do
|
||||
config['artifacts'] = {
|
||||
'enabled' => true,
|
||||
'object_store' => {
|
||||
'enabled' => false
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
it_behaves_like 'consolidated settings for objects accelerated by Workhorse'
|
||||
|
||||
it 'does not enable consolidated settings for artifacts' do
|
||||
subject
|
||||
|
||||
expect(settings.artifacts['enabled']).to be true
|
||||
expect(settings.artifacts['object_store']['remote_directory']).to be_nil
|
||||
expect(settings.artifacts['object_store']['enabled']).to be_falsey
|
||||
expect(settings.artifacts['object_store']['consolidated_settings']).to be_falsey
|
||||
end
|
||||
end
|
||||
|
||||
context 'with legacy config' do
|
||||
|
|
|
|||
|
|
@ -0,0 +1,46 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe MergeRequests::OldestPerCommitFinder do
|
||||
describe '#execute' do
|
||||
it 'returns a Hash mapping commit SHAs to their oldest merge requests' do
|
||||
project = create(:project)
|
||||
mr1 = create(:merge_request, :merged, target_project: project)
|
||||
mr2 = create(:merge_request, :merged, target_project: project)
|
||||
mr1_diff = create(:merge_request_diff, merge_request: mr1)
|
||||
mr2_diff = create(:merge_request_diff, merge_request: mr2)
|
||||
sha1 = Digest::SHA1.hexdigest('foo')
|
||||
sha2 = Digest::SHA1.hexdigest('bar')
|
||||
|
||||
create(:merge_request_diff_commit, merge_request_diff: mr1_diff, sha: sha1)
|
||||
create(:merge_request_diff_commit, merge_request_diff: mr2_diff, sha: sha1)
|
||||
create(
|
||||
:merge_request_diff_commit,
|
||||
merge_request_diff: mr2_diff,
|
||||
sha: sha2,
|
||||
relative_order: 1
|
||||
)
|
||||
|
||||
commits = [double(:commit, id: sha1), double(:commit, id: sha2)]
|
||||
|
||||
expect(described_class.new(project).execute(commits)).to eq(
|
||||
sha1 => mr1,
|
||||
sha2 => mr2
|
||||
)
|
||||
end
|
||||
|
||||
it 'skips merge requests that are not merged' do
|
||||
mr = create(:merge_request)
|
||||
mr_diff = create(:merge_request_diff, merge_request: mr)
|
||||
sha = Digest::SHA1.hexdigest('foo')
|
||||
|
||||
create(:merge_request_diff_commit, merge_request_diff: mr_diff, sha: sha)
|
||||
|
||||
commits = [double(:commit, id: sha)]
|
||||
|
||||
expect(described_class.new(mr.target_project).execute(commits))
|
||||
.to be_empty
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -75,6 +75,8 @@ describe('Edit feature flag form', () => {
|
|||
});
|
||||
|
||||
const findAlert = () => wrapper.find(GlAlert);
|
||||
const findWarningGlAlert = () =>
|
||||
wrapper.findAll(GlAlert).filter((c) => c.props('variant') === 'warning');
|
||||
|
||||
it('should display the iid', () => {
|
||||
expect(wrapper.find('h3').text()).toContain('^5');
|
||||
|
|
@ -88,7 +90,7 @@ describe('Edit feature flag form', () => {
|
|||
expect(wrapper.find(GlToggle).props('value')).toBe(true);
|
||||
});
|
||||
|
||||
it('should not alert users that feature flags are changing soon', () => {
|
||||
it('should alert users the flag is read only', () => {
|
||||
expect(findAlert().text()).toContain('GitLab is moving to a new way of managing feature flags');
|
||||
});
|
||||
|
||||
|
|
@ -96,8 +98,9 @@ describe('Edit feature flag form', () => {
|
|||
it('should render the error', () => {
|
||||
store.dispatch('receiveUpdateFeatureFlagError', { message: ['The name is required'] });
|
||||
return wrapper.vm.$nextTick(() => {
|
||||
expect(wrapper.find('.alert-danger').exists()).toEqual(true);
|
||||
expect(wrapper.find('.alert-danger').text()).toContain('The name is required');
|
||||
const warningGlAlert = findWarningGlAlert();
|
||||
expect(warningGlAlert.at(1).exists()).toEqual(true);
|
||||
expect(warningGlAlert.at(1).text()).toContain('The name is required');
|
||||
});
|
||||
});
|
||||
});
@ -41,6 +41,9 @@ describe('New feature flag form', () => {
|
|||
});
|
||||
};
|
||||
|
||||
const findWarningGlAlert = () =>
|
||||
wrapper.findAll(GlAlert).filter((c) => c.props('variant') === 'warning');
|
||||
|
||||
beforeEach(() => {
|
||||
factory();
|
||||
});
@ -53,8 +56,9 @@ describe('New feature flag form', () => {
|
|||
it('should render the error', () => {
|
||||
store.dispatch('receiveCreateFeatureFlagError', { message: ['The name is required'] });
|
||||
return wrapper.vm.$nextTick(() => {
|
||||
expect(wrapper.find('.alert').exists()).toEqual(true);
|
||||
expect(wrapper.find('.alert').text()).toContain('The name is required');
|
||||
const warningGlAlert = findWarningGlAlert();
|
||||
expect(warningGlAlert.at(0).exists()).toBe(true);
|
||||
expect(warningGlAlert.at(0).text()).toContain('The name is required');
|
||||
});
|
||||
});
|
||||
});
@ -81,10 +85,6 @@ describe('New feature flag form', () => {
|
|||
expect(wrapper.find(Form).props('scopes')).toContainEqual(defaultScope);
|
||||
});
|
||||
|
||||
it('should not alert users that feature flags are changing soon', () => {
|
||||
expect(wrapper.find(GlAlert).exists()).toBe(false);
|
||||
});
|
||||
|
||||
it('has an all users strategy by default', () => {
|
||||
const strategies = wrapper.find(Form).props('strategies');
@ -1,14 +0,0 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`packages_filter renders 1`] = `
|
||||
<gl-search-box-by-click-stub
|
||||
clearable="true"
|
||||
clearbuttontitle="Clear"
|
||||
clearrecentsearchestext="Clear recent searches"
|
||||
closebuttontitle="Close"
|
||||
norecentsearchestext="You don't have any recent searches"
|
||||
placeholder="Filter by name"
|
||||
recentsearchesheader="Recent searches"
|
||||
value=""
|
||||
/>
|
||||
`;
@ -6,517 +6,60 @@ exports[`packages_list_app renders 1`] = `
|
|||
packagehelpurl="foo"
|
||||
/>
|
||||
|
||||
<b-tabs-stub
|
||||
activenavitemclass="gl-tab-nav-item-active gl-tab-nav-item-active-indigo"
|
||||
class="gl-tabs"
|
||||
contentclass=",gl-tab-content"
|
||||
navclass=",gl-tabs-nav"
|
||||
nofade="true"
|
||||
nonavstyle="true"
|
||||
tag="div"
|
||||
>
|
||||
<template>
|
||||
|
||||
<b-tab-stub
|
||||
tag="div"
|
||||
title="All"
|
||||
titlelinkclass="gl-tab-nav-item"
|
||||
>
|
||||
<template>
|
||||
<div>
|
||||
<section
|
||||
class="row empty-state text-center"
|
||||
>
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="svg-250 svg-content"
|
||||
>
|
||||
<img
|
||||
alt=""
|
||||
class="gl-max-w-full"
|
||||
src="helpSvg"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="text-content gl-mx-auto gl-my-0 gl-p-5"
|
||||
>
|
||||
<h1
|
||||
class="h4"
|
||||
>
|
||||
There are no packages yet
|
||||
</h1>
|
||||
|
||||
<p>
|
||||
Learn how to
|
||||
<b-link-stub
|
||||
class="gl-link"
|
||||
event="click"
|
||||
href="helpUrl"
|
||||
routertag="a"
|
||||
target="_blank"
|
||||
>
|
||||
publish and share your packages
|
||||
</b-link-stub>
|
||||
with GitLab.
|
||||
</p>
|
||||
|
||||
<div>
|
||||
<!---->
|
||||
|
||||
<!---->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</template>
|
||||
</b-tab-stub>
|
||||
<b-tab-stub
|
||||
tag="div"
|
||||
title="Composer"
|
||||
titlelinkclass="gl-tab-nav-item"
|
||||
>
|
||||
<template>
|
||||
<div>
|
||||
<section
|
||||
class="row empty-state text-center"
|
||||
>
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="svg-250 svg-content"
|
||||
>
|
||||
<img
|
||||
alt=""
|
||||
class="gl-max-w-full"
|
||||
src="helpSvg"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="text-content gl-mx-auto gl-my-0 gl-p-5"
|
||||
>
|
||||
<h1
|
||||
class="h4"
|
||||
>
|
||||
There are no Composer packages yet
|
||||
</h1>
|
||||
|
||||
<p>
|
||||
Learn how to
|
||||
<b-link-stub
|
||||
class="gl-link"
|
||||
event="click"
|
||||
href="helpUrl"
|
||||
routertag="a"
|
||||
target="_blank"
|
||||
>
|
||||
publish and share your packages
|
||||
</b-link-stub>
|
||||
with GitLab.
|
||||
</p>
|
||||
|
||||
<div>
|
||||
<!---->
|
||||
|
||||
<!---->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</template>
|
||||
</b-tab-stub>
|
||||
<b-tab-stub
|
||||
tag="div"
|
||||
title="Conan"
|
||||
titlelinkclass="gl-tab-nav-item"
|
||||
>
|
||||
<template>
|
||||
<div>
|
||||
<section
|
||||
class="row empty-state text-center"
|
||||
>
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="svg-250 svg-content"
|
||||
>
|
||||
<img
|
||||
alt=""
|
||||
class="gl-max-w-full"
|
||||
src="helpSvg"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="text-content gl-mx-auto gl-my-0 gl-p-5"
|
||||
>
|
||||
<h1
|
||||
class="h4"
|
||||
>
|
||||
There are no Conan packages yet
|
||||
</h1>
|
||||
|
||||
<p>
|
||||
Learn how to
|
||||
<b-link-stub
|
||||
class="gl-link"
|
||||
event="click"
|
||||
href="helpUrl"
|
||||
routertag="a"
|
||||
target="_blank"
|
||||
>
|
||||
publish and share your packages
|
||||
</b-link-stub>
|
||||
with GitLab.
|
||||
</p>
|
||||
|
||||
<div>
|
||||
<!---->
|
||||
|
||||
<!---->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</template>
|
||||
</b-tab-stub>
|
||||
<b-tab-stub
|
||||
tag="div"
|
||||
title="Generic"
|
||||
titlelinkclass="gl-tab-nav-item"
|
||||
>
|
||||
<template>
|
||||
<div>
|
||||
<section
|
||||
class="row empty-state text-center"
|
||||
>
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="svg-250 svg-content"
|
||||
>
|
||||
<img
|
||||
alt=""
|
||||
class="gl-max-w-full"
|
||||
src="helpSvg"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="text-content gl-mx-auto gl-my-0 gl-p-5"
|
||||
>
|
||||
<h1
|
||||
class="h4"
|
||||
>
|
||||
There are no Generic packages yet
|
||||
</h1>
|
||||
|
||||
<p>
|
||||
Learn how to
|
||||
<b-link-stub
|
||||
class="gl-link"
|
||||
event="click"
|
||||
href="helpUrl"
|
||||
routertag="a"
|
||||
target="_blank"
|
||||
>
|
||||
publish and share your packages
|
||||
</b-link-stub>
|
||||
with GitLab.
|
||||
</p>
|
||||
|
||||
<div>
|
||||
<!---->
|
||||
|
||||
<!---->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</template>
|
||||
</b-tab-stub>
|
||||
<b-tab-stub
|
||||
tag="div"
|
||||
title="Maven"
|
||||
titlelinkclass="gl-tab-nav-item"
|
||||
>
|
||||
<template>
|
||||
<div>
|
||||
<section
|
||||
class="row empty-state text-center"
|
||||
>
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="svg-250 svg-content"
|
||||
>
|
||||
<img
|
||||
alt=""
|
||||
class="gl-max-w-full"
|
||||
src="helpSvg"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="text-content gl-mx-auto gl-my-0 gl-p-5"
|
||||
>
|
||||
<h1
|
||||
class="h4"
|
||||
>
|
||||
There are no Maven packages yet
|
||||
</h1>
|
||||
|
||||
<p>
|
||||
Learn how to
|
||||
<b-link-stub
|
||||
class="gl-link"
|
||||
event="click"
|
||||
href="helpUrl"
|
||||
routertag="a"
|
||||
target="_blank"
|
||||
>
|
||||
publish and share your packages
|
||||
</b-link-stub>
|
||||
with GitLab.
|
||||
</p>
|
||||
|
||||
<div>
|
||||
<!---->
|
||||
|
||||
<!---->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</template>
|
||||
</b-tab-stub>
|
||||
<b-tab-stub
|
||||
tag="div"
|
||||
title="NPM"
|
||||
titlelinkclass="gl-tab-nav-item"
|
||||
>
|
||||
<template>
|
||||
<div>
|
||||
<section
|
||||
class="row empty-state text-center"
|
||||
>
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="svg-250 svg-content"
|
||||
>
|
||||
<img
|
||||
alt=""
|
||||
class="gl-max-w-full"
|
||||
src="helpSvg"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="text-content gl-mx-auto gl-my-0 gl-p-5"
|
||||
>
|
||||
<h1
|
||||
class="h4"
|
||||
>
|
||||
There are no NPM packages yet
|
||||
</h1>
|
||||
|
||||
<p>
|
||||
Learn how to
|
||||
<b-link-stub
|
||||
class="gl-link"
|
||||
event="click"
|
||||
href="helpUrl"
|
||||
routertag="a"
|
||||
target="_blank"
|
||||
>
|
||||
publish and share your packages
|
||||
</b-link-stub>
|
||||
with GitLab.
|
||||
</p>
|
||||
|
||||
<div>
|
||||
<!---->
|
||||
|
||||
<!---->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</template>
|
||||
</b-tab-stub>
|
||||
<b-tab-stub
|
||||
tag="div"
|
||||
title="NuGet"
|
||||
titlelinkclass="gl-tab-nav-item"
|
||||
>
|
||||
<template>
|
||||
<div>
|
||||
<section
|
||||
class="row empty-state text-center"
|
||||
>
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="svg-250 svg-content"
|
||||
>
|
||||
<img
|
||||
alt=""
|
||||
class="gl-max-w-full"
|
||||
src="helpSvg"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="text-content gl-mx-auto gl-my-0 gl-p-5"
|
||||
>
|
||||
<h1
|
||||
class="h4"
|
||||
>
|
||||
There are no NuGet packages yet
|
||||
</h1>
|
||||
|
||||
<p>
|
||||
Learn how to
|
||||
<b-link-stub
|
||||
class="gl-link"
|
||||
event="click"
|
||||
href="helpUrl"
|
||||
routertag="a"
|
||||
target="_blank"
|
||||
>
|
||||
publish and share your packages
|
||||
</b-link-stub>
|
||||
with GitLab.
|
||||
</p>
|
||||
|
||||
<div>
|
||||
<!---->
|
||||
|
||||
<!---->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</template>
|
||||
</b-tab-stub>
|
||||
<b-tab-stub
|
||||
tag="div"
|
||||
title="PyPI"
|
||||
titlelinkclass="gl-tab-nav-item"
|
||||
>
|
||||
<template>
|
||||
<div>
|
||||
<section
|
||||
class="row empty-state text-center"
|
||||
>
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="svg-250 svg-content"
|
||||
>
|
||||
<img
|
||||
alt=""
|
||||
class="gl-max-w-full"
|
||||
src="helpSvg"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="text-content gl-mx-auto gl-my-0 gl-p-5"
|
||||
>
|
||||
<h1
|
||||
class="h4"
|
||||
>
|
||||
There are no PyPI packages yet
|
||||
</h1>
|
||||
|
||||
<p>
|
||||
Learn how to
|
||||
<b-link-stub
|
||||
class="gl-link"
|
||||
event="click"
|
||||
href="helpUrl"
|
||||
routertag="a"
|
||||
target="_blank"
|
||||
>
|
||||
publish and share your packages
|
||||
</b-link-stub>
|
||||
with GitLab.
|
||||
</p>
|
||||
|
||||
<div>
|
||||
<!---->
|
||||
|
||||
<!---->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</template>
|
||||
</b-tab-stub>
|
||||
</template>
|
||||
<template>
|
||||
<package-search-stub />
|
||||
|
||||
<div>
|
||||
<section
|
||||
class="row empty-state text-center"
|
||||
>
|
||||
<div
|
||||
class="gl-display-flex gl-align-self-center gl-py-2 gl-flex-grow-1 gl-justify-content-end"
|
||||
class="col-12"
|
||||
>
|
||||
<package-filter-stub
|
||||
class="gl-mr-2"
|
||||
/>
|
||||
|
||||
<package-sort-stub />
|
||||
<div
|
||||
class="svg-250 svg-content"
|
||||
>
|
||||
<img
|
||||
alt=""
|
||||
class="gl-max-w-full"
|
||||
src="helpSvg"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
</b-tabs-stub>
|
||||
|
||||
<div
|
||||
class="col-12"
|
||||
>
|
||||
<div
|
||||
class="text-content gl-mx-auto gl-my-0 gl-p-5"
|
||||
>
|
||||
<h1
|
||||
class="h4"
|
||||
>
|
||||
There are no packages yet
|
||||
</h1>
|
||||
|
||||
<p>
|
||||
Learn how to
|
||||
<b-link-stub
|
||||
class="gl-link"
|
||||
event="click"
|
||||
href="helpUrl"
|
||||
routertag="a"
|
||||
target="_blank"
|
||||
>
|
||||
publish and share your packages
|
||||
</b-link-stub>
|
||||
with GitLab.
|
||||
</p>
|
||||
|
||||
<div>
|
||||
<!---->
|
||||
|
||||
<!---->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</div>
|
||||
`;
@ -1,50 +0,0 @@
|
|||
import Vuex from 'vuex';
|
||||
import { GlSearchBoxByClick } from '@gitlab/ui';
|
||||
import { createLocalVue, shallowMount } from '@vue/test-utils';
|
||||
import PackagesFilter from '~/packages/list/components/packages_filter.vue';
|
||||
|
||||
const localVue = createLocalVue();
|
||||
localVue.use(Vuex);
|
||||
|
||||
describe('packages_filter', () => {
|
||||
let wrapper;
|
||||
let store;
|
||||
|
||||
const findGlSearchBox = () => wrapper.find(GlSearchBoxByClick);
|
||||
|
||||
const mountComponent = () => {
|
||||
store = new Vuex.Store();
|
||||
store.dispatch = jest.fn();
|
||||
|
||||
wrapper = shallowMount(PackagesFilter, {
|
||||
localVue,
|
||||
store,
|
||||
});
|
||||
};
|
||||
|
||||
beforeEach(mountComponent);
|
||||
|
||||
afterEach(() => {
|
||||
wrapper.destroy();
|
||||
wrapper = null;
|
||||
});
|
||||
|
||||
it('renders', () => {
|
||||
expect(wrapper.element).toMatchSnapshot();
|
||||
});
|
||||
|
||||
describe('emits events', () => {
|
||||
it('sets the filter value in the store on input', () => {
|
||||
const searchString = 'foo';
|
||||
findGlSearchBox().vm.$emit('input', searchString);
|
||||
|
||||
expect(store.dispatch).toHaveBeenCalledWith('setFilter', searchString);
|
||||
});
|
||||
|
||||
it('emits the filter event when search box is submitted', () => {
|
||||
findGlSearchBox().vm.$emit('submit');
|
||||
|
||||
expect(wrapper.emitted('filter')).toBeTruthy();
|
||||
});
|
||||
});
|
||||
});
@ -1,9 +1,10 @@
|
|||
import Vuex from 'vuex';
|
||||
import { shallowMount, createLocalVue } from '@vue/test-utils';
|
||||
import { GlEmptyState, GlTab, GlTabs, GlSprintf, GlLink } from '@gitlab/ui';
|
||||
import { GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui';
|
||||
import * as commonUtils from '~/lib/utils/common_utils';
|
||||
import createFlash from '~/flash';
|
||||
import PackageListApp from '~/packages/list/components/packages_list_app.vue';
|
||||
import PackageSearch from '~/packages/list/components/package_search.vue';
|
||||
import { SHOW_DELETE_SUCCESS_ALERT } from '~/packages/shared/constants';
|
||||
import { DELETE_PACKAGE_SUCCESS_MESSAGE } from '~/packages/list/constants';
@ -26,9 +27,9 @@ describe('packages_list_app', () => {
|
|||
const emptyListHelpUrl = 'helpUrl';
|
||||
const findEmptyState = () => wrapper.find(GlEmptyState);
|
||||
const findListComponent = () => wrapper.find(PackageList);
|
||||
const findTabComponent = (index = 0) => wrapper.findAll(GlTab).at(index);
|
||||
const findPackageSearch = () => wrapper.find(PackageSearch);
|
||||
|
||||
const createStore = (filterQuery = '') => {
|
||||
const createStore = (filter = []) => {
|
||||
store = new Vuex.Store({
|
||||
state: {
|
||||
isLoading: false,
|
||||
|
|
@ -38,7 +39,7 @@ describe('packages_list_app', () => {
|
|||
emptyListHelpUrl,
|
||||
packageHelpUrl: 'foo',
|
||||
},
|
||||
filterQuery,
|
||||
filter,
|
||||
},
|
||||
});
|
||||
store.dispatch = jest.fn();
@ -52,8 +53,6 @@ describe('packages_list_app', () => {
|
|||
GlEmptyState,
|
||||
GlLoadingIcon,
|
||||
PackageList,
|
||||
GlTab,
|
||||
GlTabs,
|
||||
GlSprintf,
|
||||
GlLink,
|
||||
},
@ -122,27 +121,9 @@ describe('packages_list_app', () => {
|
|||
expect(store.dispatch).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
describe('tab change', () => {
|
||||
it('calls requestPackagesList when all tab is clicked', () => {
|
||||
mountComponent();
|
||||
|
||||
findTabComponent().trigger('click');
|
||||
|
||||
expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList');
|
||||
});
|
||||
|
||||
it('calls requestPackagesList when a package type tab is clicked', () => {
|
||||
mountComponent();
|
||||
|
||||
findTabComponent(1).trigger('click');
|
||||
|
||||
expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList');
|
||||
});
|
||||
});
|
||||
|
||||
describe('filter without results', () => {
|
||||
beforeEach(() => {
|
||||
createStore('foo');
|
||||
createStore([{ type: 'something' }]);
|
||||
mountComponent();
|
||||
});
@ -154,12 +135,28 @@ describe('packages_list_app', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('Package Search', () => {
|
||||
it('exists', () => {
|
||||
mountComponent();
|
||||
|
||||
expect(findPackageSearch().exists()).toBe(true);
|
||||
});
|
||||
|
||||
it.each(['sort:changed', 'filter:changed'])('on %p fetches data from the store', (event) => {
|
||||
mountComponent();
|
||||
|
||||
findPackageSearch().vm.$emit(event);
|
||||
|
||||
expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList');
|
||||
});
|
||||
});
|
||||
|
||||
describe('delete alert handling', () => {
|
||||
const { location } = window.location;
|
||||
const search = `?${SHOW_DELETE_SUCCESS_ALERT}=true`;
|
||||
|
||||
beforeEach(() => {
|
||||
createStore('foo');
|
||||
createStore();
|
||||
jest.spyOn(commonUtils, 'historyReplaceState').mockImplementation(() => {});
|
||||
delete window.location;
|
||||
window.location = {
@ -0,0 +1,145 @@
|
|||
import Vuex from 'vuex';
|
||||
import { GlSorting, GlSortingItem, GlFilteredSearch } from '@gitlab/ui';
|
||||
import { shallowMount, createLocalVue } from '@vue/test-utils';
|
||||
import component from '~/packages/list/components/package_search.vue';
|
||||
import PackageTypeToken from '~/packages/list/components/tokens/package_type_token.vue';
|
||||
|
||||
const localVue = createLocalVue();
|
||||
localVue.use(Vuex);
|
||||
|
||||
describe('Package Search', () => {
|
||||
let wrapper;
|
||||
let store;
|
||||
let sorting;
|
||||
let sortingItems;
|
||||
|
||||
const findPackageListSorting = () => wrapper.find(GlSorting);
|
||||
const findSortingItems = () => wrapper.findAll(GlSortingItem);
|
||||
const findFilteredSearch = () => wrapper.find(GlFilteredSearch);
|
||||
|
||||
const createStore = (isGroupPage) => {
|
||||
const state = {
|
||||
config: {
|
||||
isGroupPage,
|
||||
},
|
||||
sorting: {
|
||||
orderBy: 'version',
|
||||
sort: 'desc',
|
||||
},
|
||||
filter: [],
|
||||
};
|
||||
store = new Vuex.Store({
|
||||
state,
|
||||
});
|
||||
store.dispatch = jest.fn();
|
||||
};
|
||||
|
||||
const mountComponent = (isGroupPage = false) => {
|
||||
createStore(isGroupPage);
|
||||
|
||||
wrapper = shallowMount(component, {
|
||||
localVue,
|
||||
store,
|
||||
stubs: {
|
||||
GlSortingItem,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
afterEach(() => {
|
||||
wrapper.destroy();
|
||||
wrapper = null;
|
||||
});
|
||||
|
||||
describe('searching', () => {
|
||||
it('has a filtered-search component', () => {
|
||||
mountComponent();
|
||||
|
||||
expect(findFilteredSearch().exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('binds the correct props to filtered-search', () => {
|
||||
mountComponent();
|
||||
|
||||
expect(findFilteredSearch().props()).toMatchObject({
|
||||
value: [],
|
||||
placeholder: 'Filter results',
|
||||
availableTokens: wrapper.vm.tokens,
|
||||
});
|
||||
});
|
||||
|
||||
it('updates vuex when value changes', () => {
|
||||
mountComponent();
|
||||
|
||||
findFilteredSearch().vm.$emit('input', ['foo']);
|
||||
|
||||
expect(store.dispatch).toHaveBeenCalledWith('setFilter', ['foo']);
|
||||
});
|
||||
|
||||
it('emits filter:changed on submit event', () => {
|
||||
mountComponent();
|
||||
|
||||
findFilteredSearch().vm.$emit('submit');
|
||||
expect(wrapper.emitted('filter:changed')).toEqual([[]]);
|
||||
});
|
||||
|
||||
it('emits filter:changed on clear event and reset vuex', () => {
|
||||
mountComponent();
|
||||
|
||||
findFilteredSearch().vm.$emit('clear');
|
||||
|
||||
expect(store.dispatch).toHaveBeenCalledWith('setFilter', []);
|
||||
expect(wrapper.emitted('filter:changed')).toEqual([[]]);
|
||||
});
|
||||
|
||||
it('has a PackageTypeToken token', () => {
|
||||
mountComponent();
|
||||
|
||||
expect(findFilteredSearch().props('availableTokens')).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({ token: PackageTypeToken, type: 'type', icon: 'package' }),
|
||||
]),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('sorting', () => {
|
||||
describe('when is in projects', () => {
|
||||
beforeEach(() => {
|
||||
mountComponent();
|
||||
sorting = findPackageListSorting();
|
||||
sortingItems = findSortingItems();
|
||||
});
|
||||
|
||||
it('has all the sortable items', () => {
|
||||
expect(sortingItems).toHaveLength(wrapper.vm.sortableFields.length);
|
||||
});
|
||||
|
||||
it('on sort change set sorting in vuex and emit event', () => {
|
||||
sorting.vm.$emit('sortDirectionChange');
|
||||
expect(store.dispatch).toHaveBeenCalledWith('setSorting', { sort: 'asc' });
|
||||
expect(wrapper.emitted('sort:changed')).toBeTruthy();
|
||||
});
|
||||
|
||||
it('on sort item click set sorting and emit event', () => {
|
||||
const item = sortingItems.at(0);
|
||||
const { orderBy } = wrapper.vm.sortableFields[0];
|
||||
item.vm.$emit('click');
|
||||
expect(store.dispatch).toHaveBeenCalledWith('setSorting', { orderBy });
|
||||
expect(wrapper.emitted('sort:changed')).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('when is in group', () => {
|
||||
beforeEach(() => {
|
||||
mountComponent(true);
|
||||
sorting = findPackageListSorting();
|
||||
sortingItems = findSortingItems();
|
||||
});
|
||||
|
||||
it('has all the sortable items', () => {
|
||||
expect(sortingItems).toHaveLength(wrapper.vm.sortableFields.length);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -1,90 +0,0 @@
|
|||
import Vuex from 'vuex';
|
||||
import { GlSorting, GlSortingItem } from '@gitlab/ui';
|
||||
import { mount, createLocalVue } from '@vue/test-utils';
|
||||
import stubChildren from 'helpers/stub_children';
|
||||
import PackagesSort from '~/packages/list/components/packages_sort.vue';
|
||||
|
||||
const localVue = createLocalVue();
|
||||
localVue.use(Vuex);
|
||||
|
||||
describe('packages_sort', () => {
|
||||
let wrapper;
|
||||
let store;
|
||||
let sorting;
|
||||
let sortingItems;
|
||||
|
||||
const findPackageListSorting = () => wrapper.find(GlSorting);
|
||||
const findSortingItems = () => wrapper.findAll(GlSortingItem);
|
||||
|
||||
const createStore = (isGroupPage) => {
|
||||
const state = {
|
||||
config: {
|
||||
isGroupPage,
|
||||
},
|
||||
sorting: {
|
||||
orderBy: 'version',
|
||||
sort: 'desc',
|
||||
},
|
||||
};
|
||||
store = new Vuex.Store({
|
||||
state,
|
||||
});
|
||||
store.dispatch = jest.fn();
|
||||
};
|
||||
|
||||
const mountComponent = (isGroupPage = false) => {
|
||||
createStore(isGroupPage);
|
||||
|
||||
wrapper = mount(PackagesSort, {
|
||||
localVue,
|
||||
store,
|
||||
stubs: {
|
||||
...stubChildren(PackagesSort),
|
||||
GlSortingItem,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
afterEach(() => {
|
||||
wrapper.destroy();
|
||||
wrapper = null;
|
||||
});
|
||||
|
||||
describe('when is in projects', () => {
|
||||
beforeEach(() => {
|
||||
mountComponent();
|
||||
sorting = findPackageListSorting();
|
||||
sortingItems = findSortingItems();
|
||||
});
|
||||
|
||||
it('has all the sortable items', () => {
|
||||
expect(sortingItems).toHaveLength(wrapper.vm.sortableFields.length);
|
||||
});
|
||||
|
||||
it('on sort change set sorting in vuex and emit event', () => {
|
||||
sorting.vm.$emit('sortDirectionChange');
|
||||
expect(store.dispatch).toHaveBeenCalledWith('setSorting', { sort: 'asc' });
|
||||
expect(wrapper.emitted('sort:changed')).toBeTruthy();
|
||||
});
|
||||
|
||||
it('on sort item click set sorting and emit event', () => {
|
||||
const item = sortingItems.at(0);
|
||||
const { orderBy } = wrapper.vm.sortableFields[0];
|
||||
item.vm.$emit('click');
|
||||
expect(store.dispatch).toHaveBeenCalledWith('setSorting', { orderBy });
|
||||
expect(wrapper.emitted('sort:changed')).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('when is in group', () => {
|
||||
beforeEach(() => {
|
||||
mountComponent(true);
|
||||
sorting = findPackageListSorting();
|
||||
sortingItems = findSortingItems();
|
||||
});
|
||||
|
||||
it('has all the sortable items', () => {
|
||||
expect(sortingItems).toHaveLength(wrapper.vm.sortableFields.length);
|
||||
});
|
||||
});
|
||||
});
@ -0,0 +1,48 @@
|
|||
import { shallowMount } from '@vue/test-utils';
|
||||
import { GlFilteredSearchToken, GlFilteredSearchSuggestion } from '@gitlab/ui';
|
||||
import component from '~/packages/list/components/tokens/package_type_token.vue';
|
||||
import { PACKAGE_TYPES } from '~/packages/list/constants';
|
||||
|
||||
describe('packages_filter', () => {
|
||||
let wrapper;
|
||||
|
||||
const findFilteredSearchToken = () => wrapper.find(GlFilteredSearchToken);
|
||||
const findFilteredSearchSuggestions = () => wrapper.findAll(GlFilteredSearchSuggestion);
|
||||
|
||||
const mountComponent = ({ attrs, listeners } = {}) => {
|
||||
wrapper = shallowMount(component, {
|
||||
attrs,
|
||||
listeners,
|
||||
});
|
||||
};
|
||||
|
||||
afterEach(() => {
|
||||
wrapper.destroy();
|
||||
wrapper = null;
|
||||
});
|
||||
|
||||
it('it binds all of his attrs to filtered search token', () => {
|
||||
mountComponent({ attrs: { foo: 'bar' } });
|
||||
|
||||
expect(findFilteredSearchToken().attributes('foo')).toBe('bar');
|
||||
});
|
||||
|
||||
it('it binds all of his events to filtered search token', () => {
|
||||
const clickListener = jest.fn();
|
||||
mountComponent({ listeners: { click: clickListener } });
|
||||
|
||||
findFilteredSearchToken().vm.$emit('click');
|
||||
|
||||
expect(clickListener).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it.each(PACKAGE_TYPES.map((p, index) => [p, index]))(
|
||||
'displays a suggestion for %p',
|
||||
(packageType, index) => {
|
||||
mountComponent();
|
||||
const item = findFilteredSearchSuggestions().at(index);
|
||||
expect(item.text()).toBe(packageType.title);
|
||||
expect(item.props('value')).toBe(packageType.type);
|
||||
},
|
||||
);
|
||||
});
|
||||
|
|
@ -30,11 +30,13 @@ describe('Actions Package list store', () => {
|
|||
sort: 'asc',
|
||||
orderBy: 'version',
|
||||
};
|
||||
|
||||
const filter = [];
|
||||
it('should fetch the project packages list when isGroupPage is false', (done) => {
|
||||
testAction(
|
||||
actions.requestPackagesList,
|
||||
undefined,
|
||||
{ config: { isGroupPage: false, resourceId: 1 }, sorting },
|
||||
{ config: { isGroupPage: false, resourceId: 1 }, sorting, filter },
|
||||
[],
|
||||
[
|
||||
{ type: 'setLoading', payload: true },
|
||||
|
|
@ -54,7 +56,7 @@ describe('Actions Package list store', () => {
|
|||
testAction(
|
||||
actions.requestPackagesList,
|
||||
undefined,
|
||||
{ config: { isGroupPage: true, resourceId: 2 }, sorting },
|
||||
{ config: { isGroupPage: true, resourceId: 2 }, sorting, filter },
|
||||
[],
|
||||
[
|
||||
{ type: 'setLoading', payload: true },
|
||||
|
|
@ -70,7 +72,7 @@ describe('Actions Package list store', () => {
|
|||
);
|
||||
});
|
||||
|
||||
it('should fetch packages of a certain type when selectedType is present', (done) => {
|
||||
it('should fetch packages of a certain type when a filter with a type is present', (done) => {
|
||||
const packageType = 'maven';
|
||||
|
||||
testAction(
|
||||
|
|
@ -79,7 +81,7 @@ describe('Actions Package list store', () => {
|
|||
{
|
||||
config: { isGroupPage: false, resourceId: 1 },
|
||||
sorting,
|
||||
selectedType: { type: packageType },
|
||||
filter: [{ type: 'type', value: { data: 'maven' } }],
|
||||
},
|
||||
[],
|
||||
[
|
||||
|
|
@ -107,7 +109,7 @@ describe('Actions Package list store', () => {
|
|||
testAction(
|
||||
actions.requestPackagesList,
|
||||
undefined,
|
||||
{ config: { isGroupPage: false, resourceId: 2 }, sorting },
|
||||
{ config: { isGroupPage: false, resourceId: 2 }, sorting, filter },
|
||||
[],
|
||||
[
|
||||
{ type: 'setLoading', payload: true },
|
||||
|
|
|
|||
|
|
@ -78,17 +78,10 @@ describe('Mutations Registry Store', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('SET_SELECTED_TYPE', () => {
|
||||
it('should set the selected type', () => {
|
||||
mutations[types.SET_SELECTED_TYPE](mockState, { type: 'maven' });
|
||||
expect(mockState.selectedType).toEqual({ type: 'maven' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('SET_FILTER', () => {
|
||||
it('should set the filter query', () => {
|
||||
mutations[types.SET_FILTER](mockState, 'foo');
|
||||
expect(mockState.filterQuery).toEqual('foo');
|
||||
expect(mockState.filter).toEqual('foo');
|
||||
});
|
||||
});
|
||||
});
@ -88,3 +88,53 @@ export const issueDiff = [
|
|||
urlPath: 'headPath/lib/six.rb#L6',
|
||||
},
|
||||
];
|
||||
|
||||
export const reportIssues = {
|
||||
status: 'failed',
|
||||
new_errors: [
|
||||
{
|
||||
description:
|
||||
'Method `long_if` has a Cognitive Complexity of 10 (exceeds 5 allowed). Consider refactoring.',
|
||||
severity: 'minor',
|
||||
file_path: 'codequality.rb',
|
||||
line: 5,
|
||||
},
|
||||
],
|
||||
resolved_errors: [
|
||||
{
|
||||
description: 'Insecure Dependency',
|
||||
severity: 'major',
|
||||
file_path: 'lib/six.rb',
|
||||
line: 22,
|
||||
},
|
||||
],
|
||||
existing_errors: [],
|
||||
summary: { total: 3, resolved: 0, errored: 3 },
|
||||
};
|
||||
|
||||
export const parsedReportIssues = {
|
||||
newIssues: [
|
||||
{
|
||||
description:
|
||||
'Method `long_if` has a Cognitive Complexity of 10 (exceeds 5 allowed). Consider refactoring.',
|
||||
file_path: 'codequality.rb',
|
||||
line: 5,
|
||||
name:
|
||||
'Method `long_if` has a Cognitive Complexity of 10 (exceeds 5 allowed). Consider refactoring.',
|
||||
path: 'codequality.rb',
|
||||
severity: 'minor',
|
||||
urlPath: 'null/codequality.rb#L5',
|
||||
},
|
||||
],
|
||||
resolvedIssues: [
|
||||
{
|
||||
description: 'Insecure Dependency',
|
||||
file_path: 'lib/six.rb',
|
||||
line: 22,
|
||||
name: 'Insecure Dependency',
|
||||
path: 'lib/six.rb',
|
||||
severity: 'major',
|
||||
urlPath: 'null/lib/six.rb#L22',
|
||||
},
|
||||
],
|
||||
};
@ -5,7 +5,14 @@ import axios from '~/lib/utils/axios_utils';
|
|||
import * as actions from '~/reports/codequality_report/store/actions';
|
||||
import * as types from '~/reports/codequality_report/store/mutation_types';
|
||||
import createStore from '~/reports/codequality_report/store';
|
||||
import { headIssues, baseIssues, mockParsedHeadIssues, mockParsedBaseIssues } from '../mock_data';
|
||||
import {
|
||||
headIssues,
|
||||
baseIssues,
|
||||
mockParsedHeadIssues,
|
||||
mockParsedBaseIssues,
|
||||
reportIssues,
|
||||
parsedReportIssues,
|
||||
} from '../mock_data';
|
||||
|
||||
// mock codequality comparison worker
|
||||
jest.mock('~/reports/codequality_report/workers/codequality_comparison_worker', () =>
|
||||
|
|
@ -39,6 +46,7 @@ describe('Codequality Reports actions', () => {
|
|||
headPath: 'headPath',
|
||||
baseBlobPath: 'baseBlobPath',
|
||||
headBlobPath: 'headBlobPath',
|
||||
reportsPath: 'reportsPath',
|
||||
helpPath: 'codequalityHelpPath',
|
||||
};
|
||||
|
||||
|
|
@ -55,68 +63,119 @@ describe('Codequality Reports actions', () => {
|
|||
|
||||
describe('fetchReports', () => {
|
||||
let mock;
|
||||
let diffFeatureFlagEnabled;
|
||||
|
||||
beforeEach(() => {
|
||||
localState.headPath = `${TEST_HOST}/head.json`;
|
||||
localState.basePath = `${TEST_HOST}/base.json`;
|
||||
mock = new MockAdapter(axios);
|
||||
});
|
||||
describe('with codequalityMrDiff feature flag enabled', () => {
|
||||
beforeEach(() => {
|
||||
diffFeatureFlagEnabled = true;
|
||||
localState.reportsPath = `${TEST_HOST}/codequality_reports.json`;
|
||||
mock = new MockAdapter(axios);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mock.restore();
|
||||
});
|
||||
afterEach(() => {
|
||||
mock.restore();
|
||||
});
|
||||
|
||||
describe('on success', () => {
|
||||
it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', (done) => {
|
||||
mock.onGet(`${TEST_HOST}/head.json`).reply(200, headIssues);
|
||||
mock.onGet(`${TEST_HOST}/base.json`).reply(200, baseIssues);
|
||||
describe('on success', () => {
|
||||
it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', (done) => {
|
||||
mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(200, reportIssues);
|
||||
|
||||
testAction(
|
||||
actions.fetchReports,
|
||||
null,
|
||||
localState,
|
||||
[{ type: types.REQUEST_REPORTS }],
|
||||
[
|
||||
{
|
||||
payload: {
|
||||
newIssues: [mockParsedHeadIssues[0]],
|
||||
resolvedIssues: [mockParsedBaseIssues[0]],
|
||||
testAction(
|
||||
actions.fetchReports,
|
||||
diffFeatureFlagEnabled,
|
||||
localState,
|
||||
[{ type: types.REQUEST_REPORTS }],
|
||||
[
|
||||
{
|
||||
payload: parsedReportIssues,
|
||||
type: 'receiveReportsSuccess',
|
||||
},
|
||||
type: 'receiveReportsSuccess',
|
||||
},
|
||||
],
|
||||
done,
|
||||
);
|
||||
],
|
||||
done,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('on error', () => {
|
||||
it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
|
||||
mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(500);
|
||||
|
||||
testAction(
|
||||
actions.fetchReports,
|
||||
diffFeatureFlagEnabled,
|
||||
localState,
|
||||
[{ type: types.REQUEST_REPORTS }],
|
||||
[{ type: 'receiveReportsError', payload: expect.any(Error) }],
|
||||
done,
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('on error', () => {
|
||||
it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
|
||||
mock.onGet(`${TEST_HOST}/head.json`).reply(500);
|
||||
|
||||
testAction(
|
||||
actions.fetchReports,
|
||||
null,
|
||||
localState,
|
||||
[{ type: types.REQUEST_REPORTS }],
|
||||
[{ type: 'receiveReportsError' }],
|
||||
done,
|
||||
);
|
||||
describe('with codequalityMrDiff feature flag disabled', () => {
|
||||
beforeEach(() => {
|
||||
diffFeatureFlagEnabled = false;
|
||||
localState.headPath = `${TEST_HOST}/head.json`;
|
||||
localState.basePath = `${TEST_HOST}/base.json`;
|
||||
mock = new MockAdapter(axios);
|
||||
});
|
||||
});
|
||||
|
||||
describe('with no base path', () => {
|
||||
it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
|
||||
localState.basePath = null;
|
||||
afterEach(() => {
|
||||
mock.restore();
|
||||
});
|
||||
|
||||
testAction(
|
||||
actions.fetchReports,
|
||||
null,
|
||||
localState,
|
||||
[{ type: types.REQUEST_REPORTS }],
|
||||
[{ type: 'receiveReportsError' }],
|
||||
done,
|
||||
);
|
||||
describe('on success', () => {
|
||||
it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', (done) => {
|
||||
mock.onGet(`${TEST_HOST}/head.json`).reply(200, headIssues);
|
||||
mock.onGet(`${TEST_HOST}/base.json`).reply(200, baseIssues);
|
||||
|
||||
testAction(
|
||||
actions.fetchReports,
|
||||
diffFeatureFlagEnabled,
|
||||
localState,
|
||||
[{ type: types.REQUEST_REPORTS }],
|
||||
[
|
||||
{
|
||||
payload: {
|
||||
newIssues: [mockParsedHeadIssues[0]],
|
||||
resolvedIssues: [mockParsedBaseIssues[0]],
|
||||
},
|
||||
type: 'receiveReportsSuccess',
|
||||
},
|
||||
],
|
||||
done,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('on error', () => {
|
||||
it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
|
||||
mock.onGet(`${TEST_HOST}/head.json`).reply(500);
|
||||
|
||||
testAction(
|
||||
actions.fetchReports,
|
||||
diffFeatureFlagEnabled,
|
||||
localState,
|
||||
[{ type: types.REQUEST_REPORTS }],
|
||||
[{ type: 'receiveReportsError', payload: expect.any(Error) }],
|
||||
done,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('with no base path', () => {
|
||||
it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
|
||||
localState.basePath = null;
|
||||
|
||||
testAction(
|
||||
actions.fetchReports,
|
||||
diffFeatureFlagEnabled,
|
||||
localState,
|
||||
[{ type: types.REQUEST_REPORTS }],
|
||||
[{ type: 'receiveReportsError' }],
|
||||
done,
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
@ -142,7 +201,7 @@ describe('Codequality Reports actions', () => {
|
|||
actions.receiveReportsError,
|
||||
null,
|
||||
localState,
|
||||
[{ type: types.RECEIVE_REPORTS_ERROR }],
|
||||
[{ type: types.RECEIVE_REPORTS_ERROR, payload: null }],
|
||||
[],
|
||||
done,
|
||||
);
|
@ -55,6 +55,12 @@ describe('Codequality Reports mutations', () => {
|
|||
expect(localState.hasError).toEqual(false);
|
||||
});
|
||||
|
||||
it('clears statusReason', () => {
|
||||
mutations.RECEIVE_REPORTS_SUCCESS(localState, {});
|
||||
|
||||
expect(localState.statusReason).toEqual('');
|
||||
});
|
||||
|
||||
it('sets newIssues and resolvedIssues from response data', () => {
|
||||
const data = { newIssues: [{ id: 1 }], resolvedIssues: [{ id: 2 }] };
|
||||
mutations.RECEIVE_REPORTS_SUCCESS(localState, data);
|
||||
|
|
@ -76,5 +82,13 @@ describe('Codequality Reports mutations', () => {
|
|||
|
||||
expect(localState.hasError).toEqual(true);
|
||||
});
|
||||
|
||||
it('sets statusReason to string from error response data', () => {
|
||||
const data = { status_reason: 'This merge request does not have codequality reports' };
|
||||
const error = { response: { data } };
|
||||
mutations.RECEIVE_REPORTS_ERROR(localState, error);
|
||||
|
||||
expect(localState.statusReason).toEqual(data.status_reason);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -2,7 +2,13 @@ import {
|
|||
parseCodeclimateMetrics,
|
||||
doCodeClimateComparison,
|
||||
} from '~/reports/codequality_report/store/utils/codequality_comparison';
|
||||
import { baseIssues, mockParsedHeadIssues, mockParsedBaseIssues } from '../../mock_data';
|
||||
import {
|
||||
baseIssues,
|
||||
mockParsedHeadIssues,
|
||||
mockParsedBaseIssues,
|
||||
reportIssues,
|
||||
parsedReportIssues,
|
||||
} from '../../mock_data';
|
||||
|
||||
jest.mock('~/reports/codequality_report/workers/codequality_comparison_worker', () => {
|
||||
let mockPostMessageCallback;
|
||||
|
|
@ -34,7 +40,7 @@ describe('Codequality report store utils', () => {
|
|||
let result;
|
||||
|
||||
describe('parseCodeclimateMetrics', () => {
|
||||
it('should parse the received issues', () => {
|
||||
it('should parse the issues from codeclimate artifacts', () => {
|
||||
[result] = parseCodeclimateMetrics(baseIssues, 'path');
|
||||
|
||||
expect(result.name).toEqual(baseIssues[0].check_name);
|
||||
|
|
@ -42,6 +48,14 @@ describe('Codequality report store utils', () => {
|
|||
expect(result.line).toEqual(baseIssues[0].location.lines.begin);
|
||||
});
|
||||
|
||||
it('should parse the issues from backend codequality diff', () => {
|
||||
[result] = parseCodeclimateMetrics(reportIssues.new_errors, 'path');
|
||||
|
||||
expect(result.name).toEqual(parsedReportIssues.newIssues[0].name);
|
||||
expect(result.path).toEqual(parsedReportIssues.newIssues[0].path);
|
||||
expect(result.line).toEqual(parsedReportIssues.newIssues[0].line);
|
||||
});
|
||||
|
||||
describe('when an issue has no location or path', () => {
|
||||
const issue = { description: 'Insecure Dependency' };
|
||||
|
||||
|
|
|
|||
|
|
@ -135,6 +135,15 @@ RSpec.describe AvatarsHelper do
|
|||
helper.avatar_icon_for_user(nil, 20, 2)
|
||||
end
|
||||
end
|
||||
|
||||
context 'for a blocked user' do
|
||||
let(:user) { create(:user, :blocked) }
|
||||
|
||||
it 'returns the default avatar' do
|
||||
expect(helper.avatar_icon_for_user(user).to_s)
|
||||
.to eq(helper.default_avatar)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#gravatar_icon' do
@ -86,5 +86,43 @@ RSpec.describe Gitlab::Changelog::Committer do
|
|||
end.not_to raise_error
|
||||
end
|
||||
end
|
||||
|
||||
context "when the changelog changes before saving the changes" do
|
||||
it 'raises a CommitError' do
|
||||
release1 = Gitlab::Changelog::Release
|
||||
.new(version: '1.0.0', date: Time.utc(2020, 1, 1), config: config)
|
||||
|
||||
release2 = Gitlab::Changelog::Release
|
||||
.new(version: '2.0.0', date: Time.utc(2020, 1, 1), config: config)
|
||||
|
||||
# This creates the initial commit we'll later use to see if the
|
||||
# changelog changed before saving our changes.
|
||||
committer.commit(
|
||||
release: release1,
|
||||
file: 'CHANGELOG.md',
|
||||
branch: 'master',
|
||||
message: 'Initial commit'
|
||||
)
|
||||
|
||||
allow(Gitlab::Git::Commit)
|
||||
.to receive(:last_for_path)
|
||||
.with(
|
||||
project.repository,
|
||||
'master',
|
||||
'CHANGELOG.md',
|
||||
literal_pathspec: true
|
||||
)
|
||||
.and_return(double(:commit, sha: 'foo'))
|
||||
|
||||
expect do
|
||||
committer.commit(
|
||||
release: release2,
|
||||
file: 'CHANGELOG.md',
|
||||
branch: 'master',
|
||||
message: 'Test commit'
|
||||
)
|
||||
end.to raise_error(described_class::CommitError)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -125,5 +125,12 @@ RSpec.describe Gitlab::Changelog::Template::Compiler do
|
|||
|
||||
expect(compile(input)).to eq(input)
|
||||
end
|
||||
|
||||
it 'ignores malicious code that makes use of whitespace' do
|
||||
input = "x<\\\n%::Kernel.system(\"id\")%>"
|
||||
|
||||
expect(Kernel).not_to receive(:system).with('id')
|
||||
expect(compile(input)).to eq('x<%::Kernel.system("id")%>')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,221 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
|
||||
include Database::TriggerHelpers
|
||||
|
||||
let(:migration) do
|
||||
ActiveRecord::Migration.new.extend(described_class)
|
||||
end
|
||||
|
||||
before do
|
||||
allow(migration).to receive(:puts)
|
||||
end
|
||||
|
||||
shared_examples_for 'Setting up to rename a column' do
|
||||
let(:model) { Class.new(ActiveRecord::Base) }
|
||||
|
||||
before do
|
||||
model.table_name = :test_table
|
||||
end
|
||||
|
||||
context 'when called inside a transaction block' do
|
||||
before do
|
||||
allow(migration).to receive(:transaction_open?).and_return(true)
|
||||
end
|
||||
|
||||
it 'raises an error' do
|
||||
expect do
|
||||
migration.public_send(operation, :test_table, :original, :renamed)
|
||||
end.to raise_error("#{operation} can not be run inside a transaction")
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the existing column has a default value' do
|
||||
before do
|
||||
migration.change_column_default :test_table, existing_column, 'default value'
|
||||
end
|
||||
|
||||
it 'raises an error' do
|
||||
expect do
|
||||
migration.public_send(operation, :test_table, :original, :renamed)
|
||||
end.to raise_error("#{operation} does not currently support columns with default values")
|
||||
end
|
||||
end
|
||||
|
||||
context 'when passing a batch column' do
|
||||
context 'when the batch column does not exist' do
|
||||
it 'raises an error' do
|
||||
expect do
|
||||
migration.public_send(operation, :test_table, :original, :renamed, batch_column_name: :missing)
|
||||
end.to raise_error('Column missing does not exist on test_table')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the batch column does exist' do
|
||||
it 'passes it when creating the column' do
|
||||
expect(migration).to receive(:create_column_from)
|
||||
.with(:test_table, existing_column, added_column, type: nil, batch_column_name: :status)
|
||||
.and_call_original
|
||||
|
||||
migration.public_send(operation, :test_table, :original, :renamed, batch_column_name: :status)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
it 'creates the renamed column, syncing existing data' do
|
||||
existing_record_1 = model.create!(status: 0, existing_column => 'existing')
|
||||
existing_record_2 = model.create!(status: 0, existing_column => nil)
|
||||
|
||||
migration.send(operation, :test_table, :original, :renamed)
|
||||
model.reset_column_information
|
||||
|
||||
expect(migration.column_exists?(:test_table, added_column)).to eq(true)
|
||||
|
||||
expect(existing_record_1.reload).to have_attributes(status: 0, original: 'existing', renamed: 'existing')
|
||||
expect(existing_record_2.reload).to have_attributes(status: 0, original: nil, renamed: nil)
|
||||
end
|
||||
|
||||
it 'installs triggers to sync new data' do
|
||||
migration.public_send(operation, :test_table, :original, :renamed)
|
||||
model.reset_column_information
|
||||
|
||||
new_record_1 = model.create!(status: 1, original: 'first')
|
||||
new_record_2 = model.create!(status: 1, renamed: 'second')
|
||||
|
||||
expect(new_record_1.reload).to have_attributes(status: 1, original: 'first', renamed: 'first')
|
||||
expect(new_record_2.reload).to have_attributes(status: 1, original: 'second', renamed: 'second')
|
||||
|
||||
new_record_1.update!(original: 'updated')
|
||||
new_record_2.update!(renamed: nil)
|
||||
|
||||
expect(new_record_1.reload).to have_attributes(status: 1, original: 'updated', renamed: 'updated')
|
||||
expect(new_record_2.reload).to have_attributes(status: 1, original: nil, renamed: nil)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#rename_column_concurrently' do
|
||||
before do
|
||||
allow(migration).to receive(:transaction_open?).and_return(false)
|
||||
|
||||
migration.create_table :test_table do |t|
|
||||
t.integer :status, null: false
|
||||
t.text :original
|
||||
t.text :other_column
|
||||
end
|
||||
end
|
||||
|
||||
it_behaves_like 'Setting up to rename a column' do
|
||||
let(:operation) { :rename_column_concurrently }
|
||||
let(:existing_column) { :original }
|
||||
let(:added_column) { :renamed }
|
||||
end
|
||||
|
||||
context 'when the column to rename does not exist' do
|
||||
it 'raises an error' do
|
||||
expect do
|
||||
migration.rename_column_concurrently :test_table, :missing_column, :renamed
|
||||
end.to raise_error('Column missing_column does not exist on test_table')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#undo_cleanup_concurrent_column_rename' do
|
||||
before do
|
||||
allow(migration).to receive(:transaction_open?).and_return(false)
|
||||
|
||||
migration.create_table :test_table do |t|
|
||||
t.integer :status, null: false
|
||||
t.text :other_column
|
||||
t.text :renamed
|
||||
end
|
||||
end
|
||||
|
||||
it_behaves_like 'Setting up to rename a column' do
|
||||
let(:operation) { :undo_cleanup_concurrent_column_rename }
|
||||
let(:existing_column) { :renamed }
|
||||
let(:added_column) { :original }
|
||||
end
|
||||
|
||||
context 'when the renamed column does not exist' do
|
||||
it 'raises an error' do
|
||||
expect do
|
||||
migration.undo_cleanup_concurrent_column_rename :test_table, :original, :missing_column
|
||||
end.to raise_error('Column missing_column does not exist on test_table')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples_for 'Cleaning up from renaming a column' do
|
||||
let(:connection) { migration.connection }
|
||||
|
||||
before do
|
||||
allow(migration).to receive(:transaction_open?).and_return(false)
|
||||
|
||||
migration.create_table :test_table do |t|
|
||||
t.integer :status, null: false
|
||||
t.text :original
|
||||
t.text :other_column
|
||||
end
|
||||
|
||||
migration.rename_column_concurrently :test_table, :original, :renamed
|
||||
end
|
||||
|
||||
context 'when the helper is called repeatedly' do
|
||||
before do
|
||||
migration.public_send(operation, :test_table, :original, :renamed)
|
||||
end
|
||||
|
||||
it 'does not make repeated attempts to cleanup' do
|
||||
expect(migration).not_to receive(:remove_column)
|
||||
|
||||
expect do
|
||||
migration.public_send(operation, :test_table, :original, :renamed)
|
||||
end.not_to raise_error
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the renamed column exists' do
|
||||
let(:triggers) do
|
||||
[
|
||||
['trigger_7cc71f92fd63', 'function_for_trigger_7cc71f92fd63', before: 'insert'],
|
||||
['trigger_f1a1f619636a', 'function_for_trigger_f1a1f619636a', before: 'update'],
|
||||
['trigger_769a49938884', 'function_for_trigger_769a49938884', before: 'update']
|
||||
]
|
||||
end
|
||||
|
||||
it 'removes the sync triggers and renamed columns' do
|
||||
triggers.each do |(trigger_name, function_name, event)|
|
||||
expect_function_to_exist(function_name)
|
||||
expect_valid_function_trigger(:test_table, trigger_name, function_name, event)
|
||||
end
|
||||
|
||||
expect(migration.column_exists?(:test_table, added_column)).to eq(true)
|
||||
|
||||
migration.public_send(operation, :test_table, :original, :renamed)
|
||||
|
||||
expect(migration.column_exists?(:test_table, added_column)).to eq(false)
|
||||
|
||||
triggers.each do |(trigger_name, function_name, _)|
|
||||
expect_trigger_not_to_exist(:test_table, trigger_name)
|
||||
expect_function_not_to_exist(function_name)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#undo_rename_column_concurrently' do
|
||||
it_behaves_like 'Cleaning up from renaming a column' do
|
||||
let(:operation) { :undo_rename_column_concurrently }
|
||||
let(:added_column) { :renamed }
|
||||
end
|
||||
end
|
||||
|
||||
describe '#cleanup_concurrent_column_rename' do
|
||||
it_behaves_like 'Cleaning up from renaming a column' do
|
||||
let(:operation) { :cleanup_concurrent_column_rename }
|
||||
let(:added_column) { :original }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -231,4 +231,22 @@ RSpec.describe Gitlab::Patch::Prependable do
|
|||
.to raise_error(described_class::MultiplePrependedBlocks)
|
||||
end
|
||||
end
|
||||
|
||||
describe 'the extra hack for override verification' do
|
||||
context 'when ENV["STATIC_VERIFICATION"] is not defined' do
|
||||
it 'does not extend ClassMethods onto the defining module' do
|
||||
expect(ee).not_to respond_to(:class_name)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when ENV["STATIC_VERIFICATION"] is defined' do
|
||||
before do
|
||||
stub_env('STATIC_VERIFICATION', 'true')
|
||||
end
|
||||
|
||||
it 'does extend ClassMethods onto the defining module' do
|
||||
expect(ee).to respond_to(:class_name)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -2,6 +2,9 @@
|
|||
|
||||
require 'fast_spec_helper'
|
||||
|
||||
# Patching ActiveSupport::Concern
|
||||
require_relative '../../../../config/initializers/0_as_concern'
|
||||
|
||||
RSpec.describe Gitlab::Utils::Override do
|
||||
let(:base) do
|
||||
Struct.new(:good) do
|
||||
|
|
@ -164,6 +167,70 @@ RSpec.describe Gitlab::Utils::Override do
|
|||
|
||||
it_behaves_like 'checking as intended, nothing was overridden'
|
||||
end
|
||||
|
||||
context 'when ActiveSupport::Concern and class_methods are used' do
|
||||
# We need to give module names before using Override
|
||||
let(:base) { stub_const('Base', Module.new) }
|
||||
let(:extension) { stub_const('Extension', Module.new) }
|
||||
|
||||
def define_base(method_name:)
|
||||
base.module_eval do
|
||||
extend ActiveSupport::Concern
|
||||
|
||||
class_methods do
|
||||
define_method(method_name) do
|
||||
:f
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def define_extension(method_name:)
|
||||
extension.module_eval do
|
||||
extend ActiveSupport::Concern
|
||||
|
||||
class_methods do
|
||||
extend Gitlab::Utils::Override
|
||||
|
||||
override method_name
|
||||
define_method(method_name) do
|
||||
:g
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when it is defining a overriding method' do
|
||||
before do
|
||||
define_base(method_name: :f)
|
||||
define_extension(method_name: :f)
|
||||
|
||||
base.prepend(extension)
|
||||
end
|
||||
|
||||
it 'verifies' do
|
||||
expect(base.f).to eq(:g)
|
||||
|
||||
described_class.verify!
|
||||
end
|
||||
end
|
||||
|
||||
context 'when it is not defining a overriding method' do
|
||||
before do
|
||||
define_base(method_name: :f)
|
||||
define_extension(method_name: :g)
|
||||
|
||||
base.prepend(extension)
|
||||
end
|
||||
|
||||
it 'raises NotImplementedError' do
|
||||
expect(base.f).to eq(:f)
|
||||
|
||||
expect { described_class.verify! }
|
||||
.to raise_error(NotImplementedError)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when STATIC_VERIFICATION is not set' do
@ -610,4 +610,102 @@ RSpec.describe API::Repositories do
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'POST /projects/:id/repository/changelog' do
|
||||
context 'when the changelog_api feature flag is enabled' do
|
||||
it 'generates the changelog for a version' do
|
||||
spy = instance_spy(Repositories::ChangelogService)
|
||||
|
||||
allow(Repositories::ChangelogService)
|
||||
.to receive(:new)
|
||||
.with(
|
||||
project,
|
||||
user,
|
||||
version: '1.0.0',
|
||||
from: 'foo',
|
||||
to: 'bar',
|
||||
date: DateTime.new(2020, 1, 1),
|
||||
branch: 'kittens',
|
||||
trailer: 'Foo',
|
||||
file: 'FOO.md',
|
||||
message: 'Commit message'
|
||||
)
|
||||
.and_return(spy)
|
||||
|
||||
allow(spy).to receive(:execute)
|
||||
|
||||
post(
|
||||
api("/projects/#{project.id}/repository/changelog", user),
|
||||
params: {
|
||||
version: '1.0.0',
|
||||
from: 'foo',
|
||||
to: 'bar',
|
||||
date: '2020-01-01',
|
||||
branch: 'kittens',
|
||||
trailer: 'Foo',
|
||||
file: 'FOO.md',
|
||||
message: 'Commit message'
|
||||
}
|
||||
)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
end
|
||||
|
||||
it 'produces an error when generating the changelog fails' do
|
||||
spy = instance_spy(Repositories::ChangelogService)
|
||||
|
||||
allow(Repositories::ChangelogService)
|
||||
.to receive(:new)
|
||||
.with(
|
||||
project,
|
||||
user,
|
||||
version: '1.0.0',
|
||||
from: 'foo',
|
||||
to: 'bar',
|
||||
date: DateTime.new(2020, 1, 1),
|
||||
branch: 'kittens',
|
||||
trailer: 'Foo',
|
||||
file: 'FOO.md',
|
||||
message: 'Commit message'
|
||||
)
|
||||
.and_return(spy)
|
||||
|
||||
allow(spy)
|
||||
.to receive(:execute)
|
||||
.and_raise(Gitlab::Changelog::Committer::CommitError.new('oops'))
|
||||
|
||||
post(
|
||||
api("/projects/#{project.id}/repository/changelog", user),
|
||||
params: {
|
||||
version: '1.0.0',
|
||||
from: 'foo',
|
||||
to: 'bar',
|
||||
date: '2020-01-01',
|
||||
branch: 'kittens',
|
||||
trailer: 'Foo',
|
||||
file: 'FOO.md',
|
||||
message: 'Commit message'
|
||||
}
|
||||
)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:internal_server_error)
|
||||
expect(json_response['message']).to eq('Failed to generate the changelog: oops')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the changelog_api feature flag is disabled' do
|
||||
before do
|
||||
stub_feature_flags(changelog_api: false)
|
||||
end
|
||||
|
||||
it 'responds with a 404 Not Found' do
|
||||
post(
|
||||
api("/projects/#{project.id}/repository/changelog", user),
|
||||
params: { version: '1.0.0', from: 'foo', to: 'bar' }
|
||||
)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,74 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Repositories::ChangelogService do
|
||||
describe '#execute' do
|
||||
it 'generates and commits a changelog section' do
|
||||
project = create(:project, :empty_repo)
|
||||
creator = project.creator
|
||||
author1 = create(:user)
|
||||
author2 = create(:user)
|
||||
|
||||
project.add_maintainer(author1)
|
||||
project.add_maintainer(author2)
|
||||
|
||||
mr1 = create(:merge_request, :merged, target_project: project)
|
||||
mr2 = create(:merge_request, :merged, target_project: project)
|
||||
|
||||
# The range of commits ignores the first commit, but includes the last
|
||||
# commit. To ensure both the commits below are included, we must create an
|
||||
# extra commit.
|
||||
#
|
||||
# In the real world, the start commit of the range will be the last commit
|
||||
# of the previous release, so ignoring that is expected and desired.
|
||||
sha1 = create_commit(
|
||||
project,
|
||||
creator,
|
||||
commit_message: 'Initial commit',
|
||||
actions: [{ action: 'create', content: 'test', file_path: 'README.md' }]
|
||||
)
|
||||
|
||||
sha2 = create_commit(
|
||||
project,
|
||||
author1,
|
||||
commit_message: "Title 1\n\nChangelog: feature",
|
||||
actions: [{ action: 'create', content: 'foo', file_path: 'a.txt' }]
|
||||
)
|
||||
|
||||
sha3 = create_commit(
|
||||
project,
|
||||
author2,
|
||||
commit_message: "Title 2\n\nChangelog: feature",
|
||||
actions: [{ action: 'create', content: 'bar', file_path: 'b.txt' }]
|
||||
)
|
||||
|
||||
commit1 = project.commit(sha2)
|
||||
commit2 = project.commit(sha3)
|
||||
|
||||
allow(MergeRequestDiffCommit)
|
||||
.to receive(:oldest_merge_request_id_per_commit)
|
||||
.with(project.id, [commit2.id, commit1.id])
|
||||
.and_return([
|
||||
{ sha: sha2, merge_request_id: mr1.id },
|
||||
{ sha: sha3, merge_request_id: mr2.id }
|
||||
])
|
||||
|
||||
recorder = ActiveRecord::QueryRecorder.new do
|
||||
described_class
|
||||
.new(project, creator, version: '1.0.0', from: sha1, to: sha3)
|
||||
.execute
|
||||
end
|
||||
|
||||
changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
|
||||
|
||||
expect(recorder.count).to eq(10)
|
||||
expect(changelog).to include('Title 1', 'Title 2')
|
||||
end
|
||||
end
|
||||
|
||||
def create_commit(project, user, params)
|
||||
params = { start_branch: 'master', branch_name: 'master' }.merge(params)
|
||||
Files::MultiService.new(project, user, params).execute.fetch(:result)
|
||||
end
|
||||
end
|
||||