Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
2a501f63df
commit
38e6d92913
|
|
@ -0,0 +1,10 @@
|
|||
## Purpose of Revert
|
||||
|
||||
<!-- Please link to the relevant incident -->
|
||||
|
||||
### Check-list
|
||||
|
||||
- [ ] Create an issue to reinstate the merge request and assign it to the author of the reverted merge request.
|
||||
- [ ] If the revert is to resolve a ['broken master' incident](https://about.gitlab.com/handbook/engineering/workflow/#broken-master), please read through the [Responsibilities of the Broken 'Master' resolution DRI](https://about.gitlab.com/handbook/engineering/workflow/#responsibilities-of-the-resolution-dri)
|
||||
|
||||
/label ~"pipeline:expedite-master-fixing" ~"master:broken"
|
||||
|
|
@ -5,14 +5,7 @@ Database/MultipleDatabases:
|
|||
- 'db/post_migrate/20210811122206_update_external_project_bots.rb'
|
||||
- 'db/post_migrate/20210812013042_remove_duplicate_project_authorizations.rb'
|
||||
- 'ee/spec/services/ee/merge_requests/update_service_spec.rb'
|
||||
- 'spec/db/schema_spec.rb'
|
||||
- 'spec/initializers/database_config_spec.rb'
|
||||
- 'spec/lib/gitlab/database_spec.rb'
|
||||
- 'spec/lib/gitlab/metrics/subscribers/active_record_spec.rb'
|
||||
- 'spec/lib/gitlab/profiler_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/joins_spec.rb'
|
||||
- 'spec/support/caching.rb'
|
||||
- 'spec/support/gitlab/usage/metrics_instrumentation_shared_examples.rb'
|
||||
- 'spec/support/helpers/database/database_helpers.rb'
|
||||
- 'spec/support/helpers/database/table_schema_helpers.rb'
|
||||
- 'spec/support/helpers/migrations_helpers.rb'
|
||||
|
|
|
|||
|
|
@ -4699,7 +4699,6 @@ Layout/LineLength:
|
|||
- 'spec/lib/gitlab/usage/metrics/instrumentations/redis_hll_metric_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/joins_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/metrics/query_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/service_ping/payload_keys_processor_spec.rb'
|
||||
- 'spec/lib/gitlab/usage/service_ping_report_spec.rb'
|
||||
|
|
|
|||
|
|
@ -52,7 +52,6 @@ Style/HashEachMethods:
|
|||
- 'ee/spec/helpers/application_helper_spec.rb'
|
||||
- 'ee/spec/lib/gitlab/geo_spec.rb'
|
||||
- 'lib/api/todos.rb'
|
||||
- 'lib/backup/manager.rb'
|
||||
- 'lib/gitlab/changelog/release.rb'
|
||||
- 'lib/gitlab/ci/parsers.rb'
|
||||
- 'lib/gitlab/ci/reports/test_suite.rb'
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
434051eb74b0043267538213192a790095caf671
|
||||
fc4baac8542255ac55d3c9103a86312f95224402
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ import SafeHtml from '~/vue_shared/directives/safe_html';
|
|||
import { createAlert, VARIANT_INFO } from '~/flash';
|
||||
import { s__, sprintf } from '~/locale';
|
||||
import createEmptyBadge from '../empty_badge';
|
||||
import { PLACEHOLDERS } from '../constants';
|
||||
import Badge from './badge.vue';
|
||||
|
||||
const badgePreviewDelayInMilliseconds = 1500;
|
||||
|
|
@ -50,9 +51,9 @@ export default {
|
|||
return this.badgeInAddForm;
|
||||
},
|
||||
helpText() {
|
||||
const placeholders = ['project_path', 'project_id', 'default_branch', 'commit_sha']
|
||||
.map((placeholder) => `<code>%{${placeholder}}</code>`)
|
||||
.join(', ');
|
||||
const placeholders = PLACEHOLDERS.map((placeholder) => `<code>%{${placeholder}}</code>`).join(
|
||||
', ',
|
||||
);
|
||||
return sprintf(
|
||||
s__('Badges|Supported %{docsLinkStart}variables%{docsLinkEnd}: %{placeholders}'),
|
||||
{
|
||||
|
|
|
|||
|
|
@ -1,2 +1,9 @@
|
|||
export const GROUP_BADGE = 'group';
|
||||
export const PROJECT_BADGE = 'project';
|
||||
export const PLACEHOLDERS = [
|
||||
'project_path',
|
||||
'project_name',
|
||||
'project_id',
|
||||
'default_branch',
|
||||
'commit_sha',
|
||||
];
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
<script>
|
||||
import { GlDropdown, GlDropdownItem, GlSearchBoxByClick } from '@gitlab/ui';
|
||||
import { GlCollapsibleListbox, GlSearchBoxByClick } from '@gitlab/ui';
|
||||
import { mergeUrlParams, visitUrl, getParameterValues } from '~/lib/utils/url_utility';
|
||||
import { s__ } from '~/locale';
|
||||
|
||||
|
|
@ -10,8 +10,7 @@ export default {
|
|||
searchPlaceholder: s__('Branches|Filter by branch name'),
|
||||
},
|
||||
components: {
|
||||
GlDropdown,
|
||||
GlDropdownItem,
|
||||
GlCollapsibleListbox,
|
||||
GlSearchBoxByClick,
|
||||
},
|
||||
inject: ['projectBranchesFilteredPath', 'sortOptions', 'mode'],
|
||||
|
|
@ -28,6 +27,9 @@ export default {
|
|||
selectedSortMethodName() {
|
||||
return this.sortOptions[this.selectedKey];
|
||||
},
|
||||
listboxItems() {
|
||||
return Object.entries(this.sortOptions).map(([value, text]) => ({ value, text }));
|
||||
},
|
||||
},
|
||||
created() {
|
||||
const sortValue = getParameterValues('sort');
|
||||
|
|
@ -42,9 +44,6 @@ export default {
|
|||
}
|
||||
},
|
||||
methods: {
|
||||
isSortMethodSelected(sortKey) {
|
||||
return sortKey === this.selectedKey;
|
||||
},
|
||||
visitUrlFromOption(sortKey) {
|
||||
this.selectedKey = sortKey;
|
||||
const urlParams = {};
|
||||
|
|
@ -70,20 +69,15 @@ export default {
|
|||
data-testid="branch-search"
|
||||
@submit="visitUrlFromOption(selectedKey)"
|
||||
/>
|
||||
<gl-dropdown
|
||||
|
||||
<gl-collapsible-listbox
|
||||
v-if="shouldShowDropdown"
|
||||
:text="selectedSortMethodName"
|
||||
v-model="selectedKey"
|
||||
:items="listboxItems"
|
||||
:toggle-text="selectedSortMethodName"
|
||||
class="gl-mr-3"
|
||||
data-testid="branches-dropdown"
|
||||
>
|
||||
<gl-dropdown-item
|
||||
v-for="(value, key) in sortOptions"
|
||||
:key="key"
|
||||
:is-checked="isSortMethodSelected(key)"
|
||||
is-check-item
|
||||
@click="visitUrlFromOption(key)"
|
||||
>{{ value }}</gl-dropdown-item
|
||||
>
|
||||
</gl-dropdown>
|
||||
@select="visitUrlFromOption(selectedKey)"
|
||||
/>
|
||||
</div>
|
||||
</template>
|
||||
|
|
|
|||
|
|
@ -4,6 +4,8 @@ import { __, s__, sprintf } from '~/locale';
|
|||
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
|
||||
import { labelForStrategy } from '../utils';
|
||||
|
||||
import StrategyLabel from './strategy_label.vue';
|
||||
|
||||
export default {
|
||||
i18n: {
|
||||
deleteLabel: __('Delete'),
|
||||
|
|
@ -15,6 +17,7 @@ export default {
|
|||
GlButton,
|
||||
GlModal,
|
||||
GlToggle,
|
||||
StrategyLabel,
|
||||
},
|
||||
directives: {
|
||||
GlTooltip: GlTooltipDirective,
|
||||
|
|
@ -166,14 +169,13 @@ export default {
|
|||
<div
|
||||
class="table-mobile-content d-flex flex-wrap justify-content-end justify-content-md-start js-feature-flag-environments"
|
||||
>
|
||||
<gl-badge
|
||||
<strategy-label
|
||||
v-for="strategy in featureFlag.strategies"
|
||||
:key="strategy.id"
|
||||
data-testid="strategy-badge"
|
||||
variant="info"
|
||||
class="gl-mr-3 gl-mt-2 gl-white-space-normal gl-text-left gl-px-5"
|
||||
>{{ strategyBadgeText(strategy) }}</gl-badge
|
||||
>
|
||||
data-testid="strategy-label"
|
||||
class="w-100 gl-mr-3 gl-mt-2 gl-white-space-normal gl-text-left"
|
||||
v-bind="strategyBadgeText(strategy)"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,29 @@
|
|||
<script>
|
||||
export default {
|
||||
props: {
|
||||
name: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
scopes: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: null,
|
||||
},
|
||||
parameters: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: null,
|
||||
},
|
||||
},
|
||||
};
|
||||
</script>
|
||||
<template>
|
||||
<div>
|
||||
<strong class="gl-fw-bold"
|
||||
>{{ name }}<span v-if="parameters"> - {{ parameters }}</span
|
||||
>:</strong
|
||||
>
|
||||
<span v-if="scopes">{{ scopes }}</span>
|
||||
</div>
|
||||
</template>
|
||||
|
|
@ -50,17 +50,11 @@ const scopeName = ({ environment_scope: scope }) =>
|
|||
|
||||
export const labelForStrategy = (strategy) => {
|
||||
const { name, parameters } = badgeTextByType[strategy.name];
|
||||
const scopes = strategy.scopes.map(scopeName).join(', ');
|
||||
|
||||
if (parameters) {
|
||||
return sprintf('%{name} - %{parameters}: %{scopes}', {
|
||||
name,
|
||||
parameters: parameters(strategy),
|
||||
scopes: strategy.scopes.map(scopeName).join(', '),
|
||||
});
|
||||
}
|
||||
|
||||
return sprintf('%{name}: %{scopes}', {
|
||||
return {
|
||||
name,
|
||||
scopes: strategy.scopes.map(scopeName).join(', '),
|
||||
});
|
||||
parameters: parameters ? parameters(strategy) : null,
|
||||
scopes,
|
||||
};
|
||||
};
|
||||
|
|
|
|||
|
|
@ -94,7 +94,11 @@ MergeRequest.prototype.initMRBtnListeners = function () {
|
|||
.put(draftToggle.href, null, { params: { format: 'json' } })
|
||||
.then(({ data }) => {
|
||||
draftToggle.removeAttribute('disabled');
|
||||
eventHub.$emit('MRWidgetUpdateRequested');
|
||||
|
||||
if (!window.gon?.features?.realtimeMrStatusChange) {
|
||||
eventHub.$emit('MRWidgetUpdateRequested');
|
||||
}
|
||||
|
||||
MergeRequest.toggleDraftStatus(data.title, wipEvent === 'ready');
|
||||
})
|
||||
.catch(() => {
|
||||
|
|
|
|||
|
|
@ -1,7 +1,15 @@
|
|||
import Vue from 'vue';
|
||||
import VueApollo from 'vue-apollo';
|
||||
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
|
||||
import { joinPaths, webIDEUrl } from '~/lib/utils/url_utility';
|
||||
import WebIdeButton from '~/vue_shared/components/web_ide_link.vue';
|
||||
import createDefaultClient from '~/lib/graphql';
|
||||
|
||||
Vue.use(VueApollo);
|
||||
|
||||
const apolloProvider = new VueApollo({
|
||||
defaultClient: createDefaultClient(),
|
||||
});
|
||||
|
||||
export default ({ el, router }) => {
|
||||
if (!el) return;
|
||||
|
|
@ -14,6 +22,7 @@ export default ({ el, router }) => {
|
|||
new Vue({
|
||||
el,
|
||||
router,
|
||||
apolloProvider,
|
||||
render(h) {
|
||||
return h(WebIdeButton, {
|
||||
props: {
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ export const I18N = {
|
|||
branchNameOrPattern: s__('BranchRules|Branch name or pattern'),
|
||||
branch: s__('BranchRules|Target Branch'),
|
||||
allBranches: s__('BranchRules|All branches'),
|
||||
matchingBranchesLinkTitle: s__('BranchRules|%{total} matching %{subject}'),
|
||||
protectBranchTitle: s__('BranchRules|Protect branch'),
|
||||
protectBranchDescription: s__(
|
||||
'BranchRules|Keep stable branches secure and force developers to use merge requests. %{linkStart}What are protected branches?%{linkEnd}',
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
<script>
|
||||
import { GlSprintf, GlLink, GlLoadingIcon } from '@gitlab/ui';
|
||||
import { sprintf } from '~/locale';
|
||||
import { getParameterByName } from '~/lib/utils/url_utility';
|
||||
import { sprintf, n__ } from '~/locale';
|
||||
import { getParameterByName, mergeUrlParams } from '~/lib/utils/url_utility';
|
||||
import { helpPagePath } from '~/helpers/help_page_helper';
|
||||
import branchRulesQuery from '../../queries/branch_rules_details.query.graphql';
|
||||
import { getAccessLevels } from '../../../utils';
|
||||
|
|
@ -42,6 +42,9 @@ export default {
|
|||
statusChecksPath: {
|
||||
default: '',
|
||||
},
|
||||
branchesPath: {
|
||||
default: '',
|
||||
},
|
||||
},
|
||||
apollo: {
|
||||
project: {
|
||||
|
|
@ -56,6 +59,7 @@ export default {
|
|||
this.branchProtection = branchRule?.branchProtection;
|
||||
this.approvalRules = branchRule?.approvalRules;
|
||||
this.statusChecks = branchRule?.externalStatusChecks?.nodes || [];
|
||||
this.matchingBranchesCount = branchRule?.matchingBranchesCount;
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
@ -65,6 +69,7 @@ export default {
|
|||
branchProtection: {},
|
||||
approvalRules: {},
|
||||
statusChecks: [],
|
||||
matchingBranchesCount: null,
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
|
|
@ -116,6 +121,14 @@ export default {
|
|||
? this.$options.i18n.targetBranch
|
||||
: this.$options.i18n.branchNameOrPattern;
|
||||
},
|
||||
matchingBranchesLinkHref() {
|
||||
return mergeUrlParams({ state: 'all', search: this.branch }, this.branchesPath);
|
||||
},
|
||||
matchingBranchesLinkTitle() {
|
||||
const total = this.matchingBranchesCount;
|
||||
const subject = n__('branch', 'branches', total);
|
||||
return sprintf(this.$options.i18n.matchingBranchesLinkTitle, { total, subject });
|
||||
},
|
||||
approvals() {
|
||||
return this.approvalRules?.nodes || [];
|
||||
},
|
||||
|
|
@ -146,6 +159,10 @@ export default {
|
|||
</div>
|
||||
<code v-else class="gl-mt-2" data-testid="branch">{{ branch }}</code>
|
||||
|
||||
<p v-if="matchingBranchesCount" class="gl-mt-3">
|
||||
<gl-link :href="matchingBranchesLinkHref">{{ matchingBranchesLinkTitle }}</gl-link>
|
||||
</p>
|
||||
|
||||
<h4 class="gl-mb-1 gl-mt-5">{{ $options.i18n.protectBranchTitle }}</h4>
|
||||
<gl-sprintf :message="$options.i18n.protectBranchDescription">
|
||||
<template #link="{ content }">
|
||||
|
|
|
|||
|
|
@ -14,7 +14,13 @@ export default function mountBranchRules(el) {
|
|||
defaultClient: createDefaultClient(),
|
||||
});
|
||||
|
||||
const { projectPath, protectedBranchesPath, approvalRulesPath, statusChecksPath } = el.dataset;
|
||||
const {
|
||||
projectPath,
|
||||
protectedBranchesPath,
|
||||
approvalRulesPath,
|
||||
statusChecksPath,
|
||||
branchesPath,
|
||||
} = el.dataset;
|
||||
|
||||
return new Vue({
|
||||
el,
|
||||
|
|
@ -24,6 +30,7 @@ export default function mountBranchRules(el) {
|
|||
protectedBranchesPath,
|
||||
approvalRulesPath,
|
||||
statusChecksPath,
|
||||
branchesPath,
|
||||
},
|
||||
render(h) {
|
||||
return h(View);
|
||||
|
|
|
|||
|
|
@ -68,6 +68,7 @@ query getBranchRulesDetails($projectPath: ID!) {
|
|||
externalUrl
|
||||
}
|
||||
}
|
||||
matchingBranchesCount
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -189,8 +189,11 @@ export default {
|
|||
.then((data) => {
|
||||
this.mr.setApprovals(data);
|
||||
|
||||
eventHub.$emit('MRWidgetUpdateRequested');
|
||||
eventHub.$emit('ApprovalUpdated');
|
||||
if (!window.gon?.features?.realtimeMrStatusChange) {
|
||||
eventHub.$emit('MRWidgetUpdateRequested');
|
||||
eventHub.$emit('ApprovalUpdated');
|
||||
}
|
||||
|
||||
this.$emit('updated');
|
||||
})
|
||||
.catch(errFn)
|
||||
|
|
|
|||
|
|
@ -20,6 +20,8 @@ import simplePoll from '~/lib/utils/simple_poll';
|
|||
import { __, s__, n__ } from '~/locale';
|
||||
import SmartInterval from '~/smart_interval';
|
||||
import { helpPagePath } from '~/helpers/help_page_helper';
|
||||
import { convertToGraphQLId } from '~/graphql_shared/utils';
|
||||
import readyToMergeSubscription from '~/vue_merge_request_widget/queries/states/ready_to_merge.subscription.graphql';
|
||||
import {
|
||||
AUTO_MERGE_STRATEGIES,
|
||||
WARNING,
|
||||
|
|
@ -87,6 +89,31 @@ export default {
|
|||
this.initPolling();
|
||||
}
|
||||
},
|
||||
subscribeToMore: {
|
||||
document() {
|
||||
return readyToMergeSubscription;
|
||||
},
|
||||
skip() {
|
||||
return !this.mr?.id || this.loading || !window.gon?.features?.realtimeMrStatusChange;
|
||||
},
|
||||
variables() {
|
||||
return {
|
||||
issuableId: convertToGraphQLId('MergeRequest', this.mr?.id),
|
||||
};
|
||||
},
|
||||
updateQuery(
|
||||
_,
|
||||
{
|
||||
subscriptionData: {
|
||||
data: { mergeRequestMergeStatusUpdated },
|
||||
},
|
||||
},
|
||||
) {
|
||||
if (mergeRequestMergeStatusUpdated) {
|
||||
this.state = mergeRequestMergeStatusUpdated;
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
components: {
|
||||
|
|
|
|||
|
|
@ -94,6 +94,7 @@ export default {
|
|||
errors: [],
|
||||
mergeRequest: {
|
||||
__typename: 'MergeRequest',
|
||||
id: this.mr.issuableId,
|
||||
mergeableDiscussionsState: true,
|
||||
title: this.mr.title,
|
||||
draft: false,
|
||||
|
|
@ -111,7 +112,10 @@ export default {
|
|||
}) => {
|
||||
toast(__('Marked as ready. Merging is now allowed.'));
|
||||
$('.merge-request .detail-page-description .title').text(title);
|
||||
eventHub.$emit('MRWidgetUpdateRequested');
|
||||
|
||||
if (!window.gon?.features?.realtimeMrStatusChange) {
|
||||
eventHub.$emit('MRWidgetUpdateRequested');
|
||||
}
|
||||
},
|
||||
)
|
||||
.catch(() =>
|
||||
|
|
|
|||
|
|
@ -15,6 +15,7 @@ import notify from '~/lib/utils/notify';
|
|||
import { sprintf, s__, __ } from '~/locale';
|
||||
import Project from '~/pages/projects/project';
|
||||
import SmartInterval from '~/smart_interval';
|
||||
import { convertToGraphQLId } from '~/graphql_shared/utils';
|
||||
import { setFaviconOverlay } from '../lib/utils/favicon';
|
||||
import Loading from './components/loading.vue';
|
||||
import MrWidgetAlertMessage from './components/mr_widget_alert_message.vue';
|
||||
|
|
@ -46,11 +47,13 @@ import { STATE_MACHINE, stateToComponentMap } from './constants';
|
|||
import eventHub from './event_hub';
|
||||
import mergeRequestQueryVariablesMixin from './mixins/merge_request_query_variables';
|
||||
import getStateQuery from './queries/get_state.query.graphql';
|
||||
import getStateSubscription from './queries/get_state.subscription.graphql';
|
||||
import terraformExtension from './extensions/terraform';
|
||||
import accessibilityExtension from './extensions/accessibility';
|
||||
import codeQualityExtension from './extensions/code_quality';
|
||||
import testReportExtension from './extensions/test_report';
|
||||
import ReportWidgetContainer from './components/report_widget_container.vue';
|
||||
import MrWidgetReadyToMerge from './components/states/new_ready_to_merge.vue';
|
||||
|
||||
export default {
|
||||
// False positive i18n lint: https://gitlab.com/gitlab-org/frontend/eslint-plugin-i18n/issues/25
|
||||
|
|
@ -76,7 +79,7 @@ export default {
|
|||
MrWidgetNothingToMerge: NothingToMergeState,
|
||||
MrWidgetNotAllowed: NotAllowedState,
|
||||
MrWidgetMissingBranch: MissingBranchState,
|
||||
MrWidgetReadyToMerge: () => import('./components/states/new_ready_to_merge.vue'),
|
||||
MrWidgetReadyToMerge,
|
||||
ShaMismatch,
|
||||
MrWidgetChecking: CheckingState,
|
||||
MrWidgetUnresolvedDiscussions: UnresolvedDiscussionsState,
|
||||
|
|
@ -108,6 +111,31 @@ export default {
|
|||
this.loading = false;
|
||||
}
|
||||
},
|
||||
subscribeToMore: {
|
||||
document() {
|
||||
return getStateSubscription;
|
||||
},
|
||||
skip() {
|
||||
return !this.mr?.id || this.loading || !window.gon?.features?.realtimeMrStatusChange;
|
||||
},
|
||||
variables() {
|
||||
return {
|
||||
issuableId: convertToGraphQLId('MergeRequest', this.mr?.id),
|
||||
};
|
||||
},
|
||||
updateQuery(
|
||||
_,
|
||||
{
|
||||
subscriptionData: {
|
||||
data: { mergeRequestMergeStatusUpdated },
|
||||
},
|
||||
},
|
||||
) {
|
||||
if (mergeRequestMergeStatusUpdated) {
|
||||
this.mr.setGraphqlSubscriptionData(mergeRequestMergeStatusUpdated);
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
mixins: [mergeRequestQueryVariablesMixin],
|
||||
|
|
@ -128,6 +156,7 @@ export default {
|
|||
machineState: store?.machineValue || STATE_MACHINE.definition.initial,
|
||||
loading: true,
|
||||
recomputeComponentName: 0,
|
||||
issuableId: false,
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,7 @@
|
|||
subscription getStateSubscription($issuableId: IssuableID!) {
|
||||
mergeRequestMergeStatusUpdated(issuableId: $issuableId) {
|
||||
... on MergeRequest {
|
||||
detailedMergeStatus
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,44 +1,11 @@
|
|||
#import "./ready_to_merge_merge_request.fragment.graphql"
|
||||
|
||||
fragment ReadyToMerge on Project {
|
||||
id
|
||||
onlyAllowMergeIfPipelineSucceeds
|
||||
mergeRequestsFfOnlyEnabled
|
||||
squashReadOnly
|
||||
mergeRequest(iid: $iid) {
|
||||
id
|
||||
autoMergeEnabled
|
||||
shouldRemoveSourceBranch
|
||||
forceRemoveSourceBranch
|
||||
defaultMergeCommitMessage
|
||||
defaultSquashCommitMessage
|
||||
squash
|
||||
squashOnMerge
|
||||
availableAutoMergeStrategies
|
||||
hasCi
|
||||
mergeable
|
||||
mergeWhenPipelineSucceeds
|
||||
commitCount
|
||||
diffHeadSha
|
||||
userPermissions {
|
||||
canMerge
|
||||
removeSourceBranch
|
||||
updateMergeRequest
|
||||
}
|
||||
targetBranch
|
||||
mergeError
|
||||
commitsWithoutMergeCommits {
|
||||
nodes {
|
||||
id
|
||||
sha
|
||||
shortId
|
||||
title
|
||||
message
|
||||
}
|
||||
}
|
||||
headPipeline {
|
||||
id
|
||||
status
|
||||
path
|
||||
active
|
||||
}
|
||||
...ReadyToMergeMergeRequest
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,9 @@
|
|||
#import "./ready_to_merge_merge_request.fragment.graphql"
|
||||
|
||||
subscription readyToMergeSubscription($issuableId: IssuableID!) {
|
||||
mergeRequestMergeStatusUpdated(issuableId: $issuableId) {
|
||||
... on MergeRequest {
|
||||
...ReadyToMergeMergeRequest
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
fragment ReadyToMergeMergeRequest on MergeRequest {
|
||||
id
|
||||
detailedMergeStatus
|
||||
autoMergeEnabled
|
||||
shouldRemoveSourceBranch
|
||||
forceRemoveSourceBranch
|
||||
defaultMergeCommitMessage
|
||||
defaultSquashCommitMessage
|
||||
squash
|
||||
squashOnMerge
|
||||
availableAutoMergeStrategies
|
||||
hasCi
|
||||
mergeable
|
||||
mergeWhenPipelineSucceeds
|
||||
commitCount
|
||||
diffHeadSha
|
||||
userPermissions {
|
||||
canMerge
|
||||
removeSourceBranch
|
||||
updateMergeRequest
|
||||
}
|
||||
targetBranch
|
||||
mergeError
|
||||
commitsWithoutMergeCommits {
|
||||
nodes {
|
||||
id
|
||||
sha
|
||||
shortId
|
||||
title
|
||||
message
|
||||
}
|
||||
}
|
||||
headPipeline {
|
||||
id
|
||||
status
|
||||
path
|
||||
active
|
||||
}
|
||||
}
|
||||
|
|
@ -30,6 +30,7 @@ export default class MergeRequestStore {
|
|||
this.machineValue = this.stateMachine.value;
|
||||
this.mergeDetailsCollapsed = window.innerWidth < 768;
|
||||
this.mergeError = data.mergeError;
|
||||
this.id = data.id;
|
||||
|
||||
this.setPaths(data);
|
||||
|
||||
|
|
@ -177,6 +178,7 @@ export default class MergeRequestStore {
|
|||
|
||||
this.updateStatusState(mergeRequest.state);
|
||||
|
||||
this.issuableId = mergeRequest.id;
|
||||
this.projectArchived = project.archived;
|
||||
this.onlyAllowMergeIfPipelineSucceeds = project.onlyAllowMergeIfPipelineSucceeds;
|
||||
this.allowMergeOnSkippedPipeline = project.allowMergeOnSkippedPipeline;
|
||||
|
|
@ -206,6 +208,12 @@ export default class MergeRequestStore {
|
|||
this.setState();
|
||||
}
|
||||
|
||||
setGraphqlSubscriptionData(data) {
|
||||
this.detailedMergeStatus = data.detailedMergeStatus;
|
||||
|
||||
this.setState();
|
||||
}
|
||||
|
||||
updateStatusState(state) {
|
||||
if (this.mergeRequestState !== state && badgeState.updateStatus) {
|
||||
badgeState.updateStatus();
|
||||
|
|
|
|||
|
|
@ -1,11 +1,5 @@
|
|||
<script>
|
||||
import {
|
||||
GlDropdown,
|
||||
GlDropdownItem,
|
||||
GlDropdownDivider,
|
||||
GlButton,
|
||||
GlTooltipDirective,
|
||||
} from '@gitlab/ui';
|
||||
import { GlDropdown, GlDropdownItem, GlDropdownDivider, GlButton, GlTooltip } from '@gitlab/ui';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
|
|
@ -13,11 +7,14 @@ export default {
|
|||
GlDropdownItem,
|
||||
GlDropdownDivider,
|
||||
GlButton,
|
||||
},
|
||||
directives: {
|
||||
GlTooltip: GlTooltipDirective,
|
||||
GlTooltip,
|
||||
},
|
||||
props: {
|
||||
id: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: '',
|
||||
},
|
||||
actions: {
|
||||
type: Array,
|
||||
required: true,
|
||||
|
|
@ -37,6 +34,11 @@ export default {
|
|||
required: false,
|
||||
default: 'default',
|
||||
},
|
||||
showActionTooltip: {
|
||||
type: Boolean,
|
||||
required: false,
|
||||
default: true,
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
hasMultipleActions() {
|
||||
|
|
@ -51,6 +53,7 @@ export default {
|
|||
this.$emit('select', action.key);
|
||||
},
|
||||
handleClick(action, evt) {
|
||||
this.$emit('actionClicked', { action });
|
||||
return action.handle?.(evt);
|
||||
},
|
||||
},
|
||||
|
|
@ -58,46 +61,51 @@ export default {
|
|||
</script>
|
||||
|
||||
<template>
|
||||
<gl-dropdown
|
||||
v-if="hasMultipleActions"
|
||||
v-gl-tooltip="selectedAction.tooltip"
|
||||
:text="selectedAction.text"
|
||||
:split-href="selectedAction.href"
|
||||
:variant="variant"
|
||||
:category="category"
|
||||
split
|
||||
data-qa-selector="action_dropdown"
|
||||
@click="handleClick(selectedAction, $event)"
|
||||
>
|
||||
<template #button-content>
|
||||
<span class="gl-dropdown-button-text" v-bind="selectedAction.attrs">
|
||||
{{ selectedAction.text }}
|
||||
</span>
|
||||
</template>
|
||||
<template v-for="(action, index) in actions">
|
||||
<gl-dropdown-item
|
||||
:key="action.key"
|
||||
is-check-item
|
||||
:is-checked="action.key === selectedAction.key"
|
||||
:secondary-text="action.secondaryText"
|
||||
:data-qa-selector="`${action.key}_menu_item`"
|
||||
:data-testid="`action_${action.key}`"
|
||||
@click="handleItemClick(action)"
|
||||
>
|
||||
<span class="gl-font-weight-bold">{{ action.text }}</span>
|
||||
</gl-dropdown-item>
|
||||
<gl-dropdown-divider v-if="index != actions.length - 1" :key="action.key + '_divider'" />
|
||||
</template>
|
||||
</gl-dropdown>
|
||||
<gl-button
|
||||
v-else-if="selectedAction"
|
||||
v-gl-tooltip="selectedAction.tooltip"
|
||||
v-bind="selectedAction.attrs"
|
||||
:variant="variant"
|
||||
:category="category"
|
||||
:href="selectedAction.href"
|
||||
@click="handleClick(selectedAction, $event)"
|
||||
>
|
||||
{{ selectedAction.text }}
|
||||
</gl-button>
|
||||
<span>
|
||||
<gl-dropdown
|
||||
v-if="hasMultipleActions"
|
||||
:id="id"
|
||||
:text="selectedAction.text"
|
||||
:split-href="selectedAction.href"
|
||||
:variant="variant"
|
||||
:category="category"
|
||||
split
|
||||
data-qa-selector="action_dropdown"
|
||||
@click="handleClick(selectedAction, $event)"
|
||||
>
|
||||
<template #button-content>
|
||||
<span class="gl-dropdown-button-text" v-bind="selectedAction.attrs">
|
||||
{{ selectedAction.text }}
|
||||
</span>
|
||||
</template>
|
||||
<template v-for="(action, index) in actions">
|
||||
<gl-dropdown-item
|
||||
:key="action.key"
|
||||
is-check-item
|
||||
:is-checked="action.key === selectedAction.key"
|
||||
:secondary-text="action.secondaryText"
|
||||
:data-qa-selector="`${action.key}_menu_item`"
|
||||
:data-testid="`action_${action.key}`"
|
||||
@click="handleItemClick(action)"
|
||||
>
|
||||
<span class="gl-font-weight-bold">{{ action.text }}</span>
|
||||
</gl-dropdown-item>
|
||||
<gl-dropdown-divider v-if="index != actions.length - 1" :key="action.key + '_divider'" />
|
||||
</template>
|
||||
</gl-dropdown>
|
||||
<gl-button
|
||||
v-else-if="selectedAction"
|
||||
:id="id"
|
||||
v-bind="selectedAction.attrs"
|
||||
:variant="variant"
|
||||
:category="category"
|
||||
:href="selectedAction.href"
|
||||
@click="handleClick(selectedAction, $event)"
|
||||
>
|
||||
{{ selectedAction.text }}
|
||||
</gl-button>
|
||||
<gl-tooltip v-if="selectedAction.tooltip && showActionTooltip" :target="id">
|
||||
{{ selectedAction.tooltip }}
|
||||
</gl-tooltip>
|
||||
</span>
|
||||
</template>
|
||||
|
|
|
|||
|
|
@ -1,9 +1,11 @@
|
|||
<script>
|
||||
import { GlModal, GlSprintf, GlLink } from '@gitlab/ui';
|
||||
import { GlModal, GlSprintf, GlLink, GlPopover } from '@gitlab/ui';
|
||||
import { s__, __ } from '~/locale';
|
||||
import UserCalloutDismisser from '~/vue_shared/components/user_callout_dismisser.vue';
|
||||
import ActionsButton from '~/vue_shared/components/actions_button.vue';
|
||||
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
|
||||
import ConfirmForkModal from '~/vue_shared/components/confirm_fork_modal.vue';
|
||||
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
|
||||
|
||||
const KEY_EDIT = 'edit';
|
||||
const KEY_WEB_IDE = 'webide';
|
||||
|
|
@ -32,9 +34,12 @@ export default {
|
|||
GlModal,
|
||||
GlSprintf,
|
||||
GlLink,
|
||||
GlPopover,
|
||||
ConfirmForkModal,
|
||||
UserCalloutDismisser,
|
||||
},
|
||||
i18n,
|
||||
mixins: [glFeatureFlagsMixin()],
|
||||
props: {
|
||||
isFork: {
|
||||
type: Boolean,
|
||||
|
|
@ -296,6 +301,9 @@ export default {
|
|||
},
|
||||
};
|
||||
},
|
||||
displayVscodeWebIdeCallout() {
|
||||
return this.glFeatures.vscodeWebIde && !this.showEditButton;
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
select(key) {
|
||||
|
|
@ -305,40 +313,66 @@ export default {
|
|||
this[dataKey] = true;
|
||||
},
|
||||
},
|
||||
webIdeButtonId: 'web-ide-link',
|
||||
};
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div class="gl-sm-ml-3">
|
||||
<actions-button
|
||||
:actions="actions"
|
||||
:selected-key="selection"
|
||||
:variant="isBlob ? 'confirm' : 'default'"
|
||||
:category="isBlob ? 'primary' : 'secondary'"
|
||||
@select="select"
|
||||
/>
|
||||
<local-storage-sync
|
||||
storage-key="gl-web-ide-button-selected"
|
||||
:value="selection"
|
||||
as-string
|
||||
@input="select"
|
||||
/>
|
||||
<gl-modal
|
||||
v-if="computedShowGitpodButton && !gitpodEnabled"
|
||||
v-model="showEnableGitpodModal"
|
||||
v-bind="enableGitpodModalProps"
|
||||
>
|
||||
<gl-sprintf :message="$options.i18n.modal.content">
|
||||
<template #link="{ content }">
|
||||
<gl-link :href="userPreferencesGitpodPath">{{ content }}</gl-link>
|
||||
</template>
|
||||
</gl-sprintf>
|
||||
</gl-modal>
|
||||
<confirm-fork-modal
|
||||
v-if="showWebIdeButton || showEditButton"
|
||||
v-model="showForkModal"
|
||||
:modal-id="forkModalId"
|
||||
:fork-path="forkPath"
|
||||
/>
|
||||
</div>
|
||||
<user-callout-dismisser :skip-query="!displayVscodeWebIdeCallout" feature-name="vscode_web_ide">
|
||||
<template #default="{ dismiss, shouldShowCallout }">
|
||||
<div class="gl-sm-ml-3">
|
||||
<actions-button
|
||||
:id="$options.webIdeButtonId"
|
||||
:actions="actions"
|
||||
:selected-key="selection"
|
||||
:variant="isBlob ? 'confirm' : 'default'"
|
||||
:category="isBlob ? 'primary' : 'secondary'"
|
||||
:show-action-tooltip="!displayVscodeWebIdeCallout || !shouldShowCallout"
|
||||
@select="select"
|
||||
@actionClicked="dismiss"
|
||||
/>
|
||||
<local-storage-sync
|
||||
storage-key="gl-web-ide-button-selected"
|
||||
:value="selection"
|
||||
as-string
|
||||
@input="select"
|
||||
/>
|
||||
<gl-modal
|
||||
v-if="computedShowGitpodButton && !gitpodEnabled"
|
||||
v-model="showEnableGitpodModal"
|
||||
v-bind="enableGitpodModalProps"
|
||||
>
|
||||
<gl-sprintf :message="$options.i18n.modal.content">
|
||||
<template #link="{ content }">
|
||||
<gl-link :href="userPreferencesGitpodPath">{{ content }}</gl-link>
|
||||
</template>
|
||||
</gl-sprintf>
|
||||
</gl-modal>
|
||||
<confirm-fork-modal
|
||||
v-if="showWebIdeButton || showEditButton"
|
||||
v-model="showForkModal"
|
||||
:modal-id="forkModalId"
|
||||
:fork-path="forkPath"
|
||||
/>
|
||||
<gl-popover
|
||||
v-if="displayVscodeWebIdeCallout"
|
||||
:target="$options.webIdeButtonId"
|
||||
:show="shouldShowCallout"
|
||||
show-close-button
|
||||
triggers="manual"
|
||||
@close-button-clicked="dismiss"
|
||||
>
|
||||
<template #title>
|
||||
{{ __('Try out the new Web IDE') }}
|
||||
</template>
|
||||
|
||||
{{
|
||||
__(
|
||||
'VS Code in your browser. View code and make changes from the same UI as in your local IDE 🎉',
|
||||
)
|
||||
}}
|
||||
</gl-popover>
|
||||
</div>
|
||||
</template>
|
||||
</user-callout-dismisser>
|
||||
</template>
|
||||
|
|
|
|||
|
|
@ -70,6 +70,12 @@ class GraphqlController < ApplicationController
|
|||
end
|
||||
end
|
||||
|
||||
rescue_from Gitlab::Auth::TooManyIps do |exception|
|
||||
log_exception(exception)
|
||||
|
||||
render_error(exception.message, status: :forbidden)
|
||||
end
|
||||
|
||||
rescue_from Gitlab::Graphql::Variables::Invalid do |exception|
|
||||
render_error(exception.message, status: :unprocessable_entity)
|
||||
end
|
||||
|
|
|
|||
|
|
@ -40,6 +40,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
|
|||
push_frontend_feature_flag(:mr_review_submit_comment, project)
|
||||
push_frontend_feature_flag(:mr_experience_survey, project)
|
||||
push_frontend_feature_flag(:realtime_reviewers, project)
|
||||
push_frontend_feature_flag(:realtime_mr_status_change, project)
|
||||
end
|
||||
|
||||
before_action do
|
||||
|
|
|
|||
|
|
@ -79,7 +79,7 @@ module Projects
|
|||
return {
|
||||
error: true,
|
||||
message: _('Validations failed.'),
|
||||
service_response: integration.errors.full_messages.join(','),
|
||||
service_response: integration.errors.full_messages.join(', '),
|
||||
test_failed: false
|
||||
}
|
||||
end
|
||||
|
|
@ -90,7 +90,7 @@ module Projects
|
|||
return {
|
||||
error: true,
|
||||
message: s_('Integrations|Connection failed. Check your integration settings.'),
|
||||
service_response: result[:message].to_s,
|
||||
service_response: result[:result].to_s,
|
||||
test_failed: true
|
||||
}
|
||||
end
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ class Badge < ApplicationRecord
|
|||
# the placeholder is found.
|
||||
PLACEHOLDERS = {
|
||||
'project_path' => :full_path,
|
||||
'project_name' => :name,
|
||||
'project_id' => :id,
|
||||
'default_branch' => :default_branch,
|
||||
'commit_sha' => ->(project) { project.commit&.sha }
|
||||
|
|
|
|||
|
|
@ -55,6 +55,12 @@ class GroupMember < Member
|
|||
{ group: group }
|
||||
end
|
||||
|
||||
def last_owner_of_the_group?
|
||||
return false unless access_level == Gitlab::Access::OWNER
|
||||
|
||||
group.member_last_owner?(self) || group.member_last_blocked_owner?(self)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
override :refresh_member_authorized_projects
|
||||
|
|
|
|||
|
|
@ -96,6 +96,10 @@ class ProjectMember < Member
|
|||
{ project: project }
|
||||
end
|
||||
|
||||
def holder_of_the_personal_namespace?
|
||||
project.personal_namespace_holder?(user)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
override :access_level_inclusion
|
||||
|
|
|
|||
|
|
@ -63,7 +63,8 @@ module Users
|
|||
project_quality_summary_feedback: 59, # EE-only
|
||||
merge_request_settings_moved_callout: 60,
|
||||
new_top_level_group_alert: 61,
|
||||
artifacts_management_page_feedback_banner: 62
|
||||
artifacts_management_page_feedback_banner: 62,
|
||||
vscode_web_ide: 63
|
||||
}
|
||||
|
||||
validates :feature_name,
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ class GroupMemberPolicy < BasePolicy
|
|||
delegate :group
|
||||
|
||||
with_scope :subject
|
||||
condition(:last_owner) { @subject.group.member_last_owner?(@subject) || @subject.group.member_last_blocked_owner?(@subject) }
|
||||
condition(:last_owner) { @subject.last_owner_of_the_group? }
|
||||
condition(:project_bot) { @subject.user&.project_bot? && @subject.group.member?(@subject.user) }
|
||||
|
||||
desc "Membership is users' own"
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ class ProjectMemberPolicy < BasePolicy
|
|||
delegate { @subject.project }
|
||||
|
||||
condition(:target_is_holder_of_the_personal_namespace, scope: :subject) do
|
||||
@subject.project.personal_namespace_holder?(@subject.user)
|
||||
@subject.holder_of_the_personal_namespace?
|
||||
end
|
||||
|
||||
desc "Membership is users' own access request"
|
||||
|
|
|
|||
|
|
@ -3,6 +3,10 @@
|
|||
class GroupMemberPresenter < MemberPresenter
|
||||
presents ::GroupMember
|
||||
|
||||
def last_owner?
|
||||
member.last_owner_of_the_group?
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def admin_member_permission
|
||||
|
|
|
|||
|
|
@ -37,6 +37,10 @@ class MemberPresenter < Gitlab::View::Presenter::Delegated
|
|||
false
|
||||
end
|
||||
|
||||
def last_owner?
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def admin_member_permission
|
||||
|
|
|
|||
|
|
@ -21,6 +21,12 @@ class ProjectMemberPresenter < MemberPresenter
|
|||
super
|
||||
end
|
||||
|
||||
def last_owner?
|
||||
# all owners of a project in a group are removable.
|
||||
# but in personal projects, the namespace holder is not removable.
|
||||
member.holder_of_the_personal_namespace?
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def admin_member_permission
|
||||
|
|
|
|||
|
|
@ -179,7 +179,7 @@ class ProjectPresenter < Gitlab::View::Presenter::Delegated
|
|||
return if releases_count < 1
|
||||
|
||||
AnchorData.new(true,
|
||||
statistic_icon('rocket') +
|
||||
statistic_icon('deployments') +
|
||||
n_('%{strong_start}%{release_count}%{strong_end} Release', '%{strong_start}%{release_count}%{strong_end} Releases', releases_count).html_safe % {
|
||||
release_count: number_with_delimiter(releases_count),
|
||||
strong_start: '<strong class="project-stat-value">'.html_safe,
|
||||
|
|
|
|||
|
|
@ -23,6 +23,8 @@ class MemberEntity < Grape::Entity
|
|||
member.can_remove?
|
||||
end
|
||||
|
||||
expose :last_owner?, as: :is_last_owner
|
||||
|
||||
expose :is_direct_member do |member, options|
|
||||
member.source == options[:source]
|
||||
end
|
||||
|
|
|
|||
|
|
@ -15,7 +15,26 @@ module PagesDomains
|
|||
pages_domain.update!(auto_ssl_failed: false)
|
||||
end
|
||||
|
||||
PagesDomainSslRenewalWorker.perform_async(pages_domain.id) if updated
|
||||
return unless updated
|
||||
|
||||
PagesDomainSslRenewalWorker.perform_async(pages_domain.id)
|
||||
|
||||
publish_event(pages_domain)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def publish_event(domain)
|
||||
event = PagesDomainUpdatedEvent.new(
|
||||
data: {
|
||||
project_id: domain.project.id,
|
||||
namespace_id: domain.project.namespace_id,
|
||||
root_namespace_id: domain.project.root_namespace.id,
|
||||
domain: domain.domain
|
||||
}
|
||||
)
|
||||
|
||||
Gitlab::EventStore.publish(event)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -36,9 +36,11 @@
|
|||
= render 'shared/group_tips'
|
||||
.gl-mt-5
|
||||
= f.submit _('Create group'), pajamas_button: true
|
||||
= link_to _('Cancel'), admin_groups_path, class: "gl-button btn btn-default btn-cancel"
|
||||
= render Pajamas::ButtonComponent.new(href: admin_groups_path) do
|
||||
= _('Cancel')
|
||||
|
||||
- else
|
||||
.gl-mt-5
|
||||
= f.submit _('Save changes'), data: { qa_selector: 'save_changes_button' }, pajamas_button: true
|
||||
= link_to _('Cancel'), admin_group_path(@group), class: "gl-button btn btn-cancel"
|
||||
= render Pajamas::ButtonComponent.new(href: admin_group_path(@group)) do
|
||||
= _('Cancel')
|
||||
|
|
|
|||
|
|
@ -3,4 +3,4 @@
|
|||
|
||||
%h3.gl-mb-5= s_('BranchRules|Branch rules details')
|
||||
|
||||
#js-branch-rules{ data: { project_path: @project.full_path, protected_branches_path: project_settings_repository_path(@project, anchor: 'js-protected-branches-settings'), approval_rules_path: project_settings_merge_requests_path(@project, anchor: 'js-merge-request-approval-settings'), status_checks_path: project_settings_merge_requests_path(@project, anchor: 'js-merge-request-settings') } }
|
||||
#js-branch-rules{ data: { project_path: @project.full_path, protected_branches_path: project_settings_repository_path(@project, anchor: 'js-protected-branches-settings'), approval_rules_path: project_settings_merge_requests_path(@project, anchor: 'js-merge-request-approval-settings'), status_checks_path: project_settings_merge_requests_path(@project, anchor: 'js-merge-request-settings'), branches_path: project_branches_path(@project) } }
|
||||
|
|
|
|||
|
|
@ -0,0 +1,8 @@
|
|||
---
|
||||
name: realtime_mr_status_change
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/103011
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/385077
|
||||
milestone: '15.7'
|
||||
type: development
|
||||
group: group::code review
|
||||
default_enabled: false
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
---
|
||||
name: validate_allowed_cross_slot_commands
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/105302
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/384909
|
||||
milestone: '15.7'
|
||||
type: development
|
||||
group: group::scalability
|
||||
default_enabled: false
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
---
|
||||
table_name: dependency_list_exports
|
||||
feature_categories:
|
||||
- dependency_scanning
|
||||
- dependency_management
|
||||
description: Dependency list exported data
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104361
|
||||
milestone: '15.7'
|
||||
|
|
|
|||
|
|
@ -294,7 +294,7 @@ control over how the Pages daemon runs and serves content in your environment.
|
|||
| `pages_path` | The directory on disk where pages are stored, defaults to `GITLAB-RAILS/shared/pages`. |
|
||||
| **`pages_nginx[]`** | |
|
||||
| `enable` | Include a virtual host `server{}` block for Pages inside NGINX. Needed for NGINX to proxy traffic back to the Pages daemon. Set to `false` if the Pages daemon should directly receive all requests, for example, when using [custom domains](index.md#custom-domains). |
|
||||
| `FF_ENABLE_PLACEHOLDERS` | Feature flag to enable/disable rewrites (disabled by default). Read the [redirects documentation](../../user/project/pages/redirects.md#feature-flag-for-rewrites) for more information. |
|
||||
| `FF_ENABLE_PLACEHOLDERS` | Feature flag for rewrites (enabled by default). See [Rewrites](../../user/project/pages/redirects.md#rewrites) for more information. |
|
||||
| `use_legacy_storage` | Temporarily-introduced parameter allowing to use legacy domain configuration source and storage. [Removed in 14.3](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/6166). |
|
||||
| `rate_limit_source_ip` | Rate limit per source IP in number of requests per second. Set to `0` to disable this feature. |
|
||||
| `rate_limit_source_ip_burst` | Rate limit per source IP maximum burst allowed per second. |
|
||||
|
|
|
|||
|
|
@ -44,10 +44,6 @@ This section is for links to information elsewhere in the GitLab documentation.
|
|||
|
||||
- Consuming PostgreSQL from [within CI runners](../../ci/services/postgres.md).
|
||||
|
||||
- [Using Slony to update PostgreSQL](../../update/upgrading_postgresql_using_slony.md).
|
||||
- Uses replication to handle PostgreSQL upgrades if the schemas are the same.
|
||||
- Reduces downtime to a short window for switching to the newer version.
|
||||
|
||||
- Managing Omnibus PostgreSQL versions [from the development docs](https://docs.gitlab.com/omnibus/development/managing-postgresql-versions.html).
|
||||
|
||||
- [PostgreSQL scaling](../postgresql/replication_and_failover.md)
|
||||
|
|
|
|||
|
|
@ -22748,6 +22748,7 @@ Name of the feature that the callout is for.
|
|||
| <a id="usercalloutfeaturenameenumunfinished_tag_cleanup_callout"></a>`UNFINISHED_TAG_CLEANUP_CALLOUT` | Callout feature name for unfinished_tag_cleanup_callout. |
|
||||
| <a id="usercalloutfeaturenameenumuser_reached_limit_free_plan_alert"></a>`USER_REACHED_LIMIT_FREE_PLAN_ALERT` | Callout feature name for user_reached_limit_free_plan_alert. |
|
||||
| <a id="usercalloutfeaturenameenumverification_reminder"></a>`VERIFICATION_REMINDER` | Callout feature name for verification_reminder. |
|
||||
| <a id="usercalloutfeaturenameenumvscode_web_ide"></a>`VSCODE_WEB_IDE` | Callout feature name for vscode_web_ide. |
|
||||
| <a id="usercalloutfeaturenameenumweb_ide_alert_dismissed"></a>`WEB_IDE_ALERT_DISMISSED` | Callout feature name for web_ide_alert_dismissed. |
|
||||
| <a id="usercalloutfeaturenameenumweb_ide_ci_environments_guidance"></a>`WEB_IDE_CI_ENVIRONMENTS_GUIDANCE` | Callout feature name for web_ide_ci_environments_guidance. |
|
||||
|
||||
|
|
|
|||
|
|
@ -15,6 +15,7 @@ Badges support placeholders that are replaced in real time in both the link and
|
|||
<!-- vale gitlab.Spelling = NO -->
|
||||
|
||||
- **%{project_path}**: replaced by the project path.
|
||||
- **%{project_name}**: replaced by the project name.
|
||||
- **%{project_id}**: replaced by the project ID.
|
||||
- **%{default_branch}**: replaced by the project default branch.
|
||||
- **%{commit_sha}**: replaced by the last project's commit SHA.
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@ Badges support placeholders that are replaced in real-time in both the link and
|
|||
<!-- vale gitlab.Spelling = NO -->
|
||||
|
||||
- **%{project_path}**: Replaced by the project path.
|
||||
- **%{project_name}**: Replaced by the project name.
|
||||
- **%{project_id}**: Replaced by the project ID.
|
||||
- **%{default_branch}**: Replaced by the project default branch.
|
||||
- **%{commit_sha}**: Replaced by the last project's commit SHA.
|
||||
|
|
|
|||
|
|
@ -13,29 +13,32 @@ Anyone can contribute to the GitLab documentation! You can create a merge reques
|
|||
accomplish their work with GitLab.
|
||||
|
||||
If you are working on a feature or enhancement, use the
|
||||
[feature workflow process described in the GitLab Handbook](https://about.gitlab.com/handbook/product/ux/technical-writing/workflow/#for-a-product-change).
|
||||
[feature workflow process described in the GitLab Handbook](https://about.gitlab.com/handbook/product/ux/technical-writing/workflow/#documentation-for-a-product-change).
|
||||
|
||||
## How to update the docs
|
||||
|
||||
If you are not a GitLab team member, or do not have the Developer role for the GitLab repository, to update GitLab documentation:
|
||||
|
||||
1. Select an issue you'd like to work on.
|
||||
1. Select an [issue](https://about.gitlab.com/handbook/product/ux/technical-writing/#community-contribution-opportunities) you'd like to work on.
|
||||
- You don't need an issue to open a merge request.
|
||||
- For a Hackathon, in the issue, in a comment, mention the person who opened the issue and ask for the issue to be assigned to you.
|
||||
To be fair to other contributors, if you see someone has already asked to work on the issue, choose another issue.
|
||||
If you are looking for issues to work on and don't see any that suit you, you can always fix [Vale](testing.md#vale) issues.
|
||||
1. Go to the [GitLab repository](https://gitlab.com/gitlab-org/gitlab).
|
||||
1. In the top-right, select **Fork**. Forking makes a copy of the repository on GitLab.com.
|
||||
1. In your fork, find the documentation page by going to the `\doc` directory.
|
||||
1. In the top right, select **Fork**. Forking makes a copy of the repository on GitLab.com.
|
||||
1. In your fork, find the documentation page in the `\doc` directory.
|
||||
1. If you know Git, make your changes and open a merge request.
|
||||
If not, follow these steps:
|
||||
1. In the top right, select **Edit**, make the changes, and **Save**.
|
||||
1. From the left menu, select **Merge requests**.
|
||||
1. On the top right, select **Edit** if it is visible. If it is not, select the down arrow (**{chevron-lg-down}**) next to **Open in Web IDE** or **Gitpod**, and select **Edit**.
|
||||
1. In the **Commit message** text box, enter a commit message. Use 3-5 words, start with a capital letter, and do not end with a period.
|
||||
1. Select **Commit changes**.
|
||||
1. On the left sidebar, select **Merge requests**.
|
||||
1. Select **New merge request**.
|
||||
1. For the source branch, select your fork and branch. If you did not create a branch, select `master`.
|
||||
For the target branch, select the [GitLab repository](https://gitlab.com/gitlab-org/gitlab) `master` branch.
|
||||
1. For the commit message, use 3-5 words, start with a capital letter, and do not end with a period.
|
||||
1. Select **Commit changes**. A merge request opens.
|
||||
1. Select **Compare branches and continue**. A new merge request opens.
|
||||
1. Select the **Documentation** template. In the description, write a brief summary of the changes and link to the related issue, if there is one.
|
||||
1. Select **Create merge request**.
|
||||
|
||||
If you need help while working on the page, view:
|
||||
|
||||
|
|
@ -65,7 +68,7 @@ If you are a member of the GitLab Slack workspace, you can request help in `#doc
|
|||
|
||||
When you author an issue or merge request, you must add these labels:
|
||||
|
||||
- A [type label](../contributing/issue_workflow.md#type-labels).
|
||||
- A [type label](../contributing/issue_workflow.md#type-labels), either `~"type::feature"` or `~"type::maintenance"`.
|
||||
- A [stage label](../contributing/issue_workflow.md#stage-labels) and [group label](../contributing/issue_workflow.md#group-labels).
|
||||
For example, `~devops::create` and `~group::source code`.
|
||||
- A `~documentation` [specialization label](../contributing/issue_workflow.md#specialization-labels).
|
||||
|
|
@ -75,7 +78,6 @@ A member of the Technical Writing team adds these labels:
|
|||
- A [documentation scoped label](../../user/project/labels.md#scoped-labels) with the
|
||||
`docs::` prefix. For example, `~docs::improvement`.
|
||||
- The [`~Technical Writing` team label](../contributing/issue_workflow.md#team-labels).
|
||||
- A type label: either `~"type::feature"` or `~"type::maintenance"`.
|
||||
|
||||
## Reviewing and merging
|
||||
|
||||
|
|
|
|||
Binary file not shown.
|
Before Width: | Height: | Size: 12 KiB After Width: | Height: | Size: 32 KiB |
|
|
@ -1307,6 +1307,4 @@ This issue is resolved in GitLab 15.3.3, so customers with the following configu
|
|||
|
||||
## Miscellaneous
|
||||
|
||||
- [Upgrading PostgreSQL Using Slony](upgrading_postgresql_using_slony.md), for
|
||||
upgrading a PostgreSQL database with minimal downtime.
|
||||
- [Managing PostgreSQL extensions](../install/postgresql_extensions.md)
|
||||
|
|
|
|||
|
|
@ -2,479 +2,11 @@
|
|||
stage: Data Stores
|
||||
group: Database
|
||||
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
|
||||
remove_date: '2023-02-28'
|
||||
redirect_to: '../administration/postgresql/replication_and_failover.md'
|
||||
---
|
||||
|
||||
# Upgrading PostgreSQL Using Slony **(FREE SELF)**
|
||||
# Upgrading PostgreSQL Using Slony (removed) **(FREE SELF)**
|
||||
|
||||
This guide describes the steps one can take to upgrade their PostgreSQL database
|
||||
to the latest version without the need for hours of downtime. This guide assumes
|
||||
you have two database servers: one database server running an older version of
|
||||
PostgreSQL (for example, 9.2.18) and one server running a newer version (for example, 9.6.0).
|
||||
|
||||
For this process, a PostgreSQL replication tool called
|
||||
[Slony](https://www.slony.info/) is used. Slony allows replication between different
|
||||
PostgreSQL versions and as such can be used to upgrade a cluster with a minimal
|
||||
amount of downtime.
|
||||
|
||||
This guide often refers to the user `gitlab-psql`, which is the
|
||||
user used to run the various PostgreSQL OS processes. If you are using a
|
||||
different user (for example, `postgres`), replace `gitlab-psql` with the name
|
||||
of said user. This guide also assumes your database is called
|
||||
`gitlabhq_production`. If you happen to use a different database name you should
|
||||
change this accordingly.
|
||||
|
||||
## Database Dumps
|
||||
|
||||
Slony only replicates data and not any schema changes. As a result you must
|
||||
ensure that all databases have the same database structure.
|
||||
|
||||
To do so, generate a dump of the current database. This dump only
|
||||
contains the structure, not any data. To generate this dump run the following
|
||||
command on your active database server:
|
||||
|
||||
```shell
|
||||
sudo -u gitlab-psql /opt/gitlab/embedded/bin/pg_dump -h /var/opt/gitlab/postgresql -p 5432 -U gitlab-psql -s -f /tmp/structure.sql gitlabhq_production
|
||||
```
|
||||
|
||||
If you're not using the Omnibus GitLab package you may have to adjust the paths to
|
||||
`pg_dump` and the PostgreSQL installation directory to match the paths of your
|
||||
configuration.
|
||||
|
||||
After the structure dump is generated, generate another dump for the
|
||||
`schema_migrations` table. This table doesn't have any primary keys and as such
|
||||
can't be replicated by Slony. To generate a dump of the `schema_migrations` table, run the following command on your active database server:
|
||||
|
||||
```shell
|
||||
sudo -u gitlab-psql /opt/gitlab/embedded/bin/pg_dump -h /var/opt/gitlab/postgresql/ -p 5432 -U gitlab-psql -a -t schema_migrations -f /tmp/migrations.sql gitlabhq_production
|
||||
```
|
||||
|
||||
Next, move these files somewhere accessible by the new database
|
||||
server. The easiest way is to download these files to your local system:
|
||||
|
||||
```shell
|
||||
scp your-user@production-database-host:/tmp/*.sql /tmp
|
||||
```
|
||||
|
||||
This copies all the SQL files located in `/tmp` to your local system's
|
||||
`/tmp` directory. Once copied you can safely remove the files from the database
|
||||
server.
|
||||
|
||||
## Installing Slony
|
||||
|
||||
Use Slony to upgrade the database without requiring a long downtime.
|
||||
Slony can be downloaded from <https://www.slony.info/>. If you have installed
|
||||
PostgreSQL using your operating system's package manager you may also be able to
|
||||
install Slony using said package manager.
|
||||
|
||||
When compiling Slony from source you *must* use the following commands to do so:
|
||||
|
||||
```shell
|
||||
./configure --prefix=/path/to/installation/directory --with-perltools --with-pgconfigdir=/path/to/directory/containing/pg_config/bin
|
||||
make
|
||||
make install
|
||||
```
|
||||
|
||||
Omnibus users can use the following commands:
|
||||
|
||||
```shell
|
||||
./configure --prefix=/opt/gitlab/embedded --with-perltools --with-pgconfigdir=/opt/gitlab/embedded/bin
|
||||
make
|
||||
make install
|
||||
```
|
||||
|
||||
This assumes you have installed GitLab into `/opt/gitlab`.
|
||||
|
||||
To test if Slony is installed properly, run the following commands:
|
||||
|
||||
```shell
|
||||
test -f /opt/gitlab/embedded/bin/slonik && echo 'Slony installed' || echo 'Slony not installed'
|
||||
test -f /opt/gitlab/embedded/bin/slonik_init_cluster && echo 'Slony Perl tools are available' || echo 'Slony Perl tools are not available'
|
||||
/opt/gitlab/embedded/bin/slonik -v
|
||||
```
|
||||
|
||||
This assumes Slony was installed to `/opt/gitlab/embedded`. If Slony was
|
||||
installed properly the output of these commands is (the mentioned `slonik`
|
||||
version may be different):
|
||||
|
||||
```plaintext
|
||||
Slony installed
|
||||
Slony Perl tools are available
|
||||
slonik version 2.2.5
|
||||
```
|
||||
|
||||
## Slony User
|
||||
|
||||
Next, set up a PostgreSQL user that Slony can use to replicate your
|
||||
database. To do so, sign in to your production database using `psql` using a
|
||||
super-user account. After signing in, run the following SQL queries:
|
||||
|
||||
```sql
|
||||
CREATE ROLE slony WITH SUPERUSER LOGIN REPLICATION ENCRYPTED PASSWORD 'password string here';
|
||||
ALTER ROLE slony SET statement_timeout TO 0;
|
||||
```
|
||||
|
||||
Make sure you replace "password string here" with an actual password for the
|
||||
user. A password is required. This user must be created on both the old and
|
||||
new database server using the same password.
|
||||
|
||||
After creating the user, be sure to note the password, as the password is needed
|
||||
later.
|
||||
|
||||

## Configuring Slony

You can now start configuring Slony. Slony uses a configuration file for most
of the work, so it is important to set this up with care. Your configuration
specifies where to put log files, how Slony should connect to the databases,
and so on.

First, create some required directories and set the correct permissions. To do
so, run the following commands on both the old and new database server:

```shell
sudo mkdir -p /var/log/gitlab/slony /var/run/slony1 /var/opt/gitlab/postgresql/slony
sudo chown gitlab-psql:root /var/log/gitlab/slony /var/run/slony1 /var/opt/gitlab/postgresql/slony
```

Here `gitlab-psql` is the user used to run the PostgreSQL database processes.
If you are using a different user, replace this with the name of that user.

Now that the directories are in place you can create the configuration file by
using the following template:

```perl
if ($ENV{"SLONYNODES"}) {
    require $ENV{"SLONYNODES"};
} else {
    $CLUSTER_NAME = 'slony_replication';
    $LOGDIR = '/var/log/gitlab/slony';
    $MASTERNODE = 1;
    $DEBUGLEVEL = 2;

    add_node(host => 'OLD_HOST', dbname => 'gitlabhq_production', port =>5432,
        user=>'slony', password=>'SLONY_PASSWORD', node=>1);

    add_node(host => 'NEW_HOST', dbname => 'gitlabhq_production', port =>5432,
        user=>'slony', password=>'SLONY_PASSWORD', node=>2, parent=>1 );
}

$SLONY_SETS = {
    "set1" => {
        "set_id" => 1,
        "table_id" => 1,
        "sequence_id" => 1,
        "pkeyedtables" => [
            TABLES
        ],
    },
};

if ($ENV{"SLONYSET"}) {
    require $ENV{"SLONYSET"};
}

# Please do not add or change anything below this point.
1;
```

Replace the following placeholders in this file to use it:

- `OLD_HOST`: the address of the old database server.
- `NEW_HOST`: the address of the new database server.
- `SLONY_PASSWORD`: the password of the Slony user created earlier.
- `TABLES`: the tables to replicate.

Generate the list of tables to replicate by running the following command on
your old PostgreSQL database:

```shell
sudo gitlab-psql gitlabhq_production -c "select concat('\"', schemaname, '.', tablename, '\",') from pg_catalog.pg_tables where schemaname = 'public' and tableowner = 'gitlab' and tablename != 'schema_migrations' order by tablename asc;" -t
```

If you're not using Omnibus, replace `gitlab-psql` with the appropriate path to
the `psql` executable.

The above command outputs a list of tables in a format that can be copy-pasted
directly into the above configuration file. Make sure to _replace_ `TABLES` with
this output, don't just append it below it. The result looks like this:

```perl
"pkeyedtables" => [
    "public.abuse_reports",
    "public.appearances",
    "public.application_settings",
    ... more rows here ...
]
```

After you have the configuration file generated you must install it on both the
old and new database. To do so, place it in
`/var/opt/gitlab/postgresql/slony/slon_tools.conf` (for which you created the
directory earlier on).

Now that the configuration file is in place, you can _finally_ start replicating
the database. First, set up the schema in the new database by making sure that
the SQL files generated earlier are in the `/tmp` directory of the new server.
After these files are in place, start a `psql` session on this server:

```shell
sudo gitlab-psql gitlabhq_production
```

Now run the following commands:

```plaintext
\i /tmp/structure.sql
\i /tmp/migrations.sql
```

To verify if the structure is in place, close the session (`\q`), start it
again, then run `\d`. If all went well you should see output along the lines of
the following:

```plaintext
                               List of relations
 Schema |                    Name                     |   Type   |    Owner
--------+---------------------------------------------+----------+-------------
 public | abuse_reports                               | table    | gitlab
 public | abuse_reports_id_seq                        | sequence | gitlab
 public | appearances                                 | table    | gitlab
 public | appearances_id_seq                          | sequence | gitlab
 public | application_settings                        | table    | gitlab
 public | application_settings_id_seq                 | sequence | gitlab
 public | approvals                                   | table    | gitlab
 ... more rows here ...
```

Now you can initialize the required tables and other processes for the
replication process. To do so, run the following on the old database:

```shell
sudo -u gitlab-psql /opt/gitlab/embedded/bin/slonik_init_cluster --conf /var/opt/gitlab/postgresql/slony/slon_tools.conf | /opt/gitlab/embedded/bin/slonik
```

If all went well this produces output along the lines of:

```plaintext
<stdin>:10: Set up replication nodes
<stdin>:13: Next: configure paths for each node/origin
<stdin>:16: Replication nodes prepared
<stdin>:17: Please start a slon replication daemon for each node
```

Next, start a replication node on every server. To do so, run the following on
the old database:

```shell
sudo -u gitlab-psql /opt/gitlab/embedded/bin/slon_start 1 --conf /var/opt/gitlab/postgresql/slony/slon_tools.conf
```

This should produce output like the following:

```plaintext
Invoke slon for node 1 - /opt/gitlab/embedded/bin/slon -p /var/run/slony1/slony_replication_node1.pid -s 1000 -d2 slony_replication 'host=192.168.0.7 dbname=gitlabhq_production user=slony port=5432 password=hieng8ezohHuCeiqu0leeghai4aeyahp' > /var/log/gitlab/slony/node1/gitlabhq_production-2016-10-06.log 2>&1 &
Slon successfully started for cluster slony_replication, node node1
PID [26740]
Start the watchdog process as well...
```

Next, run the following command on the _new_ database server:

```shell
sudo -u gitlab-psql /opt/gitlab/embedded/bin/slon_start 2 --conf /var/opt/gitlab/postgresql/slony/slon_tools.conf
```

This produces similar output if all went well.

After Slony starts, you must tell the new database server what it should
replicate. Run the following command on the _new_ database server:

```shell
sudo -u gitlab-psql /opt/gitlab/embedded/bin/slonik_create_set 1 --conf /var/opt/gitlab/postgresql/slony/slon_tools.conf | /opt/gitlab/embedded/bin/slonik
```

This should produce output like the following:

```plaintext
<stdin>:11: Subscription set 1 (set1) created
<stdin>:12: Adding tables to the subscription set
<stdin>:16: Add primary keyed table public.abuse_reports
<stdin>:20: Add primary keyed table public.appearances
<stdin>:24: Add primary keyed table public.application_settings
... more rows here ...
<stdin>:327: Adding sequences to the subscription set
<stdin>:328: All tables added
```

Finally, you can start the replication process by running the following on the
_new_ database server:

```shell
sudo -u gitlab-psql /opt/gitlab/embedded/bin/slonik_subscribe_set 1 2 --conf /var/opt/gitlab/postgresql/slony/slon_tools.conf | /opt/gitlab/embedded/bin/slonik
```

This should produce the following output:

```plaintext
<stdin>:6: Subscribed nodes to set 1
```

At this point the new database server starts replicating the data of the old
database server. This process can take anywhere from a few minutes to hours, if
not days. Unfortunately, Slony itself doesn't provide a reliable way of knowing
when the two databases are in sync. To get an estimate of the progress you can
use the following shell script:

```shell
#!/usr/bin/env bash

set -e

user='slony'
pass='SLONY_PASSWORD'

function main {
  while :
  do
    local source
    local target

    source=$(PGUSER="${user}" PGPASSWORD="${pass}" /opt/gitlab/embedded/bin/psql -h OLD_HOST gitlabhq_production -c "select pg_size_pretty(pg_database_size('gitlabhq_production'));" -t -A)
    target=$(PGUSER="${user}" PGPASSWORD="${pass}" /opt/gitlab/embedded/bin/psql -h NEW_HOST gitlabhq_production -c "select pg_size_pretty(pg_database_size('gitlabhq_production'));" -t -A)

    echo "$(date): ${target} of ${source}" >> progress.log
    echo "$(date): ${target} of ${source}"

    sleep 60
  done
}

main
```

This script compares the sizes of the old and new database every minute,
printing the results to STDOUT as well as logging them to a file. Make sure to
replace `SLONY_PASSWORD`, `OLD_HOST`, and `NEW_HOST` with the correct values.

## Stopping Replication

Eventually the two databases are in sync. At that point, plan for a few minutes
of downtime before the replicated database can be put into use: during this
window the replication process is stopped and all Slony data is removed from
both databases. After that is done, GitLab can be restarted and use the newly
replicated database.

First, stop all of GitLab. Omnibus users can do so by running the following on
their GitLab servers:

```shell
sudo gitlab-ctl stop puma
sudo gitlab-ctl stop sidekiq
sudo gitlab-ctl stop mailroom
```

If you have any other processes that use PostgreSQL, you should also stop those.

After everything successfully stops, be sure to update any configuration settings
and DNS records so they all point to the new database.
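
For example, on an Omnibus installation that connects to its database through
the settings in `/etc/gitlab/gitlab.rb` (such as `gitlab_rails['db_host']`),
you would edit that file to point at the new server and then apply the change.
The exact settings depend on your setup, so treat this as a sketch only:

```shell
# Check which database host GitLab currently points at (Omnibus only).
sudo grep "db_host" /etc/gitlab/gitlab.rb
# After editing gitlab.rb so the database settings reference NEW_HOST:
sudo gitlab-ctl reconfigure
```
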

When the configuration is complete, stop the replication process. It's crucial
that no new data is written to the databases at this point, as that data would
be discarded.

To stop replication, run the following on both database servers:

```shell
sudo -u gitlab-psql /opt/gitlab/embedded/bin/slon_kill --conf /var/opt/gitlab/postgresql/slony/slon_tools.conf
```

This stops all the Slony processes on the host the command was executed on.
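
To double-check that nothing is still running before you continue, you can look
for leftover `slon` processes. This is an optional sanity check and assumes the
process names used by the Omnibus-bundled binaries above:

```shell
# Expect no matching processes once replication has been stopped on this host.
pgrep -af slon || echo 'No slon processes running'
```
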

## Resetting Sequences

The above setup does not replicate database sequences, so these must be reset
manually in the target database. You can use the following script for this:

```shell
#!/usr/bin/env bash
set -e

function main {
  local fix_sequences
  local fix_owners

  fix_sequences='/tmp/fix_sequences.sql'
  fix_owners='/tmp/fix_owners.sql'

  # The SQL queries were taken from
  # https://wiki.postgresql.org/wiki/Fixing_Sequences
  sudo gitlab-psql gitlabhq_production -t -c "
  SELECT 'ALTER SEQUENCE '|| quote_ident(MIN(schema_name)) ||'.'|| quote_ident(MIN(seq_name))
         ||' OWNED BY '|| quote_ident(MIN(TABLE_NAME)) ||'.'|| quote_ident(MIN(column_name)) ||';'
  FROM (
    SELECT
        n.nspname AS schema_name,
        c.relname AS TABLE_NAME,
        a.attname AS column_name,
        SUBSTRING(d.adsrc FROM E'^nextval\\(''([^'']*)''(?:::text|::regclass)?\\)') AS seq_name
    FROM pg_class c
    JOIN pg_attribute a ON (c.oid=a.attrelid)
    JOIN pg_attrdef d ON (a.attrelid=d.adrelid AND a.attnum=d.adnum)
    JOIN pg_namespace n ON (c.relnamespace=n.oid)
    WHERE has_schema_privilege(n.oid,'USAGE')
      AND n.nspname NOT LIKE 'pg!_%' escape '!'
      AND has_table_privilege(c.oid,'SELECT')
      AND (NOT a.attisdropped)
      AND d.adsrc ~ '^nextval'
  ) seq
  GROUP BY seq_name HAVING COUNT(*)=1;
  " > "${fix_owners}"

  sudo gitlab-psql gitlabhq_production -t -c "
  SELECT 'SELECT SETVAL(' ||
         quote_literal(quote_ident(PGT.schemaname) || '.' || quote_ident(S.relname)) ||
         ', COALESCE(MAX(' ||quote_ident(C.attname)|| '), 1) ) FROM ' ||
         quote_ident(PGT.schemaname)|| '.'||quote_ident(T.relname)|| ';'
  FROM pg_class AS S,
       pg_depend AS D,
       pg_class AS T,
       pg_attribute AS C,
       pg_tables AS PGT
  WHERE S.relkind = 'S'
    AND S.oid = D.objid
    AND D.refobjid = T.oid
    AND D.refobjid = C.attrelid
    AND D.refobjsubid = C.attnum
    AND T.relname = PGT.tablename
  ORDER BY S.relname;
  " > "${fix_sequences}"

  sudo gitlab-psql gitlabhq_production -f "${fix_owners}"
  sudo gitlab-psql gitlabhq_production -f "${fix_sequences}"

  rm "${fix_owners}" "${fix_sequences}"
}

main
```

Upload this script to the _target_ server and execute it as follows:

```shell
sudo bash path/to/the/script/above.sh
```

This corrects the ownership of sequences and resets the next value for the
`id` column to the next available value.
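
If you want to spot-check the result, you can compare a sequence's current
value with the highest `id` in its table. The table and sequence names below
are only an example; pick any replicated table:

```shell
# Both values should be close: the sequence's next value must not be lower
# than the highest existing id.
sudo gitlab-psql gitlabhq_production -c "SELECT last_value FROM projects_id_seq;"
sudo gitlab-psql gitlabhq_production -c "SELECT MAX(id) FROM projects;"
```
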

## Removing Slony

The final step is to remove all Slony related data. To do so, run the following
command on the _target_ server:

```shell
sudo gitlab-psql gitlabhq_production -c "DROP SCHEMA _slony_replication CASCADE;"
```

Once done, you can safely remove any Slony related files (for example, the log
directory) and uninstall Slony if desired. At this point you can start your
GitLab instance again; if all went well, it should be using your new database
server.

This content was removed in GitLab 15.7.
Patroni has been used for database replication since GitLab 14.0. To perform upgrades, use the [Patroni replication documentation](../administration/postgresql/replication_and_failover.md) instead.

@ -13,9 +13,8 @@ On self-managed GitLab, by default this feature is not available. To make it ava
On GitLab.com, this feature is not available.
This feature is not ready for production use.

## Overview

You can view the [product category](https://about.gitlab.com/direction/analytics/product-analytics/) page for more information about our direction. This page is a work in progress and will be updated as we add more features.
This page is a work in progress, and we're updating the information as we add more features.
For more information, visit the [Product Analytics group direction page](https://about.gitlab.com/direction/analytics/product-analytics/).

## Enable product analytics

@ -89,6 +89,7 @@ which are evaluated when displaying the badge. The following placeholders
are available:

- `%{project_path}`: Path of a project including the parent groups
- `%{project_name}`: Name of the project
- `%{project_id}`: Database ID associated with a project
- `%{default_branch}`: Default branch name configured for a project's repository
- `%{commit_sha}`: ID of the most recent commit to the default branch of a

@ -108,9 +108,8 @@ and an [HTTP status code](#http-status-codes):

## Rewrites

> - [Introduced](https://gitlab.com/gitlab-org/gitlab-pages/-/merge_requests/458) in GitLab 14.3.
> - Enabled on GitLab.com.
> - Disabled by default in self-managed GitLab behind the [`FF_ENABLE_PLACEHOLDERS` feature flag](#feature-flag-for-rewrites).
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-pages/-/merge_requests/458) in GitLab 14.3 [with a flag](../../../administration/feature_flags.md) named `FF_ENABLE_PLACEHOLDERS`. Disabled by default.
> - [Enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab-pages/-/issues/619) in GitLab 15.2.

Provide a status code of `200` to serve the content of the `to` path when the
request matches the `from`:

@ -267,28 +266,3 @@ However, there are some minor differences:
- Netlify redirects to `/new/:placeholder` (with a
  literal `:placeholder`).
- GitLab redirects to `/new/`.

## Feature flag for rewrites

FLAG:
Rewrites in GitLab Pages is under development, and is deployed behind a feature flag
that is **disabled by default**.

To enable rewrites, for [Omnibus installations](../../../administration/pages/index.md), define the
`FF_ENABLE_PLACEHOLDERS` environment variable in the
[global settings](../../../administration/pages/index.md#global-settings).
Add the following line to `/etc/gitlab/gitlab.rb` and
[reconfigure the instance](../../../administration/restart_gitlab.md#omnibus-gitlab-reconfigure):

```ruby
gitlab_pages['env']['FF_ENABLE_PLACEHOLDERS'] = 'true'
```

For [source installations](../../../administration/pages/source.md), define the
`FF_ENABLE_PLACEHOLDERS` environment variable, then
[restart GitLab](../../../administration/restart_gitlab.md#installations-from-source):

```shell
export FF_ENABLE_PLACEHOLDERS="true"
/path/to/pages/bin/gitlab-pages -config gitlab-pages.conf
```

@ -218,7 +218,7 @@ module Backup

build_backup_information

definitions.keys.each do |task_name|
definitions.each_key do |task_name|
run_create_task(task_name)
end

@ -239,7 +239,7 @@ module Backup
read_backup_information
verify_backup_version

definitions.keys.each do |task_name|
definitions.each_key do |task_name|
if !skipped?(task_name) && enabled_task?(task_name)
run_restore_task(task_name)
end

@ -263,7 +263,7 @@ module Backup

def write_backup_information
# Make sure there is a connection
::Gitlab::Database.database_base_models.values.each do |base_model|
::Gitlab::Database.database_base_models.each_value do |base_model|
base_model.connection.reconnect!
end

@ -65,6 +65,7 @@ module Gitlab
push_frontend_feature_flag(:security_auto_fix)
push_frontend_feature_flag(:new_header_search)
push_frontend_feature_flag(:source_editor_toolbar)
push_frontend_feature_flag(:vscode_web_ide, current_user)
push_frontend_feature_flag(:integration_slack_app_notifications)
push_frontend_feature_flag(:vue_group_select)
end

@ -5,6 +5,8 @@ require 'redis'
module Gitlab
module Instrumentation
class RedisBase
VALIDATE_ALLOWED_COMMANDS_KEY = 'validate_allowed_commands_flag'

class << self
include ::Gitlab::Utils::StrongMemoize
include ::Gitlab::Instrumentation::RedisPayload

@ -75,13 +77,23 @@ module Gitlab
query_time.round(::Gitlab::InstrumentationHelper::DURATION_PRECISION)
end

def redis_cluster_validate!(commands)
::Gitlab::Instrumentation::RedisClusterValidator.validate!(commands) if @redis_cluster_validation
true
rescue ::Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError
raise if Rails.env.development? || Rails.env.test? # raise in test environments to catch violations
def validate_allowed_commands?
::Gitlab::SafeRequestStore.fetch(VALIDATE_ALLOWED_COMMANDS_KEY) do
Feature.enabled?(:validate_allowed_cross_slot_commands, type: :development)
end
end

false
def redis_cluster_validate!(commands)
return true unless @redis_cluster_validation

result = ::Gitlab::Instrumentation::RedisClusterValidator.validate(commands, validate_allowed_commands?)
return true if result.nil?

if !result[:valid] && !result[:allowed] && (Rails.env.development? || Rails.env.test?)
raise RedisClusterValidator::CrossSlotError, "Redis command #{result[:command_name]} arguments hash to different slots. See https://docs.gitlab.com/ee/development/redis.html#multi-key-commands"
end

result[:valid]
end

def enable_redis_cluster_validation

@ -183,8 +183,8 @@ module Gitlab
CrossSlotError = Class.new(StandardError)

class << self
def validate!(commands)
return if allow_cross_slot_commands?
def validate(commands, validate_allowed_cmd)
return if allow_cross_slot_commands? && !validate_allowed_cmd
return if commands.empty?

# early exit for single-command (non-pipelined) if it is a single-key-command

@ -192,9 +192,14 @@ module Gitlab
return if commands.size == 1 && REDIS_COMMANDS.dig(command_name, :single_key)

key_slots = commands.map { |command| key_slots(command) }.flatten
if key_slots.uniq.many? # rubocop: disable CodeReuse/ActiveRecord
raise CrossSlotError, "Redis command #{command_name} arguments hash to different slots. See https://docs.gitlab.com/ee/development/redis.html#multi-key-commands"
end

{
valid: !key_slots.uniq.many?, # rubocop: disable CodeReuse/ActiveRecord
command_name: command_name,
key_count: key_slots.size,
allowed: allow_cross_slot_commands?,
command: commands.first.join(' ')
}
end

# Keep track of the call stack to allow nested calls to work.

@ -123,18 +123,18 @@ module Gitlab

def self.with_custom_logger(logger)
original_colorize_logging = ActiveSupport::LogSubscriber.colorize_logging
original_activerecord_logger = ApplicationRecord.logger
original_activerecord_logger = ActiveRecord::Base.logger
original_actioncontroller_logger = ActionController::Base.logger

if logger
ActiveSupport::LogSubscriber.colorize_logging = false
ApplicationRecord.logger = logger
ActiveRecord::Base.logger = logger
ActionController::Base.logger = logger
end

yield.tap do
ActiveSupport::LogSubscriber.colorize_logging = original_colorize_logging
ApplicationRecord.logger = original_activerecord_logger
ActiveRecord::Base.logger = original_activerecord_logger
ActionController::Base.logger = original_actioncontroller_logger
end
end

@ -8223,9 +8223,6 @@ msgstr ""
msgid "Checkout|Edit"
msgstr ""

msgid "Checkout|Enter a number greater than 0"
msgstr ""

msgid "Checkout|Exp %{expirationMonth}/%{expirationYear}"
msgstr ""

@ -8265,6 +8262,9 @@ msgstr ""
msgid "Checkout|Must be %{minimumNumberOfUsers} (your seats in use, plus all over limit members) or more. To buy fewer seats, remove members from the group."
msgstr ""

msgid "Checkout|Must be 1 or more. Cannot be a decimal."
msgstr ""

msgid "Checkout|Name of company or organization using GitLab"
msgstr ""

@ -43537,6 +43537,9 @@ msgstr ""
msgid "Try out GitLab Pipelines"
msgstr ""

msgid "Try out the new Web IDE"
msgstr ""

msgid "Try the troubleshooting steps here."
msgstr ""

@ -45027,6 +45030,9 @@ msgstr ""
msgid "Using the %{codeStart}needs%{codeEnd} keyword makes jobs run before their stage is reached. Jobs run as soon as their %{codeStart}needs%{codeEnd} relationships are met, which speeds up your pipelines."
msgstr ""

msgid "VS Code in your browser. View code and make changes from the same UI as in your local IDE 🎉"
msgstr ""

msgid "Valid From"
msgstr ""

@ -20,6 +20,7 @@ module RuboCop
ALLOWED_METHODS = %i[
no_touching
configurations
logger
].freeze

def_node_matcher :active_record_base_method_is_used?, <<~PATTERN
@ -47,6 +47,23 @@ RSpec.describe GraphqlController do
|
|||
'raisedAt' => /graphql_controller_spec.rb/))
|
||||
)
|
||||
end
|
||||
|
||||
it 'handles Gitlab::Auth::TooManyIps', :aggregate_failures do
|
||||
allow(controller).to receive(:execute) do
|
||||
raise Gitlab::Auth::TooManyIps.new(150, '123.123.123.123', 10)
|
||||
end
|
||||
|
||||
expect(controller).to receive(:log_exception).and_call_original
|
||||
|
||||
post :execute
|
||||
|
||||
expect(json_response).to include(
|
||||
'errors' => include(
|
||||
a_hash_including('message' => 'User 150 from IP: 123.123.123.123 tried logging from too many ips: 10')
|
||||
)
|
||||
)
|
||||
expect(response).to have_gitlab_http_status(:forbidden)
|
||||
end
|
||||
end
|
||||
|
||||
describe 'POST #execute' do
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Projects::Settings::IntegrationsController do
|
||||
RSpec.describe Projects::Settings::IntegrationsController, feature_category: :integrations do
|
||||
include JiraIntegrationHelpers
|
||||
include AfterNextHelpers
|
||||
|
||||
|
|
@ -39,179 +39,174 @@ RSpec.describe Projects::Settings::IntegrationsController do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#test' do
|
||||
context 'when the integration is not testable' do
|
||||
it 'renders 404' do
|
||||
allow_any_instance_of(Integration).to receive(:testable?).and_return(false)
|
||||
describe '#test', :clean_gitlab_redis_rate_limiting do
|
||||
let_it_be(:integration) { create(:external_wiki_integration, project: project) }
|
||||
|
||||
put :test, params: project_params
|
||||
let(:integration_params) { { external_wiki_url: 'https://example.net/wiki' } }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
it 'renders 404 when the integration is not testable' do
|
||||
allow_next_found_instance_of(integration.class) do |integration|
|
||||
allow(integration).to receive(:testable?).and_return(false)
|
||||
end
|
||||
|
||||
put :test, params: project_params(service: integration_params)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
expect(json_response).to eq({})
|
||||
end
|
||||
|
||||
context 'when validations fail', :clean_gitlab_redis_rate_limiting do
|
||||
let(:integration_params) { { active: 'true', url: '' } }
|
||||
it 'returns success if test is successful' do
|
||||
allow_next(Integrations::Test::ProjectService).to receive(:execute).and_return({ success: true })
|
||||
|
||||
it 'returns error messages in JSON response' do
|
||||
put :test, params: project_params(service: integration_params)
|
||||
put :test, params: project_params(service: integration_params)
|
||||
|
||||
expect(json_response['message']).to eq 'Validations failed.'
|
||||
expect(json_response['service_response']).to include "Url can't be blank"
|
||||
expect(response).to be_successful
|
||||
end
|
||||
expect(response).to be_successful
|
||||
expect(json_response).to eq({})
|
||||
end
|
||||
|
||||
context 'when successful', :clean_gitlab_redis_rate_limiting do
|
||||
context 'with empty project' do
|
||||
let_it_be(:project) { create(:project) }
|
||||
it 'returns extra given data if test is successful' do
|
||||
allow_next(Integrations::Test::ProjectService).to receive(:execute)
|
||||
.and_return({ success: true, data: { my_payload: true } })
|
||||
|
||||
context 'with chat notification integration' do
|
||||
let_it_be(:integration) { project.create_microsoft_teams_integration(webhook: 'http://webhook.com') }
|
||||
put :test, params: project_params(service: integration_params)
|
||||
|
||||
it 'returns success' do
|
||||
allow_next(::MicrosoftTeams::Notifier).to receive(:ping).and_return(true)
|
||||
|
||||
put :test, params: project_params
|
||||
|
||||
expect(response).to be_successful
|
||||
end
|
||||
|
||||
context 'with masked token' do
|
||||
let(:integration_params) { { active: 'true', webhook: '************' } }
|
||||
|
||||
it 'returns success' do
|
||||
allow_next(::MicrosoftTeams::Notifier).to receive(:ping).and_return(true)
|
||||
|
||||
put :test, params: project_params(service: integration_params)
|
||||
|
||||
expect(response).to be_successful
|
||||
expect(integration.reload.webhook).to eq('http://webhook.com')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
it 'returns success' do
|
||||
stub_jira_integration_test
|
||||
|
||||
expect(Gitlab::HTTP).to receive(:get).with('/rest/api/2/serverInfo', any_args).and_call_original
|
||||
|
||||
put :test, params: project_params(service: integration_params)
|
||||
|
||||
expect(response).to be_successful
|
||||
end
|
||||
end
|
||||
|
||||
it 'returns success' do
|
||||
stub_jira_integration_test
|
||||
|
||||
expect(Gitlab::HTTP).to receive(:get).with('/rest/api/2/serverInfo', any_args).and_call_original
|
||||
|
||||
put :test, params: project_params(service: integration_params)
|
||||
|
||||
expect(response).to be_successful
|
||||
end
|
||||
|
||||
context 'when service is configured for the first time' do
|
||||
let(:integration_params) do
|
||||
{
|
||||
'active' => '1',
|
||||
'push_events' => '1',
|
||||
'token' => 'token',
|
||||
'project_url' => 'https://buildkite.com/organization/pipeline'
|
||||
}
|
||||
end
|
||||
|
||||
before do
|
||||
allow_next(ServiceHook).to receive(:execute).and_return(true)
|
||||
end
|
||||
|
||||
it 'persist the object' do
|
||||
do_put
|
||||
|
||||
expect(response).to be_successful
|
||||
expect(json_response).to be_empty
|
||||
expect(Integrations::Buildkite.first).to be_present
|
||||
end
|
||||
|
||||
it 'creates the ServiceHook object' do
|
||||
do_put
|
||||
|
||||
expect(response).to be_successful
|
||||
expect(json_response).to be_empty
|
||||
expect(Integrations::Buildkite.first.service_hook).to be_present
|
||||
end
|
||||
|
||||
def do_put
|
||||
put :test, params: project_params(id: 'buildkite',
|
||||
service: integration_params)
|
||||
end
|
||||
end
|
||||
expect(response).to be_successful
|
||||
expect(json_response).to eq({ 'my_payload' => true })
|
||||
end
|
||||
|
||||
context 'when unsuccessful', :clean_gitlab_redis_rate_limiting do
|
||||
it 'returns an error response when the integration test fails' do
|
||||
stub_request(:get, 'http://example.com/rest/api/2/serverInfo')
|
||||
.to_return(status: 404)
|
||||
it 'returns an error response if the test is not successful' do
|
||||
allow_next(Integrations::Test::ProjectService).to receive(:execute).and_return({ success: false })
|
||||
|
||||
put :test, params: project_params(service: integration_params)
|
||||
|
||||
expect(response).to be_successful
|
||||
expect(json_response).to eq(
|
||||
'error' => true,
|
||||
'message' => 'Connection failed. Check your integration settings.',
|
||||
'service_response' => '',
|
||||
'test_failed' => true
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns extra given message if the test is not successful' do
|
||||
allow_next(Integrations::Test::ProjectService).to receive(:execute)
|
||||
.and_return({ success: false, result: 'Result of test' })
|
||||
|
||||
put :test, params: project_params(service: integration_params)
|
||||
|
||||
expect(response).to be_successful
|
||||
expect(json_response).to eq(
|
||||
'error' => true,
|
||||
'message' => 'Connection failed. Check your integration settings.',
|
||||
'service_response' => 'Result of test',
|
||||
'test_failed' => true
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns an error response if a network exception is raised' do
|
||||
allow_next(Integrations::Test::ProjectService).to receive(:execute).and_raise(Errno::ECONNREFUSED)
|
||||
|
||||
put :test, params: project_params(service: integration_params)
|
||||
|
||||
expect(response).to be_successful
|
||||
expect(json_response).to eq(
|
||||
'error' => true,
|
||||
'message' => 'Connection failed. Check your integration settings.',
|
||||
'service_response' => 'Connection refused',
|
||||
'test_failed' => true
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns error messages in JSON response if validations fail' do
|
||||
integration_params = { active: 'true', external_wiki_url: '' }
|
||||
|
||||
put :test, params: project_params(service: integration_params)
|
||||
|
||||
expect(json_response['message']).to eq 'Validations failed.'
|
||||
expect(json_response['service_response']).to eq(
|
||||
"External wiki url can't be blank, External wiki url must be a valid URL"
|
||||
)
|
||||
expect(response).to be_successful
|
||||
end
|
||||
|
||||
context 'when integration has a webhook' do
|
||||
let_it_be(:integration) { create(:integrations_slack, project: project) }
|
||||
|
||||
it 'returns an error response if the webhook URL is changed to one that is blocked' do
|
||||
integration_params = { webhook: 'http://127.0.0.1' }
|
||||
|
||||
put :test, params: project_params(service: integration_params)
|
||||
|
||||
expect(response).to be_successful
|
||||
expect(json_response).to eq(
|
||||
'error' => true,
|
||||
'message' => 'Connection failed. Check your integration settings.',
|
||||
'service_response' => '',
|
||||
'test_failed' => true
|
||||
'message' => 'Validations failed.',
|
||||
'service_response' => "Webhook is blocked: Requests to localhost are not allowed",
|
||||
'test_failed' => false
|
||||
)
|
||||
end
|
||||
|
||||
context 'with the Slack integration' do
|
||||
let_it_be(:integration) { build(:integrations_slack) }
|
||||
it 'ignores masked webhook param' do
|
||||
integration_params = { active: 'true', webhook: '************' }
|
||||
allow_next(Integrations::Test::ProjectService).to receive(:execute).and_return({ success: true })
|
||||
|
||||
it 'returns an error response when the URL is blocked' do
|
||||
put :test, params: project_params(service: { webhook: 'http://127.0.0.1' })
|
||||
expect do
|
||||
put :test, params: project_params(service: integration_params)
|
||||
end.not_to change { integration.reload.webhook }
|
||||
|
||||
expect(response).to be_successful
|
||||
expect(json_response).to eq(
|
||||
'error' => true,
|
||||
'message' => 'Connection failed. Check your integration settings.',
|
||||
'service_response' => "URL 'http://127.0.0.1' is blocked: Requests to localhost are not allowed",
|
||||
'test_failed' => true
|
||||
)
|
||||
end
|
||||
expect(response).to be_successful
|
||||
expect(json_response).to eq({})
|
||||
end
|
||||
|
||||
it 'returns an error response when a network exception is raised' do
|
||||
expect_next(Integrations::Slack).to receive(:test).and_raise(Errno::ECONNREFUSED)
|
||||
it 'creates an associated web hook record if web hook integration is configured for the first time' do
|
||||
integration_params = {
|
||||
'active' => '1',
|
||||
'issues_events' => '1',
|
||||
'push_events' => '0',
|
||||
'token' => 'my-token',
|
||||
'project_url' => 'https://buildkite.com/organization/pipeline'
|
||||
}
|
||||
allow_next(ServiceHook).to receive(:execute).and_return(true)
|
||||
|
||||
put :test, params: project_params
|
||||
expect do
|
||||
put :test, params: project_params(id: 'buildkite', service: integration_params)
|
||||
end.to change { Integrations::Buildkite.count }.from(0).to(1)
|
||||
|
||||
expect(response).to be_successful
|
||||
expect(json_response).to eq(
|
||||
'error' => true,
|
||||
'message' => 'Connection failed. Check your integration settings.',
|
||||
'service_response' => 'Connection refused',
|
||||
'test_failed' => true
|
||||
)
|
||||
end
|
||||
integration = Integrations::Buildkite.take
|
||||
|
||||
expect(response).to be_successful
|
||||
expect(json_response).to eq({})
|
||||
expect(integration).to have_attributes(
|
||||
project_url: 'https://buildkite.com/organization/pipeline',
|
||||
issues_events: true,
|
||||
push_events: false
|
||||
)
|
||||
expect(integration.service_hook).to have_attributes(
|
||||
url: 'https://webhook.buildkite.com/deliver/{webhook_token}',
|
||||
interpolated_url: 'https://webhook.buildkite.com/deliver/my-token'
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the endpoint receives requests above the limit', :freeze_time, :clean_gitlab_redis_rate_limiting do
|
||||
context 'when the endpoint receives requests above the rate limit', :freeze_time do
|
||||
before do
|
||||
allow(Gitlab::ApplicationRateLimiter).to receive(:rate_limits)
|
||||
.and_return(project_testing_integration: { threshold: 1, interval: 1.minute })
|
||||
end
|
||||
|
||||
it 'prevents making test requests' do
|
||||
stub_jira_integration_test
|
||||
|
||||
expect_next_instance_of(::Integrations::Test::ProjectService) do |service|
|
||||
expect(service).to receive(:execute).and_return(http_status: 200)
|
||||
end
|
||||
|
||||
2.times { post :test, params: project_params(service: integration_params) }
|
||||
|
||||
expect(response.body).to include(_('This endpoint has been requested too many times. Try again later.'))
|
||||
expect(json_response).to eq(
|
||||
{
|
||||
'error' => true,
|
||||
'message' => 'This endpoint has been requested too many times. Try again later.'
|
||||
}
|
||||
)
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -6,7 +6,6 @@ require Rails.root.join('ee', 'spec', 'db', 'schema_support') if Gitlab.ee?
|
|||
RSpec.describe 'Database schema' do
|
||||
prepend_mod_with('DB::SchemaSupport')
|
||||
|
||||
let(:connection) { ActiveRecord::Base.connection }
|
||||
let(:tables) { connection.tables }
|
||||
let(:columns_name_with_jsonb) { retrieve_columns_name_with_jsonb }
|
||||
|
||||
|
|
@ -121,56 +120,62 @@ RSpec.describe 'Database schema' do
|
|||
}.with_indifferent_access.freeze
|
||||
|
||||
context 'for table' do
|
||||
(ActiveRecord::Base.connection.tables - TABLE_PARTITIONS).sort.each do |table|
|
||||
describe table do
|
||||
let(:indexes) { connection.indexes(table) }
|
||||
let(:columns) { connection.columns(table) }
|
||||
let(:foreign_keys) { connection.foreign_keys(table) }
|
||||
let(:loose_foreign_keys) { Gitlab::Database::LooseForeignKeys.definitions.group_by(&:from_table).fetch(table, []) }
|
||||
let(:all_foreign_keys) { foreign_keys + loose_foreign_keys }
|
||||
# take the first column in case we're using a composite primary key
|
||||
let(:primary_key_column) { Array(connection.primary_key(table)).first }
|
||||
Gitlab::Database::EachDatabase.each_database_connection do |connection, _|
|
||||
schemas_for_connection = Gitlab::Database.gitlab_schemas_for_connection(connection)
|
||||
(connection.tables - TABLE_PARTITIONS).sort.each do |table|
|
||||
table_schema = Gitlab::Database::GitlabSchema.table_schema(table)
|
||||
next unless schemas_for_connection.include?(table_schema)
|
||||
|
||||
context 'all foreign keys' do
|
||||
# for index to be effective, the FK constraint has to be at first place
|
||||
it 'are indexed' do
|
||||
first_indexed_column = indexes.filter_map do |index|
|
||||
columns = index.columns
|
||||
describe table do
|
||||
let(:indexes) { connection.indexes(table) }
|
||||
let(:columns) { connection.columns(table) }
|
||||
let(:foreign_keys) { connection.foreign_keys(table) }
|
||||
let(:loose_foreign_keys) { Gitlab::Database::LooseForeignKeys.definitions.group_by(&:from_table).fetch(table, []) }
|
||||
let(:all_foreign_keys) { foreign_keys + loose_foreign_keys }
|
||||
# take the first column in case we're using a composite primary key
|
||||
let(:primary_key_column) { Array(connection.primary_key(table)).first }
|
||||
|
||||
# In cases of complex composite indexes, a string is returned eg:
|
||||
# "lower((extern_uid)::text), group_id"
|
||||
columns = columns.split(',') if columns.is_a?(String)
|
||||
column = columns.first.chomp
|
||||
context 'all foreign keys' do
|
||||
# for index to be effective, the FK constraint has to be at first place
|
||||
it 'are indexed' do
|
||||
first_indexed_column = indexes.filter_map do |index|
|
||||
columns = index.columns
|
||||
|
||||
# A partial index is not suitable for a foreign key column, unless
|
||||
# the only condition is for the presence of the foreign key itself
|
||||
column if index.where.nil? || index.where == "(#{column} IS NOT NULL)"
|
||||
# In cases of complex composite indexes, a string is returned eg:
|
||||
# "lower((extern_uid)::text), group_id"
|
||||
columns = columns.split(',') if columns.is_a?(String)
|
||||
column = columns.first.chomp
|
||||
|
||||
# A partial index is not suitable for a foreign key column, unless
|
||||
# the only condition is for the presence of the foreign key itself
|
||||
column if index.where.nil? || index.where == "(#{column} IS NOT NULL)"
|
||||
end
|
||||
foreign_keys_columns = all_foreign_keys.map(&:column)
|
||||
required_indexed_columns = foreign_keys_columns - ignored_index_columns(table)
|
||||
|
||||
# Add the primary key column to the list of indexed columns because
|
||||
# postgres and mysql both automatically create an index on the primary
|
||||
# key. Also, the rails connection.indexes() method does not return
|
||||
# automatically generated indexes (like the primary key index).
|
||||
first_indexed_column.push(primary_key_column)
|
||||
|
||||
expect(first_indexed_column.uniq).to include(*required_indexed_columns)
|
||||
end
|
||||
foreign_keys_columns = all_foreign_keys.map(&:column)
|
||||
required_indexed_columns = foreign_keys_columns - ignored_index_columns(table)
|
||||
|
||||
# Add the primary key column to the list of indexed columns because
|
||||
# postgres and mysql both automatically create an index on the primary
|
||||
# key. Also, the rails connection.indexes() method does not return
|
||||
# automatically generated indexes (like the primary key index).
|
||||
first_indexed_column.push(primary_key_column)
|
||||
|
||||
expect(first_indexed_column.uniq).to include(*required_indexed_columns)
|
||||
end
|
||||
end
|
||||
|
||||
context 'columns ending with _id' do
|
||||
let(:column_names) { columns.map(&:name) }
|
||||
let(:column_names_with_id) { column_names.select { |column_name| column_name.ends_with?('_id') } }
|
||||
let(:foreign_keys_columns) { all_foreign_keys.map(&:column).uniq } # we can have FK and loose FK present at the same time
|
||||
let(:ignored_columns) { ignored_fk_columns(table) }
|
||||
|
||||
it 'do have the foreign keys' do
|
||||
expect(column_names_with_id - ignored_columns).to match_array(foreign_keys_columns)
|
||||
end
|
||||
|
||||
it 'and having foreign key are not in the ignore list' do
|
||||
expect(ignored_columns).to match_array(ignored_columns - foreign_keys)
|
||||
context 'columns ending with _id' do
|
||||
let(:column_names) { columns.map(&:name) }
|
||||
let(:column_names_with_id) { column_names.select { |column_name| column_name.ends_with?('_id') } }
|
||||
let(:foreign_keys_columns) { all_foreign_keys.map(&:column).uniq } # we can have FK and loose FK present at the same time
|
||||
let(:ignored_columns) { ignored_fk_columns(table) }
|
||||
|
||||
it 'do have the foreign keys' do
|
||||
expect(column_names_with_id - ignored_columns).to match_array(foreign_keys_columns)
|
||||
end
|
||||
|
||||
it 'and having foreign key are not in the ignore list' do
|
||||
expect(ignored_columns).to match_array(ignored_columns - foreign_keys)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -288,13 +293,16 @@ RSpec.describe 'Database schema' do
|
|||
|
||||
context 'primary keys' do
|
||||
it 'expects every table to have a primary key defined' do
|
||||
connection = ActiveRecord::Base.connection
|
||||
Gitlab::Database::EachDatabase.each_database_connection do |connection, _|
|
||||
schemas_for_connection = Gitlab::Database.gitlab_schemas_for_connection(connection)
|
||||
|
||||
problematic_tables = connection.tables.select do |table|
|
||||
!connection.primary_key(table).present?
|
||||
end.map(&:to_sym)
|
||||
problematic_tables = connection.tables.select do |table|
|
||||
table_schema = Gitlab::Database::GitlabSchema.table_schema(table)
|
||||
schemas_for_connection.include?(table_schema) && !connection.primary_key(table).present?
|
||||
end.map(&:to_sym)
|
||||
|
||||
expect(problematic_tables).to be_empty
|
||||
expect(problematic_tables).to be_empty
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -143,7 +143,7 @@ RSpec.describe 'Branches', feature_category: :projects do
|
|||
|
||||
click_button "Updated date" # Open sorting dropdown
|
||||
within '[data-testid="branches-dropdown"]' do
|
||||
find('p', text: 'Name').click
|
||||
first('span', text: 'Name').click
|
||||
end
|
||||
|
||||
expect(page).to have_content(sorted_branches(repository, count: 20, sort_by: :name))
|
||||
|
|
@ -154,7 +154,7 @@ RSpec.describe 'Branches', feature_category: :projects do
|
|||
|
||||
click_button "Updated date" # Open sorting dropdown
|
||||
within '[data-testid="branches-dropdown"]' do
|
||||
find('p', text: 'Oldest updated').click
|
||||
first('span', text: 'Oldest updated').click
|
||||
end
|
||||
|
||||
expect(page).to have_content(sorted_branches(repository, count: 20, sort_by: :updated_asc))
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ RSpec.describe 'User sees feature flag list', :js, feature_category: :feature_fl
|
|||
expect_status_toggle_button_not_to_be_checked
|
||||
|
||||
within_feature_flag_scopes do
|
||||
expect(page.find('[data-testid="strategy-badge"]')).to have_content('All Users: All Environments, review/*')
|
||||
expect(page.find('[data-testid="strategy-label"]')).to have_content('All Users: All Environments, review/*')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -66,7 +66,7 @@ RSpec.describe 'User sees feature flag list', :js, feature_category: :feature_fl
|
|||
expect_status_toggle_button_to_be_checked
|
||||
|
||||
within_feature_flag_scopes do
|
||||
expect(page.find('[data-testid="strategy-badge"]')).to have_content('All Users: production')
|
||||
expect(page.find('[data-testid="strategy-label"]')).to have_content('All Users: production')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -49,6 +49,7 @@ jest.mock('@gitlab/ui/dist/components/base/popover/popover.js', () => ({
|
|||
'boundary',
|
||||
'container',
|
||||
'showCloseButton',
|
||||
'show',
|
||||
].map((prop) => [prop, {}]),
|
||||
),
|
||||
},
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import { GlToggle, GlBadge } from '@gitlab/ui';
|
||||
import { shallowMount } from '@vue/test-utils';
|
||||
import { GlToggle } from '@gitlab/ui';
|
||||
import { nextTick } from 'vue';
|
||||
import { mountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import { trimText } from 'helpers/text_helper';
|
||||
import { mockTracking } from 'helpers/tracking_helper';
|
||||
import FeatureFlagsTable from '~/feature_flags/components/feature_flags_table.vue';
|
||||
|
|
@ -52,10 +52,10 @@ const getDefaultProps = () => ({
|
|||
describe('Feature flag table', () => {
|
||||
let wrapper;
|
||||
let props;
|
||||
let badges;
|
||||
let labels;
|
||||
|
||||
const createWrapper = (propsData, opts = {}) => {
|
||||
wrapper = shallowMount(FeatureFlagsTable, {
|
||||
wrapper = mountExtended(FeatureFlagsTable, {
|
||||
propsData,
|
||||
provide: {
|
||||
csrfToken: 'fakeToken',
|
||||
|
|
@ -70,18 +70,13 @@ describe('Feature flag table', () => {
|
|||
provide: { csrfToken: 'fakeToken' },
|
||||
});
|
||||
|
||||
badges = wrapper.findAll('[data-testid="strategy-badge"]');
|
||||
labels = wrapper.findAllByTestId('strategy-label');
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
props = getDefaultProps();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
wrapper.destroy();
|
||||
wrapper = null;
|
||||
});
|
||||
|
||||
describe('with an active scope and a standard rollout strategy', () => {
|
||||
beforeEach(() => {
|
||||
createWrapper(props);
|
||||
|
|
@ -101,7 +96,7 @@ describe('Feature flag table', () => {
|
|||
});
|
||||
|
||||
it('Should render a status column', () => {
|
||||
const badge = wrapper.find('[data-testid="feature-flag-status-badge"]');
|
||||
const badge = wrapper.findByTestId('feature-flag-status-badge');
|
||||
|
||||
expect(badge.exists()).toBe(true);
|
||||
expect(trimText(badge.text())).toEqual('Active');
|
||||
|
|
@ -116,10 +111,10 @@ describe('Feature flag table', () => {
|
|||
);
|
||||
});
|
||||
|
||||
it('should render an environments specs badge with active class', () => {
|
||||
const envColumn = wrapper.find('.js-feature-flag-environments');
|
||||
it('should render an environments specs label', () => {
|
||||
const strategyLabel = wrapper.findByTestId('strategy-label');
|
||||
|
||||
expect(trimText(envColumn.findComponent(GlBadge).text())).toBe('All Users: All Environments');
|
||||
expect(trimText(strategyLabel.text())).toBe('All Users: All Environments');
|
||||
});
|
||||
|
||||
it('should render an actions column', () => {
|
||||
|
|
@ -167,29 +162,29 @@ describe('Feature flag table', () => {
|
|||
});
|
||||
|
||||
it('shows All Environments if the environment scope is *', () => {
|
||||
expect(badges.at(0).text()).toContain('All Environments');
|
||||
expect(labels.at(0).text()).toContain('All Environments');
|
||||
});
|
||||
|
||||
it('shows the environment scope if another is set', () => {
|
||||
expect(badges.at(1).text()).toContain('production');
|
||||
expect(badges.at(1).text()).toContain('staging');
|
||||
expect(badges.at(2).text()).toContain('review/*');
|
||||
expect(labels.at(1).text()).toContain('production');
|
||||
expect(labels.at(1).text()).toContain('staging');
|
||||
expect(labels.at(2).text()).toContain('review/*');
|
||||
});
|
||||
|
||||
it('shows All Users for the default strategy', () => {
|
||||
expect(badges.at(0).text()).toContain('All Users');
|
||||
expect(labels.at(0).text()).toContain('All Users');
|
||||
});
|
||||
|
||||
it('shows the percent for a percent rollout', () => {
|
||||
expect(badges.at(1).text()).toContain('Percent of users - 50%');
|
||||
expect(labels.at(1).text()).toContain('Percent of users - 50%');
|
||||
});
|
||||
|
||||
it('shows the number of users for users with ID', () => {
|
||||
expect(badges.at(2).text()).toContain('User IDs - 4 users');
|
||||
expect(labels.at(2).text()).toContain('User IDs - 4 users');
|
||||
});
|
||||
|
||||
it('shows the name of a user list for user list', () => {
|
||||
expect(badges.at(3).text()).toContain('User List - test list');
|
||||
expect(labels.at(3).text()).toContain('User List - test list');
|
||||
});
|
||||
|
||||
it('renders a feature flag without an iid', () => {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,61 @@
|
|||
import { mount } from '@vue/test-utils';
|
||||
import StrategyLabel from '~/feature_flags/components/strategy_label.vue';
|
||||
|
||||
const DEFAULT_PROPS = {
|
||||
name: 'All Users',
|
||||
parameters: 'parameters',
|
||||
scopes: 'scope1, scope2',
|
||||
};
|
||||
|
||||
describe('feature_flags/components/feature_flags_tab.vue', () => {
|
||||
let wrapper;
|
||||
|
||||
const factory = (props = {}) =>
|
||||
mount(
|
||||
{
|
||||
components: {
|
||||
StrategyLabel,
|
||||
},
|
||||
render(h) {
|
||||
return h(StrategyLabel, { props: this.$attrs, on: this.$listeners }, this.$slots.default);
|
||||
},
|
||||
},
|
||||
{
|
||||
propsData: {
|
||||
...DEFAULT_PROPS,
|
||||
...props,
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
describe('render', () => {
|
||||
let strategyLabel;
|
||||
|
||||
beforeEach(() => {
|
||||
wrapper = factory({});
|
||||
strategyLabel = wrapper.findComponent(StrategyLabel);
|
||||
});
|
||||
|
||||
it('should show the strategy label with parameters and scope', () => {
|
||||
expect(strategyLabel.text()).toContain(DEFAULT_PROPS.name);
|
||||
expect(strategyLabel.text()).toContain(DEFAULT_PROPS.parameters);
|
||||
expect(strategyLabel.text()).toContain(DEFAULT_PROPS.scopes);
|
||||
expect(strategyLabel.text()).toContain('All Users - parameters: scope1, scope2');
|
||||
});
|
||||
});
|
||||
|
||||
describe('without parameters', () => {
|
||||
let strategyLabel;
|
||||
|
||||
beforeEach(() => {
|
||||
wrapper = factory({ parameters: null });
|
||||
strategyLabel = wrapper.findComponent(StrategyLabel);
|
||||
});
|
||||
|
||||
it('should hide empty params and dash', () => {
|
||||
expect(strategyLabel.text()).toContain(DEFAULT_PROPS.name);
|
||||
expect(strategyLabel.text()).not.toContain(' - ');
|
||||
expect(strategyLabel.text()).toContain('All Users: scope1, scope2');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -16,10 +16,12 @@ import {
|
|||
branchProtectionsMockResponse,
|
||||
approvalRulesMock,
|
||||
statusChecksRulesMock,
|
||||
matchingBranchesCount,
|
||||
} from './mock_data';
|
||||
|
||||
jest.mock('~/lib/utils/url_utility', () => ({
|
||||
getParameterByName: jest.fn().mockReturnValue('main'),
|
||||
mergeUrlParams: jest.fn().mockReturnValue('/branches?state=all&search=main'),
|
||||
joinPaths: jest.fn(),
|
||||
}));
|
||||
|
||||
|
|
@ -65,6 +67,13 @@ describe('View branch rules', () => {
|
|||
const findForcePushTitle = () => wrapper.findByText(I18N.allowForcePushDescription);
|
||||
const findApprovalsTitle = () => wrapper.findByText(I18N.approvalsTitle);
|
||||
const findStatusChecksTitle = () => wrapper.findByText(I18N.statusChecksTitle);
|
||||
const findMatchingBranchesLink = () =>
|
||||
wrapper.findByText(
|
||||
sprintf(I18N.matchingBranchesLinkTitle, {
|
||||
total: matchingBranchesCount,
|
||||
subject: 'branches',
|
||||
}),
|
||||
);
|
||||
|
||||
it('gets the branch param from url and renders it in the view', () => {
|
||||
expect(util.getParameterByName).toHaveBeenCalledWith('branch');
|
||||
|
|
@ -85,6 +94,12 @@ describe('View branch rules', () => {
|
|||
expect(findBranchTitle().exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('renders matching branches link', () => {
|
||||
const matchingBranchesLink = findMatchingBranchesLink();
|
||||
expect(matchingBranchesLink.exists()).toBe(true);
|
||||
expect(matchingBranchesLink.attributes().href).toBe('/branches?state=all&search=main');
|
||||
});
|
||||
|
||||
it('renders a branch protection title', () => {
|
||||
expect(findBranchProtectionTitle().exists()).toBe(true);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -109,6 +109,8 @@ export const accessLevelsMockResponse = [
|
|||
},
|
||||
];
|
||||
|
||||
export const matchingBranchesCount = 3;
|
||||
|
||||
export const branchProtectionsMockResponse = {
|
||||
data: {
|
||||
project: {
|
||||
|
|
@ -141,6 +143,7 @@ export const branchProtectionsMockResponse = {
|
|||
__typename: 'ExternalStatusCheckConnection',
|
||||
nodes: statusChecksRulesMock,
|
||||
},
|
||||
matchingBranchesCount,
|
||||
},
|
||||
{
|
||||
__typename: 'BranchRule',
|
||||
|
|
@ -166,6 +169,7 @@ export const branchProtectionsMockResponse = {
|
|||
__typename: 'ExternalStatusCheckConnection',
|
||||
nodes: [],
|
||||
},
|
||||
matchingBranchesCount,
|
||||
},
|
||||
],
|
||||
},
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import { GlDropdown, GlDropdownDivider, GlButton } from '@gitlab/ui';
|
||||
import { GlDropdown, GlDropdownDivider, GlButton, GlTooltip } from '@gitlab/ui';
|
||||
import { shallowMount } from '@vue/test-utils';
|
||||
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
|
||||
import ActionsButton from '~/vue_shared/components/actions_button.vue';
|
||||
|
||||
const TEST_ACTION = {
|
||||
|
|
@ -32,7 +31,6 @@ describe('Actions button component', () => {
|
|||
function createComponent(props) {
|
||||
wrapper = shallowMount(ActionsButton, {
|
||||
propsData: { ...props },
|
||||
directives: { GlTooltip: createMockDirective() },
|
||||
});
|
||||
}
|
||||
|
||||
|
|
@ -40,15 +38,9 @@ describe('Actions button component', () => {
|
|||
wrapper.destroy();
|
||||
});
|
||||
|
||||
const getTooltip = (child) => {
|
||||
const directiveBinding = getBinding(child.element, 'gl-tooltip');
|
||||
|
||||
return directiveBinding.value;
|
||||
};
|
||||
const findButton = () => wrapper.findComponent(GlButton);
|
||||
const findButtonTooltip = () => getTooltip(findButton());
|
||||
const findTooltip = () => wrapper.findComponent(GlTooltip);
|
||||
const findDropdown = () => wrapper.findComponent(GlDropdown);
|
||||
const findDropdownTooltip = () => getTooltip(findDropdown());
|
||||
const parseDropdownItems = () =>
|
||||
findDropdown()
|
||||
.findAll('gl-dropdown-item-stub,gl-dropdown-divider-stub')
|
||||
|
|
@ -88,8 +80,8 @@ describe('Actions button component', () => {
|
|||
expect(findButton().text()).toBe(TEST_ACTION.text);
|
||||
});
|
||||
|
||||
it('should have tooltip', () => {
|
||||
expect(findButtonTooltip()).toBe(TEST_ACTION.tooltip);
|
||||
it('should not have tooltip', () => {
|
||||
expect(findTooltip().exists()).toBe(false);
|
||||
});
|
||||
|
||||
it('should have attrs', () => {
|
||||
|
|
@ -105,7 +97,18 @@ describe('Actions button component', () => {
|
|||
it('should have tooltip', () => {
|
||||
createComponent({ actions: [{ ...TEST_ACTION, tooltip: TEST_TOOLTIP }] });
|
||||
|
||||
expect(findButtonTooltip()).toBe(TEST_TOOLTIP);
|
||||
expect(findTooltip().text()).toBe(TEST_TOOLTIP);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when showActionTooltip is false', () => {
|
||||
it('should not have tooltip', () => {
|
||||
createComponent({
|
||||
actions: [{ ...TEST_ACTION, tooltip: TEST_TOOLTIP }],
|
||||
showActionTooltip: false,
|
||||
});
|
||||
|
||||
expect(findTooltip().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@ -174,8 +177,8 @@ describe('Actions button component', () => {
|
|||
expect(wrapper.emitted('select')).toEqual([[TEST_ACTION_2.key]]);
|
||||
});
|
||||
|
||||
it('should have tooltip value', () => {
|
||||
expect(findDropdownTooltip()).toBe(TEST_ACTION.tooltip);
|
||||
it('should not have tooltip value', () => {
|
||||
expect(findTooltip().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@ -199,7 +202,7 @@ describe('Actions button component', () => {
|
|||
});
|
||||
|
||||
it('should have tooltip value', () => {
|
||||
expect(findDropdownTooltip()).toBe(TEST_ACTION_2.tooltip);
|
||||
expect(findTooltip().text()).toBe(TEST_ACTION_2.tooltip);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1,10 +1,11 @@
|
|||
import { GlModal } from '@gitlab/ui';
|
||||
import { GlButton, GlModal, GlPopover } from '@gitlab/ui';
|
||||
import { nextTick } from 'vue';
|
||||
|
||||
import ActionsButton from '~/vue_shared/components/actions_button.vue';
|
||||
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
|
||||
import WebIdeLink, { i18n } from '~/vue_shared/components/web_ide_link.vue';
|
||||
import ConfirmForkModal from '~/vue_shared/components/confirm_fork_modal.vue';
|
||||
import UserCalloutDismisser from '~/vue_shared/components/user_callout_dismisser.vue';
|
||||
|
||||
import { stubComponent } from 'helpers/stub_component';
|
||||
import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
|
||||
|
|
@ -81,7 +82,14 @@ const ACTION_PIPELINE_EDITOR = {
|
|||
describe('Web IDE link component', () => {
|
||||
let wrapper;
|
||||
|
||||
function createComponent(props, mountFn = shallowMountExtended) {
|
||||
function createComponent(
|
||||
props,
|
||||
{
|
||||
mountFn = shallowMountExtended,
|
||||
glFeatures = {},
|
||||
userCalloutDismisserSlotProps = { dismiss: jest.fn() },
|
||||
} = {},
|
||||
) {
|
||||
wrapper = mountFn(WebIdeLink, {
|
||||
propsData: {
|
||||
editUrl: TEST_EDIT_URL,
|
||||
|
|
@ -91,6 +99,9 @@ describe('Web IDE link component', () => {
|
|||
forkPath,
|
||||
...props,
|
||||
},
|
||||
provide: {
|
||||
glFeatures,
|
||||
},
|
||||
stubs: {
|
||||
GlModal: stubComponent(GlModal, {
|
||||
template: `
|
||||
|
|
@ -100,6 +111,11 @@ describe('Web IDE link component', () => {
|
|||
<slot name="modal-footer"></slot>
|
||||
</div>`,
|
||||
}),
|
||||
UserCalloutDismisser: stubComponent(UserCalloutDismisser, {
|
||||
render() {
|
||||
return this.$scopedSlots.default(userCalloutDismisserSlotProps);
|
||||
},
|
||||
}),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
|
@ -112,6 +128,8 @@ describe('Web IDE link component', () => {
|
|||
const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
|
||||
const findModal = () => wrapper.findComponent(GlModal);
|
||||
const findForkConfirmModal = () => wrapper.findComponent(ConfirmForkModal);
|
||||
const findUserCalloutDismisser = () => wrapper.findComponent(UserCalloutDismisser);
|
||||
const findNewWebIdeCalloutPopover = () => wrapper.findComponent(GlPopover);
|
||||
|
||||
it.each([
|
||||
{
|
||||
|
|
@ -322,9 +340,9 @@ describe('Web IDE link component', () => {
|
|||
});
|
||||
|
||||
it.each(testActions)('opens the modal when the button is clicked', async ({ props }) => {
|
||||
createComponent({ ...props, needsToFork: true }, mountExtended);
|
||||
createComponent({ ...props, needsToFork: true }, { mountFn: mountExtended });
|
||||
|
||||
await findActionsButton().trigger('click');
|
||||
await findActionsButton().findComponent(GlButton).trigger('click');
|
||||
|
||||
expect(findForkConfirmModal().props()).toEqual({
|
||||
visible: true,
|
||||
|
|
@ -377,7 +395,7 @@ describe('Web IDE link component', () => {
|
|||
gitpodEnabled: false,
|
||||
gitpodText,
|
||||
},
|
||||
mountExtended,
|
||||
{ mountFn: mountExtended },
|
||||
);
|
||||
|
||||
findLocalStorageSync().vm.$emit('input', ACTION_GITPOD.key);
|
||||
|
|
@ -401,4 +419,110 @@ describe('Web IDE link component', () => {
|
|||
expect(findModal().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Web IDE callout', () => {
|
||||
describe('vscode_web_ide feature flag is enabled and the edit button is not shown', () => {
|
||||
let dismiss;
|
||||
|
||||
beforeEach(() => {
|
||||
dismiss = jest.fn();
|
||||
createComponent(
|
||||
{
|
||||
showEditButton: false,
|
||||
},
|
||||
{ glFeatures: { vscodeWebIde: true }, userCalloutDismisserSlotProps: { dismiss } },
|
||||
);
|
||||
});
|
||||
it('does not skip the user_callout_dismisser query', () => {
|
||||
expect(findUserCalloutDismisser().props()).toEqual(
|
||||
expect.objectContaining({
|
||||
skipQuery: false,
|
||||
featureName: 'vscode_web_ide',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('mounts new web ide callout popover', () => {
|
||||
expect(findNewWebIdeCalloutPopover().props()).toEqual(
|
||||
expect.objectContaining({
|
||||
showCloseButton: '',
|
||||
target: 'web-ide-link',
|
||||
triggers: 'manual',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
describe.each`
|
||||
calloutStatus | shouldShowCallout | popoverVisibility | tooltipVisibility
|
||||
${'show'} | ${true} | ${true} | ${false}
|
||||
${'hide'} | ${false} | ${false} | ${true}
|
||||
`(
|
||||
'when should $calloutStatus web ide callout',
|
||||
({ shouldShowCallout, popoverVisibility, tooltipVisibility }) => {
|
||||
beforeEach(() => {
|
||||
createComponent(
|
||||
{
|
||||
showEditButton: false,
|
||||
},
|
||||
{
|
||||
glFeatures: { vscodeWebIde: true },
|
||||
userCalloutDismisserSlotProps: { shouldShowCallout, dismiss },
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
it(`popover visibility = ${popoverVisibility}`, () => {
|
||||
expect(findNewWebIdeCalloutPopover().props().show).toBe(popoverVisibility);
|
||||
});
|
||||
|
||||
it(`action button tooltip visibility = ${tooltipVisibility}`, () => {
|
||||
expect(findActionsButton().props().showActionTooltip).toBe(tooltipVisibility);
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
it('dismisses the callout when popover close button is clicked', () => {
|
||||
findNewWebIdeCalloutPopover().vm.$emit('close-button-clicked');
|
||||
|
||||
expect(dismiss).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('dismisses the callout when action button is clicked', () => {
|
||||
findActionsButton().vm.$emit('actionClicked');
|
||||
|
||||
expect(dismiss).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe.each`
|
||||
featureFlag | showEditButton
|
||||
${false} | ${true}
|
||||
${true} | ${false}
|
||||
${false} | ${false}
|
||||
`(
|
||||
'when vscode_web_ide=$featureFlag and showEditButton = $showEditButton',
|
||||
({ vscodeWebIde, showEditButton }) => {
|
||||
beforeEach(() => {
|
||||
createComponent(
|
||||
{
|
||||
showEditButton,
|
||||
},
|
||||
{ glFeatures: { vscodeWebIde } },
|
||||
);
|
||||
});
|
||||
|
||||
it('skips the user_callout_dismisser query', () => {
|
||||
expect(findUserCalloutDismisser().props().skipQuery).toBe(true);
|
||||
});
|
||||
|
||||
it('displays actions button tooltip', () => {
|
||||
expect(findActionsButton().props().showActionTooltip).toBe(true);
|
||||
});
|
||||
|
||||
it('mounts new web ide callout popover', () => {
|
||||
expect(findNewWebIdeCalloutPopover().exists()).toBe(false);
|
||||
});
|
||||
},
|
||||
);
|
||||
});
|
||||
});
@ -7,15 +7,33 @@ RSpec.describe 'Database config initializer', :reestablished_active_record_base
load Rails.root.join('config/initializers/database_config.rb')
end

it 'retains the correct database name for the connection' do
previous_db_name = ApplicationRecord.connection.pool.db_config.name
shared_examples 'does not change connection attributes' do
it 'retains the correct database name for connection' do
previous_db_name = database_base_model.connection.pool.db_config.name

subject
subject

expect(ApplicationRecord.connection.pool.db_config.name).to eq(previous_db_name)
expect(database_base_model.connection.pool.db_config.name).to eq(previous_db_name)
end

it 'does not overwrite custom pool settings' do
expect { subject }.not_to change { database_base_model.connection_db_config.pool }
end
end

it 'does not overwrite custom pool settings' do
expect { subject }.not_to change { ActiveRecord::Base.connection_db_config.pool }
context 'when main database connection' do
let(:database_base_model) { Gitlab::Database.database_base_models[:main] }

it_behaves_like 'does not change connection attributes'
end

context 'when ci database connection' do
before do
skip_if_multiple_databases_not_setup
end

let(:database_base_model) { Gitlab::Database.database_base_models[:ci] }

it_behaves_like 'does not change connection attributes'
end
end
@ -2,8 +2,9 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Ci::Config::External::Mapper do
|
||||
RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline_authoring do
|
||||
include StubRequests
|
||||
include RepoHelpers
|
||||
|
||||
let_it_be(:project) { create(:project, :repository) }
|
||||
let_it_be(:user) { project.owner }
|
||||
|
|
@ -12,13 +13,13 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
|
|||
let(:remote_url) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
|
||||
let(:template_file) { 'Auto-DevOps.gitlab-ci.yml' }
|
||||
let(:variables) { project.predefined_variables }
|
||||
let(:context_params) { { project: project, sha: '123456', user: user, variables: variables } }
|
||||
let(:context_params) { { project: project, sha: project.commit.sha, user: user, variables: variables } }
|
||||
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
|
||||
|
||||
let(:file_content) do
|
||||
<<~HEREDOC
|
||||
<<~YAML
|
||||
image: 'image:1.0'
|
||||
HEREDOC
|
||||
YAML
|
||||
end
|
||||
|
||||
subject(:mapper) { described_class.new(values, context) }
|
||||
|
|
@ -379,17 +380,28 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
|
|||
end
|
||||
|
||||
context 'when local file path has wildcard' do
|
||||
let(:project) { create(:project, :repository) }
|
||||
let_it_be(:project) { create(:project, :repository) }
|
||||
|
||||
let(:values) do
|
||||
{ include: 'myfolder/*.yml' }
|
||||
end
|
||||
|
||||
before do
|
||||
allow_next_instance_of(Repository) do |repository|
|
||||
allow(repository).to receive(:search_files_by_wildcard_path).with('myfolder/*.yml', '123456') do
|
||||
['myfolder/file1.yml', 'myfolder/file2.yml']
|
||||
end
|
||||
let(:project_files) do
|
||||
{
|
||||
'myfolder/file1.yml' => <<~YAML,
|
||||
my_build:
|
||||
script: echo Hello World
|
||||
YAML
|
||||
'myfolder/file2.yml' => <<~YAML
|
||||
my_test:
|
||||
script: echo Hello World
|
||||
YAML
|
||||
}
|
||||
end
|
||||
|
||||
around do |example|
|
||||
create_and_delete_files(project, project_files) do
|
||||
example.run
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -2,17 +2,31 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Ci::Config::External::Processor do
|
||||
RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipeline_authoring do
|
||||
include StubRequests
|
||||
include RepoHelpers
|
||||
|
||||
let_it_be(:project) { create(:project, :repository) }
|
||||
let_it_be_with_reload(:another_project) { create(:project, :repository) }
|
||||
let_it_be(:user) { create(:user) }
|
||||
|
||||
let(:sha) { '12345' }
|
||||
let_it_be_with_reload(:project) { create(:project, :repository) }
|
||||
let_it_be_with_reload(:another_project) { create(:project, :repository) }
|
||||
|
||||
let(:project_files) { {} }
|
||||
let(:other_project_files) { {} }
|
||||
|
||||
let(:sha) { project.commit.sha }
|
||||
let(:context_params) { { project: project, sha: sha, user: user } }
|
||||
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
|
||||
let(:processor) { described_class.new(values, context) }
|
||||
|
||||
subject(:processor) { described_class.new(values, context) }
|
||||
|
||||
around do |example|
|
||||
create_and_delete_files(project, project_files) do
|
||||
create_and_delete_files(another_project, other_project_files) do
|
||||
example.run
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
before do
|
||||
project.add_developer(user)
|
||||
|
|
@ -63,7 +77,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
let(:remote_file) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
|
||||
let(:values) { { include: remote_file, image: 'image:1.0' } }
|
||||
let(:external_file_content) do
|
||||
<<-HEREDOC
|
||||
<<-YAML
|
||||
before_script:
|
||||
- apt-get update -qq && apt-get install -y -qq sqlite3 libsqlite3-dev nodejs
|
||||
- ruby -v
|
||||
|
|
@ -77,7 +91,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
rubocop:
|
||||
script:
|
||||
- bundle exec rubocop
|
||||
HEREDOC
|
||||
YAML
|
||||
end
|
||||
|
||||
before do
|
||||
|
|
@ -98,7 +112,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
let(:remote_file) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
|
||||
let(:values) { { include: remote_file, image: 'image:1.0' } }
|
||||
let(:external_file_content) do
|
||||
<<-HEREDOC
|
||||
<<-YAML
|
||||
include:
|
||||
- local: another-file.yml
|
||||
rules:
|
||||
|
|
@ -107,7 +121,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
rspec:
|
||||
script:
|
||||
- bundle exec rspec
|
||||
HEREDOC
|
||||
YAML
|
||||
end
|
||||
|
||||
before do
|
||||
|
|
@ -127,19 +141,16 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
context 'with a valid local external file is defined' do
|
||||
let(:values) { { include: '/lib/gitlab/ci/templates/template.yml', image: 'image:1.0' } }
|
||||
let(:local_file_content) do
|
||||
<<-HEREDOC
|
||||
<<-YAML
|
||||
before_script:
|
||||
- apt-get update -qq && apt-get install -y -qq sqlite3 libsqlite3-dev nodejs
|
||||
- ruby -v
|
||||
- which ruby
|
||||
- bundle install --jobs $(nproc) "${FLAGS[@]}"
|
||||
HEREDOC
|
||||
YAML
|
||||
end
|
||||
|
||||
before do
|
||||
allow_any_instance_of(Gitlab::Ci::Config::External::File::Local)
|
||||
.to receive(:fetch_local_content).and_return(local_file_content)
|
||||
end
|
||||
let(:project_files) { { '/lib/gitlab/ci/templates/template.yml' => local_file_content } }
|
||||
|
||||
it 'appends the file to the values' do
|
||||
output = processor.perform
|
||||
|
|
@ -153,6 +164,11 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
|
||||
context 'with multiple external files are defined' do
|
||||
let(:remote_file) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
|
||||
|
||||
let(:local_file_content) do
|
||||
File.read(Rails.root.join('spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml'))
|
||||
end
|
||||
|
||||
let(:external_files) do
|
||||
[
|
||||
'/spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml',
|
||||
|
|
@ -168,20 +184,21 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
end
|
||||
|
||||
let(:remote_file_content) do
|
||||
<<-HEREDOC
|
||||
<<-YAML
|
||||
stages:
|
||||
- build
|
||||
- review
|
||||
- cleanup
|
||||
HEREDOC
|
||||
YAML
|
||||
end
|
||||
|
||||
let(:project_files) do
|
||||
{
|
||||
'/spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml' => local_file_content
|
||||
}
|
||||
end
|
||||
|
||||
before do
|
||||
local_file_content = File.read(Rails.root.join('spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml'))
|
||||
|
||||
allow_any_instance_of(Gitlab::Ci::Config::External::File::Local)
|
||||
.to receive(:fetch_local_content).and_return(local_file_content)
|
||||
|
||||
stub_full_request(remote_file).to_return(body: remote_file_content)
|
||||
end
|
||||
|
||||
|
|
@ -199,10 +216,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
|
||||
let(:local_file_content) { 'invalid content file ////' }
|
||||
|
||||
before do
|
||||
allow_any_instance_of(Gitlab::Ci::Config::External::File::Local)
|
||||
.to receive(:fetch_local_content).and_return(local_file_content)
|
||||
end
|
||||
let(:project_files) { { '/lib/gitlab/ci/templates/template.yml' => local_file_content } }
|
||||
|
||||
it 'raises an error' do
|
||||
expect { processor.perform }.to raise_error(
|
||||
|
|
@ -222,9 +236,9 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
end
|
||||
|
||||
let(:remote_file_content) do
|
||||
<<~HEREDOC
|
||||
<<~YAML
|
||||
image: php:5-fpm-alpine
|
||||
HEREDOC
|
||||
YAML
|
||||
end
|
||||
|
||||
it 'takes precedence' do
|
||||
|
|
@ -244,31 +258,32 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
}
|
||||
end
|
||||
|
||||
let(:project_files) do
|
||||
{
|
||||
'/local/file.yml' => <<~YAML
|
||||
include:
|
||||
- template: Ruby.gitlab-ci.yml
|
||||
- remote: http://my.domain.com/config.yml
|
||||
- project: #{another_project.full_path}
|
||||
file: /templates/my-workflow.yml
|
||||
YAML
|
||||
}
|
||||
end
|
||||
|
||||
let(:other_project_files) do
|
||||
{
|
||||
'/templates/my-workflow.yml' => <<~YAML,
|
||||
include:
|
||||
- local: /templates/my-build.yml
|
||||
YAML
|
||||
'/templates/my-build.yml' => <<~YAML
|
||||
my_build:
|
||||
script: echo Hello World
|
||||
YAML
|
||||
}
|
||||
end
|
||||
|
||||
before do
|
||||
allow(project.repository).to receive(:blob_data_at).with('12345', '/local/file.yml') do
|
||||
<<~HEREDOC
|
||||
include:
|
||||
- template: Ruby.gitlab-ci.yml
|
||||
- remote: http://my.domain.com/config.yml
|
||||
- project: #{another_project.full_path}
|
||||
file: /templates/my-workflow.yml
|
||||
HEREDOC
|
||||
end
|
||||
|
||||
allow_any_instance_of(Repository).to receive(:blob_data_at).with(another_project.commit.id, '/templates/my-workflow.yml') do
|
||||
<<~HEREDOC
|
||||
include:
|
||||
- local: /templates/my-build.yml
|
||||
HEREDOC
|
||||
end
|
||||
|
||||
allow_any_instance_of(Repository).to receive(:blob_data_at).with(another_project.commit.id, '/templates/my-build.yml') do
|
||||
<<~HEREDOC
|
||||
my_build:
|
||||
script: echo Hello World
|
||||
HEREDOC
|
||||
end
|
||||
|
||||
stub_full_request('http://my.domain.com/config.yml')
|
||||
.to_return(body: 'remote_build: { script: echo Hello World }')
|
||||
end
|
||||
|
|
@ -299,32 +314,32 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
expect(context.includes).to contain_exactly(
|
||||
{ type: :local,
|
||||
location: '/local/file.yml',
|
||||
blob: "http://localhost/#{project.full_path}/-/blob/12345/local/file.yml",
|
||||
raw: "http://localhost/#{project.full_path}/-/raw/12345/local/file.yml",
|
||||
blob: "http://localhost/#{project.full_path}/-/blob/#{sha}/local/file.yml",
|
||||
raw: "http://localhost/#{project.full_path}/-/raw/#{sha}/local/file.yml",
|
||||
extra: {},
|
||||
context_project: project.full_path,
|
||||
context_sha: '12345' },
|
||||
context_sha: sha },
|
||||
{ type: :template,
|
||||
location: 'Ruby.gitlab-ci.yml',
|
||||
blob: nil,
|
||||
raw: 'https://gitlab.com/gitlab-org/gitlab/-/raw/master/lib/gitlab/ci/templates/Ruby.gitlab-ci.yml',
|
||||
extra: {},
|
||||
context_project: project.full_path,
|
||||
context_sha: '12345' },
|
||||
context_sha: sha },
|
||||
{ type: :remote,
|
||||
location: 'http://my.domain.com/config.yml',
|
||||
blob: nil,
|
||||
raw: "http://my.domain.com/config.yml",
|
||||
extra: {},
|
||||
context_project: project.full_path,
|
||||
context_sha: '12345' },
|
||||
context_sha: sha },
|
||||
{ type: :file,
|
||||
location: '/templates/my-workflow.yml',
|
||||
blob: "http://localhost/#{another_project.full_path}/-/blob/#{another_project.commit.sha}/templates/my-workflow.yml",
|
||||
raw: "http://localhost/#{another_project.full_path}/-/raw/#{another_project.commit.sha}/templates/my-workflow.yml",
|
||||
extra: { project: another_project.full_path, ref: 'HEAD' },
|
||||
context_project: project.full_path,
|
||||
context_sha: '12345' },
|
||||
context_sha: sha },
|
||||
{ type: :local,
|
||||
location: '/templates/my-build.yml',
|
||||
blob: "http://localhost/#{another_project.full_path}/-/blob/#{another_project.commit.sha}/templates/my-build.yml",
|
||||
|
|
@ -393,17 +408,17 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
}
|
||||
end
|
||||
|
||||
let(:other_project_files) do
|
||||
{
|
||||
'/templates/my-build.yml' => <<~YAML
|
||||
my_build:
|
||||
script: echo Hello World
|
||||
YAML
|
||||
}
|
||||
end
|
||||
|
||||
before do
|
||||
another_project.add_developer(user)
|
||||
|
||||
allow_next_instance_of(Repository) do |repository|
|
||||
allow(repository).to receive(:blob_data_at).with(another_project.commit.id, '/templates/my-build.yml') do
|
||||
<<~HEREDOC
|
||||
my_build:
|
||||
script: echo Hello World
|
||||
HEREDOC
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
it 'appends the file to the values' do
|
||||
|
|
@ -423,24 +438,21 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
}
|
||||
end
|
||||
|
||||
let(:other_project_files) do
|
||||
{
|
||||
'/templates/my-build.yml' => <<~YAML,
|
||||
my_build:
|
||||
script: echo Hello World
|
||||
YAML
|
||||
'/templates/my-test.yml' => <<~YAML
|
||||
my_test:
|
||||
script: echo Hello World
|
||||
YAML
|
||||
}
|
||||
end
|
||||
|
||||
before do
|
||||
another_project.add_developer(user)
|
||||
|
||||
allow_next_instance_of(Repository) do |repository|
|
||||
allow(repository).to receive(:blob_data_at).with(another_project.commit.id, '/templates/my-build.yml') do
|
||||
<<~HEREDOC
|
||||
my_build:
|
||||
script: echo Hello World
|
||||
HEREDOC
|
||||
end
|
||||
|
||||
allow(repository).to receive(:blob_data_at).with(another_project.commit.id, '/templates/my-test.yml') do
|
||||
<<~HEREDOC
|
||||
my_test:
|
||||
script: echo Hello World
|
||||
HEREDOC
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
it 'appends the file to the values' do
|
||||
|
|
@ -458,45 +470,34 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
raw: "http://localhost/#{another_project.full_path}/-/raw/#{another_project.commit.sha}/templates/my-build.yml",
|
||||
extra: { project: another_project.full_path, ref: 'HEAD' },
|
||||
context_project: project.full_path,
|
||||
context_sha: '12345' },
|
||||
context_sha: sha },
|
||||
{ type: :file,
|
||||
blob: "http://localhost/#{another_project.full_path}/-/blob/#{another_project.commit.sha}/templates/my-test.yml",
|
||||
raw: "http://localhost/#{another_project.full_path}/-/raw/#{another_project.commit.sha}/templates/my-test.yml",
|
||||
location: '/templates/my-test.yml',
|
||||
extra: { project: another_project.full_path, ref: 'HEAD' },
|
||||
context_project: project.full_path,
|
||||
context_sha: '12345' }
|
||||
context_sha: sha }
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when local file path has wildcard' do
|
||||
let(:project) { create(:project, :repository) }
|
||||
|
||||
let(:values) do
|
||||
{ include: 'myfolder/*.yml', image: 'image:1.0' }
|
||||
end
|
||||
|
||||
before do
|
||||
allow_next_instance_of(Repository) do |repository|
|
||||
allow(repository).to receive(:search_files_by_wildcard_path).with('myfolder/*.yml', sha) do
|
||||
['myfolder/file1.yml', 'myfolder/file2.yml']
|
||||
end
|
||||
|
||||
allow(repository).to receive(:blob_data_at).with(sha, 'myfolder/file1.yml') do
|
||||
<<~HEREDOC
|
||||
my_build:
|
||||
script: echo Hello World
|
||||
HEREDOC
|
||||
end
|
||||
|
||||
allow(repository).to receive(:blob_data_at).with(sha, 'myfolder/file2.yml') do
|
||||
<<~HEREDOC
|
||||
my_test:
|
||||
script: echo Hello World
|
||||
HEREDOC
|
||||
end
|
||||
end
|
||||
let(:project_files) do
|
||||
{
|
||||
'myfolder/file1.yml' => <<~YAML,
|
||||
my_build:
|
||||
script: echo Hello World
|
||||
YAML
|
||||
'myfolder/file2.yml' => <<~YAML
|
||||
my_test:
|
||||
script: echo Hello World
|
||||
YAML
|
||||
}
|
||||
end
|
||||
|
||||
it 'fetches the matched files' do
|
||||
|
|
@ -510,18 +511,18 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
|
|||
expect(context.includes).to contain_exactly(
|
||||
{ type: :local,
|
||||
location: 'myfolder/file1.yml',
|
||||
blob: "http://localhost/#{project.full_path}/-/blob/12345/myfolder/file1.yml",
|
||||
raw: "http://localhost/#{project.full_path}/-/raw/12345/myfolder/file1.yml",
|
||||
blob: "http://localhost/#{project.full_path}/-/blob/#{sha}/myfolder/file1.yml",
|
||||
raw: "http://localhost/#{project.full_path}/-/raw/#{sha}/myfolder/file1.yml",
|
||||
extra: {},
|
||||
context_project: project.full_path,
|
||||
context_sha: '12345' },
|
||||
context_sha: sha },
|
||||
{ type: :local,
|
||||
blob: "http://localhost/#{project.full_path}/-/blob/12345/myfolder/file2.yml",
|
||||
raw: "http://localhost/#{project.full_path}/-/raw/12345/myfolder/file2.yml",
|
||||
blob: "http://localhost/#{project.full_path}/-/blob/#{sha}/myfolder/file2.yml",
|
||||
raw: "http://localhost/#{project.full_path}/-/raw/#{sha}/myfolder/file2.yml",
|
||||
location: 'myfolder/file2.yml',
|
||||
extra: {},
|
||||
context_project: project.full_path,
|
||||
context_sha: '12345' }
|
||||
context_sha: sha }
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -139,7 +139,7 @@ RSpec.describe Gitlab::Database do
|
|||
describe '.db_config_for_connection' do
|
||||
context 'when the regular connection is used' do
|
||||
it 'returns db_config' do
|
||||
connection = ActiveRecord::Base.retrieve_connection
|
||||
connection = ApplicationRecord.retrieve_connection
|
||||
|
||||
expect(described_class.db_config_for_connection(connection)).to eq(connection.pool.db_config)
|
||||
end
|
||||
|
|
@ -147,12 +147,15 @@ RSpec.describe Gitlab::Database do
|
|||
|
||||
context 'when the connection is LoadBalancing::ConnectionProxy', :database_replica do
|
||||
it 'returns primary db config even if ambiguous queries default to replica' do
|
||||
Gitlab::Database::LoadBalancing::Session.current.use_primary!
|
||||
primary_config = described_class.db_config_for_connection(ActiveRecord::Base.connection)
|
||||
Gitlab::Database.database_base_models_using_load_balancing.each_value do |database_base_model|
|
||||
connection = database_base_model.connection
|
||||
Gitlab::Database::LoadBalancing::Session.current.use_primary!
|
||||
primary_config = described_class.db_config_for_connection(connection)
|
||||
|
||||
Gitlab::Database::LoadBalancing::Session.clear_session
|
||||
Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do
|
||||
expect(described_class.db_config_for_connection(ActiveRecord::Base.connection)).to eq(primary_config)
|
||||
Gitlab::Database::LoadBalancing::Session.clear_session
|
||||
Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do
|
||||
expect(described_class.db_config_for_connection(connection)).to eq(primary_config)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -180,11 +183,16 @@ RSpec.describe Gitlab::Database do
|
|||
end
|
||||
|
||||
context 'when replicas are configured', :database_replica do
|
||||
it 'returns the name for a replica' do
|
||||
replica = ActiveRecord::Base.load_balancer.host
|
||||
|
||||
it 'returns the main_replica for a main database replica' do
|
||||
replica = ApplicationRecord.load_balancer.host
|
||||
expect(described_class.db_config_name(replica)).to eq('main_replica')
|
||||
end
|
||||
|
||||
it 'returns the ci_replica for a ci database replica' do
|
||||
skip_if_multiple_databases_not_setup
|
||||
replica = Ci::ApplicationRecord.load_balancer.host
|
||||
expect(described_class.db_config_name(replica)).to eq('ci_replica')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -214,13 +222,17 @@ RSpec.describe Gitlab::Database do
|
|||
expect(described_class.gitlab_schemas_for_connection(Ci::Build.connection)).to include(:gitlab_ci, :gitlab_shared)
|
||||
end
|
||||
|
||||
# rubocop:disable Database/MultipleDatabases
|
||||
it 'does return gitlab_ci when a ActiveRecord::Base is using CI connection' do
|
||||
with_reestablished_active_record_base do
|
||||
reconfigure_db_connection(model: ActiveRecord::Base, config_model: Ci::Build)
|
||||
|
||||
expect(described_class.gitlab_schemas_for_connection(ActiveRecord::Base.connection)).to include(:gitlab_ci, :gitlab_shared)
|
||||
expect(
|
||||
described_class.gitlab_schemas_for_connection(ActiveRecord::Base.connection)
|
||||
).to include(:gitlab_ci, :gitlab_shared)
|
||||
end
|
||||
end
|
||||
# rubocop:enable Database/MultipleDatabases
|
||||
|
||||
it 'does return a valid schema for a replica connection' do
|
||||
with_replica_pool_for(ActiveRecord::Base) do |main_replica_pool|
|
||||
|
|
@ -281,7 +293,8 @@ RSpec.describe Gitlab::Database do
|
|||
|
||||
it 'does return empty for non-adopted connections' do
|
||||
new_connection = ActiveRecord::Base.postgresql_connection(
|
||||
ActiveRecord::Base.connection_db_config.configuration_hash)
|
||||
ActiveRecord::Base.connection_db_config.configuration_hash # rubocop:disable Database/MultipleDatabases
|
||||
)
|
||||
|
||||
expect(described_class.gitlab_schemas_for_connection(new_connection)).to be_nil
|
||||
ensure
|
||||
|
|
@ -405,7 +418,7 @@ RSpec.describe Gitlab::Database do
|
|||
context 'within a transaction block' do
|
||||
it 'publishes a transaction event' do
|
||||
events = subscribe_events do
|
||||
ActiveRecord::Base.transaction do
|
||||
ApplicationRecord.transaction do
|
||||
User.first
|
||||
end
|
||||
end
|
||||
|
|
@ -424,10 +437,11 @@ RSpec.describe Gitlab::Database do
|
|||
context 'within an empty transaction block' do
|
||||
it 'publishes a transaction event' do
|
||||
events = subscribe_events do
|
||||
ActiveRecord::Base.transaction {}
|
||||
ApplicationRecord.transaction {}
|
||||
Ci::ApplicationRecord.transaction {}
|
||||
end
|
||||
|
||||
expect(events.length).to be(1)
|
||||
expect(events.length).to be(2)
|
||||
|
||||
event = events.first
|
||||
expect(event).not_to be_nil
|
||||
|
|
@ -441,9 +455,9 @@ RSpec.describe Gitlab::Database do
|
|||
context 'within a nested transaction block' do
|
||||
it 'publishes multiple transaction events' do
|
||||
events = subscribe_events do
|
||||
ActiveRecord::Base.transaction do
|
||||
ActiveRecord::Base.transaction do
|
||||
ActiveRecord::Base.transaction do
|
||||
ApplicationRecord.transaction do
|
||||
ApplicationRecord.transaction do
|
||||
ApplicationRecord.transaction do
|
||||
User.first
|
||||
end
|
||||
end
|
||||
|
|
@ -465,7 +479,7 @@ RSpec.describe Gitlab::Database do
|
|||
context 'within a cancelled transaction block' do
|
||||
it 'publishes multiple transaction events' do
|
||||
events = subscribe_events do
|
||||
ActiveRecord::Base.transaction do
|
||||
ApplicationRecord.transaction do
|
||||
User.first
|
||||
raise ActiveRecord::Rollback
|
||||
end
|
||||
|
|
|
|||
|
|
@ -153,32 +153,90 @@ RSpec.describe Gitlab::Instrumentation::RedisBase, :request_store do
|
|||
end
|
||||
|
||||
describe '.redis_cluster_validate!' do
|
||||
context 'Rails environments' do
|
||||
where(:env, :should_raise) do
|
||||
'production' | false
|
||||
'staging' | false
|
||||
'development' | true
|
||||
'test' | true
|
||||
end
|
||||
let(:args) { [[:mget, 'foo', 'bar']] }
|
||||
|
||||
before do
|
||||
instrumentation_class_a.enable_redis_cluster_validation
|
||||
before do
|
||||
instrumentation_class_a.enable_redis_cluster_validation
|
||||
end
|
||||
|
||||
context 'Rails environments' do
|
||||
where(:env, :allowed, :should_raise) do
|
||||
'production' | false | false
|
||||
'production' | true | false
|
||||
'staging' | false | false
|
||||
'staging' | true | false
|
||||
'development' | true | false
|
||||
'development' | false | true
|
||||
'test' | true | false
|
||||
'test' | false | true
|
||||
end
|
||||
|
||||
with_them do
|
||||
it do
|
||||
stub_rails_env(env)
|
||||
|
||||
args = [[:mget, 'foo', 'bar']]
|
||||
validation = -> { instrumentation_class_a.redis_cluster_validate!(args) }
|
||||
under_test = if allowed
|
||||
-> { Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands(&validation) }
|
||||
else
|
||||
validation
|
||||
end
|
||||
|
||||
if should_raise
|
||||
expect { instrumentation_class_a.redis_cluster_validate!(args) }
|
||||
.to raise_error(::Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError)
|
||||
expect(&under_test).to raise_error(::Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError)
|
||||
else
|
||||
expect { instrumentation_class_a.redis_cluster_validate!(args) }.not_to raise_error
|
||||
expect(&under_test).not_to raise_error
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'validate_allowed_cross_slot_commands feature flag' do
|
||||
context 'when disabled' do
|
||||
before do
|
||||
stub_feature_flags(validate_allowed_cross_slot_commands: false)
|
||||
end
|
||||
|
||||
it 'skips check' do
|
||||
expect(
|
||||
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
|
||||
instrumentation_class_a.redis_cluster_validate!(args)
|
||||
end
|
||||
).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when enabled' do
|
||||
before do
|
||||
stub_feature_flags(validate_allowed_cross_slot_commands: true)
|
||||
end
|
||||
|
||||
it 'performs check' do
|
||||
expect(
|
||||
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
|
||||
instrumentation_class_a.redis_cluster_validate!(args)
|
||||
end
|
||||
).to eq(false)
|
||||
end
|
||||
end
|
||||
|
||||
it 'looks up feature-flag once per request' do
|
||||
stub_feature_flags(validate_allowed_cross_slot_commands: true)
|
||||
expect(
|
||||
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
|
||||
instrumentation_class_a.redis_cluster_validate!(args)
|
||||
end
|
||||
).to eq(false)
|
||||
|
||||
# even with validate set to false, redis_cluster_validate! will use the
|
||||
# SafeRequestStore cached feature flag value and perform validation
|
||||
stub_feature_flags(validate_allowed_cross_slot_commands: false)
|
||||
expect(
|
||||
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
|
||||
instrumentation_class_a.redis_cluster_validate!(args)
|
||||
end
|
||||
).to eq(false)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -7,83 +7,116 @@ require 'rspec-parameterized'
|
|||
RSpec.describe Gitlab::Instrumentation::RedisClusterValidator do
|
||||
include RailsHelpers
|
||||
|
||||
describe '.validate!' do
|
||||
describe '.validate' do
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
||||
where(:command, :arguments, :should_raise) do
|
||||
:rename | %w(foo bar) | true
|
||||
:RENAME | %w(foo bar) | true
|
||||
'rename' | %w(foo bar) | true
|
||||
'RENAME' | %w(foo bar) | true
|
||||
:rename | %w(iaa ahy) | false # 'iaa' and 'ahy' hash to the same slot
|
||||
:rename | %w({foo}:1 {foo}:2) | false
|
||||
:rename | %w(foo foo bar) | false # This is not a valid command but should not raise here
|
||||
:mget | %w(foo bar) | true
|
||||
:mget | %w(foo foo bar) | true
|
||||
:mget | %w(foo foo) | false
|
||||
:blpop | %w(foo bar 1) | true
|
||||
:blpop | %w(foo foo 1) | false
|
||||
:mset | %w(foo a bar a) | true
|
||||
:mset | %w(foo a foo a) | false
|
||||
:del | %w(foo bar) | true
|
||||
:del | [%w(foo bar)] | true # Arguments can be a nested array
|
||||
:del | %w(foo foo) | false
|
||||
:hset | %w(foo bar) | false # Not a multi-key command
|
||||
:mget | [] | false # This is invalid, but not because it's a cross-slot command
|
||||
where(:command, :arguments, :keys, :is_valid) do
|
||||
:rename | %w(foo bar) | 2 | false
|
||||
:RENAME | %w(foo bar) | 2 | false
|
||||
'rename' | %w(foo bar) | 2 | false
|
||||
'RENAME' | %w(foo bar) | 2 | false
|
||||
:rename | %w(iaa ahy) | 2 | true # 'iaa' and 'ahy' hash to the same slot
|
||||
:rename | %w({foo}:1 {foo}:2) | 2 | true
|
||||
:rename | %w(foo foo bar) | 2 | true # This is not a valid command but should not raise here
|
||||
:mget | %w(foo bar) | 2 | false
|
||||
:mget | %w(foo foo bar) | 3 | false
|
||||
:mget | %w(foo foo) | 2 | true
|
||||
:blpop | %w(foo bar 1) | 2 | false
|
||||
:blpop | %w(foo foo 1) | 2 | true
|
||||
:mset | %w(foo a bar a) | 2 | false
|
||||
:mset | %w(foo a foo a) | 2 | true
|
||||
:del | %w(foo bar) | 2 | false
|
||||
:del | [%w(foo bar)] | 2 | false # Arguments can be a nested array
|
||||
:del | %w(foo foo) | 2 | true
|
||||
:hset | %w(foo bar) | 1 | nil # Single key write
|
||||
:get | %w(foo) | 1 | nil # Single key read
|
||||
:mget | [] | 0 | true # This is invalid, but not because it's a cross-slot command
|
||||
end
|
||||
|
||||
with_them do
|
||||
it do
|
||||
args = [[command] + arguments]
|
||||
|
||||
if should_raise
|
||||
expect { described_class.validate!(args) }
|
||||
.to raise_error(described_class::CrossSlotError)
|
||||
if is_valid.nil?
|
||||
expect(described_class.validate(args, true)).to eq(nil)
|
||||
else
|
||||
expect { described_class.validate!(args) }.not_to raise_error
|
||||
expect(described_class.validate(args, true)[:valid]).to eq(is_valid)
|
||||
expect(described_class.validate(args, true)[:allowed]).to eq(false)
|
||||
expect(described_class.validate(args, true)[:command_name]).to eq(command.to_s.upcase)
|
||||
expect(described_class.validate(args, true)[:key_count]).to eq(keys)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
where(:arguments, :should_raise) do
|
||||
[[:get, "foo"], [:get, "bar"]] | true
|
||||
[[:get, "foo"], [:mget, "foo", "bar"]] | true # mix of single-key and multi-key cmds
|
||||
[[:get, "{foo}:name"], [:get, "{foo}:profile"]] | false
|
||||
[[:del, "foo"], [:del, "bar"]] | true
|
||||
[] | false # pipeline or transaction opened and closed without ops
|
||||
where(:arguments, :should_raise, :output) do
|
||||
[
|
||||
[
|
||||
[[:get, "foo"], [:get, "bar"]],
|
||||
true,
|
||||
{ valid: false, key_count: 2, command_name: 'PIPELINE/MULTI', allowed: false, command: "get foo" }
|
||||
],
|
||||
[
|
||||
[[:get, "foo"], [:mget, "foo", "bar"]],
|
||||
true,
|
||||
{ valid: false, key_count: 3, command_name: 'PIPELINE/MULTI', allowed: false, command: "get foo" }
|
||||
],
|
||||
[
|
||||
[[:get, "{foo}:name"], [:get, "{foo}:profile"]],
|
||||
false,
|
||||
{ valid: true, key_count: 2, command_name: 'PIPELINE/MULTI', allowed: false, command: "get {foo}:name" }
|
||||
],
|
||||
[
|
||||
[[:del, "foo"], [:del, "bar"]],
|
||||
true,
|
||||
{ valid: false, key_count: 2, command_name: 'PIPELINE/MULTI', allowed: false, command: "del foo" }
|
||||
],
|
||||
[
|
||||
[],
|
||||
false,
|
||||
nil # pipeline or transaction opened and closed without ops
|
||||
]
|
||||
]
|
||||
end
|
||||
|
||||
with_them do
|
||||
it do
|
||||
if should_raise
|
||||
expect { described_class.validate!(arguments) }
|
||||
.to raise_error(described_class::CrossSlotError)
|
||||
else
|
||||
expect { described_class.validate!(arguments) }.not_to raise_error
|
||||
end
|
||||
expect(described_class.validate(arguments, true)).to eq(output)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '.allow_cross_slot_commands' do
|
||||
it 'does not raise for invalid arguments' do
|
||||
expect do
|
||||
described_class.allow_cross_slot_commands do
|
||||
described_class.validate!([[:mget, 'foo', 'bar']])
|
||||
end
|
||||
end.not_to raise_error
|
||||
context 'with validate_allowed_cmd set to true' do
|
||||
it 'runs validation for allowed commands' do
|
||||
expect(
|
||||
described_class.allow_cross_slot_commands do
|
||||
described_class.validate([[:mget, 'foo', 'bar']], true)
|
||||
end
|
||||
).to eq({ valid: false, key_count: 2, command_name: 'MGET', allowed: true,
|
||||
command: "mget foo bar" })
|
||||
end
|
||||
|
||||
it 'allows nested invocation' do
|
||||
expect(
|
||||
described_class.allow_cross_slot_commands do
|
||||
described_class.allow_cross_slot_commands do
|
||||
described_class.validate([[:mget, 'foo', 'bar']], true)
|
||||
end
|
||||
|
||||
described_class.validate([[:mget, 'foo', 'bar']], true)
|
||||
end
|
||||
).to eq({ valid: false, key_count: 2, command_name: 'MGET', allowed: true,
|
||||
command: "mget foo bar" })
|
||||
end
|
||||
end
|
||||
|
||||
it 'allows nested invocation' do
|
||||
expect do
|
||||
described_class.allow_cross_slot_commands do
|
||||
context 'with validate_allowed_cmd set to false' do
|
||||
it 'does not run for allowed commands' do
|
||||
expect(
|
||||
described_class.allow_cross_slot_commands do
|
||||
described_class.validate!([[:mget, 'foo', 'bar']])
|
||||
described_class.validate([[:mget, 'foo', 'bar']], false)
|
||||
end
|
||||
|
||||
described_class.validate!([[:mget, 'foo', 'bar']])
|
||||
end
|
||||
end.not_to raise_error
|
||||
).to eq(nil)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -64,12 +64,6 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
|
|||
end
|
||||
end
|
||||
|
||||
it 'skips count for non-cross-slot requests' do
|
||||
expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
|
||||
|
||||
Gitlab::Redis::SharedState.with { |redis| redis.call(:mget, '{foo}bar', '{foo}baz') }
|
||||
end
|
||||
|
||||
it 'counts exceptions' do
|
||||
expect(instrumentation_class).to receive(:instance_count_exception)
|
||||
.with(instance_of(Redis::CommandError)).and_call_original
|
||||
|
|
@ -82,16 +76,30 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
|
|||
end.to raise_exception(Redis::CommandError)
|
||||
end
|
||||
|
||||
context 'in production env' do
|
||||
context 'in production environment' do
|
||||
before do
|
||||
stub_rails_env('production') # to avoid raising CrossSlotError
|
||||
end
|
||||
|
||||
it 'counts cross-slot requests' do
|
||||
it 'counts disallowed cross-slot requests' do
|
||||
expect(instrumentation_class).to receive(:increment_cross_slot_request_count).and_call_original
|
||||
|
||||
Gitlab::Redis::SharedState.with { |redis| redis.call(:mget, 'foo', 'bar') }
|
||||
end
|
||||
|
||||
it 'counts allowed cross-slot requests' do
|
||||
expect(instrumentation_class).to receive(:increment_cross_slot_request_count).and_call_original
|
||||
|
||||
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
|
||||
Gitlab::Redis::SharedState.with { |redis| redis.call(:mget, 'foo', 'bar') }
|
||||
end
|
||||
end
|
||||
|
||||
it 'skips count for non-cross-slot requests' do
|
||||
expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
|
||||
|
||||
Gitlab::Redis::SharedState.with { |redis| redis.call(:mget, '{foo}bar', '{foo}baz') }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -7,7 +7,8 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
|
|||
|
||||
let(:env) { {} }
|
||||
let(:subscriber) { described_class.new }
|
||||
let(:connection) { ActiveRecord::Base.retrieve_connection }
|
||||
|
||||
let(:connection) { Gitlab::Database.database_base_models[:main].retrieve_connection }
|
||||
let(:db_config_name) { ::Gitlab::Database.db_config_name(connection) }
|
||||
|
||||
describe '.load_balancing_metric_counter_keys' do
|
||||
|
|
@ -155,7 +156,9 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
|
|||
end
|
||||
|
||||
it 'captures the metrics for web only' do
|
||||
expect(web_transaction).to receive(:observe).with(:gitlab_database_transaction_seconds, 0.23, { db_config_name: db_config_name })
|
||||
expect(web_transaction).to receive(:observe).with(
|
||||
:gitlab_database_transaction_seconds, 0.23, { db_config_name: db_config_name }
|
||||
)
|
||||
|
||||
expect(background_transaction).not_to receive(:observe)
|
||||
expect(background_transaction).not_to receive(:increment)
|
||||
|
|
@ -175,7 +178,9 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
|
|||
end
|
||||
|
||||
it 'captures the metrics for web only' do
|
||||
expect(web_transaction).to receive(:observe).with(:gitlab_database_transaction_seconds, 0.23, { db_config_name: db_config_name })
|
||||
expect(web_transaction).to receive(:observe).with(
|
||||
:gitlab_database_transaction_seconds, 0.23, { db_config_name: db_config_name }
|
||||
)
|
||||
|
||||
expect(background_transaction).not_to receive(:observe)
|
||||
expect(background_transaction).not_to receive(:increment)
|
||||
|
|
@ -195,7 +200,9 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
|
|||
end
|
||||
|
||||
it 'captures the metrics for web only' do
|
||||
expect(background_transaction).to receive(:observe).with(:gitlab_database_transaction_seconds, 0.23, { db_config_name: db_config_name })
|
||||
expect(background_transaction).to receive(:observe).with(
|
||||
:gitlab_database_transaction_seconds, 0.23, { db_config_name: db_config_name }
|
||||
)
|
||||
|
||||
expect(web_transaction).not_to receive(:observe)
|
||||
expect(web_transaction).not_to receive(:increment)
|
||||
|
|
|
|||
|
|
@ -4,7 +4,9 @@ require 'spec_helper'

RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::RelationParsers::Joins do
describe '#accept' do
let(:collector) { Arel::Collectors::SubstituteBinds.new(ActiveRecord::Base.connection, Arel::Collectors::SQLString.new) }
let(:collector) do
Arel::Collectors::SubstituteBinds.new(ApplicationRecord.connection, Arel::Collectors::SQLString.new)
end

context 'with join added via string' do
it 'collects join parts' do
@ -33,7 +35,10 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::RelationParsers::Joins

result = described_class.new(ApplicationRecord.connection).accept(arel)

expect(result).to match_array [{ source: "joins", constraints: "records.id = joins.records_id" }, { source: "second_level_joins", constraints: "joins.id = second_level_joins.joins_id" }]
expect(result).to match_array [
{ source: "joins", constraints: "records.id = joins.records_id" },
{ source: "second_level_joins", constraints: "joins.id = second_level_joins.joins_id" }
]
end
end
end
@ -3,7 +3,7 @@
require 'spec_helper'

RSpec.describe Badge do
let(:placeholder_url) { 'http://www.example.com/%{project_path}/%{project_id}/%{default_branch}/%{commit_sha}' }
let(:placeholder_url) { 'http://www.example.com/%{project_path}/%{project_id}/%{project_name}/%{default_branch}/%{commit_sha}' }

describe 'validations' do
# Requires the let variable url_sym
@ -64,7 +64,7 @@ RSpec.describe Badge do
it 'uses the project information to populate the url placeholders' do
stub_project_commit_info(project)

expect(badge.public_send("rendered_#{method}", project)).to eq "http://www.example.com/#{project.full_path}/#{project.id}/master/whatever"
expect(badge.public_send("rendered_#{method}", project)).to eq "http://www.example.com/#{project.full_path}/#{project.id}/#{project.name}/master/whatever"
end

it 'returns the url if the project used is nil' do
@ -120,6 +120,40 @@ RSpec.describe GroupMember do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#last_owner_of_the_group?' do
|
||||
context 'when member is an owner' do
|
||||
let_it_be(:group_member) { build(:group_member, :owner) }
|
||||
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
||||
where(:member_last_owner?, :member_last_blocked_owner?, :expected) do
|
||||
false | false | false
|
||||
true | false | true
|
||||
false | true | true
|
||||
true | true | true
|
||||
end
|
||||
|
||||
with_them do
|
||||
it "returns expected" do
|
||||
allow(group_member.group).to receive(:member_last_owner?).with(group_member).and_return(member_last_owner?)
|
||||
allow(group_member.group).to receive(:member_last_blocked_owner?)
|
||||
.with(group_member)
|
||||
.and_return(member_last_blocked_owner?)
|
||||
|
||||
expect(group_member.last_owner_of_the_group?).to be(expected)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when member is not an owner' do
|
||||
let_it_be(:group_member) { build(:group_member, :guest) }
|
||||
|
||||
subject { group_member.last_owner_of_the_group? }
|
||||
|
||||
it { is_expected.to be(false) }
|
||||
end
|
||||
end
|
||||
|
||||
context 'access levels' do
|
||||
context 'with parent group' do
|
||||
it_behaves_like 'inherited access level as a member of entity' do
|
||||
|
|
|
|||
|
|
@ -85,6 +85,27 @@ RSpec.describe ProjectMember do
end
end

describe '#holder_of_the_personal_namespace?' do
let_it_be(:project_member) { build(:project_member) }

using RSpec::Parameterized::TableSyntax

where(:personal_namespace_holder?, :expected) do
false | false
true | true
end

with_them do
it "returns expected" do
allow(project_member.project).to receive(:personal_namespace_holder?)
.with(project_member.user)
.and_return(personal_namespace_holder?)

expect(project_member.holder_of_the_personal_namespace?).to be(expected)
end
end
end

describe '.import_team' do
before do
@project_1 = create(:project)
@ -54,6 +54,24 @@ RSpec.describe GroupMemberPresenter do
end
end

describe '#last_owner?' do
context 'when member is the last owner of the group' do
before do
allow(group_member).to receive(:last_owner_of_the_group?).and_return(true)
end

it { expect(presenter.last_owner?).to eq(true) }
end

context 'when member is not the last owner of the group' do
before do
allow(group_member).to receive(:last_owner_of_the_group?).and_return(false)
end

it { expect(presenter.last_owner?).to eq(false) }
end
end

describe '#can_update?' do
context 'when user can update_group_member' do
before do
@ -0,0 +1,14 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe MemberPresenter, feature_category: :subgroups do
let_it_be(:member) { build(:group_member) }
let(:presenter) { described_class.new(member) }

describe '#last_owner?' do
it 'raises `NotImplementedError`' do
expect { presenter.last_owner? }.to raise_error(NotImplementedError)
end
end
end
@ -54,6 +54,24 @@ RSpec.describe ProjectMemberPresenter do
end
end

describe '#last_owner?' do
context 'when member is the holder of the personal namespace' do
before do
allow(project_member).to receive(:holder_of_the_personal_namespace?).and_return(true)
end

it { expect(presenter.last_owner?).to eq(true) }
end

context 'when member is not the holder of the personal namespace' do
before do
allow(project_member).to receive(:holder_of_the_personal_namespace?).and_return(false)
end

it { expect(presenter.last_owner?).to eq(false) }
end
end

describe '#can_update?' do
context 'when user is NOT attempting to update an Owner' do
before do
@ -90,6 +90,28 @@ RSpec.describe MemberEntity do
|
|||
it_behaves_like 'is_direct_member'
|
||||
end
|
||||
|
||||
context 'is_last_owner' do
|
||||
context 'when member is last owner' do
|
||||
before do
|
||||
allow(member).to receive(:last_owner?).and_return(true)
|
||||
end
|
||||
|
||||
it 'exposes `is_last_owner` as `true`' do
|
||||
expect(entity_hash[:is_last_owner]).to be(true)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when owner is not last owner' do
|
||||
before do
|
||||
allow(member).to receive(:last_owner?).and_return(false)
|
||||
end
|
||||
|
||||
it 'exposes `is_last_owner` as `false`' do
|
||||
expect(entity_hash[:is_last_owner]).to be(false)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'new member user state is blocked_pending_approval' do
|
||||
let(:user) { create(:user, :blocked_pending_approval) }
|
||||
let(:group_member) { create(:group_member, :invited, group: group, invite_email: user.email) }
|
||||
|
|
|
|||
|
|
@ -2,21 +2,37 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe PagesDomains::RetryAcmeOrderService do
|
||||
let(:domain) { create(:pages_domain, auto_ssl_enabled: true, auto_ssl_failed: true) }
|
||||
RSpec.describe PagesDomains::RetryAcmeOrderService, feature_category: :pages do
|
||||
let_it_be(:project) { create(:project) }
|
||||
|
||||
let(:domain) { create(:pages_domain, project: project, auto_ssl_enabled: true, auto_ssl_failed: true) }
|
||||
|
||||
let(:service) { described_class.new(domain) }
|
||||
|
||||
it 'clears auto_ssl_failed' do
|
||||
expect do
|
||||
service.execute
|
||||
end.to change { domain.auto_ssl_failed }.from(true).to(false)
|
||||
expect { service.execute }
|
||||
.to change { domain.auto_ssl_failed }
|
||||
.from(true).to(false)
|
||||
.and publish_event(PagesDomains::PagesDomainUpdatedEvent)
|
||||
.with(
|
||||
project_id: project.id,
|
||||
namespace_id: project.namespace.id,
|
||||
root_namespace_id: project.root_namespace.id,
|
||||
domain: domain.domain
|
||||
)
|
||||
end
|
||||
|
||||
it 'schedules renewal worker' do
|
||||
it 'schedules renewal worker and publish PagesDomainUpdatedEvent event' do
|
||||
expect(PagesDomainSslRenewalWorker).to receive(:perform_async).with(domain.id).and_return(nil).once
|
||||
|
||||
service.execute
|
||||
expect { service.execute }
|
||||
.to publish_event(PagesDomains::PagesDomainUpdatedEvent)
|
||||
.with(
|
||||
project_id: project.id,
|
||||
namespace_id: project.namespace.id,
|
||||
root_namespace_id: project.root_namespace.id,
|
||||
domain: domain.domain
|
||||
)
|
||||
end
|
||||
|
||||
it "doesn't schedule renewal worker if Let's Encrypt integration is not enabled" do
|
||||
|
|
@ -24,7 +40,8 @@ RSpec.describe PagesDomains::RetryAcmeOrderService do
|
|||
|
||||
expect(PagesDomainSslRenewalWorker).not_to receive(:new)
|
||||
|
||||
service.execute
|
||||
expect { service.execute }
|
||||
.to not_publish_event(PagesDomains::PagesDomainUpdatedEvent)
|
||||
end
|
||||
|
||||
it "doesn't schedule renewal worker if auto ssl has not failed yet" do
|
||||
|
|
@ -32,6 +49,7 @@ RSpec.describe PagesDomains::RetryAcmeOrderService do
|
|||
|
||||
expect(PagesDomainSslRenewalWorker).not_to receive(:new)
|
||||
|
||||
service.execute
|
||||
expect { service.execute }
|
||||
.to not_publish_event(PagesDomains::PagesDomainUpdatedEvent)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -30,7 +30,7 @@ RSpec.shared_examples 'a correct instrumented metric query' do |params|
end

before do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
allow(metric.send(:relation).connection).to receive(:transaction_open?).and_return(false)
end

it 'has correct generate query' do
@ -137,4 +137,28 @@ eos
file_content: content
).execute
end

def create_and_delete_files(project, files, &block)
files.each do |filename, content|
project.repository.create_file(
project.creator,
filename,
content,
message: "Automatically created file #{filename}",
branch_name: project.default_branch_or_main
)
end

yield

ensure
files.each do |filename, _content|
project.repository.delete_file(
project.creator,
filename,
message: "Automatically deleted file #{filename}",
branch_name: project.default_branch_or_main
)
end
end
end