Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-07-14 12:08:33 +00:00
parent 3438be0998
commit a5c4a731c8
81 changed files with 1273 additions and 690 deletions

View File

@ -1 +1 @@
53fd83a9c21e89bf1bfb9b7f918b9bcfa3ef776a
76dabc8174f7978025f48adcfab0a19c85416531

View File

@ -39,12 +39,12 @@ export class ContentEditor {
this._eventHub.dispose();
}
deserialize(serializedContent) {
deserialize(markdown) {
const { _tiptapEditor: editor, _deserializer: deserializer } = this;
return deserializer.deserialize({
schema: editor.schema,
content: serializedContent,
markdown,
});
}

View File

@ -16,8 +16,8 @@ export default ({ render }) => {
* document. The dom property contains the HTML generated from the Markdown Source.
*/
return {
deserialize: async ({ schema, content }) => {
const html = await render(content);
deserialize: async ({ schema, markdown }) => {
const html = await render(markdown);
if (!html) return {};
@ -25,7 +25,7 @@ export default ({ render }) => {
const { body } = parser.parseFromString(html, 'text/html');
// append original source as a comment that nodes can access
body.append(document.createComment(content));
body.append(document.createComment(markdown));
return { document: ProseMirrorDOMParser.fromSchema(schema).parse(body) };
},

View File

@ -53,7 +53,7 @@ function maybeMerge(a, b) {
* Hast node documentation: https://github.com/syntax-tree/hast
*
* @param {HastNode} hastNode A Hast node
* @param {String} source Markdown source file
* @param {String} markdown Markdown source file
*
* @returns It returns an object with the following attributes:
*
@ -62,13 +62,13 @@ function maybeMerge(a, b) {
* - sourceMarkdown: A nodes original Markdown source extrated
* from the Markdown source file.
*/
function createSourceMapAttributes(hastNode, source) {
function createSourceMapAttributes(hastNode, markdown) {
const { position } = hastNode;
return position && position.end
? {
sourceMapKey: `${position.start.offset}:${position.end.offset}`,
sourceMarkdown: source.substring(position.start.offset, position.end.offset),
sourceMarkdown: markdown.substring(position.start.offset, position.end.offset),
}
: {};
}
@ -84,16 +84,16 @@ function createSourceMapAttributes(hastNode, source) {
* @param {*} proseMirrorNodeSpec ProseMirror node spec object
* @param {HastNode} hastNode A hast node
* @param {Array<HastNode>} hastParents All the ancestors of the hastNode
* @param {String} source Markdown source files content
* @param {String} markdown Markdown source files content
*
* @returns An object that contains a ProseMirror nodes attributes
*/
function getAttrs(proseMirrorNodeSpec, hastNode, hastParents, source) {
function getAttrs(proseMirrorNodeSpec, hastNode, hastParents, markdown) {
const { getAttrs: specGetAttrs } = proseMirrorNodeSpec;
return {
...createSourceMapAttributes(hastNode, source),
...(isFunction(specGetAttrs) ? specGetAttrs(hastNode, hastParents, source) : {}),
...createSourceMapAttributes(hastNode, markdown),
...(isFunction(specGetAttrs) ? specGetAttrs(hastNode, hastParents, markdown) : {}),
};
}
@ -319,11 +319,11 @@ class HastToProseMirrorConverterState {
* @param {model.ProseMirrorSchema} schema A ProseMirror schema used to create the
* ProseMirror nodes and marks.
* @param {Object} proseMirrorFactorySpecs ProseMirror nodes factory specifications.
* @param {String} source Markdown source files content
* @param {String} markdown Markdown source files content
*
* @returns An object that contains ProseMirror node factories
*/
const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source) => {
const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, markdown) => {
const factories = {
root: {
selector: 'root',
@ -356,7 +356,7 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source)
const nodeType = schema.nodeType(proseMirrorName);
state.closeUntil(parent);
state.openNode(nodeType, hastNode, getAttrs(factory, hastNode, parent, source), factory);
state.openNode(nodeType, hastNode, getAttrs(factory, hastNode, parent, markdown), factory);
/**
* If a getContent function is provided, we immediately close
@ -371,14 +371,14 @@ const createProseMirrorNodeFactories = (schema, proseMirrorFactorySpecs, source)
const nodeType = schema.nodeType(proseMirrorName);
factory.handle = (state, hastNode, parent) => {
state.closeUntil(parent);
state.openNode(nodeType, hastNode, getAttrs(factory, hastNode, parent, source), factory);
state.openNode(nodeType, hastNode, getAttrs(factory, hastNode, parent, markdown), factory);
// Inline nodes do not have children therefore they are immediately closed
state.closeNode();
};
} else if (factory.type === 'mark') {
const markType = schema.marks[proseMirrorName];
factory.handle = (state, hastNode, parent) => {
state.openMark(markType, hastNode, getAttrs(factory, hastNode, parent, source), factory);
state.openMark(markType, hastNode, getAttrs(factory, hastNode, parent, markdown), factory);
};
} else if (factory.type === 'ignore') {
factory.handle = noop;
@ -601,9 +601,9 @@ export const createProseMirrorDocFromMdastTree = ({
factorySpecs,
wrappableTags,
tree,
source,
markdown,
}) => {
const proseMirrorNodeFactories = createProseMirrorNodeFactories(schema, factorySpecs, source);
const proseMirrorNodeFactories = createProseMirrorNodeFactories(schema, factorySpecs, markdown);
const state = new HastToProseMirrorConverterState();
visitParents(tree, (hastNode, ancestors) => {

View File

@ -169,7 +169,7 @@ const factorySpecs = {
export default () => {
return {
deserialize: async ({ schema, content: markdown }) => {
deserialize: async ({ schema, markdown }) => {
const document = await render({
markdown,
renderer: (tree) =>
@ -178,7 +178,7 @@ export default () => {
factorySpecs,
tree,
wrappableTags,
source: markdown,
markdown,
}),
});

View File

@ -45,6 +45,7 @@ export default {
:fields="tableFields"
:tbody-tr-attr="{ 'data-testid': 'jobs-table-row' }"
:empty-text="$options.i18n.emptyText"
data-testid="jobs-table"
show-empty
stacked="lg"
fixed

View File

@ -2,7 +2,6 @@
import { GlAlert, GlSkeletonLoader, GlIntersectionObserver, GlLoadingIcon } from '@gitlab/ui';
import { __ } from '~/locale';
import createFlash from '~/flash';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import JobsFilteredSearch from '../filtered_search/jobs_filtered_search.vue';
import eventHub from './event_hub';
import GetJobs from './graphql/queries/get_jobs.query.graphql';
@ -28,7 +27,6 @@ export default {
GlIntersectionObserver,
GlLoadingIcon,
},
mixins: [glFeatureFlagMixin()],
inject: {
fullPath: {
default: '',
@ -93,7 +91,7 @@ export default {
return this.loading && !this.showLoadingSpinner;
},
showFilteredSearch() {
return this.glFeatures?.jobsTableVueSearch && !this.scope;
return !this.scope;
},
jobsCount() {
return this.jobs.count;

View File

@ -1,23 +1,3 @@
import Vue from 'vue';
import initJobsTable from '~/jobs/components/table';
import GlCountdown from '~/vue_shared/components/gl_countdown.vue';
if (gon.features?.jobsTableVue) {
initJobsTable();
} else {
const remainingTimeElements = document.querySelectorAll('.js-remaining-time');
remainingTimeElements.forEach(
(el) =>
new Vue({
el,
render(h) {
return h(GlCountdown, {
props: {
endDateString: el.dateTime,
},
});
},
}),
);
}
initJobsTable();

View File

@ -221,8 +221,11 @@ export default {
formattedHumanAccess() {
return (this.mr.humanAccess || '').toLowerCase();
},
hasMergeError() {
return this.mr.mergeError && this.state !== 'closed';
},
hasAlerts() {
return this.mr.mergeError || this.showMergePipelineForkWarning;
return this.hasMergeError || this.showMergePipelineForkWarning;
},
shouldShowExtension() {
return (
@ -574,7 +577,12 @@ export default {
/>
<div class="mr-section-container mr-widget-workflow">
<div v-if="hasAlerts" class="gl-overflow-hidden mr-widget-alert-container">
<mr-widget-alert-message v-if="mr.mergeError" type="danger" dismissible>
<mr-widget-alert-message
v-if="hasMergeError"
type="danger"
dismissible
data-testid="merge_error"
>
<span v-safe-html="mergeError"></span>
</mr-widget-alert-message>
<mr-widget-alert-message

View File

@ -4,6 +4,7 @@ import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import {
i18n,
WIDGET_TYPE_ASSIGNEES,
WIDGET_TYPE_LABELS,
WIDGET_TYPE_DESCRIPTION,
WIDGET_TYPE_WEIGHT,
} from '../constants';
@ -14,6 +15,7 @@ import WorkItemState from './work_item_state.vue';
import WorkItemTitle from './work_item_title.vue';
import WorkItemDescription from './work_item_description.vue';
import WorkItemAssignees from './work_item_assignees.vue';
import WorkItemLabels from './work_item_labels.vue';
import WorkItemWeight from './work_item_weight.vue';
export default {
@ -25,6 +27,7 @@ export default {
WorkItemAssignees,
WorkItemActions,
WorkItemDescription,
WorkItemLabels,
WorkItemTitle,
WorkItemState,
WorkItemWeight,
@ -99,6 +102,9 @@ export default {
workItemAssignees() {
return this.workItem?.widgets?.find((widget) => widget.type === WIDGET_TYPE_ASSIGNEES);
},
workItemLabels() {
return this.workItem?.mockWidgets?.find((widget) => widget.type === WIDGET_TYPE_LABELS);
},
workItemWeight() {
return this.workItem?.mockWidgets?.find((widget) => widget.type === WIDGET_TYPE_WEIGHT);
},
@ -155,6 +161,12 @@ export default {
:allows-multiple-assignees="workItemAssignees.allowsMultipleAssignees"
@error="error = $event"
/>
<work-item-labels
v-if="workItemLabels"
:work-item-id="workItem.id"
:can-update="canUpdate"
@error="error = $event"
/>
<work-item-weight
v-if="workItemWeight"
class="gl-mb-5"

View File

@ -0,0 +1,246 @@
<script>
import { GlTokenSelector, GlLabel, GlSkeletonLoader } from '@gitlab/ui';
import { debounce } from 'lodash';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import Tracking from '~/tracking';
import labelSearchQuery from '~/vue_shared/components/sidebar/labels_select_widget/graphql/project_labels.query.graphql';
import LabelItem from '~/vue_shared/components/sidebar/labels_select_widget/label_item.vue';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import { isScopedLabel, scopedLabelKey } from '~/lib/utils/common_utils';
import workItemQuery from '../graphql/work_item.query.graphql';
import localUpdateWorkItemMutation from '../graphql/local_update_work_item.mutation.graphql';
import { i18n, TRACKING_CATEGORY_SHOW, WIDGET_TYPE_LABELS } from '../constants';
function isTokenSelectorElement(el) {
return el?.classList.contains('gl-label-close') || el?.classList.contains('dropdown-item');
}
function addClass(el) {
return {
...el,
class: 'gl-bg-transparent',
};
}
export default {
components: {
GlTokenSelector,
GlLabel,
GlSkeletonLoader,
LabelItem,
},
mixins: [Tracking.mixin()],
inject: ['fullPath'],
props: {
workItemId: {
type: String,
required: true,
},
canUpdate: {
type: Boolean,
required: true,
},
},
data() {
return {
isEditing: false,
searchStarted: false,
localLabels: [],
searchKey: '',
searchLabels: [],
};
},
apollo: {
workItem: {
query: workItemQuery,
variables() {
return {
id: this.workItemId,
};
},
skip() {
return !this.workItemId;
},
error() {
this.$emit('error', i18n.fetchError);
},
},
searchLabels: {
query: labelSearchQuery,
variables() {
return {
fullPath: this.fullPath,
search: this.searchKey,
};
},
skip() {
return !this.searchStarted;
},
update(data) {
return data.workspace?.labels?.nodes.map((node) => addClass({ ...node, ...node.label }));
},
error() {
this.$emit('error', i18n.fetchError);
},
},
},
computed: {
tracking() {
return {
category: TRACKING_CATEGORY_SHOW,
label: 'item_labels',
property: `type_${this.workItem.workItemType?.name}`,
};
},
allowScopedLabels() {
return this.labelsWidget.allowScopedLabels;
},
listEmpty() {
return this.labels.length === 0;
},
containerClass() {
return !this.isEditing ? 'gl-shadow-none!' : '';
},
isLoading() {
return this.$apollo.queries.searchLabels.loading;
},
labelsWidget() {
return this.workItem?.mockWidgets?.find((widget) => widget.type === WIDGET_TYPE_LABELS);
},
labels() {
return this.labelsWidget?.nodes || [];
},
},
watch: {
labels(newVal) {
if (!this.isEditing) {
this.localLabels = newVal.map(addClass);
}
},
},
created() {
this.debouncedSearchKeyUpdate = debounce(this.setSearchKey, DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
},
methods: {
getId(id) {
return getIdFromGraphQLId(id);
},
removeLabel({ id }) {
this.localLabels = this.localLabels.filter((label) => label.id !== id);
},
setLabels(event) {
this.searchKey = '';
if (isTokenSelectorElement(event.relatedTarget) || !this.isEditing) return;
this.isEditing = false;
this.$apollo
.mutate({
mutation: localUpdateWorkItemMutation,
variables: {
input: {
id: this.workItemId,
labels: this.localLabels,
},
},
})
.catch((e) => {
this.$emit('error', e);
});
this.track('updated_labels');
},
handleFocus() {
this.isEditing = true;
this.searchStarted = true;
},
async focusTokenSelector(labels) {
if (this.allowScopedLabels) {
const newLabel = labels[labels.length - 1];
const existingLabels = labels.slice(0, labels.length - 1);
const newLabelKey = scopedLabelKey(newLabel);
const removeLabelsWithSameScope = existingLabels.filter((label) => {
const sameKey = newLabelKey === scopedLabelKey(label);
return !sameKey;
});
this.localLabels = [...removeLabelsWithSameScope, newLabel];
}
this.handleFocus();
await this.$nextTick();
this.$refs.tokenSelector.focusTextInput();
},
handleMouseOver() {
this.timeout = setTimeout(() => {
this.searchStarted = true;
}, DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
},
handleMouseOut() {
clearTimeout(this.timeout);
},
setSearchKey(value) {
this.searchKey = value;
},
scopedLabel(label) {
return this.allowScopedLabels && isScopedLabel(label);
},
},
};
</script>
<template>
<div class="form-row gl-mb-5 work-item-labels gl-relative">
<span
class="gl-font-weight-bold gl-mt-2 col-lg-2 col-3 gl-pt-2 min-w-fit-content gl-overflow-wrap-break"
data-testid="labels-title"
>{{ __('Labels') }}</span
>
<gl-token-selector
ref="tokenSelector"
v-model="localLabels"
:container-class="containerClass"
:dropdown-items="searchLabels"
:loading="isLoading"
:view-only="!canUpdate"
class="gl-flex-grow-1 gl-border gl-border-white gl-hover-border-gray-200 gl-rounded-base col-9 gl-align-self-start gl-px-0! gl-mx-2!"
@input="focusTokenSelector"
@text-input="debouncedSearchKeyUpdate"
@focus="handleFocus"
@blur="setLabels"
@mouseover.native="handleMouseOver"
@mouseout.native="handleMouseOut"
>
<template #empty-placeholder>
<div
class="add-labels gl-min-w-fit-content gl-display-flex gl-align-items-center gl-text-gray-400 gl-pr-4 gl-top-2"
data-testid="empty-state"
>
<span v-if="canUpdate" class="gl-ml-2">{{ __('Select labels') }}</span>
<span v-else class="gl-ml-2">{{ __('None') }}</span>
</div>
</template>
<template #token-content="{ token }">
<gl-label
:data-qa-label-name="token.title"
:title="token.title"
:description="token.description"
:background-color="token.color"
:scoped="scopedLabel(token)"
:show-close-button="canUpdate"
@close="removeLabel(token)"
/>
</template>
<template #dropdown-item-content="{ dropdownItem }">
<label-item :label="dropdownItem" />
</template>
<template #loading-content>
<gl-skeleton-loader :height="170">
<rect width="380" height="20" x="10" y="15" rx="4" />
<rect width="280" height="20" x="10" y="50" rx="4" />
<rect width="380" height="20" x="10" y="95" rx="4" />
<rect width="280" height="20" x="10" y="130" rx="4" />
</gl-skeleton-loader>
</template>
</gl-token-selector>
</div>
</template>

View File

@ -17,6 +17,7 @@ export const TASK_TYPE_NAME = 'Task';
export const WIDGET_TYPE_ASSIGNEES = 'ASSIGNEES';
export const WIDGET_TYPE_DESCRIPTION = 'DESCRIPTION';
export const WIDGET_TYPE_LABELS = 'LABELS';
export const WIDGET_TYPE_WEIGHT = 'WEIGHT';
export const WIDGET_TYPE_HIERARCHY = 'HIERARCHY';

View File

@ -2,7 +2,7 @@ import produce from 'immer';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import { WIDGET_TYPE_ASSIGNEES, WIDGET_TYPE_WEIGHT } from '../constants';
import { WIDGET_TYPE_ASSIGNEES, WIDGET_TYPE_LABELS, WIDGET_TYPE_WEIGHT } from '../constants';
import typeDefs from './typedefs.graphql';
import workItemQuery from './work_item.query.graphql';
@ -10,7 +10,7 @@ export const temporaryConfig = {
typeDefs,
cacheConfig: {
possibleTypes: {
LocalWorkItemWidget: ['LocalWorkItemWeight'],
LocalWorkItemWidget: ['LocalWorkItemLabels', 'LocalWorkItemWeight'],
},
typePolicies: {
WorkItem: {
@ -19,6 +19,12 @@ export const temporaryConfig = {
read(widgets) {
return (
widgets || [
{
__typename: 'LocalWorkItemLabels',
type: WIDGET_TYPE_LABELS,
allowScopedLabels: true,
nodes: [],
},
{
__typename: 'LocalWorkItemWeight',
type: 'WEIGHT',
@ -56,6 +62,13 @@ export const resolvers = {
);
weightWidget.weight = input.weight;
}
if (input.labels) {
const labelsWidget = draftData.workItem.mockWidgets.find(
(widget) => widget.type === WIDGET_TYPE_LABELS,
);
labelsWidget.nodes = [...input.labels];
}
});
cache.writeQuery({

View File

@ -1,5 +1,6 @@
enum LocalWidgetType {
ASSIGNEES
LABELS
WEIGHT
}
@ -12,6 +13,12 @@ type LocalWorkItemAssignees implements LocalWorkItemWidget {
nodes: [UserCore]
}
type LocalWorkItemLabels implements LocalWorkItemWidget {
type: LocalWidgetType!
allowScopedLabels: Boolean!
nodes: [Label!]
}
type LocalWorkItemWeight implements LocalWorkItemWidget {
type: LocalWidgetType!
weight: Int
@ -24,6 +31,7 @@ extend type WorkItem {
input LocalUpdateWorkItemInput {
id: WorkItemID!
assignees: [UserCore!]
labels: [Label]
weight: Int
}

View File

@ -1,9 +1,17 @@
#import "~/graphql_shared/fragments/label.fragment.graphql"
#import "./work_item.fragment.graphql"
query workItem($id: WorkItemID!) {
workItem(id: $id) {
...WorkItem
mockWidgets @client {
... on LocalWorkItemLabels {
type
allowScopedLabels
nodes {
...Label
}
}
... on LocalWorkItemWeight {
type
weight

View File

@ -23,3 +23,13 @@
display: block;
}
}
.work-item-labels {
.gl-token {
padding-left: $gl-spacing-scale-1;
}
.gl-token-close {
display: none;
}
}

View File

@ -18,8 +18,6 @@ class Projects::JobsController < Projects::ApplicationController
before_action :verify_api_request!, only: :terminal_websocket_authorize
before_action :authorize_create_proxy_build!, only: :proxy_websocket_authorize
before_action :verify_proxy_request!, only: :proxy_websocket_authorize
before_action :push_jobs_table_vue, only: [:index]
before_action :push_jobs_table_vue_search, only: [:index]
before_action :push_job_log_search, only: [:show]
before_action :reject_if_build_artifacts_size_refreshing!, only: [:erase]
@ -251,14 +249,6 @@ class Projects::JobsController < Projects::ApplicationController
::Gitlab::Workhorse.channel_websocket(service)
end
def push_jobs_table_vue
push_frontend_feature_flag(:jobs_table_vue, @project)
end
def push_jobs_table_vue_search
push_frontend_feature_flag(:jobs_table_vue_search, @project)
end
def push_job_log_search
push_frontend_feature_flag(:job_log_search, @project)
end

View File

@ -174,6 +174,10 @@ class MergeRequest < ApplicationRecord
merge_request.merge_jid = nil
end
before_transition any => :closed do |merge_request|
merge_request.merge_error = nil
end
after_transition any => :opened do |merge_request|
merge_request.run_after_commit do
UpdateHeadPipelineForMergeRequestWorker.perform_async(merge_request.id)

View File

@ -75,7 +75,8 @@ module Ci
def runner_version_with_updated_status(runner_version)
version = runner_version['version']
new_status = upgrade_check.check_runner_upgrade_status(version)
suggestion = upgrade_check.check_runner_upgrade_status(version)
new_status = suggestion.each_key.first
if new_status != :error && new_status != runner_version['status'].to_sym
{

View File

@ -2,6 +2,7 @@
class GravatarService
def execute(email, size = nil, scale = 2, username: nil)
return if Gitlab::FIPS.enabled?
return unless Gitlab::CurrentSettings.gravatar_enabled?
identifier = email.presence || username.presence

View File

@ -1,126 +0,0 @@
# frozen_string_literal: true
module Issuable
module Clone
class AttributesRewriter < ::Issuable::Clone::BaseService
def initialize(current_user, original_entity, new_entity)
@current_user = current_user
@original_entity = original_entity
@new_entity = new_entity
end
def execute
update_attributes = { labels: cloneable_labels }
milestone = matching_milestone(original_entity.milestone&.title)
update_attributes[:milestone] = milestone if milestone.present?
new_entity.update(update_attributes)
copy_resource_label_events
copy_resource_milestone_events
copy_resource_state_events
end
private
def matching_milestone(title)
return if title.blank? || !new_entity.supports_milestone?
params = { title: title, project_ids: new_entity.project&.id, group_ids: group&.id }
milestones = MilestonesFinder.new(params).execute
milestones.first
end
def cloneable_labels
params = {
project_id: new_entity.project&.id,
group_id: group&.id,
title: original_entity.labels.select(:title),
include_ancestor_groups: true
}
params[:only_group_labels] = true if new_parent.is_a?(Group)
LabelsFinder.new(current_user, params).execute
end
def copy_resource_label_events
copy_events(ResourceLabelEvent.table_name, original_entity.resource_label_events) do |event|
event.attributes
.except('id', 'reference', 'reference_html')
.merge(entity_key => new_entity.id, 'action' => ResourceLabelEvent.actions[event.action])
end
end
def copy_resource_milestone_events
return unless milestone_events_supported?
copy_events(ResourceMilestoneEvent.table_name, original_entity.resource_milestone_events) do |event|
if event.remove?
event_attributes_with_milestone(event, nil)
else
matching_destination_milestone = matching_milestone(event.milestone_title)
event_attributes_with_milestone(event, matching_destination_milestone) if matching_destination_milestone.present?
end
end
end
def copy_resource_state_events
return unless state_events_supported?
copy_events(ResourceStateEvent.table_name, original_entity.resource_state_events) do |event|
event.attributes
.except(*blocked_state_event_attributes)
.merge(entity_key => new_entity.id,
'state' => ResourceStateEvent.states[event.state])
end
end
# Overriden on EE::Issuable::Clone::AttributesRewriter
def blocked_state_event_attributes
['id']
end
def event_attributes_with_milestone(event, milestone)
event.attributes
.except('id')
.merge(entity_key => new_entity.id,
'milestone_id' => milestone&.id,
'action' => ResourceMilestoneEvent.actions[event.action],
'state' => ResourceMilestoneEvent.states[event.state])
end
def copy_events(table_name, events_to_copy)
events_to_copy.find_in_batches do |batch|
events = batch.map do |event|
yield(event)
end.compact
ApplicationRecord.legacy_bulk_insert(table_name, events) # rubocop:disable Gitlab/BulkInsert
end
end
def entity_key
new_entity.class.name.underscore.foreign_key
end
def milestone_events_supported?
both_respond_to?(:resource_milestone_events)
end
def state_events_supported?
both_respond_to?(:resource_state_events)
end
def both_respond_to?(method)
original_entity.respond_to?(method) &&
new_entity.respond_to?(method)
end
end
end
end
Issuable::Clone::AttributesRewriter.prepend_mod_with('Issuable::Clone::AttributesRewriter')

View File

@ -25,19 +25,19 @@ module Issuable
private
def copy_award_emoji
AwardEmojis::CopyService.new(original_entity, new_entity).execute
end
def copy_notes
Notes::CopyService.new(current_user, original_entity, new_entity).execute
def rewritten_old_entity_attributes(include_milestone: true)
Gitlab::Issuable::Clone::AttributesRewriter.new(
current_user,
original_entity,
target_project
).execute(include_milestone: include_milestone)
end
def update_new_entity
update_new_entity_description
update_new_entity_attributes
copy_award_emoji
copy_notes
copy_resource_events
end
def update_new_entity_description
@ -52,8 +52,16 @@ module Issuable
new_entity.update!(update_description_params)
end
def update_new_entity_attributes
AttributesRewriter.new(current_user, original_entity, new_entity).execute
def copy_award_emoji
AwardEmojis::CopyService.new(original_entity, new_entity).execute
end
def copy_notes
Notes::CopyService.new(current_user, original_entity, new_entity).execute
end
def copy_resource_events
Gitlab::Issuable::Clone::CopyResourceEventsService.new(current_user, original_entity, new_entity).execute
end
def update_old_entity
@ -74,12 +82,6 @@ module Issuable
new_entity.resource_parent
end
def group
if new_entity.project&.group && current_user.can?(:read_group, new_entity.project.group)
new_entity.project.group
end
end
def relative_position
return if original_entity.project.root_ancestor.id != target_project.root_ancestor.id

View File

@ -41,9 +41,12 @@ module Issues
def update_new_entity
# we don't call `super` because we want to be able to decide whether or not to copy all comments over.
update_new_entity_description
update_new_entity_attributes
copy_award_emoji
copy_notes if with_notes
if with_notes
copy_notes
copy_resource_events
end
end
def update_old_entity
@ -62,14 +65,18 @@ module Issues
}
new_params = original_entity.serializable_hash.symbolize_keys.merge(new_params)
new_params = new_params.merge(rewritten_old_entity_attributes)
new_params.delete(:created_at)
new_params.delete(:updated_at)
# spam checking is not necessary, as no new content is being created. Passing nil for
# spam_params will cause SpamActionService to skip checking and return a success response.
spam_params = nil
# Skip creation of system notes for existing attributes of the issue. The system notes of the old
# issue are copied over so we don't want to end up with duplicate notes.
CreateService.new(project: target_project, current_user: current_user, params: new_params, spam_params: spam_params).execute(skip_system_notes: true)
# Skip creation of system notes for existing attributes of the issue when cloning with notes.
# The system notes of the old issue are copied over so we don't want to end up with duplicate notes.
# When cloning without notes, we want to generate system notes for the attributes that were copied.
CreateService.new(project: target_project, current_user: current_user, params: new_params, spam_params: spam_params).execute(skip_system_notes: with_notes)
end
def queue_copy_designs

View File

@ -76,6 +76,7 @@ module Issues
}
new_params = original_entity.serializable_hash.symbolize_keys.merge(new_params)
new_params = new_params.merge(rewritten_old_entity_attributes)
# spam checking is not necessary, as no new content is being created. Passing nil for
# spam_params will cause SpamActionService to skip checking and return a success response.
spam_params = nil

View File

@ -25,7 +25,11 @@ module WorkItems
work_item = create_result[:work_item]
return ::ServiceResponse.success(payload: payload(work_item)) if @link_params.blank?
result = IssueLinks::CreateService.new(work_item, @current_user, @link_params).execute
result = WorkItems::ParentLinks::CreateService.new(
@link_params[:parent_work_item],
@current_user,
{ target_issuable: work_item }
).execute
if result[:status] == :success
::ServiceResponse.success(payload: payload(work_item))

View File

@ -17,7 +17,7 @@ module WorkItems
current_user: @current_user,
params: @work_item_params.slice(:title, :work_item_type_id),
spam_params: @spam_params,
link_params: { target_issuable: @work_item }
link_params: { parent_work_item: @work_item }
).execute
if create_and_link_result.error?

View File

@ -2,12 +2,4 @@
- add_page_specific_style 'page_bundles/ci_status'
- admin = local_assigns.fetch(:admin, false)
- if Feature.enabled?(:jobs_table_vue, @project)
#js-jobs-table{ data: { admin: admin, full_path: @project.full_path, job_statuses: job_statuses.to_json, pipeline_editor_path: project_ci_pipeline_editor_path(@project), empty_state_svg_path: image_path('jobs-empty-state.svg') } }
- else
.top-area
- build_path_proc = ->(scope) { project_jobs_path(@project, scope: scope) }
= render "shared/builds/tabs", build_path_proc: build_path_proc, all_builds: @all_builds, scope: @scope
.content-list.builds-content-list
= render "table", builds: @builds, project: @project
#js-jobs-table{ data: { admin: admin, full_path: @project.full_path, job_statuses: job_statuses.to_json, pipeline_editor_path: project_ci_pipeline_editor_path(@project), empty_state_svg_path: image_path('jobs-empty-state.svg') } }

View File

@ -1,8 +0,0 @@
---
name: ci_value_change_for_processable_and_rules_entry
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/90238
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/365876
milestone: '15.2'
type: development
group: group::pipeline authoring
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: jobs_table_vue
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/57155
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/327500
milestone: '13.11'
type: development
group: group::pipeline execution
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: jobs_table_vue_search
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/82539
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/356007
milestone: '14.10'
type: development
group: group::pipeline execution
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: standard_context_type_check
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/88540
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/364265
milestone: '15.1'
type: development
group: group::product intelligence
default_enabled: false

View File

@ -202,8 +202,8 @@ successfully, you must replicate their data using some other means.
|[External merge request diffs](../../merge_request_diffs.md) | **Yes** (13.5) | **Yes** (14.6) | [**Yes** (15.1)](https://gitlab.com/groups/gitlab-org/-/epics/5551) | [No](object_storage.md#verification-of-files-in-object-storage) | Replication is behind the feature flag `geo_merge_request_diff_replication`, enabled by default. Verification was behind the feature flag `geo_merge_request_diff_verification`, removed in 14.7.|
|[Versioned snippets](../../../user/snippets.md#versioned-snippets) | [**Yes** (13.7)](https://gitlab.com/groups/gitlab-org/-/epics/2809) | [**Yes** (14.2)](https://gitlab.com/groups/gitlab-org/-/epics/2810) | N/A | N/A | Verification was implemented behind the feature flag `geo_snippet_repository_verification` in 13.11, and the feature flag was removed in 14.2. |
|[GitLab Pages](../../pages/index.md) | [**Yes** (14.3)](https://gitlab.com/groups/gitlab-org/-/epics/589) | **Yes** (14.6) | [**Yes** (15.1)](https://gitlab.com/groups/gitlab-org/-/epics/5551) | [No](object_storage.md#verification-of-files-in-object-storage) | Behind feature flag `geo_pages_deployment_replication`, enabled by default. Verification was behind the feature flag `geo_pages_deployment_verification`, removed in 14.7. |
|[Incident Metric Images](../../../operations/incident_management/incidents.md#metrics) | [Planned](https://gitlab.com/gitlab-org/gitlab/-/issues/352326) | [No](https://gitlab.com/gitlab-org/gitlab/-/issues/362561) | No | No | |
|[Alert Metric Images](../../../operations/incident_management/alerts.md#metrics-tab) | [Planned](https://gitlab.com/gitlab-org/gitlab/-/issues/352326) | [No](https://gitlab.com/gitlab-org/gitlab/-/issues/362561) | No | No | |
|[Incident Metric Images](../../../operations/incident_management/incidents.md#metrics) | [Planned](https://gitlab.com/gitlab-org/gitlab/-/issues/362561) | [No](https://gitlab.com/gitlab-org/gitlab/-/issues/362561) | No | No | |
|[Alert Metric Images](../../../operations/incident_management/alerts.md#metrics-tab) | [Planned](https://gitlab.com/gitlab-org/gitlab/-/issues/362564) | [No](https://gitlab.com/gitlab-org/gitlab/-/issues/362564) | No | No | |
|[Server-side Git hooks](../../server_hooks.md) | [Not planned](https://gitlab.com/groups/gitlab-org/-/epics/1867) | No | N/A | N/A | Not planned because of current implementation complexity, low customer interest, and availability of alternatives to hooks. |
|[Elasticsearch integration](../../../integration/elasticsearch.md) | [Not planned](https://gitlab.com/gitlab-org/gitlab/-/issues/1186) | No | No | No | Not planned because further product discovery is required and Elasticsearch (ES) clusters can be rebuilt. Secondaries use the same ES cluster as the primary. |
|[Dependency proxy images](../../../user/packages/dependency_proxy/index.md) | [Not planned](https://gitlab.com/gitlab-org/gitlab/-/issues/259694) | No | No | No | Blocked by [Geo: Secondary Mimicry](https://gitlab.com/groups/gitlab-org/-/epics/1528). Replication of this cache is not needed for disaster recovery purposes because it can be recreated from external sources. |

View File

@ -45,9 +45,6 @@ Clicking an individual job shows you its job log, and allows you to:
### View all jobs in a project
> - An improved view was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/293862) in GitLab 14.10, [with a flag](../../administration/feature_flags.md) named `jobs_table_vue`. Disabled by default.
> - The job status filter was [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/82539) in GitLab 14.10, [with a flag](../../administration/feature_flags.md) named `jobs_table_vue_search`. Disabled by default.
To view the full list of jobs that ran in a project:
1. On the top bar, select **Menu > Projects** and find the project.

View File

@ -121,6 +121,12 @@ GitLab has several features which can help you manage the number of users:
> Introduced in GitLab 14.1.
Prerequisites:
- You must be running GitLab Enterprise Edition (EE).
- You must have GitLab 14.1 or later.
- Your instance must be connected to the internet, and not be in an offline environment.
To sync subscription data between your self-managed instance and GitLab, you must [activate your instance](../../user/admin_area/license.md) with an
activation code.

View File

@ -158,7 +158,7 @@ group itself.
Prerequisites:
- You must have the Owner role.
- You must have the Maintainer or Owner role.
- Optional. Unassign the member from all issues and merge requests that
are assigned to them.

View File

@ -8,7 +8,7 @@ module API
helpers do
# Overridden in EE
def geo_proxy_response
{}
{ geo_enabled: false }
end
end

View File

@ -86,16 +86,10 @@ module Gitlab
@entries.delete(:except) unless except_defined? # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
unless ::Feature.enabled?(:ci_value_change_for_processable_and_rules_entry)
validate_against_warnings unless has_workflow_rules
end
yield if block_given?
end
if ::Feature.enabled?(:ci_value_change_for_processable_and_rules_entry)
validate_against_warnings unless has_workflow_rules
end
validate_against_warnings unless has_workflow_rules
end
def validate_against_warnings

View File

@ -13,12 +13,7 @@ module Gitlab
end
def value
if ::Feature.enabled?(:ci_value_change_for_processable_and_rules_entry)
# `flatten` is needed to make it work with nested `!reference`
[super].flatten
else
[@config].flatten
end
[super].flatten
end
def composable_class

View File

@ -0,0 +1,64 @@
# frozen_string_literal: true

module Gitlab
  module Issuable
    module Clone
      # Computes the attributes (labels and, optionally, a milestone) that an
      # issuable cloned from +original_entity+ into +target_parent+ should
      # receive. Only labels/milestones visible in the target parent are kept.
      class AttributesRewriter
        attr_reader :current_user, :original_entity, :target_parent

        def initialize(current_user, original_entity, target_parent)
          @current_user = current_user
          @original_entity = original_entity
          @target_parent = target_parent
        end

        # Returns a Hash with :label_ids and, when +include_milestone+ is true
        # and a milestone with a matching title exists in the target,
        # :milestone_id.
        def execute(include_milestone: true)
          { label_ids: cloneable_labels.pluck_primary_key }.tap do |attrs|
            next unless include_milestone

            milestone = matching_milestone(original_entity.milestone&.title)
            attrs[:milestone_id] = milestone.id if milestone.present?
          end
        end

        private

        # Labels on the original entity that also exist in the target parent
        # (including its ancestor groups).
        def cloneable_labels
          finder_params = {
            project_id: project&.id,
            group_id: group&.id,
            title: original_entity.labels.select(:title),
            include_ancestor_groups: true
          }
          finder_params[:only_group_labels] = true if target_parent.is_a?(Group)

          LabelsFinder.new(current_user, finder_params).execute
        end

        # First milestone with the given title reachable from the target
        # parent, or nil when the title is blank or nothing matches.
        def matching_milestone(title)
          return if title.blank?

          MilestonesFinder
            .new(title: title, project_ids: project&.id, group_ids: group&.id)
            .execute
            .first
        end

        def project
          target_parent if target_parent.is_a?(Project)
        end

        def group
          if target_parent.is_a?(Group)
            target_parent
          elsif target_parent&.group && current_user.can?(:read_group, target_parent.group)
            target_parent.group
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,116 @@
# frozen_string_literal: true

module Gitlab
  module Issuable
    module Clone
      # Copies the resource event history (label, milestone and state events)
      # from +original_entity+ onto +new_entity+, e.g. when an issue is moved
      # or cloned, so the new record keeps the original's timeline.
      class CopyResourceEventsService
        attr_reader :current_user, :original_entity, :new_entity

        def initialize(current_user, original_entity, new_entity)
          @current_user = current_user
          @original_entity = original_entity
          @new_entity = new_entity
        end

        # Copies all supported event types; milestone/state events are only
        # copied when both entities expose the corresponding association.
        def execute
          copy_resource_label_events
          copy_resource_milestone_events
          copy_resource_state_events
        end

        private

        def copy_resource_label_events
          copy_events(ResourceLabelEvent.table_name, original_entity.resource_label_events) do |event|
            # Drop per-row identity/caching columns and re-point the event at
            # the new entity; 'action' is re-mapped through the enum so the
            # raw integer value is written.
            event.attributes
              .except('id', 'reference', 'reference_html')
              .merge(entity_key => new_entity.id, 'action' => ResourceLabelEvent.actions[event.action])
          end
        end

        def copy_resource_milestone_events
          return unless milestone_events_supported?

          copy_events(ResourceMilestoneEvent.table_name, original_entity.resource_milestone_events) do |event|
            if event.remove?
              # A "remove" event carries no milestone.
              event_attributes_with_milestone(event, nil)
            else
              # Only copy "add" events whose milestone has a counterpart
              # (same title) reachable from the new entity.
              destination_milestone = matching_milestone(event.milestone_title)

              event_attributes_with_milestone(event, destination_milestone) if destination_milestone.present?
            end
          end
        end

        def copy_resource_state_events
          return unless state_events_supported?

          copy_events(ResourceStateEvent.table_name, original_entity.resource_state_events) do |event|
            event.attributes
              .except(*blocked_state_event_attributes)
              .merge(entity_key => new_entity.id,
                     'state' => ResourceStateEvent.states[event.state])
          end
        end

        # Attributes stripped from copied state events.
        # Overridden on EE::Gitlab::Issuable::Clone::CopyResourceEventsService
        def blocked_state_event_attributes
          ['id']
        end

        # Builds the attribute hash for a copied milestone event, pointing it
        # at +milestone+ (may be nil for removal events).
        def event_attributes_with_milestone(event, milestone)
          event.attributes
            .except('id')
            .merge(entity_key => new_entity.id,
                   'milestone_id' => milestone&.id,
                   'action' => ResourceMilestoneEvent.actions[event.action],
                   'state' => ResourceMilestoneEvent.states[event.state])
        end

        # Copies +events_to_copy+ in batches; the block maps each source event
        # to an attribute hash (or nil to skip it), and each batch is written
        # with a single bulk insert.
        def copy_events(table_name, events_to_copy)
          events_to_copy.find_in_batches do |batch|
            events = batch.map do |event|
              yield(event)
            end.compact

            ApplicationRecord.legacy_bulk_insert(table_name, events) # rubocop:disable Gitlab/BulkInsert
          end
        end

        # Foreign-key column name for the new entity, e.g. "issue_id".
        def entity_key
          new_entity.class.name.underscore.foreign_key
        end

        def milestone_events_supported?
          both_respond_to?(:resource_milestone_events)
        end

        def state_events_supported?
          both_respond_to?(:resource_state_events)
        end

        def both_respond_to?(method)
          original_entity.respond_to?(method) &&
            new_entity.respond_to?(method)
        end

        # First milestone with +title+ visible from the new entity's project
        # or group, or nil when unsupported/blank/no match.
        def matching_milestone(title)
          return if title.blank? || !new_entity.supports_milestone?

          params = { title: title, project_ids: new_entity.project&.id, group_ids: group&.id }
          milestones = MilestonesFinder.new(params).execute
          milestones.first
        end

        def group
          if new_entity.project&.group && current_user.can?(:read_group, new_entity.project.group)
            new_entity.project.group
          end
        end
      end
    end
  end
end

Gitlab::Issuable::Clone::CopyResourceEventsService.prepend_mod

View File

@ -48,9 +48,9 @@ module Gitlab
def map_status(jira_status_category)
case jira_status_category["key"].downcase
when 'done'
Issuable::STATE_ID_MAP[:closed]
::Issuable::STATE_ID_MAP[:closed]
else
Issuable::STATE_ID_MAP[:opened]
::Issuable::STATE_ID_MAP[:opened]
end
end

View File

@ -23,7 +23,7 @@ module Gitlab
_('Closed this %{quick_action_target}.') %
{ quick_action_target: quick_action_target.to_ability_name.humanize(capitalize: false) }
end
types Issuable
types ::Issuable
condition do
quick_action_target.persisted? &&
quick_action_target.open? &&
@ -45,7 +45,7 @@ module Gitlab
_('Reopened this %{quick_action_target}.') %
{ quick_action_target: quick_action_target.to_ability_name.humanize(capitalize: false) }
end
types Issuable
types ::Issuable
condition do
quick_action_target.persisted? &&
quick_action_target.closed? &&
@ -63,7 +63,7 @@ module Gitlab
_('Changed the title to "%{title_param}".') % { title_param: title_param }
end
params '<New title>'
types Issuable
types ::Issuable
condition do
quick_action_target.persisted? &&
current_user.can?(:"update_#{quick_action_target.to_ability_name}", quick_action_target)
@ -82,7 +82,7 @@ module Gitlab
end
end
params '~label1 ~"label 2"'
types Issuable
types ::Issuable
condition do
current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target) &&
find_labels.any?
@ -102,7 +102,7 @@ module Gitlab
end
end
params '~label1 ~"label 2"'
types Issuable
types ::Issuable
condition do
quick_action_target.persisted? &&
quick_action_target.labels.any? &&
@ -134,7 +134,7 @@ module Gitlab
"Replaces all labels with #{labels.join(' ')} #{'label'.pluralize(labels.count)}." if labels.any?
end
params '~label1 ~"label 2"'
types Issuable
types ::Issuable
condition do
quick_action_target.persisted? &&
quick_action_target.labels.any? &&
@ -147,7 +147,7 @@ module Gitlab
desc { _('Add a to do') }
explanation { _('Adds a to do.') }
execution_message { _('Added a to do.') }
types Issuable
types ::Issuable
condition do
quick_action_target.persisted? &&
!TodoService.new.todo_exist?(quick_action_target, current_user)
@ -159,7 +159,7 @@ module Gitlab
desc { _('Mark to do as done') }
explanation { _('Marks to do as done.') }
execution_message { _('Marked to do as done.') }
types Issuable
types ::Issuable
condition do
quick_action_target.persisted? &&
TodoService.new.todo_exist?(quick_action_target, current_user)
@ -177,7 +177,7 @@ module Gitlab
_('Subscribed to this %{quick_action_target}.') %
{ quick_action_target: quick_action_target.to_ability_name.humanize(capitalize: false) }
end
types Issuable
types ::Issuable
condition do
quick_action_target.persisted? &&
!quick_action_target.subscribed?(current_user, project)
@ -195,7 +195,7 @@ module Gitlab
_('Unsubscribed from this %{quick_action_target}.') %
{ quick_action_target: quick_action_target.to_ability_name.humanize(capitalize: false) }
end
types Issuable
types ::Issuable
condition do
quick_action_target.persisted? &&
quick_action_target.subscribed?(current_user, project)
@ -212,7 +212,7 @@ module Gitlab
_("Toggled :%{name}: emoji award.") % { name: name } if name
end
params ':emoji:'
types Issuable
types ::Issuable
condition do
quick_action_target.persisted?
end
@ -228,14 +228,14 @@ module Gitlab
desc { _("Append the comment with %{shrug}") % { shrug: SHRUG } }
params '<Comment>'
types Issuable
types ::Issuable
substitution :shrug do |comment|
"#{comment} #{SHRUG}"
end
desc { _("Append the comment with %{tableflip}") % { tableflip: TABLEFLIP } }
params '<Comment>'
types Issuable
types ::Issuable
substitution :tableflip do |comment|
"#{comment} #{TABLEFLIP}"
end

View File

@ -7,11 +7,9 @@ module Gitlab
GITLAB_RAILS_SOURCE = 'gitlab-rails'
def initialize(namespace: nil, project: nil, user: nil, **extra)
if Feature.enabled?(:standard_context_type_check)
check_argument_type(:namespace, namespace, [Namespace])
check_argument_type(:project, project, [Project, Integer])
check_argument_type(:user, user, [User, DeployToken])
end
check_argument_type(:namespace, namespace, [Namespace])
check_argument_type(:project, project, [Project, Integer])
check_argument_type(:user, user, [User, DeployToken])
@namespace = namespace
@plan = namespace&.actual_plan_name

View File

@ -12,8 +12,6 @@ RSpec.describe 'Project Jobs Permissions' do
let_it_be(:job) { create(:ci_build, :running, :coverage, :trace_artifact, pipeline: pipeline) }
before do
stub_feature_flags(jobs_table_vue: false)
sign_in(user)
project.enable_ci
@ -96,8 +94,8 @@ RSpec.describe 'Project Jobs Permissions' do
end
it_behaves_like 'project jobs page responds with status', 200 do
it 'renders job' do
page.within('.build') do
it 'renders job', :js do
page.within('[data-testid="jobs-table"]') do
expect(page).to have_content("##{job.id}")
.and have_content(job.sha[0..7])
.and have_content(job.ref)

View File

@ -9,48 +9,11 @@ def visit_jobs_page
end
RSpec.describe 'User browses jobs' do
describe 'with jobs_table_vue feature flag turned off' do
let!(:build) { create(:ci_build, :coverage, pipeline: pipeline) }
let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.sha, ref: 'master') }
let(:project) { create(:project, :repository, namespace: user.namespace) }
let(:user) { create(:user) }
before do
stub_feature_flags(jobs_table_vue: false)
project.add_maintainer(user)
project.enable_ci
build.update!(coverage_regex: '/Coverage (\d+)%/')
sign_in(user)
visit(project_jobs_path(project))
end
it 'shows the coverage' do
page.within('td.coverage') do
expect(page).to have_content('99.9%')
end
end
context 'with a failed job' do
let!(:build) { create(:ci_build, :coverage, :failed, pipeline: pipeline) }
it 'displays a tooltip with the failure reason' do
page.within('.ci-table') do
failed_job_link = page.find('.ci-failed')
expect(failed_job_link[:title]).to eq('Failed - (unknown failure)')
end
end
end
end
describe 'with jobs_table_vue feature flag turned on', :js do
describe 'Jobs', :js do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
before do
stub_feature_flags(jobs_table_vue: true)
project.add_maintainer(user)
project.enable_ci
@ -135,6 +98,26 @@ RSpec.describe 'User browses jobs' do
end
end
context 'with a coverage job' do
let!(:job) do
create(:ci_build, :coverage, pipeline: pipeline)
end
before do
job.update!(coverage_regex: '/Coverage (\d+)%/')
visit_jobs_page
wait_for_requests
end
it 'shows the coverage' do
page.within('[data-testid="job-coverage"]') do
expect(page).to have_content('99.9%')
end
end
end
context 'with a scheduled job' do
let!(:scheduled_job) { create(:ci_build, :scheduled, pipeline: pipeline, name: 'build') }

View File

@ -20,7 +20,6 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
before do
stub_feature_flags(jobs_table_vue: false)
project.add_role(user, user_access_level)
sign_in(user)
end
@ -29,9 +28,11 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
context 'with no jobs' do
before do
visit project_jobs_path(project)
wait_for_requests
end
it 'shows the empty state page' do
it 'shows the empty state page', :js do
expect(page).to have_content('Use jobs to automate your tasks')
expect(page).to have_link('Create CI/CD configuration file', href: project_ci_pipeline_editor_path(project))
end
@ -40,59 +41,6 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
context 'with a job' do
let!(:job) { create(:ci_build, pipeline: pipeline) }
context "Pending scope" do
before do
visit project_jobs_path(project, scope: :pending)
end
it "shows Pending tab jobs" do
expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Pending')
expect(page).to have_content job.short_sha
expect(page).to have_content job.ref
expect(page).to have_content job.name
end
end
context "Running scope" do
before do
job.run!
visit project_jobs_path(project, scope: :running)
end
it "shows Running tab jobs" do
expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Running')
expect(page).to have_content job.short_sha
expect(page).to have_content job.ref
expect(page).to have_content job.name
end
end
context "Finished scope" do
before do
job.run!
visit project_jobs_path(project, scope: :finished)
end
it "shows Finished tab jobs" do
expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'Finished')
expect(page).to have_content('Use jobs to automate your tasks')
end
end
context "All jobs" do
before do
project.builds.running_or_pending.each(&:success)
visit project_jobs_path(project)
end
it "shows All tab jobs" do
expect(page).to have_selector('[data-testid="jobs-tabs"] a.active', text: 'All')
expect(page).to have_content job.short_sha
expect(page).to have_content job.ref
expect(page).to have_content job.name
end
end
context "when visiting old URL" do
let(:jobs_url) do
project_jobs_path(project)

View File

@ -11,7 +11,7 @@
"author"
],
"properties": {
"author_gravatar_url": { "type": "string" },
"author_gravatar_url": { "type": [ "string", "null" ] },
"commit_url": { "type": "string" },
"commit_path": { "type": "string" },
"author": {

View File

@ -5,7 +5,7 @@
"id": { "type": "integer" },
"login": { "type": "string" },
"url": { "type": "string" },
"avatar_url": { "type": "string" },
"avatar_url": { "type": [ "string", "null" ] },
"html_url": { "type": "string" }
},
"additionalProperties": false

View File

@ -62,7 +62,7 @@
"required": ["email", "avatar_url", "can_resend", "user_state"],
"properties": {
"email": { "type": "string" },
"avatar_url": { "type": "string" },
"avatar_url": { "type": [ "string", "null" ] },
"can_resend": { "type": "boolean" },
"user_state": { "type": "string" }
},

View File

@ -11,7 +11,7 @@
"properties": {
"id": { "type": "integer" },
"state": { "type": "string" },
"avatar_url": { "type": "string" },
"avatar_url": { "type": [ "string", "null" ] },
"path": { "type": "string" },
"name": { "type": "string" },
"username": { "type": "string" },

View File

@ -12,7 +12,7 @@
"properties": {
"id": { "type": "integer" },
"state": { "type": "string" },
"avatar_url": { "type": "string" },
"avatar_url": { "type": [ "string", "null" ] },
"web_url": { "type": "string" },
"path": { "type": "string" },
"name": { "type": "string" },

View File

@ -13,7 +13,7 @@
"name": { "type": "string" },
"username": { "type": "string" },
"state": { "type": "string" },
"avatar_url": { "type": "string" },
"avatar_url": { "type": [ "string", "null" ] },
"web_url": { "type": "string" }
}
}

View File

@ -39,7 +39,7 @@
"type": "string",
"enum": ["active", "blocked"]
},
"avatar_url": { "type": "string" },
"avatar_url": { "type": [ "string", "null" ] },
"web_url": { "type": "string" },
"created_at": { "type": "string", "format": "date-time" },
"bio": { "type": ["string", "null"] },

View File

@ -122,10 +122,10 @@ const {
});
describe('Client side Markdown processing', () => {
const deserialize = async (content) => {
const deserialize = async (markdown) => {
const { document } = await remarkMarkdownDeserializer().deserialize({
schema: tiptapEditor.schema,
content,
markdown,
});
return document;

View File

@ -78,7 +78,7 @@ export const IMPLEMENTATION_ERROR_MSG = 'Error - check implementation';
async function renderMarkdownToHTMLAndJSON(markdown, schema, deserializer) {
let prosemirrorDocument;
try {
const { document } = await deserializer.deserialize({ schema, content: markdown });
const { document } = await deserializer.deserialize({ schema, markdown });
prosemirrorDocument = document;
} catch (e) {
const errorMsg = `${IMPLEMENTATION_ERROR_MSG}:\n${e.message}`;

View File

@ -1177,7 +1177,7 @@ Oranges are orange [^1]
};
it.each`
mark | content | modifiedContent | editAction
mark | markdown | modifiedMarkdown | editAction
${'bold'} | ${'**bold**'} | ${'**bold modified**'} | ${defaultEditAction}
${'bold'} | ${'__bold__'} | ${'__bold modified__'} | ${defaultEditAction}
${'bold'} | ${'<strong>bold</strong>'} | ${'<strong>bold modified</strong>'} | ${defaultEditAction}
@ -1213,10 +1213,10 @@ Oranges are orange [^1]
${'taskList'} | ${'2) [x] task list item'} | ${'2) [x] task list item modified'} | ${defaultEditAction}
`(
'preserves original $mark syntax when sourceMarkdown is available for $content',
async ({ content, modifiedContent, editAction }) => {
async ({ markdown, modifiedMarkdown, editAction }) => {
const { document } = await remarkMarkdownDeserializer().deserialize({
schema: tiptapEditor.schema,
content,
markdown,
});
editAction(document);
@ -1226,7 +1226,7 @@ Oranges are orange [^1]
doc: tiptapEditor.state.doc,
});
expect(serialized).toEqual(modifiedContent);
expect(serialized).toEqual(modifiedMarkdown);
},
);
});

View File

@ -77,7 +77,7 @@ describe('content_editor/services/markdown_sourcemap', () => {
render: () => BULLET_LIST_HTML,
}).deserialize({
schema: tiptapEditor.schema,
content: BULLET_LIST_MARKDOWN,
markdown: BULLET_LIST_MARKDOWN,
});
const expected = doc(

View File

@ -29,7 +29,7 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
before do
allow(Gitlab::Ci::RunnerUpgradeCheck.instance)
.to receive(:check_runner_upgrade_status)
.and_return(:not_available)
.and_return({ not_available: nil })
end
describe do

View File

@ -30,7 +30,6 @@ jest.mock('~/flash');
describe('Job table app', () => {
let wrapper;
let jobsTableVueSearch = true;
const successHandler = jest.fn().mockResolvedValue(mockJobsResponsePaginated);
const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
@ -66,7 +65,6 @@ describe('Job table app', () => {
},
provide: {
fullPath: projectPath,
glFeatures: { jobsTableVueSearch },
},
apolloProvider: createMockApolloProvider(handler),
});
@ -230,13 +228,5 @@ describe('Job table app', () => {
expect(createFlash).toHaveBeenCalledWith(expectedWarning);
expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(0);
});
it('should not display filtered search', () => {
jobsTableVueSearch = false;
createComponent();
expect(findFilteredSearch().exists()).toBe(false);
});
});
});

View File

@ -906,6 +906,19 @@ describe('MrWidgetOptions', () => {
});
});
describe('merge error', () => {
it.each`
state | show | showText
${'closed'} | ${false} | ${'hides'}
${'merged'} | ${true} | ${'shows'}
${'open'} | ${true} | ${'shows'}
`('it $showText merge error when state is $state', ({ state, show }) => {
createComponent({ ...mockData, state, merge_error: 'Error!' });
expect(wrapper.find('[data-testid="merge_error"]').exists()).toBe(show);
});
});
describe('mock extension', () => {
let pollRequest;

View File

@ -0,0 +1,171 @@
// Jest spec for the WorkItemLabels sidebar widget: exercises the label
// token-selector's search triggering (focus / debounced hover), loading
// states, view-only mode and error propagation via mocked Apollo queries.
import { GlTokenSelector, GlSkeletonLoader } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import labelSearchQuery from '~/vue_shared/components/sidebar/labels_select_widget/graphql/project_labels.query.graphql';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import WorkItemLabels from '~/work_items/components/work_item_labels.vue';
import { i18n } from '~/work_items/constants';
import { temporaryConfig, resolvers } from '~/work_items/graphql/provider';
import { projectLabelsResponse, mockLabels, workItemQueryResponse } from '../mock_data';

Vue.use(VueApollo);

const workItemId = 'gid://gitlab/WorkItem/1';

describe('WorkItemLabels component', () => {
  let wrapper;

  // Child-component/element finders.
  const findTokenSelector = () => wrapper.findComponent(GlTokenSelector);
  const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
  const findEmptyState = () => wrapper.findByTestId('empty-state');

  // Apollo handlers: a resolving label search and a rejecting one.
  const successSearchQueryHandler = jest.fn().mockResolvedValue(projectLabelsResponse);
  const errorHandler = jest.fn().mockRejectedValue('Houston, we have a problem');

  const createComponent = ({
    labels = mockLabels,
    canUpdate = true,
    searchQueryHandler = successSearchQueryHandler,
  } = {}) => {
    const apolloProvider = createMockApollo([[labelSearchQuery, searchQueryHandler]], resolvers, {
      typePolicies: temporaryConfig.cacheConfig.typePolicies,
    });

    // Pre-seed the cache so the component's work item query resolves
    // without a network handler.
    apolloProvider.clients.defaultClient.writeQuery({
      query: workItemQuery,
      variables: {
        id: workItemId,
      },
      data: workItemQueryResponse.data,
    });

    wrapper = mountExtended(WorkItemLabels, {
      provide: {
        fullPath: 'test-project-path',
      },
      propsData: {
        labels,
        workItemId,
        canUpdate,
      },
      // attachTo is needed so document.activeElement reflects focus below.
      attachTo: document.body,
      apolloProvider,
    });
  };

  afterEach(() => {
    wrapper.destroy();
  });

  it('focuses token selector on token selector input event', async () => {
    createComponent();

    findTokenSelector().vm.$emit('input', [mockLabels[0]]);
    await nextTick();

    expect(findEmptyState().exists()).toBe(false);
    expect(findTokenSelector().element.contains(document.activeElement)).toBe(true);
  });

  it('does not start search by default', () => {
    createComponent();

    expect(findTokenSelector().props('loading')).toBe(false);
    expect(findTokenSelector().props('dropdownItems')).toEqual([]);
  });

  it('starts search on hovering for more than 250ms', async () => {
    createComponent();

    // Hover is debounced; advancing fake timers past the debounce window
    // should kick off the search query.
    findTokenSelector().trigger('mouseover');
    jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
    await nextTick();

    expect(findTokenSelector().props('loading')).toBe(true);
  });

  it('starts search on focusing token selector', async () => {
    createComponent();

    findTokenSelector().vm.$emit('focus');
    await nextTick();

    expect(findTokenSelector().props('loading')).toBe(true);
  });

  it('does not start searching if token-selector was hovered for less than 250ms', async () => {
    createComponent();

    findTokenSelector().trigger('mouseover');
    jest.advanceTimersByTime(100);
    await nextTick();

    expect(findTokenSelector().props('loading')).toBe(false);
  });

  it('does not start searching if cursor was moved out from token selector before 250ms passed', async () => {
    createComponent();

    // mouseout before the debounce elapses should cancel the pending search.
    findTokenSelector().trigger('mouseover');
    jest.advanceTimersByTime(100);

    findTokenSelector().trigger('mouseout');
    jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
    await nextTick();

    expect(findTokenSelector().props('loading')).toBe(false);
  });

  it('shows skeleton loader on dropdown when loading', async () => {
    createComponent();

    findTokenSelector().vm.$emit('focus');
    await nextTick();

    expect(findSkeletonLoader().exists()).toBe(true);
  });

  it('shows list in dropdown when loaded', async () => {
    createComponent();

    findTokenSelector().vm.$emit('focus');
    await nextTick();

    // Loader first, then the resolved labels once promises flush.
    expect(findSkeletonLoader().exists()).toBe(true);

    await waitForPromises();

    expect(findSkeletonLoader().exists()).toBe(false);
    expect(findTokenSelector().props('dropdownItems')).toHaveLength(2);
  });

  it.each([true, false])(
    'passes canUpdate=%s prop to view-only of token-selector',
    async (canUpdate) => {
      createComponent({ canUpdate });

      await waitForPromises();

      // viewOnly is the inverse of canUpdate.
      expect(findTokenSelector().props('viewOnly')).toBe(!canUpdate);
    },
  );

  it('emits error event if search query fails', async () => {
    createComponent({ searchQueryHandler: errorHandler });

    findTokenSelector().vm.$emit('focus');
    await waitForPromises();

    expect(wrapper.emitted('error')).toEqual([[i18n.fetchError]]);
  });

  it('should search for with correct key after text input', async () => {
    const searchKey = 'Hello';

    createComponent();

    findTokenSelector().vm.$emit('focus');
    findTokenSelector().vm.$emit('text-input', searchKey);
    await waitForPromises();

    expect(successSearchQueryHandler).toHaveBeenCalledWith(
      expect.objectContaining({ search: searchKey }),
    );
  });
});

View File

@ -456,3 +456,34 @@ export const currentUserNullResponse = {
currentUser: null,
},
};
// Builds a GraphQL Label fixture; all mock labels share the typename and an
// empty description, differing only in id/title/colors.
const buildMockLabel = ({ id, title, color, textColor }) => ({
  __typename: 'Label',
  id: `gid://gitlab/Label/${id}`,
  title,
  description: '',
  color,
  textColor,
});

export const mockLabels = [
  buildMockLabel({ id: 1, title: 'Label 1', color: '#f00', textColor: '#00f' }),
  buildMockLabel({ id: 2, title: 'Label 2', color: '#b00', textColor: '#00b' }),
];

// Shape of the project_labels GraphQL query response, reusing mockLabels as
// the returned nodes.
export const projectLabelsResponse = {
  data: {
    workspace: {
      id: '1',
      __typename: 'Project',
      labels: {
        nodes: mockLabels,
      },
    },
  },
};

View File

@ -9,6 +9,7 @@ import WorkItemDescription from '~/work_items/components/work_item_description.v
import WorkItemState from '~/work_items/components/work_item_state.vue';
import WorkItemTitle from '~/work_items/components/work_item_title.vue';
import WorkItemAssignees from '~/work_items/components/work_item_assignees.vue';
import WorkItemLabels from '~/work_items/components/work_item_labels.vue';
import WorkItemWeight from '~/work_items/components/work_item_weight.vue';
import { i18n } from '~/work_items/constants';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
@ -32,6 +33,7 @@ describe('WorkItemDetail component', () => {
const findWorkItemState = () => wrapper.findComponent(WorkItemState);
const findWorkItemDescription = () => wrapper.findComponent(WorkItemDescription);
const findWorkItemAssignees = () => wrapper.findComponent(WorkItemAssignees);
const findWorkItemLabels = () => wrapper.findComponent(WorkItemLabels);
const findWorkItemWeight = () => wrapper.findComponent(WorkItemWeight);
const createComponent = ({
@ -203,6 +205,19 @@ describe('WorkItemDetail component', () => {
expect(findWorkItemAssignees().exists()).toBe(false);
});
describe('labels widget', () => {
it.each`
description | includeWidgets | exists
${'renders when widget is returned from API'} | ${true} | ${true}
${'does not render when widget is not returned from API'} | ${false} | ${false}
`('$description', async ({ includeWidgets, exists }) => {
createComponent({ includeWidgets, workItemsMvc2Enabled: true });
await waitForPromises();
expect(findWorkItemLabels().exists()).toBe(exists);
});
});
describe('weight widget', () => {
describe('when work_items_mvc_2 feature flag is enabled', () => {
describe.each`

View File

@ -221,48 +221,56 @@ RSpec.describe AvatarsHelper do
stub_application_setting(gravatar_enabled?: true)
end
it 'returns a generic avatar when email is blank' do
expect(helper.gravatar_icon('')).to match_asset_path(described_class::DEFAULT_AVATAR_PATH)
context 'with FIPS not enabled', fips_mode: false do
it 'returns a generic avatar when email is blank' do
expect(helper.gravatar_icon('')).to match_asset_path(described_class::DEFAULT_AVATAR_PATH)
end
it 'returns a valid Gravatar URL' do
stub_config_setting(https: false)
expect(helper.gravatar_icon(user_email))
.to match('https://www.gravatar.com/avatar/b58c6f14d292556214bd64909bcdb118')
end
it 'uses HTTPs when configured' do
stub_config_setting(https: true)
expect(helper.gravatar_icon(user_email))
.to match('https://secure.gravatar.com')
end
it 'returns custom gravatar path when gravatar_url is set' do
stub_gravatar_setting(plain_url: 'http://example.local/?s=%{size}&hash=%{hash}')
expect(gravatar_icon(user_email, 20))
.to eq('http://example.local/?s=40&hash=b58c6f14d292556214bd64909bcdb118')
end
it 'accepts a custom size argument' do
expect(helper.gravatar_icon(user_email, 64)).to include '?s=128'
end
it 'defaults size to 40@2x when given an invalid size' do
expect(helper.gravatar_icon(user_email, nil)).to include '?s=80'
end
it 'accepts a scaling factor' do
expect(helper.gravatar_icon(user_email, 40, 3)).to include '?s=120'
end
it 'ignores case and surrounding whitespace' do
normal = helper.gravatar_icon('foo@example.com')
upcase = helper.gravatar_icon(' FOO@EXAMPLE.COM ')
expect(normal).to eq upcase
end
end
it 'returns a valid Gravatar URL' do
stub_config_setting(https: false)
expect(helper.gravatar_icon(user_email))
.to match('https://www.gravatar.com/avatar/b58c6f14d292556214bd64909bcdb118')
end
it 'uses HTTPs when configured' do
stub_config_setting(https: true)
expect(helper.gravatar_icon(user_email))
.to match('https://secure.gravatar.com')
end
it 'returns custom gravatar path when gravatar_url is set' do
stub_gravatar_setting(plain_url: 'http://example.local/?s=%{size}&hash=%{hash}')
expect(gravatar_icon(user_email, 20))
.to eq('http://example.local/?s=40&hash=b58c6f14d292556214bd64909bcdb118')
end
it 'accepts a custom size argument' do
expect(helper.gravatar_icon(user_email, 64)).to include '?s=128'
end
it 'defaults size to 40@2x when given an invalid size' do
expect(helper.gravatar_icon(user_email, nil)).to include '?s=80'
end
it 'accepts a scaling factor' do
expect(helper.gravatar_icon(user_email, 40, 3)).to include '?s=120'
end
it 'ignores case and surrounding whitespace' do
normal = helper.gravatar_icon('foo@example.com')
upcase = helper.gravatar_icon(' FOO@EXAMPLE.COM ')
expect(normal).to eq upcase
context 'with FIPS enabled', :fips_mode do
it 'returns a generic avatar' do
expect(helper.gravatar_icon(user_email)).to match_asset_path(described_class::DEFAULT_AVATAR_PATH)
end
end
end
end

View File

@ -278,13 +278,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
context 'when workflow rules is not used' do
let(:workflow) { double('workflow', 'has_rules?' => false) }
let(:ci_value_change_for_processable_and_rules_entry) { true }
before do
stub_feature_flags(
ci_value_change_for_processable_and_rules_entry: ci_value_change_for_processable_and_rules_entry
)
entry.compose!(deps)
end
@ -308,14 +303,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
it 'raises a warning' do
expect(entry.warnings).to contain_exactly(/may allow multiple pipelines/)
end
context 'when the FF ci_value_change_for_processable_and_rules_entry is disabled' do
let(:ci_value_change_for_processable_and_rules_entry) { false }
it 'raises a warning' do
expect(entry.warnings).to contain_exactly(/may allow multiple pipelines/)
end
end
end
context 'and its value is `never`' do

View File

@ -1,13 +1,9 @@
# frozen_string_literal: true
require 'fast_spec_helper'
require 'support/helpers/stubbed_feature'
require 'support/helpers/stub_feature_flags'
require_dependency 'active_model'
RSpec.describe Gitlab::Ci::Config::Entry::Rules do
include StubFeatureFlags
let(:factory) do
Gitlab::Config::Entry::Factory.new(described_class)
.metadata(metadata)
@ -106,14 +102,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules do
end
it { is_expected.to eq([]) }
context 'when the FF ci_value_change_for_processable_and_rules_entry is disabled' do
before do
stub_feature_flags(ci_value_change_for_processable_and_rules_entry: false)
end
it { is_expected.to eq([config]) }
end
end
context 'with nested rules' do

View File

@ -0,0 +1,87 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for Gitlab::Issuable::Clone::AttributesRewriter#execute, which builds
# the attribute hash (:label_ids, :milestone_id) an issue should receive when
# cloned into a new parent (project or group). Labels/milestones are matched
# by title against what exists in the target parent.
RSpec.describe Gitlab::Issuable::Clone::AttributesRewriter do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project1) { create(:project, :public, group: group) }
let_it_be(:project2) { create(:project, :public, group: group) }
let_it_be(:original_issue) { create(:issue, project: project1) }
# Default subject: rewrite attributes for a clone from project1 to project2.
let(:new_attributes) { described_class.new(user, original_issue, project2).execute }
context 'setting labels' do
it 'sets labels present in the new project and group labels' do
# Only 'label1' exists under both projects; group_label is visible to both.
project1_label_1 = create(:label, title: 'label1', project: project1)
project1_label_2 = create(:label, title: 'label2', project: project1)
project2_label_1 = create(:label, title: 'label1', project: project2)
group_label = create(:group_label, title: 'group_label', group: group)
# 'label3' exists only in the target and is not on the original issue,
# so it must not be picked up.
create(:label, title: 'label3', project: project2)
original_issue.update!(labels: [project1_label_1, project1_label_2, group_label])
expect(new_attributes[:label_ids]).to match_array([project2_label_1.id, group_label.id])
end
it 'does not set any labels when not used on the original issue' do
expect(new_attributes[:label_ids]).to be_empty
end
end
context 'setting milestones' do
it 'sets milestone to nil when old issue milestone is not in the new project' do
milestone = create(:milestone, title: 'milestone', project: project1)
original_issue.update!(milestone: milestone)
expect(new_attributes[:milestone_id]).to be_nil
end
it 'copies the milestone when old issue milestone title is in the new project' do
# Matching is by title: the project2 milestone with the same title wins.
milestone_project1 = create(:milestone, title: 'milestone', project: project1)
milestone_project2 = create(:milestone, title: 'milestone', project: project2)
original_issue.update!(milestone: milestone_project1)
expect(new_attributes[:milestone_id]).to eq(milestone_project2.id)
end
it 'copies the milestone when old issue milestone is a group milestone' do
# Group milestones are shared by both projects, so the same id is kept.
milestone = create(:milestone, title: 'milestone', group: group)
original_issue.update!(milestone: milestone)
expect(new_attributes[:milestone_id]).to eq(milestone.id)
end
context 'when include_milestone is false' do
# execute(include_milestone: false) must omit the milestone entirely.
let(:new_attributes) { described_class.new(user, original_issue, project2).execute(include_milestone: false) }
it 'does not return any milestone' do
milestone = create(:milestone, title: 'milestone', group: group)
original_issue.update!(milestone: milestone)
expect(new_attributes[:milestone_id]).to be_nil
end
end
end
context 'when target parent is a group' do
let(:new_attributes) { described_class.new(user, original_issue, group).execute }
context 'setting labels' do
let(:project_label1) { create(:label, title: 'label1', project: project1) }
let!(:project_label2) { create(:label, title: 'label2', project: project1) }
let(:group_label1) { create(:group_label, title: 'group_label', group: group) }
let!(:group_label2) { create(:group_label, title: 'label2', group: group) }
it 'keeps group labels and merges project labels where possible' do
# project_label2 maps onto group_label2 by title; project_label1 has no
# group-level counterpart and is dropped.
original_issue.update!(labels: [project_label1, project_label2, group_label1])
expect(new_attributes[:label_ids]).to match_array([group_label1.id, group_label2.id])
end
end
end
end

View File

@ -0,0 +1,91 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for Gitlab::Issuable::Clone::CopyResourceEventsService#execute, which
# copies resource label / milestone / state events from an original issue onto
# a newly cloned issue (mapping milestones to the target project by title).
RSpec.describe Gitlab::Issuable::Clone::CopyResourceEventsService do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project1) { create(:project, :public, group: group) }
let_it_be(:project2) { create(:project, :public, group: group) }
let_it_be(:new_issue) { create(:issue, project: project2) }
let_it_be_with_reload(:original_issue) { create(:issue, project: project1) }
subject { described_class.new(user, original_issue, new_issue) }
it 'copies the resource label events' do
resource_label_events = create_list(:resource_label_event, 2, issue: original_issue)
subject.execute
expected = resource_label_events.map(&:label_id)
expect(new_issue.resource_label_events.map(&:label_id)).to match_array(expected)
end
context 'with existing milestone events' do
# milestone1/milestone2 exist in both projects by title; milestone3 exists
# only in the source project and its event should not be carried over.
let!(:milestone1_project1) { create(:milestone, title: 'milestone1', project: project1) }
let!(:milestone2_project1) { create(:milestone, title: 'milestone2', project: project1) }
let!(:milestone3_project1) { create(:milestone, title: 'milestone3', project: project1) }
let!(:milestone1_project2) { create(:milestone, title: 'milestone1', project: project2) }
let!(:milestone2_project2) { create(:milestone, title: 'milestone2', project: project2) }
before do
original_issue.update!(milestone: milestone2_project1)
create_event(milestone1_project1)
create_event(milestone2_project1)
create_event(nil, 'remove')
create_event(milestone3_project1)
end
it 'copies existing resource milestone events' do
subject.execute
new_issue_milestone_events = new_issue.reload.resource_milestone_events
# 4 events were created, but the one for milestone3 (no counterpart in
# the target project) is skipped, leaving 3.
expect(new_issue_milestone_events.count).to eq(3)
expect_milestone_event(
new_issue_milestone_events.first, milestone: milestone1_project2, action: 'add', state: 'opened'
)
expect_milestone_event(
new_issue_milestone_events.second, milestone: milestone2_project2, action: 'add', state: 'opened'
)
expect_milestone_event(
new_issue_milestone_events.third, milestone: nil, action: 'remove', state: 'opened'
)
end
# Creates a resource milestone event on the original issue.
def create_event(milestone, action = 'add')
create(:resource_milestone_event, issue: original_issue, milestone: milestone, action: action)
end
# Asserts milestone id, action and state of a copied milestone event.
def expect_milestone_event(event, expected_attrs)
expect(event.milestone_id).to eq(expected_attrs[:milestone]&.id)
expect(event.action).to eq(expected_attrs[:action])
expect(event.state).to eq(expected_attrs[:state])
end
end
context 'with existing state events' do
let!(:event1) { create(:resource_state_event, issue: original_issue, state: 'opened') }
let!(:event2) { create(:resource_state_event, issue: original_issue, state: 'closed') }
let!(:event3) { create(:resource_state_event, issue: original_issue, state: 'reopened') }
it 'copies existing state events as expected' do
subject.execute
state_events = new_issue.reload.resource_state_events
expect(state_events.size).to eq(3)
expect_state_event(state_events.first, issue: new_issue, state: 'opened')
expect_state_event(state_events.second, issue: new_issue, state: 'closed')
expect_state_event(state_events.third, issue: new_issue, state: 'reopened')
end
# Asserts the copied state event points at the new issue with the given state.
def expect_state_event(event, expected_attrs)
expect(event.issue_id).to eq(expected_attrs[:issue]&.id)
expect(event.state).to eq(expected_attrs[:state])
end
end
end

View File

@ -93,30 +93,11 @@ RSpec.describe Gitlab::Tracking::StandardContext do
end
context 'with incorrect argument type' do
context 'when standard_context_type_check FF is disabled' do
before do
stub_feature_flags(standard_context_type_check: false)
end
subject { described_class.new(project: create(:group)) }
subject { described_class.new(project: create(:group)) }
it 'does not call `track_and_raise_for_dev_exception`' do
expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
snowplow_context
end
end
context 'when standard_context_type_check FF is enabled' do
before do
stub_feature_flags(standard_context_type_check: true)
end
subject { described_class.new(project: create(:group)) }
it 'does call `track_and_raise_for_dev_exception`' do
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
snowplow_context
end
it 'does call `track_and_raise_for_dev_exception`' do
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
snowplow_context
end
end

View File

@ -4286,6 +4286,18 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
describe 'transition to closed' do
context 'with merge error' do
subject { create(:merge_request, merge_error: 'merge error') }
it 'clears merge error' do
subject.close!
expect(subject.reload.merge_error).to eq(nil)
end
end
end
describe 'transition to cannot_be_merged' do
let(:notification_service) { double(:notification_service) }
let(:todo_service) { double(:todo_service) }

View File

@ -10,12 +10,24 @@ RSpec.describe API::Geo do
include_context 'workhorse headers'
let(:non_proxy_response_schema) do
{
'type' => 'object',
'additionalProperties' => false,
'required' => %w(geo_enabled),
'properties' => {
'geo_enabled' => { 'type' => 'boolean' }
}
}
end
context 'with valid auth' do
it 'returns empty data' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
expect(json_response).to match_schema(non_proxy_response_schema)
expect(json_response['geo_enabled']).to be_falsey
end
end

View File

@ -47,6 +47,7 @@ RSpec.describe "Create a work item from a task in a work item's description" do
expect(work_item.description).to eq("- [ ] #{created_work_item.to_reference}+")
expect(created_work_item.issue_type).to eq('task')
expect(created_work_item.work_item_type.base_type).to eq('task')
expect(created_work_item.work_item_parent).to eq(work_item)
expect(mutation_response['workItem']).to include('id' => work_item.to_global_id.to_s)
expect(mutation_response['newWorkItem']).to include('id' => created_work_item.to_global_id.to_s)
end

View File

@ -15,7 +15,7 @@ RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute'
allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
.to receive(:check_runner_upgrade_status)
.and_return(:recommended)
.and_return({ recommended: ::Gitlab::VersionInfo.new(14, 0, 2) })
end
context 'with runner with new version' do
@ -27,7 +27,7 @@ RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute'
allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
.to receive(:check_runner_upgrade_status)
.with('14.0.2')
.and_return(:not_available)
.and_return({ not_available: ::Gitlab::VersionInfo.new(14, 0, 2) })
.once
end
@ -59,7 +59,7 @@ RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute'
before do
allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
.to receive(:check_runner_upgrade_status)
.and_return(:not_available)
.and_return({ not_available: ::Gitlab::VersionInfo.new(14, 0, 2) })
end
it 'deletes orphan ci_runner_versions entry', :aggregate_failures do
@ -81,7 +81,7 @@ RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute'
before do
allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
.to receive(:check_runner_upgrade_status)
.and_return(:not_available)
.and_return({ not_available: ::Gitlab::VersionInfo.new(14, 0, 1) })
end
it 'does not modify ci_runner_versions entries', :aggregate_failures do
@ -101,7 +101,7 @@ RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute'
before do
allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
.to receive(:check_runner_upgrade_status)
.and_return(:error)
.and_return({ error: ::Gitlab::VersionInfo.new(14, 0, 1) })
end
it 'makes no changes to ci_runner_versions', :aggregate_failures do

View File

@ -1,140 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
# NOTE(review): this spec comes from the DELETED side of the diff (the old
# Issuable::Clone::AttributesRewriter, which mutated new_issue directly and
# also copied resource events). Kept here only as removed-file context.
RSpec.describe Issuable::Clone::AttributesRewriter do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project1) { create(:project, :public, group: group) }
let(:project2) { create(:project, :public, group: group) }
let(:original_issue) { create(:issue, project: project1) }
let(:new_issue) { create(:issue, project: project2) }
# Old interface: the service received the new issue and updated it in place.
subject { described_class.new(user, original_issue, new_issue) }
context 'setting labels' do
it 'sets labels present in the new project and group labels' do
project1_label_1 = create(:label, title: 'label1', project: project1)
project1_label_2 = create(:label, title: 'label2', project: project1)
project2_label_1 = create(:label, title: 'label1', project: project2)
group_label = create(:group_label, title: 'group_label', group: group)
create(:label, title: 'label3', project: project2)
original_issue.update!(labels: [project1_label_1, project1_label_2, group_label])
subject.execute
expect(new_issue.reload.labels).to match_array([project2_label_1, group_label])
end
it 'does not set any labels when not used on the original issue' do
subject.execute
expect(new_issue.reload.labels).to be_empty
end
it 'copies the resource label events' do
resource_label_events = create_list(:resource_label_event, 2, issue: original_issue)
subject.execute
expected = resource_label_events.map(&:label_id)
expect(new_issue.resource_label_events.map(&:label_id)).to match_array(expected)
end
end
context 'setting milestones' do
it 'sets milestone to nil when old issue milestone is not in the new project' do
milestone = create(:milestone, title: 'milestone', project: project1)
original_issue.update!(milestone: milestone)
subject.execute
expect(new_issue.reload.milestone).to be_nil
end
it 'copies the milestone when old issue milestone title is in the new project' do
milestone_project1 = create(:milestone, title: 'milestone', project: project1)
milestone_project2 = create(:milestone, title: 'milestone', project: project2)
original_issue.update!(milestone: milestone_project1)
subject.execute
expect(new_issue.reload.milestone).to eq(milestone_project2)
end
it 'copies the milestone when old issue milestone is a group milestone' do
milestone = create(:milestone, title: 'milestone', group: group)
original_issue.update!(milestone: milestone)
subject.execute
expect(new_issue.reload.milestone).to eq(milestone)
end
context 'with existing milestone events' do
let!(:milestone1_project1) { create(:milestone, title: 'milestone1', project: project1) }
let!(:milestone2_project1) { create(:milestone, title: 'milestone2', project: project1) }
let!(:milestone3_project1) { create(:milestone, title: 'milestone3', project: project1) }
let!(:milestone1_project2) { create(:milestone, title: 'milestone1', project: project2) }
let!(:milestone2_project2) { create(:milestone, title: 'milestone2', project: project2) }
before do
original_issue.update!(milestone: milestone2_project1)
create_event(milestone1_project1)
create_event(milestone2_project1)
create_event(nil, 'remove')
create_event(milestone3_project1)
end
it 'copies existing resource milestone events' do
subject.execute
new_issue_milestone_events = new_issue.reload.resource_milestone_events
# The milestone3 event has no counterpart milestone in project2, so
# only 3 of the 4 events are copied.
expect(new_issue_milestone_events.count).to eq(3)
expect_milestone_event(new_issue_milestone_events.first, milestone: milestone1_project2, action: 'add', state: 'opened')
expect_milestone_event(new_issue_milestone_events.second, milestone: milestone2_project2, action: 'add', state: 'opened')
expect_milestone_event(new_issue_milestone_events.third, milestone: nil, action: 'remove', state: 'opened')
end
# Creates a resource milestone event on the original issue.
def create_event(milestone, action = 'add')
create(:resource_milestone_event, issue: original_issue, milestone: milestone, action: action)
end
# Asserts milestone id, action and state of a copied milestone event.
def expect_milestone_event(event, expected_attrs)
expect(event.milestone_id).to eq(expected_attrs[:milestone]&.id)
expect(event.action).to eq(expected_attrs[:action])
expect(event.state).to eq(expected_attrs[:state])
end
end
context 'with existing state events' do
let!(:event1) { create(:resource_state_event, issue: original_issue, state: 'opened') }
let!(:event2) { create(:resource_state_event, issue: original_issue, state: 'closed') }
let!(:event3) { create(:resource_state_event, issue: original_issue, state: 'reopened') }
it 'copies existing state events as expected' do
subject.execute
state_events = new_issue.reload.resource_state_events
expect(state_events.size).to eq(3)
expect_state_event(state_events.first, issue: new_issue, state: 'opened')
expect_state_event(state_events.second, issue: new_issue, state: 'closed')
expect_state_event(state_events.third, issue: new_issue, state: 'reopened')
end
# Asserts the copied state event points at the new issue with the given state.
def expect_state_event(event, expected_attrs)
expect(event.issue_id).to eq(expected_attrs[:issue]&.id)
expect(event.state).to eq(expected_attrs[:state])
end
end
end
end

View File

@ -82,12 +82,14 @@ RSpec.describe Issues::CloneService do
expect(new_issue.iid).to be_present
end
it 'preserves create time' do
expect(old_issue.created_at.strftime('%D')).to eq new_issue.created_at.strftime('%D')
end
it 'sets created_at of new issue to the time of clone' do
future_time = 5.days.from_now
it 'does not copy system notes' do
expect(new_issue.notes.count).to eq(1)
travel_to(future_time) do
new_issue = clone_service.execute(old_issue, new_project, with_notes: with_notes)
expect(new_issue.created_at).to be_like_time(future_time)
end
end
it 'does not set moved_issue' do
@ -105,6 +107,24 @@ RSpec.describe Issues::CloneService do
end
end
context 'issue with system notes and resource events' do
before do
create(:note, :system, noteable: old_issue, project: old_project)
create(:resource_label_event, label: create(:label, project: old_project), issue: old_issue)
create(:resource_state_event, issue: old_issue, state: :reopened)
create(:resource_milestone_event, issue: old_issue, action: 'remove', milestone_id: nil)
end
it 'does not copy system notes and resource events' do
new_issue = clone_service.execute(old_issue, new_project)
# 1 here is for the "cloned from" system note
expect(new_issue.notes.count).to eq(1)
expect(new_issue.resource_state_events).to be_empty
expect(new_issue.resource_milestone_events).to be_empty
end
end
context 'issue with award emoji' do
let!(:award_emoji) { create(:award_emoji, awardable: old_issue) }
@ -124,14 +144,27 @@ RSpec.describe Issues::CloneService do
create(:issue, title: title, description: description, project: old_project, author: author, milestone: milestone)
end
before do
create(:resource_milestone_event, issue: old_issue, milestone: milestone, action: :add)
end
it 'does not create extra milestone events' do
it 'copies the milestone and creates a resource_milestone_event' do
new_issue = clone_service.execute(old_issue, new_project)
expect(new_issue.resource_milestone_events.count).to eq(old_issue.resource_milestone_events.count)
expect(new_issue.milestone).to eq(milestone)
expect(new_issue.resource_milestone_events.count).to eq(1)
end
end
context 'issue with label' do
let(:label) { create(:group_label, group: sub_group_1) }
let(:new_project) { create(:project, namespace: sub_group_1) }
let(:old_issue) do
create(:issue, project: old_project, labels: [label])
end
it 'copies the label and creates a resource_label_event' do
new_issue = clone_service.execute(old_issue, new_project)
expect(new_issue.labels).to contain_exactly(label)
expect(new_issue.resource_label_events.count).to eq(1)
end
end

View File

@ -7,13 +7,16 @@ RSpec.describe WorkItems::CreateAndLinkService do
let_it_be(:project) { create(:project, group: group) }
let_it_be(:user) { create(:user) }
let_it_be(:related_work_item) { create(:work_item, project: project) }
let_it_be(:invalid_parent) { create(:work_item, :task, project: project) }
let(:spam_params) { double }
let(:link_params) { {} }
let(:params) do
{
title: 'Awesome work item',
description: 'please fix'
description: 'please fix',
work_item_type_id: WorkItems::Type.default_by_type(:task).id
}
end
@ -40,32 +43,32 @@ RSpec.describe WorkItems::CreateAndLinkService do
end
context 'when link params are valid' do
let(:link_params) { { issuable_references: [related_work_item.to_reference] } }
let(:link_params) { { parent_work_item: related_work_item } }
it 'creates a work item successfully with links' do
expect do
service_result
end.to change(WorkItem, :count).by(1).and(
change(IssueLink, :count).by(1)
change(WorkItems::ParentLink, :count).by(1)
)
end
end
context 'when link params are invalid' do
let(:link_params) { { issuable_references: ['invalid reference'] } }
context 'when link creation fails' do
let(:link_params) { { parent_work_item: invalid_parent } }
it { is_expected.to be_error }
it 'does not create a link and does not rollback transaction' do
expect do
service_result
end.to not_change(IssueLink, :count).and(
end.to not_change(WorkItems::ParentLink, :count).and(
change(WorkItem, :count).by(1)
)
end
it 'returns a link creation error message' do
expect(service_result.errors).to contain_exactly('No matching issue found. Make sure that you are adding a valid issue URL.')
expect(service_result.errors).to contain_exactly(/Only Issue can be parent of Task./)
end
end
end
@ -84,7 +87,7 @@ RSpec.describe WorkItems::CreateAndLinkService do
expect do
service_result
end.to not_change(WorkItem, :count).and(
not_change(IssueLink, :count)
not_change(WorkItems::ParentLink, :count)
)
end

View File

@ -32,7 +32,7 @@ RSpec.describe WorkItems::CreateFromTaskService do
expect do
service_result
end.to not_change(WorkItem, :count).and(
not_change(IssueLink, :count)
not_change(WorkItems::ParentLink, :count)
)
end
end
@ -47,12 +47,14 @@ RSpec.describe WorkItems::CreateFromTaskService do
context 'when work item params are valid' do
it { is_expected.to be_success }
it 'creates a work item and links it to the original work item successfully' do
it 'creates a work item and creates parent link to the original work item' do
expect do
service_result
end.to change(WorkItem, :count).by(1).and(
change(IssueLink, :count)
change(WorkItems::ParentLink, :count).by(1)
)
expect(work_item_to_update.reload.work_item_children).not_to be_empty
end
it 'replaces the original issue markdown description with new work item reference' do
@ -73,7 +75,7 @@ RSpec.describe WorkItems::CreateFromTaskService do
expect do
service_result
end.to not_change(WorkItem, :count).and(
not_change(IssueLink, :count)
not_change(WorkItems::ParentLink, :count)
)
end

View File

@ -89,10 +89,13 @@ RSpec.shared_examples 'clone quick action' do
let(:bug) { create(:label, project: project, title: 'bug') }
let(:wontfix) { create(:label, project: project, title: 'wontfix') }
let!(:target_milestone) { create(:milestone, title: '1.0', project: target_project) }
before do
target_project.add_maintainer(user)
# create equivalent labels and milestones in the target project
create(:label, project: target_project, title: 'bug')
create(:label, project: target_project, title: 'wontfix')
create(:milestone, title: '1.0', project: target_project)
end
shared_examples 'applies the commands to issues in both projects, target and source' do

View File

@ -65,11 +65,13 @@ func NewAPI(myURL *url.URL, version string, roundTripper http.RoundTripper) *API
type GeoProxyEndpointResponse struct {
GeoProxyURL string `json:"geo_proxy_url"`
GeoProxyExtraData string `json:"geo_proxy_extra_data"`
GeoEnabled bool `json:"geo_enabled"`
}
type GeoProxyData struct {
GeoProxyURL *url.URL
GeoProxyExtraData string
GeoEnabled bool
}
type HandleFunc func(http.ResponseWriter, *http.Request, *Response)
@ -458,5 +460,6 @@ func (api *API) GetGeoProxyData() (*GeoProxyData, error) {
return &GeoProxyData{
GeoProxyURL: geoProxyURL,
GeoProxyExtraData: response.GeoProxyExtraData,
GeoEnabled: response.GeoEnabled,
}, nil
}

View File

@ -52,6 +52,7 @@ type upstream struct {
geoProxyCableRoute routeEntry
geoProxyRoute routeEntry
geoProxyPollSleep func(time.Duration)
geoPollerDone chan struct{}
accessLogger *logrus.Logger
enableGeoProxyFeature bool
mu sync.RWMutex
@ -81,6 +82,7 @@ func newUpstream(cfg config.Config, accessLogger *logrus.Logger, routesCallback
if up.CableSocket == "" {
up.CableSocket = up.Socket
}
up.geoPollerDone = make(chan struct{})
up.RoundTripper = roundtripper.NewBackendRoundTripper(up.Backend, up.Socket, up.ProxyHeadersTimeout, cfg.DevelopmentMode)
up.CableRoundTripper = roundtripper.NewBackendRoundTripper(up.CableBackend, up.CableSocket, up.ProxyHeadersTimeout, cfg.DevelopmentMode)
up.configureURLPrefix()
@ -92,9 +94,7 @@ func newUpstream(cfg config.Config, accessLogger *logrus.Logger, routesCallback
routesCallback(&up)
if up.enableGeoProxyFeature {
go up.pollGeoProxyAPI()
}
go up.pollGeoProxyAPI()
var correlationOpts []correlation.InboundHandlerOption
if cfg.PropagateCorrelationID {
@ -165,10 +165,8 @@ func (u *upstream) ServeHTTP(w http.ResponseWriter, r *http.Request) {
}
func (u *upstream) findRoute(cleanedPath string, r *http.Request) *routeEntry {
if u.enableGeoProxyFeature {
if route := u.findGeoProxyRoute(cleanedPath, r); route != nil {
return route
}
if route := u.findGeoProxyRoute(cleanedPath, r); route != nil {
return route
}
for _, ro := range u.Routes {
@ -207,7 +205,15 @@ func (u *upstream) findGeoProxyRoute(cleanedPath string, r *http.Request) *route
}
func (u *upstream) pollGeoProxyAPI() {
defer close(u.geoPollerDone)
for {
// Check enableGeoProxyFeature every time because `callGeoProxyApi()` can change its value.
// This is can also be disabled through the GEO_SECONDARY_PROXY env var.
if !u.enableGeoProxyFeature {
break
}
u.callGeoProxyAPI()
u.geoProxyPollSleep(geoProxyApiPollingInterval)
}
@ -221,6 +227,14 @@ func (u *upstream) callGeoProxyAPI() {
return
}
if !geoProxyData.GeoEnabled {
// When Geo is not enabled, we don't need to proxy, as it unnecessarily polls the
// API, whereas a restart is necessary to enable Geo in the first place; at which
// point we get fresh data from the API.
u.enableGeoProxyFeature = false
return
}
hasProxyDataChanged := false
if u.geoProxyBackend.String() != geoProxyData.GeoProxyURL.String() {
// URL changed

View File

@ -12,9 +12,11 @@ import (
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/require"
apipkg "gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upstream/roundtripper"
)
const (
@ -72,6 +74,54 @@ func TestRouting(t *testing.T) {
runTestCases(t, ts, testCases)
}
// TestPollGeoProxyApiStopsWhenExplicitlyDisabled verifies that the Geo proxy
// polling goroutine exits promptly when the feature is off: with
// enableGeoProxyFeature false, pollGeoProxyAPI must close geoPollerDone
// instead of looping forever.
func TestPollGeoProxyApiStopsWhenExplicitlyDisabled(t *testing.T) {
	noSleep := func(time.Duration) {}
	u := upstream{
		enableGeoProxyFeature: false,
		geoProxyPollSleep:     noSleep,
		geoPollerDone:         make(chan struct{}),
	}

	go u.pollGeoProxyAPI()

	// The poller signals termination by closing geoPollerDone; fail if that
	// does not happen within a generous deadline.
	select {
	case <-u.geoPollerDone:
		// Goroutine terminated as expected.
	case <-time.After(10 * time.Second):
		t.Fatal("timeout")
	}
}
// TestPollGeoProxyApiStopsWhenGeoNotEnabled verifies that the polling
// goroutine shuts itself down when the internal API reports
// {"geo_enabled":false}: the poll loop should observe the disabled state
// and close geoPollerDone rather than keep polling.
func TestPollGeoProxyApiStopsWhenGeoNotEnabled(t *testing.T) {
	remoteServer, closeRemote := startRemoteServer("Geo primary")
	defer closeRemote()

	// The local Rails stub answers the Geo proxy endpoint with Geo disabled.
	responseBody := `{"geo_enabled":false}`
	railsServer, closeRails := startRailsServer("Local Rails server", &responseBody)
	defer closeRails()

	cfg := newUpstreamConfig(railsServer.URL)
	rt := roundtripper.NewBackendRoundTripper(cfg.Backend, "", 1*time.Minute, true)
	primaryURL := helper.URLMustParse(remoteServer.URL)

	u := upstream{
		Config:                *cfg,
		RoundTripper:          rt,
		APIClient:             apipkg.NewAPI(primaryURL, "", rt),
		enableGeoProxyFeature: true,
		geoProxyPollSleep:     func(time.Duration) {},
		geoPollerDone:         make(chan struct{}),
	}

	go u.pollGeoProxyAPI()

	// Wait for the poller to notice geo_enabled=false and stop.
	select {
	case <-u.geoPollerDone:
		// Goroutine terminated as expected.
	case <-time.After(10 * time.Second):
		t.Fatal("timeout")
	}
}
// This test can be removed when the environment variable `GEO_SECONDARY_PROXY` is removed
func TestGeoProxyFeatureDisabledOnGeoSecondarySite(t *testing.T) {
// We could just not set up the primary, but then we'd have to assert
@ -79,7 +129,7 @@ func TestGeoProxyFeatureDisabledOnGeoSecondarySite(t *testing.T) {
remoteServer, rsDeferredClose := startRemoteServer("Geo primary")
defer rsDeferredClose()
geoProxyEndpointResponseBody := fmt.Sprintf(`{"geo_proxy_url":"%v"}`, remoteServer.URL)
geoProxyEndpointResponseBody := fmt.Sprintf(`{"geo_enabled":true,"geo_proxy_url":"%v"}`, remoteServer.URL)
railsServer, deferredClose := startRailsServer("Local Rails server", &geoProxyEndpointResponseBody)
defer deferredClose()
@ -109,7 +159,7 @@ func TestGeoProxyFeatureEnabledOnGeoSecondarySite(t *testing.T) {
// This test can be removed when the environment variable `GEO_SECONDARY_PROXY` is removed
func TestGeoProxyFeatureDisabledOnNonGeoSecondarySite(t *testing.T) {
geoProxyEndpointResponseBody := "{}"
geoProxyEndpointResponseBody := `{"geo_enabled":false}`
railsServer, deferredClose := startRailsServer("Local Rails server", &geoProxyEndpointResponseBody)
defer deferredClose()
@ -127,7 +177,7 @@ func TestGeoProxyFeatureDisabledOnNonGeoSecondarySite(t *testing.T) {
}
func TestGeoProxyFeatureEnabledOnNonGeoSecondarySite(t *testing.T) {
geoProxyEndpointResponseBody := "{}"
geoProxyEndpointResponseBody := `{"geo_enabled":false}`
railsServer, deferredClose := startRailsServer("Local Rails server", &geoProxyEndpointResponseBody)
defer deferredClose()
@ -166,8 +216,8 @@ func TestGeoProxyFeatureEnablingAndDisabling(t *testing.T) {
remoteServer, rsDeferredClose := startRemoteServer("Geo primary")
defer rsDeferredClose()
geoProxyEndpointEnabledResponseBody := fmt.Sprintf(`{"geo_proxy_url":"%v"}`, remoteServer.URL)
geoProxyEndpointDisabledResponseBody := "{}"
geoProxyEndpointEnabledResponseBody := fmt.Sprintf(`{"geo_enabled":true,"geo_proxy_url":"%v"}`, remoteServer.URL)
geoProxyEndpointDisabledResponseBody := `{"geo_enabled":true}`
geoProxyEndpointResponseBody := geoProxyEndpointEnabledResponseBody
railsServer, deferredClose := startRailsServer("Local Rails server", &geoProxyEndpointResponseBody)
@ -218,9 +268,9 @@ func TestGeoProxyUpdatesExtraDataWhenChanged(t *testing.T) {
}))
defer remoteServer.Close()
geoProxyEndpointExtraData1 := fmt.Sprintf(`{"geo_proxy_url":"%v","geo_proxy_extra_data":"data1"}`, remoteServer.URL)
geoProxyEndpointExtraData2 := fmt.Sprintf(`{"geo_proxy_url":"%v","geo_proxy_extra_data":"data2"}`, remoteServer.URL)
geoProxyEndpointExtraData3 := fmt.Sprintf(`{"geo_proxy_url":"%v"}`, remoteServer.URL)
geoProxyEndpointExtraData1 := fmt.Sprintf(`{"geo_enabled":true,"geo_proxy_url":"%v","geo_proxy_extra_data":"data1"}`, remoteServer.URL)
geoProxyEndpointExtraData2 := fmt.Sprintf(`{"geo_enabled":true,"geo_proxy_url":"%v","geo_proxy_extra_data":"data2"}`, remoteServer.URL)
geoProxyEndpointExtraData3 := fmt.Sprintf(`{"geo_enabled":true,"geo_proxy_url":"%v"}`, remoteServer.URL)
geoProxyEndpointResponseBody := geoProxyEndpointExtraData1
expectedGeoProxyExtraData = "data1"
@ -253,8 +303,8 @@ func TestGeoProxySetsCustomHeader(t *testing.T) {
json string
extraData string
}{
{"no extra data", `{"geo_proxy_url":"%v"}`, ""},
{"with extra data", `{"geo_proxy_url":"%v","geo_proxy_extra_data":"extra-geo-data"}`, "extra-geo-data"},
{"no extra data", `{"geo_enabled":true,"geo_proxy_url":"%v"}`, ""},
{"with extra data", `{"geo_enabled":true,"geo_proxy_url":"%v","geo_proxy_extra_data":"extra-geo-data"}`, "extra-geo-data"},
}
for _, tc := range testCases {
@ -299,7 +349,7 @@ func runTestCasesWithGeoProxyEnabled(t *testing.T, testCases []testCase) {
remoteServer, rsDeferredClose := startRemoteServer("Geo primary")
defer rsDeferredClose()
geoProxyEndpointResponseBody := fmt.Sprintf(`{"geo_proxy_url":"%v"}`, remoteServer.URL)
geoProxyEndpointResponseBody := fmt.Sprintf(`{"geo_enabled":true,"geo_proxy_url":"%v"}`, remoteServer.URL)
railsServer, deferredClose := startRailsServer("Local Rails server", &geoProxyEndpointResponseBody)
defer deferredClose()