Add latest changes from gitlab-org/gitlab@master

parent 2578890510
commit 2ebd699ede
@@ -19,32 +19,10 @@ Gitlab/Json:
- 'app/controllers/projects_controller.rb'
- 'app/controllers/search_controller.rb'
- 'app/mailers/emails/members.rb'
- 'app/models/concerns/redis_cacheable.rb'
- 'app/models/diff_discussion.rb'
- 'app/models/integrations/assembla.rb'
- 'app/models/integrations/pivotaltracker.rb'
- 'app/models/integrations/pumble.rb'
- 'app/models/integrations/unify_circuit.rb'
- 'app/models/integrations/webex_teams.rb'
- 'app/models/merge_request_diff_commit.rb'
- 'app/presenters/packages/composer/packages_presenter.rb'
- 'app/presenters/projects/security/configuration_presenter.rb'
- 'app/workers/google_cloud/create_cloudsql_instance_worker.rb'
- 'config/initializers/rack_multipart_patch.rb'
- 'db/migrate/20210305031822_create_dast_site_profile_variables.rb'
- 'db/migrate/20210317035357_create_dast_profiles_pipelines.rb'
- 'db/migrate/20210412111213_create_security_orchestration_policy_rule_schedule.rb'
- 'db/migrate/20210423054022_create_dast_site_profiles_pipelines.rb'
- 'db/migrate/20210604032738_create_dast_site_profiles_builds.rb'
- 'db/migrate/20210604051330_create_dast_scanner_profiles_builds.rb'
- 'db/migrate/20210713123345_create_dast_profile_schedule.rb'
- 'db/post_migrate/20210311120155_backfill_events_id_for_bigint_conversion.rb'
- 'db/post_migrate/20210311120156_backfill_push_event_payload_event_id_for_bigint_conversion.rb'
- 'db/post_migrate/20210415101228_backfill_ci_build_needs_for_bigint_conversion.rb'
- 'db/post_migrate/20210420121149_backfill_conversion_of_ci_job_artifacts.rb'
- 'db/post_migrate/20210422023046_backfill_ci_sources_pipelines_source_job_id_for_bigint_conversion.rb'
- 'db/post_migrate/20210615234935_fix_batched_migrations_old_format_job_arguments.rb'
- 'db/post_migrate/20221006172302_adjust_task_note_rename_background_migration_values.rb'
- 'ee/app/controllers/admin/geo/nodes_controller.rb'
- 'ee/app/controllers/ee/admin/application_settings_controller.rb'
- 'ee/app/controllers/ee/search_controller.rb'
Gemfile
@@ -130,8 +130,6 @@ gem 'graphql-docs', '~> 2.1.0', group: [:development, :test]
gem 'graphlient', '~> 0.5.0' # Used by BulkImport feature (group::import)

gem 'hashie', '~> 5.0.0'
# Disable strong_params so that Mash does not respond to :permitted?
gem 'hashie-forbidden_attributes'

# Pagination
gem 'kaminari', '~> 1.2.2'
@@ -193,7 +191,7 @@ gem 'asciidoctor-kroki', '~> 0.5.0', require: false
gem 'rouge', '~> 3.30.0'
gem 'truncato', '~> 0.7.12'
gem 'bootstrap_form', '~> 4.2.0'
gem 'nokogiri', '~> 1.13.8'
gem 'nokogiri', '~> 1.13.9'

# Calendar rendering
gem 'icalendar'
@@ -260,7 +260,6 @@
{"name":"hangouts-chat","version":"0.0.5","platform":"ruby","checksum":"bdbeb6c6e4abc98f395cb273f53b39911b3aa9e248fbbf063242b021ced8b6b6"},
{"name":"hashdiff","version":"1.0.1","platform":"ruby","checksum":"2cd4d04f5080314ecc8403c4e2e00dbaa282dff395e2d031bc16c8d501bdd6db"},
{"name":"hashie","version":"5.0.0","platform":"ruby","checksum":"9d6c4e51f2a36d4616cbc8a322d619a162d8f42815a792596039fc95595603da"},
{"name":"hashie-forbidden_attributes","version":"0.1.1","platform":"ruby","checksum":"3a6ed37f3a314e4fb1dd1e2df6eb7721bcadd023a30bc0b951b2b5285a790fb2"},
{"name":"health_check","version":"3.1.0","platform":"ruby","checksum":"10146508237dc54ed7e24c292d8ba7fb8f9590cf26c66e325b947438c4103b57"},
{"name":"heapy","version":"0.2.0","platform":"ruby","checksum":"74141e845d61ffc7c1e8bf8b127c8cf94544ec7a1181aec613288682543585ea"},
{"name":"html-pipeline","version":"2.14.3","platform":"ruby","checksum":"8a1d4d7128b2141913387cac0f8ba898bb6812557001acc0c2b46910f59413a0"},
@@ -358,16 +357,16 @@
{"name":"nio4r","version":"2.5.8","platform":"java","checksum":"b2b1800f6bf7ce4b797ca8b639ad278a99c9c904fb087a91d944f38e4bd71401"},
{"name":"nio4r","version":"2.5.8","platform":"ruby","checksum":"3becb4ad95ab8ac0a9bd2e1b16466869402be62848082bf6329ae9091f276676"},
{"name":"no_proxy_fix","version":"0.1.2","platform":"ruby","checksum":"4e9b4c31bb146de7fcf347dc1087bb13ac2039b56d50aa019e61036256abcd00"},
{"name":"nokogiri","version":"1.13.8","platform":"aarch64-linux","checksum":"d6b2c45a57738f12fe27783939fe1394e7049246288c7770d3b1fee7f49432a6"},
{"name":"nokogiri","version":"1.13.8","platform":"arm64-darwin","checksum":"00217e48a6995e81dd83014325c0ea0b015023a8922c7bdb2ef1416aa87c1f43"},
{"name":"nokogiri","version":"1.13.8","platform":"java","checksum":"9d04c616900e2b5118e501436ebb9bc48520d08f3695d012a314006e28082f72"},
{"name":"nokogiri","version":"1.13.8","platform":"ruby","checksum":"79c279298b2f22fd4e760f49990c7930436bac1b1cfeff7bacff192f30edea3c"},
{"name":"nokogiri","version":"1.13.8","platform":"x64-mingw-ucrt","checksum":"98f7dac7583f07a84ec3fcc01dc03a66fce10f412cd363fce7de749acdb2a42d"},
{"name":"nokogiri","version":"1.13.8","platform":"x64-mingw32","checksum":"117a71b37f2e1d774a9f031d393e72d5d04b92af8036e0c1a8dd509c247b2013"},
{"name":"nokogiri","version":"1.13.8","platform":"x86-linux","checksum":"6d04342456edfb8fbc041d0c2cf5a59baaa7aacdda414b2333100b02f85d441d"},
{"name":"nokogiri","version":"1.13.8","platform":"x86-mingw32","checksum":"0529d558b4280a55bc7af500d3d4d590b7c059c814a0cea52e4e18cb30c25d15"},
{"name":"nokogiri","version":"1.13.8","platform":"x86_64-darwin","checksum":"8966d79e687b271df87a4b240456597c43cd98584e3f783fc35de4f066486421"},
{"name":"nokogiri","version":"1.13.8","platform":"x86_64-linux","checksum":"344f1bc66feac787e5b2053c6e9095d1f33605083e58ddf2b8d4eef257bccc5f"},
{"name":"nokogiri","version":"1.13.9","platform":"aarch64-linux","checksum":"9b69829561d30c4461ea803baeaf3460e8b145cff7a26ce397119577a4083a02"},
{"name":"nokogiri","version":"1.13.9","platform":"arm64-darwin","checksum":"e76ebb4b7b2e02c72b2d1541289f8b0679fb5984867cf199d89b8ef485764956"},
{"name":"nokogiri","version":"1.13.9","platform":"java","checksum":"15bae7d08bddeaa898d8e3f558723300137c26a2dc2632a1f89c8574c4467165"},
{"name":"nokogiri","version":"1.13.9","platform":"ruby","checksum":"96f37c1baf0234d3ae54c2c89aef7220d4a8a1b03d2675ff7723565b0a095531"},
{"name":"nokogiri","version":"1.13.9","platform":"x64-mingw-ucrt","checksum":"f6a1dbc7229184357f3129503530af73cc59ceba4932c700a458a561edbe04b9"},
{"name":"nokogiri","version":"1.13.9","platform":"x64-mingw32","checksum":"36d935d799baa4dc488024f71881ff0bc8b172cecdfc54781169c40ec02cbdb3"},
{"name":"nokogiri","version":"1.13.9","platform":"x86-linux","checksum":"ebaf82aa9a11b8fafb67873d19ee48efb565040f04c898cdce8ca0cd53ff1a12"},
{"name":"nokogiri","version":"1.13.9","platform":"x86-mingw32","checksum":"11789a2a11b28bc028ee111f23311461104d8c4468d5b901ab7536b282504154"},
{"name":"nokogiri","version":"1.13.9","platform":"x86_64-darwin","checksum":"01830e1646803ff91c0fe94bc768ff40082c6de8cfa563dafd01b3f7d5f9d795"},
{"name":"nokogiri","version":"1.13.9","platform":"x86_64-linux","checksum":"8e93b8adec22958013799c8690d81c2cdf8a90b6f6e8150ab22e11895844d781"},
{"name":"notiffany","version":"0.1.3","platform":"ruby","checksum":"d37669605b7f8dcb04e004e6373e2a780b98c776f8eb503ac9578557d7808738"},
{"name":"numerizer","version":"0.2.0","platform":"ruby","checksum":"e58076d5ee5370417b7e52d9cb25836d62acd1b8d9a194c308707986c1705d7b"},
{"name":"oauth","version":"0.5.6","platform":"ruby","checksum":"4085fe28e0c5e2434135e00a6555294fd2a4ff96a98d1bdecdcd619fc6368dff"},
@@ -710,8 +710,6 @@ GEM
hangouts-chat (0.0.5)
hashdiff (1.0.1)
hashie (5.0.0)
hashie-forbidden_attributes (0.1.1)
hashie (>= 3.0)
health_check (3.1.0)
railties (>= 5.0)
heapy (0.2.0)
@@ -908,7 +906,7 @@ GEM
netrc (0.11.0)
nio4r (2.5.8)
no_proxy_fix (0.1.2)
nokogiri (1.13.8)
nokogiri (1.13.9)
mini_portile2 (~> 2.8.0)
racc (~> 1.4)
notiffany (0.1.3)
@@ -1665,7 +1663,6 @@ DEPENDENCIES
hamlit (~> 2.15.0)
hangouts-chat (~> 0.0.5)
hashie (~> 5.0.0)
hashie-forbidden_attributes
health_check (~> 3.0)
html-pipeline (~> 2.14.3)
html2text
@@ -1704,7 +1701,7 @@ DEPENDENCIES
multi_json (~> 1.14.1)
net-ldap (~> 0.16.3)
net-ntp
nokogiri (~> 1.13.8)
nokogiri (~> 1.13.9)
oauth2 (~> 2.0)
octokit (~> 4.15)
ohai (~> 16.10)
@@ -15,7 +15,7 @@ $.fn.renderGFM = function renderGFM() {
syntaxHighlight(this.find('.js-syntax-highlight').get());
renderKroki(this.find('.js-render-kroki[hidden]').get());
renderMath(this.find('.js-render-math'));
renderSandboxedMermaid(this.find('.js-render-mermaid'));
renderSandboxedMermaid(this.find('.js-render-mermaid').get());
renderJSONTable(
Array.from(this.find('[lang="json"][data-lang-params="table"]').get()).map((e) => e.parentNode),
);
@@ -1,5 +1,4 @@
import $ from 'jquery';
import { once, countBy } from 'lodash';
import { countBy } from 'lodash';
import { __ } from '~/locale';
import {
getBaseURL,
@@ -8,7 +7,8 @@ import {
joinPaths,
} from '~/lib/utils/url_utility';
import { darkModeEnabled } from '~/lib/utils/color_utils';
import { setAttributes } from '~/lib/utils/dom_utils';
import { setAttributes, isElementVisible } from '~/lib/utils/dom_utils';
import { createAlert, VARIANT_WARNING } from '~/flash';
import { unrestrictedPages } from './constants';

// Renders diagrams and flowcharts from text using Mermaid in any element with the
@@ -27,17 +27,30 @@ import { unrestrictedPages } from './constants';

const SANDBOX_FRAME_PATH = '/-/sandbox/mermaid';
// This is an arbitrary number; Can be iterated upon when suitable.
const MAX_CHAR_LIMIT = 2000;
export const MAX_CHAR_LIMIT = 2000;
// Max # of mermaid blocks that can be rendered in a page.
const MAX_MERMAID_BLOCK_LIMIT = 50;
export const MAX_MERMAID_BLOCK_LIMIT = 50;
// Max # of `&` allowed in Chaining of links syntax
const MAX_CHAINING_OF_LINKS_LIMIT = 30;

export const BUFFER_IFRAME_HEIGHT = 10;
export const SANDBOX_ATTRIBUTES = 'allow-scripts allow-popups';

const ALERT_CONTAINER_CLASS = 'mermaid-alert-container';
export const LAZY_ALERT_SHOWN_CLASS = 'lazy-alert-shown';

// Keep a map of mermaid blocks we've already rendered.
const elsProcessingMap = new WeakMap();
let renderedMermaidBlocks = 0;

/**
 * Determines whether a given Mermaid diagram is visible.
 *
 * @param {Element} el The Mermaid DOM node
 * @returns
 */
const isVisibleMermaid = (el) => el.closest('details') === null && isElementVisible(el);

function shouldLazyLoadMermaidBlock(source) {
/**
 * If source contains `&`, which means that it might
@@ -104,8 +117,8 @@ function renderMermaidEl(el, source) {
);
}

function renderMermaids($els) {
if (!$els.length) return;
function renderMermaids(els) {
if (!els.length) return;

const pageName = document.querySelector('body').dataset.page;

@@ -114,7 +127,7 @@ function renderMermaids($els) {

let renderedChars = 0;

$els.each((i, el) => {
els.forEach((el) => {
// Skipping all the elements which we've already queued in requestIdleCallback
if (elsProcessingMap.has(el)) {
return;
@@ -133,33 +146,29 @@ function renderMermaids($els) {
renderedMermaidBlocks >= MAX_MERMAID_BLOCK_LIMIT ||
shouldLazyLoadMermaidBlock(source))
) {
const html = `
<div class="alert gl-alert gl-alert-warning alert-dismissible lazy-render-mermaid-container js-lazy-render-mermaid-container fade show" role="alert">
<div>
<div>
<div class="js-warning-text"></div>
<div class="gl-alert-actions">
<button type="button" class="js-lazy-render-mermaid btn gl-alert-action btn-confirm btn-md gl-button">Display</button>
</div>
</div>
<button type="button" class="close" data-dismiss="alert" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
</div>
`;
const parent = el.parentNode;

const $parent = $(el).parent();

if (!$parent.hasClass('lazy-alert-shown')) {
$parent.after(html);
$parent
.siblings()
.find('.js-warning-text')
.text(
__('Warning: Displaying this diagram might cause performance issues on this page.'),
);
$parent.addClass('lazy-alert-shown');
if (!parent.classList.contains(LAZY_ALERT_SHOWN_CLASS)) {
const alertContainer = document.createElement('div');
alertContainer.classList.add(ALERT_CONTAINER_CLASS);
alertContainer.classList.add('gl-mb-5');
parent.after(alertContainer);
createAlert({
message: __(
'Warning: Displaying this diagram might cause performance issues on this page.',
),
variant: VARIANT_WARNING,
parent: parent.parentNode,
containerSelector: `.${ALERT_CONTAINER_CLASS}`,
primaryButton: {
text: __('Display'),
clickHandler: () => {
alertContainer.remove();
renderMermaidEl(el, source);
},
},
});
parent.classList.add(LAZY_ALERT_SHOWN_CLASS);
}

return;
@@ -176,37 +185,33 @@ function renderMermaids($els) {
});
}

const hookLazyRenderMermaidEvent = once(() => {
$(document.body).on('click', '.js-lazy-render-mermaid', function eventHandler() {
const parent = $(this).closest('.js-lazy-render-mermaid-container');
const pre = parent.prev();
export default function renderMermaid(els) {
if (!els.length) return;

const el = pre.find('.js-render-mermaid');
const visibleMermaids = [];
const hiddenMermaids = [];

parent.remove();

// sandbox update
const element = el.get(0);
const { source } = fixElementSource(element);

renderMermaidEl(element, source);
});
});

export default function renderMermaid($els) {
if (!$els.length) return;

const visibleMermaids = $els.filter(function filter() {
return $(this).closest('details').length === 0 && $(this).is(':visible');
});
for (const el of els) {
if (isVisibleMermaid(el)) {
visibleMermaids.push(el);
} else {
hiddenMermaids.push(el);
}
}

renderMermaids(visibleMermaids);

$els.closest('details').one('toggle', function toggle() {
if (this.open) {
renderMermaids($(this).find('.js-render-mermaid'));
}
hiddenMermaids.forEach((el) => {
el.closest('details').addEventListener(
'toggle',
({ target: details }) => {
if (details.open) {
renderMermaids([...details.querySelectorAll('.js-render-mermaid')]);
}
},
{
once: true,
},
);
});

hookLazyRenderMermaidEvent();
}
@@ -42,8 +42,6 @@ import {
} from '~/vue_shared/components/filtered_search_bar/constants';
import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
import { IssuableListTabs, IssuableStates } from '~/vue_shared/issuable/list/constants';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { WORK_ITEM_TYPE_ENUM_TASK } from '~/work_items/constants';
import {
CREATED_DESC,
defaultTypeTokenOptions,
@@ -125,7 +123,6 @@ export default {
directives: {
GlTooltip: GlTooltipDirective,
},
mixins: [glFeatureFlagMixin()],
inject: [
'autocompleteAwardEmojisPath',
'calendarPath',
@@ -239,21 +236,14 @@ export default {
state: this.state,
...this.pageParams,
...this.apiFilterParams,
types: this.apiFilterParams.types || this.defaultWorkItemTypes,
types: this.apiFilterParams.types || defaultWorkItemTypes,
};
},
namespace() {
return this.isProject ? ITEM_TYPE.PROJECT : ITEM_TYPE.GROUP;
},
defaultWorkItemTypes() {
return this.isWorkItemsEnabled
? defaultWorkItemTypes
: defaultWorkItemTypes.filter((type) => type !== WORK_ITEM_TYPE_ENUM_TASK);
},
typeTokenOptions() {
return this.isWorkItemsEnabled
? defaultTypeTokenOptions.concat(TYPE_TOKEN_TASK_OPTION)
: defaultTypeTokenOptions;
return defaultTypeTokenOptions.concat(TYPE_TOKEN_TASK_OPTION);
},
hasSearch() {
return (
@@ -272,9 +262,6 @@ export default {
isOpenTab() {
return this.state === IssuableStates.Opened;
},
isWorkItemsEnabled() {
return this.glFeatures.workItems;
},
showCsvButtons() {
return this.isProject && this.isSignedIn;
},
@@ -148,6 +148,11 @@ export default {
type: Object,
required: true,
},
hidePrompt: {
type: Boolean,
required: false,
default: false,
},
},
computed: {
markdown() {
@@ -163,7 +168,7 @@ export default {

<template>
<div class="cell text-cell">
<prompt />
<prompt v-if="!hidePrompt" />
<div v-safe-html:[$options.markdownConfig]="markdown" class="markdown"></div>
</div>
</template>
@@ -3,6 +3,9 @@ import CodeOutput from '../code/index.vue';
import HtmlOutput from './html.vue';
import ImageOutput from './image.vue';
import LatexOutput from './latex.vue';
import MarkdownOutput from './markdown.vue';

const TEXT_MARKDOWN = 'text/markdown';

export default {
props: {
@@ -35,6 +38,8 @@ export default {
return 'text/latex';
} else if (output.data['image/svg+xml']) {
return 'image/svg+xml';
} else if (output.data[TEXT_MARKDOWN]) {
return TEXT_MARKDOWN;
}

return 'text/plain';
@@ -42,7 +47,7 @@ export default {
dataForType(output, type) {
let data = output.data[type];

if (typeof data === 'object') {
if (typeof data === 'object' && this.outputType(output) !== TEXT_MARKDOWN) {
data = data.join('');
}

@@ -61,6 +66,8 @@ export default {
return LatexOutput;
} else if (output.data['image/svg+xml']) {
return HtmlOutput;
} else if (output.data[TEXT_MARKDOWN]) {
return MarkdownOutput;
}

return CodeOutput;
@@ -0,0 +1,42 @@
<script>
import { GlSafeHtmlDirective as SafeHtml } from '@gitlab/ui';
import Prompt from '../prompt.vue';
import Markdown from '../markdown.vue';

export default {
name: 'MarkdownOutput',
components: {
Prompt,
Markdown,
},
directives: {
SafeHtml,
},
props: {
count: {
type: Number,
required: true,
},
rawCode: {
type: Array,
required: true,
},
index: {
type: Number,
required: true,
},
},
computed: {
markdownContent() {
return { source: this.rawCode };
},
},
};
</script>

<template>
<div class="output">
<prompt type="Out" :count="count" :show-output="index === 0" />
<markdown :cell="markdownContent" :hide-prompt="true" />
</div>
</template>
@@ -257,9 +257,9 @@ export default {

<asset-links-form />

<div class="d-flex pt-3">
<div class="d-flex gl-gap-x-3 pt-3">
<gl-button
class="mr-auto js-no-auto-disable"
class="js-no-auto-disable"
category="primary"
variant="confirm"
type="submit"
@@ -131,10 +131,10 @@ export default {
<div
v-for="(link, index) in release.assets.links"
:key="link.id"
class="row flex-column flex-sm-row align-items-stretch align-items-sm-start no-gutters"
class="gl-sm-display-flex flex-column flex-sm-row gl-gap-5 align-items-stretch align-items-sm-start no-gutters"
>
<gl-form-group
class="url-field form-group col pr-sm-2"
class="url-field form-group col"
:label="__('URL')"
:label-for="`asset-url-${index}`"
>
@@ -174,7 +174,7 @@ export default {
</gl-form-group>

<gl-form-group
class="link-title-field col px-sm-2"
class="link-title-field col"
:label="__('Link title')"
:label-for="`asset-link-name-${index}`"
>
@@ -201,7 +201,7 @@ export default {
</gl-form-group>

<gl-form-group
class="link-type-field col-auto px-sm-2"
class="link-type-field col-auto"
:label="__('Type')"
:label-for="`asset-type-${index}`"
>
@@ -216,9 +216,8 @@ export default {
/>
</gl-form-group>

<div class="mb-5 mb-sm-3 mt-sm-4 col col-sm-auto pl-sm-2">
<div v-if="release.assets.links.length !== 1" class="mb-5 mb-sm-3 mt-sm-4 col col-sm-auto">
<gl-button
v-gl-tooltip
class="remove-button w-100 form-control"
:aria-label="__('Remove asset link')"
:title="__('Remove asset link')"
@@ -233,8 +232,9 @@ export default {
</div>
<gl-button
ref="addAnotherLinkButton"
variant="link"
class="align-self-end mb-5 mb-sm-0"
category="secondary"
variant="confirm"
class="gl-align-self-start gl-mb-5"
@click="onAddAnotherClicked"
>
{{ __('Add another link') }}
@@ -67,6 +67,22 @@ function UsersSelect(currentUser, els, options = {}) {
let assigneeTemplate;
let collapsedAssigneeTemplate;

const suggestedReviewersHelpPath = $dropdown.data('suggestedReviewersHelpPath');
const suggestedReviewersHeaderTemplate = template(
`<div class="gl-display-flex gl-align-items-center">
<%- header %>
<a
title="${s__('SuggestedReviewers|Learn about suggested reviewers')}"
href="${suggestedReviewersHelpPath}"
rel="noopener"
target="_blank"
aria-label="${s__('SuggestedReviewers|Suggested reviewers help link')}"
class="gl-hover-bg-transparent! gl-p-0! has-tooltip">
${spriteIcon('question-o', 'gl-ml-3 gl-icon s16')}
</a>
</div>`,
);

if (selectedId === undefined) {
selectedId = selectedIdDefault;
}
@@ -383,7 +399,12 @@ function UsersSelect(currentUser, els, options = {}) {
if (!suggestedUsers.length) return [];

const items = [
{ type: 'header', content: $dropdown.data('suggestedReviewersHeader') },
{
type: 'header',
content: suggestedReviewersHeaderTemplate({
header: $dropdown.data('suggestedReviewersHeader'),
}),
},
...suggestedUsers,
{ type: 'header', content: $dropdown.data('allMembersHeader') },
];
@@ -34,8 +34,8 @@ export default {
iconAriaLabel() {
return `${capitalizeFirstCharacter(this.iconName)} ${this.name}`;
},
iconSize() {
return this.level === 1 ? 16 : 12;
iconClassNameText() {
return this.$options.EXTENSION_ICON_CLASS[this.iconName];
},
},
EXTENSION_ICON_NAMES,
@@ -44,24 +44,22 @@ export default {
</script>

<template>
<div :class="[$options.EXTENSION_ICON_CLASS[iconName]]" class="gl-mr-3">
<div
:class="{
[iconClassNameText]: !isLoading,
[`mr-widget-status-icon-level-${level}`]: !isLoading,
'gl-mr-3': level === 1,
}"
class="gl-relative gl-w-6 gl-h-6 gl-rounded-full gl--flex-center"
>
<gl-loading-icon v-if="isLoading" size="md" inline />
<div
<gl-icon
v-else
class="gl-display-flex gl-align-items-center gl-justify-content-center gl-rounded-full gl-bg-gray-10"
:class="{
'gl-p-2': level === 1,
}"
>
<div class="gl-rounded-full gl-bg-white">
<gl-icon
:name="$options.EXTENSION_ICON_NAMES[iconName]"
:size="iconSize"
:aria-label="iconAriaLabel"
:data-qa-selector="`status_${iconName}_icon`"
class="gl-display-block"
/>
</div>
</div>
:name="$options.EXTENSION_ICON_NAMES[iconName]"
:size="12"
:aria-label="iconAriaLabel"
:data-qa-selector="`status_${iconName}_icon`"
class="gl-relative gl-z-index-1"
/>
</div>
</template>
@@ -25,7 +25,7 @@ export default {
workItemTypes() {
return this.workItemHierarchy.reduce(
(itemTypes, item) => {
const skipItem = workItemTypes[item.type].isWorkItem && !window.gon?.features?.workItems;
const skipItem = workItemTypes[item.type].isWorkItem;

if (skipItem) {
return itemTypes;
@@ -1085,6 +1085,31 @@ $tabs-holder-z-index: 250;
border-top: 0;
}

.mr-widget-status-icon-level-1::before {
@include gl-content-empty;
@include gl-absolute;
@include gl-left-0;
@include gl-top-0;
@include gl-bottom-0;
@include gl-right-0;
@include gl-opacity-3;
@include gl-rounded-full;
@include gl-border-solid;
@include gl-border-4;
}

.mr-widget-status-icon-level-1::after {
@include gl-content-empty;
@include gl-absolute;
@include gl-rounded-full;
@include gl-border-solid;
@include gl-border-4;
@include gl-left-2;
@include gl-right-2;
@include gl-top-2;
@include gl-bottom-2;
}

.memory-graph-container {
background: var(--white, $white);
border: 1px solid var(--gray-100, $gray-100);
@@ -25,6 +25,10 @@ module GraphqlTriggers
GitlabSchema.subscriptions.trigger('issuableDatesUpdated', { issuable_id: issuable.to_gid }, issuable)
end

def self.issuable_milestone_updated(issuable)
GitlabSchema.subscriptions.trigger('issuableMilestoneUpdated', { issuable_id: issuable.to_gid }, issuable)
end

def self.merge_request_reviewers_updated(merge_request)
GitlabSchema.subscriptions.trigger(
'mergeRequestReviewersUpdated',
@@ -22,6 +22,9 @@ module Types
field :issuable_dates_updated, subscription: Subscriptions::IssuableUpdated, null: true,
description: 'Triggered when the due date or start date of an issuable is updated.'

field :issuable_milestone_updated, subscription: Subscriptions::IssuableUpdated, null: true,
description: 'Triggered when the milestone of an issuable is updated.'

field :merge_request_reviewers_updated,
subscription: Subscriptions::IssuableUpdated,
null: true,
@@ -27,7 +27,7 @@ module RedisCacheable

def cache_attributes(values)
Gitlab::Redis::Cache.with do |redis|
redis.set(cache_attribute_key, values.to_json, ex: CACHED_ATTRIBUTES_EXPIRY_TIME)
redis.set(cache_attribute_key, Gitlab::Json.dump(values), ex: CACHED_ATTRIBUTES_EXPIRY_TIME)
end

clear_memoization(:cached_attributes)
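Note: this commit's recurring `to_json` → `Gitlab::Json.dump` swap (here and in the model, integration, and migration hunks below) routes serialization through GitLab's JSON wrapper. A minimal sketch of the equivalence, using a hypothetical stand-in class rather than the real `Gitlab::Json` (which adds adapter selection on top):

```ruby
require 'json'

# Hypothetical stand-in for illustration only; not the real Gitlab::Json.
module GitlabJsonSketch
  # For plain hashes and arrays this matches Object#to_json output.
  def self.dump(object)
    ::JSON.generate(object)
  end
end

values = { architecture: 'amd64', platform: 'linux' }
raise 'outputs differ' unless GitlabJsonSketch.dump(values) == values.to_json
```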
@@ -37,18 +37,18 @@ class DiffDiscussion < Discussion

def reply_attributes
super.merge(
original_position: original_position.to_json,
position: position.to_json
original_position: Gitlab::Json.dump(original_position),
position: Gitlab::Json.dump(position)
)
end

def cache_key
positions_json = diff_note_positions.map { |dnp| dnp.position.to_json }
positions_json = diff_note_positions.map { |dnp| Gitlab::Json.dump(dnp.position) }
positions_sha = Digest::SHA1.hexdigest(positions_json.join(':')) if positions_json.any?

[
super,
Digest::SHA1.hexdigest(position.to_json),
Digest::SHA1.hexdigest(Gitlab::Json.dump(position)),
positions_sha
].join(':')
end
@@ -35,7 +35,9 @@ module Integrations
return unless supported_events.include?(data[:object_kind])

url = "https://atlas.assembla.com/spaces/#{subdomain}/github_tool?secret_key=#{token}"
Gitlab::HTTP.post(url, body: { payload: data }.to_json, headers: { 'Content-Type' => 'application/json' })
body = { payload: data }

Gitlab::HTTP.post(url, body: Gitlab::Json.dump(body), headers: { 'Content-Type' => 'application/json' })
end
end
end
|
@ -56,7 +56,7 @@ module Integrations
|
|||
}
|
||||
Gitlab::HTTP.post(
|
||||
API_ENDPOINT,
|
||||
body: message.to_json,
|
||||
body: Gitlab::Json.dump(message),
|
||||
headers: {
|
||||
'Content-Type' => 'application/json',
|
||||
'X-TrackerToken' => token
|
||||
|
|
|
|||
|
|
@@ -51,7 +51,7 @@ module Integrations

def notify(message, opts)
header = { 'Content-Type' => 'application/json' }
response = Gitlab::HTTP.post(webhook, headers: header, body: { text: message.summary }.to_json)
response = Gitlab::HTTP.post(webhook, headers: header, body: Gitlab::Json.dump({ text: message.summary }))

response if response.success?
end
@@ -43,11 +43,13 @@ module Integrations
private

def notify(message, opts)
response = Gitlab::HTTP.post(webhook, body: {
body = {
subject: message.project_name,
text: message.summary,
markdown: true
}.to_json)
}

response = Gitlab::HTTP.post(webhook, body: Gitlab::Json.dump(body))

response if response.success?
end
@@ -44,7 +44,7 @@ module Integrations

def notify(message, opts)
header = { 'Content-Type' => 'application/json' }
response = Gitlab::HTTP.post(webhook, headers: header, body: { markdown: message.summary }.to_json)
response = Gitlab::HTTP.post(webhook, headers: header, body: Gitlab::Json.dump({ markdown: message.summary }))

response if response.success?
end
@@ -4,9 +4,8 @@
class Iteration < ApplicationRecord
include IgnorableColumns

# TODO https://gitlab.com/gitlab-org/gitlab/-/issues/372125
# TODO https://gitlab.com/gitlab-org/gitlab/-/issues/372126
ignore_column :project_id, remove_with: '15.6', remove_after: '2022-09-17'
ignore_column :project_id, remove_with: '15.7', remove_after: '2022-11-18'

self.table_name = 'sprints'

@@ -70,7 +70,7 @@ class MergeRequestDiffCommit < ApplicationRecord
sha: Gitlab::Database::ShaAttribute.serialize(sha), # rubocop:disable Cop/ActiveRecordSerialize
authored_date: Gitlab::Database.sanitize_timestamp(commit_hash[:authored_date]),
committed_date: Gitlab::Database.sanitize_timestamp(commit_hash[:committed_date]),
trailers: commit_hash.fetch(:trailers, {}).to_json
trailers: Gitlab::Json.dump(commit_hash.fetch(:trailers, {}))
)
end

@@ -163,6 +163,7 @@ module Issues

invalidate_milestone_issue_counters(issue)
send_milestone_change_notification(issue)
GraphqlTriggers.issuable_milestone_updated(issue)
end

def invalidate_milestone_issue_counters(issue)
@@ -31,6 +31,7 @@
- data = { multi_select: true }
- data['dropdown-title'] = title
- data['dropdown-header'] = dropdown_options[:data][:'dropdown-header']
- data[:suggested_reviewers_help_path] = dropdown_options[:data][:suggested_reviewers_help_path]
- data[:suggested_reviewers_header] = dropdown_options[:data][:suggested_reviewers_header]
- data[:all_members_header] = dropdown_options[:data][:all_members_header]
- data[:show_suggested] = dropdown_options[:data][:show_suggested]
@@ -0,0 +1,53 @@
# frozen_string_literal: true

# Pulls logic from https://github.com/Maxim-Filimonov/hashie-forbidden_attributes so we could drop the dependency.
# This gem is simply `Hashie::Mash` monkey patch to allow mass assignment bypassing `:permitted?` check.
#
# Reasons:
# 1. The gem was last updated 5 years ago and does not have CI setup to test under the latest Ruby/Rails.
# 2. There is a significant chance this logic is not used at all.
# We didn't find any explicit places in the code where we mass-assign to `Hashie::Mash`.
# Experimental MR where we dropped the gem showed that no tests from the full suite failed:
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/101535
# 3. The logic is very simple. Even if we need it, keeping it in our codebase is better than pulling a dependency.
# This logic will be visible and it will be one less gem to install.
#
# Next steps:
# 1. Keep the patch for at least one milestone in our codebase. Log its usage.
# 2. After that, check if there were any related log events.
# 3. If no usages were tracked, we could drop the patch (delete this file).
# 4. Otherwise, audit where and why we need it, and add a comment to that place.
#
# See discussion https://gitlab.com/gitlab-org/gitlab/-/issues/378398#note_1143133426

require 'hashie/mash'

module Hashie
class Mash
module MonkeyPatch
def respond_to_missing?(method_name, *args)
if method_name == :permitted?
Gitlab::AppLogger.info(message: 'Hashie::Mash#respond_to?(:permitted?)',
caller: Gitlab::BacktraceCleaner.clean_backtrace(caller))

return false
end

super
end

def method_missing(method_name, *args)
if method_name == :permitted?
Gitlab::AppLogger.info(message: 'Hashie::Mash#permitted?',
caller: Gitlab::BacktraceCleaner.clean_backtrace(caller))

raise ArgumentError
end

super
end
end

prepend MonkeyPatch
end
end
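A rough usage sketch of the patch above, assuming it is loaded alongside `hashie` in a console (the `Gitlab::AppLogger` lines it emits are omitted):

```ruby
require 'hashie/mash'

mash = Hashie::Mash.new(key: 'value')

# Strong-params protection probes params-like objects for :permitted?;
# the patch answers false here, so the Mash is treated as a plain hash.
mash.respond_to?(:permitted?) # => false

begin
  mash.permitted?
rescue ArgumentError
  # Raised by the patched method_missing, mirroring hashie-forbidden_attributes.
end
```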
@@ -18,5 +18,7 @@ metadata:
description: Operations related to metadata of the GitLab instance
- name: access_requests
description: Operations related to access requests
- name: merge_requests
description: Operations related to merge requests
- name: deployments
description: Operations related to deployments
@@ -13,7 +13,7 @@ class CreateDastSiteProfileVariables < ActiveRecord::Migration[6.0]
encrypted_value_constraint_name = check_constraint_name(:dast_site_profile_secret_variables, 'encrypted_value', 'max_length')
encrypted_value_iv_constraint_name = check_constraint_name(:dast_site_profile_secret_variables, 'encrypted_value_iv', 'max_length')

create_table_with_constraints :dast_site_profile_secret_variables, comment: table_comment.to_json do |t|
create_table_with_constraints :dast_site_profile_secret_variables, comment: Gitlab::Json.dump(table_comment) do |t|
t.references :dast_site_profile, null: false, foreign_key: { on_delete: :cascade }, index: false

t.timestamps_with_timezone
@@ -6,7 +6,7 @@ class CreateDastProfilesPipelines < ActiveRecord::Migration[6.0]
def up
table_comment = { owner: 'group::dynamic analysis', description: 'Join table between DAST Profiles and CI Pipelines' }

create_table :dast_profiles_pipelines, primary_key: [:dast_profile_id, :ci_pipeline_id], comment: table_comment.to_json do |t|
create_table :dast_profiles_pipelines, primary_key: [:dast_profile_id, :ci_pipeline_id], comment: Gitlab::Json.dump(table_comment) do |t|
t.bigint :dast_profile_id, null: false
t.bigint :ci_pipeline_id, null: false

@@ -11,7 +11,7 @@ class CreateSecurityOrchestrationPolicyRuleSchedule < ActiveRecord::Migration[6.
def up
table_comment = { owner: 'group::container security', description: 'Schedules used to store relationship between project and security policy repository' }

create_table_with_constraints :security_orchestration_policy_rule_schedules, comment: table_comment.to_json do |t|
create_table_with_constraints :security_orchestration_policy_rule_schedules, comment: Gitlab::Json.dump(table_comment) do |t|
t.timestamps_with_timezone
t.datetime_with_timezone :next_run_at, null: true

@@ -4,7 +4,7 @@ class CreateDastSiteProfilesPipelines < ActiveRecord::Migration[6.0]
def up
table_comment = { owner: 'group::dynamic analysis', description: 'Join table between DAST Site Profiles and CI Pipelines' }

create_table :dast_site_profiles_pipelines, primary_key: [:dast_site_profile_id, :ci_pipeline_id], comment: table_comment.to_json do |t|
create_table :dast_site_profiles_pipelines, primary_key: [:dast_site_profile_id, :ci_pipeline_id], comment: Gitlab::Json.dump(table_comment) do |t|
t.bigint :dast_site_profile_id, null: false
t.bigint :ci_pipeline_id, null: false

@@ -4,7 +4,7 @@ class CreateDastSiteProfilesBuilds < ActiveRecord::Migration[6.1]
def up
table_comment = { owner: 'group::dynamic analysis', description: 'Join table between DAST Site Profiles and CI Builds' }

create_table :dast_site_profiles_builds, primary_key: [:dast_site_profile_id, :ci_build_id], comment: table_comment.to_json do |t|
create_table :dast_site_profiles_builds, primary_key: [:dast_site_profile_id, :ci_build_id], comment: Gitlab::Json.dump(table_comment) do |t|
t.bigint :dast_site_profile_id, null: false
t.bigint :ci_build_id, null: false

@@ -4,7 +4,7 @@ class CreateDastScannerProfilesBuilds < ActiveRecord::Migration[6.1]
def up
table_comment = { owner: 'group::dynamic analysis', description: 'Join table between DAST Scanner Profiles and CI Builds' }

create_table :dast_scanner_profiles_builds, primary_key: [:dast_scanner_profile_id, :ci_build_id], comment: table_comment.to_json do |t|
create_table :dast_scanner_profiles_builds, primary_key: [:dast_scanner_profile_id, :ci_build_id], comment: Gitlab::Json.dump(table_comment) do |t|
t.bigint :dast_scanner_profile_id, null: false
t.bigint :ci_build_id, null: false

@@ -10,7 +10,7 @@ class CreateDastProfileSchedule < ActiveRecord::Migration[6.1]
owner: 'group::dynamic analysis', description: 'Scheduling for scans using DAST Profiles'
}

create_table_with_constraints :dast_profile_schedules, comment: table_comment.to_json do |t|
create_table_with_constraints :dast_profile_schedules, comment: Gitlab::Json.dump(table_comment) do |t|
t.bigint :project_id, null: false
t.bigint :dast_profile_id, null: false
t.bigint :user_id
@@ -19,7 +19,7 @@ class BackfillEventsIdForBigintConversion < ActiveRecord::Migration[6.0]
Gitlab::Database::BackgroundMigration::BatchedMigration
.where(job_class_name: 'CopyColumnUsingBackgroundMigrationJob')
.where(table_name: 'events', column_name: 'id')
.where(job_arguments: %w[id id_convert_to_bigint].to_json)
.where(job_arguments: Gitlab::Json.dump(%w[id id_convert_to_bigint]))
.delete_all
end

@@ -20,7 +20,7 @@ class BackfillPushEventPayloadEventIdForBigintConversion < ActiveRecord::Migrati
Gitlab::Database::BackgroundMigration::BatchedMigration
.where(job_class_name: 'CopyColumnUsingBackgroundMigrationJob')
.where(table_name: 'push_event_payloads', column_name: 'event_id')
.where(job_arguments: %w[event_id event_id_convert_to_bigint].to_json)
.where(job_arguments: Gitlab::Json.dump(%w[event_id event_id_convert_to_bigint]))
.delete_all
end

@@ -20,7 +20,7 @@ class BackfillCiBuildNeedsForBigintConversion < ActiveRecord::Migration[6.0]
Gitlab::Database::BackgroundMigration::BatchedMigration
.where(job_class_name: 'CopyColumnUsingBackgroundMigrationJob')
.where(table_name: 'ci_build_needs', column_name: 'build_id')
.where(job_arguments: %w[build_id build_id_convert_to_bigint].to_json)
.where(job_arguments: Gitlab::Json.dump(%w[build_id build_id_convert_to_bigint]))
.delete_all
end

@@ -19,7 +19,7 @@ class BackfillConversionOfCiJobArtifacts < ActiveRecord::Migration[6.0]
Gitlab::Database::BackgroundMigration::BatchedMigration
.where(job_class_name: 'CopyColumnUsingBackgroundMigrationJob')
.where(table_name: 'ci_job_artifacts', column_name: 'id')
.where(job_arguments: [%w[id job_id], %w[id_convert_to_bigint job_id_convert_to_bigint]].to_json)
.where(job_arguments: Gitlab::Json.dump([%w[id job_id], %w[id_convert_to_bigint job_id_convert_to_bigint]]))
.delete_all
end

@@ -18,7 +18,7 @@ class BackfillCiSourcesPipelinesSourceJobIdForBigintConversion < ActiveRecord::M
Gitlab::Database::BackgroundMigration::BatchedMigration
.where(job_class_name: 'CopyColumnUsingBackgroundMigrationJob')
.where(table_name: 'ci_sources_pipelines', column_name: 'id')
.where(job_arguments: [%w[source_job_id], %w[source_job_id_convert_to_bigint]].to_json)
.where(job_arguments: Gitlab::Json.dump([%w[source_job_id], %w[source_job_id_convert_to_bigint]]))
.delete_all
end

@@ -17,8 +17,8 @@ class FixBatchedMigrationsOldFormatJobArguments < ActiveRecord::Migration[6.1]

# rubocop:disable Rails/WhereEquals
base_scope
.where('job_arguments = ?', legacy_job_arguments.to_json)
.where('NOT EXISTS (?)', base_scope.select('1').where('job_arguments = ?', current_job_arguments.to_json))
.where('job_arguments = ?', Gitlab::Json.dump(legacy_job_arguments))
.where('NOT EXISTS (?)', base_scope.select('1').where('job_arguments = ?', Gitlab::Json.dump(current_job_arguments)))
.update_all(job_arguments: current_job_arguments)
# rubocop:enable Rails/WhereEquals
end
@@ -20,7 +20,7 @@ class AdjustTaskNoteRenameBackgroundMigrationValues < Gitlab::Database::Migratio

scope :for_configuration, ->(job_class_name, table_name, column_name, job_arguments) do
where(job_class_name: job_class_name, table_name: table_name, column_name: column_name)
.where("job_arguments = ?", job_arguments.to_json) # rubocop:disable Rails/WhereEquals
.where("job_arguments = ?", Gitlab::Json.dump(job_arguments)) # rubocop:disable Rails/WhereEquals
end
end

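For context, a hypothetical lookup through the `for_configuration` scope defined above (the receiving class and argument values are illustrative, not taken from this commit):

```ruby
# Finds the batched migration whose job_arguments serialize to '[]'.
migration = BatchedMigration.for_configuration(
  'CopyColumnUsingBackgroundMigrationJob', # job_class_name
  :events,                                 # table_name
  :id,                                     # column_name
  []                                       # job_arguments, compared as JSON
).first
```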
@@ -0,0 +1,38 @@
# frozen_string_literal: true

class RemoveSprintsProjectIdColumn < Gitlab::Database::Migration[2.0]
disable_ddl_transaction!

DATERANGE_CONSTRAINT_NAME = 'iteration_start_and_due_daterange_project_id_constraint'

def up
with_lock_retries do
remove_column :sprints, :project_id, :bigint if column_exists?(:sprints, :project_id)
end
end

def down
with_lock_retries do
add_column :sprints, :project_id, :bigint unless column_exists?(:sprints, :project_id)
end

with_lock_retries do
next if check_constraint_exists?(:sprints, DATERANGE_CONSTRAINT_NAME)

execute(<<~SQL)
ALTER TABLE sprints
ADD CONSTRAINT #{DATERANGE_CONSTRAINT_NAME}
EXCLUDE USING gist (project_id WITH =, daterange(start_date, due_date, '[]'::text) WITH &&)
WHERE (project_id IS NOT NULL)
SQL
end

add_check_constraint(:sprints,
'project_id <> NULL::bigint AND group_id IS NULL OR group_id <> NULL::bigint AND project_id IS NULL',
'sprints_must_belong_to_project_or_group')

add_concurrent_index :sprints, [:project_id, :iid], unique: true, name: 'index_sprints_on_project_id_and_iid'

add_concurrent_foreign_key :sprints, :projects, column: :project_id, on_delete: :cascade
end
end
@@ -0,0 +1 @@
7f83a1d04357f4f2e1e4ed92e0d9b0041f79d1850b28f41cee45d243e25741f0
@@ -21594,7 +21594,6 @@ CREATE TABLE sprints (
updated_at timestamp with time zone NOT NULL,
start_date date,
due_date date,
project_id bigint,
group_id bigint,
iid integer NOT NULL,
cached_markdown_version integer,

@@ -21605,7 +21604,6 @@ CREATE TABLE sprints (
state_enum smallint DEFAULT 1 NOT NULL,
iterations_cadence_id integer,
sequence integer,
CONSTRAINT sprints_must_belong_to_project_or_group CHECK ((((project_id <> NULL::bigint) AND (group_id IS NULL)) OR ((group_id <> NULL::bigint) AND (project_id IS NULL)))),
CONSTRAINT sprints_title CHECK ((char_length(title) <= 255))
);

@@ -25863,9 +25861,6 @@ ALTER TABLE ONLY issues_self_managed_prometheus_alert_events
ALTER TABLE ONLY sprints
ADD CONSTRAINT iteration_start_and_due_date_iterations_cadence_id_constraint EXCLUDE USING gist (iterations_cadence_id WITH =, daterange(start_date, due_date, '[]'::text) WITH &&) WHERE ((group_id IS NOT NULL)) DEFERRABLE INITIALLY DEFERRED;

ALTER TABLE ONLY sprints
ADD CONSTRAINT iteration_start_and_due_daterange_project_id_constraint EXCLUDE USING gist (project_id WITH =, daterange(start_date, due_date, '[]'::text) WITH &&) WHERE ((project_id IS NOT NULL));

ALTER TABLE ONLY iterations_cadences
ADD CONSTRAINT iterations_cadences_pkey PRIMARY KEY (id);

@@ -30569,8 +30564,6 @@ CREATE INDEX index_sprints_on_due_date ON sprints USING btree (due_date);

CREATE INDEX index_sprints_on_group_id ON sprints USING btree (group_id);

CREATE UNIQUE INDEX index_sprints_on_project_id_and_iid ON sprints USING btree (project_id, iid);

CREATE INDEX index_sprints_on_title ON sprints USING btree (title);

CREATE INDEX index_sprints_on_title_trigram ON sprints USING gin (title gin_trgm_ops);

@@ -33405,9 +33398,6 @@ ALTER TABLE ONLY namespaces
ALTER TABLE ONLY fork_networks
ADD CONSTRAINT fk_e7b436b2b5 FOREIGN KEY (root_project_id) REFERENCES projects(id) ON DELETE SET NULL;

ALTER TABLE ONLY sprints
ADD CONSTRAINT fk_e8206c9686 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;

ALTER TABLE ONLY application_settings
ADD CONSTRAINT fk_e8a145f3a7 FOREIGN KEY (instance_administrators_group_id) REFERENCES namespaces(id) ON DELETE SET NULL;
Six binary image files removed (not shown): 7.5 KiB, 23 KiB, 18 KiB, 6.0 KiB, 3.5 KiB, 4.2 KiB.
|
|
@ -6,240 +6,179 @@ info: To determine the technical writer assigned to the Stage/Group associated w
|
|||
|
||||
# Merge trains **(PREMIUM)**
|
||||
|
||||
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/9186) in GitLab 12.0.
|
||||
> - [Squash and merge](../../user/project/merge_requests/squash_and_merge.md) support [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/13001) in GitLab 12.6.
|
||||
Use merge trains to queue merge requests and verify their changes work together before
|
||||
they are merged to the target branch.
|
||||
|
||||
For more information about why you might want to use merge trains, read [How starting merge trains improve efficiency for DevOps](https://about.gitlab.com/blog/2020/01/30/all-aboard-merge-trains/).
|
||||
In projects with frequent merges to the default branch, changes in different merge requests
|
||||
might conflict with each other. [Merged results pipelines](merged_results_pipelines.md)
|
||||
ensure the changes work with the content in the default branch, but not content
|
||||
that others are merging at the same time.
|
||||
|
||||
When [merged results pipelines](merged_results_pipelines.md) are
|
||||
enabled, the pipeline jobs run as if the changes from your source branch have already
|
||||
been merged into the target branch.
|
||||
Merge trains do not work with [Semi-linear history merge requests](../../user/project/merge_requests/methods/index.md#merge-commit-with-semi-linear-history)
|
||||
or [fast-forward merge requests](../../user/project/merge_requests/methods/index.md#fast-forward-merge).
|
||||
|
||||
However, the target branch may be changing rapidly. When you're ready to merge,
|
||||
if you haven't run the pipeline in a while, the target branch may have already changed.
|
||||
Merging now could introduce breaking changes.
|
||||
For more information about:
|
||||
|
||||
*Merge trains* can prevent this from happening. A merge train is a queued list of merge
|
||||
requests, each waiting to be merged into the target branch.
|
||||
- How merge trains work, review the [merge train workflow](#merge-train-workflow).
|
||||
- Why you might want to use merge trains, read [How starting merge trains improve efficiency for DevOps](https://about.gitlab.com/blog/2020/01/30/all-aboard-merge-trains/).
|
||||
|
||||
Many merge requests can be added to the train. Each merge request runs its own merged results pipeline,
|
||||
which includes the changes from all of the other merge requests in *front* of it on the train.
|
||||
All the pipelines run in parallel, to save time. The author of the internal merged result commit is always the user that initiated the merge.
|
||||
## Merge train workflow
|
||||
|
||||
If the pipeline for a merge request fails, the breaking changes are not merged, and the target
|
||||
branch is unaffected. The merge request is removed from the train, and all pipelines behind it restart.
|
||||
A merge train starts when there are no merge requests waiting to merge and you
|
||||
select [**Start merge train**](#start-a-merge-train). GitLab starts a merge train pipeline
|
||||
that verifies that the changes can merge into the default branch. This first pipeline
|
||||
is the same as a [merged results pipeline](merged_results_pipelines.md), which runs on
|
||||
the changes of the source and target branches combined together. The author of the
|
||||
internal merged result commit is the user that initiated the merge.
|
||||
|
||||
If the pipeline for the merge request at the front of the train completes successfully,
|
||||
the changes are merged into the target branch, and the other pipelines continue to
|
||||
run.
|
||||
To queue a second merge request to merge immediately after the first pipeline completes, select
|
||||
[**Add to merge train**](#add-a-merge-request-to-a-merge-train) and add it to the train.
|
||||
This second merge train pipeline runs on the changes of _both_ merge requests combined with the
|
||||
target branch. Similarly, if you add a third merge request, that pipeline runs on the changes
|
||||
of all three merge requests merged with the target branch. The pipelines all run in parallel.
|
||||
|
||||
To add a merge request to a merge train, you need [permissions](../../user/permissions.md) to merge or push to the
|
||||
target branch.
|
||||
Each merge request merges into the target branch only after:
|
||||
|
||||
Each merge train can run a maximum of **twenty** pipelines in parallel.
|
||||
If more than twenty merge requests are added to the merge train, the merge requests
|
||||
are queued until a slot in the merge train is free. There is no limit to the
|
||||
number of merge requests that can be queued.
|
||||
- The merge request's pipeline completes successfully.
|
||||
- All other merge requests queued before it are merged.
|
||||
|
||||
## Merge train example
|
||||
If a merge train pipeline fails, the merge request is not merged. GitLab
|
||||
removes that merge request from the merge train, and starts new pipelines for all
|
||||
the merge requests that were queued after it.
|
||||
|
||||
Three merge requests (`A`, `B` and `C`) are added to a merge train in order, which
|
||||
For example:
|
||||
|
||||
Three merge requests (`A`, `B`, and `C`) are added to a merge train in order, which
|
||||
creates three merged results pipelines that run in parallel:
|
||||
|
||||
1. The first pipeline runs on the changes from `A` combined with the target branch.
|
||||
1. The second pipeline runs on the changes from `A` and `B` combined with the target branch.
|
||||
1. The third pipeline runs on the changes from `A`, `B`, and `C` combined with the target branch.
|
||||
|
||||
If the pipeline for `B` fails, it is removed from the train. The pipeline for
|
||||
`C` restarts with the `A` and `C` changes, but without the `B` changes.
|
||||
If the pipeline for `B` fails:
|
||||
|
||||
- The first pipeline (`A`) continues to run.
|
||||
- `B` is removed from the train.
|
||||
- The pipeline for `C` [is cancelled](#automatic-pipeline-cancellation), and a new pipeline
|
||||
starts for the changes from `A` and `C` combined with the target branch (without the `B` changes).
|
||||
|
||||
If `A` then completes successfully, it merges into the target branch, and `C` continues
|
||||
to run. If more merge requests are added to the train, they now include the `A`
|
||||
changes that are included in the target branch, and the `C` changes that are from
|
||||
the merge request already in the train.
|
||||
to run. Any new merge requests added to the train include the `A` changes now in
|
||||
the target branch, and the `C` changes from the merge train.
|
||||
|
||||
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
|
||||
Watch this video for a demonstration on
|
||||
[how parallel execution of merge trains can prevent commits from breaking the default branch](https://www.youtube.com/watch?v=D4qCqXgZkHQ).
|
||||
Watch this video for a demonstration on [how parallel execution of merge trains can prevent commits from breaking the default branch](https://www.youtube.com/watch?v=D4qCqXgZkHQ).
|
||||
|
||||
## Prerequisites
|
||||
### Automatic pipeline cancellation
|
||||
|
||||
To enable merge trains:
|
||||
GitLab CI/CD detects redundant pipelines, and cancels them to conserve resources.
|
||||
|
||||
- You must have the Maintainer role.
|
||||
- You must be using [GitLab Runner](https://gitlab.com/gitlab-org/gitlab-runner) 11.9 or later.
|
||||
- Your repository must be a GitLab repository, not an
|
||||
[external repository](../ci_cd_for_external_repos/index.md).
|
||||
Redundant merge train pipelines happen when:
|
||||
|
||||
Merge trains do not work with [Semi-linear history merge requests](../../user/project/merge_requests/methods/index.md#merge-commit-with-semi-linear-history)
|
||||
or [fast-forward merge requests](../../user/project/merge_requests/methods/index.md#fast-forward-merge).
|
||||
- The pipeline fails for one of the merge requests in the merge train.
|
||||
- You [skip the merge train and merge immediately](#skip-the-merge-train-and-merge-immediately).
|
||||
- You [remove a merge request from a merge train](#remove-a-merge-request-from-a-merge-train).
|
||||
|
||||
In these cases, GitLab must create new merge train pipelines for some or all of the
|
||||
merge requests on the train. The old pipelines were comparing against the previous
|
||||
combined changes in the merge train, which are no longer valid, so these old pipelines
|
||||
are cancelled.
|
||||
|
||||
## Enable merge trains
|
||||
|
||||
Prerequisites:
|
||||
|
||||
- You must have the Maintainer role.
|
||||
- Your repository must be a GitLab repository, not an [external repository](../ci_cd_for_external_repos/index.md).
|
||||
- Your pipeline must be [configured to use merge request pipelines](merge_request_pipelines.md#prerequisites), as in the sketch after this list.
|
||||
Otherwise your merge requests may become stuck in an unresolved state or your pipelines
|
||||
might be dropped.
|
||||
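For example, a minimal `.gitlab-ci.yml` that runs a pipeline only for merge
requests might look like the following sketch (the job name and script are
placeholders; see the linked prerequisites for all supported options):

```yaml
workflow:
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"

test-job:
  script: echo "Running in a merge request pipeline"
```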
|
||||
To enable merge trains:
|
||||
|
||||
1. If you are on a self-managed GitLab instance, ensure the [feature flag](#merge-trains-feature-flag) is set correctly.
|
||||
1. On the top bar, select **Main menu > Projects** and find your project.
|
||||
1. On the left sidebar, select **Settings > Merge requests**.
|
||||
1. In the **Merge method** section, verify that **Merge commit** is selected.
|
||||
1. In the **Merge options** section, select **Enable merged results pipelines** (if not already selected) and **Enable merge trains**.
|
||||
1. Select **Save changes**.
|
||||
|
||||
In GitLab 13.5 and earlier, there is only one checkbox, named
|
||||
**Enable merge trains and pipelines for merged results**.
|
||||
|
||||
|
||||
## Start a merge train
|
||||
|
||||
Prerequisites:
|
||||
|
||||
- You must have [permissions](../../user/permissions.md) to merge or push to the target branch.
|
||||
|
||||
To start a merge train:
|
||||
|
||||
1. Visit a merge request.
|
||||
1. Select:
|
||||
- When no pipeline is running, **Start merge train**.
|
||||
- When a pipeline is running, **Start merge train when pipeline succeeds**.
|
||||
|
||||

|
||||
The merge request's merge train status displays under the pipeline widget with a
|
||||
message similar to `A new merge train has started and this merge request is the first of the queue.`
|
||||
|
||||
Other merge requests can now be added to the train.
|
||||
|
||||
## Add a merge request to a merge train
|
||||
|
||||
Prerequisites:
|
||||
|
||||
- You must have [permissions](../../user/permissions.md) to merge or push to the target branch.
|
||||
|
||||
To add a merge request to a merge train:
|
||||
|
||||
1. Visit a merge request.
|
||||
1. Select:
|
||||
- When no pipeline is running, **Add to merge train**.
|
||||
- When a pipeline is running, **Add to merge train when pipeline succeeds**.
|
||||
|
||||
The merge request's merge train status displays under the pipeline widget with a
|
||||
message similar to `Added to the merge train. There are 2 merge requests waiting to be merged.`
|
||||
|
||||

|
||||
Each merge train can run a maximum of twenty pipelines in parallel. If you add more than
|
||||
twenty merge requests to the merge train, the extra merge requests are queued, waiting
|
||||
for pipelines to complete. There is no limit to the number of queued merge requests
|
||||
waiting to join the merge train.
|
||||
|
||||
## Remove a merge request from a merge train
|
||||
|
||||
To remove a merge request from a merge train, select **Remove from merge train**.
|
||||
You can add the merge request to a merge train again later.
|
||||
|
||||

|
||||
When you remove a merge request from a merge train:
|
||||
|
||||
- All pipelines for merge requests queued after the removed merge request restart.
|
||||
- Redundant pipelines [are cancelled](#automatic-pipeline-cancellation).
|
||||
|
||||
## Skip the merge train and merge immediately
|
||||
|
||||
If you have a high-priority merge request, like a critical patch that must
|
||||
be merged urgently, select **Merge Immediately**.
|
||||
|
||||

|
||||
When you merge a merge request immediately:
|
||||
|
||||

|
||||
- The current merge train is recreated.
|
||||
- All pipelines restart.
|
||||
- Redundant pipelines [are cancelled](#automatic-pipeline-cancellation).
|
||||
|
||||
WARNING:
|
||||
Merging immediately can use a lot of CI/CD resources. Use this option
|
||||
only in critical situations.
|
||||
|
||||
### Merge trains feature flag **(PREMIUM SELF)**
|
||||
|
||||
In [GitLab 13.6 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/244831),
|
||||
you can [enable or disable merge trains in the project settings](#enable-merge-trains).
|
||||
|
||||
In GitLab 13.5 and earlier, merge trains are automatically enabled when
|
||||
[merged results pipelines](merged_results_pipelines.md) are enabled.
|
||||
To use merged results pipelines but not merge trains, enable the `disable_merge_trains`
|
||||
[feature flag](../../user/feature_flags.md).
|
||||
|
||||
[GitLab administrators with access to the GitLab Rails console](../../administration/feature_flags.md)
|
||||
can enable the feature flag to disable merge trains:
|
||||
|
|
@ -248,11 +187,64 @@ can enable the feature flag to disable merge trains:
|
|||
```ruby
Feature.enable(:disable_merge_trains)
|
||||
```
|
||||
|
||||
After you enable this feature flag, GitLab cancels existing merge trains and removes
|
||||
the **Start/Add to merge train** option from merge requests.
|
||||
|
||||
To disable the feature flag, which enables merge trains again:
|
||||
|
||||
```ruby
|
||||
Feature.disable(:disable_merge_trains)
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Merge request dropped from the merge train
|
||||
|
||||
If a merge request becomes unmergeable while a merge train pipeline is running,
|
||||
the merge train drops your merge request automatically. For example, this could be caused by:
|
||||
|
||||
- Changing the merge request to a [draft](../../user/project/merge_requests/drafts.md).
|
||||
- A merge conflict.
|
||||
- A new conversation thread that is unresolved, when [all threads must be resolved](../../user/discussions/index.md#prevent-merge-unless-all-threads-are-resolved)
|
||||
is enabled.
|
||||
|
||||
You can find the reason the merge request was dropped from the merge train in the system
|
||||
notes. Check the **Activity** section in the **Overview** tab for a message similar to:
|
||||
`User removed this merge request from the merge train because ...`
|
||||
|
||||
### Cannot use merge when pipeline succeeds
|
||||
|
||||
You cannot use [merge when pipeline succeeds](../../user/project/merge_requests/merge_when_pipeline_succeeds.md)
|
||||
when merge trains are enabled. See [the related issue](https://gitlab.com/gitlab-org/gitlab/-/issues/12267)
|
||||
for more information.
|
||||
|
||||
### Cannot retry merge train pipeline
|
||||
|
||||
When a merge train pipeline fails, the merge request is dropped from the train and the pipeline can't be retried.
|
||||
Merge train pipelines run on the merged result of the changes in the merge request and
|
||||
changes from other merge requests already on the train. If the merge request is dropped from the train,
|
||||
the merged result is out of date and the pipeline can't be retried.
|
||||
|
||||
You can:
|
||||
|
||||
- [Add the merge request to the train](#add-a-merge-request-to-a-merge-train) again,
|
||||
which triggers a new pipeline.
|
||||
- Add the [`retry`](../yaml/index.md#retry) keyword to the job if it fails intermittently, as in the sketch after this list.
|
||||
If it succeeds after a retry, the merge request is not removed from the merge train.
|
||||
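For example, an intermittently failing job could declare a retry policy like
the following sketch (the job name and script are placeholders):

```yaml
flaky-job:
  script: bundle exec rspec
  retry:
    max: 2
    when: script_failure
```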
|
||||
### Unable to add to the merge train
|
||||
|
||||
When [**Pipelines must succeed**](../../user/project/merge_requests/merge_when_pipeline_succeeds.md#require-a-successful-pipeline-for-merge)
|
||||
is enabled, but the latest pipeline failed:
|
||||
|
||||
- The **Start/Add to merge train** option is not available.
|
||||
- The merge request displays `The pipeline for this merge request failed. Please retry the job or push a new commit to fix the failure.`
|
||||
|
||||
Before you can re-add a merge request to a merge train, the merge request needs a new successful pipeline. You can try to:
|
||||
|
||||
- Retry the failed job. If it passes, and no other jobs failed, the pipeline is marked as successful.
|
||||
- Rerun the whole pipeline. On the **Pipelines** tab, select **Run pipeline**.
|
||||
- Push a new commit that fixes the issue, which also triggers a new pipeline.
|
||||
|
||||
See [the related issue](https://gitlab.com/gitlab-org/gitlab/-/issues/35135)
|
||||
for more information.
|
||||
|
|
|
|||
|
|
@ -1978,19 +1978,8 @@
|
|||
}
|
||||
]]>
|
||||
<p>okay</p>
|
||||
static: |-
|
||||
<![CDATA[
|
||||
function matchwo(a,b)
|
||||
{
|
||||
if (a < b && a < 0) then {
|
||||
return 1;
|
||||
static: |2-
|
||||
|
||||
} else {
|
||||
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
]]>
|
||||
<p data-sourcepos="13:1-13:4" dir="auto">okay</p>
|
||||
wysiwyg: |-
|
||||
<p>okay</p>
|
||||
|
|
@ -7424,14 +7413,14 @@
|
|||
canonical: |
|
||||
<p>foo <!ELEMENT br EMPTY></p>
|
||||
static: |-
|
||||
<p data-sourcepos="1:1-1:23" dir="auto">foo <!ELEMENT br EMPTY></p>
|
||||
<p data-sourcepos="1:1-1:23" dir="auto">foo </p>
|
||||
wysiwyg: |-
|
||||
<p>foo </p>
|
||||
06_11_00__inlines__raw_html__018:
|
||||
canonical: |
|
||||
<p>foo <![CDATA[>&<]]></p>
|
||||
static: |-
|
||||
<p data-sourcepos="1:1-1:19" dir="auto">foo <![CDATA[>&<]]></p>
|
||||
<p data-sourcepos="1:1-1:19" dir="auto">foo &<]]></p>
|
||||
wysiwyg: |-
|
||||
<p>foo &<]]></p>
|
||||
06_11_00__inlines__raw_html__019:
|
||||
|
|
|
|||
|
|
@ -173,6 +173,7 @@ module API
|
|||
mount ::API::Appearance
|
||||
mount ::API::Deployments
|
||||
mount ::API::Metadata
|
||||
mount ::API::MergeRequestDiffs
|
||||
mount ::API::UserCounts
|
||||
mount ::API::ProjectRepositoryStorageMoves
|
||||
mount ::API::SnippetRepositoryStorageMoves
|
||||
|
|
@ -260,7 +261,6 @@ module API
|
|||
mount ::API::MavenPackages
|
||||
mount ::API::Members
|
||||
mount ::API::MergeRequestApprovals
|
||||
mount ::API::MergeRequestDiffs
|
||||
mount ::API::MergeRequests
|
||||
mount ::API::Metrics::Dashboard::Annotations
|
||||
mount ::API::Metrics::UserStarredDashboards
|
||||
|
|
|
|||
|
|
@ -10,16 +10,18 @@ module API
|
|||
feature_category :code_review
|
||||
|
||||
params do
|
||||
requires :id, type: String, desc: 'The ID of a project'
|
||||
requires :id, type: String, desc: 'The ID of the project'
|
||||
end
|
||||
resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
|
||||
desc 'Get a list of merge request diff versions' do
|
||||
detail 'This feature was introduced in GitLab 8.12.'
|
||||
success Entities::MergeRequestDiff
|
||||
tags %w[merge_requests]
|
||||
is_array true
|
||||
end
|
||||
|
||||
params do
|
||||
requires :merge_request_iid, type: Integer, desc: 'The IID of a merge request'
|
||||
requires :merge_request_iid, type: Integer, desc: 'The internal ID of the merge request'
|
||||
use :pagination
|
||||
end
|
||||
get ":id/merge_requests/:merge_request_iid/versions" do
|
||||
|
|
@ -31,11 +33,12 @@ module API
|
|||
desc 'Get a single merge request diff version' do
|
||||
detail 'This feature was introduced in GitLab 8.12.'
|
||||
success Entities::MergeRequestDiffFull
|
||||
tags %w[merge_requests]
|
||||
end
|
||||
|
||||
params do
|
||||
requires :merge_request_iid, type: Integer, desc: 'The IID of a merge request'
|
||||
requires :version_id, type: Integer, desc: 'The ID of a merge request diff version'
|
||||
requires :merge_request_iid, type: Integer, desc: 'The internal ID of the merge request'
|
||||
requires :version_id, type: Integer, desc: 'The ID of the merge request diff version'
|
||||
end
|
||||
|
||||
get ":id/merge_requests/:merge_request_iid/versions/:version_id", urgency: :low do
|
||||
|
|
|
|||
|
|
@ -1,62 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module SidekiqMiddleware
|
||||
module DuplicateJobs
|
||||
# Cookie is a serialization format we use to minimize the number of keys
|
||||
# we read, write and delete in Redis. Keys and values must be strings.
|
||||
# Newlines are not allowed in either keys or values. Keys cannot contain
|
||||
# '='. This format has the useful property that serialize(h1) +
|
||||
# serialize(h2) == h1.merge(h2).
|
||||
class Cookie
|
||||
def self.serialize(hash)
|
||||
hash.map { |k, v| "#{k}=#{v}\n" }.join
|
||||
end
|
||||
|
||||
def self.deserialize(string)
|
||||
string.each_line(chomp: true).to_h { |line| line.split('=', 2) }
|
||||
end
|
||||
|
||||
def initialize(key)
|
||||
@key = key
|
||||
end
|
||||
|
||||
def set(hash, expiry)
|
||||
with_redis { |redis| redis.set(@key, self.class.serialize(hash), nx: true, ex: expiry) }
|
||||
end
|
||||
|
||||
def get
|
||||
with_redis { |redis| self.class.deserialize(redis.get(@key) || '') }
|
||||
end
|
||||
|
||||
def del
|
||||
with_redis { |redis| redis.del(@key) }
|
||||
end
|
||||
|
||||
def append(hash)
|
||||
with_redis do |redis|
|
||||
redis.eval(
|
||||
# Only append if the keys exists. This way we are not responsible for
|
||||
# setting the expiry of the key: that is the responsibility of #set.
|
||||
'if redis.call("exists", KEYS[1]) > 0 then redis.call("append", KEYS[1], ARGV[1]) end',
|
||||
keys: [@key],
|
||||
argv: [self.class.serialize(hash)]
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
def with_redis(&block)
|
||||
if Feature.enabled?(:use_primary_and_secondary_stores_for_duplicate_jobs) ||
|
||||
Feature.enabled?(:use_primary_store_as_default_for_duplicate_jobs)
|
||||
# TODO: Swap for Gitlab::Redis::SharedState after store transition
|
||||
# https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/923
|
||||
Gitlab::Redis::DuplicateJobs.with(&block) # rubocop:disable CodeReuse/ActiveRecord
|
||||
else
|
||||
# Keep the old behavior intact if neither feature flag is turned on
|
||||
Sidekiq.redis(&block) # rubocop:disable Cop/SidekiqRedisCall
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -81,7 +81,7 @@ module Gitlab
|
|||
|
||||
strong_memoize(:latest_wal_locations) do
|
||||
if Feature.enabled?(:duplicate_jobs_cookie)
|
||||
latest_wal_locations_cookie
|
||||
get_cookie.fetch('wal_locations', {})
|
||||
else
|
||||
latest_wal_locations_multi
|
||||
end
|
||||
|
|
@ -90,7 +90,7 @@ module Gitlab
|
|||
|
||||
def delete!
|
||||
if Feature.enabled?(:duplicate_jobs_cookie)
|
||||
cookie.del
|
||||
with_redis { |redis| redis.del(cookie_key) }
|
||||
else
|
||||
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
|
||||
with_redis do |redis|
|
||||
|
|
@ -123,7 +123,7 @@ module Gitlab
|
|||
return unless reschedulable?
|
||||
|
||||
if Feature.enabled?(:duplicate_jobs_cookie)
|
||||
cookie.append({ 'deduplicated' => '1' })
|
||||
with_redis { |redis| redis.eval(DEDUPLICATED_SCRIPT, keys: [cookie_key]) }
|
||||
else
|
||||
with_redis do |redis|
|
||||
redis.set(deduplicated_flag_key, DEDUPLICATED_FLAG_VALUE, ex: expiry, nx: true)
|
||||
|
|
@ -131,11 +131,21 @@ module Gitlab
|
|||
end
|
||||
end
|
||||
|
||||
DEDUPLICATED_SCRIPT = <<~LUA
|
||||
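-- Flags the stored cookie as deduplicated in place, preserving the key's
-- current TTL.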
local cookie_msgpack = redis.call("get", KEYS[1])
|
||||
if not cookie_msgpack then
|
||||
return
|
||||
end
|
||||
local cookie = cmsgpack.unpack(cookie_msgpack)
|
||||
cookie.deduplicated = "1"
|
||||
redis.call("set", KEYS[1], cmsgpack.pack(cookie), "ex", redis.call("ttl", KEYS[1]))
|
||||
LUA
|
||||
|
||||
def should_reschedule?
|
||||
return false unless reschedulable?
|
||||
|
||||
if Feature.enabled?(:duplicate_jobs_cookie)
|
||||
cookie.get['deduplicated'].present?
|
||||
get_cookie['deduplicated'].present?
|
||||
else
|
||||
with_redis do |redis|
|
||||
redis.get(deduplicated_flag_key).present?
|
||||
|
|
@ -172,16 +182,26 @@ module Gitlab
|
|||
attr_writer :existing_jid
|
||||
|
||||
def check_cookie!(expiry)
|
||||
my_cookie = { 'jid' => jid }
|
||||
job_wal_locations.each do |connection_name, location|
|
||||
my_cookie["existing_wal_location:#{connection_name}"] = location
|
||||
end
|
||||
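# 'jid' identifies the job that holds the deduplication lock;
# 'offsets' and 'wal_locations' are filled in later with the newest WAL
# position seen per connection; 'existing_wal_locations' captures the WAL
# positions at the time this job was scheduled.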
my_cookie = {
|
||||
'jid' => jid,
|
||||
'offsets' => {},
|
||||
'wal_locations' => {},
|
||||
'existing_wal_locations' => job_wal_locations
|
||||
}
|
||||
|
||||
actual_cookie = cookie.set(my_cookie, expiry) ? my_cookie : cookie.get
|
||||
# There are 3 possible scenarios. In order of decreasing likelihood:
|
||||
# 1. SET NX succeeds.
|
||||
# 2. SET NX fails, GET succeeds.
|
||||
# 3. SET NX fails, the key expires and GET fails. In this case we must retry.
|
||||
actual_cookie = {}
|
||||
while actual_cookie.empty?
|
||||
set_succeeded = with_redis { |r| r.set(cookie_key, my_cookie.to_msgpack, nx: true, ex: expiry) }
|
||||
actual_cookie = set_succeeded ? my_cookie : get_cookie
|
||||
end
|
||||
|
||||
job['idempotency_key'] = idempotency_key
|
||||
|
||||
self.existing_wal_locations = filter_prefix(actual_cookie, 'existing_wal_location:')
|
||||
self.existing_wal_locations = actual_cookie['existing_wal_locations']
|
||||
self.existing_jid = actual_cookie['jid']
|
||||
end
|
||||
|
||||
|
|
@ -205,15 +225,40 @@ module Gitlab
|
|||
end
|
||||
|
||||
def update_latest_wal_location_cookie!
|
||||
new_wal_locations = {}
|
||||
argv = []
|
||||
job_wal_locations.each do |connection_name, location|
|
||||
offset = pg_wal_lsn_diff(connection_name).to_i
|
||||
new_wal_locations["wal_location:#{connection_name}:#{offset}"] = location
|
||||
argv += [connection_name, pg_wal_lsn_diff(connection_name), location]
|
||||
end
|
||||
|
||||
cookie.append(new_wal_locations)
|
||||
with_redis { |r| r.eval(UPDATE_WAL_COOKIE_SCRIPT, keys: [cookie_key], argv: argv) }
|
||||
end
|
||||
|
||||
# Generally speaking, updating a Redis key by deserializing and
|
||||
# serializing it on the Redis server is bad for performance. However in
|
||||
# the case of DuplicateJobs we know that key updates are rare, and the
|
||||
# most common operations are setting, getting and deleting the key. The
|
||||
# aim of this design is to make the common operations as fast as
|
||||
# possible.
|
||||
UPDATE_WAL_COOKIE_SCRIPT = <<~LUA
|
||||
local cookie_msgpack = redis.call("get", KEYS[1])
|
||||
if not cookie_msgpack then
|
||||
return
|
||||
end
|
||||
local cookie = cmsgpack.unpack(cookie_msgpack)
|
||||
|
||||
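-- ARGV is a flat list of (connection, offset, wal_location) triples; only
-- the newest offset per connection is kept.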
for i = 1, #ARGV, 3 do
|
||||
local connection = ARGV[i]
|
||||
local current_offset = cookie.offsets[connection]
|
||||
local new_offset = tonumber(ARGV[i+1])
|
||||
if not current_offset or current_offset < new_offset then
|
||||
cookie.offsets[connection] = new_offset
|
||||
cookie.wal_locations[connection] = ARGV[i+2]
|
||||
end
|
||||
end
|
||||
|
||||
redis.call("set", KEYS[1], cmsgpack.pack(cookie), "ex", redis.call("ttl", KEYS[1]))
|
||||
LUA
|
||||
|
||||
def update_latest_wal_location_multi!
|
||||
with_redis do |redis|
|
||||
redis.multi do |multi|
|
||||
|
|
@ -228,21 +273,6 @@ module Gitlab
|
|||
end
|
||||
end
|
||||
|
||||
def latest_wal_locations_cookie
|
||||
wal_locations = {}
|
||||
offsets = {}
|
||||
filter_prefix(cookie.get, 'wal_location:').each do |key, value|
|
||||
connection, offset = key.split(':', 2)
|
||||
offset = offset.to_i
|
||||
if !offsets[connection] || offsets[connection] < offset
|
||||
offsets[connection] = offset
|
||||
wal_locations[connection] = value
|
||||
end
|
||||
end
|
||||
|
||||
wal_locations
|
||||
end
|
||||
|
||||
def latest_wal_locations_multi
|
||||
read_wal_locations = {}
|
||||
|
||||
|
|
@ -256,17 +286,6 @@ module Gitlab
|
|||
read_wal_locations.transform_values(&:value).compact
|
||||
end
|
||||
|
||||
# Filter_prefix extracts a sub-hash from a Ruby hash. For example, with
|
||||
# input h = { 'foo:a' => '1', 'foo:b' => '2', 'bar' => '3' }, the output
|
||||
# of filter_prefix(h, 'foo:') is { 'a' => '1', 'b' => '2' }.
|
||||
def filter_prefix(hash, prefix)
|
||||
out = {}
|
||||
hash.each do |k, v|
|
||||
out[k.delete_prefix(prefix)] = v if k.start_with?(prefix)
|
||||
end
|
||||
out
|
||||
end
|
||||
|
||||
def worker_klass
|
||||
@worker_klass ||= worker_class_name.to_s.safe_constantize
|
||||
end
|
||||
|
|
@ -331,8 +350,12 @@ module Gitlab
|
|||
"#{idempotency_key}:#{connection_name}:wal_location"
|
||||
end
|
||||
|
||||
def cookie
|
||||
@cookie ||= Cookie.new("#{idempotency_key}:cookie")
|
||||
def cookie_key
|
||||
"#{idempotency_key}:cookie"
|
||||
end
|
||||
|
||||
def get_cookie
|
||||
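# "\x80" is the MessagePack encoding of an empty map, so a missing key
# deserializes to {}.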
with_redis { |redis| MessagePack.unpack(redis.get(cookie_key) || "\x80") }
|
||||
end
|
||||
|
||||
def idempotency_key
|
||||
|
|
|
|||
|
|
@ -39364,9 +39364,15 @@ msgstr ""
|
|||
msgid "SuggestedReviewers|Get suggestions for reviewers based on GitLab's machine learning tool."
|
||||
msgstr ""
|
||||
|
||||
msgid "SuggestedReviewers|Learn about suggested reviewers"
|
||||
msgstr ""
|
||||
|
||||
msgid "SuggestedReviewers|Suggested reviewers"
|
||||
msgstr ""
|
||||
|
||||
msgid "SuggestedReviewers|Suggested reviewers help link"
|
||||
msgstr ""
|
||||
|
||||
msgid "SuggestedReviewers|Suggestions appear in the Reviewer section of the right sidebar"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@ -1,34 +1,127 @@
|
|||
import $ from 'jquery';
|
||||
import { createWrapper } from '@vue/test-utils';
|
||||
import { __ } from '~/locale';
|
||||
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
|
||||
import renderMermaid from '~/behaviors/markdown/render_sandboxed_mermaid';
|
||||
import renderMermaid, {
|
||||
MAX_CHAR_LIMIT,
|
||||
MAX_MERMAID_BLOCK_LIMIT,
|
||||
LAZY_ALERT_SHOWN_CLASS,
|
||||
} from '~/behaviors/markdown/render_sandboxed_mermaid';
|
||||
|
||||
describe('Render mermaid diagrams for Gitlab Flavoured Markdown', () => {
|
||||
it('Does something', () => {
|
||||
document.body.dataset.page = '';
|
||||
setHTMLFixture(`
|
||||
<div class="gl-relative markdown-code-block js-markdown-code">
|
||||
<pre data-sourcepos="1:1-7:3" class="code highlight js-syntax-highlight language-mermaid white" lang="mermaid" id="code-4">
|
||||
<code class="js-render-mermaid">
|
||||
<span id="LC1" class="line" lang="mermaid">graph TD;</span>
|
||||
<span id="LC2" class="line" lang="mermaid">A-->B</span>
|
||||
<span id="LC3" class="line" lang="mermaid">A-->C</span>
|
||||
<span id="LC4" class="line" lang="mermaid">B-->D</span>
|
||||
<span id="LC5" class="line" lang="mermaid">C-->D</span>
|
||||
</code>
|
||||
</pre>
|
||||
<copy-code>
|
||||
<button type="button" class="btn btn-default btn-md gl-button btn-icon has-tooltip" data-title="Copy to clipboard" data-clipboard-target="pre#code-4">
|
||||
<svg><use xlink:href="/assets/icons-7f1680a3670112fe4c8ef57b9dfb93f0f61b43a2a479d7abd6c83bcb724b9201.svg#copy-to-clipboard"></use></svg>
|
||||
</button>
|
||||
</copy-code>
|
||||
</div>`);
|
||||
const els = $('pre.js-syntax-highlight').find('.js-render-mermaid');
|
||||
|
||||
renderMermaid(els);
|
||||
describe('Mermaid diagrams renderer', () => {
|
||||
// Finders
|
||||
const findMermaidIframes = () => document.querySelectorAll('iframe[src="/-/sandbox/mermaid"]');
|
||||
const findDangerousMermaidAlert = () =>
|
||||
createWrapper(document.querySelector('[data-testid="alert-warning"]'));
|
||||
|
||||
// Helpers
|
||||
const renderDiagrams = () => {
|
||||
renderMermaid([...document.querySelectorAll('.js-render-mermaid')]);
|
||||
jest.runAllTimers();
|
||||
expect(document.querySelector('pre.js-syntax-highlight').classList).toContain('gl-sr-only');
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
document.body.dataset.page = '';
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
resetHTMLFixture();
|
||||
});
|
||||
|
||||
it('renders a mermaid diagram', () => {
|
||||
setHTMLFixture('<pre><code class="js-render-mermaid"></code></pre>');
|
||||
|
||||
expect(findMermaidIframes()).toHaveLength(0);
|
||||
|
||||
renderDiagrams();
|
||||
|
||||
expect(document.querySelector('pre').classList).toContain('gl-sr-only');
|
||||
expect(findMermaidIframes()).toHaveLength(1);
|
||||
});
|
||||
|
||||
describe('within a details element', () => {
|
||||
beforeEach(() => {
|
||||
setHTMLFixture('<details><pre><code class="js-render-mermaid"></code></pre></details>');
|
||||
renderDiagrams();
|
||||
});
|
||||
|
||||
it('does not render the diagram on load', () => {
|
||||
expect(findMermaidIframes()).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('renders the diagram when the details element is opened', () => {
|
||||
document.querySelector('details').setAttribute('open', true);
|
||||
document.querySelector('details').dispatchEvent(new Event('toggle'));
|
||||
jest.runAllTimers();
|
||||
|
||||
expect(findMermaidIframes()).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('dangerous diagrams', () => {
|
||||
describe(`when the diagram's source exceeds ${MAX_CHAR_LIMIT} characters`, () => {
|
||||
beforeEach(() => {
|
||||
setHTMLFixture(
|
||||
`<pre>
|
||||
<code class="js-render-mermaid">${Array(MAX_CHAR_LIMIT + 1)
|
||||
.fill('a')
|
||||
.join('')}</code>
|
||||
</pre>`,
|
||||
);
|
||||
renderDiagrams();
|
||||
});
|
||||
it('does not render the diagram on load', () => {
|
||||
expect(findMermaidIframes()).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('shows a warning about performance impact when rendering the diagram', () => {
|
||||
expect(document.querySelector('pre').classList).toContain(LAZY_ALERT_SHOWN_CLASS);
|
||||
expect(findDangerousMermaidAlert().exists()).toBe(true);
|
||||
expect(findDangerousMermaidAlert().text()).toContain(
|
||||
__('Warning: Displaying this diagram might cause performance issues on this page.'),
|
||||
);
|
||||
});
|
||||
|
||||
it("renders the diagram when clicking on the alert's button", () => {
|
||||
findDangerousMermaidAlert().find('button').trigger('click');
|
||||
jest.runAllTimers();
|
||||
|
||||
expect(findMermaidIframes()).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
it(`stops rendering diagrams once the total rendered source exceeds ${MAX_CHAR_LIMIT} characters`, () => {
|
||||
setHTMLFixture(
|
||||
`<pre>
|
||||
<code class="js-render-mermaid">${Array(MAX_CHAR_LIMIT - 1)
|
||||
.fill('a')
|
||||
.join('')}</code>
|
||||
<code class="js-render-mermaid">2</code>
|
||||
<code class="js-render-mermaid">3</code>
|
||||
<code class="js-render-mermaid">4</code>
|
||||
</pre>`,
|
||||
);
|
||||
renderDiagrams();
|
||||
|
||||
expect(findMermaidIframes()).toHaveLength(3);
|
||||
});
|
||||
|
||||
// Note: The test case below is provided for convenience but should remain skipped as the DOM
|
||||
// operations it requires are too expensive and would significantly slow down the test suite.
|
||||
// eslint-disable-next-line jest/no-disabled-tests
|
||||
it.skip(`stops rendering diagrams when the rendered diagrams count exceeds ${MAX_MERMAID_BLOCK_LIMIT}`, () => {
|
||||
setHTMLFixture(
|
||||
`<pre>
|
||||
${Array(MAX_MERMAID_BLOCK_LIMIT + 1)
|
||||
.fill('<code class="js-render-mermaid"></code>')
|
||||
.join('')}
|
||||
</pre>`,
|
||||
);
|
||||
renderDiagrams();
|
||||
|
||||
expect([...document.querySelectorAll('.js-render-mermaid')]).toHaveLength(
|
||||
MAX_MERMAID_BLOCK_LIMIT + 1,
|
||||
);
|
||||
expect(findMermaidIframes()).toHaveLength(MAX_MERMAID_BLOCK_LIMIT);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -131,7 +131,6 @@ describe('CE IssuesListApp component', () => {
|
|||
const mountComponent = ({
|
||||
provide = {},
|
||||
data = {},
|
||||
workItems = false,
|
||||
issuesQueryResponse = mockIssuesQueryResponse,
|
||||
issuesCountsQueryResponse = mockIssuesCountsQueryResponse,
|
||||
sortPreferenceMutationResponse = jest.fn().mockResolvedValue(setSortPreferenceMutationResponse),
|
||||
|
|
@ -150,9 +149,6 @@ describe('CE IssuesListApp component', () => {
|
|||
apolloProvider: createMockApollo(requestHandlers),
|
||||
router,
|
||||
provide: {
|
||||
glFeatures: {
|
||||
workItems,
|
||||
},
|
||||
...defaultProvide,
|
||||
...provide,
|
||||
},
|
||||
|
|
@ -1060,45 +1056,23 @@ describe('CE IssuesListApp component', () => {
|
|||
});
|
||||
|
||||
describe('fetching issues', () => {
|
||||
describe('when work_items feature flag is disabled', () => {
|
||||
beforeEach(() => {
|
||||
wrapper = mountComponent({ workItems: false });
|
||||
jest.runOnlyPendingTimers();
|
||||
});
|
||||
|
||||
it('fetches issue, incident, and test case types', () => {
|
||||
const types = [
|
||||
WORK_ITEM_TYPE_ENUM_ISSUE,
|
||||
WORK_ITEM_TYPE_ENUM_INCIDENT,
|
||||
WORK_ITEM_TYPE_ENUM_TEST_CASE,
|
||||
];
|
||||
|
||||
expect(mockIssuesQueryResponse).toHaveBeenCalledWith(expect.objectContaining({ types }));
|
||||
expect(mockIssuesCountsQueryResponse).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ types }),
|
||||
);
|
||||
});
|
||||
beforeEach(() => {
|
||||
wrapper = mountComponent();
|
||||
jest.runOnlyPendingTimers();
|
||||
});
|
||||
|
||||
describe('when work_items feature flag is enabled', () => {
|
||||
beforeEach(() => {
|
||||
wrapper = mountComponent({ workItems: true });
|
||||
jest.runOnlyPendingTimers();
|
||||
});
|
||||
it('fetches issue, incident, test case, and task types', () => {
|
||||
const types = [
|
||||
WORK_ITEM_TYPE_ENUM_ISSUE,
|
||||
WORK_ITEM_TYPE_ENUM_INCIDENT,
|
||||
WORK_ITEM_TYPE_ENUM_TEST_CASE,
|
||||
WORK_ITEM_TYPE_ENUM_TASK,
|
||||
];
|
||||
|
||||
it('fetches issue, incident, test case, and task types', () => {
|
||||
const types = [
|
||||
WORK_ITEM_TYPE_ENUM_ISSUE,
|
||||
WORK_ITEM_TYPE_ENUM_INCIDENT,
|
||||
WORK_ITEM_TYPE_ENUM_TEST_CASE,
|
||||
WORK_ITEM_TYPE_ENUM_TASK,
|
||||
];
|
||||
|
||||
expect(mockIssuesQueryResponse).toHaveBeenCalledWith(expect.objectContaining({ types }));
|
||||
expect(mockIssuesCountsQueryResponse).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ types }),
|
||||
);
|
||||
});
|
||||
expect(mockIssuesQueryResponse).toHaveBeenCalledWith(expect.objectContaining({ types }));
|
||||
expect(mockIssuesCountsQueryResponse).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ types }),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -5,20 +5,22 @@ import markdownTableJson from 'test_fixtures/blob/notebook/markdown-table.json';
|
|||
import basicJson from 'test_fixtures/blob/notebook/basic.json';
|
||||
import mathJson from 'test_fixtures/blob/notebook/math.json';
|
||||
import MarkdownComponent from '~/notebook/cells/markdown.vue';
|
||||
import Prompt from '~/notebook/cells/prompt.vue';
|
||||
|
||||
const Component = Vue.extend(MarkdownComponent);
|
||||
|
||||
window.katex = katex;
|
||||
|
||||
function buildCellComponent(cell, relativePath = '') {
|
||||
function buildCellComponent(cell, relativePath = '', hidePrompt) {
|
||||
return mount(Component, {
|
||||
propsData: {
|
||||
cell,
|
||||
hidePrompt,
|
||||
},
|
||||
provide: {
|
||||
relativeRawPath: relativePath,
|
||||
},
|
||||
}).vm;
|
||||
});
|
||||
}
|
||||
|
||||
function buildMarkdownComponent(markdownContent, relativePath = '') {
|
||||
|
|
@ -33,7 +35,7 @@ function buildMarkdownComponent(markdownContent, relativePath = '') {
|
|||
}
|
||||
|
||||
describe('Markdown component', () => {
|
||||
let vm;
|
||||
let wrapper;
|
||||
let cell;
|
||||
let json;
|
||||
|
||||
|
|
@ -43,21 +45,30 @@ describe('Markdown component', () => {
|
|||
// eslint-disable-next-line prefer-destructuring
|
||||
cell = json.cells[1];
|
||||
|
||||
vm = buildCellComponent(cell);
|
||||
wrapper = buildCellComponent(cell);
|
||||
|
||||
await nextTick();
|
||||
});
|
||||
|
||||
it('does not render prompt', () => {
|
||||
expect(vm.$el.querySelector('.prompt span')).toBeNull();
|
||||
const findPrompt = () => wrapper.findComponent(Prompt);
|
||||
|
||||
it('renders a prompt by default', () => {
|
||||
expect(findPrompt().exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('does not render a prompt if hidePrompt is true', () => {
|
||||
wrapper = buildCellComponent(cell, '', true);
|
||||
expect(findPrompt().exists()).toBe(false);
|
||||
});
|
||||
|
||||
it('does not render the markdown text', () => {
|
||||
expect(vm.$el.querySelector('.markdown').innerHTML.trim()).not.toEqual(cell.source.join(''));
|
||||
expect(wrapper.vm.$el.querySelector('.markdown').innerHTML.trim()).not.toEqual(
|
||||
cell.source.join(''),
|
||||
);
|
||||
});
|
||||
|
||||
it('renders the markdown HTML', () => {
|
||||
expect(vm.$el.querySelector('.markdown h1')).not.toBeNull();
|
||||
expect(wrapper.vm.$el.querySelector('.markdown h1')).not.toBeNull();
|
||||
});
|
||||
|
||||
it('sanitizes Markdown output', async () => {
|
||||
|
|
@ -68,11 +79,11 @@ describe('Markdown component', () => {
|
|||
});
|
||||
|
||||
await nextTick();
|
||||
expect(vm.$el.querySelector('a').getAttribute('href')).toBeNull();
|
||||
expect(wrapper.vm.$el.querySelector('a').getAttribute('href')).toBeNull();
|
||||
});
|
||||
|
||||
it('sanitizes HTML', async () => {
|
||||
const findLink = () => vm.$el.querySelector('.xss-link');
|
||||
const findLink = () => wrapper.vm.$el.querySelector('.xss-link');
|
||||
Object.assign(cell, {
|
||||
source: ['<a href="test.js" data-remote=true data-type="script" class="xss-link">XSS</a>\n'],
|
||||
});
|
||||
|
|
@ -97,11 +108,11 @@ describe('Markdown component', () => {
|
|||
["for embedded images, it doesn't", '\n', 'src="data:'],
|
||||
["for images urls, it doesn't", '\n', 'src="http:'],
|
||||
])('%s', async ([testMd, mustContain]) => {
|
||||
vm = buildMarkdownComponent([testMd], '/raw/');
|
||||
wrapper = buildMarkdownComponent([testMd], '/raw/');
|
||||
|
||||
await nextTick();
|
||||
|
||||
expect(vm.$el.innerHTML).toContain(mustContain);
|
||||
expect(wrapper.vm.$el.innerHTML).toContain(mustContain);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@ -111,13 +122,13 @@ describe('Markdown component', () => {
|
|||
});
|
||||
|
||||
it('renders images and text', async () => {
|
||||
vm = buildCellComponent(json.cells[0]);
|
||||
wrapper = buildCellComponent(json.cells[0]);
|
||||
|
||||
await nextTick();
|
||||
const images = vm.$el.querySelectorAll('img');
|
||||
const images = wrapper.vm.$el.querySelectorAll('img');
|
||||
expect(images.length).toBe(5);
|
||||
|
||||
const columns = vm.$el.querySelectorAll('td');
|
||||
const columns = wrapper.vm.$el.querySelectorAll('td');
|
||||
expect(columns.length).toBe(6);
|
||||
|
||||
expect(columns[0].textContent).toEqual('Hello ');
|
||||
|
|
@ -141,81 +152,93 @@ describe('Markdown component', () => {
|
|||
});
|
||||
|
||||
it('renders multi-line katex', async () => {
|
||||
vm = buildCellComponent(json.cells[0]);
|
||||
wrapper = buildCellComponent(json.cells[0]);
|
||||
|
||||
await nextTick();
|
||||
expect(vm.$el.querySelector('.katex')).not.toBeNull();
|
||||
expect(wrapper.vm.$el.querySelector('.katex')).not.toBeNull();
|
||||
});
|
||||
|
||||
it('renders inline katex', async () => {
|
||||
vm = buildCellComponent(json.cells[1]);
|
||||
wrapper = buildCellComponent(json.cells[1]);
|
||||
|
||||
await nextTick();
|
||||
expect(vm.$el.querySelector('p:first-child .katex')).not.toBeNull();
|
||||
expect(wrapper.vm.$el.querySelector('p:first-child .katex')).not.toBeNull();
|
||||
});
|
||||
|
||||
it('renders multiple inline katex', async () => {
|
||||
vm = buildCellComponent(json.cells[1]);
|
||||
wrapper = buildCellComponent(json.cells[1]);
|
||||
|
||||
await nextTick();
|
||||
expect(vm.$el.querySelectorAll('p:nth-child(2) .katex')).toHaveLength(4);
|
||||
expect(wrapper.vm.$el.querySelectorAll('p:nth-child(2) .katex')).toHaveLength(4);
|
||||
});
|
||||
|
||||
it('output cell in case of katex error', async () => {
|
||||
vm = buildMarkdownComponent(['Some invalid $a & b$ inline formula $b & c$\n', '\n']);
|
||||
wrapper = buildMarkdownComponent(['Some invalid $a & b$ inline formula $b & c$\n', '\n']);
|
||||
|
||||
await nextTick();
|
||||
// expect one paragraph with no katex formula in it
|
||||
expect(vm.$el.querySelectorAll('p')).toHaveLength(1);
|
||||
expect(vm.$el.querySelectorAll('p .katex')).toHaveLength(0);
|
||||
expect(wrapper.vm.$el.querySelectorAll('p')).toHaveLength(1);
|
||||
expect(wrapper.vm.$el.querySelectorAll('p .katex')).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('output cell and render remaining formula in case of katex error', async () => {
|
||||
vm = buildMarkdownComponent([
|
||||
wrapper = buildMarkdownComponent([
|
||||
'An invalid $a & b$ inline formula and a valid one $b = c$\n',
|
||||
'\n',
|
||||
]);
|
||||
|
||||
await nextTick();
|
||||
// expect one paragraph with no katex formula in it
|
||||
expect(vm.$el.querySelectorAll('p')).toHaveLength(1);
|
||||
expect(vm.$el.querySelectorAll('p .katex')).toHaveLength(1);
|
||||
expect(wrapper.vm.$el.querySelectorAll('p')).toHaveLength(1);
|
||||
expect(wrapper.vm.$el.querySelectorAll('p .katex')).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('renders math formula in list object', async () => {
|
||||
vm = buildMarkdownComponent(["- list with inline $a=2$ inline formula $a' + b = c$\n", '\n']);
|
||||
wrapper = buildMarkdownComponent([
|
||||
"- list with inline $a=2$ inline formula $a' + b = c$\n",
|
||||
'\n',
|
||||
]);
|
||||
|
||||
await nextTick();
|
||||
// expect one list with a katex formula in it
|
||||
expect(vm.$el.querySelectorAll('li')).toHaveLength(1);
|
||||
expect(vm.$el.querySelectorAll('li .katex')).toHaveLength(2);
|
||||
expect(wrapper.vm.$el.querySelectorAll('li')).toHaveLength(1);
|
||||
expect(wrapper.vm.$el.querySelectorAll('li .katex')).toHaveLength(2);
|
||||
});
|
||||
|
||||
it("renders math formula with tick ' in it", async () => {
|
||||
vm = buildMarkdownComponent(["- list with inline $a=2$ inline formula $a' + b = c$\n", '\n']);
|
||||
wrapper = buildMarkdownComponent([
|
||||
"- list with inline $a=2$ inline formula $a' + b = c$\n",
|
||||
'\n',
|
||||
]);
|
||||
|
||||
await nextTick();
|
||||
// expect one list with a katex formula in it
|
||||
expect(vm.$el.querySelectorAll('li')).toHaveLength(1);
|
||||
expect(vm.$el.querySelectorAll('li .katex')).toHaveLength(2);
|
||||
expect(wrapper.vm.$el.querySelectorAll('li')).toHaveLength(1);
|
||||
expect(wrapper.vm.$el.querySelectorAll('li .katex')).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('renders math formula with less-than-operator < in it', async () => {
|
||||
vm = buildMarkdownComponent(['- list with inline $a=2$ inline formula $a + b < c$\n', '\n']);
|
||||
wrapper = buildMarkdownComponent([
|
||||
'- list with inline $a=2$ inline formula $a + b < c$\n',
|
||||
'\n',
|
||||
]);
|
||||
|
||||
await nextTick();
|
||||
// expect one list with a katex formula in it
|
||||
expect(vm.$el.querySelectorAll('li')).toHaveLength(1);
|
||||
expect(vm.$el.querySelectorAll('li .katex')).toHaveLength(2);
|
||||
expect(wrapper.vm.$el.querySelectorAll('li')).toHaveLength(1);
|
||||
expect(wrapper.vm.$el.querySelectorAll('li .katex')).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('renders math formula with greater-than-operator > in it', async () => {
|
||||
vm = buildMarkdownComponent(['- list with inline $a=2$ inline formula $a + b > c$\n', '\n']);
|
||||
wrapper = buildMarkdownComponent([
|
||||
'- list with inline $a=2$ inline formula $a + b > c$\n',
|
||||
'\n',
|
||||
]);
|
||||
|
||||
await nextTick();
|
||||
// expect one list with a katex formula in it
|
||||
expect(vm.$el.querySelectorAll('li')).toHaveLength(1);
|
||||
expect(vm.$el.querySelectorAll('li .katex')).toHaveLength(2);
|
||||
expect(wrapper.vm.$el.querySelectorAll('li')).toHaveLength(1);
|
||||
expect(wrapper.vm.$el.querySelectorAll('li .katex')).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1,12 +1,15 @@
|
|||
import { mount } from '@vue/test-utils';
|
||||
import json from 'test_fixtures/blob/notebook/basic.json';
|
||||
import Output from '~/notebook/cells/output/index.vue';
|
||||
import MarkdownOutput from '~/notebook/cells/output/markdown.vue';
|
||||
import { relativeRawPath, markdownCellContent } from '../../mock_data';
|
||||
|
||||
describe('Output component', () => {
|
||||
let wrapper;
|
||||
|
||||
const createComponent = (output) => {
|
||||
wrapper = mount(Output, {
|
||||
provide: { relativeRawPath },
|
||||
propsData: {
|
||||
outputs: [].concat(output),
|
||||
count: 1,
|
||||
|
|
@ -95,6 +98,17 @@ describe('Output component', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('Markdown output', () => {
|
||||
beforeEach(() => {
|
||||
const markdownType = { data: { 'text/markdown': markdownCellContent } };
|
||||
createComponent(markdownType);
|
||||
});
|
||||
|
||||
it('renders a markdown component', () => {
|
||||
expect(wrapper.findComponent(MarkdownOutput).props('rawCode')).toBe(markdownCellContent);
|
||||
});
|
||||
});
|
||||
|
||||
describe('default to plain text', () => {
|
||||
beforeEach(() => {
|
||||
const unknownType = json.cells[6];
|
||||
|
|
|
|||
|
|
@ -0,0 +1,44 @@
|
|||
import { mount } from '@vue/test-utils';
|
||||
import MarkdownOutput from '~/notebook/cells/output/markdown.vue';
|
||||
import Prompt from '~/notebook/cells/prompt.vue';
|
||||
import Markdown from '~/notebook/cells/markdown.vue';
|
||||
import { relativeRawPath, markdownCellContent } from '../../mock_data';
|
||||
|
||||
describe('markdown output cell', () => {
|
||||
let wrapper;
|
||||
|
||||
const createComponent = ({ count = 0, index = 0 } = {}) => {
|
||||
wrapper = mount(MarkdownOutput, {
|
||||
provide: { relativeRawPath },
|
||||
propsData: {
|
||||
rawCode: markdownCellContent,
|
||||
count,
|
||||
index,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
createComponent();
|
||||
});
|
||||
|
||||
const findPrompt = () => wrapper.findComponent(Prompt);
|
||||
const findMarkdown = () => wrapper.findComponent(Markdown);
|
||||
|
||||
it.each`
|
||||
index | count | showOutput
|
||||
${0} | ${1} | ${true}
|
||||
${1} | ${2} | ${false}
|
||||
${2} | ${3} | ${false}
|
||||
`('renders a prompt', ({ index, count, showOutput }) => {
|
||||
createComponent({ count, index });
|
||||
expect(findPrompt().props()).toMatchObject({ count, showOutput, type: 'Out' });
|
||||
});
|
||||
|
||||
it('renders a Markdown component', () => {
|
||||
expect(findMarkdown().props()).toMatchObject({
|
||||
cell: { source: markdownCellContent },
|
||||
hidePrompt: true,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
export const relativeRawPath = '/test';
|
||||
export const markdownCellContent = ['# Test'];
|
||||
|
|
@ -292,6 +292,42 @@ describe('Release edit component', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('remove button state', () => {
|
||||
describe('when there is only one link', () => {
|
||||
beforeEach(() => {
|
||||
factory({
|
||||
release: {
|
||||
...release,
|
||||
assets: {
|
||||
links: release.assets.links.slice(0, 1),
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('remove asset link button should not be present', () => {
|
||||
expect(wrapper.find('.remove-button').exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when there are multiple links', () => {
|
||||
beforeEach(() => {
|
||||
factory({
|
||||
release: {
|
||||
...release,
|
||||
assets: {
|
||||
links: release.assets.links.slice(0, 2),
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('remove asset link button should be visible', () => {
|
||||
expect(wrapper.find('.remove-button').exists()).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('empty state', () => {
|
||||
describe('when the release fetched from the API has no links', () => {
|
||||
beforeEach(() => {
|
||||
|
|
@ -325,12 +361,6 @@ describe('Release edit component', () => {
|
|||
it('does not call the addEmptyAssetLink store method when the component is created', () => {
|
||||
expect(actions.addEmptyAssetLink).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('calls addEmptyAssetLink when the final link is deleted by the user', () => {
|
||||
wrapper.find('.remove-button').vm.$emit('click');
|
||||
|
||||
expect(actions.addEmptyAssetLink).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -3,76 +3,89 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe GraphqlTriggers do
|
||||
describe '.issuable_assignees_updated' do
|
||||
it 'triggers the issuableAssigneesUpdated subscription' do
|
||||
assignees = create_list(:user, 2)
|
||||
issue = create(:issue, assignees: assignees)
|
||||
let_it_be(:issuable, refind: true) { create(:work_item) }
|
||||
|
||||
describe '.issuable_assignees_updated' do
|
||||
let(:assignees) { create_list(:user, 2) }
|
||||
|
||||
before do
|
||||
issuable.update!(assignees: assignees)
|
||||
end
|
||||
|
||||
it 'triggers the issuableAssigneesUpdated subscription' do
|
||||
expect(GitlabSchema.subscriptions).to receive(:trigger).with(
|
||||
'issuableAssigneesUpdated',
|
||||
{ issuable_id: issue.to_gid },
|
||||
issue
|
||||
{ issuable_id: issuable.to_gid },
|
||||
issuable
|
||||
)
|
||||
|
||||
GraphqlTriggers.issuable_assignees_updated(issue)
|
||||
GraphqlTriggers.issuable_assignees_updated(issuable)
|
||||
end
|
||||
end
|
||||
|
||||
describe '.issuable_title_updated' do
|
||||
it 'triggers the issuableTitleUpdated subscription' do
|
||||
work_item = create(:work_item)
|
||||
|
||||
expect(GitlabSchema.subscriptions).to receive(:trigger).with(
|
||||
'issuableTitleUpdated',
|
||||
{ issuable_id: work_item.to_gid },
|
||||
work_item
|
||||
{ issuable_id: issuable.to_gid },
|
||||
issuable
|
||||
).and_call_original
|
||||
|
||||
GraphqlTriggers.issuable_title_updated(work_item)
|
||||
GraphqlTriggers.issuable_title_updated(issuable)
|
||||
end
|
||||
end
|
||||
|
||||
describe '.issuable_description_updated' do
|
||||
it 'triggers the issuableDescriptionUpdated subscription' do
|
||||
work_item = create(:work_item)
|
||||
|
||||
expect(GitlabSchema.subscriptions).to receive(:trigger).with(
|
||||
'issuableDescriptionUpdated',
|
||||
{ issuable_id: work_item.to_gid },
|
||||
work_item
|
||||
{ issuable_id: issuable.to_gid },
|
||||
issuable
|
||||
).and_call_original
|
||||
|
||||
GraphqlTriggers.issuable_description_updated(work_item)
|
||||
GraphqlTriggers.issuable_description_updated(issuable)
|
||||
end
|
||||
end
|
||||
|
||||
describe '.issuable_labels_updated' do
|
||||
it 'triggers the issuableLabelsUpdated subscription' do
|
||||
project = create(:project)
|
||||
labels = create_list(:label, 3, project: project)
|
||||
issue = create(:issue, labels: labels)
|
||||
let(:labels) { create_list(:label, 3, project: create(:project)) }
|
||||
|
||||
before do
|
||||
issuable.update!(labels: labels)
|
||||
end
|
||||
|
||||
it 'triggers the issuableLabelsUpdated subscription' do
|
||||
expect(GitlabSchema.subscriptions).to receive(:trigger).with(
|
||||
'issuableLabelsUpdated',
|
||||
{ issuable_id: issue.to_gid },
|
||||
issue
|
||||
{ issuable_id: issuable.to_gid },
|
||||
issuable
|
||||
)
|
||||
|
||||
GraphqlTriggers.issuable_labels_updated(issue)
|
||||
GraphqlTriggers.issuable_labels_updated(issuable)
|
||||
end
|
||||
end
|
||||
|
||||
describe '.issuable_dates_updated' do
|
||||
it 'triggers the issuableDatesUpdated subscription' do
|
||||
work_item = create(:work_item)
|
||||
|
||||
expect(GitlabSchema.subscriptions).to receive(:trigger).with(
|
||||
'issuableDatesUpdated',
|
||||
{ issuable_id: work_item.to_gid },
|
||||
work_item
|
||||
{ issuable_id: issuable.to_gid },
|
||||
issuable
|
||||
).and_call_original
|
||||
|
||||
GraphqlTriggers.issuable_dates_updated(work_item)
|
||||
GraphqlTriggers.issuable_dates_updated(issuable)
|
||||
end
|
||||
end
|
||||
|
||||
describe '.issuable_milestone_updated' do
|
||||
it 'triggers the issuableMilestoneUpdated subscription' do
|
||||
expect(GitlabSchema.subscriptions).to receive(:trigger).with(
|
||||
'issuableMilestoneUpdated',
|
||||
{ issuable_id: issuable.to_gid },
|
||||
issuable
|
||||
).and_call_original
|
||||
|
||||
GraphqlTriggers.issuable_milestone_updated(issuable)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ RSpec.describe GitlabSchema.types['Subscription'] do
|
|||
issuable_description_updated
|
||||
issuable_labels_updated
|
||||
issuable_dates_updated
|
||||
issuable_milestone_updated
|
||||
merge_request_reviewers_updated
|
||||
merge_request_merge_status_updated
|
||||
]
|
||||
|
|
|
|||
|
|
@ -0,0 +1,29 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe 'Hashie::Mash#permitted patch' do
|
||||
let(:mash) { Hashie::Mash.new }
|
||||
|
||||
before do
|
||||
load Rails.root.join('config/initializers/hashie_mash_permitted_patch.rb')
|
||||
end
|
||||
|
||||
describe '#respond_to? with :permitted?' do
|
||||
it 'returns false' do
|
||||
expect(Gitlab::AppLogger).to receive(:info).with(
|
||||
{ message: 'Hashie::Mash#respond_to?(:permitted?)', caller: instance_of(Array) })
|
||||
|
||||
expect(mash.respond_to?(:permitted?)).to be false
|
||||
end
|
||||
end
|
||||
|
||||
describe '#permitted' do
|
||||
it 'raises ArgumentError' do
|
||||
expect(Gitlab::AppLogger).to receive(:info).with(
|
||||
{ message: 'Hashie::Mash#permitted?', caller: instance_of(Array) })
|
||||
|
||||
expect { mash.permitted? }.to raise_error(ArgumentError)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@@ -1,130 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Cookie, :clean_gitlab_redis_shared_state,
-  :clean_gitlab_redis_queues do
-  describe 'serialization' do
-    it 'can round-trip a hash' do
-      h = { 'hello' => 'world', 'foo' => 'bar' }
-      expect(described_class.deserialize(described_class.serialize(h))).to eq(h)
-    end
-
-    it 'can merge by concatenating' do
-      h1 = { 'foo' => 'bar', 'baz' => 'qux' }
-      h2 = { 'foo' => 'other bar', 'hello' => 'world' }
-      concatenated = described_class.serialize(h1) + described_class.serialize(h2)
-      expect(described_class.deserialize(concatenated)).to eq(h1.merge(h2))
-    end
-  end
-
-  shared_examples 'with Redis persistence' do
-    let(:cookie) { described_class.new(key) }
-    let(:key) { 'redis_key' }
-    let(:hash) { { 'hello' => 'world' } }
-
-    describe '.set' do
-      subject { cookie.set(hash, expiry) }
-
-      let(:expiry) { 10 }
-
-      it 'stores the hash' do
-        expect(subject).to be_truthy
-        with_redis do |redis|
-          expect(redis.get(key)).to eq("hello=world\n")
-          expect(redis.ttl(key)).to be_within(1).of(expiry)
-        end
-      end
-
-      context 'when the key is set' do
-        before do
-          with_redis { |r| r.set(key, 'foobar') }
-        end
-
-        it 'does not overwrite existing keys' do
-          expect(subject).to be_falsey
-          with_redis do |redis|
-            expect(redis.get(key)).to eq('foobar')
-            expect(redis.ttl(key)).to eq(-1)
-          end
-        end
-      end
-    end
-
-    describe '.get' do
-      subject { cookie.get }
-
-      it { expect(subject).to eq({}) }
-
-      context 'when the key is set' do
-        before do
-          with_redis { |r| r.set(key, "hello=world\n") }
-        end
-
-        it { expect(subject).to eq({ 'hello' => 'world' }) }
-      end
-    end
-
-    describe '.append' do
-      subject { cookie.append(hash) }
-
-      it 'does not create the key' do
-        subject
-
-        with_redis do |redis|
-          expect(redis.get(key)).to eq(nil)
-        end
-      end
-
-      context 'when the key exists' do
-        before do
-          with_redis { |r| r.set(key, 'existing data', ex: 10) }
-        end
-
-        it 'appends without modifying ttl' do
-          subject
-
-          with_redis do |redis|
-            expect(redis.get(key)).to eq("existing datahello=world\n")
-            expect(redis.ttl(key)).to be_within(1).of(10)
-          end
-        end
-      end
-    end
-  end
-
-  context 'with multi-store feature flags turned on' do
-    def with_redis(&block)
-      Gitlab::Redis::DuplicateJobs.with(&block)
-    end
-
-    it 'use Gitlab::Redis::DuplicateJobs.with' do
-      expect(Gitlab::Redis::DuplicateJobs).to receive(:with).and_call_original
-      expect(Sidekiq).not_to receive(:redis)
-
-      described_class.new('hello').get
-    end
-
-    it_behaves_like 'with Redis persistence'
-  end
-
-  context 'when both multi-store feature flags are off' do
-    def with_redis(&block)
-      Sidekiq.redis(&block)
-    end
-
-    before do
-      stub_feature_flags(use_primary_and_secondary_stores_for_duplicate_jobs: false)
-      stub_feature_flags(use_primary_store_as_default_for_duplicate_jobs: false)
-    end
-
-    it 'use Sidekiq.redis' do
-      expect(Sidekiq).to receive(:redis).and_call_original
-      expect(Gitlab::Redis::DuplicateJobs).not_to receive(:with)
-
-      described_class.new('hello').get
-    end
-
-    it_behaves_like 'with Redis persistence'
-  end
-end
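The deleted spec covered a hand-rolled "key=value\n" cookie codec with append-based merging. The replacement, exercised in the hunks below, stores the whole cookie as a single msgpack-encoded hash, so round-tripping becomes a library concern rather than a custom parser. A quick illustration with the msgpack gem, which the updated specs already call directly via MessagePack.unpack:

    require 'msgpack'

    cookie = { 'jid' => '123', 'offsets' => {}, 'wal_locations' => {} }

    packed = cookie.to_msgpack           # compact binary payload, written with one SET
    MessagePack.unpack(packed) == cookie # => true; no bespoke serializer to maintain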
@@ -527,9 +527,8 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
   end

   context 'with Redis cookies' do
-    let(:cookie_key) do
-      "#{idempotency_key}:cookie"
-    end
+    let(:cookie_key) { "#{idempotency_key}:cookie" }
+    let(:cookie) { get_redis_msgpack(cookie_key) }

     def with_redis(&block)
       Gitlab::Redis::DuplicateJobs.with(&block)
@@ -541,15 +540,16 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi

     shared_examples 'sets Redis keys with correct TTL' do
       it "adds an idempotency key with correct ttl" do
-        expected_cookie = <<~COOKIE
-          jid=123
-          existing_wal_location:main=#{wal_locations['main']}
-          existing_wal_location:ci=#{wal_locations['ci']}
-        COOKIE
-        expect { duplicate_job.check! }
-          .to change { read_idempotency_key_with_ttl(cookie_key) }
-          .from([nil, -2])
-          .to([expected_cookie, be_within(1).of(expected_ttl)])
+        expected_cookie = {
+          'jid' => '123',
+          'offsets' => {},
+          'wal_locations' => {},
+          'existing_wal_locations' => wal_locations
+        }
+
+        duplicate_job.check!
+        expect(cookie).to eq(expected_cookie)
+        expect(redis_ttl(cookie_key)).to be_within(1).of(expected_ttl)
       end
     end

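The rewritten expectation spells out the full shape of a freshly written cookie. Conceptually, check! performs a create-if-absent write with a TTL, matching the "does not overwrite existing keys" behaviour specified elsewhere in these specs. A sketch of that Redis interaction; the variable names here are illustrative, not the actual implementation:

    # Illustrative sketch of the write that check! is specified to perform:
    # create the cookie only if no duplicate already holds the key (NX),
    # and bound its lifetime with a TTL (EX). my_jid and
    # current_wal_locations are hypothetical placeholders.
    cookie = {
      'jid' => my_jid,
      'offsets' => {},
      'wal_locations' => {},
      'existing_wal_locations' => current_wal_locations
    }

    redis.set(cookie_key, cookie.to_msgpack, nx: true, ex: expected_ttl)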
@@ -576,15 +576,17 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi

     context 'when there was already a job with same arguments in the same queue' do
       before do
-        set_idempotency_key(cookie_key, "jid=existing-jid\n")
+        set_idempotency_key(cookie_key, existing_cookie.to_msgpack)
       end

+      let(:existing_cookie) { { 'jid' => 'existing-jid' } }
+
       it { expect(duplicate_job.check!).to eq('existing-jid') }

       it "does not change the existing key's TTL" do
         expect { duplicate_job.check! }
-          .not_to change { read_idempotency_key_with_ttl(cookie_key) }
-          .from(["jid=existing-jid\n", -1])
+          .not_to change { redis_ttl(cookie_key) }
+          .from(-1)
       end

       it 'sets the existing jid' do
@@ -601,20 +603,22 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
           { main: ::ActiveRecord::Base,
             ci: ::ActiveRecord::Base })

-        set_idempotency_key(cookie_key, initial_cookie)
+        with_redis { |r| r.set(cookie_key, initial_cookie.to_msgpack, ex: expected_ttl) }

         # read existing_wal_locations
         duplicate_job.check!
       end

       let(:initial_cookie) do
-        <<~COOKIE
-          jid=foobar
-          existing_wal_location:main=0/D525E3A0
-          existing_wal_location:ci=AB/12340
-        COOKIE
+        {
+          'jid' => 'foobar',
+          'existing_wal_locations' => { 'main' => '0/D525E3A0', 'ci' => 'AB/12340' },
+          'offsets' => {},
+          'wal_locations' => {}
+        }
       end

+      let(:expected_ttl) { 123 }
       let(:new_wal) do
         {
           # offset is relative to `existing_wal`
@@ -626,34 +630,76 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
       let(:wal_locations) { new_wal.transform_values { |v| v[:location] } }

       it 'stores a wal location to redis with an offset relative to existing wal location' do
-        expected_cookie = initial_cookie + <<~COOKIE
-          wal_location:main:#{new_wal['main'][:offset]}=#{new_wal['main'][:location]}
-          wal_location:ci:#{new_wal['ci'][:offset]}=#{new_wal['ci'][:location]}
-        COOKIE
+        duplicate_job.update_latest_wal_location!

-        expect { duplicate_job.update_latest_wal_location! }
-          .to change { read_idempotency_key_with_ttl(cookie_key) }
-          .from([initial_cookie, -1])
-          .to([expected_cookie, -1])
+        expect(cookie['wal_locations']).to eq(wal_locations)
+        expect(cookie['offsets']).to eq(new_wal.transform_values { |v| v[:offset].to_i })
+        expect(redis_ttl(cookie_key)).to be_within(1).of(expected_ttl)
       end
     end

+    describe 'UPDATE_WAL_COOKIE_SCRIPT' do
+      subject do
+        with_redis do |redis|
+          redis.eval(described_class::UPDATE_WAL_COOKIE_SCRIPT, keys: [cookie_key], argv: argv)
+        end
+      end
+
+      let(:argv) { ['c1', 1, 'loc1', 'c2', 2, 'loc2', 'c3', 3, 'loc3'] }
+
+      it 'does not create the key' do
+        subject
+
+        expect(with_redis { |r| r.get(cookie_key) }).to eq(nil)
+      end
+
+      context 'when the key exists' do
+        let(:existing_cookie) { { 'offsets' => {}, 'wal_locations' => {} } }
+        let(:expected_ttl) { 123 }
+
+        before do
+          with_redis { |r| r.set(cookie_key, existing_cookie.to_msgpack, ex: expected_ttl) }
+        end
+
+        it 'updates all connections' do
+          subject
+
+          expect(cookie['wal_locations']).to eq({ 'c1' => 'loc1', 'c2' => 'loc2', 'c3' => 'loc3' })
+          expect(cookie['offsets']).to eq({ 'c1' => 1, 'c2' => 2, 'c3' => 3 })
+        end
+
+        it 'preserves the ttl' do
+          subject
+
+          expect(redis_ttl(cookie_key)).to be_within(1).of(expected_ttl)
+        end
+
+        context 'and low offsets' do
+          let(:existing_cookie) do
+            {
+              'offsets' => { 'c1' => 0, 'c2' => 2 },
+              'wal_locations' => { 'c1' => 'loc1old', 'c2' => 'loc2old' }
+            }
+          end
+
+          it 'updates only some connections' do
+            subject
+
+            expect(cookie['wal_locations']).to eq({ 'c1' => 'loc1', 'c2' => 'loc2old', 'c3' => 'loc3' })
+            expect(cookie['offsets']).to eq({ 'c1' => 1, 'c2' => 2, 'c3' => 3 })
+          end
+        end
+      end
+    end
+
     describe '#latest_wal_locations' do
       context 'when job was deduplicated and wal locations were already persisted' do
         before do
-          cookie = <<~COOKIE
-            jid=foobar
-            wal_location:main:1=main1
-            wal_location:ci:2:=ci2
-            wal_location:main:5=main5
-            wal_location:ci:6=ci6
-            wal_location:main:3=main3
-            wal_location:ci:4=ci4
-          COOKIE
+          cookie = { 'wal_locations' => { 'main' => 'abc', 'ci' => 'def' } }.to_msgpack
           set_idempotency_key(cookie_key, cookie)
         end

-        it { expect(duplicate_job.latest_wal_locations).to eq({ 'main' => 'main5', 'ci' => 'ci6' }) }
+        it { expect(duplicate_job.latest_wal_locations).to eq({ 'main' => 'abc', 'ci' => 'def' }) }
       end

       context 'when job is not deduplication and wal locations were not persisted' do
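The 'low offsets' example pins down the merge rule the Lua script must implement: a connection's WAL location is replaced only when the incoming offset is strictly greater than the stored one. The same logic rendered in Ruby for readability; this is a sketch of what the spec demands, not the script itself:

    # Ruby rendering of the compare-and-swap UPDATE_WAL_COOKIE_SCRIPT is
    # specified to perform. argv is flat triples: connection, offset, location.
    def merge_wal_locations(cookie, argv)
      argv.each_slice(3) do |connection, offset, location|
        stored = cookie['offsets'][connection]
        next if stored && stored >= offset.to_i

        cookie['offsets'][connection] = offset.to_i
        cookie['wal_locations'][connection] = location
      end

      cookie
    end

    # With offsets { 'c1' => 0, 'c2' => 2 } already stored, the spec's argv
    # updates c1 (1 > 0), skips c2 (2 is not > 2) and inserts c3.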
@@ -668,30 +714,22 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi

     context 'when the key exists in redis' do
       before do
-        set_idempotency_key(cookie_key, "jid=existing-jid\n")
+        set_idempotency_key(cookie_key, "garbage")
       end

-      shared_examples 'deleting the duplicate job' do
       shared_examples 'deleting keys from redis' do |key_name|
         it "removes the #{key_name} from redis" do
           expect { duplicate_job.delete! }
-            .to change { read_idempotency_key_with_ttl(key) }
-            .from([from_value, -1])
-            .to([nil, -2])
-          end
-        end
-
-        shared_examples 'does not delete key from redis' do |key_name|
-          it "does not remove the #{key_name} from redis" do
-            expect { duplicate_job.delete! }
-              .to not_change { read_idempotency_key_with_ttl(key) }
-              .from([from_value, -1])
+            .to change { with_redis { |r| r.get(key) } }
+            .from(from_value)
+            .to(nil)
         end
       end

       it_behaves_like 'deleting keys from redis', 'cookie key' do
         let(:key) { cookie_key }
-        let(:from_value) { "jid=existing-jid\n" }
+        let(:from_value) { "garbage" }
       end
     end

@@ -730,8 +768,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
     it 'sets the key in Redis' do
       duplicate_job.set_deduplicated_flag!

-      cookie = with_redis { |redis| redis.get(cookie_key) }
-      expect(cookie).to include("\ndeduplicated=1\n")
+      expect(cookie['deduplicated']).to eq('1')
     end

     it 'sets, gets and cleans up the deduplicated flag' do
@@ -754,9 +791,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
       duplicate_job.check!
       duplicate_job.set_deduplicated_flag!

-      cookie = with_redis { |redis| redis.get(cookie_key) }
-
-      expect(cookie).not_to include('deduplicated=')
+      expect(cookie['deduplicated']).to eq(nil)
     end

     it 'does not set the deduplicated flag' do
@@ -783,7 +818,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
     end

     it 'returns true if the existing jid is different from the job jid' do
-      set_idempotency_key(cookie_key, "jid=a different jid\n")
+      set_idempotency_key(cookie_key, { 'jid' => 'a different jid' }.to_msgpack)
       duplicate_job.check!

       expect(duplicate_job.duplicate?).to be(true)
@@ -794,13 +829,12 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
       with_redis { |r| r.set(key, value) }
     end

-    def read_idempotency_key_with_ttl(key)
-      with_redis do |redis|
-        redis.pipelined do |p|
-          p.get(key)
-          p.ttl(key)
-        end
-      end
+    def get_redis_msgpack(key)
+      MessagePack.unpack(with_redis { |redis| redis.get(key) })
     end

+    def redis_ttl(key)
+      with_redis { |redis| redis.ttl(key) }
+    end
   end
@@ -104,10 +104,33 @@ RSpec.describe Issues::UpdateService, :mailer do
         expect(issue.issue_customer_relations_contacts.last.contact).to eq contact
       end

-      it 'updates issue milestone when passing `milestone` param' do
-        update_issue(milestone: milestone)
+      context 'when updating milestone' do
+        before do
+          update_issue({ milestone: nil })
+        end

-        expect(issue.milestone).to eq milestone
+        it 'updates issue milestone when passing `milestone` param' do
+          expect { update_issue({ milestone: milestone }) }
+            .to change(issue, :milestone).to(milestone).from(nil)
+        end
+
+        it "triggers 'issuableMilestoneUpdated'" do
+          expect(GraphqlTriggers).to receive(:issuable_milestone_updated).with(issue).and_call_original
+
+          update_issue({ milestone: milestone })
+        end
+
+        context 'when milestone remains unchanged' do
+          before do
+            update_issue({ title: 'abc', milestone: milestone })
+          end
+
+          it "does not trigger 'issuableMilestoneUpdated'" do
+            expect(GraphqlTriggers).not_to receive(:issuable_milestone_updated)
+
+            update_issue({ milestone: milestone })
+          end
+        end
       end

       context 'when sentry identifier is given' do
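The new examples assert that the trigger fires only on an actual milestone change: assigning the same milestone again must not broadcast. A hedged sketch of the guard they imply; this is a hypothetical helper, and the committed code path inside Issues::UpdateService may be organized differently:

    # Hypothetical guard, inferred from the spec: broadcast the subscription
    # only when the update actually changed the milestone assignment.
    def trigger_milestone_updated(issue, old_milestone_id)
      return if issue.milestone_id == old_milestone_id

      GraphqlTriggers.issuable_milestone_updated(issue)
    end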
@@ -311,6 +311,34 @@ RSpec.describe WorkItems::UpdateService do
         end
       end
     end
+
+    context 'for milestone widget' do
+      let_it_be(:milestone) { create(:milestone, project: project) }
+
+      let(:widget_params) { { milestone_widget: { milestone_id: milestone.id } } }
+
+      context 'when milestone is updated' do
+        it "triggers 'issuableMilestoneUpdated'" do
+          expect(work_item.milestone).to eq(nil)
+          expect(GraphqlTriggers).to receive(:issuable_milestone_updated).with(work_item).and_call_original
+
+          update_work_item
+        end
+      end
+
+      context 'when milestone remains unchanged' do
+        before do
+          update_work_item
+        end
+
+        it "does not trigger 'issuableMilestoneUpdated'" do
+          expect(work_item.milestone).to eq(milestone)
+          expect(GraphqlTriggers).not_to receive(:issuable_milestone_updated)
+
+          update_work_item
+        end
+      end
+    end
   end

   describe 'label updates' do