Add latest changes from gitlab-org/gitlab@master
parent f1c788bb18
commit d5ff067431

@@ -595,7 +595,6 @@ Layout/ArgumentAlignment:
- 'ee/app/graphql/ee/types/ci/runner_countable_connection_type.rb'
- 'ee/app/graphql/ee/types/deployment_type.rb'
- 'ee/app/graphql/ee/types/environment_type.rb'
- 'ee/app/graphql/ee/types/group_type.rb'
- 'ee/app/graphql/ee/types/issues/negated_issue_filter_input_type.rb'
- 'ee/app/graphql/ee/types/namespace_type.rb'
- 'ee/app/graphql/ee/types/permission_types/deployment.rb'

@@ -235,7 +235,6 @@ Lint/UnusedBlockArgument:
- 'lib/google_api/cloud_platform/client.rb'
- 'lib/peek/views/gitaly.rb'
- 'lib/security/ci_configuration/sast_build_action.rb'
- 'lib/tasks/cleanup.rake'
- 'lib/tasks/contracts/merge_requests.rake'
- 'lib/tasks/contracts/pipeline_schedules.rake'
- 'lib/tasks/contracts/pipelines.rake'

@@ -1098,6 +1098,7 @@ RSpec/BeforeAllRoleAssignment:
- 'spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/commit_notes_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/legacy_references_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb'

@@ -2688,7 +2688,6 @@ RSpec/FeatureCategory:
- 'spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/stage_spec.rb'
- 'spec/lib/bulk_imports/retry_pipeline_error_spec.rb'
- 'spec/lib/bulk_imports/users_mapper_spec.rb'
- 'spec/lib/constraints/admin_constrainer_spec.rb'

@@ -905,7 +905,6 @@ Style/IfUnlessModifier:
- 'lib/sidebars/projects/menus/settings_menu.rb'
- 'lib/system_check/app/systemd_unit_files_or_init_script_up_to_date_check.rb'
- 'lib/system_check/init_helpers.rb'
- 'lib/tasks/cleanup.rake'
- 'lib/tasks/eslint.rake'
- 'lib/tasks/gitlab/assets.rake'
- 'lib/tasks/gitlab/cleanup.rake'

@@ -3257,7 +3257,6 @@ Style/InlineDisableAnnotation:
- 'spec/support/helpers/stub_snowplow.rb'
- 'spec/support/helpers/wait_for_requests.rb'
- 'spec/support/matchers/event_store.rb'
- 'spec/support/rspec_order.rb'
- 'spec/support/shared_contexts/controllers/ambiguous_ref_controller_shared_context.rb'
- 'spec/support/shared_contexts/disable_user_tracking.rb'
- 'spec/support/shared_contexts/policies/project_policy_table_shared_context.rb'

@@ -1 +1 @@
14.30.0
14.30.1

@@ -419,6 +419,7 @@ export default {
<div
:id="`diff-content-${file.file_hash}`"
:class="hasBodyClasses.contentByHash"
class="diff-content"
data-testid="content-area"
>
<gl-alert

@@ -22,6 +22,8 @@ import PinnedLinks from './pinned_links.vue';
import StickyHeader from './sticky_header.vue';
import TitleComponent from './title.vue';

const STICKY_HEADER_VISIBLE_CLASS = 'issuable-sticky-header-visible';

export default {
components: {
HeaderActions,

@@ -322,6 +324,7 @@ export default {
eventHub.$off('close.form', this.closeForm);
eventHub.$off('open.form', this.openForm);
window.removeEventListener('beforeunload', this.handleBeforeUnloadEvent);
this.hideStickyHeader();
},
methods: {
handleBeforeUnloadEvent(e) {

@@ -472,6 +475,8 @@ export default {

hideStickyHeader() {
this.isStickyHeaderShowing = false;

document.body.classList?.remove(STICKY_HEADER_VISIBLE_CLASS);
},

showStickyHeader() {

@@ -479,6 +484,8 @@ export default {
if (this.$refs.title.$el.offsetTop < window.pageYOffset) {
this.isStickyHeaderShowing = true;
}

document.body.classList?.add(STICKY_HEADER_VISIBLE_CLASS);
},

handleSaveDescription(description) {

@@ -48,7 +48,7 @@ export default {
:is="component"
:aria-label="ariaLabel"
:href="href"
class="counter gl-display-block gl-flex-grow-1 gl-text-center gl-py-3 gl-bg-gray-10 gl-rounded-base gl-text-gray-900 gl-border-none gl-inset-border-1-gray-a-08 gl-line-height-1 gl-font-sm gl-hover-text-gray-900 gl-hover-text-decoration-none gl-focus--focus"
class="counter gl-display-block gl-flex-grow-1 gl-text-center gl-py-3 gl-rounded-base gl-border-none gl-inset-border-1-gray-a-08 gl-line-height-1 gl-font-sm gl-hover-text-decoration-none gl-focus--focus"
>
<gl-icon aria-hidden="true" :name="icon" />
<span v-if="count" aria-hidden="true" class="gl-ml-1">{{ formattedCount }}</span>

@@ -215,7 +215,11 @@ export default {
@hidden="trackDropdownToggle(false)"
>
<template #toggle>
<gl-button category="tertiary" icon="question-o" class="btn-with-notification">
<gl-button
category="tertiary"
icon="question-o"
class="super-sidebar-help-center-toggle btn-with-notification"
>
<span
v-if="showWhatsNewNotification"
data-testid="notification-dot"

@@ -229,7 +229,7 @@ export default {
>
<div
:class="[isActive ? 'gl-opacity-10' : 'gl-opacity-0']"
class="active-indicator gl-bg-blue-500 gl-absolute gl-left-2 gl-top-2 gl-bottom-2 gl-transition-slow"
class="active-indicator gl-absolute gl-left-2 gl-top-2 gl-bottom-2 gl-transition-slow"
aria-hidden="true"
:style="activeIndicatorStyle"
data-testid="active-indicator"

@@ -361,7 +361,7 @@ export default {
<template>
<div
ref="gl-form"
class="js-vue-markdown-field md-area position-relative gfm-form gl-overflow-hidden"
class="js-vue-markdown-field md-area position-relative gfm-form"
:data-uploads-path="uploadsPath"
>
<markdown-header

@@ -254,7 +254,10 @@ export default {
</script>

<template>
<div class="md-header gl-border-b gl-border-gray-100 gl-px-3">
<div
class="md-header gl-bg-white gl-border-b gl-border-gray-100 gl-rounded-lg gl-rounded-bottom-left-none gl-rounded-bottom-right-none gl-px-3"
:class="{ 'md-header-preview': previewMarkdown }"
>
<div class="gl-display-flex gl-align-items-center gl-flex-wrap">
<div
data-testid="md-header-toolbar"

@@ -1,6 +1,3 @@
$brand-logo-light-background: #e0dfe5;
$brand-logo-dark-background: #53515b;

.brand-logo {
display: inline-block;
@include gl-rounded-base;

@@ -16,14 +13,4 @@ $brand-logo-dark-background: #53515b;
&:active {
@include gl-focus;
}

&:hover,
&:focus,
&:active {
background-color: $brand-logo-light-background;

.gl-dark & {
background-color: $brand-logo-dark-background;
}
}
}

@@ -1,3 +1,5 @@
$diff-file-header: 41px;

// Common
.diff-file {
margin-bottom: $gl-padding;

@@ -38,6 +40,10 @@

&.is-sidebar-moved {
top: calc(#{$calc-application-header-height} + #{$mr-sticky-header-height} - #{$gl-border-size-1});

+ .diff-content .md-header-preview {
top: calc(#{$calc-application-header-height} + #{$mr-sticky-header-height} + #{$diff-file-header} - #{$gl-border-size-1});
}
}

&::before {

@@ -1,5 +1,5 @@
@mixin active-toggle {
background-color: $gray-50 !important;
@mixin active-toggle($background-color: var(--super-sidebar-user-bar-button-hover-bg)) {
background-color: $background-color !important;
mix-blend-mode: multiply;

.gl-dark & {

@@ -12,7 +12,7 @@ $super-sidebar-transition-hint-duration: $super-sidebar-transition-duration / 4;

@mixin notification-dot($color, $size, $top, $left) {
background-color: $color;
border: 2px solid $gray-10; // Same as the sidebar's background color.
border: 2px solid var(--super-sidebar-bg);
position: absolute;
height: $size;
width: $size;

@@ -29,13 +29,25 @@ $super-sidebar-transition-hint-duration: $super-sidebar-transition-duration / 4;
}

.super-sidebar {
--super-sidebar-bg: var(--gray-10, #{$gray-10});
--super-sidebar-primary: var(--blue-500, #{$blue-500});
--super-sidebar-notification-dot: var(--blue-500, #{$blue-500});
--super-sidebar-user-bar-bg: #{$t-gray-a-04};
--super-sidebar-user-bar-button-bg: var(--gray-10, #{$gray-10});
--super-sidebar-user-bar-button-hover-bg: var(--gray-50, #{$gray-50});
--super-sidebar-user-bar-button-color: var(--gray-900, #{$gray-900});
--super-sidebar-user-bar-button-hover-color: var(--gray-900, #{$gray-900});
// Separate values provided to use `---gray-600` in dark mode
--super-sidebar-user-bar-button-icon-color: var(--gray-600, #{$gray-500});
--super-sidebar-user-bar-button-icon-hover-color: var(--gray-700, #{$gray-700});

display: flex;
flex-direction: column;
position: fixed;
top: $calc-application-bars-height;
bottom: $calc-application-footer-height;
left: 0;
background-color: var(--gray-10, $gray-10);
background-color: var(--super-sidebar-bg);
border-right: 1px solid $t-gray-a-08;
transform: translate3d(0, 0, 0);
width: $super-sidebar-width;

@@ -55,7 +67,7 @@ $super-sidebar-transition-hint-duration: $super-sidebar-transition-duration / 4;
}

.user-bar {
background-color: $t-gray-a-04;
background-color: var(--super-sidebar-user-bar-bg);

.user-bar-item {
@include gl-rounded-base;

@@ -76,39 +88,77 @@ $super-sidebar-transition-hint-duration: $super-sidebar-transition-duration / 4;
@include active-toggle;
}
}

.brand-logo {
&:hover,
&:focus {
background-color: var(--super-sidebar-user-bar-button-hover-bg);
mix-blend-mode: multiply;

.gl-dark & {
mix-blend-mode: screen;
}
}
}

.btn-default-tertiary,
.counter {
color: var(--super-sidebar-user-bar-button-color);

.gl-icon {
color: var(--super-sidebar-user-bar-button-icon-color) !important;
}

&:hover,
&:focus {
background-color: var(--super-sidebar-user-bar-button-hover-bg) !important;
color: var(--super-sidebar-user-bar-button-hover-color);
}
}

.counter {
background-color: var(--super-sidebar-user-bar-button-bg);

&:hover,
&:focus,
&[aria-expanded='true'] {
background-color: var(--super-sidebar-user-bar-button-hover-bg);
border-color: transparent;
mix-blend-mode: multiply;

.gl-icon {
color: var(--super-sidebar-user-bar-button-icon-hover-color);
}

.gl-dark & {
mix-blend-mode: screen;
}
}

&:hover,
&[aria-expanded='true'] {
box-shadow: none;
}
}
}

.counter .gl-icon,
.item-icon {
color: var(--gray-600, $gray-500);
}

.counter:hover,
.counter:focus,
.counter[aria-expanded='true'] {
background-color: $gray-50;
border-color: transparent;
mix-blend-mode: multiply;
color: $gray-500;

.gl-dark & {
mix-blend-mode: screen;
}

.gl-icon {
color: var(--gray-700, $gray-700);
color: $gray-600;
}
}

.counter:hover,
.counter[aria-expanded='true'] {
box-shadow: none;
.active-indicator {
background-color: var(--super-sidebar-primary);
}

.btn-with-notification {
position: relative;

.notification-dot-info {
@include notification-dot($blue-500, 9px, 5px, 22px);
@include notification-dot(var(--super-sidebar-notification-dot), 9px, 5px, 22px);
}

.notification-dot-warning {

@@ -118,7 +168,7 @@ $super-sidebar-transition-hint-duration: $super-sidebar-transition-duration / 4;
&:hover,
&:focus {
.notification {
border-color: $gray-50; // Same as the button's hover background color.
background-color: var(--super-sidebar-user-bar-button-hover-bg);
}
}
}

@@ -137,6 +187,10 @@ $super-sidebar-transition-hint-duration: $super-sidebar-transition-duration / 4;
}
}

.super-sidebar-help-center-toggle[aria-expanded='true'] {
@include active-toggle($gray-50);
}

#trial-status-sidebar-widget:hover {
text-decoration: none;
@include gl-text-contrast-light;

@@ -1126,6 +1126,10 @@ $tabs-holder-z-index: 250;

.submit-review-dropdown {
margin-left: $grid-size;

.md-header {
top: -$gl-spacing-scale-2;
}
}
}

@@ -1214,3 +1218,7 @@ $tabs-holder-z-index: 250;
@include gl-rounded-top-right-none;
}
}

.merge-request-overview .md-header {
top: calc(#{$calc-application-header-height} + #{$mr-sticky-header-height});
}

@@ -268,3 +268,25 @@ ul.related-merge-requests > li gl-emoji {
.issuable-header-slide-leave-to {
transform: translateY(-100%);
}

.issuable-sticky-header-visible {
--issuable-sticky-header-height: 40px;
}

.md-header-preview {
z-index: 1;
position: sticky;
top: calc(#{$calc-application-header-height} + var(--issuable-sticky-header-height, 0px));
}

.detail-page-description .md-header {
top: $calc-application-header-height;
}

.gl-drawer .md-header {
top: 0;
}

.gl-modal .md-header {
top: -$gl-padding-8;
}

@@ -264,11 +264,6 @@ $system-note-icon-m-left: $avatar-m-left + $icon-size-diff / $avatar-m-ratio;
display: block;
position: relative;

.timeline-discussion-body {
overflow-x: auto;
overflow-y: hidden;
}

.diff-content {
overflow: visible;
padding: 0;

@@ -330,8 +325,6 @@ $system-note-icon-m-left: $avatar-m-left + $icon-size-diff / $avatar-m-ratio;

.note-body {
padding: 0 $gl-padding-8 $gl-padding-8;
overflow-x: auto;
overflow-y: hidden;

.note-text {
word-wrap: break-word;

@@ -747,8 +740,6 @@ $system-note-icon-m-left: $avatar-m-left + $icon-size-diff / $avatar-m-ratio;
}

.timeline-content {
overflow-x: auto;
overflow-y: hidden;
border-radius: $gl-border-radius-base;
padding: $gl-padding-8 !important;
@include gl-border;

@@ -273,61 +273,32 @@
$theme-color,
$theme-color-darkest,
) {
--sidebar-background: #{mix(white, $theme-color-lightest, 50%)};
--transparent-white-16: rgba(255, 255, 255, 0.16);
--transparent-white-24: rgba(255, 255, 255, 0.24);

.super-sidebar {
background-color: var(--sidebar-background);
}
--super-sidebar-bg: #{mix(white, $theme-color-lightest, 50%)};
--super-sidebar-user-bar-bg: #{$theme-color};
--super-sidebar-user-bar-button-bg: rgba(255, 255, 255, 0.16);
--super-sidebar-user-bar-button-hover-bg: rgba(255, 255, 255, 0.24);
--super-sidebar-user-bar-button-color: #{$theme-color-lightest};
--super-sidebar-user-bar-button-hover-color: #{$white};
--super-sidebar-user-bar-button-icon-color: #{$theme-color-light};
--super-sidebar-user-bar-button-icon-hover-color: #{$theme-color-light};
--super-sidebar-primary: #{$theme-color};
--super-sidebar-notification-dot: #{$theme-color-darkest};

.super-sidebar .user-bar {
background-color: $theme-color;

.counter {
background-color: var(--transparent-white-16) !important;
}

.brand-logo,
.btn-default-tertiary,
.counter {
color: $theme-color-lightest;
mix-blend-mode: normal;

&:hover,
&:focus {
background-color: var(--transparent-white-24) !important;
color: $white;
}

.gl-icon {
color: $theme-color-light;
.user-bar {
.brand-logo,
.btn-default-tertiary,
.counter {
mix-blend-mode: normal;
}
}
}

.super-sidebar hr {
mix-blend-mode: multiply;
}

.btn-with-notification {
&:hover,
&:focus {
hr {
mix-blend-mode: multiply;
}

.notification-dot-info {
background-color: $theme-color-darkest;
border-color: $theme-color-lightest;

.super-sidebar-context-header {
color: var(--super-sidebar-primary);
}
}

.active-indicator {
background-color: $theme-color;
}

.super-sidebar-context-header {
color: $theme-color;
}
}

@@ -45,6 +45,10 @@ class AuditEvent < ApplicationRecord
# https://gitlab.com/groups/gitlab-org/-/epics/2765
after_validation :parallel_persist

def self.supported_keyset_orderings
{ id: [:desc] }
end

def self.order_by(method)
case method.to_s
when 'created_asc'

@@ -217,6 +217,10 @@ module Ci
job_variables_attributes resource_group scheduling_type
ci_stage partition_id id_tokens].freeze
end

def supported_keyset_orderings
{ id: [:desc] }
end
end

state_machine :status do

@@ -6,11 +6,6 @@ module Ci
include Ci::Partitionable
include Ci::NamespacedModelName
include SafelyChangeColumnDefault
include IgnorableColumns

ignore_columns [
:pipeline_id_convert_to_bigint, :source_pipeline_id_convert_to_bigint
], remove_with: '16.6', remove_after: '2023-10-22'

columns_changing_default :partition_id, :source_partition_id

@@ -19,42 +19,6 @@ module Ci
extend ActiveSupport::Concern
include ::Gitlab::Utils::StrongMemoize

module Testing
InclusionError = Class.new(StandardError)

PARTITIONABLE_MODELS = %w[
CommitStatus
Ci::BuildMetadata
Ci::BuildNeed
Ci::BuildReportResult
Ci::BuildRunnerSession
Ci::BuildTraceChunk
Ci::BuildTraceMetadata
Ci::BuildPendingState
Ci::JobAnnotation
Ci::JobArtifact
Ci::JobVariable
Ci::Pipeline
Ci::PendingBuild
Ci::RunningBuild
Ci::RunnerManagerBuild
Ci::PipelineVariable
Ci::Sources::Pipeline
Ci::Stage
Ci::UnitTestFailure
].freeze

def self.check_inclusion(klass)
return if PARTITIONABLE_MODELS.include?(klass.name)

raise Partitionable::Testing::InclusionError,
"#{klass} must be included in PARTITIONABLE_MODELS"

rescue InclusionError => e
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
end
end

included do
Partitionable::Testing.check_inclusion(self)

@@ -0,0 +1,47 @@
# frozen_string_literal: true

module Ci
module Partitionable
module Testing
InclusionError = Class.new(StandardError)

PARTITIONABLE_MODELS = %w[
CommitStatus
Ci::BuildMetadata
Ci::BuildNeed
Ci::BuildReportResult
Ci::BuildRunnerSession
Ci::BuildTraceChunk
Ci::BuildTraceMetadata
Ci::BuildPendingState
Ci::JobAnnotation
Ci::JobArtifact
Ci::JobVariable
Ci::Pipeline
Ci::PendingBuild
Ci::RunningBuild
Ci::RunnerManagerBuild
Ci::PipelineVariable
Ci::Sources::Pipeline
Ci::Stage
Ci::UnitTestFailure
].freeze

def self.check_inclusion(klass)
return if partitionable_models.include?(klass.name)

raise Partitionable::Testing::InclusionError,
"#{klass} must be included in PARTITIONABLE_MODELS"

rescue InclusionError => e
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
end

def self.partitionable_models
PARTITIONABLE_MODELS
end
end
end
end

Ci::Partitionable::Testing.prepend_mod

@@ -29,6 +29,10 @@ class Group < Namespace
'Group'
end

def self.supported_keyset_orderings
{ name: [:asc] }
end

has_many :all_group_members, -> { where(requested_at: nil) }, dependent: :destroy, as: :source, class_name: 'GroupMember' # rubocop:disable Cop/ActiveRecordDependent
has_many :group_members, -> { where(requested_at: nil).where.not(members: { access_level: Gitlab::Access::MINIMAL_ACCESS }) }, dependent: :destroy, as: :source # rubocop:disable Cop/ActiveRecordDependent
has_many :namespace_members, -> { where(requested_at: nil).where.not(members: { access_level: Gitlab::Access::MINIMAL_ACCESS }).unscope(where: %i[source_id source_type]) },

@@ -9,4 +9,8 @@ class Packages::BuildInfo < ApplicationRecord
scope :order_by_pipeline_id, -> (direction) { order(pipeline_id: direction) }
scope :with_pipeline_id_less_than, -> (pipeline_id) { where("#{table_name}.pipeline_id < ?", pipeline_id) }
scope :with_pipeline_id_greater_than, -> (pipeline_id) { where("#{table_name}.pipeline_id > ?", pipeline_id) }

def self.supported_keyset_orderings
{ id: [:desc] }
end
end

@@ -4,10 +4,11 @@ module Pages
class LookupPath
include Gitlab::Utils::StrongMemoize

def initialize(project, trim_prefix: nil, domain: nil)
@project = project
def initialize(deployment:, domain: nil, trim_prefix: nil)
@deployment = deployment
@project = deployment.project
@domain = domain
@trim_prefix = trim_prefix || project.full_path
@trim_prefix = trim_prefix || @project.full_path
end

def project_id

@@ -45,11 +46,7 @@ module Pages
strong_memoize_attr :source

def prefix
if url_builder.namespace_pages?
'/'
else
"#{project.full_path.delete_prefix(trim_prefix)}/"
end
ensure_leading_and_trailing_slash(prefix_value)
end
strong_memoize_attr :prefix

@@ -73,23 +70,24 @@ module Pages

private

attr_reader :project, :trim_prefix, :domain

# project.active_pages_deployments is already loaded from the database,
# so selecting from the array to avoid N+1
# this will change with when serving multiple versions on
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/133261
def deployment
project
.active_pages_deployments
.to_a
.find { |deployment| deployment.path_prefix.blank? }
end
strong_memoize_attr :deployment
attr_reader :project, :deployment, :trim_prefix, :domain

def url_builder
Gitlab::Pages::UrlBuilder.new(project)
end
strong_memoize_attr :url_builder

def prefix_value
return deployment.path_prefix if url_builder.namespace_pages?

[project.full_path.delete_prefix(trim_prefix), deployment.path_prefix].compact.join('/')
end

def ensure_leading_and_trailing_slash(value)
value
.to_s
.then { |s| s.start_with?("/") ? s : "/#{s}" }
.then { |s| s.end_with?("/") ? s : "#{s}/" }
end
end
end

@@ -17,11 +17,7 @@ module Pages
end

def lookup_paths
projects
.map { |project| lookup_paths_for(project) }
.select(&:source) # TODO: remove in https://gitlab.com/gitlab-org/gitlab/-/issues/328715
.sort_by(&:prefix)
.reverse
projects.flat_map { |project| lookup_paths_for(project) }
end

private

@@ -29,7 +25,26 @@ module Pages
attr_reader :projects, :trim_prefix, :domain

def lookup_paths_for(project)
Pages::LookupPath.new(project, trim_prefix: trim_prefix, domain: domain)
deployments_for(project).map do |deployment|
Pages::LookupPath.new(
deployment: deployment,
trim_prefix: trim_prefix,
domain: domain)
end
end

def deployments_for(project)
if ::Gitlab::Pages.multiple_versions_enabled_for?(project)
project.active_pages_deployments
else
# project.active_pages_deployments is already loaded from the database,
# so finding from the array to avoid N+1
project
.active_pages_deployments
.to_a
.find { |deployment| deployment.path_prefix.blank? }
.then { |deployment| [deployment] }
end
end
end
end

@@ -603,6 +603,16 @@ class User < MainClusterwide::ApplicationRecord
.trusted_with_spam)
end

def self.supported_keyset_orderings
{
id: [:asc, :desc],
name: [:asc, :desc],
username: [:asc, :desc],
created_at: [:asc, :desc],
updated_at: [:asc, :desc]
}
end

strip_attributes! :name

def preferred_language

@@ -61,7 +61,7 @@ module MergeRequests
end

def ci_check_failed_check
if merge_request.actual_head_pipeline&.running?
if merge_request.actual_head_pipeline&.active?
:ci_still_running
else
check_ci_results.payload.fetch(:identifier)

@@ -2649,6 +2649,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: bulk_imports_transform_references
:worker_name: BulkImports::TransformReferencesWorker
:feature_category: :importers
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: chat_notification
:worker_name: ChatNotificationWorker
:feature_category: :integrations

@@ -0,0 +1,147 @@
# frozen_string_literal: true

module BulkImports
class TransformReferencesWorker
include ApplicationWorker

idempotent!
data_consistency :delayed
sidekiq_options retry: 3, dead: false
feature_category :importers

# rubocop: disable CodeReuse/ActiveRecord
def perform(object_ids, klass, tracker_id)
@tracker = BulkImports::Tracker.find_by_id(tracker_id)

return unless tracker

project = tracker.entity.project

klass.constantize.where(id: object_ids, project: project).find_each do |object|
transform_and_save(object)
end
end
# rubocop: enable CodeReuse/ActiveRecord

attr_reader :tracker

private

def transform_and_save(object)
body = object_body(object).dup

return if body.blank?

object.refresh_markdown_cache!

body.gsub!(username_regex(mapped_usernames), mapped_usernames)

if object_has_reference?(body)
matching_urls(object).each do |old_url, new_url|
body.gsub!(old_url, new_url) if body.include?(old_url)
end
end

object.assign_attributes(body_field(object) => body)
object.save!(touch: false) if object_body_changed?(object)

object
rescue StandardError => e
log_and_fail(e)
end

def object_body(object)
call_object_method(object)
end

def object_body_changed?(object)
call_object_method(object, suffix: '_changed?')
end

def call_object_method(object, suffix: nil)
method = body_field(object)
method = "#{method}#{suffix}" if suffix.present?

object.public_send(method) # rubocop:disable GitlabSecurity/PublicSend -- the method being called is dependent on several factors
end

def body_field(object)
object.is_a?(Note) ? 'note' : 'description'
end

def mapped_usernames
@mapped_usernames ||= ::BulkImports::UsersMapper.new(context: context)
.map_usernames.transform_keys { |key| "@#{key}" }
.transform_values { |value| "@#{value}" }
end

def username_regex(mapped_usernames)
@username_regex ||= Regexp.new(mapped_usernames.keys.sort_by(&:length)
.reverse.map { |x| Regexp.escape(x) }.join('|'))
end

def matching_urls(object)
URI.extract(object_body(object), %w[http https]).each_with_object([]) do |url, array|
parsed_url = URI.parse(url)

next unless source_host == parsed_url.host
next unless parsed_url.path&.start_with?("/#{source_full_path}")

array << [url, new_url(object, parsed_url)]
end
end

def new_url(object, parsed_old_url)
parsed_old_url.host = ::Gitlab.config.gitlab.host
parsed_old_url.port = ::Gitlab.config.gitlab.port
parsed_old_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
parsed_old_url.to_s.gsub!(source_full_path, full_path(object))
end

def source_host
@source_host ||= URI.parse(context.configuration.url).host
end

def source_full_path
@source_full_path ||= context.entity.source_full_path
end

def full_path(object)
object.project.full_path
end

def object_has_reference?(body)
body.include?(source_full_path)
end

def log_and_fail(exception)
Gitlab::ErrorTracking.track_exception(exception, log_params)
BulkImports::Failure.create(failure_attributes(exception))
end

def log_params
{
message: 'Failed to update references',
bulk_import_id: context.bulk_import_id,
bulk_import_entity_id: tracker.bulk_import_entity_id,
source_full_path: context.entity.source_full_path,
source_version: context.bulk_import.source_version,
importer: 'gitlab_migration'
}
end

def failure_attributes(exception)
{
bulk_import_entity_id: context.entity.id,
pipeline_class: 'ReferencesPipeline',
exception_class: exception.class.to_s,
exception_message: exception.message.truncate(255),
correlation_id_value: Labkit::Correlation::CorrelationId.current_or_new_id
}
end

def context
@context ||= BulkImports::Pipeline::Context.new(tracker)
end
end
end

@@ -0,0 +1,8 @@
---
name: bulk_import_async_references_pipeline
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135806
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/430181
milestone: '16.7'
type: development
group: group::import and integrate
default_enabled: false

@@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/431266
milestone: '16.6'
type: development
group: group::pipeline security
default_enabled: true
default_enabled: false

@@ -147,6 +147,8 @@
- 1
- - bulk_imports_relation_export
- 1
- - bulk_imports_transform_references
- 1
- - chaos
- 2
- - chat_notification

@@ -0,0 +1,11 @@
# frozen_string_literal: true

class AddPartitionIdToCiJobArtifactStates < Gitlab::Database::Migration[2.2]
milestone '16.7'

enable_lock_retries!

def change
add_column :ci_job_artifact_states, :partition_id, :bigint, default: 100, null: false
end
end

@@ -0,0 +1 @@
d9a518f44671a226e1d7213ac1b7822077faa96f6a1ffc8df6b272c6a3655a4b

@@ -13976,6 +13976,7 @@ CREATE TABLE ci_job_artifact_states (
verification_retry_count smallint,
verification_checksum bytea,
verification_failure text,
partition_id bigint DEFAULT 100 NOT NULL,
CONSTRAINT check_df832b66ea CHECK ((char_length(verification_failure) <= 255))
);

@@ -26,79 +26,68 @@ trace will include sub-traces for each sub-step.

```protobuf
message Step {
string name = 1;
Reference ref = 2;
repeated EnvironmentVariable env = 3;
repeated Input inputs = 4;
message Reference {
string uri = 1;
string version = 2;
string hash = 3;
Definition def = 4;
}
string name = 1;
string step = 2;
map<string,string> env = 3;
map<string,google.protobuf.Value> inputs = 4;
}

message Definition {
Spec spec = 1;
enum Type {
type_unknown = 0;
type_steps = 1;
type_exec = 2;
}
Type type = 2;
oneof type_oneof {
DefinitionExec exec = 3;
DefinitionSteps steps = 4;
}
DefinitionType type = 1;
Exec exec = 2;
repeated Step steps = 3;
message Exec {
repeated string command = 1;
string work_dir = 2;
}
message Steps {
repeated Step children = 1;
}
}

enum DefinitionType {
definition_type_unspecified = 0;
exec = 1;
steps = 2;
}

message Spec {
repeated Input inputs = 1;
message Input {
string key = 1;
string default_value = 2;
Content spec = 1;
message Content {
map<string,Input> inputs = 1;
message Input {
InputType type = 1;
google.protobuf.Value default = 2;
}
}
}

message EnvironmentVariable {
string key = 1;
string value = 2;
bool masked = 3;
bool raw = 4;
}

message Input {
string key = 1;
string value = 2;
bool masked = 3;
bool raw = 4;
}

message Output {
string key = 1;
string value = 2;
bool masked = 3;
enum InputType {
spec_type_unspecified = 0;
string = 1;
number = 2;
bool = 3;
struct = 4;
list = 5;
}

message StepResult {
Step step = 1;
Spec spec = 2;
Definition def = 3;
enum Status {
unknown_result = 0;
success = 1;
failure = 2;
running = 3;
unspecified = 0;
running = 1;
success = 2;
failure = 3;
}
Result result = 2;
repeated Output outputs = 3;
repeated EnvironmentVariable exports = 4;
int32 exit_code = 5;
repeated StepResult children_step_results = 6;
Status status = 4;
map<string,Output> outputs = 5;
message Output {
string key = 1;
string value = 2;
bool masked = 3;
}
map<string,string> exports = 6;
int32 exit_code = 7;
repeated StepResult children_step_results = 8;
}
```

@@ -32,8 +32,6 @@ To use merged results pipelines:
[run jobs in merge request pipelines](merge_request_pipelines.md#prerequisites).
- Your repository must be a GitLab repository, not an
[external repository](../ci_cd_for_external_repos/index.md).
- You must not be using [fast forward merges](../../user/project/merge_requests/methods/index.md).
[An issue exists](https://gitlab.com/gitlab-org/gitlab/-/issues/26996) to change this behavior.

## Enable merged results pipelines

@@ -87,6 +87,55 @@ Because keyset pagination does not support page numbers, we are restricted to go
- Last page
- First page

#### Usage in REST API with `paginate_with_strategies`

For the REST API, the `paginate_with_strategies` helper can be used on a relation in order to use either keyset pagination or offset pagination.

```ruby
desc 'Get the things related to a project' do
detail 'This feature was introduced in GitLab 16.1'
success code: 200, model: ::API::Entities::Thing
failure [
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not Found' }
]
end
params do
use :pagination
requires :project_id, type: Integer, desc: 'The ID of the project'
optional :cursor, type: String, desc: 'Cursor for obtaining the next set of records'
optional :order_by, type: String, values: %w[id name], default: 'id',
desc: 'Attribute to sort by'
optional :sort, type: String, values: %w[asc desc], default: 'desc', desc: 'Order of sorting'
end
route_setting :authentication
get ':project_id/things' do
project = Project.find_by_id(params[:project_id])

not_found! if project.blank?

things = project.things

present paginate_with_strategies(things), with: ::API::Entities::Thing
end
```

In order for keyset pagination to be used, the following conditions must be met:

1. `params[:keyset]` must return `'keyset'`
1. `params[:order_by]` and `params[:sort]` must both appear in the object returned by the
`supported_keyset_orderings` class method on the model. In the following example, `Thing`
supports keyset pagination when ordering by ID in either ascending or descending order.

```ruby
class Thing < ApplicationRecord
def self.supported_keyset_orderings
{ id: [:asc, :desc] }
end
end
```

#### Usage in Rails with HAML views

Consider the following controller action, where we list the projects ordered by name:

@@ -69,6 +69,16 @@ If a feature is moved to another subscription tier, use `moved`:
> - [Moved](<link-to-issue>) from GitLab Premium to GitLab Free in 12.0.
```

#### Changing the feature status

If the feature status changes, use `changed`:

```markdown
> - [Introduced](<link-to-issue>) as an [Experiment](../../policy/experiment-beta-support.md) in GitLab 15.7.
> - [Changed](<link-to-issue>) to Beta in GitLab 16.0.
> - [Changed](<link-to-issue>) to Generally Available in GitLab 16.3.
```

#### Features introduced behind feature flags

When features are introduced behind feature flags, you must add details about the feature flag to the documentation.

@@ -0,0 +1,131 @@
# frozen_string_literal: true

module BulkImports
module Projects
module Pipelines
class LegacyReferencesPipeline
include Pipeline

BATCH_SIZE = 100

def extract(_context)
data = Enumerator.new do |enum|
add_matching_objects(portable.issues, enum)
add_matching_objects(portable.merge_requests, enum)
add_notes(portable.issues, enum)
add_notes(portable.merge_requests, enum)
end

BulkImports::Pipeline::ExtractedData.new(data: data)
end

def transform(_context, object)
body = object_body(object).dup

body.gsub!(username_regex(mapped_usernames), mapped_usernames)

matching_urls(object).each do |old_url, new_url|
body.gsub!(old_url, new_url) if body.include?(old_url)
end

object.assign_attributes(body_field(object) => body)

object
end

def load(_context, object)
object.save! if object_body_changed?(object)
end

private

def mapped_usernames
@mapped_usernames ||= ::BulkImports::UsersMapper.new(context: context)
.map_usernames.transform_keys { |key| "@#{key}" }
.transform_values { |value| "@#{value}" }
end

def username_regex(mapped_usernames)
@username_regex ||= Regexp.new(mapped_usernames.keys.sort_by(&:length)
.reverse.map { |x| Regexp.escape(x) }.join('|'))
end

def add_matching_objects(collection, enum)
collection.each_batch(of: BATCH_SIZE, column: :iid) do |batch|
batch.each do |object|
enum << object if object_has_reference?(object) || object_has_username?(object)
end
end
end

def add_notes(collection, enum)
collection.each_batch(of: BATCH_SIZE, column: :iid) do |batch|
batch.each do |object|
object.notes.each_batch(of: BATCH_SIZE) do |notes_batch|
notes_batch.each do |note|
note.refresh_markdown_cache!
enum << note if object_has_reference?(note) || object_has_username?(note)
end
end
end
end
end

def object_has_reference?(object)
object_body(object)&.include?(source_full_path)
end

def object_has_username?(object)
return false unless object_body(object)

mapped_usernames.keys.any? { |old_username| object_body(object).include?(old_username) }
end

def object_body(object)
call_object_method(object)
end

def object_body_changed?(object)
call_object_method(object, suffix: '_changed?')
end

def call_object_method(object, suffix: nil)
method = body_field(object)
method = "#{method}#{suffix}" if suffix.present?

object.public_send(method) # rubocop:disable GitlabSecurity/PublicSend -- the method being called is dependent on several factors
end

def body_field(object)
object.is_a?(Note) ? 'note' : 'description'
end

def matching_urls(object)
URI.extract(object_body(object), %w[http https]).each_with_object([]) do |url, array|
parsed_url = URI.parse(url)

next unless source_host == parsed_url.host
next unless parsed_url.path&.start_with?("/#{source_full_path}")

array << [url, new_url(parsed_url)]
end
end

def new_url(parsed_old_url)
parsed_old_url.host = ::Gitlab.config.gitlab.host
parsed_old_url.port = ::Gitlab.config.gitlab.port
parsed_old_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
parsed_old_url.to_s.gsub!(source_full_path, portable.full_path)
end

def source_host
@source_host ||= URI.parse(context.configuration.url).host
end

def source_full_path
context.entity.source_full_path
end
end
end
end
end

@@ -7,123 +7,49 @@ module BulkImports
include Pipeline

BATCH_SIZE = 100
DELAY = 1.second

def extract(_context)
data = Enumerator.new do |enum|
add_matching_objects(portable.issues, enum)
add_matching_objects(portable.merge_requests, enum)
add_notes(portable.issues, enum)
add_notes(portable.merge_requests, enum)
end
def extract(context)
@tracker_id = context.tracker.id
@counter = 0

BulkImports::Pipeline::ExtractedData.new(data: data)
enqueue_ref_workers_for_issues_and_issue_notes
enqueue_ref_workers_for_merge_requests_and_merge_request_notes

nil
end

def transform(_context, object)
body = object_body(object).dup

body.gsub!(username_regex(mapped_usernames), mapped_usernames)

matching_urls(object).each do |old_url, new_url|
body.gsub!(old_url, new_url) if body.include?(old_url)
end

object.assign_attributes(body_field(object) => body)

object
end

def load(_context, object)
object.save! if object_body_changed?(object)
end
attr_reader :tracker_id

private

def mapped_usernames
@mapped_usernames ||= ::BulkImports::UsersMapper.new(context: context)
.map_usernames.transform_keys { |key| "@#{key}" }
.transform_values { |value| "@#{value}" }
end
def enqueue_ref_workers_for_issues_and_issue_notes
portable.issues.select(:id).each_batch(of: BATCH_SIZE, column: :iid) do |batch|
BulkImports::TransformReferencesWorker.perform_in(delay, batch.map(&:id), Issue.to_s, tracker_id)

def username_regex(mapped_usernames)
@username_regex ||= Regexp.new(mapped_usernames.keys.sort_by(&:length)
.reverse.map { |x| Regexp.escape(x) }.join('|'))
end

def add_matching_objects(collection, enum)
collection.each_batch(of: BATCH_SIZE, column: :iid) do |batch|
batch.each do |object|
enum << object if object_has_reference?(object) || object_has_username?(object)
end
end
end

def add_notes(collection, enum)
collection.each_batch(of: BATCH_SIZE, column: :iid) do |batch|
batch.each do |object|
object.notes.each_batch(of: BATCH_SIZE) do |notes_batch|
notes_batch.each do |note|
note.refresh_markdown_cache!
enum << note if object_has_reference?(note) || object_has_username?(note)
end
batch.each do |issue|
issue.notes.select(:id).each_batch(of: BATCH_SIZE) do |notes_batch|
BulkImports::TransformReferencesWorker.perform_in(delay, notes_batch.map(&:id), Note.to_s, tracker_id)
end
end
end
end

def object_has_reference?(object)
object_body(object)&.include?(source_full_path)
end
def enqueue_ref_workers_for_merge_requests_and_merge_request_notes
portable.merge_requests.select(:id).each_batch(of: BATCH_SIZE, column: :iid) do |batch|
BulkImports::TransformReferencesWorker.perform_in(delay, batch.map(&:id), MergeRequest.to_s, tracker_id)

def object_has_username?(object)
return false unless object_body(object)

mapped_usernames.keys.any? { |old_username| object_body(object).include?(old_username) }
end

def object_body(object)
call_object_method(object)
end

def object_body_changed?(object)
call_object_method(object, suffix: '_changed?')
end

def call_object_method(object, suffix: nil)
method = body_field(object)
method = "#{method}#{suffix}" if suffix.present?

object.public_send(method) # rubocop:disable GitlabSecurity/PublicSend
end

def body_field(object)
object.is_a?(Note) ? 'note' : 'description'
end

def matching_urls(object)
URI.extract(object_body(object), %w[http https]).each_with_object([]) do |url, array|
parsed_url = URI.parse(url)

next unless source_host == parsed_url.host
next unless parsed_url.path&.start_with?("/#{source_full_path}")

array << [url, new_url(parsed_url)]
batch.each do |merge_request|
merge_request.notes.select(:id).each_batch(of: BATCH_SIZE) do |notes_batch|
BulkImports::TransformReferencesWorker.perform_in(delay, notes_batch.map(&:id), Note.to_s, tracker_id)
end
end
end
end

def new_url(parsed_old_url)
parsed_old_url.host = ::Gitlab.config.gitlab.host
parsed_old_url.port = ::Gitlab.config.gitlab.port
parsed_old_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
parsed_old_url.to_s.gsub!(source_full_path, portable.full_path)
end

def source_host
@source_host ||= URI.parse(context.configuration.url).host
end

def source_full_path
context.entity.source_full_path
def delay
@counter += 1
@counter * DELAY
end
end
end

@@ -135,7 +135,7 @@ module BulkImports
stage: 5
},
references: {
pipeline: BulkImports::Projects::Pipelines::ReferencesPipeline,
pipeline: references_pipeline,
stage: 5
},
finisher: {

@@ -144,6 +144,14 @@ module BulkImports
}
}
end

def references_pipeline
if Feature.enabled?(:bulk_import_async_references_pipeline)
BulkImports::Projects::Pipelines::ReferencesPipeline
else
BulkImports::Projects::Pipelines::LegacyReferencesPipeline
end
end
end
end
end

@@ -3,20 +3,6 @@
module Gitlab
module Pagination
module CursorBasedKeyset
SUPPORTED_MULTI_ORDERING = {
Group => { name: [:asc] },
AuditEvent => { id: [:desc] },
User => {
id: [:asc, :desc],
name: [:asc, :desc],
username: [:asc, :desc],
created_at: [:asc, :desc],
updated_at: [:asc, :desc]
},
::Ci::Build => { id: [:desc] },
::Packages::BuildInfo => { id: [:desc] }
}.freeze

# Relation types that are enforced in this list
# enforce the use of keyset pagination, thus erroring out requests
# made with offset pagination above a certain limit.

@@ -26,7 +12,7 @@ module Gitlab
ENFORCED_TYPES = [Group].freeze

def self.available_for_type?(relation)
SUPPORTED_MULTI_ORDERING.key?(relation.klass)
relation.klass.respond_to?(:supported_keyset_orderings)
end

def self.available?(cursor_based_request_context, relation)

@@ -44,7 +30,7 @@ module Gitlab
order_by_from_request = cursor_based_request_context.order
sort_from_request = cursor_based_request_context.sort

SUPPORTED_MULTI_ORDERING[relation.klass][order_by_from_request]&.include?(sort_from_request)
!!relation.klass.supported_keyset_orderings[order_by_from_request]&.include?(sort_from_request)
end
private_class_method :order_satisfied?
end

@@ -1,35 +0,0 @@
# frozen_string_literal: true

namespace :gitlab do
namespace :cleanup do
desc "GitLab | Cleanup | Delete moved repositories"
task moved: :gitlab_environment do
warn_user_is_not_gitlab
remove_flag = ENV['REMOVE']

Gitlab.config.repositories.storages.each do |name, repository_storage|
repo_root = repository_storage.legacy_disk_path.chomp('/')
# Look for global repos (legacy, depth 1) and normal repos (depth 2)
IO.popen(%W[find #{repo_root} -mindepth 1 -maxdepth 2 -name *+moved*.git]) do |find|
find.each_line do |path|
path.chomp!

if remove_flag
if FileUtils.rm_rf(path)
puts "Removed...#{path}".color(:green)
else
puts "Cannot remove #{path}".color(:red)
end
else
puts "Can be removed: #{path}".color(:green)
end
end
end
end

unless remove_flag
puts "To cleanup these repositories run this command with REMOVE=true".color(:yellow)
end
end
end
end

@@ -21593,12 +21593,6 @@ msgstr ""
msgid "Geo|Resync all"
msgstr ""

msgid "Geo|Resync all %{projects_count} projects"
msgstr ""

msgid "Geo|Resync project"
msgstr ""

msgid "Geo|Retry count"
msgstr ""

@@ -21722,9 +21716,6 @@ msgstr ""
msgid "Geo|This will %{action} %{replicableType}. It may take some time to complete. Are you sure you want to continue?"
msgstr ""

msgid "Geo|This will resync all projects. It may take some time to complete. Are you sure you want to continue?"
msgstr ""

msgid "Geo|This will reverify all projects. It may take some time to complete. Are you sure you want to continue?"
msgstr ""

@@ -52,6 +52,7 @@ RSpec.describe 'Database schema', feature_category: :database do
ci_sources_pipelines: %w[partition_id source_partition_id source_job_id],
ci_stages: %w[partition_id],
ci_trigger_requests: %w[commit_id],
ci_job_artifact_states: %w[partition_id],
cluster_providers_aws: %w[security_group_id vpc_id access_key_id],
cluster_providers_gcp: %w[gcp_project_id operation_id],
compliance_management_frameworks: %w[group_id],

@@ -355,11 +356,11 @@ RSpec.describe 'Database schema', feature_category: :database do

context 'for CI partitioned table' do
# Check that each partitionable model with more than 1 column has the partition_id column at the trailing
# position. Using PARTITIONABLE_MODELS instead of iterating tables since when partitioning existing tables,
# position. Using .partitionable_models instead of iterating tables since when partitioning existing tables,
# the routing table only gets created after the PK has already been created, which would be too late for a check.

skip_tables = %w[]
partitionable_models = Ci::Partitionable::Testing::PARTITIONABLE_MODELS
partitionable_models = Ci::Partitionable::Testing.partitionable_models
(partitionable_models - skip_tables).each do |klass|
model = klass.safe_constantize
table_name = model.table_name

@@ -87,6 +87,10 @@ FactoryBot.define do
status { :running }
end

trait :pending do
status { :pending }
end

trait :canceled do
status { :canceled }
end

@@ -371,7 +371,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
sha: merge_request.diff_head_sha,
user: user,
merge_request: merge_request,
status: :running)
status: :pending)
merge_request.update_head_pipeline
end

@ -94,6 +94,10 @@ describe('Issuable output', () => {
|
|||
axiosMock.onPut().reply(HTTP_STATUS_OK, putRequest);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
document.body.classList?.remove('issuable-sticky-header-visible');
|
||||
});
|
||||
|
||||
describe('update', () => {
|
||||
beforeEach(async () => {
|
||||
await createComponent();
|
||||
|
|
@ -334,6 +338,29 @@ describe('Issuable output', () => {
|
|||
});
|
||||
},
|
||||
);
|
||||
|
||||
describe('document body class', () => {
|
||||
beforeEach(async () => {
|
||||
await createComponent({ props: { canUpdate: false } });
|
||||
});
|
||||
|
||||
it('adds the css class to the document body', () => {
|
||||
wrapper.findComponent(StickyHeader).vm.$emit('show');
|
||||
expect(document.body.classList?.contains('issuable-sticky-header-visible')).toBe(true);
|
||||
});
|
||||
|
||||
it('removes the css class from the document body', () => {
|
||||
wrapper.findComponent(StickyHeader).vm.$emit('show');
|
||||
wrapper.findComponent(StickyHeader).vm.$emit('hide');
|
||||
expect(document.body.classList?.contains('issuable-sticky-header-visible')).toBe(false);
|
||||
});
|
||||
|
||||
it('removes the css class from the document body when unmounting', () => {
|
||||
wrapper.findComponent(StickyHeader).vm.$emit('show');
|
||||
wrapper.vm.$destroy();
|
||||
expect(document.body.classList?.contains('issuable-sticky-header-visible')).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Composable description component', () => {
@ -22,7 +22,7 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
|
|||
/>
|
||||
</div>
|
||||
<div
|
||||
class="gfm-form gl-overflow-hidden js-expanded js-vue-markdown-field md-area position-relative"
|
||||
class="gfm-form js-expanded js-vue-markdown-field md-area position-relative"
|
||||
data-uploads-path=""
|
||||
>
|
||||
<markdown-header-stub
@ -0,0 +1,268 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe BulkImports::Projects::Pipelines::LegacyReferencesPipeline, feature_category: :importers do
|
||||
let_it_be(:user) { create(:user) }
|
||||
let_it_be(:project) { create(:project) }
|
||||
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
|
||||
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import, url: 'https://my.gitlab.com') }
|
||||
let_it_be(:entity) do
|
||||
create(
|
||||
:bulk_import_entity,
|
||||
:project_entity,
|
||||
project: project,
|
||||
bulk_import: bulk_import,
|
||||
source_full_path: 'source/full/path'
|
||||
)
|
||||
end
|
||||
|
||||
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
|
||||
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
|
||||
let(:issue) { create(:issue, project: project, description: 'https://my.gitlab.com/source/full/path/-/issues/1') }
|
||||
let(:mr) do
|
||||
create(
|
||||
:merge_request,
|
||||
source_project: project,
|
||||
description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @source_username? @bob, @alice!'
|
||||
)
|
||||
end
|
||||
|
||||
let(:issue_note) do
|
||||
create(
|
||||
:note,
|
||||
project: project,
|
||||
noteable: issue,
|
||||
note: 'https://my.gitlab.com/source/full/path/-/issues/1 @older_username, not_a@username, and @old_username.'
|
||||
)
|
||||
end
|
||||
|
||||
let(:mr_note) do
|
||||
create(
|
||||
:note,
|
||||
project: project,
|
||||
noteable: mr,
|
||||
note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @same_username'
|
||||
)
|
||||
end
|
||||
|
||||
let(:interchanged_usernames) do
|
||||
create(
|
||||
:note,
|
||||
project: project,
|
||||
noteable: mr,
|
||||
note: '@manuelgrabowski-admin, @boaty-mc-boatface'
|
||||
)
|
||||
end
|
||||
|
||||
let(:old_note_html) { 'old note_html' }
|
||||
let(:system_note) do
|
||||
create(
|
||||
:note,
|
||||
project: project,
|
||||
system: true,
|
||||
noteable: issue,
|
||||
note: "mentioned in merge request !#{mr.iid} created by @old_username",
|
||||
note_html: old_note_html
|
||||
)
|
||||
end
|
||||
|
||||
let(:username_system_note) do
|
||||
create(
|
||||
:note,
|
||||
project: project,
|
||||
system: true,
|
||||
noteable: issue,
|
||||
note: "mentioned in merge request created by @source_username.",
|
||||
note_html: 'empty'
|
||||
)
|
||||
end
|
||||
|
||||
subject(:pipeline) { described_class.new(context) }
|
||||
|
||||
before do
|
||||
project.add_owner(user)
|
||||
|
||||
allow(Gitlab::Cache::Import::Caching)
|
||||
.to receive(:values_from_hash)
|
||||
.and_return({
|
||||
'old_username' => 'new_username',
|
||||
'older_username' => 'newer_username',
|
||||
'source_username' => 'destination_username',
|
||||
'bob' => 'alice-gdk',
|
||||
'alice' => 'bob-gdk',
|
||||
'manuelgrabowski' => 'manuelgrabowski-admin',
|
||||
'manuelgrabowski-admin' => 'manuelgrabowski',
|
||||
'boaty-mc-boatface' => 'boatymcboatface',
|
||||
'boatymcboatface' => 'boaty-mc-boatface'
|
||||
})
|
||||
end
|
||||
|
||||
def create_project_data
|
||||
[issue, mr, issue_note, mr_note, system_note, username_system_note]
|
||||
end
|
||||
|
||||
def create_username_project_data
|
||||
[username_system_note]
|
||||
end
|
||||
|
||||
describe '#extract' do
|
||||
it 'returns ExtractedData containing issues, mrs & their notes' do
|
||||
create_project_data
|
||||
|
||||
extracted_data = subject.extract(context)
|
||||
|
||||
expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
|
||||
expect(extracted_data.data).to contain_exactly(issue, mr, issue_note, system_note, username_system_note, mr_note)
|
||||
expect(system_note.note_html).not_to eq(old_note_html)
|
||||
expect(system_note.note_html)
|
||||
.to include("class=\"gfm gfm-merge_request\">!#{mr.iid}</a>")
|
||||
.and include(project.full_path.to_s)
|
||||
.and include("@old_username")
|
||||
expect(username_system_note.note_html)
|
||||
.to include("@source_username")
|
||||
end
|
||||
|
||||
context 'when object body is nil' do
|
||||
let(:issue) { create(:issue, project: project, description: nil) }
|
||||
|
||||
it 'returns ExtractedData not containing the object' do
|
||||
extracted_data = subject.extract(context)
|
||||
|
||||
expect(extracted_data.data).to contain_exactly(issue_note, mr, mr_note)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#transform', :clean_gitlab_redis_cache do
|
||||
it 'updates matching urls and usernames with new ones' do
|
||||
transformed_mr = subject.transform(context, mr)
|
||||
transformed_note = subject.transform(context, mr_note)
|
||||
transformed_issue = subject.transform(context, issue)
|
||||
transformed_issue_note = subject.transform(context, issue_note)
|
||||
transformed_system_note = subject.transform(context, system_note)
|
||||
transformed_username_system_note = subject.transform(context, username_system_note)
|
||||
|
||||
expected_url = URI('')
|
||||
expected_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
|
||||
expected_url.host = ::Gitlab.config.gitlab.host
|
||||
expected_url.port = ::Gitlab.config.gitlab.port
|
||||
expected_url.path = "/#{project.full_path}/-/merge_requests/#{mr.iid}"
|
||||
|
||||
expect(transformed_issue_note.note).not_to include("@older_username")
|
||||
expect(transformed_mr.description).not_to include("@source_username")
|
||||
expect(transformed_system_note.note).not_to include("@old_username")
|
||||
expect(transformed_username_system_note.note).not_to include("@source_username")
|
||||
|
||||
expect(transformed_issue.description)
|
||||
.to eq("http://localhost:80/#{transformed_issue.namespace.full_path}/-/issues/1")
|
||||
expect(transformed_mr.description).to eq("#{expected_url} @destination_username? @alice-gdk, @bob-gdk!")
|
||||
expect(transformed_note.note).to eq("#{expected_url} @same_username")
|
||||
expect(transformed_issue_note.note).to include("@newer_username, not_a@username, and @new_username.")
|
||||
expect(transformed_system_note.note).to eq("mentioned in merge request !#{mr.iid} created by @new_username")
|
||||
expect(transformed_username_system_note.note).to include("@destination_username.")
|
||||
end
|
||||
|
||||
it 'handles situations where old usernames are substrings of new usernames' do
|
||||
transformed_mr = subject.transform(context, mr)
|
||||
|
||||
expect(transformed_mr.description).to include("@alice-gdk")
|
||||
expect(transformed_mr.description).not_to include("@bob-gdk-gdk")
|
||||
end
|
||||
|
||||
it 'handles situations where old and new usernames are interchanged' do
|
||||
# e.g
|
||||
# |------------------------|-------------------------|
|
||||
# | old_username | new_username |
|
||||
# |------------------------|-------------------------|
|
||||
# | @manuelgrabowski-admin | @manuelgrabowski |
|
||||
# | @manuelgrabowski | @manuelgrabowski-admin |
|
||||
# |------------------------|-------------------------|
|
||||
|
||||
transformed_interchanged_usernames = subject.transform(context, interchanged_usernames)
|
||||
|
||||
expect(transformed_interchanged_usernames.note).to include("@manuelgrabowski")
|
||||
expect(transformed_interchanged_usernames.note).to include("@boatymcboatface")
|
||||
expect(transformed_interchanged_usernames.note).not_to include("@manuelgrabowski-admin")
|
||||
expect(transformed_interchanged_usernames.note).not_to include("@boaty-mc-boatface")
|
||||
end
|
||||
|
||||
context 'when object does not have reference or username' do
|
||||
it 'returns object unchanged' do
|
||||
issue.update!(description: 'foo')
|
||||
|
||||
transformed_issue = subject.transform(context, issue)
|
||||
|
||||
expect(transformed_issue.description).to eq('foo')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there are not matched urls or usernames' do
|
||||
let(:description) { 'https://my.gitlab.com/another/project/path/-/issues/1 @random_username' }
|
||||
|
||||
shared_examples 'returns object unchanged' do
|
||||
it 'returns object unchanged' do
|
||||
issue.update!(description: description)
|
||||
|
||||
transformed_issue = subject.transform(context, issue)
|
||||
|
||||
expect(transformed_issue.description).to eq(description)
|
||||
end
|
||||
end
|
||||
|
||||
include_examples 'returns object unchanged'
|
||||
|
||||
context 'when url path does not start with source full path' do
|
||||
let(:description) { 'https://my.gitlab.com/another/source/full/path/-/issues/1' }
|
||||
|
||||
include_examples 'returns object unchanged'
|
||||
end
|
||||
|
||||
context 'when host does not match and url path starts with source full path' do
|
||||
let(:description) { 'https://another.gitlab.com/source/full/path/-/issues/1' }
|
||||
|
||||
include_examples 'returns object unchanged'
|
||||
end
|
||||
|
||||
context 'when url does not match at all' do
|
||||
let(:description) { 'https://website.example/foo/bar' }
|
||||
|
||||
include_examples 'returns object unchanged'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#load' do
|
||||
it 'saves the object when object body changed' do
|
||||
transformed_issue = subject.transform(context, issue)
|
||||
transformed_note = subject.transform(context, mr_note)
|
||||
transformed_mr = subject.transform(context, mr)
|
||||
transformed_issue_note = subject.transform(context, issue_note)
|
||||
transformed_system_note = subject.transform(context, system_note)
|
||||
|
||||
expect(transformed_issue).to receive(:save!)
|
||||
expect(transformed_note).to receive(:save!)
|
||||
expect(transformed_mr).to receive(:save!)
|
||||
expect(transformed_issue_note).to receive(:save!)
|
||||
expect(transformed_system_note).to receive(:save!)
|
||||
|
||||
subject.load(context, transformed_issue)
|
||||
subject.load(context, transformed_note)
|
||||
subject.load(context, transformed_mr)
|
||||
subject.load(context, transformed_issue_note)
|
||||
subject.load(context, transformed_system_note)
|
||||
end
|
||||
|
||||
context 'when object body is not changed' do
|
||||
it 'does not save the object' do
|
||||
expect(mr).not_to receive(:save!)
|
||||
expect(mr_note).not_to receive(:save!)
|
||||
expect(system_note).not_to receive(:save!)
|
||||
|
||||
subject.load(context, mr)
|
||||
subject.load(context, mr_note)
|
||||
subject.load(context, system_note)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
@ -6,7 +6,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
|
|||
let_it_be(:user) { create(:user) }
|
||||
let_it_be(:project) { create(:project) }
|
||||
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
|
||||
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import, url: 'https://my.gitlab.com') }
|
||||
|
||||
let_it_be(:entity) do
|
||||
create(
|
||||
:bulk_import_entity,
|
@ -19,250 +19,55 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
|
|||
|
||||
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
|
||||
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
|
||||
let(:issue) { create(:issue, project: project, description: 'https://my.gitlab.com/source/full/path/-/issues/1') }
|
||||
let(:mr) do
|
||||
create(
|
||||
:merge_request,
|
||||
source_project: project,
|
||||
description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @source_username? @bob, @alice!'
|
||||
)
|
||||
end
|
||||
|
||||
let(:issue_note) do
|
||||
create(
|
||||
:note,
|
||||
project: project,
|
||||
noteable: issue,
|
||||
note: 'https://my.gitlab.com/source/full/path/-/issues/1 @older_username, not_a@username, and @old_username.'
|
||||
)
|
||||
end
|
||||
let_it_be(:issue) { create(:issue, project: project) }
|
||||
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
|
||||
let_it_be(:issue_note) { create(:note, noteable: issue, project: project) }
|
||||
let_it_be(:merge_request_note) { create(:note, noteable: merge_request, project: project) }
|
||||
let_it_be(:system_note) { create(:note, project: project, system: true, noteable: issue) }
|
||||
|
||||
let(:mr_note) do
|
||||
create(
|
||||
:note,
|
||||
project: project,
|
||||
noteable: mr,
|
||||
note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @same_username'
|
||||
)
|
||||
end
|
||||
let_it_be(:random_project) { create(:project) }
|
||||
let_it_be(:random_issue) { create(:issue, project: random_project) }
|
||||
let_it_be(:random_merge_request) { create(:merge_request, source_project: random_project) }
|
||||
let_it_be(:random_issue_note) { create(:note, noteable: random_issue, project: random_project) }
|
||||
let_it_be(:random_mr_note) { create(:note, noteable: random_merge_request, project: random_project) }
|
||||
let_it_be(:random_system_note) { create(:note, system: true, noteable: random_issue, project: random_project) }
|
||||
|
||||
let(:interchanged_usernames) do
|
||||
create(
|
||||
:note,
|
||||
project: project,
|
||||
noteable: mr,
|
||||
note: '@manuelgrabowski-admin, @boaty-mc-boatface'
|
||||
)
|
||||
end
|
||||
|
||||
let(:old_note_html) { 'old note_html' }
|
||||
let(:system_note) do
|
||||
create(
|
||||
:note,
|
||||
project: project,
|
||||
system: true,
|
||||
noteable: issue,
|
||||
note: "mentioned in merge request !#{mr.iid} created by @old_username",
|
||||
note_html: old_note_html
|
||||
)
|
||||
end
|
||||
|
||||
let(:username_system_note) do
|
||||
create(
|
||||
:note,
|
||||
project: project,
|
||||
system: true,
|
||||
noteable: issue,
|
||||
note: "mentioned in merge request created by @source_username.",
|
||||
note_html: 'empty'
|
||||
)
|
||||
end
|
||||
let(:delay) { described_class::DELAY }
|
||||
|
||||
subject(:pipeline) { described_class.new(context) }
|
||||
|
||||
before do
|
||||
project.add_owner(user)
|
||||
describe '#run' do
|
||||
it "enqueues TransformReferencesWorker for the project's issues, mrs and their notes" do
|
||||
expect(BulkImports::TransformReferencesWorker).to receive(:perform_in)
|
||||
.with(delay, [issue.id], 'Issue', tracker.id)
|
||||
|
||||
allow(Gitlab::Cache::Import::Caching)
|
||||
.to receive(:values_from_hash)
|
||||
.and_return({
|
||||
'old_username' => 'new_username',
|
||||
'older_username' => 'newer_username',
|
||||
'source_username' => 'destination_username',
|
||||
'bob' => 'alice-gdk',
|
||||
'alice' => 'bob-gdk',
|
||||
'manuelgrabowski' => 'manuelgrabowski-admin',
|
||||
'manuelgrabowski-admin' => 'manuelgrabowski',
|
||||
'boaty-mc-boatface' => 'boatymcboatface',
|
||||
'boatymcboatface' => 'boaty-mc-boatface'
|
||||
})
|
||||
end
|
||||
expect(BulkImports::TransformReferencesWorker).to receive(:perform_in)
|
||||
.with(delay * 2, array_including([issue_note.id, system_note.id]), 'Note', tracker.id)
|
||||
|
||||
def create_project_data
|
||||
[issue, mr, issue_note, mr_note, system_note, username_system_note]
|
||||
end
|
||||
expect(BulkImports::TransformReferencesWorker).to receive(:perform_in)
|
||||
.with(delay * 3, [merge_request.id], 'MergeRequest', tracker.id)
|
||||
|
||||
def create_username_project_data
|
||||
[username_system_note]
|
||||
end
|
||||
expect(BulkImports::TransformReferencesWorker).to receive(:perform_in)
|
||||
.with(delay * 4, [merge_request_note.id], 'Note', tracker.id)
|
||||
|
||||
describe '#extract' do
|
||||
it 'returns ExtractedData containing issues, mrs & their notes' do
|
||||
create_project_data
|
||||
|
||||
extracted_data = subject.extract(context)
|
||||
|
||||
expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
|
||||
expect(extracted_data.data).to contain_exactly(issue, mr, issue_note, system_note, username_system_note, mr_note)
|
||||
expect(system_note.note_html).not_to eq(old_note_html)
|
||||
expect(system_note.note_html)
|
||||
.to include("class=\"gfm gfm-merge_request\">!#{mr.iid}</a>")
|
||||
.and include(project.full_path.to_s)
|
||||
.and include("@old_username")
|
||||
expect(username_system_note.note_html)
|
||||
.to include("@source_username")
|
||||
subject.run
|
||||
end
|
||||
|
||||
context 'when object body is nil' do
|
||||
let(:issue) { create(:issue, project: project, description: nil) }
|
||||
it 'does not enqueue objects that do not belong to the project' do
|
||||
expect(BulkImports::TransformReferencesWorker).not_to receive(:perform_in)
|
||||
.with(anything, [random_issue.id], 'Issue', tracker.id)
|
||||
|
||||
it 'returns ExtractedData not containing the object' do
|
||||
extracted_data = subject.extract(context)
|
||||
expect(BulkImports::TransformReferencesWorker).not_to receive(:perform_in)
|
||||
.with(anything, array_including([random_issue_note.id, random_system_note.id]), 'Note', tracker.id)
|
||||
|
||||
expect(extracted_data.data).to contain_exactly(issue_note, mr, mr_note)
|
||||
end
|
||||
end
|
||||
end
|
||||
expect(BulkImports::TransformReferencesWorker).not_to receive(:perform_in)
|
||||
.with(anything, [random_merge_request.id], 'MergeRequest', tracker.id)
|
||||
|
||||
describe '#transform', :clean_gitlab_redis_cache do
|
||||
it 'updates matching urls and usernames with new ones' do
|
||||
transformed_mr = subject.transform(context, mr)
|
||||
transformed_note = subject.transform(context, mr_note)
|
||||
transformed_issue = subject.transform(context, issue)
|
||||
transformed_issue_note = subject.transform(context, issue_note)
|
||||
transformed_system_note = subject.transform(context, system_note)
|
||||
transformed_username_system_note = subject.transform(context, username_system_note)
|
||||
expect(BulkImports::TransformReferencesWorker).not_to receive(:perform_in)
|
||||
.with(anything, [random_mr_note.id], 'Note', tracker.id)
|
||||
|
||||
expected_url = URI('')
|
||||
expected_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
|
||||
expected_url.host = ::Gitlab.config.gitlab.host
|
||||
expected_url.port = ::Gitlab.config.gitlab.port
|
||||
expected_url.path = "/#{project.full_path}/-/merge_requests/#{mr.iid}"
|
||||
|
||||
expect(transformed_issue_note.note).not_to include("@older_username")
|
||||
expect(transformed_mr.description).not_to include("@source_username")
|
||||
expect(transformed_system_note.note).not_to include("@old_username")
|
||||
expect(transformed_username_system_note.note).not_to include("@source_username")
|
||||
|
||||
expect(transformed_issue.description)
|
||||
.to eq("http://localhost:80/#{transformed_issue.namespace.full_path}/-/issues/1")
|
||||
expect(transformed_mr.description).to eq("#{expected_url} @destination_username? @alice-gdk, @bob-gdk!")
|
||||
expect(transformed_note.note).to eq("#{expected_url} @same_username")
|
||||
expect(transformed_issue_note.note).to include("@newer_username, not_a@username, and @new_username.")
|
||||
expect(transformed_system_note.note).to eq("mentioned in merge request !#{mr.iid} created by @new_username")
|
||||
expect(transformed_username_system_note.note).to include("@destination_username.")
|
||||
end
|
||||
|
||||
it 'handles situations where old usernames are substrings of new usernames' do
|
||||
transformed_mr = subject.transform(context, mr)
|
||||
|
||||
expect(transformed_mr.description).to include("@alice-gdk")
|
||||
expect(transformed_mr.description).not_to include("@bob-gdk-gdk")
|
||||
end
|
||||
|
||||
it 'handles situations where old and new usernames are interchanged' do
|
||||
# e.g
|
||||
# |------------------------|-------------------------|
|
||||
# | old_username | new_username |
|
||||
# |------------------------|-------------------------|
|
||||
# | @manuelgrabowski-admin | @manuelgrabowski |
|
||||
# | @manuelgrabowski | @manuelgrabowski-admin |
|
||||
# |------------------------|-------------------------|
|
||||
|
||||
transformed_interchanged_usernames = subject.transform(context, interchanged_usernames)
|
||||
|
||||
expect(transformed_interchanged_usernames.note).to include("@manuelgrabowski")
|
||||
expect(transformed_interchanged_usernames.note).to include("@boatymcboatface")
|
||||
expect(transformed_interchanged_usernames.note).not_to include("@manuelgrabowski-admin")
|
||||
expect(transformed_interchanged_usernames.note).not_to include("@boaty-mc-boatface")
|
||||
end
|
||||
|
||||
context 'when object does not have reference or username' do
|
||||
it 'returns object unchanged' do
|
||||
issue.update!(description: 'foo')
|
||||
|
||||
transformed_issue = subject.transform(context, issue)
|
||||
|
||||
expect(transformed_issue.description).to eq('foo')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there are not matched urls or usernames' do
|
||||
let(:description) { 'https://my.gitlab.com/another/project/path/-/issues/1 @random_username' }
|
||||
|
||||
shared_examples 'returns object unchanged' do
|
||||
it 'returns object unchanged' do
|
||||
issue.update!(description: description)
|
||||
|
||||
transformed_issue = subject.transform(context, issue)
|
||||
|
||||
expect(transformed_issue.description).to eq(description)
|
||||
end
|
||||
end
|
||||
|
||||
include_examples 'returns object unchanged'
|
||||
|
||||
context 'when url path does not start with source full path' do
|
||||
let(:description) { 'https://my.gitlab.com/another/source/full/path/-/issues/1' }
|
||||
|
||||
include_examples 'returns object unchanged'
|
||||
end
|
||||
|
||||
context 'when host does not match and url path starts with source full path' do
|
||||
let(:description) { 'https://another.gitlab.com/source/full/path/-/issues/1' }
|
||||
|
||||
include_examples 'returns object unchanged'
|
||||
end
|
||||
|
||||
context 'when url does not match at all' do
|
||||
let(:description) { 'https://website.example/foo/bar' }
|
||||
|
||||
include_examples 'returns object unchanged'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#load' do
|
||||
it 'saves the object when object body changed' do
|
||||
transformed_issue = subject.transform(context, issue)
|
||||
transformed_note = subject.transform(context, mr_note)
|
||||
transformed_mr = subject.transform(context, mr)
|
||||
transformed_issue_note = subject.transform(context, issue_note)
|
||||
transformed_system_note = subject.transform(context, system_note)
|
||||
|
||||
expect(transformed_issue).to receive(:save!)
|
||||
expect(transformed_note).to receive(:save!)
|
||||
expect(transformed_mr).to receive(:save!)
|
||||
expect(transformed_issue_note).to receive(:save!)
|
||||
expect(transformed_system_note).to receive(:save!)
|
||||
|
||||
subject.load(context, transformed_issue)
|
||||
subject.load(context, transformed_note)
|
||||
subject.load(context, transformed_mr)
|
||||
subject.load(context, transformed_issue_note)
|
||||
subject.load(context, transformed_system_note)
|
||||
end
|
||||
|
||||
context 'when object body is not changed' do
|
||||
it 'does not save the object' do
|
||||
expect(mr).not_to receive(:save!)
|
||||
expect(mr_note).not_to receive(:save!)
|
||||
expect(system_note).not_to receive(:save!)
|
||||
|
||||
subject.load(context, mr)
|
||||
subject.load(context, mr_note)
|
||||
subject.load(context, system_note)
|
||||
end
|
||||
subject.run
|
||||
end
|
||||
end
|
||||
end
@ -2,7 +2,7 @@
|
|||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe BulkImports::Projects::Stage do
|
||||
RSpec.describe BulkImports::Projects::Stage, feature_category: :importers do
|
||||
subject do
|
||||
entity = build(:bulk_import_entity, :project_entity)
@ -15,11 +15,30 @@ RSpec.describe BulkImports::Projects::Stage do
|
|||
|
||||
expect(pipelines).to include(
|
||||
hash_including({ stage: 0, pipeline: BulkImports::Projects::Pipelines::ProjectPipeline }),
|
||||
hash_including({ stage: 1, pipeline: BulkImports::Projects::Pipelines::RepositoryPipeline })
|
||||
hash_including({ stage: 1, pipeline: BulkImports::Projects::Pipelines::RepositoryPipeline }),
|
||||
hash_including({ stage: 5, pipeline: BulkImports::Projects::Pipelines::ReferencesPipeline })
|
||||
)
|
||||
expect(pipelines.last).to match(hash_including({ pipeline: BulkImports::Common::Pipelines::EntityFinisher }))
|
||||
end
|
||||
|
||||
context 'when bulk_import_async_references_pipeline feature flag is disabled' do
|
||||
before do
|
||||
stub_feature_flags(bulk_import_async_references_pipeline: false)
|
||||
end
|
||||
|
||||
it 'uses the legacy references pipeline' do
|
||||
pipelines = subject.pipelines
|
||||
|
||||
expect(pipelines).to include(
|
||||
hash_including({ stage: 5, pipeline: BulkImports::Projects::Pipelines::LegacyReferencesPipeline })
|
||||
)
|
||||
|
||||
expect(pipelines).not_to include(
|
||||
hash_including({ stage: 5, pipeline: BulkImports::Projects::Pipelines::ReferencesPipeline })
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
it 'only have pipelines with valid keys' do
|
||||
pipeline_keys = subject.pipelines.collect(&:keys).flatten.uniq
|
||||
allowed_keys = %i[pipeline stage minimum_source_version maximum_source_version]
@ -6,24 +6,24 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
|
|||
subject { described_class }
|
||||
|
||||
describe '.available_for_type?' do
|
||||
it 'returns true for Group' do
|
||||
expect(subject.available_for_type?(Group.all)).to be_truthy
|
||||
it 'returns true for when class implements .supported_keyset_orderings' do
|
||||
model = Class.new(ApplicationRecord) do
|
||||
self.table_name = 'users'
|
||||
|
||||
def self.supported_keyset_orderings
|
||||
{ id: [:desc] }
|
||||
end
|
||||
end
|
||||
|
||||
expect(subject.available_for_type?(model.all)).to eq(true)
|
||||
end
|
||||
|
||||
it 'returns true for Ci::Build' do
|
||||
expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
|
||||
end
|
||||
it 'return false when class does not implement .supported_keyset_orderings' do
|
||||
model = Class.new(ApplicationRecord) do
|
||||
self.table_name = 'users'
|
||||
end
|
||||
|
||||
it 'returns true for Packages::BuildInfo' do
|
||||
expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy
|
||||
end
|
||||
|
||||
it 'returns true for User' do
|
||||
expect(subject.available_for_type?(User.all)).to be_truthy
|
||||
end
|
||||
|
||||
it 'return false for other types of relations' do
|
||||
expect(subject.available_for_type?(Issue.all)).to be_falsey
|
||||
expect(subject.available_for_type?(model.all)).to eq(false)
|
||||
end
|
||||
end
@ -68,53 +68,54 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
|
|||
describe '.available?' do
|
||||
let(:request_context) { double('request_context', params: { order_by: order_by, sort: sort }) }
|
||||
let(:cursor_based_request_context) { Gitlab::Pagination::Keyset::CursorBasedRequestContext.new(request_context) }
|
||||
let(:model) do
|
||||
Class.new(ApplicationRecord) do
|
||||
self.table_name = 'users'
|
||||
|
||||
context 'with order-by name asc' do
|
||||
let(:order_by) { :name }
|
||||
let(:sort) { :asc }
|
||||
|
||||
it 'returns true for Group' do
|
||||
expect(subject.available?(cursor_based_request_context, Group.all)).to be_truthy
|
||||
end
|
||||
|
||||
it 'return false for other types of relations' do
|
||||
expect(subject.available?(cursor_based_request_context, Issue.all)).to be_falsey
|
||||
expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_falsey
|
||||
expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_falsey
|
||||
def self.supported_keyset_orderings
|
||||
{ id: [:desc] }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'with order-by id desc' do
|
||||
context 'when param order is supported by the model' do
|
||||
let(:order_by) { :id }
|
||||
let(:sort) { :desc }
|
||||
|
||||
it 'returns true for Ci::Build' do
|
||||
expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy
|
||||
end
|
||||
|
||||
it 'returns true for AuditEvent' do
|
||||
expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
|
||||
end
|
||||
|
||||
it 'returns true for Packages::BuildInfo' do
|
||||
expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy
|
||||
end
|
||||
|
||||
it 'returns true for User' do
|
||||
expect(subject.available?(cursor_based_request_context, User.all)).to be_truthy
|
||||
it 'returns true' do
|
||||
expect(subject.available?(cursor_based_request_context, model.all)).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with other order-by columns' do
|
||||
let(:order_by) { :path }
|
||||
context 'when sort param is not supported by the model' do
|
||||
let(:order_by) { :id }
|
||||
let(:sort) { :asc }
|
||||
|
||||
it 'returns false for Group' do
|
||||
expect(subject.available?(cursor_based_request_context, Group.all)).to be_falsey
|
||||
it 'returns false' do
|
||||
expect(subject.available?(cursor_based_request_context, model.all)).to eq(false)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when order_by params is not supported by the model' do
|
||||
let(:order_by) { :name }
|
||||
let(:sort) { :desc }
|
||||
|
||||
it 'returns false' do
|
||||
expect(subject.available?(cursor_based_request_context, model.all)).to eq(false)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when model does not implement .supported_keyset_orderings' do
|
||||
let(:order_by) { :id }
|
||||
let(:sort) { :desc }
|
||||
let(:model) do
|
||||
Class.new(ApplicationRecord) do
|
||||
self.table_name = 'users'
|
||||
end
|
||||
end
|
||||
|
||||
it 'return false for other types of relations' do
|
||||
expect(subject.available?(cursor_based_request_context, Issue.all)).to be_falsey
|
||||
it 'returns false' do
|
||||
expect(subject.available?(cursor_based_request_context, model.all)).to eq(false)
|
||||
end
|
||||
end
|
||||
end
@ -3,19 +3,38 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Pages::LookupPath, feature_category: :pages do
|
||||
let(:project) { create(:project, :pages_private, pages_https_only: true) }
|
||||
let(:trim_prefix) { nil }
|
||||
let(:domain) { nil }
|
||||
let(:path_prefix) { nil }
|
||||
let(:file_store) { ::ObjectStorage::Store::REMOTE }
|
||||
let(:group) { build(:group, path: 'mygroup') }
|
||||
let(:deployment) do
|
||||
build(
|
||||
:pages_deployment,
|
||||
id: 1,
|
||||
project: project,
|
||||
path_prefix: path_prefix,
|
||||
file_store: file_store)
|
||||
end
|
||||
|
||||
subject(:lookup_path) { described_class.new(project, trim_prefix: trim_prefix, domain: domain) }
|
||||
let(:project) do
|
||||
build(
|
||||
:project,
|
||||
:pages_private,
|
||||
group: group,
|
||||
path: 'myproject',
|
||||
pages_https_only: true)
|
||||
end
|
||||
|
||||
subject(:lookup_path) { described_class.new(deployment: deployment, trim_prefix: trim_prefix) }
|
||||
|
||||
before do
|
||||
stub_pages_setting(
|
||||
enabled: true,
|
||||
access_control: true,
|
||||
external_https: ["1.1.1.1:443"],
|
||||
url: 'http://example.com',
|
||||
protocol: 'http'
|
||||
)
|
||||
protocol: 'http')
|
||||
|
||||
stub_pages_object_storage(::Pages::DeploymentUploader)
|
||||
end
@ -32,7 +51,11 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
|
|||
end
|
||||
|
||||
describe '#https_only' do
|
||||
subject(:lookup_path) { described_class.new(deployment: deployment, domain: domain) }
|
||||
|
||||
context 'when no domain provided' do
|
||||
let(:domain) { nil }
|
||||
|
||||
it 'delegates to Project#pages_https_only?' do
|
||||
expect(lookup_path.https_only).to eq(true)
|
||||
end
@ -48,66 +71,55 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
|
|||
end
|
||||
|
||||
describe '#source' do
|
||||
let(:source) { lookup_path.source }
|
||||
|
||||
it 'returns nil' do
|
||||
expect(source).to eq(nil)
|
||||
it 'uses deployment from object storage', :freeze_time do
|
||||
expect(lookup_path.source).to eq(
|
||||
type: 'zip',
|
||||
path: deployment.file.url(expire_at: 1.day.from_now),
|
||||
global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
|
||||
sha256: deployment.file_sha256,
|
||||
file_size: deployment.size,
|
||||
file_count: deployment.file_count
|
||||
)
|
||||
end
|
||||
|
||||
context 'when there is pages deployment' do
|
||||
let!(:deployment) { create(:pages_deployment, project: project) }
|
||||
it 'does not recreate source hash' do
|
||||
expect(deployment.file).to receive(:url_or_file_path).once
|
||||
|
||||
it 'uses deployment from object storage' do
|
||||
freeze_time do
|
||||
expect(source).to eq(
|
||||
type: 'zip',
|
||||
path: deployment.file.url(expire_at: 1.day.from_now),
|
||||
global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
|
||||
sha256: deployment.file_sha256,
|
||||
file_size: deployment.size,
|
||||
file_count: deployment.file_count
|
||||
)
|
||||
end
|
||||
end
|
||||
2.times { lookup_path.source }
|
||||
end
|
||||
|
||||
context 'when deployment is in the local storage' do
|
||||
before do
|
||||
deployment.file.migrate!(::ObjectStorage::Store::LOCAL)
|
||||
end
|
||||
context 'when deployment is in the local storage' do
|
||||
let(:file_store) { ::ObjectStorage::Store::LOCAL }
|
||||
|
||||
it 'uses file protocol' do
|
||||
freeze_time do
|
||||
expect(source).to eq(
|
||||
type: 'zip',
|
||||
path: "file://#{deployment.file.path}",
|
||||
global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
|
||||
sha256: deployment.file_sha256,
|
||||
file_size: deployment.size,
|
||||
file_count: deployment.file_count
|
||||
)
|
||||
end
|
||||
end
|
||||
it 'uses file protocol', :freeze_time do
|
||||
expect(lookup_path.source).to eq(
|
||||
type: 'zip',
|
||||
path: "file://#{deployment.file.path}",
|
||||
global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
|
||||
sha256: deployment.file_sha256,
|
||||
file_size: deployment.size,
|
||||
file_count: deployment.file_count
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#prefix' do
|
||||
let(:trim_prefix) { 'mygroup' }
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
||||
context 'when pages group root projects' do
|
||||
let(:project) { instance_double(Project, full_path: "namespace/namespace.example.com") }
|
||||
|
||||
it 'returns "/"' do
|
||||
expect(lookup_path.prefix).to eq('/')
|
||||
end
|
||||
where(:full_path, :trim_prefix, :path_prefix, :result) do
|
||||
'mygroup/myproject' | nil | nil | '/'
|
||||
'mygroup/myproject' | 'mygroup' | nil | '/myproject/'
|
||||
'mygroup/myproject' | nil | 'PREFIX' | '/PREFIX/'
|
||||
'mygroup/myproject' | 'mygroup' | 'PREFIX' | '/myproject/PREFIX/'
|
||||
end
|
||||
|
||||
context 'when pages in the given prefix' do
|
||||
let(:project) { instance_double(Project, full_path: 'mygroup/myproject') }
|
||||
|
||||
it 'returns the project full path with the provided prefix removed' do
|
||||
expect(lookup_path.prefix).to eq('/myproject/')
|
||||
with_them do
|
||||
before do
|
||||
allow(project).to receive(:full_path).and_return(full_path)
|
||||
end
|
||||
|
||||
it { expect(lookup_path.prefix).to eq(result) }
|
||||
end
|
||||
end
@ -129,26 +141,12 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
|
|||
|
||||
expect(lookup_path.unique_host).to eq('unique-domain.example.com')
|
||||
end
|
||||
|
||||
context 'when there is domain provided' do
|
||||
let(:domain) { instance_double(PagesDomain) }
|
||||
|
||||
it 'returns nil' do
|
||||
expect(lookup_path.unique_host).to eq(nil)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#root_directory' do
|
||||
context 'when there is no deployment' do
|
||||
it 'returns nil' do
|
||||
expect(lookup_path.root_directory).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there is a deployment' do
|
||||
let!(:deployment) { create(:pages_deployment, project: project, root_directory: 'foo') }
|
||||
let(:deployment) { build_stubbed(:pages_deployment, project: project, root_directory: 'foo') }
|
||||
|
||||
it 'returns the deployment\'s root_directory' do
|
||||
expect(lookup_path.root_directory).to eq('foo')
@ -3,9 +3,30 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Pages::VirtualDomain, feature_category: :pages do
|
||||
let(:domain) { nil }
|
||||
let(:trim_prefix) { nil }
|
||||
|
||||
let_it_be(:group) { create(:group, path: 'mygroup') }
|
||||
let_it_be(:project_a) { create(:project, group: group) }
|
||||
let_it_be(:project_a_main_deployment) { create(:pages_deployment, project: project_a, path_prefix: nil) }
|
||||
let_it_be(:project_a_versioned_deployment) { create(:pages_deployment, project: project_a, path_prefix: 'v1') }
|
||||
let_it_be(:project_b) { create(:project, group: group) }
|
||||
let_it_be(:project_b_main_deployment) { create(:pages_deployment, project: project_b, path_prefix: nil) }
|
||||
let_it_be(:project_b_versioned_deployment) { create(:pages_deployment, project: project_b, path_prefix: 'v1') }
|
||||
let_it_be(:project_c) { create(:project, group: group) }
|
||||
let_it_be(:project_c_main_deployment) { create(:pages_deployment, project: project_c, path_prefix: nil) }
|
||||
let_it_be(:project_c_versioned_deployment) { create(:pages_deployment, project: project_c, path_prefix: 'v1') }
|
||||
|
||||
before_all do
|
||||
# Those deployments are created to ensure that deactivated deployments won't be returned on the queries
|
||||
deleted_at = 1.hour.ago
|
||||
create(:pages_deployment, project: project_a, path_prefix: 'v2', deleted_at: deleted_at)
|
||||
create(:pages_deployment, project: project_b, path_prefix: 'v2', deleted_at: deleted_at)
|
||||
create(:pages_deployment, project: project_c, path_prefix: 'v2', deleted_at: deleted_at)
|
||||
end
|
||||
|
||||
describe '#certificate and #key pair' do
|
||||
let(:domain) { nil }
|
||||
let(:project) { instance_double(Project) }
|
||||
let(:project) { project_a }
|
||||
|
||||
subject(:virtual_domain) { described_class.new(projects: [project], domain: domain) }
@ -25,51 +46,52 @@ RSpec.describe Pages::VirtualDomain, feature_category: :pages do
|
|||
end
|
||||
|
||||
describe '#lookup_paths' do
|
||||
let(:domain) { nil }
|
||||
let(:trim_prefix) { nil }
|
||||
let(:project_a) { instance_double(Project) }
|
||||
let(:project_b) { instance_double(Project) }
|
||||
let(:project_c) { instance_double(Project) }
|
||||
let(:pages_lookup_path_a) { instance_double(Pages::LookupPath, prefix: 'aaa', source: { type: 'zip', path: 'https://example.com' }) }
|
||||
let(:pages_lookup_path_b) { instance_double(Pages::LookupPath, prefix: 'bbb', source: { type: 'zip', path: 'https://example.com' }) }
|
||||
let(:pages_lookup_path_without_source) { instance_double(Pages::LookupPath, prefix: 'ccc', source: nil) }
|
||||
let(:project_list) { [project_a, project_b, project_c] }
|
||||
|
||||
subject(:virtual_domain) do
|
||||
described_class.new(projects: project_list, domain: domain, trim_prefix: trim_prefix)
|
||||
end
|
||||
|
||||
before do
|
||||
allow(Pages::LookupPath)
|
||||
.to receive(:new)
|
||||
.with(project_a, domain: domain, trim_prefix: trim_prefix)
|
||||
.and_return(pages_lookup_path_a)
|
||||
context 'when pages multiple versions is disabled' do
|
||||
before do
|
||||
allow(::Gitlab::Pages)
|
||||
.to receive(:multiple_versions_enabled_for?)
|
||||
.and_return(false)
|
||||
end
|
||||
|
||||
allow(Pages::LookupPath)
|
||||
.to receive(:new)
|
||||
.with(project_b, domain: domain, trim_prefix: trim_prefix)
|
||||
.and_return(pages_lookup_path_b)
|
||||
it 'returns only the main deployments for each project' do
|
||||
global_ids = virtual_domain.lookup_paths.map do |lookup_path|
|
||||
lookup_path.source[:global_id]
|
||||
end
|
||||
|
||||
allow(Pages::LookupPath)
|
||||
.to receive(:new)
|
||||
.with(project_c, domain: domain, trim_prefix: trim_prefix)
|
||||
.and_return(pages_lookup_path_without_source)
|
||||
end
|
||||
|
||||
context 'when there is pages domain provided' do
|
||||
let(:domain) { instance_double(PagesDomain) }
|
||||
let(:project_list) { [project_a, project_b, project_c] }
|
||||
|
||||
it 'returns collection of projects pages lookup paths sorted by prefix in reverse' do
|
||||
expect(virtual_domain.lookup_paths).to eq([pages_lookup_path_b, pages_lookup_path_a])
|
||||
expect(global_ids).to match_array([
|
||||
project_a_main_deployment.to_gid.to_s,
|
||||
project_b_main_deployment.to_gid.to_s,
|
||||
project_c_main_deployment.to_gid.to_s
|
||||
])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there is trim_prefix provided' do
|
||||
let(:trim_prefix) { 'group/' }
|
||||
let(:project_list) { [project_a, project_b] }
|
||||
context 'when pages multiple versions is enabled' do
|
||||
before do
|
||||
allow(::Gitlab::Pages)
|
||||
.to receive(:multiple_versions_enabled_for?)
|
||||
.and_return(true)
|
||||
end
|
||||
|
||||
it 'returns collection of projects pages lookup paths sorted by prefix in reverse' do
|
||||
expect(virtual_domain.lookup_paths).to eq([pages_lookup_path_b, pages_lookup_path_a])
|
||||
global_ids = virtual_domain.lookup_paths.map do |lookup_path|
|
||||
lookup_path.source[:global_id]
|
||||
end
|
||||
|
||||
expect(global_ids).to match_array([
|
||||
project_a_main_deployment.to_gid.to_s,
|
||||
project_a_versioned_deployment.to_gid.to_s,
|
||||
project_b_main_deployment.to_gid.to_s,
|
||||
project_b_versioned_deployment.to_gid.to_s,
|
||||
project_c_main_deployment.to_gid.to_s,
|
||||
project_c_versioned_deployment.to_gid.to_s
|
||||
])
|
||||
end
|
||||
end
|
||||
end
@ -102,6 +102,12 @@ RSpec.describe ::MergeRequests::Mergeability::DetailedMergeStatusService, featur
|
|||
end
|
||||
end
|
||||
|
||||
context 'when the pipeline is pending' do
|
||||
let(:ci_status) { :pending }
|
||||
|
||||
it { expect(detailed_merge_status).to eq(:ci_still_running) }
|
||||
end
|
||||
|
||||
context 'when the pipeline is not running' do
|
||||
let(:ci_status) { :failed }
@ -25,7 +25,7 @@ module PartitioningTesting
|
|||
end
|
||||
end
|
||||
|
||||
Ci::Partitionable::Testing::PARTITIONABLE_MODELS.each do |klass|
|
||||
Ci::Partitionable::Testing.partitionable_models.each do |klass|
|
||||
next if klass == 'Ci::Pipeline'
|
||||
|
||||
model = klass.safe_constantize
@ -27,7 +27,7 @@ module Ci
|
|||
end
|
||||
|
||||
def each_partitionable_table
|
||||
::Ci::Partitionable::Testing::PARTITIONABLE_MODELS.each do |klass|
|
||||
::Ci::Partitionable::Testing.partitionable_models.each do |klass|
|
||||
model = klass.safe_constantize
|
||||
table_name = model.table_name.delete_prefix('p_')
@ -1,6 +1,7 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'yaml'
|
||||
require 'rspec/core/formatters/base_formatter'
|
||||
|
||||
module Support
|
||||
module RspecOrder
@ -28,7 +29,7 @@ module Support
|
|||
end
|
||||
|
||||
def potential_order_dependent?(path)
|
||||
@todo ||= YAML.load_file(TODO_YAML).to_set # rubocop:disable Gitlab/PredicateMemoization
|
||||
@todo ||= YAML.load_file(TODO_YAML).to_set # rubocop:disable Gitlab/PredicateMemoization -- @todo is never `nil` or `false`.
|
||||
|
||||
@todo.include?(path)
|
||||
end
@ -38,23 +39,31 @@ module Support
|
|||
#
|
||||
# Previously, we've modified metadata[:description] directly but that led
|
||||
# to bugs. See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96137
|
||||
module DocumentationFormatterPatch
|
||||
class RSpecFormatter < RSpec::Core::Formatters::BaseFormatter
|
||||
RSpec::Core::Formatters.register self, :example_group_started
|
||||
|
||||
# See https://github.com/rspec/rspec-core/blob/v3.11.0/lib/rspec/core/formatters/documentation_formatter.rb#L24-L29
|
||||
def example_group_started(notification)
|
||||
super
|
||||
|
||||
order = notification.group.metadata[:order]
|
||||
return unless order
|
||||
|
||||
output.puts "#{current_indentation}# order #{order}"
|
||||
output.puts " # order #{order}" if order
|
||||
end
|
||||
|
||||
# Print order information only with `--format documentation`.
|
||||
def self.add_formatter_to(config)
|
||||
documentation_formatter = config.formatters
|
||||
.find { |formatter| formatter.is_a?(RSpec::Core::Formatters::DocumentationFormatter) }
|
||||
return unless documentation_formatter
|
||||
|
||||
config.add_formatter self, documentation_formatter.output
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
RSpec::Core::Formatters::DocumentationFormatter.prepend Support::RspecOrder::DocumentationFormatterPatch
|
||||
|
||||
RSpec.configure do |config|
|
||||
Support::RspecOrder::RSpecFormatter.add_formatter_to(config)
|
||||
|
||||
# Useful to find order-dependent specs.
|
||||
config.register_ordering(:reverse, &:reverse)
@ -0,0 +1,257 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe BulkImports::TransformReferencesWorker, feature_category: :importers do
|
||||
let_it_be(:project) do
|
||||
project = create(:project)
|
||||
project.add_owner(user)
|
||||
project
|
||||
end
|
||||
|
||||
let_it_be(:user) { create(:user) }
|
||||
let_it_be(:bulk_import) { create(:bulk_import) }
|
||||
|
||||
let_it_be(:entity) do
|
||||
create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import,
|
||||
source_full_path: 'source/full/path')
|
||||
end
|
||||
|
||||
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
|
||||
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import, url: 'https://my.gitlab.com') }
|
||||
|
||||
let_it_be_with_refind(:issue) do
|
||||
create(:issue,
|
||||
project: project,
|
||||
description: 'https://my.gitlab.com/source/full/path/-/issues/1')
|
||||
end
|
||||
|
||||
let_it_be(:merge_request) do
|
||||
create(:merge_request,
|
||||
source_project: project,
|
||||
description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @source_username? @bob, @alice!')
|
||||
end
|
||||
|
||||
let_it_be(:issue_note) do
|
||||
create(:note,
|
||||
noteable: issue,
|
||||
project: project,
|
||||
note: 'https://my.gitlab.com/source/full/path/-/issues/1 @older_username, not_a@username, and @old_username.')
|
||||
end
|
||||
|
||||
let_it_be(:merge_request_note) do
|
||||
create(:note,
|
||||
noteable: merge_request,
|
||||
project: project,
|
||||
note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @same_username')
|
||||
end
|
||||
|
||||
let_it_be(:system_note) do
|
||||
create(:note,
|
||||
project: project,
|
||||
system: true,
|
||||
noteable: issue,
|
||||
note: "mentioned in merge request !#{merge_request.iid} created by @old_username",
|
||||
note_html: 'note html'
|
||||
)
|
||||
end
|
||||
|
||||
let(:expected_url) do
|
||||
expected_url = URI('')
|
||||
expected_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
|
||||
expected_url.host = ::Gitlab.config.gitlab.host
|
||||
expected_url.port = ::Gitlab.config.gitlab.port
|
||||
expected_url.path = "/#{project.full_path}"
|
||||
expected_url
|
||||
end
|
||||
|
||||
subject { described_class.new.perform([object.id], object.class.to_s, tracker.id) }
|
||||
|
||||
before do
|
||||
allow(Gitlab::Cache::Import::Caching)
|
||||
.to receive(:values_from_hash)
|
||||
.and_return({
|
||||
'old_username' => 'new_username',
|
||||
'older_username' => 'newer_username',
|
||||
'source_username' => 'destination_username',
|
||||
'bob' => 'alice-gdk',
|
||||
'alice' => 'bob-gdk',
|
||||
'manuelgrabowski' => 'manuelgrabowski-admin',
|
||||
'manuelgrabowski-admin' => 'manuelgrabowski',
|
||||
'boaty-mc-boatface' => 'boatymcboatface',
|
||||
'boatymcboatface' => 'boaty-mc-boatface'
|
||||
})
|
||||
end
|
||||
|
||||
it_behaves_like 'an idempotent worker' do
|
||||
let(:job_args) { [[issue.id], 'Issue', tracker.id] }
|
||||
end
|
||||
|
||||
it 'transforms and saves multiple objects' do
|
||||
old_note = merge_request_note.note
|
||||
merge_request_note_2 = create(:note, noteable: merge_request, project: project, note: old_note)
|
||||
|
||||
described_class.new.perform([merge_request_note.id, merge_request_note_2.id], 'Note', tracker.id)
|
||||
|
||||
expect(merge_request_note.reload.note).not_to eq(old_note)
|
||||
expect(merge_request_note_2.reload.note).not_to eq(old_note)
|
||||
end
|
||||
|
||||
shared_examples 'transforms and saves references' do
|
||||
it 'transforms references and saves the object' do
|
||||
expect_any_instance_of(object.class) do |object|
|
||||
expect(object).to receive(:save!)
|
||||
end
|
||||
|
||||
expect { subject }.not_to change { object.updated_at }
|
||||
|
||||
expect(body).to eq(expected_body)
|
||||
end
|
||||
|
||||
context 'when an error is raised' do
|
||||
before do
|
||||
allow(BulkImports::UsersMapper).to receive(:new).and_raise(StandardError)
|
||||
end
|
||||
|
||||
it 'tracks the error and creates an import failure' do
|
||||
expect(Gitlab::ErrorTracking).to receive(:track_exception)
|
||||
.with(anything, hash_including(bulk_import_id: bulk_import.id))
|
||||
|
||||
expect(BulkImports::Failure).to receive(:create)
|
||||
.with(hash_including(bulk_import_entity_id: entity.id, pipeline_class: 'ReferencesPipeline'))
|
||||
|
||||
subject
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'for issue description' do
|
||||
let(:object) { issue }
|
||||
let(:body) { object.reload.description }
|
||||
let(:expected_body) { "http://localhost:80/#{object.namespace.full_path}/-/issues/1" }
|
||||
|
||||
include_examples 'transforms and saves references'
|
||||
|
||||
shared_examples 'returns object unchanged' do
|
||||
it 'returns object unchanged' do
|
||||
issue.update!(description: description)
|
||||
|
||||
subject
|
||||
|
||||
expect(issue.reload.description).to eq(description)
|
||||
end
|
||||
|
||||
it 'does not save the object' do
|
||||
expect_any_instance_of(object.class) do |object|
|
||||
expect(object).to receive(:save!)
|
||||
end
|
||||
|
||||
subject
|
||||
end
|
||||
end
|
||||
|
||||
context 'when object does not have reference or username' do
|
||||
let(:description) { 'foo' }
|
||||
|
||||
include_examples 'returns object unchanged'
|
||||
end
|
||||
|
||||
context 'when there are no matched urls or usernames' do
|
||||
let(:description) { 'https://my.gitlab.com/another/project/path/-/issues/1 @random_username' }
|
||||
|
||||
include_examples 'returns object unchanged'
|
||||
end
|
||||
|
||||
context 'when url path does not start with source full path' do
|
||||
let(:description) { 'https://my.gitlab.com/another/source/full/path/-/issues/1' }
|
||||
|
||||
include_examples 'returns object unchanged'
|
||||
end
|
||||
|
||||
context 'when host does not match and url path starts with source full path' do
|
||||
let(:description) { 'https://another.gitlab.com/source/full/path/-/issues/1' }
|
||||
|
||||
include_examples 'returns object unchanged'
|
||||
end
|
||||
|
||||
context 'when url does not match at all' do
|
||||
let(:description) { 'https://website.example/foo/bar' }
|
||||
|
||||
include_examples 'returns object unchanged'
|
||||
end
|
||||
end
|
||||
|
||||
context 'for merge request description' do
|
||||
let(:object) { merge_request }
|
||||
let(:body) { object.reload.description }
|
||||
let(:expected_body) do
|
||||
"#{expected_url}/-/merge_requests/#{merge_request.iid} @destination_username? @alice-gdk, @bob-gdk!"
|
||||
end
|
||||
|
||||
include_examples 'transforms and saves references'
|
||||
end
|
||||
|
||||
context 'for issue notes' do
|
||||
let(:object) { issue_note }
|
||||
let(:body) { object.reload.note }
|
||||
let(:expected_body) { "#{expected_url}/-/issues/#{issue.iid} @newer_username, not_a@username, and @new_username." }
|
||||
|
||||
include_examples 'transforms and saves references'
|
||||
end
|
||||
|
||||
context 'for merge request notes' do
|
||||
let(:object) { merge_request_note }
|
||||
let(:body) { object.reload.note }
|
||||
let(:expected_body) { "#{expected_url}/-/merge_requests/#{merge_request.iid} @same_username" }
|
||||
|
||||
include_examples 'transforms and saves references'
|
||||
end
|
||||
|
||||
context 'for system notes' do
|
||||
let(:object) { system_note }
|
||||
let(:body) { object.reload.note }
|
||||
let(:expected_body) { "mentioned in merge request !#{merge_request.iid} created by @new_username" }
|
||||
|
||||
include_examples 'transforms and saves references'
|
||||
|
||||
context 'when the note includes a username' do
|
||||
let_it_be(:object) do
|
||||
create(:note,
|
||||
project: project,
|
||||
system: true,
|
||||
noteable: issue,
|
||||
note: 'mentioned in merge request created by @source_username.',
|
||||
note_html: 'empty'
|
||||
)
|
||||
end
|
||||
|
||||
let(:body) { object.reload.note }
|
||||
let(:expected_body) { 'mentioned in merge request created by @destination_username.' }
|
||||
|
||||
include_examples 'transforms and saves references'
|
||||
end
|
||||
end
|
||||
|
||||
context 'when old and new usernames are interchanged' do
|
||||
# e.g
|
||||
# |------------------------|-------------------------|
|
||||
# | old_username | new_username |
|
||||
# |------------------------|-------------------------|
|
||||
# | @manuelgrabowski-admin | @manuelgrabowski |
|
||||
# | @manuelgrabowski | @manuelgrabowski-admin |
|
||||
# |------------------------|-------------------------|
|
||||
|
||||
let_it_be(:object) do
|
||||
create(:note,
|
||||
project: project,
|
||||
noteable: merge_request,
|
||||
note: '@manuelgrabowski-admin, @boaty-mc-boatface'
|
||||
)
|
||||
end
|
||||
|
||||
let(:body) { object.reload.note }
|
||||
let(:expected_body) { '@manuelgrabowski, @boatymcboatface' }
|
||||
|
||||
include_examples 'transforms and saves references'
|
||||
end
|
||||
end
@ -144,6 +144,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
|
|||
'BulkImports::PipelineWorker' => 3,
|
||||
'BulkImports::PipelineBatchWorker' => 3,
|
||||
'BulkImports::FinishProjectImportWorker' => 5,
|
||||
'BulkImports::TransformReferencesWorker' => 3,
|
||||
'Chaos::CpuSpinWorker' => 3,
|
||||
'Chaos::DbSpinWorker' => 3,
|
||||
'Chaos::KillWorker' => false,