Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-10-05 21:11:33 +00:00
parent c20e6edd8a
commit 91145d427b
107 changed files with 844 additions and 739 deletions

View File

@ -2761,7 +2761,6 @@ RSpec/MissingFeatureCategory:
- 'spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb'
- 'spec/lib/bulk_imports/projects/pipelines/protected_branches_pipeline_spec.rb'

View File

@ -54,7 +54,7 @@ export default {
</script>
<template>
<gl-disclosure-dropdown :items="listItems" dropup data-qa-selector="review_preview_dropdown">
<gl-disclosure-dropdown :items="listItems" dropup data-testid="review-preview-dropdown">
<template #toggle>
<gl-button>
{{ __('Pending comments') }}

View File

@ -40,7 +40,7 @@ export default {
<nav class="review-bar-component js-review-bar" data-testid="review_bar_component">
<div
class="review-bar-content d-flex gl-justify-content-end"
data-qa-selector="review_bar_content"
data-testid="review-bar-content"
>
<preview-dropdown />
<submit-dropdown />

View File

@ -127,7 +127,7 @@ export default {
dropup
class="submit-review-dropdown"
:class="{ 'submit-review-dropdown-animated': shouldAnimateReviewButton }"
data-qa-selector="submit_review_dropdown"
data-testid="submit-review-dropdown"
variant="info"
category="primary"
>
@ -192,7 +192,6 @@ export default {
type="submit"
class="js-no-auto-disable"
data-testid="submit-review-button"
data-qa-selector="submit_review_button"
>
{{ __('Submit review') }}
</gl-button>

View File

@ -26,7 +26,7 @@ export default {
<template>
<gl-dropdown
:text="selectedVersionName"
data-qa-selector="dropdown_content"
data-testid="version-dropdown-content"
size="small"
category="tertiary"
>

View File

@ -90,7 +90,7 @@ export default {
variant="default"
icon="file-tree"
class="gl-mr-3 js-toggle-tree-list btn-icon"
data-qa-selector="file_tree_button"
data-testid="file-tree-button"
:title="toggleFileBrowserTitle"
:aria-label="toggleFileBrowserTitle"
:selected="showTreeList"
@ -141,7 +141,7 @@ export default {
<compare-dropdown-layout
:versions="diffCompareDropdownTargetVersions"
class="mr-version-compare-dropdown"
data-qa-selector="target_version_dropdown"
data-testid="target-version-dropdown"
/>
</template>
<template #source>

View File

@ -302,7 +302,7 @@ export default {
'is-sidebar-moved': glFeatures.movedMrSidebar,
}"
class="js-file-title file-title file-title-flex-parent gl-border"
data-qa-selector="file_title_container"
data-testid="file-title-container"
:data-qa-file-name="filePath"
@click.self="handleToggleFile"
>
@ -432,7 +432,7 @@ export default {
right
toggle-class="btn-icon js-diff-more-actions"
class="gl-pt-0!"
data-qa-selector="dropdown_button"
data-testid="options-dropdown-button"
lazy
@show="setMoreActionsShown(true)"
@hidden="setMoreActionsShown(false)"
@ -459,7 +459,7 @@ export default {
ref="ideEditButton"
:href="diffFile.ide_edit_path"
class="js-ide-edit-blob"
data-qa-selector="edit_in_ide_button"
data-testid="edit-in-ide-button"
target="_blank"
>
{{ __('Open in Web IDE') }}

View File

@ -248,7 +248,6 @@ export default {
:class="$options.classNameMapCellLeft(props)"
data-testid="left-line-number"
class="diff-td diff-line-num"
data-qa-selector="new_diff_line_link"
>
<span
v-if="
@ -266,7 +265,6 @@ export default {
:draggable="!props.line.left.commentsDisabled"
type="button"
class="add-diff-note unified-diff-components-diff-note-button note-button js-add-diff-note-button"
data-qa-selector="diff_comment_button"
:disabled="props.line.left.commentsDisabled"
:aria-disabled="props.line.left.commentsDisabled"
@click="

View File

@ -164,11 +164,7 @@ export default {
</script>
<template>
<div
ref="wrapper"
class="tree-list-holder d-flex flex-column"
data-qa-selector="file_tree_container"
>
<div ref="wrapper" class="tree-list-holder d-flex flex-column" data-testid="file-tree-container">
<div class="gl-pb-3 position-relative tree-list-search d-flex">
<div class="flex-fill d-flex">
<gl-icon name="search" class="gl-absolute gl-top-3 gl-left-3 tree-list-icon" />
@ -181,7 +177,6 @@ export default {
name="diff-tree-search"
class="form-control"
data-testid="diff-tree-search"
data-qa-selector="diff_tree_search"
/>
<button
v-show="search"

View File

@ -73,7 +73,7 @@ export default {
:disabled="disableSubmitButton"
:phrase="confirmationPhrase"
:button-text="confirmButtonText"
button-qa-selector="transfer_group_button"
button-testid="transfer-group-button"
@confirm="$emit('confirm')"
/>
</div>

View File

@ -20,8 +20,7 @@ export default {
name: 'MembersFilteredSearchBar',
components: { FilteredSearchBar },
availableTokens: AVAILABLE_FILTERED_SEARCH_TOKENS,
searchButtonAttributes: { 'data-qa-selector': 'search_button' },
searchInputAttributes: { 'data-qa-selector': 'search_bar_input' },
searchButtonAttributes: { 'data-testid': 'search-button' },
inject: {
namespace: {},
sourceId: {},

View File

@ -406,7 +406,7 @@ export default {
category="primary"
variant="confirm"
class="gl-sm-mr-3 gl-mb-3"
data-qa-selector="start_review_button"
data-testid="start-review-button"
@click="handleAddToReview"
>
<template v-if="hasDrafts">{{ __('Add to review') }}</template>
@ -416,7 +416,7 @@ export default {
:disabled="isDisabled"
category="secondary"
variant="confirm"
data-qa-selector="comment_now_button"
data-testid="comment-now-button"
class="gl-sm-mr-3 gl-mb-3 js-comment-button"
@click="handleUpdate()"
>

View File

@ -184,7 +184,7 @@ export default {
this.currentPath ? encodeURIComponent(this.currentPath) : '',
),
extraAttrs: {
'data-qa-selector': 'new_file_menu_item',
'data-testid': 'new-file-menu-item',
},
},
{
@ -284,7 +284,6 @@ export default {
:toggle-text="__('Add to tree')"
toggle-class="add-to-tree gl-ml-2"
data-testid="add-to-tree"
data-qa-selector="add_to_tree_dropdown"
text-sr-only
icon="plus"
:items="dropdownItems"

View File

@ -53,7 +53,7 @@ export default {
</script>
<template>
<div class="well-segment commit gl-min-h-8 gl-p-5 gl-w-full gl-display-flex">
<div class="well-segment commit gl-min-h-8 gl-p-2 gl-w-full gl-display-flex">
<user-avatar-link
v-if="commit.author"
:link-href="commit.author.webPath"

View File

@ -291,7 +291,7 @@ export default {
>
<div v-if="sourceName">
{{ $options.i18n.forkedFrom }}
<gl-link data-qa-selector="forked_from_link" :href="sourcePath">{{ sourceName }}</gl-link>
<gl-link data-testid="forked-from-link" :href="sourcePath">{{ sourceName }}</gl-link>
<gl-skeleton-loader v-if="isLoading" :lines="1" />
<div v-else class="gl-text-secondary" data-testid="divergence-message">
<gl-sprintf :message="forkDivergenceMessage">

View File

@ -67,7 +67,7 @@ export default {
</gl-link>
</div>
</div>
<div class="blob-viewer" data-qa-selector="blob_viewer_content" itemprop="about">
<div class="blob-viewer" data-testid="blob-viewer-content" itemprop="about">
<gl-loading-icon v-if="isLoading" size="lg" color="dark" class="my-4 mx-auto" />
<div
v-else-if="readme"

View File

@ -206,16 +206,11 @@ export default {
<div class="media-body">
<div
data-testid="pipeline-info-container"
data-qa-selector="merge_request_pipeline_info_content"
class="gl-display-flex gl-flex-wrap gl-align-items-center gl-justify-content-space-between"
>
<p class="mr-pipeline-title gl-m-0! gl-mr-3! gl-font-weight-bold gl-text-gray-900">
{{ pipeline.details.event_type_name }}
<gl-link
:href="pipeline.path"
class="pipeline-id"
data-testid="pipeline-id"
data-qa-selector="pipeline_link"
<gl-link :href="pipeline.path" class="pipeline-id" data-testid="pipeline-id"
>#{{ pipeline.id }}</gl-link
>
{{ pipeline.details.status.label }}

View File

@ -51,7 +51,6 @@ export default {
text: s__('mrWidget|Refresh now'),
onClick: () => this.refresh(),
testId: 'merge-request-failed-refresh-button',
dataQaSelector: 'merge_request_error_content',
},
];
},

View File

@ -0,0 +1,51 @@
<script>
import { GlTooltipDirective } from '@gitlab/ui';
import SafeHtml from '~/vue_shared/directives/safe_html';
import CommitInfo from '~/repository/components/commit_info.vue';
import { calculateBlameOffset, toggleBlameClasses } from '../utils';
export default {
name: 'BlameInfo',
components: {
CommitInfo,
},
directives: {
GlTooltip: GlTooltipDirective,
SafeHtml,
},
props: {
blameData: {
type: Array,
required: true,
},
},
computed: {
blameInfo() {
return this.blameData.map((blame, index) => ({
...blame,
blameOffset: calculateBlameOffset(blame.lineno, index),
}));
},
},
mounted() {
toggleBlameClasses(this.blameData, true);
},
destroyed() {
toggleBlameClasses(this.blameData, false);
},
};
</script>
<template>
<div class="blame gl-bg-gray-10">
<div class="blame-commit gl-border-none!">
<commit-info
v-for="(blame, index) in blameInfo"
:key="index"
:class="{ 'gl-border-t': index !== 0 }"
class="gl-display-flex gl-absolute gl-px-3"
:style="{ top: blame.blameOffset }"
:commit="blame.commit"
/>
</div>
</div>
</template>

View File

@ -0,0 +1,37 @@
const BLAME_INFO_CLASSLIST = ['gl-border-t', 'gl-border-gray-500', 'gl-pt-3!'];
const PADDING_BOTTOM_LARGE = 'gl-pb-6!';
const PADDING_BOTTOM_SMALL = 'gl-pb-3!';
const findLineNumberElement = (lineNumber) => document.getElementById(`L${lineNumber}`);
const findLineContentElement = (lineNumber) => document.getElementById(`LC${lineNumber}`);
export const calculateBlameOffset = (lineNumber) => {
if (lineNumber === 1) return '0px';
const lineContentOffset = findLineContentElement(lineNumber)?.offsetTop;
return `${lineContentOffset}px`;
};
export const toggleBlameClasses = (blameData, isVisible) => {
/**
* Adds/removes classes to line number/content elements to match the line with the blame info
* */
const method = isVisible ? 'add' : 'remove';
blameData.forEach(({ lineno, span }) => {
const lineNumberEl = findLineNumberElement(lineno)?.parentElement;
const lineContentEl = findLineContentElement(lineno);
const lineNumberSpanEl = findLineNumberElement(lineno + span - 1)?.parentElement;
const lineContentSpanEl = findLineContentElement(lineno + span - 1);
lineNumberEl?.classList[method](...BLAME_INFO_CLASSLIST);
lineContentEl?.classList[method](...BLAME_INFO_CLASSLIST);
if (span === 1) {
lineNumberSpanEl?.classList[method](PADDING_BOTTOM_LARGE);
lineContentSpanEl?.classList[method](PADDING_BOTTOM_LARGE);
} else {
lineNumberSpanEl?.classList[method](PADDING_BOTTOM_SMALL);
lineContentSpanEl?.classList[method](PADDING_BOTTOM_SMALL);
}
});
};

View File

@ -1,53 +0,0 @@
# frozen_string_literal: true
module Projects
module HashedStorage
class MigrateRepositoryService < BaseRepositoryService
def execute
try_to_set_repository_read_only!
@old_storage_version = project.storage_version
project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:repository]
@new_disk_path = project.disk_path
result = move_repositories
if result
project.set_full_path
project.track_project_repository
else
rollback_folder_move
project.storage_version = nil
end
project.transaction do
project.save!(validate: false)
project.set_repository_writable!
end
result
rescue Gitlab::Git::CommandError => e
logger.error("Repository #{project.full_path} failed to upgrade (PROJECT_ID=#{project.id}). Git operation failed: #{e.inspect}")
rollback_migration!
false
rescue OpenSSL::Cipher::CipherError => e
logger.error("Repository #{project.full_path} failed to upgrade (PROJECT_ID=#{project.id}). There is a problem with encrypted attributes: #{e.inspect}")
rollback_migration!
false
end
private
def rollback_migration!
rollback_folder_move
project.storage_version = nil
project.set_repository_writable!
end
end
end
end

View File

@ -12,11 +12,6 @@ module Projects
end
def execute
# Migrate repository from Legacy to Hashed Storage
unless project.hashed_storage?(:repository)
return false unless migrate_repository_service.execute
end
# Migrate attachments from Legacy to Hashed Storage
unless project.hashed_storage?(:attachments)
return false unless migrate_attachments_service.execute
@ -27,10 +22,6 @@ module Projects
private
def migrate_repository_service
HashedStorage::MigrateRepositoryService.new(project: project, old_disk_path: old_disk_path, logger: logger)
end
def migrate_attachments_service
HashedStorage::MigrateAttachmentsService.new(project: project, old_disk_path: old_disk_path, logger: logger)
end

View File

@ -49,7 +49,7 @@
= render_if_exists 'groups/templates_setting', expanded: expanded
= render_if_exists 'shared/groups/max_pages_size_setting'
%section.settings.gs-advanced.no-animate#js-advanced-settings{ class: ('expanded' if expanded), data: { qa_selector: 'advanced_settings_content' } }
%section.settings.gs-advanced.no-animate#js-advanced-settings{ class: ('expanded' if expanded), data: { testid: 'advanced-settings-content' } }
.settings-header
%h4.settings-title.js-settings-toggle.js-settings-toggle-trigger-only{ role: 'button' }
= _('Advanced')

View File

@ -1,7 +1,7 @@
- form_id = "transfer-group-form"
- initial_data = { button_text: s_('GroupSettings|Transfer group'), group_full_path: @group.full_path, group_name: @group.name, group_id: @group.id, target_form_id: form_id, is_paid_group: group.paid?.to_s }
= render Pajamas::CardComponent.new(card_options: { class: 'gl-new-card', data: { qa_selector: 'transfer_group_content' } }, header_options: { class: 'gl-new-card-header gl-flex-direction-column' }, body_options: { class: 'gl-new-card-body gl-px-5 gl-py-4' }) do |c|
= render Pajamas::CardComponent.new(card_options: { class: 'gl-new-card', data: { testid: 'transfer-group-content' } }, header_options: { class: 'gl-new-card-header gl-flex-direction-column' }, body_options: { class: 'gl-new-card-body gl-px-5 gl-py-4' }) do |c|
- c.with_header do
.gl-new-card-title-wrapper
%h4.gl-new-card-title.warning-title= s_('GroupSettings|Transfer group')

View File

@ -2,7 +2,7 @@
- namespace = @group || @project&.namespace || @namespace
= webpack_bundle_tag 'tracker'
- if Gitlab.com? && Feature.enabled?(:browsersdk_tracking)
- if Gitlab.com? && Feature.enabled?(:browsersdk_tracking) && Feature.enabled?(:gl_analytics_tracking, Feature.current_request)
= webpack_bundle_tag 'analytics'
= javascript_tag do
:plain

View File

@ -21,7 +21,7 @@
#js-fork-info{ data: vue_fork_divergence_data(project, ref) }
- if is_project_overview && has_project_shortcut_buttons
.project-buttons.gl-mb-5.js-show-on-project-root{ data: { qa_selector: 'project_buttons' } }
.project-buttons.gl-mb-5.js-show-on-project-root{ data: { testid: 'project-buttons' } }
= render 'stat_anchor_list', anchors: @project.statistics_buttons(show_auto_devops_callout: show_auto_devops_callout), project_buttons: true
#js-tree-list{ data: vue_file_list_data(project, ref) }

View File

@ -8,13 +8,13 @@
%div{ class: 'avatar-container rect-avatar s64 home-panel-avatar gl-flex-shrink-0 gl-w-11 gl-h-11 gl-mr-3! float-none' }
= project_icon(@project, alt: @project.name, class: 'avatar avatar-tile s64', width: 64, height: 64, itemprop: 'image')
%div
%h1.home-panel-title.gl-font-size-h1.gl-mt-3.gl-mb-2.gl-display-flex.gl-word-break-word{ data: { qa_selector: 'project_name_content' }, itemprop: 'name' }
%h1.home-panel-title.gl-font-size-h1.gl-mt-3.gl-mb-2.gl-display-flex.gl-word-break-word{ data: { testid: 'project-name-content' }, itemprop: 'name' }
= @project.name
= visibility_level_content(@project, css_class: 'visibility-icon gl-text-secondary gl-ml-2', icon_css_class: 'icon')
= render_if_exists 'compliance_management/compliance_framework/compliance_framework_badge', project: @project, additional_classes: 'gl-align-self-center gl-ml-2'
- if @project.group
= render_if_exists 'shared/tier_badge', source: @project, source_type: 'Project'
.home-panel-metadata.gl-font-sm.gl-text-secondary.gl-font-base.gl-font-weight-normal.gl-line-height-normal{ data: { qa_selector: 'project_id_content' }, itemprop: 'identifier' }
.home-panel-metadata.gl-font-sm.gl-text-secondary.gl-font-base.gl-font-weight-normal.gl-line-height-normal{ data: { testid: 'project-id-content' }, itemprop: 'identifier' }
- if can?(current_user, :read_project, @project)
%span.gl-display-inline-block.gl-vertical-align-middle
= s_('ProjectPage|Project ID: %{project_id}') % { project_id: @project.id }
@ -52,13 +52,13 @@
= render_if_exists "projects/home_mirror"
- if @project.badges.present?
.project-badges.mb-2{ data: { qa_selector: 'project_badges_content' } }
.project-badges.mb-2{ data: { testid: 'project-badges-content' } }
- @project.badges.each do |badge|
- badge_link_url = badge.rendered_link_url(@project)
%a.gl-mr-3{ href: badge_link_url,
target: '_blank',
rel: 'noopener noreferrer',
data: { qa_selector: 'badge_image_link', qa_link_url: badge_link_url } }>
data: { testid: 'badge-image-link', qa_link_url: badge_link_url } }>
%img.project-badge{ src: badge.rendered_image_url(@project),
'aria-hidden': true,
alt: 'Project badge' }>

View File

@ -16,5 +16,5 @@
- if create_mr_button_from_event?(event)
- c.with_actions do
= render Pajamas::ButtonComponent.new(variant: :confirm, href: create_mr_path_from_push_event(event), button_options: { data: { qa_selector: 'create_merge_request_button' }}) do
= render Pajamas::ButtonComponent.new(variant: :confirm, href: create_mr_path_from_push_event(event), button_options: { data: { testid: 'create-merge-request-button' }}) do
= _('Create merge request')

View File

@ -1 +1 @@
= gl_loading_icon(size: "md", css_class: "gl-my-4", data: { qa_selector: 'spinner_placeholder' })
= gl_loading_icon(size: "md", css_class: "gl-my-4", data: { testid: 'spinner-placeholder' })

View File

@ -6,7 +6,7 @@
- if !project.empty_repo? && can?(current_user, :download_code, project)
- archive_prefix = "#{project.path}-#{ref.tr('/', '-')}"
.project-action-button.dropdown.gl-dropdown.inline{ class: css_class }>
= render Pajamas::ButtonComponent.new(button_options: { class: 'dropdown-toggle gl-dropdown-toggle dropdown-icon-only has-tooltip', title: s_('DownloadSource|Download'), 'data-toggle' => 'dropdown', 'aria-label' => s_('DownloadSource|Download'), 'data-display' => 'static', data: { qa_selector: 'download_source_code_button' } }) do
= render Pajamas::ButtonComponent.new(button_options: { class: 'dropdown-toggle gl-dropdown-toggle dropdown-icon-only has-tooltip', title: s_('DownloadSource|Download'), 'data-toggle' => 'dropdown', 'aria-label' => s_('DownloadSource|Download'), 'data-display' => 'static', data: { testid: 'download-source-code-button' } }) do
= sprite_icon('download', css_class: 'gl-icon dropdown-icon')
%span.sr-only= _('Select Archive Format')
= sprite_icon('chevron-down', css_class: 'gl-icon dropdown-chevron')

View File

@ -17,7 +17,7 @@
%p
= _('You can get started by cloning the repository or start adding files to it with one of the following options.')
.project-buttons{ data: { qa_selector: 'quick_actions_container' } }
.project-buttons{ data: { testid: 'quick-actions-container' } }
.project-clone-holder.d-block.d-md-none.gl-mt-3.gl-mr-3
= render "shared/mobile_clone_panel"

View File

@ -1,5 +1,5 @@
.tree-ref-container.gl-display-flex.gl-flex-wrap.gl-gap-2.mb-2.mb-md-0
.tree-ref-holder.gl-max-w-26{ data: { qa_selector: 'ref_dropdown_container' } }
.tree-ref-holder.gl-max-w-26{ data: { testid: 'ref-dropdown-container' } }
#js-tree-ref-switcher{ data: { project_id: @project.id, ref_type: @ref_type.to_s, project_root_path: project_path(@project) } }
#js-repo-breadcrumb{ data: breadcrumb_data_attributes }

View File

@ -10,7 +10,6 @@ module Gitlab
included do
include ApplicationWorker
sidekiq_options retry: 3
include GithubImport::Queue
include ReschedulingMethods

View File

@ -15,14 +15,6 @@ module Gitlab
# this is better than a project being stuck in the "import" state
# forever.
sidekiq_options dead: false, retry: 5
sidekiq_retries_exhausted do |msg, e|
Gitlab::Import::ImportFailureService.track(
project_id: msg['args'][0],
exception: e,
fail_import: true
)
end
end
end
end

View File

@ -3,6 +3,21 @@
module Gitlab
module GithubImport
module StageMethods
extend ActiveSupport::Concern
included do
include ApplicationWorker
sidekiq_retries_exhausted do |msg, e|
Gitlab::Import::ImportFailureService.track(
project_id: msg['args'][0],
exception: e,
error_source: self.class.name,
fail_import: true
)
end
end
# project_id - The ID of the GitLab project to import the data into.
def perform(project_id)
info(project_id, message: 'starting stage')
@ -29,7 +44,8 @@ module Gitlab
project_id: project_id,
exception: e,
error_source: self.class.name,
fail_import: abort_on_failure
fail_import: false,
metrics: true
)
raise(e)
@ -51,10 +67,6 @@ module Gitlab
# rubocop: enable CodeReuse/ActiveRecord
end
def abort_on_failure
false
end
private
def info(project_id, extra = {})
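
A minimal sketch of what a stage worker reduces to after this change, since StageMethods now pulls in ApplicationWorker and the shared sidekiq_retries_exhausted hook; the worker name and import body here are hypothetical and only illustrate the pattern in the worker diffs below, which drop include GithubImport::Queue and their per-worker rescue/abort_on_failure blocks:

module Gitlab
  module GithubImport
    module Stage
      class ExampleStageWorker # hypothetical worker, not part of this commit
        include ApplicationWorker

        data_consistency :always

        sidekiq_options retry: 3

        # Retry-exhaustion tracking and failure reporting now come from the concern,
        # so no GithubImport::Queue include or per-worker rescue block is needed.
        include StageMethods

        # Each real stage worker implements import(client, project), which
        # StageMethods#perform ultimately invokes; shown here as a placeholder.
        def import(client, project)
          # schedule this stage's per-record workers here
        end
      end
    end
  end
end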

View File

@ -7,7 +7,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
# The interval to schedule new instances of this job at.

View File

@ -8,7 +8,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
include StageMethods

View File

@ -8,7 +8,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 5
include GithubImport::Queue
include StageMethods

View File

@ -8,7 +8,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
include StageMethods
@ -31,22 +30,6 @@ module Gitlab
project.import_state.refresh_jid_expiration
ImportPullRequestsWorker.perform_async(project.id)
rescue StandardError => e
Gitlab::Import::ImportFailureService.track(
project_id: project.id,
error_source: self.class.name,
exception: e,
fail_import: abort_on_failure,
metrics: true
)
raise(e)
end
private
def abort_on_failure
true
end
end
end

View File

@ -8,7 +8,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
include StageMethods
@ -24,16 +23,6 @@ module Gitlab
project.import_state.refresh_jid_expiration
move_to_next_stage(project, { waiter.key => waiter.jobs_remaining })
rescue StandardError => e
Gitlab::Import::ImportFailureService.track(
project_id: project.id,
error_source: self.class.name,
exception: e,
fail_import: abort_on_failure,
metrics: true
)
raise(e)
end
private
@ -58,10 +47,6 @@ module Gitlab
project.id, waiters, :pull_requests_merged_by
)
end
def abort_on_failure
true
end
end
end
end

View File

@ -8,7 +8,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
include StageMethods

View File

@ -8,7 +8,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
include StageMethods

View File

@ -8,7 +8,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
include StageMethods

View File

@ -8,7 +8,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
include StageMethods

View File

@ -8,7 +8,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
include StageMethods
@ -27,15 +26,6 @@ module Gitlab
{ waiter.key => waiter.jobs_remaining },
:lfs_objects
)
rescue StandardError => e
Gitlab::Import::ImportFailureService.track(
project_id: project.id,
error_source: self.class.name,
exception: e,
metrics: true
)
raise(e)
end
end
end

View File

@ -8,7 +8,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
include StageMethods

View File

@ -8,7 +8,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
include StageMethods

View File

@ -8,7 +8,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
include StageMethods

View File

@ -8,7 +8,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
include StageMethods
@ -33,16 +32,6 @@ module Gitlab
{ waiter.key => waiter.jobs_remaining },
:collaborators
)
rescue StandardError => e
Gitlab::Import::ImportFailureService.track(
project_id: project.id,
error_source: self.class.name,
exception: e,
fail_import: abort_on_failure,
metrics: true
)
raise(e)
end
private
@ -57,10 +46,6 @@ module Gitlab
MergeRequest.track_target_project_iid!(project, last_github_pull_request[:number])
end
def abort_on_failure
true
end
end
end
end

View File

@ -8,7 +8,6 @@ module Gitlab
data_consistency :always
sidekiq_options retry: 3
include GithubImport::Queue
include StageMethods
@ -34,17 +33,6 @@ module Gitlab
counter.increment
ImportBaseDataWorker.perform_async(project.id)
rescue StandardError => e
Gitlab::Import::ImportFailureService.track(
project_id: project.id,
error_source: self.class.name,
exception: e,
fail_import: abort_on_failure,
metrics: true
)
raise(e)
end
def counter
@ -54,10 +42,6 @@ module Gitlab
)
end
def abort_on_failure
true
end
private
def allocate_issues_internal_id!(project, client)

View File

@ -0,0 +1,8 @@
---
name: bulk_import_idempotent_workers
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/132702
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/426480
milestone: '16.5'
type: development
group: group::import and integrate
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: gl_analytics_tracking
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/132534
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/426106
milestone: '16.5'
type: development
group: group::analytics instrumentation
default_enabled: false

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
class EnsureBackfillForSharedRunnersDurationIsFinished < Gitlab::Database::Migration[2.1]
restrict_gitlab_migration gitlab_schema: :gitlab_ci
disable_ddl_transaction!
TABLE_NAMES = %i[ci_project_monthly_usages ci_namespace_monthly_usages]
def up
TABLE_NAMES.each do |table_name|
ensure_batched_background_migration_is_finished(
job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
table_name: table_name,
column_name: 'id',
job_arguments: [
%w[shared_runners_duration],
%w[shared_runners_duration_convert_to_bigint]
]
)
end
end
def down
# no-op
end
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
class SwapColumnsForCiProjectMonthlyUsagesSharedRunnersDuration < Gitlab::Database::Migration[2.1]
disable_ddl_transaction!
TABLE_NAME = :ci_project_monthly_usages
OLD_COLUMN_NAME = :shared_runners_duration
NEW_COLUMN_NAME = :shared_runners_duration_convert_to_bigint
TEMP_COLUMN_NAME = :temp_shared_runners_duration
def up
swap
end
def down
swap
end
private
def swap
with_lock_retries(raise_on_exhaustion: true) do
execute "LOCK TABLE #{TABLE_NAME} IN ACCESS EXCLUSIVE MODE"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{OLD_COLUMN_NAME} TO #{TEMP_COLUMN_NAME}"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{NEW_COLUMN_NAME} TO #{OLD_COLUMN_NAME}"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{TEMP_COLUMN_NAME} TO #{NEW_COLUMN_NAME}"
end
end
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
class SwapColumnsForCiNamespaceMonthlyUsagesSharedRunnersDuration < Gitlab::Database::Migration[2.1]
disable_ddl_transaction!
TABLE_NAME = :ci_namespace_monthly_usages
OLD_COLUMN_NAME = :shared_runners_duration
NEW_COLUMN_NAME = :shared_runners_duration_convert_to_bigint
TEMP_COLUMN_NAME = :temp_shared_runners_duration
def up
swap
end
def down
swap
end
private
def swap
with_lock_retries(raise_on_exhaustion: true) do
execute "LOCK TABLE #{TABLE_NAME} IN ACCESS EXCLUSIVE MODE"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{OLD_COLUMN_NAME} TO #{TEMP_COLUMN_NAME}"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{NEW_COLUMN_NAME} TO #{OLD_COLUMN_NAME}"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{TEMP_COLUMN_NAME} TO #{NEW_COLUMN_NAME}"
end
end
end

View File

@ -0,0 +1 @@
19af567b51abfdb299cdf9a988c484cdb4faf13d666bcedeed465f38d2723f27

View File

@ -0,0 +1 @@
c9422a60a6b5397191b8c6fdb01f006db530b3d78e5d1d4034ac64c21c90c0b4

View File

@ -0,0 +1 @@
8ee432895b4acd47a202f3bc656aae0f248d8bad45d4ff77db0bf1268a054304

View File

@ -13677,10 +13677,10 @@ CREATE TABLE ci_namespace_monthly_usages (
namespace_id bigint NOT NULL,
date date NOT NULL,
notification_level smallint DEFAULT 100 NOT NULL,
shared_runners_duration integer DEFAULT 0 NOT NULL,
shared_runners_duration_convert_to_bigint integer DEFAULT 0 NOT NULL,
created_at timestamp with time zone,
amount_used numeric(18,4) DEFAULT 0.0 NOT NULL,
shared_runners_duration_convert_to_bigint bigint DEFAULT 0 NOT NULL,
shared_runners_duration bigint DEFAULT 0 NOT NULL,
CONSTRAINT ci_namespace_monthly_usages_year_month_constraint CHECK ((date = date_trunc('month'::text, (date)::timestamp with time zone)))
);
@ -13964,10 +13964,10 @@ CREATE TABLE ci_project_monthly_usages (
id bigint NOT NULL,
project_id bigint NOT NULL,
date date NOT NULL,
shared_runners_duration integer DEFAULT 0 NOT NULL,
shared_runners_duration_convert_to_bigint integer DEFAULT 0 NOT NULL,
created_at timestamp with time zone,
amount_used numeric(18,4) DEFAULT 0.0 NOT NULL,
shared_runners_duration_convert_to_bigint bigint DEFAULT 0 NOT NULL,
shared_runners_duration bigint DEFAULT 0 NOT NULL,
CONSTRAINT ci_project_monthly_usages_year_month_constraint CHECK ((date = date_trunc('month'::text, (date)::timestamp with time zone)))
);

View File

@ -18452,6 +18452,7 @@ Returns [`VulnerabilitySeveritiesCount`](#vulnerabilityseveritiescount).
| <a id="groupvulnerabilityseveritiescountclusteragentid"></a>`clusterAgentId` | [`[ClustersAgentID!]`](#clustersagentid) | Filter vulnerabilities by `cluster_agent_id`. Vulnerabilities with a `reportType` of `cluster_image_scanning` are only included with this filter. |
| <a id="groupvulnerabilityseveritiescountdismissalreason"></a>`dismissalReason` | [`[VulnerabilityDismissalReason!]`](#vulnerabilitydismissalreason) | Filter by dismissal reason. |
| <a id="groupvulnerabilityseveritiescounthasissues"></a>`hasIssues` | [`Boolean`](#boolean) | Filter vulnerabilities that do or do not have issues. |
| <a id="groupvulnerabilityseveritiescounthasmergerequest"></a>`hasMergeRequest` | [`Boolean`](#boolean) | Filter vulnerabilities that do or do not have a merge request. |
| <a id="groupvulnerabilityseveritiescounthasresolution"></a>`hasResolution` | [`Boolean`](#boolean) | Filter vulnerabilities that do or do not have a resolution. |
| <a id="groupvulnerabilityseveritiescountimage"></a>`image` | [`[String!]`](#string) | Filter vulnerabilities by location image. When this filter is present, the response only matches entries for a `reportType` that includes `container_scanning`, `cluster_image_scanning`. |
| <a id="groupvulnerabilityseveritiescountprojectid"></a>`projectId` | [`[ID!]`](#id) | Filter vulnerabilities by project. |
@ -18948,6 +18949,7 @@ Returns [`VulnerabilitySeveritiesCount`](#vulnerabilityseveritiescount).
| <a id="instancesecuritydashboardvulnerabilityseveritiescountclusteragentid"></a>`clusterAgentId` | [`[ClustersAgentID!]`](#clustersagentid) | Filter vulnerabilities by `cluster_agent_id`. Vulnerabilities with a `reportType` of `cluster_image_scanning` are only included with this filter. |
| <a id="instancesecuritydashboardvulnerabilityseveritiescountdismissalreason"></a>`dismissalReason` | [`[VulnerabilityDismissalReason!]`](#vulnerabilitydismissalreason) | Filter by dismissal reason. |
| <a id="instancesecuritydashboardvulnerabilityseveritiescounthasissues"></a>`hasIssues` | [`Boolean`](#boolean) | Filter vulnerabilities that do or do not have issues. |
| <a id="instancesecuritydashboardvulnerabilityseveritiescounthasmergerequest"></a>`hasMergeRequest` | [`Boolean`](#boolean) | Filter vulnerabilities that do or do not have a merge request. |
| <a id="instancesecuritydashboardvulnerabilityseveritiescounthasresolution"></a>`hasResolution` | [`Boolean`](#boolean) | Filter vulnerabilities that do or do not have a resolution. |
| <a id="instancesecuritydashboardvulnerabilityseveritiescountimage"></a>`image` | [`[String!]`](#string) | Filter vulnerabilities by location image. When this filter is present, the response only matches entries for a `reportType` that includes `container_scanning`, `cluster_image_scanning`. |
| <a id="instancesecuritydashboardvulnerabilityseveritiescountprojectid"></a>`projectId` | [`[ID!]`](#id) | Filter vulnerabilities by project. |
@ -23421,6 +23423,7 @@ Returns [`VulnerabilitySeveritiesCount`](#vulnerabilityseveritiescount).
| <a id="projectvulnerabilityseveritiescountclusteragentid"></a>`clusterAgentId` | [`[ClustersAgentID!]`](#clustersagentid) | Filter vulnerabilities by `cluster_agent_id`. Vulnerabilities with a `reportType` of `cluster_image_scanning` are only included with this filter. |
| <a id="projectvulnerabilityseveritiescountdismissalreason"></a>`dismissalReason` | [`[VulnerabilityDismissalReason!]`](#vulnerabilitydismissalreason) | Filter by dismissal reason. |
| <a id="projectvulnerabilityseveritiescounthasissues"></a>`hasIssues` | [`Boolean`](#boolean) | Filter vulnerabilities that do or do not have issues. |
| <a id="projectvulnerabilityseveritiescounthasmergerequest"></a>`hasMergeRequest` | [`Boolean`](#boolean) | Filter vulnerabilities that do or do not have a merge request. |
| <a id="projectvulnerabilityseveritiescounthasresolution"></a>`hasResolution` | [`Boolean`](#boolean) | Filter vulnerabilities that do or do not have a resolution. |
| <a id="projectvulnerabilityseveritiescountimage"></a>`image` | [`[String!]`](#string) | Filter vulnerabilities by location image. When this filter is present, the response only matches entries for a `reportType` that includes `container_scanning`, `cluster_image_scanning`. |
| <a id="projectvulnerabilityseveritiescountprojectid"></a>`projectId` | [`[ID!]`](#id) | Filter vulnerabilities by project. |

View File

@ -12,19 +12,19 @@ GitLab is creating AI-assisted features across our DevSecOps platform. These fea
| Feature | Purpose | Large Language Model | Current availability | Maturity |
|-|-|-|-|-|
| [Suggested Reviewers](project/merge_requests/reviews/index.md#gitlab-duo-suggested-reviewers) | Assists in creating faster and higher-quality reviews by automatically suggesting reviewers for your merge request. | GitLab creates a machine learning model for each project, which is used to generate reviewers <br><br> [View the issue](https://gitlab.com/gitlab-org/modelops/applied-ml/applied-ml-updates/-/issues/10) | SaaS only | [Generally Available (GA)](../policy/experiment-beta-support.md#generally-available-ga) |
| [Code Suggestions](project/repository/code_suggestions/index.md) | Helps you write code more efficiently by viewing code suggestions as you type. | [Google Vertex Codey APIs](https://cloud.google.com/vertex-ai/docs/generative-ai/code/code-models-overview) | SaaS <br> Self-managed | [Beta](../policy/experiment-beta-support.md#beta) |
| [Vulnerability summary](application_security/vulnerabilities/index.md#explaining-a-vulnerability) | Helps you remediate vulnerabilities more efficiently, uplevel your skills, and write more secure code. | [Google Vertex Codey APIs](https://cloud.google.com/vertex-ai/docs/generative-ai/code/code-models-overview) <br><br> Anthropic's claude model if degraded performance | SaaS only <br><br> Ultimate tier | [Beta](../policy/experiment-beta-support.md#beta) |
| [Code explanation](#explain-code-in-the-web-ui-with-code-explanation) | Helps you understand code by explaining it in English language. | [Google Vertex Codey APIs](https://cloud.google.com/vertex-ai/docs/generative-ai/code/code-models-overview) | SaaS only <br><br> Ultimate tier | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Code Suggestions](project/repository/code_suggestions/index.md) | Helps you write code more efficiently by viewing code suggestions as you type. | [`code-gecko`](https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/code-completion) and [`code-bison`](https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/code-generation) | SaaS <br> Self-managed | [Beta](../policy/experiment-beta-support.md#beta) |
| [Vulnerability summary](application_security/vulnerabilities/index.md#explaining-a-vulnerability) | Helps you remediate vulnerabilities more efficiently, uplevel your skills, and write more secure code. | [`text-bison`](https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/text) <br><br> Anthropic's claude model if degraded performance | SaaS only <br><br> Ultimate tier | [Beta](../policy/experiment-beta-support.md#beta) |
| [Code explanation](#explain-code-in-the-web-ui-with-code-explanation) | Helps you understand code by explaining it in English language. | [`codechat-bison`](https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/code-chat) | SaaS only <br><br> Ultimate tier | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [GitLab Duo Chat](#answer-questions-with-gitlab-duo-chat) | Process and generate text and code in a conversational manner. Helps you quickly identify useful information in large volumes of text in issues, epics, code, and GitLab documentation. | Anthropic's claude model <br><br> OpenAI Embeddings | SaaS only <br><br> Ultimate tier | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Value stream forecasting](#forecast-deployment-frequency-with-value-stream-forecasting) | Assists you with predicting productivity metrics and identifying anomalies across your software development lifecycle. | Statistical forecasting | SaaS only | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Discussion summary](#summarize-issue-discussions-with-discussion-summary) | Assists with quickly getting everyone up to speed on lengthy conversations to help ensure you are all on the same page. | [Google Vertex Codey APIs](https://cloud.google.com/vertex-ai/docs/generative-ai/code/code-models-overview) | SaaS only | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Merge request summary](project/merge_requests/ai_in_merge_requests.md#summarize-merge-request-changes) | Efficiently communicate the impact of your merge request changes. | [Google Vertex Codey APIs](https://cloud.google.com/vertex-ai/docs/generative-ai/code/code-models-overview) | SaaS only | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Code review summary](project/merge_requests/ai_in_merge_requests.md#summarize-my-merge-request-review) | Helps ease merge request handoff between authors and reviewers and help reviewers efficiently understand suggestions. | [Google Vertex Codey APIs](https://cloud.google.com/vertex-ai/docs/generative-ai/code/code-models-overview) | SaaS only | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Merge request template population](project/merge_requests/ai_in_merge_requests.md#fill-in-merge-request-templates) | Generate a description for the merge request based on the contents of the template. | [Google Vertex Codey APIs](https://cloud.google.com/vertex-ai/docs/generative-ai/code/code-models-overview) | SaaS only | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Test generation](project/merge_requests/ai_in_merge_requests.md#generate-suggested-tests-in-merge-requests) | Automates repetitive tasks and helps catch bugs early. | [Google Vertex Codey APIs](https://cloud.google.com/vertex-ai/docs/generative-ai/code/code-models-overview) | SaaS only | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Discussion summary](#summarize-issue-discussions-with-discussion-summary) | Assists with quickly getting everyone up to speed on lengthy conversations to help ensure you are all on the same page. | OpenAI's GPT-3 | SaaS only | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Merge request summary](project/merge_requests/ai_in_merge_requests.md#summarize-merge-request-changes) | Efficiently communicate the impact of your merge request changes. | [`text-bison`](https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/text) | SaaS only | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Code review summary](project/merge_requests/ai_in_merge_requests.md#summarize-my-merge-request-review) | Helps ease merge request handoff between authors and reviewers and help reviewers efficiently understand suggestions. | [`text-bison`](https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/text) | SaaS only | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Merge request template population](project/merge_requests/ai_in_merge_requests.md#fill-in-merge-request-templates) | Generate a description for the merge request based on the contents of the template. | [`text-bison`](https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/text) | SaaS only | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Test generation](project/merge_requests/ai_in_merge_requests.md#generate-suggested-tests-in-merge-requests) | Automates repetitive tasks and helps catch bugs early. | [`text-bison`](https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/text) | SaaS only | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Git suggestions](https://gitlab.com/gitlab-org/gitlab/-/issues/409636) | Helps you discover or recall Git commands when and where you need them. | [Google Vertex Codey APIs](https://cloud.google.com/vertex-ai/docs/generative-ai/code/code-models-overview) | SaaS only | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Root cause analysis](#root-cause-analysis) | Assists you in determining the root cause for a pipeline failure and failed CI/CD build. | [Google Vertex Codey APIs](https://cloud.google.com/vertex-ai/docs/generative-ai/code/code-models-overview) | SaaS only <br><br> Ultimate tier | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Issue description generation](#summarize-an-issue-with-issue-description-generation) | Generate issue descriptions. | [Google Vertex Codey APIs](https://cloud.google.com/vertex-ai/docs/generative-ai/code/code-models-overview) | SaaS only | [Experiment](../policy/experiment-beta-support.md#experiment) |
| [Issue description generation](#summarize-an-issue-with-issue-description-generation) | Generate issue descriptions. | OpenAI's GPT-3 | SaaS only | [Experiment](../policy/experiment-beta-support.md#experiment) |
## Enable AI/ML features

View File

@ -4,7 +4,7 @@ group: Package Registry
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Debian packages in the Package Registry **(FREE ALL EXPERIMENT)**
# Debian packages in the Package Registry **(FREE SELF EXPERIMENT)**
> - Debian API [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/42670) in GitLab 13.5.
> - Debian group API [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/66188) in GitLab 14.2.

View File

@ -33,6 +33,15 @@ module BulkImports
end
end
def already_processed?(data, _)
values = Gitlab::Cache::Import::Caching.values_from_set(cache_key)
values.include?(OpenSSL::Digest::SHA256.hexdigest(data.to_s))
end
def save_processed_entry(data, _)
Gitlab::Cache::Import::Caching.set_add(cache_key, OpenSSL::Digest::SHA256.hexdigest(data.to_s))
end
private
def group_badge?(data)

View File

@ -128,6 +128,15 @@ module BulkImports
import_export_config.top_relation_tree(relation)
end
def already_processed?(_, index)
last_index = Gitlab::Cache::Import::Caching.read(cache_key)
last_index && last_index.to_i >= index
end
def save_processed_entry(_, index)
Gitlab::Cache::Import::Caching.write(cache_key, index)
end
def capture_invalid_subrelations(invalid_subrelations)
invalid_subrelations.each do |record|
BulkImports::Failure.create(

View File

@ -5,6 +5,8 @@ module BulkImports
class ExtractedData
attr_reader :data
delegate :each, :each_with_index, to: :data
def initialize(data: nil, page_info: {})
@data = data.is_a?(Enumerator) ? data : Array.wrap(data)
@page_info = page_info
@ -20,10 +22,6 @@ module BulkImports
def next_page
@page_info&.dig('next_page')
end
def each(&block)
data.each(&block)
end
end
end
end

View File

@ -15,7 +15,10 @@ module BulkImports
extracted_data = extracted_data_from
if extracted_data
extracted_data.each do |entry|
extracted_data.each_with_index do |entry, index|
raw_entry = entry.dup
next if Feature.enabled?(:bulk_import_idempotent_workers) && already_processed?(raw_entry, index)
transformers.each do |transformer|
entry = run_pipeline_step(:transformer, transformer.class.name) do
transformer.transform(context, entry)
@ -25,6 +28,8 @@ module BulkImports
run_pipeline_step(:loader, loader.class.name) do
loader.load(context, entry)
end
save_processed_entry(raw_entry, index) if Feature.enabled?(:bulk_import_idempotent_workers)
end
tracker.update!(
@ -73,6 +78,19 @@ module BulkImports
end
end
def cache_key
batch_number = context.extra[:batch_number] || 0
"#{self.class.name.underscore}/#{tracker.bulk_import_entity_id}/#{batch_number}"
end
# Overridden by child pipelines with different caching strategies
def already_processed?(*)
false
end
def save_processed_entry(*); end
def after_run(extracted_data)
run if extracted_data.has_next_page?
end

View File

@ -86,7 +86,7 @@ module Gitlab
def add_browsersdk_tracking(directives)
return if directives.blank?
return unless Gitlab.com? && Feature.enabled?(:browsersdk_tracking) && ENV['GITLAB_ANALYTICS_URL'].present?
return unless Gitlab.com? && ENV['GITLAB_ANALYTICS_URL'].present?
default_connect_src = directives['connect-src'] || directives['default-src']
connect_src_values = Array.wrap(default_connect_src) | [ENV['GITLAB_ANALYTICS_URL']]

View File

@ -120,7 +120,9 @@ module Gitlab
end
def add_browsersdk_tracking
return unless Gitlab.com? && Feature.enabled?(:browsersdk_tracking)
return unless Gitlab.com? && Feature.enabled?(:browsersdk_tracking) && Feature.enabled?(:gl_analytics_tracking,
Feature.current_request)
return if ENV['GITLAB_ANALYTICS_URL'].blank? || ENV['GITLAB_ANALYTICS_ID'].blank?
gon.analytics_url = ENV['GITLAB_ANALYTICS_URL']

View File

@ -11,14 +11,15 @@ module QA
super
base.view 'app/assets/javascripts/members/components/filter_sort/members_filtered_search_bar.vue' do
element :search_bar_input
element :search_button
element 'search-button'
end
end
def search_member(username)
fill_element :search_bar_input, username
click_element :search_button
filter_input = find('.gl-filtered-search-term-input')
filter_input.click
filter_input.set(username)
click_element 'search-button'
end
end
end
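
Nearly all of the frontend and QA page-object changes in this commit follow the same selector migration: data-qa-selector attributes (referenced as snake_case symbols) become data-testid attributes (referenced as kebab-case strings). A before/after sketch of the pattern, using a hypothetical component path and element name for illustration only:

# Before: the Vue template carried data-qa-selector="example_button"
view 'app/assets/javascripts/example/component.vue' do
  element :example_button
end

def click_example
  click_element :example_button
end

# After: the template carries data-testid="example-button" and the page object
# references the element by its kebab-case string
view 'app/assets/javascripts/example/component.vue' do
  element 'example-button'
end

def click_example
  click_element 'example-button'
end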

View File

@ -12,7 +12,7 @@ module QA
view 'app/views/groups/edit.html.haml' do
element :permission_lfs_2fa_content
element :advanced_settings_content
element 'advanced-settings-content'
end
view 'app/views/groups/settings/_permissions.html.haml' do
@ -41,11 +41,11 @@ module QA
end
view 'app/views/groups/settings/_transfer.html.haml' do
element :transfer_group_content
element 'transfer-group-content'
end
view 'app/assets/javascripts/groups/components/transfer_group_form.vue' do
element :transfer_group_button
element 'transfer-group-button'
end
def set_group_name(name)
@ -114,14 +114,14 @@ module QA
def transfer_group(source_group, target_group)
QA::Runtime::Logger.info "Transferring group: #{source_group.path} to target group: #{target_group.path}"
expand_content(:advanced_settings_content)
expand_content('advanced-settings-content')
scroll_to_transfer_group_content
select_namespace(target_group.path)
wait_for_enabled_transfer_group_button
click_element(:transfer_group_button)
click_element('transfer-group-button')
fill_confirmation_text(source_group.full_path)
confirm_transfer
@ -131,15 +131,15 @@ module QA
def scroll_to_transfer_group_content
retry_until(sleep_interval: 1, message: 'Waiting for transfer group content to display') do
has_element?(:transfer_group_content, wait: 3)
has_element?('transfer-group-content', wait: 3)
end
scroll_to_element :transfer_group_content
scroll_to_element 'transfer-group-content'
end
def wait_for_enabled_transfer_group_button
retry_until(sleep_interval: 1, message: 'Waiting for transfer group button to be enabled') do
has_element?(:transfer_group_button, disabled: false, wait: 3)
has_element?('transfer-group-button', disabled: false, wait: 3)
end
end
end

View File

@ -8,36 +8,36 @@ module QA
include Page::Component::Issuable::Sidebar
view 'app/assets/javascripts/batch_comments/components/preview_dropdown.vue' do
element :review_preview_dropdown
element 'review-preview-dropdown'
end
view 'app/assets/javascripts/batch_comments/components/review_bar.vue' do
element :review_bar_content
element 'review-bar-content'
end
view 'app/assets/javascripts/batch_comments/components/submit_dropdown.vue' do
element :submit_review_dropdown
element :submit_review_button
element 'submit-review-dropdown'
element 'submit-review-button'
end
view 'app/assets/javascripts/diffs/components/compare_dropdown_layout.vue' do
element :dropdown_content
element 'version-dropdown-content'
end
view 'app/assets/javascripts/diffs/components/compare_versions.vue' do
element :target_version_dropdown
element :file_tree_button
element 'target-version-dropdown'
element 'file-tree-button'
end
view 'app/assets/javascripts/diffs/components/tree_list.vue' do
element :file_tree_container
element :diff_tree_search
element 'file-tree-container'
element 'diff-tree-search'
end
view 'app/assets/javascripts/diffs/components/diff_file_header.vue' do
element :file_title_container
element :dropdown_button
element :edit_in_ide_button
element 'file-title-container'
element 'options-dropdown-button'
element 'edit-in-ide-button'
end
view 'app/assets/javascripts/vue_shared/components/file_row.vue' do
@ -45,13 +45,13 @@ module QA
end
view 'app/assets/javascripts/diffs/components/diff_row.vue' do
element :diff_comment_button
element :new_diff_line_link
element 'left-comment-button'
element 'left-line-number'
end
view 'app/assets/javascripts/notes/components/note_form.vue' do
element :start_review_button
element :comment_now_button
element 'start-review-button'
element 'comment-now-button'
end
view 'app/views/projects/merge_requests/_code_dropdown.html.haml' do
@ -62,12 +62,12 @@ module QA
end
view 'app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline.vue' do
element :merge_request_pipeline_info_content
element :pipeline_link
element 'pipeline-info-container'
element 'pipeline-id'
end
view 'app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_failed_to_merge.vue' do
element :merge_request_error_content
element 'merge-request-failed-refresh-button'
end
view 'app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merged.vue' do
@ -150,19 +150,19 @@ module QA
end
def start_review
click_element(:start_review_button)
click_element('start-review-button')
# After clicking the button, wait for it to disappear
# before moving on to the next part of the test
has_no_element?(:start_review_button)
has_no_element?('start-review-button')
end
def click_target_version_dropdown
click_element(:target_version_dropdown)
click_element('target-version-dropdown')
end
def version_dropdown_content
find_element(:dropdown_content).text
find_element('version-dropdown-content').text
end
def submit_pending_reviews
@ -174,17 +174,17 @@ module QA
end
end
within_element(:review_bar_content) do
click_element(:review_preview_dropdown)
within_element('review-bar-content') do
click_element('review-preview-dropdown')
end
click_element(:submit_review_dropdown)
click_element(:submit_review_button)
click_element('submit-review-dropdown')
click_element('submit-review-button')
# After clicking the button, wait for the review bar to disappear
# before moving on to the next part of the test
wait_until(reload: false) do
has_no_element?(:review_bar_content)
has_no_element?('review-bar-content')
end
end
@ -193,8 +193,8 @@ module QA
has_css?('a[data-linenumber="1"]')
end
all_elements(:new_diff_line_link, minimum: 1).first.hover
click_element(:diff_comment_button)
all_elements('left-line-number', minimum: 1).first.hover
click_element('left-comment-button')
click_element(:dismiss_suggestion_popover_button) if has_element?(:dismiss_suggestion_popover_button, wait: 1)
fill_element('reply-field', text)
@ -216,7 +216,7 @@ module QA
end
def click_pipeline_link
click_element(:pipeline_link)
click_element('pipeline-id')
end
def edit!
@ -248,11 +248,11 @@ module QA
def search_file_tree(file_name)
open_file_tree
fill_element(:diff_tree_search, file_name)
fill_element('diff-tree-search', file_name)
end
def open_file_tree
click_element(:file_tree_button) if has_no_element?(:file_tree_container, wait: 1)
click_element('file-tree-button') if has_no_element?('file-tree-container', wait: 1)
end
def has_merge_button?
@ -275,7 +275,7 @@ module QA
def has_pipeline_status?(text)
# Pipelines can be slow, so we wait a bit longer than the usual 10 seconds
wait_until(max_duration: 120, sleep_interval: 5, reload: true) do
has_element?(:merge_request_pipeline_info_content, text: text, wait: 15)
has_element?('pipeline-info-container', text: text, wait: 15)
end
end
@ -423,7 +423,7 @@ module QA
def wait_for_merge_request_error_message
wait_until(max_duration: 30, reload: false) do
has_element?(:merge_request_error_content)
has_element?('merge-request-failed-refresh-button')
end
end
@ -438,21 +438,21 @@ module QA
end
def edit_file_in_web_ide(file_name)
within_element(:file_title_container, file_name: file_name) do
click_element(:dropdown_button)
click_element(:edit_in_ide_button)
within_element('file-title-container', file_name: file_name) do
click_element('options-dropdown-button')
click_element('edit-in-ide-button')
end
page.driver.browser.switch_to.window(page.driver.browser.window_handles.last)
end
def add_suggestion_to_diff(suggestion, line)
find("a[data-linenumber='#{line}']").hover
click_element(:diff_comment_button)
click_element('left-comment-button')
click_element(:suggestion_button)
initial_content = find_element('reply-field').value
fill_element('reply-field', '')
fill_element('reply-field', initial_content.gsub(/(```suggestion:-0\+0\n).*(\n```)/, "\\1#{suggestion}\\2"))
click_element(:comment_now_button)
click_element('comment-now-button')
wait_for_requests
end

View File

@ -14,7 +14,7 @@ module QA
prepend Mobile::Page::Project::Show if Runtime::Env.phone_layout?
view 'app/assets/javascripts/repository/components/preview/index.vue' do
element :blob_viewer_content
element 'blob-viewer-content'
end
view 'app/assets/javascripts/repository/components/table/row.vue' do
@ -30,23 +30,22 @@ module QA
end
view 'app/views/projects/_last_push.html.haml' do
element :create_merge_request_button
element 'create-merge-request-button'
end
view 'app/views/projects/_home_panel.html.haml' do
element :project_name_content
element :project_id_content
element :project_badges_content
element :badge_image_link
element 'project-name-content'
element 'project-id-content'
element 'project-badges-content'
element 'badge-image-link'
end
view 'app/views/projects/_files.html.haml' do
element :project_buttons
element :tree_holder, '.tree-holder' # rubocop:disable QA/ElementWithPattern
element 'project-buttons'
end
view 'app/assets/javascripts/repository/components/fork_info.vue' do
element :forked_from_link
element 'forked-from-link'
end
view 'app/assets/javascripts/forks/components/forks_button.vue' do
@ -54,39 +53,39 @@ module QA
end
view 'app/views/projects/empty.html.haml' do
element :quick_actions_container
element 'quick-actions-container'
end
view 'app/assets/javascripts/repository/components/breadcrumbs.vue' do
element :add_to_tree_dropdown
element :new_file_menu_item
element 'add-to-tree'
element 'new-file-menu-item'
end
view 'app/views/projects/blob/viewers/_loading.html.haml' do
element :spinner_placeholder
element 'spinner-placeholder'
end
view 'app/views/projects/buttons/_download.html.haml' do
element :download_source_code_button
element 'download-source-code-button'
end
view 'app/views/projects/tree/_tree_header.html.haml' do
element :ref_dropdown_container
element 'ref-dropdown-container'
end
def wait_for_viewers_to_load
has_no_element?(:spinner_placeholder, wait: QA::Support::Repeater::DEFAULT_MAX_WAIT_TIME)
has_no_element?('spinner-placeholder', wait: QA::Support::Repeater::DEFAULT_MAX_WAIT_TIME)
end
def create_first_new_file!
within_element(:quick_actions_container) do
within_element('quick-actions-container') do
click_link_with_text 'New file'
end
end
def create_new_file!
click_element :add_to_tree_dropdown
click_element :new_file_menu_item
click_element 'add-to-tree'
click_element 'new-file-menu-item'
end
# Click by JS is needed to bypass the VSCode Web IDE popover
@ -98,7 +97,7 @@ module QA
end
def forked_from?(parent_project_name)
has_element?(:forked_from_link, text: parent_project_name)
has_element?('forked-from-link', text: parent_project_name)
end
def click_file(filename)
@ -116,7 +115,7 @@ module QA
end
def has_create_merge_request_button?
has_css?(element_selector_css(:create_merge_request_button))
has_css?(element_selector_css('create-merge-request-button'))
end
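# A minimal sketch of the selector mapping exercised by
# has_create_merge_request_button? above: kebab-case element names are
# assumed to resolve to data-testid attributes, while legacy symbol names
# resolved to data-qa-selector. The helper name below is hypothetical and
# is not the framework's element_selector_css implementation.
def qa_selector_for(name)
  if name.is_a?(Symbol)
    %([data-qa-selector="#{name}"]) # legacy attribute, shown for contrast
  else
    %([data-testid="#{name}"])
  end
end
qa_selector_for('create-merge-request-button') # => [data-testid="create-merge-request-button"]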
def has_file?(name)
@ -134,11 +133,11 @@ module QA
end
def has_name?(name)
has_element?(:project_name_content, text: name)
has_element?('project-name-content', text: name)
end
def has_readme_content?(text)
has_element?(:blob_viewer_content, text: text)
has_element?('blob-viewer-content', text: text)
end
def new_merge_request
@ -146,7 +145,7 @@ module QA
has_create_merge_request_button?
end
click_element :create_merge_request_button
click_element 'create-merge-request-button'
end
def open_web_ide!
@ -166,34 +165,28 @@ module QA
end
def project_name
find_element(:project_name_content).text
find_element('project-name-content').text
end
def project_id
find_element(:project_id_content).text.delete('Project ID: ')
find_element('project-id-content').text.delete('Project ID: ')
end
def switch_to_branch(branch_name)
within_element(:ref_dropdown_container) do
within_element('ref-dropdown-container') do
expand_select_list
select_item(branch_name)
end
end
def wait_for_import
wait_until(reload: true) do
has_css?('.tree-holder')
end
end
def has_visible_badge_image_link?(link_url)
within_element(:project_badges_content) do
has_element?(:badge_image_link, link_url: link_url)
within_element('project-badges-content') do
has_element?('badge-image-link', link_url: link_url)
end
end
def has_license?(name)
within_element(:project_buttons) do
within_element('project-buttons') do
has_link?(name)
end
end

View File

@ -0,0 +1,88 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`SourceViewer utils toggleBlameClasses adds classes 1`] = `
<div
class="content"
>
<div
class="gl-border-gray-500 gl-border-t gl-pt-3!"
>
<div
id="reference-0"
>
1
</div>
<div
id="reference-1"
>
2
</div>
<div
id="reference-2"
>
3
</div>
</div>
<div>
<div
class="gl-border-gray-500 gl-border-t gl-pt-3!"
id="reference-3"
>
Content 1
</div>
<div
class="gl-border-gray-500 gl-border-t gl-pt-3!"
id="reference-4"
>
Content 2
</div>
<div
class="gl-border-gray-500 gl-border-t gl-pt-3!"
id="reference-5"
>
Content 3
</div>
</div>
</div>
`;
exports[`SourceViewer utils toggleBlameClasses removes classes 1`] = `
<div
class="content"
>
<div>
<div
id="reference-0"
>
1
</div>
<div
id="reference-1"
>
2
</div>
<div
id="reference-2"
>
3
</div>
</div>
<div>
<div
id="reference-3"
>
Content 1
</div>
<div
id="reference-4"
>
Content 2
</div>
<div
id="reference-5"
>
Content 3
</div>
</div>
</div>
`;

View File

@ -0,0 +1,63 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { setHTMLFixture } from 'helpers/fixtures';
import CommitInfo from '~/repository/components/commit_info.vue';
import BlameInfo from '~/vue_shared/components/source_viewer/components/blame_info.vue';
import * as utils from '~/vue_shared/components/source_viewer/utils';
import { SOURCE_CODE_CONTENT_MOCK, BLAME_DATA_MOCK } from '../mock_data';
describe('BlameInfo component', () => {
let wrapper;
const createComponent = () => {
wrapper = shallowMountExtended(BlameInfo, {
propsData: { blameData: BLAME_DATA_MOCK },
});
};
beforeEach(() => {
setHTMLFixture(SOURCE_CODE_CONTENT_MOCK);
jest.spyOn(utils, 'toggleBlameClasses');
createComponent();
});
const findCommitInfoComponents = () => wrapper.findAllComponents(CommitInfo);
it('adds the necessary classes to the DOM', () => {
expect(utils.toggleBlameClasses).toHaveBeenCalledWith(BLAME_DATA_MOCK, true);
});
it('renders a CommitInfo component for each blame entry', () => {
expect(findCommitInfoComponents().length).toBe(BLAME_DATA_MOCK.length);
});
it.each(BLAME_DATA_MOCK)(
'sets the correct data and positioning for the commitInfo',
({ lineno, commit, index }) => {
const commitInfoComponent = findCommitInfoComponents().at(index);
expect(commitInfoComponent.props('commit')).toEqual(commit);
expect(commitInfoComponent.element.style.top).toBe(utils.calculateBlameOffset(lineno));
},
);
describe('commitInfo component styling', () => {
const borderTopClassName = 'gl-border-t';
it('does not add a top border for the first entry', () => {
expect(findCommitInfoComponents().at(0).element.classList).not.toContain(borderTopClassName);
});
it('adds a top border for the rest of the entries', () => {
expect(findCommitInfoComponents().at(1).element.classList).toContain(borderTopClassName);
expect(findCommitInfoComponents().at(2).element.classList).toContain(borderTopClassName);
});
});
describe('when component is destroyed', () => {
beforeEach(() => wrapper.destroy());
it('resets the DOM to its original state', () => {
expect(utils.toggleBlameClasses).toHaveBeenCalledWith(BLAME_DATA_MOCK, false);
});
});
});

View File

@ -22,3 +22,24 @@ export const CHUNK_2 = {
startingFrom: 70,
blamePath,
};
export const SOURCE_CODE_CONTENT_MOCK = `
<div class="content">
<div>
<div id="L1">1</div>
<div id="L2">2</div>
<div id="L3">3</div>
</div>
<div>
<div id="LC1">Content 1</div>
<div id="LC2">Content 2</div>
<div id="LC3">Content 3</div>
</div>
</div>`;
export const BLAME_DATA_MOCK = [
{ lineno: 1, commit: { author: 'Peter' }, index: 0 },
{ lineno: 2, commit: { author: 'Sarah' }, index: 1 },
{ lineno: 3, commit: { author: 'Peter' }, index: 2 },
];

View File

@ -0,0 +1,35 @@
import { setHTMLFixture } from 'helpers/fixtures';
import {
calculateBlameOffset,
toggleBlameClasses,
} from '~/vue_shared/components/source_viewer/utils';
import { SOURCE_CODE_CONTENT_MOCK, BLAME_DATA_MOCK } from './mock_data';
describe('SourceViewer utils', () => {
beforeEach(() => setHTMLFixture(SOURCE_CODE_CONTENT_MOCK));
const findContent = () => document.querySelector('.content');
describe('calculateBlameOffset', () => {
it('returns an offset of zero if line number === 1', () => {
expect(calculateBlameOffset(1)).toBe('0px');
});
it('calculates an offset for the blame component', () => {
const { offsetTop } = document.querySelector('#LC3');
expect(calculateBlameOffset(3)).toBe(`${offsetTop}px`);
});
});
describe('toggleBlameClasses', () => {
it('adds classes', () => {
toggleBlameClasses(BLAME_DATA_MOCK, true);
expect(findContent()).toMatchSnapshot();
});
it('removes classes', () => {
toggleBlameClasses(BLAME_DATA_MOCK, false);
expect(findContent()).toMatchSnapshot();
});
});
});

View File

@ -33,6 +33,24 @@ RSpec.describe BulkImports::Common::Pipelines::BadgesPipeline do
expect(badge.image_url).to eq(badge_data['image_url'])
end
it 'skips already imported records' do
expect { pipeline.run }.to change(Badge, :count).by(2)
expect { pipeline.run }.to not_change(Badge, :count)
end
context 'with FF bulk_import_idempotent_workers disabled' do
before do
stub_feature_flags(bulk_import_idempotent_workers: false)
end
it 'creates duplicated badges' do
expect { pipeline.run }.to change(Badge, :count).by(2)
expect { pipeline.run }.to change(Badge, :count)
end
end
context 'when project entity' do
let(:first_page) { extracted_data(has_next_page: true) }
let(:last_page) { extracted_data(name: 'badge2', kind: 'project') }

View File

@ -6,6 +6,8 @@ RSpec.describe BulkImports::NdjsonPipeline, feature_category: :importers do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let(:tracker) { instance_double(BulkImports::Tracker, bulk_import_entity_id: 1) }
let(:context) { instance_double(BulkImports::Pipeline::Context, tracker: tracker, extra: { batch_number: 1 }) }
let(:klass) do
Class.new do
@ -13,11 +15,12 @@ RSpec.describe BulkImports::NdjsonPipeline, feature_category: :importers do
relation_name 'test'
attr_reader :portable, :current_user
attr_reader :portable, :current_user, :context
def initialize(portable, user)
def initialize(portable, user, context)
@portable = portable
@current_user = user
@context = context
end
end
end
@ -26,12 +29,29 @@ RSpec.describe BulkImports::NdjsonPipeline, feature_category: :importers do
stub_const('NdjsonPipelineClass', klass)
end
subject { NdjsonPipelineClass.new(group, user) }
subject { NdjsonPipelineClass.new(group, user, context) }
it 'marks pipeline as ndjson' do
expect(NdjsonPipelineClass.file_extraction_pipeline?).to eq(true)
end
describe 'caching' do
it 'saves completed entry in cache' do
subject.save_processed_entry("entry", 10)
expected_cache_key = "ndjson_pipeline_class/1/1"
expect(Gitlab::Cache::Import::Caching.read(expected_cache_key)).to eq("10")
end
it 'identifies completed entries' do
subject.save_processed_entry("entry", 10)
expect(subject.already_processed?("entry", 11)).to be_falsy
expect(subject.already_processed?("entry", 10)).to be_truthy
expect(subject.already_processed?("entry", 9)).to be_truthy
end
end
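# The two examples above pin down the caching contract: save_processed_entry
# records the highest processed index under a key built from the pipeline
# class, entity id and batch number, and already_processed? treats any index
# at or below that value as done. A minimal in-memory sketch of that contract
# (illustrative only; the real code goes through Gitlab::Cache::Import::Caching):
class ProcessedEntryCache
  def initialize
    @last_index = nil
  end
  def save_processed_entry(_entry, index)
    @last_index = index
  end
  def already_processed?(_entry, index)
    !@last_index.nil? && index <= @last_index
  end
end
cache = ProcessedEntryCache.new
cache.save_processed_entry('entry', 10)
cache.already_processed?('entry', 11) # => false
cache.already_processed?('entry', 10) # => true
cache.already_processed?('entry', 9)  # => true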
describe '#deep_transform_relation!' do
it 'transforms relation hash' do
transformed = subject.deep_transform_relation!({}, 'test', {}) do |key, hash|
@ -238,7 +258,7 @@ RSpec.describe BulkImports::NdjsonPipeline, feature_category: :importers do
end
context 'when portable is project' do
subject { NdjsonPipelineClass.new(project, user) }
subject { NdjsonPipelineClass.new(project, user, context) }
it 'returns group relation name override' do
expect(subject.relation_key_override('labels')).to eq('project_labels')
@ -254,7 +274,7 @@ RSpec.describe BulkImports::NdjsonPipeline, feature_category: :importers do
end
context 'when portable is project' do
subject { NdjsonPipelineClass.new(project, user) }
subject { NdjsonPipelineClass.new(project, user, context) }
it 'returns project relation factory' do
expect(subject.relation_factory).to eq(Gitlab::ImportExport::Project::RelationFactory)

View File

@ -50,4 +50,18 @@ RSpec.describe BulkImports::Pipeline::ExtractedData do
end
end
end
describe '#each_with_index' do
context 'when block is present' do
it 'yields each data item with index' do
expect { |b| subject.each_with_index(&b) }.to yield_control
end
end
context 'when block is not present' do
it 'returns enumerator' do
expect(subject.each_with_index).to be_instance_of(Enumerator)
end
end
end
end

View File

@ -277,6 +277,115 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
it_behaves_like 'failed pipeline', 'StandardError', 'Error!'
end
it 'saves entry in cache for de-duplication' do
expect_next_instance_of(BulkImports::Extractor) do |extractor|
expect(extractor)
.to receive(:extract)
.with(context)
.and_return(extracted_data)
end
expect_next_instance_of(BulkImports::Transformer) do |transformer|
expect(transformer)
.to receive(:transform)
.with(context, extracted_data.data.first)
.and_return(extracted_data.data.first)
end
expect_next_instance_of(BulkImports::MyPipeline) do |klass|
expect(klass).to receive(:save_processed_entry).with(extracted_data.data.first, anything)
end
subject.run
end
context 'with FF bulk_import_idempotent_workers disabled' do
before do
stub_feature_flags(bulk_import_idempotent_workers: false)
end
it 'does not touch the cache' do
expect_next_instance_of(BulkImports::Extractor) do |extractor|
expect(extractor)
.to receive(:extract)
.with(context)
.and_return(extracted_data)
end
expect_next_instance_of(BulkImports::Transformer) do |transformer|
expect(transformer)
.to receive(:transform)
.with(context, extracted_data.data.first)
.and_return(extracted_data.data.first)
end
expect_next_instance_of(BulkImports::MyPipeline) do |klass|
expect(klass).not_to receive(:save_processed_entry)
end
subject.run
end
end
end
context 'when the entry is already processed' do
before do
allow_next_instance_of(BulkImports::MyPipeline) do |klass|
allow(klass).to receive(:already_processed?).and_return true
end
end
it 'runs pipeline extractor, but not transformer or loader' do
expect_next_instance_of(BulkImports::Extractor) do |extractor|
expect(extractor)
.to receive(:extract)
.with(context)
.and_return(extracted_data)
end
allow_next_instance_of(BulkImports::Transformer) do |transformer|
expect(transformer)
.not_to receive(:transform)
end
allow_next_instance_of(BulkImports::Loader) do |loader|
expect(loader)
.not_to receive(:load)
end
subject.run
end
context 'with FF bulk_import_idempotent_workers disabled' do
before do
stub_feature_flags(bulk_import_idempotent_workers: false)
end
it 'calls extractor, transformer, and loader' do
expect_next_instance_of(BulkImports::Extractor) do |extractor|
expect(extractor)
.to receive(:extract)
.with(context)
.and_return(extracted_data)
end
expect_next_instance_of(BulkImports::Transformer) do |transformer|
expect(transformer)
.to receive(:transform)
.with(context, extracted_data.data.first)
.and_return(extracted_data.data.first)
end
expect_next_instance_of(BulkImports::Loader) do |loader|
expect(loader)
.to receive(:load)
.with(context, extracted_data.data.first)
end
subject.run
end
end
end
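# Hedged sketch of the per-entry flow these examples assert (illustrative, not
# the real BulkImports::Pipeline::Runner): extraction always happens upstream,
# transform/load are skipped for entries already recorded, and loaded entries
# are recorded only while bulk_import_idempotent_workers is enabled. The Set
# stands in for the index-based cache shown earlier.
require 'set'
def run_entries(entries, processed, transformer, loader, idempotent:)
  entries.each_with_index do |entry, index|
    next if idempotent && processed.include?(index)
    loader.call(transformer.call(entry))
    processed << index if idempotent
  end
end
processed = Set.new
run_entries(%w[a b], processed, ->(e) { e.upcase }, ->(e) { puts e }, idempotent: true)
run_entries(%w[a b], processed, ->(e) { e.upcase }, ->(e) { puts e }, idempotent: true) # prints nothing: both entries skipped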
context 'when entity is marked as failed' do

View File

@ -43,7 +43,7 @@ RSpec.describe BulkImports::Projects::Pipelines::CiPipelinesPipeline do
subject(:pipeline) { described_class.new(context) }
describe '#run' do
describe '#run', :clean_gitlab_redis_cache do
before do
group.add_owner(user)

View File

@ -36,7 +36,7 @@ RSpec.describe BulkImports::Projects::Pipelines::IssuesPipeline do
subject(:pipeline) { described_class.new(context) }
describe '#run' do
describe '#run', :clean_gitlab_redis_cache do
before do
group.add_owner(user)
issue_with_index = [issue, 0]

View File

@ -83,7 +83,7 @@ RSpec.describe BulkImports::Projects::Pipelines::MergeRequestsPipeline do
subject(:pipeline) { described_class.new(context) }
describe '#run' do
describe '#run', :clean_gitlab_redis_cache do
before do
group.add_owner(user)
group.add_maintainer(another_user)

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe BulkImports::Projects::Pipelines::PipelineSchedulesPipeline do
RSpec.describe BulkImports::Projects::Pipelines::PipelineSchedulesPipeline, :clean_gitlab_redis_cache, feature_category: :importers do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }

View File

@ -134,7 +134,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
end
end
describe '#transform' do
describe '#transform', :clean_gitlab_redis_cache do
it 'updates matching urls and usernames with new ones' do
transformed_mr = subject.transform(context, mr)
transformed_note = subject.transform(context, mr_note)
@ -154,7 +154,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
expect(transformed_system_note.note).not_to include("@old_username")
expect(transformed_username_system_note.note).not_to include("@source_username")
expect(transformed_issue.description).to eq('http://localhost:80/namespace1/project-1/-/issues/1')
expect(transformed_issue.description)
.to eq("http://localhost:80/#{transformed_issue.namespace.full_path}/-/issues/1")
expect(transformed_mr.description).to eq("#{expected_url} @destination_username? @alice-gdk, @bob-gdk!")
expect(transformed_note.note).to eq("#{expected_url} @same_username")
expect(transformed_issue_note.note).to include("@newer_username, not_a@username, and @new_username.")

View File

@ -38,7 +38,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
subject(:pipeline) { described_class.new(context) }
describe '#run' do
describe '#run', :clean_gitlab_redis_cache do
before do
group.add_owner(user)
with_index = [release, 0]

View File

@ -40,7 +40,7 @@ RSpec.describe BulkImports::Projects::Pipelines::SnippetsPipeline do
subject(:pipeline) { described_class.new(context) }
describe '#run' do
describe '#run', :clean_gitlab_redis_cache do
before do
group.add_owner(user)
snippet_with_index = [exported_snippet.dup, 0]

View File

@ -577,17 +577,6 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader, feature_category: :s
end
end
context 'when browsersdk_tracking is disabled' do
before do
stub_feature_flags(browsersdk_tracking: false)
stub_env('GITLAB_ANALYTICS_URL', analytics_url)
end
it 'does not add GITLAB_ANALYTICS_URL to connect-src' do
expect(connect_src).not_to include(analytics_url)
end
end
context 'when GITLAB_ANALYTICS_URL is not set' do
before do
stub_env('GITLAB_ANALYTICS_URL', nil)

View File

@ -206,6 +206,7 @@ RSpec.describe Gitlab::GonHelper do
context 'when feature flag is false' do
before do
stub_feature_flags(browsersdk_tracking: false)
stub_feature_flags(gl_analytics_tracking: false)
end
it "doesn't set the analytics_url and analytics_id" do

View File

@ -1,152 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::HashedStorage::MigrateRepositoryService, feature_category: :groups_and_projects do
let(:gitlab_shell) { Gitlab::Shell.new }
let(:project) { create(:project, :legacy_storage, :repository, :wiki_repo, :design_repo) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::Hashed.new(project) }
subject(:service) { described_class.new(project: project, old_disk_path: project.disk_path) }
describe '#execute' do
let(:old_disk_path) { legacy_storage.disk_path }
let(:new_disk_path) { hashed_storage.disk_path }
before do
allow(service).to receive(:gitlab_shell) { gitlab_shell }
end
context 'repository lock' do
it 'tries to lock the repository' do
expect(service).to receive(:try_to_set_repository_read_only!)
service.execute
end
it 'fails when a git operation is in progress' do
allow(project).to receive(:git_transfer_in_progress?) { true }
expect { service.execute }.to raise_error(Projects::HashedStorage::RepositoryInUseError)
end
end
context 'when repository doesnt exist on disk' do
let(:project) { create(:project, :legacy_storage) }
it 'skips the disk change but increase the version' do
service.execute
expect(project.hashed_storage?(:repository)).to be_truthy
end
end
context 'when succeeds' do
it 'renames project, wiki and design repositories' do
service.execute
expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.git")).to be_truthy
expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_truthy
expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.design.git")).to be_truthy
end
it 'updates project to be hashed and not read-only' do
service.execute
expect(project.hashed_storage?(:repository)).to be_truthy
expect(project.repository_read_only).to be_falsey
end
it 'move operation is called for all repositories' do
expect_move_repository(old_disk_path, new_disk_path)
expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
expect_move_repository("#{old_disk_path}.design", "#{new_disk_path}.design")
service.execute
end
it 'writes project full path to gitaly' do
service.execute
expect(project.repository.full_path).to eq project.full_path
end
end
context 'when exception happens' do
it 'handles OpenSSL::Cipher::CipherError' do
expect(project).to receive(:ensure_runners_token).and_raise(OpenSSL::Cipher::CipherError)
expect { service.execute }.not_to raise_exception
end
it 'ensures rollback when OpenSSL::Cipher::CipherError' do
expect(project).to receive(:ensure_runners_token).and_raise(OpenSSL::Cipher::CipherError)
expect(service).to receive(:rollback_folder_move).and_call_original
service.execute
project.reload
expect(project.legacy_storage?).to be_truthy
expect(project.repository_read_only?).to be_falsey
end
it 'handles Gitlab::Git::CommandError' do
expect(project).to receive(:set_full_path).and_raise(Gitlab::Git::CommandError)
expect { service.execute }.not_to raise_exception
end
it 'ensures rollback when Gitlab::Git::CommandError' do
expect(project).to receive(:set_full_path).and_raise(Gitlab::Git::CommandError)
expect(service).to receive(:rollback_folder_move).and_call_original
service.execute
project.reload
expect(project.legacy_storage?).to be_truthy
expect(project.repository_read_only?).to be_falsey
end
end
context 'when one move fails' do
it 'rollsback repositories to original name' do
allow(service).to receive(:move_repository).and_call_original
allow(service).to receive(:move_repository).with(old_disk_path, new_disk_path).once { false } # will disable first move only
expect(service).to receive(:rollback_folder_move).and_call_original
service.execute
expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.git")).to be_falsey
expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_falsey
expect(gitlab_shell.repository_exists?(project.repository_storage, "#{new_disk_path}.design.git")).to be_falsey
expect(project.repository_read_only?).to be_falsey
end
context 'when rollback fails' do
before do
gitlab_shell.mv_repository(project.repository_storage, old_disk_path, new_disk_path)
end
it 'does not try to move nil repository over existing' do
expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage, old_disk_path, new_disk_path)
expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
expect_move_repository("#{old_disk_path}.design", "#{new_disk_path}.design")
service.execute
end
end
end
it 'works even when project validation fails' do
allow(project).to receive(:valid?) { false }
expect { service.execute }.to change { project.hashed_storage?(:repository) }.to(true)
end
def expect_move_repository(from_name, to_name)
expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage, from_name, to_name).and_call_original
end
end
end

View File

@ -14,43 +14,6 @@ RSpec.describe Projects::HashedStorage::MigrationService, feature_category: :gro
subject(:service) { described_class.new(project, project.full_path, logger: logger) }
describe '#execute' do
context 'repository migration' do
let(:repository_service) do
Projects::HashedStorage::MigrateRepositoryService.new(
project: project,
old_disk_path: project.full_path,
logger: logger
)
end
it 'delegates migration to Projects::HashedStorage::MigrateRepositoryService' do
expect(service).to receive(:migrate_repository_service).and_return(repository_service)
expect(repository_service).to receive(:execute)
service.execute
end
it 'does not delegate migration if repository is already migrated' do
project.storage_version = ::Project::LATEST_STORAGE_VERSION
expect(Projects::HashedStorage::MigrateRepositoryService).not_to receive(:new)
service.execute
end
it 'migrates legacy repositories to hashed storage' do
legacy_attachments_path = FileUploader.absolute_base_dir(project)
hashed_project = project.dup.tap { |p| p.id = project.id }
hashed_project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments]
hashed_attachments_path = FileUploader.absolute_base_dir(hashed_project)
expect(logger).to receive(:info).with(/Repository moved from '#{project_legacy_path}' to '#{project_hashed_path}'/)
expect(logger).to receive(:info).with(/Repository moved from '#{wiki_legacy_path}' to '#{wiki_hashed_path}'/)
expect(logger).to receive(:info).with(/Project attachments moved from '#{legacy_attachments_path}' to '#{hashed_attachments_path}'/)
expect { service.execute }.to change { project.storage_version }.from(nil).to(2)
end
end
context 'attachments migration' do
let(:project) { create(:project, :empty_repo, :wiki_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
@ -62,13 +25,6 @@ RSpec.describe Projects::HashedStorage::MigrationService, feature_category: :gro
)
end
it 'delegates migration to Projects::HashedStorage::MigrateRepositoryService' do
expect(service).to receive(:migrate_attachments_service).and_return(attachments_service)
expect(attachments_service).to receive(:execute)
service.execute
end
it 'does not delegate migration if attachments are already migrated' do
project.storage_version = ::Project::LATEST_STORAGE_VERSION
expect(Projects::HashedStorage::MigrateAttachmentsService).not_to receive(:new)

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
RSpec.shared_examples Gitlab::GithubImport::StageMethods do
describe '.sidekiq_retries_exhausted' do
it 'tracks the exception and marks the import as failed' do
expect(Gitlab::Import::ImportFailureService).to receive(:track)
.with(
project_id: 1,
exception: StandardError,
fail_import: true,
error_source: anything
)
described_class.sidekiq_retries_exhausted_block.call({ 'args' => [1] }, StandardError.new)
end
end
end

View File

@ -92,106 +92,48 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
worker.perform(project.id)
end
context 'when abort_on_failure is false' do
it 'logs error when import fails' do
exception = StandardError.new('some error')
it 'logs error when import fails' do
exception = StandardError.new('some error')
allow(worker)
.to receive(:find_project)
.with(project.id)
.and_return(project)
allow(worker)
.to receive(:find_project)
.with(project.id)
.and_return(project)
expect(worker)
.to receive(:try_import)
.and_raise(exception)
expect(worker)
.to receive(:try_import)
.and_raise(exception)
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
{
message: 'starting stage',
project_id: project.id,
import_stage: 'DummyStage'
}
)
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
{
message: 'starting stage',
project_id: project.id,
import_stage: 'DummyStage'
}
)
expect(Gitlab::Import::ImportFailureService)
.to receive(:track)
.with(
{
project_id: project.id,
exception: exception,
error_source: 'DummyStage',
fail_import: false
}
).and_call_original
expect { worker.perform(project.id) }
.to raise_error(exception)
expect(project.import_state.reload.status).to eq('started')
expect(project.import_failures).not_to be_empty
expect(project.import_failures.last.exception_class).to eq('StandardError')
expect(project.import_failures.last.exception_message).to eq('some error')
end
end
context 'when abort_on_failure is true' do
let(:worker) do
Class.new do
def self.name
'DummyStage'
end
def abort_on_failure
true
end
include(Gitlab::GithubImport::StageMethods)
end.new
end
it 'logs, captures and re-raises the exception and also marks the import as failed' do
exception = StandardError.new('some error')
allow(worker)
.to receive(:find_project)
.with(project.id)
.and_return(project)
expect(worker)
.to receive(:try_import)
.and_raise(exception)
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(
{
message: 'starting stage',
project_id: project.id,
import_stage: 'DummyStage'
}
)
expect(Gitlab::Import::ImportFailureService)
.to receive(:track)
.with(
expect(Gitlab::Import::ImportFailureService)
.to receive(:track)
.with(
{
project_id: project.id,
exception: exception,
error_source: 'DummyStage',
fail_import: true
).and_call_original
fail_import: false,
metrics: true
}
).and_call_original
expect { worker.perform(project.id) }.to raise_error(exception)
expect { worker.perform(project.id) }
.to raise_error(exception)
expect(project.import_state.reload.status).to eq('failed')
expect(project.import_state.last_error).to eq('some error')
expect(project.import_state.reload.status).to eq('started')
expect(project.import_failures).not_to be_empty
expect(project.import_failures.last.exception_class).to eq('StandardError')
expect(project.import_failures.last.exception_message).to eq('some error')
end
expect(project.import_failures).not_to be_empty
expect(project.import_failures.last.exception_class).to eq('StandardError')
expect(project.import_failures.last.exception_message).to eq('some error')
end
end

View File

@ -6,6 +6,8 @@ RSpec.describe Gitlab::GithubImport::Stage::FinishImportWorker, feature_category
let(:project) { create(:project) }
let(:worker) { described_class.new }
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#perform' do
it 'marks the import as finished and reports import statistics' do
expect(project).to receive(:after_import)

View File

@ -13,6 +13,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportAttachmentsWorker, feature_cat
settings.write({ optional_stages: { attachments_import: stage_enabled } })
end
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:importers) do

View File

@ -10,6 +10,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportBaseDataWorker, feature_catego
let(:importer) { double(:importer) }
let(:client) { double(:client) }
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
it 'imports the base data of a project' do
described_class::IMPORTERS.each do |klass|
@ -29,23 +31,5 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportBaseDataWorker, feature_catego
worker.import(client, project)
end
it 'raises an error' do
exception = StandardError.new('_some_error_')
expect_next_instance_of(Gitlab::GithubImport::Importer::LabelsImporter) do |importer|
expect(importer).to receive(:execute).and_raise(exception)
end
expect(Gitlab::Import::ImportFailureService).to receive(:track)
.with(
project_id: project.id,
exception: exception,
error_source: described_class.name,
fail_import: true,
metrics: true
).and_call_original
expect { worker.import(client, project) }.to raise_error(StandardError)
end
end
end

View File

@ -12,6 +12,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportCollaboratorsWorker, feature_c
let(:importer) { instance_double(Gitlab::GithubImport::Importer::CollaboratorsImporter) }
let(:client) { instance_double(Gitlab::GithubImport::Client) }
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
let(:push_rights_granted) { true }
@ -68,23 +70,5 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportCollaboratorsWorker, feature_c
worker.import(client, project)
end
end
it 'raises an error' do
exception = StandardError.new('_some_error_')
expect_next_instance_of(Gitlab::GithubImport::Importer::CollaboratorsImporter) do |importer|
expect(importer).to receive(:execute).and_raise(exception)
end
expect(Gitlab::Import::ImportFailureService).to receive(:track)
.with(
project_id: project.id,
exception: exception,
error_source: described_class.name,
fail_import: true,
metrics: true
).and_call_original
expect { worker.import(client, project) }.to raise_error(StandardError)
end
end
end

View File

@ -14,6 +14,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportIssueEventsWorker, feature_cat
settings.write({ optional_stages: { single_endpoint_issue_events_import: stage_enabled } })
end
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
let(:importer) { instance_double('Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter') }
let(:client) { instance_double('Gitlab::GithubImport::Client') }

View File

@ -13,6 +13,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker, feat
settings.write({ optional_stages: { single_endpoint_notes_import: single_endpoint_optional_stage } })
end
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
it 'imports the issues and diff notes' do
client = double(:client)

View File

@ -6,6 +6,8 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportLfsObjectsWorker, feature_cate
let(:project) { create(:project) }
let(:worker) { described_class.new }
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
it 'imports all the lfs objects' do
importer = double(:importer)

Some files were not shown because too many files have changed in this diff