Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-12-08 03:15:41 +00:00
parent 6352d2f7c2
commit 777ef5e9b2
95 changed files with 683 additions and 1215 deletions

View File

@ -38,3 +38,6 @@ include:
- local: .gitlab/ci/templates/gem.gitlab-ci.yml
inputs:
gem_name: "gitlab-database-load_balancing"
- local: .gitlab/ci/templates/gem.gitlab-ci.yml
inputs:
gem_name: "gitlab-database-lock_retries"

View File

@ -62,9 +62,6 @@ include:
GDK_URL: http://gdk.test:3000
KNAPSACK_TEST_FILE_PATTERN: "qa/specs/features/**/*_spec.rb"
QA_SUITE_STATUS_ENV_FILE: "$CI_PROJECT_DIR/suite_status.env"
# Workaround to avoid enabling feature flags unintentionally
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/137890#note_1681217315
QA_FEATURE_FLAGS: ""
before_script:
- echo "SUITE_RAN=true" > "$QA_SUITE_STATUS_ENV_FILE"
- echo -e "\e[0Ksection_start:`date +%s`:pull_image[collapsed=true]\r\e[0KPull GDK QA image"

View File

@ -1350,7 +1350,6 @@ Layout/ArgumentAlignment:
- 'spec/lib/gitlab/auth/otp/strategies/forti_token_cloud_spec.rb'
- 'spec/lib/gitlab/auth/saml/auth_hash_spec.rb'
- 'spec/lib/gitlab/auth/saml/user_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/project_creator_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/wiki_formatter_spec.rb'
- 'spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb'

View File

@ -146,7 +146,6 @@ Layout/ArrayAlignment:
- 'lib/api/internal/base.rb'
- 'lib/api/statistics.rb'
- 'lib/gitlab/alert_management/payload/prometheus.rb'
- 'lib/gitlab/bitbucket_import/importer.rb'
- 'lib/gitlab/chat/command.rb'
- 'lib/gitlab/checks/single_change_access.rb'
- 'lib/gitlab/ci/config/entry/default.rb'

View File

@ -184,7 +184,6 @@ Layout/FirstHashElementIndentation:
- 'spec/lib/container_registry/client_spec.rb'
- 'spec/lib/gitlab/application_rate_limiter_spec.rb'
- 'spec/lib/gitlab/asciidoc_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- 'spec/lib/gitlab/ci/config_spec.rb'
- 'spec/lib/gitlab/ci/parsers/codequality/code_climate_spec.rb'
- 'spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb'

View File

@ -2511,7 +2511,6 @@ Layout/LineLength:
- 'lib/gitlab/background_migration/migrate_requirements_to_work_items.rb'
- 'lib/gitlab/background_migration/populate_resolved_on_default_branch_column.rb'
- 'lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb'
- 'lib/gitlab/bitbucket_import/importer.rb'
- 'lib/gitlab/buffered_io.rb'
- 'lib/gitlab/bullet/exclusions.rb'
- 'lib/gitlab/cache/helpers.rb'
@ -3595,7 +3594,6 @@ Layout/LineLength:
- 'spec/lib/gitlab/background_migration/job_coordinator_spec.rb'
- 'spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb'
- 'spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- 'spec/lib/gitlab/buffered_io_spec.rb'
- 'spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb'
- 'spec/lib/gitlab/chat/output_spec.rb'

View File

@ -169,7 +169,6 @@ Lint/RedundantCopDisableDirective:
- 'lib/gitlab/background_migration/re_expire_o_auth_tokens.rb'
- 'lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings.rb'
- 'lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url.rb'
- 'lib/gitlab/bitbucket_import/importer.rb'
- 'lib/gitlab/buffered_io.rb'
- 'lib/gitlab/cache/request_cache.rb'
- 'lib/gitlab/ci/build/artifacts/metadata/entry.rb'

View File

@ -375,7 +375,6 @@ Lint/UnusedMethodArgument:
- 'lib/gitlab/background_migration/cleanup_orphaned_routes.rb'
- 'lib/gitlab/background_migration/job_coordinator.rb'
- 'lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb'
- 'lib/gitlab/bitbucket_import/importer.rb'
- 'lib/gitlab/cache/helpers.rb'
- 'lib/gitlab/cache/metrics.rb'
- 'lib/gitlab/ci/ansi2html.rb'

View File

@ -48,7 +48,6 @@ Rails/TimeZone:
- 'spec/lib/gitlab/app_json_logger_spec.rb'
- 'spec/lib/gitlab/app_text_logger_spec.rb'
- 'spec/lib/gitlab/auth/current_user_mode_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- 'spec/lib/gitlab/checks/timed_logger_spec.rb'
- 'spec/lib/gitlab/ci/cron_parser_spec.rb'
- 'spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb'

View File

@ -1575,7 +1575,6 @@ RSpec/ContextWording:
- 'spec/lib/gitlab/avatar_cache_spec.rb'
- 'spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb'
- 'spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- 'spec/lib/gitlab/blame_spec.rb'
- 'spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb'
- 'spec/lib/gitlab/cache/helpers_spec.rb'

View File

@ -86,7 +86,6 @@ RSpec/InstanceVariable:
- 'spec/lib/extracts_ref_spec.rb'
- 'spec/lib/gitlab/auth/auth_finders_spec.rb'
- 'spec/lib/gitlab/auth/ldap/person_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- 'spec/lib/gitlab/chat_name_token_spec.rb'
- 'spec/lib/gitlab/ci/lint_spec.rb'
- 'spec/lib/gitlab/ci/status/composite_spec.rb'

View File

@ -1855,7 +1855,6 @@ RSpec/NamedSubject:
- 'spec/lib/gitlab/background_migration/truncate_overlong_vulnerability_html_titles_spec.rb'
- 'spec/lib/gitlab/background_migration/update_ci_pipeline_artifacts_unknown_locked_status_spec.rb'
- 'spec/lib/gitlab/background_task_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/parallel_importer_spec.rb'
- 'spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb'
- 'spec/lib/gitlab/blame_spec.rb'

View File

@ -114,7 +114,6 @@ RSpec/ReturnFromStub:
- 'spec/lib/gitlab/auth/o_auth/user_spec.rb'
- 'spec/lib/gitlab/auth/saml/user_spec.rb'
- 'spec/lib/gitlab/auth_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- 'spec/lib/gitlab/ci/build/policy/changes_spec.rb'
- 'spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb'
- 'spec/lib/gitlab/ci/status/build/failed_spec.rb'

View File

@ -357,7 +357,6 @@ RSpec/VerifiedDoubles:
- 'spec/lib/gitlab/background_migration/batching_strategies/base_strategy_spec.rb'
- 'spec/lib/gitlab/background_migration/job_coordinator_spec.rb'
- 'spec/lib/gitlab/background_migration_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- 'spec/lib/gitlab/bitbucket_import/project_creator_spec.rb'
- 'spec/lib/gitlab/cache/import/caching_spec.rb'
- 'spec/lib/gitlab/changelog/committer_spec.rb'

View File

@ -427,7 +427,6 @@ Style/GuardClause:
- 'lib/gitlab/auth/o_auth/user.rb'
- 'lib/gitlab/auth/unique_ips_limiter.rb'
- 'lib/gitlab/background_migration/fix_projects_without_project_feature.rb'
- 'lib/gitlab/bitbucket_import/importer.rb'
- 'lib/gitlab/blob_helper.rb'
- 'lib/gitlab/cache/ci/project_pipeline_status.rb'
- 'lib/gitlab/changelog/config.rb'

View File

@ -2377,7 +2377,6 @@ Style/InlineDisableAnnotation:
- 'lib/gitlab/background_migration/update_workspaces_config_version.rb'
- 'lib/gitlab/background_task.rb'
- 'lib/gitlab/base_doorkeeper_controller.rb'
- 'lib/gitlab/bitbucket_import/importer.rb'
- 'lib/gitlab/bitbucket_import/importers/issue_importer.rb'
- 'lib/gitlab/bitbucket_import/importers/issue_notes_importer.rb'
- 'lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer.rb'

View File

@ -1,4 +1,4 @@
import _ from 'lodash';
import { uniqBy, orderBy } from 'lodash';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import Api from '~/api';
import { createAlert } from '~/alert';
@ -52,8 +52,8 @@ export const searchCommits = ({ dispatch, commit, state }, search = {}) => {
};
export const setCommits = ({ commit }, { commits: data, silentAddition = false }) => {
let commits = _.uniqBy(data, 'short_id');
commits = _.orderBy(data, (c) => new Date(c.committed_date), ['desc']);
let commits = uniqBy(data, 'short_id');
commits = orderBy(data, (c) => new Date(c.committed_date), ['desc']);
if (silentAddition) {
commit(types.SET_COMMITS_SILENT, commits);
} else {
@ -125,8 +125,8 @@ export const removeContextCommits = ({ state }, forceReload = false) =>
});
export const setSelectedCommits = ({ commit }, selected) => {
let selectedCommits = _.uniqBy(selected, 'short_id');
selectedCommits = _.orderBy(
let selectedCommits = uniqBy(selected, 'short_id');
selectedCommits = orderBy(
selectedCommits,
(selectedCommit) => new Date(selectedCommit.committed_date),
['desc'],

View File

@ -1,4 +1,4 @@
import _ from 'lodash';
import { isNil } from 'lodash';
import { createAlert, VARIANT_SUCCESS } from '~/alert';
import { darkModeEnabled } from '~/lib/utils/color_utils';
import { base64DecodeUnicode } from '~/lib/utils/text_utility';
@ -181,7 +181,7 @@ function configureDrawIOEditor(drawIOEditorState) {
}
function onDrawIOEditorMessage(drawIOEditorState, editorFacade, evt) {
if (_.isNil(evt) || evt.source !== drawIOEditorState.iframe.contentWindow) {
if (isNil(evt) || evt.source !== drawIOEditorState.iframe.contentWindow) {
return;
}

View File

@ -1,5 +1,5 @@
import Visibility from 'visibilityjs';
import _ from 'lodash';
import { isEmpty } from 'lodash';
import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
@ -25,7 +25,7 @@ const pathWithParams = ({ path, ...params }) => {
return queryString ? `${path}?${queryString}` : path;
};
const commitPaginationData = ({ state, commit, data }) => {
const cursorsGitHubResponse = !_.isEmpty(data.pageInfo || {});
const cursorsGitHubResponse = !isEmpty(data.pageInfo || {});
if (state.provider === PROVIDERS.GITHUB && cursorsGitHubResponse) {
commit(types.SET_PAGE_CURSORS, data.pageInfo);

View File

@ -604,11 +604,7 @@ export default class MergeRequestTabs {
if (!isInVueNoteablePage() || this.cachedPageLayoutClasses) return;
this.cachedPageLayoutClasses = this.pageLayout.className;
this.pageLayout.classList.remove(
'right-sidebar-collapsed',
'right-sidebar-expanded',
'page-with-icon-sidebar',
);
this.pageLayout.classList.remove('right-sidebar-collapsed', 'right-sidebar-expanded');
this.sidebar.style.width = '0px';
}

View File

@ -110,12 +110,12 @@ export default {
</div>
<div
v-if="showParticipantLabel"
class="title hide-collapsed gl-mb-2! gl-line-height-20 gl-font-weight-bold"
class="title hide-collapsed gl-line-height-20 gl-font-weight-bold gl-mb-0!"
>
<gl-loading-icon v-if="loading" size="sm" :inline="true" />
{{ participantLabel }}
</div>
<div class="hide-collapsed gl-display-flex gl-flex-wrap">
<div class="hide-collapsed gl-display-flex gl-flex-wrap gl-mt-2 gl-mb-n3">
<div
v-for="participant in visibleParticipants"
:key="participant.id"

View File

@ -164,7 +164,7 @@ export default {
</gl-button>
</div>
<template v-if="!initialLoading">
<div v-show="!edit" data-testid="collapsed-content" class="gl-line-height-14">
<div v-show="!edit" data-testid="collapsed-content">
<slot name="collapsed">{{ __('None') }}</slot>
</div>
<div v-show="edit" data-testid="expanded-content" :class="{ 'gl-mt-3': !isClassicSidebar }">

View File

@ -58,21 +58,7 @@
}
}
@include media-breakpoint-up(md) {
.page-with-contextual-sidebar {
--application-bar-left: #{$contextual-sidebar-collapsed-width};
}
}
@include media-breakpoint-up(xl) {
.page-with-contextual-sidebar {
--application-bar-left: #{$contextual-sidebar-width};
}
.page-with-icon-sidebar {
--application-bar-left: #{$contextual-sidebar-collapsed-width};
}
.page-with-super-sidebar {
--application-bar-left: #{$super-sidebar-width};
}

View File

@ -194,32 +194,6 @@
}
}
//
// PAGE-LAYOUT
//
.page-with-contextual-sidebar {
transition: padding-left $gl-transition-duration-medium;
@include media-breakpoint-up(md) {
padding-left: $contextual-sidebar-collapsed-width;
}
@include media-breakpoint-up(xl) {
padding-left: $contextual-sidebar-width;
}
.issues-bulk-update.right-sidebar.right-sidebar-expanded .issuable-sidebar-header {
padding: 10px 0 15px;
}
}
.page-with-icon-sidebar {
@include media-breakpoint-up(md) {
padding-left: $contextual-sidebar-collapsed-width;
}
}
//
// THE PANEL
//

View File

@ -1115,10 +1115,6 @@ $tabs-holder-z-index: 250;
border-top: 1px solid var(--border-color, $border-color);
transition: padding $gl-transition-duration-medium;
.page-with-icon-sidebar & {
padding-left: $contextual-sidebar-collapsed-width;
}
@media (max-width: map-get($grid-breakpoints, sm)-1) {
padding-left: 0;
padding-right: 0;

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
module Types
module Ml
# rubocop: disable Graphql/AuthorizeTypes -- authorization in ModelDetailsResolver
class CandidateLinksType < BaseObject
graphql_name 'MLCandidateLinks'
description 'Represents links to perform actions on the candidate'
present_using ::Ml::CandidatePresenter
field :show_path, GraphQL::Types::String,
null: true, description: 'Path to the details page of the candidate.', method: :path
field :artifact_path, GraphQL::Types::String,
null: true, description: 'Path to the artifact.', method: :artifact_path
end
# rubocop: enable Graphql/AuthorizeTypes
end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
module Types
module Ml
# rubocop: disable Graphql/AuthorizeTypes -- authorization in ModelDetailsResolver
class CandidateType < ::Types::BaseObject
graphql_name 'MlCandidate'
description 'Candidate for a model version in the model registry'
connection_type_class Types::LimitedCountableConnectionType
field :id, ::Types::GlobalIDType[::Ml::Candidate], null: false, description: 'ID of the candidate.'
field :name, ::GraphQL::Types::String, null: false, description: 'Name of the candidate.'
field :created_at, Types::TimeType, null: false, description: 'Date of creation.'
field :_links, ::Types::Ml::CandidateLinksType, null: false, method: :itself,
description: 'Map of links to perform actions on the candidate.'
end
# rubocop: enable Graphql/AuthorizeTypes
end
end

View File

@ -13,6 +13,9 @@ module Types
field :versions, ::Types::Ml::ModelVersionType.connection_type, null: true,
description: 'Versions of the model.'
field :candidates, ::Types::Ml::CandidateType.connection_type, null: true,
description: 'Version candidates of the model.'
end
# rubocop: enable Graphql/AuthorizeTypes
end

View File

@ -378,10 +378,6 @@ module ApplicationHelper
external_redirect_path(url: "https://#{url[2]}/@#{url[1]}")
end
def collapsed_sidebar?
cookies["sidebar_collapsed"] == "true"
end
def collapsed_super_sidebar?
return false if @force_desktop_expanded_sidebar

View File

@ -215,7 +215,7 @@ module GroupsHelper
return {} unless max_access_level
GroupMember.access_level_roles.select { |_k, v| v <= max_access_level }
group.access_level_roles.select { |_k, v| v <= max_access_level }
end
def groups_projects_more_actions_dropdown_data(source)

View File

@ -11,28 +11,11 @@ module NavHelper
header_links.include?(link)
end
def page_has_sidebar?
defined?(@left_sidebar) && @left_sidebar
end
def page_has_collapsed_sidebar?
page_has_sidebar? && collapsed_sidebar?
end
def page_has_collapsed_super_sidebar?
page_has_sidebar? && collapsed_super_sidebar?
end
def page_with_sidebar_class
class_name = page_gutter_class
if show_super_sidebar?
class_name << 'page-with-super-sidebar' if page_has_sidebar?
class_name << 'page-with-super-sidebar-collapsed' if page_has_collapsed_super_sidebar?
else
class_name << 'page-with-contextual-sidebar' if page_has_sidebar?
class_name << 'page-with-icon-sidebar' if page_has_collapsed_sidebar?
end
class_name << 'page-with-super-sidebar'
class_name << 'page-with-super-sidebar-collapsed' if collapsed_super_sidebar?
class_name -= ['right-sidebar-expanded'] if defined?(@right_sidebar) && !@right_sidebar

View File

@ -46,27 +46,19 @@ class LabelNote < SyntheticNote
end
def note_text(html: false)
added = labels_str(label_refs_by_action('add', html).uniq, prefix: 'added', suffix: added_suffix)
removed = labels_str(label_refs_by_action('remove', html).uniq, prefix: removed_prefix)
added = labels_str(label_refs_by_action('add', html).uniq, prefix: 'added')
removed = labels_str(label_refs_by_action('remove', html).uniq, prefix: 'removed')
[added, removed].compact.join(' and ')
end
def removed_prefix
'removed'
end
def added_suffix
''
end
# returns string containing added/removed labels including
# count of deleted labels:
#
# added ~1 ~2 + 1 deleted label
# added 3 deleted labels
# added ~1 ~2 labels
def labels_str(label_refs, prefix: '', suffix: '')
def labels_str(label_refs, prefix: '')
existing_refs = label_refs.select { |ref| ref.present? }.sort
refs_str = existing_refs.empty? ? nil : existing_refs.join(' ')
@ -76,7 +68,7 @@ class LabelNote < SyntheticNote
return unless refs_str || deleted_str
label_list_str = [refs_str, deleted_str].compact.join(' + ')
suffix += ' label'.pluralize(deleted > 0 ? deleted : existing_refs.count)
suffix = ' label'.pluralize(deleted > 0 ? deleted : existing_refs.count)
"#{prefix} #{label_list_str} #{suffix.squish}"
end

View File

@ -3,6 +3,7 @@
module Ml
class Candidate < ApplicationRecord
include Sortable
include Presentable
include AtomicInternalId
enum status: { running: 0, scheduled: 1, finished: 2, failed: 3, killed: 4 }
@ -30,6 +31,7 @@ module Ml
scope :including_relationships, -> { includes(:latest_metrics, :params, :user, :package, :project, :ci_build) }
scope :by_name, ->(name) { where("ml_candidates.name LIKE ?", "%#{sanitize_sql_like(name)}%") } # rubocop:disable GitlabSecurity/SqlInjection
scope :without_model_version, -> { where(model_version: nil) }
scope :order_by_metric, ->(metric, direction) do
subquery = Ml::CandidateMetric.latest.where(name: metric)

View File

@ -18,6 +18,7 @@ module Ml
belongs_to :project
belongs_to :user
has_many :versions, class_name: 'Ml::ModelVersion'
has_many :candidates, -> { without_model_version }, class_name: 'Ml::Candidate', through: :default_experiment
has_many :metadata, class_name: 'Ml::ModelMetadata'
has_one :latest_version, -> { latest_by_model }, class_name: 'Ml::ModelVersion', inverse_of: :model
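
A short console sketch of what the new `candidates` association is expected to return; the model lookup is hypothetical, and the scoping comes from the `without_model_version` scope added to `Ml::Candidate` in this commit.

```ruby
# Sketch: candidates of the model's default experiment that have not been
# promoted to a model version yet (scoped by without_model_version).
model = Ml::Model.find_by(name: 'my_model') # hypothetical record
model.candidates       # => ActiveRecord relation of Ml::Candidate
model.candidates.count # surfaced in GraphQL via MlCandidateConnection.count
```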

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
module Ml
class CandidatePresenter < Gitlab::View::Presenter::Delegated
presents ::Ml::Candidate, as: :candidate
def path
project_ml_candidate_path(
candidate.project,
candidate.iid
)
end
def artifact_path
return unless candidate.package_id.present?
project_package_path(candidate.project, candidate.package_id)
end
end
end
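
A hedged usage sketch for the presenter; the candidate record and the resulting paths are assumptions, and the presenter is reached through the `Presentable` concern that this commit includes in `Ml::Candidate`.

```ruby
# Sketch only: assumes a persisted Ml::Candidate; the paths shown are illustrative.
candidate = Ml::Candidate.including_relationships.first
presenter = candidate.present # Presentable resolves this to Ml::CandidatePresenter

presenter.path          # details page, for example "/group/project/-/ml/candidates/<iid>"
presenter.artifact_path # package path when a package is attached, otherwise nil
```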

View File

@ -1,5 +1,3 @@
- if show_super_sidebar?
- @left_sidebar = true
.layout-page{ class: page_with_sidebar_class }
- if show_super_sidebar?
-# Render the parent group sidebar while creating a new subgroup/project, see GroupsController#new.

View File

@ -1,7 +1,6 @@
- page_title _("Admin Area")
- header_title _("Admin Area"), admin_root_path
- nav "admin"
- @left_sidebar = true
-# This active_nav_link check is also used in `app/views/layouts/nav/sidebar/_admin.html.haml`
- is_application_settings = active_nav_link?(controller: [:application_settings, :integrations])

View File

@ -1,6 +1,5 @@
- header_title _("Your work"), root_path
- @left_sidebar = true
- nav (@parent_group ? "group" : "your_work")
= render template: "layouts/application"

View File

@ -1,6 +1,5 @@
- header_title _("Explore"), explore_root_path
- @left_sidebar = true
- nav "explore"
= render template: "layouts/application"

View File

@ -3,7 +3,6 @@
- header_title group_title(@group) unless header_title
- nav "group"
- display_subscription_banner!
- @left_sidebar = true
- base_layout = local_assigns[:base_layout]
- content_for :flash_message do

View File

@ -1,6 +1,5 @@
- page_title @organization.name if @organization
- header_title @organization.name, organization_path(@organization) if @organization
- nav(%w[index new].include?(params[:action]) ? "your_work" : "organization")
- @left_sidebar = true
= render template: "layouts/application"

View File

@ -2,7 +2,6 @@
- header_title _("User Settings"), profile_path unless header_title
- sidebar "dashboard"
- nav "profile"
- @left_sidebar = true
- enable_search_settings locals: { container_class: 'gl-my-5' }

View File

@ -4,7 +4,6 @@
- nav "project"
- page_itemtype 'http://schema.org/SoftwareSourceCode'
- display_subscription_banner!
- @left_sidebar = true
- @content_class = [@content_class, project_classes(@project)].compact.join(" ")
- content_for :flash_message do

View File

@ -2,7 +2,6 @@
- header_title _("Your work"), root_path
- add_to_breadcrumbs _("Snippets"), dashboard_snippets_path
- snippets_upload_path = snippets_upload_path(@snippet, current_user)
- @left_sidebar = true
- if current_user
- nav "your_work"

View File

@ -6,10 +6,8 @@
- page_itemtype 'http://schema.org/Person'
- add_page_specific_style 'page_bundles/profile'
- add_page_specific_style 'page_bundles/projects'
- if show_super_sidebar?
- @left_sidebar = true
- @force_desktop_expanded_sidebar = true
- nav "user_profile"
- @force_desktop_expanded_sidebar = true
- nav "user_profile"
= content_for :meta_tags do
= auto_discovery_link_tag(:atom, user_url(@user, format: :atom), title: "#{@user.name} activity")

View File

@ -1,8 +0,0 @@
---
name: bitbucket_parallel_importer
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/130731
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/423530
milestone: '16.4'
type: development
group: group::import and integrate
default_enabled: false

View File

@ -4,3 +4,4 @@ description: Migrates human user type from old value (nil) to new value (0) for
feature_category: user_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/115849
milestone: '16.0'
finalized_by: 20230523101514

View File

@ -11806,6 +11806,43 @@ The edge type for [`Milestone`](#milestone).
| <a id="milestoneedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="milestoneedgenode"></a>`node` | [`Milestone`](#milestone) | The item at the end of the edge. |
#### `MlCandidateConnection`
The connection type for [`MlCandidate`](#mlcandidate).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mlcandidateconnectionedges"></a>`edges` | [`[MlCandidateEdge]`](#mlcandidateedge) | A list of edges. |
| <a id="mlcandidateconnectionnodes"></a>`nodes` | [`[MlCandidate]`](#mlcandidate) | A list of nodes. |
| <a id="mlcandidateconnectionpageinfo"></a>`pageInfo` | [`PageInfo!`](#pageinfo) | Information to aid in pagination. |
##### Fields with arguments
###### `MlCandidateConnection.count`
Limited count of collection. Returns limit + 1 for counts greater than the limit.
Returns [`Int!`](#int).
####### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mlcandidateconnectioncountlimit"></a>`limit` | [`Int`](#int) | Limit value to be applied to the count query. Default is 1000. |
#### `MlCandidateEdge`
The edge type for [`MlCandidate`](#mlcandidate).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mlcandidateedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="mlcandidateedgenode"></a>`node` | [`MlCandidate`](#mlcandidate) | The item at the end of the edge. |
#### `MlModelVersionConnection`
The connection type for [`MlModelVersion`](#mlmodelversion).
@ -21235,6 +21272,17 @@ Represents an entry from the Cloud License history.
| <a id="locationblobpath"></a>`blobPath` | [`String`](#string) | HTTP URI path to view the input file in GitLab. |
| <a id="locationpath"></a>`path` | [`String`](#string) | Path, relative to the root of the repository, of the file which was analyzed to detect the dependency. |
### `MLCandidateLinks`
Represents links to perform actions on the candidate.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mlcandidatelinksartifactpath"></a>`artifactPath` | [`String`](#string) | Path to the artifact. |
| <a id="mlcandidatelinksshowpath"></a>`showPath` | [`String`](#string) | Path to the details page of the candidate. |
### `MLModelVersionLinks`
Represents links to perform actions on the model version.
@ -22849,6 +22897,19 @@ Contains statistics about a milestone.
| <a id="milestonestatsclosedissuescount"></a>`closedIssuesCount` | [`Int`](#int) | Number of closed issues associated with the milestone. |
| <a id="milestonestatstotalissuescount"></a>`totalIssuesCount` | [`Int`](#int) | Total number of issues associated with the milestone. |
### `MlCandidate`
Candidate for a model version in the model registry.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mlcandidate_links"></a>`_links` | [`MLCandidateLinks!`](#mlcandidatelinks) | Map of links to perform actions on the candidate. |
| <a id="mlcandidatecreatedat"></a>`createdAt` | [`Time!`](#time) | Date of creation. |
| <a id="mlcandidateid"></a>`id` | [`MlCandidateID!`](#mlcandidateid) | ID of the candidate. |
| <a id="mlcandidatename"></a>`name` | [`String!`](#string) | Name of the candidate. |
### `MlModel`
Machine learning model in the model registry.
@ -22857,6 +22918,7 @@ Machine learning model in the model registry.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mlmodelcandidates"></a>`candidates` | [`MlCandidateConnection`](#mlcandidateconnection) | Version candidates of the model. (see [Connections](#connections)) |
| <a id="mlmodelid"></a>`id` | [`MlModelID!`](#mlmodelid) | ID of the model. |
| <a id="mlmodelname"></a>`name` | [`String!`](#string) | Name of the model. |
| <a id="mlmodelversions"></a>`versions` | [`MlModelVersionConnection`](#mlmodelversionconnection) | Versions of the model. (see [Connections](#connections)) |
@ -32380,6 +32442,12 @@ A `MilestoneID` is a global ID. It is encoded as a string.
An example `MilestoneID` is: `"gid://gitlab/Milestone/1"`.
### `MlCandidateID`
A `MlCandidateID` is a global ID. It is encoded as a string.
An example `MlCandidateID` is: `"gid://gitlab/Ml::Candidate/1"`.
### `MlModelID`
A `MlModelID` is a global ID. It is encoded as a string.

View File

@ -6,11 +6,6 @@ info: Any user with at least the Maintainer role can merge updates to this conte
# Bitbucket Cloud importer developer documentation
The Bitbucket Cloud importer can be configured with the `bitbucket_parallel_importer` feature flag. When the feature flag is:
- Enabled, the importer uses Sidekiq to schedule work asynchronously.
- Disabled, the importer does all the work in a single thread.
## Prerequisites
You must be authenticated with Bitbucket:

View File

@ -10,7 +10,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
You can use scan result policies to take action based on scan results. For example, one type of scan
result policy is a security approval policy that allows approval to be required based on the
findings of one or more security scan jobs. Scan result policies are evaluated after a CI scanning job is fully executed.
findings of one or more security scan jobs. Scan result policies are evaluated after a CI scanning job is fully executed, and both vulnerability and license type policies are evaluated based on the job artifact reports published in the completed pipeline.
NOTE:
Scan result policies are applicable only to [protected](../../project/protected_branches.md) target branches.

View File

@ -6,15 +6,8 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Import your project from Bitbucket Cloud **(FREE ALL)**
> Parallel imports from Bitbucket Cloud [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/412614) in GitLab 16.6 [with a flag](../../../administration/feature_flags.md) named `bitbucket_parallel_importer`. Disabled by default.
Import your projects from Bitbucket Cloud to GitLab.
FLAG:
On self-managed GitLab, parallel imports are not available. Parallel imports can help when importing large projects.
To make parallel imports available, an administrator can [enable the feature flag](../../../administration/feature_flags.md)
named `bitbucket_parallel_importer`. On GitLab.com, this feature is available.
The Bitbucket importer can import:
- Repository description

View File

@ -0,0 +1,98 @@
---
stage: Systems
group: Gitaly
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Troubleshooting monorepo performance
Review these suggestions for performance problems with monorepos.
## Slowness during `git clone` or `git fetch`
There are a few key causes of slowness with clones and fetches.
### High CPU utilization
If the CPU utilization on your Gitaly nodes is high, you can check
how much CPU is consumed by clones by [filtering on certain values](../../../../user/project/repository/monorepos/observability.md#cpu-and-memory).
In particular, the `command.cpu_time_ms` field indicates how
much CPU is consumed by clones and fetches.
In most cases, the bulk of server load is generated by `git-pack-objects`
processes, which are initiated during clones and fetches. Monorepos are often very busy,
and CI/CD systems send a lot of clone and fetch commands to the server.
High CPU utilization is therefore a common cause of slow performance.
The following non-mutually exclusive causes are possible:
- [Too many clones for Gitaly to handle](#cause-too-many-large-clones).
- [Poor read distribution on Gitaly Cluster](#cause-poor-read-distribution).
#### Cause: too many large clones
You might have too many large clones for Gitaly to handle. Gitaly can struggle to keep up
because of a number of factors:
- The size of a repository.
- The volume of clones and fetches.
- Lack of CPU capacity.
To help Gitaly process many large clones, you might need to reduce the burden on Gitaly servers with optimization strategies
such as the following:
- Turn on [pack-objects-cache](../../../../administration/gitaly/configure_gitaly.md#pack-objects-cache)
to reduce the work that `git-pack-objects` has to do.
- Change [Git strategy](../../../../user/project/repository/monorepos/index.md#git-strategy)
in CI/CD settings from `clone` to `fetch` or `none`.
- [Stop fetching tags](../../../../user/project/repository/monorepos/index.md#git-fetch-extra-flags),
unless your tests require them.
- [Use shallow clones](../../../../user/project/repository/monorepos/index.md#shallow-cloning)
whenever possible.
The other option is to increase CPU capacity on Gitaly servers.
#### Cause: poor read distribution
You might have poor read distribution on Gitaly Cluster.
To observe if most read traffic is going to the primary Gitaly node instead of
getting distributed across the cluster, use the
[read distribution Prometheus metric](observability.md#read-distribution).
If the secondary Gitaly nodes aren't receiving much traffic, it might be that
the secondary nodes are perpetually out of sync. This problem is exacerbated in
a monorepo.
Monorepos are often both large and busy, which has two effects. First,
because monorepos are pushed to often and have many CI jobs running, write
operations such as deleting a branch can fail a proxy call to the secondary
nodes. This triggers a replication job in Gitaly Cluster so that the secondary
node eventually catches up.
Second, the replication job is essentially a `git fetch` from the secondary
node to the primary node, and because monorepos are often very large, this
fetch can take a long time.
If the next call fails before the previous replication job completes, and this
keeps happening, you can end up in a state where your monorepo is constantly
behind in its secondaries. This leads to all traffic going to the primary node.
One reason for these failed proxied writes is a known issue with the Git
`$GIT_DIR/packed-refs` file. The file must be locked to
remove an entry in the file, which can lead to a race condition that causes a
delete to fail when concurrent deletes happen.
Engineers at GitLab have developed mitigations that batch reference deletions.
To allow GitLab to batch ref deletions, turn on the following [feature flags](../../../../administration/feature_flags.md), as shown in the console sketch after this list.
Enabling these feature flags does not require downtime.
- `merge_request_cleanup_ref_worker_async`
- `pipeline_cleanup_ref_worker_async`
- `pipeline_delete_gitaly_refs_in_batches`
- `merge_request_delete_gitaly_refs_in_batches`
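
A minimal Rails console sketch of enabling these flags, assuming an instance administrator session; `Feature.enable` is the standard GitLab console call for turning a feature flag on instance-wide, and the flag names are taken from the list above.

```ruby
# GitLab Rails console (for example, `sudo gitlab-rails console`).
# Enables each mitigation flag instance-wide; flag names come from the list above.
%w[
  merge_request_cleanup_ref_worker_async
  pipeline_cleanup_ref_worker_async
  pipeline_delete_gitaly_refs_in_batches
  merge_request_delete_gitaly_refs_in_batches
].each { |flag| Feature.enable(flag) }
```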
[Epic 4220](https://gitlab.com/groups/gitlab-org/-/epics/4220) proposes to add RefTable support in GitLab,
which is considered a long-term solution.

View File

@ -0,0 +1,11 @@
/.bundle/
/.yardoc
/_yardoc/
/coverage/
/doc/
/pkg/
/spec/reports/
/tmp/
# rspec failure tracking
.rspec_status

View File

@ -0,0 +1,4 @@
include:
- local: gems/gem-pg.gitlab-ci.yml
inputs:
gem_name: "gitlab-database-lock_retries"

View File

@ -0,0 +1,3 @@
--format documentation
--color
--require spec_helper

View File

@ -0,0 +1,2 @@
inherit_from:
- ../config/rubocop.yml

View File

@ -0,0 +1,5 @@
## [Unreleased]
## [0.1.0] - 2023-11-29
- Initial release

View File

@ -0,0 +1,6 @@
# frozen_string_literal: true
source "https://rubygems.org"
# Specify your gem's dependencies in gitlab-database-lock_retries.gemspec
gemspec

View File

@ -0,0 +1,105 @@
PATH
remote: .
specs:
gitlab-database-lock_retries (0.1.0)
GEM
remote: https://rubygems.org/
specs:
activesupport (7.1.2)
base64
bigdecimal
concurrent-ruby (~> 1.0, >= 1.0.2)
connection_pool (>= 2.2.5)
drb
i18n (>= 1.6, < 2)
minitest (>= 5.1)
mutex_m
tzinfo (~> 2.0)
ast (2.4.2)
base64 (0.2.0)
bigdecimal (3.1.4)
concurrent-ruby (1.2.2)
connection_pool (2.4.1)
diff-lcs (1.5.0)
drb (2.2.0)
ruby2_keywords
gitlab-styles (10.1.0)
rubocop (~> 1.50.2)
rubocop-graphql (~> 0.18)
rubocop-performance (~> 1.15)
rubocop-rails (~> 2.17)
rubocop-rspec (~> 2.22)
i18n (1.14.1)
concurrent-ruby (~> 1.0)
json (2.6.3)
minitest (5.20.0)
mutex_m (0.2.0)
parallel (1.23.0)
parser (3.2.2.4)
ast (~> 2.4.1)
racc
racc (1.7.3)
rack (3.0.8)
rainbow (3.1.1)
regexp_parser (2.8.2)
rexml (3.2.6)
rspec (3.12.0)
rspec-core (~> 3.12.0)
rspec-expectations (~> 3.12.0)
rspec-mocks (~> 3.12.0)
rspec-core (3.12.2)
rspec-support (~> 3.12.0)
rspec-expectations (3.12.3)
diff-lcs (>= 1.2.0, < 2.0)
rspec-support (~> 3.12.0)
rspec-mocks (3.12.6)
diff-lcs (>= 1.2.0, < 2.0)
rspec-support (~> 3.12.0)
rspec-support (3.12.1)
rubocop (1.50.2)
json (~> 2.3)
parallel (~> 1.10)
parser (>= 3.2.0.0)
rainbow (>= 2.2.2, < 4.0)
regexp_parser (>= 1.8, < 3.0)
rexml (>= 3.2.5, < 4.0)
rubocop-ast (>= 1.28.0, < 2.0)
ruby-progressbar (~> 1.7)
unicode-display_width (>= 2.4.0, < 3.0)
rubocop-ast (1.30.0)
parser (>= 3.2.1.0)
rubocop-capybara (2.19.0)
rubocop (~> 1.41)
rubocop-factory_bot (2.24.0)
rubocop (~> 1.33)
rubocop-graphql (0.19.0)
rubocop (>= 0.87, < 2)
rubocop-performance (1.19.1)
rubocop (>= 1.7.0, < 2.0)
rubocop-ast (>= 0.4.0)
rubocop-rails (2.22.2)
activesupport (>= 4.2.0)
rack (>= 1.1)
rubocop (>= 1.33.0, < 2.0)
rubocop-ast (>= 1.30.0, < 2.0)
rubocop-rspec (2.25.0)
rubocop (~> 1.40)
rubocop-capybara (~> 2.17)
rubocop-factory_bot (~> 2.22)
ruby-progressbar (1.13.0)
ruby2_keywords (0.0.5)
tzinfo (2.0.6)
concurrent-ruby (~> 1.0)
unicode-display_width (2.5.0)
PLATFORMS
ruby
DEPENDENCIES
gitlab-database-lock_retries!
gitlab-styles (~> 10.1.0)
rspec (~> 3.0)
BUNDLED WITH
2.4.22

View File

@ -0,0 +1,29 @@
# Gitlab::Database::LockRetries
This gem automatically executes code that relies on acquiring a database lock, in a way designed to minimize the impact on a busy production database.
## Installation
TODO: Replace `UPDATE_WITH_YOUR_GEM_NAME_PRIOR_TO_RELEASE_TO_RUBYGEMS_ORG` with your gem name right after releasing it to RubyGems.org. Please do not do it earlier due to security reasons. Alternatively, replace this section with instructions to install your gem from git if you don't plan to release to RubyGems.org.
Install the gem and add to the application's Gemfile by executing:
$ bundle add UPDATE_WITH_YOUR_GEM_NAME_PRIOR_TO_RELEASE_TO_RUBYGEMS_ORG
If bundler is not being used to manage dependencies, install the gem by executing:
$ gem install UPDATE_WITH_YOUR_GEM_NAME_PRIOR_TO_RELEASE_TO_RUBYGEMS_ORG
## Usage
TODO: Write usage instructions here
## Development
After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and the created tag, and push the `.gem` file to [rubygems.org](https://rubygems.org).
## Contributing
Bug reports and pull requests are welcome on GitHub at https://github.com/[USERNAME]/gitlab-database-lock_retries.

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true
require "bundler/gem_tasks"
require "rspec/core/rake_task"
RSpec::Core::RakeTask.new(:spec)
require "rubocop/rake_task"
RuboCop::RakeTask.new
task default: %i[spec rubocop]

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
require_relative "lib/gitlab/database/lock_retries/version"
Gem::Specification.new do |spec|
spec.name = "gitlab-database-lock_retries"
spec.version = Gitlab::Database::LockRetries::VERSION
spec.authors = ["group::database"]
spec.email = ["engineering@gitlab.com"]
spec.summary = "Gem summary"
spec.description = "A more descriptive text about what the gem is doing."
spec.homepage = "https://gitlab.com/gitlab-org/gitlab/-/tree/master/gems/gitlab-database-lock_retries"
spec.license = "MIT"
spec.required_ruby_version = ">= 3.0"
spec.metadata["rubygems_mfa_required"] = "true"
spec.files = Dir['lib/**/*.rb']
spec.require_paths = ["lib"]
spec.add_development_dependency "gitlab-styles", "~> 10.1.0"
spec.add_development_dependency "rspec", "~> 3.0"
end

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
require_relative "lock_retries/version"
module Gitlab
module Database
module LockRetries
# Your code goes here...
end
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
module Gitlab
module Database
module LockRetries
VERSION = "0.1.0"
end
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
RSpec.describe Gitlab::Database::LockRetries do
it "has a version number" do
expect(Gitlab::Database::LockRetries::VERSION).not_to be nil
end
xit "does something useful"
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
require "gitlab/database/lock_retries"
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end

View File

@ -1,339 +0,0 @@
# frozen_string_literal: true
module Gitlab
module BitbucketImport
class Importer
LABELS = [{ title: 'bug', color: '#FF0000' },
{ title: 'enhancement', color: '#428BCA' },
{ title: 'proposal', color: '#69D100' },
{ title: 'task', color: '#7F8C8D' }].freeze
attr_reader :project, :client, :errors, :users
ALREADY_IMPORTED_CACHE_KEY = 'bitbucket_cloud-importer/already-imported/%{project}/%{collection}'
def initialize(project)
@project = project
@client = Bitbucket::Client.new(project.import_data.credentials)
@formatter = Gitlab::ImportFormatter.new
@ref_converter = Gitlab::BitbucketImport::RefConverter.new(project)
@labels = {}
@errors = []
@users = {}
end
def execute
import_wiki
import_issues
import_pull_requests
handle_errors
metrics.track_finished_import
true
end
def create_labels
LABELS.each do |label_params|
label = ::Labels::FindOrCreateService.new(nil, project, label_params).execute(skip_authorization: true)
if label.valid?
@labels[label_params[:title]] = label
else
raise "Failed to create label \"#{label_params[:title]}\" for project \"#{project.full_name}\""
end
end
end
def import_pull_request_comments(pull_request, merge_request)
comments = client.pull_request_comments(repo, pull_request.iid)
inline_comments, pr_comments = comments.partition(&:inline?)
import_inline_comments(inline_comments, pull_request, merge_request)
import_standalone_pr_comments(pr_comments, merge_request)
end
private
def already_imported?(collection, iid)
Gitlab::Cache::Import::Caching.set_includes?(cache_key(collection), iid)
end
def mark_as_imported(collection, iid)
Gitlab::Cache::Import::Caching.set_add(cache_key(collection), iid)
end
def cache_key(collection)
format(ALREADY_IMPORTED_CACHE_KEY, project: project.id, collection: collection)
end
def handle_errors
return unless errors.any?
project.import_state.update_column(:last_error, {
message: 'The remote data could not be fully imported.',
errors: errors
}.to_json)
end
def store_pull_request_error(pull_request, ex)
backtrace = Gitlab::BacktraceCleaner.clean_backtrace(ex.backtrace)
error = { type: :pull_request, iid: pull_request.iid, errors: ex.message, trace: backtrace, raw_response: pull_request.raw&.to_json }
Gitlab::ErrorTracking.log_exception(ex, error)
# Omit the details from the database to avoid blowing up usage in the error column
error.delete(:trace)
error.delete(:raw_response)
errors << error
end
def gitlab_user_id(project, username)
find_user_id(username) || project.creator_id
end
# rubocop: disable CodeReuse/ActiveRecord
def find_user_id(username)
return unless username
return users[username] if users.key?(username)
users[username] = User.by_provider_and_extern_uid(:bitbucket, username).select(:id).first&.id
end
# rubocop: enable CodeReuse/ActiveRecord
def allocate_issues_internal_id!(project, client)
last_bitbucket_issue = client.last_issue(repo)
return unless last_bitbucket_issue
Issue.track_namespace_iid!(project.project_namespace, last_bitbucket_issue.iid)
end
def repo
@repo ||= client.repo(project.import_source)
end
def import_wiki
return if project.wiki.repository_exists?
wiki = WikiFormatter.new(project)
project.wiki.repository.import_repository(wiki.import_url)
rescue StandardError => e
errors << { type: :wiki, errors: e.message }
end
def import_issues
return unless repo.issues_enabled?
create_labels
issue_type_id = ::WorkItems::Type.default_issue_type.id
client.issues(repo).each_with_index do |issue, index|
next if already_imported?(:issues, issue.iid)
# If a user creates an issue while the import is in progress, this can lead to an import failure.
# The workaround is to allocate IIDs before starting the importer.
allocate_issues_internal_id!(project, client) if index == 0
import_issue(issue, issue_type_id)
end
end
# rubocop: disable CodeReuse/ActiveRecord
def import_issue(issue, issue_type_id)
description = ''
description += @formatter.author_line(issue.author) unless find_user_id(issue.author)
description += issue.description
label_name = issue.kind
milestone = issue.milestone ? project.milestones.find_or_create_by(title: issue.milestone) : nil
gitlab_issue = project.issues.create!(
iid: issue.iid,
title: issue.title,
description: description,
state_id: Issue.available_states[issue.state],
author_id: gitlab_user_id(project, issue.author),
namespace_id: project.project_namespace_id,
milestone: milestone,
work_item_type_id: issue_type_id,
created_at: issue.created_at,
updated_at: issue.updated_at
)
mark_as_imported(:issues, issue.iid)
metrics.issues_counter.increment
gitlab_issue.labels << @labels[label_name]
import_issue_comments(issue, gitlab_issue) if gitlab_issue.persisted?
rescue StandardError => e
errors << { type: :issue, iid: issue.iid, errors: e.message }
end
# rubocop: enable CodeReuse/ActiveRecord
def import_issue_comments(issue, gitlab_issue)
client.issue_comments(repo, issue.iid).each do |comment|
# The note can be blank for issue service messages like "Changed title: ..."
# We would like to import those comments as well but there is no any
# specific parameter that would allow to process them, it's just an empty comment.
# To prevent our importer from just crashing or from creating useless empty comments
# we do this check.
next unless comment.note.present?
note = ''
note += @formatter.author_line(comment.author) unless find_user_id(comment.author)
note += @ref_converter.convert_note(comment.note.to_s)
begin
gitlab_issue.notes.create!(
project: project,
note: note,
author_id: gitlab_user_id(project, comment.author),
created_at: comment.created_at,
updated_at: comment.updated_at
)
rescue StandardError => e
errors << { type: :issue_comment, iid: issue.iid, errors: e.message }
end
end
end
def import_pull_requests
pull_requests = client.pull_requests(repo)
pull_requests.each do |pull_request|
next if already_imported?(:pull_requests, pull_request.iid)
import_pull_request(pull_request)
end
end
def import_pull_request(pull_request)
description = ''
description += @formatter.author_line(pull_request.author) unless find_user_id(pull_request.author)
description += pull_request.description
source_branch_sha = pull_request.source_branch_sha
target_branch_sha = pull_request.target_branch_sha
source_sha_from_commit_sha = project.repository.commit(source_branch_sha)&.sha
source_sha_from_merge_sha = project.repository.commit(pull_request.merge_commit_sha)&.sha
source_branch_sha = source_sha_from_commit_sha || source_sha_from_merge_sha || source_branch_sha
target_branch_sha = project.repository.commit(target_branch_sha)&.sha || target_branch_sha
merge_request = project.merge_requests.create!(
iid: pull_request.iid,
title: pull_request.title,
description: description,
source_project: project,
source_branch: pull_request.source_branch_name,
source_branch_sha: source_branch_sha,
target_project: project,
target_branch: pull_request.target_branch_name,
target_branch_sha: target_branch_sha,
state: pull_request.state,
author_id: gitlab_user_id(project, pull_request.author),
created_at: pull_request.created_at,
updated_at: pull_request.updated_at
)
mark_as_imported(:pull_requests, pull_request.iid)
metrics.merge_requests_counter.increment
import_pull_request_comments(pull_request, merge_request) if merge_request.persisted?
rescue StandardError => e
store_pull_request_error(pull_request, e)
end
def import_inline_comments(inline_comments, pull_request, merge_request)
position_map = {}
discussion_map = {}
children, parents = inline_comments.partition(&:has_parent?)
# The Bitbucket API returns threaded replies as parent-child
# relationships. We assume that the child can appear in any order in
# the JSON.
parents.each do |comment|
position_map[comment.iid] = build_position(merge_request, comment)
end
children.each do |comment|
position_map[comment.iid] = position_map.fetch(comment.parent_id, nil)
end
inline_comments.each do |comment|
attributes = pull_request_comment_attributes(comment)
attributes[:discussion_id] = discussion_map[comment.parent_id] if comment.has_parent?
attributes.merge!(
position: position_map[comment.iid],
type: 'DiffNote')
note = merge_request.notes.create!(attributes)
# We can't store a discussion ID until a note is created, so if
# replies are created before the parent the discussion ID won't be
# linked properly.
discussion_map[comment.iid] = note.discussion_id
rescue StandardError => e
errors << { type: :pull_request, iid: comment.iid, errors: e.message }
end
end
def build_position(merge_request, pr_comment)
params = {
diff_refs: merge_request.diff_refs,
old_path: pr_comment.file_path,
new_path: pr_comment.file_path,
old_line: pr_comment.old_pos,
new_line: pr_comment.new_pos
}
Gitlab::Diff::Position.new(params)
end
def import_standalone_pr_comments(pr_comments, merge_request)
pr_comments.each do |comment|
merge_request.notes.create!(pull_request_comment_attributes(comment))
rescue StandardError => e
errors << { type: :pull_request, iid: comment.iid, errors: e.message }
end
end
def pull_request_comment_attributes(comment)
{
project: project,
author_id: gitlab_user_id(project, comment.author),
note: comment_note(comment),
created_at: comment.created_at,
updated_at: comment.updated_at
}
end
def comment_note(comment)
author = @formatter.author_line(comment.author) unless find_user_id(comment.author)
author.to_s + @ref_converter.convert_note(comment.note.to_s)
end
def log_base_data
{
class: self.class.name,
project_id: project.id,
project_path: project.full_path
}
end
def metrics
@metrics ||= Gitlab::Import::Metrics.new(:bitbucket_importer, @project)
end
end
end
end

View File

@ -6,6 +6,11 @@ module Gitlab
class RepositoryImporter
include Loggable
LABELS = [{ title: 'bug', color: '#FF0000' },
{ title: 'enhancement', color: '#428BCA' },
{ title: 'proposal', color: '#69D100' },
{ title: 'task', color: '#7F8C8D' }].freeze
def initialize(project)
@project = project
end
@ -62,8 +67,9 @@ module Gitlab
end
def create_labels
importer = Gitlab::BitbucketImport::Importer.new(project)
importer.create_labels
LABELS.each do |label_params|
::Labels::FindOrCreateService.new(nil, project, label_params).execute(skip_authorization: true)
end
end
def wiki

View File

@ -0,0 +1,43 @@
# frozen_string_literal: true
module Gitlab
module Database
module BackgroundMigration
class BatchedBackgroundMigrationDictionary
def self.entry(migration_job_name)
entries_by_migration_job_name[migration_job_name]
end
private_class_method def self.entries_by_migration_job_name
@entries_by_migration_job_name ||= Dir.glob(dict_path).to_h do |file_path|
entry = Entry.new(file_path)
[entry.migration_job_name, entry]
end
end
private_class_method def self.dict_path
Rails.root.join('db/docs/batched_background_migrations/*.yml')
end
class Entry
def initialize(file_path)
@file_path = file_path
@data = YAML.load_file(file_path)
end
def migration_job_name
data['migration_job_name']
end
def finalized_by
data['finalized_by']
end
private
attr_reader :file_path, :data
end
end
end
end
end
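
A hedged console example of how the dictionary class might be queried; the job name is taken from the `MigrateHumanUserType` spec touched in this commit, and the returned value assumes the `finalized_by` field added to the corresponding dictionary file above.

```ruby
# Sketch: looks up a batched background migration entry by job name.
dictionary = Gitlab::Database::BackgroundMigration::BatchedBackgroundMigrationDictionary
entry = dictionary.entry('MigrateHumanUserType') # job name assumed from the spec in this commit
entry.finalized_by       # => 20230523101514, per the updated db/docs/batched_background_migrations file
entry.migration_job_name # => "MigrateHumanUserType"
```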

View File

@ -11,7 +11,7 @@ module Gitlab
IMPORT_TABLE = [
ImportSource.new('github', 'GitHub', Gitlab::GithubImport::ParallelImporter),
ImportSource.new('bitbucket', 'Bitbucket Cloud', Gitlab::BitbucketImport::Importer),
ImportSource.new('bitbucket', 'Bitbucket Cloud', Gitlab::BitbucketImport::ParallelImporter),
ImportSource.new('bitbucket_server', 'Bitbucket Server', Gitlab::BitbucketServerImport::ParallelImporter),
ImportSource.new('fogbugz', 'FogBugz', Gitlab::FogbugzImport::Importer),
ImportSource.new('git', 'Repository by URL', nil),
@ -44,15 +44,7 @@ module Gitlab
end
def import_table
bitbucket_parallel_enabled = Feature.enabled?(:bitbucket_parallel_importer)
return IMPORT_TABLE unless bitbucket_parallel_enabled
import_table = IMPORT_TABLE.deep_dup
import_table[1].importer = Gitlab::BitbucketImport::ParallelImporter if bitbucket_parallel_enabled
import_table
IMPORT_TABLE
end
end
end

View File

@ -3,12 +3,17 @@
require "spec_helper"
RSpec.describe Projects::Ml::ShowMlModelVersionComponent, type: :component, feature_category: :mlops do
let_it_be(:project) { create(:project) } # rubocop:disable RSpec/FactoryBot/AvoidCreate -- build_stubbed breaks because it doesn't create iids properly.
let_it_be(:project) { build_stubbed(:project) }
let_it_be(:user) { project.owner }
let_it_be(:model) { create(:ml_models, project: project) } # rubocop:disable RSpec/FactoryBot/AvoidCreate -- build_stubbed breaks because it doesn't create iids properly.
let_it_be(:experiment) { model.default_experiment }
let_it_be(:model) { build_stubbed(:ml_models, project: project) }
let_it_be(:experiment) do
model.default_experiment.iid = 100
model.default_experiment
end
let_it_be(:candidate) do
create(:ml_candidates, :with_artifact, experiment: experiment, user: user, project: project) # rubocop:disable RSpec/FactoryBot/AvoidCreate -- build_stubbed breaks because it doesn't create iids properly.
build_stubbed(:ml_candidates, :with_artifact, experiment: experiment, user: user, project: project,
internal_id: 100)
end
let_it_be(:version) do

View File

@ -1,10 +1,9 @@
# frozen_string_literal: true
FactoryBot.define do
factory :ml_candidates, class: '::Ml::Candidate' do
association :project, factory: :project
association :user
experiment { association :ml_experiments, project_id: project.id }
project { association :project }
user { project.owner }
experiment { association :ml_experiments, project_id: project.id, user: project.owner }
trait :with_metrics_and_params do
metrics { Array.new(2) { association(:ml_candidate_metrics, candidate: instance) } }
@ -16,13 +15,9 @@ FactoryBot.define do
end
trait :with_artifact do
after(:create) do |candidate|
candidate.package = FactoryBot.create(
:generic_package,
name: candidate.package_name,
version: candidate.package_version,
project: candidate.project
)
artifact do
association(:generic_package, name: instance.package_name, version: instance.package_version || '1',
project: project)
end
end
end

View File

@ -20,9 +20,7 @@ FactoryBot.define do
end
trait :with_metadata do
after(:create) do |model|
model.metadata = FactoryBot.create_list(:ml_model_metadata, 2, model: model) # rubocop:disable StrategyInCallback
end
metadata { Array.new(2) { association(:ml_model_metadata, model: instance) } }
end
end
end

View File

@ -38,8 +38,6 @@ RSpec.describe 'Project issue boards', :js, feature_category: :team_planning do
project.add_maintainer(user2)
sign_in(user)
set_cookie('sidebar_collapsed', 'true')
end
context 'no lists' do

View File

@ -31,8 +31,6 @@ RSpec.describe 'User adds lists', :js, feature_category: :team_planning do
before do
sign_in(user)
set_cookie('sidebar_collapsed', 'true')
case board_type
when :project
visit project_board_path(project, project_board)

View File

@ -15,8 +15,6 @@ RSpec.describe 'Merge request > User posts diff notes', :js, feature_category: :
let(:test_note_comment) { 'this is a test note!' }
before do
set_cookie('sidebar_collapsed', 'true')
project.add_developer(user)
sign_in(user)
end

View File

@ -29,8 +29,6 @@ RSpec.describe 'Merge request > User sees avatars on diff notes', :js, feature_c
before do
sign_in user
set_cookie('sidebar_collapsed', 'true')
end
context 'discussion tab' do

View File

@ -1,4 +1,4 @@
import _ from 'lodash';
import { clone } from 'lodash';
import { TEST_HOST } from 'helpers/test_constants';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import IdeStatusBar from '~/ide/components/ide_status_bar.vue';
@ -28,7 +28,7 @@ describe('IdeStatusBar component', () => {
currentProjectId: TEST_PROJECT_ID,
projects: {
...store.state.projects,
[TEST_PROJECT_ID]: _.clone(projectData),
[TEST_PROJECT_ID]: clone(projectData),
},
...state,
});
@ -100,7 +100,7 @@ describe('IdeStatusBar component', () => {
currentMergeRequestId: TEST_MERGE_REQUEST_ID,
projects: {
[TEST_PROJECT_ID]: {
..._.clone(projectData),
...clone(projectData),
mergeRequests: {
[TEST_MERGE_REQUEST_ID]: {
web_url: TEST_MERGE_REQUEST_URL,

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GitlabSchema.types['MLCandidateLinks'], feature_category: :mlops do
it 'has the expected fields' do
expected_fields = %w[showPath artifact_path]
expect(described_class).to include_graphql_fields(*expected_fields)
end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GitlabSchema.types['MlCandidate'], feature_category: :mlops do
specify { expect(described_class.description).to eq('Candidate for a model version in the model registry') }
it 'includes all the package fields' do
expected_fields = %w[id name created_at _links]
expect(described_class).to include_graphql_fields(*expected_fields)
end
end

View File

@ -6,7 +6,7 @@ RSpec.describe GitlabSchema.types['MlModel'], feature_category: :mlops do
specify { expect(described_class.description).to eq('Machine learning model in the model registry') }
it 'includes all the package fields' do
expected_fields = %w[id name versions]
expected_fields = %w[id name versions candidates]
expect(described_class).to include_graphql_fields(*expected_fields)
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::MigrateHumanUserType, schema: 20230327103401, feature_category: :user_management do # rubocop:disable Layout/LineLength
RSpec.describe Gitlab::BackgroundMigration::MigrateHumanUserType, feature_category: :user_management do
let!(:valid_users) do
# 13 is the max value we have at the moment.
(0..13).map do |type|

View File

@ -1,559 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, feature_category: :importers do
include ImportSpecHelper
before do
stub_omniauth_provider('bitbucket')
end
let(:statuses) do
[
"open",
"resolved",
"on hold",
"invalid",
"duplicate",
"wontfix",
"closed" # undocumented status
]
end
let(:reporters) do
[
nil,
{ "nickname" => "reporter1" },
nil,
{ "nickname" => "reporter2" },
{ "nickname" => "reporter1" },
nil,
{ "nickname" => "reporter3" }
]
end
let(:sample_issues_statuses) do
issues = []
statuses.map.with_index do |status, index|
issues << {
id: index,
state: status,
title: "Issue #{index}",
kind: 'bug',
content: {
raw: "Some content to issue #{index}",
markup: "markdown",
html: "Some content to issue #{index}"
}
}
end
reporters.map.with_index do |reporter, index|
issues[index]['reporter'] = reporter
end
issues
end
let_it_be(:project_identifier) { 'namespace/repo' }
let_it_be_with_reload(:project) do
create(
:project,
:repository,
import_source: project_identifier,
import_url: "https://bitbucket.org/#{project_identifier}.git",
import_data_attributes: { credentials: { 'token' => 'token' } }
)
end
let(:importer) { described_class.new(project) }
let(:sample) { RepoHelpers.sample_compare }
let(:issues_statuses_sample_data) do
{
count: sample_issues_statuses.count,
values: sample_issues_statuses
}
end
let(:last_issue_data) do
{
page: 1,
pagelen: 1,
values: [sample_issues_statuses.last]
}
end
let(:counter) { double('counter', increment: true) }
subject { described_class.new(project) }
describe '#import_pull_requests' do
let(:source_branch_sha) { sample.commits.last }
let(:merge_commit_sha) { sample.commits.second }
let(:target_branch_sha) { sample.commits.first }
let(:pull_request) do
instance_double(
Bitbucket::Representation::PullRequest,
iid: 10,
source_branch_sha: source_branch_sha,
source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch,
target_branch_sha: target_branch_sha,
target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch,
merge_commit_sha: merge_commit_sha,
title: 'This is a title',
description: 'This is a test pull request',
state: 'merged',
author: pull_request_author,
created_at: Time.now,
updated_at: Time.now)
end
let(:pull_request_author) { 'other' }
let(:comments) { [@inline_note, @reply] }
let(:author_line) { "*Created by: someuser*\n\n" }
before do
allow(subject).to receive(:import_wiki)
allow(subject).to receive(:import_issues)
# https://gitlab.com/gitlab-org/gitlab-test/compare/c1acaa58bbcbc3eafe538cb8274ba387047b69f8...5937ac0a7beb003549fc5fd26fc247ad
@inline_note = instance_double(
Bitbucket::Representation::PullRequestComment,
iid: 2,
file_path: '.gitmodules',
old_pos: nil,
new_pos: 4,
note: 'Hello world',
author: 'someuser',
created_at: Time.now,
updated_at: Time.now,
inline?: true,
has_parent?: false)
@reply = instance_double(
Bitbucket::Representation::PullRequestComment,
iid: 3,
file_path: '.gitmodules',
note: 'Hello world',
author: 'someuser',
created_at: Time.now,
updated_at: Time.now,
inline?: true,
has_parent?: true,
parent_id: 2)
allow(subject.client).to receive(:repo)
allow(subject.client).to receive(:pull_requests).and_return([pull_request])
allow(subject.client).to receive(:pull_request_comments).with(anything, pull_request.iid).and_return(comments)
end
it 'imports threaded discussions' do
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request = MergeRequest.first
expect(merge_request.state).to eq('merged')
expect(merge_request.notes.count).to eq(2)
expect(merge_request.notes.map(&:discussion_id).uniq.count).to eq(1)
notes = merge_request.notes.order(:id).to_a
start_note = notes.first
expect(start_note).to be_a(DiffNote)
expect(start_note.note).to include(@inline_note.note)
expect(start_note.note).to include(author_line)
reply_note = notes.last
expect(reply_note).to be_a(DiffNote)
expect(reply_note.note).to include(@reply.note)
expect(reply_note.note).to include(author_line)
end
context 'when author is blank' do
let(:pull_request_author) { nil }
it 'adds created by anonymous in the description', :aggregate_failures do
expect { subject.execute }.to change { MergeRequest.count }.by(1)
expect(MergeRequest.first.description).to include('Created by: Anonymous')
end
end
context 'when user exists in GitLab' do
let!(:existing_user) { create(:user, username: 'someuser') }
let!(:identity) { create(:identity, provider: 'bitbucket', extern_uid: existing_user.username, user: existing_user) }
it 'does not add author line to comments' do
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request = MergeRequest.first
notes = merge_request.notes.order(:id).to_a
start_note = notes.first
expect(start_note.note).to eq(@inline_note.note)
expect(start_note.note).not_to include(author_line)
reply_note = notes.last
expect(reply_note.note).to eq(@reply.note)
expect(reply_note.note).not_to include(author_line)
end
end
it 'calls RefConverter to convert Bitbucket refs to Gitlab refs' do
expect(subject.instance_values['ref_converter']).to receive(:convert_note).twice
subject.execute
end
context 'when importing a pull request throws an exception' do
before do
allow(pull_request).to receive(:raw).and_return({ error: "broken" })
allow(subject.client).to receive(:pull_request_comments).and_raise(Gitlab::HTTP::Error)
end
it 'logs an error without the backtrace' do
expect(Gitlab::ErrorTracking).to receive(:log_exception)
.with(instance_of(Gitlab::HTTP::Error), hash_including(raw_response: '{"error":"broken"}'))
subject.execute
expect(subject.errors.count).to eq(1)
expect(subject.errors.first.keys).to match_array(%i[type iid errors])
end
end
context 'when source SHA is not found in the repository' do
let(:source_branch_sha) { 'a' * Commit::MIN_SHA_LENGTH }
let(:target_branch_sha) { 'c' * Commit::MIN_SHA_LENGTH }
it 'uses merge commit SHA for source' do
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request_diff = MergeRequest.first.merge_request_diff
expect(merge_request_diff.head_commit_sha).to eq merge_commit_sha
expect(merge_request_diff.start_commit_sha).to eq target_branch_sha
end
context 'when the merge commit SHA is also not found' do
let(:merge_commit_sha) { 'b' * Commit::MIN_SHA_LENGTH }
it 'uses the pull request sha references' do
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request_diff = MergeRequest.first.merge_request_diff
expect(merge_request_diff.head_commit_sha).to eq source_branch_sha
expect(merge_request_diff.start_commit_sha).to eq target_branch_sha
end
end
end
context "when target_branch_sha is blank" do
let(:target_branch_sha) { nil }
it 'creates the merge request with no target branch', :aggregate_failures do
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request = MergeRequest.first
expect(merge_request.target_branch_sha).to eq(nil)
end
end
context 'metrics' do
before do
allow(Gitlab::Metrics).to receive(:counter) { counter }
allow(pull_request).to receive(:raw).and_return('hello world')
end
it 'counts imported pull requests' do
expect(Gitlab::Metrics).to receive(:counter).with(
:bitbucket_importer_imported_merge_requests_total,
'The number of imported merge (pull) requests'
)
expect(counter).to receive(:increment)
subject.execute
end
end
context 'when pull request was already imported' do
let(:pull_request_already_imported) do
instance_double(
BitbucketServer::Representation::PullRequest,
iid: 11)
end
let(:cache_key) do
format(described_class::ALREADY_IMPORTED_CACHE_KEY, project: project.id, collection: :pull_requests)
end
before do
allow(subject.client).to receive(:pull_requests).and_return([pull_request, pull_request_already_imported])
Gitlab::Cache::Import::Caching.set_add(cache_key, pull_request_already_imported.iid)
end
it 'does not import the previously imported pull requests', :aggregate_failures do
expect { subject.execute }.to change { MergeRequest.count }.by(1)
expect(Gitlab::Cache::Import::Caching.set_includes?(cache_key, pull_request.iid)).to eq(true)
end
end
end
context 'issues statuses' do
before do
# HACK: Bitbucket::Representation.const_get('Issue') seems to return ::Issue without this
Bitbucket::Representation::Issue.new({})
stub_request(
:get,
"https://api.bitbucket.org/2.0/repositories/#{project_identifier}"
).to_return(status: 200,
headers: { "Content-Type" => "application/json" },
body: { has_issues: true, full_name: project_identifier }.to_json)
stub_request(
:get,
"https://api.bitbucket.org/2.0/repositories/#{project_identifier}/issues?pagelen=1&sort=-created_on&state=ALL"
).to_return(status: 200,
headers: { "Content-Type" => "application/json" },
body: last_issue_data.to_json)
stub_request(
:get,
"https://api.bitbucket.org/2.0/repositories/#{project_identifier}/issues?pagelen=50&sort=created_on"
).to_return(status: 200,
headers: { "Content-Type" => "application/json" },
body: issues_statuses_sample_data.to_json)
stub_request(:get, "https://api.bitbucket.org/2.0/repositories/namespace/repo?pagelen=50&sort=created_on")
.with(headers: { 'Accept' => '*/*', 'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3', 'Authorization' => 'Bearer', 'User-Agent' => 'Faraday v0.9.2' })
.to_return(status: 200, body: "", headers: {})
sample_issues_statuses.each_with_index do |issue, index|
stub_request(
:get,
"https://api.bitbucket.org/2.0/repositories/#{project_identifier}/issues/#{issue[:id]}/comments?pagelen=50&sort=created_on"
).to_return(
status: 200,
headers: { "Content-Type" => "application/json" },
body: { author_info: { username: "username" }, utc_created_on: index }.to_json
)
end
stub_request(
:get,
"https://api.bitbucket.org/2.0/repositories/#{project_identifier}/pullrequests?pagelen=50&sort=created_on&state=ALL"
).to_return(status: 200,
headers: { "Content-Type" => "application/json" },
body: {}.to_json)
end
context 'creating labels on project' do
before do
allow(importer).to receive(:import_wiki)
end
it 'creates labels as expected' do
expect { importer.execute }.to change { Label.count }.from(0).to(Gitlab::BitbucketImport::Importer::LABELS.size)
end
it 'does not fail if label is already existing' do
label = Gitlab::BitbucketImport::Importer::LABELS.first
::Labels::CreateService.new(label).execute(project: project)
expect { importer.execute }.not_to raise_error
end
it 'does not create new labels' do
Gitlab::BitbucketImport::Importer::LABELS.each do |label|
create(:label, project: project, title: label[:title])
end
expect { importer.execute }.not_to change { Label.count }
end
it 'does not update existing ones' do
label_title = Gitlab::BitbucketImport::Importer::LABELS.first[:title]
existing_label = create(:label, project: project, title: label_title)
# Reload label from database so we avoid timestamp comparison issues related to time precision when comparing
# attributes later.
existing_label.reload
travel_to(Time.now + 1.minute) do
importer.execute
label_after_import = project.labels.find(existing_label.id)
expect(label_after_import.attributes).to eq(existing_label.attributes)
end
end
it 'raises an error if a label is not valid' do
stub_const("#{described_class}::LABELS", [{ title: nil, color: nil }])
expect { importer.create_labels }.to raise_error(StandardError, /Failed to create label/)
end
end
it 'maps statuses to open or closed' do
allow(importer).to receive(:import_wiki)
importer.execute
expect(project.issues.where(state_id: Issue.available_states[:closed]).size).to eq(5)
expect(project.issues.where(state_id: Issue.available_states[:opened]).size).to eq(2)
expect(project.issues.map(&:namespace_id).uniq).to match_array([project.project_namespace_id])
end
describe 'wiki import' do
it 'is skipped when the wiki exists' do
expect(project.wiki).to receive(:repository_exists?) { true }
expect(project.wiki.repository).not_to receive(:import_repository)
importer.execute
expect(importer.errors).to be_empty
end
it 'imports to the project disk_path' do
expect(project.wiki).to receive(:repository_exists?) { false }
expect(project.wiki.repository).to receive(:import_repository)
importer.execute
expect(importer.errors).to be_empty
end
end
describe 'issue import' do
it 'allocates internal ids' do
expect(Issue).to receive(:track_namespace_iid!).with(project.project_namespace, 6)
importer.execute
end
it 'maps reporters to anonymous if bitbucket reporter is nil' do
allow(importer).to receive(:import_wiki)
importer.execute
expect(project.issues.size).to eq(7)
expect(project.issues.where("description LIKE ?", '%Anonymous%').size).to eq(3)
expect(project.issues.where("description LIKE ?", '%reporter1%').size).to eq(2)
expect(project.issues.where("description LIKE ?", '%reporter2%').size).to eq(1)
expect(project.issues.where("description LIKE ?", '%reporter3%').size).to eq(1)
expect(importer.errors).to be_empty
end
it 'sets work item type on new issues' do
allow(importer).to receive(:import_wiki)
importer.execute
expect(project.issues.map(&:work_item_type_id).uniq).to contain_exactly(WorkItems::Type.default_issue_type.id)
end
context 'with issue comments' do
let(:note) { 'Hello world' }
let(:inline_note) do
instance_double(Bitbucket::Representation::Comment, note: note, author: 'someuser', created_at: Time.now, updated_at: Time.now)
end
before do
allow_next_instance_of(Bitbucket::Client) do |instance|
allow(instance).to receive(:issue_comments).and_return([inline_note])
end
allow(importer).to receive(:import_wiki)
end
it 'imports issue comments' do
importer.execute
comment = project.notes.first
expect(project.notes.size).to eq(7)
expect(comment.note).to include(note)
expect(comment.note).to include(inline_note.author)
expect(importer.errors).to be_empty
end
it 'calls RefConverter to convert Bitbucket refs to Gitlab refs' do
expect(importer.instance_values['ref_converter']).to receive(:convert_note).exactly(7).times
importer.execute
end
end
context 'when issue was already imported' do
let(:cache_key) do
format(described_class::ALREADY_IMPORTED_CACHE_KEY, project: project.id, collection: :issues)
end
before do
Gitlab::Cache::Import::Caching.set_add(cache_key, sample_issues_statuses.first[:id])
end
it 'does not import previously imported issues', :aggregate_failures do
expect { subject.execute }.to change { Issue.count }.by(sample_issues_statuses.size - 1)
sample_issues_statuses.each do |sample_issues_status|
expect(Gitlab::Cache::Import::Caching.set_includes?(cache_key, sample_issues_status[:id])).to eq(true)
end
end
end
end
context 'metrics' do
before do
allow(Gitlab::Metrics).to receive(:counter) { counter }
end
it 'counts imported issues' do
expect(Gitlab::Metrics).to receive(:counter).with(
:bitbucket_importer_imported_issues_total,
'The number of imported issues'
)
expect(counter).to receive(:increment)
subject.execute
end
end
end
describe '#execute' do
context 'metrics' do
let(:histogram) { double(:histogram) }
before do
allow(subject).to receive(:import_wiki)
allow(subject).to receive(:import_issues)
allow(subject).to receive(:import_pull_requests)
allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
allow(Gitlab::Metrics).to receive(:histogram).and_return(histogram)
allow(histogram).to receive(:observe)
allow(counter).to receive(:increment)
end
it 'counts and measures duration of imported projects' do
expect(Gitlab::Metrics).to receive(:counter).with(
:bitbucket_importer_imported_projects_total,
'The number of imported projects'
)
expect(Gitlab::Metrics).to receive(:histogram).with(
:bitbucket_importer_total_duration_seconds,
'Total time spent importing projects, in seconds',
{},
Gitlab::Import::Metrics::IMPORT_DURATION_BUCKETS
)
expect(counter).to receive(:increment)
expect(histogram).to receive(:observe).with({ importer: :bitbucket_importer }, anything)
subject.execute
end
end
end
end

View File

@@ -0,0 +1,13 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Gitlab::Database::BackgroundMigration::BatchedBackgroundMigrationDictionary, feature_category: :database do
describe '.entry' do
it 'returns a single dictionary entry for the given migration job' do
entry = described_class.entry('MigrateHumanUserType')
expect(entry.migration_job_name).to eq('MigrateHumanUserType')
expect(entry.finalized_by).to eq(20230523101514)
end
end
end
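
A minimal usage sketch of the new dictionary, limited to the calls exercised by this spec and by the MigrationsHelpers change further down; the return values are the ones asserted above.

# Sketch: look up the dictionary entry for a background migration job.
entry = Gitlab::Database::BackgroundMigration::BatchedBackgroundMigrationDictionary
          .entry('MigrateHumanUserType')

entry.migration_job_name # => "MigrateHumanUserType"
entry.finalized_by       # => 20230523101514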

View File

@@ -74,38 +74,6 @@ RSpec.describe Gitlab::ImportSources, feature_category: :importers do
end
end
describe '.import_table' do
subject { described_class.import_table }
describe 'Bitbucket cloud' do
it 'returns the ParallelImporter' do
is_expected.to include(
described_class::ImportSource.new(
'bitbucket',
'Bitbucket Cloud',
Gitlab::BitbucketImport::ParallelImporter
)
)
end
context 'when flag is disabled' do
before do
stub_feature_flags(bitbucket_parallel_importer: false)
end
it 'returns the legacy Importer' do
is_expected.to include(
described_class::ImportSource.new(
'bitbucket',
'Bitbucket Cloud',
Gitlab::BitbucketImport::Importer
)
)
end
end
end
end
describe '.title' do
import_sources = {
'github' => 'GitHub',

View File

@@ -5,7 +5,12 @@ require 'spec_helper'
RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops do
let_it_be(:candidate) { create(:ml_candidates, :with_metrics_and_params, :with_artifact, name: 'candidate0') }
let_it_be(:candidate2) do
create(:ml_candidates, experiment: candidate.experiment, user: create(:user), name: 'candidate2')
create(:ml_candidates, experiment: candidate.experiment, name: 'candidate2', project: candidate.project)
end
let_it_be(:existing_model) { create(:ml_models, project: candidate2.project) }
let_it_be(:existing_model_version) do
create(:ml_model_versions, model: existing_model, candidate: candidate2)
end
let(:project) { candidate.project }
@@ -231,6 +236,14 @@ RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops d
end
end
describe '#without_model_version' do
subject { described_class.without_model_version }
it 'finds only candidates without model version' do
expect(subject).to match_array([candidate])
end
end
describe 'from_ci?' do
subject { candidate }

View File

@@ -63,6 +63,19 @@ RSpec.describe Ml::Model, feature_category: :mlops do
end
end
describe 'candidates' do
let_it_be(:candidate1) { create(:ml_model_versions, model: existing_model).candidate }
let_it_be(:candidate2) do
create(:ml_candidates, experiment: existing_model.default_experiment, project: project1)
end
let_it_be(:candidate3) { create(:ml_candidates, project: project1) }
it 'returns only the candidates for default experiment that do not belong to a model version' do
expect(existing_model.candidates).to match_array([candidate2])
end
end
describe '.by_project' do
subject { described_class.by_project(project1) }
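
One plausible shape for the candidates relation specified above, assuming the default_experiment association and the without_model_version scope exercised in the Ml::Candidate spec earlier in this commit; the committed implementation may differ.

# Hypothetical sketch -- not necessarily the committed implementation.
def candidates
  default_experiment&.candidates&.without_model_version
end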

View File

@@ -3,11 +3,12 @@
require 'spec_helper'
RSpec.describe ::Ml::CandidateDetailsPresenter, feature_category: :mlops do
let_it_be(:user) { create(:user, :with_avatar) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
let_it_be(:project) { create(:project, :private, creator: user) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
let_it_be(:experiment) { create(:ml_experiments, user: user, project: project) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
let_it_be(:user) { build_stubbed(:user, :with_avatar) }
let_it_be(:project) { build_stubbed(:project, :private, creator: user) }
let_it_be(:experiment) { build_stubbed(:ml_experiments, user: user, project: project, iid: 100) }
let_it_be(:candidate) do
create(:ml_candidates, :with_artifact, experiment: experiment, user: user, project: project) # rubocop:disable RSpec/FactoryBot/AvoidCreate
build_stubbed(:ml_candidates, :with_artifact, experiment: experiment, user: user, project: project,
internal_id: 100)
end
let_it_be(:pipeline) { build_stubbed(:ci_pipeline, project: project, user: user) }

View File

@@ -0,0 +1,21 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ml::CandidatePresenter, feature_category: :mlops do
let_it_be(:project) { build_stubbed(:project) }
let_it_be(:candidate) { build_stubbed(:ml_candidates, :with_artifact, internal_id: 1, project: project) }
let_it_be(:presenter) { candidate.present }
describe '#path' do
subject { presenter.path }
it { is_expected.to eq("/#{project.full_path}/-/ml/candidates/#{candidate.iid}") }
end
describe '#artifact_path' do
subject { presenter.artifact_path }
it { is_expected.to eq("/#{project.full_path}/-/packages/#{candidate.package_id}") }
end
end
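
A plausible minimal shape for the presenter under test, written to mirror the literal paths asserted above; the real implementation would more likely go through URL helpers, so treat this purely as a hedged sketch.

# Hypothetical sketch -- mirrors the spec's expected strings, not necessarily the committed code.
module Ml
  class CandidatePresenter < Gitlab::View::Presenter::Delegated
    presents ::Ml::Candidate, as: :candidate

    def path
      "/#{candidate.project.full_path}/-/ml/candidates/#{candidate.iid}"
    end

    def artifact_path
      "/#{candidate.project.full_path}/-/packages/#{candidate.package_id}"
    end
  end
end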

View File

@@ -183,81 +183,6 @@ RSpec.describe Projects::ImportService, feature_category: :importers do
expect(result[:status]).to eq :error
expect(result[:message]).to eq "Error importing repository #{project.safe_import_url} into #{project.full_path} - Failed to import the repository [FILTERED]"
end
context 'when bitbucket_parallel_importer feature flag is disabled' do
before do
stub_feature_flags(bitbucket_parallel_importer: false)
end
it 'succeeds if repository import is successful' do
expect(project.repository).to receive(:import_repository).and_return(true)
expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
expect(importer).to receive(:execute).and_return(true)
end
expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
expect(service).to receive(:execute).and_return(status: :success)
end
result = subject.execute
expect(result[:status]).to eq :success
end
it 'fails if repository import fails' do
expect(project.repository)
.to receive(:import_repository)
.with('https://bitbucket.org/vim/vim.git', resolved_address: '')
.and_raise(Gitlab::Git::CommandError, 'Failed to import the repository /a/b/c')
result = subject.execute
expect(result[:status]).to eq :error
expect(result[:message]).to eq "Error importing repository #{project.safe_import_url} into #{project.full_path} - Failed to import the repository [FILTERED]"
end
context 'when lfs import fails' do
it 'logs the error' do
error_message = 'error message'
expect(project.repository).to receive(:import_repository).and_return(true)
expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
expect(importer).to receive(:execute).and_return(true)
end
expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
expect(service).to receive(:execute).and_return(status: :error, message: error_message)
end
expect(Gitlab::AppLogger).to receive(:error).with("The Lfs import process failed. #{error_message}")
subject.execute
end
end
context 'when repository import scheduled' do
before do
expect(project.repository).to receive(:import_repository).and_return(true)
allow(subject).to receive(:import_data)
end
it 'downloads lfs objects if lfs_enabled is enabled for project' do
allow(project).to receive(:lfs_enabled?).and_return(true)
expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute)
subject.execute
end
it 'does not download lfs objects if lfs_enabled is not enabled for project' do
allow(project).to receive(:lfs_enabled?).and_return(false)
expect_any_instance_of(Projects::LfsPointers::LfsImportService).not_to receive(:execute)
subject.execute
end
end
end
end
end
end

View File

@@ -139,11 +139,6 @@ RSpec.configure do |config|
metadata[:migration] = true if metadata[:level] == :migration || metadata[:level] == :background_migration
end
# Do not overwrite schema if it's already set
unless metadata.key?(:schema)
metadata[:schema] = :latest if metadata[:level] == :background_migration
end
# Admin controller specs get auto admin mode enabled since they are
# protected by the 'EnforcesAdminAuthentication' concern
metadata[:enable_admin_mode] = true if %r{(ee)?/spec/controllers/admin/}.match?(location)

View File

@@ -120,11 +120,18 @@ module MigrationsHelpers
end
end
def finalized_by_version
::Gitlab::Database::BackgroundMigration::BatchedBackgroundMigrationDictionary
.entry(described_class.to_s.demodulize)&.finalized_by
end
def migration_schema_version
metadata_schema = self.class.metadata[:schema]
if metadata_schema == :latest
migrations.last.version
elsif self.class.metadata[:level] == :background_migration
metadata_schema || finalized_by_version || migrations.last.version
else
metadata_schema || previous_migration.version
end
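
Taken together with the MigrateHumanUserType spec change earlier in this commit, the new resolution order can be sketched as follows; the concrete values come from the specs above, and the method names are the ones in the helper shown here.

# Sketch only: how migration_schema_version now resolves for a :background_migration spec.
metadata_schema = self.class.metadata[:schema]   # nil -- the spec no longer pins schema: 20230327103401
schema_version  = metadata_schema ||
  finalized_by_version ||                        # 20230523101514, via BatchedBackgroundMigrationDictionary
  migrations.last.version                        # fallback when the job has no finalized_by entry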

View File

@@ -5376,7 +5376,6 @@
- './spec/lib/gitlab/background_task_spec.rb'
- './spec/lib/gitlab/backtrace_cleaner_spec.rb'
- './spec/lib/gitlab/batch_worker_context_spec.rb'
- './spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- './spec/lib/gitlab/bitbucket_import/project_creator_spec.rb'
- './spec/lib/gitlab/bitbucket_import/wiki_formatter_spec.rb'
- './spec/lib/gitlab/blame_spec.rb'

View File

@@ -11,13 +11,13 @@ require (
github.com/getsentry/raven-go v0.2.0
github.com/golang-jwt/jwt/v5 v5.0.0
github.com/golang/gddo v0.0.0-20210115222349-20d68f94ee1f
github.com/gorilla/websocket v1.5.0
github.com/gorilla/websocket v1.5.1
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0
github.com/johannesboyne/gofakes3 v0.0.0-20230914150226-f005f5cc03aa
github.com/jpillora/backoff v1.0.0
github.com/mitchellh/copystructure v1.2.0
github.com/prometheus/client_golang v1.17.0
github.com/redis/go-redis/v9 v9.2.1
github.com/redis/go-redis/v9 v9.3.0
github.com/sebest/xff v0.0.0-20210106013422-671bd2870b3a
github.com/sirupsen/logrus v1.9.3
github.com/smartystreets/goconvey v1.8.1

View File

@@ -291,8 +291,8 @@ github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56
github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU=
github.com/gopherjs/gopherjs v1.17.2 h1:fQnZVsXk8uxXIStYb0N4bGk7jeyTalG/wsZjQ25dO0g=
github.com/gopherjs/gopherjs v1.17.2/go.mod h1:pRRIvn/QzFLrKfvEz3qUuEhtE/zLCWfreZ6J5gM2i+k=
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY=
github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY=
github.com/gregjones/httpcache v0.0.0-20170920190843-316c5e0ff04e/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI=
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8=
@@ -388,8 +388,8 @@ github.com/prometheus/procfs v0.11.1 h1:xRC8Iq1yyca5ypa9n1EZnWZkt7dwcoRPQwX/5gwa
github.com/prometheus/procfs v0.11.1/go.mod h1:eesXgaPo1q7lBpVMoMy0ZOFTth9hBn4W/y0/p/ScXhY=
github.com/prometheus/prometheus v0.46.0 h1:9JSdXnsuT6YsbODEhSQMwxNkGwPExfmzqG73vCMk/Kw=
github.com/prometheus/prometheus v0.46.0/go.mod h1:10L5IJE5CEsjee1FnOcVswYXlPIscDWWt3IJ2UDYrz4=
github.com/redis/go-redis/v9 v9.2.1 h1:WlYJg71ODF0dVspZZCpYmoF1+U1Jjk9Rwd7pq6QmlCg=
github.com/redis/go-redis/v9 v9.2.1/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M=
github.com/redis/go-redis/v9 v9.3.0 h1:RiVDjmig62jIWp7Kk4XVLs0hzV6pI3PyTnnL0cnn0u0=
github.com/redis/go-redis/v9 v9.3.0/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=