Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-12-20 18:33:27 +00:00
parent 8a69afc44b
commit 449b8a1e53
108 changed files with 2182 additions and 525 deletions

View File

@ -5,26 +5,16 @@ Migration/EnsureFactoryForTable:
- 'db/migrate/20230926092944_add_approval_group_rules_groups.rb'
- 'db/migrate/20230926093004_add_approval_group_rules_users.rb'
- 'db/migrate/20230926093025_add_approval_group_rules_protected_branches.rb'
- 'db/migrate/20231017095738_create_activity_pub_releases_subscriptions.rb'
- 'db/migrate/20231017134349_create_ml_model_metadata.rb'
- 'db/migrate/20231020095624_create_audit_events_streaming_http_group_namespace_filters.rb'
- 'db/migrate/20231025123238_create_compliance_framework_security_policies.rb'
- 'db/migrate/20231031164724_create_sbom_occurrences_vulnerabilities.rb'
- 'db/migrate/20231102142553_add_zoekt_nodes.rb'
- 'db/migrate/20231107140642_create_audit_events_instance_amazon_s3_configurations.rb'
- 'db/migrate/20231110044738_create_abuse_report_assignees_table.rb'
- 'db/migrate/20231115064007_create_audit_events_streaming_http_instance_namespace_filters.rb'
- 'db/migrate/20231124191759_add_catalog_resource_sync_events_table.rb'
- 'db/migrate/20231207150738_add_work_item_dates_sources.rb'
- 'db/migrate/20231215192706_create_ml_model_version_metadata.rb'
- 'db/migrate/20231220113459_add_work_item_color.rb'
- 'db/migrate/20231221033539_create_sbom_source_packages_table.rb'
- 'db/migrate/20231221113948_create_cloud_connector_access.rb'
- 'db/migrate/20240108123023_create_ai_agents_table.rb'
- 'db/migrate/20240108123115_create_ai_agent_versions_table.rb'
- 'db/migrate/20240112124030_create_audit_events_group_external_streaming_destinations.rb'
- 'db/migrate/20240115185037_create_packages_terraform_module_metadata.rb'
- 'db/migrate/20240122165824_create_ci_job_token_group_scope_links.rb'
- 'db/migrate/20240130162148_create_audit_events_instance_external_streaming_destinations.rb'
- 'db/migrate/20240131052824_create_catalog_verified_namespaces.rb'
- 'db/migrate/20240201112236_create_group_saved_replies_table.rb'
@ -34,7 +24,3 @@ Migration/EnsureFactoryForTable:
- 'db/migrate/20240304184128_create_ci_build_names_table.rb'
- 'db/migrate/20240306121653_create_relation_import_tracker.rb'
- 'db/migrate/20240404192955_create_early_access_program_tracking_events.rb'
- 'db/migrate/20240419082037_create_ai_self_hosted_models.rb'
- 'db/migrate/20240423064716_create_ci_build_execution_config.rb'
- 'db/migrate/20240430110033_create_ai_feature_settings.rb'
- 'db/migrate/20241127092714_create_container_registry_protection_tag_rules.rb'

View File

@ -34,7 +34,7 @@ export const CATEGORY_ICON_MAP = {
/* eslint-enable @gitlab/require-i18n-strings */
export const EMOJIS_PER_ROW = 9;
export const EMOJI_ROW_HEIGHT = 34;
export const EMOJI_ROW_HEIGHT = 36;
export const CATEGORY_ROW_HEIGHT = 37;
export const CACHE_VERSION_KEY = 'gl-emoji-map-version';

View File

@ -239,6 +239,7 @@ export default {
if (this.note.length) {
const noteData = {
endpoint: isDraft ? this.draftEndpoint : this.endpoint,
flashContainer: this.$el,
data: {
note: {
noteable_type: this.noteableType,
@ -371,6 +372,7 @@ export default {
<discussion-locked-widget v-else-if="!canCreateNote" :issuable-type="noteableDisplayName" />
<ul v-else-if="canCreateNote" class="notes notes-form timeline">
<timeline-entry-item class="note-form">
<div class="flash-container gl-mb-2"></div>
<gl-alert
v-for="(error, index) in errors"
:key="index"

View File

@ -350,6 +350,7 @@ export default {
class="discussion-reply-holder clearfix gl-bg-subtle"
:class="discussionHolderClass"
>
<div class="flash-container !gl-mt-0 gl-mb-2"></div>
<discussion-actions
v-if="!isReplying && userCanReply"
:discussion="discussion"

View File

@ -486,7 +486,6 @@ export function saveNote(noteData) {
methodToDispatch = replyId ? this.replyToDiscussion : this.createNewNote;
}
$('.notes-form .flash-container').hide(); // hide previous flash notification
this[types.REMOVE_PLACEHOLDER_NOTES](); // remove previous placeholders
if (hasQuickActions) {

View File

@ -469,7 +469,6 @@ export const saveNote = ({ commit, dispatch }, noteData) => {
methodToDispatch = replyId ? 'replyToDiscussion' : 'createNewNote';
}
$('.notes-form .flash-container').hide(); // hide previous flash notification
commit(types.REMOVE_PLACEHOLDER_NOTES); // remove previous placeholders
if (hasQuickActions) {

View File

@ -1,4 +1,6 @@
<script>
import { GlAlert } from '@gitlab/ui';
import { uniqueId } from 'lodash';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import Tracking from '~/tracking';
import { ASC } from '~/notes/constants';
@ -20,6 +22,7 @@ export default {
},
components: {
DiscussionReplyPlaceholder,
GlAlert,
WorkItemNoteSignedOut,
WorkItemCommentLocked,
WorkItemCommentForm,
@ -115,6 +118,9 @@ export default {
},
data() {
return {
addNoteKey: this.generateUniqueId(),
errorMessages: '',
messages: '',
workItem: {},
isEditing: this.isNewDiscussion,
isSubmitting: false,
@ -215,13 +221,17 @@ export default {
},
},
methods: {
generateUniqueId() {
// used to rerender work-item-comment-form so the text in the textarea is cleared
return uniqueId(`work-item-add-note-${this.workItemId}-`);
},
async updateWorkItem({ commentText, isNoteInternal = false }) {
this.isSubmitting = true;
this.$emit('replying', commentText);
try {
this.track('add_work_item_comment');
await this.$apollo.mutate({
const { data } = await this.$apollo.mutate({
mutation: createNoteMutation,
variables: {
input: {
@ -233,15 +243,10 @@ export default {
},
update: this.onNoteUpdate,
});
/**
* https://gitlab.com/gitlab-org/gitlab/-/issues/388314
*
* Once form is successfully submitted, emit replied event,
* mark isSubmitting to false and clear storage before hiding the form.
* This will restrict comment form to restore the value while textarea
* input triggered due to keyboard event meta+enter.
*
*/
const { errorMessages, messages } = data.createNote.quickActionsStatus;
this.errorMessages = errorMessages?.join(' ');
this.messages = messages?.join(' ');
this.$emit('replied');
clearDraft(this.autosaveKey);
this.cancelEditing();
@ -254,6 +259,7 @@ export default {
},
cancelEditing() {
this.isEditing = this.isNewDiscussion;
this.addNoteKey = this.generateUniqueId();
this.$emit('cancelEditing');
},
showReplyForm() {
@ -305,9 +311,27 @@ export default {
/>
<div v-else :class="timelineEntryInnerClass">
<div :class="timelineContentClass">
<gl-alert
v-if="messages"
class="gl-mb-2"
data-testid="success-alert"
@dismiss="messages = ''"
>
{{ messages }}
</gl-alert>
<gl-alert
v-if="errorMessages"
class="gl-mb-2"
variant="danger"
data-testid="error-alert"
@dismiss="errorMessages = ''"
>
{{ errorMessages }}
</gl-alert>
<div :class="parentClass">
<work-item-comment-form
v-if="isEditing"
:key="addNoteKey"
:work-item-type="workItemType"
:aria-label="__('Add a reply')"
:is-submitting="isSubmitting"

View File

@ -1,6 +1,5 @@
<script>
import { GlModal } from '@gitlab/ui';
import { uniqueId } from 'lodash';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import { __ } from '~/locale';
import {
@ -107,7 +106,6 @@ export default {
sortOrder: ASC,
noteToDelete: null,
discussionFilter: WORK_ITEM_NOTES_FILTER_ALL_NOTES,
addNoteKey: uniqueId(`work-item-add-note-${this.workItemId}`),
workItemNamespace: null,
previewNote: null,
};
@ -349,9 +347,6 @@ export default {
filterDiscussions(filterValue) {
this.discussionFilter = filterValue;
},
updateKey() {
this.addNoteKey = uniqueId(`work-item-add-note-${this.workItemId}`);
},
reportAbuse(isOpen, reply = {}) {
this.$emit('openReportAbuse', reply);
},
@ -440,12 +435,7 @@ export default {
<div v-if="someNotesLoaded" class="issuable-discussion gl-mb-5 !gl-clearfix">
<div v-if="formAtTop && !commentsDisabled" class="js-comment-form">
<ul class="notes notes-form timeline">
<work-item-add-note
v-bind="workItemCommentFormProps"
:key="addNoteKey"
@cancelEditing="updateKey"
@error="$emit('error', $event)"
/>
<work-item-add-note v-bind="workItemCommentFormProps" @error="$emit('error', $event)" />
</ul>
</div>
<work-item-notes-loading v-if="formAtTop && isLoadingMore" />
@ -487,12 +477,7 @@ export default {
<work-item-notes-loading v-if="!formAtTop && isLoadingMore" />
<div v-if="!formAtTop && !commentsDisabled" class="js-comment-form">
<ul class="notes notes-form timeline">
<work-item-add-note
v-bind="workItemCommentFormProps"
:key="addNoteKey"
@cancelEditing="updateKey"
@error="$emit('error', $event)"
/>
<work-item-add-note v-bind="workItemCommentFormProps" @error="$emit('error', $event)" />
</ul>
</div>
</div>

View File

@ -13,6 +13,10 @@ mutation createWorkItemNote($input: CreateNoteInput!) {
}
}
}
quickActionsStatus {
errorMessages
messages
}
errors
}
}

View File

@ -3,6 +3,7 @@
class IdeController < ApplicationController
include Gitlab::Utils::StrongMemoize
include WebIdeCSP
include RoutableActions
include StaticObjectExternalStorageCSP
include ProductAnalyticsTracking
@ -40,7 +41,15 @@ class IdeController < ApplicationController
private
def authorize_read_project!
render_404 unless can?(current_user, :read_project, project)
return @project if @project
path = params[:project_id]
@project = find_routable!(Project, path, request.fullpath, extra_authorization_proc: auth_proc)
end
def auth_proc
->(project) { !project.pending_delete? }
end
def ensure_web_ide_oauth_application!

View File

@ -16,8 +16,8 @@ module Packages
}
end
def generic?
data[:package_type] == 'generic'
def ml_model?
data[:package_type] == 'ml_model'
end
end
end

View File

@ -8,10 +8,25 @@ module Mutations
include FindsProject
EXECUTE_OPTIONS = { ignore_skip_ci: true, save_on_errors: false }.freeze
INTERNAL_CREATE_OPERATION_NAME = 'internalPipelineCreate'
field :pipeline,
Types::Ci::PipelineType,
null: true,
description: 'Pipeline created after mutation.'
description: 'Pipeline created after mutation. Null if `async: true`.'
field :request_id,
GraphQL::Types::String,
null: true,
description: 'ID for checking the pipeline creation status. Null if `async: false`.',
experiment: { milestone: '17.8' }
argument :async, GraphQL::Types::Boolean,
required: false,
description: 'When `true`, the request does not wait for the pipeline to be created, ' \
'and returns a unique identifier that can be used to check the creation status.',
experiment: { milestone: '17.8' }
argument :project_path, GraphQL::Types::ID,
required: true,
@ -27,19 +42,39 @@ module Mutations
authorize :create_pipeline
def resolve(project_path:, **params)
def resolve(project_path:, ref:, async: false, variables: {})
project = authorized_find!(project_path)
creation_params = { ref: ref, variables_attributes: variables }
service = ::Ci::CreatePipelineService.new(project, current_user, creation_params)
response = ::Ci::CreatePipelineService
.new(project, current_user, params)
.execute(:web, ignore_skip_ci: true, save_on_errors: false)
response = execute_service(service, source, async)
if response.success?
{ pipeline: response.payload, errors: [] }
if async
{ request_id: response.payload, errors: [] }
else
{ pipeline: response.payload, errors: [] }
end
else
{ pipeline: nil, errors: [response.message] }
end
end
private
def execute_service(service, source, async)
if async
service.execute_async(source, EXECUTE_OPTIONS)
else
service.execute(source, **EXECUTE_OPTIONS)
end
end
def source
return 'web' if context.query.operation_name == INTERNAL_CREATE_OPERATION_NAME
'api'
end
end
end
end

View File

@ -9,19 +9,28 @@ module Types
connection_type_class Types::CountableConnectionType
field :id, GraphQL::Types::ID, null: false, description: 'ID of the catalog resource.'
field :id, GraphQL::Types::ID, null: false,
description: 'ID of the catalog resource.'
field :name, GraphQL::Types::String, null: true, description: 'Name of the catalog resource.'
field :name, GraphQL::Types::String, null: true,
description: 'Name of the catalog resource.'
field :description, GraphQL::Types::String, null: true, description: 'Description of the catalog resource.'
field :description, GraphQL::Types::String, null: true,
description: 'Description of the catalog resource.'
field :icon, GraphQL::Types::String, null: true, description: 'Icon for the catalog resource.',
field :topics, [GraphQL::Types::String], null: true,
description: 'Topics for the catalog resource.'
field :icon, GraphQL::Types::String, null: true,
description: 'Icon for the catalog resource.',
method: :avatar_path
field :full_path, GraphQL::Types::ID, null: true, description: 'Full project path of the catalog resource.',
field :full_path, GraphQL::Types::ID, null: true,
description: 'Full project path of the catalog resource.',
experiment: { milestone: '16.11' }
field :web_path, GraphQL::Types::String, null: true, description: 'Web path of the catalog resource.',
field :web_path, GraphQL::Types::String, null: true,
description: 'Web path of the catalog resource.',
experiment: { milestone: '16.1' }
field :versions, Types::Ci::Catalog::Resources::VersionType.connection_type, null: true,
@ -57,6 +66,28 @@ module Types
def starrers_path
Gitlab::Routing.url_helpers.project_starrers_path(object.project)
end
# rubocop: disable GraphQL/ResolverMethodLength -- this will be refactored:
# https://gitlab.com/gitlab-org/gitlab/-/issues/510648
def topics
BatchLoader::GraphQL.for(object).batch do |resources, loader|
# rubocop: disable CodeReuse/ActiveRecord -- this is necessary to batch
project_ids = resources.pluck(:project_id)
project_topics = ::Projects::ProjectTopic.where(project_id: project_ids)
topics = ::Projects::Topic.where(id: project_topics.pluck(:topic_id))
grouped_project_topics = project_topics.group_by(&:project_id)
resources.each do |resource|
project_topics_ids_for_resource = grouped_project_topics.fetch(resource.project_id,
[]).pluck(:topic_id)
topics_for_resource = topics.select { |topic| project_topics_ids_for_resource.include?(topic.id) }
loader.call(resource, topics_for_resource.pluck(:name))
# rubocop: enable CodeReuse/ActiveRecord
end
end
end
# rubocop: enable GraphQL/ResolverMethodLength
end
# rubocop: enable Graphql/AuthorizeTypes
end

View File

@ -12,12 +12,37 @@ module Emails
)
end
def project_import_complete(project_id, user_id, user_mapping_enabled, safe_import_url)
user = User.find(user_id)
@project = Project.find(project_id)
@namespace = @project.root_ancestor
@hostname = safe_import_url
@user_mapping_available = user_mapping_enabled &&
!@namespace.user_namespace? &&
!@project.import_failed? &&
@namespace.import_source_users.awaiting_reassignment.any?
@is_group_owner = user.can?(:admin_namespace, @namespace)
@is_project_creator = user_id == @project.creator_id
title = safe_format(
s_('Import|Import from %{hostname} completed'),
hostname: @hostname
)
email_with_layout(
to: user.notification_email_or_default,
subject: subject(title)
)
end
def bulk_import_complete(user_id, bulk_import_id)
user = User.find(user_id)
@bulk_import = BulkImport.find(bulk_import_id)
@hostname = @bulk_import.configuration.safe_url
title = safe_format(
s_('BulkImport|Import from %{hostname} completed'),
s_('Import|Import from %{hostname} completed'),
hostname: @hostname
)

View File

@ -389,6 +389,14 @@ class NotifyPreview < ActionMailer::Preview
).message
end
def project_import_complete
project_id = ProjectImportState.last.project_id
project = Project.find(project_id)
creator_id = project.creator_id
Notify.project_import_complete(project_id, creator_id, true, project.safe_import_url(masked: false)).message
end
def bulk_import_complete
bulk_import = BulkImport.last

View File

@ -13,21 +13,35 @@
# }
# }
# }
# NOTE: For general project pipelines, `REDIS_KEY` includes the project ID and the request ID for the pipeline creation.
# This means that the request ID is referenced twice when fetching the request data - in `REDIS_KEY` and in
# `CREATION_ID`. It also means that the value for `REDIS_KEY` will only ever contain one pipeline creation request.
# This is somewhat unexpected, but it is necessary in order to match the data structure for merge request pipelines
# (which can have several pipelines creation requests stored under their `REDIS_KEY`). We've decided that maintaining
# two separate data structures is more confusing and results in more code, so it's better to match the data structures
# even if it means that we have a redundant use of the request ID when storing general project pipeline creation
# requests.
#
# NOTE: All hash keys should be strings because this data is JSONified for Redis and when passing the creation key and
# ID into pipeline creation workers
# NOTE: The `REDIS_KEY` for general project pipelines MUST contain the request ID (not only the project ID) in order to
# ensure that the keys expire. Some projects are so active that their creation request data will never expire from
# Redis if we store all the pipeline creations for a project under one key.
#
# NOTE: All hash keys should be strings because this data is JSONified for Redis and the pipeline creation workers.
#
# TODO: In an attempt to make the Redis data easier to understand, we plan to simplify the way we store MR pipeline
# creation data in https://gitlab.com/gitlab-org/gitlab/-/issues/509925
module Ci
module PipelineCreation
class Requests
FAILED = 'failed'
# GraphQL does not seem to accept spaces so we need to update the regex
IN_PROGRESS = 'in_progress'
SUCCEEDED = 'succeeded'
STATUSES = [FAILED, IN_PROGRESS, SUCCEEDED].freeze
REDIS_EXPIRATION_TIME = 300
MERGE_REQUEST_REDIS_KEY = "pipeline_creation:projects:{%{project_id}}:mrs:{%{mr_id}}"
PROJECT_REDIS_KEY = "pipeline_creation:projects:{%{project_id}}"
MERGE_REQUEST_REDIS_KEY = "#{PROJECT_REDIS_KEY}:mrs:{%{mr_id}}".freeze
REQUEST_REDIS_KEY = "#{PROJECT_REDIS_KEY}:request:{%{request_id}}".freeze
class << self
def failed(request, error)
@ -42,6 +56,15 @@ module Ci
hset(request, SUCCEEDED, pipeline_id: pipeline_id)
end
def start_for_project(project)
request_id = generate_id
request = { 'key' => request_key(project, request_id), 'id' => request_id }
hset(request, IN_PROGRESS)
request
end
def start_for_merge_request(merge_request)
request = { 'key' => merge_request_key(merge_request), 'id' => generate_id }
@ -62,6 +85,14 @@ module Ci
.any? { |request| request['status'] == IN_PROGRESS }
end
def get_request(project, request_id)
hget({ 'key' => request_key(project, request_id), 'id' => request_id })
end
def request_key(project, request_id)
format(REQUEST_REDIS_KEY, project_id: project.id, request_id: request_id)
end
def merge_request_key(merge_request)
format(MERGE_REQUEST_REDIS_KEY, project_id: merge_request.project_id, mr_id: merge_request.id)
end

View File

@ -502,6 +502,12 @@ class Group < Namespace
full_name
end
def to_human_reference(from = nil)
return unless cross_namespace_reference?(from)
human_name
end
def visibility_level_allowed_by_parent?(level = self.visibility_level)
return true unless parent_id && parent_id.nonzero?

View File

@ -8,6 +8,8 @@ module Ml
enum status: { running: 0, scheduled: 1, finished: 2, failed: 3, killed: 4 }
PACKAGE_PREFIX = 'candidate_'
validates :eid, :experiment, :project, presence: true
validates :status, inclusion: { in: statuses.keys }
validates :model_version_id, uniqueness: { allow_nil: true }
@ -68,9 +70,7 @@ module Ml
end
def package_version
return "candidate_#{iid}" if for_model?
iid
package&.generic? ? iid : "#{PACKAGE_PREFIX}#{iid}"
end
def from_ci?

View File

@ -5,8 +5,6 @@ module Packages
class Package < Packages::Package
self.allow_legacy_sti_class = true
after_create_commit :publish_creation_event
validates :name, format: { with: Gitlab::Regex.generic_package_name_regex }
validates :version, presence: true, format: { with: Gitlab::Regex.generic_package_version_regex }
end

View File

@ -5,6 +5,8 @@ module Packages
class Package < Packages::Package
self.allow_legacy_sti_class = true
after_create_commit :publish_creation_event
has_one :model_version, class_name: "Ml::ModelVersion", inverse_of: :package
validates :name,

View File

@ -1569,8 +1569,15 @@ class Project < ApplicationRecord
import_url.present?
end
def safe_import_url
Gitlab::UrlSanitizer.new(import_url).masked_url
def notify_project_import_complete?
return false if import_type.nil? || mirror? || forked?
gitea_import? || github_import? || bitbucket_import? || bitbucket_server_import?
end
def safe_import_url(masked: true)
url = Gitlab::UrlSanitizer.new(import_url)
masked ? url.masked_url : url.sanitized_url
end
def jira_import?
@ -1593,6 +1600,14 @@ class Project < ApplicationRecord
import_type == 'github'
end
def bitbucket_import?
import_type == 'bitbucket'
end
def bitbucket_server_import?
import_type == 'bitbucket_server'
end
def github_enterprise_import?
github_import? &&
URI.parse(import_url).host != URI.parse(Octokit::Default::API_ENDPOINT).host

View File

@ -6,6 +6,8 @@ class ProjectImportState < ApplicationRecord
self.table_name = "project_mirror_data"
attr_accessor :user_mapping_enabled, :safe_import_url
after_commit :expire_etag_cache
belongs_to :project, inverse_of: :import_state
@ -66,6 +68,7 @@ class ProjectImportState < ApplicationRecord
end
after_transition any => [:canceled, :failed] do |state, _|
state.set_notification_data
state.project.remove_import_data
end
@ -82,6 +85,7 @@ class ProjectImportState < ApplicationRecord
after_transition started: :finished do |state, _|
project = state.project
state.set_notification_data
project.reset_cache_and_import_attrs
if Gitlab::ImportSources.values.include?(project.import_type) && project.repo_exists? # rubocop: disable Performance/InefficientHashSearch -- not a Hash
@ -89,6 +93,13 @@ class ProjectImportState < ApplicationRecord
Projects::AfterImportWorker.perform_async(project.id)
end
end
state.send_completion_notification
end
after_transition any => [:failed] do |state, _|
state.set_notification_data
state.send_completion_notification(notify_group_owners: false)
end
end
@ -147,6 +158,33 @@ class ProjectImportState < ApplicationRecord
# import? does SQL work so only run it if it looks like there's an import running
status == 'started' && project.import?
end
def send_completion_notification(notify_group_owners: true)
return unless project.notify_project_import_complete?
run_after_commit do
Projects::ImportExport::ImportCompletionNotificationWorker.perform_async(
project.id,
'user_mapping_enabled' => user_mapping_enabled?,
'notify_group_owners' => notify_group_owners,
'safe_import_url' => safe_import_url
)
end
end
def set_notification_data
self.user_mapping_enabled ||= project.import_data&.user_mapping_enabled?
self.safe_import_url ||= project.safe_import_url(masked: false)
end
private
# Return whether or not user mapping was enabled during the project's import to determine who to
# send completion emails to. user_mapping_enabled should be set if import_data is removed.
# This can be removed when all 3rd party project importer user mapping feature flags are removed.
def user_mapping_enabled?
user_mapping_enabled || project.import_data&.user_mapping_enabled?
end
end
ProjectImportState.prepend_mod_with('ProjectImportState')

View File

@ -127,6 +127,18 @@ module Ci
end
# rubocop: enable Metrics/ParameterLists, Metrics/AbcSize
def execute_async(source, options)
pipeline_creation_request = ::Ci::PipelineCreation::Requests.start_for_project(project)
creation_params = params.merge(pipeline_creation_request: pipeline_creation_request)
::CreatePipelineWorker.perform_async(
project.id, current_user.id, params[:ref], source.to_s,
options.stringify_keys, creation_params.except(:ref).stringify_keys
)
ServiceResponse.success(payload: pipeline_creation_request['id'])
end
private
def after_successful_creation_hook

View File

@ -6,7 +6,7 @@ module Packages
def execute
candidate = params[:candidate]
return unless candidate&.for_model?
return unless candidate
package = find_or_create_package!(
::Packages::Package.package_types['ml_model'],

View File

@ -0,0 +1,39 @@
- header_style = 'font-size:24px; text-align:center; line-height:30px;'
- text_style = 'font-size:16px; text-align:center; line-height:24px; margin-top: 24px;'
- button_style = 'border: 1px solid #694cc0; border-radius: 4px; font-size: 14px; padding: 8px 16px; background-color: #7b58cf; color: #fff; cursor: pointer;'
- strong_tag_pair = tag_pair(tag.strong, :strong_open, :strong_close)
- start_date = l(@project.created_at.to_date, format: :long)
%h1{ style: header_style }
= s_('Import|Import completed')
%p{ style: text_style }
- if @is_project_creator
= safe_format(s_('Import|The import you started on %{start_date} from %{strong_open}%{hostname}%{strong_close} has completed.'),
strong_tag_pair,
hostname: @hostname,
start_date: start_date)
- else
= safe_format(s_('Import|The import %{project_creator_name} started on %{start_date} from %{strong_open}%{hostname}%{strong_close} has completed.'),
strong_tag_pair,
project_creator_name: sanitize_name(@project.creator.name),
hostname: @hostname,
start_date: start_date)
- if !@user_mapping_available
= s_('Import|You can now review your import results.')
- elsif @is_group_owner
= s_('Import|You can reassign contributions on the "Members" page of the group.')
- else
= s_('Import|Users with the Owner role for the group can reassign contributions on the "Members" page.')
%p{ style: text_style }
- if @user_mapping_available && @is_group_owner
= link_to group_group_members_url(@namespace, tab: 'placeholders'), target: '_blank', rel: 'noopener noreferrer' do
%button{ type: 'button', style: button_style }
= s_('Import|Reassign contributions')
- else
= link_to import_history_index_url, target: '_blank', rel: 'noopener noreferrer' do
%button{ type: 'button', style: button_style }
= s_('Import|View import results')

View File

@ -0,0 +1,20 @@
<%= s_('Import|Import completed') %>
<% if @is_project_creator %>
<%= safe_format(s_('Import|The import you started on %{start_date} from %{strong_open}%{hostname}%{strong_close} has completed.'), strong_open: '', strong_close: '', hostname: @hostname, start_date: l(@project.created_at.to_date, format: :long)) -%>
<% else %>
<%= safe_format(s_('Import|The import %{project_creator_name} started on %{start_date} from %{strong_open}%{hostname}%{strong_close} has completed.'), strong_open: '', strong_close: '', hostname: @hostname, project_creator_name: sanitize_name(@project.creator.name), start_date: l(@project.created_at.to_date, format: :long)) -%>
<% end %>
<% if !@user_mapping_available %>
<%= s_('Import|You can now review your import results.') %>
<% elsif @is_group_owner %>
<%= s_('Import|You can reassign contributions on the "Members" page of the group.') %>
<% else %>
<%= s_('Import|Users with the Owner role for the group can reassign contributions on the "Members" page.') %>
<% end %>
<% if @user_mapping_available && @is_group_owner %>
<%= s_('Import|Reassign contributions') %>: <%= group_group_members_url(@namespace, tab: 'placeholders') %>
<% else %>
<%= s_('Import|View import results') %>: <%= import_history_index_url %>
<% end %>

View File

@ -3918,6 +3918,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: projects_import_export_import_completion_notification
:worker_name: Projects::ImportExport::ImportCompletionNotificationWorker
:feature_category: :importers
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: projects_import_export_parallel_project_export
:worker_name: Projects::ImportExport::ParallelProjectExportWorker
:feature_category: :importers

View File

@ -14,15 +14,16 @@ class CreatePipelineWorker # rubocop:disable Scalability/IdempotentWorker
worker_resource_boundary :cpu
loggable_arguments 2, 3, 4
def perform(project_id, user_id, ref, source, params = {})
def perform(project_id, user_id, ref, source, execute_options = {}, creation_params = {})
Gitlab::QueryLimiting.disable!('https://gitlab.com/gitlab-org/gitlab/-/issues/464671')
project = Project.find(project_id)
user = User.find(user_id)
params = params.deep_symbolize_keys
execute_options = execute_options.deep_symbolize_keys
creation_params = creation_params.symbolize_keys.merge(ref: ref)
Ci::CreatePipelineService
.new(project, user, ref: ref)
.execute(source, **params)
.new(project, user, **creation_params)
.execute(source, **execute_options)
end
end

View File

@ -11,7 +11,8 @@ module Ml
idempotent!
def handle_event(event)
return unless (candidate = Ml::Candidate.with_project_id_and_iid(event.data[:project_id], event.data[:version]))
candidate_iid = event.data[:version].delete_prefix(Ml::Candidate::PACKAGE_PREFIX)
return unless (candidate = Ml::Candidate.with_project_id_and_iid(event.data[:project_id], candidate_iid))
return unless (package = Packages::Package.find_by_id(event.data[:id]))
candidate.package = package
@ -19,7 +20,7 @@ module Ml
end
def self.handles_event?(event)
event.generic? && Ml::Experiment.package_for_experiment?(event.data[:name])
event.ml_model? && Ml::Experiment.package_for_experiment?(event.data[:name])
end
end
end

View File

@ -0,0 +1,51 @@
# frozen_string_literal: true
module Projects
module ImportExport
class ImportCompletionNotificationWorker
include ApplicationWorker
idempotent!
data_consistency :delayed
urgency :low
feature_category :importers
attr_reader :project, :user_mapping_enabled, :notify_group_owners, :safe_import_url
def perform(project_id, params = {})
@project = Project.find_by_id(project_id)
@user_mapping_enabled = params['user_mapping_enabled']
@notify_group_owners = params['notify_group_owners']
@safe_import_url = params['safe_import_url']
return unless project
return unless project.notify_project_import_complete?
send_completion_notification
end
private
def send_completion_notification
completion_notification_recipients.each do |user|
Notify
.project_import_complete(project.id, user.id, user_mapping_enabled, safe_import_url)
.deliver_later
end
end
def completion_notification_recipients
recipients = []
recipients << project.creator if project.creator.human?
if user_mapping_enabled && notify_group_owners
project.root_ancestor.owners.each do |owner|
recipients |= [owner] if owner.human?
end
end
recipients
end
end
end
end

View File

@ -1,9 +0,0 @@
---
name: require_resource_id
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/463046
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/154115
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/465363
milestone: '17.1'
group: group::ai framework
type: gitlab_com_derisk
default_enabled: false

View File

@ -671,6 +671,8 @@
- 1
- - projects_import_export_create_relation_exports
- 1
- - projects_import_export_import_completion_notification
- 1
- - projects_import_export_parallel_project_export
- 1
- - projects_import_export_relation_export

View File

@ -0,0 +1,8 @@
---
migration_job_name: CopyRunnerTaggings
description: Copy taggings data for the ci_runners table into the new sharded ci_runner_taggigns table
feature_category: runner
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/175578
milestone: '17.8'
queued_migration_version: 20241219100359
finalized_by: # version of the migration that finalized this BBM

View File

@ -4,7 +4,7 @@ class CreateDuoWorkflowsTables < Gitlab::Database::Migration[2.2]
milestone '17.2'
def change
create_table :duo_workflows_workflows do |t| # rubocop:disable Migration/EnsureFactoryForTable -- https://gitlab.com/gitlab-org/gitlab/-/issues/468630
create_table :duo_workflows_workflows do |t| # rubocop:disable Migration/EnsureFactoryForTable, Lint/RedundantCopDisableDirective -- https://gitlab.com/gitlab-org/gitlab/-/issues/468630
t.bigint :user_id, null: false, index: true
t.bigint :project_id, null: false, index: true

View File

@ -3,7 +3,6 @@
class CreateDependencyListExportPartsTable < Gitlab::Database::Migration[2.2]
milestone '17.2'
# rubocop:disable Migration/EnsureFactoryForTable -- False Positive
def change
create_table :dependency_list_export_parts do |t|
t.references :dependency_list_export, foreign_key: { on_delete: :cascade }, null: false, index: true
@ -15,5 +14,4 @@ class CreateDependencyListExportPartsTable < Gitlab::Database::Migration[2.2]
t.text :file, limit: 255
end
end
# rubocop:enable Migration/EnsureFactoryForTable
end

View File

@ -3,7 +3,6 @@
class CreatePCiBuildTraceMetadata < Gitlab::Database::Migration[2.2]
milestone '17.4'
# rubocop:disable Migration/EnsureFactoryForTable -- No factory needed
def change
create_table(:p_ci_build_trace_metadata, primary_key: [:build_id, :partition_id],
options: 'PARTITION BY LIST (partition_id)', if_not_exists: true) do |t|
@ -19,5 +18,4 @@ class CreatePCiBuildTraceMetadata < Gitlab::Database::Migration[2.2]
t.index :trace_artifact_id
end
end
# rubocop:enable Migration/EnsureFactoryForTable -- No factory needed
end

View File

@ -1,6 +1,5 @@
# frozen_string_literal: true
# rubocop:disable Migration/EnsureFactoryForTable -- False positive
class CreateSystemAccessInstanceMicrosoftApplications < Gitlab::Database::Migration[2.2]
milestone '17.5'
@ -17,5 +16,3 @@ class CreateSystemAccessInstanceMicrosoftApplications < Gitlab::Database::Migrat
end
end
end
# rubocop:enable Migration/EnsureFactoryForTable

View File

@ -1,6 +1,5 @@
# frozen_string_literal: true
# rubocop:disable Migration/EnsureFactoryForTable -- False positive
class CreateSystemAccessInstanceMicrosoftGraphAccessTokens < Gitlab::Database::Migration[2.2]
milestone '17.5'
@ -16,5 +15,3 @@ class CreateSystemAccessInstanceMicrosoftGraphAccessTokens < Gitlab::Database::M
end
end
end
# rubocop:enable Migration/EnsureFactoryForTable

View File

@ -4,7 +4,7 @@ class CreateInstanceIntegrationsTable < Gitlab::Database::Migration[2.2]
milestone '17.4'
def up
create_table :instance_integrations, id: :bigserial do |t| # rubocop:disable Migration/EnsureFactoryForTable -- https://gitlab.com/gitlab-org/gitlab/-/issues/468630
create_table :instance_integrations, id: :bigserial do |t|
t.timestamps_with_timezone null: false
t.integer :comment_detail
t.boolean :active, default: false, null: false

View File

@ -5,7 +5,7 @@ class CreateCiJobArtifactReports < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
def up
create_table(:p_ci_job_artifact_reports, # rubocop:disable Migration/EnsureFactoryForTable -- Factory exists
create_table(:p_ci_job_artifact_reports,
primary_key: [:job_artifact_id, :partition_id],
options: 'PARTITION BY LIST (partition_id)', if_not_exists: true) do |t|
t.bigint :job_artifact_id, null: false

View File

@ -6,7 +6,7 @@ class CreatePackagesConanRecipeRevisions < Gitlab::Database::Migration[2.2]
INDEX_PACKAGE_ID_REVISION = 'idx_on_packages_conan_recipe_revisions_package_id_revision'
def up
create_table :packages_conan_recipe_revisions do |t| # rubocop:disable Migration/EnsureFactoryForTable -- https://gitlab.com/gitlab-org/gitlab/-/issues/468630
create_table :packages_conan_recipe_revisions do |t|
t.bigint :package_id, null: false
t.bigint :project_id, null: false
t.timestamps_with_timezone null: false

View File

@ -7,7 +7,7 @@ class CreatePackagesConanPackageReferences < Gitlab::Database::Migration[2.2]
CONSTRAINT_NAME = 'chk_conan_references_info_length'
def up
create_table :packages_conan_package_references do |t| # rubocop:disable Migration/EnsureFactoryForTable -- https://gitlab.com/gitlab-org/gitlab/-/issues/468630
create_table :packages_conan_package_references do |t|
t.bigint :package_id, null: false
t.bigint :project_id, null: false
t.bigint :recipe_revision_id

View File

@ -6,7 +6,7 @@ class CreateCiJobTokenAuthorizations < Gitlab::Database::Migration[2.2]
INDEX_NAME = 'idx_ci_job_token_authorizations_on_accessed_and_origin_project'
def change
create_table(:ci_job_token_authorizations, if_not_exists: true) do |t| # rubocop:disable Migration/EnsureFactoryForTable -- factory at ci/job_token/authorizations.rb
create_table(:ci_job_token_authorizations, if_not_exists: true) do |t|
t.bigint :accessed_project_id, null: false
t.bigint :origin_project_id, null: false, index: true
t.datetime_with_timezone :last_authorized_at, null: false

View File

@ -5,7 +5,7 @@ class CreateSystemAccessGroupMicrosoftApplications < Gitlab::Database::Migration
milestone '17.7'
def change
create_table :system_access_group_microsoft_applications do |t| # rubocop:disable Migration/EnsureFactoryForTable -- False positive, factory name is prefixed with system_access
create_table :system_access_group_microsoft_applications do |t|
t.timestamps_with_timezone null: false
t.references :group, foreign_key: { to_table: :namespaces, on_delete: :cascade }, null: false
t.bigint :temp_source_id, index: { unique: true, name: 'index_group_microsoft_applications_on_temp_source_id' },

View File

@ -5,7 +5,7 @@ class CreateSystemAccessGroupMicrosoftGraphAccessTokens < Gitlab::Database::Migr
milestone '17.7'
def change
create_table :system_access_group_microsoft_graph_access_tokens do |t| # rubocop:disable Migration/EnsureFactoryForTable -- False positive, factory name is prefixed with system_access
create_table :system_access_group_microsoft_graph_access_tokens do |t|
t.timestamps_with_timezone null: false
t.references :system_access_group_microsoft_application,
index: { name: 'unique_index_group_ms_access_tokens_on_ms_app_id', unique: true }

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
# Queues the CopyRunnerTaggings batched background migration (BBM), which
# copies taggings data for rows in ci_runners into the new sharded
# ci_runner_taggings table (see the dictionary entry for this BBM).
class QueueCopyRunnerTaggings < Gitlab::Database::Migration[2.2]
milestone '17.8'
# Only CI tables are touched, so restrict execution to the gitlab_ci schema.
restrict_gitlab_migration gitlab_schema: :gitlab_ci
# Name of the background migration job class that performs the copy.
MIGRATION = "CopyRunnerTaggings"
# Delay between scheduling consecutive batches.
DELAY_INTERVAL = 2.minutes
# Rows per scheduled batch, and rows per sub-batch within a job.
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
# Enqueues the BBM over ci_runners, batched on its id column.
def up
queue_batched_background_migration(
MIGRATION,
:ci_runners,
:id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
# Removes any queued/in-progress instances of the BBM on rollback.
def down
delete_batched_background_migration(MIGRATION, :ci_runners, :id, [])
end
end

View File

@ -0,0 +1 @@
dd5b80b7f1644356f57db2b78aa71dd5717adc7dae55ea6edc913a3f948e4092

View File

@ -8062,6 +8062,7 @@ Input type: `PipelineCreateInput`
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationpipelinecreateasync"></a>`async` **{warning-solid}** | [`Boolean`](#boolean) | **Deprecated:** **Status**: Experiment. Introduced in GitLab 17.8. |
| <a id="mutationpipelinecreateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationpipelinecreateprojectpath"></a>`projectPath` | [`ID!`](#id) | Full path of the project that is triggering the pipeline. |
| <a id="mutationpipelinecreateref"></a>`ref` | [`String!`](#string) | Ref on which to run the pipeline. |
@ -8073,7 +8074,8 @@ Input type: `PipelineCreateInput`
| ---- | ---- | ----------- |
| <a id="mutationpipelinecreateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationpipelinecreateerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationpipelinecreatepipeline"></a>`pipeline` | [`Pipeline`](#pipeline) | Pipeline created after mutation. |
| <a id="mutationpipelinecreatepipeline"></a>`pipeline` | [`Pipeline`](#pipeline) | Pipeline created after mutation. Null if `async: true`. |
| <a id="mutationpipelinecreaterequestid"></a>`requestId` **{warning-solid}** | [`String`](#string) | **Deprecated:** **Status**: Experiment. Introduced in GitLab 17.8. |
### `Mutation.pipelineDestroy`
@ -20175,6 +20177,7 @@ Check user's permission for the car.
| <a id="cicatalogresourcename"></a>`name` | [`String`](#string) | Name of the catalog resource. |
| <a id="cicatalogresourcestarcount"></a>`starCount` | [`Int!`](#int) | Number of times the catalog resource has been starred. |
| <a id="cicatalogresourcestarrerspath"></a>`starrersPath` | [`String`](#string) | Relative path to the starrers page for the catalog resource project. |
| <a id="cicatalogresourcetopics"></a>`topics` | [`[String!]`](#string) | Topics for the catalog resource. |
| <a id="cicatalogresourceverificationlevel"></a>`verificationLevel` | [`CiCatalogResourceVerificationLevel`](#cicatalogresourceverificationlevel) | Verification level of the catalog resource. |
| <a id="cicatalogresourcevisibilitylevel"></a>`visibilityLevel` | [`VisibilityLevelsEnum`](#visibilitylevelsenum) | Visibility level of the catalog resource. |
| <a id="cicatalogresourcewebpath"></a>`webPath` **{warning-solid}** | [`String`](#string) | **Introduced** in GitLab 16.1. **Status**: Experiment. Web path of the catalog resource. |

View File

@ -81,6 +81,29 @@ Try to split all your migrations into two steps:
1. Refactor just the Vuex API: Don't change the store structure, make sure it works in Pinia ([example](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/149489)).
1. Refactor the structure: Split your store into multiple smaller, single purpose stores.
### Migration plan
If your store can't be migrated in a single merge request then follow these steps for a smooth migration from Vuex to Pinia:
1. Identify the stores you are going to migrate. Include all the stores that depend on your store and all the stores that you depend on.
1. Create a migration issue, assign a migration DRI(s) and list all the stores you're going to migrate.
Track your migration progress in that issue. If necessary, split the migration into multiple issues.
1. Create a new CODEOWNERS (`.gitlab/CODEOWNERS`) rule for the store files you're migrating, include all the Vuex module dependencies and store specs.
If you are migrating only a single store then you would need to include only `state.js` (or your `index.js`),
`actions.js`, `mutations.js` and `getters.js` and their respective spec files.
Assign at least two individuals responsible for reviewing changes made to the Vuex store.
Always sync your changes from Vuex store to Pinia. This is very important so you don't introduce regressions with the Pinia store.
1. Copy existing store as-is to a new location (you can call it `stores/legacy_store` for example). Keep the file structure intact.
1. Create a store definition (`defineStore`) and only define your state in there.
1. Use code mods from the next section to migrate the store files. Import and use the migrated code in store's definition.
1. Migrate the store specs manually.
1. Refactor components to use the new store.
1. Remove the Vuex store.
1. Remove CODEOWNERS rule.
1. Close the migration issue.
### Automated migration using codemods
You can use [ast-grep](https://ast-grep.github.io/) codemods to simplify migration from Vuex to Pinia.

View File

@ -198,3 +198,69 @@ These tests are performed for offline environments:
| Network | Tests whether: <br>- The environment variable `AI_GATEWAY_URL` has been set to a valid URL.<br> - Your instance can connect to the URL specified by `AI_GATEWAY_URL`.<br><br>If your instance cannot connect to the URL, ensure that your firewall or proxy server settings [allow connection](../user/gitlab_duo/setup.md). |
| License | Tests whether your license has the ability to access Code Suggestions feature. |
| System exchange | Tests whether Code Suggestions can be used in your instance. If the system exchange assessment fails, users might not be able to use GitLab Duo features. |
## Does the AIGW need to autoscale?
Autoscaling is not mandatory but is recommended for environments with variable workloads, high concurrency requirements, or unpredictable usage patterns. In GitLab's production environment:
- Baseline Setup: A single AI Gateway instance with 2 CPU cores and 8 GB RAM can handle approximately 40 concurrent requests.
- Scaling Guidelines: For larger setups, such as an AWS t3.2xlarge instance (8 vCPUs, 32 GB RAM), the gateway can handle up to 160 concurrent requests, equivalent to 4x the baseline setup.
- Request Throughput: GitLab.com's observed usage suggests that 7 RPS (requests per second) per 1000 active users is a reasonable metric for planning.
- Autoscaling Options: Use Kubernetes Horizontal Pod Autoscalers (HPA) or similar mechanisms to dynamically adjust the number of instances based on metrics like CPU, memory utilization, or request latency thresholds.
## Configuration Examples by Deployment Size
- Small Deployment:
- Single instance with 2 vCPUs and 8 GB RAM.
- Handles up to 40 concurrent requests.
- Teams or organizations with up to 50 users and predictable workloads.
- Fixed instances may suffice; autoscaling can be disabled for cost efficiency.
- Medium Deployment:
- Single AWS t3.2xlarge instance with 8 vCPUs and 32 GB RAM.
- Handles up to 160 concurrent requests.
- Organizations with 50-200 users and moderate concurrency requirements.
- Implement Kubernetes HPA with thresholds for 50% CPU utilization or request latency above 500ms.
- Large Deployment:
- Cluster of multiple AWS t3.2xlarge instances or equivalent.
- Each instance handles 160 concurrent requests, scaling to thousands of users with multiple instances.
- Enterprises with over 200 users and variable, high-concurrency workloads.
- Use HPA to scale pods based on real-time demand, combined with node autoscaling for cluster-wide resource adjustments.
## What specs does the AIGW container have access to, and how does resource allocation affect performance?
The AI Gateway operates effectively under the following resource allocations:
- 2 CPU cores and 8 GB of RAM per container.
- Containers typically utilize about 7.39% CPU and proportionate memory in GitLab's production environment, leaving room for growth or handling burst activity.
## Mitigation Strategies for Resource Contention
- Use Kubernetes resource requests and limits to ensure AIGW containers receive guaranteed CPU and memory allocations. For example:
```yaml
resources:
requests:
memory: "16Gi"
cpu: "4"
limits:
memory: "32Gi"
cpu: "8"
```
- Implement tools like Prometheus and Grafana to track resource utilization (CPU, memory, latency) and detect bottlenecks early.
- Dedicate nodes or instances exclusively to the AI Gateway to prevent resource competition with other services.
## Scaling Strategies
- Use Kubernetes HPA to scale pods based on real-time metrics like:
- Average CPU utilization exceeding 50%.
- Request latency consistently above 500ms.
- Enable node autoscaling to scale infrastructure resources dynamically as pods increase.
## Scaling Recommendations
| Deployment Size | Instance Type | Resources | Capacity (Concurrent Requests) | Scaling Recommendations |
|------------------|--------------------|------------------------|---------------------------------|---------------------------------------------|
| Small | 2 vCPUs, 8 GB RAM | Single instance | 40 | Fixed deployment; no autoscaling. |
| Medium | AWS t3.2xlarge | Single instance | 160 | HPA based on CPU or latency thresholds. |
| Large | Multiple t3.2xlarge | Clustered instances | 160 per instance | HPA + node autoscaling for high demand. |

View File

@ -225,7 +225,6 @@ The following operational features are not available:
- Multiple Geo secondaries (Geo replicas) beyond the secondary site included by default
- [Geo proxying](../../administration/geo/secondary_proxy/index.md) and using a unified URL
- Self-serve purchasing and configuration
- Multiple login providers
- Support for deploying to non-AWS cloud providers, such as GCP or Azure
- Observability dashboard in Switchboard

View File

@ -158,11 +158,11 @@ You can download the JSON report file from the CI/CD pipelines page. For more in
## Corpus registry
The corpus registry is a library of corpuses. Corpuses in a project's registry are available to
all jobs in that project. A project-wide registry is a more efficient way to manage corpuses than
The corpus registry is a library of corpora. Corpora in a project's registry are available to
all jobs in that project. A project-wide registry is a more efficient way to manage corpora than
the default option of one corpus per job.
The corpus registry uses the package registry to store the project's corpuses. Corpuses stored in
The corpus registry uses the package registry to store the project's corpora. Corpora stored in
the registry are hidden to ensure data integrity.
When you download a corpus, the file is named `artifacts.zip`, regardless of the filename used when

View File

@ -205,6 +205,8 @@ This rule enforces the defined actions for any merge request based on the commit
This action sets an approval rule to be required when conditions are met for at least one rule in
the defined policy.
> - [Added](https://gitlab.com/groups/gitlab-org/-/epics/12319) support for up to five separate `require_approval` actions in GitLab 17.7 [with a flag](../../../administration/feature_flags.md) named `multiple_approval_actions`. Disabled by default.
| Field | Type | Required | Possible values | Description |
|-------|------|----------|-----------------|-------------|
| `type` | `string` | true | `require_approval` | The action's type. |
@ -451,9 +453,9 @@ When using license approval policies, the combination of project, component (dep
- A license approval policy is created to block merge requests with newly detected licenses matching `AGPL-1.0`. A change is made in project `demo` for component `osframework` that violates the policy. If approved and merged, future merge requests to `osframework` in project `demo` with the license `AGPL-1.0` don't require approval.
### Multiple approvals
### Additional approvals
There are several situations where the merge request approval policy requires an additional approval step. For example:
Merge request approval policies require an additional approval step in some situations. For example:
- The number of security jobs is reduced in the working branch and no longer matches the number of
security jobs in the target branch. Users can't skip the Scanning Result Policies by removing

View File

@ -114,9 +114,10 @@ You must register an agent before you can install the agent in your cluster. To
it must be in this project. Your cluster manifest files should also be in this project.
1. Select **Operate > Kubernetes clusters**.
1. Select **Connect a cluster (agent)**.
- If you want to create a configuration with CI/CD defaults, type a name.
- If you already have an [agent configuration file](#create-an-agent-configuration-file), select it from the list.
1. Select **Register an agent**.
1. In the **Name of new agent** field, enter a unique name for your agent.
- If an [agent configuration file](#create-an-agent-configuration-file) with this name already exists, it is used.
- If no configuration exists for this name, a new agent is created with the default configuration.
1. Select **Create and register**.
1. GitLab generates an access token for the agent. You need this token to install the agent
in your cluster.

View File

@ -48,11 +48,11 @@ For more information, see:
## Step 3: Try other GitLab Duo features
GitLab Duo is available at different points in your workflow. From troubleshooting
CI/CD pipelines to writing test cases and reviewing code, GitLab Duo can help you
GitLab Duo is available in all stages of your workflow. From troubleshooting
CI/CD pipelines to writing test cases and resolving security threats, GitLab Duo can help you
in a variety of ways.
If you want to test a feature, you can go to a failed CI/CD job and at the bottom
If you want to test a feature, you can go to one of your failed CI/CD jobs and at the bottom
of the page, select **Troubleshoot**.
Or, in an issue that has a lot of comments, in the **Activity** section, select **View summary**.
@ -65,19 +65,22 @@ For more information, see:
## Step 4: Prepare to use GitLab Duo in your IDE
To use GitLab Duo, including Code Suggestions, in your IDE, you must:
To use GitLab Duo, including Code Suggestions, in your IDE:
- Install an extension in your IDE.
- Install an extension in your local IDE.
- Authenticate with GitLab from the IDE. You can use either OAuth or a personal access token.
Then you can confirm that GitLab Duo is available in your IDE and test some of the features.
Alternatively, you can use the Web IDE, which is included in the GitLab UI and already fully configured.
For more information, see:
- [Set up the extension for VS Code](../../editor_extensions/visual_studio_code/setup.md).
- [Set up the extension for JetBrains](../../editor_extensions/jetbrains_ide/setup.md).
- [Set up the extension for Visual Studio](../../editor_extensions/visual_studio/setup.md).
- [Set up the extension for Neovim](../../editor_extensions/neovim/setup.md).
- [Use the WebIDE](../project/web_ide/index.md).
## Step 5: Confirm that Code Suggestions is on in your IDE
@ -95,3 +98,7 @@ For more information, see:
- [Supported extensions and languages](../project/repository/code_suggestions/supported_extensions.md).
- [Turn on Code Suggestions](../project/repository/code_suggestions/set_up.md#turn-on-code-suggestions).
- [Troubleshoot GitLab Workflow extension for VS Code](../../editor_extensions/visual_studio_code/troubleshooting.md).
- [Troubleshoot GitLab plugin for JetBrains IDEs](../../editor_extensions/jetbrains_ide/jetbrains_troubleshooting.md).
- [Troubleshoot GitLab extension for Visual Studio](../../editor_extensions/visual_studio/visual_studio_troubleshooting.md).
- [Troubleshoot GitLab plugin for Neovim](../../editor_extensions/neovim/neovim_troubleshooting.md).

View File

@ -10,59 +10,73 @@ DETAILS:
**Tier:** Free, Premium, Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
One project or Git repository can contain multiple different subprojects or submodules that are all
packaged and published individually.
Use a monorepo project as a package registry to publish packages to multiple projects.
## Publishing different packages to the parent project
## Publish packages to a project and its child projects
The number and name of packages you can publish to one project is not limited.
You can accomplish this by setting up different configuration files for each
package. See the documentation for the package manager of your choice since
each has its own specific files and instructions to follow to publish
a given package.
To publish packages to a project and its child projects, you must add configuration files for each package. To learn how to configure packages for a specific package manager, see [Supported package managers](../../packages/package_registry/supported_package_managers.md).
The example here uses [NPM](../npm_registry/index.md).
In this example, `MyProject` is the parent project. It contains a sub-project `Foo` in the
The following example shows you how to publish packages for a project and its child project with [npm](../npm_registry/index.md).
Prerequisites:
- A [personal access token](../../../user/profile/personal_access_tokens.md)
with the scope set to `api`.
- A test project.
In this example, `MyProject` is the parent project. It contains a child project called `ChildProject` in the
`components` directory:
```plaintext
MyProject/
|- src/
| |- components/
| |- Foo/
| |- ChildProject/
|- package.json
```
The goal is to publish the packages for `MyProject` and `Foo`. Following the instructions in the
[GitLab NPM registry documentation](../npm_registry/index.md),
you can publish `MyProject` by modifying the `package.json` file with a `publishConfig` section,
and by doing one of the following:
To publish a package for `MyProject`:
- Modify your local NPM configuration with CLI commands like `npm config set`.
- Save a `.npmrc` file in the root of the project specifying these configuration settings.
1. Go to the `MyProject` directory.
1. Initialize the project by running `npm init`. Make sure the package name follows the [naming convention](../npm_registry/index.md#naming-convention).
1. Create a `.npmrc` file. Include the registry URL and the project endpoint. For example:
If you follow the instructions, you can publish `MyProject` by running `npm publish` from the root
directory.
```yaml
//gitlab.example.com/api/v4/projects/<project_id>/packages/npm/:_authToken="${NPM_TOKEN}"
@scope:registry=https://gitlab.example.com/api/v4/projects/<project_id>/packages/npm/
```
Publishing `Foo` is almost exactly the same. Follow the same steps while in the `Foo`
directory. `Foo` needs its own `package.json` file, which you can add manually by using `npm init`.
`Foo` also needs its own configuration settings. Since you are publishing to the same place, if you
used `npm config set` to set the registry for the parent project, then no additional setup is
necessary. If you used an `.npmrc` file, you need an additional `.npmrc` file in the `Foo` directory.
Be sure to add `.npmrc` files to the `.gitignore` file or use environment variables in place of your
access tokens to prevent your tokens from being exposed. This `.npmrc` file can be identical to the
one you used in `MyProject`. You can now run `npm publish` from the `Foo` directory and you can
publish `Foo` separately from `MyProject`.
1. Publish your package from the command line. Replace `<token>` with your personal access token:
You could follow a similar process for Conan packages. However, instead of `.npmrc` and
`package.json`, you have `conanfile.py` in multiple locations within the project.
```shell
NPM_TOKEN=<token> npm publish
```
## Publishing to other projects
WARNING:
Never hardcode GitLab tokens (or any tokens) directly in `.npmrc` files or any other files that can
be committed to a repository.
A package is associated with a project on GitLab, but the package does not need to be associated
with the code in that project. When configuring NPM or Maven, you only use the `Project ID` to set
the registry URL that the package uploads to. If you set this to any project that you have access to
and update any other configuration similarly depending on the package type, your packages are
published to that project. This means you can publish multiple packages to one project, even if
their code does not exist in the same place. See the [project registry workflow documentation](project_registry.md)
for more information.
You should see the package for `MyProject` published in your project's package registry.
To publish a package in `ChildProject`, follow the same steps. The contents of the `.npmrc` file can be identical to the one you added in `MyProject`.
After you publish the package for `ChildProject`, you should see the package in your project's package registry.
## Publishing packages to other projects
A package is associated with a project on GitLab. But, a package is not associated
with the code in that project.
For example, when configuring a package for npm or Maven, the `project_id` sets the registry URL that the package publishes to:
```yaml
# npm
https://gitlab.example.com/api/v4/projects/<project_id>/packages/npm/
# maven
https://gitlab.example.com/api/v4/projects/<project_id>/packages/maven/
```
If you change the `project_id` in the registry URL to another project, your package publishes to that project.
By changing the `project_id`, you can publish multiple packages to one project separately from the code. For more information, see [Store all of your packages in one GitLab project](project_registry.md).

View File

@ -74,8 +74,19 @@ Meanwhile:
### Emails to Service Desk do not create issues
Your emails might be ignored because they contain one of the
[email headers that GitLab ignores](../../../administration/incoming_email.md#rejected-headers).
- Your emails might be ignored because they contain one of the
[email headers that GitLab ignores](../../../administration/incoming_email.md#rejected-headers).
- Emails might get dropped if the sender email domain is using strict DKIM rules and there is a verification
failure due to forwarding emails to the project-specific Service Desk address.
A typical DKIM failure message, which can be found in email headers, might look like:
```plaintext
dkim=fail (signature did not verify) ... arc=fail
```
The exact wording of the failure message may vary depending on the specific email system or tools in use.
Also see [this article on DKIM failures](https://automatedemailwarmup.com/blog/dkim-fail/) for more
information and potential solutions.
### Email ingestion doesn't work in 16.6.0 self-managed

View File

@ -305,7 +305,7 @@ To add [labels](project/labels.md) to a task:
## Set a start and due date
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/365399) in GitLab 15.4 [with a flag](../administration/feature_flags.md) named `work_items_mvc_2`. Disabled by default.
> - [Enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/365399) in GitLab 15.5.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/365399) in GitLab 15.5. Feature flag `work_items_mvc_2` removed.
> - [Changed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/169256) the minimum user role from Reporter to Planner in GitLab 17.7.
You can set a [start and due date](project/issues/due_dates.md) on a task.
@ -339,7 +339,7 @@ To set a start date:
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/367463) in GitLab 15.5 [with a flag](../administration/feature_flags.md) named `work_items_mvc_2`. Disabled by default.
> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/367463) to feature flag named `work_items_mvc` in GitLab 15.7. Disabled by default.
> - [Enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/367463) in GitLab 15.7.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/367463) in GitLab 15.7. Feature flag `work_items_mvc` removed.
> - [Changed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/169256) the minimum user role from Reporter to Planner in GitLab 17.7.
You can add a task to a [milestone](project/milestones/index.md).
@ -391,15 +391,10 @@ To set issue weight of a task:
DETAILS:
**Tier:** Premium, Ultimate
**Offering:** GitLab.com, Self-managed
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/367456) in GitLab 15.5 [with a flag](../administration/feature_flags.md) named `work_items_mvc_2`. Disabled by default.
> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/367456) to feature flag named `work_items_mvc` in GitLab 15.7. Disabled by default.
> - [Enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/367456) in GitLab 15.7.
FLAG:
On self-managed GitLab, by default this feature is available. To hide the feature, an administrator can [disable the feature flag](../administration/feature_flags.md) named `work_items_mvc`.
On GitLab.com, this feature is available. On GitLab Dedicated, this feature is not available.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/367456) in GitLab 15.7. Feature flag `work_items_mvc` removed.
You can add a task to an [iteration](group/iterations/index.md).
You can see the iteration title and period only when you view a task.
@ -431,7 +426,7 @@ For more information, see [Time tracking](project/time_tracking.md).
> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/378949) to feature flag named `work_items_mvc` in GitLab 15.8. Disabled by default.
> - Changing activity sort order [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/378949) in GitLab 15.8.
> - Filtering activity [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/389971) in GitLab 15.10.
> - [Enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/334812) in GitLab 15.10.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/334812) in GitLab 15.10. Feature flag `work_items_mvc` removed.
You can view all the system notes related to the task. By default they are sorted by **Oldest first**.
You can always change the sorting order to **Newest first**, which is remembered across sessions.
@ -567,12 +562,15 @@ system note in the task's comments, for example:
## Lock discussion
DETAILS:
**Status:** Beta
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/398649) in GitLab 16.9 [with a flag](../administration/feature_flags.md) named `work_items_beta`. Disabled by default.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available, an administrator can [enable the feature flag](../administration/feature_flags.md) named `work_items_beta`.
On GitLab.com and GitLab Dedicated, this feature is not available.
This feature is not ready for production use.
The availability of this feature is controlled by a feature flag.
For more information, see the history.
This feature is available for testing, but not ready for production use.
You can prevent public comments in a task.
When you do, only project members can add and edit comments.
@ -599,9 +597,9 @@ DETAILS:
> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/446064) to feature flag named `work_items_beta` in GitLab 16.10. Disabled by default.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available per group, an administrator can [enable the feature flag](../administration/feature_flags.md) named `work_items_beta`.
On GitLab.com and GitLab Dedicated, this feature is not available.
This feature is not ready for production use.
The availability of this feature is controlled by a feature flag.
For more information, see the history.
This feature is available for testing, but not ready for production use.
When enabled, tasks use a two-column layout, similar to issues.
The description and threads are on the left, and attributes, such as labels

View File

@ -20,22 +20,37 @@ module API
private
CANDIDATE_PREFIX = 'candidate:'
def run_id
object.eid.to_s
end
def artifact_uri
expose_url(model_version_uri || generic_package_uri)
uri = if object.package&.generic?
generic_package_uri
elsif object.model_version_id
model_version_uri
else
ml_model_candidate_uri
end
expose_url(uri)
end
# Example: http://127.0.0.1:3000/api/v4/projects/20/packages/ml_models/1/files/
def model_version_uri
return unless object.model_version_id
model_version = object.model_version
path = api_v4_projects_packages_ml_models_files___path___path(
id: object.project.id, model_version_id: model_version.id, path: '', file_name: ''
id: object.project.id, model_version_id: object.model_version_id, path: '', file_name: ''
)
path.delete_suffix('(/path/)')
end
# Example: http://127.0.0.1:3000/api/v4/projects/20/packages/ml_models/1/files/
def ml_model_candidate_uri
path = api_v4_projects_packages_ml_models_files___path___path(
id: object.project.id, model_version_id: "#{CANDIDATE_PREFIX}#{object.iid}", path: '', file_name: ''
)
path.delete_suffix('(/path/)')

View File

@ -228,7 +228,7 @@ module Banzai
url.chomp!(matches[:format]) if matches.names.include?("format")
content = link_content || object_link_text(object, matches)
content = context[:link_text] || link_content || object_link_text(object, matches)
link = %(<a href="#{url}" #{data}
title="#{escape_once(title)}"

View File

@ -12,6 +12,8 @@ module Banzai
end
def parent_records(parent, ids)
return AlertManagement::Alert.none unless parent.is_a?(Project)
parent.alert_management_alerts.where(iid: ids.to_a)
end

View File

@ -39,18 +39,19 @@ module Banzai
reference_cache.records_per_parent[project][identifier]
end
def parent_records(project, identifiers)
return [] unless project.design_management_enabled?
def parent_records(parent, identifiers)
return DesignManagement::Design.none unless parent.is_a?(Project)
return DesignManagement::Design.none unless parent.design_management_enabled?
iids = identifiers.map(&:issue_iid).to_set
issues = project.issues.where(iid: iids).includes(:project, :namespace)
issues = parent.issues.where(iid: iids).includes(:project, :namespace)
id_for_iid = issues.index_by(&:iid).transform_values(&:id)
issue_by_id = issues.index_by(&:id)
designs(identifiers, id_for_iid).each do |d|
issue = issue_by_id[d.issue_id]
# optimisation: assign values we have already fetched
d.project = project
d.project = parent
d.issue = issue
end
end

View File

@ -12,6 +12,8 @@ module Banzai
end
def parent_records(parent, ids)
return Operations::FeatureFlag.none unless parent.is_a?(Project)
parent.operations_feature_flags.where(iid: ids.to_a)
end

View File

@ -22,6 +22,10 @@ module Banzai
end
def parent_records(parent, ids)
# we are treating all group level issues as work items so those would be handled
# by the WorkItemReferenceFilter
return Issue.none if parent.is_a?(Group)
parent.issues.where(iid: ids.to_a)
.includes(:project, :namespace, ::Gitlab::Issues::TypeAssociationGetter.call)
end

View File

@ -41,6 +41,8 @@ module Banzai
end
def parent_records(parent, ids)
return MergeRequest.none unless parent.is_a?(Project)
parent.merge_requests
.where(iid: ids.to_a)
.includes(target_project: :namespace)

View File

@ -158,6 +158,15 @@ module Banzai
def requires_unescaping?
true
end
def data_attributes_for(text, parent, object, link_content: false, link_reference: false)
object_parent = object.resource_parent
return super unless object_parent.is_a?(Group)
return super if object_parent.id == parent.id
super.merge({ group: object_parent.id, namespace: object_parent.id, project: nil })
end
end
end
end

View File

@ -195,29 +195,7 @@ module Banzai
def objects_for_paths(paths, absolute_path)
search_paths = absolute_path ? paths.pluck(1..-1) : paths
klass = parent_type.to_s.camelize.constantize
result = if parent_type == :namespace
klass.id_in(Route.by_paths(search_paths).select(:namespace_id))
else
klass.where_full_path_in(search_paths)
end
return result if parent_type == :group || parent_type == :namespace
return unless parent_type == :project
projects = result.includes(namespace: :route)
.allow_cross_joins_across_databases(url: "https://gitlab.com/gitlab-org/gitlab/-/issues/420046")
return projects unless absolute_path
# If we make it to here, then we're handling absolute path(s).
# Which means we need to also search groups as well as projects.
# Possible future optimization might be to use Route along the lines of:
# Routable.where_full_path_in(paths).includes(:source)
# See `routable.rb`
groups = Group.where_full_path_in(search_paths)
projects.to_a + groups.to_a
Route.by_paths(search_paths).preload(source: [:route, { namespace: :route }]).map(&:source)
end
def refs_cache

View File

@ -18,7 +18,13 @@ module ClickHouse
ttl = worker_class.click_house_worker_attrs[:migration_lock_ttl].from_now.utc
Gitlab::Redis::SharedState.with do |redis|
redis.zadd(ACTIVE_WORKERS_REDIS_KEY, ttl.to_i, worker_id, gt: true)
current_score = redis.zscore(ACTIVE_WORKERS_REDIS_KEY, worker_id).to_i
if ttl.to_i > current_score
# DO NOT send 'gt: true' parameter to avoid compatibility
# problems with Redis versions older than 6.2.
redis.zadd(ACTIVE_WORKERS_REDIS_KEY, ttl.to_i, worker_id)
end
yield
ensure
@ -47,7 +53,10 @@ module ClickHouse
# expire keys in the past
redis.zremrangebyscore(ACTIVE_WORKERS_REDIS_KEY, 0, "(#{min}")
# Return if any workers are registered with a future expiry date
redis.zrange(ACTIVE_WORKERS_REDIS_KEY, min, '+inf', by_score: true, limit: [0, 1]).any?
#
# To be compatible with Redis 6.0 not use zrange with 'by_score: true' parameter
# instead use redis.zrangebyscore method.
redis.zrangebyscore(ACTIVE_WORKERS_REDIS_KEY, min, '+inf', limit: [0, 1]).any?
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class CopyRunnerTaggings < BatchedMigrationJob
operation_name :copy_runner_taggings
feature_category :runner
def perform
each_sub_batch do |sub_batch|
scope = sub_batch.where(runner_type: 1).or(sub_batch.where.not(sharding_key_id: nil))
scope = scope
.joins('inner join taggings on ci_runners.id = taggings.taggable_id')
.joins('inner join ci_runners_e59bb2812d on ci_runners.id = ci_runners_e59bb2812d.id')
.where(taggings: { taggable_type: 'Ci::Runner' })
.select(:tag_id, 'taggable_id as runner_id', :sharding_key_id, :runner_type)
connection.execute(<<~SQL.squish)
INSERT INTO ci_runner_taggings(tag_id, runner_id, sharding_key_id, runner_type)
(#{scope.to_sql})
ON CONFLICT DO NOTHING;
SQL
end
end
end
end
end

View File

@ -72,6 +72,7 @@ module Gitlab
end
def unfold_reference(reference, match, target_parent)
format = match[:format].to_s
before = @text[0...match.begin(0)]
after = @text[match.end(0)..]
@ -85,22 +86,26 @@ module Gitlab
raise RewriteError, "Unspecified reference detected for #{referable.class.name}"
end
cross_reference += format
new_text = before + cross_reference + after
substitution_valid?(new_text) ? cross_reference : reference
end
def find_referable(reference)
extractor = Gitlab::ReferenceExtractor.new(@source_parent, @current_user)
extractor.analyze(reference)
extractor = Gitlab::ReferenceExtractor.new(source_parent_param[:project], @current_user)
extractor.analyze(reference, **source_parent_param)
extractor.all.first
end
def build_cross_reference(referable, target_parent)
if referable.respond_to?(:project)
referable.to_reference(target_parent)
else
referable.to_reference(@source_parent, target_container: target_parent)
end
class_name = referable.class.base_class.name
return referable.to_reference(target_parent) unless %w[Label Milestone].include?(class_name)
return referable.to_reference(@source_parent, target_container: target_parent) if referable.is_a?(GroupLabel)
return referable.to_reference(target_parent, full: true, absolute_path: true) if referable.is_a?(Milestone)
full = @source_parent.is_a?(Group) ? true : false
referable.to_reference(target_parent, full: full)
end
def substitution_valid?(substituted)
@ -108,8 +113,20 @@ module Gitlab
end
def markdown(text)
Banzai.render(text, project: @source_parent, no_original_data: true, no_sourcepos: true)
Banzai.render(text, **source_parent_param, no_original_data: true, no_sourcepos: true, link_text: 'placeholder')
end
def source_parent_param
case @source_parent
when Project
{ project: @source_parent }
when Group
{ group: @source_parent, project: nil }
when Namespaces::ProjectNamespace
{ project: @source_parent.project }
end
end
strong_memoize_attr :source_parent_param
end
end
end

View File

@ -10597,9 +10597,6 @@ msgstr ""
msgid "BulkImport|Import failed: Must have exactly one of organization, group or project."
msgstr ""
msgid "BulkImport|Import from %{hostname} completed"
msgstr ""
msgid "BulkImport|Import groups by direct transfer"
msgstr ""
@ -29005,6 +29002,12 @@ msgstr ""
msgid "Import|GitHub import details"
msgstr ""
msgid "Import|Import completed"
msgstr ""
msgid "Import|Import from %{hostname} completed"
msgstr ""
msgid "Import|Import source user has an invalid status for this operation"
msgstr ""
@ -29041,12 +29044,21 @@ msgstr ""
msgid "Import|Placeholder user record reassignment complete"
msgstr ""
msgid "Import|Reassign contributions"
msgstr ""
msgid "Import|Rescheduling placeholder user records reassignment: database health"
msgstr ""
msgid "Import|Show errors"
msgstr ""
msgid "Import|The import %{project_creator_name} started on %{start_date} from %{strong_open}%{hostname}%{strong_close} has completed."
msgstr ""
msgid "Import|The import you started on %{start_date} from %{strong_open}%{hostname}%{strong_close} has completed."
msgstr ""
msgid "Import|The repository could not be imported."
msgstr ""
@ -29062,9 +29074,21 @@ msgstr ""
msgid "Import|Timeout for decompressing archived files."
msgstr ""
msgid "Import|Users with the Owner role for the group can reassign contributions on the \"Members\" page."
msgstr ""
msgid "Import|View import results"
msgstr ""
msgid "Import|You can import a Subversion repository by using third-party tools. %{svn_link}."
msgstr ""
msgid "Import|You can now review your import results."
msgstr ""
msgid "Import|You can reassign contributions on the \"Members\" page of the group."
msgstr ""
msgid "Import|You do not have permission to view import source users for this namespace"
msgstr ""

View File

@ -110,11 +110,12 @@ RSpec.describe QA::Tools::Ci::QaChanges do
before do
stub_env('SELECTIVE_EXECUTION_IMPROVED', true)
stub_env('QA_CODE_PATH_MAPPINGS_GCS_CREDENTIALS', gcs_creds)
allow(Fog::Storage::Google).to receive(:new)
.with(google_project: gcs_project_id,
google_json_key_string: gcs_creds)
.and_return(gcs_client)
stub_env('CI_MERGE_REQUEST_TARGET_BRANCH_NAME', "master")
allow(QA::Tools::Ci::CodePathsMapping).to receive(:new).and_return(code_paths_mapping)
allow(Fog::Storage::Google).to receive(:new)
.with(google_project: gcs_project_id, google_json_key_string: gcs_creds)
.and_return(gcs_client)
end
describe '#qa_tests' do

View File

@ -47,7 +47,8 @@ module RuboCop
return if !ee? && disabled_comment_absent?
table_definition(node) do |table_name_node, table_name|
unless factory?(table_name.to_s)
# Partioned tables are prefix with `p_`.
unless factory?(table_name.to_s.delete_prefix('p_'))
msg = format(MSG, name: table_name)
add_offense(table_name_node, message: msg)
end
@ -57,13 +58,13 @@ module RuboCop
private
def factory?(table_name)
end_with = "/#{table_name}.rb"
self.class.factories.any? { |path| path.end_with?(end_with) }
self.class.factories.any? { |name| name.end_with?(table_name) }
end
def self.factories
@factories ||= Dir.glob("{,ee/,jh/}spec/factories/**/*.rb")
@factories ||= Dir.glob("{,ee/,jh/}spec/factories/**/*.rb").map do |factory|
factory.gsub(%r{^(ee/|jh/|)spec/factories/}, '').delete_suffix('.rb').tr('/', '_')
end
end
def disabled_comment_absent?

View File

@ -16,7 +16,14 @@ FactoryBot.define do
trait :with_artifact do
artifact do
association(:generic_package, name: instance.package_name, version: instance.package_version || '1',
association(:ml_model_package, name: instance.package_name, version: 'candidate_1',
project: project)
end
end
trait :with_generic_package do
artifact do
association(:generic_package, name: instance.package_name, version: '1',
project: project)
end
end
@ -25,7 +32,7 @@ FactoryBot.define do
artifact do
instance.package_name
instance.package_version
association(:ml_model_package, name: instance.package_name, version: instance.package_version || 'candidate_1',
association(:ml_model_package, name: instance.package_name, version: 'candidate_1',
project: project)
end
end

View File

@ -22,7 +22,7 @@ describe('getFrequentlyUsedEmojis', () => {
frequently_used: {
emojis: [[EMOJI_THUMBS_UP, EMOJI_THUMBS_DOWN]],
top: 0,
height: 71,
height: 73,
},
});
});
@ -36,7 +36,7 @@ describe('getFrequentlyUsedEmojis', () => {
frequently_used: {
emojis: [[EMOJI_THUMBS_UP, EMOJI_THUMBS_DOWN]],
top: 0,
height: 71,
height: 73,
},
});
});

View File

@ -473,6 +473,7 @@ describe('issue_comment_form component', () => {
},
},
endpoint: notesDataMock.draftsPath,
flashContainer: expect.anything(),
isDraft: true,
});
});
@ -493,6 +494,7 @@ describe('issue_comment_form component', () => {
},
},
endpoint: notesDataMock.draftsPath,
flashContainer: expect.anything(),
isDraft: true,
});
});
@ -515,6 +517,7 @@ describe('issue_comment_form component', () => {
},
},
endpoint: noteableDataMock.create_note_path,
flashContainer: expect.anything(),
isDraft: false,
});
});
@ -537,6 +540,7 @@ describe('issue_comment_form component', () => {
},
},
endpoint: noteableDataMock.create_note_path,
flashContainer: expect.anything(),
isDraft: false,
});
});
@ -802,6 +806,7 @@ describe('issue_comment_form component', () => {
},
},
endpoint: noteableDataMock.create_note_path,
flashContainer: expect.anything(),
isDraft: false,
});
});

View File

@ -25,46 +25,46 @@ jest.mock('~/lib/utils/autosave');
const workItemId = workItemQueryResponse.data.workItem.id;
describe('Work item add note', () => {
/** @type {import('helpers/vue_test_utils_helper').ExtendedWrapper} */
let wrapper;
Vue.use(VueApollo);
const mutationSuccessHandler = jest.fn().mockResolvedValue(createWorkItemNoteResponse);
const mutationSuccessHandler = jest.fn().mockResolvedValue(createWorkItemNoteResponse());
let workItemResponseHandler;
const findCommentForm = () => wrapper.findComponent(WorkItemCommentForm);
const findErrorAlert = () => wrapper.findByTestId('error-alert');
const findReplyPlaceholder = () => wrapper.findComponent(DiscussionReplyPlaceholder);
const findSuccessAlert = () => wrapper.findByTestId('success-alert');
const findWorkItemLockedComponent = () => wrapper.findComponent(WorkItemCommentLocked);
const findResolveDiscussionButton = () => wrapper.findComponent(ResolveDiscussionButton);
const createComponent = async ({
mutationHandler = mutationSuccessHandler,
canUpdate = true,
canCreateNote = true,
emailParticipantsWidgetPresent = true,
workItemIid = '1',
workItemResponse = workItemByIidResponseFactory({
canUpdate,
canCreateNote,
emailParticipantsWidgetPresent,
}),
signedIn = true,
isEditing = true,
workItemType = 'Task',
isInternalThread = false,
isNewDiscussion = false,
isDiscussionResolved = false,
isDiscussionResolvable = false,
isResolving = false,
hasReplies = false,
isWorkItemConfidential = false,
} = {}) => {
const workItemResponse = workItemByIidResponseFactory({
canCreateNote,
emailParticipantsWidgetPresent,
});
workItemResponseHandler = jest.fn().mockResolvedValue(workItemResponse);
if (signedIn) {
window.gon.current_user_id = '1';
window.gon.current_user_avatar_url = 'avatar.png';
}
const { id } = workItemQueryResponse.data.workItem;
wrapper = shallowMountExtended(WorkItemAddNote, {
apolloProvider: createMockApollo([
[workItemByIidQuery, workItemResponseHandler],
@ -72,9 +72,9 @@ describe('Work item add note', () => {
]),
propsData: {
fullPath: 'test-project-path',
workItemId: id,
workItemId: workItemResponse.data.workspace.workItem.id,
workItemIid,
workItemType,
workItemType: 'Task',
markdownPreviewPath: '/group/project/preview_markdown?target_type=WorkItem',
autocompleteDataSources: {},
isInternalThread,
@ -82,10 +82,7 @@ describe('Work item add note', () => {
isDiscussionResolved,
isDiscussionResolvable,
isResolving,
hasReplies,
},
stubs: {
WorkItemCommentLocked,
isWorkItemConfidential,
},
});
@ -104,17 +101,12 @@ describe('Work item add note', () => {
`('when internal comment is $isInternalComment', ({ isInternalComment }) => {
it('calls update widgets mutation', async () => {
const noteText = 'updated desc';
await createComponent({
isEditing: true,
signedIn: true,
});
await createComponent({ isEditing: true, signedIn: true });
findCommentForm().vm.$emit('submitForm', {
commentText: noteText,
isNoteInternal: isInternalComment,
});
await waitForPromises();
expect(mutationSuccessHandler).toHaveBeenCalledWith({
@ -135,7 +127,6 @@ describe('Work item add note', () => {
commentText: 'test',
isNoteInternal: isInternalComment,
});
await waitForPromises();
expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'add_work_item_comment', {
@ -174,36 +165,17 @@ describe('Work item add note', () => {
it('emits error when mutation returns error', async () => {
const error = 'eror';
await createComponent({
isEditing: true,
mutationHandler: jest.fn().mockResolvedValue({
data: {
createNote: {
note: {
id: 'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122',
discussion: {
id: 'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122',
notes: {
nodes: [],
__typename: 'NoteConnection',
},
__typename: 'Discussion',
},
__typename: 'Note',
},
__typename: 'CreateNotePayload',
errors: [error],
},
},
}),
mutationHandler: jest
.fn()
.mockResolvedValue(createWorkItemNoteResponse({ errors: [error] })),
});
findCommentForm().vm.$emit('submitForm', {
commentText: 'updated desc',
isNoteInternal: isInternalComment,
});
await waitForPromises();
expect(wrapper.emitted('error')).toEqual([[error]]);
@ -221,7 +193,6 @@ describe('Work item add note', () => {
commentText: 'updated desc',
isNoteInternal: isInternalComment,
});
await waitForPromises();
expect(wrapper.emitted('error')).toEqual([[error]]);
@ -230,86 +201,86 @@ describe('Work item add note', () => {
it('ignores errors when mutation returns additional information as errors for quick actions', async () => {
await createComponent({
isEditing: true,
mutationHandler: jest.fn().mockResolvedValue({
data: {
createNote: {
note: {
id: 'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122',
discussion: {
id: 'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122',
notes: {
nodes: [],
__typename: 'NoteConnection',
},
__typename: 'Discussion',
},
__typename: 'Note',
},
__typename: 'CreateNotePayload',
errors: ['Commands only Removed assignee @foobar.', 'Command names ["unassign"]'],
},
},
}),
mutationHandler: jest.fn().mockResolvedValue(
createWorkItemNoteResponse({
errors: ['Commands only Removed assignee @foobar.', 'Command names ["unassign"]'],
}),
),
});
findCommentForm().vm.$emit('submitForm', {
commentText: 'updated desc',
isNoteInternal: isInternalComment,
});
await waitForPromises();
expect(clearDraft).toHaveBeenCalledWith('gid://gitlab/WorkItem/1-comment');
});
it('renders success alert on successful quick action', async () => {
await createComponent({
isEditing: true,
mutationHandler: jest
.fn()
.mockResolvedValue(createWorkItemNoteResponse({ messages: ['Added ~"Label" label.'] })),
});
findCommentForm().vm.$emit('submitForm', {
commentText: '/label ~Label',
isNoteInternal: isInternalComment,
});
await waitForPromises();
expect(findSuccessAlert().text()).toBe('Added ~"Label" label.');
expect(findSuccessAlert().props('variant')).toBe('info');
});
it('renders error alert on unsuccessful quick action', async () => {
await createComponent({
isEditing: true,
mutationHandler: jest
.fn()
.mockResolvedValue(
createWorkItemNoteResponse({ errorMessages: ['Failed to apply commands.'] }),
),
});
findCommentForm().vm.$emit('submitForm', {
commentText: '/label doesnotexist',
isNoteInternal: isInternalComment,
});
await waitForPromises();
expect(findErrorAlert().text()).toBe('Failed to apply commands.');
expect(findErrorAlert().props('variant')).toBe('danger');
});
it('refetches widgets when work item type is updated', async () => {
await createComponent({
isEditing: true,
mutationHandler: jest.fn().mockResolvedValue({
data: {
createNote: {
note: {
id: 'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122',
discussion: {
id: 'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122',
notes: {
nodes: [],
__typename: 'NoteConnection',
},
__typename: 'Discussion',
},
__typename: 'Note',
},
__typename: 'CreateNotePayload',
errors: ['Commands only Type changed successfully.', 'Command names ["type"]'],
},
},
}),
mutationHandler: jest.fn().mockResolvedValue(
createWorkItemNoteResponse({
errors: ['Commands only Type changed successfully.', 'Command names ["type"]'],
}),
),
});
await waitForPromises();
expect(workItemResponseHandler).toHaveBeenCalled();
});
it('emits error to parent when the comment form emits error', async () => {
await createComponent({ isEditing: true, signedIn: true });
const error = 'error';
findCommentForm().vm.$emit('error', error);
expect(wrapper.emitted('error')).toEqual([[error]]);
});
it('sends confidential prop to work item comment form', async () => {
await createComponent({ isEditing: true, signedIn: true });
await createComponent({ isWorkItemConfidential: true });
const {
data: {
workspace: { workItem },
},
} = workItemByIidResponseFactory({ canUpdate: true, canCreateNote: true });
expect(findCommentForm().props('isWorkItemConfidential')).toBe(workItem.confidential);
expect(findCommentForm().props('isWorkItemConfidential')).toBe(true);
});
});
});
@ -332,7 +303,7 @@ describe('Work item add note', () => {
expect(wrapper.attributes('class')).toContain('internal-note');
});
describe('when work item`createNote` permission false', () => {
describe('when work item `createNote` permission is false', () => {
it('cannot add comment', async () => {
await createComponent({ isEditing: false, canCreateNote: false });
@ -345,33 +316,27 @@ describe('Work item add note', () => {
it('sets `hasEmailParticipantsWidget` prop to `true` for comment form by default', async () => {
await createComponent();
expect(findCommentForm().props('hasEmailParticipantsWidget')).toEqual(true);
expect(findCommentForm().props('hasEmailParticipantsWidget')).toBe(true);
});
describe('when email participants widget is not available', () => {
it('sets `hasEmailParticipantsWidget` prop to `false` for comment form', async () => {
await createComponent({ emailParticipantsWidgetPresent: false });
expect(findCommentForm().props('hasEmailParticipantsWidget')).toEqual(false);
expect(findCommentForm().props('hasEmailParticipantsWidget')).toBe(false);
});
});
});
describe('Resolve Discussion button', () => {
it('renders resolve discussion button when discussion is resolvable', async () => {
await createComponent({
isDiscussionResolvable: true,
isEditing: false,
});
await createComponent({ isDiscussionResolvable: true, isEditing: false });
expect(findResolveDiscussionButton().exists()).toBe(true);
});
it('does not render resolve discussion button when discussion is not resolvable', async () => {
await createComponent({
isDiscussionResolvable: false,
isEditing: false,
});
await createComponent({ isDiscussionResolvable: false, isEditing: false });
expect(findResolveDiscussionButton().exists()).toBe(false);
});
@ -387,10 +352,7 @@ describe('Work item add note', () => {
});
it('emits `resolve` event when resolve discussion button is clicked', async () => {
await createComponent({
isDiscussionResolvable: true,
isEditing: false,
});
await createComponent({ isDiscussionResolvable: true, isEditing: false });
findResolveDiscussionButton().vm.$emit('onClick');
@ -405,9 +367,10 @@ describe('Work item add note', () => {
isEditing: false,
});
const resolveButton = findResolveDiscussionButton();
expect(resolveButton.props('isResolving')).toBe(true);
expect(resolveButton.props('buttonTitle')).toBe('Resolve thread');
expect(findResolveDiscussionButton().props()).toMatchObject({
isResolving: true,
buttonTitle: 'Resolve thread',
});
});
});
});

View File

@ -4207,10 +4207,14 @@ export const mockMoreWorkItemNotesResponse = {
},
};
export const createWorkItemNoteResponse = {
export const createWorkItemNoteResponse = ({
errors = [],
errorMessages = null,
messages = null,
} = {}) => ({
data: {
createNote: {
errors: [],
errors,
note: {
id: 'gid://gitlab/Note/569',
discussion: {
@ -4272,10 +4276,14 @@ export const createWorkItemNoteResponse = {
bodyHtml: '<p data-sourcepos="1:1-1:9" dir="auto">Latest 22</p>',
__typename: 'Note',
},
quickActionsStatus: {
errorMessages,
messages,
},
__typename: 'CreateNotePayload',
},
},
};
});
export const mockWorkItemCommentNote = {
id: 'gid://gitlab/Note/158',

View File

@ -19,6 +19,7 @@ RSpec.describe Types::Ci::Catalog::ResourceType, feature_category: :pipeline_com
visibility_level
star_count
starrers_path
topics
last_30_day_usage_count
]

View File

@ -67,8 +67,8 @@ RSpec.describe API::Entities::Ml::Mlflow::RunInfo, feature_category: :mlops do
describe 'artifact_uri' do
context 'when candidate does not belong to a model version' do
it 'returns the generic package (legacy) format of the artifact_uri' do
expect(subject[:artifact_uri]).to eq("http://localhost/api/v4/projects/#{candidate.project_id}/packages/generic#{candidate.artifact_root}")
it 'returns the ml package format of the artifact_uri' do
expect(subject[:artifact_uri]).to eq("http://localhost/api/v4/projects/#{candidate.project_id}/packages/ml_models/candidate:#{candidate.internal_id}/files/")
end
end
@ -80,6 +80,14 @@ RSpec.describe API::Entities::Ml::Mlflow::RunInfo, feature_category: :mlops do
expect(subject[:artifact_uri]).to eq("http://localhost/api/v4/projects/#{candidate.project_id}/packages/ml_models/#{version.id}/files/")
end
end
context 'when candidate has already a generic package' do
let!(:candidate) { create(:ml_candidates, :with_generic_package, name: 'run1') }
it 'returns the generic version format of the artifact_uri' do
expect(subject[:artifact_uri]).to eq("http://localhost/api/v4/projects/#{candidate.project_id}/packages/generic#{candidate.artifact_root}")
end
end
end
describe 'lifecycle_stage' do

View File

@ -793,14 +793,34 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
let_it_be(:context) { { project: nil, group: another_group } }
it 'can not find the label' do
reference = "#{group.full_path}~#{group_label.name}"
reference = "#{another_group.full_path}~#{group_label.name}"
result = reference_filter("See #{reference}", context)
expect(result.to_html).to include "See #{reference}"
end
it_behaves_like 'absolute group reference' do
let_it_be(:reference) { "#{group.full_path}~#{group_label.name}" }
it 'finds the label with relative reference' do
label_name = group_label.name
reference = "#{group.full_path}~#{label_name}"
result = reference_filter("See #{reference}", context)
if context[:label_url_method] == :group_url
expect(result.css('a').first.attr('href')).to eq(urls.group_url(group, label_name: label_name))
else
expect(result.css('a').first.attr('href')).to eq(urls.issues_group_url(group, label_name: label_name))
end
end
it 'finds label in ancestors' do
label_name = parent_group_label.name
reference = "#{group.full_path}~#{label_name}"
result = reference_filter("See #{reference}", context)
if context[:label_url_method] == :group_url
expect(result.css('a').first.attr('href')).to eq(urls.group_url(group, label_name: label_name))
else
expect(result.css('a').first.attr('href')).to eq(urls.issues_group_url(group, label_name: label_name))
end
end
it 'does not find label in ancestors' do
@ -809,6 +829,10 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
expect(result.to_html).to include "See #{reference}"
end
it_behaves_like 'absolute group reference' do
let_it_be(:reference) { "#{group.full_path}~#{group_label.name}" }
end
end
end

View File

@ -33,8 +33,13 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
doc = reference_filter("Milestone #{reference}")
link = doc.css('a').first
expect(link).to have_attribute('data-project')
expect(link.attr('data-project')).to eq project.id.to_s
if milestone.project.present?
expect(link).to have_attribute('data-project')
expect(link.attr('data-project')).to eq project.id.to_s
elsif milestone.group.present?
expect(link).to have_attribute('data-group')
expect(link.attr('data-group')).to eq milestone.group.id.to_s
end
end
it 'includes a data-milestone attribute' do
@ -153,8 +158,13 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
doc = reference_filter("Milestone #{link_reference}")
link = doc.css('a').first
expect(link).to have_attribute('data-project')
expect(link.attr('data-project')).to eq project.id.to_s
if milestone.project.present?
expect(link).to have_attribute('data-project')
expect(link.attr('data-project')).to eq project.id.to_s
elsif milestone.group.present?
expect(link).to have_attribute('data-group')
expect(link.attr('data-group')).to eq milestone.group.id.to_s
end
end
it 'includes a data-milestone attribute' do

View File

@ -26,8 +26,10 @@ RSpec.describe ClickHouse::MigrationSupport::ExclusiveLock, feature_category: :d
end
describe '.register_running_worker' do
let(:worker_ttl) { 10.seconds }
before do
TestWorker.click_house_migration_lock(10.seconds)
TestWorker.click_house_migration_lock(worker_ttl)
end
it 'yields without arguments' do
@ -45,6 +47,67 @@ RSpec.describe ClickHouse::MigrationSupport::ExclusiveLock, feature_category: :d
expect(described_class.active_sidekiq_workers?).to eq false
end
end
it 'is compatible with Redis 6.0' do
redis_mock = instance_double(Redis)
expect(redis_mock).to receive(:zscore).and_return(1)
allow(redis_mock).to receive(:zadd)
expect(redis_mock).to receive(:zrem)
expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis_mock)
described_class.register_running_worker(worker_class, 'test') do
next
end
# Ensure gt: true parameter is not passed
expect(redis_mock).to have_received(:zadd).with(
described_class::ACTIVE_WORKERS_REDIS_KEY,
worker_ttl.from_now.to_i,
'test'
)
end
context 'when scheduling the same worker concurrently', :freeze_time, :aggregate_failures do
let(:worker_name) { 'test' }
def get_ttl
Gitlab::Redis::SharedState.with do |redis|
redis.zrange(described_class::ACTIVE_WORKERS_REDIS_KEY, 0, -1, with_scores: true)[0][1]
end
end
context 'when ttl is in the future' do
it 'updates worker ttl' do
described_class.register_running_worker(worker_class, worker_name) do
old_ttl = get_ttl
expect(old_ttl).to eq((Time.current + worker_ttl).to_i)
travel 1.second
described_class.register_running_worker(worker_class, worker_name) do
new_ttl = get_ttl
expect(new_ttl).to be > old_ttl
end
end
end
end
context 'when ttl is in the past' do
it 'does not update worker ttl' do
described_class.register_running_worker(worker_class, worker_name) do
old_ttl = get_ttl
expect(old_ttl).to eq(worker_ttl.from_now.to_i)
travel_to 1.second.ago
described_class.register_running_worker(worker_class, worker_name) do
new_ttl = get_ttl
expect(new_ttl).to eq(old_ttl)
end
end
end
end
end
end
describe '.pause_workers?' do
@ -128,4 +191,29 @@ RSpec.describe ClickHouse::MigrationSupport::ExclusiveLock, feature_category: :d
end
end
end
describe '.active_sidekiq_workers?' do
subject(:active_sidekiq_workers) { described_class.active_sidekiq_workers? }
it 'returns false when no workers are registered' do
is_expected.to eq false
end
it 'returns true when workers are registered' do
described_class.register_running_worker(worker_class, 'test') do
is_expected.to eq true
end
end
it 'is compatible with Redis 6.0' do
redis_mock = instance_double(Redis)
allow(redis_mock).to receive_messages(zremrangebyscore: 1, zrangebyscore: [])
allow(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis_mock)
expect(redis_mock).to receive(:zrangebyscore)
expect(redis_mock).not_to receive(:zrange) # Not compatible with Redis 6
active_sidekiq_workers
end
end
end

View File

@ -0,0 +1,120 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::CopyRunnerTaggings, feature_category: :runner do
let(:runners_table) { table(:ci_runners, database: :ci, primary_key: :id) }
let(:runner_taggings_table) { table(:ci_runner_taggings, database: :ci, primary_key: :id) }
let(:taggings_table) { table(:taggings, database: :ci) }
let(:tags_table) { table(:tags, database: :ci) }
let(:instance_runner) { runners_table.create!(runner_type: 1) }
let(:group_runner) { runners_table.create!(runner_type: 2, sharding_key_id: 10) }
let(:project_runner) { runners_table.create!(runner_type: 3, sharding_key_id: 11) }
let(:old_runner) do
without_referential_integrity do
runners_table.create!(runner_type: 2, sharding_key_id: nil)
end
end
let(:deleted_runner) do
without_referential_integrity do
runners_table.create!(runner_type: 3, sharding_key_id: 12)
end
end
let(:tag1) { tags_table.create!(name: 'docker') }
let(:tag2) { tags_table.create!(name: 'postgres') }
let(:tag3) { tags_table.create!(name: 'ruby') }
let(:tag4) { tags_table.create!(name: 'golang') }
let(:migration_attrs) do
{
start_id: runners_table.minimum(:id),
end_id: runners_table.maximum(:id),
batch_table: :ci_runners,
batch_column: :id,
sub_batch_size: 1,
pause_ms: 0,
connection: connection
}
end
let(:migration) { described_class.new(**migration_attrs) }
let(:connection) { Ci::ApplicationRecord.connection }
before do
taggings_table.create!(tag_id: tag1.id, taggable_id: instance_runner.id,
taggable_type: 'Ci::Runner', context: :tags)
taggings_table.create!(tag_id: tag2.id, taggable_id: instance_runner.id,
taggable_type: 'Ci::Runner', context: :tags)
taggings_table.create!(tag_id: tag3.id, taggable_id: instance_runner.id,
taggable_type: 'Ci::Runner', context: :tags)
taggings_table.create!(tag_id: tag1.id, taggable_id: group_runner.id,
taggable_type: 'Ci::Runner', context: :tags)
taggings_table.create!(tag_id: tag2.id, taggable_id: group_runner.id,
taggable_type: 'Ci::Runner', context: :tags)
taggings_table.create!(tag_id: tag3.id, taggable_id: project_runner.id,
taggable_type: 'Ci::Runner', context: :tags)
taggings_table.create!(tag_id: tag4.id, taggable_id: project_runner.id,
taggable_type: 'Ci::Runner', context: :tags)
taggings_table.create!(tag_id: tag3.id, taggable_id: old_runner.id,
taggable_type: 'Ci::Runner', context: :tags)
taggings_table.create!(tag_id: tag4.id, taggable_id: deleted_runner.id,
taggable_type: 'Ci::Runner', context: :tags)
taggings_table.create!(tag_id: tag3.id, taggable_id: project_runner.id,
taggable_type: 'CommitStatus', context: :tags)
end
describe '#perform' do
it 'copies records over into ci_runner_taggings' do
expect { migration.perform }
.to change { runner_taggings_table.count }
.from(0)
.to(7)
expect(tag_ids_from_taggings_for(instance_runner))
.to match_array(runner_tags_for(instance_runner).pluck(:tag_id))
expect(tag_ids_from_taggings_for(group_runner))
.to match_array(runner_tags_for(group_runner).pluck(:tag_id))
expect(tag_ids_from_taggings_for(project_runner))
.to match_array(runner_tags_for(project_runner).pluck(:tag_id))
expect(runner_tags_for(instance_runner).pluck(:sharding_key_id).uniq)
.to contain_exactly(nil)
expect(runner_tags_for(group_runner).pluck(:sharding_key_id).uniq)
.to contain_exactly(10)
expect(runner_tags_for(project_runner).pluck(:sharding_key_id).uniq)
.to contain_exactly(11)
expect(runner_tags_for(old_runner)).to be_empty
expect(runner_tags_for(deleted_runner)).to be_empty
end
def tag_ids_from_taggings_for(runner)
taggings_table
.where(taggable_id: runner, taggable_type: 'Ci::Runner')
.pluck(:tag_id)
end
def runner_tags_for(runner)
runner_taggings_table.where(runner_id: runner)
end
end
def without_referential_integrity
connection.transaction do
connection.execute('ALTER TABLE ci_runners DISABLE TRIGGER ALL;')
result = yield
connection.execute('ALTER TABLE ci_runners ENABLE TRIGGER ALL;')
result
end
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Gfm::ReferenceRewriter do
RSpec.describe Gitlab::Gfm::ReferenceRewriter, feature_category: :team_planning do
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
@ -26,14 +26,6 @@ RSpec.describe Gitlab::Gfm::ReferenceRewriter do
let!(:issue_second) { create(:issue, project: old_project) }
let!(:merge_request) { create(:merge_request, source_project: old_project) }
context 'plain text description' do
let(:text) { 'Description that references #1, #2 and !1' }
it { is_expected.to include issue_first.to_reference(new_project) }
it { is_expected.to include issue_second.to_reference(new_project) }
it { is_expected.to include merge_request.to_reference(new_project) }
end
context 'description with ignored elements' do
let(:text) do
"Hi. This references #1, but not `#2`\n" \
@ -71,60 +63,9 @@ RSpec.describe Gitlab::Gfm::ReferenceRewriter do
it { is_expected.to eq "#{ref}, `#1`, #{ref}, `#1`" }
end
context 'description with project labels' do
let!(:label) { create(:label, id: 123, name: 'test', project: old_project) }
context 'label referenced by id' do
let(:text) { '#1 and ~123' }
it { is_expected.to eq %(#{old_project_ref}#1 and #{old_project_ref}~123) }
end
context 'label referenced by text' do
let(:text) { '#1 and ~"test"' }
it { is_expected.to eq %(#{old_project_ref}#1 and #{old_project_ref}~123) }
end
end
context 'description with group labels' do
let(:old_group) { create(:group) }
let!(:group_label) { create(:group_label, id: 321, name: 'group label', group: old_group) }
before do
old_project.update!(namespace: old_group)
end
context 'label referenced by id' do
let(:text) { '#1 and ~321' }
it { is_expected.to eq %(#{old_project_ref}#1 and #{old_project_ref}~321) }
end
context 'label referenced by text' do
let(:text) { '#1 and ~"group label"' }
it { is_expected.to eq %(#{old_project_ref}#1 and #{old_project_ref}~321) }
end
end
end
end
context 'when description contains a local reference' do
let(:local_issue) { create(:issue, project: old_project) }
let(:text) { "See ##{local_issue.iid}" }
it { is_expected.to eq("See #{old_project.path}##{local_issue.iid}") }
end
context 'when description contains a cross reference' do
let(:merge_request) { create(:merge_request) }
let(:text) { "See #{merge_request.project.full_path}!#{merge_request.iid}" }
it { is_expected.to eq(text) }
end
context 'with a commit' do
let(:old_project) { create(:project, :repository, name: 'old-project', group: group) }
let(:commit) { old_project.commit }
@ -142,26 +83,6 @@ RSpec.describe Gitlab::Gfm::ReferenceRewriter do
end
end
context 'reference contains project milestone' do
let!(:milestone) do
create(:milestone, title: '9.0', project: old_project)
end
let(:text) { 'milestone: %"9.0"' }
it { is_expected.to eq %(milestone: #{old_project_ref}%"9.0") }
end
context 'when referring to group milestone' do
let!(:milestone) do
create(:milestone, title: '10.0', group: group)
end
let(:text) { 'milestone %"10.0"' }
it { is_expected.to eq text }
end
context 'when referring to a group' do
let(:text) { "group @#{group.full_path}" }
@ -178,9 +99,7 @@ RSpec.describe Gitlab::Gfm::ReferenceRewriter do
before do
create(:milestone, title: '9.0', project: old_project)
allow_any_instance_of(Milestone)
.to receive(:to_reference)
.and_return(nil)
allow_any_instance_of(Milestone).to receive(:to_reference).and_return(nil)
end
let(:text) { 'milestone: %"9.0"' }
@ -193,4 +112,232 @@ RSpec.describe Gitlab::Gfm::ReferenceRewriter do
end
end
end
describe '#rewrite with table syntax' do
using RSpec::Parameterized::TableSyntax
let_it_be(:parent_group1) { create(:group, path: "parent-group-one") }
let_it_be(:parent_group2) { create(:group, path: "parent-group-two") }
let_it_be(:user) { create(:user) }
let_it_be(:source_project) { create(:project, path: 'old-project', group: parent_group1) }
let_it_be(:target_project1) { create(:project, path: 'new-project', group: parent_group1) }
let_it_be(:target_project2) { create(:project, path: 'new-project', group: parent_group2) }
let_it_be(:target_group1) { create(:group, path: 'new-group', parent: parent_group1) }
let_it_be(:target_group2) { create(:group, path: 'new-group', parent: parent_group2) }
let_it_be(:work_item_project_first) { create(:issue, project: source_project) }
let_it_be(:merge_request) { create(:merge_request, source_project: source_project) }
let_it_be(:project_label) { create(:label, id: 123, name: 'pr label1', project: source_project) }
let_it_be(:parent_group_label) { create(:group_label, id: 321, name: 'gr label1', group: parent_group1) }
let_it_be(:project_milestone) { create(:milestone, title: 'project milestone', project: source_project) }
let_it_be(:parent_group_milestone) { create(:milestone, title: 'group milestone', group: parent_group1) }
before_all do
parent_group1.add_reporter(user)
parent_group2.add_reporter(user)
end
context 'with source as Project and target as Project within same parent group' do
let_it_be(:source_parent) { source_project } # 'parent-group-one/old-project'
let_it_be(:target_parent) { target_project1 } # 'parent-group-one/new-project'
where(:source_text, :destination_text) do
# project level work item reference
'ref #1' | 'ref old-project#1'
'ref #1+' | 'ref old-project#1+'
'ref #1+s' | 'ref old-project#1+s'
# merge request reference
'ref !1' | 'ref old-project!1'
'ref !1+' | 'ref old-project!1+'
'ref !1+s' | 'ref old-project!1+s'
# project label reference
'ref ~123' | 'ref old-project~123'
'ref ~"pr label1"' | 'ref old-project~123'
# group level label reference
'ref ~321' | 'ref old-project~321'
'ref ~"gr label1"' | 'ref old-project~321'
# project level milestone reference
'ref %"project milestone"' | 'ref /parent-group-one/old-project%"project milestone"'
# group level milestone reference
'ref %"group milestone"' | 'ref /parent-group-one%"group milestone"'
end
with_them do
it_behaves_like 'rewrites references correctly'
end
end
context 'with source as Project and target as Project within different parent groups' do
let_it_be(:source_parent) { source_project } # 'parent-group-one/old-project'
let_it_be(:target_parent) { target_project2 } # 'parent-group-two/new-project'
where(:source_text, :destination_text) do
# project level work item reference
'ref #1' | 'ref parent-group-one/old-project#1'
'ref #1+' | 'ref parent-group-one/old-project#1+'
'ref #1+s' | 'ref parent-group-one/old-project#1+s'
# merge request reference
'ref !1' | 'ref parent-group-one/old-project!1'
'ref !1+' | 'ref parent-group-one/old-project!1+'
'ref !1+s' | 'ref parent-group-one/old-project!1+s'
# project label reference
'ref ~123' | 'ref parent-group-one/old-project~123'
'ref ~"pr label1"' | 'ref parent-group-one/old-project~123'
# group level label reference
'ref ~321' | 'ref parent-group-one/old-project~321'
'ref ~"gr label1"' | 'ref parent-group-one/old-project~321'
# project level milestone reference
'ref %"project milestone"' | 'ref /parent-group-one/old-project%"project milestone"'
# group level milestone reference
'ref %"group milestone"' | 'ref /parent-group-one%"group milestone"'
end
with_them do
it_behaves_like 'rewrites references correctly'
end
end
context 'with source as Project and target as Group within same parent group' do
let_it_be(:source_parent) { source_project } # 'parent-group-one/old-project'
let_it_be(:target_parent) { target_group1 } # 'parent-group-one/new-group'
where(:source_text, :destination_text) do
# project level work item reference
'ref #1' | 'ref parent-group-one/old-project#1'
'ref #1+' | 'ref parent-group-one/old-project#1+'
'ref #1+s' | 'ref parent-group-one/old-project#1+s'
# merge request reference
'ref !1' | 'ref parent-group-one/old-project!1'
'ref !1+' | 'ref parent-group-one/old-project!1+'
'ref !1+s' | 'ref parent-group-one/old-project!1+s'
# project label reference
'ref ~123' | 'ref parent-group-one/old-project~123'
'ref ~"pr label1"' | 'ref parent-group-one/old-project~123'
# group level label reference
'ref ~321' | 'ref parent-group-one/old-project~321'
'ref ~"gr label1"' | 'ref parent-group-one/old-project~321'
# project level milestone reference
'ref %"project milestone"' | 'ref /parent-group-one/old-project%"project milestone"'
# group level milestone reference
'ref %"group milestone"' | 'ref /parent-group-one%"group milestone"'
end
with_them do
it_behaves_like 'rewrites references correctly'
end
end
context 'with source as Project and target as Group within different parent groups' do
let_it_be(:source_parent) { source_project } # 'parent-group-one/old-project'
let_it_be(:target_parent) { target_group2 } # 'parent-group-two/new-group'
where(:source_text, :destination_text) do
# project level work item reference
'ref #1' | 'ref parent-group-one/old-project#1'
'ref #1+' | 'ref parent-group-one/old-project#1+'
'ref #1+s' | 'ref parent-group-one/old-project#1+s'
# merge request reference
'ref !1' | 'ref parent-group-one/old-project!1'
'ref !1+' | 'ref parent-group-one/old-project!1+'
'ref !1+s' | 'ref parent-group-one/old-project!1+s'
# project label reference
'ref ~123' | 'ref parent-group-one/old-project~123'
'ref ~"pr label1"' | 'ref parent-group-one/old-project~123'
# group level label reference
'ref ~321' | 'ref parent-group-one/old-project~321'
'ref ~"gr label1"' | 'ref parent-group-one/old-project~321'
# project level milestone reference
'ref %"project milestone"' | 'ref /parent-group-one/old-project%"project milestone"'
# group level milestone reference
'ref %"group milestone"' | 'ref /parent-group-one%"group milestone"'
end
with_them do
it_behaves_like 'rewrites references correctly'
end
end
context 'with invalid references' do
let_it_be(:source_parent) { source_project }
let_it_be(:target_parent) { target_project1 }
where(:text_with_reference) do
[
# work item references
# project level non-existing WI references
'ref parent-group-one/old-project#12321',
'ref parent-group-one/old-project#12321+',
'ref parent-group-one/old-project#12321+s',
'ref /parent-group-one/old-project#12321',
'ref /parent-group-one/old-project#12321+',
'ref /parent-group-one/old-project#12321+s',
# group level non-existing WI references
'ref parent-group-one/old-group#12321',
'ref parent-group-one/old-group#12321+',
'ref parent-group-one/old-group#12321+s',
'ref /parent-group-one/old-group#12321',
'ref /parent-group-one/old-group#12321+',
'ref /parent-group-one/old-group#12321+s',
# project level non-existing design references
'ref parent-group-one/old-project#1/designs[homescreen.jpg]',
'ref parent-group-one/old-project#12321/designs[homescreen.jpg]',
'ref parent-group-one/old-group#12321/designs[homescreen.jpg]',
'ref /parent-group-one/old-project#1/designs[homescreen.jpg]',
'ref /parent-group-one/old-project#12321/designs[homescreen.jpg]',
'ref /parent-group-one/old-group#12321/designs[homescreen.jpg]',
# merge request references
# project level non-existing MR references
'ref parent-group-one/old-project!12321',
'ref parent-group-one/old-project!12321+',
'ref parent-group-one/old-project!12321+s',
'ref /parent-group-one/old-project!12321',
'ref /parent-group-one/old-project!12321+',
'ref /parent-group-one/old-project!12321+s',
# root group
'ref parent-group-one!1',
'ref parent-group-one!1+',
'ref parent-group-one!1+s',
'ref /parent-group-one!1',
'ref /parent-group-one!1+',
'ref /parent-group-one!1+s',
# sub-group
'ref parent-group-one/new-group!1',
'ref parent-group-one/new-group!1+',
'ref parent-group-one/new-group!1+s',
'ref /parent-group-one/new-group!1',
'ref /parent-group-one/new-group!1+',
'ref /parent-group-one/new-group!1+s',
# alert reference
'ref parent-group-one/old-project^alert#123',
'ref parent-group-one^alert#123',
'ref parent-group-one/new-group^alert#123',
'ref /parent-group-one/old-project^alert#123',
'ref /parent-group-one^alert#123',
'ref /parent-group-one/new-group^alert#123',
# feature flag reference
'ref [feature_flag:parent-group-one/old-project/123]',
'ref [feature_flag:parent-group-one/123]',
'ref [feature_flag:parent-group-one/old-group/123]',
'ref [feature_flag:/parent-group-one/old-project/123]',
'ref [feature_flag:/parent-group-one/123]',
'ref [feature_flag:/parent-group-one/old-group/123]'
]
end
with_them do
it_behaves_like 'does not raise errors on invalid references'
end
end
end
end

View File

@ -146,4 +146,87 @@ RSpec.describe Emails::Imports, feature_category: :importers do
it_behaves_like 'appearance header and footer enabled'
it_behaves_like 'appearance header and footer not enabled'
end
# rubocop:disable RSpec/FactoryBot/AvoidCreate -- creates are required in this case
describe '#project_import_complete' do
let(:user) { create(:user) }
let(:owner) { create(:owner) }
let(:group) { create(:group) }
let(:project) { create(:project, creator: user, import_url: 'https://user:password@example.com') }
let(:user_mapping_enabled) { true }
subject { Notify.project_import_complete(project.id, user.id, user_mapping_enabled, project.safe_import_url) }
context 'when user mapping is enabled' do
context 'with placeholder users awaiting reassignment' do
before do
create(:import_source_user, namespace: group)
project.update!(namespace: group)
end
context 'when user is a group owner' do
before do
group.add_owner(user)
end
it 'mentions owner role can reassign placeholder users' do
is_expected.to deliver_to(user)
is_expected.to have_subject("#{project.name} | Import from https://*****:*****@example.com completed")
is_expected.to have_content('You can reassign contributions on the "Members" page of the group.')
is_expected.to have_content('Reassign contributions')
end
end
context 'when user is not an owner' do
it 'mentions owners can reassign contributions' do
content = 'Users with the Owner role for the group can reassign contributions on the "Members" page.'
is_expected.to deliver_to(user)
is_expected.to have_subject("#{project.name} | Import from https://*****:*****@example.com completed")
is_expected.to have_content(content)
end
end
end
context 'without placeholder users awaiting reassignment' do
before do
group.add_owner(user)
end
it 'does not mention contributions reassignment' do
create(:import_source_user, :pending_reassignment, namespace: group)
is_expected.to deliver_to(user)
is_expected.to have_subject("#{project.name} | Import from https://*****:*****@example.com completed")
is_expected.to have_content('You can now review your import results.')
end
end
context 'when project is in user namespace' do
it 'does not mention contributions reassignment' do
create(:import_source_user, :pending_reassignment, namespace: group)
is_expected.to deliver_to(user)
is_expected.to have_subject("#{project.name} | Import from https://*****:*****@example.com completed")
is_expected.to have_content('You can now review your import results.')
is_expected.to have_content('View import results')
end
end
end
context 'when user mapping is disabled' do
let(:user_mapping_enabled) { false }
it 'does not mention contributions reassignment' do
is_expected.to deliver_to(user)
is_expected.to have_subject("#{project.name} | Import from https://*****:*****@example.com completed")
is_expected.to have_content('You can now review your import results.')
end
end
it_behaves_like 'appearance header and footer enabled'
it_behaves_like 'appearance header and footer not enabled'
end
# rubocop:enable RSpec/FactoryBot/AvoidCreate
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueCopyRunnerTaggings, migration: :gitlab_ci, feature_category: :runner do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
gitlab_schema: :gitlab_ci,
table_name: :ci_runners,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE
)
}
end
end
end

View File

@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::PipelineCreation::Requests, :clean_gitlab_redis_shared_state, feature_category: :pipeline_composition do
let_it_be(:merge_request) { create(:merge_request) }
let_it_be(:project) { merge_request.project }
describe '.failed' do
context 'when given a pipeline creation key and ID' do
@ -45,6 +46,20 @@ RSpec.describe Ci::PipelineCreation::Requests, :clean_gitlab_redis_shared_state,
end
end
describe '.start_for_project' do
it 'stores a pipeline creation for the project and returns its key and ID' do
allow(SecureRandom).to receive(:uuid).and_return('test-id')
request = described_class.start_for_project(project)
expect(request).to eq({
'key' => described_class.request_key(project, 'test-id'),
'id' => 'test-id'
})
expect(described_class.hget(request)).to eq({ 'status' => 'in_progress' })
end
end
describe '.start_for_merge_request' do
it 'stores a pipeline creation for the merge request and returns its key and ID' do
allow(SecureRandom).to receive(:uuid).and_return('test-id')
@ -83,6 +98,16 @@ RSpec.describe Ci::PipelineCreation::Requests, :clean_gitlab_redis_shared_state,
end
end
describe '.get_request' do
it 'returns the data for the request' do
request = described_class.start_for_project(project)
expect(described_class.get_request(project, request['id'])).to eq(
{ 'status' => described_class::IN_PROGRESS }
)
end
end
describe '.hset' do
it 'writes the pipeline creation to the Redis cache' do
request = { 'key' => 'test_key', 'id' => 'test_id' }
@ -115,6 +140,16 @@ RSpec.describe Ci::PipelineCreation::Requests, :clean_gitlab_redis_shared_state,
end
end
describe '.request_key' do
it 'returns the Redis cache key for a single pipeline creation request' do
request_id = described_class.generate_id
expect(described_class.request_key(project, request_id)).to eq(
"pipeline_creation:projects:{#{project.id}}:request:{#{request_id}}"
)
end
end
describe '.merge_request_key' do
it 'returns the Redis cache key for the project' do
expect(described_class.merge_request_key(merge_request)).to eq(

View File

@ -1554,6 +1554,13 @@ RSpec.describe Group, feature_category: :groups_and_projects do
it { expect(group.human_name).to eq(group.name) }
end
describe '#to_human_reference' do
let_it_be(:new_group) { create(:group) }
it { expect(group.to_human_reference).to be_nil }
it { expect(group.to_human_reference(new_group)).to eq(group.full_name) }
end
describe '#add_user' do
let(:user) { create(:user) }

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe LabelNote do
RSpec.describe LabelNote, feature_category: :team_planning do
include Gitlab::Routing.url_helpers
let_it_be(:project) { create(:project, :repository) }

View File

@ -4,6 +4,8 @@ require 'spec_helper'
RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops do
let_it_be(:candidate) { create(:ml_candidates, :with_metrics_and_params, :with_artifact, name: 'candidate0') }
let_it_be(:candidate_with_generic) { create(:ml_candidates, :with_generic_package, name: 'run1') }
let_it_be(:candidate_with_no_package) { create(:ml_candidates, name: 'run2') }
let_it_be(:candidate2) do
create(:ml_candidates, experiment: candidate.experiment, name: 'candidate2', project: candidate.project)
end
@ -125,7 +127,7 @@ RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops d
subject { tested_candidate.artifact_root }
it { is_expected.to eq("/#{candidate.package_name}/#{candidate.iid}/") }
it { is_expected.to eq("/#{candidate.package_name}/candidate_#{candidate.iid}/") }
context 'when candidate belongs to model' do
let(:tested_candidate) { candidate_for_model }
@ -141,12 +143,18 @@ RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops d
subject { tested_candidate.package_version }
it { is_expected.to eq(candidate.iid) }
it { is_expected.to eq("candidate_#{candidate.iid}") }
context 'when candidate belongs to model' do
let(:tested_candidate) { candidate_for_model }
context 'for candidates with legacy generic package' do
let(:tested_candidate) { candidate_with_generic }
it { is_expected.to eq("candidate_#{candidate_for_model.iid}") }
it { is_expected.to eq(candidate_with_generic.iid) }
end
context 'for candidates with no package' do
let(:tested_candidate) { candidate_with_no_package }
it { is_expected.to eq("candidate_#{candidate_with_no_package.iid}") }
end
end
@ -306,7 +314,8 @@ RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops d
subject { described_class.without_model_version }
it 'finds only candidates without model version' do
expect(subject).to match_array([candidate, candidate_for_model])
expect(subject).to match_array([candidate, candidate_for_model, candidate_with_no_package,
candidate_with_generic])
end
end

View File

@ -10,12 +10,14 @@ RSpec.describe Namespaces::UserNamespace, type: :model do
it { is_expected.to validate_presence_of(:owner) }
end
describe '#owners' do
describe 'owner methods' do
let(:owner) { build(:user) }
let(:namespace) { build(:namespace, owner: owner) }
specify do
expect(namespace.owners).to match_array([owner])
describe '#owners' do
specify do
expect(namespace.owners).to match_array([owner])
end
end
end

View File

@ -40,24 +40,4 @@ RSpec.describe Packages::Generic::Package, type: :model, feature_category: :pack
it { is_expected.not_to allow_value(nil).for(:version) }
end
end
describe '#publish_creation_event' do
let_it_be(:project) { create(:project) }
let(:version) { '-' }
subject(:create_package) { described_class.create!(project: project, name: 'incoming', version: version) }
it 'publishes an event' do
expect { create_package }
.to publish_event(::Packages::PackageCreatedEvent)
.with({
project_id: project.id,
id: kind_of(Numeric),
name: 'incoming',
version: '-',
package_type: 'generic'
})
end
end
end

View File

@ -82,4 +82,24 @@ RSpec.describe Packages::MlModel::Package, feature_category: :mlops do
describe '.installable' do
it_behaves_like 'installable packages', :ml_model_package
end
describe '#publish_creation_event' do
let_it_be(:project) { create(:project) }
let(:version) { 'candidate_42' }
subject(:create_package) { described_class.create!(project: project, name: 'incoming', version: version) }
it 'publishes an event' do
expect { create_package }
.to publish_event(::Packages::PackageCreatedEvent)
.with({
project_id: project.id,
id: kind_of(Numeric),
name: 'incoming',
version: 'candidate_42',
package_type: 'ml_model'
})
end
end
end

View File

@ -694,7 +694,7 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
describe '#publish_creation_event' do
let_it_be(:project) { create(:project) }
let(:package) { build_stubbed(:generic_package) }
let(:package) { build_stubbed(:ml_model_package) }
it 'publishes an event' do
expect { package.publish_creation_event }

View File

@ -170,6 +170,15 @@ RSpec.describe ProjectImportState, type: :model, feature_category: :importers do
expect { import_state.finish }.to change { import_state.last_error }.from(error_message).to(nil)
end
it 'sets the user mapping feature flag state from import data for other transitions' do
import_state = create(:import_state, :started)
import_state.project.build_or_assign_import_data(data: { user_contribution_mapping_enabled: true }).save!
import_state.finish
expect(import_state.user_mapping_enabled).to be(true)
end
it 'enqueues housekeeping when an import of a fresh project is completed' do
project = create(:project_empty_repo, :import_started, import_type: :github)
@ -230,6 +239,15 @@ RSpec.describe ProjectImportState, type: :model, feature_category: :importers do
end.to change { project.import_data }
.from(import_data).to(nil)
end
it 'sets the user mapping feature flag state from import data for other transitions' do
import_state = create(:import_state, :scheduled)
import_state.project.build_or_assign_import_data(data: { user_contribution_mapping_enabled: true }).save!
import_state.cancel
expect(import_state.user_mapping_enabled).to be(true)
end
end
context 'state transition: started: [:finished, :canceled, :failed]' do
@ -264,6 +282,38 @@ RSpec.describe ProjectImportState, type: :model, feature_category: :importers do
end
end
describe 'completion notification trigger', :aggregate_failures do
context 'when transitioning from started to finished' do
it 'enqueues ImportCompletionNotificationWorker' do
state = create(:import_state, status: :started, import_type: 'github')
expect(Projects::ImportExport::ImportCompletionNotificationWorker).to receive(:perform_async)
state.finish!
end
end
context 'when transitioning to failed' do
it 'enqueues ImportCompletionNotificationWorker' do
state = create(:import_state, status: :started, import_type: 'github')
expect(Projects::ImportExport::ImportCompletionNotificationWorker).to receive(:perform_async)
state.fail_op!
end
end
context 'when transitioning to scheduled' do
it 'does not enqueue ImportCompletionNotificationWorker' do
state = create(:import_state, status: :none, import_type: 'github')
expect(Projects::ImportExport::ImportCompletionNotificationWorker).not_to receive(:perform_async)
state.schedule!
end
end
end
describe 'clearing `jid` after finish', :clean_gitlab_redis_cache do
context 'without an JID' do
it 'does nothing' do

View File

@ -4071,6 +4071,61 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
describe '#notify_project_import_complete?' do
let(:import_type) { 'gitlab_project' }
let(:project) { build(:project, import_type: import_type) }
before do
allow(project).to receive(:forked?).and_return(false)
end
it 'returns false for forked projects' do
allow(project).to receive(:forked?).and_return(true)
expect(project.notify_project_import_complete?).to be(false)
end
it 'returns false for projects with a remote mirror' do
allow(project).to receive(:mirror?).and_return(true)
expect(project.notify_project_import_complete?).to be(false)
end
it 'returns false for unsupported import types' do
project.import_type = 'gitlab_project'
expect(project.notify_project_import_complete?).to be(false)
end
%w[github gitea bitbucket bitbucket_server].each do |import_type|
it "returns true for #{import_type}" do
project.import_type = import_type
expect(project.notify_project_import_complete?).to be(true)
end
end
end
describe '#safe_import_url' do
let_it_be(:import_url) { 'https://example.com' }
let_it_be(:project) do
create(
:project,
import_url: import_url,
import_data_attributes: { credentials: { user: 'user', password: 'password' } }
)
end
it 'returns import_url with credentials masked' do
expect(project.safe_import_url).to include('*****:*****')
end
it 'returns import_url with no credentials, masked or not' do
safe_import_url = project.safe_import_url(masked: false)
expect(safe_import_url).to eq(import_url)
end
end
describe '#jira_import?' do
let_it_be(:project) { build(:project, import_type: 'jira') }
let_it_be(:jira_import) { build(:jira_import_state, project: project) }
@ -4150,6 +4205,18 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it { expect(project.gitea_import?).to be true }
end
describe '#bitbucket_import?' do
  # Predicate is driven purely by import_type.
  let(:project) { build(:project, import_type: 'bitbucket') }

  it { expect(project.bitbucket_import?).to be true }
end
describe '#bitbucket_server_import?' do
  # Predicate is driven purely by import_type.
  let(:project) { build(:project, import_type: 'bitbucket_server') }

  it { expect(project.bitbucket_server_import?).to be true }
end
describe '#any_import_in_progress?' do
let_it_be_with_reload(:project) { create(:project) }

View File

@ -7,7 +7,7 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
let_it_be(:user) { create(:user) }
let_it_be(:namespace) { create(:group, developers: user) }
let_it_be(:project2) { create(:project, namespace: namespace) }
let_it_be(:project) { create(:project, namespace: namespace) }
let_it_be(:private_project) do
create(
@ -53,6 +53,7 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
starCount
starrersPath
last30DayUsageCount
topics
}
}
}
@ -69,7 +70,7 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
run_with_clean_state(query, context: ctx)
end
create(:ci_catalog_resource, :published, project: project2)
create(:ci_catalog_resource, :published, project: project)
expect do
run_with_clean_state(query, context: ctx)
@ -127,6 +128,22 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
end
end
describe 'catalog resources topics' do
  it 'returns array if there are no topics set' do
    post_graphql(query, current_user: user)

    topics = graphql_data_at(:ciCatalogResources, :nodes, :topics)

    expect(topics).to match([])
  end

  it 'returns topics' do
    public_resource.project.update!(topic_list: 'topic1, topic2, topic3')

    post_graphql(query, current_user: user)

    topics = graphql_data_at(:ciCatalogResources, :nodes, :topics)

    expect(topics).to match(%w[topic1 topic2 topic3])
  end
end
describe 'versions' do
let!(:private_resource_v1) do
create(:ci_catalog_resource_version, semver: '1.0.0', catalog_resource: private_resource)

View File

@ -13,16 +13,22 @@ RSpec.describe 'PipelineCreate', feature_category: :pipeline_composition do
**params
}
graphql_mutation(
:pipeline_create,
variables,
<<-QL
errors
pipeline {
id
}
QL
)
fields = <<-QL
errors
#{response_fields}
QL
graphql_mutation(:pipeline_create, variables, fields, [], operation_name)
end
let(:operation_name) { '' }
let(:response_fields) do
<<-QL
pipeline {
id
}
QL
end
let(:params) { { ref: 'master', variables: [] } }
@ -44,29 +50,54 @@ RSpec.describe 'PipelineCreate', feature_category: :pipeline_composition do
context 'when the pipeline creation is not successful' do
  # NOTE(review): the original span interleaved two versions of this example
  # (a stubbed CreatePipelineService returning 'Error' AND a real run with CI
  # builds disabled); with the service stubbed, the 'Pipelines are disabled!'
  # expectation could never pass. Kept the real-run variant, which exercises
  # the actual error path end-to-end.
  it 'returns error' do
    stub_ci_builds_disabled

    post_graphql_mutation(mutation, current_user: user)

    expect(mutation_response['errors']).to include('Pipelines are disabled!')
    expect(mutation_response['pipeline']).to be_nil
  end
end
context 'when the pipeline creation is successful' do
  # NOTE(review): the original span interleaved two versions of this example
  # (a stubbed service returning a factory pipeline AND a real creation); it
  # posted the mutation twice and held two conflicting `pipeline.id`
  # expectations. Kept the real-creation variant: the stubbed version would
  # only satisfy `change { Ci::Pipeline.count }.by(1)` via the factory record,
  # not via the mutation under test.
  it 'creates a pipeline' do
    stub_ci_pipeline_to_return_yaml_file

    expect do
      post_graphql_mutation(mutation, current_user: user)
    end.to change { ::Ci::Pipeline.count }.by(1)

    created_pipeline = ::Ci::Pipeline.last

    expect(response).to have_gitlab_http_status(:success)
    expect(created_pipeline.source).to eq('api')
    expect(mutation_response['pipeline']['id']).to eq(created_pipeline.to_global_id.to_s)
  end
end
context 'when the `async` argument is `true`' do
  let(:operation_name) { 'internalPipelineCreate' }
  let(:params) { { ref: project.default_branch, async: true } }
  let(:response_fields) do
    <<-QL
      requestId
    QL
  end

  it 'creates the pipeline in a worker and returns the request ID',
    :clean_gitlab_redis_shared_state, :sidekiq_inline do
    stub_ci_pipeline_to_return_yaml_file

    # Pin the UUID so the request can be looked up after the worker runs.
    generated_id = SecureRandom.uuid
    allow(SecureRandom).to receive(:uuid).and_return(generated_id)

    expect { post_graphql_mutation(mutation, current_user: user) }
      .to change { ::Ci::Pipeline.count }.by(1)

    expect(mutation_response['requestId']).to eq(generated_id)
    expect(::Ci::PipelineCreation::Requests.get_request(project, generated_id)['status']).to eq('succeeded')
    expect(::Ci::Pipeline.last.source).to eq('web')
  end
end
end

View File

@ -142,7 +142,7 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
'experiment_id' => candidate.experiment.iid.to_s,
'user_id' => candidate.user.id.to_s,
'start_time' => candidate.start_time,
'artifact_uri' => "http://www.example.com/api/v4/projects/#{project_id}/packages/generic/ml_experiment_#{experiment.iid}/#{candidate.iid}/",
'artifact_uri' => "http://www.example.com/api/v4/projects/#{project_id}/packages/ml_models/candidate:#{candidate.iid}/files/",
'status' => "RUNNING",
'lifecycle_stage' => "active"
}
@ -174,7 +174,7 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
end
it 'gets a run including a valid artifact_uri' do
expect(json_response['run']['info']['artifact_uri']).to eql("http://www.example.com/gitlab/root/api/v4/projects/#{project_id}/packages/generic/ml_experiment_#{experiment.iid}/#{candidate.iid}/")
expect(json_response['run']['info']['artifact_uri']).to eql("http://www.example.com/gitlab/root/api/v4/projects/#{project_id}/packages/ml_models/candidate:#{candidate.iid}/files/")
end
end
@ -324,7 +324,7 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
'user_id' => candidate.user.id.to_s,
'start_time' => candidate.start_time,
'end_time' => params[:end_time],
'artifact_uri' => "http://www.example.com/api/v4/projects/#{project_id}/packages/generic/ml_experiment_#{experiment.iid}/#{candidate.iid}/",
'artifact_uri' => "http://www.example.com/api/v4/projects/#{project_id}/packages/ml_models/candidate:#{candidate.iid}/files/",
'status' => 'FAILED',
'lifecycle_stage' => 'active'
}

Some files were not shown because too many files have changed in this diff Show More