Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-09-07 12:11:52 +00:00
parent 3822e951cb
commit 01f48ebf02
54 changed files with 1274 additions and 206 deletions

View File

@ -1,4 +1,5 @@
import $ from 'jquery';
import { fixTitle } from '~/tooltips';
import { getLocationHash } from '../lib/utils/url_utility';
// Toggle button. Show/hide content inside parent container.
@ -29,9 +30,22 @@ $(() => {
$container.find('.js-toggle-content').toggle(toggleState);
}
// Swap the toggle button's tooltip title between its collapse/expand
// variants, based on the container's current `is-expanded` data flag,
// then refresh the rendered tooltip.
function updateTitle(el, container) {
  const expanded = $(container).data('is-expanded');
  const { collapseTitle, expandTitle } = el.dataset;
  el.setAttribute('title', expanded ? collapseTitle : expandTitle);
  fixTitle(el);
}
$('body').on('click', '.js-toggle-button', function toggleButton(e) {
e.currentTarget.classList.toggle(e.currentTarget.dataset.toggleOpenClass || 'selected');
toggleContainer($(this).closest('.js-toggle-container'));
const containerEl = this.closest('.js-toggle-container');
toggleContainer(containerEl);
updateTitle(this, containerEl);
const targetTag = e.currentTarget.tagName.toLowerCase();
if (targetTag === 'a' || targetTag === 'button') {

View File

@ -35,7 +35,8 @@ class Group < Namespace
foreign_key: :member_namespace_id, inverse_of: :group, class_name: 'GroupMember'
alias_method :members, :group_members
has_many :users, through: :group_members
has_many :users, -> { allow_cross_joins_across_databases(url: "https://gitlab.com/gitlab-org/gitlab/-/issues/422405") },
through: :group_members
has_many :owners, -> {
where(members: { access_level: Gitlab::Access::OWNER })
.allow_cross_joins_across_databases(url: "https://gitlab.com/gitlab-org/gitlab/-/issues/422405")
@ -341,10 +342,6 @@ class Group < Namespace
end
end
# Overrides the +users+ association reader.
# If the association is already loaded, reuse the loaded records; otherwise
# explicitly allow the users/members cross-database join until it is removed
# (tracked in https://gitlab.com/gitlab-org/gitlab/-/issues/422405).
def users
super.loaded? ? super : super.allow_cross_joins_across_databases(url: "https://gitlab.com/gitlab-org/gitlab/-/issues/422405")
end
# Overrides notification_settings has_many association
# This allows to apply notification settings from parent groups
# to child groups and projects.

View File

@ -1,5 +1,7 @@
# frozen_string_literal: true
class Packages::DependencyLink < ApplicationRecord
include EachBatch
belongs_to :package, inverse_of: :dependency_links
belongs_to :dependency, inverse_of: :dependency_links, class_name: 'Packages::Dependency'
has_one :nuget_metadatum, inverse_of: :dependency_link, class_name: 'Packages::Nuget::DependencyLinkMetadatum'
@ -14,6 +16,32 @@ class Packages::DependencyLink < ApplicationRecord
scope :with_dependency_type, ->(dependency_type) { where(dependency_type: dependency_type) }
scope :includes_dependency, -> { includes(:dependency) }
scope :for_package, ->(package) { where(package_id: package.id) }
scope :for_packages, ->(packages) { where(package: packages) }
scope :preload_dependency, -> { preload(:dependency) }
scope :preload_nuget_metadatum, -> { preload(:nuget_metadatum) }
scope :select_dependency_id, -> { select(:dependency_id) }
# Returns one row per package id with a JSON object that maps each
# dependency_type (enum integer, as a string key) to the array of
# dependency ids of that type for the package.
# Implemented as a CTE: the inner query groups dependency links per
# (package_id, dependency_type) and ARRAY_AGGs the dependency ids; the
# outer query then JSON_OBJECT_AGGs those arrays per package_id.
# NOTE(review): JSON_OBJECT_AGG is PostgreSQL-specific — presumably fine
# for this codebase, but the result rows expose a virtual
# `dependency_ids_by_type` attribute only, not full AR models.
def self.dependency_ids_grouped_by_type(packages)
inner_query = where(package_id: packages)
.select('
package_id,
dependency_type,
ARRAY_AGG(dependency_id) as dependency_ids
')
.group(:package_id, :dependency_type)
cte = Gitlab::SQL::CTE.new(:dependency_links_cte, inner_query)
cte_alias = cte.table.alias(table_name)
with(cte.to_arel)
.select('
package_id,
JSON_OBJECT_AGG(
dependency_type,
dependency_ids
) AS dependency_ids_by_type
')
.from(cte_alias)
.group(:package_id)
end
end

View File

@ -13,6 +13,9 @@ class ProjectAuthorization < ApplicationRecord
scope :non_guests, -> { where('access_level > ?', ::Gitlab::Access::GUEST) }
# TODO: To be removed after https://gitlab.com/gitlab-org/gitlab/-/issues/418205
before_create :assign_is_unique
def self.select_from_union(relations)
from_union(relations)
.select(['project_id', 'MAX(access_level) AS access_level'])
@ -27,6 +30,12 @@ class ProjectAuthorization < ApplicationRecord
# Overrides ActiveRecord's +insert_all+ so every bulk insert deduplicates
# on the table's primary-key columns: rows that conflict on the primary
# key are skipped instead of raising a unique-violation error.
def self.insert_all(attributes)
super(attributes, unique_by: connection.schema_cache.primary_keys(table_name))
end
private
# before_create hook: marks newly created authorization rows with
# is_unique = true while the :write_project_authorizations_is_unique
# feature flag is enabled (temporary rollout mechanism; see the TODO
# above referencing issue 418205).
def assign_is_unique
self.is_unique = true if Feature.enabled?(:write_project_authorizations_is_unique)
end
end
ProjectAuthorization.prepend_mod_with('ProjectAuthorization')

View File

@ -89,7 +89,11 @@ module ProjectAuthorizations
add_delay = add_delay_between_batches?(entire_size: attributes.size, batch_size: BATCH_SIZE)
log_details(entire_size: attributes.size, batch_size: BATCH_SIZE) if add_delay
write_is_unique = Feature.enabled?(:write_project_authorizations_is_unique)
attributes.each_slice(BATCH_SIZE) do |attributes_batch|
attributes_batch.each { |attrs| attrs[:is_unique] = true } if write_is_unique
ProjectAuthorization.insert_all(attributes_batch)
perform_delay if add_delay
end

View File

@ -4,6 +4,7 @@ module Packages
module Npm
class GenerateMetadataService
include API::Helpers::RelatedResourcesHelpers
include Gitlab::Utils::StrongMemoize
# Allowed fields are those defined in the abbreviated form
# defined here: https://github.com/npm/registry/blob/master/docs/responses/package-metadata.md#abbreviated-version-object
@ -13,6 +14,8 @@ module Packages
def initialize(name, packages)
@name = name
@packages = packages
@dependencies = {}
@dependency_ids = Hash.new { |h, key| h[key] = {} }
end
def execute(only_dist_tags: false)
@ -21,7 +24,7 @@ module Packages
private
attr_reader :name, :packages
attr_reader :name, :packages, :dependencies, :dependency_ids
def metadata(only_dist_tags)
result = { dist_tags: dist_tags }
@ -38,9 +41,17 @@ module Packages
package_versions = {}
packages.each_batch do |relation|
batched_packages = relation.including_dependency_links
.preload_files
.preload_npm_metadatum
batched_packages = if optimization_enabled?
load_dependencies(relation)
load_dependency_ids(relation)
relation.preload_files
.preload_npm_metadatum
else
relation.including_dependency_links
.preload_files
.preload_npm_metadatum
end
batched_packages.each do |package|
package_file = package.installable_package_files.last
@ -82,14 +93,23 @@ module Packages
end
def build_package_dependencies(package)
dependencies = Hash.new { |h, key| h[key] = {} }
if optimization_enabled?
inverted_dependency_types = Packages::DependencyLink.dependency_types.invert.stringify_keys
dependency_ids[package.id].each_with_object(Hash.new { |h, key| h[key] = {} }) do |(type, ids), memo|
ids.each do |id|
memo[inverted_dependency_types[type]].merge!(dependencies[id])
end
end
else
dependencies = Hash.new { |h, key| h[key] = {} }
package.dependency_links.each do |dependency_link|
dependency = dependency_link.dependency
dependencies[dependency_link.dependency_type][dependency.name] = dependency.version_pattern
package.dependency_links.each do |dependency_link|
dependency = dependency_link.dependency
dependencies[dependency_link.dependency_type][dependency.name] = dependency.version_pattern
end
dependencies
end
dependencies
end
def sorted_versions
@ -106,6 +126,36 @@ module Packages
json = package.npm_metadatum&.package_json || {}
json.slice(*PACKAGE_JSON_ALLOWED_FIELDS)
end
# Batch-loads the Packages::Dependency rows referenced by the dependency
# links of +packages+ and caches them in the instance-level `dependencies`
# hash as { dependency_id => { name => version_pattern } }.
# Ids already cached are excluded (id_not_in), so repeated calls across
# package batches only fetch new dependencies.
def load_dependencies(packages)
Packages::Dependency
.id_in(
Packages::DependencyLink
.for_packages(packages)
.select_dependency_id
)
.id_not_in(dependencies.keys)
.each_batch do |relation|
relation.each do |dependency|
dependencies[dependency.id] = { dependency.name => dependency.version_pattern }
end
end
end
# Caches, per package id, the dependency ids grouped by dependency type
# (as produced by Packages::DependencyLink.dependency_ids_grouped_by_type)
# into the instance-level `dependency_ids` hash. Batched on package_id
# because the grouped query returns one row per package.
def load_dependency_ids(packages)
Packages::DependencyLink
.dependency_ids_grouped_by_type(packages)
.each_batch(column: :package_id) do |relation|
relation.each do |dependency_link|
dependency_ids[dependency_link.package_id] = dependency_link.dependency_ids_by_type
end
end
end
# Feature flag guarding the optimized (batched) metadata generation path;
# memoized so the flag is evaluated once per service instance.
def optimization_enabled?
Feature.enabled?(:npm_optimize_metadata_generation)
end
strong_memoize_attr :optimization_enabled?
end
end
end

View File

@ -6,7 +6,7 @@
= _('Gitpod')
= render Pajamas::ButtonComponent.new(button_options: { class: 'js-settings-toggle' }) do
= expanded ? _('Collapse') : _('Expand')
.gl-text-secondary
.gl-text-secondary.gl-mb-5
#js-gitpod-settings-help-text{ data: {"message" => gitpod_enable_description, "message-url" => "https://gitpod.io/" } }
= link_to sprite_icon('question-o'), help_page_path('integration/gitpod.md'), target: '_blank', class: 'has-tooltip', title: _('More information')

View File

@ -61,6 +61,7 @@
= render "projects/merge_requests/tabs/pane", id: "notes", class: "notes voting_notes" do
%div{ class: "#{'merge-request-overview' if moved_mr_sidebar_enabled?}" }
%section
= render_if_exists "projects/merge_requests/diff_summary"
.issuable-discussion.js-vue-notes-event
- if @merge_request.description.present?
.detail-page-description.gl-pb-0

View File

@ -2334,6 +2334,33 @@
:weight: 1
:idempotent: true
:tags: []
- :name: bitbucket_import_advance_stage
:worker_name: Gitlab::BitbucketImport::AdvanceStageWorker
:feature_category: :importers
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: false
:tags: []
- :name: bitbucket_import_stage_finish_import
:worker_name: Gitlab::BitbucketImport::Stage::FinishImportWorker
:feature_category: :importers
:has_external_dependencies: true
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: false
:tags: []
- :name: bitbucket_import_stage_import_repository
:worker_name: Gitlab::BitbucketImport::Stage::ImportRepositoryWorker
:feature_category: :importers
:has_external_dependencies: true
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: false
:tags: []
- :name: bitbucket_server_import_advance_stage
:worker_name: Gitlab::BitbucketServerImport::AdvanceStageWorker
:feature_category: :importers

View File

@ -0,0 +1,76 @@
# frozen_string_literal: true
module Gitlab
module BitbucketImport
# Shared behavior for Bitbucket (cloud) import stage workers: Sidekiq
# configuration, failure tracking, and the common perform/import contract.
# Including classes must implement a private `import(project)` method and
# may override `abort_on_failure` to fail the whole import on error.
module StageMethods
extend ActiveSupport::Concern
included do
include ApplicationWorker
worker_has_external_dependencies!
feature_category :importers
data_consistency :always
sidekiq_options dead: false, retry: 3
# Once Sidekiq retries are exhausted, record the failure and mark the
# whole import as failed.
sidekiq_retries_exhausted do |msg, e|
Gitlab::Import::ImportFailureService.track(
project_id: msg['args'][0],
exception: e,
fail_import: true
)
end
end
# project_id - The ID of the GitLab project to import the data into.
#
# Looks up the project (bailing out if its import is no longer running),
# delegates to the stage's `import`, and tracks/re-raises any error.
def perform(project_id)
info(project_id, message: 'starting stage')
project = find_project(project_id)
return unless project
import(project)
info(project_id, message: 'stage finished')
rescue StandardError => e
Gitlab::Import::ImportFailureService.track(
project_id: project_id,
exception: e,
error_source: self.class.name,
fail_import: abort_on_failure
)
raise(e)
end
# Returns the project only while its import state is :started, else nil.
def find_project(id)
# If the project has been marked as failed we want to bail out
# automatically.
# rubocop: disable CodeReuse/ActiveRecord
Project.joins_import_state.where(import_state: { status: :started }).find_by_id(id)
# rubocop: enable CodeReuse/ActiveRecord
end
# Whether a failure in this stage should fail the entire import.
# Stage workers override this to return true for critical stages.
def abort_on_failure
false
end
private
def info(project_id, extra = {})
Logger.info(log_attributes(project_id, extra))
end
# Merges the standard project/stage identifiers into log payloads.
def log_attributes(project_id, extra = {})
extra.merge(
project_id: project_id,
import_stage: self.class.name
)
end
end
end
end

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
module Gitlab
module BitbucketImport
# AdvanceStageWorker is a worker used by the Bitbucket Importer to wait for a
# number of jobs to complete, without blocking a thread. Once all jobs have
# been completed this worker will advance the import process to the next
# stage.
class AdvanceStageWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ::Gitlab::Import::AdvanceStage
data_consistency :delayed
sidekiq_options dead: false, retry: 3
feature_category :importers
# Arguments 1 (waiters hash) and 2 (next stage name) are safe to log.
loggable_arguments 1, 2
# The known importer stages and their corresponding Sidekiq workers.
STAGES = {
finish: Stage::FinishImportWorker
}.freeze
# Returns the started import state for the project, or nil when the
# import is no longer running.
def find_import_state(project_id)
ProjectImportState.jid_by(project_id: project_id, status: :started)
end
private
# Maps a stage name (from STAGES) to its worker class; raises KeyError
# for unknown stages.
def next_stage_worker(next_stage)
STAGES.fetch(next_stage.to_sym)
end
end
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
module Gitlab
module BitbucketImport
module Stage
# Final stage of the Bitbucket import: marks the project import as
# complete and records import metrics.
class FinishImportWorker # rubocop:disable Scalability/IdempotentWorker
include StageMethods
private
# Runs the project's post-import hooks and tracks the finished import.
def import(project)
project.after_import
Gitlab::Import::Metrics.new(:bitbucket_importer, project).track_finished_import
end
end
end
end
end

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
module Gitlab
module BitbucketImport
module Stage
# First stage of the Bitbucket import: imports the Git repository and then
# advances to the finish stage.
class ImportRepositoryWorker # rubocop:disable Scalability/IdempotentWorker
include StageMethods
private
# Imports the repository, then enqueues the next (finish) stage.
def import(project)
importer = importer_class.new(project)
importer.execute
FinishImportWorker.perform_async(project.id)
end
def importer_class
Importers::RepositoryImporter
end
# A failed repository import is fatal: fail the whole import.
def abort_on_failure
true
end
end
end
end
end

View File

@ -1,8 +1,8 @@
---
name: ci_support_include_rules_changes
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/129866
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/421608
name: bitbucket_parallel_importer
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/130731
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/423530
milestone: '16.4'
type: development
group: group::pipeline authoring
group: group::import and integrate
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: npm_optimize_metadata_generation
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128514
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/416346
milestone: '16.4'
type: development
group: group::package registry
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: write_project_authorizations_is_unique
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/130299
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/424097
milestone: '16.4'
type: development
group: group::security policies
default_enabled: false

View File

@ -79,6 +79,12 @@
- 1
- - batched_git_ref_updates_project_cleanup
- 1
- - bitbucket_import_advance_stage
- 1
- - bitbucket_import_stage_finish_import
- 1
- - bitbucket_import_stage_import_repository
- 1
- - bitbucket_server_import_advance_stage
- 1
- - bitbucket_server_import_import_lfs_object

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
class SyncIndexForCiStagesPipelineIdBigint < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  TABLE_NAME = :ci_stages

  # Mirrors the existing pipeline_id indexes onto the
  # pipeline_id_convert_to_bigint column ahead of the int -> bigint swap.
  # Each entry maps an index name to [columns, options].
  # Frozen: mutable constants can be modified at runtime by accident.
  INDEXES = {
    'index_ci_stages_on_pipeline_id_convert_to_bigint_and_name' => [
      [:pipeline_id_convert_to_bigint, :name], { unique: true }
    ],
    'index_ci_stages_on_pipeline_id_convert_to_bigint' => [
      [:pipeline_id_convert_to_bigint], {}
    ],
    'index_ci_stages_on_pipeline_id_convert_to_bigint_and_id' => [
      [:pipeline_id_convert_to_bigint, :id], { where: 'status = ANY (ARRAY[0, 1, 2, 8, 9, 10])' }
    ],
    'index_ci_stages_on_pipeline_id_convert_to_bigint_and_position' => [
      [:pipeline_id_convert_to_bigint, :position], {}
    ]
  }.freeze

  # Creates each index concurrently (requires disable_ddl_transaction!).
  def up
    INDEXES.each do |index_name, (columns, options)|
      add_concurrent_index TABLE_NAME, columns, name: index_name, **options
    end
  end

  # Drops the indexes by name; only the keys of INDEXES are needed here.
  def down
    INDEXES.each_key do |index_name|
      remove_concurrent_index_by_name TABLE_NAME, index_name
    end
  end
end

View File

@ -0,0 +1 @@
c06fc36180c1b495eb800ba1c25bbe441f6973b0979d7fbc114ca7f128bd7c99

View File

@ -31357,6 +31357,14 @@ CREATE UNIQUE INDEX index_ci_stages_on_pipeline_id_and_name ON ci_stages USING b
CREATE INDEX index_ci_stages_on_pipeline_id_and_position ON ci_stages USING btree (pipeline_id, "position");
CREATE INDEX index_ci_stages_on_pipeline_id_convert_to_bigint ON ci_stages USING btree (pipeline_id_convert_to_bigint);
CREATE INDEX index_ci_stages_on_pipeline_id_convert_to_bigint_and_id ON ci_stages USING btree (pipeline_id_convert_to_bigint, id) WHERE (status = ANY (ARRAY[0, 1, 2, 8, 9, 10]));
CREATE UNIQUE INDEX index_ci_stages_on_pipeline_id_convert_to_bigint_and_name ON ci_stages USING btree (pipeline_id_convert_to_bigint, name);
CREATE INDEX index_ci_stages_on_pipeline_id_convert_to_bigint_and_position ON ci_stages USING btree (pipeline_id_convert_to_bigint, "position");
CREATE INDEX index_ci_stages_on_project_id ON ci_stages USING btree (project_id);
CREATE INDEX index_ci_subscriptions_projects_author_id ON ci_subscriptions_projects USING btree (author_id);

View File

@ -186,8 +186,7 @@ You can view the exact JSON payload sent to GitLab Inc. in the Admin Area. To vi
1. Sign in as a user with administrator access.
1. On the left sidebar, select **Search or go to**.
1. Select **Admin Area**.
1. Select **Settings > Metrics and profiling**.
1. Expand the **Usage statistics** section.
1. Select **Settings > Service usage data**.
1. Select **Preview payload**.
For an example payload, see [Example Service Ping payload](../../development/internal_analytics/service_ping/index.md#example-service-ping-payload).
@ -205,7 +204,7 @@ To upload the payload manually:
1. Sign in as a user with administrator access.
1. On the left sidebar, select **Search or go to**.
1. Select **Admin Area**.
1. Select **Settings > Service** usage data.
1. Select **Settings > Service usage data**.
1. Select **Download payload**.
1. Save the JSON file.
1. Visit [Service usage data center](https://version.gitlab.com/usage_data/new).

View File

@ -1742,7 +1742,7 @@ Input type: `CiAiGenerateConfigInput`
| ---- | ---- | ----------- |
| <a id="mutationciaigenerateconfigclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationciaigenerateconfigerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationciaigenerateconfigusermessage"></a>`userMessage` | [`AiMessageType`](#aimessagetype) | User chat message. |
| <a id="mutationciaigenerateconfigusermessage"></a>`userMessage` | [`AiMessage`](#aimessage) | User chat message. |
### `Mutation.ciJobTokenScopeAddProject`
@ -7798,28 +7798,28 @@ The edge type for [`AiChatMessage`](#aichatmessage).
| <a id="aichatmessageedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="aichatmessageedgenode"></a>`node` | [`AiChatMessage`](#aichatmessage) | The item at the end of the edge. |
#### `AiMessageTypeConnection`
#### `AiMessageConnection`
The connection type for [`AiMessageType`](#aimessagetype).
The connection type for [`AiMessage`](#aimessage).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="aimessagetypeconnectionedges"></a>`edges` | [`[AiMessageTypeEdge]`](#aimessagetypeedge) | A list of edges. |
| <a id="aimessagetypeconnectionnodes"></a>`nodes` | [`[AiMessageType]`](#aimessagetype) | A list of nodes. |
| <a id="aimessagetypeconnectionpageinfo"></a>`pageInfo` | [`PageInfo!`](#pageinfo) | Information to aid in pagination. |
| <a id="aimessageconnectionedges"></a>`edges` | [`[AiMessageEdge]`](#aimessageedge) | A list of edges. |
| <a id="aimessageconnectionnodes"></a>`nodes` | [`[AiMessage]`](#aimessage) | A list of nodes. |
| <a id="aimessageconnectionpageinfo"></a>`pageInfo` | [`PageInfo!`](#pageinfo) | Information to aid in pagination. |
#### `AiMessageTypeEdge`
#### `AiMessageEdge`
The edge type for [`AiMessageType`](#aimessagetype).
The edge type for [`AiMessage`](#aimessage).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="aimessagetypeedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="aimessagetypeedgenode"></a>`node` | [`AiMessageType`](#aimessagetype) | The item at the end of the edge. |
| <a id="aimessageedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="aimessageedgenode"></a>`node` | [`AiMessage`](#aimessage) | The item at the end of the edge. |
#### `AlertManagementAlertConnection`
@ -12595,6 +12595,18 @@ Duo Chat message.
| <a id="aichatmessagerole"></a>`role` | [`AiChatMessageRole!`](#aichatmessagerole) | Message role. |
| <a id="aichatmessagetimestamp"></a>`timestamp` | [`Time!`](#time) | Message timestamp. |
### `AiMessage`
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="aimessagecontent"></a>`content` | [`String`](#string) | Content of the message or null if loading. |
| <a id="aimessageerrors"></a>`errors` | [`[String!]!`](#string) | Errors that occurred while asynchronously fetching an AI(assistant) response. |
| <a id="aimessageid"></a>`id` | [`ID`](#id) | Global ID of the message. |
| <a id="aimessageisfetching"></a>`isFetching` | [`Boolean`](#boolean) | Whether the content is still being fetched, for a message with the assistant role. |
| <a id="aimessagerole"></a>`role` | [`String!`](#string) | Role of the message (system, user, assistant). |
### `AiMessageExtras`
Extra metadata for AI message.
@ -12605,18 +12617,6 @@ Extra metadata for AI message.
| ---- | ---- | ----------- |
| <a id="aimessageextrassources"></a>`sources` | [`[JSON!]`](#json) | Sources used to form the message. |
### `AiMessageType`
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="aimessagetypecontent"></a>`content` | [`String`](#string) | Content of the message or null if loading. |
| <a id="aimessagetypeerrors"></a>`errors` | [`[String!]!`](#string) | Errors that occurred while asynchronously fetching an AI(assistant) response. |
| <a id="aimessagetypeid"></a>`id` | [`ID`](#id) | Global ID of the message. |
| <a id="aimessagetypeisfetching"></a>`isFetching` | [`Boolean`](#boolean) | Whether the content is still being fetched, for a message with the assistant role. |
| <a id="aimessagetyperole"></a>`role` | [`String!`](#string) | Role of the message (system, user, assistant). |
### `AiResponse`
#### Fields
@ -12624,14 +12624,17 @@ Extra metadata for AI message.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="airesponsechunkid"></a>`chunkId` | [`Int`](#int) | Incremental ID for a chunk from a streamed response. Null when it is not a streamed response. |
| <a id="airesponsecontent"></a>`content` | [`String`](#string) | Raw response content. |
| <a id="airesponsecontenthtml"></a>`contentHtml` | [`String`](#string) | Response content as HTML. |
| <a id="airesponseerrors"></a>`errors` | [`[String!]`](#string) | Errors return by AI API as response. |
| <a id="airesponseextras"></a>`extras` | [`AiMessageExtras`](#aimessageextras) | Extra message metadata. |
| <a id="airesponseid"></a>`id` | [`ID`](#id) | UUID of the message. |
| <a id="airesponserequestid"></a>`requestId` | [`String`](#string) | ID of the original request. |
| <a id="airesponseresponsebody"></a>`responseBody` | [`String`](#string) | Response body from AI API. |
| <a id="airesponseresponsebodyhtml"></a>`responseBodyHtml` | [`String`](#string) | Response body HTML. |
| <a id="airesponseresponsebody"></a>`responseBody` **{warning-solid}** | [`String`](#string) | **Deprecated** in 16.4. Moved to content attribute. |
| <a id="airesponseresponsebodyhtml"></a>`responseBodyHtml` **{warning-solid}** | [`String`](#string) | **Deprecated** in 16.4. Moved to contentHtml attribute. |
| <a id="airesponserole"></a>`role` | [`AiChatMessageRole!`](#aichatmessagerole) | Message role. |
| <a id="airesponsetimestamp"></a>`timestamp` | [`Time!`](#time) | Message timestamp. |
| <a id="airesponsetype"></a>`type` | [`String`](#string) | Message type. |
| <a id="airesponsetype"></a>`type` | [`AiMessageType`](#aimessagetype) | Message type. |
### `AlertManagementAlert`
@ -22756,7 +22759,7 @@ four standard [pagination arguments](#connection-pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="projectconversationsciconfigmessages"></a>`ciConfigMessages` **{warning-solid}** | [`AiMessageTypeConnection`](#aimessagetypeconnection) | **Introduced** in 16.0. This feature is an Experiment. It can be changed or removed at any time. Messages generated by open ai and the user. |
| <a id="projectconversationsciconfigmessages"></a>`ciConfigMessages` **{warning-solid}** | [`AiMessageConnection`](#aimessageconnection) | **Introduced** in 16.0. This feature is an Experiment. It can be changed or removed at any time. Messages generated by open ai and the user. |
### `ProjectDataTransfer`
@ -26040,6 +26043,14 @@ Roles to filter in chat message.
| <a id="aichatmessagerolesystem"></a>`SYSTEM` | Filter only system messages. |
| <a id="aichatmessageroleuser"></a>`USER` | Filter only user messages. |
### `AiMessageType`
Types of messages returned from AI features.
| Value | Description |
| ----- | ----------- |
| <a id="aimessagetypetool"></a>`TOOL` | Tool selection message. |
### `AlertManagementAlertSort`
Values for sorting alerts.

View File

@ -0,0 +1,120 @@
---
status: proposed
creation-date: "2023-03-06"
authors: [ "@grzesiek", "@fabiopitino" ]
coach: "@ayufan"
approvers: [ "@jreporter", "@sgoldstein" ]
owning-stage: "~devops::ops section"
---
# GitLab Events Platform
## Summary
GitLab codebase has grown a lot since the [first commit](https://gitlab.com/gitlab-org/gitlab/-/commit/93efff945215)
made in 2011. We've been able to implement many features that got adopted by
millions of users. There is a demand for more features, but there is also an
opportunity of a paradigm change: instead of delivering features that cover
specific use-cases, we can start building a platform that our users will be
able to extend with automation as they see fit. We can build a flexible and
generic DevSecOps solution that will integrate with external and internal
workflows using a robust eventing system.
In this design document we propose to add a few additional layers of
abstraction to make it possible to:
1. Design a notion of events hierarchy that encodes their origin and schema.
1. Publish events from within the application code using Publishers.
1. Intercept and transform events from external sources using Gateways.
1. Subscribe to internal / external events using Subscribers.
1. Hide queueing and processing implementation details behind an abstraction.
This will allow us to transform GitLab into a generic automation tooling, but
will also reduce the complexity of existing events-like features:
1. [Webhooks](../../../user/project/integrations/webhook_events.md)
1. [Audit Events](../../../administration/audit_events.md)
1. [GitLab CI Events](https://about.gitlab.com/blog/2022/08/03/gitlab-ci-event-workflows/)
1. [Package Events](https://gitlab.com/groups/gitlab-org/-/epics/9677)
1. [GraphQL Events](https://gitlab.com/gitlab-org/gitlab/-/blob/dabf4783f5d758f69d947f5ff2391b4b1fb5f18a/app/graphql/graphql_triggers.rb)
## Goals
Build required abstractions and their implementation needed to better manage
internally and externally published events.
## Challenges
1. There is no solution allowing users to build subscribers and publishers.
1. There is no solution for managing subscriptions outside of the Ruby code.
1. There are many events-like features inside GitLab not using common abstractions.
1. Our current eventing solution `Gitlab::EventStore` is tightly coupled with Sidekiq.
1. There is no unified and resilient way to subscribe to externally published events.
1. Payloads associated with events differ a lot, similarly to how we define schemas.
1. Not all events are strongly typed, there is no solution to manage their hierarchy.
1. Events are not being versioned, it is easy to break schema contracts.
1. We want to build more features based on events, but because of missing
abstractions the value we could get from the implementations is limited.
## Proposal
### Publishers
Publishing events from within our Rails codebase is an important piece of the
proposed architecture. Events should be strongly typed, ideally using Ruby classes.
For example, we could emit events in the following way:
```ruby
include Gitlab::Events::Emittable
emit Gitlab::Events::Package::Published.new(package)
```
- Publishing events should be a non-blocking, and near zero-cost operation.
- Publishing events should take their origin and identity into the account.
- Publishing events should build their payload based on their lineage.
- `emit` can be a syntactic sugar over mechanism used in `GitLab::EventStore`.
### Subscribers
Subscribers will allow application developers to subscribe to arbitrary events,
published internally or externally. Subscribers could also allow application
developers to build subscription mechanisms that could be used by our users to,
for example, subscribe to project events to trigger pipelines.
Events that subscribers subscribe to become contracts, hence we
should version them or use a backwards- and forwards-compatible solution (like
Protobuf).
### Gateways
Gateways can be used to intercept internal and external events and change their
type, augment lineage and transform their payloads.
Gateways can be used, for example, to implement sink endpoints that intercept
Cloud Events, wrap them into internally used Ruby classes, and allow developers /
users to subscribe to them.
We also may be able to implement [cross-Cell](../cells) communication through a
generic events bus implemented using Gateways.
There are also ideas around cross-instance communication to improve how GitLab
can coordinate complex deployments that involve multiple instances.
### Processing
Today in order to queue events, we either use PostgreSQL or Sidekiq. Both
mechanisms are being used interchangeably and are tightly coupled with existing
solution.
The main purpose of building an abstraction for queuing and processing is to be
able to switch to a different queuing backend when needed. For example, we
could queue some of the events on Google Pub/Sub, and send those through a
dedicated Gateway on their way back to the application.
### Observability
In order to understand interactions between events, publishers and subscribers
we may need to deliver a proper instrumentation _via_ OpenTelemetry. This will
allow us to visualize these interactions with Distributed Tracing Backends.

View File

@ -8,63 +8,84 @@ type: index
# Get started with GitLab CI/CD **(FREE ALL)**
Use GitLab CI/CD to automatically build, test, deploy, and monitor your applications.
CI/CD is a continuous method of software development, where you continuously build,
test, deploy, and monitor iterative code changes.
GitLab CI/CD can catch bugs and errors early in the development cycle. It can ensure that
all the code deployed to production complies with your established code standards.
This iterative process helps reduce the chance that you develop new code based on
buggy or failed previous versions. GitLab CI/CD can catch bugs early in the development cycle,
and help ensure that all the code deployed to production complies with your established code standards.
<div class="video-fallback">
Video demonstration of continuous integration with GitLab CI/CD: <a href="https://www.youtube.com/watch?v=ljth1Q5oJoo">Continuous Integration with GitLab (overview demo)</a>.
</div>
<figure class="video-container">
<iframe src="https://www.youtube-nocookie.com/embed/ljth1Q5oJoo" frameborder="0" allowfullscreen> </iframe>
</figure>
## Common terms
If you are new to GitLab CI/CD, get started with a tutorial:
If you're new to GitLab CI/CD, start by reviewing some of the commonly used terms.
- [Create and run your first GitLab CI/CD pipeline](quick_start/index.md)
- [Create a complex pipeline](quick_start/tutorial.md)
### The `.gitlab-ci.yml` file
## CI/CD methodologies
To use GitLab CI/CD, you start with a `.gitlab-ci.yml` file at the root of your project.
In this file, you specify the list of things you want to do, like test and deploy your application.
This file follows the YAML format and has its own special syntax.
With the continuous method of software development, you continuously build,
test, and deploy iterative code changes. This iterative process helps reduce
the chance that you develop new code based on buggy or failed previous versions.
With this method, you strive to have less human intervention or even no intervention at all,
from the development of new code until its deployment.
You can name this file anything you want, but `.gitlab-ci.yml` is the most common name.
Use the pipeline editor to edit the `.gitlab-ci.yml` file and test the syntax before you commit changes.
The three primary approaches for CI/CD are:
**Get started:**
- [Continuous Integration (CI)](https://en.wikipedia.org/wiki/Continuous_integration)
- [Continuous Delivery (CD)](https://en.wikipedia.org/wiki/Continuous_delivery)
- [Continuous Deployment (CD)](https://en.wikipedia.org/wiki/Continuous_deployment)
- [Create your first `.gitlab-ci.yml` file](quick_start/index.md).
- [View all the possible keywords that you can use in the `.gitlab-ci.yml` file](yaml/index.md).
Out-of-the-box management systems can decrease hours spent on maintaining toolchains by 10% or more.
Watch our ["Mastering continuous software development"](https://about.gitlab.com/webcast/mastering-ci-cd/)
webcast to learn about continuous methods and how built-in GitLab CI/CD can help you simplify and scale software development.
### Runners
- <i class="fa fa-youtube-play youtube" aria-hidden="true"></i>Learn how to: [configure CI/CD](https://www.youtube.com/watch?v=opdLqwz6tcE).
- [Make the case for CI/CD in your organization](https://about.gitlab.com/devops-tools/github-vs-gitlab/).
- Learn how [Verizon reduced rebuilds](https://about.gitlab.com/blog/2019/02/14/verizon-customer-story/) from 30 days to under 8 hours with GitLab.
- <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [Get a deeper look at GitLab CI/CD](https://youtu.be/l5705U8s_nQ?t=369).
Runners are the agents that run your jobs. These agents can run on physical machines or virtual instances.
In your `.gitlab-ci.yml` file, you can specify a container image you want to use when running the job.
The runner loads the image and runs the job either locally or in the container.
## Administration
If you use GitLab.com, free shared runners are already available for you, and you can also
register your own runners on GitLab.com if needed.
You can change the default behavior of GitLab CI/CD for:
If you don't use GitLab.com, you can:
- An entire GitLab instance in the [CI/CD administration settings](../administration/cicd.md).
- Specific projects in the [pipelines settings](pipelines/settings.md).
- Register runners or use runners already registered for your self-managed instance.
- Create a runner on your local machine.
See also:
**Get started:**
- [Enable or disable GitLab CI/CD in a project](enable_or_disable_ci.md).
- [Create a runner on your local machine](../tutorials/create_register_first_runner/index.md).
- [Learn more about runners](https://docs.gitlab.com/runner/).
### Pipelines
Pipelines are made up of jobs and stages:
- **Jobs** define what you want to do. For example, test code changes, or deploy
to a staging environment.
- Jobs are grouped into **stages**. Each stage contains at least one job.
Typical stages might be `build`, `test`, and `deploy`.
**Get started:**
- [Learn more about pipelines](pipelines/index.md).
### CI/CD variables
CI/CD variables help you customize jobs by making values defined elsewhere accessible to jobs.
They can be hard-coded in your `.gitlab-ci.yml` file, set in project settings, or dynamically
generated as [predefined variables](variables/predefined_variables.md).
**Get started:**
- [Learn more about CI/CD variables](variables/index.md).
## Videos
- <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [GitLab CI/CD demo](https://www.youtube-nocookie.com/embed/ljth1Q5oJoo).
- <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [GitLab CI/CD and the Web IDE](https://youtu.be/l5705U8s_nQ?t=369).
- Webcast: [Mastering continuous software development](https://about.gitlab.com/webcast/mastering-ci-cd/).
## Related topics
- [Why you might choose GitLab CI/CD](https://about.gitlab.com/blog/2016/10/17/gitlab-ci-oohlala/)
- [Reasons you might migrate from another platform](https://about.gitlab.com/blog/2016/07/22/building-our-web-app-on-gitlab-ci/)
- [Five teams that made the switch to GitLab CI/CD](https://about.gitlab.com/blog/2019/04/25/5-teams-that-made-the-switch-to-gitlab-ci-cd/)
- If you use VS Code to edit your GitLab CI/CD configuration, the
[GitLab Workflow VS Code extension](../user/project/repository/vscode.md) helps you
- [Five teams that made the switch to GitLab CI/CD](https://about.gitlab.com/blog/2019/04/25/5-teams-that-made-the-switch-to-gitlab-ci-cd/).
- [Make the case for CI/CD in your organization](https://about.gitlab.com/devops-tools/github-vs-gitlab/).
- Learn how [Verizon reduced rebuilds](https://about.gitlab.com/blog/2019/02/14/verizon-customer-story/) from 30 days to under 8 hours with GitLab.
- Use the [GitLab Workflow VS Code extension](../user/project/repository/vscode.md) to
[validate your configuration](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow#validate-gitlab-ci-configuration)
and [view your pipeline status](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow#information-about-your-branch-pipelines-mr-closing-issue)
and [view your pipeline status](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow#information-about-your-branch-pipelines-mr-closing-issue).

View File

@ -503,7 +503,7 @@ for information about work to improve this behavior.
### `include` with `rules:changes`
> Support for `rules:changes` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/342209) in GitLab 16.4 [with a flag](../../administration/feature_flags.md) named `ci_support_include_rules_changes`. Disabled by default.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/342209) in GitLab 16.4.
Use [`rules:changes`](index.md#ruleschanges) to conditionally include other configuration files
based on changed files. For example:

View File

@ -9,9 +9,11 @@ type: reference
This document lists the configuration options for your GitLab `.gitlab-ci.yml` file.
- For a quick introduction to GitLab CI/CD, follow the [quick start guide](../quick_start/index.md).
- For a collection of examples, see [GitLab CI/CD Examples](../examples/index.md).
- To view a large `.gitlab-ci.yml` file used in an enterprise, see the [`.gitlab-ci.yml` file for `gitlab`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
- For a collection of examples, see [GitLab CI/CD examples](../examples/index.md).
- To view a large `.gitlab-ci.yml` file used in an enterprise, see the
[`.gitlab-ci.yml` file for `gitlab`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
- To create your own `.gitlab-ci.yml` file, try a tutorial that demonstrates a
[simple](../quick_start/index.md) or [complex](../quick_start/tutorial.md) pipeline.
When you are editing your `.gitlab-ci.yml` file, you can validate it with the
[CI Lint](../lint.md) tool.

View File

@ -55,8 +55,8 @@ To set up the Jenkins project you intend to run your build on, read
You can configure your integration between Jenkins and GitLab:
- With the [recommended approach for Jenkins integration](../../integration/jenkins.md#configure-a-jenkins-integration).
- [Using a webhook](../../integration/jenkins.md#configure-a-webhook).
- With the [recommended approach for Jenkins integration](../../integration/jenkins.md#with-a-jenkins-server-url).
- [Using a webhook](../../integration/jenkins.md#with-a-webhook).
## Test your setup

View File

@ -116,7 +116,7 @@ Set up the Jenkins project you intend to run your build on.
Configure the GitLab integration with Jenkins in one of the following ways.
### Configure a Jenkins integration
### With a Jenkins server URL
You should use this approach for Jenkins integrations if you can provide GitLab
with your Jenkins server URL and authentication information.
@ -139,9 +139,9 @@ with your Jenkins server URL and authentication information.
1. Optional. Select **Test settings**.
1. Select **Save changes**.
### Configure a webhook
### With a webhook
If you cannot [provide GitLab with your Jenkins server URL and authentication information](#configure-a-jenkins-integration), you can configure a webhook to integrate GitLab and Jenkins.
If you cannot [provide GitLab with your Jenkins server URL and authentication information](#with-a-jenkins-server-url), you can configure a webhook to integrate GitLab and Jenkins.
1. In the configuration of your Jenkins job, in the GitLab configuration section, select **Advanced**.
1. Under **Secret Token**, select **Generate**.

View File

@ -110,7 +110,7 @@ You can set a description template at the **instance level** for issues
and merge requests by using an [instance template repository](../admin_area/settings/instance_template_repository.md).
You can also use the instance template repository for file templates.
You might also be interested [project templates](../admin_area/custom_project_templates.md)
You might also be interested in [project templates](../admin_area/custom_project_templates.md)
that you can use when creating a new project in the instance.
### Set group-level description templates **(PREMIUM ALL)**

View File

@ -0,0 +1,78 @@
# frozen_string_literal: true

module Gitlab
  module BitbucketImport
    module Importers
      # Imports the Git repository (and, when needed, the wiki repository)
      # for a project being migrated from Bitbucket Cloud.
      #
      # NOTE: EE prepends a module onto this class (`prepend_mod` below), so
      # method names here — including private ones such as
      # +validate_repository_size!+ — are part of the override surface.
      class RepositoryImporter
        include Loggable

        # @param project [Project] the project the repository is imported into
        def initialize(project)
          @project = project
        end

        # Imports the main repository (when the project repo is still empty)
        # and then the wiki.
        #
        # @return [true] on success
        # @raise [Gitlab::Git::CommandError] when the Git import fails; the
        #   repository content cache is expired first so retries see a
        #   consistent state
        def execute
          log_info(import_stage: 'import_repository', message: 'starting import')

          # Only clone when the repository has not been imported yet — this
          # keeps the operation idempotent across retries.
          if project.empty_repo?
            project.repository.import_repository(project.import_url)
            project.repository.fetch_as_mirror(project.import_url, refmap: refmap)
            validate_repository_size!
            update_clone_time
          end

          import_wiki

          log_info(import_stage: 'import_repository', message: 'finished import')

          true
        rescue ::Gitlab::Git::CommandError => e
          Gitlab::ErrorTracking.log_exception(
            e, import_stage: 'import_repository', message: 'failed import', error: e.message
          )

          # Expire cache to prevent scenarios such as:
          # 1. First import failed, but the repo was imported successfully, so +exists?+ returns true
          # 2. Retried import, repo is broken or not imported but +exists?+ still returns true
          project.repository.expire_content_cache if project.repository_exists?

          raise
        end

        private

        attr_reader :project

        def refmap
          # We omit :heads and :tags since these are fetched in the import_repository
          ['+refs/pull-requests/*/to:refs/merge-requests/*/head']
        end

        # Imports the wiki repository if one does not exist yet. Wiki failures
        # are logged but deliberately do not abort the overall import.
        def import_wiki
          return if project.wiki.repository_exists?

          # +wiki+ (a WikiFormatter) derives the wiki clone URL from the
          # project's import URL.
          project.wiki.repository.import_repository(wiki.import_url)
        rescue StandardError => e
          Gitlab::ErrorTracking.log_exception(
            e, import_stage: 'import_repository', message: 'failed to import wiki', error: e.message
          )
        end

        def wiki
          WikiFormatter.new(project)
        end

        # Records when the clone finished on the project.
        def update_clone_time
          project.touch(:last_repository_updated_at)
        end

        def validate_repository_size!
          # Defined in EE
        end
      end
    end
  end
end

Gitlab::BitbucketImport::Importers::RepositoryImporter.prepend_mod

View File

@ -0,0 +1,41 @@
# frozen_string_literal: true

module Gitlab
  module BitbucketImport
    # Mixin providing structured logging helpers for Bitbucket Cloud import
    # classes. Every entry is enriched with the class name and the project's
    # id and full path; the including class must expose a +project+ reader.
    module Loggable
      def log_debug(messages)
        write_log(:debug, messages)
      end

      def log_info(messages)
        write_log(:info, messages)
      end

      def log_warn(messages)
        write_log(:warn, messages)
      end

      def log_error(messages)
        write_log(:error, messages)
      end

      private

      # Single dispatch point: merges the base attributes into +messages+
      # and forwards the payload to the logger at the given severity.
      def write_log(severity, messages)
        logger.public_send(severity, log_data(messages))
      end

      def logger
        Gitlab::BitbucketImport::Logger
      end

      def log_data(messages)
        messages.merge(log_base_data)
      end

      def log_base_data
        {
          class: self.class.name,
          project_id: project.id,
          project_path: project.full_path
        }
      end
    end
  end
end

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true

module Gitlab
  module BitbucketImport
    # Logger for the Bitbucket Cloud importer. Inherits the shared import
    # logger and stamps every entry with the Bitbucket import type so log
    # lines can be filtered per importer.
    class Logger < ::Gitlab::Import::Logger
      def default_attributes
        attributes = super
        attributes.merge(import_type: :bitbucket)
      end
    end
  end
end

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true

module Gitlab
  module BitbucketImport
    # Entry point for the Bitbucket Cloud parallel importer: records the
    # Sidekiq JID on the import state and enqueues the first import stage.
    class ParallelImporter
      class << self
        # The import runs asynchronously in Sidekiq workers.
        def async?
          true
        end

        # This importer clones the repository itself (no separate repo step).
        def imports_repository?
          true
        end

        # Tracks the start of an import in the import metrics.
        def track_start_import(project)
          Gitlab::Import::Metrics.new(:bitbucket_importer, project).track_start_import
        end
      end

      # @param project [Project] the project being imported into
      def initialize(project)
        @project = project
      end

      # Stores the async JID and schedules the repository-import stage.
      #
      # @return [true]
      def execute
        Gitlab::Import::SetAsyncJid.set_jid(project.import_state)

        worker = Stage::ImportRepositoryWorker.with_status
        worker.perform_async(project.id)

        true
      end

      private

      attr_reader :project
    end
  end
end

View File

@ -35,11 +35,7 @@ module Gitlab
private
def match_rule(context)
if Feature.enabled?(:ci_support_include_rules_changes, context.project)
@rule_list.find { |rule| rule.matches?(context.pipeline, context) }
else
@rule_list.find { |rule| rule.matches?(nil, context) }
end
@rule_list.find { |rule| rule.matches?(context.pipeline, context) }
end
Result = Struct.new(:when) do

View File

@ -12,7 +12,7 @@ module Gitlab
IMPORT_TABLE = [
ImportSource.new('github', 'GitHub', Gitlab::GithubImport::ParallelImporter),
ImportSource.new('bitbucket', 'Bitbucket Cloud', Gitlab::BitbucketImport::Importer),
ImportSource.new('bitbucket_server', 'Bitbucket Server', Gitlab::BitbucketServerImport::ParallelImporter),
ImportSource.new('bitbucket_server', 'Bitbucket Server', Gitlab::BitbucketServerImport::Importer),
ImportSource.new('fogbugz', 'FogBugz', Gitlab::FogbugzImport::Importer),
ImportSource.new('git', 'Repository by URL', nil),
ImportSource.new('gitlab_project', 'GitLab export', Gitlab::ImportExport::Importer),
@ -20,9 +20,6 @@ module Gitlab
ImportSource.new('manifest', 'Manifest file', nil)
].freeze
LEGACY_IMPORT_TABLE = IMPORT_TABLE.deep_dup
LEGACY_IMPORT_TABLE[2].importer = Gitlab::BitbucketServerImport::Importer
class << self
prepend_mod_with('Gitlab::ImportSources') # rubocop: disable Cop/InjectEnterpriseEditionModule
@ -47,9 +44,17 @@ module Gitlab
end
def import_table
return IMPORT_TABLE if Feature.enabled?(:bitbucket_server_parallel_importer)
bitbucket_parallel_enabled = Feature.enabled?(:bitbucket_parallel_importer)
bitbucket_server_parallel_enabled = Feature.enabled?(:bitbucket_server_parallel_importer)
LEGACY_IMPORT_TABLE
return IMPORT_TABLE unless bitbucket_parallel_enabled || bitbucket_server_parallel_enabled
import_table = IMPORT_TABLE.deep_dup
import_table[1].importer = Gitlab::BitbucketImport::ParallelImporter if bitbucket_parallel_enabled
import_table[2].importer = Gitlab::BitbucketServerImport::ParallelImporter if bitbucket_server_parallel_enabled
import_table
end
end
end

View File

@ -11646,6 +11646,9 @@ msgstr ""
msgid "Collapse"
msgstr ""
msgid "Collapse AI-generated summary"
msgstr ""
msgid "Collapse all threads"
msgstr ""
@ -19261,6 +19264,9 @@ msgstr ""
msgid "Expand"
msgstr ""
msgid "Expand AI-generated summary"
msgstr ""
msgid "Expand all"
msgstr ""
@ -27346,6 +27352,9 @@ msgstr ""
msgid "LastPushEvent|at"
msgstr ""
msgid "Latest AI-generated summary"
msgstr ""
msgid "Latest changes"
msgstr ""

View File

@ -224,7 +224,7 @@
"vuex-vue3": "npm:vuex@4.0.0",
"web-streams-polyfill": "^3.2.1",
"web-vitals": "^0.2.4",
"webpack": "^4.46.0",
"webpack": "^4.47.0",
"webpack-bundle-analyzer": "^4.9.1",
"webpack-cli": "^4.10.0",
"webpack-stats-plugin": "^0.3.1",

View File

@ -6,15 +6,31 @@ FactoryBot.define do
dependency { association(:packages_dependency) }
dependency_type { :dependencies }
trait(:with_nuget_metadatum) do
trait :with_nuget_metadatum do
after :build do |link|
link.nuget_metadatum = build(:nuget_dependency_link_metadatum)
end
end
trait(:rubygems) do
trait :rubygems do
package { association(:rubygems_package) }
dependency { association(:packages_dependency, :rubygems) }
end
trait :dependencies do
dependency_type { :dependencies }
end
trait :dev_dependencies do
dependency_type { :devDependencies }
end
trait :bundle_dependencies do
dependency_type { :bundleDependencies }
end
trait :peer_dependencies do
dependency_type { :peerDependencies }
end
end
end

View File

@ -0,0 +1,49 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BitbucketImport::Importers::RepositoryImporter, feature_category: :importers do
let_it_be(:project) { create(:project, import_url: 'https://bitbucket.org/vim/vim.git') }
subject(:importer) { described_class.new(project) }
describe '#execute' do
context 'when repository is empty' do
it 'imports the repository' do
expect(project.repository).to receive(:import_repository).with(project.import_url)
expect(project.repository).to receive(:fetch_as_mirror).with(project.import_url,
refmap: ['+refs/pull-requests/*/to:refs/merge-requests/*/head'])
expect(project.last_repository_updated_at).to be_present
importer.execute
end
end
context 'when repository is not empty' do
before do
allow(project).to receive(:empty_repo?).and_return(false)
project.last_repository_updated_at = 1.day.ago
end
it 'does not import the repository' do
expect(project.repository).not_to receive(:import_repository)
expect { importer.execute }.not_to change { project.last_repository_updated_at }
end
end
context 'when a Git CommandError is raised and the repository exists' do
before do
allow(project.repository).to receive(:import_repository).and_raise(::Gitlab::Git::CommandError)
allow(project).to receive(:repository_exists?).and_return(true)
end
it 'expires repository caches' do
expect(project.repository).to receive(:expire_content_cache)
expect { importer.execute }.to raise_error(::Gitlab::Git::CommandError)
end
end
end
end

View File

@ -0,0 +1,43 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BitbucketImport::ParallelImporter, feature_category: :importers do
subject { described_class }
it { is_expected.to be_async }
describe '.track_start_import' do
it 'tracks the start of import' do
project = build_stubbed(:project)
expect_next_instance_of(Gitlab::Import::Metrics, :bitbucket_importer, project) do |metric|
expect(metric).to receive(:track_start_import)
end
subject.track_start_import(project)
end
end
describe '#execute', :clean_gitlab_redis_shared_state do
let_it_be(:project) { create(:project) }
let(:importer) { subject.new(project) }
before do
create(:import_state, :started, project: project)
end
it 'schedules the importing of the repository' do
expect(Gitlab::BitbucketImport::Stage::ImportRepositoryWorker)
.to receive_message_chain(:with_status, :perform_async).with(project.id)
expect(importer.execute).to eq(true)
end
it 'sets the JID in Redis' do
expect(Gitlab::Import::SetAsyncJid).to receive(:set_jid).with(project.import_state).and_call_original
importer.execute
end
end
end

View File

@ -114,14 +114,6 @@ RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_
let(:modified_paths) { ['README.md'] }
it { is_expected.to eq(false) }
context 'when FF `ci_support_include_rules_changes` is disabled' do
before do
stub_feature_flags(ci_support_include_rules_changes: false)
end
it { is_expected.to eq(true) }
end
end
end
@ -160,14 +152,6 @@ RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_
let(:rule_hashes) { [{ changes: { paths: ['file.txt'], compare_to: 'branch1' } }] }
it { is_expected.to eq(false) }
context 'when FF `ci_support_include_rules_changes` is disabled' do
before do
stub_feature_flags(ci_support_include_rules_changes: false)
end
it { is_expected.to eq(true) }
end
end
context 'when compare_to: is invalid' do
@ -176,16 +160,6 @@ RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_
it 'raises an error' do
expect { result }.to raise_error(described_class::InvalidIncludeRulesError, /compare_to is not a valid ref/)
end
context 'when FF `ci_support_include_rules_changes` is disabled' do
before do
stub_feature_flags(ci_support_include_rules_changes: false)
end
it 'does not raise an error' do
expect { result }.not_to raise_error
end
end
end
end
end

View File

@ -58,7 +58,7 @@ RSpec.describe Gitlab::ImportSources, feature_category: :importers do
describe '.importer' do
import_sources = {
'github' => Gitlab::GithubImport::ParallelImporter,
'bitbucket' => Gitlab::BitbucketImport::Importer,
'bitbucket' => Gitlab::BitbucketImport::ParallelImporter,
'bitbucket_server' => Gitlab::BitbucketServerImport::ParallelImporter,
'fogbugz' => Gitlab::FogbugzImport::Importer,
'git' => nil,
@ -87,30 +87,60 @@ RSpec.describe Gitlab::ImportSources, feature_category: :importers do
describe '.import_table' do
subject { described_class.import_table }
it 'returns the ParallelImporter for Bitbucket server' do
is_expected.to include(
described_class::ImportSource.new(
'bitbucket_server',
'Bitbucket Server',
Gitlab::BitbucketServerImport::ParallelImporter
)
)
end
context 'when flag is disabled' do
before do
stub_feature_flags(bitbucket_server_parallel_importer: false)
end
it 'returns the legacy Importer for Bitbucket server' do
describe 'Bitbucket server' do
it 'returns the ParallelImporter' do
is_expected.to include(
described_class::ImportSource.new(
'bitbucket_server',
'Bitbucket Server',
Gitlab::BitbucketServerImport::Importer
Gitlab::BitbucketServerImport::ParallelImporter
)
)
end
context 'when flag is disabled' do
before do
stub_feature_flags(bitbucket_server_parallel_importer: false)
end
it 'returns the legacy Importer' do
is_expected.to include(
described_class::ImportSource.new(
'bitbucket_server',
'Bitbucket Server',
Gitlab::BitbucketServerImport::Importer
)
)
end
end
end
describe 'Bitbucket cloud' do
it 'returns the ParallelImporter' do
is_expected.to include(
described_class::ImportSource.new(
'bitbucket',
'Bitbucket Cloud',
Gitlab::BitbucketImport::ParallelImporter
)
)
end
context 'when flag is disabled' do
before do
stub_feature_flags(bitbucket_parallel_importer: false)
end
it 'returns the legacy Importer' do
is_expected.to include(
described_class::ImportSource.new(
'bitbucket',
'Bitbucket Cloud',
Gitlab::BitbucketImport::Importer
)
)
end
end
end
end
@ -134,7 +164,7 @@ RSpec.describe Gitlab::ImportSources, feature_category: :importers do
end
describe 'imports_repository? checker' do
let(:allowed_importers) { %w[github gitlab_project bitbucket_server] }
let(:allowed_importers) { %w[github gitlab_project bitbucket bitbucket_server] }
it 'fails if any importer other than the allowed ones implements this method' do
current_importers = described_class.values.select { |kind| described_class.importer(kind).try(:imports_repository?) }

View File

@ -1,7 +1,28 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Packages::DependencyLink, type: :model do
RSpec.describe Packages::DependencyLink, type: :model, feature_category: :package_registry do
let_it_be(:package1) { create(:package) }
let_it_be(:package2) { create(:package) }
let_it_be(:dependency1) { create(:packages_dependency) }
let_it_be(:dependency2) { create(:packages_dependency) }
let_it_be(:dependency_link1) do
create(:packages_dependency_link, :dev_dependencies, package: package1, dependency: dependency1)
end
let_it_be(:dependency_link2) do
create(:packages_dependency_link, :dependencies, package: package1, dependency: dependency2)
end
let_it_be(:dependency_link3) do
create(:packages_dependency_link, :dependencies, package: package2, dependency: dependency1)
end
let_it_be(:dependency_link4) do
create(:packages_dependency_link, :dependencies, package: package2, dependency: dependency2)
end
describe 'relationships' do
it { is_expected.to belong_to(:package).inverse_of(:dependency_links) }
it { is_expected.to belong_to(:dependency).inverse_of(:dependency_links) }
@ -53,4 +74,49 @@ RSpec.describe Packages::DependencyLink, type: :model do
end
end
end
describe '.dependency_ids_grouped_by_type' do
let(:packages) { Packages::Package.where(id: [package1.id, package2.id]) }
subject { described_class.dependency_ids_grouped_by_type(packages) }
it 'aggregates dependencies by type', :aggregate_failures do
result = Gitlab::Json.parse(subject.to_json)
expect(result.count).to eq(2)
expect(result).to include(
hash_including(
'package_id' => package1.id,
'dependency_ids_by_type' => {
'1' => [dependency2.id],
'2' => [dependency1.id]
}
),
hash_including(
'package_id' => package2.id,
'dependency_ids_by_type' => {
'1' => [dependency1.id, dependency2.id]
}
)
)
end
end
describe '.for_packages' do
let(:packages) { Packages::Package.where(id: package1.id) }
subject { described_class.for_packages(packages) }
it 'returns dependency links for selected packages' do
expect(subject).to contain_exactly(dependency_link1, dependency_link2)
end
end
describe '.select_dependency_id' do
subject { described_class.select_dependency_id }
it 'returns only dependency_id' do
expect(subject[0].attributes).to eq('dependency_id' => dependency1.id, 'id' => nil)
end
end
end

View File

@ -3,6 +3,33 @@
require 'spec_helper'
RSpec.describe ProjectAuthorization, feature_category: :groups_and_projects do
describe 'create' do
let_it_be(:user) { create(:user) }
let_it_be(:project_1) { create(:project) }
let(:project_auth) do
build(
:project_authorization,
user: user,
project: project_1
)
end
it 'sets is_unique' do
expect { project_auth.save! }.to change { project_auth.is_unique }.to(true)
end
context 'with feature disabled' do
before do
stub_feature_flags(write_project_authorizations_is_unique: false)
end
it 'does not set is_unique' do
expect { project_auth.save! }.not_to change { project_auth.is_unique }.from(nil)
end
end
end
describe 'unique user, project authorizations' do
let_it_be(:user) { create(:user) }
let_it_be(:project_1) { create(:project) }

View File

@ -85,7 +85,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
apply_project_authorization_changes
expect(user.project_authorizations.pluck(:user_id, :project_id,
:access_level)).to match_array(authorizations_to_add.map(&:values))
:access_level, :is_unique)).to match_array(authorizations_to_add.map(&:values))
end
end
@ -101,7 +101,25 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
apply_project_authorization_changes
expect(user.project_authorizations.pluck(:user_id, :project_id,
:access_level)).to match_array(authorizations_to_add.map(&:values))
:access_level, :is_unique)).to match_array(authorizations_to_add.map(&:values))
end
it 'writes is_unique' do
apply_project_authorization_changes
expect(user.project_authorizations.pluck(:is_unique)).to all(be(true))
end
context 'with feature disabled' do
before do
stub_feature_flags(write_project_authorizations_is_unique: false)
end
it 'does not write is_unique' do
apply_project_authorization_changes
expect(user.project_authorizations.pluck(:is_unique)).to all(be(nil))
end
end
it_behaves_like 'logs the detail', batch_size: 2

View File

@ -70,6 +70,30 @@ RSpec.describe ::Packages::Npm::GenerateMetadataService, feature_category: :pack
it { expect(subject.dig(package2.version, dependency_type)).to be nil }
end
context 'when generate dependencies' do
let(:packages) { ::Packages::Package.where(id: package1.id) }
it 'loads grouped dependency links', :aggregate_failures do
expect(::Packages::DependencyLink).to receive(:dependency_ids_grouped_by_type).and_call_original
expect(::Packages::Package).not_to receive(:including_dependency_links)
subject
end
context 'when npm_optimize_metadata_generation disabled' do
before do
stub_feature_flags(npm_optimize_metadata_generation: false)
end
it 'does not load grouped dependency links', :aggregate_failures do
expect(::Packages::DependencyLink).not_to receive(:dependency_ids_grouped_by_type)
expect(::Packages::Package).to receive(:including_dependency_links).and_call_original
subject
end
end
end
end
context 'for metadatum' do

View File

@ -163,72 +163,100 @@ RSpec.describe Projects::ImportService, feature_category: :importers do
context 'when importer does not support refmap' do
it 'succeeds if repository import is successful' do
expect(project.repository).to receive(:import_repository).and_return(true)
expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
expect_next_instance_of(Gitlab::BitbucketImport::ParallelImporter) do |importer|
expect(importer).to receive(:execute).and_return(true)
end
expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
expect(service).to receive(:execute).and_return(status: :success)
end
result = subject.execute
expect(result[:status]).to eq :success
end
it 'fails if repository import fails' do
expect(project.repository)
.to receive(:import_repository)
.with('https://bitbucket.org/vim/vim.git', resolved_address: '')
.and_raise(Gitlab::Git::CommandError, 'Failed to import the repository /a/b/c')
expect_next_instance_of(Gitlab::BitbucketImport::ParallelImporter) do |importer|
expect(importer).to receive(:execute)
.and_raise(Gitlab::Git::CommandError, 'Failed to import the repository /a/b/c')
end
result = subject.execute
expect(result[:status]).to eq :error
expect(result[:message]).to eq "Error importing repository #{project.safe_import_url} into #{project.full_path} - Failed to import the repository [FILTERED]"
end
end
context 'when lfs import fails' do
it 'logs the error' do
error_message = 'error message'
expect(project.repository).to receive(:import_repository).and_return(true)
expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
expect(importer).to receive(:execute).and_return(true)
context 'when bitbucket_parallel_importer feature flag is disabled' do
before do
stub_feature_flags(bitbucket_parallel_importer: false)
end
expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
expect(service).to receive(:execute).and_return(status: :error, message: error_message)
it 'succeeds if repository import is successful' do
expect(project.repository).to receive(:import_repository).and_return(true)
expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
expect(importer).to receive(:execute).and_return(true)
end
expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
expect(service).to receive(:execute).and_return(status: :success)
end
result = subject.execute
expect(result[:status]).to eq :success
end
expect(Gitlab::AppLogger).to receive(:error).with("The Lfs import process failed. #{error_message}")
it 'fails if repository import fails' do
expect(project.repository)
.to receive(:import_repository)
.with('https://bitbucket.org/vim/vim.git', resolved_address: '')
.and_raise(Gitlab::Git::CommandError, 'Failed to import the repository /a/b/c')
subject.execute
end
end
result = subject.execute
context 'when repository import scheduled' do
before do
expect(project.repository).to receive(:import_repository).and_return(true)
allow(subject).to receive(:import_data)
end
expect(result[:status]).to eq :error
expect(result[:message]).to eq "Error importing repository #{project.safe_import_url} into #{project.full_path} - Failed to import the repository [FILTERED]"
end
it 'downloads lfs objects if lfs_enabled is enabled for project' do
allow(project).to receive(:lfs_enabled?).and_return(true)
context 'when lfs import fails' do
it 'logs the error' do
error_message = 'error message'
expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute)
expect(project.repository).to receive(:import_repository).and_return(true)
subject.execute
end
expect_next_instance_of(Gitlab::BitbucketImport::Importer) do |importer|
expect(importer).to receive(:execute).and_return(true)
end
it 'does not download lfs objects if lfs_enabled is not enabled for project' do
allow(project).to receive(:lfs_enabled?).and_return(false)
expect_any_instance_of(Projects::LfsPointers::LfsImportService).not_to receive(:execute)
expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
expect(service).to receive(:execute).and_return(status: :error, message: error_message)
end
subject.execute
expect(Gitlab::AppLogger).to receive(:error).with("The Lfs import process failed. #{error_message}")
subject.execute
end
end
context 'when repository import scheduled' do
before do
expect(project.repository).to receive(:import_repository).and_return(true)
allow(subject).to receive(:import_data)
end
it 'downloads lfs objects if lfs_enabled is enabled for project' do
allow(project).to receive(:lfs_enabled?).and_return(true)
expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute)
subject.execute
end
it 'does not download lfs objects if lfs_enabled is not enabled for project' do
allow(project).to receive(:lfs_enabled?).and_return(false)
expect_any_instance_of(Projects::LfsPointers::LfsImportService).not_to receive(:execute)
subject.execute
end
end
end
end
end

View File

@@ -51,6 +51,10 @@ ci_stages:
- index_ci_stages_on_pipeline_id
index_ci_stages_on_pipeline_id_and_position:
- index_ci_stages_on_pipeline_id
index_ci_stages_on_pipeline_id_convert_to_bigint_and_name:
- index_ci_stages_on_pipeline_id_convert_to_bigint
index_ci_stages_on_pipeline_id_convert_to_bigint_and_position:
- index_ci_stages_on_pipeline_id_convert_to_bigint
dast_site_tokens:
index_dast_site_token_on_project_id_and_url:
- index_dast_site_tokens_on_project_id

View File

@@ -0,0 +1,29 @@
# frozen_string_literal: true
# Shared examples for Bitbucket importer stage workers.
# The including spec must define `project` (e.g. via let_it_be).
RSpec.shared_examples Gitlab::BitbucketImport::StageMethods do
  describe '.sidekiq_retries_exhausted' do
    # Sidekiq hands the original job hash to the retries-exhausted hook.
    let(:job) { { 'args' => [project.id] } }

    it 'tracks the import failure' do
      exception = StandardError.new

      expect(Gitlab::Import::ImportFailureService)
        .to receive(:track)
        .with(project_id: project.id, exception: exception, fail_import: true)

      described_class.sidekiq_retries_exhausted_block.call(job, exception)
    end
  end

  describe '.perform' do
    subject(:stage_worker) { described_class.new }

    it 'executes the import' do
      # One structured log line before and one after the import runs.
      expect(stage_worker).to receive(:import).with(project).once
      expect(Gitlab::BitbucketImport::Logger).to receive(:info).twice

      stage_worker.perform(project.id)
    end
  end
end

View File

@@ -256,6 +256,9 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Geo::VerificationTimeoutWorker' => false,
'Geo::VerificationWorker' => 3,
'GeoRepositoryDestroyWorker' => 3,
'Gitlab::BitbucketImport::AdvanceStageWorker' => 3,
'Gitlab::BitbucketImport::Stage::FinishImportWorker' => 3,
'Gitlab::BitbucketImport::Stage::ImportRepositoryWorker' => 3,
'Gitlab::BitbucketServerImport::AdvanceStageWorker' => 3,
'Gitlab::BitbucketServerImport::Stage::FinishImportWorker' => 3,
'Gitlab::BitbucketServerImport::Stage::ImportLfsObjectsWorker' => 3,

View File

@@ -0,0 +1,24 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BitbucketImport::AdvanceStageWorker, feature_category: :importers do
  subject(:worker) { described_class.new }

  let(:project) { create(:project) }
  let(:import_state) { create(:import_state, project: project, jid: '123') }

  describe '#find_import_state' do
    it 'returns a ProjectImportState' do
      import_state.update_column(:status, 'started')

      state = worker.find_import_state(project.id)

      expect(state).to be_an_instance_of(ProjectImportState)
      # Only the columns the stage workers actually need are selected.
      expect(state.attributes.keys).to contain_exactly('id', 'jid')
    end

    it 'returns nil if the project import is not running' do
      expect(worker.find_import_state(project.id)).to be_nil
    end
  end
end

View File

@@ -0,0 +1,27 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BitbucketImport::Stage::FinishImportWorker, feature_category: :importers do
  let_it_be(:project) { create(:project, :import_started) }

  subject(:finish_worker) { described_class.new }

  it_behaves_like Gitlab::BitbucketImport::StageMethods

  it 'does not abort on failure' do
    expect(finish_worker.abort_on_failure).to be_falsey
  end

  describe '#perform' do
    it 'finalises the import process' do
      # A Metrics instance is built for :bitbucket_importer and must record
      # that the import finished.
      expect_next_instance_of(Gitlab::Import::Metrics, :bitbucket_importer, project) do |metric|
        expect(metric).to receive(:track_finished_import)
      end

      finish_worker.perform(project.id)

      expect(project.import_state.reload).to be_finished
    end
  end
end

View File

@@ -0,0 +1,21 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BitbucketImport::Stage::ImportRepositoryWorker, feature_category: :importers do
  let_it_be(:project) { create(:project, :import_started) }

  subject(:repository_worker) { described_class.new }

  it_behaves_like Gitlab::BitbucketImport::StageMethods

  it 'executes the importer and enqueues FinishImportWorker' do
    # The repository importer runs first; on success the finish stage is
    # scheduled exactly once for the same project.
    expect(Gitlab::BitbucketImport::Importers::RepositoryImporter)
      .to receive_message_chain(:new, :execute).and_return(true)
    expect(Gitlab::BitbucketImport::Stage::FinishImportWorker)
      .to receive(:perform_async).with(project.id).once.and_return(true)

    repository_worker.perform(project.id)
  end
end

View File

@@ -13799,10 +13799,10 @@ webpack-stats-plugin@^0.3.1:
resolved "https://registry.yarnpkg.com/webpack-stats-plugin/-/webpack-stats-plugin-0.3.1.tgz#1103c39a305a4e6ba15d5078db84bc0b35447417"
integrity sha512-pxqzFE055NlNTlNyfDG3xlB2QwT1EWdm/CF5dCJI/e+rRHVxrWhWg1rf1lfsWhI1/EePv8gi/A36YxO/+u0FgQ==
webpack@^4.46.0:
version "4.46.0"
resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.46.0.tgz#bf9b4404ea20a073605e0a011d188d77cb6ad542"
integrity sha512-6jJuJjg8znb/xRItk7bkT0+Q7AHCYjjFnvKIWQPkNIOyRqoCGvkOs0ipeQzrqz4l5FtN5ZI/ukEHroeX/o1/5Q==
webpack@^4.47.0:
version "4.47.0"
resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.47.0.tgz#8b8a02152d7076aeb03b61b47dad2eeed9810ebc"
integrity sha512-td7fYwgLSrky3fI1EuU5cneU4+pbH6GgOfuKNS1tNPcfdGinGELAqsb/BP4nnvZyKSG2i/xFGU7+n2PvZA8HJQ==
dependencies:
"@webassemblyjs/ast" "1.9.0"
"@webassemblyjs/helper-module-context" "1.9.0"