Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-03-17 15:11:22 +00:00
parent c83db9983a
commit a1e798edcd
92 changed files with 1441 additions and 1252 deletions

View File

@ -154,6 +154,7 @@ or use the checklist below in this same issue.
/label <group-label>
/label ~"feature flag"
/relate <feature-issue-link>
<!-- Uncomment the appropriate type label
/label ~"type::feature" ~"feature::addition"
/label ~"type::maintenance"

View File

@ -20,21 +20,3 @@ RSpec/BeNil:
- 'spec/lib/gitlab/blame_spec.rb'
- 'spec/lib/gitlab/ci/config/feature_flags_spec.rb'
- 'spec/lib/gitlab/ci/parsers/security/common_spec.rb'
- 'spec/lib/gitlab/ci/secure_files/cer_spec.rb'
- 'spec/lib/gitlab/ci/secure_files/mobile_provision_spec.rb'
- 'spec/lib/gitlab/ci/secure_files/p12_spec.rb'
- 'spec/lib/gitlab/class_attributes_spec.rb'
- 'spec/lib/gitlab/current/organization_spec.rb'
- 'spec/lib/gitlab/database/background_migration/batched_migration_spec.rb'
- 'spec/lib/gitlab/database/convert_feature_category_to_group_label_spec.rb'
- 'spec/lib/gitlab/email/incoming_email_spec.rb'
- 'spec/lib/gitlab/email/service_desk/custom_email_spec.rb'
- 'spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb'
- 'spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb'
- 'spec/lib/gitlab/identifier_spec.rb'
- 'spec/lib/gitlab/jira_import/labels_importer_spec.rb'
- 'spec/lib/gitlab/jira_import_spec.rb'
- 'spec/lib/gitlab/mail_room/mail_room_spec.rb'
- 'spec/lib/gitlab/metrics/subscribers/external_http_spec.rb'
- 'spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb'
- 'spec/lib/gitlab/project_template_spec.rb'

View File

@ -38,11 +38,6 @@ export default {
type: Object,
required: true,
},
isBlobPage: {
type: Boolean,
required: false,
default: false,
},
hideViewerSwitcher: {
type: Boolean,
required: false,
@ -178,7 +173,7 @@ export default {
<table-of-contents v-if="glFeatures.blobOverflowMenu" class="gl-pr-2" />
<web-ide-link
v-if="showWebIdeLink"
v-if="!glFeatures.blobOverflowMenu && showWebIdeLink"
:show-edit-button="!isBinary"
:button-variant="editButtonVariant"
class="sm:!gl-ml-0"
@ -203,7 +198,6 @@ export default {
<slot name="actions"></slot>
<default-actions
v-if="!glFeatures.blobOverflowMenu || (glFeatures.blobOverflowMenu && !isBlobPage)"
:raw-path="blob.externalStorageUrl || blob.rawPath"
:active-viewer="viewer"
:has-render-error="hasRenderError"

View File

@ -2,6 +2,7 @@
import { GlButton, GlButtonGroup, GlTooltipDirective } from '@gitlab/ui';
import { sprintf, s__ } from '~/locale';
import { setUrlParams, relativePathToAbsolute, getBaseURL } from '~/lib/utils/url_utility';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import {
BTN_COPY_CONTENTS_TITLE,
BTN_DOWNLOAD_TITLE,
@ -18,6 +19,7 @@ export default {
directives: {
GlTooltip: GlTooltipDirective,
},
mixins: [glFeatureFlagMixin()],
inject: {
blobHash: {
default: '',
@ -103,7 +105,10 @@ export default {
};
</script>
<template>
<gl-button-group data-testid="default-actions-container">
<gl-button-group
:class="{ 'gl-hidden sm:gl-inline-flex': glFeatures.blobOverflowMenu }"
data-testid="default-actions-container"
>
<gl-button
v-if="!isEmpty && showCopyButton"
v-gl-tooltip.hover

View File

@ -2554,7 +2554,10 @@
"type": "string"
},
{
"$ref": "#/definitions/stepReference"
"$ref": "#/definitions/stepGitReference"
},
{
"$ref": "#/definitions/stepOciReference"
}
]
}
@ -2563,31 +2566,6 @@
{
"description": "Run a sequence of steps.",
"oneOf": [
{
"type": "object",
"additionalProperties": false,
"required": [
"steps"
],
"properties": {
"env": {
"$ref": "#/definitions/stepNamedStrings"
},
"steps": {
"description": "Deprecated. Use `run` instead.",
"type": "array",
"items": {
"$ref": "#/definitions/step"
}
},
"outputs": {
"$ref": "#/definitions/stepNamedValues"
},
"delegate": {
"type": "string"
}
}
},
{
"type": "object",
"additionalProperties": false,
@ -2707,36 +2685,94 @@
},
"additionalProperties": false
},
"stepReference": {
"stepGitReference": {
"type": "object",
"description": "Git a reference to a step in a Git repository.",
"description": "GitReference is a reference to a step in a Git repository.",
"additionalProperties": false,
"required": [
"git"
],
"properties": {
"git": {
"$ref": "#/definitions/stepGitReference"
"type": "object",
"additionalProperties": false,
"required": [
"url",
"rev"
],
"properties": {
"url": {
"type": "string"
},
"dir": {
"type": "string"
},
"rev": {
"type": "string"
},
"file": {
"type": "string"
}
}
}
}
},
"stepGitReference": {
"stepOciReference": {
"type": "object",
"description": "GitReference is a reference to a step in a Git repository containing the full set of configuration options.",
"description": "OCIReference is a reference to a step hosted in an OCI repository.",
"additionalProperties": false,
"required": [
"url",
"rev"
"oci"
],
"properties": {
"url": {
"type": "string"
},
"dir": {
"type": "string"
},
"rev": {
"type": "string"
"oci": {
"type": "object",
"additionalProperties": false,
"required": [
"registry",
"repository",
"tag"
],
"properties": {
"registry": {
"type": "string",
"description": "The <host>[:<port>] of the container registry server.",
"examples": [
"registry.gitlab.com"
]
},
"repository": {
"type": "string",
"description": "A path within the registry containing related OCI images. Typically the namespace, project, and image name.",
"examples": [
"my_group/my_project/image"
]
},
"tag": {
"type": "string",
"description": "A pointer to the image manifest hosted in the OCI repository.",
"examples": [
"latest",
"1",
"1.5",
"1.5.0"
]
},
"dir": {
"type": "string",
"description": "A directory inside the OCI image where the step can be found.",
"examples": [
"/my_steps/hello_world"
]
},
"file": {
"type": "string",
"description": "The name of the file that defines the step, defaults to step.yml.",
"examples": [
"step.yml"
]
}
}
}
}
},

View File

@ -60,12 +60,22 @@ export const useDiffsList = defineStore('diffsList', {
streamRemainingDiffs(url) {
return this.withDebouncedAbortController(async ({ signal }, previousController) => {
this.status = statuses.fetching;
const { body } = await fetch(url, { signal });
let request;
let streamSignal = signal;
if (window.gl.rapidDiffsPreload) {
const { controller, streamRequest } = window.gl.rapidDiffsPreload;
this.loadingController = controller;
request = streamRequest;
streamSignal = controller.signal;
} else {
request = fetch(url, { signal });
}
const { body } = await request;
if (previousController) previousController.abort();
await this.renderDiffsStream(
toPolyfillReadable(body),
document.querySelector('#js-stream-container'),
signal,
streamSignal,
);
performanceMarkAndMeasure({
mark: 'rapid-diffs-list-loaded',

View File

@ -369,7 +369,12 @@ export default {
</div>
<!-- Blob controls -->
<blob-controls :project-path="projectPath" :ref-type="getRefType" :is-binary="isBinary" />
<blob-controls
:project-path="projectPath"
:project-id-as-number="projectIdAsNumber"
:ref-type="getRefType"
:is-binary="isBinary"
/>
</div>
</section>
</template>

View File

@ -21,10 +21,15 @@ import { sanitize } from '~/lib/dompurify';
import { InternalEvents } from '~/tracking';
import { FIND_FILE_BUTTON_CLICK } from '~/tracking/constants';
import { updateElementsVisibility } from '~/repository/utils/dom';
import {
showSingleFileEditorForkSuggestion,
showWebIdeForkSuggestion,
} from '~/repository/utils/fork_suggestion_utils';
import blobControlsQuery from '~/repository/queries/blob_controls.query.graphql';
import userGitpodInfo from '~/repository/queries/user_gitpod_info.query.graphql';
import { getRefType } from '~/repository/utils/ref_type';
import OpenMrBadge from '~/repository/components/header_area/open_mr_badge.vue';
import { TEXT_FILE_TYPE, DEFAULT_BLOB_INFO } from '../../constants';
import { TEXT_FILE_TYPE, EMPTY_FILE, DEFAULT_BLOB_INFO } from '../../constants';
import OverflowMenu from './blob_overflow_menu.vue';
export default {
@ -40,6 +45,7 @@ export default {
OpenMrBadge,
GlButton,
OverflowMenu,
WebIdeLink: () => import('ee_else_ce/vue_shared/components/web_ide_link.vue'),
},
directives: {
GlTooltip: GlTooltipDirective,
@ -63,8 +69,14 @@ export default {
createAlert({ message: this.$options.i18n.errorMessage });
},
},
currentUser: {
query: userGitpodInfo,
error() {
createAlert({ message: this.$options.i18n.errorMessage });
},
},
},
inject: ['currentRef'],
inject: ['currentRef', 'gitpodEnabled'],
provide() {
return {
blobInfo: computed(() => this.blobInfo ?? DEFAULT_BLOB_INFO.repository.blobs.nodes[0]),
@ -76,6 +88,10 @@ export default {
type: String,
required: true,
},
projectIdAsNumber: {
type: Number,
required: true,
},
refType: {
type: String,
required: false,
@ -90,6 +106,7 @@ export default {
data() {
return {
project: {},
currentUser: {},
};
},
computed: {
@ -105,6 +122,9 @@ export default {
blobInfo() {
return this.project?.repository?.blobs?.nodes[0] || {};
},
userPermissions() {
return this.project?.userPermissions || DEFAULT_BLOB_INFO.userPermissions;
},
storageInfo() {
const { storedExternally, externalStorage } = this.blobInfo;
return {
@ -121,7 +141,11 @@ export default {
return this.storageInfo.isLfs;
},
isBinaryFileType() {
return this.isBinary || this.blobInfo.simpleViewer?.fileType !== TEXT_FILE_TYPE;
return (
this.isBinary ||
(this.blobInfo.simpleViewer?.fileType !== TEXT_FILE_TYPE &&
this.blobInfo.simpleViewer?.fileType !== EMPTY_FILE)
);
},
rawPath() {
return this.blobInfo.externalStorageUrl || this.blobInfo.rawPath;
@ -153,6 +177,23 @@ export default {
const description = this.$options.i18n.permalinkTooltip;
return this.formatTooltipWithShortcut(description, this.shortcuts.permalink);
},
showWebIdeLink() {
return !this.blobInfo.archived && this.blobInfo.editBlobPath;
},
shouldShowSingleFileEditorForkSuggestion() {
return showSingleFileEditorForkSuggestion(
this.userPermissions,
this.isUsingLfs,
this.blobInfo.canModifyBlob,
);
},
shouldShowWebIdeForkSuggestion() {
return showWebIdeForkSuggestion(
this.userPermissions,
this.isUsingLfs,
this.blobInfo.canModifyBlobWithWebIde,
);
},
},
watch: {
showBlobControls(shouldShow) {
@ -230,10 +271,33 @@ export default {
{{ $options.i18n.permalink }}
</gl-button>
<web-ide-link
v-if="glFeatures.blobOverflowMenu && showWebIdeLink"
:show-edit-button="!isBinaryFileType"
class="!gl-ml-auto gl-mr-0"
:edit-url="blobInfo.editBlobPath"
:web-ide-url="blobInfo.ideEditPath"
:needs-to-fork="shouldShowSingleFileEditorForkSuggestion"
:needs-to-fork-with-web-ide="shouldShowWebIdeForkSuggestion"
:show-pipeline-editor-button="Boolean(blobInfo.pipelineEditorPath)"
:pipeline-editor-url="blobInfo.pipelineEditorPath"
:gitpod-url="blobInfo.gitpodBlobUrl"
:show-gitpod-button="gitpodEnabled"
:gitpod-enabled="currentUser && currentUser.gitpodEnabled"
:project-path="projectPath"
:project-id="projectIdAsNumber"
:user-preferences-gitpod-path="currentUser && currentUser.preferencesGitpodPath"
:user-profile-enable-gitpod-path="currentUser && currentUser.profileEnableGitpodPath"
is-blob
disable-fork-modal
v-on="$listeners"
/>
<overflow-menu
v-if="!isLoadingRepositoryBlob && glFeatures.blobOverflowMenu"
:user-permissions="userPermissions"
:project-path="projectPath"
:is-binary="isBinaryFileType"
:is-binary-file-type="isBinaryFileType"
:override-copy="true"
:is-empty-repository="project.repository.empty"
:is-using-lfs="isUsingLfs"

View File

@ -36,7 +36,7 @@ export default {
type: Boolean,
required: true,
},
isBinary: {
isBinaryFileType: {
type: Boolean,
required: true,
},
@ -90,7 +90,7 @@ export default {
};
},
showCopyButton() {
return !this.hasRenderError && !this.isBinary;
return !this.hasRenderError && !this.isBinaryFileType;
},
copyDisabled() {
return this.activeViewerType === 'rich';
@ -130,7 +130,7 @@ export default {
</script>
<template>
<gl-disclosure-dropdown-group bordered>
<gl-disclosure-dropdown-group bordered class="sm:gl-hidden">
<gl-disclosure-dropdown-item
v-if="showCopyButton"
data-testid="copy-item"
@ -138,7 +138,11 @@ export default {
class="js-copy-blob-source-btn"
@action="onCopy"
/>
<gl-disclosure-dropdown-item v-if="!isBinary" data-testid="open-raw-item" :item="openRawItem" />
<gl-disclosure-dropdown-item
v-if="!isBinaryFileType"
data-testid="open-raw-item"
:item="openRawItem"
/>
<gl-disclosure-dropdown-item
v-if="!isEmpty && canDownloadCode"
data-test="download-item"

View File

@ -37,11 +37,15 @@ export default {
};
},
props: {
userPermissions: {
type: Object,
required: true,
},
projectPath: {
type: String,
required: true,
},
isBinary: {
isBinaryFileType: {
type: Boolean,
required: false,
default: false,
@ -73,7 +77,6 @@ export default {
},
update({ project }) {
this.pathLocks = project?.pathLocks || DEFAULT_BLOB_INFO.pathLocks;
this.userPermissions = project?.userPermissions;
},
error() {
createAlert({ message: this.$options.i18n.fetchError });
@ -82,7 +85,6 @@ export default {
},
data() {
return {
userPermissions: DEFAULT_BLOB_INFO.userPermissions,
pathLocks: DEFAULT_BLOB_INFO.pathLocks,
isLoggedIn: isLoggedIn(),
};
@ -126,6 +128,7 @@ export default {
data-testid="default-actions-container"
:toggle-text="$options.i18n.dropdownLabel"
text-sr-only
class="gl-mr-0"
category="tertiary"
>
<permalink-dropdown-item :permalink-path="blobInfo.permalinkPath" />
@ -141,7 +144,7 @@ export default {
<blob-default-actions-group
:active-viewer-type="activeViewerType"
:has-render-error="hasRenderError"
:is-binary="isBinary"
:is-binary-file-type="isBinaryFileType"
:is-empty="isEmptyRepository"
:override-copy="overrideCopy"
@copy="onCopy"

View File

@ -2,6 +2,13 @@ query getBlobControls($projectPath: ID!, $filePath: String!, $ref: String!, $ref
project(fullPath: $projectPath) {
__typename
id
userPermissions {
__typename
pushCode
downloadCode
createMergeRequestIn
forkProject
}
repository {
__typename
empty
@ -27,6 +34,10 @@ query getBlobControls($projectPath: ID!, $filePath: String!, $ref: String!, $ref
canModifyBlob
canModifyBlobWithWebIde
forkAndViewPath
editBlobPath
ideEditPath
pipelineEditorPath
gitpodBlobUrl
simpleViewer {
__typename
fileType

View File

@ -0,0 +1,9 @@
query getUserGitpodInfo {
currentUser {
__typename
id
gitpodEnabled
preferencesGitpodPath
profileEnableGitpodPath
}
}

View File

@ -20,6 +20,7 @@ import {
WIDGET_TYPE_CRM_CONTACTS,
WORK_ITEM_TYPE_VALUE_EPIC,
WORK_ITEM_TYPE_VALUE_MAP,
WIDGET_TYPE_CUSTOM_FIELDS,
} from '../constants';
import { findHierarchyWidgetDefinition } from '../utils';
import workItemParticipantsQuery from '../graphql/work_item_participants.query.graphql';
@ -190,8 +191,11 @@ export default {
workItemCrmContacts() {
return this.isWidgetPresent(WIDGET_TYPE_CRM_CONTACTS) && this.glFeatures.workItemsAlpha;
},
customFields() {
return this.isWidgetPresent(WIDGET_TYPE_CUSTOM_FIELDS)?.customFieldValues;
},
showWorkItemCustomFields() {
return this.glFeatures.customFieldsFeature;
return this.glFeatures.customFieldsFeature && this.customFields;
},
},
methods: {
@ -317,6 +321,7 @@ export default {
v-if="showWorkItemCustomFields"
:work-item-id="workItem.id"
:work-item-type="workItemType"
:custom-fields="customFields"
:full-path="fullPath"
:can-update="canUpdateMetadata"
:is-group="isGroup"

View File

@ -1,3 +1,15 @@
- if @preload
- helpers.add_page_startup_api_call @metadata_endpoint
- if @stream_url
- helpers.content_for :startup_js do
= javascript_tag nonce: content_security_policy_nonce do
:plain
var controller = new AbortController();
window.gl.rapidDiffsPreload = {
controller: controller,
streamRequest: fetch('#{Gitlab::UrlSanitizer.sanitize(@stream_url)}', { signal: controller.signal })
}
.rd-app{ data: { rapid_diffs: true, reload_stream_url: @reload_stream_url, metadata_endpoint: @metadata_endpoint } }
.rd-app-header
.rd-app-settings

View File

@ -11,7 +11,8 @@ module RapidDiffs
show_whitespace:,
diff_view:,
update_user_endpoint:,
metadata_endpoint:
metadata_endpoint:,
preload: true
)
@diffs_slice = diffs_slice
@reload_stream_url = reload_stream_url
@ -20,6 +21,7 @@ module RapidDiffs
@diff_view = diff_view
@update_user_endpoint = update_user_endpoint
@metadata_endpoint = metadata_endpoint
@preload = preload
end
def initial_sidebar_width

View File

@ -63,3 +63,5 @@ module Types
end
end
end
Types::Organizations::OrganizationType.prepend_mod

View File

@ -11,6 +11,7 @@ module Ci
include BatchNullifyDependentAssociations
include Gitlab::Utils::StrongMemoize
MAX_INPUTS = 20
VALID_REF_FORMAT_REGEX = %r{\A(#{Gitlab::Git::TAG_REF_PREFIX}|#{Gitlab::Git::BRANCH_REF_PREFIX})[\S]+}
SORT_ORDERS = {
@ -45,6 +46,11 @@ module Ci
validates :variables, nested_attributes_duplicates: true
validates :inputs, nested_attributes_duplicates: { child_attributes: %i[name] }
validates :inputs, length: {
maximum: MAX_INPUTS,
message: ->(*) { _('exceeds the limit of %{count}.') }
}
strip_attributes! :cron
scope :active, -> { where(active: true) }

View File

@ -42,6 +42,17 @@ module ContainerRegistry
.exists?
end
def self.for_action_exists?(action:, access_level:, repository_path:)
return false if [access_level, repository_path].any?(&:blank?)
raise ArgumentError, 'action must be :push or :delete' unless %i[push delete].include?(action)
minimum_access_level_column = "minimum_access_level_for_#{action}"
for_repository_path(repository_path)
.where(":access_level < #{minimum_access_level_column}", access_level: access_level)
.exists?
end
##
# Accepts a list of projects and repository paths and returns a result set
# indicating whether the repository path is protected.

View File

@ -0,0 +1,62 @@
# frozen_string_literal: true
module ContainerRegistry
module Protection
class CheckRuleExistenceService < BaseProjectService
SUCCESS_RESPONSE_RULE_EXISTS = ServiceResponse.success(payload: { protection_rule_exists?: true }).freeze
SUCCESS_RESPONSE_RULE_DOESNT_EXIST = ServiceResponse.success(payload: { protection_rule_exists?: false }).freeze
ERROR_RESPONSE_UNAUTHORIZED = ServiceResponse.error(message: 'Unauthorized', reason: :unauthorized).freeze
def self.for_delete(params:, **args)
new(params: params.merge(action: :delete), **args)
end
def initialize(params:, **args)
raise(ArgumentError, 'Invalid param :action') unless params[:action].in?([:push, :delete])
super
end
def execute
return ERROR_RESPONSE_UNAUTHORIZED unless current_user_can_do_action?
return service_response_for(check_rule_exists_for_user) if current_user.is_a?(User)
return service_response_for(check_rule_exists_for_deploy_token) if current_user.is_a?(DeployToken)
raise ArgumentError, 'Invalid user'
end
private
def current_user_can_do_action?
if params[:action] == :push
can?(current_user, :create_container_image, project)
else
can?(current_user, :destroy_container_image, project)
end
end
def check_rule_exists_for_user
return false if current_user.can_admin_all_resources?
user_project_authorization_access_level = current_user.max_member_access_for_project(project.id)
project.container_registry_protection_rules.for_action_exists?(
action: params[:action],
access_level: user_project_authorization_access_level,
repository_path: params[:repository_path]
)
end
def check_rule_exists_for_deploy_token
project.container_registry_protection_rules
.for_repository_path(params[:repository_path])
.exists?
end
def service_response_for(protection_rule_exists)
protection_rule_exists ? SUCCESS_RESPONSE_RULE_EXISTS : SUCCESS_RESPONSE_RULE_DOESNT_EXIST
end
end
end
end

View File

@ -359,13 +359,15 @@ class IssuableBaseService < ::BaseContainerService
def transaction_create(issuable)
issuable.save.tap do |saved|
if saved
@callbacks.each(&:after_create)
@callbacks.each(&:after_save)
end
run_after_create_callbacks(issuable) if saved
end
end
def run_after_create_callbacks(_issuable)
@callbacks.each(&:after_create)
@callbacks.each(&:after_save)
end
def update_task(issuable)
filter_params(issuable)

View File

@ -1,85 +0,0 @@
# frozen_string_literal: true
module VirtualRegistries
module Packages
module Maven
module Cache
module Entries
class CreateOrUpdateService < ::BaseContainerService
alias_method :upstream, :container
ERRORS = {
unauthorized: ServiceResponse.error(message: 'Unauthorized', reason: :unauthorized),
path_not_present: ServiceResponse.error(message: 'Parameter path not present', reason: :path_not_present),
file_not_present: ServiceResponse.error(message: 'Parameter file not present', reason: :file_not_present)
}.freeze
def initialize(upstream:, current_user: nil, params: {})
super(container: upstream, current_user: current_user, params: params)
end
def execute
return ERRORS[:path_not_present] unless path.present?
return ERRORS[:file_not_present] unless file.present?
return ERRORS[:unauthorized] unless allowed?
now = Time.zone.now
updates = {
upstream_etag: etag,
upstream_checked_at: now,
file: file,
size: file.size,
file_sha1: file.sha1,
content_type: content_type
}.compact_blank
updates[:file_md5] = file.md5 unless Gitlab::FIPS.enabled?
ce = ::VirtualRegistries::Packages::Maven::Cache::Entry.create_or_update_by!(
group_id: upstream.group_id,
upstream: upstream,
relative_path: relative_path,
updates: updates
)
ServiceResponse.success(payload: { cache_entry: ce })
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(
e,
upstream_id: upstream.id,
group_id: upstream.group_id,
class: self.class.name
)
ServiceResponse.error(message: e.message, reason: :persistence_error)
end
private
def allowed?
can?(current_user, :read_virtual_registry, upstream)
end
def file
params[:file]
end
def path
params[:path]
end
def relative_path
"/#{path}"
end
def etag
params[:etag]
end
def content_type
params[:content_type]
end
end
end
end
end
end
end

View File

@ -1,174 +0,0 @@
# frozen_string_literal: true
module VirtualRegistries
module Packages
module Maven
class HandleFileRequestService < ::BaseContainerService
alias_method :registry, :container
TIMEOUT = 5
DIGEST_EXTENSIONS = %w[.sha1 .md5].freeze
PERMISSIONS_CACHE_TTL = 5.minutes
ERRORS = {
path_not_present: ServiceResponse.error(message: 'Path not present', reason: :path_not_present),
unauthorized: ServiceResponse.error(message: 'Unauthorized', reason: :unauthorized),
no_upstreams: ServiceResponse.error(message: 'No upstreams set', reason: :no_upstreams),
file_not_found_on_upstreams: ServiceResponse.error(
message: 'File not found on any upstream',
reason: :file_not_found_on_upstreams
),
digest_not_found: ServiceResponse.error(
message: 'File of the requested digest not found in cache entries',
reason: :digest_not_found_in_cache_entries
),
fips_unsupported_md5: ServiceResponse.error(
message: 'MD5 digest is not supported when FIPS is enabled',
reason: :fips_unsupported_md5
),
upstream_not_available: ServiceResponse.error(
message: 'Upstream not available',
reason: :upstream_not_available
)
}.freeze
def initialize(registry:, current_user: nil, params: {})
super(container: registry, current_user: current_user, params: params)
end
def execute
return ERRORS[:path_not_present] unless path.present?
return ERRORS[:unauthorized] unless allowed?
return ERRORS[:no_upstreams] unless registry.upstream.present?
if digest_request?
download_cache_entry_digest
elsif cache_response_still_valid?
download_cache_entry
else
check_upstream(registry.upstream)
end
rescue *::Gitlab::HTTP::HTTP_ERRORS
return download_cache_entry if cache_entry
ERRORS[:upstream_not_available]
end
private
def cache_entry
# TODO change this to support multiple upstreams
# https://gitlab.com/gitlab-org/gitlab/-/issues/480461
registry.upstream.default_cache_entries.find_by_relative_path(relative_path)
end
strong_memoize_attr :cache_entry
def cache_response_still_valid?
return false unless cache_entry
return true unless cache_entry.stale?
# cache entry with no etag can't be checked
return false if cache_entry.upstream_etag.blank?
response = head_upstream(upstream: cache_entry.upstream)
return false unless cache_entry.upstream_etag == response.headers['etag']
cache_entry.update_column(:upstream_checked_at, Time.current)
true
end
def check_upstream(upstream)
response = head_upstream(upstream: upstream)
return ERRORS[:file_not_found_on_upstreams] unless response.success?
workhorse_upload_url_response(upstream: upstream)
end
def head_upstream(upstream:)
strong_memoize_with(:head_upstream, upstream) do
url = upstream.url_for(path)
headers = upstream.headers
::Gitlab::HTTP.head(url, headers: headers, follow_redirects: true, timeout: TIMEOUT)
end
end
def download_cache_entry_digest
return ERRORS[:digest_not_found] unless cache_entry
digest_format = File.extname(path)[1..] # file extension without the leading dot
return ERRORS[:fips_unsupported_md5] if digest_format == 'md5' && Gitlab::FIPS.enabled?
ServiceResponse.success(
payload: {
action: :download_digest,
action_params: { digest: cache_entry["file_#{digest_format}"] }
}
)
end
def digest_request?
File.extname(path).in?(DIGEST_EXTENSIONS)
end
strong_memoize_attr :digest_request?
def allowed?
return false unless current_user # anonymous users can't access virtual registries
Rails.cache.fetch(permissions_cache_key, expires_in: PERMISSIONS_CACHE_TTL) do
can?(current_user, :read_virtual_registry, registry)
end
end
def permissions_cache_key
[
'virtual_registries',
current_user.model_name.cache_key,
current_user.id,
'read_virtual_registry',
'maven',
registry.id
]
end
def path
params[:path]
end
def relative_path
if digest_request?
"/#{path.chomp(File.extname(path))}"
else
"/#{path}"
end
end
def download_cache_entry
ServiceResponse.success(
payload: {
action: :download_file,
action_params: {
file: cache_entry.file,
file_sha1: cache_entry.file_sha1,
file_md5: cache_entry.file_md5,
content_type: cache_entry.content_type
}
}
)
end
def workhorse_upload_url_response(upstream:)
ServiceResponse.success(
payload: {
action: :workhorse_upload_url,
action_params: { url: upstream.url_for(path), upstream: upstream }
}
)
end
end
end
end
end

View File

@ -33,7 +33,10 @@
"type": "string"
},
{
"$ref": "#/definitions/reference"
"$ref": "#/definitions/gitReference"
},
{
"$ref": "#/definitions/ociReference"
}
]
}
@ -41,57 +44,28 @@
},
{
"description": "Run a sequence of steps.",
"oneOf": [
{
"type": "object",
"additionalProperties": false,
"required": [
"steps"
],
"properties": {
"env": {
"$ref": "#/definitions/namedStrings"
},
"steps": {
"description": "Deprecated. Use `run` instead.",
"type": "array",
"items": {
"$ref": "#/definitions/step"
}
},
"outputs": {
"$ref": "#/definitions/namedValues"
},
"delegate": {
"type": "string"
}
"type": "object",
"additionalProperties": false,
"required": [
"run"
],
"properties": {
"env": {
"$ref": "#/definitions/namedStrings"
},
"run": {
"type": "array",
"items": {
"$ref": "#/definitions/step"
}
},
{
"type": "object",
"additionalProperties": false,
"required": [
"run"
],
"properties": {
"env": {
"$ref": "#/definitions/namedStrings"
},
"run": {
"type": "array",
"items": {
"$ref": "#/definitions/step"
}
},
"outputs": {
"$ref": "#/definitions/namedValues"
},
"delegate": {
"type": "string"
}
}
"outputs": {
"$ref": "#/definitions/namedValues"
},
"delegate": {
"type": "string"
}
]
}
},
{
"description": "Run an action.",
@ -186,36 +160,94 @@
},
"additionalProperties": false
},
"reference": {
"gitReference": {
"type": "object",
"description": "Git a reference to a step in a Git repository.",
"description": "GitReference is a reference to a step in a Git repository.",
"additionalProperties": false,
"required": [
"git"
],
"properties": {
"git": {
"$ref": "#/definitions/gitReference"
"type": "object",
"additionalProperties": false,
"required": [
"url",
"rev"
],
"properties": {
"url": {
"type": "string"
},
"dir": {
"type": "string"
},
"rev": {
"type": "string"
},
"file": {
"type": "string"
}
}
}
}
},
"gitReference": {
"ociReference": {
"type": "object",
"description": "GitReference is a reference to a step in a Git repository containing the full set of configuration options.",
"description": "OCIReference is a reference to a step hosted in an OCI repository.",
"additionalProperties": false,
"required": [
"url",
"rev"
"oci"
],
"properties": {
"url": {
"type": "string"
},
"dir": {
"type": "string"
},
"rev": {
"type": "string"
"oci": {
"type": "object",
"additionalProperties": false,
"required": [
"registry",
"repository",
"tag"
],
"properties": {
"registry": {
"type": "string",
"description": "The <host>[:<port>] of the container registry server.",
"examples": [
"registry.gitlab.com"
]
},
"repository": {
"type": "string",
"description": "A path within the registry containing related OCI images. Typically the namespace, project, and image name.",
"examples": [
"my_group/my_project/image"
]
},
"tag": {
"type": "string",
"description": "A pointer to the image manifest hosted in the OCI repository.",
"examples": [
"latest",
"1",
"1.5",
"1.5.0"
]
},
"dir": {
"type": "string",
"description": "A directory inside the OCI image where the step can be found.",
"examples": [
"/my_steps/hello_world"
]
},
"file": {
"type": "string",
"description": "The name of the file that defines the step, defaults to step.yml.",
"examples": [
"step.yml"
]
}
}
}
}
},

View File

@ -13,7 +13,6 @@ module DependencyProxy
def perform
enqueue_blob_cleanup_job if DependencyProxy::Blob.pending_destruction.any?
enqueue_manifest_cleanup_job if DependencyProxy::Manifest.pending_destruction.any?
enqueue_vreg_packages_cache_entry_cleanup_job
end
private
@ -25,17 +24,7 @@ module DependencyProxy
def enqueue_manifest_cleanup_job
DependencyProxy::CleanupManifestWorker.perform_with_capacity
end
def enqueue_vreg_packages_cache_entry_cleanup_job
[::VirtualRegistries::Packages::Maven::Cache::Entry].each do |klass|
if klass.pending_destruction.any?
if Feature.enabled?(:virtual_registry_maven_cleanup_new_worker_class, Feature.current_request)
::VirtualRegistries::Packages::Cache::DestroyOrphanEntriesWorker.perform_with_capacity(klass.name)
else
::VirtualRegistries::Packages::DestroyOrphanCachedResponsesWorker.perform_with_capacity(klass.name)
end
end
end
end
end
end
DependencyProxy::CleanupDependencyProxyWorker.prepend_mod

View File

@ -9,6 +9,7 @@ module VirtualRegistries
include LimitedCapacity::Worker
MAX_CAPACITY = 2
REMAINING_WORK_COUNT = 0
data_consistency :sticky
urgency :low
@ -17,55 +18,21 @@ module VirtualRegistries
queue_namespace :dependency_proxy_blob
feature_category :virtual_registry
def perform_work(model)
next_item = next_item(model.constantize)
return unless next_item
next_item.destroy!
log_metadata(next_item)
rescue StandardError => exception
next_item&.update_column(:status, :error) unless next_item&.destroyed?
Gitlab::ErrorTracking.log_exception(
exception,
class: self.class.name
)
# overridden in EE
def perform_work(_model)
# no-op
end
def remaining_work_count(model)
model.constantize.pending_destruction.limit(max_running_jobs + 1).count
def remaining_work_count(_model)
REMAINING_WORK_COUNT
end
def max_running_jobs
MAX_CAPACITY
end
private
def next_item(klass)
klass.transaction do
next_item = klass.next_pending_destruction
if next_item
next_item.update_column(:status, :processing)
log_cleanup_item(next_item)
end
next_item
end
end
def log_metadata(cache_entry)
log_extra_metadata_on_done(:cache_entry_id, cache_entry.id)
log_extra_metadata_on_done(:group_id, cache_entry.group_id)
log_extra_metadata_on_done(:relative_path, cache_entry.relative_path)
end
def log_cleanup_item(cache_entry)
logger.info(structured_payload(cache_entry_id: cache_entry.id))
end
end
end
end
end
VirtualRegistries::Packages::Cache::DestroyOrphanEntriesWorker.prepend_mod

View File

@ -0,0 +1,9 @@
---
name: container_registry_protected_containers_delete
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/406797
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/146686
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/517986
milestone: '17.10'
group: group::container registry
type: gitlab_com_derisk
default_enabled: false

View File

@ -182,11 +182,6 @@ InitializerConnections.raise_if_new_database_connection do
draw :phone_verification
draw :arkose
# https://gitlab.com/gitlab-org/gitlab/-/issues/292690
scope '/push_from_secondary/:geo_node_id' do
draw :git_http
end
scope '/from_secondary/:geo_node_id' do
draw :git_http
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
# ClickHouse migration: creates a lookup table mapping a project's id to the
# traversal path of its project namespace.
#
# ReplacingMergeTree(version, deleted) keeps only the newest row per primary
# key (`id`) during merges, using `version` as the tiebreaker; rows with
# `deleted = true` act as soft-delete tombstones.
# NOTE(review): index_granularity 512 is smaller than ClickHouse's default
# (8192) — presumably tuned for point lookups by id; confirm with the owners.
class CreateProjectNamespaceTraversalPathsTable < ClickHouse::Migration
def up
execute <<~SQL
CREATE TABLE IF NOT EXISTS project_namespace_traversal_paths (
id Int64 DEFAULT 0,
traversal_path String DEFAULT '0/',
version DateTime64(6, 'UTC') DEFAULT NOW(),
deleted Boolean DEFAULT false
)
ENGINE=ReplacingMergeTree(version, deleted)
PRIMARY KEY id
SETTINGS index_granularity = 512;
SQL
end
# Reverts the migration by dropping the table (idempotent via IF EXISTS).
def down
execute <<~SQL
DROP TABLE IF EXISTS project_namespace_traversal_paths
SQL
end
end

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
# ClickHouse migration: creates a materialized view that feeds the
# project_namespace_traversal_paths table. For each inserted row in
# siphon_projects it looks up the matching namespace_traversal_paths row
# (joined on the project's project_namespace_id) and writes the project id
# together with the namespace's traversal_path, version, and deleted flag.
class CreateProjectNamespaceTraversalPathsMv < ClickHouse::Migration
def up
execute <<~SQL
CREATE MATERIALIZED VIEW IF NOT EXISTS project_namespace_traversal_paths_mv
TO project_namespace_traversal_paths
AS
WITH cte AS (
SELECT id, project_namespace_id FROM siphon_projects
), namespaces_cte AS (
SELECT traversal_path, id, version, deleted
FROM namespace_traversal_paths
WHERE id IN (SELECT project_namespace_id FROM cte)
)
SELECT
cte.id,
namespaces_cte.traversal_path,
namespaces_cte.version,
namespaces_cte.deleted
FROM cte
INNER JOIN namespaces_cte ON namespaces_cte.id = cte.project_namespace_id
SQL
end
# Reverts the migration by dropping the view (idempotent via IF EXISTS).
def down
execute <<~SQL
DROP VIEW IF EXISTS project_namespace_traversal_paths_mv
SQL
end
end

View File

@ -5,4 +5,4 @@ feature_category: continuous_integration
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/168750
milestone: '17.6'
queued_migration_version: 20241009135743
finalized_by: # version of the migration that finalized this BBM
finalized_by: 20250316224142

View File

@ -8,14 +8,6 @@ description: Configuration parameters recorded for a Machine Learning model cand
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/95168
milestone: '15.4'
gitlab_schema: gitlab_main_cell
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: candidate_id
table: ml_candidates
sharding_key: project_id
belongs_to: candidate
desired_sharding_key_migration_job_name: BackfillMlCandidateParamsProjectId
table_size: small
sharding_key:
project_id: projects

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true
# Adds a NOT NULL constraint on ml_candidate_params.project_id.
# No `validate: false` here, so the constraint is validated inline —
# presumably the backfill of project_id has already completed for this
# (small) table; see the sharding-key metadata change in this commit.
class AddMlCandidateParamsProjectIdNotNull < Gitlab::Database::Migration[2.2]
milestone '17.11'
# Required because adding/validating a check constraint cannot run inside
# a regular DDL transaction with these helpers.
disable_ddl_transaction!
def up
add_not_null_constraint :ml_candidate_params, :project_id
end
def down
remove_not_null_constraint :ml_candidate_params, :project_id
end
end

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true
# Adds a NOT NULL check constraint on packages_package_files.project_id
# without validating existing rows (`validate: false` creates it NOT VALID),
# so only new/updated rows are checked. Validation of existing rows is
# scheduled separately (see the companion prepare_async_check_constraint
# migration in this release).
class AddPackagesPackageFilesProjectIdNotNull < Gitlab::Database::Migration[2.2]
# Constraint DDL on a large table must run outside a transaction.
disable_ddl_transaction!
milestone '17.11'
def up
add_not_null_constraint :packages_package_files, :project_id, validate: false
end
def down
remove_not_null_constraint :packages_package_files, :project_id
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
# Queues asynchronous validation of the NOT VALID check constraint on
# packages_package_files.project_id (added as check_43773f06dc by the
# companion AddPackagesPackageFilesProjectIdNotNull migration). Async
# validation avoids holding a long lock on this large table during deploy.
class PreparePackagesPackageFilesProjectIdNotNullValidation < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.11'
# Auto-generated constraint name produced by add_not_null_constraint.
CONSTRAINT_NAME = :check_43773f06dc
def up
prepare_async_check_constraint_validation :packages_package_files, name: CONSTRAINT_NAME
end
def down
unprepare_async_check_constraint_validation :packages_package_files, name: CONSTRAINT_NAME
end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
# Finalizes the 'DeleteOrphanedStageRecords' batched background migration
# (BBM) on p_ci_stages. On GitLab.com (excluding JiHu) the BBM record is
# force-marked as finished first, so the subsequent ensure call does not
# execute remaining batches inline; elsewhere, `finalize: true` runs any
# outstanding batches synchronously before marking it done.
class FinalizeDeleteOrphanedStageRecords < Gitlab::Database::Migration[2.2]
milestone '17.11'
disable_ddl_transaction!
# BBM bookkeeping tables live in the CI database.
restrict_gitlab_migration gitlab_schema: :gitlab_ci
MIGRATION = 'DeleteOrphanedStageRecords'
def up
force_finish if Gitlab.com_except_jh?
ensure_batched_background_migration_is_finished(
job_class_name: MIGRATION,
table_name: :p_ci_stages,
column_name: :pipeline_id,
job_arguments: [],
finalize: true
)
end
def down
# no-op
end
private
# Marks the matching BBM record as finished directly via update_columns,
# bypassing state-machine callbacks. Safe no-op when no record is found.
def force_finish
Gitlab::Database::BackgroundMigration::BatchedMigration.reset_column_information
# NOTE(review): gitlab_schema_from_context is provided by the migration
# framework — presumably resolves to :gitlab_ci here; confirm.
migration = Gitlab::Database::BackgroundMigration::BatchedMigration.find_for_configuration(
gitlab_schema_from_context,
MIGRATION, :p_ci_stages, :pipeline_id, [],
include_compatible: true
)
return unless migration
migration.update_columns(
status: Gitlab::Database::BackgroundMigration::BatchedMigration.state_machines[:status].states[:finished].value
)
end
end

View File

@ -0,0 +1 @@
bde6f050ae8ab4b330dc3b7303d2256c16236361a0d066bc9931b5eea0974c9b

View File

@ -0,0 +1 @@
f902cb396c3f7f820896ebcc5b852d995f488e8c17defe203cfe9553467a8974

View File

@ -0,0 +1 @@
733fa5689f2d707f98d070ccb34129b82ef9ab01d498506ec1e9a3a80c06470d

View File

@ -0,0 +1 @@
8a7d1a59655373edbd0c1f05d71c79bf8def3c819647affdbc3c6bcaf3a8dffd

View File

@ -17018,7 +17018,8 @@ CREATE TABLE ml_candidate_params (
project_id bigint,
CONSTRAINT check_093034d049 CHECK ((char_length(name) <= 250)),
CONSTRAINT check_28a3c29e43 CHECK ((char_length(value) <= 250)),
CONSTRAINT check_7a0505ca91 CHECK ((candidate_id IS NOT NULL))
CONSTRAINT check_7a0505ca91 CHECK ((candidate_id IS NOT NULL)),
CONSTRAINT check_b42534522f CHECK ((project_id IS NOT NULL))
);
CREATE SEQUENCE ml_candidate_params_id_seq
@ -27909,6 +27910,9 @@ ALTER TABLE security_scans
ALTER TABLE vulnerability_scanners
ADD CONSTRAINT check_37608c9db5 CHECK ((char_length(vendor) <= 255)) NOT VALID;
ALTER TABLE packages_package_files
ADD CONSTRAINT check_43773f06dc CHECK ((project_id IS NOT NULL)) NOT VALID;
ALTER TABLE ONLY instance_type_ci_runners
ADD CONSTRAINT check_5c34a3c1db UNIQUE (id);

View File

@ -26,8 +26,12 @@ If you plan to allow user activity on your secondary sites during the upgrade,
do not pause replication for a [zero-downtime upgrade](../../../update/zero_downtime.md). While paused, the secondary site gets more and more out-of-date.
One known effect is that more and more Git fetches get redirected or proxied to the primary site. There may be additional unknown effects.
For example, pausing a secondary site with a separate URL may break sign-in at the secondary site's URL. You land on the primary site's root URL, without a new session on the secondary site's URL.
## Pause and resume
Pausing and resuming replication is done through a command-line tool from a specific node in the secondary site. Depending on your database architecture,
this will target either the `postgresql` or `patroni`service:
this targets either the `postgresql` or `patroni` service:
- If you are using a single node for all services on your secondary site, you must run the commands on this single node.
- If you have a standalone PostgreSQL node on your secondary site, you must run the commands on this standalone PostgreSQL node.

View File

@ -32826,6 +32826,27 @@ four standard [pagination arguments](#pagination-arguments):
| <a id="organizationprojectswithissuesenabled"></a>`withIssuesEnabled` | [`Boolean`](#boolean) | Return only projects with issues enabled. |
| <a id="organizationprojectswithmergerequestsenabled"></a>`withMergeRequestsEnabled` | [`Boolean`](#boolean) | Return only projects with merge requests enabled. |
##### `Organization.workspacesClusterAgents`
Cluster agents in the organization with workspaces capabilities.
{{< details >}}
**Introduced** in GitLab 17.10.
**Status**: Experiment.
{{< /details >}}
Returns [`ClusterAgentConnection`](#clusteragentconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#pagination-arguments):
`before: String`, `after: String`, `first: Int`, and `last: Int`.
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="organizationworkspacesclusteragentsfilter"></a>`filter` | [`NamespaceClusterAgentFilter!`](#namespaceclusteragentfilter) | Filter the types of cluster agents to return. |
### `OrganizationStateCounts`
Represents the total number of organizations for the represented states.

View File

@ -1202,10 +1202,13 @@ end
### Code in `spec/`
When you're testing EE-only features, avoid adding examples to the
existing CE specs. Also do not change existing CE examples, since they
should remain working as-is when EE is running without a license.
existing CE specs. Instead, place EE specs in the `ee/spec` folder.
Instead place EE specs in the `ee/spec` folder.
By default, CE specs run with EE code loaded as they should remain
working as-is when EE is running without a license.
These specs also need to pass when EE code is removed. You can run
the tests without EE code by [simulating a CE instance](#simulate-a-ce-instance-with-a-licensed-gdk).
### Code in `spec/factories`

View File

@ -67,7 +67,7 @@ Note the following:
- `.pipeline-policy-post` at the very end of the pipeline, after the `.post` stage.
- Injecting jobs in any of the reserved stages is guaranteed to always work. Execution policy jobs can also be assigned to any standard (build, test, deploy) or user-declared stages. However, in this case, the jobs may be ignored depending on the project pipeline configuration.
- It is not possible to assign jobs to reserved stages outside of a pipeline execution policy.
- Regardless of the `needs` keyword, all jobs in a pipeline must wait until the `.pipeline-policy-pre` stage is complete before they start processing.
- Regardless of the `needs` keyword, jobs in a pipeline do not begin until the `.pipeline-policy-pre` stage completes. To run non-blocking jobs at the beginning of the pipeline, add a custom stage that runs before the `.pre` stage. For example: `stages: [custom-non-blocking-stage, .pre]`.
- Choose unique job names for pipeline execution policies. Some CI/CD configurations are based on job names, which can lead to unwanted results if a job name exists multiple times in the same pipeline. For example, the `needs` keyword makes one job dependent on another. If there are multiple jobs with the name `example`, a job that `needs` the `example` job name depends on only one of the `example` job instances, selected at random.
- Pipeline execution policies remain in effect even if the project lacks a CI/CD configuration file.
- The order of the policies matters for the applied suffix.

View File

@ -88,3 +88,8 @@ Your code goes through a pre-scan security workflow when using GitLab Duo:
1. Your code is scanned for sensitive information using Gitleaks.
1. Any detected secrets are automatically removed from the request.
## GitLab Duo Self-Hosted
When you are using [GitLab Duo Self-Hosted](../../administration/gitlab_duo_self_hosted/_index.md)
and the self-hosted AI gateway, you do not share any data with GitLab.

View File

@ -28,7 +28,14 @@ title: Code Suggestions
Use GitLab Duo Code Suggestions to write code more efficiently by using generative AI to suggest code while you're developing.
Before you start using Code Suggestions, decide if you want to use the default GitLab-hosted LLM to manage Code Suggestions requests, or [deploy a self-hosted model](../../../../administration/gitlab_duo_self_hosted/_index.md). Self-hosted models maximize security and privacy by making sure nothing is sent to an external model.
Before you start using Code Suggestions, decide which of the following methods
you want to use to manage Code Suggestions requests:
- On GitLab.com or GitLab Self-Managed, the default GitLab AI vendor models and
cloud-based AI gateway that is hosted by GitLab.
- On GitLab Self-Managed, in GitLab 17.9 and later, [GitLab Duo Self-Hosted with a supported self-hosted model](../../../../administration/gitlab_duo_self_hosted/_index.md).
Self-hosted models maximize security and privacy by making sure nothing is
sent to an external model.
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
[View a click-through demo](https://gitlab.navattic.com/code-suggestions).

View File

@ -62,6 +62,19 @@ module API
end
delete ':id/registry/repositories/:repository_id', requirements: REPOSITORY_ENDPOINT_REQUIREMENTS do
authorize_admin_container_image!
if Feature.enabled?(:container_registry_protected_containers_delete, user_project&.root_ancestor) &&
!current_user.can_admin_all_resources?
service_response = ContainerRegistry::Protection::CheckRuleExistenceService.for_delete(
current_user: current_user,
project: repository.project,
params: { repository_path: repository.path.to_s }
).execute
forbidden!('Deleting protected container repository forbidden.') if service_response[:protection_rule_exists?]
end
repository.delete_scheduled!
track_package_event('delete_repository', :container, project: user_project, namespace: user_project.namespace)

View File

@ -153,11 +153,10 @@ class GroupSeeder
epic_params = {
title: FFaker::Lorem.sentence(6),
description: FFaker::Lorem.paragraphs(3).join("\n\n"),
author: author,
group: group
author: author
}
::Epics::CreateService.new(group: group, current_user: author, params: epic_params).execute
::WorkItems::LegacyEpics::CreateService.new(group: group, current_user: author, params: epic_params).execute
end
end
end

View File

@ -20031,10 +20031,10 @@ msgstr ""
msgid "Dependencies|Software Bill of Materials (SBOM) based on the latest successful scan of each project."
msgstr ""
msgid "Dependencies|The dependency list was succesfully exported."
msgid "Dependencies|The dependency list was successfully exported for %{exportable}."
msgstr ""
msgid "Dependencies|The dependency list was successfully exported for %{exportable}."
msgid "Dependencies|The dependency list was successfully exported."
msgstr ""
msgid "Dependencies|The location includes the lock file. For transitive dependencies a list of its direct dependents is shown."
@ -64786,6 +64786,24 @@ msgstr ""
msgid "Vulnerabilities over time"
msgstr ""
msgid "Vulnerabilities|%{link_start}Download the export%{link_end}."
msgstr ""
msgid "Vulnerabilities|Follow the link below to download the export."
msgstr ""
msgid "Vulnerabilities|The vulnerabilities list was successfully exported for %{exportable}."
msgstr ""
msgid "Vulnerabilities|The vulnerabilities list was successfully exported."
msgstr ""
msgid "Vulnerabilities|This link will expire in %{number} days."
msgstr ""
msgid "Vulnerabilities|Vulnerability report export"
msgstr ""
msgid "Vulnerability"
msgstr ""
@ -66961,9 +66979,6 @@ msgstr ""
msgid "WorkItem|Some fields are not present in %{workItemType}. If you change type now, this information will be lost."
msgstr ""
msgid "WorkItem|Some fields could not be loaded. Refresh the page to try again."
msgstr ""
msgid "WorkItem|Some values are not present in %{groupName} and will be removed."
msgstr ""

View File

@ -212,7 +212,6 @@ spec/frontend/super_sidebar/components/sidebar_portal_spec.js
spec/frontend/super_sidebar/components/user_menu_spec.js
spec/frontend/todos/components/filtered_search_tokens/group_token_spec.js
spec/frontend/todos/components/filtered_search_tokens/project_token_spec.js
spec/frontend/tooltips/components/tooltips_spec.js
spec/frontend/tooltips/index_spec.js
spec/frontend/vue_alerts_spec.js
spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js

View File

@ -500,6 +500,17 @@ tests = [
]
# rubocop:enable Layout/LineLength
},
{
explanation: 'Map Remote Development GraphQL organization/cluster_agents_resolver.rb to request specs',
changed_file: 'ee/app/graphql/resolvers/remote_development/organization/cluster_agents_resolver.rb',
# rubocop:disable Layout/LineLength -- fix CI failures - not sure why other lines in this file don't get errors
expected: %w[
ee/spec/requests/api/graphql/remote_development/organization/workspaces_cluster_agents/with_available_filter_arg_spec.rb
ee/spec/requests/api/graphql/remote_development/organization/workspaces_cluster_agents/with_directly_mapped_filter_arg_spec.rb
ee/spec/requests/api/graphql/remote_development/organization/workspaces_cluster_agents/with_unmapped_filter_arg_spec.rb
]
# rubocop:enable Layout/LineLength
},
{
explanation: 'Map Remote Development GraphQL query root workspaces_admin_resolver.rb to request specs',
changed_file: 'ee/app/graphql/resolvers/remote_development/workspaces_admin_resolver.rb',

View File

@ -62,8 +62,15 @@ RSpec.describe RapidDiffs::AppComponent, type: :component, feature_category: :co
expect(result).to have_text('custom_list')
end
def render_component(&block)
render_inline(described_class.new(
it 'preloads' do
instance = create_instance
render_inline(instance)
expect(instance.helpers.page_startup_api_calls).to include(metadata_endpoint)
expect(vc_test_controller.view_context.content_for?(:startup_js)).not_to be_nil
end
def create_instance
described_class.new(
diffs_slice:,
stream_url:,
reload_stream_url:,
@ -71,6 +78,10 @@ RSpec.describe RapidDiffs::AppComponent, type: :component, feature_category: :co
diff_view:,
update_user_endpoint:,
metadata_endpoint:
), &block)
)
end
def render_component(&block)
render_inline(create_instance, &block)
end
end

View File

@ -24,6 +24,7 @@ RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :so
before do
stub_feature_flags(vscode_web_ide: false)
stub_feature_flags(blob_overflow_menu: false)
sign_in(user)
end

View File

@ -11,7 +11,6 @@ import { Blob, mockEnvironmentName, mockEnvironmentPath } from './mock_data';
describe('Blob Header Default Actions', () => {
let wrapper;
let btnGroup;
let buttons;
const blobHash = 'foo-bar';
@ -20,6 +19,9 @@ describe('Blob Header Default Actions', () => {
wrapper = shallowMountExtended(BlobHeaderActions, {
provide: {
blobHash,
glFeatures: {
blobOverflowMenu: false,
},
...provided,
},
propsData: {
@ -31,17 +33,17 @@ describe('Blob Header Default Actions', () => {
beforeEach(() => {
createComponent();
btnGroup = wrapper.findComponent(GlButtonGroup);
buttons = wrapper.findAllComponents(GlButton);
});
describe('renders', () => {
const findButtonGroup = () => wrapper.findComponent(GlButtonGroup);
const findCopyButton = () => wrapper.findByTestId('copy-contents-button');
const findViewRawButton = () => wrapper.findByTestId('viewRawButton');
const findDownloadButton = () => wrapper.findByTestId('download-button');
it('gl-button-group component', () => {
expect(btnGroup.exists()).toBe(true);
expect(findButtonGroup().exists()).toBe(true);
});
it('exactly 3 buttons with predefined actions', () => {
@ -133,4 +135,14 @@ describe('Blob Header Default Actions', () => {
expect(findEnvironmentButton().props('icon')).toBe('external-link');
});
});
describe('when blob_overflow_menu is enabled', () => {
it('hides default actions for mobile layout', () => {
createComponent({}, { glFeatures: { blobOverflowMenu: true } });
expect(wrapper.findComponent(GlButtonGroup).attributes('class')).toBe(
'gl-hidden sm:gl-inline-flex',
);
});
});
});

View File

@ -73,64 +73,108 @@ describe('Blob Header Default Actions', () => {
}
describe('rendering', () => {
beforeEach(() => {
createComponent();
});
describe('WebIdeLink component', () => {
it('renders the WebIdeLink component with the correct props', async () => {
const { ideEditPath, editBlobPath, gitpodBlobUrl, pipelineEditorPath } = Blob;
const showForkSuggestion = false;
const showWebIdeForkSuggestion = false;
await createComponent({ propsData: { showForkSuggestion, showWebIdeForkSuggestion } });
it('does not render WebIdeLink component', () => {
expect(findWebIdeLink().exists()).toBe(false);
});
expect(findWebIdeLink().props()).toMatchObject({
showEditButton: true,
buttonVariant: 'confirm',
editUrl: editBlobPath,
webIdeUrl: ideEditPath,
needsToFork: showForkSuggestion,
needsToForkWithWebIde: showWebIdeForkSuggestion,
showPipelineEditorButton: Boolean(pipelineEditorPath),
pipelineEditorUrl: pipelineEditorPath,
gitpodUrl: gitpodBlobUrl,
showGitpodButton: applicationInfoMock.gitpodEnabled,
gitpodEnabled: userInfoMock.currentUser.gitpodEnabled,
describe('when blob_overflow_menu feature flag is false', () => {
it('renders the WebIdeLink component with the correct props', async () => {
const { ideEditPath, editBlobPath, gitpodBlobUrl, pipelineEditorPath } = Blob;
const showForkSuggestion = false;
const showWebIdeForkSuggestion = false;
await createComponent({
options: {
provide: {
glFeatures: { blobOverflowMenu: false },
},
},
propsData: { showForkSuggestion, showWebIdeForkSuggestion },
});
expect(findWebIdeLink().props()).toMatchObject({
showEditButton: true,
buttonVariant: 'confirm',
editUrl: editBlobPath,
webIdeUrl: ideEditPath,
needsToFork: showForkSuggestion,
needsToForkWithWebIde: showWebIdeForkSuggestion,
showPipelineEditorButton: Boolean(pipelineEditorPath),
pipelineEditorUrl: pipelineEditorPath,
gitpodUrl: gitpodBlobUrl,
showGitpodButton: applicationInfoMock.gitpodEnabled,
gitpodEnabled: userInfoMock.currentUser.gitpodEnabled,
});
});
it('passes the edit button variant down to the WebIdeLink', () => {
const editButtonVariant = 'danger';
createComponent({
options: {
provide: {
glFeatures: { blobOverflowMenu: false },
},
},
propsData: { editButtonVariant },
});
expect(findWebIdeLink().props('buttonVariant')).toBe(editButtonVariant);
});
it.each([[{ archived: true }], [{ editBlobPath: null }]])(
'does not render the WebIdeLink component when blob is archived or does not have an edit path',
(blobProps) => {
createComponent({
blobProps,
options: {
provide: {
glFeatures: { blobOverflowMenu: false },
},
},
});
expect(findWebIdeLink().exists()).toBe(false);
},
);
});
it('passes the edit button variant down to the WebIdeLink', () => {
const editButtonVariant = 'danger';
createComponent({ propsData: { editButtonVariant } });
expect(findWebIdeLink().props('buttonVariant')).toBe(editButtonVariant);
});
it.each([[{ archived: true }], [{ editBlobPath: null }]])(
'does not render the WebIdeLink component when blob is archived or does not have an edit path',
(blobProps) => {
createComponent({ blobProps });
expect(findWebIdeLink().exists()).toBe(false);
},
);
});
describe('default render', () => {
it.each`
findComponent | componentName
${findTableContents} | ${'TableContents'}
${findViewSwitcher} | ${'ViewSwitcher'}
${findDefaultActions} | ${'DefaultActions'}
${findBlobFilePath} | ${'BlobFilePath'}
findComponent | componentName
${findTableContents} | ${'TableContents'}
${findViewSwitcher} | ${'ViewSwitcher'}
${findBlobFilePath} | ${'BlobFilePath'}
`('renders $componentName component by default', ({ findComponent }) => {
createComponent();
expect(findComponent().exists()).toBe(true);
});
});
it('does not render DefaultActions when on blob page', () => {
createComponent({ propsData: { isBlobPage: true } });
describe('DefaultActions component', () => {
it('renders DefaultActions', () => {
expect(findDefaultActions().exists()).toBe(true);
});
expect(findDefaultActions().exists()).toBe(false);
it('passes information about render error down to default actions', () => {
createComponent({
propsData: {
hasRenderError: true,
},
});
expect(findDefaultActions().props('hasRenderError')).toBe(true);
});
it('passes the correct isBinary value to default actions when viewing a binary file', () => {
createComponent({ propsData: { isBinary: true } });
expect(findDefaultActions().props('isBinary')).toBe(true);
});
});
it.each([[{ showBlameToggle: true }], [{ showBlameToggle: false }]])(
@ -176,21 +220,6 @@ describe('Blob Header Default Actions', () => {
expect(wrapper.text()).toContain(slotContent);
});
it('passes information about render error down to default actions', () => {
createComponent({
propsData: {
hasRenderError: true,
},
});
expect(findDefaultActions().props('hasRenderError')).toBe(true);
});
it('passes the correct isBinary value to default actions when viewing a binary file', () => {
createComponent({ propsData: { isBinary: true } });
expect(findDefaultActions().props('isBinary')).toBe(true);
});
it('passes the `showBlobSize` prop to `blobFilepath`', () => {
const showBlobSize = false;
createComponent({ propsData: { showBlobSize } });

View File

@ -78,3 +78,27 @@ run_with_missing_git_url:
git:
dir: reverse
rev: v1
run_with_missing_oci_registry:
run:
- name: my_step
step:
oci:
repository: my-project/image
tag: 5.0.0
run_with_missing_oci_repository:
run:
- name: my_step
step:
oci:
registry: registry.gitlab.com
tag: 5.0.0
run_with_missing_oci_tag:
run:
- name: my_step
step:
oci:
registry: registry.gitlab.com
repository: my-project/image

View File

@ -80,6 +80,21 @@ step_using_git_reference:
env:
env1: value2
step_using_oci_reference:
run:
- name: oci_reference_step
step:
oci:
registry: gitlab.com/components/script
repository: bash
tag: 1.0.4
dir: /path/to/step
file: my_step.yml
inputs:
param1: value1
env:
env1: value2
step_using_action:
run:
- name: github_action_step

View File

@ -105,6 +105,21 @@ describe('Diffs list store', () => {
});
});
it('uses preload request', async () => {
const body = {};
const signal = {};
const streamRequest = Promise.resolve({ body });
window.gl.rapidDiffsPreload = { controller: { signal }, streamRequest };
const url = '/stream';
store.streamRemainingDiffs(url);
await waitForPromises();
expect(global.fetch).not.toHaveBeenCalled();
expect(renderHtmlStreams).toHaveBeenCalledWith([body], findStreamContainer(), {
signal,
});
window.gl.rapidDiffsPreload = undefined;
});
it('measures performance', async () => {
await store.streamRemainingDiffs('/stream');
await waitForPromises();

View File

@ -2,89 +2,105 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import BlobControls from '~/repository/components/header_area/blob_controls.vue';
import blobControlsQuery from '~/repository/queries/blob_controls.query.graphql';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { useMockInternalEventsTracking } from 'helpers/tracking_internal_events_helper';
import createRouter from '~/repository/router';
import { updateElementsVisibility } from '~/repository/utils/dom';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import WebIdeLink from 'ee_else_ce/vue_shared/components/web_ide_link.vue';
import { resetShortcutsForTests } from '~/behaviors/shortcuts';
import ShortcutsBlob from '~/behaviors/shortcuts/shortcuts_blob';
import Shortcuts from '~/behaviors/shortcuts/shortcuts';
import BlobLinePermalinkUpdater from '~/blob/blob_line_permalink_updater';
import BlobControls from '~/repository/components/header_area/blob_controls.vue';
import blobControlsQuery from '~/repository/queries/blob_controls.query.graphql';
import userGitpodInfo from '~/repository/queries/user_gitpod_info.query.graphql';
import createRouter from '~/repository/router';
import { updateElementsVisibility } from '~/repository/utils/dom';
import OverflowMenu from '~/repository/components/header_area/blob_overflow_menu.vue';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import OpenMrBadge from '~/repository/components/header_area/open_mr_badge.vue';
import { blobControlsDataMock, refMock } from '../../mock_data';
import { blobControlsDataMock, refMock, currentUserDataMock } from '../../mock_data';
Vue.use(VueApollo);
jest.mock('~/repository/utils/dom');
jest.mock('~/behaviors/shortcuts/shortcuts_blob');
jest.mock('~/blob/blob_line_permalink_updater');
let router;
let wrapper;
let mockResolver;
describe('Blob controls component', () => {
let router;
let wrapper;
let fakeApollo;
const createComponent = async ({
props = {},
blobInfoOverrides = {},
glFeatures = { blobOverflowMenu: false },
routerOverride = {},
} = {}) => {
Vue.use(VueApollo);
const createComponent = async ({
props = {},
blobInfoOverrides = {},
glFeatures = { blobOverflowMenu: false },
routerOverride = {},
} = {}) => {
Vue.use(VueApollo);
const projectPath = 'some/project';
router = createRouter(projectPath, refMock);
const projectPath = 'some/project';
router = createRouter(projectPath, refMock);
await router.push({
name: 'blobPathDecoded',
params: { path: '/some/file.js' },
...routerOverride,
});
await router.push({
name: 'blobPathDecoded',
params: { path: '/some/file.js' },
...routerOverride,
});
mockResolver = jest.fn().mockResolvedValue({
data: {
project: {
__typename: 'Project',
id: '1234',
repository: {
__typename: 'Repository',
empty: blobControlsDataMock.repository.empty,
blobs: {
__typename: 'RepositoryBlobConnection',
nodes: [{ ...blobControlsDataMock.repository.blobs.nodes[0], ...blobInfoOverrides }],
const blobControlsMockResolver = jest.fn().mockResolvedValue({
data: {
project: {
...blobControlsDataMock,
repository: {
...blobControlsDataMock.repository,
blobs: {
...blobControlsDataMock.repository.blobs,
nodes: [{ ...blobControlsDataMock.repository.blobs.nodes[0], ...blobInfoOverrides }],
},
},
},
},
},
});
});
await resetShortcutsForTests();
const currentUserMockResolver = jest
.fn()
.mockResolvedValue({ data: { currentUser: currentUserDataMock } });
wrapper = shallowMountExtended(BlobControls, {
router,
apolloProvider: createMockApollo([[blobControlsQuery, mockResolver]]),
provide: {
glFeatures,
currentRef: refMock,
},
propsData: {
projectPath,
isBinary: false,
refType: 'heads',
...props,
},
mixins: [{ data: () => ({ ref: refMock }) }, glFeatureFlagMixin()],
});
await resetShortcutsForTests();
await waitForPromises();
};
fakeApollo = createMockApollo([
[blobControlsQuery, blobControlsMockResolver],
[userGitpodInfo, currentUserMockResolver],
]);
wrapper = shallowMountExtended(BlobControls, {
router,
apolloProvider: fakeApollo,
provide: {
glFeatures,
currentRef: refMock,
gitpodEnabled: true,
},
propsData: {
projectPath,
projectIdAsNumber: 1,
isBinary: false,
refType: 'heads',
...props,
},
mixins: [{ data: () => ({ ref: refMock }) }, glFeatureFlagMixin()],
stubs: {
WebIdeLink: false,
},
});
await waitForPromises();
};
describe('Blob controls component', () => {
const findOpenMrBadge = () => wrapper.findComponent(OpenMrBadge);
const findFindButton = () => wrapper.findByTestId('find');
const findBlameButton = () => wrapper.findByTestId('blame');
const findPermalinkButton = () => wrapper.findByTestId('permalink');
const findWebIdeLink = () => wrapper.findComponent(WebIdeLink);
const findOverflowMenu = () => wrapper.findComponent(OverflowMenu);
const { bindInternalEventDocument } = useMockInternalEventsTracking();
@ -92,18 +108,8 @@ describe('Blob controls component', () => {
await createComponent();
});
describe('MR badge', () => {
it('should render the baadge if `filter_blob_path` flag is on', async () => {
await createComponent({ glFeatures: { filterBlobPath: true } });
expect(findOpenMrBadge().exists()).toBe(true);
expect(findOpenMrBadge().props('blobPath')).toBe('/some/file.js');
expect(findOpenMrBadge().props('projectPath')).toBe('some/project');
});
it('should not render the baadge if `filter_blob_path` flag is off', async () => {
await createComponent({ glFeatures: { filterBlobPath: false } });
expect(findOpenMrBadge().exists()).toBe(false);
});
afterEach(() => {
fakeApollo = null;
});
describe('showBlobControls', () => {
@ -122,6 +128,46 @@ describe('Blob controls component', () => {
});
});
it.each`
name | path
${'blobPathDecoded'} | ${null}
${'treePathDecoded'} | ${'myFile.js'}
`(
'does not render any buttons if router name is $name and router path is $path',
async ({ name, path }) => {
await router.replace({ name, params: { path } });
await nextTick();
expect(findFindButton().exists()).toBe(false);
expect(findBlameButton().exists()).toBe(false);
expect(findPermalinkButton().exists()).toBe(false);
expect(updateElementsVisibility).toHaveBeenCalledWith('.tree-controls', true);
},
);
it('loads the ShortcutsBlob', () => {
expect(ShortcutsBlob).toHaveBeenCalled();
});
it('loads the BlobLinePermalinkUpdater', () => {
expect(BlobLinePermalinkUpdater).toHaveBeenCalled();
});
describe('MR badge', () => {
it('should render the badge if `filter_blob_path` flag is on', async () => {
await createComponent({ glFeatures: { filterBlobPath: true } });
expect(findOpenMrBadge().exists()).toBe(true);
expect(findOpenMrBadge().props('blobPath')).toBe('/some/file.js');
expect(findOpenMrBadge().props('projectPath')).toBe('some/project');
});
it('should not render the badge if `filter_blob_path` flag is off', async () => {
await createComponent({ glFeatures: { filterBlobPath: false } });
expect(findOpenMrBadge().exists()).toBe(false);
});
});
describe('FindFile button', () => {
it('renders FindFile button', () => {
expect(findFindButton().exists()).toBe(true);
@ -175,72 +221,92 @@ describe('Blob controls component', () => {
expect(findPermalinkButton().attributes('href')).toBe('permalink/file.js');
});
it.each`
name | path
${'blobPathDecoded'} | ${null}
${'treePathDecoded'} | ${'myFile.js'}
`(
'does not render any buttons if router name is $name and router path is $path',
async ({ name, path }) => {
await router.replace({ name, params: { path } });
await nextTick();
expect(findFindButton().exists()).toBe(false);
expect(findBlameButton().exists()).toBe(false);
expect(findPermalinkButton().exists()).toBe(false);
expect(updateElementsVisibility).toHaveBeenCalledWith('.tree-controls', true);
},
);
it('loads the ShortcutsBlob', () => {
expect(ShortcutsBlob).toHaveBeenCalled();
it('does not render WebIdeLink component', () => {
expect(findWebIdeLink().exists()).toBe(false);
});
it('loads the BlobLinePermalinkUpdater', () => {
expect(BlobLinePermalinkUpdater).toHaveBeenCalled();
});
describe('BlobOverflow dropdown', () => {
describe('when blobOverflowMenu feature flag is true', () => {
beforeEach(async () => {
await createComponent({ glFeatures: { blobOverflowMenu: true } });
});
it('renders BlobOverflow component with correct props', () => {
expect(findOverflowMenu().exists()).toBe(true);
expect(findOverflowMenu().props()).toEqual({
projectPath: 'some/project',
isBinary: true,
overrideCopy: true,
isEmptyRepository: false,
isUsingLfs: false,
describe('WebIdeLink component', () => {
it('renders the WebIdeLink component with the correct props', () => {
expect(findWebIdeLink().props()).toMatchObject({
showEditButton: false,
editUrl: 'edit/blob/path/file.js',
webIdeUrl: 'ide/blob/path/file.js',
needsToFork: false,
needsToForkWithWebIde: false,
showPipelineEditorButton: true,
pipelineEditorUrl: 'pipeline/editor/path/file.yml',
gitpodUrl: 'gitpod/blob/url/file.js',
showGitpodButton: true,
gitpodEnabled: true,
});
});
});
it('passes the correct isBinary value to BlobOverflow when viewing a binary file', async () => {
await createComponent({
props: {
isBinary: true,
},
blobInfoOverrides: {
simpleViewer: {
...blobControlsDataMock.repository.blobs.nodes[0].simpleViewer,
fileType: 'podfile',
it('does not render WebIdeLink component if file is archived', async () => {
await createComponent({
blobInfoOverrides: {
...blobControlsDataMock.repository.blobs.nodes[0],
archived: true,
},
},
glFeatures: {
blobOverflowMenu: true,
},
glFeatures: { blobOverflowMenu: true },
});
expect(findWebIdeLink().exists()).toBe(false);
});
expect(findOverflowMenu().props('isBinary')).toBe(true);
it('does not render WebIdeLink component if file is not editable', async () => {
await createComponent({
blobInfoOverrides: {
...blobControlsDataMock.repository.blobs.nodes[0],
editBlobPath: '',
},
glFeatures: { blobOverflowMenu: true },
});
expect(findWebIdeLink().exists()).toBe(false);
});
});
it('copies to clipboard raw blob text, when receives copy event', () => {
jest.spyOn(navigator.clipboard, 'writeText');
findOverflowMenu().vm.$emit('copy');
describe('BlobOverflow dropdown', () => {
it('renders BlobOverflow component with correct props', () => {
expect(findOverflowMenu().exists()).toBe(true);
expect(findOverflowMenu().props()).toEqual({
projectPath: 'some/project',
isBinaryFileType: true,
overrideCopy: true,
isEmptyRepository: false,
isUsingLfs: false,
userPermissions: {
__typename: 'ProjectPermissions',
createMergeRequestIn: true,
downloadCode: true,
forkProject: true,
pushCode: true,
},
});
});
expect(navigator.clipboard.writeText).toHaveBeenCalledWith('Example raw text content');
it('passes the correct isBinaryFileType value to BlobOverflow when viewing a binary file', async () => {
await createComponent({
props: {
isBinary: true,
},
glFeatures: {
blobOverflowMenu: true,
},
});
expect(findOverflowMenu().props('isBinaryFileType')).toBe(true);
});
it('copies to clipboard raw blob text, when receives copy event', () => {
jest.spyOn(navigator.clipboard, 'writeText');
findOverflowMenu().vm.$emit('copy');
expect(navigator.clipboard.writeText).toHaveBeenCalledWith('Example raw text content');
});
});
});
});

View File

@ -17,7 +17,7 @@ describe('Blob Default Actions Group', () => {
blobHash: mockBlobHash,
activeViewerType: 'simple',
hasRenderError: false,
isBinary: false,
isBinaryFileType: false,
isEmpty: false,
canDownloadCode: true,
overrideCopy: false,
@ -78,8 +78,8 @@ describe('Blob Default Actions Group', () => {
});
});
it('does not render the Copy and view Raw button if isBinary is set to true', () => {
createComponent({ isBinary: true });
it('does not render the Copy and view Raw button if isBinaryFileType is set to true', () => {
createComponent({ isBinaryFileType: true });
expect(findCopyFileContentItem()).toBeUndefined();
expect(findViewRawItem()).toBeUndefined();

View File

@ -52,6 +52,7 @@ describe('Blob Overflow Menu', () => {
},
propsData: {
projectPath,
userPermissions: blobControlsDataMock.userPermissions,
...propsData,
},
stub: {

View File

@ -91,6 +91,7 @@ export const encodedRefWithSpecialCharMock = 'feat/selected-%23-ref-%23';
export const blobControlsDataMock = {
__typename: 'Project',
id: '1234',
userPermissions: userPermissionsMock,
repository: {
__typename: 'Repository',
empty: false,
@ -117,6 +118,10 @@ export const blobControlsDataMock = {
canModifyBlob: true,
canModifyBlobWithWebIde: true,
forkAndViewPath: 'fork/view/path',
editBlobPath: 'edit/blob/path/file.js',
ideEditPath: 'ide/blob/path/file.js',
pipelineEditorPath: 'pipeline/editor/path/file.yml',
gitpodBlobUrl: 'gitpod/blob/url/file.js',
simpleViewer: {
__typename: 'BlobViewer',
collapsed: false,
@ -280,3 +285,11 @@ export const headerAppInjected = {
};
export const FILE_SIZE_3MB = 3000000;
export const currentUserDataMock = {
__typename: 'User',
id: '1234',
gitpodEnabled: true,
preferencesGitpodPath: 'preferences/gitpod/path',
profileEnableGitpodPath: 'profile/enable/gitpod/path',
};

View File

@ -9,7 +9,9 @@ describe('tooltips/components/tooltips.vue', () => {
let wrapper;
const buildWrapper = () => {
wrapper = shallowMount(Tooltips);
wrapper = shallowMount(Tooltips, {
stubs: { GlTooltip },
});
};
const createTooltipTarget = (attributes = {}) => {

View File

@ -3,11 +3,11 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['Organization'], feature_category: :cell do
let(:expected_fields) do
let_it_be(:expected_fields) do
%w[avatar_url description description_html groups id name organization_users path projects web_url]
end
specify { expect(described_class.graphql_name).to eq('Organization') }
specify { expect(described_class).to require_graphql_authorizations(:read_organization) }
specify { expect(described_class).to have_graphql_fields(*expected_fields) }
specify { expect(described_class).to include_graphql_fields(*expected_fields) }
end

View File

@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::SecureFiles::Cer do
describe '#certificate_data' do
it 'assigns the error message and returns nil' do
expect(invalid_certificate.certificate_data).to be nil
expect(invalid_certificate.certificate_data).to be_nil
expect(invalid_certificate.error).to eq('PEM_read_bio_X509: no start line (Expecting: CERTIFICATE)')
end
end

View File

@ -9,7 +9,7 @@ RSpec.describe Gitlab::Ci::SecureFiles::MobileProvision do
describe '#decoded_plist' do
it 'assigns the error message and returns nil' do
expect(invalid_profile.decoded_plist).to be nil
expect(invalid_profile.decoded_plist).to be_nil
expect(invalid_profile.error).to eq('Could not parse the PKCS7: no start line (Expecting: PKCS7)')
end
end
@ -57,7 +57,7 @@ RSpec.describe Gitlab::Ci::SecureFiles::MobileProvision do
it 'returns nil if the property list fails to be parsed from the decoded plist' do
allow(subject).to receive(:decoded_plist).and_return('foo/bar')
expect(subject.properties).to be nil
expect(subject.properties).to be_nil
expect(subject.error).to start_with('invalid XML')
end
end

View File

@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::SecureFiles::P12 do
describe '#certificate_data' do
it 'assigns the error message and returns nil' do
expect(invalid_certificate.certificate_data).to be nil
expect(invalid_certificate.certificate_data).to be_nil
# OpenSSL v3+ reports `PKCS12_parse: parse error` while
# OpenSSL v1.1 reports `PKCS12_parse: mac verify failure`. Unfortunately, we
# can't tell what underlying library is used, so just look for an error.
@ -35,7 +35,7 @@ RSpec.describe Gitlab::Ci::SecureFiles::P12 do
describe '#certificate_data' do
it 'assigns the error message and returns nil' do
expect(subject.certificate_data).to be nil
expect(subject.certificate_data).to be_nil
expect(subject.error).to eq('PKCS12_parse: mac verify failure')
end
end

View File

@ -55,8 +55,8 @@ RSpec.describe Gitlab::ClassAttributes do
end
it "triggers after hooks after set class values" do
expect(klass.counter_1).to be(nil)
expect(klass.counter_2).to be(nil)
expect(klass.counter_1).to be_nil
expect(klass.counter_2).to be_nil
klass.set_attribute(:foo, :bar)
klass.set_attribute(:foo, :bar)

View File

@ -79,13 +79,13 @@ RSpec.describe Gitlab::Current::Organization, feature_category: :cell do
context 'and namespace is not found' do
let(:group_path) { 'not_found' }
it { is_expected.to be(nil) }
it { is_expected.to be_nil }
end
context 'and namespace_id is empty string' do
let(:params) { super().merge(namespace_id: '') }
it { is_expected.to be(nil) }
it { is_expected.to be_nil }
it 'does not execute query' do
expect { current_organization }.to match_query_count(0)
@ -103,7 +103,7 @@ RSpec.describe Gitlab::Current::Organization, feature_category: :cell do
context 'and namespace is not found' do
let(:group_path) { 'not_found' }
it { is_expected.to be(nil) }
it { is_expected.to be_nil }
end
end
@ -120,12 +120,12 @@ RSpec.describe Gitlab::Current::Organization, feature_category: :cell do
context 'and namespace is not found' do
let(:group_path) { non_existing_record_id }
it { is_expected.to be(nil) }
it { is_expected.to be_nil }
end
end
context 'and controller is not groups' do
it { is_expected.to be(nil) }
it { is_expected.to be_nil }
end
end

View File

@ -865,7 +865,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
let!(:batched_job) { create(:batched_background_migration_job, :succeeded, batched_migration: migration) }
it 'returns nil' do
expect(subject).to be nil
expect(subject).to be_nil
end
end

View File

@ -27,7 +27,7 @@ RSpec.describe Gitlab::Database::ConvertFeatureCategoryToGroupLabel, feature_cat
let(:feature_category) { 'non_existing_feature_category_test' }
it 'returns nil' do
expect(group_label).to be nil
expect(group_label).to be_nil
end
end
end

View File

@ -17,7 +17,7 @@ RSpec.describe Gitlab::Email::IncomingEmail, feature_category: :service_desk do
end
it 'does not match emails with extra bits' do
expect(described_class.key_from_address('somereplies+somekey@example.com.someotherdomain.com')).to be nil
expect(described_class.key_from_address('somereplies+somekey@example.com.someotherdomain.com')).to be_nil
end
context 'when a custom wildcard address is used' do

View File

@ -18,19 +18,19 @@ RSpec.describe Gitlab::Email::ServiceDesk::CustomEmail, feature_category: :servi
describe '.reply_address' do
subject(:reply_address) { described_class.reply_address(nil, nil) }
it { is_expected.to be nil }
it { is_expected.to be_nil }
context 'with reply key' do
subject(:reply_address) { described_class.reply_address(nil, reply_key) }
it { is_expected.to be nil }
it { is_expected.to be_nil }
context 'with issue' do
let_it_be(:issue) { create(:issue, project: project) }
subject(:reply_address) { described_class.reply_address(issue, reply_key) }
it { is_expected.to be nil }
it { is_expected.to be_nil }
context 'with service_desk_setting and custom email' do
let!(:service_desk_setting) { create(:service_desk_setting, custom_email: custom_email, project: project) }
@ -46,14 +46,14 @@ RSpec.describe Gitlab::Email::ServiceDesk::CustomEmail, feature_category: :servi
subject(:reply_address) { described_class.key_from_reply_address(email) }
it { is_expected.to be nil }
it { is_expected.to be_nil }
context 'with service_desk_setting' do
let_it_be_with_refind(:setting) do
create(:service_desk_setting, project: project, add_external_participants_from_cc: true)
end
it { is_expected.to be nil }
it { is_expected.to be_nil }
context 'with custom email' do
let!(:credential) { create(:service_desk_custom_email_credential, project: project) }
@ -71,7 +71,7 @@ RSpec.describe Gitlab::Email::ServiceDesk::CustomEmail, feature_category: :servi
context 'without reply key' do
let(:email) { custom_email }
it { is_expected.to be nil }
it { is_expected.to be_nil }
end
end
@ -80,18 +80,18 @@ RSpec.describe Gitlab::Email::ServiceDesk::CustomEmail, feature_category: :servi
let(:email) { nil }
it { is_expected.to be nil }
it { is_expected.to be_nil }
context 'with service desk incoming email' do
let(:email) { ::ServiceDesk::Emails.new(project).send(:incoming_address) }
it { is_expected.to be nil }
it { is_expected.to be_nil }
end
context 'with another unknown email' do
let(:email) { 'unknown@example.com' }
it { is_expected.to be nil }
it { is_expected.to be_nil }
end
context 'with custom email' do

View File

@ -15,7 +15,7 @@ RSpec.describe Gitlab::GitalyClient::WithFeatureFlagActors, feature_category: :g
describe '#user_actor' do
context 'when user is not available in ApplicationContext' do
it 'returns nil' do
expect(service.user_actor).to be(nil)
expect(service.user_actor).to be_nil
end
end
@ -35,7 +35,7 @@ RSpec.describe Gitlab::GitalyClient::WithFeatureFlagActors, feature_category: :g
end
it 'returns corresponding user record' do
expect(service.user_actor).to be(nil)
expect(service.user_actor).to be_nil
end
end
end

View File

@ -153,7 +153,7 @@ RSpec.describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab
end
it 'responds correctly to no due date value' do
expect(milestone_hash2[:due_date]).to be nil
expect(milestone_hash2[:due_date]).to be_nil
end
it 'includes the created timestamp' do

View File

@ -14,7 +14,7 @@ RSpec.describe Gitlab::Identifier do
describe '#identify' do
context 'without an identifier' do
it 'returns nil' do
expect(identifier.identify('')).to be nil
expect(identifier.identify('')).to be_nil
end
end

View File

@ -47,7 +47,7 @@ RSpec.describe Gitlab::JiraImport::LabelsImporter, :clean_gitlab_redis_shared_st
end
it 'caches import label' do
expect(Gitlab::Cache::Import::Caching.read(Gitlab::JiraImport.import_label_cache_key(project.id))).to be nil
expect(Gitlab::Cache::Import::Caching.read(Gitlab::JiraImport.import_label_cache_key(project.id))).to be_nil
subject

View File

@ -120,7 +120,7 @@ RSpec.describe Gitlab::JiraImport do
describe '.get_issues_next_start_at', :clean_gitlab_redis_cache do
it 'returns zero when not defined' do
expect(Gitlab::Cache::Import::Caching.read("jira-import/paginator/#{project_id}/issues")).to be nil
expect(Gitlab::Cache::Import::Caching.read("jira-import/paginator/#{project_id}/issues")).to be_nil
expect(described_class.get_issues_next_start_at(project_id)).to eq(0)
end

View File

@ -178,7 +178,7 @@ RSpec.describe Gitlab::MailRoom, feature_category: :build do
context 'non-existing mailbox_type' do
it 'returns nil' do
expect(described_class.worker_for('another_mailbox_type')).to be(nil)
expect(described_class.worker_for('another_mailbox_type')).to be_nil
end
end
end

View File

@ -225,7 +225,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ExternalHttp, :request_store, featu
subscriber.request(event_2)
subscriber.request(event_3)
expect(Gitlab::SafeRequestStore[:external_http_detail_store]).to be(nil)
expect(Gitlab::SafeRequestStore[:external_http_detail_store]).to be_nil
end
end
end

View File

@ -46,13 +46,13 @@ RSpec.describe Gitlab::Middleware::RackMultipartTempfileFactory do
it 'immediately unlinks the temporary file' do
tempfile = Tempfile.new('foo')
expect(tempfile.path).not_to be(nil)
expect(tempfile.path).not_to be_nil
expect(Rack::Multipart::Parser::TEMPFILE_FACTORY).to receive(:call).and_return(tempfile)
expect(tempfile).to receive(:unlink).and_call_original
subject.call(env)
expect(tempfile.path).to be(nil)
expect(tempfile.path).to be_nil
end
it 'processes the request as normal' do

View File

@ -50,7 +50,7 @@ RSpec.describe Gitlab::ProjectTemplate, feature_category: :source_code_managemen
context 'when there is no match' do
let(:query) { 'no-match' }
it { is_expected.to be(nil) }
it { is_expected.to be_nil }
end
end

View File

@ -6,7 +6,7 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration d
let_it_be_with_reload(:project) { create_default(:project, :repository) }
let_it_be(:repository) { project.repository }
subject { build(:ci_pipeline_schedule, project: project) }
subject(:schedule) { build(:ci_pipeline_schedule, project: project) }
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:owner) }
@ -60,6 +60,13 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration d
expect(schedule.errors.full_messages).to contain_exactly('Inputs have duplicate values (test_input)')
end
it 'limits the number of inputs' do
schedule.inputs = build_list(:ci_pipeline_schedule_input, 21)
expect(schedule).not_to be_valid
expect(schedule.errors.full_messages).to contain_exactly('Inputs exceeds the limit of 20.')
end
context 'when an short ref record is being updated' do
let(:new_description) { 'some description' }
let(:ref) { 'other' }

View File

@ -334,6 +334,155 @@ RSpec.describe ContainerRegistry::Protection::Rule, type: :model, feature_catego
end
end
describe '.for_action_exists?' do
let_it_be(:project1) { create(:project) }
let_it_be(:project_no_crpr) { create(:project) }
let_it_be(:protection_rule_for_developer) do
create(:container_registry_protection_rule,
project: project1,
repository_path_pattern: "#{project1.full_path}/stage*",
minimum_access_level_for_delete: :owner,
minimum_access_level_for_push: :maintainer
)
end
let_it_be(:protection_rule_for_maintainer) do
create(:container_registry_protection_rule,
project: project1,
repository_path_pattern: "#{project1.full_path}/prod*",
minimum_access_level_for_delete: :owner,
minimum_access_level_for_push: :owner
)
end
let_it_be(:protection_rule_for_owner) do
create(:container_registry_protection_rule,
project: project1,
repository_path_pattern: "#{project1.full_path}/release*",
minimum_access_level_for_delete: :admin,
minimum_access_level_for_push: :admin
)
end
# Creating an identical container protection rule for the same project
# to ensure that overlapping rules are considered properly
let_it_be(:protection_rule_overlapping_for_developer) do
create(:container_registry_protection_rule,
project: project1,
repository_path_pattern: "#{project1.full_path}/stage-sha*",
minimum_access_level_for_delete: :owner,
minimum_access_level_for_push: :maintainer
)
end
let_it_be(:protection_rule_only_deletion_protection) do
create(:container_registry_protection_rule,
repository_path_pattern: "#{project1.full_path}/only-delete-protected*",
project: project1,
minimum_access_level_for_delete: :admin,
minimum_access_level_for_push: nil
)
end
let_it_be(:protection_rule_only_push_protection) do
create(:container_registry_protection_rule,
repository_path_pattern: "#{project1.full_path}/only-push-protected*",
project: project1,
minimum_access_level_for_delete: nil,
minimum_access_level_for_push: :admin
)
end
subject(:for_action_exists_result) do
project
.container_registry_protection_rules
.for_action_exists?(
action: action,
access_level: access_level,
repository_path: repository_path
)
end
# rubocop:disable Layout/LineLength -- Avoid formatting to ensure one-line table syntax
where(:project, :action, :access_level, :repository_path, :protected?) do
ref(:project1) | :push | Gitlab::Access::REPORTER | lazy { "#{project.full_path}/stage-sha-1234" } | true
ref(:project1) | :push | Gitlab::Access::DEVELOPER | lazy { "#{project.full_path}/stage-sha-1234" } | true
ref(:project1) | :push | Gitlab::Access::MAINTAINER | lazy { "#{project.full_path}/stage-sha-1234" } | false
ref(:project1) | :push | Gitlab::Access::OWNER | lazy { "#{project.full_path}/stage-sha-1234" } | false
ref(:project1) | :push | Gitlab::Access::ADMIN | lazy { "#{project.full_path}/stage-sha-1234" } | false
ref(:project1) | :delete | Gitlab::Access::DEVELOPER | lazy { "#{project.full_path}/stage-sha-1234" } | true
ref(:project1) | :delete | Gitlab::Access::MAINTAINER | lazy { "#{project.full_path}/stage-sha-1234" } | true
ref(:project1) | :delete | Gitlab::Access::OWNER | lazy { "#{project.full_path}/stage-sha-1234" } | false
ref(:project1) | :delete | Gitlab::Access::ADMIN | lazy { "#{project.full_path}/stage-sha-1234" } | false
ref(:project1) | :push | Gitlab::Access::MAINTAINER | lazy { "#{project.full_path}/prod-sha-1234" } | true
ref(:project1) | :push | Gitlab::Access::OWNER | lazy { "#{project.full_path}/prod-sha-1234" } | false
ref(:project1) | :push | Gitlab::Access::ADMIN | lazy { "#{project.full_path}/prod-sha-1234" } | false
ref(:project1) | :delete | Gitlab::Access::MAINTAINER | lazy { "#{project.full_path}/prod-sha-1234" } | true
ref(:project1) | :delete | Gitlab::Access::OWNER | lazy { "#{project.full_path}/prod-sha-1234" } | false
ref(:project1) | :delete | Gitlab::Access::ADMIN | lazy { "#{project.full_path}/prod-sha-1234" } | false
ref(:project1) | :push | Gitlab::Access::MAINTAINER | lazy { "#{project.full_path}/release-v1" } | true
ref(:project1) | :push | Gitlab::Access::OWNER | lazy { "#{project.full_path}/release-v1" } | true
ref(:project1) | :push | Gitlab::Access::ADMIN | lazy { "#{project.full_path}/release-v1" } | false
ref(:project1) | :delete | Gitlab::Access::MAINTAINER | lazy { "#{project.full_path}/release-v1" } | true
ref(:project1) | :delete | Gitlab::Access::OWNER | lazy { "#{project.full_path}/release-v1" } | true
ref(:project1) | :delete | Gitlab::Access::ADMIN | lazy { "#{project.full_path}/release-v1" } | false
ref(:project1) | :push | Gitlab::Access::DEVELOPER | lazy { "#{project.full_path}/only-delete-protected" } | false
ref(:project1) | :push | Gitlab::Access::MAINTAINER | lazy { "#{project.full_path}/only-delete-protected" } | false
ref(:project1) | :push | Gitlab::Access::OWNER | lazy { "#{project.full_path}/only-delete-protected" } | false
ref(:project1) | :push | Gitlab::Access::ADMIN | lazy { "#{project.full_path}/only-delete-protected" } | false
ref(:project1) | :push | Gitlab::Access::DEVELOPER | lazy { "#{project.full_path}/only-push-protected" } | true
ref(:project1) | :push | Gitlab::Access::MAINTAINER | lazy { "#{project.full_path}/only-push-protected" } | true
ref(:project1) | :push | Gitlab::Access::OWNER | lazy { "#{project.full_path}/only-push-protected" } | true
ref(:project1) | :push | Gitlab::Access::ADMIN | lazy { "#{project.full_path}/only-push-protected" } | false
ref(:project1) | :delete | Gitlab::Access::DEVELOPER | lazy { "#{project.full_path}/only-delete-protected" } | true
ref(:project1) | :delete | Gitlab::Access::MAINTAINER | lazy { "#{project.full_path}/only-delete-protected" } | true
ref(:project1) | :delete | Gitlab::Access::OWNER | lazy { "#{project.full_path}/only-delete-protected" } | true
ref(:project1) | :delete | Gitlab::Access::ADMIN | lazy { "#{project.full_path}/only-delete-protected" } | false
ref(:project1) | :delete | Gitlab::Access::DEVELOPER | lazy { "#{project.full_path}/only-push-protected" } | false
ref(:project1) | :delete | Gitlab::Access::MAINTAINER | lazy { "#{project.full_path}/only-push-protected" } | false
ref(:project1) | :delete | Gitlab::Access::OWNER | lazy { "#{project.full_path}/only-push-protected" } | false
ref(:project1) | :delete | Gitlab::Access::ADMIN | lazy { "#{project.full_path}/only-push-protected" } | false
# For non-matching containers
ref(:project1) | :push | Gitlab::Access::DEVELOPER | lazy { "#{project.full_path}/any-suffix" } | false
ref(:project1) | :push | Gitlab::Access::NO_ACCESS | lazy { "#{project.full_path}/prod" } | true
ref(:project1) | :delete | Gitlab::Access::DEVELOPER | lazy { "#{project.full_path}/any-suffix" } | false
ref(:project1) | :delete | Gitlab::Access::NO_ACCESS | lazy { "#{project.full_path}/prod" } | true
# Edge cases
ref(:project1) | :push | nil | lazy { "#{project.full_path}/stage-sha-1234" } | false
ref(:project1) | :push | Gitlab::Access::DEVELOPER | nil | false
ref(:project1) | :push | Gitlab::Access::DEVELOPER | '' | false
ref(:project1) | :push | nil | nil | false
# For projects that have no container protection rules
ref(:project_no_crpr) | :push | Gitlab::Access::DEVELOPER | lazy { "#{project.full_path}/prod" } | false
ref(:project_no_crpr) | :push | Gitlab::Access::OWNER | lazy { "#{project.full_path}/prod" } | false
ref(:project_no_crpr) | :delete | Gitlab::Access::DEVELOPER | lazy { "#{project.full_path}/prod" } | false
ref(:project_no_crpr) | :delete | Gitlab::Access::OWNER | lazy { "#{project.full_path}/prod" } | false
end
# rubocop:enable Layout/LineLength
with_them do
it { is_expected.to eq protected? }
end
context 'for invalid action' do
let(:project) { project1 }
let(:action) { :invalid_action }
let(:access_level) { Gitlab::Access::DEVELOPER }
let(:repository_path) { "#{project.full_path}/stage-sha-1234" }
it 'raises an error' do
expect { for_action_exists_result }.to raise_error ArgumentError, 'action must be :push or :delete'
end
end
end
describe '.for_push_exists_for_projects_and_repository_paths' do
let_it_be(:project1) { create(:project) }
let_it_be(:project1_crpr) { create(:container_registry_protection_rule, project: project1) }

View File

@ -116,6 +116,14 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r
let(:method) { :delete }
let(:url) { "/projects/#{project.id}/registry/repositories/#{root_repository.id}" }
shared_examples 'destroying the container repository' do
it 'marks the repository as delete_scheduled' do
expect { subject }.to change { root_repository.reload.status }.from(nil).to('delete_scheduled')
expect(response).to have_gitlab_http_status(:accepted)
end
end
['using API user', 'using job token'].each do |context|
context context do
include_context context
@ -127,6 +135,8 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r
context 'for maintainer' do
let(:api_user) { maintainer }
it_behaves_like 'destroying the container repository'
it 'marks the repository as delete_scheduled' do
expect { subject }.to change { root_repository.reload.status }.from(nil).to('delete_scheduled')
@ -137,6 +147,71 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r
end
include_examples 'rejected job token scopes'
context 'with delete protection rule', :enable_admin_mode do
using RSpec::Parameterized::TableSyntax
include_context 'using API user'
let_it_be(:owner) { create(:user, owner_of: [project, project2]) }
let_it_be(:instance_admin) { create(:admin) }
let_it_be_with_reload(:container_registry_protection_rule) do
create(:container_registry_protection_rule, project: project)
end
let(:params) { { admin_mode: admin_mode } }
before do
container_registry_protection_rule.update!(
repository_path_pattern: root_repository.path,
minimum_access_level_for_delete: minimum_access_level_for_delete
)
end
shared_examples 'protected deletion of container repository' do
it 'returns the expected status' do
subject
expect(response).to have_gitlab_http_status(:forbidden)
end
it 'returns error message' do
subject
expect(json_response).to include('message' => '403 Forbidden - Deleting protected container repository forbidden.')
end
context 'when feature flag :container_registry_protected_containers_delete is disabled' do
before do
stub_feature_flags(container_registry_protected_containers_delete: false)
end
it_behaves_like 'destroying the container repository'
end
end
where(:minimum_access_level_for_delete, :api_user, :admin_mode, :expected_shared_example) do
nil | ref(:maintainer) | false | 'destroying the container repository'
nil | ref(:owner) | false | 'destroying the container repository'
:maintainer | ref(:maintainer) | false | 'destroying the container repository'
:maintainer | ref(:owner) | false | 'destroying the container repository'
:maintainer | ref(:instance_admin) | true | 'destroying the container repository'
:owner | ref(:maintainer) | false | 'protected deletion of container repository'
:owner | ref(:owner) | false | 'destroying the container repository'
:owner | ref(:instance_admin) | true | 'destroying the container repository'
:admin | ref(:maintainer) | false | 'protected deletion of container repository'
:admin | ref(:owner) | false | 'protected deletion of container repository'
:admin | ref(:instance_admin) | true | 'destroying the container repository'
end
with_them do
it_behaves_like params[:expected_shared_example]
end
end
end
describe 'GET /projects/:id/registry/repositories/:repository_id/tags' do

View File

@ -0,0 +1,107 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ContainerRegistry::Protection::CheckRuleExistenceService, feature_category: :package_registry do
using RSpec::Parameterized::TableSyntax
let_it_be(:project) { create(:project) }
let_it_be(:unauthorized_user) { create(:user) }
let_it_be(:project_developer) { create(:user, developer_of: project) }
let_it_be(:project_maintainer) { create(:user, maintainer_of: project) }
let_it_be(:project_owner) { project.owner }
let_it_be(:instance_admin) { create(:admin) }
let_it_be(:project_deploy_token) { create(:deploy_token, :all_scopes, projects: [project]) }
let_it_be(:other_deploy_token) { create(:deploy_token, :all_scopes) }
let_it_be(:container_registry_protection_rule) do
create(:container_registry_protection_rule,
project: project,
repository_path_pattern: "#{project.full_path}/protected*",
minimum_access_level_for_push: :owner,
minimum_access_level_for_delete: :admin)
end
let(:params) { { repository_path: repository_path_pattern, action: action } }
let(:service) { described_class.new(project: project, current_user: current_user, params: params) }
subject(:service_response) { service.execute }
shared_examples 'protection rule exists' do
it_behaves_like 'returning a success service response'
it { is_expected.to have_attributes(payload: { protection_rule_exists?: true }) }
end
shared_examples 'protection rule does not exist' do
it_behaves_like 'returning a success service response'
it { is_expected.to have_attributes(payload: { protection_rule_exists?: false }) }
end
shared_examples 'error response for unauthorized actor' do
it_behaves_like 'returning an error service response', message: 'Unauthorized'
it { is_expected.to have_attributes reason: :unauthorized }
end
shared_examples 'raising an error for invalid param :action' do
it 'raises an error' do
expect { service_response }.to raise_error(ArgumentError, 'Invalid param :action')
end
end
describe '#execute', :enable_admin_mode do
# rubocop:disable Layout/LineLength -- Avoid formatting to ensure one-line table syntax
where(:action, :repository_path_pattern, :current_user, :expected_shared_example) do
:push | lazy { "#{project.full_path}/protected" } | ref(:project_developer) | 'protection rule exists'
:push | lazy { "#{project.full_path}/protected" } | ref(:project_maintainer) | 'protection rule exists'
:push | lazy { "#{project.full_path}/protected" } | ref(:project_owner) | 'protection rule does not exist'
:push | lazy { "#{project.full_path}/protected" } | ref(:instance_admin) | 'protection rule does not exist'
:push | lazy { "#{project.full_path}/protected" } | ref(:project_deploy_token) | 'protection rule exists'
:push | lazy { "#{project.full_path}/protected" } | ref(:other_deploy_token) | 'error response for unauthorized actor'
:push | lazy { "other/#{project.full_path}/protected" } | ref(:project_developer) | 'protection rule does not exist'
:push | lazy { "other/#{project.full_path}/protected" } | ref(:project_owner) | 'protection rule does not exist'
:push | lazy { "other/#{project.full_path}/protected" } | ref(:project_deploy_token) | 'protection rule does not exist'
:push | lazy { "other/#{project.full_path}/protected" } | ref(:instance_admin) | 'protection rule does not exist'
:delete | lazy { "#{project.full_path}/protected" } | ref(:project_developer) | 'protection rule exists'
:delete | lazy { "#{project.full_path}/protected" } | ref(:project_maintainer) | 'protection rule exists'
:delete | lazy { "#{project.full_path}/protected" } | ref(:project_owner) | 'protection rule exists'
:delete | lazy { "#{project.full_path}/protected" } | ref(:project_deploy_token) | 'error response for unauthorized actor'
:delete | lazy { "#{project.full_path}/protected" } | ref(:instance_admin) | 'protection rule does not exist'
:delete | lazy { "other/#{project.full_path}/protected" } | ref(:project_maintainer) | 'protection rule does not exist'
:delete | lazy { "other/#{project.full_path}/protected" } | ref(:project_owner) | 'protection rule does not exist'
:delete | lazy { "other/#{project.full_path}/protected" } | ref(:project_deploy_token) | 'error response for unauthorized actor'
      # Edge cases
:push | lazy { "#{project.full_path}/protected" } | ref(:unauthorized_user) | 'error response for unauthorized actor'
:push | lazy { "#{project.full_path}/protected" } | nil | 'error response for unauthorized actor'
:push | '' | ref(:project_developer) | 'protection rule does not exist'
:push | nil | ref(:project_developer) | 'protection rule does not exist'
:delete | lazy { "#{project.full_path}/protected" } | ref(:unauthorized_user) | 'error response for unauthorized actor'
:other | lazy { "#{project.full_path}/protected" } | ref(:project_developer) | 'raising an error for invalid param :action'
nil | lazy { "#{project.full_path}/protected" } | ref(:project_developer) | 'raising an error for invalid param :action'
end
# rubocop:enable Layout/LineLength
with_them do
it_behaves_like params[:expected_shared_example]
end
context 'for unexpected current_user' do
let(:current_user) { Object.new }
let(:action) { :push }
let(:repository_path_pattern) { "#{project.full_path}/protected" }
before do
allow(service).to receive(:can?).and_return(true)
end
it 'raises an error' do
expect { service_response }.to raise_error(ArgumentError, "Invalid user")
end
end
end
end

View File

@ -1,143 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for the service that creates a new Maven virtual registry cache entry
# (or refreshes an existing one) from an uploaded file.
RSpec.describe VirtualRegistries::Packages::Maven::Cache::Entries::CreateOrUpdateService, :aggregate_failures, feature_category: :virtual_registry do
let_it_be(:registry) { create(:virtual_registries_packages_maven_registry) }
let_it_be(:project) { create(:project, namespace: registry.group) }
let_it_be(:user) { create(:user, owner_of: project) }
let_it_be(:path) { 'com/test/package/1.2.3/package-1.2.3.pom' }
let_it_be(:upstream) { create(:virtual_registries_packages_maven_upstream, registry: registry) }
let(:etag) { 'test' }
let(:content_type) { 'text/xml' }
let(:params) { { path: path, file: file, etag: etag, content_type: content_type } }
# Uploaded-file fixture with fixed sha1/md5 digests; the same digest values
# are asserted on the created cache entry below.
let(:file) do
UploadedFile.new(
Tempfile.new(etag).path,
sha1: '4e1243bd22c66e76c2ba9eddc1f91394e57f9f83',
md5: 'd8e8fca2dc0f896fd7cb4cb0031ba249'
)
end
let(:service) do
described_class.new(upstream: upstream, current_user: user, params: params)
end
describe '#execute' do
subject(:execute) { service.execute }
shared_examples 'returning a service response success response' do
# Expects one new cache entry whose attributes mirror the request params.
# +with_md5+ is overridable because MD5 is skipped in FIPS mode (see below).
shared_examples 'creating a new cache entry' do |with_md5: 'd8e8fca2dc0f896fd7cb4cb0031ba249'|
it 'returns a success service response', :freeze_time do
expect { execute }.to change { upstream.cache_entries.count }.by(1)
expect(execute).to be_success
last_cache_entry = upstream.cache_entries.last
expect(execute.payload).to eq(cache_entry: last_cache_entry)
expect(last_cache_entry).to have_attributes(
group_id: registry.group.id,
upstream_checked_at: Time.zone.now,
relative_path: "/#{path}",
upstream_etag: etag,
content_type: content_type,
file_sha1: '4e1243bd22c66e76c2ba9eddc1f91394e57f9f83',
file_md5: with_md5
)
end
end
it_behaves_like 'creating a new cache entry'
context 'with a nil content_type' do
let(:params) { super().merge(content_type: nil) }
it 'creates a cache entry with a default content_type' do
expect { execute }.to change { upstream.cache_entries.count }.by(1)
expect(execute).to be_success
expect(upstream.cache_entries.last).to have_attributes(content_type: 'application/octet-stream')
end
end
# Persistence failures must be tracked, not raised, and must not create a row.
context 'with an error' do
it 'returns an error response and log the error' do
expect(::VirtualRegistries::Packages::Maven::Cache::Entry)
.to receive(:create_or_update_by!).and_raise(ActiveRecord::RecordInvalid)
expect(::Gitlab::ErrorTracking).to receive(:track_exception)
.with(
instance_of(ActiveRecord::RecordInvalid),
upstream_id: upstream.id,
group_id: upstream.group_id,
class: described_class.name
)
expect { execute }.not_to change { upstream.cache_entries.count }
end
end
# In FIPS mode no MD5 digest is stored for the entry.
context 'in FIPS mode', :fips_mode do
it_behaves_like 'creating a new cache entry', with_md5: nil
end
end
context 'with a User' do
it_behaves_like 'returning a service response success response'
# When an entry already exists for the relative path, the service updates
# it in place instead of creating a duplicate.
context 'with an existing cache entry' do
let_it_be(:cache_entry) do
create(
:virtual_registries_packages_maven_cache_entry,
group: upstream.group,
upstream: upstream,
relative_path: "/#{path}"
)
end
it 'updates it', :freeze_time do
expect { execute }.to not_change { upstream.cache_entries.count }
expect(execute).to be_success
last_cache_entry = upstream.cache_entries.last
expect(execute.payload).to eq(cache_entry: last_cache_entry)
expect(last_cache_entry).to have_attributes(
upstream_checked_at: Time.zone.now,
upstream_etag: etag
)
end
end
end
# Deploy tokens with read_virtual_registry access behave like users here.
context 'with a DeployToken' do
let_it_be(:user) { create(:deploy_token, :group, groups: [registry.group], read_virtual_registry: true) }
it_behaves_like 'returning a service response success response'
end
# Parameter validation / authorization error responses.
context 'with no path' do
let(:path) { nil }
it { is_expected.to eq(described_class::ERRORS[:path_not_present]) }
end
context 'with no file' do
let(:file) { nil }
it { is_expected.to eq(described_class::ERRORS[:file_not_present]) }
end
context 'with no upstream' do
let_it_be(:upstream) { nil }
it { is_expected.to eq(described_class::ERRORS[:unauthorized]) }
end
context 'with no user' do
let(:user) { nil }
it { is_expected.to eq(described_class::ERRORS[:unauthorized]) }
end
end
end

View File

@ -1,210 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for the service that handles a Maven virtual registry file request:
# it decides whether to serve a cached entry (:download_file), proxy the
# upload through Workhorse (:workhorse_upload_url), or serve a stored digest
# (:download_digest), depending on cache state and the upstream's response.
RSpec.describe VirtualRegistries::Packages::Maven::HandleFileRequestService, :aggregate_failures, :clean_gitlab_redis_shared_state, feature_category: :virtual_registry do
let_it_be(:registry) { create(:virtual_registries_packages_maven_registry, :with_upstream) }
let_it_be(:project) { create(:project, namespace: registry.group) }
let_it_be(:user) { create(:user, owner_of: project) }
let_it_be(:path) { 'com/test/package/1.2.3/package-1.2.3.pom' }
let(:upstream) { registry.upstream }
let(:upstream_resource_url) { upstream.url_for(path) }
let(:etag_returned_by_upstream) { nil }
let(:service) { described_class.new(registry: registry, current_user: user, params: { path: path }) }
describe '#execute' do
subject(:execute) { service.execute }
# Asserts a success response whose payload matches the expected +action+
# and the action-specific params the controller layer consumes.
shared_examples 'returning a service response success response' do |action:|
before do
stub_external_registry_request(etag: etag_returned_by_upstream)
end
it 'returns a success service response' do
expect(service).to receive(:can?).and_call_original
expect(execute).to be_success
expect(execute.payload[:action]).to eq(action)
case action
when :workhorse_upload_url
expect(execute.payload[:action_params]).to eq(url: upstream_resource_url, upstream: upstream)
when :download_file
action_params = execute.payload[:action_params]
expect(action_params[:file]).to be_instance_of(VirtualRegistries::Cache::EntryUploader)
expect(action_params[:content_type]).to eq(cache_entry.content_type)
expect(action_params[:file_sha1]).to be_instance_of(String)
expect(action_params[:file_md5]).to be_instance_of(String)
when :download_digest
expect(execute.payload[:action_params]).to eq(digest: expected_digest)
else
{}
end
end
end
context 'with a User' do
# A :processing entry must never be served; it exists in every sub-context
# to prove it is ignored by the lookup.
let_it_be(:processing_cache_entry) do
create(
:virtual_registries_packages_maven_cache_entry,
:upstream_checked,
:processing,
upstream: registry.upstream,
relative_path: "/#{path}"
)
end
context 'with no cache entry' do
it_behaves_like 'returning a service response success response', action: :workhorse_upload_url
context 'with upstream returning an error' do
before do
stub_external_registry_request(status: 404)
end
it { is_expected.to eq(described_class::ERRORS[:file_not_found_on_upstreams]) }
end
context 'with upstream head raising an error' do
before do
stub_external_registry_request(raise_error: true)
end
it { is_expected.to eq(described_class::ERRORS[:upstream_not_available]) }
end
end
context 'with a cache entry' do
let_it_be_with_refind(:cache_entry) do
create(:virtual_registries_packages_maven_cache_entry,
:upstream_checked,
upstream: registry.upstream,
relative_path: "/#{path}"
)
end
it_behaves_like 'returning a service response success response', action: :download_file
# Stale entries are revalidated against the upstream via etag comparison.
context 'and is too old' do
before do
cache_entry.update!(upstream_checked_at: 1.year.ago)
end
context 'with the same etag as upstream' do
let(:etag_returned_by_upstream) { cache_entry.upstream_etag }
it_behaves_like 'returning a service response success response', action: :download_file
it 'bumps the statistics', :freeze_time do
stub_external_registry_request(etag: etag_returned_by_upstream)
expect { execute }.to change { cache_entry.reload.upstream_checked_at }.to(Time.zone.now)
end
end
context 'with a different etag as upstream' do
let(:etag_returned_by_upstream) { "#{cache_entry.upstream_etag}_test" }
it_behaves_like 'returning a service response success response', action: :workhorse_upload_url
end
context 'with a stored blank etag' do
before do
cache_entry.update!(upstream_etag: nil)
end
it_behaves_like 'returning a service response success response', action: :workhorse_upload_url
end
end
# *.sha1 / *.md5 requests serve the stored digests of the base path's entry.
context 'when accessing the sha1 digest' do
let(:path) { "#{super()}.sha1" }
let(:expected_digest) { cache_entry.file_sha1 }
it_behaves_like 'returning a service response success response', action: :download_digest
context 'when the cache entry does not exist' do
let(:path) { "#{super()}_not_existing.sha1" }
it { is_expected.to eq(described_class::ERRORS[:digest_not_found]) }
end
end
context 'when accessing the md5 digest' do
let(:path) { "#{super()}.md5" }
let(:expected_digest) { cache_entry.file_md5 }
it_behaves_like 'returning a service response success response', action: :download_digest
context 'when the cache entry does not exist' do
let(:path) { "#{super()}_not_existing.md5" }
it { is_expected.to eq(described_class::ERRORS[:digest_not_found]) }
end
context 'in FIPS mode', :fips_mode do
it { is_expected.to eq(described_class::ERRORS[:fips_unsupported_md5]) }
end
end
# A fresh cache entry is served even when the upstream is unreachable.
context 'with upstream head raising an error' do
before do
stub_external_registry_request(raise_error: true)
end
it_behaves_like 'returning a service response success response', action: :download_file
end
context 'with a cached permissions evaluation' do
before do
# Pre-warm the Rails cache entry the service consults before calling can?.
Rails.cache.fetch(service.send(:permissions_cache_key)) do
can?(user, :read_virtual_registry, registry)
end
end
it 'does not call the permissions evaluation again' do
# Fix: was `not_to receive(:can).and_call_original` — the service method
# is `can?` (see the shared example above), so the negative expectation
# could never fail, and rspec-mocks does not support and_call_original
# on a negative message expectation. Spy on the real method name.
expect(service).not_to receive(:can?)
expect(execute).to be_success
end
end
end
end
context 'with a DeployToken' do
let_it_be(:user) { create(:deploy_token, :group, groups: [registry.group], read_virtual_registry: true) }
it_behaves_like 'returning a service response success response', action: :workhorse_upload_url
end
context 'with no path' do
let(:path) { nil }
it { is_expected.to eq(described_class::ERRORS[:path_not_present]) }
end
context 'with no user' do
let(:user) { nil }
it { is_expected.to eq(described_class::ERRORS[:unauthorized]) }
end
context 'with registry with no upstreams' do
before do
registry.upstream = nil
end
it { is_expected.to eq(described_class::ERRORS[:no_upstreams]) }
end
# Stubs the upstream HEAD request issued by the service; can simulate a
# given HTTP status, a returned etag header, or a blocked-URL error.
def stub_external_registry_request(status: 200, raise_error: false, etag: nil)
request = stub_request(:head, upstream_resource_url)
.with(headers: upstream.headers)
if raise_error
request.to_raise(Gitlab::HTTP::BlockedUrlError)
else
request.to_return(status: status, body: '', headers: { 'etag' => etag }.compact)
end
end
end
end

View File

@ -17,12 +17,9 @@ RSpec.describe DependencyProxy::CleanupDependencyProxyWorker, type: :worker, fea
it 'queues the cleanup jobs', :aggregate_failures do
create(:dependency_proxy_blob, :pending_destruction)
create(:dependency_proxy_manifest, :pending_destruction)
create(:virtual_registries_packages_maven_cache_entry, :pending_destruction)
expect(DependencyProxy::CleanupBlobWorker).to receive(:perform_with_capacity).twice
expect(DependencyProxy::CleanupManifestWorker).to receive(:perform_with_capacity).twice
expect(::VirtualRegistries::Packages::Cache::DestroyOrphanEntriesWorker)
.to receive(:perform_with_capacity).twice
subject
end
@ -34,42 +31,10 @@ RSpec.describe DependencyProxy::CleanupDependencyProxyWorker, type: :worker, fea
it 'does not queue the cleanup jobs', :aggregate_failures do
expect(DependencyProxy::CleanupBlobWorker).not_to receive(:perform_with_capacity)
expect(DependencyProxy::CleanupManifestWorker).not_to receive(:perform_with_capacity)
expect(::VirtualRegistries::Packages::Cache::DestroyOrphanEntriesWorker)
.not_to receive(:perform_with_capacity)
subject
end
end
end
context 'with virtual_registry_maven_cleanup_new_worker_class disabled' do
before do
stub_feature_flags(virtual_registry_maven_cleanup_new_worker_class: false)
end
context 'when there are records to be deleted' do
it_behaves_like 'an idempotent worker' do
it 'queues the cleanup jobs', :aggregate_failures do
create(:virtual_registries_packages_maven_cache_entry, :pending_destruction)
expect(::VirtualRegistries::Packages::DestroyOrphanCachedResponsesWorker)
.to receive(:perform_with_capacity).twice
subject
end
end
end
context 'when there are not records to be deleted' do
it_behaves_like 'an idempotent worker' do
it 'does not queue the cleanup jobs', :aggregate_failures do
expect(::VirtualRegistries::Packages::DestroyOrphanCachedResponsesWorker)
.not_to receive(:perform_with_capacity)
subject
end
end
end
end
end
end

View File

@ -6,81 +6,20 @@ RSpec.describe VirtualRegistries::Packages::Cache::DestroyOrphanEntriesWorker, t
let(:worker) { described_class.new }
let(:model) { ::VirtualRegistries::Packages::Maven::Cache::Entry }
it_behaves_like 'an idempotent worker' do
let(:job_args) { [model.name] }
end
it_behaves_like 'worker with data consistency', described_class, data_consistency: :sticky
it 'has a none deduplicate strategy' do
expect(described_class.get_deduplicate_strategy).to eq(:none)
end
describe '#perform_work' do
describe '#perform_work', unless: Gitlab.ee? do
subject(:perform_work) { worker.perform_work(model.name) }
context 'with no work to do' do
it { is_expected.to be_nil }
let_it_be(:cache_entry) { create(:virtual_registries_packages_maven_cache_entry) }
let_it_be(:orphan_cache_entry) do
create(:virtual_registries_packages_maven_cache_entry, :pending_destruction)
end
context 'with work to do' do
let_it_be(:cache_entry) { create(:virtual_registries_packages_maven_cache_entry) }
let_it_be(:orphan_cache_entry) do
create(:virtual_registries_packages_maven_cache_entry, :pending_destruction)
end
it 'destroys orphan cache entries' do
expect(worker).to receive(:log_extra_metadata_on_done).with(:cache_entry_id, orphan_cache_entry.id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:group_id, orphan_cache_entry.group_id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:relative_path, orphan_cache_entry.relative_path)
expect(model).to receive(:next_pending_destruction).and_call_original
expect { perform_work }.to change { model.count }.by(-1)
expect { orphan_cache_entry.reset }.to raise_error(ActiveRecord::RecordNotFound)
end
context 'with an error during deletion' do
before do
allow_next_found_instance_of(model) do |instance|
allow(instance).to receive(:destroy).and_raise(StandardError)
end
end
it 'tracks the error' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
instance_of(StandardError), class: described_class.name
)
expect { perform_work }.to change { model.error.count }.by(1)
end
end
context 'when trying to update a destroyed record' do
before do
allow_next_found_instance_of(model) do |instance|
destroy_method = instance.method(:destroy!)
allow(instance).to receive(:destroy!) do
destroy_method.call
raise StandardError
end
end
end
it 'does not change the status to error' do
expect(Gitlab::ErrorTracking).to receive(:log_exception)
.with(instance_of(StandardError), class: described_class.name)
expect { perform_work }.not_to change { model.error.count }
end
end
end
it { expect { perform_work }.to not_change { model.count } }
end
describe '#max_running_jobs' do
let(:capacity) { described_class::MAX_CAPACITY }
describe '#remaining_work_count', unless: Gitlab.ee? do
subject { worker.remaining_work_count(model.name) }
subject { worker.max_running_jobs }
it { is_expected.to eq(capacity) }
it { is_expected.to eq(0) }
end
end

View File

@ -6,81 +6,14 @@ RSpec.describe VirtualRegistries::Packages::DestroyOrphanCachedResponsesWorker,
let(:worker) { described_class.new }
let(:model) { ::VirtualRegistries::Packages::Maven::Cache::Entry }
it_behaves_like 'an idempotent worker' do
let(:job_args) { [model.name] }
end
it_behaves_like 'worker with data consistency', described_class, data_consistency: :sticky
it 'has a none deduplicate strategy' do
expect(described_class.get_deduplicate_strategy).to eq(:none)
end
describe '#perform_work' do
describe '#perform_work', unless: Gitlab.ee? do
subject(:perform_work) { worker.perform_work(model.name) }
context 'with no work to do' do
it { is_expected.to be_nil }
let_it_be(:cache_entry) { create(:virtual_registries_packages_maven_cache_entry) }
let_it_be(:orphan_cache_entry) do
create(:virtual_registries_packages_maven_cache_entry, :pending_destruction)
end
context 'with work to do' do
let_it_be(:cache_entry) { create(:virtual_registries_packages_maven_cache_entry) }
let_it_be(:orphan_cache_entry) do
create(:virtual_registries_packages_maven_cache_entry, :pending_destruction)
end
it 'destroys orphan cache entries' do
expect(worker).to receive(:log_extra_metadata_on_done).with(:cache_entry_id, orphan_cache_entry.id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:group_id, orphan_cache_entry.group_id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:relative_path, orphan_cache_entry.relative_path)
expect(model).to receive(:next_pending_destruction).and_call_original
expect { perform_work }.to change { model.count }.by(-1)
expect { orphan_cache_entry.reset }.to raise_error(ActiveRecord::RecordNotFound)
end
context 'with an error during deletion' do
before do
allow_next_found_instance_of(model) do |instance|
allow(instance).to receive(:destroy).and_raise(StandardError)
end
end
it 'tracks the error' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
instance_of(StandardError), class: described_class.name
)
expect { perform_work }.to change { model.error.count }.by(1)
end
end
context 'when trying to update a destroyed record' do
before do
allow_next_found_instance_of(model) do |instance|
destroy_method = instance.method(:destroy!)
allow(instance).to receive(:destroy!) do
destroy_method.call
raise StandardError
end
end
end
it 'does not change the status to error' do
expect(Gitlab::ErrorTracking).to receive(:log_exception)
.with(instance_of(StandardError), class: described_class.name)
expect { perform_work }.not_to change { model.error.count }
end
end
end
end
describe '#max_running_jobs' do
let(:capacity) { described_class::MAX_CAPACITY }
subject { worker.max_running_jobs }
it { is_expected.to eq(capacity) }
it { expect { perform_work }.to not_change { model.count } }
end
end

View File

@ -146,6 +146,10 @@ mapping:
- 'ee/spec/requests/api/graphql/remote_development/namespace/remote_development_cluster_agents/*_spec.rb'
- 'ee/spec/requests/api/graphql/remote_development/namespace/workspaces_cluster_agents/*_spec.rb'
- source: 'ee/app/graphql/resolvers/remote_development/organization/cluster_agents_resolver\.rb'
test:
- 'ee/spec/requests/api/graphql/remote_development/organization/workspaces_cluster_agents/*_spec.rb'
- source: 'ee/app/graphql/resolvers/remote_development/workspaces_resolver\.rb'
test:
- 'ee/spec/requests/api/graphql/remote_development/current_user/workspaces/*_spec.rb'