Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent 3e84b0723a
commit 1ad783dfd8

@@ -1,6 +1,6 @@
 include:
   - project: gitlab-org/quality/pipeline-common
-    ref: 8.4.2
+    ref: 8.4.3
     file:
       - /ci/danger-review.yml

@@ -93,14 +93,30 @@ include:
     - bundle exec gem list gitlab_quality-test_tooling
     - |
       if [ "$CREATE_RAILS_TEST_FAILURE_ISSUES" == "true" ]; then
+        input_file="rspec/rspec-${CI_JOB_ID}.json"
+
+        # The actual failures will always be part of the retry report
+        if [ -f "rspec/rspec-retry-${CI_JOB_ID}.json" ]; then
+          input_file="rspec/rspec-retry-${CI_JOB_ID}.json"
+        fi
+
         bundle exec relate-failure-issue \
           --token "${TEST_FAILURES_PROJECT_TOKEN}" \
           --project "gitlab-org/gitlab" \
-          --input-files "rspec/rspec-*.json" \
+          --input-files "${input_file}" \
           --exclude-labels-for-search "QA,rspec:slow test,knapsack_report" \
           --system-log-files "log" \
           --related-issues-file "rspec/${CI_JOB_ID}-failed-test-issues.json";
       fi
+
+      if [[ "$CREATE_RAILS_FLAKY_TEST_ISSUES" == "true" && -f "rspec/rspec-retry-${CI_JOB_ID}.json" ]]; then
+        bundle exec flaky-test-issues \
+          --token "${RAILS_FLAKY_TEST_PROJECT_TOKEN}" \
+          --project "gitlab-org/quality/engineering-productivity/flaky-tests" \
+          --merge_request_iid "$CI_MERGE_REQUEST_IID" \
+          --input-files "rspec/rspec-retry-${CI_JOB_ID}.json" || true # We don't want this command to fail the job.
+      fi
+
       if [ "$CREATE_RAILS_SLOW_TEST_ISSUES" == "true" ]; then
         bundle exec slow-test-issues \
           --token "${TEST_FAILURES_PROJECT_TOKEN}" \
@@ -108,6 +124,7 @@ include:
           --input-files "rspec/rspec-*.json" \
           --related-issues-file "rspec/${CI_JOB_ID}-slow-test-issues.json";
       fi
+
       if [ "$ADD_SLOW_TEST_NOTE_TO_MERGE_REQUEST" == "true" ]; then
         bundle exec slow-test-merge-request-report-note \
           --token "${TEST_SLOW_NOTE_PROJECT_TOKEN}" \
@@ -115,6 +132,7 @@ include:
           --input-files "rspec/rspec-*.json" \
           --merge_request_iid "$CI_MERGE_REQUEST_IID";
       fi
+
       if [ "$ALLOW_KNAPSACK_REPORT_CREATE_ISSUES" == "true" ]; then
         bundle exec knapsack-report-issues \
          --token "${KNAPSACK_REPORT_ISSUES_PROJECT_TOKEN}" \

|
|
|
|||
|
|
@@ -1 +1 @@
-423efd6731985d75009ac4692f36ad039334eb01
+348c0477123886ba100b17e1858340d5453adca0

@@ -47,7 +47,7 @@ export default {
   configHelpLink: helpPagePath('user/clusters/agent/install/index', {
     anchor: 'create-an-agent-configuration-file',
   }),
-  inject: ['gitlabVersion', 'kasVersion'],
+  inject: ['kasVersion'],
   props: {
     agents: {
       required: true,
@@ -121,9 +121,6 @@ export default {
        return { ...agent, versions };
      });
    },
-    serverVersion() {
-      return this.kasVersion || this.gitlabVersion;
-    },
    showPagination() {
      return !this.maxAgents && this.agents.length > this.limit;
    },
@@ -180,12 +177,12 @@ export default {
      const agentVersion = this.getAgentVersionString(agent);
      let allowableAgentVersion = semverInc(agentVersion, 'minor');

-      const isServerPrerelease = Boolean(semverPrerelease(this.serverVersion));
+      const isServerPrerelease = Boolean(semverPrerelease(this.kasVersion));
      if (isServerPrerelease) {
        allowableAgentVersion = semverInc(allowableAgentVersion, 'minor');
      }

-      return semverLt(allowableAgentVersion, this.serverVersion);
+      return semverLt(allowableAgentVersion, this.kasVersion);
    },

    getVersionPopoverTitle(agent) {
@@ -293,7 +290,7 @@ export default {

      <p class="gl-mb-0">
        <gl-sprintf :message="$options.i18n.versionOutdatedText">
-          <template #version>{{ serverVersion }}</template>
+          <template #version>{{ kasVersion }}</template>
        </gl-sprintf>
        <gl-link :href="$options.versionUpdateLink" class="gl-font-sm">
          {{ $options.i18n.viewDocsText }}</gl-link
@@ -306,7 +303,7 @@ export default {

      <p v-else-if="isVersionOutdated(item)" class="gl-mb-0">
        <gl-sprintf :message="$options.i18n.versionOutdatedText">
-          <template #version>{{ serverVersion }}</template>
+          <template #version>{{ kasVersion }}</template>
        </gl-sprintf>
        <gl-link :href="$options.versionUpdateLink" class="gl-font-sm">
          {{ $options.i18n.viewDocsText }}</gl-link

@@ -80,10 +80,10 @@ export const I18N_AGENT_TABLE = {
   ),
   versionOutdatedTitle: s__('ClusterAgents|Agent version update required'),
   versionOutdatedText: s__(
-    'ClusterAgents|Your agent version is out of sync with your GitLab version (v%{version}), which might cause compatibility problems. Update the agent installed on your cluster to the most recent version.',
+    'ClusterAgents|Your agent version is out of sync with your GitLab KAS version (v%{version}), which might cause compatibility problems. Update the agent installed on your cluster to the most recent version.',
   ),
   versionMismatchOutdatedTitle: s__('ClusterAgents|Agent version mismatch and update'),
-  viewDocsText: s__('ClusterAgents|How to update an agent?'),
+  viewDocsText: s__('ClusterAgents|How do I update an agent?'),
   defaultConfigText: s__('ClusterAgents|Default configuration'),
   defaultConfigTooltip: s__('ClusterAgents|What is default configuration?'),
   sharedBadgeText: s__('ClusterAgents|shared'),

@@ -29,7 +29,6 @@ export default () => {
     clustersEmptyStateImage,
     canAddCluster,
     canAdminCluster,
-    gitlabVersion,
     kasVersion,
     displayClusterAgents,
     certificateBasedClustersEnabled,
@@ -48,7 +47,6 @@
      clustersEmptyStateImage,
      canAddCluster: parseBoolean(canAddCluster),
      canAdminCluster: parseBoolean(canAdminCluster),
-      gitlabVersion,
      kasVersion,
      displayClusterAgents: parseBoolean(displayClusterAgents),
      certificateBasedClustersEnabled: parseBoolean(certificateBasedClustersEnabled),

@@ -19,7 +19,7 @@ export const FAILURE_REASONS = {
   need_rebase: __('Merge request must be rebased, because a fast-forward merge is not possible.'),
   not_approved: __('All required approvals must be given.'),
   policies_denied: __('Denied licenses must be removed or approved.'),
-  merge_request_blocked: __('Merge request dependencies have been merged.'),
+  merge_request_blocked: __('Merge request dependencies must be merged.'),
   status_checks_must_pass: __('Status checks must pass.'),
   jira_association_missing: __('Either the title or description must reference a Jira issue.'),
 };

@@ -41,7 +41,7 @@ module Types
         description: 'Merge request must be approved before merging.'
       value 'BLOCKED_STATUS',
         value: :merge_request_blocked,
-        description: 'Merge request dependencies have been merged.'
+        description: 'Merge request dependencies must be merged.'
       value 'POLICIES_DENIED',
         value: :policies_denied,
         description: 'There are denied policies for the merge request.'

@@ -26,7 +26,6 @@ module ClustersHelper
       default_branch_name: default_branch_name(clusterable),
       project_path: clusterable_project_path(clusterable),
       kas_address: Gitlab::Kas.external_url,
-      gitlab_version: Gitlab.version_info,
       kas_version: Gitlab::Kas.version_info
     }
   end

@@ -17,6 +17,7 @@ module Clusters

     belongs_to :agent, class_name: 'Clusters::Agent', optional: false
     belongs_to :created_by_user, class_name: 'User', optional: true
+    belongs_to :project, default: -> { agent&.project }

     before_save :ensure_token

@@ -232,6 +232,8 @@ class Deployment < ApplicationRecord

       by_project.each do |project, ref_paths|
         project.repository.delete_refs(*ref_paths.flatten)
+      rescue Gitlab::Git::Repository::NoRepository
+        next
       end
     end

@@ -4,7 +4,7 @@ class Packages::Pypi::Metadatum < ApplicationRecord
   self.primary_key = :package_id

   MAX_REQUIRED_PYTHON_LENGTH = 255
-  MAX_KEYWORDS_LENGTH = 255
+  MAX_KEYWORDS_LENGTH = 1024
   MAX_METADATA_VERSION_LENGTH = 16
   MAX_AUTHOR_EMAIL_LENGTH = 2048
   MAX_SUMMARY_LENGTH = 255

@@ -4,19 +4,39 @@ module BranchRules
   class BaseService
     include Gitlab::Allowable

-    attr_reader :project, :branch_rule, :current_user, :params
+    PERMITTED_PARAMS = [].freeze
+    MISSING_METHOD_ERROR = Class.new(StandardError)
+
+    attr_reader :branch_rule, :current_user, :params
+
+    delegate :project, to: :branch_rule, allow_nil: true

     def initialize(branch_rule, user = nil, params = {})
       @branch_rule = branch_rule
-      @project = branch_rule.project
       @current_user = user
-      @params = params.slice(*permitted_params)
+      @params = params.slice(*self.class::PERMITTED_PARAMS)
     end

+    def execute(skip_authorization: false)
+      raise Gitlab::Access::AccessDeniedError unless skip_authorization || authorized?
+
+      return execute_on_branch_rule if branch_rule.instance_of?(Projects::BranchRule)
+
+      ServiceResponse.error(message: 'Unknown branch rule type.')
+    end
+
     private

-    def permitted_params
-      []
+    def execute_on_branch_rule
+      missing_method_error('execute_on_branch_rule')
     end
+
+    def authorized?
+      missing_method_error('authorized?')
+    end
+
+    def missing_method_error(method_name)
+      raise MISSING_METHOD_ERROR, "Please define an `#{method_name}` method in #{self.class.name}"
+    end
   end
 end

@@ -2,23 +2,13 @@

 module BranchRules
   class DestroyService < BaseService
-    def execute
-      raise Gitlab::Access::AccessDeniedError unless can_destroy_branch_rule?
-
-      return destroy_protected_branch if branch_rule.instance_of?(Projects::BranchRule)
-
-      yield if block_given?
-
-      ServiceResponse.error(message: 'Unknown branch rule type.')
-    end
-
     private

-    def can_destroy_branch_rule?
+    def authorized?
       can?(current_user, :destroy_protected_branch, branch_rule)
     end

-    def destroy_protected_branch
+    def execute_on_branch_rule
       service = ProtectedBranches::DestroyService.new(project, current_user)

       return ServiceResponse.success if service.execute(branch_rule.protected_branch)

@@ -4,40 +4,20 @@ module BranchRules
   class UpdateService < BaseService
     PERMITTED_PARAMS = %i[name].freeze

-    attr_reader :skip_authorization
-
-    def execute(skip_authorization: false)
-      @skip_authorization = skip_authorization
-
-      raise Gitlab::Access::AccessDeniedError unless can_update_branch_rule?
-
-      return update_protected_branch if branch_rule.instance_of?(Projects::BranchRule)
-
-      yield if block_given?
-
-      ServiceResponse.error(message: 'Unknown branch rule type.')
-    end
-
     private

-    def permitted_params
-      PERMITTED_PARAMS
+    def authorized?
+      can?(current_user, :update_branch_rule, branch_rule)
     end

-    def can_update_branch_rule?
-      return true if skip_authorization
+    def execute_on_branch_rule
+      protected_branch = ProtectedBranches::UpdateService
+        .new(project, current_user, params)
+        .execute(branch_rule.protected_branch, skip_authorization: true)

-      can?(current_user, :update_protected_branch, branch_rule)
-    end
+      return ServiceResponse.success unless protected_branch.errors.any?

-    def update_protected_branch
-      service = ProtectedBranches::UpdateService.new(project, current_user, params)
-
-      service_response = service.execute(branch_rule.protected_branch, skip_authorization: skip_authorization)
-
-      return ServiceResponse.success unless service_response.errors.any?
-
-      ServiceResponse.error(message: service_response.errors.full_messages)
+      ServiceResponse.error(message: protected_branch.errors.full_messages)
     end
   end
 end

@@ -15,7 +15,7 @@ module Packages
         description: params[:description]&.truncate(::Packages::Pypi::Metadatum::MAX_DESCRIPTION_LENGTH),
         description_content_type: params[:description_content_type],
         summary: params[:summary],
-        keywords: params[:keywords]
+        keywords: params[:keywords]&.truncate(::Packages::Pypi::Metadatum::MAX_KEYWORDS_LENGTH)
       )

       unless meta.valid?

@@ -0,0 +1,9 @@
+---
+migration_job_name: BackfillClusterAgentTokensProjectId
+description: Backfills sharding key `cluster_agent_tokens.project_id` from `cluster_agents`.
+feature_category: deployment_management
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/144939
+milestone: '16.10'
+queued_migration_version: 20240216020106
+finalize_after: '2024-03-22'
+finalized_by: # version of the migration that finalized this BBM

@@ -23,3 +23,4 @@ desired_sharding_key:
         table: cluster_agents
         sharding_key: project_id
         belongs_to: agent
+desired_sharding_key_migration_job_name: BackfillClusterAgentTokensProjectId

@@ -16,3 +16,5 @@ allow_cross_foreign_keys:
 - gitlab_main_clusterwide
 sharding_key:
   project_id: projects
+removed_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/145260
+removed_in_milestone: '16.10'

@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+class AddProjectIdToClusterAgentTokens < Gitlab::Database::Migration[2.2]
+  milestone '16.10'
+  enable_lock_retries!
+
+  def change
+    add_column :cluster_agent_tokens, :project_id, :bigint
+  end
+end

@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class IndexClusterAgentTokensOnProjectId < Gitlab::Database::Migration[2.2]
+  milestone '16.10'
+  disable_ddl_transaction!
+
+  INDEX_NAME = 'index_cluster_agent_tokens_on_project_id'
+
+  def up
+    add_concurrent_index :cluster_agent_tokens, :project_id, name: INDEX_NAME
+  end
+
+  def down
+    remove_concurrent_index_by_name :cluster_agent_tokens, INDEX_NAME
+  end
+end

@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class AddClusterAgentTokensProjectIdFk < Gitlab::Database::Migration[2.2]
+  milestone '16.10'
+  disable_ddl_transaction!
+
+  def up
+    add_concurrent_foreign_key :cluster_agent_tokens, :projects, column: :project_id, on_delete: :cascade
+  end
+
+  def down
+    with_lock_retries do
+      remove_foreign_key :cluster_agent_tokens, column: :project_id
+    end
+  end
+end

@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class UpdatePypiMetadataKeywordsCheckConstraint < Gitlab::Database::Migration[2.2]
+  disable_ddl_transaction!
+  milestone '16.10'
+
+  def up
+    add_text_limit(:packages_pypi_metadata, :keywords, 1024,
+      constraint_name: check_constraint_name(:packages_pypi_metadata, :keywords, 'max_length_1KiB'))
+    remove_text_limit(:packages_pypi_metadata, :keywords, constraint_name: 'check_02be2c39af')
+  end
+
+  def down
+    # no-op: Danger of failing if there are records with length(keywords) > 255
+  end
+end

@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+class AddClusterAgentTokensProjectIdNotNull < Gitlab::Database::Migration[2.2]
+  milestone '16.10'
+  disable_ddl_transaction!
+
+  def up
+    add_not_null_constraint :cluster_agent_tokens, :project_id, validate: false
+  end
+
+  def down
+    remove_not_null_constraint :cluster_agent_tokens, :project_id
+  end
+end

@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+class QueueBackfillClusterAgentTokensProjectId < Gitlab::Database::Migration[2.2]
+  milestone '16.10'
+  restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
+
+  MIGRATION = "BackfillClusterAgentTokensProjectId"
+  DELAY_INTERVAL = 2.minutes
+  BATCH_SIZE = 1000
+  SUB_BATCH_SIZE = 100
+
+  def up
+    queue_batched_background_migration(
+      MIGRATION,
+      :cluster_agent_tokens,
+      :id,
+      :project_id,
+      :cluster_agents,
+      :project_id,
+      :agent_id,
+      job_interval: DELAY_INTERVAL,
+      batch_size: BATCH_SIZE,
+      sub_batch_size: SUB_BATCH_SIZE
+    )
+  end
+
+  def down
+    delete_batched_background_migration(
+      MIGRATION,
+      :cluster_agent_tokens,
+      :id,
+      [
+        :project_id,
+        :cluster_agents,
+        :project_id,
+        :agent_id
+      ]
+    )
+  end
+end

@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+class RegenRemoveForeignKeysUserInteractedProjects < Gitlab::Database::Migration[2.2]
+  milestone '16.10'
+
+  disable_ddl_transaction!
+
+  FOREIGN_KEY_NAME_USERS = "fk_0894651f08"
+  FOREIGN_KEY_NAME_PROJECTS = "fk_722ceba4f7"
+
+  def up
+    return unless table_exists?(:user_interacted_projects)
+
+    with_lock_retries do
+      remove_foreign_key_if_exists(:user_interacted_projects, :users,
+        name: FOREIGN_KEY_NAME_USERS, reverse_lock_order: true)
+    end
+
+    with_lock_retries do
+      remove_foreign_key_if_exists(:user_interacted_projects, :projects,
+        name: FOREIGN_KEY_NAME_PROJECTS, reverse_lock_order: true)
+    end
+  end
+
+  def down
+    return unless table_exists?(:user_interacted_projects)
+
+    add_concurrent_foreign_key(:user_interacted_projects, :users,
+      name: FOREIGN_KEY_NAME_USERS, column: :user_id,
+      target_column: :id, on_delete: :cascade)
+
+    add_concurrent_foreign_key(:user_interacted_projects, :projects,
+      name: FOREIGN_KEY_NAME_PROJECTS, column: :project_id,
+      target_column: :id, on_delete: :cascade)
+  end
+end

@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+class RegenDropUserInteractedProjectsTable < Gitlab::Database::Migration[2.2]
+  milestone '16.10'
+
+  disable_ddl_transaction!
+
+  TABLE_NAME = 'user_interacted_projects'
+  INDEX_NAME = 'index_user_interacted_projects_on_user_id'
+  PRIMARY_KEY_CONSTRAINT = 'user_interacted_projects_pkey'
+
+  def up
+    drop_table :user_interacted_projects, if_exists: true
+  end
+
+  def down
+    unless table_exists?(:user_interacted_projects)
+      create_table :user_interacted_projects, id: false do |t|
+        t.integer :user_id, null: false
+        t.integer :project_id, null: false
+        t.index :user_id, name: INDEX_NAME
+      end
+    end
+
+    execute "ALTER TABLE #{TABLE_NAME} ADD CONSTRAINT #{PRIMARY_KEY_CONSTRAINT} PRIMARY KEY (project_id, user_id)"
+  end
+end

@@ -0,0 +1 @@
+9ecfc2cf4de4a9c2a872bdcd9a5608ad1e94cb9d41fbbae2e6c84c62e3df9c4b

@@ -0,0 +1 @@
+5090b711388f3728c6469b2daab2590c07448d1376f817cb7b3c22f50b2dcc1b

@@ -0,0 +1 @@
+4daead5cff18334334f8450ba86c278d1d8a0196fc764a0711c613b6b062cd15

@@ -0,0 +1 @@
+13ba264dc7308049120beedc9ef24ffd03c0a8adca6dc47082fff46f0dc1ceda

@@ -0,0 +1 @@
+599df82b05596a35544614d691362cc7e7975274d8319b74249647a6cf07fe08

@@ -0,0 +1 @@
+f0934bd542c6b4ae7e7b1af778d6d34c0ae276f68df4ea36036c52f5a5a3e122

@@ -0,0 +1 @@
+7f26f414c7f58e240cdf15975c258535ed591e6fe9ce53111f95269fd69c632f

@@ -0,0 +1 @@
+90da56a3c4bf7b08a518bf5a36e553816bddd386c7cd46eedfb7485f7e1c9737

@@ -6874,6 +6874,7 @@ CREATE TABLE cluster_agent_tokens (
     name text,
     last_used_at timestamp with time zone,
     status smallint DEFAULT 0 NOT NULL,
+    project_id bigint,
     CONSTRAINT check_0fb634d04d CHECK ((name IS NOT NULL)),
     CONSTRAINT check_2b79dbb315 CHECK ((char_length(name) <= 255)),
     CONSTRAINT check_4e4ec5070a CHECK ((char_length(description) <= 1024)),
@@ -12780,8 +12781,8 @@ CREATE TABLE packages_pypi_metadata (
     author_email text,
     description text,
     description_content_type text,
-    CONSTRAINT check_02be2c39af CHECK ((char_length(keywords) <= 255)),
     CONSTRAINT check_0d9aed55b2 CHECK ((required_python IS NOT NULL)),
+    CONSTRAINT check_222e4f5b58 CHECK ((char_length(keywords) <= 1024)),
     CONSTRAINT check_2d3ed32225 CHECK ((char_length(metadata_version) <= 16)),
     CONSTRAINT check_379019d5da CHECK ((char_length(required_python) <= 255)),
     CONSTRAINT check_65d8dbbd9f CHECK ((char_length(author_email) <= 2048)),
@@ -16664,11 +16665,6 @@ CREATE TABLE user_highest_roles (
     highest_access_level integer
 );

-CREATE TABLE user_interacted_projects (
-    user_id integer NOT NULL,
-    project_id integer NOT NULL
-);
-
 CREATE TABLE user_namespace_callouts (
     id bigint NOT NULL,
     user_id bigint NOT NULL,
@@ -20399,6 +20395,9 @@ ALTER TABLE workspaces
 ALTER TABLE vulnerability_scanners
     ADD CONSTRAINT check_37608c9db5 CHECK ((char_length(vendor) <= 255)) NOT VALID;

+ALTER TABLE cluster_agent_tokens
+    ADD CONSTRAINT check_5aff240050 CHECK ((project_id IS NOT NULL)) NOT VALID;
+
 ALTER TABLE sprints
     ADD CONSTRAINT check_ccd8a1eae0 CHECK ((start_date IS NOT NULL)) NOT VALID;

@@ -22058,9 +22057,6 @@ ALTER TABLE ONLY user_group_callouts
 ALTER TABLE ONLY user_highest_roles
     ADD CONSTRAINT user_highest_roles_pkey PRIMARY KEY (user_id);

-ALTER TABLE ONLY user_interacted_projects
-    ADD CONSTRAINT user_interacted_projects_pkey PRIMARY KEY (project_id, user_id);
-
 ALTER TABLE ONLY user_namespace_callouts
     ADD CONSTRAINT user_namespace_callouts_pkey PRIMARY KEY (id);

@@ -24569,6 +24565,8 @@ CREATE INDEX index_cluster_agent_tokens_on_agent_id_status_last_used_at ON clust

 CREATE INDEX index_cluster_agent_tokens_on_created_by_user_id ON cluster_agent_tokens USING btree (created_by_user_id);

+CREATE INDEX index_cluster_agent_tokens_on_project_id ON cluster_agent_tokens USING btree (project_id);
+
 CREATE UNIQUE INDEX index_cluster_agent_tokens_on_token_encrypted ON cluster_agent_tokens USING btree (token_encrypted);

 CREATE INDEX index_cluster_agents_on_created_by_user_id ON cluster_agents USING btree (created_by_user_id);
@@ -27053,8 +27051,6 @@ CREATE INDEX index_user_group_callouts_on_group_id ON user_group_callouts USING

 CREATE INDEX index_user_highest_roles_on_user_id_and_highest_access_level ON user_highest_roles USING btree (user_id, highest_access_level);

-CREATE INDEX index_user_interacted_projects_on_user_id ON user_interacted_projects USING btree (user_id);
-
 CREATE INDEX index_user_namespace_callouts_on_namespace_id ON user_namespace_callouts USING btree (namespace_id);

 CREATE INDEX index_user_permission_export_uploads_on_user_id_and_status ON user_permission_export_uploads USING btree (user_id, status);
@@ -29319,9 +29315,6 @@ ALTER TABLE ONLY sbom_occurrences_vulnerabilities
 ALTER TABLE ONLY abuse_report_user_mentions
     ADD CONSTRAINT fk_088018ecd8 FOREIGN KEY (abuse_report_id) REFERENCES abuse_reports(id) ON DELETE CASCADE;

-ALTER TABLE ONLY user_interacted_projects
-    ADD CONSTRAINT fk_0894651f08 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
-
 ALTER TABLE ONLY merge_request_assignment_events
     ADD CONSTRAINT fk_08f7602bfd FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE;

@@ -29715,6 +29708,9 @@ ALTER TABLE ONLY approval_group_rules
 ALTER TABLE ONLY ci_pipeline_chat_data
     ADD CONSTRAINT fk_64ebfab6b3 FOREIGN KEY (pipeline_id) REFERENCES ci_pipelines(id) ON DELETE CASCADE;

+ALTER TABLE ONLY cluster_agent_tokens
+    ADD CONSTRAINT fk_64f741f626 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
+
 ALTER TABLE p_ci_builds
     ADD CONSTRAINT fk_6661f4f0e8 FOREIGN KEY (resource_group_id) REFERENCES ci_resource_groups(id) ON DELETE SET NULL;

@@ -29757,9 +29753,6 @@ ALTER TABLE ONLY protected_branch_push_access_levels
 ALTER TABLE ONLY integrations
     ADD CONSTRAINT fk_71cce407f9 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;

-ALTER TABLE ONLY user_interacted_projects
-    ADD CONSTRAINT fk_722ceba4f7 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
-
 ALTER TABLE ONLY subscription_user_add_on_assignments
     ADD CONSTRAINT fk_724c2df9a8 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;

@@ -1073,6 +1073,22 @@ end

 You can also [run cleanup on a schedule](../../user/packages/container_registry/reduce_container_registry_storage.md#cleanup-policy).

+To enable cleanup policies for all projects instance-wide, you need to find all projects
+with a container registry, but with the cleanup policy disabled:
+
+```ruby
+# Find all projects where Container registry is enabled, and cleanup policies disabled
+
+projects = Project.find_by_sql ("SELECT * FROM projects WHERE id IN (SELECT project_id FROM container_expiration_policies WHERE enabled=false AND id IN (SELECT project_id FROM container_repositories))")
+
+# Loop through each project
+projects.each do |p|
+
+  # Print project IDs and project full names
+  puts "#{p.id},#{p.full_name}"
+end
+```
+
 ## Container registry metadata database

 DETAILS:

@@ -31342,7 +31342,7 @@ Detailed representation of whether a GitLab merge request can be merged.

 | Value | Description |
 | ----- | ----------- |
-| <a id="detailedmergestatusblocked_status"></a>`BLOCKED_STATUS` | Merge request dependencies have been merged. |
+| <a id="detailedmergestatusblocked_status"></a>`BLOCKED_STATUS` | Merge request dependencies must be merged. |
 | <a id="detailedmergestatusbroken_status"></a>`BROKEN_STATUS` | Can not merge the source into the target branch, potential conflict. |
 | <a id="detailedmergestatuschecking"></a>`CHECKING` | Currently checking for mergeability. |
 | <a id="detailedmergestatusci_must_pass"></a>`CI_MUST_PASS` | Pipeline must succeed before merging. |

@@ -61,7 +61,7 @@ GET groups/:id/access_tokens/:token_id
 | Attribute | Type | required | Description |
 |-----------|---------|----------|---------------------|
 | `id` | integer or string | yes | ID or [URL-encoded path of the group](rest/index.md#namespaced-path-encoding) |
-| `token_id` | integer or string | yes | ID of the group access token |
+| `token_id` | integer | yes | ID of the group access token |

 ```shell
 curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/<group_id>/access_tokens/<token_id>"
@@ -146,8 +146,8 @@ POST /groups/:id/access_tokens/:token_id/rotate

 | Attribute | Type | required | Description |
 |-----------|------------|----------|---------------------|
-| `id` | integer/string | yes | ID or [URL-encoded path of the group](rest/index.md#namespaced-path-encoding) |
-| `token_id` | integer/string | yes | ID of the access token |
+| `id` | integer or string | yes | ID or [URL-encoded path of the group](rest/index.md#namespaced-path-encoding) |
+| `token_id` | integer | yes | ID of the access token |
 | `expires_at` | date | no | Expiration date of the access token in ISO format (`YYYY-MM-DD`). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/416795) in GitLab 16.6. |

 NOTE:
@@ -202,7 +202,7 @@ DELETE groups/:id/access_tokens/:token_id
 | Attribute | Type | required | Description |
 |-----------|---------|----------|---------------------|
 | `id` | integer or string | yes | ID or [URL-encoded path of the group](rest/index.md#namespaced-path-encoding) |
-| `token_id` | integer or string | yes | ID of the group access token |
+| `token_id` | integer | yes | ID of the group access token |

 ```shell
 curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/<group_id>/access_tokens/<token_id>"

@@ -61,7 +61,7 @@ GET projects/:id/access_tokens/:token_id
 | Attribute | Type | required | Description |
 |-----------|---------|----------|---------------------|
 | `id` | integer or string | yes | ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) |
-| `token_id` | integer or string | yes | ID of the project access token |
+| `token_id` | integer | yes | ID of the project access token |

 ```shell
 curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/<project_id>/access_tokens/<token_id>"
@@ -107,15 +107,15 @@ POST projects/:id/access_tokens
 | Attribute | Type | required | Description |
 |-----------|---------|----------|---------------------------------------------------------------------------------------------------------------------------------------|
 | `id` | integer or string | yes | ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) |
-| `name` | String | yes | Name of the project access token |
+| `name` | string | yes | Name of the project access token |
 | `scopes` | `Array[String]` | yes | [List of scopes](../user/project/settings/project_access_tokens.md#scopes-for-a-project-access-token) |
-| `access_level` | Integer | no | Access level. Valid values are `10` (Guest), `20` (Reporter), `30` (Developer), `40` (Maintainer), and `50` (Owner). Defaults to `40`. |
-| `expires_at` | Date | yes | Expiration date of the access token in ISO format (`YYYY-MM-DD`). The date cannot be set later than the [maximum allowable lifetime of an access token](../user/profile/personal_access_tokens.md#when-personal-access-tokens-expire). |
+| `access_level` | integer | no | Access level. Valid values are `10` (Guest), `20` (Reporter), `30` (Developer), `40` (Maintainer), and `50` (Owner). Defaults to `40`. |
+| `expires_at` | date | yes | Expiration date of the access token in ISO format (`YYYY-MM-DD`). The date cannot be set later than the [maximum allowable lifetime of an access token](../user/profile/personal_access_tokens.md#when-personal-access-tokens-expire). |

 ```shell
 curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
   --header "Content-Type:application/json" \
-  --data '{ "name":"test_token", "scopes":["api", "read_repository"], "expires_at":"2021-01-31", "access_level": 30 }' \
+  --data '{ "name":"test_token", "scopes":["api", "read_repository"], "expires_at":"2021-01-31", "access_level":30 }' \
   "https://gitlab.example.com/api/v4/projects/<project_id>/access_tokens"
 ```

@@ -155,8 +155,8 @@ POST /projects/:id/access_tokens/:token_id/rotate

 | Attribute | Type | required | Description |
 |-----------|------------|----------|---------------------|
-| `id` | integer/string | yes | ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) |
-| `token_id` | integer/string | yes | ID of the project access token |
+| `id` | integer or string | yes | ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) |
+| `token_id` | integer | yes | ID of the project access token |
 | `expires_at` | date | no | Expiration date of the access token in ISO format (`YYYY-MM-DD`). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/416795) in GitLab 16.6. |

 NOTE:
@@ -211,7 +211,7 @@ DELETE projects/:id/access_tokens/:token_id
 | Attribute | Type | required | Description |
 |-----------|---------|----------|---------------------|
 | `id` | integer or string | yes | ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding) |
-| `token_id` | integer or string | yes | ID of the project access token |
+| `token_id` | integer | yes | ID of the project access token |

 ```shell
 curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/<project_id>/access_tokens/<token_id>"

@@ -211,18 +211,18 @@ The infrastructure is multifaceted and all teams have a role in setting up the c
 The `Confidence` column refers to how confident we are with the specific domain and its path forward for Cells.
 When we have a blueprint merged ideally the confidence should move to 👍 because we have a blueprint that provides direction to that domain.

-| Domain                           | Owner                             | Blueprint                          | Confidence |
-|----------------------------------|-----------------------------------|------------------------------------|------------|
-| Routing                          | group::tenant scale               | [Blueprint](../routing-service.md) | 👍         |
-| Cell Control Plane               | group::Delivery/team::Foundations | To-Do                              | 👎         |
-| Cell Sizing                      | team::Scalability-Observability   | To-Do                              | 👎         |
-| CI Runners                       | team::Scalability-Practices       | To-Do                              | 👎         |
-| Databases                        | team::Database Reliability        | To-Do                              | 👎         |
-| Deployments                      | group::Delivery                   | [Blueprint](deployments.md)        | 👍         |
-| Observability                    | team::Scalability-Observability   | To-Do                              | 👎         |
-| Cell Architecture and Tooling    | team::Foundations                 | To-Do                              | 👎         |
-| Provisioning                     | team::Foundations                 | To-Do                              | 👎         |
-| Configuration Management/Rollout | team::Foundations                 | To-Do                              | 👎         |
+| Domain                           | Owner                             | Blueprint                                                                 | Confidence |
+|----------------------------------|-----------------------------------|---------------------------------------------------------------------------|------------|
+| Routing                          | group::tenant scale               | [Blueprint](../routing-service.md)                                        | 👍         |
+| Cell Control Plane               | group::Delivery/team::Foundations | To-Do                                                                     | 👎         |
+| Cell Sizing                      | team::Scalability-Observability   | [To-Do](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/2838) | 👎         |
+| CI Runners                       | team::Scalability-Practices       | To-Do                                                                     | 👎         |
+| Databases                        | team::Database Reliability        | [To-Do](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/144238)     | 👎         |
+| Deployments                      | group::Delivery                   | [Blueprint](deployments.md)                                               | 👍         |
+| Observability                    | team::Scalability-Observability   | [To-Do](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/143672)     | 👎         |
+| Cell Architecture and Tooling    | team::Foundations                 | [To-Do](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/1209)       | 👎         |
+| Provisioning                     | team::Foundations                 | To-Do                                                                     | 👎         |
+| Configuration Management/Rollout | team::Foundations                 | To-Do                                                                     | 👎         |

 ```plantuml
 @startuml

@@ -74,9 +74,9 @@ Assumptions:
 1. `Security::StoreGroupedScansService` calls `Security::StoreScanService`.
 1. `Security::StoreScanService` calls `Security::StoreFindingsService`.
 1. `ScanSecurityReportSecretsWorker` calls `Security::TokenRevocationService` to automatically revoke any leaked keys that were detected.
-1. At this point we **only** have `Security::Finding` records as these findings are not present in the default branch of the project.

-At this point, the following things can happen to the `Security::Finding` which would result in its promotion to a `Vulnerability::Finding` with a respective `Vulnerability` record:
+At this point we **only** have `Security::Finding` records, rather than `Vulnerability` records, as these findings are not present in the default branch of the project.
+Some of the scenarios where these `Security::Finding` records may be promoted to `Vulnerability` records are described below.

 ### Scan runs in a pipeline for the default branch

@@ -19,6 +19,9 @@ GitLab can check your application for security vulnerabilities including:
 <i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
 For an overview of GitLab application security, see [Shifting Security Left](https://www.youtube.com/watch?v=XnYstHObqlA&t).

+For a click-through demo, see [Integrating security to the pipeline](https://gitlab.navattic.com/gitlab-scans).
+<!-- Demo published on 2023-02-23 -->
+
 Statistics and details on vulnerabilities are included in the merge request. Providing
 actionable information _before_ changes are merged enables you to be proactive.

@@ -26,12 +26,10 @@ rate limits and speed up your pipelines. For more information about the Docker R
 You can view the container registry for a project or group.

 1. On the left sidebar, select **Search or go to** and find your project or group.
-1. For:
-   - A group, select **Deploy > Container Registry**.
-   - A project, select **Deploy > Container Registry**.
+1. Select **Deploy > Container Registry**.

 You can search, sort, filter, and [delete](delete_container_registry_images.md#use-the-gitlab-ui)
-your container images. You can share a filtered view by copying the URL from your browser.
+your container images. You can share a filtered view by copying the URL from your browser.

 Only members of the project or group can access the container registry for a private project.
 Container images downloaded from a private registry may be [available to other users in an instance runner](https://docs.gitlab.com/runner/security/index.html#usage-of-private-docker-images-with-if-not-present-pull-policy).
@@ -43,9 +41,7 @@ If a project is public, the container registry is also public.
 You can use the container registry **Tag Details** page to view a list of tags associated with a given container image:

 1. On the left sidebar, select **Search or go to** and find your project or group.
-1. For:
-   - A group, select **Deploy > Container Registry**.
-   - A project, select **Deploy > Container Registry**.
+1. Select **Deploy > Container Registry**.
 1. Select your container image.

 You can view details about each tag, such as when it was published, how much storage it consumes,
@@ -59,9 +55,7 @@ tags on this page. You can share a filtered view by copying the URL from your br
 To download and run a container image hosted in the container registry:

 1. On the left sidebar, select **Search or go to** and find your project or group.
-1. For:
-   - A group, select **Deploy > Container Registry**.
-   - A project, select **Deploy > Container Registry**.
+1. Select **Deploy > Container Registry**.
 1. Find the container image you want to work with and select **Copy**.

 ![Container Registry image list](img/container_registry_hover_path_13_4.png)

@@ -59,7 +59,7 @@ Your passwords must meet a set of requirements when:
 - You choose a new password using the forgotten password reset flow.
 - You change your password proactively.
 - You change your password after it expires.
-- An an administrator creates your account.
+- An administrator creates your account.
 - An administrator updates your account.

 By default GitLab enforces the following password requirements:

@@ -202,13 +202,20 @@ To restore a project marked for deletion:
 ## Archive a project

 When you archive a project, the repository, packages, issues, merge requests, and all
-other features become read-only. Archived projects are:
+other features become read-only, with the exception of active pipeline schedules.
+
+Archived projects are:

 - Labeled with an `archived` badge on the project page.
 - Listed on the group page in the **Archived projects** tab.
 - Hidden from project lists in **Your Work** and **Explore**.
 - Read-only.

+Prerequisites:
+
+- [Deactivate](../../ci/pipelines/schedules.md#edit-a-pipeline-schedule) or delete any active pipeline schedules for the project.
+<!-- LP: Remove this prerequisite after the issue is resolved (when a project is archived, active pipeline schedules continue to run). -->
+
 To archive a project:

 1. On the left sidebar, select **Search or go to** and find your project.

@@ -3,7 +3,7 @@
 module Keeps
   module Helpers
     class Milestones
-      RELEASES_YML_URL = "https://gitlab.com/gitlab-com/www-gitlab-com/-/blob/master/data/releases.yml"
+      RELEASES_YML_URL = "https://gitlab.com/gitlab-com/www-gitlab-com/-/raw/master/data/releases.yml"
       Error = Class.new(StandardError)
       Milestone = Struct.new(:version, :date, keyword_init: true)
@@ -30,7 +30,7 @@ module Keeps

       def milestones
         @milestones ||= fetch_milestones.map do |milestone|
-          Milestone.new(**milestone)
+          Milestone.new(**milestone.slice('version', 'date'))
         end
       end

@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+module Gitlab
+  module BackgroundMigration
+    # rubocop: disable Migration/BackgroundMigrationBaseClass -- BackfillDesiredShardingKeyJob inherits from BatchedMigrationJob.
+    class BackfillClusterAgentTokensProjectId < BackfillDesiredShardingKeyJob
+      operation_name :backfill_cluster_agent_tokens_project_id
+      feature_category :deployment_management
+    end
+    # rubocop: enable Migration/BackgroundMigrationBaseClass
+  end
+end

@@ -7,20 +7,33 @@ module Gitlab
       include Gitlab::Database::MigrationHelpers
       include Gitlab::Database::SchemaHelpers

+      COL_NAME = :id
+      SequenceError = Class.new(StandardError)
+
       def ensure_unique_id(table_name)
         function_name = "assign_#{table_name}_id_value"
         trigger_name = "assign_#{table_name}_id_trigger"
+        sequences = existing_sequence(table_name, COL_NAME)
+
+        if sequences.many? || sequences.none?
+          raise(SequenceError, <<~MESSAGE)
+            Expected to find only one sequence for #{table_name}(#{COL_NAME}) but found #{sequences.size}.
+            Please ensure that there is only one sequence before proceeding.
+            Found sequences: #{sequences.map(&:seq_name)}
+          MESSAGE
+        end

         return if trigger_exists?(table_name, trigger_name)

-        change_column_default(table_name, :id, nil)
+        sequence_name = sequences.first.seq_name
+        change_column_default(table_name, COL_NAME, nil)

         create_trigger_function(function_name) do
           <<~SQL
             IF NEW."id" IS NOT NULL THEN
               RAISE WARNING 'Manually assigning ids is not allowed, the value will be ignored';
             END IF;
-            NEW."id" := nextval(\'#{existing_sequence(table_name)}\'::regclass);
+            NEW."id" := nextval(\'#{sequence_name}\'::regclass);
             RETURN NEW;
           SQL
         end
@@ -30,8 +43,11 @@ module Gitlab

       private

-      def existing_sequence(table_name)
-        Gitlab::Database::PostgresSequence.by_table_name(table_name).first.seq_name
+      def existing_sequence(table_name, col_name)
+        @existing_sequence ||= Gitlab::Database::PostgresSequence
+          .by_table_name(table_name)
+          .by_col_name(col_name)
+          .to_a
       end
     end
   end

@@ -7,6 +7,7 @@ module Gitlab
       self.primary_key = :seq_name

       scope :by_table_name, ->(table_name) { where(table_name: table_name) }
+      scope :by_col_name, ->(col_name) { where(col_name: col_name) }
     end
   end
 end

@@ -11506,7 +11506,7 @@ msgstr ""
 msgid "ClusterAgents|How do I register an agent?"
 msgstr ""

-msgid "ClusterAgents|How to update an agent?"
+msgid "ClusterAgents|How do I update an agent?"
 msgstr ""

 msgid "ClusterAgents|Install using Helm (recommended)"
@@ -11658,7 +11658,7 @@ msgstr ""
 msgid "ClusterAgents|You will need to create a token to connect to your agent"
 msgstr ""

-msgid "ClusterAgents|Your agent version is out of sync with your GitLab version (v%{version}), which might cause compatibility problems. Update the agent installed on your cluster to the most recent version."
+msgid "ClusterAgents|Your agent version is out of sync with your GitLab KAS version (v%{version}), which might cause compatibility problems. Update the agent installed on your cluster to the most recent version."
 msgstr ""

 msgid "ClusterAgents|Your instance doesn't have the %{linkStart}GitLab Agent Server (KAS)%{linkEnd} set up. Ask a GitLab Administrator to install it."
@@ -30793,7 +30793,7 @@ msgstr ""
 msgid "Merge request dependencies"
 msgstr ""

-msgid "Merge request dependencies have been merged."
+msgid "Merge request dependencies must be merged."
 msgstr ""

 msgid "Merge request events"

@@ -342,15 +342,6 @@ function retry_failed_rspec_examples() {

   # The tests are flaky because they succeeded after being retried.
   if [[ $rspec_run_status -eq 0 ]]; then
-    # "53557338" is the project ID of https://gitlab.com/gitlab-org/quality/engineering-productivity/flaky-tests
-    if [ "$CREATE_RAILS_FLAKY_TEST_ISSUES" == "true" ]; then
-      bundle exec flaky-test-issues \
-        --token "${RAILS_FLAKY_TEST_PROJECT_TOKEN}" \
-        --project "53557338" \
-        --merge_request_iid "$CI_MERGE_REQUEST_IID" \
-        --input-files "rspec/rspec-retry-*.json" || true # We don't want this command to fail the job.
-    fi
-
     # Make the pipeline "pass with warnings" if the flaky tests are part of this MR.
     warn_on_successfully_retried_test
   fi

@@ -12,7 +12,6 @@ const defaultConfigHelpUrl =
   '/help/user/clusters/agent/install/index#create-an-agent-configuration-file';

 const provideData = {
-  gitlabVersion: '14.8',
   kasVersion: '14.8.0',
 };
 const defaultProps = {
@@ -159,7 +158,7 @@ describe('AgentTable', () => {

     beforeEach(() => {
       createWrapper({
-        provide: { gitlabVersion: '14.8', kasVersion },
+        provide: { kasVersion },
         propsData: { agents: [currentAgent] },
       });
     });

@@ -62,7 +62,12 @@ describe('diffs/components/app', () => {

   const codeQualityAndSastQueryHandlerSuccess = jest.fn().mockResolvedValue({});

-  function createComponent(props = {}, extendStore = () => {}, provisions = {}, baseConfig = {}) {
+  const createComponent = ({
+    props = {},
+    extendStore = () => {},
+    provisions = {},
+    baseConfig = {},
+  }) => {
     fakeApollo = createMockApollo([
       [getMRCodequalityAndSecurityReports, codeQualityAndSastQueryHandlerSuccess],
     ]);
@@ -106,7 +111,7 @@ describe('diffs/components/app', () => {
       provide,
       store,
     });
-  }
+  };

   beforeEach(() => {
     stubPerformanceWebAPI();
@@ -127,112 +132,87 @@ describe('diffs/components/app', () => {
   });

   describe('fetch diff methods', () => {
-    beforeEach(() => {
+    it('calls batch methods if diffsBatchLoad is enabled', async () => {
+      jest.spyOn(window, 'requestIdleCallback').mockImplementation((fn) => fn());
+      createComponent({});
+      jest.spyOn(store, 'dispatch');
+      await wrapper.vm.fetchData(false);
+
+      expect(store.dispatch.mock.calls).toEqual([
+        ['diffs/fetchDiffFilesMeta', undefined],
+        ['diffs/fetchDiffFilesBatch', false],
+        ['diffs/fetchCoverageFiles', undefined],
+      ]);
+    });
+
+    it('diff counter to update after fetch with changes', async () => {
       const fetchResolver = () => {
         store.state.diffs.retrievingBatches = false;
         store.state.notes.doneFetchingBatchDiscussions = true;
         store.state.notes.discussions = [];
         return Promise.resolve({ real_size: 100 });
       };
       jest.spyOn(window, 'requestIdleCallback').mockImplementation((fn) => fn());
-      createComponent();
+      createComponent({});
       jest.spyOn(wrapper.vm, 'fetchDiffFilesMeta').mockImplementation(fetchResolver);
       jest.spyOn(wrapper.vm, 'fetchDiffFilesBatch').mockImplementation(fetchResolver);
       jest.spyOn(wrapper.vm, 'fetchCoverageFiles').mockImplementation(fetchResolver);
       jest.spyOn(wrapper.vm, 'setDiscussions').mockImplementation(() => {});
       store.state.diffs.retrievingBatches = true;
       store.state.diffs.diffFiles = [];
-      return nextTick();
-    });
-
-    it('calls batch methods if diffsBatchLoad is enabled, and not latest version', async () => {
       expect(wrapper.vm.diffFilesLength).toEqual(0);
-      wrapper.vm.fetchData(false);
-
-      await nextTick();
-
-      expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
-      expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
-      expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
-      expect(wrapper.vm.diffFilesLength).toBe(100);
+      await wrapper.vm.fetchData(false);
+      expect(wrapper.vm.diffFilesLength).toEqual(100);
     });

-    it('calls batch methods if diffsBatchLoad is enabled, and latest version', async () => {
-      expect(wrapper.vm.diffFilesLength).toEqual(0);
-      wrapper.vm.fetchData(false);
-
-      await nextTick();
-
-      expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
-      expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
-      expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
-      expect(wrapper.vm.diffFilesLength).toBe(100);
-    });
-  });
-
-  describe('fetch diff with no changes', () => {
-    beforeEach(() => {
+    it('diff counter to update after fetch with no changes', async () => {
       const fetchResolver = () => {
         store.state.diffs.retrievingBatches = false;
         return Promise.resolve({ real_size: null });
       };

-      createComponent();
+      createComponent({});
       jest.spyOn(wrapper.vm, 'fetchDiffFilesMeta').mockImplementation(fetchResolver);
-
-      return nextTick();
-    });
-
-    it('diff counter to be 0 after fetch', async () => {
       expect(wrapper.vm.diffFilesLength).toEqual(0);
-      wrapper.vm.fetchData(false);
-
-      await nextTick();
-
-      expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
+      await wrapper.vm.fetchData(false);
       expect(wrapper.vm.diffFilesLength).toEqual(0);
     });
   });

   describe('codequality diff', () => {
     it('does not fetch code quality data on FOSS', () => {
-      createComponent();
+      createComponent({});
       expect(codeQualityAndSastQueryHandlerSuccess).not.toHaveBeenCalled();
     });
   });

   describe('SAST diff', () => {
     it('does not fetch Sast data on FOSS', () => {
-      createComponent();
+      createComponent({});
       expect(codeQualityAndSastQueryHandlerSuccess).not.toHaveBeenCalled();
     });
   });

   it('displays loading icon on loading', () => {
-    createComponent({}, ({ state }) => {
-      state.diffs.isLoading = true;
+    createComponent({
+      extendStore: ({ state }) => {
+        state.diffs.isLoading = true;
+      },
     });

     expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
   });

   it('displays loading icon on batch loading', () => {
-    createComponent({}, ({ state }) => {
-      state.diffs.batchLoadingState = 'loading';
+    createComponent({
+      extendStore: ({ state }) => {
+        state.diffs.batchLoadingState = 'loading';
+      },
     });

     expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
   });

   it('displays diffs container when not loading', () => {
-    createComponent();
+    createComponent({});

     expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
     expect(wrapper.find('#diffs').exists()).toBe(true);
   });

   it('does not show commit info', () => {
-    createComponent();
+    createComponent({});

     expect(wrapper.find('.blob-commit-info').exists()).toBe(false);
   });
@@ -243,9 +223,7 @@ describe('diffs/components/app', () => {
   });

   it('sets highlighted row if hash exists in location object', async () => {
-    createComponent({
-      shouldShow: true,
-    });
+    createComponent({ props: { shouldShow: true } });

     // Component uses $nextTick so we wait until that has finished
     await nextTick();
@@ -254,9 +232,7 @@ describe('diffs/components/app', () => {
   });

   it('marks current diff file based on currently highlighted row', async () => {
-    createComponent({
-      shouldShow: true,
-    });
+    createComponent({ props: { shouldShow: true } });

     // Component uses $nextTick so we wait until that has finished
     await nextTick();
@@ -264,7 +240,7 @@ describe('diffs/components/app', () => {
   });

   it('renders findings-drawer', () => {
-    createComponent();
+    createComponent({});
     expect(wrapper.findComponent(FindingsDrawer).exists()).toBe(true);
   });
 });
@@ -272,9 +248,7 @@ describe('diffs/components/app', () => {
   it('marks current diff file based on currently highlighted row', async () => {
     window.location.hash = 'ABC_123';

-    createComponent({
-      shouldShow: true,
-    });
+    createComponent({ props: { shouldShow: true } });

     // Component uses nextTick so we wait until that has finished
     await nextTick();
@@ -284,15 +258,17 @@ describe('diffs/components/app', () => {

   describe('empty state', () => {
     it('renders empty state when no diff files exist', () => {
-      createComponent();
+      createComponent({});

       expect(wrapper.findComponent(NoChanges).exists()).toBe(true);
     });

     it('does not render empty state when diff files exist', () => {
-      createComponent({}, ({ state }) => {
-        state.diffs.diffFiles = ['anything'];
-        state.diffs.treeEntries['1'] = { type: 'blob', id: 1 };
+      createComponent({
+        extendStore: ({ state }) => {
+          state.diffs.diffFiles = ['anything'];
+          state.diffs.treeEntries['1'] = { type: 'blob', id: 1 };
+        },
       });

       expect(wrapper.findComponent(NoChanges).exists()).toBe(false);
@@ -308,8 +284,11 @@ describe('diffs/components/app', () => {
     let jumpSpy;

     function setup(componentProps) {
-      createComponent(componentProps, ({ state }) => {
-        state.diffs.commit = { id: 'SHA123' };
+      createComponent({
+        props: componentProps,
+        extendStore: ({ state }) => {
+          state.diffs.commit = { id: 'SHA123' };
+        },
       });

       moveSpy = jest.spyOn(wrapper.vm, 'moveToNeighboringCommit').mockImplementation(() => {});
@@ -385,12 +364,14 @@ describe('diffs/components/app', () => {
     let spy;

     beforeEach(() => {
-      createComponent({}, () => {
-        store.state.diffs.treeEntries = [
-          { type: 'blob', fileHash: '111', path: '111.js' },
-          { type: 'blob', fileHash: '222', path: '222.js' },
-          { type: 'blob', fileHash: '333', path: '333.js' },
-        ];
+      createComponent({
+        extendStore: () => {
+          store.state.diffs.treeEntries = [
+            { type: 'blob', fileHash: '111', path: '111.js' },
+            { type: 'blob', fileHash: '222', path: '222.js' },
+            { type: 'blob', fileHash: '333', path: '333.js' },
+          ];
+        },
       });
       spy = jest.spyOn(store, 'dispatch');
     });
@@ -459,8 +440,10 @@ describe('diffs/components/app', () => {
   });

   it('when the commit changes and the app is not loading it should update the history, refetch the diff data, and update the view', async () => {
-    createComponent({}, ({ state }) => {
-      state.diffs.commit = { ...state.diffs.commit, id: 'OLD' };
+    createComponent({
+      extendStore: ({ state }) => {
+        state.diffs.commit = { ...state.diffs.commit, id: 'OLD' };
+      },
     });
     spy();

@@ -482,9 +465,11 @@ describe('diffs/components/app', () => {
   `(
     'given `{ "isLoading": $isLoading, "oldSha": "$oldSha", "newSha": "$newSha" }`, nothing should happen',
     async ({ isLoading, oldSha, newSha }) => {
-      createComponent({}, ({ state }) => {
-        state.diffs.isLoading = isLoading;
-        state.diffs.commit = { ...state.diffs.commit, id: oldSha };
+      createComponent({
+        extendStore: ({ state }) => {
+          state.diffs.isLoading = isLoading;
+          state.diffs.commit = { ...state.diffs.commit, id: oldSha };
+        },
       });
       spy();

@@ -500,10 +485,12 @@ describe('diffs/components/app', () => {

   describe('diffs', () => {
     it('should render compare versions component', () => {
-      createComponent({}, ({ state }) => {
-        state.diffs.mergeRequestDiffs = diffsMockData;
-        state.diffs.targetBranchName = 'target-branch';
-        state.diffs.mergeRequestDiff = mergeRequestDiff;
+      createComponent({
+        extendStore: ({ state }) => {
+          state.diffs.mergeRequestDiffs = diffsMockData;
+          state.diffs.targetBranchName = 'target-branch';
+          state.diffs.mergeRequestDiff = mergeRequestDiff;
+        },
       });

       expect(wrapper.findComponent(CompareVersions).exists()).toBe(true);
@@ -517,12 +504,14 @@ describe('diffs/components/app', () => {
   describe('warnings', () => {
     describe('hidden files', () => {
       it('should render hidden files warning if render overflow warning is present', () => {
-        createComponent({}, ({ state }) => {
-          state.diffs.renderOverflowWarning = true;
-          state.diffs.realSize = '5';
-          state.diffs.plainDiffPath = 'plain diff path';
-          state.diffs.emailPatchPath = 'email patch path';
-          state.diffs.size = 1;
+        createComponent({
+          extendStore: ({ state }) => {
+            state.diffs.renderOverflowWarning = true;
+            state.diffs.realSize = '5';
+            state.diffs.plainDiffPath = 'plain diff path';
+            state.diffs.emailPatchPath = 'email patch path';
+            state.diffs.size = 1;
+          },
        });

        expect(wrapper.findComponent(HiddenFilesWarning).exists()).toBe(true);
@@ -539,19 +528,23 @@ describe('diffs/components/app', () => {
|
||||
describe('collapsed files', () => {
|
||||
it('should render the collapsed files warning if there are any automatically collapsed files', () => {
|
||||
createComponent({}, ({ state }) => {
|
||||
state.diffs.diffFiles = [{ viewer: { automaticallyCollapsed: true } }];
|
||||
createComponent({
|
||||
extendStore: ({ state }) => {
|
||||
state.diffs.diffFiles = [{ viewer: { automaticallyCollapsed: true } }];
|
||||
},
|
||||
});
|
||||
|
||||
expect(getCollapsedFilesWarning(wrapper).exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should not render the collapsed files warning if there are no automatically collapsed files', () => {
|
||||
createComponent({}, ({ state }) => {
|
||||
state.diffs.diffFiles = [
|
||||
{ viewer: { automaticallyCollapsed: false, manuallyCollapsed: true } },
|
||||
{ viewer: { automaticallyCollapsed: false, manuallyCollapsed: false } },
|
||||
];
|
||||
createComponent({
|
||||
extendStore: ({ state }) => {
|
||||
state.diffs.diffFiles = [
|
||||
{ viewer: { automaticallyCollapsed: false, manuallyCollapsed: true } },
|
||||
{ viewer: { automaticallyCollapsed: false, manuallyCollapsed: false } },
|
||||
];
|
||||
},
|
||||
});
|
||||
|
||||
expect(getCollapsedFilesWarning(wrapper).exists()).toBe(false);
|
||||
|
|
@ -560,23 +553,25 @@ describe('diffs/components/app', () => {
|
|||
});
|
||||
|
||||
it('should display commit widget if store has a commit', () => {
|
||||
createComponent({}, () => {
|
||||
store.state.diffs.commit = {
|
||||
author: 'John Doe',
|
||||
};
|
||||
createComponent({
|
||||
extendStore: () => {
|
||||
store.state.diffs.commit = { author: 'John Doe' };
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.findComponent(CommitWidget).exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should display diff file if there are diff files', () => {
|
||||
createComponent({}, ({ state }) => {
|
||||
state.diffs.diffFiles = [{ file_hash: '111', file_path: '111.js' }];
|
||||
state.diffs.treeEntries = {
|
||||
111: { type: 'blob', fileHash: '111', path: '111.js' },
|
||||
123: { type: 'blob', fileHash: '123', path: '123.js' },
|
||||
312: { type: 'blob', fileHash: '312', path: '312.js' },
|
||||
};
|
||||
createComponent({
|
||||
extendStore: ({ state }) => {
|
||||
state.diffs.diffFiles = [{ file_hash: '111', file_path: '111.js' }];
|
||||
state.diffs.treeEntries = {
|
||||
111: { type: 'blob', fileHash: '111', path: '111.js' },
|
||||
123: { type: 'blob', fileHash: '123', path: '123.js' },
|
||||
312: { type: 'blob', fileHash: '312', path: '312.js' },
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.findComponent({ name: 'DynamicScroller' }).exists()).toBe(true);
|
||||
|
|
@ -586,19 +581,21 @@ describe('diffs/components/app', () => {
|
|||
});
|
||||
|
||||
it('should always render diffs file tree', () => {
|
||||
createComponent();
|
||||
createComponent({});
|
||||
expect(wrapper.findComponent(DiffsFileTree).exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should pass renderDiffFiles to file tree as true when files are present', () => {
|
||||
createComponent({}, ({ state }) => {
|
||||
state.diffs.treeEntries = { 111: { type: 'blob', fileHash: '111', path: '111.js' } };
|
||||
createComponent({
|
||||
extendStore: ({ state }) => {
|
||||
state.diffs.treeEntries = { 111: { type: 'blob', fileHash: '111', path: '111.js' } };
|
||||
},
|
||||
});
|
||||
expect(wrapper.findComponent(DiffsFileTree).props('renderDiffFiles')).toBe(true);
|
||||
});
|
||||
|
||||
it('should pass renderDiffFiles to file tree as false without files', () => {
|
||||
createComponent();
|
||||
createComponent({});
|
||||
expect(wrapper.findComponent(DiffsFileTree).props('renderDiffFiles')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
|
@ -609,8 +606,10 @@ describe('diffs/components/app', () => {
|
|||
});
|
||||
|
||||
it('calls setShowTreeList when only 1 file', () => {
|
||||
createComponent({}, ({ state }) => {
|
||||
state.diffs.treeEntries = { 123: { type: 'blob', fileHash: '123' } };
|
||||
createComponent({
|
||||
extendStore: ({ state }) => {
|
||||
state.diffs.treeEntries = { 123: { type: 'blob', fileHash: '123' } };
|
||||
},
|
||||
});
|
||||
jest.spyOn(store, 'dispatch');
|
||||
wrapper.vm.setTreeDisplay();
|
||||
|
|
@ -622,11 +621,13 @@ describe('diffs/components/app', () => {
|
|||
});
|
||||
|
||||
it('calls setShowTreeList with true when more than 1 file is in tree entries map', () => {
|
||||
createComponent({}, ({ state }) => {
|
||||
state.diffs.treeEntries = {
|
||||
111: { type: 'blob', fileHash: '111', path: '111.js' },
|
||||
123: { type: 'blob', fileHash: '123', path: '123.js' },
|
||||
};
|
||||
createComponent({
|
||||
extendStore: ({ state }) => {
|
||||
state.diffs.treeEntries = {
|
||||
111: { type: 'blob', fileHash: '111', path: '111.js' },
|
||||
123: { type: 'blob', fileHash: '123', path: '123.js' },
|
||||
};
|
||||
},
|
||||
});
|
||||
jest.spyOn(store, 'dispatch');
|
||||
|
||||
|
|
@ -645,8 +646,10 @@ describe('diffs/components/app', () => {
|
|||
`('calls setShowTreeList with localstorage $showTreeList', ({ showTreeList }) => {
|
||||
localStorage.setItem('mr_tree_show', showTreeList);
|
||||
|
||||
createComponent({}, ({ state }) => {
|
||||
state.diffs.treeEntries['123'] = { sha: '123' };
|
||||
createComponent({
|
||||
extendStore: ({ state }) => {
|
||||
state.diffs.treeEntries['123'] = { sha: '123' };
|
||||
},
|
||||
});
|
||||
jest.spyOn(store, 'dispatch');
|
||||
|
||||
|
|
@ -667,18 +670,16 @@ describe('diffs/components/app', () => {
|
|||
});
|
||||
|
||||
it('renders a single diff', async () => {
|
||||
createComponent(
|
||||
undefined,
|
||||
({ state }) => {
|
||||
createComponent({
|
||||
extendStore: ({ state }) => {
|
||||
state.diffs.treeEntries = {
|
||||
123: { type: 'blob', fileHash: '123' },
|
||||
312: { type: 'blob', fileHash: '312' },
|
||||
};
|
||||
state.diffs.diffFiles.push({ file_hash: '312' });
|
||||
},
|
||||
undefined,
|
||||
{ viewDiffsFileByFile: true },
|
||||
);
|
||||
baseConfig: { viewDiffsFileByFile: true },
|
||||
});
|
||||
|
||||
await nextTick();
|
||||
|
||||
|
|
@ -692,14 +693,12 @@ describe('diffs/components/app', () => {
|
|||
};
|
||||
|
||||
it('re-checks one time after the file finishes loading', () => {
|
||||
createComponent(
|
||||
undefined,
|
||||
({ state }) => {
|
||||
createComponent({
|
||||
extendStore: ({ state }) => {
|
||||
state.diffs.diffFiles = [{ isLoadingFullFile: true }];
|
||||
},
|
||||
undefined,
|
||||
{ viewDiffsFileByFile: true },
|
||||
);
|
||||
baseConfig: { viewDiffsFileByFile: true },
|
||||
});
|
||||
|
||||
// The hash check is not called if the file is still marked as loading
|
||||
expect(hashSpy).toHaveBeenCalledTimes(0);
|
||||
|
|
@ -719,7 +718,7 @@ describe('diffs/components/app', () => {
|
|||
});
|
||||
|
||||
it('does not re-check when not in single-file mode', () => {
|
||||
createComponent();
|
||||
createComponent({});
|
||||
|
||||
eventHub.$emit(EVT_DISCUSSIONS_ASSIGNED);
|
||||
|
||||
|
|
@ -732,17 +731,15 @@ describe('diffs/components/app', () => {
|
|||
const paginator = () => fileByFileNav().findComponent(GlPagination);
|
||||
|
||||
it('sets previous button as disabled', async () => {
|
||||
createComponent(
|
||||
undefined,
|
||||
({ state }) => {
|
||||
createComponent({
|
||||
extendStore: ({ state }) => {
|
||||
state.diffs.treeEntries = {
|
||||
123: { type: 'blob', fileHash: '123' },
|
||||
312: { type: 'blob', fileHash: '312' },
|
||||
};
|
||||
},
|
||||
undefined,
|
||||
{ viewDiffsFileByFile: true },
|
||||
);
|
||||
baseConfig: { viewDiffsFileByFile: true },
|
||||
});
|
||||
|
||||
await nextTick();
|
||||
|
||||
|
|
@ -751,18 +748,16 @@ describe('diffs/components/app', () => {
|
|||
});
|
||||
|
||||
it('sets next button as disabled', async () => {
|
||||
createComponent(
|
||||
undefined,
|
||||
({ state }) => {
|
||||
createComponent({
|
||||
extendStore: ({ state }) => {
|
||||
state.diffs.treeEntries = {
|
||||
123: { type: 'blob', fileHash: '123' },
|
||||
312: { type: 'blob', fileHash: '312' },
|
||||
};
|
||||
state.diffs.currentDiffFileId = '312';
|
||||
},
|
||||
undefined,
|
||||
{ viewDiffsFileByFile: true },
|
||||
);
|
||||
baseConfig: { viewDiffsFileByFile: true },
|
||||
});
|
||||
|
||||
await nextTick();
|
||||
|
||||
|
|
@ -771,15 +766,13 @@ describe('diffs/components/app', () => {
|
|||
});
|
||||
|
||||
it("doesn't display when there's fewer than 2 files", async () => {
|
||||
createComponent(
|
||||
undefined,
|
||||
({ state }) => {
|
||||
createComponent({
|
||||
extendStore: ({ state }) => {
|
||||
state.diffs.treeEntries = { 123: { type: 'blob', fileHash: '123' } };
|
||||
state.diffs.currentDiffFileId = '123';
|
||||
},
|
||||
undefined,
|
||||
{ viewDiffsFileByFile: true },
|
||||
);
|
||||
baseConfig: { viewDiffsFileByFile: true },
|
||||
});
|
||||
|
||||
await nextTick();
|
||||
|
||||
|
|
@ -793,18 +786,16 @@ describe('diffs/components/app', () => {
|
|||
`(
|
||||
'calls navigateToDiffFileIndex with $index when $link is clicked',
|
||||
async ({ currentDiffFileId, targetFile }) => {
|
||||
createComponent(
|
||||
undefined,
|
||||
({ state }) => {
|
||||
createComponent({
|
||||
extendStore: ({ state }) => {
|
||||
state.diffs.treeEntries = {
|
||||
123: { type: 'blob', fileHash: '123', filePaths: { old: '1234', new: '123' } },
|
||||
312: { type: 'blob', fileHash: '312', filePaths: { old: '3124', new: '312' } },
|
||||
};
|
||||
state.diffs.currentDiffFileId = currentDiffFileId;
|
||||
},
|
||||
undefined,
|
||||
{ viewDiffsFileByFile: true },
|
||||
);
|
||||
baseConfig: { viewDiffsFileByFile: true },
|
||||
});
|
||||
|
||||
await nextTick();
|
||||
|
||||
|
|
@ -824,7 +815,7 @@ describe('diffs/components/app', () => {
|
|||
let loadSpy;
|
||||
|
||||
beforeEach(() => {
|
||||
createComponent();
|
||||
createComponent({});
|
||||
|
||||
store.state.diffs.diffFiles = [
|
||||
{
|
||||
|
|
@ -875,7 +866,7 @@ describe('diffs/components/app', () => {
|
|||
.reply(HTTP_STATUS_OK, { diff_files: [], pagination: {} });
|
||||
mock.onGet(new RegExp(ENDPOINT_METADATA_URL)).reply(HTTP_STATUS_OK, diffMetadata);
|
||||
|
||||
createComponent({ shouldShow: true, pinnedFileUrl });
|
||||
createComponent({ props: { shouldShow: true, pinnedFileUrl } });
|
||||
});
|
||||
|
||||
it('fetches and displays pinned file', async () => {
|
||||
|
|
@ -900,7 +891,7 @@ describe('diffs/components/app', () => {
|
|||
|
||||
describe('when adding a new comment to an existing review', () => {
|
||||
it('sends the correct tracking event', () => {
|
||||
createComponent({ shouldShow: true });
|
||||
createComponent({ props: { shouldShow: true } });
|
||||
notesEventHub.$emit('noteFormAddToReview', { name: 'noteFormAddToReview' });
|
||||
|
||||
expect(trackingSpy).toHaveBeenCalledWith(
|
||||
|
|
@ -913,7 +904,7 @@ describe('diffs/components/app', () => {
|
|||
|
||||
describe('when adding a comment to a new review', () => {
|
||||
it('sends the correct tracking event', () => {
|
||||
createComponent({ shouldShow: true });
|
||||
createComponent({ props: { shouldShow: true } });
|
||||
notesEventHub.$emit('noteFormStartReview', { name: 'noteFormStartReview' });
|
||||
|
||||
expect(trackingSpy).toHaveBeenCalledWith(
|
||||
|
|
|
|||
|
|
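Every hunk above is the same mechanical migration: `createComponent` moves from positional arguments (props, an extend-store callback, and a trailing base config) to a single options object. The helper itself is outside this diff; the sketch below shows the shape the new call sites assume. Only the option names `props`, `extendStore`, and `baseConfig` come from the call sites, and the store and mount wiring is an assumption for illustration, not the spec's actual implementation.

// A minimal sketch, assuming the spec's module-level `store` and `wrapper`:
function createComponent({ props = {}, extendStore = () => {}, baseConfig = {} } = {}) {
  store = createStore(); // assumed: a fresh Vuex store per test
  Object.assign(store.state.diffs, baseConfig); // assumed wiring for e.g. { viewDiffsFileByFile: true }
  extendStore(store); // call sites destructure ({ state }) and mutate state.diffs

  wrapper = shallowMount(App, { store, propsData: props });
}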
@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe ClustersHelper do
RSpec.describe ClustersHelper, feature_category: :deployment_management do
describe '#has_rbac_enabled?' do
context 'when kubernetes platform has been created' do
let(:platform_kubernetes) { build_stubbed(:cluster_platform_kubernetes) }

@@ -86,10 +86,6 @@ RSpec.describe ClustersHelper do
expect(subject[:kas_address]).to eq(Gitlab::Kas.external_url)
end

it 'displays GitLab version' do
expect(subject[:gitlab_version]).to eq(Gitlab.version_info)
end

it 'displays KAS version' do
expect(subject[:kas_version]).to eq(Gitlab::Kas.version_info)
end
@@ -8,8 +8,12 @@ RSpec.describe Keeps::Helpers::Milestones, feature_category: :tooling do
<<~YAML
- version: '17.0'
  date: '2024-05-16'
  manager_americas:
    - Some Manager
- version: '16.11'
  date: '2024-04-18'
  manager_apac_emea:
    - Some Other Manager
- version: '16.10'
  date: '2024-03-21'
- version: '16.9'
@@ -0,0 +1,15 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BackgroundMigration::BackfillClusterAgentTokensProjectId,
feature_category: :deployment_management,
schema: 20240216020102 do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :cluster_agent_tokens }
let(:backfill_column) { :project_id }
let(:backfill_via_table) { :cluster_agents }
let(:backfill_via_column) { :project_id }
let(:backfill_via_foreign_key) { :agent_id }
end
end
@@ -66,6 +66,36 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::UniquenessHelpers
expect(migration.trigger_exists?(table_name, trigger_name)).to eq(true)
end
end

context 'when table does not have a sequence' do
before do
allow(migration).to receive(:existing_sequence).with(table_name, :id).and_return([])
end

it 'raises SequenceError' do
expect do
ensure_unique_id
end.to raise_error(described_class::SequenceError, /Expected to find only one sequence for/)
end
end

context 'when table has multiple sequences attached to it' do
before do
connection.execute(<<~SQL)
CREATE SEQUENCE second_sequence
START 0
INCREMENT 1
MINVALUE 0
OWNED BY _test_partitioned_table.id;
SQL
end

it 'raises SequenceError' do
expect do
ensure_unique_id
end.to raise_error(described_class::SequenceError, /Expected to find only one sequence/)
end
end
end
end
end
@@ -8,6 +8,7 @@ RSpec.describe Gitlab::Database::PostgresSequence, type: :model, feature_categor
let(:schema) { ActiveRecord::Base.connection.current_schema }
let(:table_name) { '_test_table' }
let(:table_name_without_sequence) { '_test_table_without_sequence' }
let(:col_name) { :id }

before do
ActiveRecord::Base.connection.execute(<<~SQL)

@@ -21,15 +22,23 @@ RSpec.describe Gitlab::Database::PostgresSequence, type: :model, feature_categor
SQL
end

describe '#by_table_name' do
context 'when table does not have a sequence' do
it 'returns an empty collection' do
expect(described_class.by_table_name(table_name_without_sequence)).to be_empty
describe 'scopes' do
describe '#by_table_name' do
context 'when table does not have a sequence' do
it 'returns an empty collection' do
expect(described_class.by_table_name(table_name_without_sequence)).to be_empty
end
end

it 'returns the sequence for a given table' do
expect(described_class.by_table_name(table_name).first[:table_name]).to eq(table_name)
end
end

it 'returns the sequence for a given table' do
expect(described_class.by_table_name(table_name).first[:table_name]).to eq(table_name)
describe '#by_col_name' do
it 'returns the sequence for a col name' do
expect(described_class.by_col_name(col_name).first[:table_name]).to eq(table_name)
end
end
end
end
@@ -0,0 +1,33 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe QueueBackfillClusterAgentTokensProjectId, feature_category: :deployment_management do
let!(:batched_migration) { described_class::MIGRATION }

it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}

migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :cluster_agent_tokens,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE,
gitlab_schema: :gitlab_main_cell,
job_arguments: [
:project_id,
:cluster_agents,
:project_id,
:agent_id
]
)
}
end
end
end
@@ -1797,6 +1797,20 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do

expect { project.deployments.fast_destroy_all }.not_to exceed_query_limit(control)
end

context 'when repository was already removed' do
it 'removes deployment without any errors' do
project = create(:project, :repository)
environment = create(:environment, project: project)
deployment = create(:deployment, environment: environment, project: project)

Repositories::DestroyService.new(project.repository).execute
project.save! # to trigger a repository removal

expect { described_class.where(id: deployment).fast_destroy_all }
.to change { Deployment.count }.by(-1)
end
end
end

describe '#update_merge_request_metrics!' do
@@ -26,6 +26,19 @@ RSpec.describe API::PypiPackages, feature_category: :package_registry do
end
end

shared_context 'setup auth headers' do
let(:token) { personal_access_token.token }
let(:user_headers) { basic_auth_header(user.username, token) }
let(:headers) { user_headers.merge(workhorse_headers) }
end

shared_context 'add to project and group' do |user_type|
before do
project.send("add_#{user_type}", user)
group.send("add_#{user_type}", user)
end
end

context 'simple index API endpoint' do
let_it_be(:package) { create(:pypi_package, project: project) }
let_it_be(:package2) { create(:pypi_package, project: project) }

@@ -207,6 +220,8 @@ RSpec.describe API::PypiPackages, feature_category: :package_registry do
let(:url) { "/projects/#{project.id}/packages/pypi" }
let(:headers) { {} }
let(:requires_python) { '>=3.7' }
let(:keywords) { 'dog,puppy,voting,election' }
let(:description) { 'Example description' }
let(:base_params) do
{
requires_python: requires_python,

@@ -216,10 +231,10 @@ RSpec.describe API::PypiPackages, feature_category: :package_registry do
md5_digest: '1' * 32,
metadata_version: '2.3',
author_email: 'cschultz@example.com, snoopy@peanuts.com',
description: 'Example description',
description: description,
description_content_type: 'text/plain',
summary: 'A module for collecting votes from beagles.',
keywords: 'dog,puppy,voting,election'
keywords: keywords
}
end

@@ -317,7 +332,7 @@ RSpec.describe API::PypiPackages, feature_category: :package_registry do
end
end

context 'with required_python too big' do
context 'with requires_python too big' do
let(:requires_python) { 'x' * 256 }
let(:token) { personal_access_token.token }
let(:user_headers) { basic_auth_header(user.username, token) }

@@ -330,23 +345,43 @@ RSpec.describe API::PypiPackages, feature_category: :package_registry do
it_behaves_like 'process PyPI api request', :developer, :bad_request, true
end

context 'with description too big' do
let(:description) { 'x' * ::Packages::Pypi::Metadatum::MAX_DESCRIPTION_LENGTH + 1 }
let(:token) { personal_access_token.token }
let(:user_headers) { basic_auth_header(user.username, token) }
let(:headers) { user_headers.merge(workhorse_headers) }
context 'with keywords too big' do
include_context 'setup auth headers'
include_context 'add to project and group', 'developer'

before do
project.update_column(:visibility_level, Gitlab::VisibilityLevel::PRIVATE)
let(:keywords) { 'x' * 1025 }

it_behaves_like 'returning response status', :created

it 'truncates the keywords' do
subject

created_package = ::Packages::Package.pypi.last

expect(created_package.pypi_metadatum.keywords.size).to eq(1024)
end
end

it_behaves_like 'process PyPI api request', :developer, :created, true
context 'with description too big' do
include_context 'setup auth headers'
include_context 'add to project and group', 'developer'

let(:description) { 'x' * (::Packages::Pypi::Metadatum::MAX_DESCRIPTION_LENGTH + 1) }

it_behaves_like 'returning response status', :created

it 'truncates the description' do
subject

created_package = ::Packages::Package.pypi.last

expect(created_package.pypi_metadatum.description.size)
.to eq(::Packages::Pypi::Metadatum::MAX_DESCRIPTION_LENGTH)
end
end

context 'with an invalid package' do
let(:token) { personal_access_token.token }
let(:user_headers) { basic_auth_header(user.username, token) }
let(:headers) { user_headers.merge(workhorse_headers) }
include_context 'setup auth headers'

before do
params[:name] = '.$/@!^*'

@@ -357,9 +392,7 @@ RSpec.describe API::PypiPackages, feature_category: :package_registry do
end

context 'with an invalid sha256' do
let(:token) { personal_access_token.token }
let(:user_headers) { basic_auth_header(user.username, token) }
let(:headers) { user_headers.merge(workhorse_headers) }
include_context 'setup auth headers'

before do
params[:sha256_digest] = 'a' * 63 + '%'
@@ -0,0 +1,54 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BranchRules::BaseService, feature_category: :source_code_management do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:protected_branch) { create(:protected_branch) }

describe '#execute' do
subject(:execute) { described_class.new(branch_rule, user).execute(skip_authorization: skip_authorization) }

let(:branch_rule) { Projects::BranchRule.new(project, protected_branch) }

shared_examples 'missing_method_error' do |method_name|
it 'raises a missing method error' do
expect { execute }.to raise_error(
described_class::MISSING_METHOD_ERROR,
"Please define an `#{method_name}` method in #{described_class.name}"
)
end
end

context 'with skip_authorization: false' do
let(:skip_authorization) { false }

it_behaves_like 'missing_method_error', 'authorized?'
end

context 'with skip_authorization: true' do
let(:skip_authorization) { true }

context 'when branch_rule is an instance of Projects::BranchRule' do
it_behaves_like 'missing_method_error', 'execute_on_branch_rule'
end

context 'when branch_rule is not an instance of Projects::BranchRule' do
let(:branch_rule) { Project.new }

it 'returns an unknown branch rule type error' do
expect(execute.message).to eq('Unknown branch rule type.')
end
end

context 'when branch_rule is nil' do
let(:branch_rule) { nil }

it 'returns an unknown branch rule type error' do
expect(execute.message).to eq('Unknown branch rule type.')
end
end
end
end
end
@@ -5,41 +5,54 @@ require 'spec_helper'
RSpec.describe BranchRules::UpdateService, feature_category: :source_code_management do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:protected_branch) { create(:protected_branch) }
let_it_be(:protected_branch, reload: true) { create(:protected_branch) }

describe '#execute' do
let(:branch_rule) { Projects::BranchRule.new(project, protected_branch) }
let(:action_allowed) { true }
let(:update_service) { ProtectedBranches::UpdateService }
let(:update_service_instance) { instance_double(update_service) }
let(:ability_allowed) { true }
let(:new_name) { 'new_name' }
let(:errors) { ["Error 1", "Error 2"] }
let(:params) { { name: new_name } }
let(:skip_authorization) { false }

subject(:execute) { described_class.new(branch_rule, user, params).execute }
subject(:execute) do
described_class.new(branch_rule, user, params).execute(skip_authorization: skip_authorization)
end

before do
allow(Ability).to receive(:allowed?).and_return(true)
allow(Ability)
.to receive(:allowed?).with(user, :update_protected_branch, branch_rule)
.and_return(action_allowed)
allow(Ability).to receive(:allowed?)
.with(user, :update_branch_rule, branch_rule)
.and_return(ability_allowed)
end

context 'when the current_user cannot update the branch rule' do
let(:action_allowed) { false }
let(:ability_allowed) { false }

it 'raises an access denied error' do
expect { execute }.to raise_error(Gitlab::Access::AccessDeniedError)
end

context 'and skip_authorization is true' do
let(:skip_authorization) { true }

it 'does not raise an access denied error' do
expect { execute }.not_to raise_error
end
end
end

context 'when branch_rule is a Projects::BranchRule' do
it 'updates the ProtectedBranch and returns a success execute' do
expect(execute[:status]).to eq(:success)
let(:update_service) { ProtectedBranches::UpdateService }
let(:update_service_instance) { instance_double(update_service) }

it 'updates the ProtectedBranch and returns a success response' do
expect(execute).to be_success
expect(protected_branch.reload.name).to eq(new_name)
end

context 'if the update fails' do
let(:errors) { ["Error 1", "Error 2"] }

before do
allow(update_service).to receive(:new).and_return(update_service_instance)
allow(update_service_instance).to receive(:execute).and_return(protected_branch)

@@ -48,9 +61,17 @@ RSpec.describe BranchRules::UpdateService, feature_category: :source_code_manage
end

it 'returns an error' do
response = execute
expect(response = execute).to be_error
expect(response[:message]).to eq(errors)
expect(response[:status]).to eq(:error)
end
end

context 'when unpermitted params are provided' do
let(:params) { { name: new_name, not_permitted: 'not_permitted' } }

it 'removes them' do
expect(update_service).to receive(:new).with(project, user, { name: new_name }).and_call_original
execute
end
end
end

@@ -59,18 +80,8 @@ RSpec.describe BranchRules::UpdateService, feature_category: :source_code_manage
let(:branch_rule) { protected_branch }

it 'returns an error' do
response = execute
expect(response = execute).to be_error
expect(response[:message]).to eq('Unknown branch rule type.')
expect(response[:status]).to eq(:error)
end
end

context 'when unpermitted params are provided' do
let(:params) { { name: new_name, not_permitted: 'not_permitted' } }

it 'removes them' do
expect(update_service).to receive(:new).with(project, user, { name: new_name }).and_call_original
execute
end
end
end
@@ -93,23 +93,30 @@ RSpec.describe Packages::Pypi::CreatePackageService, :aggregate_failures, featur
end
end

context 'with a very long metadata description field' do
let(:max_length) { ::Packages::Pypi::Metadatum::MAX_DESCRIPTION_LENGTH }
let(:truncated_description) { ('x' * (max_length + 1)).truncate(max_length) }
shared_examples 'saves a very long metadata field' do |field_name:, max_length:|
let(:truncated_field) { ('x' * (max_length + 1)).truncate(max_length) }

before do
params.merge!(
description: 'x' * (max_length + 1)
{ field_name.to_sym => 'x' * (max_length + 1) }
)
end

it 'truncates the description field' do
it 'truncates the field' do
expect { subject }.to change { Packages::Package.pypi.count }.by(1)

expect(created_package.pypi_metadatum.description).to eq(truncated_description)
expect(created_package.pypi_metadatum.public_send(field_name)).to eq(truncated_field)
end
end

it_behaves_like 'saves a very long metadata field',
field_name: 'keywords',
max_length: ::Packages::Pypi::Metadatum::MAX_KEYWORDS_LENGTH

it_behaves_like 'saves a very long metadata field',
field_name: 'description',
max_length: ::Packages::Pypi::Metadatum::MAX_DESCRIPTION_LENGTH

context 'with an invalid metadata' do
let(:requires_python) { 'x' * 256 }